@vaultgradient/pq-agent 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +152 -0
- package/dist/client.d.ts +22 -0
- package/dist/client.js +501 -0
- package/dist/index.js +645 -0
- package/dist/runtime.d.ts +23 -0
- package/dist/runtime.js +48 -0
- package/package.json +68 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,645 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/index.ts
|
|
4
|
+
import { Command } from "commander";
|
|
5
|
+
import { homedir } from "os";
|
|
6
|
+
import { join, resolve as resolve2 } from "path";
|
|
7
|
+
|
|
8
|
+
// src/client.ts
|
|
9
|
+
import { randomUUID as randomUUID2 } from "crypto";
|
|
10
|
+
import { EventEmitter } from "events";
|
|
11
|
+
import WebSocket from "ws";
|
|
12
|
+
|
|
13
|
+
// ../protocol/dist/errors.js
|
|
14
|
+
// ../protocol/dist/errors.js
// WebSocket close codes used on the agent <-> control-plane link. The 4xxx
// range is application-defined; those codes signal auth/enrollment problems.
var WS_CLOSE_CODES = {
  GOING_AWAY: 1001,
  INTERNAL_ERROR: 1011,
  INVALID_AGENT_TOKEN: 4001,
  TENANT_NOT_ENROLLED: 4002,
  PROTOCOL_VERSION_MISMATCH: 4003
};
// Close codes that are not recoverable by retrying: the client must not
// reconnect after receiving one of these (see the "close" handler in
// createClient).
var NO_RECONNECT_CLOSE_CODES = new Set(
  [
    "INVALID_AGENT_TOKEN",
    "TENANT_NOT_ENROLLED",
    "PROTOCOL_VERSION_MISMATCH"
  ].map((name) => WS_CLOSE_CODES[name])
);
|
|
26
|
+
|
|
27
|
+
// ../protocol/dist/version.js
|
|
28
|
+
// Wire-protocol version sent on every connect (see the headers built in
// createClient's connect()); a mismatch corresponds to close code 4003
// (PROTOCOL_VERSION_MISMATCH) above, which is non-reconnectable.
var PROTOCOL_VERSION = 1;
var PROTOCOL_VERSION_HEADER = "x-pq-protocol-version";
|
|
30
|
+
|
|
31
|
+
// src/audit.ts
|
|
32
|
+
import { createHash } from "crypto";
|
|
33
|
+
// Seed for the audit hash chain: 64 hex zeros (the "no previous entry" slot).
var ZERO_HASH = "0".repeat(64);
// Serializes a JSON-compatible value deterministically: object keys are
// emitted in sorted order and keys whose value is undefined are dropped, so
// the same logical value always produces the same byte sequence (and hash).
// Throws for values JSON cannot represent (undefined, functions, symbols).
function canonicalJson(value) {
  if (value === null) return "null";
  const kind = typeof value;
  if (kind === "number" || kind === "boolean" || kind === "string") {
    return JSON.stringify(value);
  }
  if (Array.isArray(value)) {
    const items = value.map((item) => canonicalJson(item));
    return "[" + items.join(",") + "]";
  }
  if (kind === "object") {
    const record = value;
    const sortedKeys = Object.keys(record)
      .filter((key) => record[key] !== void 0)
      .sort();
    const pairs = sortedKeys.map(
      (key) => `${JSON.stringify(key)}:${canonicalJson(record[key])}`
    );
    return "{" + pairs.join(",") + "}";
  }
  throw new Error(`canonicalJson: unsupported value of type ${typeof value}`);
}
// Links one audit entry into the chain: SHA-256 over the previous hash
// followed by the canonical serialization of the entry, as lowercase hex.
function computeHash(prevHash, entry) {
  const hasher = createHash("sha256");
  hasher.update(prevHash);
  hasher.update(canonicalJson(entry));
  return hasher.digest("hex");
}
|
|
56
|
+
// Produces hash-chained audit entries. Every entry is stamped with the
// tenant/agent identity from cfg and linked to the previous entry through
// computeHash, so any tampering with earlier entries breaks the chain.
var AuditEmitter = class {
  cfg;
  // Hash of the most recently emitted entry; starts at the all-zero seed.
  prevHash = ZERO_HASH;
  constructor(cfg) {
    this.cfg = cfg;
  }
  // Completes a partial entry (identity + timestamp, with an optional `ts`
  // override from the caller), advances the chain, and returns the entry
  // together with its prev_hash/hash links.
  emit(partial) {
    const { ts: tsOverride, ...fields } = partial;
    const entry = {
      tenant_id: this.cfg.tenantId,
      agent_id: this.cfg.agentId,
      ts: tsOverride ?? (/* @__PURE__ */ new Date()).toISOString(),
      ...fields
    };
    const prev_hash = this.prevHash;
    const hash = computeHash(prev_hash, entry);
    this.prevHash = hash;
    return { entry, prev_hash, hash };
  }
  // For tests / observability.
  currentHash() {
    return this.prevHash;
  }
};
|
|
80
|
+
|
|
81
|
+
// src/dispatch.ts
|
|
82
|
+
import { randomUUID } from "crypto";
|
|
83
|
+
// Default per-request wall-clock budget (30 s), used by wrapRequest when the
// caller does not supply opts.timeoutMs.
var REQUEST_TIMEOUT_MS = 3e4;
// Result caps enforced by wrapRequest: results larger than 100k rows or
// 10 MiB of serialized JSON are rejected with a "too_large" error.
var MAX_ROWS = 1e5;
var MAX_BYTES = 10 * 1024 * 1024;
|
|
86
|
+
// Routes one inbound envelope to its handler and returns the outbound
// envelopes to send back.
// - "ping" is answered immediately with a "pong" echoing the request id.
// - "request.*" messages run through wrapRequest (timeout + result caps).
// - "control.*" messages are accepted but produce no output yet.
// - Unknown types produce an empty outbound list. Previously they fell off
//   the end of the switch and returned undefined, which made the caller
//   (handleFrame) throw when it iterated result.outbound.
async function dispatch(inbound, opts) {
  switch (inbound.type) {
    case "ping":
      return { outbound: [{ type: "pong", id: inbound.id, payload: {} }] };
    case "request.list_sources":
      return wrapRequest(inbound.id, opts, () => listSources(opts.runtime));
    case "request.describe_source":
      return wrapRequest(
        inbound.id,
        opts,
        () => describeSource(opts.runtime, inbound.payload.name, inbound.payload.sample_size)
      );
    case "request.list_endpoints":
      return wrapRequest(inbound.id, opts, () => listEndpoints(opts.runtime));
    case "request.call_endpoint":
      return wrapRequest(
        inbound.id,
        opts,
        () => callEndpoint(opts.runtime, inbound.payload.path)
      );
    case "request.query":
      return wrapRequest(
        inbound.id,
        opts,
        () => runQuery(opts.runtime, inbound.payload.expression)
      );
    case "control.rotate_token":
    case "control.update_config":
    case "control.shutdown":
      return { outbound: [] };
    default:
      // Fix: never return undefined for unrecognized message types.
      return { outbound: [] };
  }
}
|
|
118
|
+
// Runs one request handler under the shared guardrails:
//   1. a wall-clock timeout (opts.timeoutMs ?? REQUEST_TIMEOUT_MS),
//   2. normalization of thrown errors into a "response.error" envelope,
//   3. result caps (MAX_ROWS rows / MAX_BYTES of serialized JSON).
// On success emits one "response.chunk" with all rows followed by a
// "response.end" carrying totals and latency.
// Fix: the timeout timer is now cleared as soon as the race settles.
// Previously the setTimeout created by timeoutAfter() kept running (and kept
// the event loop alive) for the full timeout window after every request.
async function wrapRequest(requestId, opts, handler) {
  const timeoutMs = opts.timeoutMs ?? REQUEST_TIMEOUT_MS;
  const startedAt = Date.now();
  let timeoutId;
  let result;
  try {
    result = await Promise.race([
      handler(),
      new Promise((_, reject) => {
        timeoutId = setTimeout(() => reject(new TimeoutError()), timeoutMs);
      })
    ]);
  } catch (err) {
    if (err instanceof TimeoutError) {
      return errorResponse(requestId, {
        code: "timeout",
        message: `request exceeded ${timeoutMs}ms`
      });
    }
    return errorResponse(requestId, {
      code: "execution",
      message: err instanceof Error ? err.message : String(err)
    });
  } finally {
    // Runs on every exit path (success, timeout, handler error).
    clearTimeout(timeoutId);
  }
  const latency_ms = Date.now() - startedAt;
  if (!result.ok) {
    return errorResponse(requestId, { code: result.code, message: result.message });
  }
  const bytes = byteLengthOfJson(result.rows);
  if (result.rows.length > MAX_ROWS || bytes > MAX_BYTES) {
    return errorResponse(requestId, {
      code: "too_large",
      message: `result exceeded cap (rows=${result.rows.length}, bytes=${bytes})`,
      details: {
        row_count: result.rows.length,
        bytes,
        max_rows: MAX_ROWS,
        max_bytes: MAX_BYTES
      }
    });
  }
  return {
    outbound: [
      {
        type: "response.chunk",
        id: requestId,
        payload: { rows: result.rows, chunk_index: 0 }
      },
      {
        type: "response.end",
        id: requestId,
        payload: {
          total_rows: result.rows.length,
          latency_ms,
          // The published SourceManager.runQuery() returns rows without
          // exposing whether push-down was used. Slice 4 reports false until
          // the OSS API surfaces this; the wire field is still in the spec
          // and consumed by future audit/billing observability.
          pushed_down: false
        }
      }
    ]
  };
}
// Shapes a single "response.error" envelope for the given request id.
function errorResponse(requestId, payload) {
  return { outbound: [{ type: "response.error", id: requestId, payload }] };
}
|
|
211
|
+
// Marker error used to tell a timeout apart from a handler failure.
var TimeoutError = class extends Error {
};
// Returns a promise that never resolves and rejects with TimeoutError after
// ms milliseconds; intended as the losing arm of a Promise.race.
function timeoutAfter(ms) {
  return new Promise((_resolve, reject) => {
    setTimeout(() => {
      reject(new TimeoutError());
    }, ms);
  });
}
|
|
218
|
+
function byteLengthOfJson(value) {
|
|
219
|
+
return Buffer.byteLength(JSON.stringify(value), "utf8");
|
|
220
|
+
}
|
|
221
|
+
// Lists every configured source name with its last-known status (null when
// the status map has no entry for that name).
async function listSources(runtime) {
  const names = runtime.sources.getSourceNames();
  const statusByName = runtime.sources.getAllStatuses();
  return {
    ok: true,
    rows: names.map((name) => ({ name, status: statusByName[name] ?? null }))
  };
}
|
|
230
|
+
// Describes one configured source as a single row: its status, the union of
// field names observed in a small sample, and the sample rows themselves.
// sampleSize defaults to 5 and is clamped to at least 1 (and to the data
// length); a falsy status means the source is not configured.
async function describeSource(runtime, name, sampleSize) {
  const status = runtime.sources.getSourceStatus(name);
  if (!status) {
    return { ok: false, code: "not_found", message: `source "${name}" is not configured` };
  }
  const data = runtime.sources.getSourceData(name) ?? [];
  const requested = Math.max(1, sampleSize ?? 5);
  const sample = data.slice(0, Math.min(data.length, requested));
  const fieldNames = new Set();
  for (const row of sample) {
    if (row && typeof row === "object") {
      Object.keys(row).forEach((field) => fieldNames.add(field));
    }
  }
  return {
    ok: true,
    rows: [
      {
        name,
        status,
        fields: [...fieldNames],
        sample
      }
    ]
  };
}
|
|
256
|
+
// Returns every registered endpoint entry as result rows.
async function listEndpoints(runtime) {
  const rows = Array.from(runtime.endpoints.values());
  return { ok: true, rows };
}
|
|
260
|
+
// Resolves a registered endpoint path to its saved query and executes it.
async function callEndpoint(runtime, path) {
  const entry = runtime.endpoints.get(path);
  if (!entry) {
    return { ok: false, code: "not_found", message: `endpoint "${path}" is not registered` };
  }
  return runQuery(runtime, entry.query);
}
|
|
267
|
+
// Executes a query expression against the SourceManager. The raw result is
// normalized to an array of rows (a scalar result becomes a one-row array);
// thrown errors are mapped to an "execution" error result instead of
// propagating.
async function runQuery(runtime, expression) {
  try {
    const raw = await runtime.sources.runQuery(expression);
    return { ok: true, rows: Array.isArray(raw) ? raw : [raw] };
  } catch (err) {
    return {
      ok: false,
      code: "execution",
      message: err instanceof Error ? err.message : String(err)
    };
  }
}
|
|
277
|
+
// Structural guard for inbound frames: a valid envelope is a non-null object
// whose `type` and `id` are strings and whose `payload` is a non-null object.
function isInboundEnvelope(value) {
  if (value === null || typeof value !== "object") return false;
  const candidate = value;
  if (typeof candidate.type !== "string") return false;
  if (typeof candidate.id !== "string") return false;
  return typeof candidate.payload === "object" && candidate.payload !== null;
}
|
|
282
|
+
|
|
283
|
+
// src/reconnect.ts
|
|
284
|
+
// Reconnect backoff with full jitter: each attempt waits a uniformly random
// delay in [0, cap), and the cap doubles per attempt up to a ceiling.
// reset() restores the initial cap (the client arms it after the connection
// has stayed up for resetAfterStableMs).
var Backoff = class {
  initialMs;
  ceilingMs;
  resetAfterStableMs;
  // Current jitter cap for the next attempt.
  currentMs;
  constructor(opts = {}) {
    this.initialMs = opts.initialMs ?? 1e3;
    this.ceilingMs = opts.ceilingMs ?? 6e4;
    this.resetAfterStableMs = opts.resetAfterStableMs ?? 5 * 6e4;
    this.currentMs = this.initialMs;
  }
  // Returns the delay (ms) to wait before the next reconnect attempt, then
  // doubles the cap for the following attempt (up to ceiling). `random` is
  // injectable for deterministic tests.
  nextDelayMs(random = Math.random) {
    const cap = this.currentMs;
    this.currentMs = Math.min(cap * 2, this.ceilingMs);
    return Math.floor(random() * cap);
  }
  reset() {
    this.currentMs = this.initialMs;
  }
};
|
|
306
|
+
|
|
307
|
+
// src/client.ts
|
|
308
|
+
// Builds the agent-side control-plane client. It dials opts.url over
// WebSocket, answers inbound request frames via dispatch(), emits a
// hash-chained audit event after each request, and reconnects with jittered
// backoff on recoverable closes. Returns { start, stop, on }.
function createClient(opts) {
  const emitter = new EventEmitter();
  const backoff = new Backoff();
  const audit = new AuditEmitter({
    tenantId: opts.tenantId ?? "dev-tenant",
    agentId: opts.agentId ?? "dev-agent"
  });
  // Connection state shared across the closures below.
  let socket = null;
  let stopped = false;
  let reconnectTimer = null;
  let stableTimer = null;
  // Rejects anything other than wss://, except ws:// to localhost-ish hosts
  // when opts.allowInsecure is set (dev escape hatch).
  function assertSafeUrl(url) {
    if (url.startsWith("wss://")) return;
    if (url.startsWith("ws://")) {
      if (!opts.allowInsecure) {
        throw new Error(
          `Refusing to connect: URL must use wss:// (got ${url}). Pass allowInsecure for local dev only.`
        );
      }
      let host;
      try {
        host = new URL(url).hostname;
      } catch {
        throw new Error(`Refusing to connect: invalid URL (${url}).`);
      }
      // Even with allowInsecure, plaintext is restricted to local targets.
      const localish = host === "localhost" || host === "127.0.0.1" || host === "::1" || host === "0.0.0.0" || host.endsWith(".local");
      if (!localish) {
        throw new Error(
          `Refusing to connect: --insecure is only allowed for localhost targets, got host="${host}". Use wss:// for ${url}.`
        );
      }
      return;
    }
    throw new Error(
      `Refusing to connect: URL must be ws:// or wss:// (got ${url}).`
    );
  }
  // Opens one WebSocket and wires its event handlers. Auth + protocol
  // version travel as headers on the upgrade request.
  async function connect() {
    assertSafeUrl(opts.url);
    const ws = new WebSocket(opts.url, {
      headers: {
        Authorization: `Bearer ${opts.token}`,
        [PROTOCOL_VERSION_HEADER]: String(PROTOCOL_VERSION)
      }
    });
    socket = ws;
    ws.on("open", () => {
      // Only reset the backoff after the link has stayed up for a while;
      // otherwise a flapping connection would retry at full speed.
      stableTimer = setTimeout(
        () => backoff.reset(),
        backoff.resetAfterStableMs
      );
      emitter.emit("open");
    });
    ws.on("message", (raw) => {
      // Fire-and-forget: frame errors surface via the "error" event.
      void handleFrame(ws, raw.toString());
    });
    ws.on("close", (code) => {
      if (stableTimer) {
        clearTimeout(stableTimer);
        stableTimer = null;
      }
      socket = null;
      emitter.emit("close", code);
      if (stopped) return;
      // Auth/enrollment/version-mismatch closes must not be retried.
      if (NO_RECONNECT_CLOSE_CODES.has(code)) {
        emitter.emit(
          "error",
          new Error(
            `WS closed ${code} \u2014 non-recoverable per \xA77. Re-enrollment or operator action required.`
          )
        );
        return;
      }
      scheduleReconnect();
    });
    ws.on("error", (err) => {
      emitter.emit("error", err);
    });
  }
  // Parses one raw frame, validates the envelope, dispatches it, sends the
  // resulting outbound envelopes, and (for request.* frames) appends an
  // audit event.
  async function handleFrame(ws, raw) {
    let parsed;
    try {
      parsed = JSON.parse(raw);
    } catch (err) {
      emitter.emit("error", new Error(`Invalid JSON frame: ${String(err)}`));
      return;
    }
    if (!isInboundEnvelope(parsed)) {
      emitter.emit("error", new Error("Frame missing required envelope fields"));
      return;
    }
    const inbound = parsed;
    const shouldAudit = inbound.type.startsWith("request.");
    const start = Date.now();
    const result = await dispatch(inbound, {
      runtime: opts.runtime,
      // Spread only when set so dispatch sees a truly-absent key otherwise.
      ...opts.requestTimeoutMs !== void 0 && {
        timeoutMs: opts.requestTimeoutMs
      }
    });
    const latency_ms = Date.now() - start;
    for (const outbound of result.outbound) {
      send(ws, outbound);
    }
    if (shouldAudit) {
      const auditEnvelope = buildAuditEvent(
        inbound,
        result.outbound,
        latency_ms
      );
      send(ws, auditEnvelope);
    }
  }
  // Summarizes the request/response pair into an audit entry and wraps it in
  // an "event.audit" envelope with a fresh id.
  function buildAuditEvent(inbound, outbound, latency_ms) {
    const summary = summarizeOutbound(outbound);
    let expression;
    let endpoint_path;
    if (inbound.type === "request.query") expression = inbound.payload.expression;
    if (inbound.type === "request.call_endpoint") endpoint_path = inbound.payload.path;
    const auditEvent = audit.emit({
      request_id: inbound.id,
      caller_id: "unknown",
      // §13 spec gap — see audit.ts header
      message_type: inbound.type,
      ...expression !== void 0 && { expression },
      ...endpoint_path !== void 0 && { endpoint_path },
      pushed_down: summary.pushed_down,
      row_count: summary.row_count,
      bytes: summary.bytes,
      latency_ms,
      outcome: summary.outcome,
      ...summary.error_code !== void 0 && { error_code: summary.error_code }
    });
    return {
      type: "event.audit",
      id: randomUUID2(),
      payload: auditEvent
    };
  }
  // Serializes and sends an envelope; silently drops it if the socket is not
  // open (e.g. mid-close).
  function send(ws, envelope) {
    if (ws.readyState !== WebSocket.OPEN) return;
    ws.send(JSON.stringify(envelope));
  }
  // Arms a single reconnect attempt after a jittered backoff delay; failures
  // of that attempt re-arm the next one.
  function scheduleReconnect() {
    const delay = backoff.nextDelayMs();
    reconnectTimer = setTimeout(() => {
      reconnectTimer = null;
      if (stopped) return;
      void connect().catch((err) => {
        emitter.emit("error", err);
        scheduleReconnect();
      });
    }, delay);
  }
  return {
    // Connects (clearing any previous stop request).
    async start() {
      stopped = false;
      await connect();
    },
    // Stops reconnecting, cancels pending timers, and closes the socket.
    async stop() {
      stopped = true;
      if (reconnectTimer) {
        clearTimeout(reconnectTimer);
        reconnectTimer = null;
      }
      if (stableTimer) {
        clearTimeout(stableTimer);
        stableTimer = null;
      }
      if (socket) {
        socket.close();
        socket = null;
      }
    },
    // Subscribes to "open" | "close" | "error" events.
    on(event, listener) {
      emitter.on(event, listener);
    }
  };
}
|
|
487
|
+
// Reduces a request's outbound envelopes to the fields the audit event
// needs: outcome (+ error code when any "response.error" was sent), the
// total row count and pushed_down flag from "response.end", and the
// serialized size of all "response.chunk" payloads.
// Fix: chunk size is now measured with Buffer.byteLength (UTF-8 bytes),
// matching byteLengthOfJson and the MAX_BYTES cap elsewhere in this file;
// String.prototype.length counts UTF-16 code units and under-reported
// payloads containing multi-byte characters.
function summarizeOutbound(outbound) {
  let row_count = 0;
  let bytes = 0;
  let pushed_down = false;
  let outcome = "success";
  let error_code;
  for (const env of outbound) {
    if (env.type === "response.error") {
      outcome = "error";
      error_code = env.payload.code;
    } else if (env.type === "response.chunk") {
      bytes += Buffer.byteLength(JSON.stringify(env.payload), "utf8");
    } else if (env.type === "response.end") {
      row_count = env.payload.total_rows;
      pushed_down = env.payload.pushed_down;
    }
  }
  // Omit error_code entirely on success so the audit entry has no
  // undefined-valued keys.
  return error_code !== void 0 ? { outcome, error_code, row_count, bytes, pushed_down } : { outcome, row_count, bytes, pushed_down };
}
|
|
506
|
+
|
|
507
|
+
// src/enrollment.ts
|
|
508
|
+
import { promises as fs } from "fs";
|
|
509
|
+
import { dirname } from "path";
|
|
510
|
+
// Loads the persisted agent-token JSON from disk. A missing file is the
// normal "not yet enrolled" state and yields null; any other read or parse
// failure propagates to the caller.
async function readAgentToken(path) {
  let raw;
  try {
    raw = await fs.readFile(path, "utf8");
  } catch (err) {
    if (err.code === "ENOENT") return null;
    throw err;
  }
  return JSON.parse(raw);
}
|
|
519
|
+
// Picks the bearer credential for the control-plane handshake. A stored
// agent token always wins; otherwise the one-shot enrollment token from the
// environment is used for first connect; with neither, fail with setup
// instructions.
function resolveBearerToken(stored, enrollmentTokenEnv) {
  if (stored) {
    return { token: stored.agentToken, kind: "agent" };
  }
  if (!enrollmentTokenEnv) {
    throw new Error(
      "No agent token on disk and no PQ_AGENT_ENROLLMENT_TOKEN set. Run `pq-agent enroll <token>` first, or set the env var for first connect."
    );
  }
  return { token: enrollmentTokenEnv, kind: "enrollment" };
}
|
|
528
|
+
|
|
529
|
+
// src/runtime.ts
|
|
530
|
+
import { dirname as dirname2, resolve } from "path";
|
|
531
|
+
import { SourceManager } from "@vaultgradient/pipequery-cli/sources";
|
|
532
|
+
|
|
533
|
+
// src/config.ts
|
|
534
|
+
import { readFile } from "fs/promises";
|
|
535
|
+
import { parse as parseYaml } from "yaml";
|
|
536
|
+
// Reads and parses pipequery.yaml; rejects any document whose top level is
// not a YAML mapping (i.e. does not parse to a non-null object).
async function loadPipequeryConfig(path) {
  const text = await readFile(path, "utf8");
  const config = parseYaml(text);
  const isObject = typeof config === "object" && config !== null;
  if (!isObject) {
    throw new Error(`pipequery config at ${path} did not parse to an object`);
  }
  return config;
}
|
|
544
|
+
|
|
545
|
+
// src/runtime.ts
|
|
546
|
+
// src/runtime.ts
// The agent's data plane: a SourceManager with every configured source
// attached, plus the endpoint registry (path -> saved query entry).
var AgentRuntime = class _AgentRuntime {
  sources;
  endpoints;
  constructor(sources, endpoints) {
    this.sources = sources;
    this.endpoints = endpoints;
  }
  // Loads pipequery.yaml from disk. Relative paths inside the config resolve
  // against the config file's directory (passed to fromConfig as cwd).
  static async fromConfigFile(configPath) {
    const absolutePath = resolve2(configPath);
    const configDir = dirname2(absolutePath);
    const config = await loadPipequeryConfig(absolutePath);
    return _AgentRuntime.fromConfig(config, configDir);
  }
  // Builds a runtime from an already-parsed config object.
  static async fromConfig(config, cwd) {
    const sources = new SourceManager(cwd);
    for (const [name, sourceConfig] of Object.entries(config.sources ?? {})) {
      await sources.addSource(name, sourceConfig);
    }
    const endpoints = new Map(
      Object.entries(config.endpoints ?? {}).map(([path, ep]) => [
        path,
        { path, query: ep.query }
      ])
    );
    return new _AgentRuntime(sources, endpoints);
  }
  // Releases all source resources.
  async dispose() {
    await this.sources.dispose();
  }
};
|
|
574
|
+
|
|
575
|
+
// src/index.ts
|
|
576
|
+
// Default on-disk location for the agent token, following the XDG base
// directory layout (~/.local/share unless XDG_DATA_HOME is set).
var DEFAULT_TOKEN_PATH = join(
  process.env.XDG_DATA_HOME ?? join(homedir(), ".local", "share"),
  "pipequery",
  "agent.token"
);
// CLI entry point (commander). Subcommands: `run` (connect + serve) and
// `status` (stub).
var program = new Command();
program.name("pq-agent").description(
  "PipeQuery hosted agent \u2014 dials the control plane over WSS and serves MCP traffic from inside your network."
).version("0.0.0");
// `run`: every option can also come from the environment; the flag value
// wins, the env var is the default.
program.command("run").description("Connect to the control plane and serve requests.").option(
  "--url <url>",
  "Control-plane WSS URL",
  process.env.PQ_AGENT_CONTROL_PLANE_URL ?? "ws://127.0.0.1:8787"
).option(
  "--config <path>",
  "Path to pipequery.yaml (sources + endpoints)",
  process.env.PQ_AGENT_CONFIG ?? "./pipequery.yaml"
).option(
  "--token-file <path>",
  "Where to read/write the agent token",
  process.env.PQ_AGENT_TOKEN_FILE ?? DEFAULT_TOKEN_PATH
).option(
  "--insecure",
  "Allow ws:// (dev only \u2014 refuses against prod control plane in real impl)",
  false
).action(
  async (opts) => {
    // Credential resolution: stored agent token first, then the one-shot
    // enrollment token from the environment (resolveBearerToken throws if
    // neither exists).
    const stored = await readAgentToken(opts.tokenFile);
    const bearer = resolveBearerToken(
      stored,
      process.env.PQ_AGENT_ENROLLMENT_TOKEN
    );
    console.log(`[agent] loading config from ${resolve2(opts.config)}`);
    const runtime = await AgentRuntime.fromConfigFile(opts.config);
    console.log(
      `[agent] loaded ${runtime.sources.getSourceNames().length} source(s), ${runtime.endpoints.size} endpoint(s)`
    );
    const client = createClient({
      url: opts.url,
      token: bearer.token,
      runtime,
      allowInsecure: opts.insecure
    });
    client.on("open", () => {
      console.log(`[agent] connected to ${opts.url} (auth=${bearer.kind})`);
    });
    client.on("close", (code) => {
      console.log(`[agent] disconnected (code=${code})`);
    });
    client.on("error", (err) => {
      console.error(`[agent] error: ${err.message}`);
    });
    // Graceful shutdown on SIGINT/SIGTERM: stop the client (no more
    // reconnects), release source resources, then exit.
    const shutdown = async (sig) => {
      console.log(`[agent] ${sig} received, draining\u2026`);
      await client.stop();
      await runtime.dispose();
      process.exit(0);
    };
    process.on("SIGINT", () => void shutdown("SIGINT"));
    process.on("SIGTERM", () => void shutdown("SIGTERM"));
    await client.start();
  }
);
// `status`: placeholder — prints a not-implemented notice.
program.command("status").description("Print the current agent token + control-plane config.").action(() => {
  console.log("status command not implemented yet");
});
// Top-level error handler: print the message and exit non-zero.
program.parseAsync(process.argv).catch((err) => {
  console.error(err.message);
  process.exit(1);
});
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { SourceManager } from '@vaultgradient/pipequery-cli/sources';
|
|
2
|
+
|
|
3
|
+
/** Parsed shape of pipequery.yaml: source configs plus endpoint definitions. */
interface PipequeryYaml {
  sources?: Record<string, unknown>;
  endpoints?: Record<string, {
    query: string;
  }>;
}

/** One registered endpoint: its path and the saved query it executes. */
interface EndpointEntry {
  path: string;
  query: string;
}
/**
 * The agent's data plane: a SourceManager with all configured sources plus
 * the endpoint registry. Construct via the static factories — the
 * constructor is private.
 */
declare class AgentRuntime {
  readonly sources: SourceManager;
  readonly endpoints: Map<string, EndpointEntry>;
  private constructor();
  /** Loads pipequery.yaml from disk and builds a runtime from it. */
  static fromConfigFile(configPath: string): Promise<AgentRuntime>;
  /** Builds a runtime from an already-parsed config; cwd anchors relative paths. */
  static fromConfig(config: PipequeryYaml, cwd: string): Promise<AgentRuntime>;
  /** Releases all source resources. */
  dispose(): Promise<void>;
}

export { AgentRuntime, type EndpointEntry };
|
package/dist/runtime.js
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
// src/runtime.ts
|
|
2
|
+
import { dirname, resolve } from "path";
|
|
3
|
+
import { SourceManager } from "@vaultgradient/pipequery-cli/sources";
|
|
4
|
+
|
|
5
|
+
// src/config.ts
|
|
6
|
+
import { readFile } from "fs/promises";
|
|
7
|
+
import { parse as parseYaml } from "yaml";
|
|
8
|
+
// Reads pipequery.yaml and parses it; anything that does not parse to a
// non-null object at the top level is rejected.
async function loadPipequeryConfig(path) {
  const text = await readFile(path, "utf8");
  const parsed = parseYaml(text);
  if (parsed === null || typeof parsed !== "object") {
    throw new Error(`pipequery config at ${path} did not parse to an object`);
  }
  return parsed;
}
|
|
16
|
+
|
|
17
|
+
// src/runtime.ts
|
|
18
|
+
// src/runtime.ts
// The agent's data plane: a SourceManager holding every configured source,
// plus the endpoint registry (path -> saved query entry).
var AgentRuntime = class _AgentRuntime {
  sources;
  endpoints;
  constructor(sources, endpoints) {
    this.sources = sources;
    this.endpoints = endpoints;
  }
  // Loads pipequery.yaml from disk; relative paths inside the config resolve
  // against the config file's directory.
  static async fromConfigFile(configPath) {
    const absolutePath = resolve(configPath);
    const configDir = dirname(absolutePath);
    const config = await loadPipequeryConfig(absolutePath);
    return _AgentRuntime.fromConfig(config, configDir);
  }
  // Builds a runtime from an already-parsed config object.
  static async fromConfig(config, cwd) {
    const sources = new SourceManager(cwd);
    for (const [name, sourceConfig] of Object.entries(config.sources ?? {})) {
      await sources.addSource(name, sourceConfig);
    }
    const endpoints = new Map(
      Object.entries(config.endpoints ?? {}).map(([path, ep]) => [
        path,
        { path, query: ep.query }
      ])
    );
    return new _AgentRuntime(sources, endpoints);
  }
  // Releases all source resources.
  async dispose() {
    await this.sources.dispose();
  }
};
export {
  AgentRuntime
};
|