opencode-mem-agents 0.3.1 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -21,6 +21,7 @@
   * └── SQLite + FTS5
   */
  import { tool } from "@opencode-ai/plugin";
+ import { existsSync, mkdirSync, readFileSync, readdirSync, renameSync, unlinkSync, writeFileSync, } from "fs";
  import { fileURLToPath } from "url";
  import { dirname, join } from "path";
  const z = tool.schema;

@@ -29,14 +30,28 @@ const __dirname = dirname(__filename);
  // ---------------------------------------------------------------------------
  // Configuration
  // ---------------------------------------------------------------------------
- const WORKER_PORT = parseInt(process.env.OPENCODE_MEM_PORT ?? "37778", 10);
+ const WORKER_PORT = parsePort(process.env.OPENCODE_MEM_PORT, 37778);
  const WORKER_HOST = process.env.OPENCODE_MEM_HOST ?? "127.0.0.1";
  const WORKER_BASE = `http://${WORKER_HOST}:${WORKER_PORT}`;
+ const WORKER_API_TOKEN = process.env.OPENCODE_MEM_API_TOKEN ?? "";
  const HEALTH_TIMEOUT_MS = 2000;
  const STARTUP_TIMEOUT_MS = 15000;
+ const WORKER_REQUEST_TIMEOUT_MS = 5000;
  const FLUSH_INTERVAL_MS = 10_000;
  const MAX_BUFFER_SIZE = 20;
  const CONTEXT_CACHE_TTL_MS = 60_000;
+ const MAX_CONTEXT_CACHE_ENTRIES = 40;
+ const MAX_FLUSH_CONCURRENCY = 8;
+ function parsePort(rawValue, fallback) {
+     if (!rawValue)
+         return fallback;
+     const parsed = parseInt(rawValue, 10);
+     if (!Number.isFinite(parsed))
+         return fallback;
+     if (parsed < 1 || parsed > 65535)
+         return fallback;
+     return parsed;
+ }
  const TOOL_SIGNALS = {
      // HIGH — mutations and explicit saves (sent immediately)
      write: "high",
@@ -105,12 +120,15 @@ async function isWorkerHealthy() {
      }
  }
  const DATA_DIR = process.env.OPENCODE_MEM_DATA_DIR ?? `${process.env.HOME}/.opencode-mem`;
+ const PENDING_QUEUE_FILE = join(DATA_DIR, `pending-observations-${process.pid}.json`);
+ const PENDING_QUEUE_FILE_PATTERN = /^pending-observations(?:-\d+)?\.json$/;
+ if (!existsSync(DATA_DIR))
+     mkdirSync(DATA_DIR, { recursive: true });
  async function startWorker() {
      if (await isWorkerHealthy())
          return true;
      const workerScript = join(__dirname, "worker.js");
      const { spawn } = await import("child_process");
-     const { existsSync } = await import("fs");
      if (!existsSync(workerScript)) {
          console.error(`[opencode-mem] Worker script not found: ${workerScript}`);
          return false;

@@ -146,44 +164,216 @@ async function workerGet(path, params) {
              url.searchParams.set(k, v);
          }
      }
-     const controller = new AbortController();
-     const timeout = setTimeout(() => controller.abort(), 5000);
      try {
-         const res = await fetch(url.toString(), { signal: controller.signal });
+         const res = await fetchWithTimeout(url.toString(), {
+             headers: workerHeaders(),
+         });
          if (!res.ok)
-             return JSON.stringify({ error: `HTTP ${res.status}` });
+             return await toErrorPayload(res);
          return await res.text();
      }
-     finally {
-         clearTimeout(timeout);
+     catch (error) {
+         return JSON.stringify({
+             error: `network_error:${error instanceof Error ? error.message : "request_failed"}`,
+         });
      }
  }
  async function workerPost(path, body) {
-     const controller = new AbortController();
-     const timeout = setTimeout(() => controller.abort(), 5000);
      try {
-         const res = await fetch(`${WORKER_BASE}${path}`, {
+         const res = await fetchWithTimeout(`${WORKER_BASE}${path}`, {
              method: "POST",
-             headers: { "Content-Type": "application/json" },
+             headers: workerHeaders("application/json"),
              body: JSON.stringify(body),
-             signal: controller.signal,
          });
          if (!res.ok)
-             return JSON.stringify({ error: `HTTP ${res.status}` });
+             return await toErrorPayload(res);
          return await res.text();
      }
+     catch (error) {
+         return JSON.stringify({
+             error: `network_error:${error instanceof Error ? error.message : "request_failed"}`,
+         });
+     }
+ }
+ async function fetchWithTimeout(url, init, timeoutMs = WORKER_REQUEST_TIMEOUT_MS) {
+     const controller = new AbortController();
+     const timeout = setTimeout(() => controller.abort(), timeoutMs);
+     try {
+         return await fetch(url, { ...init, signal: controller.signal });
+     }
      finally {
          clearTimeout(timeout);
      }
  }
+ async function toErrorPayload(res) {
+     const text = await res.text();
+     try {
+         const parsed = JSON.parse(text);
+         return JSON.stringify({
+             error: `HTTP ${res.status}`,
+             body: parsed,
+         });
+     }
+     catch {
+         return JSON.stringify({
+             error: `HTTP ${res.status}`,
+             body: text.slice(0, 1000),
+         });
+     }
+ }
+ function workerHeaders(contentType) {
+     const headers = {};
+     if (contentType)
+         headers["Content-Type"] = contentType;
+     if (WORKER_API_TOKEN)
+         headers["Authorization"] = `Bearer ${WORKER_API_TOKEN}`;
+     return headers;
+ }
+ function buildToolResultPayload(obs) {
+     return {
+         sessionId: obs.sessionId,
+         tool: obs.tool,
+         callId: obs.callId,
+         args: obs.args,
+         output: obs.output,
+         title: obs.title,
+         files_modified: obs.files_modified ?? [],
+         metadata: {
+             signal: obs.signal,
+             agent: obs.agent,
+             timestamp: obs.timestamp,
+         },
+     };
+ }
+ async function postToolResult(obs) {
+     try {
+         const response = await fetchWithTimeout(`${WORKER_BASE}/api/session/tool-result`, {
+             method: "POST",
+             headers: workerHeaders("application/json"),
+             body: JSON.stringify(buildToolResultPayload(obs)),
+         });
+         return response.ok;
+     }
+     catch {
+         return false;
+     }
+ }
+ function buildContextCacheKey(sessionId, agent) {
+     return [
+         sessionId,
+         agent.workflowId || "default-workflow",
+         agent.agentName || "default-agent",
+         agent.phase || "default-phase",
+     ].join("|");
+ }
+ function pruneContextCache(cache) {
+     if (cache.size <= MAX_CONTEXT_CACHE_ENTRIES)
+         return;
+     const sorted = Array.from(cache.entries()).sort((a, b) => a[1].timestamp - b[1].timestamp);
+     const overflow = cache.size - MAX_CONTEXT_CACHE_ENTRIES;
+     for (let i = 0; i < overflow; i += 1) {
+         const [key] = sorted[i];
+         cache.delete(key);
+     }
+ }
+ function formatRoleResults(raw, maxItems, maxItemLength) {
+     try {
+         const parsed = JSON.parse(raw);
+         const observations = Array.isArray(parsed.observations) ? parsed.observations : [];
+         if (observations.length === 0)
+             return "";
+         const lines = ["## Role-Relevant Observations"];
+         for (const row of observations.slice(0, maxItems)) {
+             const id = typeof row.id === "number" ? row.id : 0;
+             const type = typeof row.type === "string" ? row.type : "observation";
+             const titleValue = typeof row.title === "string" ? row.title : "";
+             const textValue = typeof row.text === "string" ? row.text : "";
+             const content = (titleValue || textValue || "(empty)").slice(0, maxItemLength);
+             lines.push(`- #${id} [${type}] ${content}`);
+         }
+         return lines.join("\n");
+     }
+     catch {
+         return "";
+     }
+ }
+ function isBufferedObservation(value) {
+     if (!value || typeof value !== "object")
+         return false;
+     const row = value;
+     return (typeof row.sessionId === "string" &&
+         typeof row.tool === "string" &&
+         typeof row.callId === "string" &&
+         typeof row.output === "string" &&
+         typeof row.signal === "string" &&
+         typeof row.timestamp === "number" &&
+         typeof row.agent === "object" &&
+         row.agent !== null);
+ }
  function createObservationBuffer() {
-     const pending = [];
+     const pending = loadPendingFromDisk();
      let timer = null;
      let flushing = false;
      let flushFailures = 0;
      const MAX_FLUSH_FAILURES = 3;
      const CIRCUIT_RESET_MS = 30_000;
      let circuitOpenAt = 0;
+     const MAX_RETRY_QUEUE_SIZE = 2000;
+     function pendingKey(obs) {
+         return `${obs.sessionId}|${obs.callId}|${obs.tool}|${obs.timestamp}`;
+     }
+     function loadPendingFromDisk() {
+         try {
+             const entries = existsSync(DATA_DIR) ? readdirSync(DATA_DIR) : [];
+             const queueFiles = entries.filter((name) => PENDING_QUEUE_FILE_PATTERN.test(name));
+             if (queueFiles.length === 0)
+                 return [];
+             const merged = new Map();
+             for (const fileName of queueFiles) {
+                 const path = join(DATA_DIR, fileName);
+                 try {
+                     const raw = readFileSync(path, "utf-8");
+                     const parsed = JSON.parse(raw);
+                     if (!Array.isArray(parsed))
+                         continue;
+                     for (const row of parsed) {
+                         if (!isBufferedObservation(row))
+                             continue;
+                         merged.set(pendingKey(row), row);
+                     }
+                 }
+                 catch {
+                     // best effort read
+                 }
+             }
+             return Array.from(merged.values());
+         }
+         catch {
+             return [];
+         }
+     }
+     function persistPendingToDisk() {
+         try {
+             if (pending.length > 1) {
+                 const deduped = new Map();
+                 for (const row of pending) {
+                     deduped.set(pendingKey(row), row);
+                 }
+                 pending.splice(0, pending.length, ...Array.from(deduped.values()));
+             }
+             if (pending.length === 0) {
+                 if (existsSync(PENDING_QUEUE_FILE))
+                     unlinkSync(PENDING_QUEUE_FILE);
+                 return;
+             }
+             const tempPath = `${PENDING_QUEUE_FILE}.tmp`;
+             writeFileSync(tempPath, JSON.stringify(pending), "utf-8");
+             renameSync(tempPath, PENDING_QUEUE_FILE);
+         }
+         catch {
+             // best effort durability
+         }
+     }
      async function flush() {
          if (flushing || pending.length === 0)
              return;
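
Note: the observation buffer is now backed by a per-process queue file under DATA_DIR, written with a temp-file-plus-rename step so a crash mid-write cannot corrupt the queue. A minimal sketch of that persistence pattern, separate from the plugin code (paths here are illustrative):

    // Crash-safe JSON persistence: serialize to a temp file, then rename over the target.
    // rename() on the same filesystem replaces the file in a single step, so readers never
    // observe a half-written queue.
    import { renameSync, writeFileSync } from "fs";
    import { join } from "path";

    function persistAtomically(dir: string, fileName: string, rows: unknown[]): void {
        const target = join(dir, fileName);
        const temp = `${target}.tmp`;
        writeFileSync(temp, JSON.stringify(rows), "utf-8");
        renameSync(temp, target);
    }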
@@ -196,26 +386,21 @@ function createObservationBuffer() {
          flushing = true;
          const batch = pending.splice(0);
          try {
-             const results = await Promise.allSettled(batch.map((obs) => fetch(`${WORKER_BASE}/api/session/tool-result`, {
-                 method: "POST",
-                 headers: { "Content-Type": "application/json" },
-                 body: JSON.stringify({
-                     sessionId: obs.sessionId,
-                     tool: obs.tool,
-                     callId: obs.callId,
-                     args: obs.args,
-                     output: obs.output,
-                     title: obs.title,
-                     files_modified: obs.files_modified ?? [],
-                     metadata: {
-                         signal: obs.signal,
-                         agent: obs.agent,
-                         timestamp: obs.timestamp,
-                     },
-                 }),
-             })));
-             const failures = results.filter((r) => r.status === "rejected").length;
-             if (failures > 0) {
+             const failed = [];
+             for (let i = 0; i < batch.length; i += MAX_FLUSH_CONCURRENCY) {
+                 const slice = batch.slice(i, i + MAX_FLUSH_CONCURRENCY);
+                 const results = await Promise.all(slice.map(async (obs) => ({ ok: await postToolResult(obs), obs })));
+                 for (const result of results) {
+                     if (!result.ok) {
+                         failed.push(result.obs);
+                     }
+                 }
+             }
+             if (failed.length > 0) {
+                 pending.unshift(...failed);
+                 if (pending.length > MAX_RETRY_QUEUE_SIZE) {
+                     pending.splice(MAX_RETRY_QUEUE_SIZE);
+                 }
                  flushFailures++;
                  if (flushFailures >= MAX_FLUSH_FAILURES) {
                      circuitOpenAt = Date.now();
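
Note: the flush path above replaces the old fire-everything Promise.allSettled with bounded concurrency: the batch is processed in slices of MAX_FLUSH_CONCURRENCY, and anything that fails is re-queued instead of dropped. A small generic sketch of that pattern (send stands in for postToolResult and is an assumption of this sketch):

    // Process items in fixed-size slices and return the ones that failed,
    // so the caller can put them back on its retry queue.
    async function flushInSlices<T>(
        batch: T[],
        limit: number,
        send: (item: T) => Promise<boolean>,
    ): Promise<T[]> {
        const failed: T[] = [];
        for (let i = 0; i < batch.length; i += limit) {
            const slice = batch.slice(i, i + limit);
            const results = await Promise.all(
                slice.map(async (item) => ({ ok: await send(item), item })),
            );
            for (const result of results) {
                if (!result.ok) failed.push(result.item);
            }
        }
        return failed;
    }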
@@ -225,9 +410,9 @@ function createObservationBuffer() {
              else {
                  flushFailures = 0;
              }
-             // Don't re-queue batch — accept data loss over unbounded growth
          }
          finally {
+             persistPendingToDisk();
              flushing = false;
          }
      }

@@ -235,35 +420,23 @@ function createObservationBuffer() {
          if (timer)
              return;
          timer = setInterval(() => flush().catch(() => { }), FLUSH_INTERVAL_MS);
+         if (pending.length > 0) {
+             flush().catch(() => { });
+         }
      }
      function add(obs) {
          pending.push(obs);
+         if (pending.length > MAX_RETRY_QUEUE_SIZE) {
+             pending.splice(0, pending.length - MAX_RETRY_QUEUE_SIZE);
+         }
+         persistPendingToDisk();
          if (pending.length >= MAX_BUFFER_SIZE)
              flush().catch(() => { });
      }
      async function sendImmediate(obs) {
-         try {
-             await fetch(`${WORKER_BASE}/api/session/tool-result`, {
-                 method: "POST",
-                 headers: { "Content-Type": "application/json" },
-                 body: JSON.stringify({
-                     sessionId: obs.sessionId,
-                     tool: obs.tool,
-                     callId: obs.callId,
-                     args: obs.args,
-                     output: obs.output,
-                     title: obs.title,
-                     files_modified: obs.files_modified ?? [],
-                     metadata: {
-                         signal: obs.signal,
-                         agent: obs.agent,
-                         timestamp: obs.timestamp,
-                     },
-                 }),
-             });
-         }
-         catch {
-             // Worker unavailable
+         const ok = await postToolResult(obs);
+         if (!ok) {
+             add(obs);
          }
      }
      return { start, add, flush, sendImmediate };
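
Note: sendImmediate also changes behaviour; a high-signal observation that cannot be posted right away now falls back into the buffered (and persisted) queue rather than being silently dropped. In outline (post and enqueue are placeholders for postToolResult and the buffer's add):

    async function sendImmediate<T>(
        obs: T,
        post: (obs: T) => Promise<boolean>,   // e.g. postToolResult
        enqueue: (obs: T) => void,            // e.g. observationBuffer.add
    ): Promise<void> {
        const ok = await post(obs);
        if (!ok) enqueue(obs); // worker unreachable: keep it for the next flush
    }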
@@ -278,7 +451,7 @@ const plugin = async (_input) => {
      }
      const observationBuffer = createObservationBuffer();
      observationBuffer.start();
-     let contextCache = null;
+     const contextCache = new Map();
      return {
          // ------------------------------------------------------------------
          // Tools

@@ -294,6 +467,18 @@ const plugin = async (_input) => {
                    .string()
                    .optional()
                    .describe("Filter by type: decision, bugfix, feature, refactor, discovery, change"),
+                signal: z
+                    .enum(["high", "medium", "low"])
+                    .optional()
+                    .describe("Filter by signal level"),
+                offset: z
+                    .number()
+                    .optional()
+                    .describe("Pagination offset"),
+                orderBy: z
+                    .enum(["created_desc", "created_asc", "id_desc", "id_asc"])
+                    .optional()
+                    .describe("Sort order"),
                dateStart: z.string().optional().describe("ISO date start filter"),
                dateEnd: z.string().optional().describe("ISO date end filter"),
                since_id: z

@@ -311,6 +496,9 @@ const plugin = async (_input) => {
                    limit: String(args.limit ?? 20),
                    project: args.project ?? "",
                    type: args.type ?? "",
+                    signal: args.signal ?? "",
+                    offset: args.offset !== undefined ? String(args.offset) : "",
+                    orderBy: args.orderBy ?? "",
                    dateStart: args.dateStart ?? "",
                    dateEnd: args.dateEnd ?? "",
                    since_id: args.since_id !== undefined ? String(args.since_id) : "",

@@ -338,6 +526,10 @@ const plugin = async (_input) => {
                    .optional()
                    .describe("Records after anchor (default 3)"),
                project: z.string().optional().describe("Filter by project"),
+                format: z
+                    .enum(["text", "json"])
+                    .optional()
+                    .describe("Response format (default text)"),
            },
            async execute(args) {
                return await workerGet("/api/timeline", {

@@ -346,6 +538,7 @@ const plugin = async (_input) => {
                    depth_before: String(args.depth_before ?? 3),
                    depth_after: String(args.depth_after ?? 3),
                    project: args.project ?? "",
+                    format: args.format ?? "",
                });
            },
        }),

@@ -415,6 +608,7 @@ const plugin = async (_input) => {
                const project = wfId || ctx.directory.split("/").pop() || "opencode-mem";
                return await workerGet("/api/activity", {
                    project,
+                    role: args.role ?? agentCtx.agentName ?? "default",
                });
            },
        }),

@@ -536,22 +730,23 @@ const plugin = async (_input) => {
        // ------------------------------------------------------------------
        "experimental.chat.system.transform": async (hookInput, hookOutput) => {
            if (!(await isWorkerHealthy())) {
-                 contextCache = null;
+                 contextCache.clear();
                return;
            }
            const MAX_CONTEXT_BYTES = 4096;
            const MAX_CONTEXT_ITEMS = 10;
            const MAX_ITEM_LENGTH = 300;
-             const now = Date.now();
-             if (contextCache &&
-                 now - contextCache.timestamp < CONTEXT_CACHE_TTL_MS) {
-                 if (contextCache.content.length > 100) {
-                     hookOutput.system.push(contextCache.content);
-                 }
-                 return;
-             }
            try {
                const agentCtx = getAgentContext();
+                 const now = Date.now();
+                 const cacheKey = buildContextCacheKey(hookInput.sessionID ?? "default", agentCtx);
+                 const cached = contextCache.get(cacheKey);
+                 if (cached && now - cached.timestamp < CONTEXT_CACHE_TTL_MS) {
+                     if (cached.content.trim().length > 0) {
+                         hookOutput.system.push(cached.content);
+                     }
+                     return;
+                 }
                const roleQuery = getRoleQuery(agentCtx.agentName);
                const blocks = [];
                const [roleResults, sessionContext] = await Promise.all([

@@ -562,6 +757,7 @@ const plugin = async (_input) => {
                    }).catch(() => ""),
                    workerGet("/api/context/session", {
                        sessionId: hookInput.sessionID ?? "default",
+                        project: agentCtx.workflowId || "",
                        limit: String(MAX_CONTEXT_ITEMS),
                    }).catch(() => ""),
                ]);

@@ -569,17 +765,21 @@ const plugin = async (_input) => {
                    blocks.push(`[Memory] Workflow: ${agentCtx.workflowId} | Agent: ${agentCtx.agentName}` +
                        (agentCtx.phase ? ` | Phase: ${agentCtx.phase}` : ""));
                }
-                 if (roleResults && roleResults.length > 50)
-                     blocks.push(roleResults);
-                 if (sessionContext && sessionContext.length > 100)
+                 if (roleResults && roleResults.length > 50) {
+                     const formattedRoleResults = formatRoleResults(roleResults, MAX_CONTEXT_ITEMS, MAX_ITEM_LENGTH);
+                     if (formattedRoleResults)
+                         blocks.push(formattedRoleResults);
+                 }
+                 if (sessionContext && sessionContext.length > 40)
                    blocks.push(sessionContext);
                let content = blocks.join("\n\n");
                // Enforce hard cap on injected context size
                if (content.length > MAX_CONTEXT_BYTES) {
                    content = content.substring(0, MAX_CONTEXT_BYTES) + "\n[...truncated]";
                }
-                 contextCache = { content, timestamp: now };
-                 if (content.length > 100)
+                 contextCache.set(cacheKey, { content, timestamp: now });
+                 pruneContextCache(contextCache);
+                 if (content.trim().length > 0)
                    hookOutput.system.push(content);
            }
            catch {
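
Note: the system-prompt hook now caches injected context per session, workflow, agent, and phase instead of using a single shared entry; entries expire after CONTEXT_CACHE_TTL_MS and the oldest are pruned once the map grows past MAX_CONTEXT_CACHE_ENTRIES. A condensed sketch of that cache, using the same constants as the diff:

    type CacheEntry = { content: string; timestamp: number };

    const TTL_MS = 60_000;   // CONTEXT_CACHE_TTL_MS
    const MAX_ENTRIES = 40;  // MAX_CONTEXT_CACHE_ENTRIES
    const cache = new Map<string, CacheEntry>();

    function cacheKey(sessionId: string, workflowId?: string, agentName?: string, phase?: string): string {
        return [
            sessionId,
            workflowId || "default-workflow",
            agentName || "default-agent",
            phase || "default-phase",
        ].join("|");
    }

    function getFresh(key: string, now: number): string | undefined {
        const hit = cache.get(key);
        return hit && now - hit.timestamp < TTL_MS ? hit.content : undefined;
    }

    function put(key: string, content: string, now: number): void {
        cache.set(key, { content, timestamp: now });
        if (cache.size <= MAX_ENTRIES) return;
        const oldestFirst = [...cache.entries()].sort((a, b) => a[1].timestamp - b[1].timestamp);
        const overflow = cache.size - MAX_ENTRIES;
        for (let i = 0; i < overflow; i += 1) cache.delete(oldestFirst[i][0]);
    }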
@@ -0,0 +1,14 @@
+ export interface WorkerConfig {
+     port: number;
+     host: string;
+     dataDir: string;
+     dbPath: string;
+     pidFile: string;
+     maxBodyBytes: number;
+     apiToken: string;
+     corsOrigin: string;
+     traceRetentionDays: number;
+     traceMaxPayloadChars: number;
+ }
+ export declare const WORKER_VERSION = "0.3.1";
+ export declare function readWorkerConfig(): WorkerConfig;

@@ -0,0 +1,44 @@
+ import { existsSync, mkdirSync } from "fs";
+ import { join } from "path";
+ import { homedir } from "os";
+ export const WORKER_VERSION = "0.3.1";
+ export function readWorkerConfig() {
+     const port = parsePort(process.env.OPENCODE_MEM_PORT, 37778);
+     const host = process.env.OPENCODE_MEM_HOST ?? "127.0.0.1";
+     const dataDir = process.env.OPENCODE_MEM_DATA_DIR ?? join(homedir(), ".opencode-mem");
+     const traceRetentionDays = clampInt(process.env.OPENCODE_MEM_TRACE_RETENTION_DAYS, 1, 365, 7);
+     const traceMaxPayloadChars = clampInt(process.env.OPENCODE_MEM_TRACE_MAX_PAYLOAD_CHARS, 512, 200_000, 4_000);
+     if (!existsSync(dataDir)) {
+         mkdirSync(dataDir, { recursive: true });
+     }
+     return {
+         port,
+         host,
+         dataDir,
+         dbPath: join(dataDir, "opencode-mem.db"),
+         pidFile: join(dataDir, "worker.pid"),
+         maxBodyBytes: 64 * 1024,
+         apiToken: process.env.OPENCODE_MEM_API_TOKEN ?? "",
+         corsOrigin: (process.env.OPENCODE_MEM_CORS_ORIGIN ?? "").trim(),
+         traceRetentionDays,
+         traceMaxPayloadChars,
+     };
+ }
+ function clampInt(rawValue, min, max, fallback) {
+     if (!rawValue)
+         return fallback;
+     const parsed = parseInt(rawValue, 10);
+     if (!Number.isFinite(parsed))
+         return fallback;
+     return Math.min(max, Math.max(min, parsed));
+ }
+ function parsePort(rawValue, fallback) {
+     if (!rawValue)
+         return fallback;
+     const parsed = parseInt(rawValue, 10);
+     if (!Number.isFinite(parsed))
+         return fallback;
+     if (parsed < 1 || parsed > 65535)
+         return fallback;
+     return parsed;
+ }
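
Note: the new worker-config module resolves every setting from the environment with guarded parsing; clampInt in particular never throws, it either falls back or clamps into range. A standalone illustration (same logic as above, with example inputs for the trace-retention setting):

    function clampInt(rawValue: string | undefined, min: number, max: number, fallback: number): number {
        if (!rawValue) return fallback;
        const parsed = parseInt(rawValue, 10);
        if (!Number.isFinite(parsed)) return fallback;
        return Math.min(max, Math.max(min, parsed));
    }

    clampInt(undefined, 1, 365, 7); // 7   (unset -> fallback)
    clampInt("abc", 1, 365, 7);     // 7   (non-numeric -> fallback)
    clampInt("0", 1, 365, 7);       // 1   (clamped up to the minimum)
    clampInt("900", 1, 365, 7);     // 365 (clamped down to the maximum)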
@@ -0,0 +1,23 @@
+ import type http from "http";
+ export interface RequestContext {
+     traceId: string;
+     method: string;
+     path: string;
+     startedAt: number;
+ }
+ export type TraceDirection = "in" | "out" | "error";
+ export type TraceLogger = (ctx: RequestContext, direction: TraceDirection, status: number, payload: unknown, durationMs?: number) => void;
+ export declare class InputValidationError extends Error {
+     constructor(message: string);
+ }
+ export declare function readBody(req: http.IncomingMessage, maxBytes: number): Promise<string>;
+ export declare function parseSchema<T>(schema: {
+     parse: (value: unknown) => T;
+ }, value: unknown): T;
+ export declare function sendJson(ctx: RequestContext, res: http.ServerResponse, status: number, data: unknown, traceLog: TraceLogger): void;
+ export declare function sendText(ctx: RequestContext, res: http.ServerResponse, status: number, data: string, traceLog: TraceLogger): void;
+ export declare function sendHtml(ctx: RequestContext, res: http.ServerResponse, status: number, html: string, traceLog: TraceLogger): void;
+ export declare function sendError(ctx: RequestContext, res: http.ServerResponse, status: number, code: string, message: string, traceLog: TraceLogger, details?: unknown): void;
+ export declare function applyCors(req: http.IncomingMessage, res: http.ServerResponse, corsOrigin: string): boolean;
+ export declare function requiresAuth(path: string, apiToken: string): boolean;
+ export declare function isAuthorized(req: http.IncomingMessage, apiToken: string): boolean;
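
Note: these declarations are the worker-side counterpart of the token handling added to the plugin: when OPENCODE_MEM_API_TOKEN is set, every plugin-to-worker request carries a Bearer header (see workerHeaders in the index.js diff), and the worker can gate routes with requiresAuth/isAuthorized. A client-side sketch of the header handling only, since the worker implementation itself is not part of this diff; the endpoint and values are illustrative:

    const token = process.env.OPENCODE_MEM_API_TOKEN ?? "";
    const headers: Record<string, string> = { "Content-Type": "application/json" };
    if (token) headers["Authorization"] = `Bearer ${token}`;

    // Illustrative call against the default local worker address.
    await fetch("http://127.0.0.1:37778/api/activity?project=example", { headers });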