@wipcomputer/wip-ldm-os 0.4.73-alpha.11 → 0.4.73-alpha.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,6 +6,27 @@ import { homedir } from "os";
6
6
  import { promisify } from "util";
7
7
  import { randomUUID } from "crypto";
8
8
  var execAsync = promisify(exec);
9
+ var GATEWAY_HOST = "127.0.0.1";
10
+ var DEFAULT_GATEWAY_PORT = 18789;
11
+ var DEFAULT_INBOX_PORT = 18790;
12
+ var GATEWAY_TIMEOUT_MS = 15e3;
13
+ var OP_CLI_TIMEOUT_MS = 1e4;
14
+ var EMBEDDING_API_URL = "https://api.openai.com/v1/embeddings";
15
+ var DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small";
16
+ var DEFAULT_EMBEDDING_DIMS = 1536;
17
+ var VECTOR_SEARCH_ROW_LIMIT = 1e3;
18
+ var RECENCY_DECAY_RATE = 0.01;
19
+ var RECENCY_FLOOR = 0.5;
20
+ var FRESHNESS_FRESH_DAYS = 3;
21
+ var FRESHNESS_RECENT_DAYS = 7;
22
+ var FRESHNESS_AGING_DAYS = 14;
23
+ var DEFAULT_SEARCH_LIMIT = 5;
24
+ var WORKSPACE_MAX_DEPTH = 4;
25
+ var WORKSPACE_MAX_EXCERPTS = 5;
26
+ var WORKSPACE_MAX_RESULTS = 10;
27
+ var SKILL_EXEC_TIMEOUT_MS = 12e4;
28
+ var SKILL_EXEC_MAX_BUFFER = 10 * 1024 * 1024;
29
+ var MS_PER_DAY = 1e3 * 60 * 60 * 24;
9
30
  var HOME = process.env.HOME || homedir();
10
31
  var LDM_ROOT = process.env.LDM_ROOT || join(HOME, ".ldm");
11
32
  function resolveConfig(overrides) {
@@ -14,9 +35,9 @@ function resolveConfig(overrides) {
14
35
  openclawDir,
15
36
  workspaceDir: overrides?.workspaceDir || join(openclawDir, "workspace"),
16
37
  dbPath: overrides?.dbPath || join(openclawDir, "memory", "context-embeddings.sqlite"),
17
- inboxPort: overrides?.inboxPort || parseInt(process.env.LESA_BRIDGE_INBOX_PORT || "18790", 10),
18
- embeddingModel: overrides?.embeddingModel || "text-embedding-3-small",
19
- embeddingDimensions: overrides?.embeddingDimensions || 1536
38
+ inboxPort: overrides?.inboxPort || parseInt(process.env.LESA_BRIDGE_INBOX_PORT || String(DEFAULT_INBOX_PORT), 10),
39
+ embeddingModel: overrides?.embeddingModel || DEFAULT_EMBEDDING_MODEL,
40
+ embeddingDimensions: overrides?.embeddingDimensions || DEFAULT_EMBEDDING_DIMS
20
41
  };
21
42
  }
22
43
  function resolveConfigMulti(overrides) {
@@ -29,9 +50,9 @@ function resolveConfigMulti(overrides) {
29
50
  openclawDir,
30
51
  workspaceDir: raw.workspaceDir || overrides?.workspaceDir || join(openclawDir, "workspace"),
31
52
  dbPath: raw.dbPath || overrides?.dbPath || join(openclawDir, "memory", "context-embeddings.sqlite"),
32
- inboxPort: raw.inboxPort || overrides?.inboxPort || parseInt(process.env.LESA_BRIDGE_INBOX_PORT || "18790", 10),
33
- embeddingModel: raw.embeddingModel || overrides?.embeddingModel || "text-embedding-3-small",
34
- embeddingDimensions: raw.embeddingDimensions || overrides?.embeddingDimensions || 1536
53
+ inboxPort: raw.inboxPort || overrides?.inboxPort || parseInt(process.env.LESA_BRIDGE_INBOX_PORT || String(DEFAULT_INBOX_PORT), 10),
54
+ embeddingModel: raw.embeddingModel || overrides?.embeddingModel || DEFAULT_EMBEDDING_MODEL,
55
+ embeddingDimensions: raw.embeddingDimensions || overrides?.embeddingDimensions || DEFAULT_EMBEDDING_DIMS
35
56
  };
36
57
  } catch {
37
58
  }
@@ -53,7 +74,7 @@ function resolveApiKey(openclawDir) {
53
74
  `op read "op://Agent Secrets/OpenAI API/api key" 2>/dev/null`,
54
75
  {
55
76
  env: { ...process.env, OP_SERVICE_ACCOUNT_TOKEN: saToken },
56
- timeout: 1e4,
77
+ timeout: OP_CLI_TIMEOUT_MS,
57
78
  encoding: "utf-8"
58
79
  }
59
80
  ).trim();
@@ -76,7 +97,7 @@ function resolveGatewayConfig(openclawDir) {
76
97
  }
77
98
  const config = JSON.parse(readFileSync(configPath, "utf-8"));
78
99
  const token = config?.gateway?.auth?.token;
79
- const port = config?.gateway?.port || 18789;
100
+ const port = config?.gateway?.port || DEFAULT_GATEWAY_PORT;
80
101
  if (!token) {
81
102
  throw new Error("No gateway.auth.token found in openclaw.json");
82
103
  }
@@ -262,37 +283,54 @@ async function sendMessage(openclawDir, message, options) {
262
283
  const { token, port } = resolveGatewayConfig(openclawDir);
263
284
  const agentId = options?.agentId || "main";
264
285
  const senderLabel = options?.senderLabel || "Claude Code";
265
- const response = await fetch(`http://127.0.0.1:${port}/v1/chat/completions`, {
266
- method: "POST",
267
- headers: {
268
- Authorization: `Bearer ${token}`,
269
- "Content-Type": "application/json",
270
- "x-openclaw-scopes": "operator.read,operator.write",
271
- "x-openclaw-session-key": `agent:${agentId}:main`
272
- },
273
- body: JSON.stringify({
274
- model: `openclaw/${agentId}`,
275
- messages: [
276
- {
277
- role: "user",
278
- content: `[${senderLabel}]: ${message}`
279
- }
280
- ]
281
- })
282
- });
283
- if (!response.ok) {
284
- const body = await response.text();
285
- throw new Error(`Gateway returned ${response.status}: ${body}`);
286
- }
287
- const data = await response.json();
288
- const reply = data.choices?.[0]?.message?.content;
289
- if (!reply) {
290
- throw new Error("No response content from gateway");
286
+ const controller = new AbortController();
287
+ const timeoutId = setTimeout(() => controller.abort(), GATEWAY_TIMEOUT_MS);
288
+ try {
289
+ const response = await fetch(
290
+ `http://${GATEWAY_HOST}:${port}/v1/chat/completions`,
291
+ {
292
+ method: "POST",
293
+ headers: {
294
+ Authorization: `Bearer ${token}`,
295
+ "Content-Type": "application/json",
296
+ "x-openclaw-scopes": "operator.read,operator.write",
297
+ "x-openclaw-session-key": `agent:${agentId}:main`
298
+ },
299
+ body: JSON.stringify({
300
+ model: `openclaw/${agentId}`,
301
+ messages: [
302
+ {
303
+ role: "user",
304
+ content: `[${senderLabel}]: ${message}`
305
+ }
306
+ ]
307
+ }),
308
+ signal: controller.signal
309
+ }
310
+ );
311
+ clearTimeout(timeoutId);
312
+ if (!response.ok) {
313
+ const body = await response.text();
314
+ throw new Error(`Gateway returned ${response.status}: ${body}`);
315
+ }
316
+ const data = await response.json();
317
+ const reply = data.choices?.[0]?.message?.content;
318
+ if (!reply) {
319
+ throw new Error("No response content from gateway");
320
+ }
321
+ return reply;
322
+ } catch (err) {
323
+ clearTimeout(timeoutId);
324
+ if (err.name === "AbortError") {
325
+ throw new Error(
326
+ "Gateway timeout: Lesa may be busy or the gateway is processing another request. Try again in a moment."
327
+ );
328
+ }
329
+ throw err;
291
330
  }
292
- return reply;
293
331
  }
294
- async function getQueryEmbedding(text, apiKey, model = "text-embedding-3-small", dimensions = 1536) {
295
- const response = await fetch("https://api.openai.com/v1/embeddings", {
332
+ async function getQueryEmbedding(text, apiKey, model = DEFAULT_EMBEDDING_MODEL, dimensions = DEFAULT_EMBEDDING_DIMS) {
333
+ const response = await fetch(EMBEDDING_API_URL, {
296
334
  method: "POST",
297
335
  headers: {
298
336
  Authorization: `Bearer ${apiKey}`,
@@ -331,15 +369,15 @@ function cosineSimilarity(a, b) {
331
369
  return denom === 0 ? 0 : dot / denom;
332
370
  }
333
371
  function recencyWeight(ageDays) {
334
- return Math.max(0.5, 1 - ageDays * 0.01);
372
+ return Math.max(RECENCY_FLOOR, 1 - ageDays * RECENCY_DECAY_RATE);
335
373
  }
336
374
  function freshnessLabel(ageDays) {
337
- if (ageDays < 3) return "fresh";
338
- if (ageDays < 7) return "recent";
339
- if (ageDays < 14) return "aging";
375
+ if (ageDays < FRESHNESS_FRESH_DAYS) return "fresh";
376
+ if (ageDays < FRESHNESS_RECENT_DAYS) return "recent";
377
+ if (ageDays < FRESHNESS_AGING_DAYS) return "aging";
340
378
  return "stale";
341
379
  }
342
- async function searchConversations(config, query, limit = 5) {
380
+ async function searchConversations(config, query, limit = DEFAULT_SEARCH_LIMIT) {
343
381
  const Database = (await import("better-sqlite3")).default;
344
382
  if (!existsSync(config.dbPath)) {
345
383
  throw new Error(`Database not found: ${config.dbPath}`);
@@ -360,12 +398,12 @@ async function searchConversations(config, query, limit = 5) {
360
398
  FROM conversation_chunks
361
399
  WHERE embedding IS NOT NULL
362
400
  ORDER BY timestamp DESC
363
- LIMIT 1000`
401
+ LIMIT ${VECTOR_SEARCH_ROW_LIMIT}`
364
402
  ).all();
365
403
  const now = Date.now();
366
404
  return rows.map((row) => {
367
405
  const cosine = cosineSimilarity(queryEmbedding, blobToEmbedding(row.embedding));
368
- const ageDays = (now - row.timestamp) / (1e3 * 60 * 60 * 24);
406
+ const ageDays = (now - row.timestamp) / MS_PER_DAY;
369
407
  const weight = recencyWeight(ageDays);
370
408
  return {
371
409
  text: row.chunk_text,
@@ -396,7 +434,7 @@ async function searchConversations(config, query, limit = 5) {
396
434
  db.close();
397
435
  }
398
436
  }
399
- function findMarkdownFiles(dir, maxDepth = 4, depth = 0) {
437
+ function findMarkdownFiles(dir, maxDepth = WORKSPACE_MAX_DEPTH, depth = 0) {
400
438
  if (depth > maxDepth || !existsSync(dir)) return [];
401
439
  const files = [];
402
440
  for (const entry of readdirSync(dir, { withFileTypes: true })) {
@@ -426,7 +464,7 @@ function searchWorkspace(workspaceDir, query) {
426
464
  if (score === 0) continue;
427
465
  const lines = content.split("\n");
428
466
  const excerpts = [];
429
- for (let i = 0; i < lines.length && excerpts.length < 5; i++) {
467
+ for (let i = 0; i < lines.length && excerpts.length < WORKSPACE_MAX_EXCERPTS; i++) {
430
468
  const lineLower = lines[i].toLowerCase();
431
469
  if (words.some((w) => lineLower.includes(w))) {
432
470
  const start = Math.max(0, i - 1);
@@ -438,7 +476,7 @@ function searchWorkspace(workspaceDir, query) {
438
476
  } catch {
439
477
  }
440
478
  }
441
- return results.sort((a, b) => b.score - a.score).slice(0, 10);
479
+ return results.sort((a, b) => b.score - a.score).slice(0, WORKSPACE_MAX_RESULTS);
442
480
  }
443
481
  function parseSkillFrontmatter(content) {
444
482
  const match = content.match(/^---\n([\s\S]*?)\n---/);
@@ -532,9 +570,8 @@ async function executeSkillScript(skillDir, scripts, scriptName, args) {
532
570
  `${interpreter} "${scriptPath}" ${args}`,
533
571
  {
534
572
  env: { ...process.env },
535
- timeout: 12e4,
536
- maxBuffer: 10 * 1024 * 1024
537
- // 10MB
573
+ timeout: SKILL_EXEC_TIMEOUT_MS,
574
+ maxBuffer: SKILL_EXEC_MAX_BUFFER
538
575
  }
539
576
  );
540
577
  return stdout || stderr || "(no output)";
@@ -8,7 +8,7 @@ import {
8
8
  searchConversations,
9
9
  searchWorkspace,
10
10
  sendMessage
11
- } from "./chunk-LF7EMFBY.js";
11
+ } from "./chunk-RUQEH7GZ.js";
12
12
 
13
13
  // cli.ts
14
14
  import { existsSync, statSync } from "fs";
@@ -23,7 +23,7 @@ import {
23
23
  sendLdmMessage,
24
24
  sendMessage,
25
25
  setSessionIdentity
26
- } from "./chunk-LF7EMFBY.js";
26
+ } from "./chunk-RUQEH7GZ.js";
27
27
  export {
28
28
  LDM_ROOT,
29
29
  blobToEmbedding,
@@ -15,7 +15,7 @@ import {
15
15
  sendLdmMessage,
16
16
  sendMessage,
17
17
  setSessionIdentity
18
- } from "./chunk-LF7EMFBY.js";
18
+ } from "./chunk-RUQEH7GZ.js";
19
19
 
20
20
  // mcp-server.ts
21
21
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@wipcomputer/wip-ldm-os",
3
- "version": "0.4.73-alpha.11",
3
+ "version": "0.4.73-alpha.13",
4
4
  "type": "module",
5
5
  "description": "LDM OS: identity, memory, and sovereignty infrastructure for AI agents",
6
6
  "engines": {
@@ -10,6 +10,31 @@ import { randomUUID } from "node:crypto";
10
10
 
11
11
  const execAsync = promisify(exec);
12
12
 
13
+ // ── Settings ─────────────────────────────────────────────────────────
14
+ // All tunable constants in one place. No magic numbers below this block.
15
+
16
+ const GATEWAY_HOST = "127.0.0.1";
17
+ const DEFAULT_GATEWAY_PORT = 18_789; // openclaw.json gateway.port fallback
18
+ const DEFAULT_INBOX_PORT = 18_790; // env LESA_BRIDGE_INBOX_PORT fallback
19
+ const GATEWAY_TIMEOUT_MS = 15_000; // max wait for gateway chat response
20
+ const OP_CLI_TIMEOUT_MS = 10_000; // max wait for 1Password CLI
21
+ const EMBEDDING_API_URL = "https://api.openai.com/v1/embeddings";
22
+ const DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small";
23
+ const DEFAULT_EMBEDDING_DIMS = 1_536;
24
+ const VECTOR_SEARCH_ROW_LIMIT = 1_000; // max rows scanned for cosine ranking
25
+ const RECENCY_DECAY_RATE = 0.01; // per-day decay multiplier
26
+ const RECENCY_FLOOR = 0.5; // minimum recency weight
27
+ const FRESHNESS_FRESH_DAYS = 3;
28
+ const FRESHNESS_RECENT_DAYS = 7;
29
+ const FRESHNESS_AGING_DAYS = 14;
30
+ const DEFAULT_SEARCH_LIMIT = 5; // default results for searchConversations
31
+ const WORKSPACE_MAX_DEPTH = 4; // findMarkdownFiles recursion limit
32
+ const WORKSPACE_MAX_EXCERPTS = 5; // max excerpts per file in search
33
+ const WORKSPACE_MAX_RESULTS = 10; // max files returned from workspace search
34
+ const SKILL_EXEC_TIMEOUT_MS = 120_000; // max wait for skill script execution
35
+ const SKILL_EXEC_MAX_BUFFER = 10 * 1024 * 1024; // 10 MB stdout/stderr cap
36
+ const MS_PER_DAY = 1_000 * 60 * 60 * 24;
37
+
13
38
  // ── Constants ─────────────────────────────────────────────────────────
14
39
 
15
40
  const HOME = process.env.HOME || homedir();
@@ -66,9 +91,9 @@ export function resolveConfig(overrides?: Partial<BridgeConfig>): BridgeConfig {
66
91
  openclawDir,
67
92
  workspaceDir: overrides?.workspaceDir || join(openclawDir, "workspace"),
68
93
  dbPath: overrides?.dbPath || join(openclawDir, "memory", "context-embeddings.sqlite"),
69
- inboxPort: overrides?.inboxPort || parseInt(process.env.LESA_BRIDGE_INBOX_PORT || "18790", 10),
70
- embeddingModel: overrides?.embeddingModel || "text-embedding-3-small",
71
- embeddingDimensions: overrides?.embeddingDimensions || 1536,
94
+ inboxPort: overrides?.inboxPort || parseInt(process.env.LESA_BRIDGE_INBOX_PORT || String(DEFAULT_INBOX_PORT), 10),
95
+ embeddingModel: overrides?.embeddingModel || DEFAULT_EMBEDDING_MODEL,
96
+ embeddingDimensions: overrides?.embeddingDimensions || DEFAULT_EMBEDDING_DIMS,
72
97
  };
73
98
  }
74
99
 
@@ -88,9 +113,9 @@ export function resolveConfigMulti(overrides?: Partial<BridgeConfig>): BridgeCon
88
113
  openclawDir,
89
114
  workspaceDir: raw.workspaceDir || overrides?.workspaceDir || join(openclawDir, "workspace"),
90
115
  dbPath: raw.dbPath || overrides?.dbPath || join(openclawDir, "memory", "context-embeddings.sqlite"),
91
- inboxPort: raw.inboxPort || overrides?.inboxPort || parseInt(process.env.LESA_BRIDGE_INBOX_PORT || "18790", 10),
92
- embeddingModel: raw.embeddingModel || overrides?.embeddingModel || "text-embedding-3-small",
93
- embeddingDimensions: raw.embeddingDimensions || overrides?.embeddingDimensions || 1536,
116
+ inboxPort: raw.inboxPort || overrides?.inboxPort || parseInt(process.env.LESA_BRIDGE_INBOX_PORT || String(DEFAULT_INBOX_PORT), 10),
117
+ embeddingModel: raw.embeddingModel || overrides?.embeddingModel || DEFAULT_EMBEDDING_MODEL,
118
+ embeddingDimensions: raw.embeddingDimensions || overrides?.embeddingDimensions || DEFAULT_EMBEDDING_DIMS,
94
119
  };
95
120
  } catch {
96
121
  // LDM config unreadable, fall through to legacy
@@ -123,7 +148,7 @@ export function resolveApiKey(openclawDir: string): string | null {
123
148
  `op read "op://Agent Secrets/OpenAI API/api key" 2>/dev/null`,
124
149
  {
125
150
  env: { ...process.env, OP_SERVICE_ACCOUNT_TOKEN: saToken },
126
- timeout: 10000,
151
+ timeout: OP_CLI_TIMEOUT_MS,
127
152
  encoding: "utf-8",
128
153
  }
129
154
  ).trim();
@@ -154,7 +179,7 @@ export function resolveGatewayConfig(openclawDir: string): GatewayConfig {
154
179
 
155
180
  const config = JSON.parse(readFileSync(configPath, "utf-8"));
156
181
  const token = config?.gateway?.auth?.token;
157
- const port = config?.gateway?.port || 18789;
182
+ const port = config?.gateway?.port || DEFAULT_GATEWAY_PORT;
158
183
 
159
184
  if (!token) {
160
185
  throw new Error("No gateway.auth.token found in openclaw.json");
@@ -459,40 +484,58 @@ export async function sendMessage(
459
484
  // Send user: "main" to route to the main session (agent:main:main).
460
485
  // This ensures Parker sees CC's messages in the same stream as iMessage.
461
486
  // The OpenClaw gateway treats user: "main" as "use the default session."
462
- const response = await fetch(`http://127.0.0.1:${port}/v1/chat/completions`, {
463
- method: "POST",
464
- headers: {
465
- Authorization: `Bearer ${token}`,
466
- "Content-Type": "application/json",
467
- "x-openclaw-scopes": "operator.read,operator.write",
468
- "x-openclaw-session-key": `agent:${agentId}:main`,
469
- },
470
- body: JSON.stringify({
471
- model: `openclaw/${agentId}`,
472
- messages: [
473
- {
474
- role: "user",
475
- content: `[${senderLabel}]: ${message}`,
487
+ const controller = new AbortController();
488
+ const timeoutId = setTimeout(() => controller.abort(), GATEWAY_TIMEOUT_MS);
489
+
490
+ try {
491
+ const response = await fetch(
492
+ `http://${GATEWAY_HOST}:${port}/v1/chat/completions`,
493
+ {
494
+ method: "POST",
495
+ headers: {
496
+ Authorization: `Bearer ${token}`,
497
+ "Content-Type": "application/json",
498
+ "x-openclaw-scopes": "operator.read,operator.write",
499
+ "x-openclaw-session-key": `agent:${agentId}:main`,
476
500
  },
477
- ],
478
- }),
479
- });
501
+ body: JSON.stringify({
502
+ model: `openclaw/${agentId}`,
503
+ messages: [
504
+ {
505
+ role: "user",
506
+ content: `[${senderLabel}]: ${message}`,
507
+ },
508
+ ],
509
+ }),
510
+ signal: controller.signal,
511
+ }
512
+ );
513
+ clearTimeout(timeoutId);
480
514
 
481
- if (!response.ok) {
482
- const body = await response.text();
483
- throw new Error(`Gateway returned ${response.status}: ${body}`);
484
- }
515
+ if (!response.ok) {
516
+ const body = await response.text();
517
+ throw new Error(`Gateway returned ${response.status}: ${body}`);
518
+ }
485
519
 
486
- const data = (await response.json()) as {
487
- choices: Array<{ message: { content: string } }>;
488
- };
520
+ const data = (await response.json()) as {
521
+ choices: Array<{ message: { content: string } }>;
522
+ };
489
523
 
490
- const reply = data.choices?.[0]?.message?.content;
491
- if (!reply) {
492
- throw new Error("No response content from gateway");
493
- }
524
+ const reply = data.choices?.[0]?.message?.content;
525
+ if (!reply) {
526
+ throw new Error("No response content from gateway");
527
+ }
494
528
 
495
- return reply;
529
+ return reply;
530
+ } catch (err: any) {
531
+ clearTimeout(timeoutId);
532
+ if (err.name === "AbortError") {
533
+ throw new Error(
534
+ "Gateway timeout: Lesa may be busy or the gateway is processing another request. Try again in a moment."
535
+ );
536
+ }
537
+ throw err;
538
+ }
496
539
  }
497
540
 
498
541
  // ── Embedding helpers ────────────────────────────────────────────────
@@ -500,10 +543,10 @@ export async function sendMessage(
500
543
  export async function getQueryEmbedding(
501
544
  text: string,
502
545
  apiKey: string,
503
- model = "text-embedding-3-small",
504
- dimensions = 1536
546
+ model = DEFAULT_EMBEDDING_MODEL,
547
+ dimensions = DEFAULT_EMBEDDING_DIMS
505
548
  ): Promise<number[]> {
506
- const response = await fetch("https://api.openai.com/v1/embeddings", {
549
+ const response = await fetch(EMBEDDING_API_URL, {
507
550
  method: "POST",
508
551
  headers: {
509
552
  Authorization: `Bearer ${apiKey}`,
@@ -549,15 +592,15 @@ export function cosineSimilarity(a: number[], b: number[]): number {
549
592
  // ── Recency scoring ─────────────────────────────────────────────────
550
593
 
551
594
  function recencyWeight(ageDays: number): number {
552
- // Linear decay with floor at 0.5. Old stuff never fully disappears
595
+ // Linear decay with floor. Old stuff never fully disappears
553
596
  // but fresh context wins ties. ~50 days to hit the floor.
554
- return Math.max(0.5, 1.0 - ageDays * 0.01);
597
+ return Math.max(RECENCY_FLOOR, 1.0 - ageDays * RECENCY_DECAY_RATE);
555
598
  }
556
599
 
557
600
  function freshnessLabel(ageDays: number): "fresh" | "recent" | "aging" | "stale" {
558
- if (ageDays < 3) return "fresh";
559
- if (ageDays < 7) return "recent";
560
- if (ageDays < 14) return "aging";
601
+ if (ageDays < FRESHNESS_FRESH_DAYS) return "fresh";
602
+ if (ageDays < FRESHNESS_RECENT_DAYS) return "recent";
603
+ if (ageDays < FRESHNESS_AGING_DAYS) return "aging";
561
604
  return "stale";
562
605
  }
563
606
 
@@ -566,7 +609,7 @@ function freshnessLabel(ageDays: number): "fresh" | "recent" | "aging" | "stale"
566
609
  export async function searchConversations(
567
610
  config: BridgeConfig,
568
611
  query: string,
569
- limit = 5
612
+ limit = DEFAULT_SEARCH_LIMIT
570
613
  ): Promise<ConversationResult[]> {
571
614
  // Lazy import to avoid requiring better-sqlite3 if not needed
572
615
  const Database = (await import("better-sqlite3")).default;
@@ -593,7 +636,7 @@ export async function searchConversations(
593
636
  FROM conversation_chunks
594
637
  WHERE embedding IS NOT NULL
595
638
  ORDER BY timestamp DESC
596
- LIMIT 1000`
639
+ LIMIT ${VECTOR_SEARCH_ROW_LIMIT}`
597
640
  )
598
641
  .all() as Array<{
599
642
  chunk_text: string;
@@ -607,7 +650,7 @@ export async function searchConversations(
607
650
  return rows
608
651
  .map((row) => {
609
652
  const cosine = cosineSimilarity(queryEmbedding, blobToEmbedding(row.embedding));
610
- const ageDays = (now - row.timestamp) / (1000 * 60 * 60 * 24);
653
+ const ageDays = (now - row.timestamp) / MS_PER_DAY;
611
654
  const weight = recencyWeight(ageDays);
612
655
  return {
613
656
  text: row.chunk_text,
@@ -652,7 +695,7 @@ export async function searchConversations(
652
695
 
653
696
  // ── Workspace search ─────────────────────────────────────────────────
654
697
 
655
- export function findMarkdownFiles(dir: string, maxDepth = 4, depth = 0): string[] {
698
+ export function findMarkdownFiles(dir: string, maxDepth = WORKSPACE_MAX_DEPTH, depth = 0): string[] {
656
699
  if (depth > maxDepth || !existsSync(dir)) return [];
657
700
 
658
701
  const files: string[] = [];
@@ -687,7 +730,7 @@ export function searchWorkspace(workspaceDir: string, query: string): WorkspaceS
687
730
 
688
731
  const lines = content.split("\n");
689
732
  const excerpts: string[] = [];
690
- for (let i = 0; i < lines.length && excerpts.length < 5; i++) {
733
+ for (let i = 0; i < lines.length && excerpts.length < WORKSPACE_MAX_EXCERPTS; i++) {
691
734
  const lineLower = lines[i].toLowerCase();
692
735
  if (words.some((w) => lineLower.includes(w))) {
693
736
  const start = Math.max(0, i - 1);
@@ -702,7 +745,7 @@ export function searchWorkspace(workspaceDir: string, query: string): WorkspaceS
702
745
  }
703
746
  }
704
747
 
705
- return results.sort((a, b) => b.score - a.score).slice(0, 10);
748
+ return results.sort((a, b) => b.score - a.score).slice(0, WORKSPACE_MAX_RESULTS);
706
749
  }
707
750
 
708
751
  // ── Read workspace file ──────────────────────────────────────────────
@@ -858,8 +901,8 @@ export async function executeSkillScript(
858
901
  `${interpreter} "${scriptPath}" ${args}`,
859
902
  {
860
903
  env: { ...process.env },
861
- timeout: 120000,
862
- maxBuffer: 10 * 1024 * 1024, // 10MB
904
+ timeout: SKILL_EXEC_TIMEOUT_MS,
905
+ maxBuffer: SKILL_EXEC_MAX_BUFFER,
863
906
  }
864
907
  );
865
908
  return stdout || stderr || "(no output)";
@@ -0,0 +1,70 @@
1
+ #!/usr/bin/env bash
2
+ # deploy.sh: Deploy hosted MCP server to wip.computer
3
+ #
4
+ # Prerequisites:
5
+ # - SSH config has Host wip.computer
6
+ # - pm2 installed on the server
7
+ # - nginx configured on the server
8
+ #
9
+ # Usage: bash deploy.sh
10
+
11
+ set -euo pipefail
12
+
13
+ REMOTE="wip.computer"
14
+ REMOTE_DIR="/var/www/wip.computer/app/mcp-server"
15
+ SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
16
+
17
+ echo "Deploying hosted MCP server to ${REMOTE}..."
18
+
19
+ # 1. Create remote directory structure
20
+ echo "Creating remote directories..."
21
+ ssh "${REMOTE}" "mkdir -p ${REMOTE_DIR}/inbox"
22
+
23
+ # 2. Copy server files
24
+ echo "Copying files..."
25
+ scp "${SCRIPT_DIR}/server.mjs" "${REMOTE}:${REMOTE_DIR}/"
26
+ scp "${SCRIPT_DIR}/inbox.mjs" "${REMOTE}:${REMOTE_DIR}/"
27
+ scp "${SCRIPT_DIR}/tools.mjs" "${REMOTE}:${REMOTE_DIR}/"
28
+ scp "${SCRIPT_DIR}/package.json" "${REMOTE}:${REMOTE_DIR}/"
29
+
30
+ # 3. Install dependencies
31
+ echo "Installing dependencies..."
32
+ ssh "${REMOTE}" "cd ${REMOTE_DIR} && npm install --omit=dev"
33
+
34
+ # 4. Register with pm2 (restart if already running)
35
+ echo "Starting with pm2..."
36
+ ssh "${REMOTE}" "cd ${REMOTE_DIR} && pm2 delete mcp-server 2>/dev/null || true && pm2 start server.mjs --name mcp-server && pm2 save"
37
+
38
+ # 5. Configure nginx reverse proxy
39
+ echo "Configuring nginx..."
40
+ ssh "${REMOTE}" "cat > /tmp/mcp-server.conf << 'NGINX'
41
+ # MCP server reverse proxy
42
+ # Location block to add inside the wip.computer server block
43
+ location /mcp {
44
+ proxy_pass http://127.0.0.1:18800/mcp;
45
+ proxy_http_version 1.1;
46
+ proxy_set_header Host \$host;
47
+ proxy_set_header X-Real-IP \$remote_addr;
48
+ proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
49
+ proxy_set_header X-Forwarded-Proto \$scheme;
50
+
51
+ # SSE support (for MCP Streamable HTTP GET streams)
52
+ proxy_set_header Connection '';
53
+ proxy_buffering off;
54
+ proxy_cache off;
55
+ proxy_read_timeout 86400;
56
+ chunked_transfer_encoding on;
57
+ }
58
+ NGINX
59
+ "
60
+
61
+ echo ""
62
+ echo "nginx config written to /tmp/mcp-server.conf on the server."
63
+ echo "To activate, add it to your server block and reload:"
64
+ echo " ssh ${REMOTE} 'sudo cp /tmp/mcp-server.conf /etc/nginx/snippets/mcp-server.conf'"
65
+ echo " # Then include it in your server block: include snippets/mcp-server.conf;"
66
+ echo " ssh ${REMOTE} 'sudo nginx -t && sudo systemctl reload nginx'"
67
+ echo ""
68
+ echo "Deployment complete."
69
+ echo "Health check: curl https://wip.computer/health"
70
+ echo "MCP endpoint: https://wip.computer/mcp"
@@ -0,0 +1,64 @@
1
+ // inbox.mjs: File-based message inbox for hosted MCP server.
2
+ // Each message is a JSON file. Same format as local bridge.
3
+
4
+ import { randomUUID } from "node:crypto";
5
+ import { mkdirSync, readdirSync, readFileSync, writeFileSync, unlinkSync } from "node:fs";
6
+ import { join } from "node:path";
7
+
8
+ const INBOX_DIR = process.env.MCP_INBOX_DIR || "/var/www/wip.computer/app/mcp-server/inbox";
9
+ mkdirSync(INBOX_DIR, { recursive: true });
10
+
11
+ /** Push a message into the inbox. Returns message ID. */
12
+ export function pushMessage({ from, to, body, type = "chat" }) {
13
+ const id = randomUUID();
14
+ const msg = { id, type, from, to, body, timestamp: new Date().toISOString(), read: false };
15
+ writeFileSync(join(INBOX_DIR, `${Date.now()}-${id}.json`), JSON.stringify(msg, null, 2));
16
+ return id;
17
+ }
18
+
19
+ /** Get messages for a recipient. If markRead, consumed messages are deleted. */
20
+ export function getMessages(to, markRead = false) {
21
+ const files = readdirSync(INBOX_DIR).filter((f) => f.endsWith(".json")).sort();
22
+ const matched = [];
23
+ for (const file of files) {
24
+ const fp = join(INBOX_DIR, file);
25
+ let msg;
26
+ try { msg = JSON.parse(readFileSync(fp, "utf-8")); } catch { continue; }
27
+ if (msg.read || !matches(msg.to, to)) continue;
28
+ matched.push(msg);
29
+ if (markRead) { try { unlinkSync(fp); } catch {} }
30
+ }
31
+ return matched;
32
+ }
33
+
34
+ /** Count pending (unread) messages for a recipient. */
35
+ export function countPending(to) {
36
+ const files = readdirSync(INBOX_DIR).filter((f) => f.endsWith(".json"));
37
+ let n = 0;
38
+ for (const file of files) {
39
+ try {
40
+ const msg = JSON.parse(readFileSync(join(INBOX_DIR, file), "utf-8"));
41
+ if (!msg.read && matches(msg.to, to)) n++;
42
+ } catch { continue; }
43
+ }
44
+ return n;
45
+ }
46
+
47
+ /**
48
+ * Recipient matching. Supports:
49
+ * "agent", "agent:session", "agent:*" (all sessions), "*" (broadcast)
50
+ */
51
+ function matches(msgTo, query) {
52
+ if (msgTo === "*" || query === "*" || msgTo === query) return true;
53
+ if (query.endsWith(":*")) {
54
+ const p = query.slice(0, -2);
55
+ if (msgTo === p || msgTo.startsWith(p + ":")) return true;
56
+ }
57
+ if (msgTo.endsWith(":*")) {
58
+ const p = msgTo.slice(0, -2);
59
+ if (query === p || query.startsWith(p + ":")) return true;
60
+ }
61
+ if (!query.includes(":") && msgTo === query + ":default") return true;
62
+ if (!msgTo.includes(":") && query === msgTo + ":default") return true;
63
+ return false;
64
+ }
@@ -0,0 +1,19 @@
1
+ {
2
+ "name": "@wipcomputer/hosted-mcp",
3
+ "version": "0.1.0",
4
+ "type": "module",
5
+ "description": "Hosted MCP server for wip.computer. Bridge + memory for any platform.",
6
+ "main": "server.mjs",
7
+ "scripts": {
8
+ "start": "node server.mjs",
9
+ "dev": "node --watch server.mjs"
10
+ },
11
+ "dependencies": {
12
+ "@modelcontextprotocol/sdk": "^1.29.0",
13
+ "zod": "^3.25.0"
14
+ },
15
+ "engines": {
16
+ "node": ">=20"
17
+ },
18
+ "private": true
19
+ }
@@ -0,0 +1,132 @@
1
+ // server.mjs: Hosted MCP server for wip.computer
2
+ // MCP Streamable HTTP transport at /mcp, health check at /health.
3
+ // Auth: Bearer ck-... API key maps to an agent ID.
4
+
5
+ import { randomUUID } from "node:crypto";
6
+ import { createServer } from "node:http";
7
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
8
+ import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
9
+ import { isInitializeRequest } from "@modelcontextprotocol/sdk/types.js";
10
+ import { registerTools } from "./tools.mjs";
11
+
12
+ const PORT = parseInt(process.env.MCP_PORT || "18800", 10);
13
+
14
+ // Prototype: one hardcoded test key. Later: database / Agent Pay.
15
+ const API_KEYS = { "ck-test-001": "test-agent" };
16
+
17
+ // Session ID -> { transport, server, identity }
18
+ const sessions = {};
19
+
20
+ function authenticate(req) {
21
+ const auth = req.headers["authorization"];
22
+ if (!auth?.startsWith("Bearer ")) return null;
23
+ const key = auth.slice(7).trim();
24
+ return API_KEYS[key] ? { agentId: API_KEYS[key], apiKey: key } : null;
25
+ }
26
+
27
+ function readBody(req) {
28
+ return new Promise((resolve, reject) => {
29
+ const chunks = [];
30
+ req.on("data", (c) => chunks.push(c));
31
+ req.on("end", () => {
32
+ try { const raw = Buffer.concat(chunks).toString(); resolve(raw ? JSON.parse(raw) : undefined); }
33
+ catch (e) { reject(e); }
34
+ });
35
+ req.on("error", reject);
36
+ });
37
+ }
38
+
39
+ function json(res, status, body) {
40
+ res.writeHead(status, { "Content-Type": "application/json" });
41
+ res.end(JSON.stringify(body));
42
+ }
43
+
44
+ function rpcError(res, status, code, message) {
45
+ json(res, status, { jsonrpc: "2.0", error: { code, message }, id: null });
46
+ }
47
+
48
+ function cors(res) {
49
+ res.setHeader("Access-Control-Allow-Origin", "*");
50
+ res.setHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, OPTIONS");
51
+ res.setHeader("Access-Control-Allow-Headers", "Content-Type, Authorization, Mcp-Session-Id, Last-Event-ID");
52
+ res.setHeader("Access-Control-Expose-Headers", "Mcp-Session-Id");
53
+ }
54
+
55
+ async function handlePost(req, res, identity) {
56
+ const sid = req.headers["mcp-session-id"];
57
+ let body;
58
+ try { body = await readBody(req); } catch { rpcError(res, 400, -32700, "Parse error"); return; }
59
+
60
+ if (sid && sessions[sid]) {
61
+ await sessions[sid].transport.handleRequest(req, res, body);
62
+ return;
63
+ }
64
+
65
+ if (!sid && isInitializeRequest(body)) {
66
+ const transport = new StreamableHTTPServerTransport({
67
+ sessionIdGenerator: () => randomUUID(),
68
+ onsessioninitialized: (id) => {
69
+ sessions[id] = { transport, server: mcpServer, identity };
70
+ console.log(`Session created: ${id} (agent: ${identity.agentId})`);
71
+ },
72
+ });
73
+ transport.onclose = () => {
74
+ const id = transport.sessionId;
75
+ if (id && sessions[id]) { console.log(`Session closed: ${id}`); delete sessions[id]; }
76
+ };
77
+ const mcpServer = new McpServer({ name: "wip-mcp", version: "0.1.0" });
78
+ registerTools(mcpServer, () => identity);
79
+ await mcpServer.connect(transport);
80
+ await transport.handleRequest(req, res, body);
81
+ return;
82
+ }
83
+
84
+ rpcError(res, 400, -32000, "Bad request: missing or invalid session");
85
+ }
86
+
87
+ async function handleGetOrDelete(req, res) {
88
+ const sid = req.headers["mcp-session-id"];
89
+ if (!sid || !sessions[sid]) { rpcError(res, 400, -32000, "Invalid or missing session ID"); return; }
90
+ await sessions[sid].transport.handleRequest(req, res);
91
+ }
92
+
93
+ const httpServer = createServer(async (req, res) => {
94
+ cors(res);
95
+ if (req.method === "OPTIONS") { res.writeHead(204); res.end(); return; }
96
+
97
+ if (req.method === "GET" && req.url === "/health") {
98
+ json(res, 200, { ok: true, server: "wip-mcp", version: "0.1.0", sessions: Object.keys(sessions).length, uptime: process.uptime() });
99
+ return;
100
+ }
101
+
102
+ if (req.url === "/mcp") {
103
+ const identity = authenticate(req);
104
+ if (!identity && req.method === "POST") { json(res, 401, { error: "Unauthorized. Provide Bearer ck-... token." }); return; }
105
+ try {
106
+ if (req.method === "POST") await handlePost(req, res, identity);
107
+ else if (req.method === "GET" || req.method === "DELETE") await handleGetOrDelete(req, res);
108
+ else rpcError(res, 405, -32000, "Method not allowed");
109
+ } catch (err) {
110
+ console.error("MCP error:", err);
111
+ if (!res.headersSent) rpcError(res, 500, -32603, "Internal server error");
112
+ }
113
+ return;
114
+ }
115
+
116
+ json(res, 404, { error: "Not found" });
117
+ });
118
+
119
+ httpServer.listen(PORT, "0.0.0.0", () => {
120
+ console.log(`wip-mcp listening on 0.0.0.0:${PORT}`);
121
+ console.log(`Health: http://localhost:${PORT}/health`);
122
+ console.log(`MCP: http://localhost:${PORT}/mcp`);
123
+ });
124
+
125
+ async function shutdown() {
126
+ console.log("Shutting down...");
127
+ for (const sid of Object.keys(sessions)) { try { await sessions[sid].transport.close(); } catch {} delete sessions[sid]; }
128
+ httpServer.close();
129
+ process.exit(0);
130
+ }
131
+ process.on("SIGINT", shutdown);
132
+ process.on("SIGTERM", shutdown);
@@ -0,0 +1,57 @@
1
+ // tools.mjs: MCP tool definitions. Bridge (messaging) + placeholder memory tools.
2
+
3
+ import { z } from "zod";
4
+ import { pushMessage, getMessages, countPending } from "./inbox.mjs";
5
+
6
+ /** Register all tools on an McpServer instance. */
7
/**
 * Register all tools on an McpServer instance.
 *
 * @param {object} server - McpServer to attach tools to.
 * @param {() => {agentId: string, apiKey: string}} getIdentity - Resolved
 *   lazily on every tool call so the current caller identity is always used.
 */
export function registerTools(server, getIdentity) {

  // Mask an API key for display. The original `slice(0,7)+"..."+slice(-4)`
  // leaked the ENTIRE key when it was shorter than 12 characters (the two
  // slices overlap, e.g. "ck-12" -> "ck-12...k-12"); short keys are now
  // fully redacted instead.
  const maskApiKey = (key) =>
    key.length > 11 ? `${key.slice(0, 7)}...${key.slice(-4)}` : "***";

  server.registerTool("send_message", {
    description: "Send a message to any agent. Stored in inbox until read. " +
      "Target: 'agent', 'agent:session', 'agent:*' (all sessions), '*' (broadcast).",
    inputSchema: {
      to: z.string().describe("Recipient"),
      body: z.string().describe("Message body"),
      type: z.string().optional().default("chat").describe("chat, system, or task"),
    },
  }, async ({ to, body, type }) => {
    const id = pushMessage({ from: getIdentity().agentId, to, body, type });
    return { content: [{ type: "text", text: `Sent (id: ${id}) to ${to}` }] };
  });

  server.registerTool("check_inbox", {
    description: "Check for pending messages. Returns unread messages and marks them read.",
    inputSchema: {},
  }, async () => {
    // NOTE(review): second arg presumably means "mark as read on fetch" —
    // confirm against inbox.mjs.
    const msgs = getMessages(getIdentity().agentId, true);
    if (!msgs.length) return { content: [{ type: "text", text: "No pending messages." }] };
    const text = msgs
      .map((m) => `**${m.from}** [${m.type}] (${m.timestamp}):\n${m.body}`)
      .join("\n\n---\n\n");
    return { content: [{ type: "text", text: `${msgs.length} message(s):\n\n${text}` }] };
  });

  // Placeholder: semantic memory search is not wired up yet; echoes the query.
  server.registerTool("search_memory", {
    description: "Search semantic memory (Crystal). Placeholder... coming soon.",
    inputSchema: { query: z.string().describe("Search query") },
  }, async ({ query }) => {
    return { content: [{ type: "text", text: `Memory search coming soon. Query: "${query}"` }] };
  });

  // Placeholder: memory storage is not wired up yet; echoes what it would store.
  server.registerTool("remember", {
    description: "Store a fact in memory (Crystal). Placeholder... coming soon.",
    inputSchema: {
      text: z.string().describe("What to remember"),
      tags: z.string().optional().describe("Comma-separated tags"),
    },
  }, async ({ text, tags }) => {
    return { content: [{ type: "text", text: `Memory storage coming soon. Would remember: "${text}"${tags ? ` (tags: ${tags})` : ""}` }] };
  });

  server.registerTool("status", {
    description: "Show connection info and pending message count.",
    inputSchema: {},
  }, async () => {
    const { agentId, apiKey } = getIdentity();
    const masked = maskApiKey(apiKey);
    return { content: [{ type: "text", text: `Agent: ${agentId}\nAPI key: ${masked}\nPending: ${countPending(agentId)}\nServer: wip.computer hosted MCP` }] };
  });
}