prism-mcp-server 9.3.0 → 9.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -826,8 +826,9 @@ The Generator strips the `console.log`, resubmits, and the next `EVALUATE` retur
826
826
 
827
827
  ## 🆕 What's New
828
828
 
829
- > **Current release: v9.3.0 — TurboQuant ResidualNorm Tiebreaker**
829
+ > **Current release: v9.4.0 — Adversarial Security Hardening & Bidirectional Sync**
830
830
 
831
+ - 🔒 **v9.4.0 — Security Hardening & Bidirectional Sync:** Two-pass adversarial audit found 18 vulnerabilities (4C/5H/9M) across Prism and Synalux — 17 fixed. Critical: fail-closed rate limiter, path traversal guards, error sanitization. High: plan name alignment (revenue fix), CORS allowlist, settings injection prevention. New: bidirectional `prism sync push` CLI command pushes local SQLite → Supabase, NextAuth JWT enrichment eliminates N+1 DB queries, concurrency counter guaranteed via `try/finally`, 10MB request body limits.
831
832
  - 🎯 **v9.3.0 — TurboQuant ResidualNorm Tiebreaker:** Configurable ranking optimization for Tier-2 search. When compressed cosine scores are within ε of each other, prefers the candidate with lower `residualNorm` (more trustworthy compressed representation). `PRISM_TURBOQUANT_TIEBREAKER_EPSILON=0.005` gives +2pp R@1, +1pp R@5. Empirically validated at N=5K with A/B test. 1066 tests, 0 regressions. Inspired by [@m13v's suggestion](https://github.com/xiaowu0162/LongMemEval/issues/31).
832
833
  - 🔒 **v9.2.7 — Security Hardening:** Typed `PrototypePollutionError` class (replaces generic `Error` in `sanitizeForMerge()` — enables catch-site discrimination and forensic logging with `offendingKey`), explicit null-byte path injection guard in `SafetyController.validateActionsInScope()` (C-string truncation attack vector), and corrected CRDT merge semantics documentation (Remove-Wins-from-Either, not Add-Wins). 1055 tests, 0 regressions.
833
834
  - 🪟 **v9.2.6 — Windows CI Timeout Fix:** CLI integration tests timed out on Windows + Node 22.x GitHub Actions runners. Added `{ timeout: 30_000 }` to the describe block. 6 new residual distribution tests validating TurboQuant's QJL correction stability (zero R@5 delta between P50 and P95 residual vectors at d=128, 2K corpus).
@@ -913,6 +914,10 @@ prism load my-project --level deep # Full context with all enrichmen
913
914
  prism load my-project --level quick --json # Machine-readable JSON
914
915
  prism load my-project --role dev --json # Role-scoped loading
915
916
 
917
+ # Bidirectional sync (v9.4.0)
918
+ prism sync push # Push local SQLite β†’ Supabase
919
+ prism sync push --json # Machine-readable output
920
+
916
921
  # Verification harness
917
922
  prism verify status # Check verification state
918
923
  prism verify status --json # Machine-readable output
package/dist/cli.js CHANGED
@@ -7,7 +7,7 @@ import { getStorage, closeStorage } from './storage/index.js';
7
7
  import { getSetting } from './storage/configStorage.js';
8
8
  import { PRISM_USER_ID, SERVER_CONFIG } from './config.js';
9
9
  import { getCurrentGitState } from './utils/git.js';
10
- import { sessionLoadContextHandler } from './tools/ledgerHandlers.js';
10
+ import { sessionLoadContextHandler, sessionSaveLedgerHandler, sessionSaveHandoffHandler } from './tools/ledgerHandlers.js';
11
11
  const program = new Command();
12
12
  program
13
13
  .name('prism')
@@ -131,6 +131,149 @@ program
131
131
  process.exit(1);
132
132
  }
133
133
  });
134
+ // ─── prism save ───────────────────────────────────────────────
135
+ // Saves session state using the same storage layer as the MCP
136
+ // session_save_ledger and session_save_handoff tools. Works with
137
+ // both SQLite and Supabase.
138
+ //
139
+ // Designed for Antigravity and other environments that cannot use
140
+ // MCP tools directly. This is the counterpart to `prism load`.
141
+ //
142
+ // Two subcommands:
143
+ // prism save ledger <project> β€” append immutable session log entry
144
+ // prism save handoff <project> β€” update live project state for next session
145
+ const saveCmd = program
146
+ .command('save')
147
+ .description('Save session state (ledger entries and handoff)');
148
+ /**
149
+ * Parse a CLI string argument that may be a JSON array or a plain string.
150
+ * Returns string[] for arrays, wraps plain strings in an array.
151
+ */
152
+ function parseJsonArrayArg(val, fieldName) {
153
+ if (!val)
154
+ return undefined;
155
+ const trimmed = val.trim();
156
+ if (trimmed.startsWith('[')) {
157
+ try {
158
+ const parsed = JSON.parse(trimmed);
159
+ if (!Array.isArray(parsed))
160
+ throw new Error('not an array');
161
+ return parsed.map(String);
162
+ }
163
+ catch (err) {
164
+ console.error(`Error: --${fieldName} must be a valid JSON array. Got: ${trimmed}`);
165
+ process.exit(1);
166
+ }
167
+ }
168
+ // Plain string β†’ single-element array
169
+ return [trimmed];
170
+ }
171
+ saveCmd
172
+ .command('ledger <project>')
173
+ .description('Save an immutable session log entry (same as session_save_ledger MCP tool)')
174
+ .requiredOption('-c, --conversation-id <id>', 'Unique conversation/session identifier')
175
+ .requiredOption('-m, --summary <text>', 'Summary of what was accomplished')
176
+ .option('-t, --todos <json>', 'Open TODO items as JSON array, e.g. \'["item1","item2"]\'')
177
+ .option('-f, --files-changed <json>', 'Files changed as JSON array')
178
+ .option('-d, --decisions <json>', 'Key decisions as JSON array')
179
+ .option('-r, --role <role>', 'Agent role for Hivemind scoping')
180
+ .option('-s, --storage <backend>', 'Storage backend: local (SQLite) or supabase')
181
+ .option('--json', 'Emit machine-readable JSON output')
182
+ .action(async (project, options) => {
183
+ try {
184
+ // Storage override
185
+ if (options.storage) {
186
+ const validStorages = ['local', 'supabase'];
187
+ if (!validStorages.includes(options.storage)) {
188
+ console.error(`Error: Invalid storage "${options.storage}". Must be one of: ${validStorages.join(', ')}`);
189
+ process.exit(1);
190
+ }
191
+ process.env.PRISM_STORAGE = options.storage;
192
+ }
193
+ const args = {
194
+ project,
195
+ conversation_id: options.conversationId,
196
+ summary: options.summary,
197
+ todos: parseJsonArrayArg(options.todos, 'todos'),
198
+ files_changed: parseJsonArrayArg(options.filesChanged, 'files-changed'),
199
+ decisions: parseJsonArrayArg(options.decisions, 'decisions'),
200
+ role: options.role,
201
+ };
202
+ const result = await sessionSaveLedgerHandler(args);
203
+ if (options.json) {
204
+ console.log(JSON.stringify({
205
+ success: !result.isError,
206
+ text: result.content[0]?.text || '',
207
+ }, null, 2));
208
+ }
209
+ else {
210
+ console.log(result.content[0]?.text || 'Done');
211
+ }
212
+ if (result.isError) {
213
+ await closeStorage();
214
+ process.exit(1);
215
+ }
216
+ await closeStorage();
217
+ }
218
+ catch (err) {
219
+ console.error(`Error saving ledger: ${err instanceof Error ? err.message : String(err)}`);
220
+ await closeStorage().catch(() => { });
221
+ process.exit(1);
222
+ }
223
+ });
224
+ saveCmd
225
+ .command('handoff <project>')
226
+ .description('Update the live project state for next session (same as session_save_handoff MCP tool)')
227
+ .option('-m, --last-summary <text>', 'Summary of the most recent session')
228
+ .option('-t, --open-todos <json>', 'Current open TODO items as JSON array')
229
+ .option('-k, --key-context <text>', 'Free-form critical context for next session')
230
+ .option('-b, --active-branch <branch>', 'Git branch or context to resume on')
231
+ .option('-v, --expected-version <n>', 'Version for optimistic concurrency control', parseInt)
232
+ .option('-r, --role <role>', 'Agent role for Hivemind scoping')
233
+ .option('-s, --storage <backend>', 'Storage backend: local (SQLite) or supabase')
234
+ .option('--json', 'Emit machine-readable JSON output')
235
+ .action(async (project, options) => {
236
+ try {
237
+ // Storage override
238
+ if (options.storage) {
239
+ const validStorages = ['local', 'supabase'];
240
+ if (!validStorages.includes(options.storage)) {
241
+ console.error(`Error: Invalid storage "${options.storage}". Must be one of: ${validStorages.join(', ')}`);
242
+ process.exit(1);
243
+ }
244
+ process.env.PRISM_STORAGE = options.storage;
245
+ }
246
+ const args = {
247
+ project,
248
+ last_summary: options.lastSummary,
249
+ open_todos: parseJsonArrayArg(options.openTodos, 'open-todos'),
250
+ key_context: options.keyContext,
251
+ active_branch: options.activeBranch,
252
+ expected_version: options.expectedVersion,
253
+ role: options.role,
254
+ };
255
+ const result = await sessionSaveHandoffHandler(args);
256
+ if (options.json) {
257
+ console.log(JSON.stringify({
258
+ success: !result.isError,
259
+ text: result.content[0]?.text || '',
260
+ }, null, 2));
261
+ }
262
+ else {
263
+ console.log(result.content[0]?.text || 'Done');
264
+ }
265
+ if (result.isError) {
266
+ await closeStorage();
267
+ process.exit(1);
268
+ }
269
+ await closeStorage();
270
+ }
271
+ catch (err) {
272
+ console.error(`Error saving handoff: ${err instanceof Error ? err.message : String(err)}`);
273
+ await closeStorage().catch(() => { });
274
+ process.exit(1);
275
+ }
276
+ });
134
277
  // ─── prism verify ─────────────────────────────────────────────
135
278
  const verifyCmd = program
136
279
  .command('verify')
@@ -171,4 +314,58 @@ verifyCmd
171
314
  await storage.close();
172
315
  }
173
316
  });
317
+ // ─── prism sync ───────────────────────────────────────────────
318
+ // M4: Bidirectional reconciliation commands.
319
+ // `prism sync push` pushes local SQLite data to Supabase.
320
+ const syncCmd = program
321
+ .command('sync')
322
+ .description('Cross-backend data synchronization');
323
+ syncCmd
324
+ .command('push')
325
+ .description('Push local SQLite data to Supabase (handoffs + recent ledger)')
326
+ .option('--json', 'Emit machine-readable JSON output')
327
+ .action(async (options) => {
328
+ try {
329
+ // Force local storage mode to read from SQLite
330
+ process.env.PRISM_STORAGE = 'local';
331
+ const storage = await getStorage();
332
+ // Verify Supabase credentials are available
333
+ const { getSetting } = await import('./storage/configStorage.js');
334
+ const sbUrl = process.env.SUPABASE_URL || await getSetting('SUPABASE_URL');
335
+ const sbKey = process.env.SUPABASE_KEY || await getSetting('SUPABASE_KEY');
336
+ if (!sbUrl || !sbKey) {
337
+ console.error('❌ Supabase credentials not configured. Set SUPABASE_URL and SUPABASE_KEY.');
338
+ await closeStorage();
339
+ process.exit(1);
340
+ }
341
+ // Ensure process.env has the credentials for supabaseApi.ts
342
+ process.env.SUPABASE_URL = sbUrl;
343
+ process.env.SUPABASE_KEY = sbKey;
344
+ const { pushReconciliation } = await import('./storage/reconcile.js');
345
+ const { SqliteStorage } = await import('./storage/sqlite.js');
346
+ const sqliteInstance = storage;
347
+ const getTimestamps = () => sqliteInstance.getHandoffTimestamps();
348
+ const result = await pushReconciliation(storage, getTimestamps);
349
+ if (options.json) {
350
+ console.log(JSON.stringify(result, null, 2));
351
+ }
352
+ else {
353
+ if (result.handoffsPushed === 0 && result.ledgerEntriesPushed === 0) {
354
+ console.log('✅ Supabase is already up-to-date with local data.');
355
+ }
356
+ else {
357
+ console.log(`✅ Pushed ${result.handoffsPushed} handoff(s) + ${result.ledgerEntriesPushed} ledger entries to Supabase`);
358
+ if (result.projects.length > 0) {
359
+ console.log(` Projects: ${result.projects.join(', ')}`);
360
+ }
361
+ }
362
+ }
363
+ await closeStorage();
364
+ }
365
+ catch (err) {
366
+ console.error(`Error during sync push: ${err instanceof Error ? err.message : String(err)}`);
367
+ await closeStorage().catch(() => { });
368
+ process.exit(1);
369
+ }
370
+ });
174
371
  program.parse(process.argv);
package/dist/config.js CHANGED
@@ -73,12 +73,12 @@ if (!BRAVE_ANSWERS_API_KEY && process.env.PRISM_DEBUG_LOGGING === "true") {
73
73
  export const VOYAGE_API_KEY = process.env.VOYAGE_API_KEY;
74
74
  // ─── v2.0: Storage Backend Selection ─────────────────────────
75
75
  // REVIEWER NOTE: Step 1 of v2.0 introduces a storage abstraction.
76
- // Currently only "supabase" is implemented. "local" (SQLite) is
77
- // coming in Step 2. Default is "supabase" for backward compat.
76
+ // Both "local" (SQLite) and "supabase" (PostgreSQL) are implemented.
77
+ // Default is "local" for zero-config operation.
78
78
  //
79
- // Set PRISM_STORAGE=local to use SQLite (once implemented).
80
- // Set PRISM_STORAGE=supabase to use Supabase REST API (default).
81
- export const PRISM_STORAGE = process.env.PRISM_STORAGE || "supabase";
79
+ // Set PRISM_STORAGE=supabase to use Supabase REST API.
80
+ // Set PRISM_STORAGE=local to use SQLite (default).
81
+ export const PRISM_STORAGE = process.env.PRISM_STORAGE || "local";
82
82
  // Logged at debug level — see debug() at bottom of file
83
83
  // ─── Optional: Supabase (Session Memory Module) ───────────────
84
84
  // When both SUPABASE_URL and SUPABASE_KEY are set, session memory tools
@@ -16,10 +16,20 @@
16
16
  */
17
17
  import { recordSynthesisRun, recordTestMeRequest, getGraphMetricsSnapshot } from "../observability/graphMetrics.js";
18
18
  /** Read HTTP request body as string */
19
+ /** SECURITY: 10MB limit prevents memory exhaustion from oversized POST payloads. */
20
+ const MAX_BODY_BYTES = 10 * 1024 * 1024; // 10MB
19
21
  function readBody(req) {
20
22
  return new Promise((resolve, reject) => {
21
23
  const chunks = [];
22
- req.on("data", (chunk) => { chunks.push(chunk); });
24
+ let totalBytes = 0;
25
+ req.on("data", (chunk) => {
26
+ totalBytes += chunk.length;
27
+ if (totalBytes > MAX_BODY_BYTES) {
28
+ req.destroy(new Error("Request body too large (>10MB)"));
29
+ return reject(new Error("Request body too large"));
30
+ }
31
+ chunks.push(chunk);
32
+ });
23
33
  req.on("end", () => resolve(Buffer.concat(chunks).toString("utf-8")));
24
34
  req.on("error", reject);
25
35
  });
@@ -35,10 +35,20 @@ import { handleGraphRoutes } from "./graphRouter.js";
35
35
  import { safeCompare, generateToken, isAuthenticated, createRateLimiter, initJWKS, } from "./authUtils.js";
36
36
  const PORT = parseInt(process.env.PRISM_DASHBOARD_PORT || "3000", 10);
37
37
  /** Read HTTP request body as string (Buffer-based to avoid GC thrash on large imports) */
38
+ /** SECURITY: 10MB limit prevents memory exhaustion from oversized POST payloads. */
39
+ const MAX_BODY_BYTES = 10 * 1024 * 1024; // 10MB
38
40
  function readBody(req) {
39
41
  return new Promise((resolve, reject) => {
40
42
  const chunks = [];
41
- req.on("data", (chunk) => { chunks.push(chunk); });
43
+ let totalBytes = 0;
44
+ req.on("data", (chunk) => {
45
+ totalBytes += chunk.length;
46
+ if (totalBytes > MAX_BODY_BYTES) {
47
+ req.destroy(new Error("Request body too large (>10MB)"));
48
+ return reject(new Error("Request body too large"));
49
+ }
50
+ chunks.push(chunk);
51
+ });
42
52
  req.on("end", () => resolve(Buffer.concat(chunks).toString("utf-8")));
43
53
  req.on("error", reject);
44
54
  });
@@ -165,8 +175,13 @@ return false;}
165
175
  // v6.5.1: CORS — restrict origin when auth is enabled to prevent CSRF
166
176
  if (AUTH_ENABLED) {
167
177
  const origin = req.headers.origin || "";
168
- // Only echo back the origin if present (browser-initiated requests)
169
- if (origin) {
178
+ // SECURITY: Allowlist-based CORS — don't echo arbitrary origins with credentials
179
+ const allowedOrigins = new Set([
180
+ `http://localhost:${PORT}`,
181
+ `http://127.0.0.1:${PORT}`,
182
+ process.env.PRISM_DASHBOARD_ORIGIN || "",
183
+ ].filter(Boolean));
184
+ if (origin && allowedOrigins.has(origin)) {
170
185
  res.setHeader("Access-Control-Allow-Origin", origin);
171
186
  res.setHeader("Access-Control-Allow-Credentials", "true");
172
187
  }
@@ -176,6 +191,9 @@ return false;}
176
191
  }
177
192
  res.setHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, OPTIONS");
178
193
  res.setHeader("Access-Control-Allow-Headers", "Content-Type, Authorization");
194
+ // SECURITY: Prevent clickjacking via X-Frame-Options and CSP
195
+ res.setHeader("X-Frame-Options", "DENY");
196
+ res.setHeader("Content-Security-Policy", "frame-ancestors 'none'");
179
197
  if (req.method === "OPTIONS") {
180
198
  res.writeHead(204);
181
199
  return res.end();
@@ -614,6 +632,26 @@ return false;}
614
632
  const body = await readBody(req);
615
633
  const parsed = JSON.parse(body);
616
634
  if (parsed.key && parsed.value !== undefined) {
635
+ // SECURITY: Allowlist of dashboard-settable keys to prevent
636
+ // credential overwrite (SUPABASE_KEY, STRIPE_SECRET_KEY, etc.)
637
+ const SETTABLE_KEYS = new Set([
638
+ "PRISM_STORAGE", "SUPABASE_URL", "SUPABASE_KEY",
639
+ "BRAVE_API_KEY", "BRAVE_ANSWERS_API_KEY",
640
+ "GOOGLE_API_KEY", "VOYAGE_API_KEY",
641
+ "FIRECRAWL_API_KEY", "TAVILY_API_KEY",
642
+ "embedding_provider", "embedding_model",
643
+ "PRISM_ENABLE_HIVEMIND", "PRISM_DARK_FACTORY_ENABLED",
644
+ "PRISM_TASK_ROUTER_ENABLED", "PRISM_SCHOLAR_ENABLED",
645
+ "PRISM_HDC_ENABLED", "PRISM_ACTR_ENABLED",
646
+ "PRISM_GRAPH_PRUNING_ENABLED",
647
+ ]);
648
+ const isSkillKey = parsed.key.startsWith("skill:");
649
+ const isTTLKey = parsed.key.startsWith("ttl:");
650
+ const isAutoloadKey = parsed.key.startsWith("autoload:");
651
+ if (!SETTABLE_KEYS.has(parsed.key) && !isSkillKey && !isTTLKey && !isAutoloadKey) {
652
+ res.writeHead(403, { "Content-Type": "application/json" });
653
+ return res.end(JSON.stringify({ error: `Setting key "${parsed.key}" is not allowed via the dashboard.` }));
654
+ }
617
655
  const { setSetting } = await import("../storage/configStorage.js");
618
656
  await setSetting(parsed.key, String(parsed.value));
619
657
  res.writeHead(200, { "Content-Type": "application/json" });
@@ -788,6 +826,16 @@ return false;}
788
826
  res.writeHead(400, { "Content-Type": "application/json" });
789
827
  return res.end(JSON.stringify({ error: "path is required" }));
790
828
  }
829
+ // SECURITY: Restrict import paths to prevent arbitrary file reads.
830
+ // Only allow files from home directory, /tmp, and current working directory.
831
+ const resolvedPath = path.resolve(filePath);
832
+ const homeDir = os.homedir();
833
+ const allowedPrefixes = [homeDir, os.tmpdir(), process.cwd()];
834
+ const isAllowed = allowedPrefixes.some(prefix => resolvedPath.startsWith(prefix + path.sep) || resolvedPath === prefix);
835
+ if (!isAllowed) {
836
+ res.writeHead(403, { "Content-Type": "application/json" });
837
+ return res.end(JSON.stringify({ error: `Import path must be under home directory or /tmp` }));
838
+ }
791
839
  // Verify file exists before starting import
792
840
  if (!fs.existsSync(filePath)) {
793
841
  res.writeHead(400, { "Content-Type": "application/json" });
@@ -235,3 +235,154 @@ async function reconcileLedger(localStorage, projects) {
235
235
  }
236
236
  return totalSynced;
237
237
  }
238
+ /**
239
+ * Push newer local handoffs and ledger entries to Supabase.
240
+ *
241
+ * @param localStorage - The initialized SQLite storage instance
242
+ * @param getLocalTimestamps - Function to bulk-read local handoff timestamps
243
+ */
244
+ export async function pushReconciliation(localStorage, getLocalTimestamps) {
245
+ const result = { handoffsPushed: 0, ledgerEntriesPushed: 0, projects: [] };
246
+ try {
247
+ // Step 1: Get all local handoffs
248
+ let localTimestamps;
249
+ if (getLocalTimestamps) {
250
+ localTimestamps = await getLocalTimestamps();
251
+ }
252
+ else {
253
+ debugLog("[Push Reconcile] No getLocalTimestamps provided — nothing to push");
254
+ return result;
255
+ }
256
+ if (localTimestamps.size === 0) {
257
+ debugLog("[Push Reconcile] No local handoffs — nothing to push");
258
+ return result;
259
+ }
260
+ // Step 2: Fetch all remote handoff timestamps for comparison
261
+ const remoteHandoffs = await withTimeout(supabaseGet("session_handoffs", {
262
+ user_id: `eq.${PRISM_USER_ID}`,
263
+ select: "project,role,updated_at",
264
+ }), RECONCILE_TIMEOUT_MS, "fetch remote handoff timestamps");
265
+ const remoteTimestamps = new Map();
266
+ if (Array.isArray(remoteHandoffs)) {
267
+ for (const r of remoteHandoffs) {
268
+ const key = `${r.project}::${r.role || "global"}`;
269
+ remoteTimestamps.set(key, r.updated_at);
270
+ }
271
+ }
272
+ // Step 3: Find local handoffs that are newer than remote
273
+ const projectsToPush = new Set();
274
+ for (const [key, localUpdatedAt] of localTimestamps) {
275
+ const remoteUpdatedAt = remoteTimestamps.get(key);
276
+ const localIsNewer = !remoteUpdatedAt
277
+ || (localUpdatedAt && new Date(localUpdatedAt) > new Date(remoteUpdatedAt));
278
+ if (localIsNewer) {
279
+ const [project, role] = key.split("::");
280
+ projectsToPush.add(project);
281
+ // Load the full handoff from local storage
282
+ const ctx = await localStorage.loadContext(project, "quick", PRISM_USER_ID, role || "global");
283
+ if (!ctx)
284
+ continue;
285
+ // Upsert to Supabase
286
+ try {
287
+ const { supabasePost } = await import("../utils/supabaseApi.js");
288
+ await withTimeout(supabasePost("session_handoffs", {
289
+ project,
290
+ user_id: PRISM_USER_ID,
291
+ role: role || "global",
292
+ last_summary: ctx.last_summary ?? null,
293
+ pending_todo: ctx.pending_todo ?? [],
294
+ active_decisions: ctx.active_decisions ?? [],
295
+ keywords: ctx.keywords ?? [],
296
+ key_context: ctx.key_context ?? null,
297
+ active_branch: ctx.active_branch ?? null,
298
+ metadata: ctx.metadata ?? {},
299
+ }, {
300
+ on_conflict: "project,user_id,role",
301
+ }, {
302
+ "Prefer": "return=representation,resolution=merge-duplicates",
303
+ }), RECONCILE_TIMEOUT_MS, `push handoff ${project}`);
304
+ result.handoffsPushed++;
305
+ result.projects.push(project);
306
+ debugLog(`[Push Reconcile] Pushed handoff "${project}" (role: ${role || "global"}) to Supabase`);
307
+ }
308
+ catch (pushErr) {
309
+ debugLog(`[Push Reconcile] Failed to push handoff "${project}": ` +
310
+ `${pushErr instanceof Error ? pushErr.message : String(pushErr)}`);
311
+ }
312
+ }
313
+ }
314
+ // Step 4: Push recent ledger entries for pushed projects
315
+ if (projectsToPush.size > 0) {
316
+ const { supabasePost, supabaseGet: sbGet } = await import("../utils/supabaseApi.js");
317
+ for (const project of projectsToPush) {
318
+ try {
319
+ // Get local recent entries
320
+ const localEntries = await localStorage.getLedgerEntries({
321
+ project: `eq.${project}`,
322
+ user_id: `eq.${PRISM_USER_ID}`,
323
+ archived_at: "is.null",
324
+ deleted_at: "is.null",
325
+ order: "created_at.desc",
326
+ limit: "20",
327
+ select: "id,project,conversation_id,summary,user_id,role,todos,files_changed,decisions,keywords,event_type,importance,created_at,session_date",
328
+ });
329
+ if (!Array.isArray(localEntries) || localEntries.length === 0)
330
+ continue;
331
+ // Check which IDs already exist in Supabase
332
+ const localIds = localEntries.map(e => e.id);
333
+ const remoteExisting = await withTimeout(sbGet("session_ledger", {
334
+ id: `in.(${localIds.join(",")})`,
335
+ select: "id",
336
+ }), RECONCILE_TIMEOUT_MS, `check remote ledger for ${project}`);
337
+ const existingRemoteIds = new Set((Array.isArray(remoteExisting) ? remoteExisting : []).map((e) => e.id));
338
+ // Push entries that don't exist remotely
339
+ for (const entry of localEntries) {
340
+ if (existingRemoteIds.has(entry.id))
341
+ continue;
342
+ try {
343
+ await supabasePost("session_ledger", {
344
+ id: entry.id,
345
+ project: entry.project,
346
+ conversation_id: entry.conversation_id || "pushed",
347
+ summary: entry.summary,
348
+ user_id: PRISM_USER_ID,
349
+ role: entry.role || "global",
350
+ todos: safeParseArray(entry.todos),
351
+ files_changed: safeParseArray(entry.files_changed),
352
+ decisions: safeParseArray(entry.decisions),
353
+ keywords: safeParseArray(entry.keywords),
354
+ event_type: entry.event_type || "session",
355
+ importance: entry.importance || 0,
356
+ });
357
+ result.ledgerEntriesPushed++;
358
+ }
359
+ catch (insertErr) {
360
+ const msg = insertErr instanceof Error ? insertErr.message : String(insertErr);
361
+ // Skip duplicate key violations silently
362
+ if (!msg.includes("duplicate") && !msg.includes("23505")) {
363
+ debugLog(`[Push Reconcile] Failed to push ledger entry ${entry.id}: ${msg}`);
364
+ }
365
+ }
366
+ }
367
+ }
368
+ catch (err) {
369
+ debugLog(`[Push Reconcile] Ledger push failed for "${project}": ` +
370
+ `${err instanceof Error ? err.message : String(err)}`);
371
+ }
372
+ }
373
+ }
374
+ if (result.handoffsPushed > 0 || result.ledgerEntriesPushed > 0) {
375
+ debugLog(`[Push Reconcile] Pushed ${result.handoffsPushed} handoff(s)` +
376
+ `${result.ledgerEntriesPushed > 0 ? ` + ${result.ledgerEntriesPushed} ledger entries` : ""}` +
377
+ ` from SQLite β†’ Supabase: ${result.projects.join(", ")}`);
378
+ }
379
+ else {
380
+ debugLog("[Push Reconcile] Supabase already up-to-date with local data");
381
+ }
382
+ }
383
+ catch (err) {
384
+ debugLog(`[Push Reconcile] Failed (non-fatal): ` +
385
+ `${err instanceof Error ? err.message : String(err)}`);
386
+ }
387
+ return result;
388
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "prism-mcp-server",
3
- "version": "9.3.0",
3
+ "version": "9.4.0",
4
4
  "mcpName": "io.github.dcostenco/prism-mcp",
5
5
  "description": "The Mind Palace for AI Agents — a true Cognitive Architecture with Hebbian learning (episodic→semantic consolidation), ACT-R spreading activation (multi-hop causal reasoning), uncertainty-aware rejection gates (agents that know when they don't know), adversarial evaluation (anti-sycophancy), fail-closed Dark Factory pipelines, persistent memory (SQLite/Supabase), multi-agent Hivemind, time travel & visual dashboard. Zero-config local mode.",
6
6
  "module": "index.ts",
@@ -101,7 +101,7 @@
101
101
  "typescript": "^5.0.0"
102
102
  },
103
103
  "dependencies": {
104
- "@anthropic-ai/sdk": "^0.80.0",
104
+ "@anthropic-ai/sdk": "^0.81.0",
105
105
  "@google-cloud/discoveryengine": "^2.5.3",
106
106
  "@google/generative-ai": "^0.24.1",
107
107
  "@libsql/client": "^0.17.2",