clementine-agent 1.1.20 → 1.1.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js CHANGED
@@ -428,17 +428,22 @@ async function cmdRestart(options) {
  }
  }
  function cmdStatus() {
+ const DIM = '\x1b[0;90m';
+ const RESET = '\x1b[0m';
  const pid = readPid();
  const name = getAssistantName();
+ const localVersion = readPkgVersion(PACKAGE_ROOT);
  if (!pid) {
- console.log(` ${name} is not running (no PID file).`);
+ console.log(` ${name} is not running ${DIM}(no PID file, v${localVersion})${RESET}.`);
+ surfaceUpdateNudge(localVersion);
  return;
  }
  if (!isProcessAlive(pid)) {
- console.log(` ${name} is not running (stale PID ${pid}).`);
+ console.log(` ${name} is not running ${DIM}(stale PID ${pid}, v${localVersion})${RESET}.`);
+ surfaceUpdateNudge(localVersion);
  return;
  }
- console.log(` ${name} is running (PID ${pid})`);
+ console.log(` ${name} is running ${DIM}(PID ${pid}, v${localVersion})${RESET}`);
  // Show uptime from PID file mtime
  try {
  const { mtimeMs } = statSync(getPidFilePath());
@@ -468,6 +473,36 @@ function cmdStatus() {
  if (channels.length > 0) {
  console.log(` Channels: ${channels.join(', ')}`);
  }
+ surfaceUpdateNudge(localVersion);
+ }
+ /**
+ * Print a one-line nudge if a newer version is on npm. Reads the cached
+ * result synchronously (no network on the hot path) and fires off an async
+ * refresh in the background so the next call has fresh data.
+ */
+ function surfaceUpdateNudge(localVersion) {
+ const DIM = '\x1b[0;90m';
+ const BOLD = '\x1b[1m';
+ const YELLOW = '\x1b[1;33m';
+ const RESET = '\x1b[0m';
+ try {
+ const cached = (() => {
+ // Lazy require to avoid pulling https/network into trivial CLI calls
+ // when the cache module isn't needed.
+ const { readCachedUpdateCheck } = require('./version-check.js');
+ return readCachedUpdateCheck(BASE_DIR, localVersion);
+ })();
+ if (cached?.updateAvailable && cached.latestVersion) {
+ console.log(` ${YELLOW}⬆${RESET} Update available: ${BOLD}v${cached.latestVersion}${RESET} ${DIM}(you're on v${localVersion})${RESET}`);
+ console.log(` ${DIM}Run: ${BOLD}clementine update restart${RESET}`);
+ }
+ // Fire-and-forget background refresh — never blocks status output.
+ const { checkForUpdate } = require('./version-check.js');
+ void checkForUpdate(BASE_DIR, localVersion).catch(() => { });
+ }
+ catch {
+ // version-check failed to load — degrade silently
+ }
  }
  function cmdDoctor(opts = {}) {
  const DIM = '\x1b[0;90m';
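For orientation, this is roughly what `clementine status` prints once the cached check knows about a newer release. The assistant name, PID, and versions are illustrative; the exact text comes from the console.log calls above:

    $ clementine status
     Clementine is running (PID 12345, v1.1.20)
     ⬆ Update available: v1.1.22 (you're on v1.1.20)
     Run: clementine update restart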
@@ -1991,11 +2026,16 @@ program
  });
  program
  .command('update')
- .description('Pull latest code, rebuild, and reinstall (preserves config)')
- .argument('[action]', 'Optional: "restart" to restart daemon after update')
+ .description('Pull latest code, rebuild, and reinstall (preserves config). Pass "history" to show recent updates.')
+ .argument('[action]', 'Optional: "restart" = restart daemon after update; "history" = show update log')
  .option('--restart', 'Restart daemon after update')
  .option('--dry-run', 'Preview what would happen without making changes')
+ .option('-n, --limit <n>', 'For history mode: max entries to show', '10')
  .action((action, options) => {
+ if (action === 'history') {
+ cmdUpdateHistory(parseInt(options.limit ?? '10', 10));
+ return;
+ }
  if (action === 'restart')
  options.restart = true;
  cmdUpdate(options).catch((err) => {
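A sketch of the resulting CLI surface, with action and flag names taken from the registration above:

    clementine update                 # pull/install latest, keep config
    clementine update restart         # update, then restart the daemon
    clementine update history         # show the last 10 logged updates
    clementine update history -n 25   # show the last 25
    clementine update --dry-run       # preview without changing anything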
@@ -2257,6 +2297,223 @@ agentCmd
  const memoryCmd = program
  .command('memory')
  .description('Search and manage memory');
+ memoryCmd
+ .command('status')
+ .description('Show memory store stats — chunk count, embeddings coverage, agent/category breakdown, salience')
+ .option('--json', 'Emit machine-readable JSON')
+ .action(async (opts) => {
+ const BOLD = '\x1b[1m';
+ const DIM = '\x1b[0;90m';
+ const CYAN = '\x1b[0;36m';
+ const RESET = '\x1b[0m';
+ try {
+ const { MemoryStore } = await import('../memory/store.js');
+ const VAULT_DIR = path.join(BASE_DIR, 'vault');
+ const DB_PATH = path.join(VAULT_DIR, '.memory.db');
+ const store = new MemoryStore(DB_PATH, VAULT_DIR);
+ const stats = store.getMemoryStats();
+ if (opts.json) {
+ console.log(JSON.stringify(stats, null, 2));
+ return;
+ }
+ const pct = stats.totalChunks > 0
+ ? ((stats.chunksWithEmbeddings / stats.totalChunks) * 100).toFixed(1)
+ : '0.0';
+ console.log();
+ console.log(` ${BOLD}Memory store${RESET} ${DIM}${DB_PATH}${RESET}`);
+ console.log();
+ console.log(` Total chunks: ${BOLD}${stats.totalChunks.toLocaleString()}${RESET}`);
+ console.log(` With embeddings: ${stats.chunksWithEmbeddings.toLocaleString()} ${DIM}(${pct}%, TF-IDF 512-dim)${RESET}`);
+ console.log(` Pinned (manual): ${stats.pinnedChunks}`);
+ console.log(` Avg salience: ${stats.avgSalience.toFixed(3)} ${DIM}(0 = no access boost; >1 = strong reinforcement)${RESET}`);
+ if (stats.oldestUpdated) {
+ console.log(` Date range: ${stats.oldestUpdated.slice(0, 10)} → ${stats.newestUpdated?.slice(0, 10)}`);
+ }
+ console.log();
+ console.log(` ${BOLD}Per agent${RESET}`);
+ for (const a of stats.perAgent.slice(0, 10)) {
+ console.log(` ${CYAN}${a.agentSlug.padEnd(28)}${RESET}${a.count.toLocaleString().padStart(8)}`);
+ }
+ if (stats.perAgent.length > 10)
+ console.log(` ${DIM}…and ${stats.perAgent.length - 10} more${RESET}`);
+ console.log();
+ console.log(` ${BOLD}Per category${RESET}`);
+ for (const c of stats.perCategory.slice(0, 10)) {
+ console.log(` ${CYAN}${c.category.padEnd(28)}${RESET}${c.count.toLocaleString().padStart(8)}`);
+ }
+ console.log();
+ }
+ catch (err) {
+ console.error(` Error reading memory stats: ${err}`);
+ process.exit(1);
+ }
+ });
+ memoryCmd
+ .command('pin <chunkId>')
+ .description('Pin a chunk — gives its score a 2x boost in recall (use chunk IDs from `memory search`)')
+ .action(async (chunkIdStr) => {
+ const GREEN = '\x1b[0;32m';
+ const RED = '\x1b[0;31m';
+ const RESET = '\x1b[0m';
+ const chunkId = parseInt(chunkIdStr, 10);
+ if (!Number.isFinite(chunkId) || chunkId <= 0) {
+ console.error(` ${RED}Invalid chunk id${RESET}: "${chunkIdStr}". Use IDs from \`clementine memory search\`.`);
+ process.exit(1);
+ }
+ try {
+ const { MemoryStore } = await import('../memory/store.js');
+ const VAULT_DIR = path.join(BASE_DIR, 'vault');
+ const DB_PATH = path.join(VAULT_DIR, '.memory.db');
+ const store = new MemoryStore(DB_PATH, VAULT_DIR);
+ const ok = store.setPinned(chunkId, true);
+ if (!ok) {
+ console.error(` ${RED}Chunk ${chunkId} not found.${RESET}`);
+ process.exit(1);
+ }
+ console.log(` ${GREEN}✓${RESET} Pinned chunk ${chunkId}. It now gets a 2× boost in memory_recall.`);
+ }
+ catch (err) {
+ console.error(` Error pinning chunk: ${err}`);
+ process.exit(1);
+ }
+ });
+ memoryCmd
+ .command('unpin <chunkId>')
+ .description('Unpin a chunk — removes the manual 2x boost, leaves automatic salience untouched')
+ .action(async (chunkIdStr) => {
+ const GREEN = '\x1b[0;32m';
+ const RED = '\x1b[0;31m';
+ const RESET = '\x1b[0m';
+ const chunkId = parseInt(chunkIdStr, 10);
+ if (!Number.isFinite(chunkId) || chunkId <= 0) {
+ console.error(` ${RED}Invalid chunk id${RESET}: "${chunkIdStr}".`);
+ process.exit(1);
+ }
+ try {
+ const { MemoryStore } = await import('../memory/store.js');
+ const VAULT_DIR = path.join(BASE_DIR, 'vault');
+ const DB_PATH = path.join(VAULT_DIR, '.memory.db');
+ const store = new MemoryStore(DB_PATH, VAULT_DIR);
+ const ok = store.setPinned(chunkId, false);
+ if (!ok) {
+ console.error(` ${RED}Chunk ${chunkId} not found.${RESET}`);
+ process.exit(1);
+ }
+ console.log(` ${GREEN}✓${RESET} Unpinned chunk ${chunkId}.`);
+ }
+ catch (err) {
+ console.error(` Error unpinning chunk: ${err}`);
+ process.exit(1);
+ }
+ });
+ memoryCmd
+ .command('dedup')
+ .description('Find near-duplicate chunks via embedding cosine similarity. Dry-run by default.')
+ .option('--threshold <n>', 'Cosine similarity threshold (0-1)', '0.95')
+ .option('--apply', 'Actually delete duplicates (default: dry-run preview only)')
+ .option('--limit <n>', 'Max clusters to report', '50')
+ .action(async (opts) => {
+ const BOLD = '\x1b[1m';
+ const DIM = '\x1b[0;90m';
+ const GREEN = '\x1b[0;32m';
+ const YELLOW = '\x1b[0;33m';
+ const RESET = '\x1b[0m';
+ const threshold = parseFloat(opts.threshold);
+ const limit = parseInt(opts.limit, 10);
+ try {
+ const { MemoryStore } = await import('../memory/store.js');
+ const VAULT_DIR = path.join(BASE_DIR, 'vault');
+ const DB_PATH = path.join(VAULT_DIR, '.memory.db');
+ const store = new MemoryStore(DB_PATH, VAULT_DIR);
+ const clusters = store.findNearDuplicates({ threshold, limit });
+ if (clusters.length === 0) {
+ console.log(` ${GREEN}No near-duplicates found above threshold ${threshold}.${RESET}`);
+ return;
+ }
+ const totalDupes = clusters.reduce((sum, c) => sum + c.duplicates.length, 0);
+ console.log();
+ console.log(` ${BOLD}Found ${clusters.length} cluster${clusters.length === 1 ? '' : 's'} (${totalDupes} duplicate chunk${totalDupes === 1 ? '' : 's'})${RESET}`);
+ console.log(` ${DIM}Keeping the most-recent chunk per cluster; older copies will be removed if --apply is passed.${RESET}`);
+ console.log();
+ for (const cluster of clusters.slice(0, 20)) {
+ const keepLabel = `${cluster.keep.sourceFile} > ${cluster.keep.section}`;
+ const agent = cluster.keep.agentSlug ?? 'global';
+ console.log(` ${BOLD}KEEP${RESET} #${cluster.keep.chunkId} ${DIM}[${agent}]${RESET} ${keepLabel}`);
+ for (const dup of cluster.duplicates) {
+ const dupLabel = `${dup.sourceFile} > ${dup.section}`;
+ console.log(` ${YELLOW}drop${RESET} #${dup.chunkId} sim=${dup.similarity.toFixed(3)} ${DIM}${dupLabel}${RESET}`);
+ }
+ }
+ if (clusters.length > 20) {
+ console.log(` ${DIM}…and ${clusters.length - 20} more clusters (raise --limit to see them).${RESET}`);
+ }
+ console.log();
+ if (opts.apply) {
+ const allDupeIds = clusters.flatMap(c => c.duplicates.map(d => d.chunkId));
+ const removed = store.deleteChunks(allDupeIds);
+ console.log(` ${GREEN}✓${RESET} Deleted ${removed} duplicate chunk${removed === 1 ? '' : 's'}.`);
+ }
+ else {
+ console.log(` ${DIM}This was a preview. Re-run with ${BOLD}--apply${RESET}${DIM} to delete the duplicates.${RESET}`);
+ }
+ console.log();
+ }
+ catch (err) {
+ console.error(` Error during dedup: ${err}`);
+ process.exit(1);
+ }
+ });
+ memoryCmd
+ .command('cross-agent')
+ .description('Surface chunks that recur across 3+ agents — candidates for promotion to global memory')
+ .option('--threshold <n>', 'Cosine similarity threshold for "same idea" (0-1)', '0.88')
+ .option('--min-agents <n>', 'Minimum distinct agents touched by a cluster', '3')
+ .option('--limit <n>', 'Max clusters to report', '30')
+ .action(async (opts) => {
+ const BOLD = '\x1b[1m';
+ const DIM = '\x1b[0;90m';
+ const CYAN = '\x1b[0;36m';
+ const GREEN = '\x1b[0;32m';
+ const RESET = '\x1b[0m';
+ try {
+ const { MemoryStore } = await import('../memory/store.js');
+ const VAULT_DIR = path.join(BASE_DIR, 'vault');
+ const DB_PATH = path.join(VAULT_DIR, '.memory.db');
+ const store = new MemoryStore(DB_PATH, VAULT_DIR);
+ const clusters = store.findCrossAgentRecurrence({
+ threshold: parseFloat(opts.threshold),
+ minAgents: parseInt(opts.minAgents, 10),
+ limit: parseInt(opts.limit, 10),
+ });
+ if (clusters.length === 0) {
+ console.log(` ${GREEN}No cross-agent recurrence found above threshold ${opts.threshold} touching ${opts.minAgents}+ agents.${RESET}`);
+ return;
+ }
+ console.log();
+ console.log(` ${BOLD}Found ${clusters.length} cluster${clusters.length === 1 ? '' : 's'} recurring across ${opts.minAgents}+ agents${RESET}`);
+ console.log(` ${DIM}These are candidates for promotion to global memory — facts the team has independently arrived at.${RESET}`);
+ console.log();
+ for (const c of clusters) {
+ const preview = c.representative.content.replace(/\n/g, ' ').slice(0, 140);
+ console.log(` ${BOLD}Cluster (${c.agents.length} agents)${RESET} ${CYAN}${c.agents.join(', ')}${RESET}`);
+ console.log(` representative #${c.representative.chunkId} ${DIM}${c.representative.sourceFile} > ${c.representative.section}${RESET}`);
+ console.log(` ${DIM}${preview}${preview.length >= 140 ? '…' : ''}${RESET}`);
+ for (const m of c.members.slice(1, 4)) {
+ console.log(` ${DIM}└─ #${m.chunkId} [${m.agentSlug}] sim=${m.similarity.toFixed(3)}${RESET}`);
+ }
+ if (c.members.length > 4) {
+ console.log(` ${DIM}└─ +${c.members.length - 4} more${RESET}`);
+ }
+ console.log();
+ }
+ console.log(` ${DIM}To promote a chunk to global, use the agent-side ${BOLD}memory_promote${RESET}${DIM} tool with the chunk id, or pin it with ${BOLD}clementine memory pin <id>${RESET}${DIM} for now.${RESET}`);
+ console.log();
+ }
+ catch (err) {
+ console.error(` Error finding cross-agent recurrence: ${err}`);
+ process.exit(1);
+ }
+ });
  memoryCmd
  .command('search <query>')
  .description('Search memory (full-text)')
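Taken together, the subcommands registered above give the following CLI surface (chunk id 42 is a placeholder):

    clementine memory status                  # human-readable store stats
    clementine memory status --json           # machine-readable
    clementine memory pin 42                  # 2x recall boost for chunk 42
    clementine memory unpin 42                # remove the manual boost
    clementine memory dedup                   # dry-run duplicate preview (threshold 0.95)
    clementine memory dedup --apply           # actually delete duplicates
    clementine memory cross-agent --min-agents 4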
@@ -2281,10 +2538,13 @@ memoryCmd
  const source = r.sourceFile ? path.basename(r.sourceFile) : 'unknown';
  const section = r.section || '';
  const snippet = r.content.replace(/\n/g, ' ').slice(0, 120);
- console.log(` ${BOLD}${source}${RESET}${section ? ` ${CYAN}${section}${RESET}` : ''}`);
+ const pinned = r.pinned ? ' 📌' : '';
+ console.log(` ${DIM}#${r.chunkId}${RESET} ${BOLD}${source}${RESET}${section ? ` › ${CYAN}${section}${RESET}` : ''}${pinned}`);
  console.log(` ${DIM}${snippet}${snippet.length >= 120 ? '…' : ''}${RESET}`);
  console.log();
  }
+ console.log(` ${DIM}Tip: pin a chunk to boost its score in recall — ${BOLD}clementine memory pin <id>${RESET}`);
+ console.log();
  }
  catch (err) {
  console.error(` Error searching memory: ${err}`);
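With the chunk id and pin marker added, a search hit now renders along these lines (content illustrative):

    #42 MEMORY.md › Preferences 📌
      User prefers short, numbered status updates…

    Tip: pin a chunk to boost its score in recall — clementine memory pin <id>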
@@ -2363,14 +2623,113 @@ projectsCmd
  }
  });
  // ── Update command ──────────────────────────────────────────────────
+ /** Print the last N entries from update-history.jsonl. */
+ function cmdUpdateHistory(limit) {
+ const BOLD = '\x1b[1m';
+ const DIM = '\x1b[0;90m';
+ const GREEN = '\x1b[0;32m';
+ const RED = '\x1b[0;31m';
+ const RESET = '\x1b[0m';
+ const historyPath = path.join(BASE_DIR, 'update-history.jsonl');
+ if (!existsSync(historyPath)) {
+ console.log();
+ console.log(` ${DIM}No update history yet (${historyPath} doesn't exist).${RESET}`);
+ console.log(` Run ${BOLD}clementine update${RESET} once to start the log.`);
+ console.log();
+ return;
+ }
+ const lines = readFileSync(historyPath, 'utf-8').split('\n').filter(Boolean);
+ const entries = lines
+ .map(l => { try {
+ return JSON.parse(l);
+ }
+ catch {
+ return null;
+ } })
+ .filter((e) => e !== null)
+ .slice(-Math.max(1, limit))
+ .reverse();
+ if (entries.length === 0) {
+ console.log(` ${DIM}History file exists but is empty or unparseable.${RESET}`);
+ return;
+ }
+ console.log();
+ console.log(` ${BOLD}Update history${RESET} ${DIM}(${historyPath})${RESET}`);
+ console.log();
+ for (const e of entries) {
+ const ts = String(e.timestamp ?? '').slice(0, 19).replace('T', ' ');
+ const from = String(e.fromVersion ?? '?');
+ const to = String(e.toVersion ?? '?');
+ const flavor = String(e.flavor ?? 'git');
+ const failed = e.failed === true;
+ const arrow = from === to ? '=' : '→';
+ const verLabel = failed
+ ? `${RED}v${from} ${arrow} v${to} FAILED${RESET}`
+ : (from === to ? `${DIM}v${from}${RESET}` : `v${from} ${arrow} ${BOLD}v${to}${RESET}`);
+ const dur = typeof e.durationMs === 'number' ? ` ${DIM}(${Math.round(e.durationMs / 1000)}s)${RESET}` : '';
+ console.log(` ${DIM}${ts}${RESET} ${verLabel} ${DIM}[${flavor}]${RESET}${dur}`);
+ if (typeof e.commitHash === 'string' && e.commitHash) {
+ console.log(` ${DIM}commit ${e.commitHash}${e.commitDate ? ` (${e.commitDate})` : ''}, ${e.commitsPulled ?? 0} commit${e.commitsPulled === 1 ? '' : 's'} pulled${RESET}`);
+ }
+ if (typeof e.summary === 'string' && e.summary) {
+ const trimmed = e.summary.length > 100 ? e.summary.slice(0, 100) + '…' : e.summary;
+ console.log(` ${DIM}${trimmed}${RESET}`);
+ }
+ if (failed && typeof e.error === 'string') {
+ console.log(` ${RED}error: ${e.error.slice(0, 120)}${RESET}`);
+ }
+ const modSummary = [];
+ if (typeof e.modsReapplied === 'number' && e.modsReapplied > 0)
+ modSummary.push(`${e.modsReapplied} re-applied`);
+ if (typeof e.modsSuperseded === 'number' && e.modsSuperseded > 0)
+ modSummary.push(`${e.modsSuperseded} superseded`);
+ if (typeof e.modsNeedReconciliation === 'number' && e.modsNeedReconciliation > 0)
+ modSummary.push(`${e.modsNeedReconciliation} need attention`);
+ if (typeof e.modsFailed === 'number' && e.modsFailed > 0)
+ modSummary.push(`${e.modsFailed} failed`);
+ if (modSummary.length > 0) {
+ console.log(` ${DIM}source mods: ${modSummary.join(', ')}${RESET}`);
+ }
+ }
+ console.log();
+ console.log(` ${GREEN}Showing ${entries.length}${RESET}${DIM} of ${lines.length} total entries.${RESET}`);
+ console.log();
+ }
+ /** Read the npm version from a package.json (returns 'unknown' on failure). */
+ function readPkgVersion(packageRoot) {
+ try {
+ const pkgPath = path.join(packageRoot, 'package.json');
+ if (!existsSync(pkgPath))
+ return 'unknown';
+ const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'));
+ return pkg.version ?? 'unknown';
+ }
+ catch {
+ return 'unknown';
+ }
+ }
+ /** Append one line to the update-history log. Append-only, never throws. */
+ function appendUpdateHistory(entry) {
+ try {
+ const historyPath = path.join(BASE_DIR, 'update-history.jsonl');
+ const line = JSON.stringify({ timestamp: new Date().toISOString(), ...entry }) + '\n';
+ require('node:fs').appendFileSync(historyPath, line, { mode: 0o600 });
+ }
+ catch {
+ // Non-fatal — history is observability, not critical state.
+ }
+ }
  async function cmdUpdate(options) {
  const DIM = '\x1b[0;90m';
+ const BOLD = '\x1b[1m';
  const GREEN = '\x1b[0;32m';
  const YELLOW = '\x1b[1;33m';
  const RED = '\x1b[0;31m';
  const RESET = '\x1b[0m';
+ const updateStartedAt = Date.now();
+ const previousVersion = readPkgVersion(PACKAGE_ROOT);
  console.log();
- console.log(` ${DIM}Updating ${getAssistantName()}...${RESET}`);
+ console.log(` ${DIM}Updating ${getAssistantName()} (current: v${previousVersion})...${RESET}`);
  console.log();
  // 1. Detect install flavor. Two valid paths:
  // - git-clone install (PACKAGE_ROOT has .git) → pull + rebuild path below
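Given the fields written by appendUpdateHistory and read back by cmdUpdateHistory, a successful git-flavor entry in update-history.jsonl would look roughly like this (values illustrative):

    {"timestamp":"2026-01-10T09:30:00.000Z","flavor":"git","fromVersion":"1.1.20","toVersion":"1.1.22","commitHash":"a1b2c3d4e5","commitDate":"2026-01-09","commitsPulled":7,"summary":"fix: …","modsReapplied":1,"modsSuperseded":0,"modsNeedReconciliation":0,"modsFailed":0,"durationMs":48210,"restartRequested":true}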
@@ -2388,12 +2747,36 @@ async function cmdUpdate(options) {
  console.log();
  try {
  execSync('npm install -g clementine-agent@latest', { stdio: 'inherit' });
+ const newVersion = readPkgVersion(PACKAGE_ROOT);
  console.log();
- console.log(` ${GREEN}OK${RESET} Updated via npm`);
+ if (previousVersion !== 'unknown' && newVersion !== 'unknown' && previousVersion !== newVersion) {
+ console.log(` ${GREEN}OK${RESET} Updated v${previousVersion} → ${BOLD}v${newVersion}${RESET}`);
+ }
+ else if (previousVersion === newVersion) {
+ console.log(` ${GREEN}OK${RESET} Already on latest (v${newVersion})`);
+ }
+ else {
+ console.log(` ${GREEN}OK${RESET} Updated via npm`);
+ }
+ appendUpdateHistory({
+ flavor: 'npm-global',
+ fromVersion: previousVersion,
+ toVersion: newVersion,
+ durationMs: Date.now() - updateStartedAt,
+ restartRequested: !!options.restart,
+ });
  }
  catch (err) {
  console.error(` ${RED}FAIL${RESET} npm update failed: ${String(err).slice(0, 200)}`);
  console.error(` ${YELLOW}Hint${RESET} If you see EACCES, see README "Troubleshooting" for npm prefix setup.`);
+ appendUpdateHistory({
+ flavor: 'npm-global',
+ fromVersion: previousVersion,
+ toVersion: previousVersion,
+ durationMs: Date.now() - updateStartedAt,
+ failed: true,
+ error: String(err).slice(0, 300),
+ });
  process.exit(1);
  }
  if (options.restart) {
@@ -2932,6 +3315,26 @@ async function cmdUpdate(options) {
  }).trim().slice(0, 10);
  }
  catch { /* best effort */ }
+ // Capture the new version once the build is verified — package.json on
+ // disk is now authoritative for the version we're about to run.
+ const newVersion = readPkgVersion(PACKAGE_ROOT);
+ // Persist update history before the restart (in case daemon restart fails,
+ // we still have the record of what was attempted).
+ appendUpdateHistory({
+ flavor: 'git',
+ fromVersion: previousVersion,
+ toVersion: newVersion,
+ commitHash,
+ commitDate,
+ commitsPulled,
+ summary: pullSummary.split('\n').slice(0, 5).join('; '),
+ modsReapplied: reconcileResult?.reapplied.length ?? 0,
+ modsSuperseded: reconcileResult?.superseded.length ?? 0,
+ modsNeedReconciliation: reconcileResult?.needsReconciliation.length ?? 0,
+ modsFailed: reconcileResult?.failed.length ?? 0,
+ durationMs: Date.now() - updateStartedAt,
+ restartRequested: !!(options.restart || wasRunning),
+ });
  if (options.restart || wasRunning) {
  const sentinelPath = path.join(BASE_DIR, '.restart-sentinel.json');
  const sentinel = {
@@ -2939,6 +3342,8 @@ async function cmdUpdate(options) {
  restartedAt: new Date().toISOString(),
  reason: 'update',
  updateDetails: {
+ previousVersion,
+ newVersion,
  commitHash,
  commitDate,
  commitsBehind: commitsPulled,
@@ -3029,7 +3434,13 @@ async function cmdUpdate(options) {
  }
  // 14. Show current version
  console.log();
- if (commitHash) {
+ if (previousVersion !== 'unknown' && newVersion !== 'unknown' && previousVersion !== newVersion) {
+ console.log(` ${GREEN}Updated v${previousVersion} → ${BOLD}v${newVersion}${RESET}${commitHash ? ` ${DIM}(${commitHash})${RESET}` : ''}`);
+ }
+ else if (previousVersion === newVersion && previousVersion !== 'unknown') {
+ console.log(` ${GREEN}Already on latest (v${newVersion})${RESET}${commitHash ? ` ${DIM}(${commitHash})${RESET}` : ''}`);
+ }
+ else if (commitHash) {
  console.log(` ${GREEN}Updated to ${commitHash} (${commitDate})${RESET}`);
  }
  else {
package/dist/cli/version-check.d.ts ADDED
@@ -0,0 +1,35 @@
+ /**
+ * Background "is there a newer version on npm?" check.
+ *
+ * Polls the public npm registry once per `CACHE_TTL_MS` (default 24h) and
+ * caches the result on disk so subsequent calls are instant. Surfaced in
+ * `clementine status` and the dashboard header so the user discovers
+ * updates without remembering to run `clementine update`.
+ *
+ * Pure read-only — never installs anything. Network failures are silent
+ * (offline → no nudge, not an error).
+ */
+ export interface VersionCheckResult {
+ localVersion: string;
+ latestVersion: string | null;
+ /** True when latestVersion is strictly greater than localVersion. */
+ updateAvailable: boolean;
+ /** ISO of last successful registry check. null = never checked or fetch failed. */
+ checkedAt: string | null;
+ /** True when the cache was used (no network call this invocation). */
+ fromCache: boolean;
+ }
+ /**
+ * Check whether a newer version is available. Uses the cache when fresh.
+ * Pass `force = true` to bypass the cache (e.g. from a "check now" CLI flag).
+ */
+ export declare function checkForUpdate(baseDir: string, localVersion: string, opts?: {
+ force?: boolean;
+ }): Promise<VersionCheckResult>;
+ /**
+ * Synchronous read of the cached result — used in fast paths like
+ * `clementine status` so we never block on a network call. Returns null
+ * when there's no cache yet.
+ */
+ export declare function readCachedUpdateCheck(baseDir: string, localVersion: string): VersionCheckResult | null;
+ //# sourceMappingURL=version-check.d.ts.map
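A minimal consumer sketch of these declarations; the base directory and version string are placeholders, not values from the package:

    import { checkForUpdate, readCachedUpdateCheck } from './version-check.js';

    // Fast path: synchronous cache read, never touches the network.
    const cached = readCachedUpdateCheck('/home/me/.clementine', '1.1.20');
    if (cached?.updateAvailable) console.log(`v${cached.latestVersion} is available`);

    // Slow path: hits the registry when the 24h cache is stale (or force: true).
    const fresh = await checkForUpdate('/home/me/.clementine', '1.1.20', { force: true });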
package/dist/cli/version-check.js ADDED
@@ -0,0 +1,147 @@
+ /**
+ * Background "is there a newer version on npm?" check.
+ *
+ * Polls the public npm registry once per `CACHE_TTL_MS` (default 24h) and
+ * caches the result on disk so subsequent calls are instant. Surfaced in
+ * `clementine status` and the dashboard header so the user discovers
+ * updates without remembering to run `clementine update`.
+ *
+ * Pure read-only — never installs anything. Network failures are silent
+ * (offline → no nudge, not an error).
+ */
+ import { existsSync, readFileSync, writeFileSync } from 'node:fs';
+ import path from 'node:path';
+ import https from 'node:https';
+ const PACKAGE_NAME = 'clementine-agent';
+ const CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
+ function cachePath(baseDir) {
+ return path.join(baseDir, '.update-check.json');
+ }
+ function readCache(baseDir) {
+ try {
+ const p = cachePath(baseDir);
+ if (!existsSync(p))
+ return null;
+ return JSON.parse(readFileSync(p, 'utf-8'));
+ }
+ catch {
+ return null;
+ }
+ }
+ function writeCache(baseDir, entry) {
+ try {
+ writeFileSync(cachePath(baseDir), JSON.stringify(entry, null, 2), { mode: 0o600 });
+ }
+ catch {
+ // Non-fatal — cache is an optimization, not state.
+ }
+ }
+ /**
+ * Fetch the latest published version of the package from npm. Resolves null
+ * on any network/parse error so callers can degrade silently.
+ */
+ function fetchLatestFromNpm(timeoutMs = 5000) {
+ return new Promise(resolve => {
+ const req = https.get(`https://registry.npmjs.org/${PACKAGE_NAME}/latest`, { timeout: timeoutMs, headers: { Accept: 'application/json' } }, res => {
+ if (res.statusCode !== 200) {
+ res.resume();
+ resolve(null);
+ return;
+ }
+ let body = '';
+ res.setEncoding('utf-8');
+ res.on('data', chunk => { body += chunk; });
+ res.on('end', () => {
+ try {
+ const parsed = JSON.parse(body);
+ resolve(parsed.version ?? null);
+ }
+ catch {
+ resolve(null);
+ }
+ });
+ });
+ req.on('error', () => resolve(null));
+ req.on('timeout', () => { req.destroy(); resolve(null); });
+ });
+ }
+ /**
+ * Compare two semver strings lexicographically by parts. Returns positive
+ * when `a` > `b`, negative when `a` < `b`, zero when equal. Tolerates
+ * pre-release suffixes by ignoring them (we only care about released bumps).
+ */
+ function compareSemver(a, b) {
+ if (a === b)
+ return 0;
+ const partsA = a.replace(/[-+].*$/, '').split('.').map(n => parseInt(n, 10) || 0);
+ const partsB = b.replace(/[-+].*$/, '').split('.').map(n => parseInt(n, 10) || 0);
+ for (let i = 0; i < Math.max(partsA.length, partsB.length); i++) {
+ const av = partsA[i] ?? 0;
+ const bv = partsB[i] ?? 0;
+ if (av !== bv)
+ return av - bv;
+ }
+ return 0;
+ }
+ /**
+ * Check whether a newer version is available. Uses the cache when fresh.
+ * Pass `force = true` to bypass the cache (e.g. from a "check now" CLI flag).
+ */
+ export async function checkForUpdate(baseDir, localVersion, opts = {}) {
+ const cache = readCache(baseDir);
+ const now = Date.now();
+ const cacheFresh = !!cache && (now - new Date(cache.checkedAt).getTime() < CACHE_TTL_MS);
+ if (cache && cacheFresh && !opts.force) {
+ return {
+ localVersion,
+ latestVersion: cache.latestVersion,
+ updateAvailable: compareSemver(cache.latestVersion, localVersion) > 0,
+ checkedAt: cache.checkedAt,
+ fromCache: true,
+ };
+ }
+ const latest = await fetchLatestFromNpm();
+ if (!latest) {
+ // Couldn't reach the registry — fall back to stale cache if we have one.
+ if (cache) {
+ return {
+ localVersion,
+ latestVersion: cache.latestVersion,
+ updateAvailable: compareSemver(cache.latestVersion, localVersion) > 0,
+ checkedAt: cache.checkedAt,
+ fromCache: true,
+ };
+ }
+ return { localVersion, latestVersion: null, updateAvailable: false, checkedAt: null, fromCache: false };
+ }
+ writeCache(baseDir, {
+ checkedAt: new Date().toISOString(),
+ latestVersion: latest,
+ observedLocalVersion: localVersion,
+ });
+ return {
+ localVersion,
+ latestVersion: latest,
+ updateAvailable: compareSemver(latest, localVersion) > 0,
+ checkedAt: new Date().toISOString(),
+ fromCache: false,
+ };
+ }
+ /**
+ * Synchronous read of the cached result — used in fast paths like
+ * `clementine status` so we never block on a network call. Returns null
+ * when there's no cache yet.
+ */
+ export function readCachedUpdateCheck(baseDir, localVersion) {
+ const cache = readCache(baseDir);
+ if (!cache)
+ return null;
+ return {
+ localVersion,
+ latestVersion: cache.latestVersion,
+ updateAvailable: compareSemver(cache.latestVersion, localVersion) > 0,
+ checkedAt: cache.checkedAt,
+ fromCache: true,
+ };
+ }
+ //# sourceMappingURL=version-check.js.map
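The compareSemver helper compares numeric parts rather than raw strings, so per the implementation above:

    compareSemver('1.1.22', '1.1.20');      // > 0, update available
    compareSemver('1.10.0', '1.9.9');       // > 0 (10 > 9 numerically; a plain string compare would get this wrong)
    compareSemver('1.2.0-beta.1', '1.2.0'); // 0, because the /[-+].*$/ replace strips pre-release/build suffixes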
package/dist/index.js CHANGED
@@ -888,8 +888,14 @@ async function asyncMain() {
  else if (sentinel.reason === 'update' && sentinel.updateDetails) {
  const d = sentinel.updateDetails;
  const parts = [];
- // Version info
- if (d.commitHash) {
+ // Version info — prefer semver transition over commit hash for human readability.
+ if (d.previousVersion && d.newVersion && d.previousVersion !== d.newVersion) {
+ parts.push(`Updated v${d.previousVersion} → v${d.newVersion}`);
+ }
+ else if (d.newVersion) {
+ parts.push(`Now on v${d.newVersion}`);
+ }
+ else if (d.commitHash) {
  parts.push(`Updated to ${d.commitHash}${d.commitDate ? ` (${d.commitDate})` : ''}`);
  }
  else {
package/dist/memory/store.d.ts CHANGED
@@ -50,6 +50,94 @@ export declare class MemoryStore {
  private get conn();
  /** Return the total number of indexed chunks. */
  getChunkCount(): number;
+ /** Toggle the manual pin flag on a chunk. Pinned chunks get a 2x score boost in recall. */
+ setPinned(chunkId: number, pinned: boolean): boolean;
+ /**
+ * Aggregate stats for the memory store — used by `clementine memory status`.
+ * Single-pass scans so it stays fast even on large chunk tables.
+ */
+ getMemoryStats(): {
+ totalChunks: number;
+ chunksWithEmbeddings: number;
+ pinnedChunks: number;
+ perAgent: Array<{
+ agentSlug: string;
+ count: number;
+ }>;
+ perCategory: Array<{
+ category: string;
+ count: number;
+ }>;
+ avgSalience: number;
+ oldestUpdated: string | null;
+ newestUpdated: string | null;
+ };
+ /**
+ * Find clusters of near-duplicate chunks using embedding cosine similarity.
+ * Returns clusters where at least 2 chunks score above the threshold.
+ *
+ * Caller decides what to do — typical use is `clementine memory dedup` to
+ * preview / merge / mark-superseded. Per-pair O(n²) within agent scope to
+ * keep the search space tractable; cross-agent dupes are surfaced separately
+ * by the auto-promote flow.
+ */
+ findNearDuplicates(opts?: {
+ threshold?: number;
+ minLen?: number;
+ limit?: number;
+ }): Array<{
+ keep: {
+ chunkId: number;
+ sourceFile: string;
+ section: string;
+ content: string;
+ agentSlug: string | null;
+ updatedAt: string | null;
+ };
+ duplicates: Array<{
+ chunkId: number;
+ sourceFile: string;
+ section: string;
+ content: string;
+ agentSlug: string | null;
+ updatedAt: string | null;
+ similarity: number;
+ }>;
+ }>;
+ /** Delete chunks by id. Used by dedup --apply. */
+ deleteChunks(chunkIds: number[]): number;
+ /**
+ * Find chunks whose semantic content recurs across 3+ different agents —
+ * candidates for promotion to global memory. Detection and surfacing only.
+ * The user (or a future cron) decides whether to actually promote.
+ *
+ * Approach: scan agent-scoped chunks with embeddings, cluster cross-agent
+ * pairs above the similarity threshold, return clusters touching >= minAgents
+ * distinct agents. Limits keep the O(n²) scan tractable on large stores.
+ */
+ findCrossAgentRecurrence(opts?: {
+ threshold?: number;
+ minAgents?: number;
+ minLen?: number;
+ limit?: number;
+ }): Array<{
+ representative: {
+ chunkId: number;
+ sourceFile: string;
+ section: string;
+ content: string;
+ agentSlug: string;
+ };
+ members: Array<{
+ chunkId: number;
+ sourceFile: string;
+ section: string;
+ agentSlug: string;
+ similarity: number;
+ updatedAt: string | null;
+ }>;
+ agents: string[];
+ }>;
  /**
  * Scan the entire vault, hash-compare, and re-index changed files.
  */
package/dist/memory/store.js CHANGED
@@ -173,6 +173,15 @@ export class MemoryStore {
  catch {
  // Column already exists
  }
+ // Add pinned flag — manual salience reinforcement. When true, recall
+ // applies an extra score boost on top of the access-pattern salience.
+ // Toggled by `clementine memory pin/unpin <chunkId>` (or the dashboard).
+ try {
+ this.conn.exec('ALTER TABLE chunks ADD COLUMN pinned INTEGER DEFAULT 0');
+ }
+ catch {
+ // Column already exists
+ }
  // Indexes for category/topic filtering
  try {
  this.conn.exec('CREATE INDEX idx_chunks_category ON chunks(category)');
@@ -737,6 +746,213 @@ export class MemoryStore {
  return 0;
  }
  }
+ /** Toggle the manual pin flag on a chunk. Pinned chunks get a 2x score boost in recall. */
+ setPinned(chunkId, pinned) {
+ try {
+ const result = this.conn.prepare('UPDATE chunks SET pinned = ? WHERE id = ?')
+ .run(pinned ? 1 : 0, chunkId);
+ return result.changes > 0;
+ }
+ catch {
+ return false;
+ }
+ }
+ /**
+ * Aggregate stats for the memory store — used by `clementine memory status`.
+ * Single-pass scans so it stays fast even on large chunk tables.
+ */
+ getMemoryStats() {
+ const totalChunks = this.getChunkCount();
+ const chunksWithEmbeddings = this.conn
+ .prepare('SELECT COUNT(*) as cnt FROM chunks WHERE embedding IS NOT NULL')
+ .get()?.cnt ?? 0;
+ const pinnedChunks = this.conn
+ .prepare('SELECT COUNT(*) as cnt FROM chunks WHERE pinned = 1')
+ .get()?.cnt ?? 0;
+ const perAgent = this.conn
+ .prepare(`SELECT COALESCE(agent_slug, 'global') as agentSlug, COUNT(*) as count
+ FROM chunks GROUP BY agent_slug ORDER BY count DESC`)
+ .all();
+ const perCategory = this.conn
+ .prepare(`SELECT COALESCE(category, '(none)') as category, COUNT(*) as count
+ FROM chunks GROUP BY category ORDER BY count DESC`)
+ .all();
+ const avgRow = this.conn
+ .prepare('SELECT AVG(salience) as avg FROM chunks WHERE salience > 0')
+ .get();
+ const dateRow = this.conn
+ .prepare('SELECT MIN(updated_at) as oldest, MAX(updated_at) as newest FROM chunks WHERE updated_at IS NOT NULL')
+ .get();
+ return {
+ totalChunks,
+ chunksWithEmbeddings,
+ pinnedChunks,
+ perAgent,
+ perCategory,
+ avgSalience: avgRow?.avg ?? 0,
+ oldestUpdated: dateRow?.oldest ?? null,
+ newestUpdated: dateRow?.newest ?? null,
+ };
+ }
+ /**
+ * Find clusters of near-duplicate chunks using embedding cosine similarity.
+ * Returns clusters where at least 2 chunks score above the threshold.
+ *
+ * Caller decides what to do — typical use is `clementine memory dedup` to
+ * preview / merge / mark-superseded. Per-pair O(n²) within agent scope to
+ * keep the search space tractable; cross-agent dupes are surfaced separately
+ * by the auto-promote flow.
+ */
+ findNearDuplicates(opts = {}) {
+ const threshold = opts.threshold ?? 0.95;
+ const minLen = opts.minLen ?? 80; // skip very short chunks — they collide too easily
+ const limitClusters = opts.limit ?? 50; // cap results so the CLI stays readable
+ if (!embeddingsModule.isReady())
+ return [];
+ const rows = this.conn.prepare(`SELECT id, source_file, section, content, embedding, agent_slug, updated_at
+ FROM chunks
+ WHERE embedding IS NOT NULL AND length(content) >= ?
+ ORDER BY agent_slug, updated_at DESC`).all(minLen);
+ // Group by agent first — only compare within the same scope to bound the
+ // O(n²) blow-up. Cross-agent dedup is the auto-promote flow's job.
+ const buckets = new Map();
+ for (const r of rows) {
+ const key = r.agent_slug ?? '__global__';
+ if (!buckets.has(key))
+ buckets.set(key, []);
+ buckets.get(key).push(r);
+ }
+ const clusters = [];
+ const consumed = new Set();
+ for (const bucket of buckets.values()) {
+ // Decode embeddings once per row.
+ const decoded = bucket.map(r => ({
+ ...r,
+ vec: embeddingsModule.deserializeEmbedding(r.embedding),
+ }));
+ for (let i = 0; i < decoded.length; i++) {
+ if (consumed.has(decoded[i].id))
+ continue;
+ const head = decoded[i];
+ const dupes = [];
+ for (let j = i + 1; j < decoded.length; j++) {
+ if (consumed.has(decoded[j].id))
+ continue;
+ const sim = embeddingsModule.cosineSimilarity(head.vec, decoded[j].vec);
+ if (sim >= threshold) {
+ dupes.push({
+ chunkId: decoded[j].id,
+ sourceFile: decoded[j].source_file,
+ section: decoded[j].section,
+ content: decoded[j].content,
+ agentSlug: decoded[j].agent_slug,
+ updatedAt: decoded[j].updated_at,
+ similarity: sim,
+ });
+ consumed.add(decoded[j].id);
+ }
+ }
+ if (dupes.length > 0) {
+ consumed.add(head.id);
+ clusters.push({
+ keep: {
+ chunkId: head.id,
+ sourceFile: head.source_file,
+ section: head.section,
+ content: head.content,
+ agentSlug: head.agent_slug,
+ updatedAt: head.updated_at,
+ },
+ duplicates: dupes,
+ });
+ if (clusters.length >= limitClusters)
+ return clusters;
+ }
+ }
+ }
+ return clusters;
+ }
+ /** Delete chunks by id. Used by dedup --apply. */
+ deleteChunks(chunkIds) {
+ if (!chunkIds.length)
+ return 0;
+ const placeholders = chunkIds.map(() => '?').join(',');
+ const result = this.conn.prepare(`DELETE FROM chunks WHERE id IN (${placeholders})`).run(...chunkIds);
+ return result.changes;
+ }
+ /**
+ * Find chunks whose semantic content recurs across 3+ different agents —
+ * candidates for promotion to global memory. Detection and surfacing only.
+ * The user (or a future cron) decides whether to actually promote.
+ *
+ * Approach: scan agent-scoped chunks with embeddings, cluster cross-agent
+ * pairs above the similarity threshold, return clusters touching >= minAgents
+ * distinct agents. Limits keep the O(n²) scan tractable on large stores.
+ */
+ findCrossAgentRecurrence(opts = {}) {
+ const threshold = opts.threshold ?? 0.88; // looser than dedup — paraphrases count
+ const minAgents = opts.minAgents ?? 3;
+ const minLen = opts.minLen ?? 100;
+ const limitClusters = opts.limit ?? 30;
+ if (!embeddingsModule.isReady())
+ return [];
+ // Only consider chunks that ARE agent-scoped (NULL = already global).
+ const rows = this.conn.prepare(`SELECT id, source_file, section, content, embedding, agent_slug, updated_at
+ FROM chunks
+ WHERE embedding IS NOT NULL
+ AND agent_slug IS NOT NULL
+ AND length(content) >= ?
+ ORDER BY updated_at DESC`).all(minLen);
+ if (rows.length < minAgents)
+ return [];
+ const decoded = rows.map(r => ({ ...r, vec: embeddingsModule.deserializeEmbedding(r.embedding) }));
+ const clusters = [];
+ const consumed = new Set();
+ for (let i = 0; i < decoded.length; i++) {
+ if (consumed.has(decoded[i].id))
+ continue;
+ const head = decoded[i];
+ const members = [
+ { chunkId: head.id, sourceFile: head.source_file, section: head.section, agentSlug: head.agent_slug, similarity: 1.0, updatedAt: head.updated_at },
+ ];
+ const agentsTouched = new Set([head.agent_slug]);
+ for (let j = i + 1; j < decoded.length; j++) {
+ if (consumed.has(decoded[j].id))
+ continue;
+ const sim = embeddingsModule.cosineSimilarity(head.vec, decoded[j].vec);
+ if (sim >= threshold) {
+ members.push({
+ chunkId: decoded[j].id,
+ sourceFile: decoded[j].source_file,
+ section: decoded[j].section,
+ agentSlug: decoded[j].agent_slug,
+ similarity: sim,
+ updatedAt: decoded[j].updated_at,
+ });
+ agentsTouched.add(decoded[j].agent_slug);
+ }
+ }
+ if (agentsTouched.size >= minAgents) {
+ // Mark all in this cluster consumed so we don't re-cluster around them.
+ for (const m of members)
+ consumed.add(m.chunkId);
+ clusters.push({
+ representative: {
+ chunkId: head.id,
+ sourceFile: head.source_file,
+ section: head.section,
+ content: head.content,
+ agentSlug: head.agent_slug,
+ },
+ members,
+ agents: Array.from(agentsTouched).sort(),
+ });
+ if (clusters.length >= limitClusters)
+ break;
+ }
+ }
+ return clusters;
+ }
  // ── Full Sync ──────────────────────────────────────────────────────
  /**
  * Scan the entire vault, hash-compare, and re-index changed files.
@@ -878,7 +1094,7 @@ export class MemoryStore {
  try {
  let sql = `SELECT c.id, c.source_file, c.section, c.content, c.chunk_type,
  c.updated_at, c.salience, c.last_outcome_score, c.agent_slug, c.category, c.topic,
- bm25(chunks_fts) as score
+ c.pinned, bm25(chunks_fts) as score
  FROM chunks_fts f
  JOIN chunks c ON c.id = f.rowid
  WHERE chunks_fts MATCH ?`;
@@ -912,6 +1128,7 @@ export class MemoryStore {
  agentSlug: row.agent_slug ?? null,
  category: row.category,
  topic: row.topic,
+ pinned: row.pinned === 1,
  }));
  }
  catch {
@@ -1030,6 +1247,12 @@ export class MemoryStore {
  if (r.salience > 0) {
  r.score *= 1.0 + r.salience;
  }
+ // Manual pin: stronger boost than access-pattern salience. Toggled via
+ // `clementine memory pin <chunkId>`. Doubles the relevance score so
+ // pinned chunks consistently rank near the top within their relevance band.
+ if (r.pinned) {
+ r.score *= 2.0;
+ }
  // Outcome-driven adjustment: chunks that recently got cited in
  // responses get a small boost; chunks that were pulled in and
  // ignored get a small penalty. Bounded to ±30% so outcome noise
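A worked example of the stacked boosts, assuming a base relevance score of 1.0 and salience 0.4:

    // unpinned: 1.0 * (1 + 0.4)       = 1.4
    // pinned:   1.0 * (1 + 0.4) * 2.0 = 2.8
    // The outcome-driven adjustment described above then nudges either result by at most ±30%.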
@@ -108,6 +108,51 @@ export function registerMemoryTools(server) {
  }
  }
  });
+ // ── 0b. team_scratchpad ────────────────────────────────────────────────
+ //
+ // Cross-agent shared scratchpad. Unlike working_memory (per-agent), this
+ // is a single shared markdown file every agent can read and append to.
+ // Use cases: live coordination ("Sasha is drafting the brief, Ross hold
+ // outbound for 30m"), cross-agent context drops, async hand-offs that
+ // don't warrant a full goal_create or task_add. Append tags every entry
+ // with the author's agent slug + ISO timestamp so the trail stays clear.
+ const TEAM_SCRATCHPAD_FILE = path.join(BASE_DIR, 'team-scratchpad.md');
+ server.tool('team_scratchpad', getToolDescription('team_scratchpad') ?? 'Cross-agent shared scratchpad for live team coordination. All agents read/write the same file. Use for hand-offs, "I am working on X", short-term context drops. For durable facts, use memory_write/MEMORY.md instead.', {
+ action: z.enum(['read', 'append', 'replace', 'clear']).describe('What to do with the team scratchpad'),
+ content: z.string().optional().describe('Text to append or replace with (required for append/replace)'),
+ }, async ({ action, content }) => {
+ const author = ACTIVE_AGENT_SLUG ?? 'clementine';
+ switch (action) {
+ case 'read': {
+ if (!existsSync(TEAM_SCRATCHPAD_FILE)) {
+ return textResult('Team scratchpad is empty.');
+ }
+ return textResult(readFileSync(TEAM_SCRATCHPAD_FILE, 'utf-8'));
+ }
+ case 'append': {
+ if (!content)
+ return textResult('Error: content is required for append.');
+ const stamp = new Date().toISOString();
+ const entry = `\n- **[${author}@${stamp}]** ${content}\n`;
+ const existing = existsSync(TEAM_SCRATCHPAD_FILE) ? readFileSync(TEAM_SCRATCHPAD_FILE, 'utf-8') : '# Team Scratchpad\n\nShared across all agents. Append tags entries with author + timestamp.\n';
+ writeFileSync(TEAM_SCRATCHPAD_FILE, existing + entry);
+ return textResult(`Appended to team scratchpad as ${author}.`);
+ }
+ case 'replace': {
+ if (!content)
+ return textResult('Error: content is required for replace.');
+ const stamp = new Date().toISOString();
+ const header = `# Team Scratchpad\n\n_Replaced by ${author} at ${stamp}._\n\n`;
+ writeFileSync(TEAM_SCRATCHPAD_FILE, header + content + '\n');
+ return textResult(`Team scratchpad replaced by ${author}.`);
+ }
+ case 'clear': {
+ if (existsSync(TEAM_SCRATCHPAD_FILE))
+ unlinkSync(TEAM_SCRATCHPAD_FILE);
+ return textResult('Team scratchpad cleared.');
+ }
+ }
+ });
  // ── 1. memory_read ─────────────────────────────────────────────────────
  server.tool('memory_read', getToolDescription('memory_read') ?? "Read a note from the Obsidian vault. Shortcuts: 'today', 'yesterday', 'memory', 'tasks', 'heartbeat', 'cron', 'soul'. Or pass a relative path or note name.", {
  name: z.string().describe('Note name, path, or shortcut'),
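Given the header and entry templates in the append branch, the shared file accumulates entries like these (authors and content illustrative, echoing the Sasha/Ross scenario in the comment above):

    # Team Scratchpad

    Shared across all agents. Append tags entries with author + timestamp.

    - **[sasha@2026-01-10T09:30:00.000Z]** Drafting the pricing brief — Ross, hold outbound for 30m.
    - **[ross@2026-01-10T09:42:11.000Z]** Ack, outbound paused until the brief lands.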
@@ -11,10 +11,15 @@
  const TOOL_META = {
  // ── Memory & Vault ────────────────────────────────────────────────
  working_memory: {
- description: 'Persistent scratchpad that survives across conversations. Use to jot down current project context, TODOs, reminders, or anything you need to remember for next time. Actions: read, append, replace, clear. ALWAYS read before replacing to avoid overwriting useful notes.',
+ description: 'Per-agent persistent scratchpad that only YOU see. Survives across conversations. Use for current project context, TODOs, reminders, or anything you need to remember for next time. Actions: read, append, replace, clear. ALWAYS read before replacing. For cross-agent coordination, use team_scratchpad instead.',
  exampleUsage: 'Before starting complex work, read working_memory to check for context from prior sessions.',
  returnHint: 'Full working memory contents (markdown text).',
  },
+ team_scratchpad: {
+ description: 'Cross-agent shared scratchpad — every agent on the team reads and writes the same file. Use for live coordination, hand-offs, "I am working on X — back off until Y", short-lived context drops. For durable facts that should outlive coordination noise, use memory_write to MEMORY.md instead. Append tags entries with author slug + timestamp.',
+ exampleUsage: 'Before starting outbound work, read team_scratchpad to see if another agent has already claimed a prospect or paused outreach.',
+ returnHint: 'Full scratchpad contents with per-entry author + ISO timestamp.',
+ },
  memory_search: {
  description: 'Full-text search across all vault notes. Best for finding specific keywords or phrases. For broader semantic matching, use memory_recall instead. Results include file path, section heading, and relevance score.',
  exampleUsage: 'Use when the user asks "what did we discuss about X" or you need to find a specific note.',
package/dist/types.d.ts CHANGED
@@ -15,6 +15,7 @@ export interface SearchResult {
  agentSlug?: string | null;
  category?: string | null;
  topic?: string | null;
+ pinned?: boolean;
  }
  export type ChunkCategory = 'facts' | 'events' | 'discoveries' | 'preferences' | 'advice';
  export interface Chunk {
@@ -572,6 +573,10 @@ export interface RestartSentinel {
  sessionKey?: string;
  changedFiles?: string[];
  updateDetails?: {
+ /** Semver before the update — read from package.json prior to git pull. */
+ previousVersion?: string;
+ /** Semver after the update — read from package.json after build. */
+ newVersion?: string;
  commitHash?: string;
  commitDate?: string;
  commitsBehind?: number;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "clementine-agent",
- "version": "1.1.20",
+ "version": "1.1.22",
  "description": "Clementine — Personal AI Assistant (TypeScript)",
  "type": "module",
  "main": "dist/index.js",