compound-agent 1.7.0 → 1.7.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -2453,11 +2453,11 @@ type Preference = z.infer<typeof PreferenceItemSchema>;
2453
2453
  type MemoryItemRecord = z.infer<typeof MemoryItemRecordSchema>;
2454
2454
  /**
2455
2455
  * Generate deterministic memory item ID from insight text.
2456
- * Format: {prefix} + 8 hex characters from SHA-256 hash.
2456
+ * Format: {prefix} + 16 hex characters from SHA-256 hash (64 bits of entropy).
2457
2457
  *
2458
2458
  * @param insight - The insight text to hash
2459
2459
  * @param type - Memory item type (default: 'lesson' for backward compat)
2460
- * @returns ID string like L1a2b3c4d, S1a2b3c4d, P1a2b3c4d, or R1a2b3c4d
2460
+ * @returns ID string like L1a2b3c4d5e6f7a8b (prefix L, S, P, or R depending on type)
2461
2461
  */
2462
2462
  declare function generateId(insight: string, type?: MemoryItemType): string;
2463
2463
 
@@ -2509,6 +2509,8 @@ interface ReadLessonsResult {
2509
2509
  interface ReadMemoryItemsResult {
2510
2510
  /** Successfully parsed memory items */
2511
2511
  items: MemoryItem[];
2512
+ /** IDs that were tombstoned (deleted) */
2513
+ deletedIds: Set<string>;
2512
2514
  /** Number of lines skipped due to errors */
2513
2515
  skippedCount: number;
2514
2516
  }
@@ -3409,7 +3411,7 @@ type EmbedStatus = {
3409
3411
  };
3410
3412
  /** Write embedding status to disk. Creates parent directories if needed. */
3411
3413
  declare function writeEmbedStatus(repoRoot: string, status: EmbedStatus): void;
3412
- /** Read embedding status from disk. Returns null on missing file or parse error. */
3414
+ /** Read embedding status from disk. Returns null on missing file, parse error, or invalid shape. */
3413
3415
  declare function readEmbedStatus(repoRoot: string): EmbedStatus | null;
3414
3416
 
3415
3417
  /**
package/dist/index.js CHANGED
@@ -22,7 +22,7 @@ var __export = (target, all) => {
22
22
  function generateId(insight, type) {
23
23
  const prefix = TYPE_PREFIXES[type ?? "lesson"];
24
24
  const hash = createHash("sha256").update(insight).digest("hex");
25
- return `${prefix}${hash.slice(0, 8)}`;
25
+ return `${prefix}${hash.slice(0, 16)}`;
26
26
  }
27
27
  var SourceSchema, ContextSchema, PatternSchema, CitationSchema, SeveritySchema, CompactionLevelSchema, LessonTypeSchema, MemoryItemTypeSchema, baseFields, LessonItemSchema, SolutionItemSchema, PatternItemSchema, PreferenceItemSchema, MemoryItemSchema, LegacyLessonSchema, LessonSchema, LegacyTombstoneSchema, LessonRecordSchema, MemoryItemRecordSchema, TYPE_PREFIXES;
28
28
  var init_types = __esm({
@@ -185,9 +185,7 @@ function parseJsonLine(line, lineNumber, strict, onParseError) {
185
185
  return result.data;
186
186
  }
187
187
  function toMemoryItem(record) {
188
- if (record.deleted === true) {
189
- return null;
190
- }
188
+ if (record.deleted === true) return null;
191
189
  if (record.type === "quick" || record.type === "full") {
192
190
  return { ...record, type: "lesson" };
193
191
  }
@@ -201,11 +199,12 @@ async function readMemoryItems(repoRoot, options = {}) {
201
199
  content = await readFile(filePath, "utf-8");
202
200
  } catch (err) {
203
201
  if (err.code === "ENOENT") {
204
- return { items: [], skippedCount: 0 };
202
+ return { items: [], deletedIds: /* @__PURE__ */ new Set(), skippedCount: 0 };
205
203
  }
206
204
  throw err;
207
205
  }
208
206
  const items = /* @__PURE__ */ new Map();
207
+ const deletedIds = /* @__PURE__ */ new Set();
209
208
  let skippedCount = 0;
210
209
  const lines = content.split("\n");
211
210
  for (let i = 0; i < lines.length; i++) {
@@ -218,6 +217,7 @@ async function readMemoryItems(repoRoot, options = {}) {
218
217
  }
219
218
  if (record.deleted === true) {
220
219
  items.delete(record.id);
220
+ deletedIds.add(record.id);
221
221
  } else {
222
222
  const item = toMemoryItem(record);
223
223
  if (item) {
@@ -225,7 +225,7 @@ async function readMemoryItems(repoRoot, options = {}) {
225
225
  }
226
226
  }
227
227
  }
228
- return { items: Array.from(items.values()), skippedCount };
228
+ return { items: Array.from(items.values()), deletedIds, skippedCount };
229
229
  }
230
230
  async function readLessons(repoRoot, options = {}) {
231
231
  const result = await readMemoryItems(repoRoot, options);
@@ -271,12 +271,15 @@ var init_availability = __esm({
271
271
  // src/memory/storage/sqlite/schema.ts
272
272
  function createSchema(database) {
273
273
  database.exec(SCHEMA_SQL);
274
- database.pragma(`user_version = ${SCHEMA_VERSION}`);
274
+ const current = database.pragma("user_version", { simple: true });
275
+ if (current !== SCHEMA_VERSION) {
276
+ database.pragma(`user_version = ${SCHEMA_VERSION}`);
277
+ }
275
278
  }
276
279
  var SCHEMA_VERSION, SCHEMA_SQL;
277
280
  var init_schema = __esm({
278
281
  "src/memory/storage/sqlite/schema.ts"() {
279
- SCHEMA_VERSION = 4;
282
+ SCHEMA_VERSION = 5;
280
283
  SCHEMA_SQL = `
281
284
  CREATE TABLE IF NOT EXISTS lessons (
282
285
  id TEXT PRIMARY KEY,
@@ -325,7 +328,7 @@ var init_schema = __esm({
325
328
  VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags, old.pattern_bad, old.pattern_good);
326
329
  END;
327
330
 
328
- CREATE TRIGGER IF NOT EXISTS lessons_au AFTER UPDATE ON lessons BEGIN
331
+ CREATE TRIGGER IF NOT EXISTS lessons_au AFTER UPDATE OF id, trigger, insight, tags, pattern_bad, pattern_good ON lessons BEGIN
329
332
  INSERT INTO lessons_fts(lessons_fts, rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
330
333
  VALUES ('delete', old.rowid, old.id, old.trigger, old.insight, old.tags, old.pattern_bad, old.pattern_good);
331
334
  INSERT INTO lessons_fts(rowid, id, trigger, insight, tags, pattern_bad, pattern_good)
@@ -1009,8 +1012,16 @@ function clusterBySimilarity(items, embeddings, threshold = DEFAULT_THRESHOLD) {
1009
1012
  }
1010
1013
  group.push(items[i]);
1011
1014
  }
1012
- const clusters = Array.from(groups.values());
1013
- return { clusters, noise: [] };
1015
+ const clusters = [];
1016
+ const noise = [];
1017
+ for (const group of groups.values()) {
1018
+ if (group.length === 1) {
1019
+ noise.push(group[0]);
1020
+ } else {
1021
+ clusters.push(group);
1022
+ }
1023
+ }
1024
+ return { clusters, noise };
1014
1025
  }
1015
1026
  var DEFAULT_THRESHOLD;
1016
1027
  var init_clustering = __esm({
@@ -1056,7 +1067,12 @@ async function readCctPatterns(repoRoot) {
1056
1067
  for (const line of lines) {
1057
1068
  const trimmed = line.trim();
1058
1069
  if (!trimmed) continue;
1059
- const parsed = JSON.parse(trimmed);
1070
+ let parsed;
1071
+ try {
1072
+ parsed = JSON.parse(trimmed);
1073
+ } catch {
1074
+ continue;
1075
+ }
1060
1076
  const result = CctPatternSchema.safeParse(parsed);
1061
1077
  if (result.success) {
1062
1078
  patterns.push(result.data);
@@ -2076,15 +2092,20 @@ function writeEmbedStatus(repoRoot, status) {
2076
2092
  function readEmbedStatus(repoRoot) {
2077
2093
  try {
2078
2094
  const raw = readFileSync(statusPath(repoRoot), "utf-8");
2079
- return JSON.parse(raw);
2095
+ const parsed = JSON.parse(raw);
2096
+ if (!parsed || typeof parsed !== "object" || !VALID_STATES.has(parsed.state)) {
2097
+ return null;
2098
+ }
2099
+ return parsed;
2080
2100
  } catch {
2081
2101
  return null;
2082
2102
  }
2083
2103
  }
2084
- var STATUS_FILE;
2104
+ var STATUS_FILE, VALID_STATES;
2085
2105
  var init_embed_status = __esm({
2086
2106
  "src/memory/knowledge/embed-status.ts"() {
2087
2107
  STATUS_FILE = ".claude/.cache/embed-status.json";
2108
+ VALID_STATES = /* @__PURE__ */ new Set(["idle", "running", "completed", "failed"]);
2088
2109
  }
2089
2110
  });
2090
2111
  function resolveCliInvocation() {
@@ -2598,9 +2619,6 @@ init_storage();
2598
2619
 
2599
2620
  // src/commands/shared.ts
2600
2621
  init_utils();
2601
-
2602
- // src/commands/management-helpers.ts
2603
- init_storage();
2604
2622
  init_embeddings();
2605
2623
  init_storage();
2606
2624
  init_storage();
@@ -2609,9 +2627,64 @@ init_storage();
2609
2627
  init_storage();
2610
2628
  init_storage();
2611
2629
  init_storage();
2630
+ init_storage();
2631
+ var CACHE_TTL_MS = 24 * 60 * 60 * 1e3;
2632
+ var FETCH_TIMEOUT_MS = 3e3;
2633
+ var CACHE_FILENAME = "update-check.json";
2634
+ async function fetchLatestVersion(packageName = "compound-agent") {
2635
+ try {
2636
+ const res = await fetch(`https://registry.npmjs.org/${packageName}`, {
2637
+ signal: AbortSignal.timeout(FETCH_TIMEOUT_MS)
2638
+ });
2639
+ if (!res.ok) return null;
2640
+ const data = await res.json();
2641
+ return data["dist-tags"] ? data["dist-tags"].latest ?? null : null;
2642
+ } catch {
2643
+ return null;
2644
+ }
2645
+ }
2646
+ async function checkForUpdate(cacheDir) {
2647
+ try {
2648
+ const cachePath = join(cacheDir, CACHE_FILENAME);
2649
+ const cached = readCache(cachePath);
2650
+ if (cached) {
2651
+ return {
2652
+ current: VERSION,
2653
+ latest: cached.latest,
2654
+ updateAvailable: cached.latest !== VERSION
2655
+ };
2656
+ }
2657
+ const latest = await fetchLatestVersion();
2658
+ if (latest === null) return null;
2659
+ try {
2660
+ mkdirSync(cacheDir, { recursive: true });
2661
+ const cacheData = { latest };
2662
+ writeFileSync(cachePath, JSON.stringify(cacheData));
2663
+ } catch {
2664
+ }
2665
+ return {
2666
+ current: VERSION,
2667
+ latest,
2668
+ updateAvailable: latest !== VERSION
2669
+ };
2670
+ } catch {
2671
+ return null;
2672
+ }
2673
+ }
2674
+ function readCache(cachePath) {
2675
+ try {
2676
+ const stat = statSync(cachePath);
2677
+ if (Date.now() - stat.mtimeMs > CACHE_TTL_MS) return null;
2678
+ const raw = readFileSync(cachePath, "utf-8");
2679
+ const data = JSON.parse(raw);
2680
+ if (!data.latest) return null;
2681
+ return data;
2682
+ } catch {
2683
+ return null;
2684
+ }
2685
+ }
2612
2686
 
2613
2687
  // src/commands/management-prime.ts
2614
- init_storage();
2615
2688
  var TRUST_LANGUAGE_TEMPLATE = `# Compound Agent Active
2616
2689
 
2617
2690
  > **Context Recovery**: Run \`npx ca prime\` after compaction, clear, or new session
@@ -2723,6 +2796,17 @@ ${formattedLessons}
2723
2796
  if (cookitSection !== null) {
2724
2797
  output += cookitSection;
2725
2798
  }
2799
+ try {
2800
+ const updateResult = await checkForUpdate(join(root, ".claude", ".cache"));
2801
+ if (updateResult?.updateAvailable) {
2802
+ output += `
2803
+ ---
2804
+ # Update Available
2805
+ compound-agent v${updateResult.latest} is available (current: v${updateResult.current}). Run \`pnpm update compound-agent\` to update.
2806
+ `;
2807
+ }
2808
+ } catch {
2809
+ }
2726
2810
  return output;
2727
2811
  }
2728
2812
 
@@ -2763,7 +2847,8 @@ var FileSizeCheckSchema = z.object({
2763
2847
  var ScriptCheckSchema = z.object({
2764
2848
  type: z.literal("script"),
2765
2849
  command: z.string(),
2766
- expectExitCode: z.number().int().optional()
2850
+ expectExitCode: z.number().int().optional(),
2851
+ timeout: z.number().int().positive().optional()
2767
2852
  });
2768
2853
  var RuleCheckSchema = z.discriminatedUnion("type", [
2769
2854
  FilePatternCheckSchema,
@@ -2853,10 +2938,12 @@ function runFileSizeCheck(baseDir, check) {
2853
2938
  }
2854
2939
  return violations;
2855
2940
  }
2941
+ var DEFAULT_SCRIPT_TIMEOUT = 3e4;
2856
2942
  function runScriptCheck(check, baseDir) {
2857
2943
  const expectedCode = check.expectExitCode ?? 0;
2944
+ const timeout = check.timeout ?? DEFAULT_SCRIPT_TIMEOUT;
2858
2945
  try {
2859
- execSync(check.command, { stdio: ["pipe", "pipe", "pipe"], cwd: baseDir });
2946
+ execSync(check.command, { stdio: ["pipe", "pipe", "pipe"], cwd: baseDir, timeout });
2860
2947
  if (expectedCode !== 0) {
2861
2948
  return [{ message: `Script exited with exit code 0, expected ${expectedCode}` }];
2862
2949
  }