@memrosetta/cli 0.5.1 → 0.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/dist/chunk-47SU2YUJ.js +64 -0
  2. package/dist/chunk-C4ANKSCI.js +151 -0
  3. package/dist/chunk-CEHRM6IW.js +151 -0
  4. package/dist/chunk-G2W4YK2T.js +56 -0
  5. package/dist/chunk-GGXC7TAJ.js +139 -0
  6. package/dist/chunk-GRNZVSAF.js +56 -0
  7. package/dist/chunk-GZINXXM4.js +139 -0
  8. package/dist/chunk-RZFCVYTK.js +71 -0
  9. package/dist/chunk-US6CEDMU.js +66 -0
  10. package/dist/chunk-VMGX5FCY.js +64 -0
  11. package/dist/chunk-WYHEAKPC.js +71 -0
  12. package/dist/clear-32Y3U2WR.js +39 -0
  13. package/dist/clear-AFEJPCDA.js +39 -0
  14. package/dist/compress-CL5D4VVJ.js +33 -0
  15. package/dist/compress-UUEO7WCU.js +33 -0
  16. package/dist/count-U2ML5ZON.js +24 -0
  17. package/dist/count-VVOGYSM7.js +24 -0
  18. package/dist/duplicates-CEJ7WSGW.js +149 -0
  19. package/dist/duplicates-IBUS7CJS.js +149 -0
  20. package/dist/enforce-T7AS4PVD.js +381 -0
  21. package/dist/enforce-TC5SDPEZ.js +381 -0
  22. package/dist/feedback-3PJTTEOD.js +51 -0
  23. package/dist/feedback-IB7BHIRP.js +51 -0
  24. package/dist/get-TQ2U7HCD.js +30 -0
  25. package/dist/get-WPZIHQKW.js +30 -0
  26. package/dist/hooks/on-prompt.js +3 -3
  27. package/dist/hooks/on-stop.js +3 -3
  28. package/dist/index.js +30 -20
  29. package/dist/ingest-37UXPVT5.js +97 -0
  30. package/dist/ingest-TPQRH34A.js +97 -0
  31. package/dist/init-6YQL3RCQ.js +210 -0
  32. package/dist/init-LHXRCCLX.js +210 -0
  33. package/dist/invalidate-ER2TFFWK.js +40 -0
  34. package/dist/invalidate-PVHUGAJ6.js +40 -0
  35. package/dist/maintain-NICAXFK6.js +37 -0
  36. package/dist/maintain-Q553GBSF.js +37 -0
  37. package/dist/migrate-CZL3YNQK.js +255 -0
  38. package/dist/migrate-FI26FSBP.js +255 -0
  39. package/dist/relate-5TN2WEG3.js +57 -0
  40. package/dist/relate-KLBMYWB3.js +57 -0
  41. package/dist/reset-IPOAKTJM.js +132 -0
  42. package/dist/search-AYZBKRXF.js +48 -0
  43. package/dist/search-JQ3MLRKS.js +48 -0
  44. package/dist/status-JF2V7ZBX.js +184 -0
  45. package/dist/status-UV66PWUD.js +184 -0
  46. package/dist/store-AAJCT3PX.js +101 -0
  47. package/dist/store-OVDS57U5.js +101 -0
  48. package/dist/sync-56KJTKE7.js +542 -0
  49. package/dist/sync-BCKBYRXY.js +542 -0
  50. package/dist/working-memory-CJARSGEK.js +53 -0
  51. package/dist/working-memory-Z3RUGSTQ.js +53 -0
  52. package/package.json +4 -4
@@ -0,0 +1,149 @@
1
+ import {
2
+ hasFlag,
3
+ optionalOption
4
+ } from "./chunk-US6CEDMU.js";
5
+ import {
6
+ resolveDbPath
7
+ } from "./chunk-47SU2YUJ.js";
8
+ import {
9
+ output,
10
+ outputError
11
+ } from "./chunk-ET6TNQOJ.js";
12
+ import {
13
+ resolveCanonicalUserId
14
+ } from "./chunk-WYHEAKPC.js";
15
+
16
// src/commands/duplicates.ts
// Maximum number of duplicate groups fetched from SQL in one scan
// (used as the default when --limit is not passed; see run()).
var PREVIEW_LIMIT = 200;
// Cap on groups rendered in text mode unless --verbose is given (see printText()).
var MAX_GROUPS_IN_TEXT = 20;
19
/**
 * Scan the memories table for groups of rows sharing (content, memory_type).
 *
 * @param db - open better-sqlite3 connection (read-only is sufficient)
 * @param canonicalUserId - user id that should win when recommending a keeper
 * @param limit - maximum number of duplicate groups to return
 * @returns group summaries with member rows and a recommended keeper id
 */
function scanDuplicates(db, canonicalUserId, limit) {
  const groupStmt = db.prepare(
    `SELECT
      content,
      memory_type AS memoryType,
      COUNT(*) AS totalRows,
      COUNT(DISTINCT user_id) AS distinctUsers,
      GROUP_CONCAT(DISTINCT user_id) AS users
    FROM memories
    GROUP BY content, memory_type
    HAVING totalRows > 1
    ORDER BY totalRows DESC
    LIMIT ?`
  );
  const memberStmt = db.prepare(
    `SELECT
      memory_id AS memoryId,
      user_id AS userId,
      namespace,
      learned_at AS learnedAt,
      use_count AS useCount,
      success_count AS successCount
    FROM memories
    WHERE content = ? AND memory_type = ?
    ORDER BY learned_at DESC`
  );
  const report = [];
  for (const group of groupStmt.all(limit)) {
    const members = memberStmt.all(group.content, group.memoryType);
    // Highest-scoring member (canonical user, then counts/recency) is the keeper.
    const ranked = [...members].sort(
      (a, b) => scoreMember(b, canonicalUserId) - scoreMember(a, canonicalUserId)
    );
    report.push({
      content: group.content,
      memoryType: group.memoryType,
      totalRows: group.totalRows,
      distinctUsers: group.distinctUsers,
      users: (group.users ?? "").split(","),
      members,
      recommendedKeep: ranked[0]?.memoryId ?? null
    });
  }
  return report;
}
59
/**
 * Rank a duplicate-group member for "which row should be kept".
 * Canonical-user rows dominate (1e6 bonus); then success count, use count,
 * and finally recency (parsed learnedAt timestamp, scaled down).
 */
function scoreMember(row, canonicalUserId) {
  const canonicalBonus = row.userId === canonicalUserId ? 1e6 : 0;
  const successWeight = (row.successCount ?? 0) * 100;
  const useWeight = (row.useCount ?? 0) * 10;
  const timestamp = Date.parse(row.learnedAt);
  // Unparsable dates contribute nothing rather than poisoning the score with NaN.
  const recency = Number.isNaN(timestamp) ? 0 : timestamp / 1e6;
  return canonicalBonus + successWeight + useWeight + recency;
}
68
/**
 * `memrosetta duplicates report` — open the memories DB read-only, scan for
 * duplicate rows, and emit a summary in text or JSON format.
 *
 * Flags: --limit <n>, --verbose, --canonical <user>.
 * Sets process.exitCode = 1 on usage errors.
 */
async function run(options) {
  const { args, format, db: dbOverride } = options;
  const sub = args[0];
  if (sub !== "report") {
    outputError(
      "Usage: memrosetta duplicates report [--format json|text] [--limit <n>] [--canonical <user>]",
      format
    );
    process.exitCode = 1;
    return;
  }
  const sliced = args.slice(1);
  const limitRaw = optionalOption(sliced, "--limit");
  // FIX: a non-numeric --limit used to produce NaN (Math.max(1, NaN) === NaN)
  // which was then bound to the SQL LIMIT placeholder; fall back to the
  // default preview limit instead.
  const parsedLimit = limitRaw ? Number.parseInt(limitRaw, 10) : NaN;
  const limit = Number.isNaN(parsedLimit) ? PREVIEW_LIMIT : Math.max(1, parsedLimit);
  const verbose = hasFlag(sliced, "--verbose");
  const canonicalOverride = optionalOption(sliced, "--canonical");
  const canonicalUserId = resolveCanonicalUserId(canonicalOverride ?? null);
  const dbPath = resolveDbPath(dbOverride);
  // Lazy import keeps better-sqlite3 out of the CLI's startup path.
  const { default: Database } = await import("better-sqlite3");
  const db = new Database(dbPath, { readonly: true });
  try {
    const groups = scanDuplicates(db, canonicalUserId, limit);
    const totalGroups = groups.length;
    const crossUserGroups = groups.filter((g) => g.distinctUsers > 1).length;
    const totalDuplicateRows = groups.reduce((sum, g) => sum + g.totalRows, 0);
    if (format === "text") {
      printText(groups, canonicalUserId, {
        totalGroups,
        crossUserGroups,
        totalDuplicateRows,
        verbose
      });
      return;
    }
    output(
      {
        canonicalUserId,
        totalGroups,
        crossUserGroups,
        totalDuplicateRows,
        groups
      },
      format
    );
  } finally {
    // Always release the sqlite handle, even when scanning throws.
    db.close();
  }
}
116
/**
 * Render the duplicate report as human-readable text on stdout.
 * Shows at most MAX_GROUPS_IN_TEXT groups unless summary.verbose is set.
 */
function printText(groups, canonicalUserId, summary) {
  const out = [];
  out.push(`Duplicate audit (canonical='${canonicalUserId}')`);
  out.push("=".repeat(60));
  out.push(` duplicate groups : ${summary.totalGroups}`);
  out.push(` cross-user groups : ${summary.crossUserGroups}`);
  out.push(` total duplicate rows : ${summary.totalDuplicateRows}`);
  out.push("");
  const visibleCount = summary.verbose ? groups.length : MAX_GROUPS_IN_TEXT;
  for (const group of groups.slice(0, visibleCount)) {
    const excerpt =
      group.content.length > 80 ? group.content.slice(0, 80) + "\u2026" : group.content;
    out.push(`- [${group.memoryType}] rows=${group.totalRows} users=${group.distinctUsers}`);
    out.push(` content: ${excerpt}`);
    out.push(` users: ${group.users.join(", ")}`);
    if (group.recommendedKeep) {
      out.push(` recommended keep: ${group.recommendedKeep}`);
    }
    if (summary.verbose) {
      for (const member of group.members) {
        out.push(
          ` - ${member.memoryId} user=${member.userId} use=${member.useCount} success=${member.successCount} learned=${member.learnedAt}`
        );
      }
    }
    out.push("");
  }
  const hidden = groups.length - MAX_GROUPS_IN_TEXT;
  if (!summary.verbose && hidden > 0) {
    out.push(`(+${hidden} more groups \u2014 pass --verbose to list all)`);
  }
  process.stdout.write(out.join("\n") + "\n");
}
147
+ export {
148
+ run
149
+ };
@@ -0,0 +1,149 @@
1
+ import {
2
+ hasFlag,
3
+ optionalOption
4
+ } from "./chunk-US6CEDMU.js";
5
+ import {
6
+ resolveDbPath
7
+ } from "./chunk-VMGX5FCY.js";
8
+ import {
9
+ output,
10
+ outputError
11
+ } from "./chunk-ET6TNQOJ.js";
12
+ import {
13
+ resolveCanonicalUserId
14
+ } from "./chunk-RZFCVYTK.js";
15
+
16
// src/commands/duplicates.ts
// Maximum number of duplicate groups fetched from SQL in one scan
// (used as the default when --limit is not passed; see run()).
var PREVIEW_LIMIT = 200;
// Cap on groups rendered in text mode unless --verbose is given (see printText()).
var MAX_GROUPS_IN_TEXT = 20;
19
/**
 * Scan the memories table for groups of rows sharing (content, memory_type).
 *
 * @param db - open better-sqlite3 connection (read-only is sufficient)
 * @param canonicalUserId - user id that should win when recommending a keeper
 * @param limit - maximum number of duplicate groups to return
 * @returns group summaries with member rows and a recommended keeper id
 */
function scanDuplicates(db, canonicalUserId, limit) {
  const groupStmt = db.prepare(
    `SELECT
      content,
      memory_type AS memoryType,
      COUNT(*) AS totalRows,
      COUNT(DISTINCT user_id) AS distinctUsers,
      GROUP_CONCAT(DISTINCT user_id) AS users
    FROM memories
    GROUP BY content, memory_type
    HAVING totalRows > 1
    ORDER BY totalRows DESC
    LIMIT ?`
  );
  const memberStmt = db.prepare(
    `SELECT
      memory_id AS memoryId,
      user_id AS userId,
      namespace,
      learned_at AS learnedAt,
      use_count AS useCount,
      success_count AS successCount
    FROM memories
    WHERE content = ? AND memory_type = ?
    ORDER BY learned_at DESC`
  );
  const report = [];
  for (const group of groupStmt.all(limit)) {
    const members = memberStmt.all(group.content, group.memoryType);
    // Highest-scoring member (canonical user, then counts/recency) is the keeper.
    const ranked = [...members].sort(
      (a, b) => scoreMember(b, canonicalUserId) - scoreMember(a, canonicalUserId)
    );
    report.push({
      content: group.content,
      memoryType: group.memoryType,
      totalRows: group.totalRows,
      distinctUsers: group.distinctUsers,
      users: (group.users ?? "").split(","),
      members,
      recommendedKeep: ranked[0]?.memoryId ?? null
    });
  }
  return report;
}
59
/**
 * Rank a duplicate-group member for "which row should be kept".
 * Canonical-user rows dominate (1e6 bonus); then success count, use count,
 * and finally recency (parsed learnedAt timestamp, scaled down).
 */
function scoreMember(row, canonicalUserId) {
  const canonicalBonus = row.userId === canonicalUserId ? 1e6 : 0;
  const successWeight = (row.successCount ?? 0) * 100;
  const useWeight = (row.useCount ?? 0) * 10;
  const timestamp = Date.parse(row.learnedAt);
  // Unparsable dates contribute nothing rather than poisoning the score with NaN.
  const recency = Number.isNaN(timestamp) ? 0 : timestamp / 1e6;
  return canonicalBonus + successWeight + useWeight + recency;
}
68
/**
 * `memrosetta duplicates report` — open the memories DB read-only, scan for
 * duplicate rows, and emit a summary in text or JSON format.
 *
 * Flags: --limit <n>, --verbose, --canonical <user>.
 * Sets process.exitCode = 1 on usage errors.
 */
async function run(options) {
  const { args, format, db: dbOverride } = options;
  const sub = args[0];
  if (sub !== "report") {
    outputError(
      "Usage: memrosetta duplicates report [--format json|text] [--limit <n>] [--canonical <user>]",
      format
    );
    process.exitCode = 1;
    return;
  }
  const sliced = args.slice(1);
  const limitRaw = optionalOption(sliced, "--limit");
  // FIX: a non-numeric --limit used to produce NaN (Math.max(1, NaN) === NaN)
  // which was then bound to the SQL LIMIT placeholder; fall back to the
  // default preview limit instead.
  const parsedLimit = limitRaw ? Number.parseInt(limitRaw, 10) : NaN;
  const limit = Number.isNaN(parsedLimit) ? PREVIEW_LIMIT : Math.max(1, parsedLimit);
  const verbose = hasFlag(sliced, "--verbose");
  const canonicalOverride = optionalOption(sliced, "--canonical");
  const canonicalUserId = resolveCanonicalUserId(canonicalOverride ?? null);
  const dbPath = resolveDbPath(dbOverride);
  // Lazy import keeps better-sqlite3 out of the CLI's startup path.
  const { default: Database } = await import("better-sqlite3");
  const db = new Database(dbPath, { readonly: true });
  try {
    const groups = scanDuplicates(db, canonicalUserId, limit);
    const totalGroups = groups.length;
    const crossUserGroups = groups.filter((g) => g.distinctUsers > 1).length;
    const totalDuplicateRows = groups.reduce((sum, g) => sum + g.totalRows, 0);
    if (format === "text") {
      printText(groups, canonicalUserId, {
        totalGroups,
        crossUserGroups,
        totalDuplicateRows,
        verbose
      });
      return;
    }
    output(
      {
        canonicalUserId,
        totalGroups,
        crossUserGroups,
        totalDuplicateRows,
        groups
      },
      format
    );
  } finally {
    // Always release the sqlite handle, even when scanning throws.
    db.close();
  }
}
116
/**
 * Render the duplicate report as human-readable text on stdout.
 * Shows at most MAX_GROUPS_IN_TEXT groups unless summary.verbose is set.
 */
function printText(groups, canonicalUserId, summary) {
  const out = [];
  out.push(`Duplicate audit (canonical='${canonicalUserId}')`);
  out.push("=".repeat(60));
  out.push(` duplicate groups : ${summary.totalGroups}`);
  out.push(` cross-user groups : ${summary.crossUserGroups}`);
  out.push(` total duplicate rows : ${summary.totalDuplicateRows}`);
  out.push("");
  const visibleCount = summary.verbose ? groups.length : MAX_GROUPS_IN_TEXT;
  for (const group of groups.slice(0, visibleCount)) {
    const excerpt =
      group.content.length > 80 ? group.content.slice(0, 80) + "\u2026" : group.content;
    out.push(`- [${group.memoryType}] rows=${group.totalRows} users=${group.distinctUsers}`);
    out.push(` content: ${excerpt}`);
    out.push(` users: ${group.users.join(", ")}`);
    if (group.recommendedKeep) {
      out.push(` recommended keep: ${group.recommendedKeep}`);
    }
    if (summary.verbose) {
      for (const member of group.members) {
        out.push(
          ` - ${member.memoryId} user=${member.userId} use=${member.useCount} success=${member.successCount} learned=${member.learnedAt}`
        );
      }
    }
    out.push("");
  }
  const hidden = groups.length - MAX_GROUPS_IN_TEXT;
  if (!summary.verbose && hidden > 0) {
    out.push(`(+${hidden} more groups \u2014 pass --verbose to list all)`);
  }
  process.stdout.write(out.join("\n") + "\n");
}
147
+ export {
148
+ run
149
+ };
@@ -0,0 +1,381 @@
1
+ import {
2
+ resolveUserId
3
+ } from "./chunk-CEHRM6IW.js";
4
+ import {
5
+ hasFlag,
6
+ optionalOption,
7
+ requireOption
8
+ } from "./chunk-US6CEDMU.js";
9
+ import {
10
+ getEngine
11
+ } from "./chunk-47SU2YUJ.js";
12
+ import {
13
+ output,
14
+ outputError
15
+ } from "./chunk-ET6TNQOJ.js";
16
+ import "./chunk-WYHEAKPC.js";
17
+
18
+ // src/commands/enforce.ts
19
+ import { existsSync, readFileSync } from "fs";
20
+
21
+ // src/hooks/llm-extractor.ts
22
// System prompt sent verbatim to the extraction LLM (Anthropic and OpenAI
// paths both use it). Instructs the model to return { "memories": [...] }
// with atomic, durable memories only; the reply is parsed by
// safeParseMemories(). NOTE(review): internal whitespace reconstructed from
// a mangled listing — confirm against the original source before relying on
// exact prompt formatting.
var SYSTEM_PROMPT = `You extract atomic long-term memories from a single
assistant turn in a coding assistant conversation.

Return a JSON object: { "memories": [...] }. Each memory has:
- "content": one self-contained, full-sentence fact, decision, preference,
or event. Keep proper nouns. Resolve pronouns. Korean stays Korean,
English stays English.
- "memoryType": one of "decision", "fact", "preference", "event".
- "keywords": 2-5 short keywords for search.
- "confidence": 0.0 to 1.0.

Only emit memories the user would still care about NEXT WEEK. Ignore:
- acknowledgements, greetings, status updates, confirmations
- code snippets and diffs (those belong in git)
- debugging steps and intermediate reasoning
- questions you asked the user

If nothing is worth storing, return { "memories": [] }.`;
40
// Accepted memory categories from the extractor reply.
var VALID_TYPES = /* @__PURE__ */ new Set([
  "decision",
  "fact",
  "preference",
  "event"
]);

/**
 * Parse the LLM's JSON reply into validated memory objects.
 * Tolerates a surrounding markdown code fence, drops malformed entries,
 * and never throws — any unparsable payload yields [].
 */
function safeParseMemories(raw) {
  // Strip an optional ```json fence around the payload.
  const stripped = raw.replace(/^```(?:json)?\s*|\s*```$/g, "").trim();
  let parsed;
  try {
    parsed = JSON.parse(stripped);
  } catch {
    return [];
  }
  if (!parsed || typeof parsed !== "object" || !Array.isArray(parsed.memories)) {
    return [];
  }
  const memories = [];
  for (const candidate of parsed.memories) {
    if (!candidate || typeof candidate !== "object") continue;
    if (typeof candidate.content !== "string" || candidate.content.trim().length === 0) continue;
    if (!candidate.memoryType || !VALID_TYPES.has(candidate.memoryType)) continue;
    const keywordsValid =
      Array.isArray(candidate.keywords) &&
      candidate.keywords.every((k) => typeof k === "string");
    const confidenceValid =
      typeof candidate.confidence === "number" &&
      candidate.confidence >= 0 &&
      candidate.confidence <= 1;
    memories.push({
      content: candidate.content.trim(),
      memoryType: candidate.memoryType,
      keywords: keywordsValid ? candidate.keywords : void 0,
      confidence: confidenceValid ? candidate.confidence : void 0
    });
  }
  return memories;
}
72
/**
 * Build the user-turn prompt for the extractor LLM: optional user-question
 * context (truncated to 800 chars) followed by the assistant turn.
 */
function buildUserPrompt(params) {
  const parts = [];
  if (params.userPrompt) {
    parts.push(`User just asked:\n${params.userPrompt.slice(0, 800)}\n\n`);
  }
  parts.push(`Assistant turn (client=${params.client}):\n${params.text}`);
  return parts.join("");
}
80
/**
 * Ask Anthropic (Claude Haiku) to extract memories from the turn.
 *
 * @returns parsed memories, [] when the model found none, or null when the
 *          provider is unavailable (no API key, HTTP error, network failure)
 */
async function extractAnthropic(params) {
  const apiKey = process.env.ANTHROPIC_API_KEY;
  if (!apiKey) return null;
  const request = {
    method: "POST",
    headers: {
      "content-type": "application/json",
      "x-api-key": apiKey,
      "anthropic-version": "2023-06-01"
    },
    body: JSON.stringify({
      model: "claude-haiku-4-5-20251001",
      max_tokens: 1024,
      system: SYSTEM_PROMPT,
      messages: [{ role: "user", content: buildUserPrompt(params) }]
    })
  };
  try {
    const res = await fetch("https://api.anthropic.com/v1/messages", request);
    if (!res.ok) {
      process.stderr.write(`[enforce] anthropic returned ${res.status} ${res.statusText}\n`);
      return null;
    }
    const body = await res.json();
    const text = body.content?.find((c) => c.type === "text")?.text?.trim() ?? "";
    return text ? safeParseMemories(text) : [];
  } catch (err) {
    process.stderr.write(
      `[enforce] anthropic call failed: ${err instanceof Error ? err.message : String(err)}\n`
    );
    return null;
  }
}
117
/**
 * Ask OpenAI (gpt-4o-mini, JSON mode) to extract memories from the turn.
 *
 * @returns parsed memories, [] when the model found none, or null when the
 *          provider is unavailable (no API key, HTTP error, network failure)
 */
async function extractOpenAI(params) {
  const apiKey = process.env.OPENAI_API_KEY;
  if (!apiKey) return null;
  const payload = {
    model: "gpt-4o-mini",
    max_tokens: 1024,
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: SYSTEM_PROMPT },
      { role: "user", content: buildUserPrompt(params) }
    ]
  };
  try {
    const res = await fetch("https://api.openai.com/v1/chat/completions", {
      method: "POST",
      headers: {
        "content-type": "application/json",
        authorization: `Bearer ${apiKey}`
      },
      body: JSON.stringify(payload)
    });
    if (!res.ok) {
      process.stderr.write(`[enforce] openai returned ${res.status} ${res.statusText}\n`);
      return null;
    }
    const body = await res.json();
    const text = body.choices?.[0]?.message?.content?.trim() ?? "";
    return text ? safeParseMemories(text) : [];
  } catch (err) {
    process.stderr.write(
      `[enforce] openai call failed: ${err instanceof Error ? err.message : String(err)}\n`
    );
    return null;
  }
}
156
/**
 * Local fallback: decompose the turn into fact memories using the optional
 * @memrosetta/extractor package when it is installed.
 *
 * @returns fact memories (confidence 0.6), [] when none, or null when the
 *          package is unavailable or decomposition fails
 */
async function extractPropositionizer(params) {
  try {
    // Indirect dynamic import keeps bundlers from statically resolving the
    // optional dependency (deliberate use of Function constructor).
    const importer = new Function("m", "return import(m)");
    const mod = await importer("@memrosetta/extractor").catch(() => null);
    if (!mod?.PropositionizerDecomposer) return null;
    const decomposer = new mod.PropositionizerDecomposer();
    const facts = await decomposer.decompose(params.text);
    if (!Array.isArray(facts) || facts.length === 0) return [];
    const toMemory = (content) => ({
      content,
      memoryType: "fact",
      confidence: 0.6
    });
    return facts.map(toMemory);
  } catch (err) {
    process.stderr.write(
      `[enforce] propositionizer fallback failed: ${err instanceof Error ? err.message : String(err)}\n`
    );
    return null;
  }
}
177
/**
 * Try each extraction provider in priority order (Anthropic → OpenAI →
 * local propositionizer). A provider returning null means "unavailable";
 * the first available provider's result wins, even if it found 0 memories.
 */
async function extractWithLLM(params) {
  const providers = [
    ["anthropic", extractAnthropic],
    ["openai", extractOpenAI],
    ["propositionizer", extractPropositionizer]
  ];
  for (const [source, extract] of providers) {
    const memories = await extract(params);
    if (memories !== null) {
      return { memories, source, attempted: true };
    }
  }
  return { memories: [], source: "none", attempted: false };
}
192
+
193
+ // src/commands/enforce.ts
194
// Maximum number of extraction attempts per turn before giving up
// (compared against the event's attempt counter in run()).
var MAX_ATTEMPTS = 2;
// Case-insensitive substrings suggesting the turn records a durable
// decision/conclusion; used by looksStorable() to decide whether a
// zero-memory extraction is worth a retry.
var HEURISTIC_KEYWORDS = [
  // English
  "decided",
  "choose",
  "chose",
  "use ",
  "switch to",
  "instead of",
  "conclusion",
  "agreed",
  "fixed",
  "fix:",
  "released",
  "deployed",
  "discovered",
  "found",
  "turns out",
  // Korean (roughly: decision, conclusion, agreement, finalized, fixed,
  // resolved, discovered, deployed, changed, "let's change", modified,
  // replaced)
  "\uACB0\uC815",
  "\uACB0\uB860",
  "\uD569\uC758",
  "\uD655\uC815",
  "\uC218\uC815",
  "\uD574\uACB0",
  "\uBC1C\uACAC",
  "\uBC30\uD3EC",
  "\uBC14\uAFE8",
  "\uBC14\uAFB8\uC790",
  "\uBCC0\uACBD",
  "\uAD50\uCCB4"
];
226
/**
 * Load and validate a Stop-hook event payload from a JSON file.
 *
 * @param path - filesystem path to the event JSON
 * @returns normalized event (attempt defaults to 1)
 * @throws Error when the file is missing, unparsable, or lacks the
 *         required { client, assistantMessage } fields
 */
function loadEvent(path) {
  if (!existsSync(path)) {
    throw new Error(`event-json file not found: ${path}`);
  }
  let parsed;
  try {
    parsed = JSON.parse(readFileSync(path, "utf-8"));
  } catch {
    throw new Error(`event-json is not valid JSON: ${path}`);
  }
  const event = parsed;
  if (!event.client || typeof event.assistantMessage !== "string") {
    throw new Error(
      "event-json must include at least { client, assistantMessage }"
    );
  }
  return {
    client: event.client,
    turnId: event.turnId,
    assistantMessage: event.assistantMessage,
    userPrompt: event.userPrompt,
    cwd: event.cwd,
    transcriptPath: event.transcriptPath,
    attempt: event.attempt ?? 1
  };
}
253
/**
 * Heuristic: does the assistant turn look like it contains a storable
 * decision/conclusion? Case-insensitive substring match against
 * HEURISTIC_KEYWORDS.
 */
function looksStorable(text) {
  const haystack = text.toLowerCase();
  for (const keyword of HEURISTIC_KEYWORDS) {
    if (haystack.includes(keyword.toLowerCase())) return true;
  }
  return false;
}
257
/**
 * One-line footer summarizing what was stored for this turn, appended to
 * the hook result.
 */
function buildFooter(result) {
  switch (result.status) {
    case "noop":
      return "STORED: none (noop)";
    case "needs-continuation":
      return "STORED: pending (needs-continuation)";
    default: {
      if (result.memories.length === 0) {
        return "STORED: failed";
      }
      const items = result.memories
        .map((m) => `${m.type}(${m.memoryId})`)
        .join(", ");
      return `STORED: ${items}`;
    }
  }
}
270
/**
 * `memrosetta enforce stop` — extract memories from a finished assistant
 * turn (via extractWithLLM) and persist them through the engine, emitting a
 * status object the Stop hook can act on ("stored", "needs-continuation",
 * or "noop").
 *
 * Flags: --client <id> (required), --event-json <path> (required),
 * --attempt <n>, --dry-run. Sets process.exitCode = 1 on usage errors.
 */
async function run(options) {
  const { args, format, db, noEmbeddings } = options;
  const sub = args[0];
  if (sub !== "stop") {
    outputError(
      "Usage: memrosetta enforce stop --client <id> --event-json <path>",
      format
    );
    process.exitCode = 1;
    return;
  }
  const sliced = args.slice(1);
  let client;
  let eventPath;
  try {
    client = requireOption(sliced, "--client", "client identifier");
    eventPath = requireOption(sliced, "--event-json", "event JSON path");
  } catch (err) {
    outputError(err instanceof Error ? err.message : String(err), format);
    process.exitCode = 1;
    return;
  }
  const explicitAttempt = optionalOption(sliced, "--attempt");
  const dryRun = hasFlag(sliced, "--dry-run");
  let event;
  try {
    event = loadEvent(eventPath);
  } catch (err) {
    outputError(err instanceof Error ? err.message : String(err), format);
    process.exitCode = 1;
    return;
  }
  // FIX: guard against non-numeric --attempt values; parseInt can yield NaN,
  // which previously flowed into `attempt` and broke the `attempt <
  // MAX_ATTEMPTS` comparison below. Fall back to the event's own counter.
  const parsedAttempt = explicitAttempt ? Number.parseInt(explicitAttempt, 10) : NaN;
  const attempt = Number.isNaN(parsedAttempt) ? event.attempt ?? 1 : Math.max(1, parsedAttempt);
  if (!event.assistantMessage.trim()) {
    // Nothing to extract from an empty turn.
    const result2 = {
      status: "noop",
      structuredCount: 0,
      extractedCount: 0,
      memories: [],
      footer: "STORED: none (noop)",
      attempt,
      maxAttempts: MAX_ATTEMPTS,
      reason: "empty assistant message"
    };
    output(result2, format);
    return;
  }
  const extractor = await extractWithLLM({
    text: event.assistantMessage,
    userPrompt: event.userPrompt,
    client
  });
  const stored = [];
  // NOTE(review): with --dry-run nothing is stored, so extractedCount below
  // stays 0 even when the extractor found memories — confirm this is the
  // intended dry-run reporting.
  if (!dryRun && extractor.memories.length > 0) {
    try {
      const engine = await getEngine({ db, noEmbeddings });
      const userId = resolveUserId(event.cwd ?? process.cwd());
      for (const m of extractor.memories) {
        const input = {
          userId,
          content: m.content,
          memoryType: m.memoryType,
          keywords: m.keywords,
          confidence: m.confidence
        };
        const memory = await engine.store(input);
        stored.push({ type: memory.memoryType, memoryId: memory.memoryId });
      }
    } catch (err) {
      const reason2 = err instanceof Error ? err.message : String(err);
      const result2 = {
        status: "noop",
        structuredCount: 0,
        extractedCount: extractor.memories.length,
        memories: [],
        footer: "STORED: failed",
        attempt,
        maxAttempts: MAX_ATTEMPTS,
        reason: `engine.store failed: ${reason2}`
      };
      output(result2, format);
      return;
    }
  }
  const extractedCount = stored.length;
  const structuredCount = 0;
  let status;
  let reason;
  if (extractedCount > 0) {
    status = "stored";
  } else if (looksStorable(event.assistantMessage) && attempt < MAX_ATTEMPTS && extractor.attempted) {
    // The turn looks storable but extraction came up empty — ask the hook
    // to retry once before giving up.
    status = "needs-continuation";
    reason = "turn looked storable (decision/conclusion keywords present) but extractor returned 0 memories";
  } else {
    status = "noop";
    reason = extractor.attempted ? "extractor returned 0 memories and turn does not look storable" : "no LLM extractor available (set ANTHROPIC_API_KEY / OPENAI_API_KEY or install propositionizer model)";
  }
  const partial = {
    status,
    structuredCount,
    extractedCount,
    memories: stored,
    attempt,
    maxAttempts: MAX_ATTEMPTS,
    reason
  };
  const result = { ...partial, footer: buildFooter(partial) };
  output(result, format);
}
379
+ export {
380
+ run
381
+ };