@openanonymity/nanomem 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/LICENSE +21 -0
  2. package/README.md +64 -18
  3. package/package.json +7 -3
  4. package/src/backends/BaseStorage.js +147 -3
  5. package/src/backends/indexeddb.js +21 -8
  6. package/src/browser.js +227 -0
  7. package/src/bullets/parser.js +8 -9
  8. package/src/cli/auth.js +1 -1
  9. package/src/cli/commands.js +58 -9
  10. package/src/cli/config.js +1 -1
  11. package/src/cli/help.js +5 -2
  12. package/src/cli/output.js +4 -0
  13. package/src/cli.js +6 -3
  14. package/src/engine/compactor.js +3 -6
  15. package/src/engine/deleter.js +187 -0
  16. package/src/engine/executors.js +474 -11
  17. package/src/engine/ingester.js +98 -63
  18. package/src/engine/recentConversation.js +110 -0
  19. package/src/engine/retriever.js +243 -37
  20. package/src/engine/toolLoop.js +51 -9
  21. package/src/imports/chatgpt.js +1 -1
  22. package/src/imports/claude.js +85 -0
  23. package/src/imports/importData.js +462 -0
  24. package/src/imports/index.js +10 -0
  25. package/src/index.js +95 -2
  26. package/src/llm/openai.js +204 -58
  27. package/src/llm/tinfoil.js +508 -0
  28. package/src/omf.js +343 -0
  29. package/src/prompt_sets/conversation/ingestion.js +111 -12
  30. package/src/prompt_sets/document/ingestion.js +98 -4
  31. package/src/prompt_sets/index.js +12 -4
  32. package/src/types.js +135 -4
  33. package/src/vendor/tinfoil.browser.d.ts +2 -0
  34. package/src/vendor/tinfoil.browser.js +41596 -0
  35. package/types/backends/BaseStorage.d.ts +19 -0
  36. package/types/backends/indexeddb.d.ts +1 -0
  37. package/types/browser.d.ts +17 -0
  38. package/types/engine/deleter.d.ts +67 -0
  39. package/types/engine/executors.d.ts +56 -2
  40. package/types/engine/recentConversation.d.ts +18 -0
  41. package/types/engine/retriever.d.ts +22 -9
  42. package/types/imports/claude.d.ts +14 -0
  43. package/types/imports/importData.d.ts +29 -0
  44. package/types/imports/index.d.ts +2 -0
  45. package/types/index.d.ts +9 -0
  46. package/types/llm/openai.d.ts +6 -9
  47. package/types/llm/tinfoil.d.ts +13 -0
  48. package/types/omf.d.ts +40 -0
  49. package/types/prompt_sets/conversation/ingestion.d.ts +8 -3
  50. package/types/prompt_sets/document/ingestion.d.ts +8 -3
  51. package/types/types.d.ts +127 -2
  52. package/types/vendor/tinfoil.browser.d.ts +6348 -0
package/src/browser.js ADDED
@@ -0,0 +1,227 @@
1
+ /**
2
+ * Browser-safe nanomem entrypoint.
3
+ *
4
+ * This mirrors createMemoryBank from index.js but excludes the filesystem
5
+ * backend so browser bundlers do not try to resolve node:* imports.
6
+ */
7
+ /** @import { MemoryBank, MemoryBankConfig, MemoryBankLLMConfig, Message, IngestOptions, AugmentQueryResult, RetrievalResult, StorageBackend } from './types.js' */
8
+
9
+ import { createOpenAIClient } from './llm/openai.js';
10
+ import { createAnthropicClient } from './llm/anthropic.js';
11
+ import { MemoryBulletIndex } from './bullets/bulletIndex.js';
12
+ import { MemoryRetriever } from './engine/retriever.js';
13
+ import { MemoryIngester } from './engine/ingester.js';
14
+ import { MemoryCompactor } from './engine/compactor.js';
15
+ import { InMemoryStorage } from './backends/ram.js';
16
+ import { importData as importMemoryData } from './imports/importData.js';
17
+ import { serialize, toZip } from './utils/portability.js';
18
+ import { buildOmfExport, previewOmfImport, importOmf, parseOmfText, validateOmf } from './omf.js';
19
+
20
+ /**
21
+ * Remove review-only [[user_data]] markers before sending the final prompt to
22
+ * the frontier model.
23
+ *
24
+ * @param {string} text
25
+ * @returns {string}
26
+ */
27
+ export function stripUserDataTags(text) {
28
+ return String(text ?? '')
29
+ .replace(/\[\[user_data\]\]/g, '')
30
+ .replace(/\[\[\/user_data\]\]/g, '');
31
+ }
32
+
33
+ /**
34
+ * @param {MemoryBankConfig} [config]
35
+ * @returns {MemoryBank}
36
+ */
37
+ export function createMemoryBank(config = {}) {
38
+ const llmClient = config.llmClient || createBrowserLlmClient(config.llm);
39
+ const model = config.model || config.llm?.model || 'gpt-4o';
40
+ const backend = createBrowserBackend(config.storage);
41
+ const bulletIndex = new MemoryBulletIndex(backend);
42
+
43
+ const retrieval = new MemoryRetriever({
44
+ backend,
45
+ bulletIndex,
46
+ llmClient,
47
+ model,
48
+ onProgress: config.onProgress,
49
+ onModelText: config.onModelText
50
+ });
51
+ const ingester = new MemoryIngester({
52
+ backend,
53
+ bulletIndex,
54
+ llmClient,
55
+ model,
56
+ onToolCall: config.onToolCall
57
+ });
58
+ const compactor = new MemoryCompactor({
59
+ backend,
60
+ bulletIndex,
61
+ llmClient,
62
+ model,
63
+ onProgress: config.onCompactProgress
64
+ });
65
+
66
+ async function write(path, content) {
67
+ await backend.write(path, content);
68
+ await bulletIndex.refreshPath(path);
69
+ }
70
+
71
+ async function remove(path) {
72
+ await backend.delete(path);
73
+ await bulletIndex.refreshPath(path);
74
+ }
75
+
76
+ async function rebuildTree() {
77
+ await backend.rebuildTree();
78
+ await bulletIndex.rebuild();
79
+ }
80
+
81
+ return {
82
+ init: () => backend.init(),
83
+ retrieve: (query, conversationText) => retrieval.retrieveForQuery(query, conversationText),
84
+ augmentQuery: (query, conversationText) => retrieval.augmentQueryForPrompt(query, conversationText),
85
+ ingest: (messages, options) => ingester.ingest(messages, options),
86
+ importData: (input, options) => importMemoryData({
87
+ init: () => backend.init(),
88
+ ingest: (messages, ingestOptions) => ingester.ingest(messages, ingestOptions)
89
+ }, input, options),
90
+ exportOmf: async () => {
91
+ await backend.init();
92
+ return buildOmfExport({
93
+ read: (path) => backend.read(path),
94
+ write: (path, content) => write(path, content),
95
+ delete: (path) => remove(path),
96
+ exists: (path) => backend.exists(path),
97
+ search: (query) => backend.search(query),
98
+ ls: (dirPath) => backend.ls(dirPath),
99
+ getTree: () => backend.getTree(),
100
+ rebuildTree: () => rebuildTree(),
101
+ exportAll: () => backend.exportAll(),
102
+ clear: () => backend.clear(),
103
+ }, { sourceApp: 'nanomem' });
104
+ },
105
+ previewOmfImport: async (doc, options) => {
106
+ await backend.init();
107
+ return previewOmfImport({
108
+ read: (path) => backend.read(path),
109
+ write: (path, content) => write(path, content),
110
+ delete: (path) => remove(path),
111
+ exists: (path) => backend.exists(path),
112
+ search: (query) => backend.search(query),
113
+ ls: (dirPath) => backend.ls(dirPath),
114
+ getTree: () => backend.getTree(),
115
+ rebuildTree: () => rebuildTree(),
116
+ exportAll: () => backend.exportAll(),
117
+ clear: () => backend.clear(),
118
+ }, doc, options);
119
+ },
120
+ importOmf: async (doc, options) => {
121
+ await backend.init();
122
+ return importOmf({
123
+ read: (path) => backend.read(path),
124
+ write: (path, content) => write(path, content),
125
+ delete: (path) => remove(path),
126
+ exists: (path) => backend.exists(path),
127
+ search: (query) => backend.search(query),
128
+ ls: (dirPath) => backend.ls(dirPath),
129
+ getTree: () => backend.getTree(),
130
+ rebuildTree: () => rebuildTree(),
131
+ exportAll: () => backend.exportAll(),
132
+ clear: () => backend.clear(),
133
+ }, doc, options);
134
+ },
135
+ compact: () => compactor.compactAll(),
136
+ storage: {
137
+ read: (path) => backend.read(path),
138
+ resolvePath: (path) => backend.resolvePath ? backend.resolvePath(path) : Promise.resolve(null),
139
+ write: (path, content) => write(path, content),
140
+ delete: (path) => remove(path),
141
+ exists: (path) => backend.exists(path),
142
+ search: (query) => backend.search(query),
143
+ ls: (dirPath) => backend.ls(dirPath),
144
+ getTree: () => backend.getTree(),
145
+ rebuildTree: () => rebuildTree(),
146
+ exportAll: () => backend.exportAll(),
147
+ clear: () => backend.clear()
148
+ },
149
+ serialize: async () => serialize(await backend.exportAll()),
150
+ toZip: async () => toZip(await backend.exportAll()),
151
+ _backend: backend,
152
+ _bulletIndex: bulletIndex
153
+ };
154
+ }
155
+
156
+ function createBrowserLlmClient(llmConfig = /** @type {MemoryBankLLMConfig} */ ({ apiKey: '' })) {
157
+ const { apiKey, baseUrl, headers, provider } = llmConfig;
158
+ if (!apiKey) {
159
+ throw new Error('createMemoryBank: config.llm.apiKey is required (or provide config.llmClient)');
160
+ }
161
+
162
+ const detectedProvider = provider || detectProvider(baseUrl);
163
+ if (detectedProvider === 'anthropic') {
164
+ return createAnthropicClient({ apiKey, baseUrl, headers });
165
+ }
166
+ if (detectedProvider === 'tinfoil') {
167
+ throw new Error(
168
+ 'createMemoryBank(browser): Tinfoil provider requires the Node.js entry (src/index.js). ' +
169
+ 'Use provider "openai" with baseUrl "https://inference.tinfoil.sh/v1" for browser builds.'
170
+ );
171
+ }
172
+ return createOpenAIClient({ apiKey, baseUrl, headers });
173
+ }
174
+
175
+ function detectProvider(baseUrl) {
176
+ if (!baseUrl) return 'openai';
177
+ const lower = baseUrl.toLowerCase();
178
+ if (lower.includes('anthropic.com')) return 'anthropic';
179
+ if (lower.includes('tinfoil.sh')) return 'tinfoil';
180
+ return 'openai';
181
+ }
182
+
183
+ function createBrowserBackend(storage) {
184
+ if (storage && typeof storage === 'object' && typeof storage.read === 'function') {
185
+ return storage;
186
+ }
187
+
188
+ const storageType = typeof storage === 'string' ? storage : 'ram';
189
+ switch (storageType) {
190
+ case 'indexeddb':
191
+ return asyncBackend(() => import('./backends/indexeddb.js').then((module) => new module.IndexedDBStorage()));
192
+ case 'filesystem':
193
+ throw new Error('createMemoryBank(browser): filesystem storage is not available in the browser entrypoint.');
194
+ case 'ram':
195
+ default:
196
+ return new InMemoryStorage();
197
+ }
198
+ }
199
+
200
+ function asyncBackend(loader) {
201
+ let backend = null;
202
+ let loading = null;
203
+
204
+ async function resolve() {
205
+ if (backend) return backend;
206
+ if (!loading) {
207
+ loading = loader().then((instance) => {
208
+ backend = instance;
209
+ return backend;
210
+ });
211
+ }
212
+ return loading;
213
+ }
214
+
215
+ const methods = ['init', 'read', 'resolvePath', 'write', 'delete', 'exists', 'ls', 'search', 'getTree', 'rebuildTree', 'exportAll', 'clear'];
216
+ const proxy = {};
217
+ for (const method of methods) {
218
+ proxy[method] = async (...args) => {
219
+ const resolved = await resolve();
220
+ return resolved[method](...args);
221
+ };
222
+ }
223
+ return /** @type {StorageBackend} */ (proxy);
224
+ }
225
+
226
+ export * from './bullets/index.js';
227
+ export { buildOmfExport, previewOmfImport, importOmf, parseOmfText, validateOmf } from './omf.js';
@@ -33,11 +33,11 @@ export function parseBullets(content) {
33
33
  const headingMatch = line.match(HEADING_REGEX);
34
34
  if (headingMatch) {
35
35
  currentHeading = headingMatch[1].trim() || currentHeading;
36
- if (/^(working)$/i.test(currentHeading)) {
36
+ if (/^working/i.test(currentHeading)) {
37
37
  section = 'working';
38
- } else if (/^(long[- ]?term|active)$/i.test(currentHeading)) {
38
+ } else if (/^(long[- ]?term|active)/i.test(currentHeading)) {
39
39
  section = 'long_term';
40
- } else if (/^(history|archive)$/i.test(currentHeading)) {
40
+ } else if (/^(history|archive)/i.test(currentHeading)) {
41
41
  section = 'history';
42
42
  }
43
43
  continue;
@@ -120,7 +120,7 @@ export function extractTitles(content) {
120
120
 
121
121
  const title = headingMatch[1].trim();
122
122
  if (!title) continue;
123
- if (/^(working|long[- ]?term|history|active|archive|current context|stable facts|no longer current)$/i.test(title)) continue;
123
+ if (/^(working|long[- ]?term|history|active|archive)/i.test(title)) continue;
124
124
  titles.push(title);
125
125
  }
126
126
 
@@ -160,9 +160,8 @@ function inferDocumentTopic(bullets, fallback = 'general') {
160
160
  return firstTopic || fallback;
161
161
  }
162
162
 
163
- function renderSection(lines, title, subsectionTitle, bullets, forceHistory = false) {
163
+ function renderSection(lines, title, bullets, forceHistory = false) {
164
164
  lines.push(`## ${title}`);
165
- lines.push(`### ${subsectionTitle}`);
166
165
 
167
166
  if (!bullets || bullets.length === 0) {
168
167
  lines.push('_No entries yet._');
@@ -189,11 +188,11 @@ export function renderCompactedDocument(working, longTerm, history, options = {}
189
188
  const docTopic = normalizeTopic(options.titleTopic || inferDocumentTopic([...working, ...longTerm, ...history], 'general'));
190
189
  lines.push(`# Memory: ${topicHeading(docTopic)}`);
191
190
  lines.push('');
192
- renderSection(lines, 'Working', 'Current context', working);
191
+ renderSection(lines, 'Working memory (current context subject to change)', working);
193
192
  lines.push('');
194
- renderSection(lines, 'Long-Term', 'Stable facts', longTerm);
193
+ renderSection(lines, 'Long-term memory (stable facts that are unlikely to change)', longTerm);
195
194
  lines.push('');
196
- renderSection(lines, 'History', 'No longer current', history, true);
195
+ renderSection(lines, 'History (no longer current)', history, true);
197
196
 
198
197
  return lines.join('\n').trim();
199
198
  }
package/src/cli/auth.js CHANGED
@@ -61,7 +61,7 @@ export async function loginInteractive() {
61
61
  process.stderr.write('\n');
62
62
  process.stderr.write(` ${c.bold}${c.cyan}Login${c.reset}\n`);
63
63
  process.stderr.write('\n');
64
- process.stderr.write(` ${c.white}simple-memory uses an LLM provider for extraction and retrieval.${c.reset}\n`);
64
+ process.stderr.write(` ${c.white}nanomem uses an LLM provider for extraction and retrieval.${c.reset}\n`);
65
65
  process.stderr.write(` ${c.white}Select your provider, model, and paste your API key to get started.${c.reset}\n`);
66
66
  process.stderr.write('\n');
67
67
 
@@ -8,6 +8,7 @@ import { serialize, toZip } from '../utils/portability.js';
8
8
  import { safeDateIso } from '../bullets/normalize.js';
9
9
  import { extractSessionsFromOAFastchatExport } from '../imports/oaFastchat.js';
10
10
  import { isChatGptExport, parseChatGptExport } from '../imports/chatgpt.js';
11
+ import { isClaudeExport, parseClaudeExport } from '../imports/claude.js';
11
12
  import { parseMarkdownFiles } from '../imports/markdown.js';
12
13
  import { loginInteractive } from './auth.js';
13
14
  import { writeConfigFile, CONFIG_PATH } from './config.js';
@@ -77,6 +78,11 @@ function parseConversations(input, flags) {
77
78
  return parseChatGptExport(parsed);
78
79
  }
79
80
 
81
+ // Claude export (conversations.json)
82
+ if (isClaudeExport(parsed)) {
83
+ return parseClaudeExport(parsed);
84
+ }
85
+
80
86
  // Plain messages array
81
87
  if (Array.isArray(parsed)) {
82
88
  return [{ title: null, messages: parsed }];
@@ -129,7 +135,7 @@ export async function retrieve(positionals, flags, mem) {
129
135
 
130
136
  const result = await mem.retrieve(query, conversationText);
131
137
  if (!result || !result.assembledContext) {
132
- return { assembledContext: null, message: 'No relevant context found.' };
138
+ return 'No relevant context found.';
133
139
  }
134
140
  return result;
135
141
  }
@@ -161,10 +167,18 @@ export async function importCmd(positionals, flags, mem, config, { showProgress,
161
167
 
162
168
  export async function add(positionals, flags, mem, config, { showProgress, spinnerHolder } = {}) {
163
169
  const input = positionals[0] ?? (!process.stdin.isTTY ? await readStdin() : null);
164
- if (!input) throw new Error('Usage: memory add <text>');
170
+ if (!input) throw new Error('Usage: nanomem add <text>');
171
+
172
+ const conversations = parseConversations(input, flags);
173
+ return ingestConversations(conversations, 'add', mem, { showProgress, spinnerHolder, status: 'added', showDiff: true });
174
+ }
175
+
176
+ export async function update(positionals, flags, mem, config, { showProgress, spinnerHolder } = {}) {
177
+ const input = positionals[0] ?? (!process.stdin.isTTY ? await readStdin() : null);
178
+ if (!input) throw new Error('Usage: nanomem update <text>');
165
179
 
166
180
  const conversations = parseConversations(input, flags);
167
- return ingestConversations(conversations, 'conversation', mem, { showProgress, spinnerHolder, status: 'added', showDiff: true });
181
+ return ingestConversations(conversations, 'update', mem, { showProgress, spinnerHolder, status: 'updated', showDiff: true });
168
182
  }
169
183
 
170
184
  async function ingestConversations(conversations, extractionMode, mem, { showProgress, spinnerHolder, status, showDiff = false }) {
@@ -180,7 +194,7 @@ async function ingestConversations(conversations, extractionMode, mem, { showPro
180
194
 
181
195
  for (let i = 0; i < total; i++) {
182
196
  const conv = conversations[i];
183
- const label = conv.title || `conversation ${i + 1}`;
197
+ const label = conv.title || (total > 1 ? `conversation ${i + 1}` : 'conversation');
184
198
 
185
199
  if (showProgress) {
186
200
  const counter = total > 1 ? `${c.gray}(${i + 1}/${total})${c.reset} ` : '';
@@ -259,13 +273,48 @@ export async function write(positionals, flags, mem) {
259
273
  return { status: 'written', path };
260
274
  }
261
275
 
262
- export async function del(positionals, flags, mem) {
263
- const path = positionals[0];
264
- if (!path) throw new Error('Usage: memory delete <path>');
276
+ export async function del(positionals, flags, mem, config, { showProgress, spinnerHolder } = {}) {
277
+ const query = positionals[0] ?? (!process.stdin.isTTY ? await readStdin() : null);
278
+ if (!query) throw new Error('Usage: nanomem delete <query>');
265
279
 
266
280
  await mem.init();
267
- await mem.storage.delete(path);
268
- return { status: 'deleted', path };
281
+
282
+ const isTTY = process.stderr.isTTY;
283
+ const c = isTTY ? { green: '\x1b[32m', yellow: '\x1b[33m', dim: '\x1b[2m', bold: '\x1b[1m', reset: '\x1b[0m' }
284
+ : { green: '', yellow: '', dim: '', bold: '', reset: '' };
285
+
286
+ let spinner = null;
287
+ if (showProgress && isTTY) {
288
+ spinner = createSpinner('thinking…');
289
+ if (spinnerHolder) spinnerHolder.current = spinner;
290
+ }
291
+
292
+ const result = await mem.deleteContent(query, { deep: !!flags.deep });
293
+
294
+ if (spinnerHolder) spinnerHolder.current = null;
295
+
296
+ if (showProgress) {
297
+ if (result.status === 'error') {
298
+ spinner?.stop(` ${c.yellow}⚠ ${result.error}${c.reset}`);
299
+ } else if (result.deleteCalls > 0) {
300
+ spinner?.stop(` ${c.green}✓ ${result.deleteCalls} fact${result.deleteCalls === 1 ? '' : 's'} deleted${c.reset}`);
301
+ } else {
302
+ spinner?.stop(` ${c.dim}– nothing matched${c.reset}`);
303
+ }
304
+ if (result.writes?.length) {
305
+ for (const { path, before, after } of result.writes) {
306
+ if (after === null) {
307
+ // Entire file was deleted (no bullets remained)
308
+ process.stderr.write(`\n \x1b[1m\x1b[36m${path}\x1b[0m \x1b[2mfile deleted\x1b[0m\n`);
309
+ } else {
310
+ printFileDiff(path, before, after);
311
+ }
312
+ }
313
+ }
314
+ }
315
+
316
+ const status = result.status === 'error' ? 'error' : 'deleted_content';
317
+ return { status, deleteCalls: result.deleteCalls, error: result.error };
269
318
  }
270
319
 
271
320
  export async function search(positionals, flags, mem) {
package/src/cli/config.js CHANGED
@@ -83,7 +83,7 @@ export async function resolveConfig(flags) {
83
83
 
84
84
  // ─── Create a memory instance from resolved config ──────────────
85
85
 
86
- const LLM_COMMANDS = new Set(['retrieve', 'extract', 'compact', 'import', 'add']);
86
+ const LLM_COMMANDS = new Set(['retrieve', 'extract', 'compact', 'import', 'add', 'update', 'delete']);
87
87
 
88
88
  export function createMemoryFromConfig(config, command, { onToolCall, onProgress, onCompactProgress } = {}) {
89
89
  const needsLlm = LLM_COMMANDS.has(command);
package/src/cli/help.js CHANGED
@@ -11,7 +11,8 @@ Commands:
11
11
  status Show current config and storage stats
12
12
 
13
13
  Memory:
14
- add <text> Add raw text directly and extract facts
14
+ add <text> Add new facts from text (creates or appends files)
15
+ update <text> Edit existing facts from text (only modifies existing files)
15
16
  import <file|dir|-> Import conversations or notes and extract facts
16
17
  retrieve <query> [--context <file>] Retrieve relevant context for a query
17
18
  compact Deduplicate and archive stale facts
@@ -39,6 +40,7 @@ Flags:
39
40
  Examples:
40
41
  nanomem login
41
42
  nanomem add "User: I moved to Seattle."
43
+ nanomem update "User: Actually I moved to Portland, not Seattle."
42
44
  nanomem import conversations.json
43
45
  nanomem import my-notes.md
44
46
  nanomem import ./notes/
@@ -48,7 +50,8 @@ Examples:
48
50
  `;
49
51
 
50
52
  export const COMMAND_HELP = {
51
- add: 'Usage: nanomem add <text>\n\nAdd raw text directly and extract facts into memory.\nAccepts quoted text or piped stdin.\nRequires an LLM API key.',
53
+ add: 'Usage: nanomem add <text>\n\nAdd new facts from text. The LLM will create a new file or append to an existing one.\nAccepts quoted text or piped stdin.\nRequires an LLM API key.',
54
+ update: 'Usage: nanomem update <text>\n\nEdit existing facts from text. The LLM will only modify files that already exist — no new files are created.\nAccepts quoted text or piped stdin.\nRequires an LLM API key.',
52
55
  retrieve: 'Usage: nanomem retrieve <query> [--context <file>]\n\nRetrieve relevant memory context for a query.\nRequires an LLM API key.',
53
56
  compact: 'Usage: nanomem compact\n\nDeduplicate and archive stale facts across all memory files.\nRequires an LLM API key.',
54
57
  ls: 'Usage: nanomem ls [path]\n\nList files and directories in storage.',
package/src/cli/output.js CHANGED
@@ -129,6 +129,10 @@ function formatAction(result) {
129
129
  return section(green('✓ Facts extracted'), [
130
130
  ['Files updated', result.writeCalls],
131
131
  ]);
132
+ case 'deleted_content':
133
+ return result.deleteCalls > 0
134
+ ? section(green('✓ Memory updated'), [['Facts removed', result.deleteCalls]])
135
+ : dim('– Nothing matched');
132
136
  case 'skipped':
133
137
  return dim('– Nothing to extract (conversation too short)');
134
138
  case 'imported':
package/src/cli.js CHANGED
@@ -34,11 +34,13 @@ const OPTIONS = {
34
34
  'session-id': { type: 'string' },
35
35
  'session-title': { type: 'string' },
36
36
  'confirm': { type: 'boolean', default: false },
37
- 'render': { type: 'boolean', default: false },
37
+ 'render': { type: 'boolean', default: false },
38
+ 'deep': { type: 'boolean', default: false },
38
39
  };
39
40
 
40
41
  const COMMAND_MAP = {
41
42
  add: commands.add,
43
+ update: commands.update,
42
44
  login: commands.login,
43
45
  init: commands.init,
44
46
  retrieve: commands.retrieve,
@@ -95,18 +97,19 @@ async function main() {
95
97
  const memOpts = {};
96
98
 
97
99
  // Wire progress for import/extract — spinner per session with live tool call updates
98
- const isImport = commandName === 'import' || commandName === 'add' || commandName === 'extract';
100
+ const isImport = commandName === 'import' || commandName === 'add' || commandName === 'update' || commandName === 'extract' || commandName === 'delete';
99
101
  const showProgress = isImport && !values.json && process.stderr.isTTY;
100
102
  const spinnerHolder = { current: null }; // shared mutable ref between onToolCall and import loop
101
103
  if (showProgress) {
102
104
  const TOOL_LABELS = {
103
105
  create_new_file: 'creating file',
104
106
  append_memory: 'appending',
105
- update_memory: 'updating',
106
107
  archive_memory: 'archiving',
107
108
  delete_memory: 'cleaning up',
108
109
  read_file: 'reading',
109
110
  list_files: 'scanning',
111
+ update_bullets: 'updating',
112
+ delete_bullet: 'deleting',
110
113
  };
111
114
  memOpts.onToolCall = (name) => {
112
115
  const label = TOOL_LABELS[name] || name;
@@ -33,16 +33,13 @@ Input is one memory file. Rewrite it into:
33
33
 
34
34
  # Memory: <Topic>
35
35
 
36
- ## Working
37
- ### <Topic>
36
+ ## Working memory (current context subject to change)
38
37
  - fact | topic=<topic> | tier=working | status=active | source=user_statement|assistant_summary|inference|system | confidence=high|medium|low | updated_at=YYYY-MM-DD | review_at=YYYY-MM-DD(optional) | expires_at=YYYY-MM-DD(optional)
39
38
 
40
- ## Long-Term
41
- ### <Topic>
39
+ ## Long-term memory (stable facts that are unlikely to change)
42
40
  - fact | topic=<topic> | tier=long_term | status=active | source=user_statement|assistant_summary|inference|system | confidence=high|medium|low | updated_at=YYYY-MM-DD | expires_at=YYYY-MM-DD(optional)
43
41
 
44
- ## History
45
- ### <Topic>
42
+ ## History (no longer current)
46
43
  - fact | topic=<topic> | tier=history | status=superseded|expired|uncertain | source=user_statement|assistant_summary|inference|system | confidence=high|medium|low | updated_at=YYYY-MM-DD | expires_at=YYYY-MM-DD(optional)
47
44
 
48
45
  Rules: