@openanonymity/nanomem 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +194 -0
- package/package.json +85 -0
- package/src/backends/BaseStorage.js +177 -0
- package/src/backends/filesystem.js +177 -0
- package/src/backends/indexeddb.js +208 -0
- package/src/backends/ram.js +113 -0
- package/src/backends/schema.js +42 -0
- package/src/bullets/bulletIndex.js +125 -0
- package/src/bullets/compaction.js +109 -0
- package/src/bullets/index.js +16 -0
- package/src/bullets/normalize.js +241 -0
- package/src/bullets/parser.js +199 -0
- package/src/bullets/scoring.js +53 -0
- package/src/cli/auth.js +323 -0
- package/src/cli/commands.js +411 -0
- package/src/cli/config.js +120 -0
- package/src/cli/diff.js +68 -0
- package/src/cli/help.js +84 -0
- package/src/cli/output.js +269 -0
- package/src/cli/spinner.js +54 -0
- package/src/cli.js +178 -0
- package/src/engine/compactor.js +247 -0
- package/src/engine/executors.js +152 -0
- package/src/engine/ingester.js +229 -0
- package/src/engine/retriever.js +414 -0
- package/src/engine/toolLoop.js +176 -0
- package/src/imports/chatgpt.js +160 -0
- package/src/imports/index.js +14 -0
- package/src/imports/markdown.js +104 -0
- package/src/imports/oaFastchat.js +124 -0
- package/src/index.js +199 -0
- package/src/llm/anthropic.js +264 -0
- package/src/llm/openai.js +179 -0
- package/src/prompt_sets/conversation/ingestion.js +51 -0
- package/src/prompt_sets/document/ingestion.js +43 -0
- package/src/prompt_sets/index.js +31 -0
- package/src/types.js +382 -0
- package/src/utils/portability.js +174 -0
- package/types/backends/BaseStorage.d.ts +42 -0
- package/types/backends/filesystem.d.ts +11 -0
- package/types/backends/indexeddb.d.ts +12 -0
- package/types/backends/ram.d.ts +8 -0
- package/types/backends/schema.d.ts +14 -0
- package/types/bullets/bulletIndex.d.ts +47 -0
- package/types/bullets/compaction.d.ts +10 -0
- package/types/bullets/index.d.ts +36 -0
- package/types/bullets/normalize.d.ts +95 -0
- package/types/bullets/parser.d.ts +31 -0
- package/types/bullets/scoring.d.ts +12 -0
- package/types/engine/compactor.d.ts +27 -0
- package/types/engine/executors.d.ts +46 -0
- package/types/engine/ingester.d.ts +29 -0
- package/types/engine/retriever.d.ts +50 -0
- package/types/engine/toolLoop.d.ts +9 -0
- package/types/imports/chatgpt.d.ts +14 -0
- package/types/imports/index.d.ts +3 -0
- package/types/imports/markdown.d.ts +31 -0
- package/types/imports/oaFastchat.d.ts +30 -0
- package/types/index.d.ts +21 -0
- package/types/llm/anthropic.d.ts +16 -0
- package/types/llm/openai.d.ts +16 -0
- package/types/prompt_sets/conversation/ingestion.d.ts +7 -0
- package/types/prompt_sets/document/ingestion.d.ts +7 -0
- package/types/prompt_sets/index.d.ts +11 -0
- package/types/types.d.ts +293 -0
- package/types/utils/portability.d.ts +33 -0
package/README.md
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
# @openanonymity/nanomem
|
|
2
|
+
|
|
3
|
+
**Personal memory you own, in files you can actually read.**
|
|
4
|
+
|
|
5
|
+
`nanomem` turns chats, notes, and exports into a markdown memory system that an LLM can update and retrieve as facts evolve over time. The result stays inspectable, portable, and user-owned instead of disappearing into hidden vector state.
|
|
6
|
+
|
|
7
|
+
## Why it exists
|
|
8
|
+
|
|
9
|
+
`nanomem` is for building memory that can last beyond a single chat session, model, or tool.
|
|
10
|
+
|
|
11
|
+
It turns raw conversations, notes, and exports into a memory system that can:
|
|
12
|
+
|
|
13
|
+
- compress repeated interactions into stable knowledge
|
|
14
|
+
- keep up with changing facts over time
|
|
15
|
+
- preserve history without cluttering current context
|
|
16
|
+
- stay inspectable and user-controlled
|
|
17
|
+
|
|
18
|
+
Retrieval is only one part of memory. `nanomem` is built for the maintenance layer too: updating facts, resolving conflicts, and preserving history over time.
|
|
19
|
+
|
|
20
|
+
## Features
|
|
21
|
+
|
|
22
|
+
- **User-owned memory.** Keep memory in markdown files you can inspect, edit, version, and move across tools.
|
|
23
|
+
- **Evolving memory state.** Keep facts current as they change over time instead of treating memory as an append-only log.
|
|
24
|
+
- **Compaction and cleanup.** Collapse repeated signals into stable knowledge and move stale memory into history.
|
|
25
|
+
- **Conflict-aware updates.** Resolve outdated or contradictory facts using recency, source, and confidence.
|
|
26
|
+
- **Import your existing history.** Start from ChatGPT exports, [OA Chat](https://chat.openanonymity.ai) exports, transcripts, message arrays, markdown notes, or whole markdown directories.
|
|
27
|
+
- **Flexible storage.** Run on local files, IndexedDB, in-memory storage, or a custom backend.
|
|
28
|
+
- **Built to plug in.** Use it from the CLI, as a library, or as a memory layer for other agents.
|
|
29
|
+
|
|
30
|
+
## Quick start
|
|
31
|
+
|
|
32
|
+
Install:
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
npm install -g @openanonymity/nanomem
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
Set up once:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
nanomem login
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
This walks you through provider, model, API key, and where to store your memory. Config is saved to `~/.config/nanomem/config.json`. Filesystem memory lives in `~/nanomem/` by default.
|
|
45
|
+
|
|
46
|
+
Import history or notes:
|
|
47
|
+
|
|
48
|
+
```bash
|
|
49
|
+
nanomem import conversations.json
|
|
50
|
+
nanomem import my-notes.md
|
|
51
|
+
nanomem import ./notes/
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
Retrieve memory later:
|
|
55
|
+
|
|
56
|
+
```bash
|
|
57
|
+
nanomem retrieve "what are my hobbies?"
|
|
58
|
+
nanomem retrieve "what are my hobbies?" --render
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
Compact and clean up memory:
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
nanomem compact
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
Scripted setup also works:
|
|
68
|
+
|
|
69
|
+
```bash
|
|
70
|
+
nanomem login --provider openai --api-key sk-... --model gpt-5.4-mini
|
|
71
|
+
nanomem login --provider anthropic --api-key sk-ant-... --model claude-sonnet-4-6 --path ~/project/memory
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
Supported providers include OpenAI, Anthropic, Tinfoil, OpenRouter, and OpenAI-compatible endpoints via `--base-url`.
|
|
75
|
+
|
|
76
|
+
## How it works
|
|
77
|
+
|
|
78
|
+
```text
|
|
79
|
+
conversation / notes / exports
|
|
80
|
+
|
|
|
81
|
+
v
|
|
82
|
+
memory import / ingest
|
|
83
|
+
|
|
|
84
|
+
| LLM extraction with tool calls
|
|
85
|
+
| create / append / update / archive / delete
|
|
86
|
+
v
|
|
87
|
+
markdown memory filesystem
|
|
88
|
+
|
|
|
89
|
+
| memory retrieve
|
|
90
|
+
| file selection + bullet-level scoring
|
|
91
|
+
v
|
|
92
|
+
assembled memory context
|
|
93
|
+
|
|
|
94
|
+
v
|
|
95
|
+
memory compact
|
|
96
|
+
dedup + temporal cleanup + history preservation
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
The core engine has three parts:
|
|
100
|
+
|
|
101
|
+
- **Ingestion.** Extract durable facts from conversations or documents and organize them into topic files.
|
|
102
|
+
- **Retrieval.** Navigate the memory filesystem and assemble relevant context for a query.
|
|
103
|
+
- **Compaction.** Deduplicate repeated facts, keep current memory concise, and move stale or superseded facts into history.
|
|
104
|
+
|
|
105
|
+
## Memory format
|
|
106
|
+
|
|
107
|
+
Memory is stored as markdown with structured metadata:
|
|
108
|
+
|
|
109
|
+
```md
|
|
110
|
+
# Memory: Work
|
|
111
|
+
|
|
112
|
+
## Working
|
|
113
|
+
### Current context
|
|
114
|
+
- Preparing for a product launch next month | topic=work | tier=working | status=active | source=user_statement | confidence=high | updated_at=2026-04-07 | review_at=2026-04-20
|
|
115
|
+
|
|
116
|
+
## Long-Term
|
|
117
|
+
### Stable facts
|
|
118
|
+
- Leads the backend team at Acme | topic=work | tier=long_term | status=active | source=user_statement | confidence=high | updated_at=2026-04-07
|
|
119
|
+
|
|
120
|
+
## History
|
|
121
|
+
### No longer current
|
|
122
|
+
- Previously lived in New York | topic=personal | tier=history | status=superseded | source=user_statement | confidence=high | updated_at=2024-06-01
|
|
123
|
+
```
|
|
124
|
+
|
|
125
|
+
That structure is what lets the system do more than retrieval: it can keep track of source, confidence, recency, temporary context, and historical state.
|
|
126
|
+
|
|
127
|
+
## Using it in code
|
|
128
|
+
|
|
129
|
+
```js
|
|
130
|
+
import { createMemoryBank } from '@openanonymity/nanomem';
|
|
131
|
+
|
|
132
|
+
const memory = createMemoryBank({
|
|
133
|
+
llm: { apiKey: 'sk-...', model: 'gpt-5.4-mini' },
|
|
134
|
+
storage: 'filesystem',
|
|
135
|
+
storagePath: './memory'
|
|
136
|
+
});
|
|
137
|
+
|
|
138
|
+
await memory.init();
|
|
139
|
+
|
|
140
|
+
await memory.ingest([
|
|
141
|
+
{ role: 'user', content: 'I just moved to Seattle.' },
|
|
142
|
+
{ role: 'assistant', content: 'Noted.' }
|
|
143
|
+
]);
|
|
144
|
+
|
|
145
|
+
const result = await memory.retrieve('Where do I live now?');
|
|
146
|
+
await memory.compact();
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
## Common commands
|
|
150
|
+
|
|
151
|
+
```bash
|
|
152
|
+
nanomem import <file|dir|->
|
|
153
|
+
nanomem retrieve <query> [--context <file>]
|
|
154
|
+
nanomem tree
|
|
155
|
+
nanomem compact
|
|
156
|
+
nanomem export --format zip
|
|
157
|
+
nanomem status
|
|
158
|
+
```
|
|
159
|
+
|
|
160
|
+
For terminal use, `--render` will format markdown-heavy output from commands like `read` and `retrieve` into a more readable ANSI-rendered view, while leaving `--json` and piped output unchanged.
|
|
161
|
+
|
|
162
|
+
## Import formats
|
|
163
|
+
|
|
164
|
+
`nanomem import` supports:
|
|
165
|
+
|
|
166
|
+
- ChatGPT exports
|
|
167
|
+
- [OA Chat](https://chat.openanonymity.ai) exports
|
|
168
|
+
- markdown notes
|
|
169
|
+
- recursive markdown directory imports
|
|
170
|
+
- JSON message arrays
|
|
171
|
+
- plain text `User:` / `Assistant:` transcripts
|
|
172
|
+
|
|
173
|
+
Import can operate in both conversation-oriented and document-oriented modes, depending on the source or explicit flags.
|
|
174
|
+
|
|
175
|
+
```bash
|
|
176
|
+
nanomem import conversations.json # conversation mode
|
|
177
|
+
nanomem import ./notes/ # document mode (auto for directories)
|
|
178
|
+
nanomem import my-notes.md --format markdown # document mode (explicit)
|
|
179
|
+
```
|
|
180
|
+
|
|
181
|
+
## Storage backends
|
|
182
|
+
|
|
183
|
+
- `filesystem` for local markdown folders
|
|
184
|
+
- `indexeddb` for browser storage
|
|
185
|
+
- `ram` for testing or ephemeral usage
|
|
186
|
+
- custom backend objects for your own storage layer
|
|
187
|
+
|
|
188
|
+
## Learn more
|
|
189
|
+
|
|
190
|
+
Internals: [docs/memory-system.md](./docs/memory-system.md)
|
|
191
|
+
|
|
192
|
+
## License
|
|
193
|
+
|
|
194
|
+
MIT
|
package/package.json
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@openanonymity/nanomem",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "LLM-driven personal memory with agentic retrieval, extraction, and compaction",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"nanomem": "src/cli.js"
|
|
8
|
+
},
|
|
9
|
+
"engines": {
|
|
10
|
+
"node": ">=18"
|
|
11
|
+
},
|
|
12
|
+
"repository": {
|
|
13
|
+
"type": "git",
|
|
14
|
+
"url": "git+https://github.com/openanonymity/nanomem.git"
|
|
15
|
+
},
|
|
16
|
+
"bugs": {
|
|
17
|
+
"url": "https://github.com/openanonymity/nanomem/issues"
|
|
18
|
+
},
|
|
19
|
+
"scripts": {
|
|
20
|
+
"prepublishOnly": "npm run build:types",
|
|
21
|
+
"build:types": "tsc",
|
|
22
|
+
"prepack": "tsc"
|
|
23
|
+
},
|
|
24
|
+
"exports": {
|
|
25
|
+
".": {
|
|
26
|
+
"types": "./types/index.d.ts",
|
|
27
|
+
"default": "./src/index.js"
|
|
28
|
+
},
|
|
29
|
+
"./bullets": {
|
|
30
|
+
"types": "./types/bullets/index.d.ts",
|
|
31
|
+
"default": "./src/bullets/index.js"
|
|
32
|
+
},
|
|
33
|
+
"./backends": {
|
|
34
|
+
"types": "./types/backends/BaseStorage.d.ts",
|
|
35
|
+
"default": "./src/backends/BaseStorage.js"
|
|
36
|
+
},
|
|
37
|
+
"./utils": {
|
|
38
|
+
"types": "./types/utils/portability.d.ts",
|
|
39
|
+
"default": "./src/utils/portability.js"
|
|
40
|
+
},
|
|
41
|
+
"./imports": {
|
|
42
|
+
"types": "./types/imports/index.d.ts",
|
|
43
|
+
"default": "./src/imports/index.js"
|
|
44
|
+
},
|
|
45
|
+
"./llm": {
|
|
46
|
+
"types": "./types/llm/openai.d.ts",
|
|
47
|
+
"default": "./src/llm/openai.js"
|
|
48
|
+
},
|
|
49
|
+
"./llm/openai": {
|
|
50
|
+
"types": "./types/llm/openai.d.ts",
|
|
51
|
+
"default": "./src/llm/openai.js"
|
|
52
|
+
},
|
|
53
|
+
"./llm/anthropic": {
|
|
54
|
+
"types": "./types/llm/anthropic.d.ts",
|
|
55
|
+
"default": "./src/llm/anthropic.js"
|
|
56
|
+
}
|
|
57
|
+
},
|
|
58
|
+
"types": "./types/index.d.ts",
|
|
59
|
+
"main": "./src/index.js",
|
|
60
|
+
"files": [
|
|
61
|
+
"src/",
|
|
62
|
+
"types/"
|
|
63
|
+
],
|
|
64
|
+
"keywords": [
|
|
65
|
+
"memory",
|
|
66
|
+
"llm",
|
|
67
|
+
"ai",
|
|
68
|
+
"personal-memory",
|
|
69
|
+
"retrieval",
|
|
70
|
+
"extraction"
|
|
71
|
+
],
|
|
72
|
+
"license": "MIT",
|
|
73
|
+
"devDependencies": {
|
|
74
|
+
"@types/node": "^25.5.2",
|
|
75
|
+
"typescript": "^6.0.2"
|
|
76
|
+
},
|
|
77
|
+
"dependencies": {
|
|
78
|
+
"@pierre/diffs": "^1.1.12"
|
|
79
|
+
},
|
|
80
|
+
"directories": {
|
|
81
|
+
"doc": "docs"
|
|
82
|
+
},
|
|
83
|
+
"author": "openanonymity",
|
|
84
|
+
"homepage": "https://github.com/openanonymity/nanomem#readme"
|
|
85
|
+
}
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
/**
 * BaseStorage — Abstract storage backend interface.
 *
 * Subclasses must implement:
 *   init() → void
 *   _readRaw(path) → string|null
 *   _writeRaw(path, content, meta) → void
 *   delete(path) → void
 *   exists(path) → boolean
 *   rebuildTree() → void (regenerate _tree.md)
 *   exportAll() → [{path, content, ...}]
 *   clear() → void
 *
 * BaseStorage provides (NOT abstract — do not override):
 *   read(path) → _readRaw
 *   write(path, content) → metadata generation + _writeRaw + rebuildTree
 *
 * BaseStorage also provides default implementations for:
 *   search(query) → [{path, lines}]  (all matching lines per file)
 *   ls(dirPath) → {files: string[], dirs: string[]}
 *   getTree() → string
 */
/** @import { ExportRecord, ListResult, SearchResult, StorageMetadata } from '../types.js' */
import { parseBullets, extractTitles, countBullets } from '../bullets/index.js';

export class BaseStorage {

  // ─── Abstract (backends must implement) ─────────────────────

  /** @returns {Promise<void>} */ async init() { throw new Error('BaseStorage.init() not implemented'); }
  /** @returns {Promise<string | null>} */ async _readRaw(_path) { throw new Error('BaseStorage._readRaw() not implemented'); }
  async _writeRaw(_path, _content, _meta) { throw new Error('BaseStorage._writeRaw() not implemented'); }
  /** @param {string} _path @returns {Promise<void>} */ async delete(_path) { throw new Error('BaseStorage.delete() not implemented'); }
  /** @param {string} _path @returns {Promise<boolean>} */ async exists(_path) { throw new Error('BaseStorage.exists() not implemented'); }
  /** @returns {Promise<void>} */ async rebuildTree() { throw new Error('BaseStorage.rebuildTree() not implemented'); }
  /** @returns {Promise<ExportRecord[]>} */ async exportAll() { throw new Error('BaseStorage.exportAll() not implemented'); }
  /** @returns {Promise<void>} */ async clear() { throw new Error('BaseStorage.clear() not implemented'); }

  // ─── Provided: read/write ───────────────────────────────────

  /**
   * Read a file's raw content.
   * @param {string} path
   * @returns {Promise<string | null>} File content, or null when it does not exist.
   */
  async read(path) {
    return this._readRaw(path);
  }

  /**
   * Write a file. Normal memory files also get derived index metadata
   * (one-liner summary, bullet count, section titles) and trigger a
   * `_tree.md` rebuild. Internal paths are written verbatim with empty
   * metadata and without rebuilding the tree.
   * @param {string} path
   * @param {string} content
   * @returns {Promise<void>}
   */
  async write(path, content) {
    if (this._isInternalPath(path)) {
      await this._writeRaw(path, String(content || ''), {});
      return;
    }
    const str = String(content || '');
    const meta = {
      oneLiner: this._generateOneLiner(str),
      itemCount: countBullets(str),
      titles: extractTitles(str),
    };
    await this._writeRaw(path, str, meta);
    await this.rebuildTree();
  }

  // ─── Shared: getTree, search, ls ───────────────────────────

  /**
   * Return the generated `_tree.md` index, or null if it has not been built.
   * @returns {Promise<string | null>}
   */
  async getTree() {
    return this.read('_tree.md');
  }

  /**
   * Case-insensitive substring search across all non-internal files.
   * @param {string} query
   * @returns {Promise<SearchResult[]>} One entry per matching file; `lines`
   *   holds every trimmed, non-empty line that contains the query.
   */
  async search(query) {
    if (!query?.trim()) return [];
    const lowerQuery = query.toLowerCase();
    const all = await this.exportAll();
    const results = [];

    for (const rec of all) {
      if (this._isInternalPath(rec.path)) continue;
      const content = rec.content || '';
      if (!content.toLowerCase().includes(lowerQuery)) continue;

      // Return all lines that contain the query
      const matchingLines = content.split('\n')
        .filter(line => line.toLowerCase().includes(lowerQuery))
        .map(line => line.trim())
        .filter(Boolean);

      results.push({
        path: rec.path,
        lines: matchingLines,
      });
    }

    return results;
  }

  /**
   * List the immediate children of a directory.
   * @param {string} [dirPath] - Directory to list; omit for the root.
   * @returns {Promise<ListResult>} `files` contains full file paths directly
   *   under dirPath; `dirs` contains the deduplicated names of immediate
   *   subdirectories. Internal paths are excluded.
   */
  async ls(dirPath) {
    const allPaths = await this._listAllPaths();
    const prefix = dirPath ? dirPath + '/' : '';
    const files = [];
    const dirSet = new Set();

    for (const filePath of allPaths) {
      if (this._isInternalPath(filePath)) continue;
      if (prefix && !filePath.startsWith(prefix)) continue;
      const relative = filePath.slice(prefix.length);
      if (!relative.includes('/')) {
        files.push(filePath);
      } else {
        dirSet.add(relative.split('/')[0]);
      }
    }

    return { files, dirs: [...dirSet] };
  }

  // ─── Shared helpers ──────────────────────────────────────────

  /** True for bookkeeping paths hidden from listings and search. */
  _isInternalPath(path) {
    return path === '_tree.md';
  }

  /** Override for efficient path listing. Default uses exportAll(). */
  async _listAllPaths() {
    const all = await this.exportAll();
    return all.map(r => r.path);
  }

  /** Parent directory of a path, or '' for a root-level path. */
  _parentPath(filePath) {
    const lastSlash = filePath.lastIndexOf('/');
    return lastSlash === -1 ? '' : filePath.slice(0, lastSlash);
  }

  /**
   * Generate a one-line summary of file content for the index.
   * Preference order: first few non-archive bullet texts, then section
   * titles, then the first non-heading line; always capped at 120 chars.
   */
  _generateOneLiner(content) {
    if (!content) return '';

    const bullets = parseBullets(content);
    if (bullets.length > 0) {
      const factTexts = bullets
        .filter(b => b.section !== 'archive')
        .slice(0, 4)
        .map(b => b.text.trim())
        .filter(Boolean);
      if (factTexts.length > 0) {
        const joined = factTexts.join('; ');
        return joined.length > 120 ? joined.slice(0, 117) + '...' : joined;
      }
    }

    const titles = extractTitles(content);
    if (titles.length > 0) {
      const joined = titles.join('; ');
      return joined.length > 120 ? joined.slice(0, 117) + '...' : joined;
    }

    const lines = content.split('\n');
    for (const line of lines) {
      const trimmed = line.trim();
      if (!trimmed || trimmed.startsWith('#')) continue;
      return trimmed.length > 120 ? trimmed.slice(0, 117) + '...' : trimmed;
    }
    return content.slice(0, 120);
  }
}
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* FileSystemStorage — Node.js filesystem storage backend.
|
|
3
|
+
*
|
|
4
|
+
* Stores memory files as .md files on disk. Uses fs/promises (Node 18+).
|
|
5
|
+
*/
|
|
6
|
+
/** @import { ExportRecord, StorageMetadata } from '../types.js' */
|
|
7
|
+
import { readdir, readFile, writeFile, unlink, mkdir, rm, stat } from 'node:fs/promises';
|
|
8
|
+
import { join, dirname } from 'node:path';
|
|
9
|
+
import { BaseStorage } from './BaseStorage.js';
|
|
10
|
+
import { countBullets } from '../bullets/index.js';
|
|
11
|
+
import { buildTree, createBootstrapRecords } from './schema.js';
|
|
12
|
+
|
|
13
|
+
class FileSystemStorage extends BaseStorage {
|
|
14
|
+
constructor(rootDir) {
|
|
15
|
+
super();
|
|
16
|
+
if (!rootDir) throw new Error('FileSystemStorage requires a rootDir');
|
|
17
|
+
this._root = rootDir;
|
|
18
|
+
this._initialized = false;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
async init() {
|
|
22
|
+
if (this._initialized) return;
|
|
23
|
+
this._initialized = true;
|
|
24
|
+
|
|
25
|
+
await mkdir(this._root, { recursive: true });
|
|
26
|
+
|
|
27
|
+
const entries = await this._walkFiles();
|
|
28
|
+
if (entries.length === 0) {
|
|
29
|
+
const seeds = createBootstrapRecords(Date.now());
|
|
30
|
+
for (const seed of seeds) {
|
|
31
|
+
await this._writeRaw(seed.path, seed.content || '');
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
async _readRaw(path) {
|
|
38
|
+
await this.init();
|
|
39
|
+
try {
|
|
40
|
+
return await readFile(this._resolve(path), 'utf-8');
|
|
41
|
+
} catch (err) {
|
|
42
|
+
if (err && typeof err === 'object' && 'code' in err && err.code === 'ENOENT') return null;
|
|
43
|
+
throw err;
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
async _writeRaw(path, content, _meta = {}) {
|
|
48
|
+
const fullPath = this._resolve(path);
|
|
49
|
+
await mkdir(dirname(fullPath), { recursive: true });
|
|
50
|
+
await writeFile(fullPath, String(content || ''), 'utf-8');
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
/**
|
|
54
|
+
* @param {string} path
|
|
55
|
+
* @returns {Promise<void>}
|
|
56
|
+
*/
|
|
57
|
+
async delete(path) {
|
|
58
|
+
if (this._isInternalPath(path)) return;
|
|
59
|
+
await this.init();
|
|
60
|
+
try {
|
|
61
|
+
await unlink(this._resolve(path));
|
|
62
|
+
} catch (err) {
|
|
63
|
+
if (!(err && typeof err === 'object' && 'code' in err && err.code === 'ENOENT')) throw err;
|
|
64
|
+
}
|
|
65
|
+
await this.rebuildTree();
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
async clear() {
|
|
69
|
+
await rm(this._root, { recursive: true, force: true });
|
|
70
|
+
this._initialized = false;
|
|
71
|
+
await this.init();
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
/**
|
|
75
|
+
* @param {string} path
|
|
76
|
+
* @returns {Promise<boolean>}
|
|
77
|
+
*/
|
|
78
|
+
async exists(path) {
|
|
79
|
+
await this.init();
|
|
80
|
+
try {
|
|
81
|
+
await stat(this._resolve(path));
|
|
82
|
+
return true;
|
|
83
|
+
} catch {
|
|
84
|
+
return false;
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
async rebuildTree() {
|
|
89
|
+
await this.init();
|
|
90
|
+
const allFiles = await this._walkFiles();
|
|
91
|
+
const files = allFiles.filter((f) => !this._isInternalPath(f)).sort();
|
|
92
|
+
const fileRecords = [];
|
|
93
|
+
|
|
94
|
+
for (const filePath of files) {
|
|
95
|
+
const content = await this.read(filePath);
|
|
96
|
+
let updatedAt = Date.now();
|
|
97
|
+
try {
|
|
98
|
+
const s = await stat(this._resolve(filePath));
|
|
99
|
+
updatedAt = s.mtimeMs;
|
|
100
|
+
} catch { /* skip */ }
|
|
101
|
+
fileRecords.push({
|
|
102
|
+
path: filePath,
|
|
103
|
+
itemCount: countBullets(content || ''),
|
|
104
|
+
oneLiner: this._generateOneLiner(content || ''),
|
|
105
|
+
updatedAt,
|
|
106
|
+
});
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
await this._writeRaw('_tree.md', buildTree(fileRecords));
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
/** @returns {Promise<ExportRecord[]>} */
|
|
113
|
+
async exportAll() {
|
|
114
|
+
await this.init();
|
|
115
|
+
const allFiles = await this._walkFiles();
|
|
116
|
+
const records = [];
|
|
117
|
+
|
|
118
|
+
for (const filePath of allFiles) {
|
|
119
|
+
const content = await this.read(filePath) || '';
|
|
120
|
+
let updatedAt = Date.now();
|
|
121
|
+
try {
|
|
122
|
+
const s = await stat(this._resolve(filePath));
|
|
123
|
+
updatedAt = s.mtimeMs;
|
|
124
|
+
} catch { /* skip */ }
|
|
125
|
+
records.push({
|
|
126
|
+
path: filePath,
|
|
127
|
+
content,
|
|
128
|
+
oneLiner: this._generateOneLiner(content),
|
|
129
|
+
itemCount: countBullets(content),
|
|
130
|
+
updatedAt,
|
|
131
|
+
});
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
return records;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
async _listAllPaths() {
|
|
138
|
+
await this.init();
|
|
139
|
+
return (await this._walkFiles()).filter((p) => !this._isInternalPath(p));
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
// ─── Internal helpers ────────────────────────────────────────
|
|
143
|
+
|
|
144
|
+
_resolve(path) {
|
|
145
|
+
return join(this._root, path);
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
async _ensureDir(dirPath) {
|
|
149
|
+
await mkdir(join(this._root, dirPath), { recursive: true });
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
async _walkFiles(dir = '') {
|
|
153
|
+
const results = [];
|
|
154
|
+
const fullDir = dir ? join(this._root, dir) : this._root;
|
|
155
|
+
|
|
156
|
+
let entries;
|
|
157
|
+
try {
|
|
158
|
+
entries = await readdir(fullDir, { withFileTypes: true });
|
|
159
|
+
} catch {
|
|
160
|
+
return results;
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
for (const entry of entries) {
|
|
164
|
+
if (entry.name.startsWith('.')) continue;
|
|
165
|
+
const relPath = dir ? `${dir}/${entry.name}` : entry.name;
|
|
166
|
+
if (entry.isDirectory()) {
|
|
167
|
+
results.push(...await this._walkFiles(relPath));
|
|
168
|
+
} else if (entry.name.endsWith('.md')) {
|
|
169
|
+
results.push(relPath);
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
return results;
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
export { FileSystemStorage };
|