memdir 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4) hide show
  1. package/README.md +162 -0
  2. package/index.d.ts +30 -0
  3. package/index.js +532 -0
  4. package/package.json +20 -0
package/README.md ADDED
@@ -0,0 +1,162 @@
1
+ # memdir
2
+
3
+ File-based memory for AI agents.
4
+
5
+ Markdown is the source of truth. Embeddings are just a cache for semantic search.
6
+
7
+ Fully local - no data leaves your machine, no dependencies, no API keys or subscriptions.
8
+
9
+ ## How it works
10
+
11
+ `memdir` keeps two local sources of truth: facts worth remembering in `memory.md`, and chat history in daily logs. On startup, it rebuilds an in-memory semantic index from those files using the embedding model.
12
+
13
+ Two storage files:
14
+
15
+ - `memory.md` long-term memory
16
+ - `YYYY-MM-DD.jsonl` past conversation logs
17
+
18
+ On startup, it rebuilds an in-memory semantic index from `memory.md` and recent log files.
19
+
20
+ ## Usage
21
+
22
+ ```js
23
+ import { Memory } from "memdir"
24
+ import { Ollama } from "ollama"
25
+ import { SOUL, AGENT } from "./systemPrompts.js"
26
+
27
+ const memory = new Memory()
28
+ const ollama = new Ollama()
29
+
30
+ const { memoryPrompt, tools } = await memory.init(async (text) => {
31
+ const result = await ollama.embeddings({ model: "nomic-embed-text", prompt: text })
32
+ return result.embedding
33
+ })
34
+
35
+ const systemPrompt = `
36
+ ${SOUL}
37
+
38
+ ${AGENT}
39
+
40
+ ${memoryPrompt}
41
+ `.trim()
42
+
43
+ let messages = [{ role: "system", content: systemPrompt }]
44
+ ```
45
+
46
+ `init()` returns two things to wire into your agent:
47
+
48
+ - **`memoryPrompt`** — memory instructions and stored facts. Append to your own system prompt.
49
+ - **`tools`** — `memory_write` and `memory_search` tools. Pass these to your model.
50
+
51
+ Then after each turn:
52
+
53
+ ```js
54
+ messages = await memory.afterTurn(messages)
55
+ ```
56
+
57
+ ## API
58
+
59
+ ### `new Memory({ dir? })`
60
+
61
+ | Option | Default | Description |
62
+ | ------ | ------------ | ------------------------------------ |
63
+ | `dir` | `'./memory'` | Directory where all files are stored |
64
+
65
+ ### `await memory.init(embedding)`
66
+
67
+ Initialises the manager. Must be called once before anything else.
68
+
69
+ - **`embedding`** — `async (text) => number[]`
70
+
71
+ The embedding function must have this shape:
72
+
73
+ ```js
74
+ async function embed(text) {
75
+ return [
76
+ /* numbers */
77
+ ]
78
+ }
79
+ ```
80
+
81
+ Returns `{ memoryPrompt, tools }`.
82
+
83
+ ### `await memory.afterTurn(messages)`
84
+
85
+ Convenience helper for completed turns. It finds the latest user/assistant pair
86
+ already present in `messages`, appends it to today's log, then runs `maybeFlush()`
87
+ and returns the updated message array.
88
+
89
+ The latest completed user and assistant messages must already be in `messages`.
90
+
91
+ ### `await memory.reindex()`
92
+
93
+ Rebuilds the in-memory index from `memory.md` and recent log files. Runs automatically on `init()`. Call it manually if you edit memory files outside the library.
94
+
95
+ ## Tools
96
+
97
+ The agent gets two tools automatically:
98
+
99
+ **`memory_write`** — saves a fact to `memory.md` that should persist across future conversations.
100
+
101
+ **`memory_search`** — searches past conversations and stored facts by semantic similarity. Use when the current message likely depends on prior context.
102
+
103
+ Your app should assemble the final system prompt itself. A good pattern is:
104
+
105
+ ```js
106
+ import { SOUL, AGENT } from "./systemPrompts.js"
107
+
108
+ const systemPrompt = `
109
+ ${SOUL}
110
+
111
+ ${AGENT}
112
+
113
+ ${memoryPrompt}
114
+ `.trim()
115
+ ```
116
+
117
+ ## Embeddings
118
+
119
+ The library does not create embeddings for you. You should pass a function that
120
+ turns a single string into an embedding vector.
121
+
122
+ Ollama example:
123
+
124
+ ```js
125
+ import { Ollama } from "ollama"
126
+ import { Memory } from "memdir"
127
+ import agentPrompt from "./agentPrompt.js"
128
+
129
+ const memory = new Memory()
130
+ const ollama = new Ollama()
131
+
132
+ async function embed(text) {
133
+ const { embedding } = await ollama.embeddings({
134
+ model: "nomic-embed-text",
135
+ prompt: text,
136
+ })
137
+ return embedding
138
+ }
139
+
140
+ const { memoryPrompt, tools } = await memory.init(embed)
141
+ ```
142
+
143
+ node-llama-cpp example:
144
+
145
+ ```js
146
+ import { getLlama } from "node-llama-cpp"
147
+ import { Memory } from "memdir"
148
+ import agentPrompt from "./agentPrompt.js"
149
+
150
+ const memory = new Memory()
151
+
152
+ const llama = await getLlama()
153
+ const model = await llama.loadModel({ modelPath: "./models/bge-small-en-v1.5-q8_0.gguf" })
154
+ const context = await model.createEmbeddingContext()
155
+
156
+ async function embed(text) {
157
+ const embedding = await context.getEmbeddingFor(text)
158
+ return embedding.vector
159
+ }
160
+
161
+ const { memoryPrompt, tools } = await memory.init(embed)
162
+ ```
package/index.d.ts ADDED
@@ -0,0 +1,30 @@
1
/** Embeds one string into a numeric vector (caller-supplied, e.g. Ollama). */
export type EmbedFn = (text: string) => Promise<number[]>

/** Minimal chat message shape: role plus text content. */
export type Message = {
  role: string
  content: string
}

/** Returned by Memory.init(): prompt text to append to the system prompt, plus tool definitions. */
export type InitResult = {
  memoryPrompt: string
  tools: object[]
}

/** Options for Memory.maybeFlush(). */
export type FlushOptions = {
  /** Total character count across messages that triggers a flush. */
  charThreshold?: number
  /** Maximum number of messages (including the system message) kept after a flush. */
  maxHistory?: number
  /** Base system-prompt text to keep in front of the refreshed memory prompt. */
  basePrompt?: string
}

/** Constructor options for Memory. */
export type MemoryOptions = {
  /** Directory where memory.md and daily logs are stored. */
  dir?: string
}

export class Memory {
  constructor(opts?: MemoryOptions)
  /** Must be called once before any other method. */
  init(embedFn: EmbedFn): Promise<InitResult>
  /** Appends one user/assistant exchange to today's log and indexes it. */
  appendLog(userContent: string, assistantContent: string): Promise<void>
  /** Logs the latest completed exchange in messages, then flushes if needed. */
  afterTurn(messages: Message[]): Promise<Message[]>
  /** Rebuilds the in-memory semantic index from memory.md and recent logs. */
  reindex(): Promise<void>
  /** Trims an oversized conversation; returns the new array or null if no action was needed. */
  maybeFlush(messages: Message[], opts?: FlushOptions): Promise<Message[] | null>
}
package/index.js ADDED
@@ -0,0 +1,532 @@
1
+ /**
2
+ * memdir — agent memory management
3
+ *
4
+ * Directory layout:
5
+ * memory/
6
+ * memory.md — long-term facts (human-readable markdown bullets)
7
+ * YYYY-MM-DD.jsonl — daily conversation logs (one JSON entry per line)
8
+ *
9
+ * Usage:
10
+ * const memory = new Memory()
11
+ *   const { memoryPrompt, tools } = await memory.init(async (text) => await embed(text))
12
+ *
13
+ * messages = await memory.afterTurn(messages)
14
+ *
15
+ * @typedef {(text: string) => Promise<number[]>} EmbedFn
16
+ * @typedef {(texts: string[]) => Promise<number[][]>} BatchEmbedFn
17
+ * @typedef {{ role: string, content: string }} Message
18
+ * @typedef {{ id: string, text: string, date?: string, source: 'log'|'memory', embedding?: number[] }} IndexEntry
19
+ * @typedef {{ memoryPrompt: string, tools: object[] }} InitResult
20
+ */
21
+
22
+ import fs from "fs"
23
+ import path from "path"
24
+ import { createHash } from "crypto"
25
+
26
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------

// Total characters across all messages at which maybeFlush() trims the conversation.
const FLUSH_CHAR_THRESHOLD = 24_000
// Max messages surviving a flush: the system message plus the 49 most recent.
const FLUSH_MAX_HISTORY = 50
// reindex() only loads daily log files newer than this many days.
const LOG_LOOKBACK_DAYS = 30
33
+
34
+ // ---------------------------------------------------------------------------
35
+ // WriteQueue — serialises async mutations on a single promise chain
36
+ // ---------------------------------------------------------------------------
37
+
38
class WriteQueue {
  #tail = Promise.resolve()

  /**
   * Enqueue an async operation behind all previously queued operations.
   * The returned promise is the operation's own — callers see its result
   * or rejection. The internal chain catches (and logs) failures so a
   * single rejected operation never stalls the queue.
   * @param {() => Promise<void>} fn
   */
  run(fn) {
    const operation = this.#tail.then(fn)
    // Keep the chain alive even when an operation rejects: log and move on.
    this.#tail = operation.catch((err) => console.error("[WriteQueue]", err.message))
    return operation
  }
}
50
+
51
+ // ---------------------------------------------------------------------------
52
+ // Pure helpers
53
+ // ---------------------------------------------------------------------------
54
+
55
/**
 * Hex-encoded SHA-256 digest of a UTF-8 string (used as a stable content id).
 * @param {string} text
 * @returns {string}
 */
function sha256(text) {
  return createHash("sha256").update(text).digest("hex")
}
57
+
58
/**
 * Cosine similarity of two equal-length vectors.
 * Returns 0 (rather than NaN) when either vector has zero magnitude.
 * @param {number[]} a
 * @param {number[]} b
 * @returns {number}
 */
function cosineSim(a, b) {
  let dot = 0
  let normA = 0
  let normB = 0
  for (let i = 0; i < a.length; i++) {
    const x = a[i]
    const y = b[i]
    dot += x * y
    normA += x * x
    normB += y * y
  }
  if (normA === 0 || normB === 0) return 0
  return dot / (Math.sqrt(normA) * Math.sqrt(normB))
}
74
+
75
/**
 * Atomically replace a file's contents: write a sibling `.tmp` file, then
 * rename it over the target. If the rename fails with EXDEV (cross-device
 * rename, seen on some Windows setups) fall back to a plain — non-atomic —
 * write and clean up the tmp file on a best-effort basis.
 * @param {string} filePath
 * @param {string} content
 */
async function atomicWrite(filePath, content) {
  const tmpPath = `${filePath}.tmp`
  await fs.promises.writeFile(tmpPath, content, "utf-8")
  try {
    await fs.promises.rename(tmpPath, filePath)
    return
  } catch (err) {
    if (err.code !== "EXDEV") throw err
  }
  // EXDEV fallback: direct write, then remove the orphaned tmp file.
  await fs.promises.writeFile(filePath, content, "utf-8")
  await fs.promises.unlink(tmpPath).catch(() => {})
}
92
+
93
/**
 * Read a UTF-8 text file, treating a missing file as empty content.
 * Errors other than ENOENT are rethrown.
 * @param {string} filePath
 * @returns {Promise<string>}
 */
async function readSafe(filePath) {
  try {
    return await fs.promises.readFile(filePath, "utf-8")
  } catch (err) {
    if (err.code !== "ENOENT") throw err
    return ""
  }
}
102
+
103
/**
 * Parse JSONL text into an array of entries. Malformed lines are skipped
 * with a console warning instead of aborting the whole parse.
 * @template T
 * @param {string} text
 * @returns {T[]}
 */
function parseJsonl(text) {
  const entries = []
  for (const line of text.split("\n")) {
    if (!line) continue // skip blank lines
    try {
      entries.push(JSON.parse(line))
    } catch {
      console.warn("[memory] skipping malformed line:", line.slice(0, 80))
    }
  }
  return entries
}
122
+
123
/**
 * Calendar date in UTC as YYYY-MM-DD, optionally offset by whole days.
 *
 * Fix: the previous version shifted the date with local-time setDate() but
 * formatted with toISOString() (which is UTC), so near a local midnight an
 * offset could land on the wrong calendar day. Doing the arithmetic with
 * setUTCDate() keeps the shift and the formatting on the same clock.
 *
 * @param {number} offsetDays days to add (may be negative)
 * @returns {string} YYYY-MM-DD
 */
function dateStr(offsetDays = 0) {
  const d = new Date()
  d.setUTCDate(d.getUTCDate() + offsetDays)
  return d.toISOString().slice(0, 10)
}
129
+
130
/**
 * Runtime guard: ensure the user-supplied embed function produced an array.
 * Returns the value unchanged (same reference) when valid.
 * @param {unknown} embedding
 * @returns {number[]}
 */
function assertEmbedding(embedding) {
  if (Array.isArray(embedding)) return embedding
  throw new Error("embedding function must return a number[]")
}
137
+
138
/**
 * Wrap a single-text embed function into a batch embedder whose results are
 * validated per text. Throws immediately when embed is not callable.
 * @param {EmbedFn} embed
 * @returns {BatchEmbedFn}
 */
function resolveEmbed(embed) {
  if (typeof embed !== "function") {
    throw new TypeError("init() requires an embedFn: async (text) => number[]")
  }
  return async (texts) => {
    const jobs = texts.map(async (text) => assertEmbedding(await embed(text)))
    return Promise.all(jobs)
  }
}
145
+
146
+ // ---------------------------------------------------------------------------
147
+ // Memory
148
+ // ---------------------------------------------------------------------------
149
+
150
/**
 * Memory — file-backed agent memory.
 *
 * Owns the two storage files (<dir>/memory.md and <dir>/YYYY-MM-DD.jsonl)
 * plus an in-memory embedding index rebuilt from them. Log appends and
 * memory.md writes are serialised through #queue; index mutations through
 * #indexQueue, so overlapping turns cannot interleave partial state.
 */
export class Memory {
  #dir // resolved storage directory
  #memoryFile // <dir>/memory.md

  /** @type {BatchEmbedFn|null} null until init() installs the embedder */
  #embed = null
  /** @type {IndexEntry[]} searchable entries, deduped by content hash */
  #index = []

  #queue = new WriteQueue() // serialises file writes
  #indexQueue = new WriteQueue() // serialises index reads/writes

  /** @param {{ dir?: string }} opts */
  constructor({ dir = "./memory" } = {}) {
    this.#dir = path.resolve(dir)
    this.#memoryFile = path.join(this.#dir, "memory.md")
  }

  // -------------------------------------------------------------------------
  // Public API
  // -------------------------------------------------------------------------

  /**
   * Initialise the manager. Must be called once before anything else.
   * Creates the storage directory, rebuilds the index, and returns the
   * memory prompt plus the tool definitions to hand to the model.
   *
   * @param {EmbedFn} embedFn
   * @returns {Promise<{ memoryPrompt: string, tools: object[] }>}
   */
  async init(embedFn) {
    this.#embed = resolveEmbed(embedFn)

    await fs.promises.mkdir(this.#dir, { recursive: true })
    await this.reindex()

    const memory = await this.#readMemory()
    return {
      memoryPrompt: this.#buildSystemContent(memory),
      tools: this.#buildTools(),
    }
  }

  /**
   * Append one exchange to today's JSONL log.
   * Write is serialised through the queue, then the in-memory index is updated.
   * Throws on write or indexing failure — callers should handle.
   *
   * NOTE(review): dateStr() is evaluated twice; across a midnight boundary the
   * log file's date and the index entry's date could differ — confirm acceptable.
   *
   * @param {string} userContent
   * @param {string} assistantContent
   */
  async appendLog(userContent, assistantContent) {
    this.#assertReady()

    const entry = {
      ts: new Date().toISOString(),
      user: userContent,
      assistant: assistantContent,
    }
    const file = path.join(this.#dir, `${dateStr()}.jsonl`)

    await this.#queue.run(() => fs.promises.appendFile(file, JSON.stringify(entry) + "\n", "utf-8"))

    await this.#indexText(this.#logEntryToText(entry), "log", dateStr())
  }

  /**
   * Convenience helper for completed turns. Logs the latest user/assistant pair
   * already present in messages, then flushes if needed.
   * Calls maybeFlush() with default options — i.e. no basePrompt; see the
   * note on maybeFlush() about what that means for the system message.
   *
   * @param {Message[]} messages
   * @returns {Promise<Message[]>}
   */
  async afterTurn(messages) {
    this.#assertReady()

    const exchange = this.#latestExchange(messages)
    if (exchange) {
      await this.appendLog(exchange.user, exchange.assistant)
    }

    return (await this.maybeFlush(messages)) ?? messages
  }

  /**
   * Rebuild the in-memory index from memory.md and recent log files.
   * Entries are deduplicated by content hash before embedding; the whole
   * swap runs inside #indexQueue so searches never see a half-built index.
   * Requires init() to have been called (needs the embedder).
   */
  async reindex() {
    this.#assertReady()

    const [logChunks, memoryChunks] = await Promise.all([this.#collectLogChunks(), this.#collectMemoryChunks()])

    const unique = [...new Map([...memoryChunks, ...logChunks].map((entry) => [entry.id, entry])).values()]

    await this.#indexQueue.run(async () => {
      if (unique.length === 0) {
        this.#index = []
        return
      }

      const embeddings = await this.#embed(unique.map((entry) => entry.text))
      this.#index = unique.map((entry, i) => ({
        ...entry,
        embedding: embeddings[i],
      }))
    })
  }

  /**
   * Trim the conversation if it has grown past the char threshold, refreshing
   * the system message with the latest memory.
   *
   * Returns a new array when trimmed, null when no action was needed.
   * Usage: messages = await mm.maybeFlush(messages, { basePrompt }) ?? messages
   *
   * NOTE(review): when basePrompt is omitted it defaults to '' and is dropped
   * by filter(Boolean), so the refreshed system message contains ONLY the
   * memory prompt — the caller's original system content in messages[0] is
   * discarded. Callers with a custom system prompt should pass basePrompt.
   *
   * @param {Message[]} messages
   * @param {{ charThreshold?: number, maxHistory?: number, basePrompt?: string }} opts
   * @returns {Promise<Message[] | null>}
   */
  async maybeFlush(messages, { charThreshold = FLUSH_CHAR_THRESHOLD, maxHistory = FLUSH_MAX_HISTORY, basePrompt = '' } = {}) {
    this.#assertReady()

    if (messages.length === 0) return null

    const totalChars = messages.reduce((n, m) => n + (m.content?.length ?? 0), 0)
    if (totalChars < charThreshold) return null

    const memory = await this.#readMemory()
    const memoryPrompt = this.#buildSystemContent(memory)
    const systemContent = [basePrompt, memoryPrompt].filter(Boolean).join('\n\n')
    // messages[0] is assumed to be the system message; its content is replaced.
    const system = { ...messages[0], content: systemContent }
    const rest = messages.slice(1)
    // Keep the last (maxHistory - 1) non-system messages; with maxHistory <= 1
    // only the system message survives.
    const tail = maxHistory > 1 ? rest.slice(-(maxHistory - 1)) : []

    return [system, ...tail]
  }

  // -------------------------------------------------------------------------
  // Private: memory.md
  // -------------------------------------------------------------------------

  /** Current contents of memory.md ("" if the file does not exist). @returns {Promise<string>} */
  #readMemory() {
    return readSafe(this.#memoryFile)
  }

  /**
   * Append a markdown bullet to memory.md and index it.
   * Read-modify-write is safe because all writes go through the queue.
   * @param {string} content
   */
  async #writeMemory(content) {
    // Normalise to a "- " bullet so memory.md stays a flat bullet list.
    const bullet = content.trim().startsWith("-") ? content.trim() : `- ${content.trim()}`

    await this.#queue.run(async () => {
      const existing = await this.#readMemory()
      const updated = existing ? `${existing.trimEnd()}\n${bullet}\n` : `${bullet}\n`
      await atomicWrite(this.#memoryFile, updated)
    })

    await this.#indexText(bullet, "memory")
  }

  // -------------------------------------------------------------------------
  // Private: log helpers
  // -------------------------------------------------------------------------

  /**
   * Canonical text form of a log entry — also the basis of its index id,
   * so this format must stay stable across reindexes.
   * @param {{ ts: string, user: string, assistant: string }} entry @returns {string}
   */
  #logEntryToText({ ts, user, assistant }) {
    return `[${ts}] user: ${user}\n[${ts}] assistant: ${assistant}`
  }

  /**
   * Scan backwards for the most recent assistant message with non-blank string
   * content, then the nearest earlier such user message. Returns null when no
   * complete pair exists.
   * @param {Message[]} messages @returns {{ user: string, assistant: string } | null}
   */
  #latestExchange(messages) {
    let assistant = null

    for (let i = messages.length - 1; i >= 0; i--) {
      const message = messages[i]

      if (
        assistant === null &&
        message?.role === "assistant" &&
        typeof message.content === "string" &&
        message.content.trim()
      ) {
        assistant = message.content
        continue
      }

      if (
        assistant !== null &&
        message?.role === "user" &&
        typeof message.content === "string" &&
        message.content.trim()
      ) {
        return { user: message.content, assistant }
      }
    }

    return null
  }

  /**
   * Load entries from YYYY-MM-DD.jsonl files within the lookback window,
   * skipping entries missing any of ts/user/assistant string fields.
   * @returns {Promise<Array<{ text: string, id: string, date: string, source: 'log' }>>}
   */
  async #collectLogChunks() {
    let files
    try {
      files = await fs.promises.readdir(this.#dir)
    } catch (err) {
      if (err.code === "ENOENT") return []
      throw err
    }

    const chunks = await Promise.all(
      files
        .filter((f) => /^\d{4}-\d{2}-\d{2}\.jsonl$/.test(f))
        // Lexicographic compare works because the names are zero-padded ISO dates.
        .filter((f) => f.replace(".jsonl", "") >= dateStr(-LOG_LOOKBACK_DAYS))
        .map(async (file) => {
          const date = file.replace(".jsonl", "")
          const content = await readSafe(path.join(this.#dir, file))
          return parseJsonl(content).flatMap((entry) => {
            if (
              typeof entry?.ts !== "string" ||
              typeof entry?.user !== "string" ||
              typeof entry?.assistant !== "string"
            ) {
              return []
            }
            const text = this.#logEntryToText(entry)
            return [{ text, id: sha256(text), date, source: /** @type {'log'} */ ("log") }]
          })
        }),
    )
    return chunks.flat()
  }

  /**
   * Split memory.md into one chunk per "-" bullet line.
   * @returns {Promise<Array<{ text: string, id: string, source: 'memory' }>>}
   */
  async #collectMemoryChunks() {
    const content = await this.#readMemory()
    return content
      .split("\n")
      .filter((l) => l.trim().startsWith("-"))
      .map((bullet) => ({
        text: bullet.trim(),
        id: sha256(bullet.trim()),
        source: /** @type {'memory'} */ ("memory"),
      }))
  }

  /**
   * Embed a single text and add it to the in-memory index if not already present
   * (deduped by content hash). Runs inside #indexQueue.
   * @param {string} text
   * @param {'log'|'memory'} source
   * @param {string=} date
   */
  async #indexText(text, source, date) {
    await this.#indexQueue.run(async () => {
      const id = sha256(text)
      if (this.#index.some((entry) => entry.id === id)) return

      const [embedding] = await this.#embed([text])
      this.#index = [...this.#index, { id, text, date, source, embedding }]
    })
  }

  // -------------------------------------------------------------------------
  // Private: system prompt
  // -------------------------------------------------------------------------

  /**
   * Assemble the memory prompt: an optional "Profile Memory" section showing
   * memory.md verbatim, followed by fixed instruction sections.
   * @param {string} memory @returns {string}
   */
  #buildSystemContent(memory) {
    const memorySection = memory
      ? `## Profile Memory\n\n${memory}\n\nProfile memory contains only stable facts across conversations. Do not surface it unless directly relevant to the current reply.`
      : null

    const whenToAccess = [
      "## When to access memories",
      "- When memories seem relevant, or the user references prior-conversation work.",
      "- You MUST use memory_search when the user explicitly asks you to check, recall, or remember.",
      "- If the user says to ignore or not use memory: proceed as if memory were empty. Do not apply remembered facts, cite, compare against, or mention memory content.",
      "- Memory records can become stale over time. Use memory as context for what was true at a given point in time. Before answering based solely on a memory, verify it is still correct. If a recalled memory conflicts with what you observe now, trust what you observe — and update or remove the stale memory.",
    ].join("\n")

    const beforeRecommending = [
      "## Before recommending from memory",
      "A memory that names a specific function, file, or flag is a claim that it existed when the memory was written. It may have been renamed, removed, or never merged. Before recommending it:",
      "- If the user is about to act on your recommendation (not just asking about history), verify first.",
      '"The memory says X exists" is not the same as "X exists now."',
    ].join("\n")

    const whenToSave = [
      "## When to save memories",
      "Save immediately when you learn something worth remembering — do not wait for the user to ask. Save when:",
      "- You learn details about the user's role, preferences, responsibilities, or knowledge",
      "- The user corrects your approach or confirms a non-obvious approach worked — include why, so edge cases can be judged later",
      "- You learn about ongoing work, goals, or deadlines not derivable from the conversation",
      "- The user explicitly asks you to remember something",
      "Do not save:",
      "- Ephemeral details: in-progress work, temporary state, or summaries of the current turn",
      "- Guesses, assumptions, or one-off topics",
    ].join("\n")

    return [memorySection, whenToSave, whenToAccess, beforeRecommending]
      .filter(Boolean)
      .join("\n\n")
  }

  // -------------------------------------------------------------------------
  // Private: tools
  // -------------------------------------------------------------------------

  /**
   * Tool definitions handed to the model: memory_write (save a fact) and
   * memory_search (cosine-similarity search over the in-memory index).
   * @returns {object[]}
   */
  #buildTools() {
    return [
      {
        name: "memory_write",
        description: "Save a fact about the user that is worth remembering.",
        parameters: {
          type: "object",
          properties: {
            content: {
              type: "string",
              description: "A single concise sentence. State only what was explicitly said — no inference, no editorializing. For behavioral guidance, add the reason after a dash: \"Prefers concise responses — finds long explanations condescending.\"",
            },
          },
          required: ["content"],
        },
        function: async ({ content }) => {
          await this.#writeMemory(content)
          return "Memory saved."
        },
      },
      {
        name: "memory_search",
        description:
          "Search past conversations and stored facts by semantic similarity. " +
          "Call this only when the current message likely depends on prior context — " +
          "for example the user refers to a past conversation, an ongoing project, a saved " +
          "preference, or an unresolved task. Do not call it for greetings, acknowledgements, " +
          "or standalone factual questions.",
        parameters: {
          type: "object",
          properties: {
            query: {
              type: "string",
              description: "Natural language description of what you want to recall.",
            },
            k: {
              type: "number",
              description: "Number of results to return (default 5, max 20).",
            },
          },
          required: ["query"],
        },
        function: async ({ query, k = 5 }) => {
          if (this.#index.length === 0) return "No history indexed yet."

          const [queryEmbedding] = await this.#embed([query])
          // Clamp k into [1, 20] regardless of what the model passed.
          const safeK = Math.min(Math.max(1, k), 20)

          const results = this.#index
            .filter((e) => e.embedding?.length)
            .map((e) => ({ ...e, score: cosineSim(queryEmbedding, e.embedding) }))
            .sort((a, b) => b.score - a.score)
            .slice(0, safeK)

          if (results.length === 0) return "No relevant history found."

          return results
            .map((e) => (e.date ? `[${e.source} / ${e.date}]\n${e.text}` : `[${e.source}]\n${e.text}`))
            .join("\n\n---\n\n")
        },
      },
    ]
  }

  // -------------------------------------------------------------------------
  // Private: guard
  // -------------------------------------------------------------------------

  // Throws unless init() has installed the embedder.
  #assertReady() {
    if (!this.#embed) {
      throw new Error("Memory not initialised — call init() first")
    }
  }
}
package/package.json ADDED
@@ -0,0 +1,20 @@
1
+ {
2
+ "name": "memdir",
3
+ "version": "0.0.1",
4
+ "description": "File-based memory for AI agents.",
5
+ "main": "index.js",
6
+ "types": "index.d.ts",
7
+ "files": ["index.js", "index.d.ts"],
8
+ "repository": {
9
+ "type": "git",
10
+ "url": "git+ssh://git@github.com/artiebits/memdir.git"
11
+ },
12
+ "license": "ISC",
13
+ "type": "module",
14
+ "keywords": [
15
+ "agent-memory",
16
+ "ai-memory",
17
+ "semantic-search",
18
+ "llm"
19
+ ]
20
+ }