zouroboros-memory 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/capture.d.ts +57 -0
- package/dist/capture.js +181 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +91 -0
- package/dist/conflict-resolver.d.ts +55 -0
- package/dist/conflict-resolver.js +221 -0
- package/dist/context-budget.d.ts +94 -0
- package/dist/context-budget.js +272 -0
- package/dist/cross-persona.d.ts +31 -0
- package/dist/cross-persona.js +188 -0
- package/dist/database.d.ts +35 -0
- package/dist/database.js +189 -0
- package/dist/embedding-benchmark.d.ts +12 -0
- package/dist/embedding-benchmark.js +224 -0
- package/dist/embeddings.d.ts +79 -0
- package/dist/embeddings.js +233 -0
- package/dist/episode-summarizer.d.ts +51 -0
- package/dist/episode-summarizer.js +285 -0
- package/dist/episodes.d.ts +41 -0
- package/dist/episodes.js +141 -0
- package/dist/facts.d.ts +60 -0
- package/dist/facts.js +263 -0
- package/dist/graph-traversal.d.ts +38 -0
- package/dist/graph-traversal.js +297 -0
- package/dist/graph.d.ts +51 -0
- package/dist/graph.js +221 -0
- package/dist/import-pipeline.d.ts +17 -0
- package/dist/import-pipeline.js +324 -0
- package/dist/index.d.ts +55 -0
- package/dist/index.js +62 -0
- package/dist/mcp-server.d.ts +31 -0
- package/dist/mcp-server.js +285 -0
- package/dist/metrics.d.ts +63 -0
- package/dist/metrics.js +243 -0
- package/dist/multi-hop.d.ts +30 -0
- package/dist/multi-hop.js +238 -0
- package/dist/profiles.d.ts +51 -0
- package/dist/profiles.js +149 -0
- package/package.json +52 -0
|
@@ -0,0 +1,324 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
/**
|
|
3
|
+
* zo-memory-system Import Pipeline v1.0
|
|
4
|
+
*
|
|
5
|
+
* Import facts from external sources into the memory system.
|
|
6
|
+
*
|
|
7
|
+
* Supported sources:
|
|
8
|
+
* chatgpt — ChatGPT JSON export (conversations format)
|
|
9
|
+
* obsidian — Obsidian vault (markdown files with frontmatter)
|
|
10
|
+
* markdown — Generic markdown files
|
|
11
|
+
*
|
|
12
|
+
* Usage:
|
|
13
|
+
* bun import.ts --source chatgpt --path ~/export.json
|
|
14
|
+
* bun import.ts --source obsidian --path ~/Vault --dry-run
|
|
15
|
+
* bun import.ts --source markdown --path ~/notes/file.md
|
|
16
|
+
*/
|
|
17
|
+
import { Database } from "bun:sqlite";
|
|
18
|
+
import { randomUUID } from "crypto";
|
|
19
|
+
import { existsSync, readFileSync, readdirSync, statSync } from "fs";
|
|
20
|
+
import { join, basename, extname } from "path";
|
|
21
|
+
// --- Config ---
|
|
22
|
+
const DB_PATH = process.env.ZO_MEMORY_DB || "/home/workspace/.zo/memory/shared-facts.db";
|
|
23
|
+
const OLLAMA_URL = process.env.OLLAMA_URL || "http://localhost:11434";
|
|
24
|
+
const EMBEDDING_MODEL = process.env.ZO_EMBEDDING_MODEL || "nomic-embed-text";
|
|
25
|
+
// --- Embedding helper ---
/**
 * Request an embedding vector for `text` from the local Ollama server.
 * Resolves to the embedding array, or null on any failure (non-2xx
 * response, network error, malformed body, or the 30s timeout firing).
 * Never throws — callers treat a missing embedding as best-effort.
 */
async function getEmbedding(text) {
    const payload = JSON.stringify({ model: EMBEDDING_MODEL, prompt: text });
    try {
        const response = await fetch(`${OLLAMA_URL}/api/embeddings`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: payload,
            signal: AbortSignal.timeout(30000),
        });
        if (!response.ok) {
            return null;
        }
        const parsed = await response.json();
        return parsed.embedding || null;
    } catch {
        return null;
    }
}
|
|
43
|
+
// --- Store fact ---
/**
 * Insert one imported fact into the `facts` table (persona 'shared',
 * decay class 'stable', importance and confidence fixed at 1.0) and,
 * when the local embedding service responds, a companion row in
 * `fact_embeddings` holding the raw float32 bytes plus the model name.
 *
 * Returns true on success, false on any error — best-effort by design:
 * the caller counts failures rather than aborting the whole import.
 */
async function storeFact(db, fact) {
    try {
        const id = randomUUID();
        const now = Date.now();
        const nowSec = Math.floor(now / 1000);
        const expiresAt = nowSec + 90 * 86400; // stable decay = 90 days
        // NOTE(review): created_at is bound to `now` (milliseconds) while
        // expires_at and last_accessed are in seconds — confirm this mixed
        // unit convention matches what the other modules expect.
        db.prepare(`
      INSERT INTO facts (id, persona, entity, key, value, text, category, decay_class,
                         importance, source, created_at, expires_at, last_accessed, confidence)
      VALUES (?, 'shared', ?, ?, ?, ?, ?, 'stable', 1.0, ?, ?, ?, ?, 1.0)
    `).run(id, fact.entity, fact.key, fact.value, fact.text, fact.category, fact.source, now, expiresAt, nowSec);
        // Generate embedding
        const embedding = await getEmbedding(fact.text);
        if (embedding) {
            // Serialize the vector as little-endian float32 bytes for the BLOB column.
            db.prepare("INSERT INTO fact_embeddings (fact_id, embedding, model) VALUES (?, ?, ?)")
                .run(id, Buffer.from(new Float32Array(embedding).buffer), EMBEDDING_MODEL);
        }
        return true;
    }
    catch (e) {
        return false; // swallow deliberately: caller records the failure and moves on
    }
}
|
|
67
|
+
// --- Check for duplicate ---
/**
 * True when a fact with the same entity/key/value (or, for key-less
 * facts, the same entity/value) already exists in the facts table.
 */
function isDuplicate(db, entity, key, value) {
    let existing;
    if (key) {
        existing = db.prepare("SELECT id FROM facts WHERE entity = ? AND key = ? AND value = ?").get(entity, key, value);
    } else {
        existing = db.prepare("SELECT id FROM facts WHERE entity = ? AND value = ?").get(entity, value);
    }
    return Boolean(existing);
}
|
|
74
|
+
// ==========================================================================
// IMPORTERS
// ==========================================================================
/** Import from ChatGPT JSON export (conversations format) */
function parseChatGPTExport(filePath) {
    const raw = JSON.parse(readFileSync(filePath, "utf-8"));
    // The export may be a single conversation object or an array of them.
    const conversations = Array.isArray(raw) ? raw : [raw];
    const facts = [];
    // A line is "key" material if it looks like a header, bullet, numbered
    // item, or a key: value statement.
    const looksMeaningful = (l) =>
        l.startsWith("##") || l.startsWith("- ") || l.startsWith("* ") ||
        l.includes(":") || /^\d+\./.test(l.trim());
    for (const convo of conversations) {
        const title = convo.title || "untitled";
        const mapping = convo.mapping || {};
        // Walk the message graph, keeping only assistant messages.
        for (const node of Object.values(mapping)) {
            if (!node?.message) continue;
            const msg = node.message;
            if (msg.author?.role !== "assistant") continue;
            const content = msg.content?.parts?.join("\n") || "";
            if (content.length < 50) continue; // skip short responses
            const substantialLines = content.split("\n").filter((l) => l.trim().length > 20);
            const keyLines = substantialLines.filter(looksMeaningful).slice(0, 5);
            if (keyLines.length > 0) {
                facts.push({
                    entity: `chatgpt.${title.replace(/[^a-zA-Z0-9]/g, "-").slice(0, 40)}`,
                    key: "summary",
                    value: keyLines.join("; ").slice(0, 500),
                    text: `ChatGPT conversation "${title}": ${keyLines.join("; ").slice(0, 300)}`,
                    category: "reference",
                    source: `chatgpt:${basename(filePath)}`,
                });
            }
        }
    }
    return facts;
}
|
|
115
|
+
/**
 * Import from Obsidian vault (markdown files with YAML frontmatter).
 *
 * Recursively walks `dirPath` (skipping dot-directories), parsing each
 * `.md` file. Frontmatter keys `entity`/`category` override the defaults
 * derived from the file name. Files with `## ` sections yield one fact
 * per section (up to 8); otherwise the whole body becomes one fact.
 *
 * @param dirPath root of the vault
 * @returns array of fact objects ready for storeFact()
 */
function parseObsidianVault(dirPath) {
    const facts = [];
    function walkDir(dir) {
        for (const entry of readdirSync(dir)) {
            const fullPath = join(dir, entry);
            const stat = statSync(fullPath);
            if (stat.isDirectory()) {
                if (!entry.startsWith("."))
                    walkDir(fullPath);
                continue;
            }
            if (extname(entry) !== ".md")
                continue;
            const content = readFileSync(fullPath, "utf-8");
            const name = basename(entry, ".md");
            // Parse YAML frontmatter
            let frontmatter = {};
            const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
            if (fmMatch) {
                // Simple YAML parser for key: value pairs
                for (const line of fmMatch[1].split("\n")) {
                    const kv = line.match(/^(\w+):\s*(.+)$/);
                    if (kv)
                        frontmatter[kv[1]] = kv[2].trim();
                }
            }
            // Get body content (after frontmatter)
            const body = fmMatch ? content.slice(fmMatch[0].length).trim() : content.trim();
            if (body.length < 30)
                continue; // BUG FIX: was `return`, which aborted the whole
                          // directory scan at the first near-empty file;
                          // `continue` merely skips this file.
            // Extract entity from path or frontmatter
            const entity = frontmatter.entity ||
                `obsidian.${name.replace(/[^a-zA-Z0-9]/g, "-").slice(0, 40)}`;
            const category = frontmatter.category || "reference";
            // Extract key sections (headings and their first paragraph)
            const sections = body.split(/^##\s+/m).filter(s => s.trim().length > 20);
            if (sections.length > 1) {
                // Multiple sections — create a fact per section
                for (const section of sections.slice(0, 8)) {
                    const lines = section.split("\n");
                    const heading = lines[0]?.trim() || "untitled";
                    const sectionBody = lines.slice(1).join(" ").trim().slice(0, 400);
                    if (sectionBody.length < 20)
                        continue;
                    facts.push({
                        entity,
                        key: heading.replace(/[^a-zA-Z0-9\s-]/g, "").slice(0, 60),
                        value: sectionBody.slice(0, 300),
                        text: `${name} — ${heading}: ${sectionBody}`,
                        category,
                        source: `obsidian:${fullPath}`,
                    });
                }
            }
            else {
                // Single document — one fact
                facts.push({
                    entity,
                    key: null,
                    value: body.slice(0, 500),
                    text: `${name}: ${body.slice(0, 400)}`,
                    category,
                    source: `obsidian:${fullPath}`,
                });
            }
        }
    }
    walkDir(dirPath);
    return facts;
}
|
|
186
|
+
/** Import from generic markdown file */
function parseMarkdownFile(filePath) {
    const content = readFileSync(filePath, "utf-8");
    const name = basename(filePath, ".md");
    const entity = `markdown.${name.replace(/[^a-zA-Z0-9]/g, "-").slice(0, 40)}`;
    // Split by headings (# or ##); drop fragments with no real content.
    const sections = content.split(/^##?\s+/m).filter(s => s.trim().length > 20);
    if (sections.length <= 1) {
        // No usable headings — the whole document becomes one fact.
        return [{
            entity,
            key: null,
            value: content.slice(0, 500),
            text: `${name}: ${content.slice(0, 400)}`,
            category: "reference",
            source: `markdown:${filePath}`,
        }];
    }
    // One fact per section, capped at 10 sections.
    const facts = [];
    for (const section of sections.slice(0, 10)) {
        const [firstLine, ...restLines] = section.split("\n");
        const heading = firstLine?.trim() || "untitled";
        const body = restLines.join(" ").trim().slice(0, 400);
        if (body.length < 20) continue;
        facts.push({
            entity,
            key: heading.replace(/[^a-zA-Z0-9\s-]/g, "").slice(0, 60),
            value: body.slice(0, 300),
            text: `${name} — ${heading}: ${body}`,
            category: "reference",
            source: `markdown:${filePath}`,
        });
    }
    return facts;
}
|
|
224
|
+
// ==========================================================================
// MAIN
// ==========================================================================
/**
 * CLI entry point.
 *
 * Parses --source/--path/--dry-run from argv, runs the matching importer,
 * then either previews the parsed facts (dry run) or writes them to the
 * SQLite database at DB_PATH, skipping exact duplicates.
 */
async function main() {
    const args = process.argv.slice(2);
    const flags = {}; // value flags: --name <value>
    const boolFlags = new Set(); // boolean flags (currently only --dry-run)
    for (let i = 0; i < args.length; i++) {
        if (args[i] === "--dry-run") {
            boolFlags.add("dry-run");
        }
        else if (args[i].startsWith("--")) {
            // NOTE(review): the next token is consumed as the value even if it
            // is itself a --flag; acceptable for the documented usage patterns.
            flags[args[i].slice(2)] = args[i + 1] || "";
            i++;
        }
    }
    const source = flags.source;
    const path = flags.path;
    const dryRun = boolFlags.has("dry-run");
    if (!source || !path) {
        console.error("Usage: bun import.ts --source <chatgpt|obsidian|markdown> --path <file|dir> [--dry-run]");
        process.exit(1);
    }
    if (!existsSync(path)) {
        console.error(`Path not found: ${path}`);
        process.exit(1);
    }
    console.log(`Importing from ${source}: ${path}${dryRun ? " (dry run)" : ""}\n`);
    // Parse source — each importer returns a uniform list of fact objects.
    let facts;
    switch (source) {
        case "chatgpt":
            facts = parseChatGPTExport(path);
            break;
        case "obsidian":
            facts = parseObsidianVault(path);
            break;
        case "markdown":
            facts = parseMarkdownFile(path);
            break;
        default:
            console.error(`Unknown source: ${source}. Supported: chatgpt, obsidian, markdown`);
            process.exit(1);
    }
    console.log(`Parsed ${facts.length} facts from ${source}.\n`);
    if (facts.length === 0) {
        console.log("No facts to import.");
        return;
    }
    // Running tally reported in the summary at the end of the import.
    const result = {
        source,
        factsFound: facts.length,
        factsStored: 0,
        factsSkipped: 0,
        errors: [],
    };
    if (dryRun) {
        // Preview only — show up to 20 facts, touch nothing on disk.
        console.log("=== DRY RUN — Preview ===\n");
        for (const fact of facts.slice(0, 20)) {
            console.log(`  ${fact.entity}.${fact.key || "_"} = ${fact.value.slice(0, 80)}`);
            console.log(`    category: ${fact.category} | source: ${fact.source}`);
            console.log();
        }
        if (facts.length > 20) {
            console.log(`  ... and ${facts.length - 20} more\n`);
        }
        console.log(`Total: ${facts.length} facts would be imported.`);
        return;
    }
    // Store facts
    const db = new Database(DB_PATH);
    db.exec("PRAGMA journal_mode = WAL"); // WAL allows concurrent readers during import
    for (const fact of facts) {
        // Check for duplicates
        if (isDuplicate(db, fact.entity, fact.key, fact.value)) {
            result.factsSkipped++;
            continue;
        }
        const stored = await storeFact(db, fact);
        if (stored) {
            result.factsStored++;
            // trailing \r keeps the progress counter on a single line
            process.stdout.write(`  Stored: ${fact.entity}.${fact.key || "_"} (${result.factsStored}/${result.factsFound})\r`);
        }
        else {
            result.errors.push(`Failed to store: ${fact.entity}.${fact.key || "_"}`);
        }
    }
    db.close();
    console.log(`\n\n=== Import Complete ===`);
    console.log(`  Source: ${result.source}`);
    console.log(`  Found: ${result.factsFound}`);
    console.log(`  Stored: ${result.factsStored}`);
    console.log(`  Skipped (duplicates): ${result.factsSkipped}`);
    if (result.errors.length > 0) {
        console.log(`  Errors: ${result.errors.length}`);
        // Only the first five errors are shown to keep the summary short.
        for (const err of result.errors.slice(0, 5)) {
            console.log(`    - ${err}`);
        }
    }
}
main().catch(console.error);
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Zouroboros Memory System
|
|
3
|
+
*
|
|
4
|
+
* Hybrid SQLite + Vector memory with episodic, procedural, and cognitive capabilities.
|
|
5
|
+
*
|
|
6
|
+
* @module zouroboros-memory
|
|
7
|
+
*/
|
|
8
|
+
export declare const VERSION = "3.0.0";
|
|
9
|
+
export { initDatabase, getDatabase, closeDatabase, isInitialized, runMigrations, getDbStats, } from './database.js';
|
|
10
|
+
export { generateEmbedding, generateHypotheticalAnswer, generateHyDEExpansion, blendEmbeddings, cosineSimilarity, serializeEmbedding, deserializeEmbedding, checkOllamaHealth, listAvailableModels, } from './embeddings.js';
|
|
11
|
+
export { storeFact, searchFacts, searchFactsVector, searchFactsHybrid, getFact, deleteFact, touchFact, cleanupExpiredFacts, } from './facts.js';
|
|
12
|
+
export { createEpisode, searchEpisodes, getEntityEpisodes, updateEpisodeOutcome, getEpisodeStats, } from './episodes.js';
|
|
13
|
+
export { buildEntityGraph, getRelatedEntities, searchFactsGraphBoosted, extractQueryEntities, invalidateGraphCache, } from './graph.js';
|
|
14
|
+
export { getProfile, updateTraits, updatePreferences, recordInteraction, getRecentInteractions, getProfileSummary, listProfiles, deleteProfile, ensureProfileSchema, } from './profiles.js';
|
|
15
|
+
export { extractFromText, autoCapture, bufferForCapture, startAutoCapture, stopAutoCapture, getCaptureBufferSize, } from './capture.js';
|
|
16
|
+
export type { CaptureResult, CaptureOptions } from './capture.js';
|
|
17
|
+
export { handleMessage, startMcpServer } from './mcp-server.js';
|
|
18
|
+
export { estimateTokens, estimateFactTokens, getBudget, updateBudget, resetBudget, initBudget, planCompression, createCheckpoint, loadCheckpoint, listCheckpoints, retrievalWithBudget, } from './context-budget.js';
|
|
19
|
+
export type { ContextBudget, BudgetCheckpoint, CompressedFact, BudgetMetrics, CompressionPlan, RetrievalBudgetResult, } from './context-budget.js';
|
|
20
|
+
export { compressEpisodes, getCompressedEpisode, listCompressedEpisodes, shouldSummarize, } from './episode-summarizer.js';
|
|
21
|
+
export type { CompressedEpisode, SummarizationResult, EpisodeForCompression, ShouldSummarizeResult, } from './episode-summarizer.js';
|
|
22
|
+
export { recordSearchOperation, recordCaptureOperation, recordGateDecision, collectMetrics, printReport, } from './metrics.js';
|
|
23
|
+
export type { MemoryMetrics, CaptureStats, SearchMetrics, OperationStats, GateMetrics, } from './metrics.js';
|
|
24
|
+
export type { HopResult, MultiHopResult, } from './multi-hop.js';
|
|
25
|
+
export { isContradiction, findEntityConflicts, detectNewConflict, resolveConflict, resolveAllPending, trackProvenance, getProvenance, getFactHistory, } from './conflict-resolver.js';
|
|
26
|
+
export type { ConflictType, ResolutionStrategy, ConflictRecord, ProvenanceRecord, } from './conflict-resolver.js';
|
|
27
|
+
export { listPools, createPool, addToPool, removeFromPool, setInheritance, getAccessiblePersonas, searchCrossPersona, } from './cross-persona.js';
|
|
28
|
+
export type { SharedPool, PersonaNode, } from './cross-persona.js';
|
|
29
|
+
export { getAncestors, getDescendants, detectCycles, inferRelations, exportDot, KNOWN_RELATIONS, } from './graph-traversal.js';
|
|
30
|
+
export type { MemoryEntry, MemorySearchResult, EpisodicMemory, TemporalQuery, CognitiveProfile, GraphNode, GraphEdge, } from 'zouroboros-core';
|
|
31
|
+
type MemoryConfig = import('zouroboros-core').MemoryConfig;
|
|
32
|
+
/**
|
|
33
|
+
* Initialize the memory system
|
|
34
|
+
*/
|
|
35
|
+
export declare function init(config: MemoryConfig): void;
|
|
36
|
+
/**
|
|
37
|
+
* Shutdown the memory system
|
|
38
|
+
*/
|
|
39
|
+
export declare function shutdown(): void;
|
|
40
|
+
/**
|
|
41
|
+
* Get memory system statistics
|
|
42
|
+
*/
|
|
43
|
+
export declare function getStats(config: MemoryConfig): {
|
|
44
|
+
database: {
|
|
45
|
+
facts: number;
|
|
46
|
+
episodes: number;
|
|
47
|
+
procedures: number;
|
|
48
|
+
openLoops: number;
|
|
49
|
+
embeddings: number;
|
|
50
|
+
};
|
|
51
|
+
episodes: {
|
|
52
|
+
total: number;
|
|
53
|
+
byOutcome: Record<string, number>;
|
|
54
|
+
};
|
|
55
|
+
};
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Zouroboros Memory System
|
|
3
|
+
*
|
|
4
|
+
* Hybrid SQLite + Vector memory with episodic, procedural, and cognitive capabilities.
|
|
5
|
+
*
|
|
6
|
+
* @module zouroboros-memory
|
|
7
|
+
*/
|
|
8
|
+
import { initDatabase as _initDb, closeDatabase as _closeDb, runMigrations as _runMigrations, getDbStats as _getDbStats } from './database.js';
|
|
9
|
+
import { getEpisodeStats as _getEpisodeStats } from './episodes.js';
|
|
10
|
+
import { ensureProfileSchema as _ensureProfileSchema } from './profiles.js';
|
|
11
|
+
export const VERSION = '3.0.0';
|
|
12
|
+
// Database
|
|
13
|
+
export { initDatabase, getDatabase, closeDatabase, isInitialized, runMigrations, getDbStats, } from './database.js';
|
|
14
|
+
// Embeddings
|
|
15
|
+
export { generateEmbedding, generateHypotheticalAnswer, generateHyDEExpansion, blendEmbeddings, cosineSimilarity, serializeEmbedding, deserializeEmbedding, checkOllamaHealth, listAvailableModels, } from './embeddings.js';
|
|
16
|
+
// Facts
|
|
17
|
+
export { storeFact, searchFacts, searchFactsVector, searchFactsHybrid, getFact, deleteFact, touchFact, cleanupExpiredFacts, } from './facts.js';
|
|
18
|
+
// Episodes
|
|
19
|
+
export { createEpisode, searchEpisodes, getEntityEpisodes, updateEpisodeOutcome, getEpisodeStats, } from './episodes.js';
|
|
20
|
+
// Graph
|
|
21
|
+
export { buildEntityGraph, getRelatedEntities, searchFactsGraphBoosted, extractQueryEntities, invalidateGraphCache, } from './graph.js';
|
|
22
|
+
// Cognitive Profiles
|
|
23
|
+
export { getProfile, updateTraits, updatePreferences, recordInteraction, getRecentInteractions, getProfileSummary, listProfiles, deleteProfile, ensureProfileSchema, } from './profiles.js';
|
|
24
|
+
// Auto-capture
|
|
25
|
+
export { extractFromText, autoCapture, bufferForCapture, startAutoCapture, stopAutoCapture, getCaptureBufferSize, } from './capture.js';
|
|
26
|
+
// MCP Server
|
|
27
|
+
export { handleMessage, startMcpServer } from './mcp-server.js';
|
|
28
|
+
// v4 Enhancements — Context Budget (MEM-001)
|
|
29
|
+
export { estimateTokens, estimateFactTokens, getBudget, updateBudget, resetBudget, initBudget, planCompression, createCheckpoint, loadCheckpoint, listCheckpoints, retrievalWithBudget, } from './context-budget.js';
|
|
30
|
+
// v4 Enhancements — Episode Summarizer (MEM-002)
|
|
31
|
+
export { compressEpisodes, getCompressedEpisode, listCompressedEpisodes, shouldSummarize, } from './episode-summarizer.js';
|
|
32
|
+
// v4 Enhancements — Metrics Dashboard (MEM-101)
|
|
33
|
+
export { recordSearchOperation, recordCaptureOperation, recordGateDecision, collectMetrics, printReport, } from './metrics.js';
|
|
34
|
+
// v4 Enhancements — Conflict Resolver (MEM-103)
|
|
35
|
+
export { isContradiction, findEntityConflicts, detectNewConflict, resolveConflict, resolveAllPending, trackProvenance, getProvenance, getFactHistory, } from './conflict-resolver.js';
|
|
36
|
+
// v4 Enhancements — Cross-Persona Memory (MEM-104)
|
|
37
|
+
export { listPools, createPool, addToPool, removeFromPool, setInheritance, getAccessiblePersonas, searchCrossPersona, } from './cross-persona.js';
|
|
38
|
+
// v4 Enhancements — Graph Traversal (MEM-105)
|
|
39
|
+
export { getAncestors, getDescendants, detectCycles, inferRelations, exportDot, KNOWN_RELATIONS, } from './graph-traversal.js';
|
|
40
|
+
/**
 * Initialize the memory system
 *
 * Opens the database for `config`, applies pending schema migrations,
 * then ensures the cognitive-profile tables exist. Order matters: the
 * profile schema is created against the migrated database.
 */
export function init(config) {
    _initDb(config);
    _runMigrations(config);
    _ensureProfileSchema();
}
|
|
48
|
+
/**
 * Shutdown the memory system
 *
 * Closes the underlying database connection; call before process exit.
 */
export function shutdown() {
    _closeDb();
}
|
|
54
|
+
/**
 * Get memory system statistics
 *
 * Combines per-table row counts from the database layer with the
 * episode totals/outcome breakdown from the episode layer.
 */
export function getStats(config) {
    const database = _getDbStats(config);
    const episodes = _getEpisodeStats();
    return { database, episodes };
}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MCP (Model Context Protocol) server for Zouroboros Memory
|
|
3
|
+
*
|
|
4
|
+
* Exposes memory operations as MCP tools accessible by external
|
|
5
|
+
* AI agents and clients via stdio transport.
|
|
6
|
+
*
|
|
7
|
+
* Usage: bun run packages/memory/src/mcp-server.ts [--db-path <path>]
|
|
8
|
+
*/
|
|
9
|
+
import type { MemoryConfig } from 'zouroboros-core';
|
|
10
|
+
/** JSON-RPC 2.0 request envelope received over the MCP stdio transport. */
interface McpRequest {
    jsonrpc: '2.0';
    id: number | string;
    /** Method name to dispatch, e.g. an MCP tool invocation. */
    method: string;
    /** Method-specific arguments; absent for parameterless methods. */
    params?: Record<string, unknown>;
}
/**
 * JSON-RPC 2.0 response envelope. Per the JSON-RPC spec either `result`
 * or `error` is populated, never both.
 */
interface McpResponse {
    jsonrpc: '2.0';
    id: number | string;
    result?: unknown;
    error?: {
        /** JSON-RPC error code (negative integers for protocol errors). */
        code: number;
        message: string;
        data?: unknown;
    };
}
/** Dispatch one MCP request against the memory system and return its response. */
export declare function handleMessage(message: McpRequest, config: MemoryConfig): Promise<McpResponse>;
/**
 * Start the MCP server on stdio.
 */
export declare function startMcpServer(config: MemoryConfig): Promise<void>;
export {};
|