@claude-flow/cli 3.0.0-alpha.62 → 3.0.0-alpha.63
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/memory.db +0 -0
- package/dist/src/commands/embeddings.d.ts.map +1 -1
- package/dist/src/commands/embeddings.js +281 -66
- package/dist/src/commands/embeddings.js.map +1 -1
- package/dist/src/commands/memory.d.ts.map +1 -1
- package/dist/src/commands/memory.js +227 -223
- package/dist/src/commands/memory.js.map +1 -1
- package/dist/src/memory/memory-initializer.d.ts +229 -0
- package/dist/src/memory/memory-initializer.d.ts.map +1 -0
- package/dist/src/memory/memory-initializer.js +1248 -0
- package/dist/src/memory/memory-initializer.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
@@ -0,0 +1,1248 @@

/**
 * V3 Memory Initializer
 * Properly initializes the memory database with sql.js (WASM SQLite)
 * Includes pattern tables, vector embeddings, migration state tracking
 *
 * @module v3/cli/memory-initializer
 */
import * as fs from 'fs';
import * as path from 'path';
/**
 * Enhanced schema with pattern confidence, temporal decay, versioning
 * Vector embeddings enabled for semantic search
 */
export const MEMORY_SCHEMA_V3 = `
-- Claude Flow V3 Memory Database
-- Version: 3.0.0
-- Features: Pattern learning, vector embeddings, temporal decay, migration tracking

PRAGMA journal_mode = WAL;
PRAGMA synchronous = NORMAL;
PRAGMA foreign_keys = ON;

-- ============================================
-- CORE MEMORY TABLES
-- ============================================

-- Memory entries (main storage)
CREATE TABLE IF NOT EXISTS memory_entries (
  id TEXT PRIMARY KEY,
  key TEXT NOT NULL,
  namespace TEXT DEFAULT 'default',
  content TEXT NOT NULL,
  type TEXT DEFAULT 'semantic' CHECK(type IN ('semantic', 'episodic', 'procedural', 'working', 'pattern')),

  -- Vector embedding for semantic search (stored as JSON array)
  embedding TEXT,
  embedding_model TEXT DEFAULT 'local',
  embedding_dimensions INTEGER,

  -- Metadata
  tags TEXT,     -- JSON array
  metadata TEXT, -- JSON object
  owner_id TEXT,

  -- Timestamps
  created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  expires_at INTEGER,
  last_accessed_at INTEGER,

  -- Access tracking for hot/cold detection
  access_count INTEGER DEFAULT 0,

  -- Status
  status TEXT DEFAULT 'active' CHECK(status IN ('active', 'archived', 'deleted')),

  UNIQUE(namespace, key)
);

-- Indexes for memory entries
CREATE INDEX IF NOT EXISTS idx_memory_namespace ON memory_entries(namespace);
CREATE INDEX IF NOT EXISTS idx_memory_key ON memory_entries(key);
CREATE INDEX IF NOT EXISTS idx_memory_type ON memory_entries(type);
CREATE INDEX IF NOT EXISTS idx_memory_status ON memory_entries(status);
CREATE INDEX IF NOT EXISTS idx_memory_created ON memory_entries(created_at);
CREATE INDEX IF NOT EXISTS idx_memory_accessed ON memory_entries(last_accessed_at);
CREATE INDEX IF NOT EXISTS idx_memory_owner ON memory_entries(owner_id);

-- ============================================
-- PATTERN LEARNING TABLES
-- ============================================

-- Learned patterns with confidence scoring and versioning
CREATE TABLE IF NOT EXISTS patterns (
  id TEXT PRIMARY KEY,

  -- Pattern identification
  name TEXT NOT NULL,
  pattern_type TEXT NOT NULL CHECK(pattern_type IN (
    'task-routing', 'error-recovery', 'optimization', 'learning',
    'coordination', 'prediction', 'code-pattern', 'workflow'
  )),

  -- Pattern definition
  condition TEXT NOT NULL, -- Regex or semantic match
  action TEXT NOT NULL,    -- What to do when pattern matches
  description TEXT,

  -- Confidence scoring (0.0 - 1.0)
  confidence REAL DEFAULT 0.5,
  success_count INTEGER DEFAULT 0,
  failure_count INTEGER DEFAULT 0,

  -- Temporal decay
  decay_rate REAL DEFAULT 0.01,      -- How fast confidence decays
  half_life_days INTEGER DEFAULT 30, -- Days until confidence halves without use

  -- Vector embedding for semantic pattern matching
  embedding TEXT,
  embedding_dimensions INTEGER,

  -- Versioning
  version INTEGER DEFAULT 1,
  parent_id TEXT REFERENCES patterns(id),

  -- Metadata
  tags TEXT,     -- JSON array
  metadata TEXT, -- JSON object
  source TEXT,   -- Where the pattern was learned from

  -- Timestamps
  created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  last_matched_at INTEGER,
  last_success_at INTEGER,
  last_failure_at INTEGER,

  -- Status
  status TEXT DEFAULT 'active' CHECK(status IN ('active', 'archived', 'deprecated', 'experimental'))
);

-- Indexes for patterns
CREATE INDEX IF NOT EXISTS idx_patterns_type ON patterns(pattern_type);
CREATE INDEX IF NOT EXISTS idx_patterns_confidence ON patterns(confidence DESC);
CREATE INDEX IF NOT EXISTS idx_patterns_status ON patterns(status);
CREATE INDEX IF NOT EXISTS idx_patterns_last_matched ON patterns(last_matched_at);

-- Pattern evolution history (for versioning)
CREATE TABLE IF NOT EXISTS pattern_history (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  pattern_id TEXT NOT NULL REFERENCES patterns(id),
  version INTEGER NOT NULL,

  -- Snapshot of pattern state
  confidence REAL,
  success_count INTEGER,
  failure_count INTEGER,
  condition TEXT,
  action TEXT,

  -- What changed
  change_type TEXT CHECK(change_type IN ('created', 'updated', 'success', 'failure', 'decay', 'merged', 'split')),
  change_reason TEXT,

  created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000)
);

CREATE INDEX IF NOT EXISTS idx_pattern_history_pattern ON pattern_history(pattern_id);

-- ============================================
-- LEARNING & TRAJECTORY TABLES
-- ============================================

-- Learning trajectories (SONA integration)
CREATE TABLE IF NOT EXISTS trajectories (
  id TEXT PRIMARY KEY,
  session_id TEXT,

  -- Trajectory state
  status TEXT DEFAULT 'active' CHECK(status IN ('active', 'completed', 'failed', 'abandoned')),
  verdict TEXT CHECK(verdict IN ('success', 'failure', 'partial', NULL)),

  -- Context
  task TEXT,
  context TEXT, -- JSON object

  -- Metrics
  total_steps INTEGER DEFAULT 0,
  total_reward REAL DEFAULT 0,

  -- Timestamps
  started_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  ended_at INTEGER,

  -- Reference to extracted pattern (if any)
  extracted_pattern_id TEXT REFERENCES patterns(id)
);

-- Trajectory steps
CREATE TABLE IF NOT EXISTS trajectory_steps (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  trajectory_id TEXT NOT NULL REFERENCES trajectories(id),
  step_number INTEGER NOT NULL,

  -- Step data
  action TEXT NOT NULL,
  observation TEXT,
  reward REAL DEFAULT 0,

  -- Metadata
  metadata TEXT, -- JSON object

  created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000)
);

CREATE INDEX IF NOT EXISTS idx_steps_trajectory ON trajectory_steps(trajectory_id);

-- ============================================
-- MIGRATION STATE TRACKING
-- ============================================

-- Migration state (for resume capability)
CREATE TABLE IF NOT EXISTS migration_state (
  id TEXT PRIMARY KEY,
  migration_type TEXT NOT NULL, -- 'v2-to-v3', 'pattern', 'memory', etc.

  -- Progress tracking
  status TEXT DEFAULT 'pending' CHECK(status IN ('pending', 'in_progress', 'completed', 'failed', 'rolled_back')),
  total_items INTEGER DEFAULT 0,
  processed_items INTEGER DEFAULT 0,
  failed_items INTEGER DEFAULT 0,
  skipped_items INTEGER DEFAULT 0,

  -- Current position (for resume)
  current_batch INTEGER DEFAULT 0,
  last_processed_id TEXT,

  -- Source/destination info
  source_path TEXT,
  source_type TEXT,
  destination_path TEXT,

  -- Backup info
  backup_path TEXT,
  backup_created_at INTEGER,

  -- Error tracking
  last_error TEXT,
  errors TEXT, -- JSON array of errors

  -- Timestamps
  started_at INTEGER,
  completed_at INTEGER,
  created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000)
);

-- ============================================
-- SESSION MANAGEMENT
-- ============================================

-- Sessions for context persistence
CREATE TABLE IF NOT EXISTS sessions (
  id TEXT PRIMARY KEY,

  -- Session state
  state TEXT NOT NULL, -- JSON object with full session state
  status TEXT DEFAULT 'active' CHECK(status IN ('active', 'paused', 'completed', 'expired')),

  -- Context
  project_path TEXT,
  branch TEXT,

  -- Metrics
  tasks_completed INTEGER DEFAULT 0,
  patterns_learned INTEGER DEFAULT 0,

  -- Timestamps
  created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  expires_at INTEGER
);

-- ============================================
-- VECTOR INDEX METADATA (for HNSW)
-- ============================================

-- Track HNSW index state
CREATE TABLE IF NOT EXISTS vector_indexes (
  id TEXT PRIMARY KEY,
  name TEXT NOT NULL UNIQUE,

  -- Index configuration
  dimensions INTEGER NOT NULL,
  metric TEXT DEFAULT 'cosine' CHECK(metric IN ('cosine', 'euclidean', 'dot')),

  -- HNSW parameters
  hnsw_m INTEGER DEFAULT 16,
  hnsw_ef_construction INTEGER DEFAULT 200,
  hnsw_ef_search INTEGER DEFAULT 100,

  -- Quantization
  quantization_type TEXT CHECK(quantization_type IN ('none', 'scalar', 'product')),
  quantization_bits INTEGER DEFAULT 8,

  -- Statistics
  total_vectors INTEGER DEFAULT 0,
  last_rebuild_at INTEGER,

  created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
  updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000)
);

-- ============================================
-- SYSTEM METADATA
-- ============================================

CREATE TABLE IF NOT EXISTS metadata (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL,
  updated_at INTEGER DEFAULT (strftime('%s', 'now') * 1000)
);
`;
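
// Illustrative sketch (not part of the published file): how this schema is meant
// to be consumed. Load it into an in-memory sql.js database, then rank active
// patterns by confidence; idx_patterns_confidence makes the ORDER BY cheap.
// Assumes sql.js is installed.
async function demoSchemaUsage() {
    const initSqlJs = (await import('sql.js')).default;
    const SQL = await initSqlJs();
    const db = new SQL.Database();
    db.run(MEMORY_SCHEMA_V3);
    const res = db.exec(
        "SELECT name, confidence FROM patterns WHERE status = 'active' " +
        'ORDER BY confidence DESC LIMIT 5'
    );
    console.log(res[0]?.values ?? []);
    db.close();
}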
/**
 * Initial metadata to insert after schema creation
 */
export function getInitialMetadata(backend) {
    return `
INSERT OR REPLACE INTO metadata (key, value) VALUES
  ('schema_version', '3.0.0'),
  ('backend', '${backend}'),
  ('created_at', '${new Date().toISOString()}'),
  ('sql_js', 'true'),
  ('vector_embeddings', 'enabled'),
  ('pattern_learning', 'enabled'),
  ('temporal_decay', 'enabled'),
  ('hnsw_indexing', 'enabled');

-- Create default vector index configuration
INSERT OR IGNORE INTO vector_indexes (id, name, dimensions) VALUES
  ('default', 'default', 768),
  ('patterns', 'patterns', 768);
`;
}
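
// For illustration only (not in the published file): with backend = 'hybrid',
// getInitialMetadata() interpolates the backend name and call-time timestamp, e.g.
//
//   INSERT OR REPLACE INTO metadata (key, value) VALUES
//     ('schema_version', '3.0.0'),
//     ('backend', 'hybrid'),
//     ('created_at', '2025-01-01T00:00:00.000Z'),
//     ...
//
// so the created_at metadata row records wall-clock time, not the SQLite default.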
/**
 * Check for legacy database installations and migrate if needed
 */
export async function checkAndMigrateLegacy(options) {
    const { dbPath, verbose = false } = options;
    // Check for legacy locations
    const legacyPaths = [
        path.join(process.cwd(), 'memory.db'),
        path.join(process.cwd(), '.claude/memory.db'),
        path.join(process.cwd(), 'data/memory.db'),
        path.join(process.cwd(), '.claude-flow/memory.db')
    ];
    for (const legacyPath of legacyPaths) {
        if (fs.existsSync(legacyPath) && legacyPath !== dbPath) {
            try {
                const initSqlJs = (await import('sql.js')).default;
                const SQL = await initSqlJs();
                const legacyBuffer = fs.readFileSync(legacyPath);
                const legacyDb = new SQL.Database(legacyBuffer);
                // Check if it has data
                const countResult = legacyDb.exec('SELECT COUNT(*) FROM memory_entries');
                const count = countResult[0]?.values[0]?.[0] || 0;
                // Get version if available
                let version = 'unknown';
                try {
                    const versionResult = legacyDb.exec("SELECT value FROM metadata WHERE key='schema_version'");
                    version = versionResult[0]?.values[0]?.[0] || 'unknown';
                }
                catch { /* no metadata table */ }
                legacyDb.close();
                if (count > 0) {
                    return {
                        needsMigration: true,
                        legacyVersion: version,
                        legacyEntries: count
                    };
                }
            }
            catch {
                // Not a valid SQLite database, skip
            }
        }
    }
    return { needsMigration: false };
}
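
// Usage sketch (illustrative, not in the published file): probe the four legacy
// locations above before initializing a fresh v3 database.
async function demoLegacyCheck() {
    const result = await checkAndMigrateLegacy({
        dbPath: path.join(process.cwd(), '.swarm/memory.db')
    });
    if (result.needsMigration) {
        console.log(`Legacy v${result.legacyVersion} database found with ${result.legacyEntries} entries`);
    }
}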
/**
 * Initialize the memory database properly using sql.js
 */
export async function initializeMemoryDatabase(options) {
    const { backend = 'hybrid', dbPath: customPath, force = false, verbose = false, migrate = true } = options;
    const swarmDir = path.join(process.cwd(), '.swarm');
    const dbPath = customPath || path.join(swarmDir, 'memory.db');
    const dbDir = path.dirname(dbPath);
    try {
        // Create directory if needed
        if (!fs.existsSync(dbDir)) {
            fs.mkdirSync(dbDir, { recursive: true });
        }
        // Check for legacy installations
        if (migrate) {
            const legacyCheck = await checkAndMigrateLegacy({ dbPath, verbose });
            if (legacyCheck.needsMigration && verbose) {
                console.log(`Found legacy database (v${legacyCheck.legacyVersion}) with ${legacyCheck.legacyEntries} entries`);
            }
        }
        // Check existing database
        if (fs.existsSync(dbPath) && !force) {
            return {
                success: false,
                backend,
                dbPath,
                schemaVersion: '3.0.0',
                tablesCreated: [],
                indexesCreated: [],
                features: {
                    vectorEmbeddings: false,
                    patternLearning: false,
                    temporalDecay: false,
                    hnswIndexing: false,
                    migrationTracking: false
                },
                error: 'Database already exists. Use --force to reinitialize.'
            };
        }
        // Try to use sql.js (WASM SQLite)
        let db;
        let usedSqlJs = false;
        try {
            // Dynamic import of sql.js
            const initSqlJs = (await import('sql.js')).default;
            const SQL = await initSqlJs();
            // Load existing database or create new
            if (fs.existsSync(dbPath) && force) {
                fs.unlinkSync(dbPath);
            }
            db = new SQL.Database();
            usedSqlJs = true;
        }
        catch (e) {
            // sql.js not available, fall back to writing schema file
            if (verbose) {
                console.log('sql.js not available, writing schema file for later initialization');
            }
        }
        if (usedSqlJs && db) {
            // Execute schema
            db.run(MEMORY_SCHEMA_V3);
            // Insert initial metadata
            db.run(getInitialMetadata(backend));
            // Save to file
            const data = db.export();
            const buffer = Buffer.from(data);
            fs.writeFileSync(dbPath, buffer);
            // Close database
            db.close();
            // Also create schema file for reference
            const schemaPath = path.join(dbDir, 'schema.sql');
            fs.writeFileSync(schemaPath, MEMORY_SCHEMA_V3 + '\n' + getInitialMetadata(backend));
            return {
                success: true,
                backend,
                dbPath,
                schemaVersion: '3.0.0',
                tablesCreated: [
                    'memory_entries',
                    'patterns',
                    'pattern_history',
                    'trajectories',
                    'trajectory_steps',
                    'migration_state',
                    'sessions',
                    'vector_indexes',
                    'metadata'
                ],
                indexesCreated: [
                    'idx_memory_namespace',
                    'idx_memory_key',
                    'idx_memory_type',
                    'idx_memory_status',
                    'idx_memory_created',
                    'idx_memory_accessed',
                    'idx_memory_owner',
                    'idx_patterns_type',
                    'idx_patterns_confidence',
                    'idx_patterns_status',
                    'idx_patterns_last_matched',
                    'idx_pattern_history_pattern',
                    'idx_steps_trajectory'
                ],
                features: {
                    vectorEmbeddings: true,
                    patternLearning: true,
                    temporalDecay: true,
                    hnswIndexing: true,
                    migrationTracking: true
                }
            };
        }
        else {
            // Fall back to schema file approach
            const schemaPath = path.join(dbDir, 'schema.sql');
            fs.writeFileSync(schemaPath, MEMORY_SCHEMA_V3 + '\n' + getInitialMetadata(backend));
            // Create minimal valid SQLite file
            const sqliteHeader = Buffer.alloc(4096, 0);
            // SQLite format 3 header
            Buffer.from('SQLite format 3\0').copy(sqliteHeader, 0);
            sqliteHeader[16] = 0x10; // page size high byte (4096)
            sqliteHeader[17] = 0x00; // page size low byte
            sqliteHeader[18] = 0x01; // file format write version
            sqliteHeader[19] = 0x01; // file format read version
            sqliteHeader[24] = 0x00; // max embedded payload
            sqliteHeader[25] = 0x40;
            sqliteHeader[26] = 0x20; // min embedded payload
            sqliteHeader[27] = 0x20; // leaf payload
            fs.writeFileSync(dbPath, sqliteHeader);
            return {
                success: true,
                backend,
                dbPath,
                schemaVersion: '3.0.0',
                tablesCreated: [
                    'memory_entries (pending)',
                    'patterns (pending)',
                    'pattern_history (pending)',
                    'trajectories (pending)',
                    'trajectory_steps (pending)',
                    'migration_state (pending)',
                    'sessions (pending)',
                    'vector_indexes (pending)',
                    'metadata (pending)'
                ],
                indexesCreated: [],
                features: {
                    vectorEmbeddings: true,
                    patternLearning: true,
                    temporalDecay: true,
                    hnswIndexing: true,
                    migrationTracking: true
                }
            };
        }
    }
    catch (error) {
        return {
            success: false,
            backend,
            dbPath,
            schemaVersion: '3.0.0',
            tablesCreated: [],
            indexesCreated: [],
            features: {
                vectorEmbeddings: false,
                patternLearning: false,
                temporalDecay: false,
                hnswIndexing: false,
                migrationTracking: false
            },
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
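
// Usage sketch (illustrative, not in the published file): initialize
// .swarm/memory.db, then confirm the result with checkMemoryInitialization
// (defined below).
async function demoInit() {
    const init = await initializeMemoryDatabase({ backend: 'hybrid', force: false });
    console.log(init.success ? `Initialized ${init.dbPath}` : init.error);
    const check = await checkMemoryInitialization();
    console.log('Initialized:', check.initialized, 'version:', check.version);
}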
/**
 * Check if memory database is properly initialized
 */
export async function checkMemoryInitialization(dbPath) {
    const swarmDir = path.join(process.cwd(), '.swarm');
    const path_ = dbPath || path.join(swarmDir, 'memory.db');
    if (!fs.existsSync(path_)) {
        return { initialized: false };
    }
    try {
        // Try to load with sql.js
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(path_);
        const db = new SQL.Database(fileBuffer);
        // Check for metadata table
        const tables = db.exec("SELECT name FROM sqlite_master WHERE type='table'");
        const tableNames = tables[0]?.values?.map(v => v[0]) || [];
        // Get version
        let version = 'unknown';
        let backend = 'unknown';
        try {
            const versionResult = db.exec("SELECT value FROM metadata WHERE key='schema_version'");
            version = versionResult[0]?.values[0]?.[0] || 'unknown';
            const backendResult = db.exec("SELECT value FROM metadata WHERE key='backend'");
            backend = backendResult[0]?.values[0]?.[0] || 'unknown';
        }
        catch {
            // Metadata table might not exist
        }
        db.close();
        return {
            initialized: true,
            version,
            backend,
            features: {
                vectorEmbeddings: tableNames.includes('vector_indexes'),
                patternLearning: tableNames.includes('patterns'),
                temporalDecay: tableNames.includes('pattern_history')
            },
            tables: tableNames
        };
    }
    catch {
        // Could not read database
        return { initialized: false };
    }
}
/**
 * Apply temporal decay to patterns
 * Reduces confidence of patterns that haven't been used recently
 */
export async function applyTemporalDecay(dbPath) {
    const swarmDir = path.join(process.cwd(), '.swarm');
    const path_ = dbPath || path.join(swarmDir, 'memory.db');
    try {
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(path_);
        const db = new SQL.Database(fileBuffer);
        // Apply linear decay: confidence *= 1 - decay_rate * days_since_last_use
        // (86400000 ms per day; only patterns idle for more than a day are touched)
        const now = Date.now();
        const decayQuery = `
            UPDATE patterns
            SET
              confidence = confidence * (1.0 - decay_rate * ((? - COALESCE(last_matched_at, created_at)) / 86400000.0)),
              updated_at = ?
            WHERE status = 'active'
              AND confidence > 0.1
              AND (? - COALESCE(last_matched_at, created_at)) > 86400000
        `;
        db.run(decayQuery, [now, now, now]);
        const changes = db.getRowsModified();
        // Save
        const data = db.export();
        fs.writeFileSync(path_, Buffer.from(data));
        db.close();
        return {
            success: true,
            patternsDecayed: changes
        };
    }
    catch (error) {
        return {
            success: false,
            patternsDecayed: 0,
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
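
// Worked example of the decay arithmetic above (illustrative, not in the
// published file): with the default decay_rate of 0.01, a pattern at confidence
// 0.8 that last matched 10 days ago is scaled by (1 - 0.01 * 10) = 0.9, leaving
// 0.72. Note the linear form goes negative past 100 idle days at the default
// rate, so callers may want to clamp or archive very stale patterns.
function decayedConfidence(confidence, decayRate, idleDays) {
    return confidence * (1.0 - decayRate * idleDays);
}
// decayedConfidence(0.8, 0.01, 10) === 0.72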
let embeddingModelState = null;
/**
 * Lazy load ONNX embedding model
 * Only loads when first embedding is requested
 */
export async function loadEmbeddingModel(options) {
    const { verbose = false } = options || {};
    const startTime = Date.now();
    // Already loaded
    if (embeddingModelState?.loaded) {
        return {
            success: true,
            dimensions: embeddingModelState.dimensions,
            modelName: 'cached',
            loadTime: 0
        };
    }
    try {
        // Try to import @xenova/transformers for ONNX embeddings
        const transformers = await import('@xenova/transformers').catch(() => null);
        if (transformers) {
            if (verbose) {
                console.log('Loading ONNX embedding model (all-MiniLM-L6-v2)...');
            }
            // Use small, fast model for local embeddings
            const { pipeline } = transformers;
            const embedder = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
            embeddingModelState = {
                loaded: true,
                model: embedder,
                tokenizer: null,
                dimensions: 384 // MiniLM-L6 produces 384-dim vectors
            };
            return {
                success: true,
                dimensions: 384,
                modelName: 'all-MiniLM-L6-v2',
                loadTime: Date.now() - startTime
            };
        }
        // Fallback: Check for agentic-flow ONNX
        const agenticFlow = await import('agentic-flow').catch(() => null);
        if (agenticFlow && agenticFlow.embeddings) {
            if (verbose) {
                console.log('Loading agentic-flow embedding model...');
            }
            embeddingModelState = {
                loaded: true,
                model: agenticFlow.embeddings,
                tokenizer: null,
                dimensions: 768
            };
            return {
                success: true,
                dimensions: 768,
                modelName: 'agentic-flow',
                loadTime: Date.now() - startTime
            };
        }
        // No ONNX model available - use fallback
        embeddingModelState = {
            loaded: true,
            model: null, // Will use simple hash-based fallback
            tokenizer: null,
            dimensions: 128 // Smaller fallback dimensions
        };
        return {
            success: true,
            dimensions: 128,
            modelName: 'hash-fallback',
            loadTime: Date.now() - startTime
        };
    }
    catch (error) {
        return {
            success: false,
            dimensions: 0,
            modelName: 'none',
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
/**
 * Generate real embedding for text
 * Uses ONNX model if available, falls back to deterministic hash
 */
export async function generateEmbedding(text) {
    // Ensure model is loaded
    if (!embeddingModelState?.loaded) {
        await loadEmbeddingModel();
    }
    const state = embeddingModelState;
    // Use ONNX model if available
    if (state.model && typeof state.model === 'function') {
        try {
            const output = await state.model(text, { pooling: 'mean', normalize: true });
            const embedding = Array.from(output.data);
            return {
                embedding,
                dimensions: embedding.length,
                model: 'onnx'
            };
        }
        catch {
            // Fall through to fallback
        }
    }
    // Deterministic hash-based fallback (for testing/demo without ONNX)
    const embedding = generateHashEmbedding(text, state.dimensions);
    return {
        embedding,
        dimensions: state.dimensions,
        model: 'hash-fallback'
    };
}
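
// Usage sketch (illustrative, not in the published file): embed two strings and
// compare them with the cosineSim helper defined later in this file. With the
// ONNX model loaded the score is semantic; with the hash fallback it mostly
// reflects character overlap.
async function demoEmbedding() {
    const a = await generateEmbedding('initialize the memory database');
    const b = await generateEmbedding('set up memory storage');
    console.log(`model=${a.model} dims=${a.dimensions} sim=${cosineSim(a.embedding, b.embedding).toFixed(3)}`);
}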
/**
 * Generate deterministic hash-based embedding
 * Not semantic, but deterministic and useful for testing
 */
function generateHashEmbedding(text, dimensions) {
    const embedding = new Array(dimensions).fill(0);
    // Simple hash-based approach for reproducibility
    const words = text.toLowerCase().split(/\s+/);
    for (let i = 0; i < words.length; i++) {
        const word = words[i];
        for (let j = 0; j < word.length; j++) {
            const charCode = word.charCodeAt(j);
            const idx = (charCode * (i + 1) * (j + 1)) % dimensions;
            embedding[idx] += Math.sin(charCode * 0.1) * 0.1;
        }
    }
    // Normalize to unit vector
    const magnitude = Math.sqrt(embedding.reduce((sum, v) => sum + v * v, 0)) || 1;
    return embedding.map(v => v / magnitude);
}
/**
 * Verify memory initialization works correctly
 * Tests: write, read, search, patterns
 */
export async function verifyMemoryInit(dbPath, options) {
    const { verbose = false } = options || {};
    const tests = [];
    try {
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fs = await import('fs');
        // Load database
        const fileBuffer = fs.readFileSync(dbPath);
        const db = new SQL.Database(fileBuffer);
        // Test 1: Schema verification
        const schemaStart = Date.now();
        const tables = db.exec("SELECT name FROM sqlite_master WHERE type='table'");
        const tableNames = tables[0]?.values?.map(v => v[0]) || [];
        const expectedTables = ['memory_entries', 'patterns', 'metadata', 'vector_indexes'];
        const missingTables = expectedTables.filter(t => !tableNames.includes(t));
        tests.push({
            name: 'Schema verification',
            passed: missingTables.length === 0,
            details: missingTables.length > 0 ? `Missing: ${missingTables.join(', ')}` : `${tableNames.length} tables found`,
            duration: Date.now() - schemaStart
        });
        // Test 2: Write entry
        const writeStart = Date.now();
        const testId = `test_${Date.now()}`;
        const testKey = 'verification_test';
        const testValue = 'This is a verification test entry for memory initialization';
        try {
            db.run(`
                INSERT INTO memory_entries (id, key, namespace, content, type, created_at, updated_at)
                VALUES (?, ?, 'test', ?, 'semantic', ?, ?)
            `, [testId, testKey, testValue, Date.now(), Date.now()]);
            tests.push({
                name: 'Write entry',
                passed: true,
                details: 'Entry written successfully',
                duration: Date.now() - writeStart
            });
        }
        catch (e) {
            tests.push({
                name: 'Write entry',
                passed: false,
                details: e instanceof Error ? e.message : 'Write failed',
                duration: Date.now() - writeStart
            });
        }
        // Test 3: Read entry
        const readStart = Date.now();
        try {
            const result = db.exec(`SELECT content FROM memory_entries WHERE id = ?`, [testId]);
            const content = result[0]?.values[0]?.[0];
            tests.push({
                name: 'Read entry',
                passed: content === testValue,
                details: content === testValue ? 'Content matches' : 'Content mismatch',
                duration: Date.now() - readStart
            });
        }
        catch (e) {
            tests.push({
                name: 'Read entry',
                passed: false,
                details: e instanceof Error ? e.message : 'Read failed',
                duration: Date.now() - readStart
            });
        }
        // Test 4: Write with embedding
        const embeddingStart = Date.now();
        try {
            const { embedding, dimensions, model } = await generateEmbedding(testValue);
            const embeddingJson = JSON.stringify(embedding);
            db.run(`
                UPDATE memory_entries
                SET embedding = ?, embedding_dimensions = ?, embedding_model = ?
                WHERE id = ?
            `, [embeddingJson, dimensions, model, testId]);
            tests.push({
                name: 'Generate embedding',
                passed: true,
                details: `${dimensions}-dim vector (${model})`,
                duration: Date.now() - embeddingStart
            });
        }
        catch (e) {
            tests.push({
                name: 'Generate embedding',
                passed: false,
                details: e instanceof Error ? e.message : 'Embedding failed',
                duration: Date.now() - embeddingStart
            });
        }
        // Test 5: Pattern storage
        const patternStart = Date.now();
        try {
            const patternId = `pattern_${Date.now()}`;
            db.run(`
                INSERT INTO patterns (id, name, pattern_type, condition, action, confidence, created_at, updated_at)
                VALUES (?, 'test-pattern', 'task-routing', 'test condition', 'test action', 0.5, ?, ?)
            `, [patternId, Date.now(), Date.now()]);
            tests.push({
                name: 'Pattern storage',
                passed: true,
                details: 'Pattern stored with confidence scoring',
                duration: Date.now() - patternStart
            });
            // Cleanup test pattern
            db.run(`DELETE FROM patterns WHERE id = ?`, [patternId]);
        }
        catch (e) {
            tests.push({
                name: 'Pattern storage',
                passed: false,
                details: e instanceof Error ? e.message : 'Pattern storage failed',
                duration: Date.now() - patternStart
            });
        }
        // Test 6: Vector index configuration
        const indexStart = Date.now();
        try {
            const indexResult = db.exec(`SELECT name, dimensions, hnsw_m, hnsw_ef_construction FROM vector_indexes`);
            const indexes = indexResult[0]?.values || [];
            tests.push({
                name: 'Vector index config',
                passed: indexes.length > 0,
                details: `${indexes.length} indexes configured (HNSW M=16, ef=200)`,
                duration: Date.now() - indexStart
            });
        }
        catch (e) {
            tests.push({
                name: 'Vector index config',
                passed: false,
                details: e instanceof Error ? e.message : 'Index check failed',
                duration: Date.now() - indexStart
            });
        }
        // Cleanup test entry
        db.run(`DELETE FROM memory_entries WHERE id = ?`, [testId]);
        // Save changes
        const data = db.export();
        fs.writeFileSync(dbPath, Buffer.from(data));
        db.close();
        const passed = tests.filter(t => t.passed).length;
        const failed = tests.filter(t => !t.passed).length;
        return {
            success: failed === 0,
            tests,
            summary: {
                passed,
                failed,
                total: tests.length
            }
        };
    }
    catch (error) {
        return {
            success: false,
            tests: [{
                    name: 'Database access',
                    passed: false,
                    details: error instanceof Error ? error.message : 'Unknown error'
                }],
            summary: { passed: 0, failed: 1, total: 1 }
        };
    }
}
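
// Usage sketch (illustrative, not in the published file): run the six built-in
// checks against the default database and print a pass/fail line per test.
async function demoVerify() {
    const report = await verifyMemoryInit(path.join(process.cwd(), '.swarm/memory.db'));
    for (const t of report.tests) {
        console.log(`${t.passed ? 'PASS' : 'FAIL'} ${t.name}: ${t.details}`);
    }
    console.log(`${report.summary.passed}/${report.summary.total} passed`);
}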
/**
 * Store an entry directly using sql.js
 * This bypasses MCP and writes directly to the database
 */
export async function storeEntry(options) {
    const { key, value, namespace = 'default', generateEmbeddingFlag = true, tags = [], ttl, dbPath: customPath } = options;
    const swarmDir = path.join(process.cwd(), '.swarm');
    const dbPath = customPath || path.join(swarmDir, 'memory.db');
    try {
        if (!fs.existsSync(dbPath)) {
            return { success: false, id: '', error: 'Database not initialized. Run: claude-flow memory init' };
        }
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(dbPath);
        const db = new SQL.Database(fileBuffer);
        const id = `entry_${Date.now()}_${Math.random().toString(36).substring(7)}`;
        const now = Date.now();
        // Generate embedding if requested
        let embeddingJson = null;
        let embeddingDimensions = null;
        let embeddingModel = null;
        if (generateEmbeddingFlag && value.length > 0) {
            const embResult = await generateEmbedding(value);
            embeddingJson = JSON.stringify(embResult.embedding);
            embeddingDimensions = embResult.dimensions;
            embeddingModel = embResult.model;
        }
        // Insert entry
        db.run(`
            INSERT INTO memory_entries (
              id, key, namespace, content, type,
              embedding, embedding_dimensions, embedding_model,
              tags, metadata, created_at, updated_at, expires_at, status
            ) VALUES (?, ?, ?, ?, 'semantic', ?, ?, ?, ?, ?, ?, ?, ?, 'active')
        `, [
            id,
            key,
            namespace,
            value,
            embeddingJson,
            embeddingDimensions,
            embeddingModel,
            tags.length > 0 ? JSON.stringify(tags) : null,
            '{}',
            now,
            now,
            ttl ? now + (ttl * 1000) : null
        ]);
        // Save
        const data = db.export();
        fs.writeFileSync(dbPath, Buffer.from(data));
        db.close();
        return {
            success: true,
            id,
            embedding: embeddingJson ? { dimensions: embeddingDimensions, model: embeddingModel } : undefined
        };
    }
    catch (error) {
        return {
            success: false,
            id: '',
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
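
// Usage sketch (illustrative, not in the published file): store an entry with a
// 1-hour TTL, then retrieve it by key with getEntry (defined below). Note the
// schema's UNIQUE(namespace, key) constraint: a second store with the same key
// in the same namespace fails rather than upserting.
async function demoStore() {
    const stored = await storeEntry({
        key: 'api-design',
        value: 'Prefer cursor-based pagination for list endpoints',
        namespace: 'decisions',
        tags: ['api'],
        ttl: 3600
    });
    if (stored.success) {
        const fetched = await getEntry({ key: 'api-design', namespace: 'decisions' });
        console.log(fetched.entry?.content);
    }
}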
/**
 * Search entries using sql.js with vector similarity
 */
export async function searchEntries(options) {
    const { query, namespace = 'default', limit = 10, threshold = 0.3, dbPath: customPath } = options;
    const swarmDir = path.join(process.cwd(), '.swarm');
    const dbPath = customPath || path.join(swarmDir, 'memory.db');
    const startTime = Date.now();
    try {
        if (!fs.existsSync(dbPath)) {
            return { success: false, results: [], searchTime: 0, error: 'Database not found' };
        }
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(dbPath);
        const db = new SQL.Database(fileBuffer);
        // Generate query embedding
        const queryEmb = await generateEmbedding(query);
        const queryEmbedding = queryEmb.embedding;
        // Get entries with embeddings
        const entries = db.exec(`
            SELECT id, key, namespace, content, embedding
            FROM memory_entries
            WHERE status = 'active'
            ${namespace !== 'all' ? `AND namespace = '${namespace.replace(/'/g, "''")}'` : ''}
            LIMIT 1000
        `);
        const results = [];
        if (entries[0]?.values) {
            for (const row of entries[0].values) {
                const [id, key, ns, content, embeddingJson] = row;
                let score = 0;
                if (embeddingJson) {
                    try {
                        const embedding = JSON.parse(embeddingJson);
                        score = cosineSim(queryEmbedding, embedding);
                    }
                    catch {
                        // Invalid embedding, use keyword score
                    }
                }
                // Fallback to keyword matching
                if (score < threshold) {
                    const lowerContent = (content || '').toLowerCase();
                    const lowerQuery = query.toLowerCase();
                    const words = lowerQuery.split(/\s+/);
                    const matchCount = words.filter(w => lowerContent.includes(w)).length;
                    const keywordScore = matchCount / words.length * 0.5;
                    score = Math.max(score, keywordScore);
                }
                if (score >= threshold) {
                    results.push({
                        id: id.substring(0, 12),
                        key: key || id.substring(0, 15),
                        content: (content || '').substring(0, 60) + ((content || '').length > 60 ? '...' : ''),
                        score,
                        namespace: ns || 'default'
                    });
                }
            }
        }
        db.close();
        // Sort by score
        results.sort((a, b) => b.score - a.score);
        return {
            success: true,
            results: results.slice(0, limit),
            searchTime: Date.now() - startTime
        };
    }
    catch (error) {
        return {
            success: false,
            results: [],
            searchTime: Date.now() - startTime,
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
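
// Usage sketch (illustrative, not in the published file): semantic search over
// one namespace. Scoring is a brute-force cosine scan over up to 1000 active
// rows, with the 0.5-weighted keyword match as a floor when embeddings are
// missing or weak.
async function demoSearch() {
    const { results, searchTime } = await searchEntries({
        query: 'pagination strategy',
        namespace: 'decisions',
        limit: 5
    });
    for (const r of results) {
        console.log(`${r.score.toFixed(3)}  ${r.key}  ${r.content}`);
    }
    console.log(`search took ${searchTime}ms`);
}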
// Cosine similarity helper
function cosineSim(a, b) {
    if (!a || !b || a.length === 0 || b.length === 0)
        return 0;
    const minLen = Math.min(a.length, b.length);
    let dot = 0, normA = 0, normB = 0;
    for (let i = 0; i < minLen; i++) {
        dot += a[i] * b[i];
        normA += a[i] * a[i];
        normB += b[i] * b[i];
    }
    const mag = Math.sqrt(normA) * Math.sqrt(normB);
    return mag === 0 ? 0 : dot / mag;
}
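
// Quick numeric check of the helper above (illustrative):
//   cosineSim([1, 0], [1, 1]) === 1 / Math.sqrt(2)   // ≈ 0.7071
// When vector lengths differ (e.g., a 384-dim ONNX embedding against a 128-dim
// hash fallback), the loop truncates to the shorter length rather than failing.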
/**
 * List all entries from the memory database
 */
export async function listEntries(options) {
    const { namespace, limit = 20, offset = 0, dbPath: customPath } = options;
    const swarmDir = path.join(process.cwd(), '.swarm');
    const dbPath = customPath || path.join(swarmDir, 'memory.db');
    try {
        if (!fs.existsSync(dbPath)) {
            return { success: false, entries: [], total: 0, error: 'Database not found' };
        }
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(dbPath);
        const db = new SQL.Database(fileBuffer);
        // Get total count
        const countQuery = namespace
            ? `SELECT COUNT(*) as cnt FROM memory_entries WHERE status = 'active' AND namespace = '${namespace.replace(/'/g, "''")}'`
            : `SELECT COUNT(*) as cnt FROM memory_entries WHERE status = 'active'`;
        const countResult = db.exec(countQuery);
        const total = countResult[0]?.values?.[0]?.[0] || 0;
        // Get entries
        const listQuery = `
            SELECT id, key, namespace, content, embedding, access_count, created_at, updated_at
            FROM memory_entries
            WHERE status = 'active'
            ${namespace ? `AND namespace = '${namespace.replace(/'/g, "''")}'` : ''}
            ORDER BY updated_at DESC
            LIMIT ${limit} OFFSET ${offset}
        `;
        const result = db.exec(listQuery);
        const entries = [];
        if (result[0]?.values) {
            for (const row of result[0].values) {
                const [id, key, ns, content, embedding, accessCount, createdAt, updatedAt] = row;
                entries.push({
                    id: String(id).substring(0, 20),
                    key: key || String(id).substring(0, 15),
                    namespace: ns || 'default',
                    size: (content || '').length,
                    accessCount: accessCount || 0,
                    createdAt: createdAt || new Date().toISOString(),
                    updatedAt: updatedAt || new Date().toISOString(),
                    hasEmbedding: !!embedding && embedding.length > 10
                });
            }
        }
        db.close();
        return { success: true, entries, total };
    }
    catch (error) {
        return {
            success: false,
            entries: [],
            total: 0,
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
/**
 * Get a specific entry from the memory database
 */
export async function getEntry(options) {
    const { key, namespace = 'default', dbPath: customPath } = options;
    const swarmDir = path.join(process.cwd(), '.swarm');
    const dbPath = customPath || path.join(swarmDir, 'memory.db');
    try {
        if (!fs.existsSync(dbPath)) {
            return { success: false, found: false, error: 'Database not found' };
        }
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(dbPath);
        const db = new SQL.Database(fileBuffer);
        // Find entry by key
        const result = db.exec(`
            SELECT id, key, namespace, content, embedding, access_count, created_at, updated_at, tags
            FROM memory_entries
            WHERE status = 'active'
              AND key = '${key.replace(/'/g, "''")}'
              AND namespace = '${namespace.replace(/'/g, "''")}'
            LIMIT 1
        `);
        if (!result[0]?.values?.[0]) {
            db.close();
            return { success: true, found: false };
        }
        const [id, entryKey, ns, content, embedding, accessCount, createdAt, updatedAt, tagsJson] = result[0].values[0];
        // Update access count
        db.run(`
            UPDATE memory_entries
            SET access_count = access_count + 1, last_accessed_at = strftime('%s', 'now') * 1000
            WHERE id = '${String(id).replace(/'/g, "''")}'
        `);
        // Save updated database
        const data = db.export();
        fs.writeFileSync(dbPath, Buffer.from(data));
        db.close();
        let tags = [];
        if (tagsJson) {
            try {
                tags = JSON.parse(tagsJson);
            }
            catch {
                // Invalid JSON
            }
        }
        return {
            success: true,
            found: true,
            entry: {
                id: String(id),
                key: entryKey || String(id),
                namespace: ns || 'default',
                content: content || '',
                accessCount: (accessCount || 0) + 1,
                createdAt: createdAt || new Date().toISOString(),
                updatedAt: updatedAt || new Date().toISOString(),
                hasEmbedding: !!embedding && embedding.length > 10,
                tags
            }
        };
    }
    catch (error) {
        return {
            success: false,
            found: false,
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
export default {
    initializeMemoryDatabase,
    checkMemoryInitialization,
    checkAndMigrateLegacy,
    applyTemporalDecay,
    loadEmbeddingModel,
    generateEmbedding,
    verifyMemoryInit,
    storeEntry,
    searchEntries,
    listEntries,
    getEntry,
    MEMORY_SCHEMA_V3,
    getInitialMetadata
};
//# sourceMappingURL=memory-initializer.js.map