moflo 4.0.2 → 4.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +114 -110
- package/v3/@claude-flow/cli/dist/src/memory/memory-bridge.js +194 -81
- package/v3/@claude-flow/cli/dist/src/memory/memory-initializer.js +1892 -1841
- package/v3/@claude-flow/memory/README.md +587 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.d.ts +131 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.js +223 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.test.js +463 -0
- package/v3/@claude-flow/memory/dist/agentdb-adapter.d.ts +165 -0
- package/v3/@claude-flow/memory/dist/agentdb-adapter.js +806 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.d.ts +214 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.js +844 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.test.d.ts +7 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.test.js +258 -0
- package/v3/@claude-flow/memory/dist/application/commands/delete-memory.command.d.ts +65 -0
- package/v3/@claude-flow/memory/dist/application/commands/delete-memory.command.js +129 -0
- package/v3/@claude-flow/memory/dist/application/commands/store-memory.command.d.ts +48 -0
- package/v3/@claude-flow/memory/dist/application/commands/store-memory.command.js +72 -0
- package/v3/@claude-flow/memory/dist/application/index.d.ts +12 -0
- package/v3/@claude-flow/memory/dist/application/index.js +15 -0
- package/v3/@claude-flow/memory/dist/application/queries/search-memory.query.d.ts +72 -0
- package/v3/@claude-flow/memory/dist/application/queries/search-memory.query.js +143 -0
- package/v3/@claude-flow/memory/dist/application/services/memory-application-service.d.ts +121 -0
- package/v3/@claude-flow/memory/dist/application/services/memory-application-service.js +190 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.d.ts +226 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.js +709 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.test.js +754 -0
- package/v3/@claude-flow/memory/dist/benchmark.test.d.ts +2 -0
- package/v3/@claude-flow/memory/dist/benchmark.test.js +277 -0
- package/v3/@claude-flow/memory/dist/cache-manager.d.ts +134 -0
- package/v3/@claude-flow/memory/dist/cache-manager.js +407 -0
- package/v3/@claude-flow/memory/dist/controller-registry.d.ts +216 -0
- package/v3/@claude-flow/memory/dist/controller-registry.js +893 -0
- package/v3/@claude-flow/memory/dist/controller-registry.test.d.ts +14 -0
- package/v3/@claude-flow/memory/dist/controller-registry.test.js +636 -0
- package/v3/@claude-flow/memory/dist/database-provider.d.ts +87 -0
- package/v3/@claude-flow/memory/dist/database-provider.js +410 -0
- package/v3/@claude-flow/memory/dist/database-provider.test.d.ts +7 -0
- package/v3/@claude-flow/memory/dist/database-provider.test.js +285 -0
- package/v3/@claude-flow/memory/dist/domain/entities/memory-entry.d.ts +143 -0
- package/v3/@claude-flow/memory/dist/domain/entities/memory-entry.js +226 -0
- package/v3/@claude-flow/memory/dist/domain/index.d.ts +11 -0
- package/v3/@claude-flow/memory/dist/domain/index.js +12 -0
- package/v3/@claude-flow/memory/dist/domain/repositories/memory-repository.interface.d.ts +102 -0
- package/v3/@claude-flow/memory/dist/domain/repositories/memory-repository.interface.js +11 -0
- package/v3/@claude-flow/memory/dist/domain/services/memory-domain-service.d.ts +105 -0
- package/v3/@claude-flow/memory/dist/domain/services/memory-domain-service.js +297 -0
- package/v3/@claude-flow/memory/dist/hnsw-index.d.ts +111 -0
- package/v3/@claude-flow/memory/dist/hnsw-index.js +781 -0
- package/v3/@claude-flow/memory/dist/hnsw-lite.d.ts +23 -0
- package/v3/@claude-flow/memory/dist/hnsw-lite.js +168 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.d.ts +245 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.js +569 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.test.js +320 -0
- package/v3/@claude-flow/memory/dist/index.d.ts +208 -0
- package/v3/@claude-flow/memory/dist/index.js +362 -0
- package/v3/@claude-flow/memory/dist/infrastructure/index.d.ts +17 -0
- package/v3/@claude-flow/memory/dist/infrastructure/index.js +16 -0
- package/v3/@claude-flow/memory/dist/infrastructure/repositories/hybrid-memory-repository.d.ts +66 -0
- package/v3/@claude-flow/memory/dist/infrastructure/repositories/hybrid-memory-repository.js +409 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.d.ts +137 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.js +335 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.test.js +578 -0
- package/v3/@claude-flow/memory/dist/memory-graph.d.ts +100 -0
- package/v3/@claude-flow/memory/dist/memory-graph.js +333 -0
- package/v3/@claude-flow/memory/dist/memory-graph.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/memory-graph.test.js +609 -0
- package/v3/@claude-flow/memory/dist/migration.d.ts +68 -0
- package/v3/@claude-flow/memory/dist/migration.js +513 -0
- package/v3/@claude-flow/memory/dist/persistent-sona.d.ts +144 -0
- package/v3/@claude-flow/memory/dist/persistent-sona.js +332 -0
- package/v3/@claude-flow/memory/dist/query-builder.d.ts +211 -0
- package/v3/@claude-flow/memory/dist/query-builder.js +438 -0
- package/v3/@claude-flow/memory/dist/rvf-backend.d.ts +51 -0
- package/v3/@claude-flow/memory/dist/rvf-backend.js +481 -0
- package/v3/@claude-flow/memory/dist/rvf-learning-store.d.ts +139 -0
- package/v3/@claude-flow/memory/dist/rvf-learning-store.js +295 -0
- package/v3/@claude-flow/memory/dist/rvf-migration.d.ts +45 -0
- package/v3/@claude-flow/memory/dist/rvf-migration.js +254 -0
- package/v3/@claude-flow/memory/dist/sqlite-backend.d.ts +121 -0
- package/v3/@claude-flow/memory/dist/sqlite-backend.js +564 -0
- package/v3/@claude-flow/memory/dist/sqljs-backend.d.ts +128 -0
- package/v3/@claude-flow/memory/dist/sqljs-backend.js +601 -0
- package/v3/@claude-flow/memory/dist/types.d.ts +484 -0
- package/v3/@claude-flow/memory/dist/types.js +58 -0
- package/v3/@claude-flow/memory/package.json +46 -0
|
@@ -1,35 +1,71 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* V3 Memory Initializer
|
|
3
|
-
* Properly initializes the memory database with sql.js (WASM SQLite)
|
|
4
|
-
* Includes pattern tables, vector embeddings, migration state tracking
|
|
5
|
-
*
|
|
6
|
-
* ADR-053: Routes through ControllerRegistry → AgentDB v3 when available,
|
|
7
|
-
* falls back to raw sql.js for backwards compatibility.
|
|
8
|
-
*
|
|
9
|
-
* @module v3/cli/memory-initializer
|
|
10
|
-
*/
|
|
11
|
-
import * as fs from 'fs';
|
|
12
|
-
import * as path from 'path';
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
if (
|
|
19
|
-
return
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
1
|
+
/**
|
|
2
|
+
* V3 Memory Initializer
|
|
3
|
+
* Properly initializes the memory database with sql.js (WASM SQLite)
|
|
4
|
+
* Includes pattern tables, vector embeddings, migration state tracking
|
|
5
|
+
*
|
|
6
|
+
* ADR-053: Routes through ControllerRegistry → AgentDB v3 when available,
|
|
7
|
+
* falls back to raw sql.js for backwards compatibility.
|
|
8
|
+
*
|
|
9
|
+
* @module v3/cli/memory-initializer
|
|
10
|
+
*/
|
|
11
|
+
import * as fs from 'fs';
|
|
12
|
+
import * as path from 'path';
|
|
13
|
+
import { createRequire } from 'module';
|
|
14
|
+
import { pathToFileURL } from 'url';
|
|
15
|
+
// Project root resolution — handles npx running from node_modules/moflo
|
|
16
|
+
let _projectRoot;
|
|
17
|
+
function getProjectRoot() {
|
|
18
|
+
if (_projectRoot)
|
|
19
|
+
return _projectRoot;
|
|
20
|
+
// 1. Explicit env var (set by Claude Code hooks or moflo CLI wrapper)
|
|
21
|
+
if (process.env.CLAUDE_PROJECT_DIR) {
|
|
22
|
+
_projectRoot = process.env.CLAUDE_PROJECT_DIR;
|
|
23
|
+
return _projectRoot;
|
|
24
|
+
}
|
|
25
|
+
// 2. Walk up from cwd, skipping anything inside node_modules.
|
|
26
|
+
// A stale .swarm inside a package dir is NOT a project root.
|
|
27
|
+
let dir = process.cwd();
|
|
28
|
+
const root = path.parse(dir).root;
|
|
29
|
+
const nm = path.sep + 'node_modules';
|
|
30
|
+
while (dir !== root) {
|
|
31
|
+
// Skip if we're still inside a node_modules tree
|
|
32
|
+
if (!dir.includes(nm)) {
|
|
33
|
+
// Prefer CLAUDE.md (project-specific, never inside a package)
|
|
34
|
+
if (fs.existsSync(path.join(dir, 'CLAUDE.md')) && fs.existsSync(path.join(dir, 'package.json'))) {
|
|
35
|
+
_projectRoot = dir;
|
|
36
|
+
return _projectRoot;
|
|
37
|
+
}
|
|
38
|
+
if (fs.existsSync(path.join(dir, '.swarm', 'memory.db'))) {
|
|
39
|
+
_projectRoot = dir;
|
|
40
|
+
return _projectRoot;
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
dir = path.dirname(dir);
|
|
44
|
+
}
|
|
45
|
+
// 3. Fallback to cwd (original behavior)
|
|
46
|
+
_projectRoot = process.cwd();
|
|
47
|
+
return _projectRoot;
|
|
48
|
+
}
|
|
49
|
+
// ADR-053: Lazy import of AgentDB v3 bridge
|
|
50
|
+
let _bridge;
|
|
51
|
+
async function getBridge() {
|
|
52
|
+
if (_bridge === null)
|
|
53
|
+
return null;
|
|
54
|
+
if (_bridge)
|
|
55
|
+
return _bridge;
|
|
56
|
+
try {
|
|
57
|
+
_bridge = await import('./memory-bridge.js');
|
|
58
|
+
return _bridge;
|
|
59
|
+
}
|
|
60
|
+
catch {
|
|
61
|
+
_bridge = null;
|
|
62
|
+
return null;
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
/**
|
|
66
|
+
* Enhanced schema with pattern confidence, temporal decay, versioning
|
|
67
|
+
* Vector embeddings enabled for semantic search
|
|
68
|
+
*/
|
|
33
69
|
export const MEMORY_SCHEMA_V3 = `
|
|
34
70
|
-- RuFlo V3 Memory Database
|
|
35
71
|
-- Version: 3.0.0
|
|
@@ -319,477 +355,492 @@ CREATE TABLE IF NOT EXISTS metadata (
|
|
|
319
355
|
value TEXT NOT NULL,
|
|
320
356
|
updated_at INTEGER DEFAULT (strftime('%s', 'now') * 1000)
|
|
321
357
|
);
|
|
322
|
-
`;
|
|
323
|
-
let hnswIndex = null;
|
|
324
|
-
let hnswInitializing = false;
|
|
325
|
-
/**
|
|
326
|
-
* Get or create the HNSW index singleton
|
|
327
|
-
* Lazily initializes from SQLite data on first use
|
|
328
|
-
*/
|
|
329
|
-
export async function getHNSWIndex(options) {
|
|
330
|
-
const dimensions = options?.dimensions ?? 384;
|
|
331
|
-
// Return existing index if already initialized
|
|
332
|
-
if (hnswIndex?.initialized && !options?.forceRebuild) {
|
|
333
|
-
return hnswIndex;
|
|
334
|
-
}
|
|
335
|
-
// Prevent concurrent initialization
|
|
336
|
-
if (hnswInitializing) {
|
|
337
|
-
// Wait for initialization to complete
|
|
338
|
-
while (hnswInitializing) {
|
|
339
|
-
await new Promise(resolve => setTimeout(resolve, 10));
|
|
340
|
-
}
|
|
341
|
-
return hnswIndex;
|
|
342
|
-
}
|
|
343
|
-
hnswInitializing = true;
|
|
344
|
-
try {
|
|
345
|
-
// Import @ruvector/core dynamically
|
|
346
|
-
// Handle both ESM (default export) and CJS patterns
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
//
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
}
|
|
374
|
-
//
|
|
375
|
-
const
|
|
376
|
-
if (fs.existsSync(
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
358
|
+
`;
|
|
359
|
+
let hnswIndex = null;
|
|
360
|
+
let hnswInitializing = false;
|
|
361
|
+
/**
|
|
362
|
+
* Get or create the HNSW index singleton
|
|
363
|
+
* Lazily initializes from SQLite data on first use
|
|
364
|
+
*/
|
|
365
|
+
export async function getHNSWIndex(options) {
|
|
366
|
+
const dimensions = options?.dimensions ?? 384;
|
|
367
|
+
// Return existing index if already initialized
|
|
368
|
+
if (hnswIndex?.initialized && !options?.forceRebuild) {
|
|
369
|
+
return hnswIndex;
|
|
370
|
+
}
|
|
371
|
+
// Prevent concurrent initialization
|
|
372
|
+
if (hnswInitializing) {
|
|
373
|
+
// Wait for initialization to complete
|
|
374
|
+
while (hnswInitializing) {
|
|
375
|
+
await new Promise(resolve => setTimeout(resolve, 10));
|
|
376
|
+
}
|
|
377
|
+
return hnswIndex;
|
|
378
|
+
}
|
|
379
|
+
hnswInitializing = true;
|
|
380
|
+
try {
|
|
381
|
+
// Import @ruvector/core dynamically
|
|
382
|
+
// Handle both ESM (default export) and CJS patterns
|
|
383
|
+
// Bug fix: try project root resolution via createRequire when direct import fails
|
|
384
|
+
// (npx moflo resolves relative to moflo package, not consuming project)
|
|
385
|
+
let ruvectorModule = null;
|
|
386
|
+
try {
|
|
387
|
+
ruvectorModule = await import('@ruvector/core');
|
|
388
|
+
}
|
|
389
|
+
catch {
|
|
390
|
+
try {
|
|
391
|
+
const req = createRequire(path.join(getProjectRoot(), 'package.json'));
|
|
392
|
+
const resolved = req.resolve('@ruvector/core');
|
|
393
|
+
ruvectorModule = await import(pathToFileURL(resolved).href);
|
|
394
|
+
}
|
|
395
|
+
catch {
|
|
396
|
+
// Not available
|
|
397
|
+
}
|
|
398
|
+
}
|
|
399
|
+
if (!ruvectorModule) {
|
|
400
|
+
hnswInitializing = false;
|
|
401
|
+
return null; // HNSW not available
|
|
402
|
+
}
|
|
403
|
+
// ESM returns { default: { VectorDb, ... } }, CJS returns { VectorDb, ... }
|
|
404
|
+
const ruvectorCore = ruvectorModule.default || ruvectorModule;
|
|
405
|
+
if (!ruvectorCore?.VectorDb) {
|
|
406
|
+
hnswInitializing = false;
|
|
407
|
+
return null; // VectorDb not found
|
|
408
|
+
}
|
|
409
|
+
const { VectorDb } = ruvectorCore;
|
|
410
|
+
// Persistent storage paths
|
|
411
|
+
const swarmDir = path.join(getProjectRoot(), '.swarm');
|
|
412
|
+
if (!fs.existsSync(swarmDir)) {
|
|
413
|
+
fs.mkdirSync(swarmDir, { recursive: true });
|
|
414
|
+
}
|
|
415
|
+
const hnswPath = path.join(swarmDir, 'hnsw.index');
|
|
416
|
+
const metadataPath = path.join(swarmDir, 'hnsw.metadata.json');
|
|
417
|
+
const dbPath = options?.dbPath || path.join(swarmDir, 'memory.db');
|
|
418
|
+
// Create HNSW index with persistent storage
|
|
419
|
+
// @ruvector/core uses string enum for distanceMetric: 'Cosine', 'Euclidean', 'DotProduct', 'Manhattan'
|
|
420
|
+
const db = new VectorDb({
|
|
421
|
+
dimensions,
|
|
422
|
+
distanceMetric: 'Cosine',
|
|
423
|
+
storagePath: hnswPath // Persistent storage!
|
|
424
|
+
});
|
|
425
|
+
// Load metadata (entry info) if exists
|
|
426
|
+
const entries = new Map();
|
|
427
|
+
if (fs.existsSync(metadataPath)) {
|
|
428
|
+
try {
|
|
429
|
+
const metadataJson = fs.readFileSync(metadataPath, 'utf-8');
|
|
430
|
+
const metadata = JSON.parse(metadataJson);
|
|
431
|
+
for (const [key, value] of metadata) {
|
|
432
|
+
entries.set(key, value);
|
|
433
|
+
}
|
|
434
|
+
}
|
|
435
|
+
catch {
|
|
436
|
+
// Metadata load failed, will rebuild
|
|
437
|
+
}
|
|
438
|
+
}
|
|
439
|
+
hnswIndex = {
|
|
440
|
+
db,
|
|
441
|
+
entries,
|
|
442
|
+
dimensions,
|
|
443
|
+
initialized: false
|
|
444
|
+
};
|
|
445
|
+
// Check if index already has data (from persistent storage)
|
|
446
|
+
const existingLen = await db.len();
|
|
447
|
+
if (existingLen > 0 && entries.size > 0) {
|
|
448
|
+
// Index loaded from disk, skip SQLite sync
|
|
449
|
+
hnswIndex.initialized = true;
|
|
450
|
+
hnswInitializing = false;
|
|
451
|
+
return hnswIndex;
|
|
452
|
+
}
|
|
453
|
+
if (fs.existsSync(dbPath)) {
|
|
454
|
+
try {
|
|
455
|
+
const initSqlJs = (await import('sql.js')).default;
|
|
456
|
+
const SQL = await initSqlJs();
|
|
457
|
+
const fileBuffer = fs.readFileSync(dbPath);
|
|
458
|
+
const sqlDb = new SQL.Database(fileBuffer);
|
|
459
|
+
// Load all entries with embeddings
|
|
409
460
|
const result = sqlDb.exec(`
|
|
410
461
|
SELECT id, key, namespace, content, embedding
|
|
411
462
|
FROM memory_entries
|
|
412
463
|
WHERE status = 'active' AND embedding IS NOT NULL
|
|
413
464
|
LIMIT 10000
|
|
414
|
-
`);
|
|
415
|
-
if (result[0]?.values) {
|
|
416
|
-
for (const row of result[0].values) {
|
|
417
|
-
const [id, key, ns, content, embeddingJson] = row;
|
|
418
|
-
if (embeddingJson) {
|
|
419
|
-
try {
|
|
420
|
-
const embedding = JSON.parse(embeddingJson);
|
|
421
|
-
const vector = new Float32Array(embedding);
|
|
422
|
-
await db.insert({
|
|
423
|
-
id: String(id),
|
|
424
|
-
vector
|
|
425
|
-
});
|
|
426
|
-
hnswIndex.entries.set(String(id), {
|
|
427
|
-
id: String(id),
|
|
428
|
-
key: key || String(id),
|
|
429
|
-
namespace: ns || 'default',
|
|
430
|
-
content: content || ''
|
|
431
|
-
});
|
|
432
|
-
}
|
|
433
|
-
catch {
|
|
434
|
-
// Skip invalid embeddings
|
|
435
|
-
}
|
|
436
|
-
}
|
|
437
|
-
}
|
|
438
|
-
}
|
|
439
|
-
sqlDb.close();
|
|
440
|
-
}
|
|
441
|
-
catch {
|
|
442
|
-
// SQLite load failed, start with empty index
|
|
443
|
-
}
|
|
444
|
-
}
|
|
445
|
-
hnswIndex.initialized = true;
|
|
446
|
-
hnswInitializing = false;
|
|
447
|
-
return hnswIndex;
|
|
448
|
-
}
|
|
449
|
-
catch {
|
|
450
|
-
hnswInitializing = false;
|
|
451
|
-
return null;
|
|
452
|
-
}
|
|
453
|
-
}
|
|
454
|
-
/**
|
|
455
|
-
* Save HNSW metadata to disk for persistence
|
|
456
|
-
*/
|
|
457
|
-
function saveHNSWMetadata() {
|
|
458
|
-
if (!hnswIndex?.entries)
|
|
459
|
-
return;
|
|
460
|
-
try {
|
|
461
|
-
const swarmDir = path.join(
|
|
462
|
-
const metadataPath = path.join(swarmDir, 'hnsw.metadata.json');
|
|
463
|
-
const metadata = Array.from(hnswIndex.entries.entries());
|
|
464
|
-
fs.writeFileSync(metadataPath, JSON.stringify(metadata));
|
|
465
|
-
}
|
|
466
|
-
catch {
|
|
467
|
-
// Silently fail - metadata save is best-effort
|
|
468
|
-
}
|
|
469
|
-
}
|
|
470
|
-
/**
|
|
471
|
-
* Add entry to HNSW index (with automatic persistence)
|
|
472
|
-
*/
|
|
473
|
-
export async function addToHNSWIndex(id, embedding, entry) {
|
|
474
|
-
// ADR-053: Try AgentDB v3 bridge first
|
|
475
|
-
const bridge = await getBridge();
|
|
476
|
-
if (bridge) {
|
|
477
|
-
const bridgeResult = await bridge.bridgeAddToHNSW(id, embedding, entry);
|
|
478
|
-
if (bridgeResult === true)
|
|
479
|
-
return true;
|
|
480
|
-
}
|
|
481
|
-
const index = await getHNSWIndex({ dimensions: embedding.length });
|
|
482
|
-
if (!index)
|
|
483
|
-
return false;
|
|
484
|
-
try {
|
|
485
|
-
const vector = new Float32Array(embedding);
|
|
486
|
-
await index.db.insert({
|
|
487
|
-
id,
|
|
488
|
-
vector
|
|
489
|
-
});
|
|
490
|
-
index.entries.set(id, entry);
|
|
491
|
-
// Save metadata for persistence (debounced would be better for high-volume)
|
|
492
|
-
saveHNSWMetadata();
|
|
493
|
-
return true;
|
|
494
|
-
}
|
|
495
|
-
catch {
|
|
496
|
-
return false;
|
|
497
|
-
}
|
|
498
|
-
}
|
|
499
|
-
/**
|
|
500
|
-
* Search HNSW index (150x faster than brute-force)
|
|
501
|
-
* Returns results sorted by similarity (highest first)
|
|
502
|
-
*/
|
|
503
|
-
export async function searchHNSWIndex(queryEmbedding, options) {
|
|
504
|
-
// ADR-053: Try AgentDB v3 bridge first
|
|
505
|
-
const bridge = await getBridge();
|
|
506
|
-
if (bridge) {
|
|
507
|
-
const bridgeResult = await bridge.bridgeSearchHNSW(queryEmbedding, options);
|
|
508
|
-
if (bridgeResult)
|
|
509
|
-
return bridgeResult;
|
|
510
|
-
}
|
|
511
|
-
const index = await getHNSWIndex({ dimensions: queryEmbedding.length });
|
|
512
|
-
if (!index)
|
|
513
|
-
return null;
|
|
514
|
-
try {
|
|
515
|
-
const vector = new Float32Array(queryEmbedding);
|
|
516
|
-
const k = options?.k ?? 10;
|
|
517
|
-
// HNSW search returns results with cosine distance (lower = more similar)
|
|
518
|
-
const results = await index.db.search({ vector, k: k * 2 }); // Get extra for filtering
|
|
519
|
-
const filtered = [];
|
|
520
|
-
for (const result of results) {
|
|
521
|
-
const entry = index.entries.get(result.id);
|
|
522
|
-
if (!entry)
|
|
523
|
-
continue;
|
|
524
|
-
// Filter by namespace if specified
|
|
525
|
-
if (options?.namespace && options.namespace !== 'all' && entry.namespace !== options.namespace) {
|
|
526
|
-
continue;
|
|
527
|
-
}
|
|
528
|
-
// Convert cosine distance to similarity score (1 - distance)
|
|
529
|
-
// Cosine distance from @ruvector/core: 0 = identical, 2 = opposite
|
|
530
|
-
const score = 1 - (result.score / 2);
|
|
531
|
-
filtered.push({
|
|
532
|
-
id: entry.id.substring(0, 12),
|
|
533
|
-
key: entry.key || entry.id.substring(0, 15),
|
|
534
|
-
content: entry.content.substring(0, 60) + (entry.content.length > 60 ? '...' : ''),
|
|
535
|
-
score,
|
|
536
|
-
namespace: entry.namespace
|
|
537
|
-
});
|
|
538
|
-
if (filtered.length >= k)
|
|
539
|
-
break;
|
|
540
|
-
}
|
|
541
|
-
// Sort by score descending (highest similarity first)
|
|
542
|
-
filtered.sort((a, b) => b.score - a.score);
|
|
543
|
-
return filtered;
|
|
544
|
-
}
|
|
545
|
-
catch {
|
|
546
|
-
return null;
|
|
547
|
-
}
|
|
548
|
-
}
|
|
549
|
-
/**
|
|
550
|
-
* Get HNSW index status
|
|
551
|
-
*/
|
|
552
|
-
export function getHNSWStatus() {
|
|
553
|
-
// ADR-053: If bridge was previously loaded, report availability
|
|
554
|
-
if (_bridge && _bridge !== null) {
|
|
555
|
-
// Bridge is loaded — HNSW-equivalent is available via AgentDB v3
|
|
556
|
-
return {
|
|
557
|
-
available: true,
|
|
558
|
-
initialized: true,
|
|
559
|
-
entryCount: hnswIndex?.entries.size ?? 0,
|
|
560
|
-
dimensions: hnswIndex?.dimensions ?? 384
|
|
561
|
-
};
|
|
562
|
-
}
|
|
563
|
-
return {
|
|
564
|
-
available: hnswIndex !== null,
|
|
565
|
-
initialized: hnswIndex?.initialized ?? false,
|
|
566
|
-
entryCount: hnswIndex?.entries.size ?? 0,
|
|
567
|
-
dimensions: hnswIndex?.dimensions ?? 384
|
|
568
|
-
};
|
|
569
|
-
}
|
|
570
|
-
/**
|
|
571
|
-
* Clear the HNSW index (for rebuilding)
|
|
572
|
-
*/
|
|
573
|
-
export function clearHNSWIndex() {
|
|
574
|
-
hnswIndex = null;
|
|
575
|
-
}
|
|
576
|
-
// ============================================================================
|
|
577
|
-
// INT8 VECTOR QUANTIZATION (4x memory reduction)
|
|
578
|
-
// ============================================================================
|
|
579
|
-
/**
|
|
580
|
-
* Quantize a Float32 embedding to Int8 (4x memory reduction)
|
|
581
|
-
* Uses symmetric quantization with scale factor stored per-vector
|
|
582
|
-
*
|
|
583
|
-
* @param embedding - Float32 embedding array
|
|
584
|
-
* @returns Quantized Int8 array with scale factor
|
|
585
|
-
*/
|
|
586
|
-
export function quantizeInt8(embedding) {
|
|
587
|
-
const arr = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);
|
|
588
|
-
// Find min/max for symmetric quantization
|
|
589
|
-
let min = Infinity, max = -Infinity;
|
|
590
|
-
for (let i = 0; i < arr.length; i++) {
|
|
591
|
-
if (arr[i] < min)
|
|
592
|
-
min = arr[i];
|
|
593
|
-
if (arr[i] > max)
|
|
594
|
-
max = arr[i];
|
|
595
|
-
}
|
|
596
|
-
// Symmetric quantization: scale = max(|min|, |max|) / 127
|
|
597
|
-
const absMax = Math.max(Math.abs(min), Math.abs(max));
|
|
598
|
-
const scale = absMax / 127 || 1e-10; // Avoid division by zero
|
|
599
|
-
const zeroPoint = 0; // Symmetric quantization
|
|
600
|
-
// Quantize
|
|
601
|
-
const quantized = new Int8Array(arr.length);
|
|
602
|
-
for (let i = 0; i < arr.length; i++) {
|
|
603
|
-
// Clamp to [-127, 127] to leave room for potential rounding
|
|
604
|
-
const q = Math.round(arr[i] / scale);
|
|
605
|
-
quantized[i] = Math.max(-127, Math.min(127, q));
|
|
606
|
-
}
|
|
607
|
-
return { quantized, scale, zeroPoint };
|
|
608
|
-
}
|
|
609
|
-
/**
|
|
610
|
-
* Dequantize Int8 back to Float32
|
|
611
|
-
*
|
|
612
|
-
* @param quantized - Int8 quantized array
|
|
613
|
-
* @param scale - Scale factor from quantization
|
|
614
|
-
* @param zeroPoint - Zero point (usually 0 for symmetric)
|
|
615
|
-
* @returns Float32Array
|
|
616
|
-
*/
|
|
617
|
-
export function dequantizeInt8(quantized, scale, zeroPoint = 0) {
|
|
618
|
-
const result = new Float32Array(quantized.length);
|
|
619
|
-
for (let i = 0; i < quantized.length; i++) {
|
|
620
|
-
result[i] = (quantized[i] - zeroPoint) * scale;
|
|
621
|
-
}
|
|
622
|
-
return result;
|
|
623
|
-
}
|
|
624
|
-
/**
|
|
625
|
-
* Compute cosine similarity between quantized vectors
|
|
626
|
-
* Faster than dequantizing first
|
|
627
|
-
*/
|
|
628
|
-
export function quantizedCosineSim(a, aScale, b, bScale) {
|
|
629
|
-
if (a.length !== b.length)
|
|
630
|
-
return 0;
|
|
631
|
-
let dot = 0, normA = 0, normB = 0;
|
|
632
|
-
for (let i = 0; i < a.length; i++) {
|
|
633
|
-
dot += a[i] * b[i];
|
|
634
|
-
normA += a[i] * a[i];
|
|
635
|
-
normB += b[i] * b[i];
|
|
636
|
-
}
|
|
637
|
-
// Scales cancel out in cosine similarity for normalized vectors
|
|
638
|
-
const mag = Math.sqrt(normA * normB);
|
|
639
|
-
return mag === 0 ? 0 : dot / mag;
|
|
640
|
-
}
|
|
641
|
-
/**
|
|
642
|
-
* Get quantization statistics for an embedding
|
|
643
|
-
*/
|
|
644
|
-
export function getQuantizationStats(embedding) {
|
|
645
|
-
const len = embedding.length;
|
|
646
|
-
const originalBytes = len * 4; // Float32 = 4 bytes
|
|
647
|
-
const quantizedBytes = len + 8; // Int8 = 1 byte + 8 bytes for scale/zeroPoint
|
|
648
|
-
const compressionRatio = originalBytes / quantizedBytes;
|
|
649
|
-
return { originalBytes, quantizedBytes, compressionRatio };
|
|
650
|
-
}
|
|
651
|
-
// ============================================================================
|
|
652
|
-
// FLASH ATTENTION-STYLE BATCH OPERATIONS (V8-Optimized)
|
|
653
|
-
// ============================================================================
|
|
654
|
-
/**
|
|
655
|
-
* Batch cosine similarity - compute query against multiple vectors
|
|
656
|
-
* Optimized for V8 JIT with typed arrays
|
|
657
|
-
* ~50μs per 1000 vectors (384-dim)
|
|
658
|
-
*/
|
|
659
|
-
export function batchCosineSim(query, vectors) {
|
|
660
|
-
const n = vectors.length;
|
|
661
|
-
const scores = new Float32Array(n);
|
|
662
|
-
if (n === 0 || query.length === 0)
|
|
663
|
-
return scores;
|
|
664
|
-
// Pre-compute query norm
|
|
665
|
-
let queryNorm = 0;
|
|
666
|
-
for (let i = 0; i < query.length; i++) {
|
|
667
|
-
queryNorm += query[i] * query[i];
|
|
668
|
-
}
|
|
669
|
-
queryNorm = Math.sqrt(queryNorm);
|
|
670
|
-
if (queryNorm === 0)
|
|
671
|
-
return scores;
|
|
672
|
-
// Compute similarities
|
|
673
|
-
for (let v = 0; v < n; v++) {
|
|
674
|
-
const vec = vectors[v];
|
|
675
|
-
const len = Math.min(query.length, vec.length);
|
|
676
|
-
let dot = 0, vecNorm = 0;
|
|
677
|
-
for (let i = 0; i < len; i++) {
|
|
678
|
-
dot += query[i] * vec[i];
|
|
679
|
-
vecNorm += vec[i] * vec[i];
|
|
680
|
-
}
|
|
681
|
-
vecNorm = Math.sqrt(vecNorm);
|
|
682
|
-
scores[v] = vecNorm === 0 ? 0 : dot / (queryNorm * vecNorm);
|
|
683
|
-
}
|
|
684
|
-
return scores;
|
|
685
|
-
}
|
|
686
|
-
/**
|
|
687
|
-
* Softmax normalization for attention scores
|
|
688
|
-
* Numerically stable implementation
|
|
689
|
-
*/
|
|
690
|
-
export function softmaxAttention(scores, temperature = 1.0) {
|
|
691
|
-
const n = scores.length;
|
|
692
|
-
const result = new Float32Array(n);
|
|
693
|
-
if (n === 0)
|
|
694
|
-
return result;
|
|
695
|
-
// Find max for numerical stability
|
|
696
|
-
let max = scores[0];
|
|
697
|
-
for (let i = 1; i < n; i++) {
|
|
698
|
-
if (scores[i] > max)
|
|
699
|
-
max = scores[i];
|
|
700
|
-
}
|
|
701
|
-
// Compute exp and sum
|
|
702
|
-
let sum = 0;
|
|
703
|
-
for (let i = 0; i < n; i++) {
|
|
704
|
-
result[i] = Math.exp((scores[i] - max) / temperature);
|
|
705
|
-
sum += result[i];
|
|
706
|
-
}
|
|
707
|
-
// Normalize
|
|
708
|
-
if (sum > 0) {
|
|
709
|
-
for (let i = 0; i < n; i++) {
|
|
710
|
-
result[i] /= sum;
|
|
711
|
-
}
|
|
712
|
-
}
|
|
713
|
-
return result;
|
|
714
|
-
}
|
|
715
|
-
/**
|
|
716
|
-
* Top-K selection with partial sort (O(n + k log k))
|
|
717
|
-
* More efficient than full sort for small k
|
|
718
|
-
*/
|
|
719
|
-
export function topKIndices(scores, k) {
|
|
720
|
-
const n = scores.length;
|
|
721
|
-
if (k >= n) {
|
|
722
|
-
// Return all indices sorted by score
|
|
723
|
-
return Array.from({ length: n }, (_, i) => i)
|
|
724
|
-
.sort((a, b) => scores[b] - scores[a]);
|
|
725
|
-
}
|
|
726
|
-
// Build min-heap of size k
|
|
727
|
-
const heap = [];
|
|
728
|
-
for (let i = 0; i < n; i++) {
|
|
729
|
-
if (heap.length < k) {
|
|
730
|
-
heap.push({ idx: i, score: scores[i] });
|
|
731
|
-
// Bubble up
|
|
732
|
-
let j = heap.length - 1;
|
|
733
|
-
while (j > 0) {
|
|
734
|
-
const parent = Math.floor((j - 1) / 2);
|
|
735
|
-
if (heap[j].score < heap[parent].score) {
|
|
736
|
-
[heap[j], heap[parent]] = [heap[parent], heap[j]];
|
|
737
|
-
j = parent;
|
|
738
|
-
}
|
|
739
|
-
else
|
|
740
|
-
break;
|
|
741
|
-
}
|
|
742
|
-
}
|
|
743
|
-
else if (scores[i] > heap[0].score) {
|
|
744
|
-
// Replace min and heapify down
|
|
745
|
-
heap[0] = { idx: i, score: scores[i] };
|
|
746
|
-
let j = 0;
|
|
747
|
-
while (true) {
|
|
748
|
-
const left = 2 * j + 1, right = 2 * j + 2;
|
|
749
|
-
let smallest = j;
|
|
750
|
-
if (left < k && heap[left].score < heap[smallest].score)
|
|
751
|
-
smallest = left;
|
|
752
|
-
if (right < k && heap[right].score < heap[smallest].score)
|
|
753
|
-
smallest = right;
|
|
754
|
-
if (smallest === j)
|
|
755
|
-
break;
|
|
756
|
-
[heap[j], heap[smallest]] = [heap[smallest], heap[j]];
|
|
757
|
-
j = smallest;
|
|
758
|
-
}
|
|
759
|
-
}
|
|
760
|
-
}
|
|
761
|
-
// Extract and sort descending
|
|
762
|
-
return heap.sort((a, b) => b.score - a.score).map(h => h.idx);
|
|
763
|
-
}
|
|
764
|
-
/**
|
|
765
|
-
* Flash Attention-style search
|
|
766
|
-
* Combines batch similarity, softmax, and top-k in one pass
|
|
767
|
-
* Returns indices and attention weights
|
|
768
|
-
*/
|
|
769
|
-
export function flashAttentionSearch(query, vectors, options = {}) {
|
|
770
|
-
const { k = 10, temperature = 1.0, threshold = 0 } = options;
|
|
771
|
-
// Compute batch similarity
|
|
772
|
-
const scores = batchCosineSim(query, vectors);
|
|
773
|
-
// Get top-k indices
|
|
774
|
-
const indices = topKIndices(scores, k);
|
|
775
|
-
// Filter by threshold
|
|
776
|
-
const filtered = indices.filter(i => scores[i] >= threshold);
|
|
777
|
-
// Extract scores for filtered results
|
|
778
|
-
const topScores = new Float32Array(filtered.length);
|
|
779
|
-
for (let i = 0; i < filtered.length; i++) {
|
|
780
|
-
topScores[i] = scores[filtered[i]];
|
|
781
|
-
}
|
|
782
|
-
// Compute attention weights (softmax over top-k)
|
|
783
|
-
const weights = softmaxAttention(topScores, temperature);
|
|
784
|
-
return { indices: filtered, scores: topScores, weights };
|
|
785
|
-
}
|
|
786
|
-
// ============================================================================
|
|
787
|
-
// METADATA AND INITIALIZATION
|
|
788
|
-
// ============================================================================
|
|
789
|
-
/**
|
|
790
|
-
* Initial metadata to insert after schema creation
|
|
791
|
-
*/
|
|
792
|
-
export function getInitialMetadata(backend) {
|
|
465
|
+
`);
|
|
466
|
+
if (result[0]?.values) {
|
|
467
|
+
for (const row of result[0].values) {
|
|
468
|
+
const [id, key, ns, content, embeddingJson] = row;
|
|
469
|
+
if (embeddingJson) {
|
|
470
|
+
try {
|
|
471
|
+
const embedding = JSON.parse(embeddingJson);
|
|
472
|
+
const vector = new Float32Array(embedding);
|
|
473
|
+
await db.insert({
|
|
474
|
+
id: String(id),
|
|
475
|
+
vector
|
|
476
|
+
});
|
|
477
|
+
hnswIndex.entries.set(String(id), {
|
|
478
|
+
id: String(id),
|
|
479
|
+
key: key || String(id),
|
|
480
|
+
namespace: ns || 'default',
|
|
481
|
+
content: content || ''
|
|
482
|
+
});
|
|
483
|
+
}
|
|
484
|
+
catch {
|
|
485
|
+
// Skip invalid embeddings
|
|
486
|
+
}
|
|
487
|
+
}
|
|
488
|
+
}
|
|
489
|
+
}
|
|
490
|
+
sqlDb.close();
|
|
491
|
+
}
|
|
492
|
+
catch {
|
|
493
|
+
// SQLite load failed, start with empty index
|
|
494
|
+
}
|
|
495
|
+
}
|
|
496
|
+
hnswIndex.initialized = true;
|
|
497
|
+
hnswInitializing = false;
|
|
498
|
+
return hnswIndex;
|
|
499
|
+
}
|
|
500
|
+
catch {
|
|
501
|
+
hnswInitializing = false;
|
|
502
|
+
return null;
|
|
503
|
+
}
|
|
504
|
+
}
|
|
505
|
+
/**
|
|
506
|
+
* Save HNSW metadata to disk for persistence
|
|
507
|
+
*/
|
|
508
|
+
/**
 * Persist the in-memory HNSW id→entry metadata map to
 * `<projectRoot>/.swarm/hnsw.metadata.json`.
 *
 * Best-effort by design: any filesystem failure is deliberately swallowed
 * so that metadata persistence can never break an insert path.
 */
function saveHNSWMetadata() {
    const entryMap = hnswIndex?.entries;
    if (!entryMap) {
        return;
    }
    try {
        const targetPath = path.join(getProjectRoot(), '.swarm', 'hnsw.metadata.json');
        // Serialize the Map as an array of [id, entry] pairs.
        fs.writeFileSync(targetPath, JSON.stringify([...entryMap.entries()]));
    }
    catch {
        // Best-effort persistence: ignore write failures.
    }
}
|
|
521
|
+
/**
|
|
522
|
+
* Add entry to HNSW index (with automatic persistence)
|
|
523
|
+
*/
|
|
524
|
+
/**
 * Add an entry to the HNSW index (with automatic metadata persistence).
 *
 * Order of operations matters: the AgentDB v3 bridge (ADR-053) is tried
 * first; only when it is absent or declines does the local index take over.
 *
 * @param {string} id - Unique id for the vector.
 * @param {number[]|Float32Array} embedding - Embedding values; its length
 *   selects the index dimensionality on first use.
 * @param {object} entry - Metadata stored alongside the vector (key,
 *   namespace, content — see searchHNSWIndex for how it is read back).
 * @returns {Promise<boolean>} true when inserted (via bridge or local
 *   index), false when no index is available or the insert threw.
 */
export async function addToHNSWIndex(id, embedding, entry) {
    // ADR-053: Try AgentDB v3 bridge first
    const bridge = await getBridge();
    if (bridge) {
        const bridgeResult = await bridge.bridgeAddToHNSW(id, embedding, entry);
        // Strict === true: any other return falls through to the local index.
        if (bridgeResult === true)
            return true;
    }
    // Lazily create/load the local index sized to this embedding.
    const index = await getHNSWIndex({ dimensions: embedding.length });
    if (!index)
        return false;
    try {
        const vector = new Float32Array(embedding);
        await index.db.insert({
            id,
            vector
        });
        // Keep the sidecar metadata map in sync with the vector store.
        index.entries.set(id, entry);
        // Save metadata for persistence (debounced would be better for high-volume)
        saveHNSWMetadata();
        return true;
    }
    catch {
        // Insert failures are reported via the boolean result, not thrown.
        return false;
    }
}
|
|
550
|
+
/**
|
|
551
|
+
* Search HNSW index (150x faster than brute-force)
|
|
552
|
+
* Returns results sorted by similarity (highest first)
|
|
553
|
+
*/
|
|
554
|
+
/**
 * Search HNSW index (150x faster than brute-force).
 * Returns results sorted by similarity (highest first).
 *
 * @param {number[]|Float32Array} queryEmbedding - Query vector; its length
 *   selects the index dimensionality.
 * @param {{k?: number, namespace?: string}} [options] - k: max results
 *   (default 10); namespace: restrict results to one namespace ('all' or
 *   omitted disables the filter).
 * @returns {Promise<Array<{id: string, key: string, content: string,
 *   score: number, namespace: string}>|null>} Truncated display-oriented
 *   results (id to 12 chars, content to 60 chars), or null when no index
 *   is available or the search threw.
 */
export async function searchHNSWIndex(queryEmbedding, options) {
    // ADR-053: Try AgentDB v3 bridge first
    const bridge = await getBridge();
    if (bridge) {
        const bridgeResult = await bridge.bridgeSearchHNSW(queryEmbedding, options);
        if (bridgeResult)
            return bridgeResult;
    }
    const index = await getHNSWIndex({ dimensions: queryEmbedding.length });
    if (!index)
        return null;
    try {
        const vector = new Float32Array(queryEmbedding);
        const k = options?.k ?? 10;
        // HNSW search returns results with cosine distance (lower = more similar)
        const results = await index.db.search({ vector, k: k * 2 }); // Get extra for filtering
        const filtered = [];
        for (const result of results) {
            // Entries missing from the sidecar metadata map are dropped.
            const entry = index.entries.get(result.id);
            if (!entry)
                continue;
            // Filter by namespace if specified
            if (options?.namespace && options.namespace !== 'all' && entry.namespace !== options.namespace) {
                continue;
            }
            // Convert cosine distance to similarity score: 1 - distance/2
            // Cosine distance from @ruvector/core: 0 = identical, 2 = opposite,
            // so the score maps onto [0, 1] with 1 = identical.
            const score = 1 - (result.score / 2);
            filtered.push({
                id: entry.id.substring(0, 12),
                key: entry.key || entry.id.substring(0, 15),
                content: entry.content.substring(0, 60) + (entry.content.length > 60 ? '...' : ''),
                score,
                namespace: entry.namespace
            });
            // Stop once k results survive filtering (we fetched k*2 candidates).
            if (filtered.length >= k)
                break;
        }
        // Sort by score descending (highest similarity first)
        filtered.sort((a, b) => b.score - a.score);
        return filtered;
    }
    catch {
        // Search failures degrade to "no results" rather than throwing.
        return null;
    }
}
|
|
600
|
+
/**
|
|
601
|
+
* Get HNSW index status
|
|
602
|
+
*/
|
|
603
|
+
/**
 * Get HNSW index status.
 *
 * When the AgentDB v3 bridge has been loaded (ADR-053), an HNSW-equivalent
 * is available through it regardless of the local index state, so
 * available/initialized are forced to true. Otherwise the local
 * module-level `hnswIndex` singleton is reported.
 *
 * @returns {{available: boolean, initialized: boolean, entryCount: number,
 *   dimensions: number}} Snapshot of index availability. `dimensions`
 *   defaults to 384 when no index exists.
 */
export function getHNSWStatus() {
    // Shared fields reported in both branches (deduplicated from the
    // original, which repeated the literal; `?? 384` matches the default
    // used elsewhere in this module).
    const entryCount = hnswIndex?.entries.size ?? 0;
    const dimensions = hnswIndex?.dimensions ?? 384;
    // ADR-053: if the bridge was previously loaded, HNSW-equivalent search
    // is available via AgentDB v3. (`_bridge !== null` was redundant next
    // to the truthiness check and has been removed.)
    if (_bridge) {
        return {
            available: true,
            initialized: true,
            entryCount,
            dimensions
        };
    }
    return {
        available: hnswIndex !== null,
        initialized: hnswIndex?.initialized ?? false,
        entryCount,
        dimensions
    };
}
|
|
621
|
+
/**
|
|
622
|
+
* Clear the HNSW index (for rebuilding)
|
|
623
|
+
*/
|
|
624
|
+
/**
 * Clear the HNSW index (for rebuilding).
 *
 * Drops the module-level singleton; the next call that needs the index
 * will lazily re-create/reload it. Does not touch on-disk metadata.
 */
export function clearHNSWIndex() {
    hnswIndex = null;
}
|
|
627
|
+
// ============================================================================
|
|
628
|
+
// INT8 VECTOR QUANTIZATION (4x memory reduction)
|
|
629
|
+
// ============================================================================
|
|
630
|
+
/**
|
|
631
|
+
* Quantize a Float32 embedding to Int8 (4x memory reduction)
|
|
632
|
+
* Uses symmetric quantization with scale factor stored per-vector
|
|
633
|
+
*
|
|
634
|
+
* @param embedding - Float32 embedding array
|
|
635
|
+
* @returns Quantized Int8 array with scale factor
|
|
636
|
+
*/
|
|
637
|
+
/**
 * Quantize a Float32 embedding to Int8 (4x memory reduction).
 * Uses symmetric quantization with a scale factor stored per-vector.
 *
 * @param {Float32Array|number[]} embedding - Float32 embedding array.
 * @returns {{quantized: Int8Array, scale: number, zeroPoint: number}}
 *   Quantized Int8 array with scale factor (zeroPoint is always 0 for
 *   symmetric quantization).
 */
export function quantizeInt8(embedding) {
    const arr = embedding instanceof Float32Array ? embedding : new Float32Array(embedding);
    // Fix: an empty input previously produced scale = Infinity because
    // min/max were never updated from +/-Infinity. Return a sane no-op
    // result instead (scale matches the division-by-zero sentinel below).
    if (arr.length === 0) {
        return { quantized: new Int8Array(0), scale: 1e-10, zeroPoint: 0 };
    }
    // Find min/max for symmetric quantization
    let min = Infinity, max = -Infinity;
    for (let i = 0; i < arr.length; i++) {
        if (arr[i] < min)
            min = arr[i];
        if (arr[i] > max)
            max = arr[i];
    }
    // Symmetric quantization: scale = max(|min|, |max|) / 127
    const absMax = Math.max(Math.abs(min), Math.abs(max));
    const scale = absMax / 127 || 1e-10; // Avoid division by zero
    const zeroPoint = 0; // Symmetric quantization
    // Quantize each element: round(value / scale), clamped to the int8 range.
    const quantized = new Int8Array(arr.length);
    for (let i = 0; i < arr.length; i++) {
        // Clamp to [-127, 127] to leave room for potential rounding
        const q = Math.round(arr[i] / scale);
        quantized[i] = Math.max(-127, Math.min(127, q));
    }
    return { quantized, scale, zeroPoint };
}
|
|
660
|
+
/**
|
|
661
|
+
* Dequantize Int8 back to Float32
|
|
662
|
+
*
|
|
663
|
+
* @param quantized - Int8 quantized array
|
|
664
|
+
* @param scale - Scale factor from quantization
|
|
665
|
+
* @param zeroPoint - Zero point (usually 0 for symmetric)
|
|
666
|
+
* @returns Float32Array
|
|
667
|
+
*/
|
|
668
|
+
/**
 * Dequantize Int8 back to Float32.
 *
 * @param {Int8Array} quantized - Int8 quantized array.
 * @param {number} scale - Scale factor from quantization.
 * @param {number} [zeroPoint=0] - Zero point (usually 0 for symmetric).
 * @returns {Float32Array} Reconstructed (lossy) Float32 values.
 */
export function dequantizeInt8(quantized, scale, zeroPoint = 0) {
    // Element-wise inverse of the quantization mapping.
    return Float32Array.from(quantized, (q) => (q - zeroPoint) * scale);
}
|
|
675
|
+
/**
|
|
676
|
+
* Compute cosine similarity between quantized vectors
|
|
677
|
+
* Faster than dequantizing first
|
|
678
|
+
*/
|
|
679
|
+
/**
 * Compute cosine similarity between quantized vectors.
 * Faster than dequantizing first.
 *
 * @param {Int8Array} a - First quantized vector.
 * @param {number} aScale - Scale of `a` (unused: per-vector scales cancel
 *   out of the cosine ratio, so the raw int8 values suffice).
 * @param {Int8Array} b - Second quantized vector.
 * @param {number} bScale - Scale of `b` (unused, see aScale).
 * @returns {number} Cosine similarity in [-1, 1]; 0 on length mismatch or
 *   when either vector has zero magnitude.
 */
export function quantizedCosineSim(a, aScale, b, bScale) {
    if (a.length !== b.length) {
        return 0;
    }
    let dot = 0;
    let sqA = 0;
    let sqB = 0;
    for (let i = 0; i < a.length; i += 1) {
        const x = a[i];
        const y = b[i];
        dot += x * y;
        sqA += x * x;
        sqB += y * y;
    }
    const denom = Math.sqrt(sqA * sqB);
    return denom === 0 ? 0 : dot / denom;
}
|
|
692
|
+
/**
|
|
693
|
+
* Get quantization statistics for an embedding
|
|
694
|
+
*/
|
|
695
|
+
/**
 * Get quantization statistics for an embedding.
 *
 * @param {Float32Array|number[]} embedding - Vector to size up.
 * @returns {{originalBytes: number, quantizedBytes: number,
 *   compressionRatio: number}} Byte footprints before/after int8
 *   quantization and their ratio.
 */
export function getQuantizationStats(embedding) {
    const originalBytes = embedding.length * 4; // Float32: 4 bytes/element
    // Int8: 1 byte/element plus 8 bytes overhead for scale + zeroPoint.
    const quantizedBytes = embedding.length + 8;
    return {
        originalBytes,
        quantizedBytes,
        compressionRatio: originalBytes / quantizedBytes,
    };
}
|
|
702
|
+
// ============================================================================
|
|
703
|
+
// FLASH ATTENTION-STYLE BATCH OPERATIONS (V8-Optimized)
|
|
704
|
+
// ============================================================================
|
|
705
|
+
/**
|
|
706
|
+
* Batch cosine similarity - compute query against multiple vectors
|
|
707
|
+
* Optimized for V8 JIT with typed arrays
|
|
708
|
+
* ~50μs per 1000 vectors (384-dim)
|
|
709
|
+
*/
|
|
710
|
+
/**
 * Batch cosine similarity - compute query against multiple vectors.
 * Optimized for V8 JIT with typed arrays (~50μs per 1000 vectors, 384-dim).
 *
 * @param {Float32Array|number[]} query - Query vector.
 * @param {Array<Float32Array|number[]>} vectors - Candidate vectors; each
 *   is compared over min(query.length, vec.length) dimensions.
 * @returns {Float32Array} One similarity per candidate; all zeros when
 *   the query (or candidate) has zero magnitude or inputs are empty.
 */
export function batchCosineSim(query, vectors) {
    const count = vectors.length;
    const scores = new Float32Array(count);
    if (count === 0 || query.length === 0) {
        return scores;
    }
    // Query norm is loop-invariant: compute it once up front.
    let qSq = 0;
    for (let i = 0; i < query.length; i += 1) {
        qSq += query[i] * query[i];
    }
    const queryNorm = Math.sqrt(qSq);
    if (queryNorm === 0) {
        return scores;
    }
    vectors.forEach((vec, v) => {
        const dims = Math.min(query.length, vec.length);
        let dot = 0;
        let vSq = 0;
        for (let i = 0; i < dims; i += 1) {
            dot += query[i] * vec[i];
            vSq += vec[i] * vec[i];
        }
        const vecNorm = Math.sqrt(vSq);
        scores[v] = vecNorm === 0 ? 0 : dot / (queryNorm * vecNorm);
    });
    return scores;
}
|
|
737
|
+
/**
|
|
738
|
+
* Softmax normalization for attention scores
|
|
739
|
+
* Numerically stable implementation
|
|
740
|
+
*/
|
|
741
|
+
/**
 * Softmax normalization for attention scores.
 * Numerically stable implementation (max-subtraction trick).
 *
 * @param {Float32Array} scores - Raw attention scores.
 * @param {number} [temperature=1.0] - Softmax temperature; higher flattens
 *   the distribution.
 * @returns {Float32Array} Weights summing to ~1 (all zeros for empty
 *   input or when the exponential sum is non-positive).
 */
export function softmaxAttention(scores, temperature = 1.0) {
    const n = scores.length;
    const weights = new Float32Array(n);
    if (n === 0) {
        return weights;
    }
    // Subtracting the max keeps exp() from overflowing.
    let peak = scores[0];
    for (let i = 1; i < n; i += 1) {
        if (scores[i] > peak) {
            peak = scores[i];
        }
    }
    // Exponentiate and accumulate the normalizer in one pass.
    let total = 0;
    for (let i = 0; i < n; i += 1) {
        weights[i] = Math.exp((scores[i] - peak) / temperature);
        total += weights[i];
    }
    if (total > 0) {
        for (let i = 0; i < n; i += 1) {
            weights[i] /= total;
        }
    }
    return weights;
}
|
|
766
|
+
/**
|
|
767
|
+
* Top-K selection with partial sort (O(n + k log k))
|
|
768
|
+
* More efficient than full sort for small k
|
|
769
|
+
*/
|
|
770
|
+
/**
 * Top-K selection with partial sort (O(n + k log k)).
 * More efficient than full sort for small k.
 *
 * @param {Float32Array|number[]} scores - Scores to rank.
 * @param {number} k - Number of indices to return.
 * @returns {number[]} Indices of the k highest scores, ordered by score
 *   descending. Tie ordering among equal scores is unspecified.
 */
export function topKIndices(scores, k) {
    const n = scores.length;
    if (k >= n) {
        // Return all indices sorted by score
        return Array.from({ length: n }, (_, i) => i)
            .sort((a, b) => scores[b] - scores[a]);
    }
    // Build min-heap of size k: heap[0] is always the smallest kept score,
    // so evicting it keeps the k largest seen so far.
    const heap = [];
    for (let i = 0; i < n; i++) {
        if (heap.length < k) {
            heap.push({ idx: i, score: scores[i] });
            // Bubble up: restore the min-heap property from the new leaf.
            let j = heap.length - 1;
            while (j > 0) {
                const parent = Math.floor((j - 1) / 2);
                if (heap[j].score < heap[parent].score) {
                    [heap[j], heap[parent]] = [heap[parent], heap[j]];
                    j = parent;
                }
                else
                    break;
            }
        }
        else if (scores[i] > heap[0].score) {
            // Replace min and heapify down (strict >: ties keep the incumbent).
            heap[0] = { idx: i, score: scores[i] };
            let j = 0;
            while (true) {
                const left = 2 * j + 1, right = 2 * j + 2;
                let smallest = j;
                // Bound children by k: the heap is full in this branch.
                if (left < k && heap[left].score < heap[smallest].score)
                    smallest = left;
                if (right < k && heap[right].score < heap[smallest].score)
                    smallest = right;
                if (smallest === j)
                    break;
                [heap[j], heap[smallest]] = [heap[smallest], heap[j]];
                j = smallest;
            }
        }
    }
    // Extract and sort descending
    return heap.sort((a, b) => b.score - a.score).map(h => h.idx);
}
|
|
815
|
+
/**
|
|
816
|
+
* Flash Attention-style search
|
|
817
|
+
* Combines batch similarity, softmax, and top-k in one pass
|
|
818
|
+
* Returns indices and attention weights
|
|
819
|
+
*/
|
|
820
|
+
/**
 * Flash Attention-style search.
 * Combines batch similarity, softmax, and top-k in one pass.
 *
 * @param {Float32Array|number[]} query - Query vector.
 * @param {Array<Float32Array|number[]>} vectors - Candidate vectors.
 * @param {{k?: number, temperature?: number, threshold?: number}} [options]
 *   k: max results (default 10); temperature: softmax temperature
 *   (default 1.0); threshold: minimum similarity to keep (default 0).
 * @returns {{indices: number[], scores: Float32Array, weights: Float32Array}}
 *   Kept candidate indices with their raw similarities and softmax
 *   attention weights.
 */
export function flashAttentionSearch(query, vectors, options = {}) {
    const { k = 10, temperature = 1.0, threshold = 0 } = options;
    // Similarity -> top-k -> threshold filter -> attention weights.
    const scores = batchCosineSim(query, vectors);
    const kept = topKIndices(scores, k).filter((idx) => scores[idx] >= threshold);
    const topScores = Float32Array.from(kept, (idx) => scores[idx]);
    const weights = softmaxAttention(topScores, temperature);
    return { indices: kept, scores: topScores, weights };
}
|
|
837
|
+
// ============================================================================
|
|
838
|
+
// METADATA AND INITIALIZATION
|
|
839
|
+
// ============================================================================
|
|
840
|
+
/**
|
|
841
|
+
* Initial metadata to insert after schema creation
|
|
842
|
+
*/
|
|
843
|
+
export function getInitialMetadata(backend) {
|
|
793
844
|
return `
|
|
794
845
|
INSERT OR REPLACE INTO metadata (key, value) VALUES
|
|
795
846
|
('schema_version', '3.0.0'),
|
|
@@ -805,405 +856,405 @@ INSERT OR REPLACE INTO metadata (key, value) VALUES
|
|
|
805
856
|
INSERT OR IGNORE INTO vector_indexes (id, name, dimensions) VALUES
|
|
806
857
|
('default', 'default', 768),
|
|
807
858
|
('patterns', 'patterns', 768);
|
|
808
|
-
`;
|
|
809
|
-
}
|
|
810
|
-
/**
|
|
811
|
-
* Ensure memory_entries table has all required columns
|
|
812
|
-
* Adds missing columns for older databases (e.g., 'content' column)
|
|
813
|
-
*/
|
|
814
|
-
/**
 * Ensure memory_entries table has all required columns.
 * Adds missing columns for older databases (e.g., 'content' column).
 *
 * Loads the database with sql.js (WASM SQLite), ALTERs in any missing
 * columns, and writes the file back only if something changed.
 *
 * @param {string} dbPath - Path to the SQLite database file.
 * @returns {Promise<{success: boolean, columnsAdded: string[], error?: string}>}
 *   success=true with the list of added columns (empty when the file does
 *   not exist or nothing was missing); success=false with an error message
 *   when loading/saving failed.
 */
export async function ensureSchemaColumns(dbPath) {
    const columnsAdded = [];
    try {
        // Nothing to migrate if the database has not been created yet.
        if (!fs.existsSync(dbPath)) {
            return { success: true, columnsAdded: [] };
        }
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(dbPath);
        const db = new SQL.Database(fileBuffer);
        // Get current columns in memory_entries
        const tableInfo = db.exec("PRAGMA table_info(memory_entries)");
        // PRAGMA table_info rows: [cid, name, type, ...] — index 1 is the name.
        const existingColumns = new Set(tableInfo[0]?.values?.map(row => row[1]) || []);
        // Required columns that may be missing in older schemas
        // Issue #977: 'type' column was missing from this list, causing store failures on older DBs
        const requiredColumns = [
            { name: 'content', definition: "content TEXT DEFAULT ''" },
            { name: 'type', definition: "type TEXT DEFAULT 'semantic'" },
            { name: 'embedding', definition: 'embedding TEXT' },
            { name: 'embedding_model', definition: "embedding_model TEXT DEFAULT 'local'" },
            { name: 'embedding_dimensions', definition: 'embedding_dimensions INTEGER' },
            { name: 'tags', definition: 'tags TEXT' },
            { name: 'metadata', definition: 'metadata TEXT' },
            { name: 'owner_id', definition: 'owner_id TEXT' },
            { name: 'expires_at', definition: 'expires_at INTEGER' },
            { name: 'last_accessed_at', definition: 'last_accessed_at INTEGER' },
            { name: 'access_count', definition: 'access_count INTEGER DEFAULT 0' },
            { name: 'status', definition: "status TEXT DEFAULT 'active'" }
        ];
        let modified = false;
        for (const col of requiredColumns) {
            if (!existingColumns.has(col.name)) {
                try {
                    db.run(`ALTER TABLE memory_entries ADD COLUMN ${col.definition}`);
                    columnsAdded.push(col.name);
                    modified = true;
                }
                catch (e) {
                    // Column might already exist or other error - continue
                }
            }
        }
        if (modified) {
            // Save updated database (sql.js is in-memory; export + rewrite the file)
            const data = db.export();
            fs.writeFileSync(dbPath, Buffer.from(data));
        }
        db.close();
        return { success: true, columnsAdded };
    }
    catch (error) {
        // Report the failure; columnsAdded may be partially populated.
        return {
            success: false,
            columnsAdded,
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
|
|
872
|
-
/**
|
|
873
|
-
* Check for legacy database installations and migrate if needed
|
|
874
|
-
*/
|
|
875
|
-
export async function checkAndMigrateLegacy(options) {
|
|
876
|
-
const { dbPath, verbose = false } = options;
|
|
877
|
-
// Check for legacy locations
|
|
878
|
-
const legacyPaths = [
|
|
879
|
-
path.join(
|
|
880
|
-
path.join(
|
|
881
|
-
path.join(
|
|
882
|
-
path.join(
|
|
883
|
-
];
|
|
884
|
-
for (const legacyPath of legacyPaths) {
|
|
885
|
-
if (fs.existsSync(legacyPath) && legacyPath !== dbPath) {
|
|
886
|
-
try {
|
|
887
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
888
|
-
const SQL = await initSqlJs();
|
|
889
|
-
const legacyBuffer = fs.readFileSync(legacyPath);
|
|
890
|
-
const legacyDb = new SQL.Database(legacyBuffer);
|
|
891
|
-
// Check if it has data
|
|
892
|
-
const countResult = legacyDb.exec('SELECT COUNT(*) FROM memory_entries');
|
|
893
|
-
const count = countResult[0]?.values[0]?.[0] || 0;
|
|
894
|
-
// Get version if available
|
|
895
|
-
let version = 'unknown';
|
|
896
|
-
try {
|
|
897
|
-
const versionResult = legacyDb.exec("SELECT value FROM metadata WHERE key='schema_version'");
|
|
898
|
-
version = versionResult[0]?.values[0]?.[0] || 'unknown';
|
|
899
|
-
}
|
|
900
|
-
catch { /* no metadata table */ }
|
|
901
|
-
legacyDb.close();
|
|
902
|
-
if (count > 0) {
|
|
903
|
-
return {
|
|
904
|
-
needsMigration: true,
|
|
905
|
-
legacyVersion: version,
|
|
906
|
-
legacyEntries: count
|
|
907
|
-
};
|
|
908
|
-
}
|
|
909
|
-
}
|
|
910
|
-
catch {
|
|
911
|
-
// Not a valid SQLite database, skip
|
|
912
|
-
}
|
|
913
|
-
}
|
|
914
|
-
}
|
|
915
|
-
return { needsMigration: false };
|
|
916
|
-
}
|
|
917
|
-
/**
|
|
918
|
-
* ADR-053: Activate ControllerRegistry so AgentDB v3 controllers
|
|
919
|
-
* (ReasoningBank, SkillLibrary, ExplainableRecall, etc.) are instantiated.
|
|
920
|
-
*
|
|
921
|
-
* Uses the memory-bridge's getControllerRegistry() which lazily creates
|
|
922
|
-
* a singleton ControllerRegistry and initializes it with the given dbPath.
|
|
923
|
-
* After this call, all enabled controllers are ready for immediate use.
|
|
924
|
-
*
|
|
925
|
-
* Failures are isolated: if @claude-flow/memory or agentdb is not installed,
|
|
926
|
-
* this returns an empty result without throwing.
|
|
927
|
-
*/
|
|
928
|
-
/**
 * ADR-053: Activate the ControllerRegistry so AgentDB v3 controllers
 * (ReasoningBank, SkillLibrary, ExplainableRecall, etc.) are instantiated.
 *
 * Failures are isolated: if the bridge or registry is unavailable, an
 * empty result is returned without throwing.
 *
 * @param {string} dbPath - Database path handed to the registry.
 * @param {boolean} verbose - Log an activation summary when true.
 * @returns {Promise<{activated: string[], failed: string[], initTimeMs: number}>}
 *   Names of enabled/disabled controllers and elapsed wall time.
 */
async function activateControllerRegistry(dbPath, verbose) {
    const startTime = performance.now();
    const activated = [];
    const failed = [];
    try {
        const bridge = await getBridge();
        if (!bridge) {
            // No bridge installed: report empty result, never throw.
            return { activated, failed, initTimeMs: performance.now() - startTime };
        }
        const registry = await bridge.getControllerRegistry(dbPath);
        if (!registry) {
            return { activated, failed, initTimeMs: performance.now() - startTime };
        }
        // Collect controller status from the registry
        // (guarded: listControllers may not exist on older bridge versions —
        // TODO confirm against @claude-flow/memory's registry interface).
        if (typeof registry.listControllers === 'function') {
            const controllers = registry.listControllers();
            for (const ctrl of controllers) {
                if (ctrl.enabled) {
                    activated.push(ctrl.name);
                }
                else {
                    failed.push(ctrl.name);
                }
            }
        }
        if (verbose && activated.length > 0) {
            console.log(`ControllerRegistry: ${activated.length} controllers activated`);
        }
    }
    catch {
        // ControllerRegistry activation is best-effort
    }
    return { activated, failed, initTimeMs: performance.now() - startTime };
}
|
|
962
|
-
/**
|
|
963
|
-
* Initialize the memory database properly using sql.js
|
|
964
|
-
*/
|
|
965
|
-
export async function initializeMemoryDatabase(options) {
|
|
966
|
-
const { backend = 'hybrid', dbPath: customPath, force = false, verbose = false, migrate = true } = options;
|
|
967
|
-
const swarmDir = path.join(
|
|
968
|
-
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
969
|
-
const dbDir = path.dirname(dbPath);
|
|
970
|
-
try {
|
|
971
|
-
// Create directory if needed
|
|
972
|
-
if (!fs.existsSync(dbDir)) {
|
|
973
|
-
fs.mkdirSync(dbDir, { recursive: true });
|
|
974
|
-
}
|
|
975
|
-
// Check for legacy installations
|
|
976
|
-
if (migrate) {
|
|
977
|
-
const legacyCheck = await checkAndMigrateLegacy({ dbPath, verbose });
|
|
978
|
-
if (legacyCheck.needsMigration && verbose) {
|
|
979
|
-
console.log(`Found legacy database (v${legacyCheck.legacyVersion}) with ${legacyCheck.legacyEntries} entries`);
|
|
980
|
-
}
|
|
981
|
-
}
|
|
982
|
-
// Check existing database
|
|
983
|
-
if (fs.existsSync(dbPath) && !force) {
|
|
984
|
-
return {
|
|
985
|
-
success: false,
|
|
986
|
-
backend,
|
|
987
|
-
dbPath,
|
|
988
|
-
schemaVersion: '3.0.0',
|
|
989
|
-
tablesCreated: [],
|
|
990
|
-
indexesCreated: [],
|
|
991
|
-
features: {
|
|
992
|
-
vectorEmbeddings: false,
|
|
993
|
-
patternLearning: false,
|
|
994
|
-
temporalDecay: false,
|
|
995
|
-
hnswIndexing: false,
|
|
996
|
-
migrationTracking: false
|
|
997
|
-
},
|
|
998
|
-
error: 'Database already exists. Use --force to reinitialize.'
|
|
999
|
-
};
|
|
1000
|
-
}
|
|
1001
|
-
// Try to use sql.js (WASM SQLite)
|
|
1002
|
-
let db;
|
|
1003
|
-
let usedSqlJs = false;
|
|
1004
|
-
try {
|
|
1005
|
-
// Dynamic import of sql.js
|
|
1006
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
1007
|
-
const SQL = await initSqlJs();
|
|
1008
|
-
// Load existing database or create new
|
|
1009
|
-
if (fs.existsSync(dbPath) && force) {
|
|
1010
|
-
fs.unlinkSync(dbPath);
|
|
1011
|
-
}
|
|
1012
|
-
db = new SQL.Database();
|
|
1013
|
-
usedSqlJs = true;
|
|
1014
|
-
}
|
|
1015
|
-
catch (e) {
|
|
1016
|
-
// sql.js not available, fall back to writing schema file
|
|
1017
|
-
if (verbose) {
|
|
1018
|
-
console.log('sql.js not available, writing schema file for later initialization');
|
|
1019
|
-
}
|
|
1020
|
-
}
|
|
1021
|
-
if (usedSqlJs && db) {
|
|
1022
|
-
// Execute schema
|
|
1023
|
-
db.run(MEMORY_SCHEMA_V3);
|
|
1024
|
-
// Insert initial metadata
|
|
1025
|
-
db.run(getInitialMetadata(backend));
|
|
1026
|
-
// Save to file
|
|
1027
|
-
const data = db.export();
|
|
1028
|
-
const buffer = Buffer.from(data);
|
|
1029
|
-
fs.writeFileSync(dbPath, buffer);
|
|
1030
|
-
// Close database
|
|
1031
|
-
db.close();
|
|
1032
|
-
// Also create schema file for reference
|
|
1033
|
-
const schemaPath = path.join(dbDir, 'schema.sql');
|
|
1034
|
-
fs.writeFileSync(schemaPath, MEMORY_SCHEMA_V3 + '\n' + getInitialMetadata(backend));
|
|
1035
|
-
// ADR-053: Activate ControllerRegistry so controllers (ReasoningBank,
|
|
1036
|
-
// SkillLibrary, ExplainableRecall, etc.) are instantiated during init
|
|
1037
|
-
const controllerResult = await activateControllerRegistry(dbPath, verbose);
|
|
1038
|
-
return {
|
|
1039
|
-
success: true,
|
|
1040
|
-
backend,
|
|
1041
|
-
dbPath,
|
|
1042
|
-
schemaVersion: '3.0.0',
|
|
1043
|
-
tablesCreated: [
|
|
1044
|
-
'memory_entries',
|
|
1045
|
-
'patterns',
|
|
1046
|
-
'pattern_history',
|
|
1047
|
-
'trajectories',
|
|
1048
|
-
'trajectory_steps',
|
|
1049
|
-
'migration_state',
|
|
1050
|
-
'sessions',
|
|
1051
|
-
'vector_indexes',
|
|
1052
|
-
'metadata'
|
|
1053
|
-
],
|
|
1054
|
-
indexesCreated: [
|
|
1055
|
-
'idx_memory_namespace',
|
|
1056
|
-
'idx_memory_key',
|
|
1057
|
-
'idx_memory_type',
|
|
1058
|
-
'idx_memory_status',
|
|
1059
|
-
'idx_memory_created',
|
|
1060
|
-
'idx_memory_accessed',
|
|
1061
|
-
'idx_memory_owner',
|
|
1062
|
-
'idx_patterns_type',
|
|
1063
|
-
'idx_patterns_confidence',
|
|
1064
|
-
'idx_patterns_status',
|
|
1065
|
-
'idx_patterns_last_matched',
|
|
1066
|
-
'idx_pattern_history_pattern',
|
|
1067
|
-
'idx_steps_trajectory'
|
|
1068
|
-
],
|
|
1069
|
-
features: {
|
|
1070
|
-
vectorEmbeddings: true,
|
|
1071
|
-
patternLearning: true,
|
|
1072
|
-
temporalDecay: true,
|
|
1073
|
-
hnswIndexing: true,
|
|
1074
|
-
migrationTracking: true
|
|
1075
|
-
},
|
|
1076
|
-
controllers: controllerResult,
|
|
1077
|
-
};
|
|
1078
|
-
}
|
|
1079
|
-
else {
|
|
1080
|
-
// Fall back to schema file approach
|
|
1081
|
-
const schemaPath = path.join(dbDir, 'schema.sql');
|
|
1082
|
-
fs.writeFileSync(schemaPath, MEMORY_SCHEMA_V3 + '\n' + getInitialMetadata(backend));
|
|
1083
|
-
// Create minimal valid SQLite file
|
|
1084
|
-
const sqliteHeader = Buffer.alloc(4096, 0);
|
|
1085
|
-
// SQLite format 3 header
|
|
1086
|
-
Buffer.from('SQLite format 3\0').copy(sqliteHeader, 0);
|
|
1087
|
-
sqliteHeader[16] = 0x10; // page size high byte (4096)
|
|
1088
|
-
sqliteHeader[17] = 0x00; // page size low byte
|
|
1089
|
-
sqliteHeader[18] = 0x01; // file format write version
|
|
1090
|
-
sqliteHeader[19] = 0x01; // file format read version
|
|
1091
|
-
sqliteHeader[24] = 0x00; // max embedded payload
|
|
1092
|
-
sqliteHeader[25] = 0x40;
|
|
1093
|
-
sqliteHeader[26] = 0x20; // min embedded payload
|
|
1094
|
-
sqliteHeader[27] = 0x20; // leaf payload
|
|
1095
|
-
fs.writeFileSync(dbPath, sqliteHeader);
|
|
1096
|
-
// ADR-053: Activate ControllerRegistry even on fallback path
|
|
1097
|
-
const controllerResult = await activateControllerRegistry(dbPath, verbose);
|
|
1098
|
-
return {
|
|
1099
|
-
success: true,
|
|
1100
|
-
backend,
|
|
1101
|
-
dbPath,
|
|
1102
|
-
schemaVersion: '3.0.0',
|
|
1103
|
-
tablesCreated: [
|
|
1104
|
-
'memory_entries (pending)',
|
|
1105
|
-
'patterns (pending)',
|
|
1106
|
-
'pattern_history (pending)',
|
|
1107
|
-
'trajectories (pending)',
|
|
1108
|
-
'trajectory_steps (pending)',
|
|
1109
|
-
'migration_state (pending)',
|
|
1110
|
-
'sessions (pending)',
|
|
1111
|
-
'vector_indexes (pending)',
|
|
1112
|
-
'metadata (pending)'
|
|
1113
|
-
],
|
|
1114
|
-
indexesCreated: [],
|
|
1115
|
-
features: {
|
|
1116
|
-
vectorEmbeddings: true,
|
|
1117
|
-
patternLearning: true,
|
|
1118
|
-
temporalDecay: true,
|
|
1119
|
-
hnswIndexing: true,
|
|
1120
|
-
migrationTracking: true
|
|
1121
|
-
},
|
|
1122
|
-
controllers: controllerResult,
|
|
1123
|
-
};
|
|
1124
|
-
}
|
|
1125
|
-
}
|
|
1126
|
-
catch (error) {
|
|
1127
|
-
return {
|
|
1128
|
-
success: false,
|
|
1129
|
-
backend,
|
|
1130
|
-
dbPath,
|
|
1131
|
-
schemaVersion: '3.0.0',
|
|
1132
|
-
tablesCreated: [],
|
|
1133
|
-
indexesCreated: [],
|
|
1134
|
-
features: {
|
|
1135
|
-
vectorEmbeddings: false,
|
|
1136
|
-
patternLearning: false,
|
|
1137
|
-
temporalDecay: false,
|
|
1138
|
-
hnswIndexing: false,
|
|
1139
|
-
migrationTracking: false
|
|
1140
|
-
},
|
|
1141
|
-
error: error instanceof Error ? error.message : String(error)
|
|
1142
|
-
};
|
|
1143
|
-
}
|
|
1144
|
-
}
|
|
1145
|
-
/**
|
|
1146
|
-
* Check if memory database is properly initialized
|
|
1147
|
-
*/
|
|
1148
|
-
export async function checkMemoryInitialization(dbPath) {
|
|
1149
|
-
const swarmDir = path.join(
|
|
1150
|
-
const path_ = dbPath || path.join(swarmDir, 'memory.db');
|
|
1151
|
-
if (!fs.existsSync(path_)) {
|
|
1152
|
-
return { initialized: false };
|
|
1153
|
-
}
|
|
1154
|
-
try {
|
|
1155
|
-
// Try to load with sql.js
|
|
1156
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
1157
|
-
const SQL = await initSqlJs();
|
|
1158
|
-
const fileBuffer = fs.readFileSync(path_);
|
|
1159
|
-
const db = new SQL.Database(fileBuffer);
|
|
1160
|
-
// Check for metadata table
|
|
1161
|
-
const tables = db.exec("SELECT name FROM sqlite_master WHERE type='table'");
|
|
1162
|
-
const tableNames = tables[0]?.values?.map(v => v[0]) || [];
|
|
1163
|
-
// Get version
|
|
1164
|
-
let version = 'unknown';
|
|
1165
|
-
let backend = 'unknown';
|
|
1166
|
-
try {
|
|
1167
|
-
const versionResult = db.exec("SELECT value FROM metadata WHERE key='schema_version'");
|
|
1168
|
-
version = versionResult[0]?.values[0]?.[0] || 'unknown';
|
|
1169
|
-
const backendResult = db.exec("SELECT value FROM metadata WHERE key='backend'");
|
|
1170
|
-
backend = backendResult[0]?.values[0]?.[0] || 'unknown';
|
|
1171
|
-
}
|
|
1172
|
-
catch {
|
|
1173
|
-
// Metadata table might not exist
|
|
1174
|
-
}
|
|
1175
|
-
db.close();
|
|
1176
|
-
return {
|
|
1177
|
-
initialized: true,
|
|
1178
|
-
version,
|
|
1179
|
-
backend,
|
|
1180
|
-
features: {
|
|
1181
|
-
vectorEmbeddings: tableNames.includes('vector_indexes'),
|
|
1182
|
-
patternLearning: tableNames.includes('patterns'),
|
|
1183
|
-
temporalDecay: tableNames.includes('pattern_history')
|
|
1184
|
-
},
|
|
1185
|
-
tables: tableNames
|
|
1186
|
-
};
|
|
1187
|
-
}
|
|
1188
|
-
catch {
|
|
1189
|
-
// Could not read database
|
|
1190
|
-
return { initialized: false };
|
|
1191
|
-
}
|
|
1192
|
-
}
|
|
1193
|
-
/**
|
|
1194
|
-
* Apply temporal decay to patterns
|
|
1195
|
-
* Reduces confidence of patterns that haven't been used recently
|
|
1196
|
-
*/
|
|
1197
|
-
export async function applyTemporalDecay(dbPath) {
|
|
1198
|
-
const swarmDir = path.join(
|
|
1199
|
-
const path_ = dbPath || path.join(swarmDir, 'memory.db');
|
|
1200
|
-
try {
|
|
1201
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
1202
|
-
const SQL = await initSqlJs();
|
|
1203
|
-
const fileBuffer = fs.readFileSync(path_);
|
|
1204
|
-
const db = new SQL.Database(fileBuffer);
|
|
1205
|
-
// Apply decay: confidence *= exp(-decay_rate * days_since_last_use)
|
|
1206
|
-
const now = Date.now();
|
|
859
|
+
`;
|
|
860
|
+
}
|
|
861
|
+
/**
 * Ensure memory_entries table has all required columns.
 * Adds missing columns for older databases (e.g., 'content' column).
 *
 * Best-effort migration: each ALTER is attempted independently and
 * individual failures are swallowed so one bad column does not block
 * the rest.
 *
 * @param {string} dbPath - Path to the SQLite database file on disk.
 * @returns {Promise<{success: boolean, columnsAdded: string[], error?: string}>}
 *   `columnsAdded` lists the columns actually added in this call.
 */
export async function ensureSchemaColumns(dbPath) {
    const columnsAdded = [];
    try {
        // No database yet: nothing to migrate.
        if (!fs.existsSync(dbPath)) {
            return { success: true, columnsAdded: [] };
        }
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(dbPath);
        const db = new SQL.Database(fileBuffer);
        try {
            // Get current columns in memory_entries (row[1] is the column name
            // in PRAGMA table_info output)
            const tableInfo = db.exec("PRAGMA table_info(memory_entries)");
            const existingColumns = new Set(tableInfo[0]?.values?.map(row => row[1]) || []);
            // Required columns that may be missing in older schemas
            // Issue #977: 'type' column was missing from this list, causing store failures on older DBs
            const requiredColumns = [
                { name: 'content', definition: "content TEXT DEFAULT ''" },
                { name: 'type', definition: "type TEXT DEFAULT 'semantic'" },
                { name: 'embedding', definition: 'embedding TEXT' },
                { name: 'embedding_model', definition: "embedding_model TEXT DEFAULT 'local'" },
                { name: 'embedding_dimensions', definition: 'embedding_dimensions INTEGER' },
                { name: 'tags', definition: 'tags TEXT' },
                { name: 'metadata', definition: 'metadata TEXT' },
                { name: 'owner_id', definition: 'owner_id TEXT' },
                { name: 'expires_at', definition: 'expires_at INTEGER' },
                { name: 'last_accessed_at', definition: 'last_accessed_at INTEGER' },
                { name: 'access_count', definition: 'access_count INTEGER DEFAULT 0' },
                { name: 'status', definition: "status TEXT DEFAULT 'active'" }
            ];
            let modified = false;
            for (const col of requiredColumns) {
                if (existingColumns.has(col.name)) {
                    continue;
                }
                try {
                    db.run(`ALTER TABLE memory_entries ADD COLUMN ${col.definition}`);
                    columnsAdded.push(col.name);
                    modified = true;
                }
                catch {
                    // Column might already exist or other ALTER error - continue
                }
            }
            if (modified) {
                // sql.js is in-memory: persist the updated database back to disk
                fs.writeFileSync(dbPath, Buffer.from(db.export()));
            }
            return { success: true, columnsAdded };
        }
        finally {
            // Always release the sql.js (WASM) handle; previously a throw
            // between construction and the success path leaked it.
            db.close();
        }
    }
    catch (error) {
        return {
            success: false,
            columnsAdded,
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
|
|
923
|
+
/**
 * Check for legacy database installations and migrate if needed.
 *
 * Detection-only pass: scans a fixed list of historical memory.db
 * locations, opens each candidate read-only via sql.js, and reports the
 * first one that contains memory_entries rows. It never copies or
 * deletes data itself.
 *
 * @param {{dbPath: string, verbose?: boolean}} options - Target DB path
 *   (candidates equal to it are skipped) and verbosity flag.
 * @returns {Promise<{needsMigration: boolean, legacyVersion?: *, legacyEntries?: *}>}
 */
export async function checkAndMigrateLegacy(options) {
    // NOTE(review): `verbose` is destructured but not used in this function —
    // presumably logging was intended; confirm with callers.
    const { dbPath, verbose = false } = options;
    // Check for legacy locations (historical defaults used by earlier releases)
    const legacyPaths = [
        path.join(getProjectRoot(), 'memory.db'),
        path.join(getProjectRoot(), '.claude/memory.db'),
        path.join(getProjectRoot(), 'data/memory.db'),
        path.join(getProjectRoot(), '.claude-flow/memory.db')
    ];
    for (const legacyPath of legacyPaths) {
        // Skip missing files and the current target itself
        if (fs.existsSync(legacyPath) && legacyPath !== dbPath) {
            try {
                const initSqlJs = (await import('sql.js')).default;
                const SQL = await initSqlJs();
                const legacyBuffer = fs.readFileSync(legacyPath);
                const legacyDb = new SQL.Database(legacyBuffer);
                // Check if it has data
                const countResult = legacyDb.exec('SELECT COUNT(*) FROM memory_entries');
                const count = countResult[0]?.values[0]?.[0] || 0;
                // Get version if available
                let version = 'unknown';
                try {
                    const versionResult = legacyDb.exec("SELECT value FROM metadata WHERE key='schema_version'");
                    version = versionResult[0]?.values[0]?.[0] || 'unknown';
                }
                catch { /* no metadata table */ }
                legacyDb.close();
                if (count > 0) {
                    // First populated legacy DB wins; later candidates are not examined
                    return {
                        needsMigration: true,
                        legacyVersion: version,
                        legacyEntries: count
                    };
                }
            }
            catch {
                // Not a valid SQLite database, skip
            }
        }
    }
    return { needsMigration: false };
}
|
|
968
|
+
/**
 * ADR-053: Activate ControllerRegistry so AgentDB v3 controllers
 * (ReasoningBank, SkillLibrary, ExplainableRecall, etc.) are instantiated.
 *
 * Uses the memory-bridge's getControllerRegistry() which lazily creates
 * a singleton ControllerRegistry and initializes it with the given dbPath.
 * After this call, all enabled controllers are ready for immediate use.
 *
 * Failures are isolated: if @claude-flow/memory or agentdb is not installed,
 * this returns an empty result without throwing.
 *
 * @param {string} dbPath - Database path handed to the registry.
 * @param {boolean} verbose - When true, logs the activation count.
 * @returns {Promise<{activated: string[], failed: string[], initTimeMs: number}>}
 *   Controller names partitioned by their `enabled` flag, plus wall time.
 */
async function activateControllerRegistry(dbPath, verbose) {
    const startTime = performance.now();
    const activated = [];
    const failed = [];
    try {
        // Bridge may be unavailable (optional dependency); bail out quietly.
        const bridge = await getBridge();
        if (!bridge) {
            return { activated, failed, initTimeMs: performance.now() - startTime };
        }
        const registry = await bridge.getControllerRegistry(dbPath);
        if (!registry) {
            return { activated, failed, initTimeMs: performance.now() - startTime };
        }
        // Collect controller status from the registry; listControllers is
        // feature-detected since older bridge versions may not expose it.
        if (typeof registry.listControllers === 'function') {
            const controllers = registry.listControllers();
            for (const ctrl of controllers) {
                if (ctrl.enabled) {
                    activated.push(ctrl.name);
                }
                else {
                    // NOTE(review): disabled controllers are reported as "failed";
                    // confirm that naming matches consumer expectations.
                    failed.push(ctrl.name);
                }
            }
        }
        if (verbose && activated.length > 0) {
            console.log(`ControllerRegistry: ${activated.length} controllers activated`);
        }
    }
    catch {
        // ControllerRegistry activation is best-effort
    }
    return { activated, failed, initTimeMs: performance.now() - startTime };
}
|
|
1013
|
+
/**
 * Initialize the memory database properly using sql.js
 *
 * Flow: ensure the target directory exists, optionally detect legacy
 * databases, refuse to overwrite an existing DB unless `force`, then
 * either (a) build a real SQLite file via sql.js (WASM) or (b) fall back
 * to writing schema.sql plus a stub header file when sql.js is absent.
 * In both branches the ControllerRegistry is activated (ADR-053).
 *
 * @param {{backend?: string, dbPath?: string, force?: boolean,
 *          verbose?: boolean, migrate?: boolean}} options
 * @returns {Promise<object>} Result object with success flag, paths,
 *   table/index lists, feature flags, and controller activation info.
 */
export async function initializeMemoryDatabase(options) {
    const { backend = 'hybrid', dbPath: customPath, force = false, verbose = false, migrate = true } = options;
    // Default location: <project>/.swarm/memory.db
    const swarmDir = path.join(getProjectRoot(), '.swarm');
    const dbPath = customPath || path.join(swarmDir, 'memory.db');
    const dbDir = path.dirname(dbPath);
    try {
        // Create directory if needed
        if (!fs.existsSync(dbDir)) {
            fs.mkdirSync(dbDir, { recursive: true });
        }
        // Check for legacy installations (detection only — no data is moved here)
        if (migrate) {
            const legacyCheck = await checkAndMigrateLegacy({ dbPath, verbose });
            if (legacyCheck.needsMigration && verbose) {
                console.log(`Found legacy database (v${legacyCheck.legacyVersion}) with ${legacyCheck.legacyEntries} entries`);
            }
        }
        // Check existing database: never clobber without --force
        if (fs.existsSync(dbPath) && !force) {
            return {
                success: false,
                backend,
                dbPath,
                schemaVersion: '3.0.0',
                tablesCreated: [],
                indexesCreated: [],
                features: {
                    vectorEmbeddings: false,
                    patternLearning: false,
                    temporalDecay: false,
                    hnswIndexing: false,
                    migrationTracking: false
                },
                error: 'Database already exists. Use --force to reinitialize.'
            };
        }
        // Try to use sql.js (WASM SQLite)
        let db;
        let usedSqlJs = false;
        try {
            // Dynamic import of sql.js
            const initSqlJs = (await import('sql.js')).default;
            const SQL = await initSqlJs();
            // Load existing database or create new
            if (fs.existsSync(dbPath) && force) {
                fs.unlinkSync(dbPath);
            }
            db = new SQL.Database();
            usedSqlJs = true;
        }
        catch (e) {
            // sql.js not available, fall back to writing schema file
            if (verbose) {
                console.log('sql.js not available, writing schema file for later initialization');
            }
        }
        if (usedSqlJs && db) {
            // Execute schema
            db.run(MEMORY_SCHEMA_V3);
            // Insert initial metadata
            db.run(getInitialMetadata(backend));
            // Save to file (sql.js databases live in memory until exported)
            const data = db.export();
            const buffer = Buffer.from(data);
            fs.writeFileSync(dbPath, buffer);
            // Close database
            db.close();
            // Also create schema file for reference
            const schemaPath = path.join(dbDir, 'schema.sql');
            fs.writeFileSync(schemaPath, MEMORY_SCHEMA_V3 + '\n' + getInitialMetadata(backend));
            // ADR-053: Activate ControllerRegistry so controllers (ReasoningBank,
            // SkillLibrary, ExplainableRecall, etc.) are instantiated during init
            const controllerResult = await activateControllerRegistry(dbPath, verbose);
            return {
                success: true,
                backend,
                dbPath,
                schemaVersion: '3.0.0',
                tablesCreated: [
                    'memory_entries',
                    'patterns',
                    'pattern_history',
                    'trajectories',
                    'trajectory_steps',
                    'migration_state',
                    'sessions',
                    'vector_indexes',
                    'metadata'
                ],
                indexesCreated: [
                    'idx_memory_namespace',
                    'idx_memory_key',
                    'idx_memory_type',
                    'idx_memory_status',
                    'idx_memory_created',
                    'idx_memory_accessed',
                    'idx_memory_owner',
                    'idx_patterns_type',
                    'idx_patterns_confidence',
                    'idx_patterns_status',
                    'idx_patterns_last_matched',
                    'idx_pattern_history_pattern',
                    'idx_steps_trajectory'
                ],
                features: {
                    vectorEmbeddings: true,
                    patternLearning: true,
                    temporalDecay: true,
                    hnswIndexing: true,
                    migrationTracking: true
                },
                controllers: controllerResult,
            };
        }
        else {
            // Fall back to schema file approach: the real schema is applied
            // later by whatever tool reads schema.sql.
            const schemaPath = path.join(dbDir, 'schema.sql');
            fs.writeFileSync(schemaPath, MEMORY_SCHEMA_V3 + '\n' + getInitialMetadata(backend));
            // Create minimal valid SQLite file
            // NOTE(review): this is a single hand-built header page only — it is
            // not a fully initialized SQLite database; confirm downstream readers
            // tolerate it (they should treat it as an empty DB to re-initialize).
            const sqliteHeader = Buffer.alloc(4096, 0);
            // SQLite format 3 header (magic string at offset 0)
            Buffer.from('SQLite format 3\0').copy(sqliteHeader, 0);
            sqliteHeader[16] = 0x10; // page size high byte (4096)
            sqliteHeader[17] = 0x00; // page size low byte
            sqliteHeader[18] = 0x01; // file format write version
            sqliteHeader[19] = 0x01; // file format read version
            sqliteHeader[24] = 0x00; // max embedded payload
            sqliteHeader[25] = 0x40;
            sqliteHeader[26] = 0x20; // min embedded payload
            sqliteHeader[27] = 0x20; // leaf payload
            fs.writeFileSync(dbPath, sqliteHeader);
            // ADR-053: Activate ControllerRegistry even on fallback path
            const controllerResult = await activateControllerRegistry(dbPath, verbose);
            return {
                success: true,
                backend,
                dbPath,
                schemaVersion: '3.0.0',
                tablesCreated: [
                    'memory_entries (pending)',
                    'patterns (pending)',
                    'pattern_history (pending)',
                    'trajectories (pending)',
                    'trajectory_steps (pending)',
                    'migration_state (pending)',
                    'sessions (pending)',
                    'vector_indexes (pending)',
                    'metadata (pending)'
                ],
                indexesCreated: [],
                features: {
                    vectorEmbeddings: true,
                    patternLearning: true,
                    temporalDecay: true,
                    hnswIndexing: true,
                    migrationTracking: true
                },
                controllers: controllerResult,
            };
        }
    }
    catch (error) {
        // Any unexpected failure maps to a structured error result rather than a throw
        return {
            success: false,
            backend,
            dbPath,
            schemaVersion: '3.0.0',
            tablesCreated: [],
            indexesCreated: [],
            features: {
                vectorEmbeddings: false,
                patternLearning: false,
                temporalDecay: false,
                hnswIndexing: false,
                migrationTracking: false
            },
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
|
|
1196
|
+
/**
 * Check if memory database is properly initialized
 *
 * Opens the database with sql.js, lists its tables, and reads
 * schema_version/backend from the metadata table when present.
 * Any read failure is treated as "not initialized".
 *
 * @param {string} [dbPath] - Explicit DB path; defaults to
 *   <project>/.swarm/memory.db.
 * @returns {Promise<{initialized: boolean, version?: *, backend?: *,
 *   features?: object, tables?: Array}>}
 */
export async function checkMemoryInitialization(dbPath) {
    const swarmDir = path.join(getProjectRoot(), '.swarm');
    const path_ = dbPath || path.join(swarmDir, 'memory.db');
    // Missing file: trivially uninitialized
    if (!fs.existsSync(path_)) {
        return { initialized: false };
    }
    try {
        // Try to load with sql.js
        const initSqlJs = (await import('sql.js')).default;
        const SQL = await initSqlJs();
        const fileBuffer = fs.readFileSync(path_);
        const db = new SQL.Database(fileBuffer);
        // Check for metadata table
        const tables = db.exec("SELECT name FROM sqlite_master WHERE type='table'");
        const tableNames = tables[0]?.values?.map(v => v[0]) || [];
        // Get version
        let version = 'unknown';
        let backend = 'unknown';
        try {
            const versionResult = db.exec("SELECT value FROM metadata WHERE key='schema_version'");
            version = versionResult[0]?.values[0]?.[0] || 'unknown';
            const backendResult = db.exec("SELECT value FROM metadata WHERE key='backend'");
            backend = backendResult[0]?.values[0]?.[0] || 'unknown';
        }
        catch {
            // Metadata table might not exist
        }
        db.close();
        return {
            initialized: true,
            version,
            backend,
            // Feature flags are inferred purely from table presence
            features: {
                vectorEmbeddings: tableNames.includes('vector_indexes'),
                patternLearning: tableNames.includes('patterns'),
                temporalDecay: tableNames.includes('pattern_history')
            },
            tables: tableNames
        };
    }
    catch {
        // Could not read database
        return { initialized: false };
    }
}
|
|
1244
|
+
/**
|
|
1245
|
+
* Apply temporal decay to patterns
|
|
1246
|
+
* Reduces confidence of patterns that haven't been used recently
|
|
1247
|
+
*/
|
|
1248
|
+
export async function applyTemporalDecay(dbPath) {
|
|
1249
|
+
const swarmDir = path.join(getProjectRoot(), '.swarm');
|
|
1250
|
+
const path_ = dbPath || path.join(swarmDir, 'memory.db');
|
|
1251
|
+
try {
|
|
1252
|
+
const initSqlJs = (await import('sql.js')).default;
|
|
1253
|
+
const SQL = await initSqlJs();
|
|
1254
|
+
const fileBuffer = fs.readFileSync(path_);
|
|
1255
|
+
const db = new SQL.Database(fileBuffer);
|
|
1256
|
+
// Apply decay: confidence *= exp(-decay_rate * days_since_last_use)
|
|
1257
|
+
const now = Date.now();
|
|
1207
1258
|
const decayQuery = `
|
|
1208
1259
|
UPDATE patterns
|
|
1209
1260
|
SET
|
|
@@ -1212,779 +1263,779 @@ export async function applyTemporalDecay(dbPath) {
|
|
|
1212
1263
|
WHERE status = 'active'
|
|
1213
1264
|
AND confidence > 0.1
|
|
1214
1265
|
AND (? - COALESCE(last_matched_at, created_at)) > 86400000
|
|
1215
|
-
`;
|
|
1216
|
-
db.run(decayQuery, [now, now, now]);
|
|
1217
|
-
const changes = db.getRowsModified();
|
|
1218
|
-
// Save
|
|
1219
|
-
const data = db.export();
|
|
1220
|
-
fs.writeFileSync(path_, Buffer.from(data));
|
|
1221
|
-
db.close();
|
|
1222
|
-
return {
|
|
1223
|
-
success: true,
|
|
1224
|
-
patternsDecayed: changes
|
|
1225
|
-
};
|
|
1226
|
-
}
|
|
1227
|
-
catch (error) {
|
|
1228
|
-
return {
|
|
1229
|
-
success: false,
|
|
1230
|
-
patternsDecayed: 0,
|
|
1231
|
-
error: error instanceof Error ? error.message : String(error)
|
|
1232
|
-
};
|
|
1233
|
-
}
|
|
1234
|
-
}
|
|
1235
|
-
// Module-level cache: { loaded, model, tokenizer, dimensions } once a
// provider has been selected; null until the first load attempt.
let embeddingModelState = null;
/**
 * Lazy load ONNX embedding model
 * Only loads when first embedding is requested
 *
 * Provider priority: AgentDB v3 bridge → @xenova/transformers (ONNX,
 * 384-dim) → agentic-flow/reasoningbank (768-dim) → agentic-flow core
 * embeddings (768-dim) → deterministic domain-aware hash (384-dim).
 *
 * @param {{verbose?: boolean}} [options]
 * @returns {Promise<{success: boolean, dimensions: number, modelName: string,
 *   loadTime?: number, error?: string}>}
 */
export async function loadEmbeddingModel(options) {
    const { verbose = false } = options || {};
    const startTime = Date.now();
    // Already loaded: serve from module cache
    if (embeddingModelState?.loaded) {
        return {
            success: true,
            dimensions: embeddingModelState.dimensions,
            modelName: 'cached',
            loadTime: 0
        };
    }
    // ADR-053: Try AgentDB v3 bridge first
    const bridge = await getBridge();
    if (bridge) {
        const bridgeResult = await bridge.bridgeLoadEmbeddingModel();
        if (bridgeResult && bridgeResult.success) {
            // Mark local state as loaded too so subsequent calls use cache
            embeddingModelState = {
                loaded: true,
                model: null, // Bridge handles embedding
                tokenizer: null,
                dimensions: bridgeResult.dimensions
            };
            return bridgeResult;
        }
    }
    try {
        // Try to import @xenova/transformers for ONNX embeddings
        const transformers = await import('@xenova/transformers').catch(() => null);
        if (transformers) {
            if (verbose) {
                console.log('Loading ONNX embedding model (all-MiniLM-L6-v2)...');
            }
            // Use small, fast model for local embeddings
            const { pipeline } = transformers;
            const embedder = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
            embeddingModelState = {
                loaded: true,
                model: embedder,
                tokenizer: null,
                dimensions: 384 // MiniLM-L6 produces 384-dim vectors
            };
            return {
                success: true,
                dimensions: 384,
                modelName: 'all-MiniLM-L6-v2',
                loadTime: Date.now() - startTime
            };
        }
        // Fallback: Check for agentic-flow ReasoningBank embeddings (v3)
        const reasoningBank = await import('agentic-flow/reasoningbank').catch(() => null);
        if (reasoningBank?.computeEmbedding) {
            if (verbose) {
                console.log('Loading agentic-flow ReasoningBank embedding model...');
            }
            embeddingModelState = {
                loaded: true,
                model: { embed: reasoningBank.computeEmbedding },
                tokenizer: null,
                dimensions: 768
            };
            return {
                success: true,
                dimensions: 768,
                modelName: 'agentic-flow/reasoningbank',
                loadTime: Date.now() - startTime
            };
        }
        // Legacy fallback: Check for agentic-flow core embeddings
        const agenticFlow = await import('agentic-flow').catch(() => null);
        if (agenticFlow && agenticFlow.embeddings) {
            if (verbose) {
                console.log('Loading agentic-flow embedding model...');
            }
            embeddingModelState = {
                loaded: true,
                model: agenticFlow.embeddings,
                tokenizer: null,
                dimensions: 768
            };
            return {
                success: true,
                dimensions: 768,
                modelName: 'agentic-flow',
                loadTime: Date.now() - startTime
            };
        }
        // No ONNX model available - use fallback
        embeddingModelState = {
            loaded: true,
            model: null, // Will use domain-aware hash fallback
            tokenizer: null,
            dimensions: 384 // Domain-aware hash embedding dimensions
        };
        return {
            success: true,
            dimensions: 384,
            modelName: 'domain-aware-hash-384',
            loadTime: Date.now() - startTime
        };
    }
    catch (error) {
        return {
            success: false,
            dimensions: 0,
            modelName: 'none',
            error: error instanceof Error ? error.message : String(error)
        };
    }
}
|
|
1351
|
-
/**
 * Generate real embedding for text
 * Uses ONNX model if available, falls back to deterministic hash
 *
 * Priority: AgentDB v3 bridge → cached ONNX pipeline (lazy-loaded via
 * loadEmbeddingModel) → domain-aware hash fallback (always succeeds).
 *
 * @param {string} text - Input text to embed.
 * @returns {Promise<{embedding: number[], dimensions: number, model: string}>}
 */
export async function generateEmbedding(text) {
    // ADR-053: Try AgentDB v3 bridge first
    const bridge = await getBridge();
    if (bridge) {
        const bridgeResult = await bridge.bridgeGenerateEmbedding(text);
        if (bridgeResult)
            return bridgeResult;
    }
    // Ensure model is loaded (mutates module-level embeddingModelState)
    if (!embeddingModelState?.loaded) {
        await loadEmbeddingModel();
    }
    const state = embeddingModelState;
    // Use ONNX model if available (the transformers pipeline is callable)
    if (state.model && typeof state.model === 'function') {
        try {
            const output = await state.model(text, { pooling: 'mean', normalize: true });
            const embedding = Array.from(output.data);
            return {
                embedding,
                dimensions: embedding.length,
                model: 'onnx'
            };
        }
        catch {
            // Fall through to fallback
        }
    }
    // Domain-aware hash fallback (for testing/demo without ONNX)
    const embedding = generateDomainAwareEmbedding(text);
    return {
        embedding,
        dimensions: 384,
        model: 'domain-aware-hash-384'
    };
}
|
|
1391
|
-
/**
 * Generate embeddings for multiple texts
 * Uses parallel execution for API-based providers (2-4x faster)
 * Note: Local ONNX inference is CPU-bound, so parallelism has limited benefit
 *
 * @param texts - Array of texts to embed
 * @param options - Batch options ({ concurrency, onProgress })
 * @returns Array of embedding results with timing info:
 *   { results, totalTime, avgTime }
 */
export async function generateBatchEmbeddings(texts, options) {
    const { concurrency = texts.length, onProgress } = options || {};
    const startTime = Date.now();
    // Fix: empty input previously produced avgTime = NaN (0 / 0)
    if (texts.length === 0) {
        return { results: [], totalTime: 0, avgTime: 0 };
    }
    // Ensure model is loaded first (prevents cold start in parallel)
    if (!embeddingModelState?.loaded) {
        await loadEmbeddingModel();
    }
    // Process in parallel with optional concurrency limit
    if (concurrency >= texts.length) {
        // Full parallelism
        const embeddings = await Promise.all(texts.map(async (text, i) => {
            const result = await generateEmbedding(text);
            onProgress?.(i + 1, texts.length);
            return { text, ...result };
        }));
        const totalTime = Date.now() - startTime;
        return {
            results: embeddings,
            totalTime,
            avgTime: totalTime / texts.length
        };
    }
    // Limited concurrency using chunking.
    // Fix: clamp the step to >= 1 — a concurrency of 0 (or a fractional
    // value < 1) previously made `i += concurrency` loop forever.
    const step = Math.max(1, Math.floor(concurrency));
    const results = [];
    let completed = 0;
    for (let i = 0; i < texts.length; i += step) {
        const chunk = texts.slice(i, i + step);
        const chunkResults = await Promise.all(chunk.map(async (text) => {
            const result = await generateEmbedding(text);
            completed++;
            onProgress?.(completed, texts.length);
            return { text, ...result };
        }));
        results.push(...chunkResults);
    }
    const totalTime = Date.now() - startTime;
    return {
        results,
        totalTime,
        avgTime: totalTime / texts.length
    };
}
|
|
1442
|
-
/**
 * Domain-aware semantic hash embeddings (384-dim)
 * Provides consistent embedding dimensions between CLI and MCP tools.
 * Uses domain cluster awareness for better semantic similarity.
 *
 * Each cluster maps a domain name to keywords whose presence in the input
 * text (substring match, lowercase) boosts that domain's signature vector
 * in generateDomainAwareEmbedding.
 */
const DOMAIN_CLUSTERS = {
    database: ['typeorm', 'mongodb', 'database', 'entity', 'schema', 'table', 'collection',
        'query', 'sql', 'nosql', 'orm', 'model', 'migration', 'repository', 'column',
        'relation', 'foreign', 'primary', 'index', 'constraint', 'transaction'],
    frontend: ['react', 'component', 'ui', 'styling', 'css', 'html', 'jsx', 'tsx', 'frontend',
        'material', 'mui', 'tailwind', 'dom', 'render', 'hook', 'state', 'props',
        'redux', 'context', 'styled', 'emotion', 'theme', 'layout', 'responsive'],
    backend: ['fastify', 'api', 'route', 'handler', 'rest', 'endpoint', 'server', 'controller',
        'middleware', 'request', 'response', 'http', 'express', 'nest', 'graphql',
        'websocket', 'socket', 'cors', 'auth', 'jwt', 'session', 'cookie'],
    testing: ['test', 'testing', 'vitest', 'jest', 'mock', 'spy', 'assert', 'expect', 'describe',
        'it', 'spec', 'unit', 'integration', 'e2e', 'playwright', 'cypress', 'coverage',
        'fixture', 'stub', 'fake', 'snapshot', 'beforeeach', 'aftereach'],
    tenancy: ['tenant', 'tenancy', 'companyid', 'company', 'isolation', 'multi', 'multitenant',
        'organization', 'workspace', 'account', 'customer', 'client'],
    security: ['security', 'auth', 'authentication', 'authorization', 'permission', 'role',
        'access', 'token', 'jwt', 'oauth', 'password', 'encrypt', 'hash', 'salt',
        'csrf', 'xss', 'injection', 'sanitize', 'validate'],
    patterns: ['pattern', 'service', 'factory', 'singleton', 'decorator', 'adapter', 'facade',
        'observer', 'strategy', 'command', 'repository', 'usecase', 'domain', 'ddd',
        'clean', 'architecture', 'solid', 'dry', 'kiss'],
    workflow: ['workflow', 'pipeline', 'ci', 'cd', 'deploy', 'build', 'actions',
        'hook', 'trigger', 'job', 'step', 'artifact', 'release', 'version', 'tag'],
    memory: ['memory', 'cache', 'store', 'persist', 'storage', 'redis', 'session', 'state',
        'buffer', 'queue', 'stack', 'heap', 'gc', 'leak', 'embedding', 'vector', 'hnsw',
        'semantic', 'search', 'index', 'retrieval'],
    agent: ['agent', 'swarm', 'coordinator', 'orchestrator', 'task', 'worker', 'spawn',
        'parallel', 'concurrent', 'async', 'promise', 'queue', 'priority', 'schedule'],
    github: ['github', 'issue', 'branch', 'pr', 'pull', 'request', 'merge', 'commit', 'push',
        'clone', 'fork', 'remote', 'origin', 'main', 'master', 'checkout', 'rebase',
        'squash', 'repository', 'repo', 'gh', 'git', 'assignee', 'label'],
    documentation: ['guidance', 'documentation', 'docs', 'readme', 'guide', 'tutorial',
        'reference', 'standard', 'convention', 'rule', 'policy', 'template',
        'example', 'usage', 'instruction', 'markdown']
};
|
|
1482
|
-
// English stop words: down-weighted in word hashing (0.2 vs 0.5/0.8) and
// skipped for bigram generation in generateDomainAwareEmbedding.
const COMMON_WORDS = new Set([
    'the', 'a', 'an', 'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had',
    'do', 'does', 'did', 'will', 'would', 'could', 'should', 'may', 'might', 'must', 'shall',
    'can', 'need', 'to', 'of', 'in', 'for', 'on', 'with', 'at', 'by', 'from', 'as', 'into',
    'through', 'during', 'before', 'after', 'above', 'below', 'between', 'under', 'and', 'but',
    'or', 'nor', 'so', 'yet', 'both', 'either', 'neither', 'not', 'only', 'own', 'same', 'than',
    'too', 'very', 'just', 'also', 'this', 'that', 'these', 'those', 'it', 'its', 'if', 'then',
    'else', 'when', 'where', 'why', 'how', 'all', 'each', 'every', 'any', 'some', 'no', 'yes',
    'use', 'using', 'used', 'uses', 'get', 'set', 'new', 'see', 'like', 'make', 'made'
]);
|
|
1492
|
-
/**
 * Deterministic 32-bit hash of a string (MurmurHash2-style mixing).
 * Different seeds yield independent hash families, which the embedding
 * code uses to derive multiple indexes/signs per word.
 *
 * @param {string} str - Input string (hashed over UTF-16 code units).
 * @param {number} [seed=0] - Seed mixed into the initial state.
 * @returns {number} Unsigned 32-bit integer.
 */
function hashWord(str, seed = 0) {
    let acc = seed ^ str.length;
    let pos = 0;
    while (pos < str.length) {
        acc ^= str.charCodeAt(pos);
        acc = Math.imul(acc, 0x5bd1e995);
        acc ^= acc >>> 15;
        pos += 1;
    }
    return acc >>> 0;
}
|
|
1501
|
-
// Pre-compute domain signatures: one sparse 384-dim binary vector per
// domain, with 2 hash-derived positions set per keyword. Computed once
// at module load so generateDomainAwareEmbedding only scales and adds.
const domainSignatures = {};
for (const [domain, keywords] of Object.entries(DOMAIN_CLUSTERS)) {
    const sig = new Float32Array(384);
    for (const kw of keywords) {
        // Two probes per keyword; the '_dom_' + domain salt keeps signatures
        // for the same keyword distinct across domains (e.g. 'jwt', 'auth').
        for (let h = 0; h < 2; h++) {
            const idx = hashWord(kw + '_dom_' + domain, h) % 384;
            sig[idx] = 1;
        }
    }
    domainSignatures[domain] = sig;
}
|
|
1513
|
-
/**
 * Deterministic 384-dim "embedding" built from hashes rather than a model.
 * Combines: (1) domain-cluster signatures weighted by keyword matches,
 * (2) per-word hash features with stop-word down-weighting,
 * (3) bigram and trigram hash features. The result is L2-normalized, so
 * cosine similarity between outputs is meaningful.
 *
 * @param {string} text - Input text.
 * @returns {number[]} Length-384 unit vector (all zeros for empty input).
 */
function generateDomainAwareEmbedding(text) {
    const dims = 384;
    const vec = new Float32Array(dims);
    const lowerText = text.toLowerCase();
    // Tokenize: strip non-alphanumerics, split on whitespace, drop 1-char tokens
    const words = lowerText.replace(/[^a-z0-9\s]/g, ' ').split(/\s+/).filter(w => w.length > 1);
    if (words.length === 0)
        return Array.from(vec);
    // Domain cluster weights: substring matches against the raw lowercase text
    for (const [domain, keywords] of Object.entries(DOMAIN_CLUSTERS)) {
        let matchCount = 0;
        for (const kw of keywords) {
            if (lowerText.includes(kw))
                matchCount++;
        }
        if (matchCount > 0) {
            // Weight grows 0.8, 1.1, 1.4, ... with match count, capped at 2.0
            const weight = Math.min(2.0, 0.5 + matchCount * 0.3);
            const sig = domainSignatures[domain];
            for (let i = 0; i < dims; i++)
                vec[i] += sig[i] * weight;
        }
    }
    // Word hashes: 3 signed probes per word; stop words get weight 0.2,
    // long (>6 char) words 0.8, everything else 0.5
    for (const word of words) {
        const isCommon = COMMON_WORDS.has(word);
        const weight = isCommon ? 0.2 : (word.length > 6 ? 0.8 : 0.5);
        for (let h = 0; h < 3; h++) {
            const idx = hashWord(word, h * 17) % dims;
            const sign = (hashWord(word, h * 31 + 1) % 2 === 0) ? 1 : -1;
            vec[idx] += sign * weight;
        }
    }
    // Bigrams: skipped when both words are stop words
    for (let i = 0; i < words.length - 1; i++) {
        if (COMMON_WORDS.has(words[i]) && COMMON_WORDS.has(words[i + 1]))
            continue;
        const bigram = words[i] + '_' + words[i + 1];
        const idx = hashWord(bigram, 42) % dims;
        const sign = (hashWord(bigram, 43) % 2 === 0) ? 1 : -1;
        vec[idx] += sign * 0.4;
    }
    // Trigrams: always included, lowest weight
    for (let i = 0; i < words.length - 2; i++) {
        const trigram = words[i] + '_' + words[i + 1] + '_' + words[i + 2];
        const idx = hashWord(trigram, 99) % dims;
        const sign = (hashWord(trigram, 100) % 2 === 0) ? 1 : -1;
        vec[idx] += sign * 0.3;
    }
    // Normalize to unit length (guarded so an all-zero vector stays zero)
    let norm = 0;
    for (let i = 0; i < dims; i++)
        norm += vec[i] * vec[i];
    norm = Math.sqrt(norm);
    if (norm > 0)
        for (let i = 0; i < dims; i++)
            vec[i] /= norm;
    return Array.from(vec);
}
|
|
1570
|
-
/**
|
|
1571
|
-
* Verify memory initialization works correctly
|
|
1572
|
-
* Tests: write, read, search, patterns
|
|
1573
|
-
*/
|
|
1574
|
-
export async function verifyMemoryInit(dbPath, options) {
|
|
1575
|
-
const { verbose = false } = options || {};
|
|
1576
|
-
const tests = [];
|
|
1577
|
-
try {
|
|
1578
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
1579
|
-
const SQL = await initSqlJs();
|
|
1580
|
-
const fs = await import('fs');
|
|
1581
|
-
// Load database
|
|
1582
|
-
const fileBuffer = fs.readFileSync(dbPath);
|
|
1583
|
-
const db = new SQL.Database(fileBuffer);
|
|
1584
|
-
// Test 1: Schema verification
|
|
1585
|
-
const schemaStart = Date.now();
|
|
1586
|
-
const tables = db.exec("SELECT name FROM sqlite_master WHERE type='table'");
|
|
1587
|
-
const tableNames = tables[0]?.values?.map(v => v[0]) || [];
|
|
1588
|
-
const expectedTables = ['memory_entries', 'patterns', 'metadata', 'vector_indexes'];
|
|
1589
|
-
const missingTables = expectedTables.filter(t => !tableNames.includes(t));
|
|
1590
|
-
tests.push({
|
|
1591
|
-
name: 'Schema verification',
|
|
1592
|
-
passed: missingTables.length === 0,
|
|
1593
|
-
details: missingTables.length > 0 ? `Missing: ${missingTables.join(', ')}` : `${tableNames.length} tables found`,
|
|
1594
|
-
duration: Date.now() - schemaStart
|
|
1595
|
-
});
|
|
1596
|
-
// Test 2: Write entry
|
|
1597
|
-
const writeStart = Date.now();
|
|
1598
|
-
const testId = `test_${Date.now()}`;
|
|
1599
|
-
const testKey = 'verification_test';
|
|
1600
|
-
const testValue = 'This is a verification test entry for memory initialization';
|
|
1601
|
-
try {
|
|
1266
|
+
`;
|
|
1267
|
+
db.run(decayQuery, [now, now, now]);
|
|
1268
|
+
const changes = db.getRowsModified();
|
|
1269
|
+
// Save
|
|
1270
|
+
const data = db.export();
|
|
1271
|
+
fs.writeFileSync(path_, Buffer.from(data));
|
|
1272
|
+
db.close();
|
|
1273
|
+
return {
|
|
1274
|
+
success: true,
|
|
1275
|
+
patternsDecayed: changes
|
|
1276
|
+
};
|
|
1277
|
+
}
|
|
1278
|
+
catch (error) {
|
|
1279
|
+
return {
|
|
1280
|
+
success: false,
|
|
1281
|
+
patternsDecayed: 0,
|
|
1282
|
+
error: error instanceof Error ? error.message : String(error)
|
|
1283
|
+
};
|
|
1284
|
+
}
|
|
1285
|
+
}
|
|
1286
|
+
let embeddingModelState = null;
|
|
1287
|
+
/**
|
|
1288
|
+
* Lazy load ONNX embedding model
|
|
1289
|
+
* Only loads when first embedding is requested
|
|
1290
|
+
*/
|
|
1291
|
+
export async function loadEmbeddingModel(options) {
|
|
1292
|
+
const { verbose = false } = options || {};
|
|
1293
|
+
const startTime = Date.now();
|
|
1294
|
+
// Already loaded
|
|
1295
|
+
if (embeddingModelState?.loaded) {
|
|
1296
|
+
return {
|
|
1297
|
+
success: true,
|
|
1298
|
+
dimensions: embeddingModelState.dimensions,
|
|
1299
|
+
modelName: 'cached',
|
|
1300
|
+
loadTime: 0
|
|
1301
|
+
};
|
|
1302
|
+
}
|
|
1303
|
+
// ADR-053: Try AgentDB v3 bridge first
|
|
1304
|
+
const bridge = await getBridge();
|
|
1305
|
+
if (bridge) {
|
|
1306
|
+
const bridgeResult = await bridge.bridgeLoadEmbeddingModel();
|
|
1307
|
+
if (bridgeResult && bridgeResult.success) {
|
|
1308
|
+
// Mark local state as loaded too so subsequent calls use cache
|
|
1309
|
+
embeddingModelState = {
|
|
1310
|
+
loaded: true,
|
|
1311
|
+
model: null, // Bridge handles embedding
|
|
1312
|
+
tokenizer: null,
|
|
1313
|
+
dimensions: bridgeResult.dimensions
|
|
1314
|
+
};
|
|
1315
|
+
return bridgeResult;
|
|
1316
|
+
}
|
|
1317
|
+
}
|
|
1318
|
+
try {
|
|
1319
|
+
// Try to import @xenova/transformers for ONNX embeddings
|
|
1320
|
+
const transformers = await import('@xenova/transformers').catch(() => null);
|
|
1321
|
+
if (transformers) {
|
|
1322
|
+
if (verbose) {
|
|
1323
|
+
console.log('Loading ONNX embedding model (all-MiniLM-L6-v2)...');
|
|
1324
|
+
}
|
|
1325
|
+
// Use small, fast model for local embeddings
|
|
1326
|
+
const { pipeline } = transformers;
|
|
1327
|
+
const embedder = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
|
|
1328
|
+
embeddingModelState = {
|
|
1329
|
+
loaded: true,
|
|
1330
|
+
model: embedder,
|
|
1331
|
+
tokenizer: null,
|
|
1332
|
+
dimensions: 384 // MiniLM-L6 produces 384-dim vectors
|
|
1333
|
+
};
|
|
1334
|
+
return {
|
|
1335
|
+
success: true,
|
|
1336
|
+
dimensions: 384,
|
|
1337
|
+
modelName: 'all-MiniLM-L6-v2',
|
|
1338
|
+
loadTime: Date.now() - startTime
|
|
1339
|
+
};
|
|
1340
|
+
}
|
|
1341
|
+
// Fallback: Check for agentic-flow ReasoningBank embeddings (v3)
|
|
1342
|
+
const reasoningBank = await import('agentic-flow/reasoningbank').catch(() => null);
|
|
1343
|
+
if (reasoningBank?.computeEmbedding) {
|
|
1344
|
+
if (verbose) {
|
|
1345
|
+
console.log('Loading agentic-flow ReasoningBank embedding model...');
|
|
1346
|
+
}
|
|
1347
|
+
embeddingModelState = {
|
|
1348
|
+
loaded: true,
|
|
1349
|
+
model: { embed: reasoningBank.computeEmbedding },
|
|
1350
|
+
tokenizer: null,
|
|
1351
|
+
dimensions: 768
|
|
1352
|
+
};
|
|
1353
|
+
return {
|
|
1354
|
+
success: true,
|
|
1355
|
+
dimensions: 768,
|
|
1356
|
+
modelName: 'agentic-flow/reasoningbank',
|
|
1357
|
+
loadTime: Date.now() - startTime
|
|
1358
|
+
};
|
|
1359
|
+
}
|
|
1360
|
+
// Legacy fallback: Check for agentic-flow core embeddings
|
|
1361
|
+
const agenticFlow = await import('agentic-flow').catch(() => null);
|
|
1362
|
+
if (agenticFlow && agenticFlow.embeddings) {
|
|
1363
|
+
if (verbose) {
|
|
1364
|
+
console.log('Loading agentic-flow embedding model...');
|
|
1365
|
+
}
|
|
1366
|
+
embeddingModelState = {
|
|
1367
|
+
loaded: true,
|
|
1368
|
+
model: agenticFlow.embeddings,
|
|
1369
|
+
tokenizer: null,
|
|
1370
|
+
dimensions: 768
|
|
1371
|
+
};
|
|
1372
|
+
return {
|
|
1373
|
+
success: true,
|
|
1374
|
+
dimensions: 768,
|
|
1375
|
+
modelName: 'agentic-flow',
|
|
1376
|
+
loadTime: Date.now() - startTime
|
|
1377
|
+
};
|
|
1378
|
+
}
|
|
1379
|
+
// No ONNX model available - use fallback
|
|
1380
|
+
embeddingModelState = {
|
|
1381
|
+
loaded: true,
|
|
1382
|
+
model: null, // Will use domain-aware hash fallback
|
|
1383
|
+
tokenizer: null,
|
|
1384
|
+
dimensions: 384 // Domain-aware hash embedding dimensions
|
|
1385
|
+
};
|
|
1386
|
+
return {
|
|
1387
|
+
success: true,
|
|
1388
|
+
dimensions: 384,
|
|
1389
|
+
modelName: 'domain-aware-hash-384',
|
|
1390
|
+
loadTime: Date.now() - startTime
|
|
1391
|
+
};
|
|
1392
|
+
}
|
|
1393
|
+
catch (error) {
|
|
1394
|
+
return {
|
|
1395
|
+
success: false,
|
|
1396
|
+
dimensions: 0,
|
|
1397
|
+
modelName: 'none',
|
|
1398
|
+
error: error instanceof Error ? error.message : String(error)
|
|
1399
|
+
};
|
|
1400
|
+
}
|
|
1401
|
+
}
|
|
1402
|
+
/**
|
|
1403
|
+
* Generate real embedding for text
|
|
1404
|
+
* Uses ONNX model if available, falls back to deterministic hash
|
|
1405
|
+
*/
|
|
1406
|
+
export async function generateEmbedding(text) {
|
|
1407
|
+
// ADR-053: Try AgentDB v3 bridge first
|
|
1408
|
+
const bridge = await getBridge();
|
|
1409
|
+
if (bridge) {
|
|
1410
|
+
const bridgeResult = await bridge.bridgeGenerateEmbedding(text);
|
|
1411
|
+
if (bridgeResult)
|
|
1412
|
+
return bridgeResult;
|
|
1413
|
+
}
|
|
1414
|
+
// Ensure model is loaded
|
|
1415
|
+
if (!embeddingModelState?.loaded) {
|
|
1416
|
+
await loadEmbeddingModel();
|
|
1417
|
+
}
|
|
1418
|
+
const state = embeddingModelState;
|
|
1419
|
+
// Use ONNX model if available
|
|
1420
|
+
if (state.model && typeof state.model === 'function') {
|
|
1421
|
+
try {
|
|
1422
|
+
const output = await state.model(text, { pooling: 'mean', normalize: true });
|
|
1423
|
+
const embedding = Array.from(output.data);
|
|
1424
|
+
return {
|
|
1425
|
+
embedding,
|
|
1426
|
+
dimensions: embedding.length,
|
|
1427
|
+
model: 'onnx'
|
|
1428
|
+
};
|
|
1429
|
+
}
|
|
1430
|
+
catch {
|
|
1431
|
+
// Fall through to fallback
|
|
1432
|
+
}
|
|
1433
|
+
}
|
|
1434
|
+
// Domain-aware hash fallback (for testing/demo without ONNX)
|
|
1435
|
+
const embedding = generateDomainAwareEmbedding(text);
|
|
1436
|
+
return {
|
|
1437
|
+
embedding,
|
|
1438
|
+
dimensions: 384,
|
|
1439
|
+
model: 'domain-aware-hash-384'
|
|
1440
|
+
};
|
|
1441
|
+
}
|
|
1442
|
+
/**
|
|
1443
|
+
* Generate embeddings for multiple texts
|
|
1444
|
+
* Uses parallel execution for API-based providers (2-4x faster)
|
|
1445
|
+
* Note: Local ONNX inference is CPU-bound, so parallelism has limited benefit
|
|
1446
|
+
*
|
|
1447
|
+
* @param texts - Array of texts to embed
|
|
1448
|
+
* @param options - Batch options
|
|
1449
|
+
* @returns Array of embedding results with timing info
|
|
1450
|
+
*/
|
|
1451
|
+
export async function generateBatchEmbeddings(texts, options) {
|
|
1452
|
+
const { concurrency = texts.length, onProgress } = options || {};
|
|
1453
|
+
const startTime = Date.now();
|
|
1454
|
+
// Ensure model is loaded first (prevents cold start in parallel)
|
|
1455
|
+
if (!embeddingModelState?.loaded) {
|
|
1456
|
+
await loadEmbeddingModel();
|
|
1457
|
+
}
|
|
1458
|
+
// Process in parallel with optional concurrency limit
|
|
1459
|
+
if (concurrency >= texts.length) {
|
|
1460
|
+
// Full parallelism
|
|
1461
|
+
const embeddings = await Promise.all(texts.map(async (text, i) => {
|
|
1462
|
+
const result = await generateEmbedding(text);
|
|
1463
|
+
onProgress?.(i + 1, texts.length);
|
|
1464
|
+
return { text, ...result };
|
|
1465
|
+
}));
|
|
1466
|
+
const totalTime = Date.now() - startTime;
|
|
1467
|
+
return {
|
|
1468
|
+
results: embeddings,
|
|
1469
|
+
totalTime,
|
|
1470
|
+
avgTime: totalTime / texts.length
|
|
1471
|
+
};
|
|
1472
|
+
}
|
|
1473
|
+
// Limited concurrency using chunking
|
|
1474
|
+
const results = [];
|
|
1475
|
+
let completed = 0;
|
|
1476
|
+
for (let i = 0; i < texts.length; i += concurrency) {
|
|
1477
|
+
const chunk = texts.slice(i, i + concurrency);
|
|
1478
|
+
const chunkResults = await Promise.all(chunk.map(async (text) => {
|
|
1479
|
+
const result = await generateEmbedding(text);
|
|
1480
|
+
completed++;
|
|
1481
|
+
onProgress?.(completed, texts.length);
|
|
1482
|
+
return { text, ...result };
|
|
1483
|
+
}));
|
|
1484
|
+
results.push(...chunkResults);
|
|
1485
|
+
}
|
|
1486
|
+
const totalTime = Date.now() - startTime;
|
|
1487
|
+
return {
|
|
1488
|
+
results,
|
|
1489
|
+
totalTime,
|
|
1490
|
+
avgTime: totalTime / texts.length
|
|
1491
|
+
};
|
|
1492
|
+
}
|
|
1493
|
+
/**
|
|
1494
|
+
* Domain-aware semantic hash embeddings (384-dim)
|
|
1495
|
+
* Provides consistent embedding dimensions between CLI and MCP tools.
|
|
1496
|
+
* Uses domain cluster awareness for better semantic similarity.
|
|
1497
|
+
*/
|
|
1498
|
+
const DOMAIN_CLUSTERS = {
|
|
1499
|
+
database: ['typeorm', 'mongodb', 'database', 'entity', 'schema', 'table', 'collection',
|
|
1500
|
+
'query', 'sql', 'nosql', 'orm', 'model', 'migration', 'repository', 'column',
|
|
1501
|
+
'relation', 'foreign', 'primary', 'index', 'constraint', 'transaction'],
|
|
1502
|
+
frontend: ['react', 'component', 'ui', 'styling', 'css', 'html', 'jsx', 'tsx', 'frontend',
|
|
1503
|
+
'material', 'mui', 'tailwind', 'dom', 'render', 'hook', 'state', 'props',
|
|
1504
|
+
'redux', 'context', 'styled', 'emotion', 'theme', 'layout', 'responsive'],
|
|
1505
|
+
backend: ['fastify', 'api', 'route', 'handler', 'rest', 'endpoint', 'server', 'controller',
|
|
1506
|
+
'middleware', 'request', 'response', 'http', 'express', 'nest', 'graphql',
|
|
1507
|
+
'websocket', 'socket', 'cors', 'auth', 'jwt', 'session', 'cookie'],
|
|
1508
|
+
testing: ['test', 'testing', 'vitest', 'jest', 'mock', 'spy', 'assert', 'expect', 'describe',
|
|
1509
|
+
'it', 'spec', 'unit', 'integration', 'e2e', 'playwright', 'cypress', 'coverage',
|
|
1510
|
+
'fixture', 'stub', 'fake', 'snapshot', 'beforeeach', 'aftereach'],
|
|
1511
|
+
tenancy: ['tenant', 'tenancy', 'companyid', 'company', 'isolation', 'multi', 'multitenant',
|
|
1512
|
+
'organization', 'workspace', 'account', 'customer', 'client'],
|
|
1513
|
+
security: ['security', 'auth', 'authentication', 'authorization', 'permission', 'role',
|
|
1514
|
+
'access', 'token', 'jwt', 'oauth', 'password', 'encrypt', 'hash', 'salt',
|
|
1515
|
+
'csrf', 'xss', 'injection', 'sanitize', 'validate'],
|
|
1516
|
+
patterns: ['pattern', 'service', 'factory', 'singleton', 'decorator', 'adapter', 'facade',
|
|
1517
|
+
'observer', 'strategy', 'command', 'repository', 'usecase', 'domain', 'ddd',
|
|
1518
|
+
'clean', 'architecture', 'solid', 'dry', 'kiss'],
|
|
1519
|
+
workflow: ['workflow', 'pipeline', 'ci', 'cd', 'deploy', 'build', 'actions',
|
|
1520
|
+
'hook', 'trigger', 'job', 'step', 'artifact', 'release', 'version', 'tag'],
|
|
1521
|
+
memory: ['memory', 'cache', 'store', 'persist', 'storage', 'redis', 'session', 'state',
|
|
1522
|
+
'buffer', 'queue', 'stack', 'heap', 'gc', 'leak', 'embedding', 'vector', 'hnsw',
|
|
1523
|
+
'semantic', 'search', 'index', 'retrieval'],
|
|
1524
|
+
agent: ['agent', 'swarm', 'coordinator', 'orchestrator', 'task', 'worker', 'spawn',
|
|
1525
|
+
'parallel', 'concurrent', 'async', 'promise', 'queue', 'priority', 'schedule'],
|
|
1526
|
+
github: ['github', 'issue', 'branch', 'pr', 'pull', 'request', 'merge', 'commit', 'push',
|
|
1527
|
+
'clone', 'fork', 'remote', 'origin', 'main', 'master', 'checkout', 'rebase',
|
|
1528
|
+
'squash', 'repository', 'repo', 'gh', 'git', 'assignee', 'label'],
|
|
1529
|
+
documentation: ['guidance', 'documentation', 'docs', 'readme', 'guide', 'tutorial',
|
|
1530
|
+
'reference', 'standard', 'convention', 'rule', 'policy', 'template',
|
|
1531
|
+
'example', 'usage', 'instruction', 'markdown']
|
|
1532
|
+
};
|
|
1533
|
+
const COMMON_WORDS = new Set([
|
|
1534
|
+
'the', 'a', 'an', 'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had',
|
|
1535
|
+
'do', 'does', 'did', 'will', 'would', 'could', 'should', 'may', 'might', 'must', 'shall',
|
|
1536
|
+
'can', 'need', 'to', 'of', 'in', 'for', 'on', 'with', 'at', 'by', 'from', 'as', 'into',
|
|
1537
|
+
'through', 'during', 'before', 'after', 'above', 'below', 'between', 'under', 'and', 'but',
|
|
1538
|
+
'or', 'nor', 'so', 'yet', 'both', 'either', 'neither', 'not', 'only', 'own', 'same', 'than',
|
|
1539
|
+
'too', 'very', 'just', 'also', 'this', 'that', 'these', 'those', 'it', 'its', 'if', 'then',
|
|
1540
|
+
'else', 'when', 'where', 'why', 'how', 'all', 'each', 'every', 'any', 'some', 'no', 'yes',
|
|
1541
|
+
'use', 'using', 'used', 'uses', 'get', 'set', 'new', 'see', 'like', 'make', 'made'
|
|
1542
|
+
]);
|
|
1543
|
+
function hashWord(str, seed = 0) {
|
|
1544
|
+
let h = seed ^ str.length;
|
|
1545
|
+
for (let i = 0; i < str.length; i++) {
|
|
1546
|
+
h ^= str.charCodeAt(i);
|
|
1547
|
+
h = Math.imul(h, 0x5bd1e995);
|
|
1548
|
+
h ^= h >>> 15;
|
|
1549
|
+
}
|
|
1550
|
+
return h >>> 0;
|
|
1551
|
+
}
|
|
1552
|
+
// Pre-compute domain signatures
|
|
1553
|
+
const domainSignatures = {};
|
|
1554
|
+
for (const [domain, keywords] of Object.entries(DOMAIN_CLUSTERS)) {
|
|
1555
|
+
const sig = new Float32Array(384);
|
|
1556
|
+
for (const kw of keywords) {
|
|
1557
|
+
for (let h = 0; h < 2; h++) {
|
|
1558
|
+
const idx = hashWord(kw + '_dom_' + domain, h) % 384;
|
|
1559
|
+
sig[idx] = 1;
|
|
1560
|
+
}
|
|
1561
|
+
}
|
|
1562
|
+
domainSignatures[domain] = sig;
|
|
1563
|
+
}
|
|
1564
|
+
function generateDomainAwareEmbedding(text) {
|
|
1565
|
+
const dims = 384;
|
|
1566
|
+
const vec = new Float32Array(dims);
|
|
1567
|
+
const lowerText = text.toLowerCase();
|
|
1568
|
+
const words = lowerText.replace(/[^a-z0-9\s]/g, ' ').split(/\s+/).filter(w => w.length > 1);
|
|
1569
|
+
if (words.length === 0)
|
|
1570
|
+
return Array.from(vec);
|
|
1571
|
+
// Domain cluster weights
|
|
1572
|
+
for (const [domain, keywords] of Object.entries(DOMAIN_CLUSTERS)) {
|
|
1573
|
+
let matchCount = 0;
|
|
1574
|
+
for (const kw of keywords) {
|
|
1575
|
+
if (lowerText.includes(kw))
|
|
1576
|
+
matchCount++;
|
|
1577
|
+
}
|
|
1578
|
+
if (matchCount > 0) {
|
|
1579
|
+
const weight = Math.min(2.0, 0.5 + matchCount * 0.3);
|
|
1580
|
+
const sig = domainSignatures[domain];
|
|
1581
|
+
for (let i = 0; i < dims; i++)
|
|
1582
|
+
vec[i] += sig[i] * weight;
|
|
1583
|
+
}
|
|
1584
|
+
}
|
|
1585
|
+
// Word hashes
|
|
1586
|
+
for (const word of words) {
|
|
1587
|
+
const isCommon = COMMON_WORDS.has(word);
|
|
1588
|
+
const weight = isCommon ? 0.2 : (word.length > 6 ? 0.8 : 0.5);
|
|
1589
|
+
for (let h = 0; h < 3; h++) {
|
|
1590
|
+
const idx = hashWord(word, h * 17) % dims;
|
|
1591
|
+
const sign = (hashWord(word, h * 31 + 1) % 2 === 0) ? 1 : -1;
|
|
1592
|
+
vec[idx] += sign * weight;
|
|
1593
|
+
}
|
|
1594
|
+
}
|
|
1595
|
+
// Bigrams
|
|
1596
|
+
for (let i = 0; i < words.length - 1; i++) {
|
|
1597
|
+
if (COMMON_WORDS.has(words[i]) && COMMON_WORDS.has(words[i + 1]))
|
|
1598
|
+
continue;
|
|
1599
|
+
const bigram = words[i] + '_' + words[i + 1];
|
|
1600
|
+
const idx = hashWord(bigram, 42) % dims;
|
|
1601
|
+
const sign = (hashWord(bigram, 43) % 2 === 0) ? 1 : -1;
|
|
1602
|
+
vec[idx] += sign * 0.4;
|
|
1603
|
+
}
|
|
1604
|
+
// Trigrams
|
|
1605
|
+
for (let i = 0; i < words.length - 2; i++) {
|
|
1606
|
+
const trigram = words[i] + '_' + words[i + 1] + '_' + words[i + 2];
|
|
1607
|
+
const idx = hashWord(trigram, 99) % dims;
|
|
1608
|
+
const sign = (hashWord(trigram, 100) % 2 === 0) ? 1 : -1;
|
|
1609
|
+
vec[idx] += sign * 0.3;
|
|
1610
|
+
}
|
|
1611
|
+
// Normalize
|
|
1612
|
+
let norm = 0;
|
|
1613
|
+
for (let i = 0; i < dims; i++)
|
|
1614
|
+
norm += vec[i] * vec[i];
|
|
1615
|
+
norm = Math.sqrt(norm);
|
|
1616
|
+
if (norm > 0)
|
|
1617
|
+
for (let i = 0; i < dims; i++)
|
|
1618
|
+
vec[i] /= norm;
|
|
1619
|
+
return Array.from(vec);
|
|
1620
|
+
}
|
|
1621
|
+
/**
|
|
1622
|
+
* Verify memory initialization works correctly
|
|
1623
|
+
* Tests: write, read, search, patterns
|
|
1624
|
+
*/
|
|
1625
|
+
export async function verifyMemoryInit(dbPath, options) {
|
|
1626
|
+
const { verbose = false } = options || {};
|
|
1627
|
+
const tests = [];
|
|
1628
|
+
try {
|
|
1629
|
+
const initSqlJs = (await import('sql.js')).default;
|
|
1630
|
+
const SQL = await initSqlJs();
|
|
1631
|
+
const fs = await import('fs');
|
|
1632
|
+
// Load database
|
|
1633
|
+
const fileBuffer = fs.readFileSync(dbPath);
|
|
1634
|
+
const db = new SQL.Database(fileBuffer);
|
|
1635
|
+
// Test 1: Schema verification
|
|
1636
|
+
const schemaStart = Date.now();
|
|
1637
|
+
const tables = db.exec("SELECT name FROM sqlite_master WHERE type='table'");
|
|
1638
|
+
const tableNames = tables[0]?.values?.map(v => v[0]) || [];
|
|
1639
|
+
const expectedTables = ['memory_entries', 'patterns', 'metadata', 'vector_indexes'];
|
|
1640
|
+
const missingTables = expectedTables.filter(t => !tableNames.includes(t));
|
|
1641
|
+
tests.push({
|
|
1642
|
+
name: 'Schema verification',
|
|
1643
|
+
passed: missingTables.length === 0,
|
|
1644
|
+
details: missingTables.length > 0 ? `Missing: ${missingTables.join(', ')}` : `${tableNames.length} tables found`,
|
|
1645
|
+
duration: Date.now() - schemaStart
|
|
1646
|
+
});
|
|
1647
|
+
// Test 2: Write entry
|
|
1648
|
+
const writeStart = Date.now();
|
|
1649
|
+
const testId = `test_${Date.now()}`;
|
|
1650
|
+
const testKey = 'verification_test';
|
|
1651
|
+
const testValue = 'This is a verification test entry for memory initialization';
|
|
1652
|
+
try {
|
|
1602
1653
|
db.run(`
|
|
1603
1654
|
INSERT INTO memory_entries (id, key, namespace, content, type, created_at, updated_at)
|
|
1604
1655
|
VALUES (?, ?, 'test', ?, 'semantic', ?, ?)
|
|
1605
|
-
`, [testId, testKey, testValue, Date.now(), Date.now()]);
|
|
1606
|
-
tests.push({
|
|
1607
|
-
name: 'Write entry',
|
|
1608
|
-
passed: true,
|
|
1609
|
-
details: 'Entry written successfully',
|
|
1610
|
-
duration: Date.now() - writeStart
|
|
1611
|
-
});
|
|
1612
|
-
}
|
|
1613
|
-
catch (e) {
|
|
1614
|
-
tests.push({
|
|
1615
|
-
name: 'Write entry',
|
|
1616
|
-
passed: false,
|
|
1617
|
-
details: e instanceof Error ? e.message : 'Write failed',
|
|
1618
|
-
duration: Date.now() - writeStart
|
|
1619
|
-
});
|
|
1620
|
-
}
|
|
1621
|
-
// Test 3: Read entry
|
|
1622
|
-
const readStart = Date.now();
|
|
1623
|
-
try {
|
|
1624
|
-
const result = db.exec(`SELECT content FROM memory_entries WHERE id = ?`, [testId]);
|
|
1625
|
-
const content = result[0]?.values[0]?.[0];
|
|
1626
|
-
tests.push({
|
|
1627
|
-
name: 'Read entry',
|
|
1628
|
-
passed: content === testValue,
|
|
1629
|
-
details: content === testValue ? 'Content matches' : 'Content mismatch',
|
|
1630
|
-
duration: Date.now() - readStart
|
|
1631
|
-
});
|
|
1632
|
-
}
|
|
1633
|
-
catch (e) {
|
|
1634
|
-
tests.push({
|
|
1635
|
-
name: 'Read entry',
|
|
1636
|
-
passed: false,
|
|
1637
|
-
details: e instanceof Error ? e.message : 'Read failed',
|
|
1638
|
-
duration: Date.now() - readStart
|
|
1639
|
-
});
|
|
1640
|
-
}
|
|
1641
|
-
// Test 4: Write with embedding
|
|
1642
|
-
const embeddingStart = Date.now();
|
|
1643
|
-
try {
|
|
1644
|
-
const { embedding, dimensions, model } = await generateEmbedding(testValue);
|
|
1645
|
-
const embeddingJson = JSON.stringify(embedding);
|
|
1656
|
+
`, [testId, testKey, testValue, Date.now(), Date.now()]);
|
|
1657
|
+
tests.push({
|
|
1658
|
+
name: 'Write entry',
|
|
1659
|
+
passed: true,
|
|
1660
|
+
details: 'Entry written successfully',
|
|
1661
|
+
duration: Date.now() - writeStart
|
|
1662
|
+
});
|
|
1663
|
+
}
|
|
1664
|
+
catch (e) {
|
|
1665
|
+
tests.push({
|
|
1666
|
+
name: 'Write entry',
|
|
1667
|
+
passed: false,
|
|
1668
|
+
details: e instanceof Error ? e.message : 'Write failed',
|
|
1669
|
+
duration: Date.now() - writeStart
|
|
1670
|
+
});
|
|
1671
|
+
}
|
|
1672
|
+
// Test 3: Read entry
|
|
1673
|
+
const readStart = Date.now();
|
|
1674
|
+
try {
|
|
1675
|
+
const result = db.exec(`SELECT content FROM memory_entries WHERE id = ?`, [testId]);
|
|
1676
|
+
const content = result[0]?.values[0]?.[0];
|
|
1677
|
+
tests.push({
|
|
1678
|
+
name: 'Read entry',
|
|
1679
|
+
passed: content === testValue,
|
|
1680
|
+
details: content === testValue ? 'Content matches' : 'Content mismatch',
|
|
1681
|
+
duration: Date.now() - readStart
|
|
1682
|
+
});
|
|
1683
|
+
}
|
|
1684
|
+
catch (e) {
|
|
1685
|
+
tests.push({
|
|
1686
|
+
name: 'Read entry',
|
|
1687
|
+
passed: false,
|
|
1688
|
+
details: e instanceof Error ? e.message : 'Read failed',
|
|
1689
|
+
duration: Date.now() - readStart
|
|
1690
|
+
});
|
|
1691
|
+
}
|
|
1692
|
+
// Test 4: Write with embedding
|
|
1693
|
+
const embeddingStart = Date.now();
|
|
1694
|
+
try {
|
|
1695
|
+
const { embedding, dimensions, model } = await generateEmbedding(testValue);
|
|
1696
|
+
const embeddingJson = JSON.stringify(embedding);
|
|
1646
1697
|
db.run(`
|
|
1647
1698
|
UPDATE memory_entries
|
|
1648
1699
|
SET embedding = ?, embedding_dimensions = ?, embedding_model = ?
|
|
1649
1700
|
WHERE id = ?
|
|
1650
|
-
`, [embeddingJson, dimensions, model, testId]);
|
|
1651
|
-
tests.push({
|
|
1652
|
-
name: 'Generate embedding',
|
|
1653
|
-
passed: true,
|
|
1654
|
-
details: `${dimensions}-dim vector (${model})`,
|
|
1655
|
-
duration: Date.now() - embeddingStart
|
|
1656
|
-
});
|
|
1657
|
-
}
|
|
1658
|
-
catch (e) {
|
|
1659
|
-
tests.push({
|
|
1660
|
-
name: 'Generate embedding',
|
|
1661
|
-
passed: false,
|
|
1662
|
-
details: e instanceof Error ? e.message : 'Embedding failed',
|
|
1663
|
-
duration: Date.now() - embeddingStart
|
|
1664
|
-
});
|
|
1665
|
-
}
|
|
1666
|
-
// Test 5: Pattern storage
|
|
1667
|
-
const patternStart = Date.now();
|
|
1668
|
-
try {
|
|
1669
|
-
const patternId = `pattern_${Date.now()}`;
|
|
1701
|
+
`, [embeddingJson, dimensions, model, testId]);
|
|
1702
|
+
tests.push({
|
|
1703
|
+
name: 'Generate embedding',
|
|
1704
|
+
passed: true,
|
|
1705
|
+
details: `${dimensions}-dim vector (${model})`,
|
|
1706
|
+
duration: Date.now() - embeddingStart
|
|
1707
|
+
});
|
|
1708
|
+
}
|
|
1709
|
+
catch (e) {
|
|
1710
|
+
tests.push({
|
|
1711
|
+
name: 'Generate embedding',
|
|
1712
|
+
passed: false,
|
|
1713
|
+
details: e instanceof Error ? e.message : 'Embedding failed',
|
|
1714
|
+
duration: Date.now() - embeddingStart
|
|
1715
|
+
});
|
|
1716
|
+
}
|
|
1717
|
+
// Test 5: Pattern storage
|
|
1718
|
+
const patternStart = Date.now();
|
|
1719
|
+
try {
|
|
1720
|
+
const patternId = `pattern_${Date.now()}`;
|
|
1670
1721
|
db.run(`
|
|
1671
1722
|
INSERT INTO patterns (id, name, pattern_type, condition, action, confidence, created_at, updated_at)
|
|
1672
1723
|
VALUES (?, 'test-pattern', 'task-routing', 'test condition', 'test action', 0.5, ?, ?)
|
|
1673
|
-
`, [patternId, Date.now(), Date.now()]);
|
|
1674
|
-
tests.push({
|
|
1675
|
-
name: 'Pattern storage',
|
|
1676
|
-
passed: true,
|
|
1677
|
-
details: 'Pattern stored with confidence scoring',
|
|
1678
|
-
duration: Date.now() - patternStart
|
|
1679
|
-
});
|
|
1680
|
-
// Cleanup test pattern
|
|
1681
|
-
db.run(`DELETE FROM patterns WHERE id = ?`, [patternId]);
|
|
1682
|
-
}
|
|
1683
|
-
catch (e) {
|
|
1684
|
-
tests.push({
|
|
1685
|
-
name: 'Pattern storage',
|
|
1686
|
-
passed: false,
|
|
1687
|
-
details: e instanceof Error ? e.message : 'Pattern storage failed',
|
|
1688
|
-
duration: Date.now() - patternStart
|
|
1689
|
-
});
|
|
1690
|
-
}
|
|
1691
|
-
// Test 6: Vector index configuration
|
|
1692
|
-
const indexStart = Date.now();
|
|
1693
|
-
try {
|
|
1694
|
-
const indexResult = db.exec(`SELECT name, dimensions, hnsw_m, hnsw_ef_construction FROM vector_indexes`);
|
|
1695
|
-
const indexes = indexResult[0]?.values || [];
|
|
1696
|
-
tests.push({
|
|
1697
|
-
name: 'Vector index config',
|
|
1698
|
-
passed: indexes.length > 0,
|
|
1699
|
-
details: `${indexes.length} indexes configured (HNSW M=16, ef=200)`,
|
|
1700
|
-
duration: Date.now() - indexStart
|
|
1701
|
-
});
|
|
1702
|
-
}
|
|
1703
|
-
catch (e) {
|
|
1704
|
-
tests.push({
|
|
1705
|
-
name: 'Vector index config',
|
|
1706
|
-
passed: false,
|
|
1707
|
-
details: e instanceof Error ? e.message : 'Index check failed',
|
|
1708
|
-
duration: Date.now() - indexStart
|
|
1709
|
-
});
|
|
1710
|
-
}
|
|
1711
|
-
// Cleanup test entry
|
|
1712
|
-
db.run(`DELETE FROM memory_entries WHERE id = ?`, [testId]);
|
|
1713
|
-
// Save changes
|
|
1714
|
-
const data = db.export();
|
|
1715
|
-
fs.writeFileSync(dbPath, Buffer.from(data));
|
|
1716
|
-
db.close();
|
|
1717
|
-
const passed = tests.filter(t => t.passed).length;
|
|
1718
|
-
const failed = tests.filter(t => !t.passed).length;
|
|
1719
|
-
return {
|
|
1720
|
-
success: failed === 0,
|
|
1721
|
-
tests,
|
|
1722
|
-
summary: {
|
|
1723
|
-
passed,
|
|
1724
|
-
failed,
|
|
1725
|
-
total: tests.length
|
|
1726
|
-
}
|
|
1727
|
-
};
|
|
1728
|
-
}
|
|
1729
|
-
catch (error) {
|
|
1730
|
-
return {
|
|
1731
|
-
success: false,
|
|
1732
|
-
tests: [{
|
|
1733
|
-
name: 'Database access',
|
|
1734
|
-
passed: false,
|
|
1735
|
-
details: error instanceof Error ? error.message : 'Unknown error'
|
|
1736
|
-
}],
|
|
1737
|
-
summary: { passed: 0, failed: 1, total: 1 }
|
|
1738
|
-
};
|
|
1739
|
-
}
|
|
1740
|
-
}
|
|
1741
|
-
/**
|
|
1742
|
-
* Store an entry directly using sql.js
|
|
1743
|
-
* This bypasses MCP and writes directly to the database
|
|
1744
|
-
*/
|
|
1745
|
-
export async function storeEntry(options) {
|
|
1746
|
-
// ADR-053: Try AgentDB v3 bridge first
|
|
1747
|
-
const bridge = await getBridge();
|
|
1748
|
-
if (bridge) {
|
|
1749
|
-
const bridgeResult = await bridge.bridgeStoreEntry(options);
|
|
1750
|
-
if (bridgeResult)
|
|
1751
|
-
return bridgeResult;
|
|
1752
|
-
}
|
|
1753
|
-
// Fallback: raw sql.js
|
|
1754
|
-
const { key, value, namespace = 'default', generateEmbeddingFlag = true, tags = [], ttl, dbPath: customPath, upsert = false } = options;
|
|
1755
|
-
const swarmDir = path.join(
|
|
1756
|
-
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
1757
|
-
try {
|
|
1758
|
-
if (!fs.existsSync(dbPath)) {
|
|
1759
|
-
return { success: false, id: '', error: 'Database not initialized. Run: claude-flow memory init' };
|
|
1760
|
-
}
|
|
1761
|
-
// Ensure schema has all required columns (migration for older DBs)
|
|
1762
|
-
await ensureSchemaColumns(dbPath);
|
|
1763
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
1764
|
-
const SQL = await initSqlJs();
|
|
1765
|
-
const fileBuffer = fs.readFileSync(dbPath);
|
|
1766
|
-
const db = new SQL.Database(fileBuffer);
|
|
1767
|
-
const id = `entry_${Date.now()}_${Math.random().toString(36).substring(7)}`;
|
|
1768
|
-
const now = Date.now();
|
|
1769
|
-
// Generate embedding if requested
|
|
1770
|
-
let embeddingJson = null;
|
|
1771
|
-
let embeddingDimensions = null;
|
|
1772
|
-
let embeddingModel = null;
|
|
1773
|
-
if (generateEmbeddingFlag && value.length > 0) {
|
|
1774
|
-
const embResult = await generateEmbedding(value);
|
|
1775
|
-
embeddingJson = JSON.stringify(embResult.embedding);
|
|
1776
|
-
embeddingDimensions = embResult.dimensions;
|
|
1777
|
-
embeddingModel = embResult.model;
|
|
1778
|
-
}
|
|
1779
|
-
// Insert or update entry (upsert mode uses REPLACE)
|
|
1780
|
-
const insertSql = upsert
|
|
1724
|
+
`, [patternId, Date.now(), Date.now()]);
|
|
1725
|
+
tests.push({
|
|
1726
|
+
name: 'Pattern storage',
|
|
1727
|
+
passed: true,
|
|
1728
|
+
details: 'Pattern stored with confidence scoring',
|
|
1729
|
+
duration: Date.now() - patternStart
|
|
1730
|
+
});
|
|
1731
|
+
// Cleanup test pattern
|
|
1732
|
+
db.run(`DELETE FROM patterns WHERE id = ?`, [patternId]);
|
|
1733
|
+
}
|
|
1734
|
+
catch (e) {
|
|
1735
|
+
tests.push({
|
|
1736
|
+
name: 'Pattern storage',
|
|
1737
|
+
passed: false,
|
|
1738
|
+
details: e instanceof Error ? e.message : 'Pattern storage failed',
|
|
1739
|
+
duration: Date.now() - patternStart
|
|
1740
|
+
});
|
|
1741
|
+
}
|
|
1742
|
+
// Test 6: Vector index configuration
|
|
1743
|
+
const indexStart = Date.now();
|
|
1744
|
+
try {
|
|
1745
|
+
const indexResult = db.exec(`SELECT name, dimensions, hnsw_m, hnsw_ef_construction FROM vector_indexes`);
|
|
1746
|
+
const indexes = indexResult[0]?.values || [];
|
|
1747
|
+
tests.push({
|
|
1748
|
+
name: 'Vector index config',
|
|
1749
|
+
passed: indexes.length > 0,
|
|
1750
|
+
details: `${indexes.length} indexes configured (HNSW M=16, ef=200)`,
|
|
1751
|
+
duration: Date.now() - indexStart
|
|
1752
|
+
});
|
|
1753
|
+
}
|
|
1754
|
+
catch (e) {
|
|
1755
|
+
tests.push({
|
|
1756
|
+
name: 'Vector index config',
|
|
1757
|
+
passed: false,
|
|
1758
|
+
details: e instanceof Error ? e.message : 'Index check failed',
|
|
1759
|
+
duration: Date.now() - indexStart
|
|
1760
|
+
});
|
|
1761
|
+
}
|
|
1762
|
+
// Cleanup test entry
|
|
1763
|
+
db.run(`DELETE FROM memory_entries WHERE id = ?`, [testId]);
|
|
1764
|
+
// Save changes
|
|
1765
|
+
const data = db.export();
|
|
1766
|
+
fs.writeFileSync(dbPath, Buffer.from(data));
|
|
1767
|
+
db.close();
|
|
1768
|
+
const passed = tests.filter(t => t.passed).length;
|
|
1769
|
+
const failed = tests.filter(t => !t.passed).length;
|
|
1770
|
+
return {
|
|
1771
|
+
success: failed === 0,
|
|
1772
|
+
tests,
|
|
1773
|
+
summary: {
|
|
1774
|
+
passed,
|
|
1775
|
+
failed,
|
|
1776
|
+
total: tests.length
|
|
1777
|
+
}
|
|
1778
|
+
};
|
|
1779
|
+
}
|
|
1780
|
+
catch (error) {
|
|
1781
|
+
return {
|
|
1782
|
+
success: false,
|
|
1783
|
+
tests: [{
|
|
1784
|
+
name: 'Database access',
|
|
1785
|
+
passed: false,
|
|
1786
|
+
details: error instanceof Error ? error.message : 'Unknown error'
|
|
1787
|
+
}],
|
|
1788
|
+
summary: { passed: 0, failed: 1, total: 1 }
|
|
1789
|
+
};
|
|
1790
|
+
}
|
|
1791
|
+
}
|
|
1792
|
+
/**
|
|
1793
|
+
* Store an entry directly using sql.js
|
|
1794
|
+
* This bypasses MCP and writes directly to the database
|
|
1795
|
+
*/
|
|
1796
|
+
export async function storeEntry(options) {
|
|
1797
|
+
// ADR-053: Try AgentDB v3 bridge first
|
|
1798
|
+
const bridge = await getBridge();
|
|
1799
|
+
if (bridge) {
|
|
1800
|
+
const bridgeResult = await bridge.bridgeStoreEntry(options);
|
|
1801
|
+
if (bridgeResult)
|
|
1802
|
+
return bridgeResult;
|
|
1803
|
+
}
|
|
1804
|
+
// Fallback: raw sql.js
|
|
1805
|
+
const { key, value, namespace = 'default', generateEmbeddingFlag = true, tags = [], ttl, dbPath: customPath, upsert = false } = options;
|
|
1806
|
+
const swarmDir = path.join(getProjectRoot(), '.swarm');
|
|
1807
|
+
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
1808
|
+
try {
|
|
1809
|
+
if (!fs.existsSync(dbPath)) {
|
|
1810
|
+
return { success: false, id: '', error: 'Database not initialized. Run: claude-flow memory init' };
|
|
1811
|
+
}
|
|
1812
|
+
// Ensure schema has all required columns (migration for older DBs)
|
|
1813
|
+
await ensureSchemaColumns(dbPath);
|
|
1814
|
+
const initSqlJs = (await import('sql.js')).default;
|
|
1815
|
+
const SQL = await initSqlJs();
|
|
1816
|
+
const fileBuffer = fs.readFileSync(dbPath);
|
|
1817
|
+
const db = new SQL.Database(fileBuffer);
|
|
1818
|
+
const id = `entry_${Date.now()}_${Math.random().toString(36).substring(7)}`;
|
|
1819
|
+
const now = Date.now();
|
|
1820
|
+
// Generate embedding if requested
|
|
1821
|
+
let embeddingJson = null;
|
|
1822
|
+
let embeddingDimensions = null;
|
|
1823
|
+
let embeddingModel = null;
|
|
1824
|
+
if (generateEmbeddingFlag && value.length > 0) {
|
|
1825
|
+
const embResult = await generateEmbedding(value);
|
|
1826
|
+
embeddingJson = JSON.stringify(embResult.embedding);
|
|
1827
|
+
embeddingDimensions = embResult.dimensions;
|
|
1828
|
+
embeddingModel = embResult.model;
|
|
1829
|
+
}
|
|
1830
|
+
// Insert or update entry (upsert mode uses REPLACE)
|
|
1831
|
+
const insertSql = upsert
|
|
1781
1832
|
? `INSERT OR REPLACE INTO memory_entries (
|
|
1782
1833
|
id, key, namespace, content, type,
|
|
1783
1834
|
embedding, embedding_dimensions, embedding_model,
|
|
1784
1835
|
tags, metadata, created_at, updated_at, expires_at, status
|
|
1785
|
-
) VALUES (?, ?, ?, ?, 'semantic', ?, ?, ?, ?, ?, ?, ?, ?, 'active')`
|
|
1836
|
+
) VALUES (?, ?, ?, ?, 'semantic', ?, ?, ?, ?, ?, ?, ?, ?, 'active')`
|
|
1786
1837
|
: `INSERT INTO memory_entries (
|
|
1787
1838
|
id, key, namespace, content, type,
|
|
1788
1839
|
embedding, embedding_dimensions, embedding_model,
|
|
1789
1840
|
tags, metadata, created_at, updated_at, expires_at, status
|
|
1790
|
-
) VALUES (?, ?, ?, ?, 'semantic', ?, ?, ?, ?, ?, ?, ?, ?, 'active')`;
|
|
1791
|
-
db.run(insertSql, [
|
|
1792
|
-
id,
|
|
1793
|
-
key,
|
|
1794
|
-
namespace,
|
|
1795
|
-
value,
|
|
1796
|
-
embeddingJson,
|
|
1797
|
-
embeddingDimensions,
|
|
1798
|
-
embeddingModel,
|
|
1799
|
-
tags.length > 0 ? JSON.stringify(tags) : null,
|
|
1800
|
-
'{}',
|
|
1801
|
-
now,
|
|
1802
|
-
now,
|
|
1803
|
-
ttl ? now + (ttl * 1000) : null
|
|
1804
|
-
]);
|
|
1805
|
-
// Save
|
|
1806
|
-
const data = db.export();
|
|
1807
|
-
fs.writeFileSync(dbPath, Buffer.from(data));
|
|
1808
|
-
db.close();
|
|
1809
|
-
// Add to HNSW index for faster future searches
|
|
1810
|
-
if (embeddingJson) {
|
|
1811
|
-
const embResult = JSON.parse(embeddingJson);
|
|
1812
|
-
await addToHNSWIndex(id, embResult, {
|
|
1813
|
-
id,
|
|
1814
|
-
key,
|
|
1815
|
-
namespace,
|
|
1816
|
-
content: value
|
|
1817
|
-
});
|
|
1818
|
-
}
|
|
1819
|
-
return {
|
|
1820
|
-
success: true,
|
|
1821
|
-
id,
|
|
1822
|
-
embedding: embeddingJson ? { dimensions: embeddingDimensions, model: embeddingModel } : undefined
|
|
1823
|
-
};
|
|
1824
|
-
}
|
|
1825
|
-
catch (error) {
|
|
1826
|
-
return {
|
|
1827
|
-
success: false,
|
|
1828
|
-
id: '',
|
|
1829
|
-
error: error instanceof Error ? error.message : String(error)
|
|
1830
|
-
};
|
|
1831
|
-
}
|
|
1832
|
-
}
|
|
1833
|
-
/**
|
|
1834
|
-
* Search entries using sql.js with vector similarity
|
|
1835
|
-
* Uses HNSW index for 150x faster search when available
|
|
1836
|
-
*/
|
|
1837
|
-
export async function searchEntries(options) {
|
|
1838
|
-
// ADR-053: Try AgentDB v3 bridge first
|
|
1839
|
-
const bridge = await getBridge();
|
|
1840
|
-
if (bridge) {
|
|
1841
|
-
const bridgeResult = await bridge.bridgeSearchEntries(options);
|
|
1842
|
-
if (bridgeResult)
|
|
1843
|
-
return bridgeResult;
|
|
1844
|
-
}
|
|
1845
|
-
// Fallback: raw sql.js
|
|
1846
|
-
const { query, namespace = 'default', limit = 10, threshold = 0.3, dbPath: customPath } = options;
|
|
1847
|
-
const swarmDir = path.join(
|
|
1848
|
-
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
1849
|
-
const startTime = Date.now();
|
|
1850
|
-
try {
|
|
1851
|
-
if (!fs.existsSync(dbPath)) {
|
|
1852
|
-
return { success: false, results: [], searchTime: 0, error: 'Database not found' };
|
|
1853
|
-
}
|
|
1854
|
-
// Ensure schema has all required columns (migration for older DBs)
|
|
1855
|
-
await ensureSchemaColumns(dbPath);
|
|
1856
|
-
// Generate query embedding
|
|
1857
|
-
const queryEmb = await generateEmbedding(query);
|
|
1858
|
-
const queryEmbedding = queryEmb.embedding;
|
|
1859
|
-
// Try HNSW search first (150x faster)
|
|
1860
|
-
const hnswResults = await searchHNSWIndex(queryEmbedding, { k: limit, namespace });
|
|
1861
|
-
if (hnswResults && hnswResults.length > 0) {
|
|
1862
|
-
// Filter by threshold
|
|
1863
|
-
const filtered = hnswResults.filter(r => r.score >= threshold);
|
|
1864
|
-
return {
|
|
1865
|
-
success: true,
|
|
1866
|
-
results: filtered,
|
|
1867
|
-
searchTime: Date.now() - startTime
|
|
1868
|
-
};
|
|
1869
|
-
}
|
|
1870
|
-
// Fall back to brute-force SQLite search
|
|
1871
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
1872
|
-
const SQL = await initSqlJs();
|
|
1873
|
-
const fileBuffer = fs.readFileSync(dbPath);
|
|
1874
|
-
const db = new SQL.Database(fileBuffer);
|
|
1875
|
-
// Get entries with embeddings
|
|
1841
|
+
) VALUES (?, ?, ?, ?, 'semantic', ?, ?, ?, ?, ?, ?, ?, ?, 'active')`;
|
|
1842
|
+
db.run(insertSql, [
|
|
1843
|
+
id,
|
|
1844
|
+
key,
|
|
1845
|
+
namespace,
|
|
1846
|
+
value,
|
|
1847
|
+
embeddingJson,
|
|
1848
|
+
embeddingDimensions,
|
|
1849
|
+
embeddingModel,
|
|
1850
|
+
tags.length > 0 ? JSON.stringify(tags) : null,
|
|
1851
|
+
'{}',
|
|
1852
|
+
now,
|
|
1853
|
+
now,
|
|
1854
|
+
ttl ? now + (ttl * 1000) : null
|
|
1855
|
+
]);
|
|
1856
|
+
// Save
|
|
1857
|
+
const data = db.export();
|
|
1858
|
+
fs.writeFileSync(dbPath, Buffer.from(data));
|
|
1859
|
+
db.close();
|
|
1860
|
+
// Add to HNSW index for faster future searches
|
|
1861
|
+
if (embeddingJson) {
|
|
1862
|
+
const embResult = JSON.parse(embeddingJson);
|
|
1863
|
+
await addToHNSWIndex(id, embResult, {
|
|
1864
|
+
id,
|
|
1865
|
+
key,
|
|
1866
|
+
namespace,
|
|
1867
|
+
content: value
|
|
1868
|
+
});
|
|
1869
|
+
}
|
|
1870
|
+
return {
|
|
1871
|
+
success: true,
|
|
1872
|
+
id,
|
|
1873
|
+
embedding: embeddingJson ? { dimensions: embeddingDimensions, model: embeddingModel } : undefined
|
|
1874
|
+
};
|
|
1875
|
+
}
|
|
1876
|
+
catch (error) {
|
|
1877
|
+
return {
|
|
1878
|
+
success: false,
|
|
1879
|
+
id: '',
|
|
1880
|
+
error: error instanceof Error ? error.message : String(error)
|
|
1881
|
+
};
|
|
1882
|
+
}
|
|
1883
|
+
}
|
|
1884
|
+
/**
|
|
1885
|
+
* Search entries using sql.js with vector similarity
|
|
1886
|
+
* Uses HNSW index for 150x faster search when available
|
|
1887
|
+
*/
|
|
1888
|
+
export async function searchEntries(options) {
|
|
1889
|
+
// ADR-053: Try AgentDB v3 bridge first
|
|
1890
|
+
const bridge = await getBridge();
|
|
1891
|
+
if (bridge) {
|
|
1892
|
+
const bridgeResult = await bridge.bridgeSearchEntries(options);
|
|
1893
|
+
if (bridgeResult)
|
|
1894
|
+
return bridgeResult;
|
|
1895
|
+
}
|
|
1896
|
+
// Fallback: raw sql.js
|
|
1897
|
+
const { query, namespace = 'default', limit = 10, threshold = 0.3, dbPath: customPath } = options;
|
|
1898
|
+
const swarmDir = path.join(getProjectRoot(), '.swarm');
|
|
1899
|
+
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
1900
|
+
const startTime = Date.now();
|
|
1901
|
+
try {
|
|
1902
|
+
if (!fs.existsSync(dbPath)) {
|
|
1903
|
+
return { success: false, results: [], searchTime: 0, error: 'Database not found' };
|
|
1904
|
+
}
|
|
1905
|
+
// Ensure schema has all required columns (migration for older DBs)
|
|
1906
|
+
await ensureSchemaColumns(dbPath);
|
|
1907
|
+
// Generate query embedding
|
|
1908
|
+
const queryEmb = await generateEmbedding(query);
|
|
1909
|
+
const queryEmbedding = queryEmb.embedding;
|
|
1910
|
+
// Try HNSW search first (150x faster)
|
|
1911
|
+
const hnswResults = await searchHNSWIndex(queryEmbedding, { k: limit, namespace });
|
|
1912
|
+
if (hnswResults && hnswResults.length > 0) {
|
|
1913
|
+
// Filter by threshold
|
|
1914
|
+
const filtered = hnswResults.filter(r => r.score >= threshold);
|
|
1915
|
+
return {
|
|
1916
|
+
success: true,
|
|
1917
|
+
results: filtered,
|
|
1918
|
+
searchTime: Date.now() - startTime
|
|
1919
|
+
};
|
|
1920
|
+
}
|
|
1921
|
+
// Fall back to brute-force SQLite search
|
|
1922
|
+
const initSqlJs = (await import('sql.js')).default;
|
|
1923
|
+
const SQL = await initSqlJs();
|
|
1924
|
+
const fileBuffer = fs.readFileSync(dbPath);
|
|
1925
|
+
const db = new SQL.Database(fileBuffer);
|
|
1926
|
+
// Get entries with embeddings
|
|
1876
1927
|
const entries = db.exec(`
|
|
1877
1928
|
SELECT id, key, namespace, content, embedding
|
|
1878
1929
|
FROM memory_entries
|
|
1879
1930
|
WHERE status = 'active'
|
|
1880
1931
|
${namespace !== 'all' ? `AND namespace = '${namespace.replace(/'/g, "''")}'` : ''}
|
|
1881
1932
|
LIMIT 1000
|
|
1882
|
-
`);
|
|
1883
|
-
const results = [];
|
|
1884
|
-
if (entries[0]?.values) {
|
|
1885
|
-
for (const row of entries[0].values) {
|
|
1886
|
-
const [id, key, ns, content, embeddingJson] = row;
|
|
1887
|
-
let score = 0;
|
|
1888
|
-
if (embeddingJson) {
|
|
1889
|
-
try {
|
|
1890
|
-
const embedding = JSON.parse(embeddingJson);
|
|
1891
|
-
score = cosineSim(queryEmbedding, embedding);
|
|
1892
|
-
}
|
|
1893
|
-
catch {
|
|
1894
|
-
// Invalid embedding, use keyword score
|
|
1895
|
-
}
|
|
1896
|
-
}
|
|
1897
|
-
// Fallback to keyword matching
|
|
1898
|
-
if (score < threshold) {
|
|
1899
|
-
const lowerContent = (content || '').toLowerCase();
|
|
1900
|
-
const lowerQuery = query.toLowerCase();
|
|
1901
|
-
const words = lowerQuery.split(/\s+/);
|
|
1902
|
-
const matchCount = words.filter(w => lowerContent.includes(w)).length;
|
|
1903
|
-
const keywordScore = matchCount / words.length * 0.5;
|
|
1904
|
-
score = Math.max(score, keywordScore);
|
|
1905
|
-
}
|
|
1906
|
-
if (score >= threshold) {
|
|
1907
|
-
results.push({
|
|
1908
|
-
id: id.substring(0, 12),
|
|
1909
|
-
key: key || id.substring(0, 15),
|
|
1910
|
-
content: (content || '').substring(0, 60) + ((content || '').length > 60 ? '...' : ''),
|
|
1911
|
-
score,
|
|
1912
|
-
namespace: ns || 'default'
|
|
1913
|
-
});
|
|
1914
|
-
}
|
|
1915
|
-
}
|
|
1916
|
-
}
|
|
1917
|
-
db.close();
|
|
1918
|
-
// Sort by score
|
|
1919
|
-
results.sort((a, b) => b.score - a.score);
|
|
1920
|
-
return {
|
|
1921
|
-
success: true,
|
|
1922
|
-
results: results.slice(0, limit),
|
|
1923
|
-
searchTime: Date.now() - startTime
|
|
1924
|
-
};
|
|
1925
|
-
}
|
|
1926
|
-
catch (error) {
|
|
1927
|
-
return {
|
|
1928
|
-
success: false,
|
|
1929
|
-
results: [],
|
|
1930
|
-
searchTime: Date.now() - startTime,
|
|
1931
|
-
error: error instanceof Error ? error.message : String(error)
|
|
1932
|
-
};
|
|
1933
|
-
}
|
|
1934
|
-
}
|
|
1935
|
-
/**
|
|
1936
|
-
* Optimized cosine similarity
|
|
1937
|
-
* V8 JIT-friendly - avoids manual unrolling which can hurt performance
|
|
1938
|
-
* ~0.5μs per 384-dim vector comparison
|
|
1939
|
-
*/
|
|
1940
|
-
function cosineSim(a, b) {
|
|
1941
|
-
if (!a || !b || a.length === 0 || b.length === 0)
|
|
1942
|
-
return 0;
|
|
1943
|
-
const len = Math.min(a.length, b.length);
|
|
1944
|
-
let dot = 0, normA = 0, normB = 0;
|
|
1945
|
-
// Simple loop - V8 optimizes this well
|
|
1946
|
-
for (let i = 0; i < len; i++) {
|
|
1947
|
-
const ai = a[i], bi = b[i];
|
|
1948
|
-
dot += ai * bi;
|
|
1949
|
-
normA += ai * ai;
|
|
1950
|
-
normB += bi * bi;
|
|
1951
|
-
}
|
|
1952
|
-
// Combined sqrt for slightly better performance
|
|
1953
|
-
const mag = Math.sqrt(normA * normB);
|
|
1954
|
-
return mag === 0 ? 0 : dot / mag;
|
|
1955
|
-
}
|
|
1956
|
-
/**
|
|
1957
|
-
* List all entries from the memory database
|
|
1958
|
-
*/
|
|
1959
|
-
export async function listEntries(options) {
|
|
1960
|
-
// ADR-053: Try AgentDB v3 bridge first
|
|
1961
|
-
const bridge = await getBridge();
|
|
1962
|
-
if (bridge) {
|
|
1963
|
-
const bridgeResult = await bridge.bridgeListEntries(options);
|
|
1964
|
-
if (bridgeResult)
|
|
1965
|
-
return bridgeResult;
|
|
1966
|
-
}
|
|
1967
|
-
// Fallback: raw sql.js
|
|
1968
|
-
const { namespace, limit = 20, offset = 0, dbPath: customPath } = options;
|
|
1969
|
-
const swarmDir = path.join(
|
|
1970
|
-
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
1971
|
-
try {
|
|
1972
|
-
if (!fs.existsSync(dbPath)) {
|
|
1973
|
-
return { success: false, entries: [], total: 0, error: 'Database not found' };
|
|
1974
|
-
}
|
|
1975
|
-
// Ensure schema has all required columns (migration for older DBs)
|
|
1976
|
-
await ensureSchemaColumns(dbPath);
|
|
1977
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
1978
|
-
const SQL = await initSqlJs();
|
|
1979
|
-
const fileBuffer = fs.readFileSync(dbPath);
|
|
1980
|
-
const db = new SQL.Database(fileBuffer);
|
|
1981
|
-
// Get total count
|
|
1982
|
-
const countQuery = namespace
|
|
1983
|
-
? `SELECT COUNT(*) as cnt FROM memory_entries WHERE status = 'active' AND namespace = '${namespace.replace(/'/g, "''")}'`
|
|
1984
|
-
: `SELECT COUNT(*) as cnt FROM memory_entries WHERE status = 'active'`;
|
|
1985
|
-
const countResult = db.exec(countQuery);
|
|
1986
|
-
const total = countResult[0]?.values?.[0]?.[0] || 0;
|
|
1987
|
-
// Get entries
|
|
1933
|
+
`);
|
|
1934
|
+
const results = [];
|
|
1935
|
+
if (entries[0]?.values) {
|
|
1936
|
+
for (const row of entries[0].values) {
|
|
1937
|
+
const [id, key, ns, content, embeddingJson] = row;
|
|
1938
|
+
let score = 0;
|
|
1939
|
+
if (embeddingJson) {
|
|
1940
|
+
try {
|
|
1941
|
+
const embedding = JSON.parse(embeddingJson);
|
|
1942
|
+
score = cosineSim(queryEmbedding, embedding);
|
|
1943
|
+
}
|
|
1944
|
+
catch {
|
|
1945
|
+
// Invalid embedding, use keyword score
|
|
1946
|
+
}
|
|
1947
|
+
}
|
|
1948
|
+
// Fallback to keyword matching
|
|
1949
|
+
if (score < threshold) {
|
|
1950
|
+
const lowerContent = (content || '').toLowerCase();
|
|
1951
|
+
const lowerQuery = query.toLowerCase();
|
|
1952
|
+
const words = lowerQuery.split(/\s+/);
|
|
1953
|
+
const matchCount = words.filter(w => lowerContent.includes(w)).length;
|
|
1954
|
+
const keywordScore = matchCount / words.length * 0.5;
|
|
1955
|
+
score = Math.max(score, keywordScore);
|
|
1956
|
+
}
|
|
1957
|
+
if (score >= threshold) {
|
|
1958
|
+
results.push({
|
|
1959
|
+
id: id.substring(0, 12),
|
|
1960
|
+
key: key || id.substring(0, 15),
|
|
1961
|
+
content: (content || '').substring(0, 60) + ((content || '').length > 60 ? '...' : ''),
|
|
1962
|
+
score,
|
|
1963
|
+
namespace: ns || 'default'
|
|
1964
|
+
});
|
|
1965
|
+
}
|
|
1966
|
+
}
|
|
1967
|
+
}
|
|
1968
|
+
db.close();
|
|
1969
|
+
// Sort by score
|
|
1970
|
+
results.sort((a, b) => b.score - a.score);
|
|
1971
|
+
return {
|
|
1972
|
+
success: true,
|
|
1973
|
+
results: results.slice(0, limit),
|
|
1974
|
+
searchTime: Date.now() - startTime
|
|
1975
|
+
};
|
|
1976
|
+
}
|
|
1977
|
+
catch (error) {
|
|
1978
|
+
return {
|
|
1979
|
+
success: false,
|
|
1980
|
+
results: [],
|
|
1981
|
+
searchTime: Date.now() - startTime,
|
|
1982
|
+
error: error instanceof Error ? error.message : String(error)
|
|
1983
|
+
};
|
|
1984
|
+
}
|
|
1985
|
+
}
|
|
1986
|
+
/**
|
|
1987
|
+
* Optimized cosine similarity
|
|
1988
|
+
* V8 JIT-friendly - avoids manual unrolling which can hurt performance
|
|
1989
|
+
* ~0.5μs per 384-dim vector comparison
|
|
1990
|
+
*/
|
|
1991
|
+
function cosineSim(a, b) {
|
|
1992
|
+
if (!a || !b || a.length === 0 || b.length === 0)
|
|
1993
|
+
return 0;
|
|
1994
|
+
const len = Math.min(a.length, b.length);
|
|
1995
|
+
let dot = 0, normA = 0, normB = 0;
|
|
1996
|
+
// Simple loop - V8 optimizes this well
|
|
1997
|
+
for (let i = 0; i < len; i++) {
|
|
1998
|
+
const ai = a[i], bi = b[i];
|
|
1999
|
+
dot += ai * bi;
|
|
2000
|
+
normA += ai * ai;
|
|
2001
|
+
normB += bi * bi;
|
|
2002
|
+
}
|
|
2003
|
+
// Combined sqrt for slightly better performance
|
|
2004
|
+
const mag = Math.sqrt(normA * normB);
|
|
2005
|
+
return mag === 0 ? 0 : dot / mag;
|
|
2006
|
+
}
|
|
2007
|
+
/**
|
|
2008
|
+
* List all entries from the memory database
|
|
2009
|
+
*/
|
|
2010
|
+
export async function listEntries(options) {
|
|
2011
|
+
// ADR-053: Try AgentDB v3 bridge first
|
|
2012
|
+
const bridge = await getBridge();
|
|
2013
|
+
if (bridge) {
|
|
2014
|
+
const bridgeResult = await bridge.bridgeListEntries(options);
|
|
2015
|
+
if (bridgeResult)
|
|
2016
|
+
return bridgeResult;
|
|
2017
|
+
}
|
|
2018
|
+
// Fallback: raw sql.js
|
|
2019
|
+
const { namespace, limit = 20, offset = 0, dbPath: customPath } = options;
|
|
2020
|
+
const swarmDir = path.join(getProjectRoot(), '.swarm');
|
|
2021
|
+
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
2022
|
+
try {
|
|
2023
|
+
if (!fs.existsSync(dbPath)) {
|
|
2024
|
+
return { success: false, entries: [], total: 0, error: 'Database not found' };
|
|
2025
|
+
}
|
|
2026
|
+
// Ensure schema has all required columns (migration for older DBs)
|
|
2027
|
+
await ensureSchemaColumns(dbPath);
|
|
2028
|
+
const initSqlJs = (await import('sql.js')).default;
|
|
2029
|
+
const SQL = await initSqlJs();
|
|
2030
|
+
const fileBuffer = fs.readFileSync(dbPath);
|
|
2031
|
+
const db = new SQL.Database(fileBuffer);
|
|
2032
|
+
// Get total count
|
|
2033
|
+
const countQuery = namespace
|
|
2034
|
+
? `SELECT COUNT(*) as cnt FROM memory_entries WHERE status = 'active' AND namespace = '${namespace.replace(/'/g, "''")}'`
|
|
2035
|
+
: `SELECT COUNT(*) as cnt FROM memory_entries WHERE status = 'active'`;
|
|
2036
|
+
const countResult = db.exec(countQuery);
|
|
2037
|
+
const total = countResult[0]?.values?.[0]?.[0] || 0;
|
|
2038
|
+
// Get entries
|
|
1988
2039
|
const listQuery = `
|
|
1989
2040
|
SELECT id, key, namespace, content, embedding, access_count, created_at, updated_at
|
|
1990
2041
|
FROM memory_entries
|
|
@@ -1992,62 +2043,62 @@ export async function listEntries(options) {
|
|
|
1992
2043
|
${namespace ? `AND namespace = '${namespace.replace(/'/g, "''")}'` : ''}
|
|
1993
2044
|
ORDER BY updated_at DESC
|
|
1994
2045
|
LIMIT ${limit} OFFSET ${offset}
|
|
1995
|
-
`;
|
|
1996
|
-
const result = db.exec(listQuery);
|
|
1997
|
-
const entries = [];
|
|
1998
|
-
if (result[0]?.values) {
|
|
1999
|
-
for (const row of result[0].values) {
|
|
2000
|
-
const [id, key, ns, content, embedding, accessCount, createdAt, updatedAt] = row;
|
|
2001
|
-
entries.push({
|
|
2002
|
-
id: String(id).substring(0, 20),
|
|
2003
|
-
key: key || String(id).substring(0, 15),
|
|
2004
|
-
namespace: ns || 'default',
|
|
2005
|
-
size: (content || '').length,
|
|
2006
|
-
accessCount: accessCount || 0,
|
|
2007
|
-
createdAt: createdAt || new Date().toISOString(),
|
|
2008
|
-
updatedAt: updatedAt || new Date().toISOString(),
|
|
2009
|
-
hasEmbedding: !!embedding && embedding.length > 10
|
|
2010
|
-
});
|
|
2011
|
-
}
|
|
2012
|
-
}
|
|
2013
|
-
db.close();
|
|
2014
|
-
return { success: true, entries, total };
|
|
2015
|
-
}
|
|
2016
|
-
catch (error) {
|
|
2017
|
-
return {
|
|
2018
|
-
success: false,
|
|
2019
|
-
entries: [],
|
|
2020
|
-
total: 0,
|
|
2021
|
-
error: error instanceof Error ? error.message : String(error)
|
|
2022
|
-
};
|
|
2023
|
-
}
|
|
2024
|
-
}
|
|
2025
|
-
/**
|
|
2026
|
-
* Get a specific entry from the memory database
|
|
2027
|
-
*/
|
|
2028
|
-
export async function getEntry(options) {
|
|
2029
|
-
// ADR-053: Try AgentDB v3 bridge first
|
|
2030
|
-
const bridge = await getBridge();
|
|
2031
|
-
if (bridge) {
|
|
2032
|
-
const bridgeResult = await bridge.bridgeGetEntry(options);
|
|
2033
|
-
if (bridgeResult)
|
|
2034
|
-
return bridgeResult;
|
|
2035
|
-
}
|
|
2036
|
-
// Fallback: raw sql.js
|
|
2037
|
-
const { key, namespace = 'default', dbPath: customPath } = options;
|
|
2038
|
-
const swarmDir = path.join(
|
|
2039
|
-
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
2040
|
-
try {
|
|
2041
|
-
if (!fs.existsSync(dbPath)) {
|
|
2042
|
-
return { success: false, found: false, error: 'Database not found' };
|
|
2043
|
-
}
|
|
2044
|
-
// Ensure schema has all required columns (migration for older DBs)
|
|
2045
|
-
await ensureSchemaColumns(dbPath);
|
|
2046
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
2047
|
-
const SQL = await initSqlJs();
|
|
2048
|
-
const fileBuffer = fs.readFileSync(dbPath);
|
|
2049
|
-
const db = new SQL.Database(fileBuffer);
|
|
2050
|
-
// Find entry by key
|
|
2046
|
+
`;
|
|
2047
|
+
const result = db.exec(listQuery);
|
|
2048
|
+
const entries = [];
|
|
2049
|
+
if (result[0]?.values) {
|
|
2050
|
+
for (const row of result[0].values) {
|
|
2051
|
+
const [id, key, ns, content, embedding, accessCount, createdAt, updatedAt] = row;
|
|
2052
|
+
entries.push({
|
|
2053
|
+
id: String(id).substring(0, 20),
|
|
2054
|
+
key: key || String(id).substring(0, 15),
|
|
2055
|
+
namespace: ns || 'default',
|
|
2056
|
+
size: (content || '').length,
|
|
2057
|
+
accessCount: accessCount || 0,
|
|
2058
|
+
createdAt: createdAt || new Date().toISOString(),
|
|
2059
|
+
updatedAt: updatedAt || new Date().toISOString(),
|
|
2060
|
+
hasEmbedding: !!embedding && embedding.length > 10
|
|
2061
|
+
});
|
|
2062
|
+
}
|
|
2063
|
+
}
|
|
2064
|
+
db.close();
|
|
2065
|
+
return { success: true, entries, total };
|
|
2066
|
+
}
|
|
2067
|
+
catch (error) {
|
|
2068
|
+
return {
|
|
2069
|
+
success: false,
|
|
2070
|
+
entries: [],
|
|
2071
|
+
total: 0,
|
|
2072
|
+
error: error instanceof Error ? error.message : String(error)
|
|
2073
|
+
};
|
|
2074
|
+
}
|
|
2075
|
+
}
|
|
2076
|
+
/**
|
|
2077
|
+
* Get a specific entry from the memory database
|
|
2078
|
+
*/
|
|
2079
|
+
export async function getEntry(options) {
|
|
2080
|
+
// ADR-053: Try AgentDB v3 bridge first
|
|
2081
|
+
const bridge = await getBridge();
|
|
2082
|
+
if (bridge) {
|
|
2083
|
+
const bridgeResult = await bridge.bridgeGetEntry(options);
|
|
2084
|
+
if (bridgeResult)
|
|
2085
|
+
return bridgeResult;
|
|
2086
|
+
}
|
|
2087
|
+
// Fallback: raw sql.js
|
|
2088
|
+
const { key, namespace = 'default', dbPath: customPath } = options;
|
|
2089
|
+
const swarmDir = path.join(getProjectRoot(), '.swarm');
|
|
2090
|
+
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
2091
|
+
try {
|
|
2092
|
+
if (!fs.existsSync(dbPath)) {
|
|
2093
|
+
return { success: false, found: false, error: 'Database not found' };
|
|
2094
|
+
}
|
|
2095
|
+
// Ensure schema has all required columns (migration for older DBs)
|
|
2096
|
+
await ensureSchemaColumns(dbPath);
|
|
2097
|
+
const initSqlJs = (await import('sql.js')).default;
|
|
2098
|
+
const SQL = await initSqlJs();
|
|
2099
|
+
const fileBuffer = fs.readFileSync(dbPath);
|
|
2100
|
+
const db = new SQL.Database(fileBuffer);
|
|
2101
|
+
// Find entry by key
|
|
2051
2102
|
const result = db.exec(`
|
|
2052
2103
|
SELECT id, key, namespace, content, embedding, access_count, created_at, updated_at, tags
|
|
2053
2104
|
FROM memory_entries
|
|
@@ -2055,159 +2106,159 @@ export async function getEntry(options) {
|
|
|
2055
2106
|
AND key = '${key.replace(/'/g, "''")}'
|
|
2056
2107
|
AND namespace = '${namespace.replace(/'/g, "''")}'
|
|
2057
2108
|
LIMIT 1
|
|
2058
|
-
`);
|
|
2059
|
-
if (!result[0]?.values?.[0]) {
|
|
2060
|
-
db.close();
|
|
2061
|
-
return { success: true, found: false };
|
|
2062
|
-
}
|
|
2063
|
-
const [id, entryKey, ns, content, embedding, accessCount, createdAt, updatedAt, tagsJson] = result[0].values[0];
|
|
2064
|
-
// Update access count
|
|
2109
|
+
`);
|
|
2110
|
+
if (!result[0]?.values?.[0]) {
|
|
2111
|
+
db.close();
|
|
2112
|
+
return { success: true, found: false };
|
|
2113
|
+
}
|
|
2114
|
+
const [id, entryKey, ns, content, embedding, accessCount, createdAt, updatedAt, tagsJson] = result[0].values[0];
|
|
2115
|
+
// Update access count
|
|
2065
2116
|
db.run(`
|
|
2066
2117
|
UPDATE memory_entries
|
|
2067
2118
|
SET access_count = access_count + 1, last_accessed_at = strftime('%s', 'now') * 1000
|
|
2068
2119
|
WHERE id = '${String(id).replace(/'/g, "''")}'
|
|
2069
|
-
`);
|
|
2070
|
-
// Save updated database
|
|
2071
|
-
const data = db.export();
|
|
2072
|
-
fs.writeFileSync(dbPath, Buffer.from(data));
|
|
2073
|
-
db.close();
|
|
2074
|
-
let tags = [];
|
|
2075
|
-
if (tagsJson) {
|
|
2076
|
-
try {
|
|
2077
|
-
tags = JSON.parse(tagsJson);
|
|
2078
|
-
}
|
|
2079
|
-
catch {
|
|
2080
|
-
// Invalid JSON
|
|
2081
|
-
}
|
|
2082
|
-
}
|
|
2083
|
-
return {
|
|
2084
|
-
success: true,
|
|
2085
|
-
found: true,
|
|
2086
|
-
entry: {
|
|
2087
|
-
id: String(id),
|
|
2088
|
-
key: entryKey || String(id),
|
|
2089
|
-
namespace: ns || 'default',
|
|
2090
|
-
content: content || '',
|
|
2091
|
-
accessCount: (accessCount || 0) + 1,
|
|
2092
|
-
createdAt: createdAt || new Date().toISOString(),
|
|
2093
|
-
updatedAt: updatedAt || new Date().toISOString(),
|
|
2094
|
-
hasEmbedding: !!embedding && embedding.length > 10,
|
|
2095
|
-
tags
|
|
2096
|
-
}
|
|
2097
|
-
};
|
|
2098
|
-
}
|
|
2099
|
-
catch (error) {
|
|
2100
|
-
return {
|
|
2101
|
-
success: false,
|
|
2102
|
-
found: false,
|
|
2103
|
-
error: error instanceof Error ? error.message : String(error)
|
|
2104
|
-
};
|
|
2105
|
-
}
|
|
2106
|
-
}
|
|
2107
|
-
/**
|
|
2108
|
-
* Delete a memory entry by key and namespace
|
|
2109
|
-
* Issue #980: Properly supports namespaced entries
|
|
2110
|
-
*/
|
|
2111
|
-
export async function deleteEntry(options) {
|
|
2112
|
-
// ADR-053: Try AgentDB v3 bridge first
|
|
2113
|
-
const bridge = await getBridge();
|
|
2114
|
-
if (bridge) {
|
|
2115
|
-
const bridgeResult = await bridge.bridgeDeleteEntry(options);
|
|
2116
|
-
if (bridgeResult)
|
|
2117
|
-
return bridgeResult;
|
|
2118
|
-
}
|
|
2119
|
-
// Fallback: raw sql.js
|
|
2120
|
-
const { key, namespace = 'default', dbPath: customPath } = options;
|
|
2121
|
-
const swarmDir = path.join(
|
|
2122
|
-
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
2123
|
-
try {
|
|
2124
|
-
if (!fs.existsSync(dbPath)) {
|
|
2125
|
-
return {
|
|
2126
|
-
success: false,
|
|
2127
|
-
deleted: false,
|
|
2128
|
-
key,
|
|
2129
|
-
namespace,
|
|
2130
|
-
remainingEntries: 0,
|
|
2131
|
-
error: 'Database not found'
|
|
2132
|
-
};
|
|
2133
|
-
}
|
|
2134
|
-
// Ensure schema has all required columns (migration for older DBs)
|
|
2135
|
-
await ensureSchemaColumns(dbPath);
|
|
2136
|
-
const initSqlJs = (await import('sql.js')).default;
|
|
2137
|
-
const SQL = await initSqlJs();
|
|
2138
|
-
const fileBuffer = fs.readFileSync(dbPath);
|
|
2139
|
-
const db = new SQL.Database(fileBuffer);
|
|
2140
|
-
// Check if entry exists first
|
|
2120
|
+
`);
|
|
2121
|
+
// Save updated database
|
|
2122
|
+
const data = db.export();
|
|
2123
|
+
fs.writeFileSync(dbPath, Buffer.from(data));
|
|
2124
|
+
db.close();
|
|
2125
|
+
let tags = [];
|
|
2126
|
+
if (tagsJson) {
|
|
2127
|
+
try {
|
|
2128
|
+
tags = JSON.parse(tagsJson);
|
|
2129
|
+
}
|
|
2130
|
+
catch {
|
|
2131
|
+
// Invalid JSON
|
|
2132
|
+
}
|
|
2133
|
+
}
|
|
2134
|
+
return {
|
|
2135
|
+
success: true,
|
|
2136
|
+
found: true,
|
|
2137
|
+
entry: {
|
|
2138
|
+
id: String(id),
|
|
2139
|
+
key: entryKey || String(id),
|
|
2140
|
+
namespace: ns || 'default',
|
|
2141
|
+
content: content || '',
|
|
2142
|
+
accessCount: (accessCount || 0) + 1,
|
|
2143
|
+
createdAt: createdAt || new Date().toISOString(),
|
|
2144
|
+
updatedAt: updatedAt || new Date().toISOString(),
|
|
2145
|
+
hasEmbedding: !!embedding && embedding.length > 10,
|
|
2146
|
+
tags
|
|
2147
|
+
}
|
|
2148
|
+
};
|
|
2149
|
+
}
|
|
2150
|
+
catch (error) {
|
|
2151
|
+
return {
|
|
2152
|
+
success: false,
|
|
2153
|
+
found: false,
|
|
2154
|
+
error: error instanceof Error ? error.message : String(error)
|
|
2155
|
+
};
|
|
2156
|
+
}
|
|
2157
|
+
}
|
|
2158
|
+
/**
|
|
2159
|
+
* Delete a memory entry by key and namespace
|
|
2160
|
+
* Issue #980: Properly supports namespaced entries
|
|
2161
|
+
*/
|
|
2162
|
+
export async function deleteEntry(options) {
|
|
2163
|
+
// ADR-053: Try AgentDB v3 bridge first
|
|
2164
|
+
const bridge = await getBridge();
|
|
2165
|
+
if (bridge) {
|
|
2166
|
+
const bridgeResult = await bridge.bridgeDeleteEntry(options);
|
|
2167
|
+
if (bridgeResult)
|
|
2168
|
+
return bridgeResult;
|
|
2169
|
+
}
|
|
2170
|
+
// Fallback: raw sql.js
|
|
2171
|
+
const { key, namespace = 'default', dbPath: customPath } = options;
|
|
2172
|
+
const swarmDir = path.join(getProjectRoot(), '.swarm');
|
|
2173
|
+
const dbPath = customPath || path.join(swarmDir, 'memory.db');
|
|
2174
|
+
try {
|
|
2175
|
+
if (!fs.existsSync(dbPath)) {
|
|
2176
|
+
return {
|
|
2177
|
+
success: false,
|
|
2178
|
+
deleted: false,
|
|
2179
|
+
key,
|
|
2180
|
+
namespace,
|
|
2181
|
+
remainingEntries: 0,
|
|
2182
|
+
error: 'Database not found'
|
|
2183
|
+
};
|
|
2184
|
+
}
|
|
2185
|
+
// Ensure schema has all required columns (migration for older DBs)
|
|
2186
|
+
await ensureSchemaColumns(dbPath);
|
|
2187
|
+
const initSqlJs = (await import('sql.js')).default;
|
|
2188
|
+
const SQL = await initSqlJs();
|
|
2189
|
+
const fileBuffer = fs.readFileSync(dbPath);
|
|
2190
|
+
const db = new SQL.Database(fileBuffer);
|
|
2191
|
+
// Check if entry exists first
|
|
2141
2192
|
const checkResult = db.exec(`
|
|
2142
2193
|
SELECT id FROM memory_entries
|
|
2143
2194
|
WHERE status = 'active'
|
|
2144
2195
|
AND key = '${key.replace(/'/g, "''")}'
|
|
2145
2196
|
AND namespace = '${namespace.replace(/'/g, "''")}'
|
|
2146
2197
|
LIMIT 1
|
|
2147
|
-
`);
|
|
2148
|
-
if (!checkResult[0]?.values?.[0]) {
|
|
2149
|
-
// Get remaining count before closing
|
|
2150
|
-
const countResult = db.exec(`SELECT COUNT(*) FROM memory_entries WHERE status = 'active'`);
|
|
2151
|
-
const remainingEntries = countResult[0]?.values?.[0]?.[0] || 0;
|
|
2152
|
-
db.close();
|
|
2153
|
-
return {
|
|
2154
|
-
success: true,
|
|
2155
|
-
deleted: false,
|
|
2156
|
-
key,
|
|
2157
|
-
namespace,
|
|
2158
|
-
remainingEntries,
|
|
2159
|
-
error: `Key '${key}' not found in namespace '${namespace}'`
|
|
2160
|
-
};
|
|
2161
|
-
}
|
|
2162
|
-
// Delete the entry (soft delete by setting status to 'deleted')
|
|
2198
|
+
`);
|
|
2199
|
+
if (!checkResult[0]?.values?.[0]) {
|
|
2200
|
+
// Get remaining count before closing
|
|
2201
|
+
const countResult = db.exec(`SELECT COUNT(*) FROM memory_entries WHERE status = 'active'`);
|
|
2202
|
+
const remainingEntries = countResult[0]?.values?.[0]?.[0] || 0;
|
|
2203
|
+
db.close();
|
|
2204
|
+
return {
|
|
2205
|
+
success: true,
|
|
2206
|
+
deleted: false,
|
|
2207
|
+
key,
|
|
2208
|
+
namespace,
|
|
2209
|
+
remainingEntries,
|
|
2210
|
+
error: `Key '${key}' not found in namespace '${namespace}'`
|
|
2211
|
+
};
|
|
2212
|
+
}
|
|
2213
|
+
// Delete the entry (soft delete by setting status to 'deleted')
|
|
2163
2214
|
db.run(`
|
|
2164
2215
|
UPDATE memory_entries
|
|
2165
2216
|
SET status = 'deleted', updated_at = strftime('%s', 'now') * 1000
|
|
2166
2217
|
WHERE key = '${key.replace(/'/g, "''")}'
|
|
2167
2218
|
AND namespace = '${namespace.replace(/'/g, "''")}'
|
|
2168
2219
|
AND status = 'active'
|
|
2169
|
-
`);
|
|
2170
|
-
// Get remaining count
|
|
2171
|
-
const countResult = db.exec(`SELECT COUNT(*) FROM memory_entries WHERE status = 'active'`);
|
|
2172
|
-
const remainingEntries = countResult[0]?.values?.[0]?.[0] || 0;
|
|
2173
|
-
// Save updated database
|
|
2174
|
-
const data = db.export();
|
|
2175
|
-
fs.writeFileSync(dbPath, Buffer.from(data));
|
|
2176
|
-
db.close();
|
|
2177
|
-
return {
|
|
2178
|
-
success: true,
|
|
2179
|
-
deleted: true,
|
|
2180
|
-
key,
|
|
2181
|
-
namespace,
|
|
2182
|
-
remainingEntries
|
|
2183
|
-
};
|
|
2184
|
-
}
|
|
2185
|
-
catch (error) {
|
|
2186
|
-
return {
|
|
2187
|
-
success: false,
|
|
2188
|
-
deleted: false,
|
|
2189
|
-
key,
|
|
2190
|
-
namespace,
|
|
2191
|
-
remainingEntries: 0,
|
|
2192
|
-
error: error instanceof Error ? error.message : String(error)
|
|
2193
|
-
};
|
|
2194
|
-
}
|
|
2195
|
-
}
|
|
2196
|
-
export default {
|
|
2197
|
-
initializeMemoryDatabase,
|
|
2198
|
-
checkMemoryInitialization,
|
|
2199
|
-
checkAndMigrateLegacy,
|
|
2200
|
-
ensureSchemaColumns,
|
|
2201
|
-
applyTemporalDecay,
|
|
2202
|
-
loadEmbeddingModel,
|
|
2203
|
-
generateEmbedding,
|
|
2204
|
-
verifyMemoryInit,
|
|
2205
|
-
storeEntry,
|
|
2206
|
-
searchEntries,
|
|
2207
|
-
listEntries,
|
|
2208
|
-
getEntry,
|
|
2209
|
-
deleteEntry,
|
|
2210
|
-
MEMORY_SCHEMA_V3,
|
|
2211
|
-
getInitialMetadata
|
|
2212
|
-
};
|
|
2220
|
+
`);
|
|
2221
|
+
// Get remaining count
|
|
2222
|
+
const countResult = db.exec(`SELECT COUNT(*) FROM memory_entries WHERE status = 'active'`);
|
|
2223
|
+
const remainingEntries = countResult[0]?.values?.[0]?.[0] || 0;
|
|
2224
|
+
// Save updated database
|
|
2225
|
+
const data = db.export();
|
|
2226
|
+
fs.writeFileSync(dbPath, Buffer.from(data));
|
|
2227
|
+
db.close();
|
|
2228
|
+
return {
|
|
2229
|
+
success: true,
|
|
2230
|
+
deleted: true,
|
|
2231
|
+
key,
|
|
2232
|
+
namespace,
|
|
2233
|
+
remainingEntries
|
|
2234
|
+
};
|
|
2235
|
+
}
|
|
2236
|
+
catch (error) {
|
|
2237
|
+
return {
|
|
2238
|
+
success: false,
|
|
2239
|
+
deleted: false,
|
|
2240
|
+
key,
|
|
2241
|
+
namespace,
|
|
2242
|
+
remainingEntries: 0,
|
|
2243
|
+
error: error instanceof Error ? error.message : String(error)
|
|
2244
|
+
};
|
|
2245
|
+
}
|
|
2246
|
+
}
|
|
2247
|
+
export default {
|
|
2248
|
+
initializeMemoryDatabase,
|
|
2249
|
+
checkMemoryInitialization,
|
|
2250
|
+
checkAndMigrateLegacy,
|
|
2251
|
+
ensureSchemaColumns,
|
|
2252
|
+
applyTemporalDecay,
|
|
2253
|
+
loadEmbeddingModel,
|
|
2254
|
+
generateEmbedding,
|
|
2255
|
+
verifyMemoryInit,
|
|
2256
|
+
storeEntry,
|
|
2257
|
+
searchEntries,
|
|
2258
|
+
listEntries,
|
|
2259
|
+
getEntry,
|
|
2260
|
+
deleteEntry,
|
|
2261
|
+
MEMORY_SCHEMA_V3,
|
|
2262
|
+
getInitialMetadata
|
|
2263
|
+
};
|
|
2213
2264
|
//# sourceMappingURL=memory-initializer.js.map
|