moflo 4.0.1 → 4.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/guidance/agent-bootstrap.md +12 -6
- package/bin/setup-project.mjs +201 -0
- package/package.json +114 -109
- package/v3/@claude-flow/cli/dist/src/memory/memory-bridge.js +194 -81
- package/v3/@claude-flow/cli/dist/src/memory/memory-initializer.js +1892 -1841
- package/v3/@claude-flow/memory/README.md +587 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.d.ts +131 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.js +223 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.test.js +463 -0
- package/v3/@claude-flow/memory/dist/agentdb-adapter.d.ts +165 -0
- package/v3/@claude-flow/memory/dist/agentdb-adapter.js +806 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.d.ts +214 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.js +844 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.test.d.ts +7 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.test.js +258 -0
- package/v3/@claude-flow/memory/dist/application/commands/delete-memory.command.d.ts +65 -0
- package/v3/@claude-flow/memory/dist/application/commands/delete-memory.command.js +129 -0
- package/v3/@claude-flow/memory/dist/application/commands/store-memory.command.d.ts +48 -0
- package/v3/@claude-flow/memory/dist/application/commands/store-memory.command.js +72 -0
- package/v3/@claude-flow/memory/dist/application/index.d.ts +12 -0
- package/v3/@claude-flow/memory/dist/application/index.js +15 -0
- package/v3/@claude-flow/memory/dist/application/queries/search-memory.query.d.ts +72 -0
- package/v3/@claude-flow/memory/dist/application/queries/search-memory.query.js +143 -0
- package/v3/@claude-flow/memory/dist/application/services/memory-application-service.d.ts +121 -0
- package/v3/@claude-flow/memory/dist/application/services/memory-application-service.js +190 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.d.ts +226 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.js +709 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.test.js +754 -0
- package/v3/@claude-flow/memory/dist/benchmark.test.d.ts +2 -0
- package/v3/@claude-flow/memory/dist/benchmark.test.js +277 -0
- package/v3/@claude-flow/memory/dist/cache-manager.d.ts +134 -0
- package/v3/@claude-flow/memory/dist/cache-manager.js +407 -0
- package/v3/@claude-flow/memory/dist/controller-registry.d.ts +216 -0
- package/v3/@claude-flow/memory/dist/controller-registry.js +893 -0
- package/v3/@claude-flow/memory/dist/controller-registry.test.d.ts +14 -0
- package/v3/@claude-flow/memory/dist/controller-registry.test.js +636 -0
- package/v3/@claude-flow/memory/dist/database-provider.d.ts +87 -0
- package/v3/@claude-flow/memory/dist/database-provider.js +410 -0
- package/v3/@claude-flow/memory/dist/database-provider.test.d.ts +7 -0
- package/v3/@claude-flow/memory/dist/database-provider.test.js +285 -0
- package/v3/@claude-flow/memory/dist/domain/entities/memory-entry.d.ts +143 -0
- package/v3/@claude-flow/memory/dist/domain/entities/memory-entry.js +226 -0
- package/v3/@claude-flow/memory/dist/domain/index.d.ts +11 -0
- package/v3/@claude-flow/memory/dist/domain/index.js +12 -0
- package/v3/@claude-flow/memory/dist/domain/repositories/memory-repository.interface.d.ts +102 -0
- package/v3/@claude-flow/memory/dist/domain/repositories/memory-repository.interface.js +11 -0
- package/v3/@claude-flow/memory/dist/domain/services/memory-domain-service.d.ts +105 -0
- package/v3/@claude-flow/memory/dist/domain/services/memory-domain-service.js +297 -0
- package/v3/@claude-flow/memory/dist/hnsw-index.d.ts +111 -0
- package/v3/@claude-flow/memory/dist/hnsw-index.js +781 -0
- package/v3/@claude-flow/memory/dist/hnsw-lite.d.ts +23 -0
- package/v3/@claude-flow/memory/dist/hnsw-lite.js +168 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.d.ts +245 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.js +569 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.test.js +320 -0
- package/v3/@claude-flow/memory/dist/index.d.ts +208 -0
- package/v3/@claude-flow/memory/dist/index.js +362 -0
- package/v3/@claude-flow/memory/dist/infrastructure/index.d.ts +17 -0
- package/v3/@claude-flow/memory/dist/infrastructure/index.js +16 -0
- package/v3/@claude-flow/memory/dist/infrastructure/repositories/hybrid-memory-repository.d.ts +66 -0
- package/v3/@claude-flow/memory/dist/infrastructure/repositories/hybrid-memory-repository.js +409 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.d.ts +137 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.js +335 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.test.js +578 -0
- package/v3/@claude-flow/memory/dist/memory-graph.d.ts +100 -0
- package/v3/@claude-flow/memory/dist/memory-graph.js +333 -0
- package/v3/@claude-flow/memory/dist/memory-graph.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/memory-graph.test.js +609 -0
- package/v3/@claude-flow/memory/dist/migration.d.ts +68 -0
- package/v3/@claude-flow/memory/dist/migration.js +513 -0
- package/v3/@claude-flow/memory/dist/persistent-sona.d.ts +144 -0
- package/v3/@claude-flow/memory/dist/persistent-sona.js +332 -0
- package/v3/@claude-flow/memory/dist/query-builder.d.ts +211 -0
- package/v3/@claude-flow/memory/dist/query-builder.js +438 -0
- package/v3/@claude-flow/memory/dist/rvf-backend.d.ts +51 -0
- package/v3/@claude-flow/memory/dist/rvf-backend.js +481 -0
- package/v3/@claude-flow/memory/dist/rvf-learning-store.d.ts +139 -0
- package/v3/@claude-flow/memory/dist/rvf-learning-store.js +295 -0
- package/v3/@claude-flow/memory/dist/rvf-migration.d.ts +45 -0
- package/v3/@claude-flow/memory/dist/rvf-migration.js +254 -0
- package/v3/@claude-flow/memory/dist/sqlite-backend.d.ts +121 -0
- package/v3/@claude-flow/memory/dist/sqlite-backend.js +564 -0
- package/v3/@claude-flow/memory/dist/sqljs-backend.d.ts +128 -0
- package/v3/@claude-flow/memory/dist/sqljs-backend.js +601 -0
- package/v3/@claude-flow/memory/dist/types.d.ts +484 -0
- package/v3/@claude-flow/memory/dist/types.js +58 -0
- package/v3/@claude-flow/memory/package.json +46 -0
|
@@ -0,0 +1,513 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* V3 Memory Migration Utility
|
|
3
|
+
*
|
|
4
|
+
* Migrates data from legacy memory systems (SQLite, Markdown, JSON, etc.)
|
|
5
|
+
* to the unified AgentDB-backed memory system with HNSW indexing.
|
|
6
|
+
*
|
|
7
|
+
* @module v3/memory/migration
|
|
8
|
+
*/
|
|
9
|
+
import { EventEmitter } from 'node:events';
|
|
10
|
+
import { promises as fs } from 'node:fs';
|
|
11
|
+
import * as path from 'node:path';
|
|
12
|
+
import { createDefaultEntry, } from './types.js';
|
|
13
|
+
/**
 * Default migration configuration, applied beneath any caller-supplied
 * overrides (see MemoryMigrator constructor).
 */
const DEFAULT_MIGRATION_CONFIG = {
    batchSize: 100,
    generateEmbeddings: true,
    validateData: true,
    continueOnError: true,
};
/**
 * Memory Migration Manager
 *
 * Streams entries out of a legacy memory store and writes them into the
 * unified AgentDB-backed target via `target.store(entry)`.
 *
 * Supported sources (selected by `config.source`):
 * - 'sqlite'             — JSON export files; direct .db parsing is stubbed
 * - 'markdown'           — a directory tree of .md files with optional frontmatter
 * - 'json'               — array / {entries} / namespace-map JSON dumps
 * - 'memory-manager'     — well-known MemoryManager file locations
 * - 'swarm-memory'       — .swarm/memory partition directories
 * - 'distributed-memory' — same layout as swarm-memory
 *
 * Emits: 'migration:started', 'migration:progress', 'migration:completed',
 * 'migration:failed', 'migration:warning', 'migration:error'.
 */
export class MemoryMigrator extends EventEmitter {
    config;
    target;
    embeddingGenerator;
    progress;
    /**
     * @param target              Destination backend exposing `store(entry)`.
     * @param config              Migration options merged over
     *                            DEFAULT_MIGRATION_CONFIG; must carry
     *                            `source` and `sourcePath`.
     * @param embeddingGenerator  Optional async (content) => embedding vector,
     *                            used when `generateEmbeddings` is enabled.
     */
    constructor(target, config, embeddingGenerator) {
        super();
        this.target = target;
        this.config = { ...DEFAULT_MIGRATION_CONFIG, ...config };
        this.embeddingGenerator = embeddingGenerator;
        this.progress = this.initializeProgress();
    }
    /**
     * Run the migration end-to-end: load entries from the configured source,
     * then transform and store them in batches, emitting progress events.
     *
     * Never rejects: failures are reported via the returned result object
     * (and the 'migration:failed' event).
     *
     * @returns {{ success, progress, duration, summary }} where `duration`
     *          is wall-clock milliseconds and `summary` is human-readable.
     */
    async migrate() {
        const startTime = Date.now();
        this.progress = this.initializeProgress();
        this.emit('migration:started', { source: this.config.source });
        try {
            // Load entries from source
            const entries = await this.loadFromSource();
            this.progress.total = entries.length;
            this.progress.totalBatches = Math.ceil(entries.length / this.config.batchSize);
            this.emit('migration:progress', { ...this.progress });
            // Process in batches
            for (let i = 0; i < entries.length; i += this.config.batchSize) {
                const batch = entries.slice(i, i + this.config.batchSize);
                this.progress.currentBatch = Math.floor(i / this.config.batchSize) + 1;
                await this.processBatch(batch);
                this.progress.percentage = Math.round((this.progress.migrated / this.progress.total) * 100);
                this.progress.estimatedTimeRemaining = this.estimateTimeRemaining(startTime, this.progress.migrated, this.progress.total);
                this.emit('migration:progress', { ...this.progress });
            }
            const duration = Date.now() - startTime;
            const result = {
                // With continueOnError the run still counts as successful even
                // if individual entries failed; they appear in the summary.
                success: this.progress.failed === 0 || this.config.continueOnError,
                progress: { ...this.progress },
                duration,
                summary: this.generateSummary(),
            };
            this.emit('migration:completed', result);
            return result;
        }
        catch (error) {
            const duration = Date.now() - startTime;
            const result = {
                success: false,
                progress: { ...this.progress },
                duration,
                summary: `Migration failed: ${error.message}`,
            };
            this.emit('migration:failed', { error, result });
            return result;
        }
    }
    /**
     * Get a snapshot of the current migration progress.
     */
    getProgress() {
        return { ...this.progress };
    }
    // ===== Source Loaders =====
    /** Dispatch to the loader matching `config.source`. */
    async loadFromSource() {
        switch (this.config.source) {
            case 'sqlite':
                return this.loadFromSQLite();
            case 'markdown':
                return this.loadFromMarkdown();
            case 'json':
                return this.loadFromJSON();
            case 'memory-manager':
                return this.loadFromMemoryManager();
            case 'swarm-memory':
                return this.loadFromSwarmMemory();
            case 'distributed-memory':
                return this.loadFromDistributedMemory();
            default:
                throw new Error(`Unknown migration source: ${this.config.source}`);
        }
    }
    /**
     * Load entries from a SQLite source. Direct .db parsing requires a
     * SQLite library (better-sqlite3 / sql.js) and is not yet wired up;
     * JSON export files are supported directly.
     */
    async loadFromSQLite() {
        const dbPath = this.config.sourcePath;
        try {
            if (dbPath.endsWith('.json')) {
                // JSON export format: either a bare array or { entries: [...] }.
                const fileContent = await fs.readFile(dbPath);
                const data = JSON.parse(fileContent.toString());
                if (Array.isArray(data)) {
                    return data;
                }
                else if (data.entries) {
                    return data.entries;
                }
            }
            else {
                // Probe the file so a missing/unreadable path still surfaces as
                // an error, without loading a potentially large binary .db into
                // memory just to discard it (the previous code read it eagerly).
                await fs.access(dbPath);
            }
            // SQLite parsing would go here using better-sqlite3 or sql.js
            this.emit('migration:warning', {
                message: 'Direct SQLite parsing requires additional setup. Using export format.',
            });
            return [];
        }
        catch (error) {
            throw new Error(`Failed to load SQLite: ${error.message}`);
        }
    }
    /**
     * Load entries from a Markdown tree: every .md file under sourcePath
     * becomes one entry. Per-file parse failures are recorded as recoverable
     * errors and skipped.
     */
    async loadFromMarkdown() {
        const entries = [];
        const basePath = this.config.sourcePath;
        try {
            const files = await this.walkDirectory(basePath, '.md');
            for (const filePath of files) {
                try {
                    const content = await fs.readFile(filePath, 'utf-8');
                    const entry = this.parseMarkdownEntry(filePath, content, basePath);
                    if (entry) {
                        entries.push(entry);
                    }
                }
                catch (error) {
                    this.addError(filePath, error.message, 'PARSE_ERROR', true);
                }
            }
            return entries;
        }
        catch (error) {
            throw new Error(`Failed to load Markdown: ${error.message}`);
        }
    }
    /**
     * Load entries from a JSON dump. Accepts three shapes:
     * - a bare array of entries
     * - an object with an `entries` array
     * - a namespace -> entries[] map (entries get tagged with their namespace)
     *
     * @param filePath Optional path override; defaults to the configured
     *                 sourcePath (backward compatible with zero-arg callers).
     */
    async loadFromJSON(filePath = this.config.sourcePath) {
        try {
            const content = await fs.readFile(filePath, 'utf-8');
            const data = JSON.parse(content);
            // Handle different JSON formats
            if (Array.isArray(data)) {
                return data;
            }
            else if (data.entries) {
                return data.entries;
            }
            else if (typeof data === 'object') {
                // Assume it's a namespace -> entries map
                const entries = [];
                for (const [namespace, namespaceEntries] of Object.entries(data)) {
                    if (Array.isArray(namespaceEntries)) {
                        for (const entry of namespaceEntries) {
                            entries.push({ ...entry, namespace });
                        }
                    }
                }
                return entries;
            }
            return [];
        }
        catch (error) {
            throw new Error(`Failed to load JSON: ${error.message}`);
        }
    }
    /**
     * Load entries from a legacy MemoryManager layout by probing well-known
     * file locations under the source path.
     *
     * Fix: the previous implementation discovered an accessible candidate
     * path and then ignored it — loadFromJSON() re-read config.sourcePath.
     * The discovered file is now actually the one loaded.
     */
    async loadFromMemoryManager() {
        const possiblePaths = [
            './memory/memory-store.json',
            './.swarm/memory.db',
            './memory.json',
        ];
        for (const p of possiblePaths) {
            try {
                const fullPath = path.resolve(this.config.sourcePath, p);
                await fs.access(fullPath);
                return this.loadFromJSON(fullPath);
            }
            catch {
                continue;
            }
        }
        return [];
    }
    /**
     * Load entries from a SwarmMemory partition directory
     * (`<sourcePath>/.swarm/memory/**.json`). The namespace is derived from
     * each file's directory relative to the partitions root. Missing or
     * unreadable directories yield an empty result rather than an error.
     */
    async loadFromSwarmMemory() {
        const entries = [];
        const basePath = this.config.sourcePath;
        try {
            // Check for swarm memory directory structure
            const partitionsPath = path.join(basePath, '.swarm', 'memory');
            const files = await this.walkDirectory(partitionsPath, '.json');
            for (const filePath of files) {
                try {
                    const content = await fs.readFile(filePath, 'utf-8');
                    const data = JSON.parse(content);
                    // Extract namespace from file path (normalized to forward slashes)
                    const relativePath = path.relative(partitionsPath, filePath);
                    const namespace = path.dirname(relativePath).replace(/\\/g, '/');
                    if (Array.isArray(data)) {
                        entries.push(...data.map((e) => ({ ...e, namespace })));
                    }
                    else if (data.entries) {
                        entries.push(...data.entries.map((e) => ({ ...e, namespace })));
                    }
                }
                catch (error) {
                    this.addError(filePath, error.message, 'PARSE_ERROR', true);
                }
            }
            return entries;
        }
        catch (error) {
            return [];
        }
    }
    /**
     * Load entries from a DistributedMemory layout. The on-disk structure
     * matches SwarmMemory, so that loader is reused.
     */
    async loadFromDistributedMemory() {
        return this.loadFromSwarmMemory();
    }
    // ===== Batch Processing =====
    /**
     * Validate, transform and store one batch of legacy entries, updating
     * progress counters. With continueOnError, per-entry failures are
     * recorded (skipped/failed) instead of aborting the batch.
     */
    async processBatch(batch) {
        for (const legacyEntry of batch) {
            try {
                // Validate if enabled
                if (this.config.validateData) {
                    const validation = this.validateEntry(legacyEntry);
                    if (!validation.valid) {
                        if (this.config.continueOnError) {
                            this.addError(legacyEntry.key || 'unknown', validation.reason || 'Validation failed', 'VALIDATION_ERROR', false);
                            this.progress.skipped++;
                            continue;
                        }
                        else {
                            throw new Error(validation.reason);
                        }
                    }
                }
                // Transform to new format and store in target
                const newEntry = await this.transformEntry(legacyEntry);
                await this.target.store(newEntry);
                this.progress.migrated++;
            }
            catch (error) {
                if (this.config.continueOnError) {
                    this.addError(legacyEntry.key || 'unknown', error.message, 'STORE_ERROR', true);
                    this.progress.failed++;
                }
                else {
                    throw error;
                }
            }
        }
    }
    /**
     * Convert a legacy entry into the unified entry format: applies
     * namespace/type mappings, normalizes timestamps, stamps migration
     * metadata, and optionally attaches a generated embedding.
     */
    async transformEntry(legacy) {
        // Map namespace if configured
        let namespace = legacy.namespace || 'default';
        if (this.config.namespaceMapping && this.config.namespaceMapping[namespace]) {
            namespace = this.config.namespaceMapping[namespace];
        }
        // Non-string values are serialized so content is always text
        const content = typeof legacy.value === 'string'
            ? legacy.value
            : JSON.stringify(legacy.value);
        // Map type if configured; fall back to 'semantic'
        let type = 'semantic';
        if (legacy.metadata?.type && typeof legacy.metadata.type === 'string') {
            if (this.config.typeMapping && this.config.typeMapping[legacy.metadata.type]) {
                type = this.config.typeMapping[legacy.metadata.type];
            }
            else if (this.isValidMemoryType(legacy.metadata.type)) {
                type = legacy.metadata.type;
            }
        }
        // Parse timestamps, accepting several legacy field names
        const createdAt = this.parseTimestamp(legacy.createdAt || legacy.created_at || legacy.timestamp);
        const updatedAt = this.parseTimestamp(legacy.updatedAt || legacy.updated_at || legacy.timestamp);
        const input = {
            key: legacy.key,
            content,
            type,
            namespace,
            tags: legacy.tags || [],
            metadata: {
                ...legacy.metadata,
                migrated: true,
                migrationSource: this.config.source,
                migrationTimestamp: Date.now(),
                originalValue: legacy.value,
            },
        };
        const entry = createDefaultEntry(input);
        entry.createdAt = createdAt;
        entry.updatedAt = updatedAt;
        // Generate embedding if configured; embedding failures are non-fatal
        if (this.config.generateEmbeddings && this.embeddingGenerator) {
            try {
                entry.embedding = await this.embeddingGenerator(content);
            }
            catch (error) {
                this.emit('migration:warning', {
                    message: `Failed to generate embedding for ${legacy.key}: ${error.message}`,
                });
            }
        }
        return entry;
    }
    // ===== Helper Methods =====
    /** Fresh zeroed progress object. */
    initializeProgress() {
        return {
            total: 0,
            migrated: 0,
            failed: 0,
            skipped: 0,
            currentBatch: 0,
            totalBatches: 0,
            percentage: 0,
            estimatedTimeRemaining: 0,
            errors: [],
        };
    }
    /**
     * Minimal sanity checks on a legacy entry before migration.
     * @returns {{ valid: boolean, reason?: string }}
     */
    validateEntry(entry) {
        if (!entry.key || typeof entry.key !== 'string') {
            return { valid: false, reason: 'Missing or invalid key' };
        }
        if (entry.value === undefined) {
            return { valid: false, reason: 'Missing value' };
        }
        if (entry.key.length > 500) {
            return { valid: false, reason: 'Key too long (max 500 chars)' };
        }
        return { valid: true };
    }
    /** Record an error in progress and broadcast it via 'migration:error'. */
    addError(entryId, message, code, recoverable) {
        const error = {
            entryId,
            message,
            code,
            recoverable,
        };
        this.progress.errors.push(error);
        this.emit('migration:error', error);
    }
    /**
     * Normalize a legacy timestamp to epoch milliseconds. Numeric values
     * below 1e12 are treated as epoch seconds; unparseable/missing values
     * default to "now".
     */
    parseTimestamp(value) {
        if (!value)
            return Date.now();
        if (typeof value === 'number') {
            // Handle both milliseconds and seconds
            return value > 1e12 ? value : value * 1000;
        }
        const parsed = Date.parse(value);
        return isNaN(parsed) ? Date.now() : parsed;
    }
    /** Whether `type` is one of the recognized memory types. */
    isValidMemoryType(type) {
        return ['episodic', 'semantic', 'procedural', 'working', 'cache'].includes(type);
    }
    /**
     * Linear extrapolation of remaining time (ms) from throughput so far.
     * Returns 0 until at least one entry has completed.
     */
    estimateTimeRemaining(startTime, completed, total) {
        if (completed === 0)
            return 0;
        const elapsed = Date.now() - startTime;
        const rate = completed / elapsed;
        const remaining = total - completed;
        return Math.round(remaining / rate);
    }
    /** Build the human-readable result summary including error-code counts. */
    generateSummary() {
        const { migrated, failed, skipped, total, errors } = this.progress;
        let summary = `Migrated ${migrated}/${total} entries`;
        if (failed > 0) {
            summary += `, ${failed} failed`;
        }
        if (skipped > 0) {
            summary += `, ${skipped} skipped`;
        }
        if (errors.length > 0) {
            const errorTypes = new Map();
            for (const error of errors) {
                errorTypes.set(error.code, (errorTypes.get(error.code) || 0) + 1);
            }
            const errorSummary = Array.from(errorTypes.entries())
                .map(([code, count]) => `${code}: ${count}`)
                .join(', ');
            summary += `. Errors: ${errorSummary}`;
        }
        return summary;
    }
    /**
     * Recursively collect files under `dir` whose names end with `extension`.
     * Unreadable/missing directories contribute no files (best-effort walk).
     */
    async walkDirectory(dir, extension) {
        const files = [];
        try {
            const entries = await fs.readdir(dir, { withFileTypes: true });
            for (const entry of entries) {
                const fullPath = path.join(dir, entry.name);
                if (entry.isDirectory()) {
                    const subFiles = await this.walkDirectory(fullPath, extension);
                    files.push(...subFiles);
                }
                else if (entry.isFile() && entry.name.endsWith(extension)) {
                    files.push(fullPath);
                }
            }
        }
        catch (error) {
            // Directory doesn't exist or isn't readable
        }
        return files;
    }
    /**
     * Build a legacy entry from a Markdown file. Parses a simple YAML-like
     * frontmatter block (scalars, booleans, ints, bracketed arrays) into
     * metadata. The entry key is the relative path with '/' -> ':' and the
     * namespace is the containing directory.
     */
    parseMarkdownEntry(filePath, content, basePath) {
        // Extract frontmatter if present
        const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
        let metadata = {};
        let body = content;
        if (frontmatterMatch) {
            try {
                // Simple YAML-like parsing: one `key: value` per line
                const frontmatter = frontmatterMatch[1];
                for (const line of frontmatter.split('\n')) {
                    const colonIndex = line.indexOf(':');
                    if (colonIndex > 0) {
                        const key = line.substring(0, colonIndex).trim();
                        let value = line.substring(colonIndex + 1).trim();
                        // Parse common scalar types
                        if (value === 'true')
                            value = true;
                        else if (value === 'false')
                            value = false;
                        else if (typeof value === 'string' && /^\d+$/.test(value))
                            value = parseInt(value, 10);
                        else if (typeof value === 'string' && value.startsWith('[') && value.endsWith(']')) {
                            try {
                                // Accept single-quoted arrays by converting to JSON
                                value = JSON.parse(value.replace(/'/g, '"'));
                            }
                            catch {
                                // Keep as string
                            }
                        }
                        metadata[key] = value;
                    }
                }
                body = frontmatterMatch[2];
            }
            catch {
                // Failed to parse frontmatter, use whole content
            }
        }
        // Derive key from file path
        const relativePath = path.relative(basePath, filePath);
        const key = relativePath
            .replace(/\\/g, '/')
            .replace(/\.md$/, '')
            .replace(/\//g, ':');
        // Derive namespace from directory structure
        const namespace = path.dirname(relativePath).replace(/\\/g, '/') || 'default';
        return {
            key,
            value: body.trim(),
            namespace,
            tags: Array.isArray(metadata.tags) ? metadata.tags : [],
            metadata,
            timestamp: Date.now(),
        };
    }
}
|
|
494
|
+
/**
 * Convenience factory: build a MemoryMigrator for a single source.
 *
 * Note: the spread order deliberately allows `options` to override
 * `source`/`sourcePath` if it carries those keys.
 */
export function createMigrator(target, source, sourcePath, options = {}, embeddingGenerator) {
    const mergedConfig = { source, sourcePath, ...options };
    return new MemoryMigrator(target, mergedConfig, embeddingGenerator);
}
|
|
500
|
+
/**
 * Run migrations sequentially over several sources, collecting one result
 * per source in order. Sources are processed one at a time (not in
 * parallel) so progress and target writes stay serialized.
 */
export async function migrateMultipleSources(target, sources, options = {}, embeddingGenerator) {
    const allResults = [];
    for (const src of sources) {
        const migrator = createMigrator(target, src.source, src.path, options, embeddingGenerator);
        allResults.push(await migrator.migrate());
    }
    return allResults;
}
|
|
512
|
+
export default MemoryMigrator;
|
|
513
|
+
//# sourceMappingURL=migration.js.map
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PersistentSonaCoordinator - SONA learning with RVF persistence
|
|
3
|
+
*
|
|
4
|
+
* Wraps RvfLearningStore to provide an in-memory pattern bank with
|
|
5
|
+
* brute-force cosine similarity, trajectory buffering, EWC tracking,
|
|
6
|
+
* and automatic periodic persistence to disk.
|
|
7
|
+
*
|
|
8
|
+
* This is intentionally decoupled from the ruvector SONA classes:
|
|
9
|
+
* it defines its own compatible types and delegates persistence to
|
|
10
|
+
* RvfLearningStore.
|
|
11
|
+
*
|
|
12
|
+
* @module @claude-flow/memory/persistent-sona
|
|
13
|
+
*/
|
|
14
|
+
import type { PatternRecord, TrajectoryRecord } from './rvf-learning-store.js';
|
|
15
|
+
/**
 * Configuration options for a PersistentSonaCoordinator.
 * Only `storePath` is required; the remaining fields fall back to the
 * defaults noted on each member.
 */
export interface PersistentSonaConfig {
    /** Path to the RVF learning store file */
    storePath: string;
    /** Cosine similarity threshold for pattern matching (default: 0.85) */
    patternThreshold?: number;
    /** Maximum buffered trajectories before oldest are evicted (default: 1000) */
    maxTrajectoryBuffer?: number;
    /** Auto-persist interval in ms (default: 30000) */
    autoPersistInterval?: number;
    /** Enable verbose logging (default: false) */
    verbose?: boolean;
}
|
|
27
|
+
/**
 * Coordinates SONA learning with persistent storage.
 *
 * @example
 * ```typescript
 * const sona = new PersistentSonaCoordinator({
 *   storePath: './data/sona-learning.rvls',
 * });
 * await sona.initialize();
 *
 * // Store a pattern
 * const id = sona.storePattern('query_response', embedding);
 *
 * // Find similar patterns
 * const matches = sona.findSimilarPatterns(queryEmbedding, 5);
 *
 * // Record a trajectory
 * sona.recordTrajectory({ id: 'traj-1', steps: [...], outcome: 'success', ... });
 *
 * // Periodic background learning
 * const result = sona.runBackgroundLoop();
 *
 * await sona.shutdown();
 * ```
 */
export declare class PersistentSonaCoordinator {
    // Underlying RvfLearningStore that persistence is delegated to
    private store;
    // In-memory pattern bank searched with brute-force cosine similarity
    private patterns;
    // Buffered trajectories awaiting runBackgroundLoop() processing
    private trajectoryBuffer;
    // EWC tracking state (surfaced via getStats().ewcTasksLearned)
    private ewcState;
    // Cached config values (see PersistentSonaConfig defaults)
    private patternThreshold;
    private maxTrajectoryBuffer;
    private verbose;
    // Set by initialize(); presumably checked by ensureInitialized — confirm in impl
    private initialized;
    constructor(config: PersistentSonaConfig);
    /**
     * Initialize by loading persisted state from the RVF store.
     */
    initialize(): Promise<void>;
    /**
     * Store a new pattern and return its ID.
     *
     * @param type - Pattern type (e.g. 'query_response', 'routing')
     * @param embedding - The pattern embedding vector
     * @param metadata - Optional extra metadata (currently unused, reserved)
     * @returns The generated pattern ID
     */
    storePattern(type: string, embedding: number[], metadata?: Record<string, unknown>): string;
    /**
     * Find the k most similar patterns above the configured threshold.
     * Uses brute-force cosine similarity (suitable for small pattern sets).
     */
    findSimilarPatterns(embedding: number[], k?: number): PatternRecord[];
    /**
     * Record a pattern usage outcome. Updates the success rate using an
     * exponential moving average (alpha = 0.1).
     */
    recordPatternUsage(patternId: string, success: boolean): void;
    /**
     * Remove patterns that have low success rates after sufficient usage.
     *
     * @returns The number of patterns pruned
     */
    prunePatterns(minSuccessRate?: number, minUseCount?: number): number;
    /**
     * Buffer a completed trajectory for later processing.
     * When the buffer exceeds maxTrajectoryBuffer, the oldest entries
     * are evicted.
     */
    recordTrajectory(trajectory: TrajectoryRecord): void;
    /**
     * Process buffered trajectories to extract new patterns.
     * Successful and partial trajectories are mined for high-confidence
     * steps; new patterns are stored if they are sufficiently different
     * from existing ones.
     *
     * After processing, the trajectory buffer is cleared and low-performing
     * patterns are pruned.
     *
     * @returns Summary of the learning pass
     */
    runBackgroundLoop(): {
        patternsLearned: number;
        trajectoriesProcessed: number;
    };
    /**
     * Flush current in-memory state to the RVF store on disk.
     */
    persist(): Promise<void>;
    /**
     * Persist state and shut down the store.
     */
    shutdown(): Promise<void>;
    /**
     * Return a summary of the coordinator's current state.
     */
    getStats(): {
        patterns: number;
        avgSuccessRate: number;
        trajectoriesBuffered: number;
        ewcTasksLearned: number;
    };
    /**
     * Extract patterns from a trajectory's high-confidence steps.
     * A step produces a new pattern only if no sufficiently similar
     * pattern already exists.
     */
    private extractPatternsFromTrajectory;
    /**
     * Deterministic hash-based embedding for pattern extraction.
     * This is a lightweight stand-in for a real embedding model,
     * matching the approach used in SonaCoordinator.
     */
    private createHashEmbedding;
    // Guard helper — likely throws or no-ops before initialize(); confirm in impl
    private ensureInitialized;
    // Logging helper gated by the `verbose` flag — confirm in impl
    private log;
}
|
|
144
|
+
//# sourceMappingURL=persistent-sona.d.ts.map
|