moflo 4.0.2 → 4.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +114 -110
- package/v3/@claude-flow/cli/dist/src/commands/hooks.js +4 -1
- package/v3/@claude-flow/cli/dist/src/memory/memory-bridge.js +61 -5
- package/v3/@claude-flow/cli/dist/src/memory/memory-initializer.js +1892 -1841
- package/v3/@claude-flow/memory/README.md +587 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.d.ts +131 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.js +223 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/agent-memory-scope.test.js +463 -0
- package/v3/@claude-flow/memory/dist/agentdb-adapter.d.ts +165 -0
- package/v3/@claude-flow/memory/dist/agentdb-adapter.js +806 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.d.ts +214 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.js +844 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.test.d.ts +7 -0
- package/v3/@claude-flow/memory/dist/agentdb-backend.test.js +258 -0
- package/v3/@claude-flow/memory/dist/application/commands/delete-memory.command.d.ts +65 -0
- package/v3/@claude-flow/memory/dist/application/commands/delete-memory.command.js +129 -0
- package/v3/@claude-flow/memory/dist/application/commands/store-memory.command.d.ts +48 -0
- package/v3/@claude-flow/memory/dist/application/commands/store-memory.command.js +72 -0
- package/v3/@claude-flow/memory/dist/application/index.d.ts +12 -0
- package/v3/@claude-flow/memory/dist/application/index.js +15 -0
- package/v3/@claude-flow/memory/dist/application/queries/search-memory.query.d.ts +72 -0
- package/v3/@claude-flow/memory/dist/application/queries/search-memory.query.js +143 -0
- package/v3/@claude-flow/memory/dist/application/services/memory-application-service.d.ts +121 -0
- package/v3/@claude-flow/memory/dist/application/services/memory-application-service.js +190 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.d.ts +226 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.js +709 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/auto-memory-bridge.test.js +754 -0
- package/v3/@claude-flow/memory/dist/benchmark.test.d.ts +2 -0
- package/v3/@claude-flow/memory/dist/benchmark.test.js +277 -0
- package/v3/@claude-flow/memory/dist/cache-manager.d.ts +134 -0
- package/v3/@claude-flow/memory/dist/cache-manager.js +407 -0
- package/v3/@claude-flow/memory/dist/controller-registry.d.ts +216 -0
- package/v3/@claude-flow/memory/dist/controller-registry.js +893 -0
- package/v3/@claude-flow/memory/dist/controller-registry.test.d.ts +14 -0
- package/v3/@claude-flow/memory/dist/controller-registry.test.js +636 -0
- package/v3/@claude-flow/memory/dist/database-provider.d.ts +87 -0
- package/v3/@claude-flow/memory/dist/database-provider.js +375 -0
- package/v3/@claude-flow/memory/dist/database-provider.test.d.ts +7 -0
- package/v3/@claude-flow/memory/dist/database-provider.test.js +285 -0
- package/v3/@claude-flow/memory/dist/domain/entities/memory-entry.d.ts +143 -0
- package/v3/@claude-flow/memory/dist/domain/entities/memory-entry.js +226 -0
- package/v3/@claude-flow/memory/dist/domain/index.d.ts +11 -0
- package/v3/@claude-flow/memory/dist/domain/index.js +12 -0
- package/v3/@claude-flow/memory/dist/domain/repositories/memory-repository.interface.d.ts +102 -0
- package/v3/@claude-flow/memory/dist/domain/repositories/memory-repository.interface.js +11 -0
- package/v3/@claude-flow/memory/dist/domain/services/memory-domain-service.d.ts +105 -0
- package/v3/@claude-flow/memory/dist/domain/services/memory-domain-service.js +297 -0
- package/v3/@claude-flow/memory/dist/hnsw-index.d.ts +111 -0
- package/v3/@claude-flow/memory/dist/hnsw-index.js +781 -0
- package/v3/@claude-flow/memory/dist/hnsw-lite.d.ts +23 -0
- package/v3/@claude-flow/memory/dist/hnsw-lite.js +168 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.d.ts +245 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.js +569 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/hybrid-backend.test.js +320 -0
- package/v3/@claude-flow/memory/dist/index.d.ts +207 -0
- package/v3/@claude-flow/memory/dist/index.js +361 -0
- package/v3/@claude-flow/memory/dist/infrastructure/index.d.ts +17 -0
- package/v3/@claude-flow/memory/dist/infrastructure/index.js +16 -0
- package/v3/@claude-flow/memory/dist/infrastructure/repositories/hybrid-memory-repository.d.ts +66 -0
- package/v3/@claude-flow/memory/dist/infrastructure/repositories/hybrid-memory-repository.js +409 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.d.ts +137 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.js +335 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/learning-bridge.test.js +578 -0
- package/v3/@claude-flow/memory/dist/memory-graph.d.ts +100 -0
- package/v3/@claude-flow/memory/dist/memory-graph.js +333 -0
- package/v3/@claude-flow/memory/dist/memory-graph.test.d.ts +8 -0
- package/v3/@claude-flow/memory/dist/memory-graph.test.js +609 -0
- package/v3/@claude-flow/memory/dist/migration.d.ts +68 -0
- package/v3/@claude-flow/memory/dist/migration.js +513 -0
- package/v3/@claude-flow/memory/dist/persistent-sona.d.ts +144 -0
- package/v3/@claude-flow/memory/dist/persistent-sona.js +332 -0
- package/v3/@claude-flow/memory/dist/query-builder.d.ts +211 -0
- package/v3/@claude-flow/memory/dist/query-builder.js +438 -0
- package/v3/@claude-flow/memory/dist/rvf-backend.d.ts +51 -0
- package/v3/@claude-flow/memory/dist/rvf-backend.js +481 -0
- package/v3/@claude-flow/memory/dist/rvf-learning-store.d.ts +139 -0
- package/v3/@claude-flow/memory/dist/rvf-learning-store.js +295 -0
- package/v3/@claude-flow/memory/dist/rvf-migration.d.ts +45 -0
- package/v3/@claude-flow/memory/dist/rvf-migration.js +234 -0
- package/v3/@claude-flow/memory/dist/sqlite-backend.d.ts +121 -0
- package/v3/@claude-flow/memory/dist/sqlite-backend.js +572 -0
- package/v3/@claude-flow/memory/dist/sqljs-backend.d.ts +128 -0
- package/v3/@claude-flow/memory/dist/sqljs-backend.js +601 -0
- package/v3/@claude-flow/memory/dist/types.d.ts +484 -0
- package/v3/@claude-flow/memory/dist/types.js +58 -0
- package/v3/@claude-flow/memory/package.json +42 -0
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* RvfLearningStore - Persistent storage for SONA learning artifacts
|
|
3
|
+
*
|
|
4
|
+
* Stores patterns, LoRA adapters, EWC state, and trajectories in a
|
|
5
|
+
* binary-header JSON-lines file format for fast append and rebuild.
|
|
6
|
+
*
|
|
7
|
+
* File format:
|
|
8
|
+
* 4-byte magic "RVLS" + newline
|
|
9
|
+
* One JSON record per line: {"type":"pattern"|"lora"|"ewc"|"trajectory","data":{...}}
|
|
10
|
+
*
|
|
11
|
+
* @module @claude-flow/memory/rvf-learning-store
|
|
12
|
+
*/
|
|
13
|
+
import * as fs from 'node:fs';
|
|
14
|
+
import * as path from 'node:path';
|
|
15
|
+
// ===== Constants =====
const MAGIC_HEADER = 'RVLS';
const DEFAULT_DIMENSIONS = 64;
const DEFAULT_AUTO_PERSIST_MS = 30_000;
// ===== Helpers =====
/** Create the parent directory of `filePath` if it does not already exist. */
function ensureDirectory(filePath) {
    const dir = path.dirname(filePath);
    if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir, { recursive: true });
    }
}
// ===== RvfLearningStore =====
/**
 * Persistent store for SONA learning artifacts.
 *
 * Maintains in-memory maps for fast reads and flushes to a JSON-lines
 * file with a "RVLS" magic first line on persist(). On initialize(), the
 * file is read line-by-line to rebuild state. All load/get methods return
 * defensive copies so callers cannot mutate internal state.
 *
 * @example
 * ```typescript
 * const store = new RvfLearningStore({ storePath: './data/learning.rvls' });
 * await store.initialize();
 *
 * await store.savePatterns([{ id: 'p1', type: 'query_response', ... }]);
 * await store.persist();
 * await store.close();
 * ```
 */
export class RvfLearningStore {
    // Resolved configuration (all optional fields filled with defaults).
    config;
    // id -> pattern record
    patterns = new Map();
    // id -> LoRA adapter record
    loraAdapters = new Map();
    // Single EWC snapshot (replaced on each save), or null when unset.
    ewcState = null;
    // Append-only trajectory log, oldest first.
    trajectories = [];
    // True when in-memory state has changes not yet flushed to disk.
    dirty = false;
    initialized = false;
    autoPersistTimer = null;
    /**
     * @param config.storePath Path of the backing file (required)
     * @param config.dimensions Embedding dimensions (default 64)
     * @param config.autoPersistInterval Auto-flush period in ms; 0 disables (default 30000)
     * @param config.verbose Enable console logging (default false)
     */
    constructor(config) {
        this.config = {
            storePath: config.storePath,
            dimensions: config.dimensions ?? DEFAULT_DIMENSIONS,
            autoPersistInterval: config.autoPersistInterval ?? DEFAULT_AUTO_PERSIST_MS,
            verbose: config.verbose ?? false,
        };
    }
    /**
     * Initialize the store by loading any existing data from disk.
     * Creates the parent directory if it does not exist and starts the
     * auto-persist timer when configured. Idempotent.
     */
    async initialize() {
        if (this.initialized)
            return;
        ensureDirectory(this.config.storePath);
        if (fs.existsSync(this.config.storePath)) {
            await this.loadFromDisk();
        }
        if (this.config.autoPersistInterval > 0) {
            // Best-effort background flush; failures are retried on the next
            // tick because persist() re-marks the store dirty on error.
            this.autoPersistTimer = setInterval(() => void this.persist().catch(() => { }), this.config.autoPersistInterval);
            // Allow the process to exit even if the timer is active
            if (this.autoPersistTimer.unref) {
                this.autoPersistTimer.unref();
            }
        }
        this.initialized = true;
        this.log('Store initialized');
    }
    // ===== Pattern operations =====
    /**
     * Save or update patterns. Existing patterns with matching IDs are
     * overwritten; new patterns are added. Inputs are shallow-copied.
     *
     * @returns The number of patterns stored
     */
    async savePatterns(patterns) {
        this.ensureInitialized();
        let count = 0;
        for (const pattern of patterns) {
            this.patterns.set(pattern.id, { ...pattern });
            count++;
        }
        this.dirty = true;
        return count;
    }
    /** Load all patterns currently held in memory (shallow copies). */
    async loadPatterns() {
        this.ensureInitialized();
        // Copy each record so callers cannot mutate stored state
        // (consistent with loadEwcState()).
        return Array.from(this.patterns.values(), (p) => ({ ...p }));
    }
    /** Return the number of stored patterns */
    async getPatternCount() {
        this.ensureInitialized();
        return this.patterns.size;
    }
    // ===== LoRA operations =====
    /** Save or update a LoRA adapter record (shallow-copied on store). */
    async saveLoraAdapter(record) {
        this.ensureInitialized();
        this.loraAdapters.set(record.id, { ...record });
        this.dirty = true;
    }
    /** Load all LoRA adapter records (shallow copies). */
    async loadLoraAdapters() {
        this.ensureInitialized();
        return Array.from(this.loraAdapters.values(), (l) => ({ ...l }));
    }
    /**
     * Delete a LoRA adapter by ID.
     * @returns true when a record was removed
     */
    async deleteLoraAdapter(id) {
        this.ensureInitialized();
        const existed = this.loraAdapters.delete(id);
        if (existed)
            this.dirty = true;
        return existed;
    }
    // ===== EWC operations =====
    /** Save EWC state (replaces any existing state; shallow-copied). */
    async saveEwcState(record) {
        this.ensureInitialized();
        this.ewcState = { ...record };
        this.dirty = true;
    }
    /** Load the EWC state (shallow copy), or null if none has been stored */
    async loadEwcState() {
        this.ensureInitialized();
        return this.ewcState ? { ...this.ewcState } : null;
    }
    // ===== Trajectory operations =====
    /** Append a trajectory record (append-only, never overwritten) */
    async appendTrajectory(record) {
        this.ensureInitialized();
        this.trajectories.push({ ...record });
        this.dirty = true;
    }
    /**
     * Return stored trajectories, newest first (shallow copies).
     * @param limit Maximum number to return (default: all)
     */
    async getTrajectories(limit) {
        this.ensureInitialized();
        const newestFirst = this.trajectories.map((t) => ({ ...t })).reverse();
        return limit !== undefined ? newestFirst.slice(0, limit) : newestFirst;
    }
    /** Return the number of stored trajectories */
    async getTrajectoryCount() {
        this.ensureInitialized();
        return this.trajectories.length;
    }
    // ===== Lifecycle =====
    /**
     * Flush all in-memory state to disk. The entire file is rewritten
     * (write-to-temp then rename, so readers never see a partial file).
     * No-op when there are no unflushed changes.
     */
    async persist() {
        if (!this.dirty)
            return;
        // Clear the flag *before* the async write so that mutations made
        // while the write is in flight re-mark the store dirty instead of
        // being silently dropped when the flag is reset afterwards
        // (lost-update race in the original implementation).
        this.dirty = false;
        try {
            ensureDirectory(this.config.storePath);
            const lines = [MAGIC_HEADER];
            // Patterns
            for (const pattern of this.patterns.values()) {
                lines.push(JSON.stringify({ type: 'pattern', data: pattern }));
            }
            // LoRA adapters
            for (const lora of this.loraAdapters.values()) {
                lines.push(JSON.stringify({ type: 'lora', data: lora }));
            }
            // EWC state
            if (this.ewcState) {
                lines.push(JSON.stringify({ type: 'ewc', data: this.ewcState }));
            }
            // Trajectories
            for (const traj of this.trajectories) {
                lines.push(JSON.stringify({ type: 'trajectory', data: traj }));
            }
            const content = lines.join('\n') + '\n';
            const tmpPath = this.config.storePath + '.tmp';
            await fs.promises.writeFile(tmpPath, content, 'utf-8');
            await fs.promises.rename(tmpPath, this.config.storePath);
        }
        catch (err) {
            // Restore the flag so the data is retried on the next persist().
            this.dirty = true;
            throw err;
        }
        this.log(`Persisted: ${this.patterns.size} patterns, ${this.loraAdapters.size} LoRA, ${this.trajectories.length} trajectories`);
    }
    /** Persist pending changes and release resources. */
    async close() {
        if (this.autoPersistTimer) {
            clearInterval(this.autoPersistTimer);
            this.autoPersistTimer = null;
        }
        if (this.dirty) {
            await this.persist();
        }
        this.initialized = false;
        this.log('Store closed');
    }
    // ===== Stats =====
    /** Return summary statistics about the store */
    async getStats() {
        this.ensureInitialized();
        let fileSizeBytes = 0;
        try {
            const stat = await fs.promises.stat(this.config.storePath);
            fileSizeBytes = stat.size;
        }
        catch {
            // File may not exist yet if nothing has been persisted
        }
        return {
            patterns: this.patterns.size,
            loraAdapters: this.loraAdapters.size,
            trajectories: this.trajectories.length,
            hasEwcState: this.ewcState !== null,
            fileSizeBytes,
        };
    }
    // ===== Private =====
    /**
     * Rebuild in-memory state from the backing file. Unreadable files,
     * a bad magic header, and malformed lines are tolerated (logged and
     * skipped) so a partially corrupt store does not block startup.
     */
    async loadFromDisk() {
        let content;
        try {
            content = await fs.promises.readFile(this.config.storePath, 'utf-8');
        }
        catch {
            return;
        }
        const lines = content.split('\n').filter((l) => l.trim().length > 0);
        if (lines.length === 0)
            return;
        // Verify magic header
        if (lines[0] !== MAGIC_HEADER) {
            this.log(`Warning: invalid magic header "${lines[0]}", expected "${MAGIC_HEADER}"`);
            return;
        }
        let parsed = 0;
        let errors = 0;
        for (let i = 1; i < lines.length; i++) {
            try {
                const record = JSON.parse(lines[i]);
                this.applyRecord(record);
                parsed++;
            }
            catch {
                errors++;
                this.log(`Warning: failed to parse line ${i + 1}`);
            }
        }
        this.log(`Loaded from disk: ${parsed} records, ${errors} errors`);
    }
    /** Route a single parsed JSON-lines record into the right container. */
    applyRecord(record) {
        switch (record.type) {
            case 'pattern': {
                const p = record.data;
                this.patterns.set(p.id, p);
                break;
            }
            case 'lora': {
                const l = record.data;
                this.loraAdapters.set(l.id, l);
                break;
            }
            case 'ewc': {
                this.ewcState = record.data;
                break;
            }
            case 'trajectory': {
                this.trajectories.push(record.data);
                break;
            }
            default:
                this.log(`Warning: unknown record type "${record.type}"`);
        }
    }
    /** @throws Error when initialize() has not completed. */
    ensureInitialized() {
        if (!this.initialized) {
            throw new Error('RvfLearningStore has not been initialized. Call initialize() first.');
        }
    }
    /** Console logging gated on config.verbose. */
    log(message) {
        if (this.config.verbose) {
            // eslint-disable-next-line no-console
            console.log(`[RvfLearningStore] ${message}`);
        }
    }
}
|
|
295
|
+
//# sourceMappingURL=rvf-learning-store.js.map
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
/** Options for controlling the migration process. */
|
|
2
|
+
export interface RvfMigrationOptions {
|
|
3
|
+
verbose?: boolean;
|
|
4
|
+
/** Entries per batch (default 500). */
|
|
5
|
+
batchSize?: number;
|
|
6
|
+
/** Embedding dimensions for target RVF file (default 1536). */
|
|
7
|
+
dimensions?: number;
|
|
8
|
+
onProgress?: (progress: {
|
|
9
|
+
current: number;
|
|
10
|
+
total: number;
|
|
11
|
+
phase: string;
|
|
12
|
+
}) => void;
|
|
13
|
+
}
|
|
14
|
+
/** Result returned after a migration completes. */
|
|
15
|
+
export interface RvfMigrationResult {
|
|
16
|
+
success: boolean;
|
|
17
|
+
entriesMigrated: number;
|
|
18
|
+
sourceFormat: string;
|
|
19
|
+
targetFormat: string;
|
|
20
|
+
durationMs: number;
|
|
21
|
+
errors: string[];
|
|
22
|
+
}
|
|
23
|
+
/**
|
|
24
|
+
* Bidirectional migration utility between RVF and legacy memory formats.
|
|
25
|
+
*
|
|
26
|
+
* All methods are static — no instantiation required.
|
|
27
|
+
*/
|
|
28
|
+
export declare class RvfMigrator {
|
|
29
|
+
/** Migrate a JSON memory file to RVF format. */
|
|
30
|
+
static fromJsonFile(jsonPath: string, rvfPath: string, options?: RvfMigrationOptions): Promise<RvfMigrationResult>;
|
|
31
|
+
/** Migrate a SQLite (better-sqlite3 / sql.js) database to RVF. */
|
|
32
|
+
static fromSqlite(dbPath: string, rvfPath: string, options?: RvfMigrationOptions): Promise<RvfMigrationResult>;
|
|
33
|
+
/** Export an RVF file back to a JSON array (backward compatibility). */
|
|
34
|
+
static toJsonFile(rvfPath: string, jsonPath: string): Promise<RvfMigrationResult>;
|
|
35
|
+
/**
|
|
36
|
+
* Detect file format by magic bytes.
|
|
37
|
+
* - RVF\0 (0x52 0x56 0x46 0x00) -> 'rvf'
|
|
38
|
+
* - SQLi (0x53 0x51 0x4C 0x69) -> 'sqlite'
|
|
39
|
+
* - Leading [ or { -> 'json'
|
|
40
|
+
*/
|
|
41
|
+
static detectFormat(filePath: string): Promise<'rvf' | 'json' | 'sqlite' | 'unknown'>;
|
|
42
|
+
/** Auto-detect source format and migrate to RVF. */
|
|
43
|
+
static autoMigrate(sourcePath: string, targetRvfPath: string, options?: RvfMigrationOptions): Promise<RvfMigrationResult>;
|
|
44
|
+
}
|
|
45
|
+
//# sourceMappingURL=rvf-migration.d.ts.map
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* RVF Migration Utility — bidirectional migration between RVF and legacy
|
|
3
|
+
* formats (JSON files, sql.js / better-sqlite3 databases).
|
|
4
|
+
* @module @claude-flow/memory/rvf-migration
|
|
5
|
+
*/
|
|
6
|
+
import { readFile, writeFile, rename, mkdir } from 'node:fs/promises';
|
|
7
|
+
import { existsSync } from 'node:fs';
|
|
8
|
+
import { dirname, resolve } from 'node:path';
|
|
9
|
+
import { RvfBackend } from './rvf-backend.js';
|
|
10
|
+
import { generateMemoryId } from './types.js';
|
|
11
|
+
// -- Internal helpers -------------------------------------------------------
/**
 * Complete a raw (possibly partial) record into a full memory entry.
 * Missing fields receive defaults: a generated id, 'semantic' type,
 * 'default' namespace, 'private' access level, version 1, and the
 * current timestamp for created/updated/last-accessed times.
 * NOTE(review): assumes `raw` follows the MemoryEntry shape used by the
 * RVF backend — confirm against types.js.
 */
function fillDefaults(raw) {
    const now = Date.now();
    return {
        id: raw.id ?? generateMemoryId(),
        key: raw.key ?? '',
        content: raw.content ?? '',
        type: raw.type ?? 'semantic',
        namespace: raw.namespace ?? 'default',
        tags: raw.tags ?? [],
        metadata: raw.metadata ?? {},
        ownerId: raw.ownerId,
        accessLevel: raw.accessLevel ?? 'private',
        createdAt: raw.createdAt ?? now,
        updatedAt: raw.updatedAt ?? now,
        expiresAt: raw.expiresAt,
        version: raw.version ?? 1,
        references: raw.references ?? [],
        accessCount: raw.accessCount ?? 0,
        lastAccessedAt: raw.lastAccessedAt ?? now,
        embedding: deserializeEmbedding(raw.embedding),
    };
}
/**
 * Coerce a serialized embedding into a Float32Array.
 * Accepts Float32Array (passed through), Buffer/Uint8Array (decoded as
 * little-endian float32 words), or a plain number array. Anything else
 * (including empty buffers) yields undefined.
 */
function deserializeEmbedding(value) {
    if (!value)
        return undefined;
    if (value instanceof Float32Array)
        return value;
    if (value instanceof Buffer || value instanceof Uint8Array) {
        if (value.byteLength === 0)
            return undefined;
        // Floor the word count so a truncated/corrupt buffer whose length
        // is not a multiple of 4 cannot make the Float32Array constructor
        // throw RangeError on a non-integer length; the trailing partial
        // word is ignored.
        const out = new Float32Array(Math.floor(value.byteLength / 4));
        const view = new DataView(value.buffer, value.byteOffset, value.byteLength);
        for (let i = 0; i < out.length; i++)
            out[i] = view.getFloat32(i * 4, true);
        return out;
    }
    if (Array.isArray(value))
        return new Float32Array(value);
    return undefined;
}
|
|
52
|
+
/**
 * Produce a plain-JSON-friendly copy of an entry: a Float32Array
 * embedding (when present) is converted to a regular number array;
 * every other field is copied through unchanged.
 */
function serializeForJson(entry) {
    const { embedding } = entry;
    const plainEmbedding = embedding ? Array.from(embedding) : undefined;
    return { ...entry, embedding: plainEmbedding };
}
|
|
55
|
+
/**
 * Reject obviously unusable paths before touching the filesystem.
 * @throws Error when `p` is empty, not a string, or embeds NUL bytes.
 */
function validateMigrationPath(p) {
    const isNonEmptyString = typeof p === 'string' && p.length > 0;
    if (!isNonEmptyString) {
        throw new Error('Path must be a non-empty string');
    }
    if (p.indexOf('\0') !== -1) {
        throw new Error('Path contains null bytes');
    }
}
/** Create the parent directory of `filePath` when it is missing. */
async function ensureDir(filePath) {
    validateMigrationPath(filePath);
    const parent = dirname(resolve(filePath));
    if (existsSync(parent)) {
        return;
    }
    await mkdir(parent, { recursive: true });
}
/**
 * Write `data` to `targetPath` atomically: the payload goes to a unique
 * temporary file first and is then renamed over the destination, so
 * readers never observe a half-written file.
 */
async function atomicWrite(targetPath, data) {
    validateMigrationPath(targetPath);
    const destination = resolve(targetPath);
    const scratch = `${destination}.tmp.${Date.now()}`;
    await ensureDir(destination);
    const encoding = typeof data === 'string' ? 'utf-8' : undefined;
    await writeFile(scratch, data, encoding);
    await rename(scratch, destination);
}
|
|
75
|
+
/**
 * Assemble a migration result object, deriving `durationMs` from the
 * supplied start timestamp (milliseconds since epoch).
 */
function mkResult(success, entriesMigrated, sourceFormat, targetFormat, startMs, errors) {
    const durationMs = Date.now() - startMs;
    return {
        success,
        entriesMigrated,
        sourceFormat,
        targetFormat,
        durationMs,
        errors,
    };
}
|
|
78
|
+
/**
 * Decode JSON-encoded columns coming out of SQLite. The `tags`,
 * `metadata`, and `references` columns are stored as JSON strings;
 * unparseable values fall back to {} (metadata) or [] (the others).
 * Non-string values are left untouched. Returns a shallow copy.
 */
function normalizeSqliteRow(row) {
    const jsonColumns = ['tags', 'metadata', 'references'];
    const normalized = { ...row };
    for (const column of jsonColumns) {
        const value = normalized[column];
        if (typeof value !== 'string') {
            continue;
        }
        try {
            normalized[column] = JSON.parse(value);
        }
        catch {
            normalized[column] = column === 'metadata' ? {} : [];
        }
    }
    return normalized;
}
|
|
92
|
+
/**
 * Read all memory rows from a SQLite database file using sql.js (WASM).
 *
 * Returns an array of plain row objects keyed by column name — this is
 * the shape both callers require (`fromSqlite` reads `rows.length` and
 * `migrateBatches` iterates the rows); the previous implementation
 * incorrectly returned a raw {exec, close} db handle, so SQLite
 * migration could never work.
 *
 * @throws Error 'Cannot read SQLite: install sql.js' when sql.js is
 *   missing; file-read/query failures propagate with their own message
 *   instead of being misattributed to a missing dependency.
 */
async function readSqliteRows(dbPath) {
    let SQL;
    try {
        const initSqlJs = (await import('sql.js')).default;
        SQL = await initSqlJs();
    }
    catch {
        throw new Error('Cannot read SQLite: install sql.js');
    }
    const fs = await import('node:fs');
    const buf = fs.readFileSync(dbPath);
    const db = new SQL.Database(buf);
    try {
        // NOTE(review): assumes the legacy schema keeps entries in a table
        // named "memories" — confirm against the SQLite backend's
        // createSchema before relying on this for other databases.
        const resultSets = db.exec('SELECT * FROM memories');
        if (resultSets.length === 0)
            return [];
        const { columns, values } = resultSets[0];
        return values.map((cells) => Object.fromEntries(cells.map((cell, i) => [columns[i], cell])));
    }
    finally {
        db.close();
    }
}
|
|
106
|
+
// -- Batch migration helper -------------------------------------------------
/**
 * Insert `items` into the RVF backend at `rvfPath` in fixed-size batches.
 *
 * Each item is optionally passed through `normalize` and then completed
 * via fillDefaults(). Items that fail normalization are recorded in the
 * returned `errors` array without aborting the run. The backend is
 * always shut down, even when an insert throws. `onProgress` fires once
 * per batch with the 'migrating' phase.
 *
 * @returns `{ migrated, errors }` — count written and per-entry messages
 */
async function migrateBatches(items, rvfPath, options, normalize) {
    const batchSize = options.batchSize ?? 500;
    const dimensions = options.dimensions ?? 1536;
    const backend = new RvfBackend({ databasePath: rvfPath, dimensions, verbose: options.verbose });
    await backend.initialize();
    let migrated = 0;
    const errors = [];
    try {
        for (let start = 0; start < items.length; start += batchSize) {
            const batch = items.slice(start, start + batchSize);
            const entries = [];
            for (let offset = 0; offset < batch.length; offset++) {
                const item = batch[offset];
                try {
                    entries.push(fillDefaults(normalize ? normalize(item) : item));
                }
                catch (e) {
                    // Fix: the old code labeled every failure with the batch
                    // *start* index, misidentifying the bad entry; use the
                    // item's own absolute index as the fallback label.
                    errors.push(`Entry ${item.id ?? (start + offset)}: ${e.message}`);
                }
            }
            if (entries.length > 0) {
                await backend.bulkInsert(entries);
                migrated += entries.length;
            }
            options.onProgress?.({ current: Math.min(start + batchSize, items.length), total: items.length, phase: 'migrating' });
        }
    }
    finally {
        await backend.shutdown();
    }
    return { migrated, errors };
}
|
|
138
|
+
/**
 * Bidirectional migration utility between RVF and legacy memory formats.
 *
 * All methods are static — no instantiation required.
 */
export class RvfMigrator {
    /** Migrate a JSON memory file to RVF format. */
    static async fromJsonFile(jsonPath, rvfPath, options = {}) {
        const startedAt = Date.now();
        const rawText = await readFile(jsonPath, 'utf-8');
        let payload;
        try {
            payload = JSON.parse(rawText);
        }
        catch (e) {
            return mkResult(false, 0, 'json', 'rvf', startedAt, [`Invalid JSON: ${e.message}`]);
        }
        // A single top-level object is treated as a one-entry array.
        const items = Array.isArray(payload) ? payload : [payload];
        const { migrated, errors } = await migrateBatches(items, rvfPath, options);
        if (options.verbose) {
            console.log(`[RvfMigrator] Migrated ${migrated} entries from JSON to RVF`);
        }
        return mkResult(errors.length === 0, migrated, 'json', 'rvf', startedAt, errors);
    }
    /** Migrate a SQLite (better-sqlite3 / sql.js) database to RVF. */
    static async fromSqlite(dbPath, rvfPath, options = {}) {
        const startedAt = Date.now();
        let rows;
        try {
            rows = await readSqliteRows(dbPath);
        }
        catch (e) {
            return mkResult(false, 0, 'sqlite', 'rvf', startedAt, [e.message]);
        }
        options.onProgress?.({ current: 0, total: rows.length, phase: 'reading' });
        const { migrated, errors } = await migrateBatches(rows, rvfPath, options, normalizeSqliteRow);
        if (options.verbose) {
            console.log(`[RvfMigrator] Migrated ${migrated} entries from SQLite to RVF`);
        }
        return mkResult(errors.length === 0, migrated, 'sqlite', 'rvf', startedAt, errors);
    }
    /** Export an RVF file back to a JSON array (backward compatibility). */
    static async toJsonFile(rvfPath, jsonPath) {
        const startedAt = Date.now();
        const backend = new RvfBackend({ databasePath: rvfPath });
        await backend.initialize();
        let entries;
        try {
            entries = await backend.query({ type: 'hybrid', limit: Number.MAX_SAFE_INTEGER });
        }
        finally {
            await backend.shutdown();
        }
        const warnings = [];
        if (entries.length === 0) {
            warnings.push('Source RVF file contained no entries');
        }
        const json = JSON.stringify(entries.map(serializeForJson), null, 2);
        await atomicWrite(jsonPath, json);
        return mkResult(true, entries.length, 'rvf', 'json', startedAt, warnings);
    }
    /**
     * Detect file format by magic bytes.
     * - RVF\0 (0x52 0x56 0x46 0x00) -> 'rvf'
     * - SQLi (0x53 0x51 0x4C 0x69) -> 'sqlite'
     * - Leading [ or { -> 'json'
     */
    static async detectFormat(filePath) {
        if (!existsSync(filePath)) {
            return 'unknown';
        }
        const fsModule = await import('node:fs');
        const handle = await fsModule.promises.open(filePath, 'r');
        try {
            // Short files leave the remainder zero-filled, which matches
            // none of the signatures below.
            const header = Buffer.alloc(16);
            await handle.read(header, 0, 16, 0);
            const headerStartsWith = (...bytes) => bytes.every((b, i) => header[i] === b);
            if (headerStartsWith(0x52, 0x56, 0x46, 0x00)) {
                return 'rvf';
            }
            if (headerStartsWith(0x53, 0x51, 0x4c, 0x69)) {
                return 'sqlite';
            }
            const text = header.toString('utf-8').trimStart();
            return text.startsWith('[') || text.startsWith('{') ? 'json' : 'unknown';
        }
        finally {
            await handle.close();
        }
    }
    /** Auto-detect source format and migrate to RVF. */
    static async autoMigrate(sourcePath, targetRvfPath, options = {}) {
        const format = await RvfMigrator.detectFormat(sourcePath);
        if (options.verbose) {
            console.log(`[RvfMigrator] Detected source format: ${format}`);
        }
        if (format === 'json') {
            return RvfMigrator.fromJsonFile(sourcePath, targetRvfPath, options);
        }
        if (format === 'sqlite') {
            return RvfMigrator.fromSqlite(sourcePath, targetRvfPath, options);
        }
        if (format === 'rvf') {
            // Already in the target format — nothing to copy.
            return { success: true, entriesMigrated: 0, sourceFormat: 'rvf', targetFormat: 'rvf', durationMs: 0, errors: [] };
        }
        return { success: false, entriesMigrated: 0, sourceFormat: 'unknown', targetFormat: 'rvf', durationMs: 0, errors: [`Unrecognized format: ${sourcePath}`] };
    }
}
|
|
234
|
+
//# sourceMappingURL=rvf-migration.js.map
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SQLite Memory Backend
|
|
3
|
+
*
|
|
4
|
+
* Provides structured storage for memory entries using SQLite.
|
|
5
|
+
* Optimized for ACID transactions, exact matches, and complex queries.
|
|
6
|
+
* Part of ADR-009: Hybrid Memory Backend (SQLite + AgentDB)
|
|
7
|
+
*
|
|
8
|
+
* @module v3/memory/sqlite-backend
|
|
9
|
+
*/
|
|
10
|
+
import { EventEmitter } from 'node:events';
|
|
11
|
+
import { IMemoryBackend, MemoryEntry, MemoryEntryUpdate, MemoryQuery, SearchOptions, SearchResult, BackendStats, HealthCheckResult, EmbeddingGenerator } from './types.js';
|
|
12
|
+
/**
|
|
13
|
+
* Configuration for SQLite Backend
|
|
14
|
+
*/
|
|
15
|
+
export interface SQLiteBackendConfig {
|
|
16
|
+
/** Path to SQLite database file (:memory: for in-memory) */
|
|
17
|
+
databasePath: string;
|
|
18
|
+
/** Enable WAL mode for better concurrency */
|
|
19
|
+
walMode: boolean;
|
|
20
|
+
/** Enable query optimization */
|
|
21
|
+
optimize: boolean;
|
|
22
|
+
/** Default namespace */
|
|
23
|
+
defaultNamespace: string;
|
|
24
|
+
/** Embedding generator (for compatibility with hybrid mode) */
|
|
25
|
+
embeddingGenerator?: EmbeddingGenerator;
|
|
26
|
+
/** Maximum entries before auto-cleanup */
|
|
27
|
+
maxEntries: number;
|
|
28
|
+
/** Enable verbose logging */
|
|
29
|
+
verbose: boolean;
|
|
30
|
+
}
|
|
31
|
+
/**
|
|
32
|
+
* SQLite Backend for Structured Memory Storage
|
|
33
|
+
*
|
|
34
|
+
* Provides:
|
|
35
|
+
* - ACID transactions for data consistency
|
|
36
|
+
* - Efficient indexing for exact matches and prefix queries
|
|
37
|
+
* - Full-text search capabilities
|
|
38
|
+
* - Complex SQL queries with joins and aggregations
|
|
39
|
+
* - Persistent storage with WAL mode
|
|
40
|
+
*/
|
|
41
|
+
export declare class SQLiteBackend extends EventEmitter implements IMemoryBackend {
|
|
42
|
+
private config;
|
|
43
|
+
private db;
|
|
44
|
+
private initialized;
|
|
45
|
+
private stats;
|
|
46
|
+
constructor(config?: Partial<SQLiteBackendConfig>);
|
|
47
|
+
/**
|
|
48
|
+
* Initialize the SQLite backend
|
|
49
|
+
*/
|
|
50
|
+
initialize(): Promise<void>;
|
|
51
|
+
/**
|
|
52
|
+
* Shutdown the backend
|
|
53
|
+
*/
|
|
54
|
+
shutdown(): Promise<void>;
|
|
55
|
+
/**
|
|
56
|
+
* Store a memory entry
|
|
57
|
+
*/
|
|
58
|
+
store(entry: MemoryEntry): Promise<void>;
|
|
59
|
+
/**
|
|
60
|
+
* Get a memory entry by ID
|
|
61
|
+
*/
|
|
62
|
+
get(id: string): Promise<MemoryEntry | null>;
|
|
63
|
+
/**
|
|
64
|
+
* Get a memory entry by key within a namespace
|
|
65
|
+
*/
|
|
66
|
+
getByKey(namespace: string, key: string): Promise<MemoryEntry | null>;
|
|
67
|
+
/**
|
|
68
|
+
* Update a memory entry
|
|
69
|
+
*/
|
|
70
|
+
update(id: string, update: MemoryEntryUpdate): Promise<MemoryEntry | null>;
|
|
71
|
+
/**
|
|
72
|
+
* Delete a memory entry
|
|
73
|
+
*/
|
|
74
|
+
delete(id: string): Promise<boolean>;
|
|
75
|
+
/**
|
|
76
|
+
* Query memory entries with filters
|
|
77
|
+
*/
|
|
78
|
+
query(query: MemoryQuery): Promise<MemoryEntry[]>;
|
|
79
|
+
/**
|
|
80
|
+
* Semantic vector search (not optimized for SQLite, returns empty)
|
|
81
|
+
* Use HybridBackend for semantic search with AgentDB
|
|
82
|
+
*/
|
|
83
|
+
search(embedding: Float32Array, options: SearchOptions): Promise<SearchResult[]>;
|
|
84
|
+
/**
|
|
85
|
+
* Bulk insert entries
|
|
86
|
+
*/
|
|
87
|
+
bulkInsert(entries: MemoryEntry[]): Promise<void>;
|
|
88
|
+
/**
|
|
89
|
+
* Bulk delete entries
|
|
90
|
+
*/
|
|
91
|
+
bulkDelete(ids: string[]): Promise<number>;
|
|
92
|
+
/**
|
|
93
|
+
* Get entry count
|
|
94
|
+
*/
|
|
95
|
+
count(namespace?: string): Promise<number>;
|
|
96
|
+
/**
|
|
97
|
+
* List all namespaces
|
|
98
|
+
*/
|
|
99
|
+
listNamespaces(): Promise<string[]>;
|
|
100
|
+
/**
|
|
101
|
+
* Clear all entries in a namespace
|
|
102
|
+
*/
|
|
103
|
+
clearNamespace(namespace: string): Promise<number>;
|
|
104
|
+
/**
|
|
105
|
+
* Get backend statistics
|
|
106
|
+
*/
|
|
107
|
+
getStats(): Promise<BackendStats>;
|
|
108
|
+
/**
|
|
109
|
+
* Perform health check
|
|
110
|
+
*/
|
|
111
|
+
healthCheck(): Promise<HealthCheckResult>;
|
|
112
|
+
private ensureInitialized;
|
|
113
|
+
private createSchema;
|
|
114
|
+
private rowToEntry;
|
|
115
|
+
/**
|
|
116
|
+
* Synchronous store for use in transactions
|
|
117
|
+
*/
|
|
118
|
+
private storeSync;
|
|
119
|
+
}
|
|
120
|
+
export default SQLiteBackend;
|
|
121
|
+
//# sourceMappingURL=sqlite-backend.d.ts.map
|