@yamo/memory-mesh 2.3.2 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/memory_mesh.js +1 -1
- package/lib/llm/client.d.ts +111 -0
- package/lib/llm/client.js +299 -357
- package/lib/llm/client.ts +413 -0
- package/lib/llm/index.d.ts +17 -0
- package/lib/llm/index.js +15 -8
- package/lib/llm/index.ts +19 -0
- package/lib/memory/adapters/client.d.ts +183 -0
- package/lib/memory/adapters/client.js +518 -0
- package/lib/memory/adapters/client.ts +678 -0
- package/lib/memory/adapters/config.d.ts +137 -0
- package/lib/memory/adapters/config.js +189 -0
- package/lib/memory/adapters/config.ts +259 -0
- package/lib/memory/adapters/errors.d.ts +76 -0
- package/lib/memory/adapters/errors.js +128 -0
- package/lib/memory/adapters/errors.ts +166 -0
- package/lib/memory/context-manager.d.ts +44 -0
- package/lib/memory/context-manager.js +344 -0
- package/lib/memory/context-manager.ts +432 -0
- package/lib/memory/embeddings/factory.d.ts +59 -0
- package/lib/memory/embeddings/factory.js +148 -0
- package/lib/{embeddings/factory.js → memory/embeddings/factory.ts} +69 -28
- package/lib/memory/embeddings/index.d.ts +2 -0
- package/lib/memory/embeddings/index.js +2 -0
- package/lib/memory/embeddings/index.ts +2 -0
- package/lib/memory/embeddings/service.d.ts +164 -0
- package/lib/memory/embeddings/service.js +515 -0
- package/lib/{embeddings/service.js → memory/embeddings/service.ts} +223 -156
- package/lib/memory/index.d.ts +9 -0
- package/lib/memory/index.js +9 -1
- package/lib/memory/index.ts +20 -0
- package/lib/memory/memory-mesh.d.ts +274 -0
- package/lib/memory/memory-mesh.js +1469 -678
- package/lib/memory/memory-mesh.ts +1803 -0
- package/lib/memory/memory-translator.d.ts +19 -0
- package/lib/memory/memory-translator.js +125 -0
- package/lib/memory/memory-translator.ts +158 -0
- package/lib/memory/schema.d.ts +111 -0
- package/lib/memory/schema.js +183 -0
- package/lib/memory/schema.ts +267 -0
- package/lib/memory/scorer.d.ts +26 -0
- package/lib/memory/scorer.js +77 -0
- package/lib/memory/scorer.ts +95 -0
- package/lib/memory/search/index.d.ts +1 -0
- package/lib/memory/search/index.js +1 -0
- package/lib/memory/search/index.ts +1 -0
- package/lib/memory/search/keyword-search.d.ts +62 -0
- package/lib/memory/search/keyword-search.js +135 -0
- package/lib/{search/keyword-search.js → memory/search/keyword-search.ts} +66 -36
- package/lib/scrubber/config/defaults.d.ts +53 -0
- package/lib/scrubber/config/defaults.js +49 -57
- package/lib/scrubber/config/defaults.ts +117 -0
- package/lib/scrubber/index.d.ts +6 -0
- package/lib/scrubber/index.js +3 -23
- package/lib/scrubber/index.ts +7 -0
- package/lib/scrubber/scrubber.d.ts +61 -0
- package/lib/scrubber/scrubber.js +99 -121
- package/lib/scrubber/scrubber.ts +168 -0
- package/lib/scrubber/stages/chunker.d.ts +13 -0
- package/lib/scrubber/stages/metadata-annotator.d.ts +18 -0
- package/lib/scrubber/stages/normalizer.d.ts +13 -0
- package/lib/scrubber/stages/semantic-filter.d.ts +13 -0
- package/lib/scrubber/stages/structural-cleaner.d.ts +13 -0
- package/lib/scrubber/stages/validator.d.ts +18 -0
- package/lib/scrubber/telemetry.d.ts +36 -0
- package/lib/scrubber/telemetry.js +53 -58
- package/lib/scrubber/telemetry.ts +99 -0
- package/lib/utils/logger.d.ts +29 -0
- package/lib/utils/logger.js +64 -0
- package/lib/utils/logger.ts +85 -0
- package/lib/utils/skill-metadata.d.ts +32 -0
- package/lib/utils/skill-metadata.js +132 -0
- package/lib/utils/skill-metadata.ts +147 -0
- package/lib/yamo/emitter.d.ts +73 -0
- package/lib/yamo/emitter.js +78 -143
- package/lib/yamo/emitter.ts +249 -0
- package/lib/yamo/schema.d.ts +58 -0
- package/lib/yamo/schema.js +81 -108
- package/lib/yamo/schema.ts +165 -0
- package/package.json +11 -8
- package/index.d.ts +0 -111
- package/lib/embeddings/index.js +0 -2
- package/lib/index.js +0 -6
- package/lib/lancedb/client.js +0 -633
- package/lib/lancedb/config.js +0 -215
- package/lib/lancedb/errors.js +0 -144
- package/lib/lancedb/index.js +0 -4
- package/lib/lancedb/schema.js +0 -217
- package/lib/scrubber/errors/scrubber-error.js +0 -43
- package/lib/scrubber/stages/chunker.js +0 -103
- package/lib/scrubber/stages/metadata-annotator.js +0 -74
- package/lib/scrubber/stages/normalizer.js +0 -59
- package/lib/scrubber/stages/semantic-filter.js +0 -61
- package/lib/scrubber/stages/structural-cleaner.js +0 -82
- package/lib/scrubber/stages/validator.js +0 -66
- package/lib/scrubber/utils/hash.js +0 -39
- package/lib/scrubber/utils/html-parser.js +0 -45
- package/lib/scrubber/utils/pattern-matcher.js +0 -63
- package/lib/scrubber/utils/token-counter.js +0 -31
- package/lib/search/index.js +0 -1
- package/lib/utils/index.js +0 -1
- package/lib/yamo/index.js +0 -15
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MemoryContextManager - High-level memory management for YAMO
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { MemoryMesh } from "./memory-mesh.js";
|
|
6
|
+
import { MemoryScorer } from "./scorer.js";
|
|
7
|
+
import { MemoryTranslator } from "./memory-translator.js";
|
|
8
|
+
import { createLogger } from "../utils/logger.js";
|
|
9
|
+
|
|
10
|
+
// Module-scoped logger tagged with this component's name.
const logger = createLogger("context-manager");

/**
 * Configuration options for MemoryContextManager.
 */
export interface MemoryContextConfig {
  // Pre-built MemoryMesh instance to reuse; a new one is created when omitted.
  mesh?: MemoryMesh;
  // Lazily initialize the mesh on first capture/recall (default: true).
  autoInit?: boolean;
  // Enable the in-memory recall query cache (default: true).
  enableCache?: boolean;
  // Maximum memories returned by recallMemories (default: 5).
  recallLimit?: number;
  // Interactions scoring below this importance are not persisted (default: 0.1).
  minImportance?: number;
  // Suppress warning logs unless YAMO_DEBUG=true (default: true).
  silent?: boolean;
}

/**
 * A single recall-cache entry: the cached result plus its set/touch time.
 */
interface CacheEntry {
  result: any[];
  // Epoch millis of the last set/touch; used for TTL expiry and LRU ordering.
  timestamp: number;
}
|
|
25
|
+
|
|
26
|
+
export class MemoryContextManager {
|
|
27
|
+
#config: MemoryContextConfig;
|
|
28
|
+
#mesh: MemoryMesh;
|
|
29
|
+
#scorer: MemoryScorer;
|
|
30
|
+
#initialized = false;
|
|
31
|
+
#queryCache = new Map<string, CacheEntry>();
|
|
32
|
+
#cacheConfig = {
|
|
33
|
+
maxSize: 100,
|
|
34
|
+
ttlMs: 2 * 60 * 1000, // 2 minutes
|
|
35
|
+
};
|
|
36
|
+
#cleanupTimer: ReturnType<typeof setInterval> | null = null;
|
|
37
|
+
|
|
38
|
+
/**
|
|
39
|
+
* Create a new MemoryContextManager
|
|
40
|
+
*/
|
|
41
|
+
constructor(config: MemoryContextConfig = {}) {
|
|
42
|
+
this.#config = {
|
|
43
|
+
autoInit: true,
|
|
44
|
+
enableCache: true,
|
|
45
|
+
recallLimit: 5,
|
|
46
|
+
minImportance: 0.1,
|
|
47
|
+
silent: config.silent !== false,
|
|
48
|
+
...config,
|
|
49
|
+
};
|
|
50
|
+
|
|
51
|
+
// Use provided mesh or create new instance
|
|
52
|
+
this.#mesh = config.mesh || new MemoryMesh();
|
|
53
|
+
this.#scorer = new MemoryScorer(this.#mesh);
|
|
54
|
+
|
|
55
|
+
// Start periodic cleanup timer (every 60 seconds)
|
|
56
|
+
this.#startCleanupTimer();
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Initialize the memory context manager
|
|
61
|
+
*/
|
|
62
|
+
async initialize(): Promise<void> {
|
|
63
|
+
if (this.#initialized) {
|
|
64
|
+
return;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
try {
|
|
68
|
+
await this.#mesh.init();
|
|
69
|
+
this.#initialized = true;
|
|
70
|
+
} catch (error: any) {
|
|
71
|
+
this.#logWarn(`Initialization failed: ${error.message}`);
|
|
72
|
+
this.#initialized = false;
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* Capture an interaction as memory
|
|
78
|
+
*/
|
|
79
|
+
async captureInteraction(
|
|
80
|
+
prompt: string,
|
|
81
|
+
response: string,
|
|
82
|
+
context: any = {},
|
|
83
|
+
): Promise<any> {
|
|
84
|
+
try {
|
|
85
|
+
if (this.#config.autoInit && !this.#initialized) {
|
|
86
|
+
await this.initialize();
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
if (!this.#initialized) {
|
|
90
|
+
return null;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
const content = this.#formatInteraction(prompt, response);
|
|
94
|
+
const metadata = this.#buildMetadata(context);
|
|
95
|
+
|
|
96
|
+
const isDuplicate = await this.#scorer.isDuplicate(content);
|
|
97
|
+
if (isDuplicate) {
|
|
98
|
+
return null;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
const importance = this.#scorer.calculateImportance(content, metadata);
|
|
102
|
+
|
|
103
|
+
if (importance < (this.#config.minImportance ?? 0.1)) {
|
|
104
|
+
return null;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
const memory = await this.#mesh.add(content, {
|
|
108
|
+
...metadata,
|
|
109
|
+
importanceScore: importance,
|
|
110
|
+
});
|
|
111
|
+
|
|
112
|
+
return memory;
|
|
113
|
+
} catch (error: any) {
|
|
114
|
+
this.#logWarn(`Failed to capture interaction: ${error.message}`);
|
|
115
|
+
return null;
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
/**
|
|
120
|
+
* Recall relevant memories for a query
|
|
121
|
+
*/
|
|
122
|
+
async recallMemories(query: string, options: any = {}): Promise<any[]> {
|
|
123
|
+
try {
|
|
124
|
+
if (this.#config.autoInit && !this.#initialized) {
|
|
125
|
+
await this.initialize();
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
if (!this.#initialized) {
|
|
129
|
+
return [];
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
const {
|
|
133
|
+
limit = this.#config.recallLimit,
|
|
134
|
+
useCache = this.#config.enableCache,
|
|
135
|
+
memoryType = null,
|
|
136
|
+
skillName = null,
|
|
137
|
+
} = options;
|
|
138
|
+
|
|
139
|
+
if (useCache) {
|
|
140
|
+
const cacheKey = this.#cacheKey(query, {
|
|
141
|
+
limit,
|
|
142
|
+
memoryType,
|
|
143
|
+
skillName,
|
|
144
|
+
});
|
|
145
|
+
const cached = this.#getCached(cacheKey);
|
|
146
|
+
if (cached) {
|
|
147
|
+
return cached;
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
const filter = memoryType ? `memoryType == '${memoryType}'` : null;
|
|
152
|
+
// Fetch extra when skill-scoping — some results will be filtered out post-query
|
|
153
|
+
const fetchLimit = skillName ? limit * 2 : limit;
|
|
154
|
+
|
|
155
|
+
let memories: any[] = [];
|
|
156
|
+
if (
|
|
157
|
+
memoryType === "synthesized_skill" &&
|
|
158
|
+
typeof this.#mesh.searchSkills === "function"
|
|
159
|
+
) {
|
|
160
|
+
memories = await this.#mesh.searchSkills(query, { limit: fetchLimit });
|
|
161
|
+
} else {
|
|
162
|
+
memories = await this.#mesh.search(query, {
|
|
163
|
+
limit: fetchLimit,
|
|
164
|
+
filter,
|
|
165
|
+
useCache: false,
|
|
166
|
+
});
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
memories = memories.map((memory) => {
|
|
170
|
+
const metadata =
|
|
171
|
+
typeof memory.metadata === "string"
|
|
172
|
+
? JSON.parse(memory.metadata)
|
|
173
|
+
: memory.metadata || {};
|
|
174
|
+
|
|
175
|
+
return {
|
|
176
|
+
...memory,
|
|
177
|
+
importanceScore: memory.score || metadata.importanceScore || 0,
|
|
178
|
+
memoryType:
|
|
179
|
+
metadata.memoryType ||
|
|
180
|
+
(memoryType === "synthesized_skill"
|
|
181
|
+
? "synthesized_skill"
|
|
182
|
+
: "global"),
|
|
183
|
+
};
|
|
184
|
+
});
|
|
185
|
+
|
|
186
|
+
// Deduplicate by content — results are already sorted by score, so first occurrence wins
|
|
187
|
+
const seen = new Set<string>();
|
|
188
|
+
memories = memories.filter((memory) => {
|
|
189
|
+
if (seen.has(memory.content)) {
|
|
190
|
+
return false;
|
|
191
|
+
}
|
|
192
|
+
seen.add(memory.content);
|
|
193
|
+
return true;
|
|
194
|
+
});
|
|
195
|
+
|
|
196
|
+
// Skill-scope filter: keep memories tagged with this skill OR untagged (global).
|
|
197
|
+
// Untagged memories are shared context; tagged memories are skill-private.
|
|
198
|
+
if (skillName) {
|
|
199
|
+
memories = memories.filter((memory) => {
|
|
200
|
+
const meta =
|
|
201
|
+
typeof memory.metadata === "string"
|
|
202
|
+
? JSON.parse(memory.metadata)
|
|
203
|
+
: memory.metadata || {};
|
|
204
|
+
return !meta.skill_name || meta.skill_name === skillName;
|
|
205
|
+
});
|
|
206
|
+
memories = memories.slice(0, limit);
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
if (useCache) {
|
|
210
|
+
const cacheKey = this.#cacheKey(query, {
|
|
211
|
+
limit,
|
|
212
|
+
memoryType,
|
|
213
|
+
skillName,
|
|
214
|
+
});
|
|
215
|
+
this.#setCached(cacheKey, memories);
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
return memories;
|
|
219
|
+
} catch (error: any) {
|
|
220
|
+
this.#logWarn(`Failed to recall memories: ${error.message}`);
|
|
221
|
+
return [];
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
/**
|
|
226
|
+
* Format memories for inclusion in prompt
|
|
227
|
+
*/
|
|
228
|
+
formatMemoriesForPrompt(memories: any[], options: any = {}): string {
|
|
229
|
+
try {
|
|
230
|
+
if (!memories || memories.length === 0) {
|
|
231
|
+
return "";
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
return MemoryTranslator.toYAMOContext(memories, options);
|
|
235
|
+
} catch (error: any) {
|
|
236
|
+
this.#logWarn(`Failed to format memories: ${error.message}`);
|
|
237
|
+
return "";
|
|
238
|
+
}
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
#logWarn(message: string): void {
|
|
242
|
+
if (!this.#config.silent || process.env.YAMO_DEBUG === "true") {
|
|
243
|
+
logger.warn(message);
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
#formatInteraction(prompt: string, response: string): string {
|
|
248
|
+
const lines = [
|
|
249
|
+
`[USER] ${prompt}`,
|
|
250
|
+
`[ASSISTANT] ${response.substring(0, 500)}${response.length > 500 ? "..." : ""}`,
|
|
251
|
+
];
|
|
252
|
+
|
|
253
|
+
return lines.join("\n\n");
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
#buildMetadata(context: any): any {
|
|
257
|
+
const metadata: any = {
|
|
258
|
+
interaction_type: context.interactionType || "llm_response",
|
|
259
|
+
created_at: new Date().toISOString(),
|
|
260
|
+
};
|
|
261
|
+
|
|
262
|
+
if (context.toolsUsed?.length > 0) {
|
|
263
|
+
metadata.tools_used = context.toolsUsed;
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
if (context.filesInvolved?.length > 0) {
|
|
267
|
+
metadata.files_involved = context.filesInvolved;
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
if (context.tags?.length > 0) {
|
|
271
|
+
metadata.tags = context.tags;
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
if (context.skillName) {
|
|
275
|
+
metadata.skill_name = context.skillName;
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
if (context.sessionId) {
|
|
279
|
+
metadata.session_id = context.sessionId;
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
return metadata;
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
#cacheKey(query: string, options: any): string {
|
|
286
|
+
return `recall:${query}:${JSON.stringify(options)}`;
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
/**
|
|
290
|
+
* Get cached result if valid
|
|
291
|
+
* Race condition fix: Update timestamp atomically for LRU tracking
|
|
292
|
+
*/
|
|
293
|
+
#getCached(key: string): any {
|
|
294
|
+
const entry = this.#queryCache.get(key);
|
|
295
|
+
if (!entry) {
|
|
296
|
+
return null;
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
// Check TTL before any mutation
|
|
300
|
+
const now = Date.now();
|
|
301
|
+
if (now - entry.timestamp > this.#cacheConfig.ttlMs) {
|
|
302
|
+
this.#queryCache.delete(key);
|
|
303
|
+
return null;
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
// Move to end (most recently used) - delete and re-add with updated timestamp
|
|
307
|
+
this.#queryCache.delete(key);
|
|
308
|
+
this.#queryCache.set(key, {
|
|
309
|
+
...entry,
|
|
310
|
+
timestamp: now, // Update timestamp for LRU tracking
|
|
311
|
+
});
|
|
312
|
+
|
|
313
|
+
return entry.result;
|
|
314
|
+
}
|
|
315
|
+
|
|
316
|
+
#setCached(key: string, result: any): void {
|
|
317
|
+
if (this.#queryCache.size >= this.#cacheConfig.maxSize) {
|
|
318
|
+
const firstKey = this.#queryCache.keys().next().value;
|
|
319
|
+
if (firstKey !== undefined) {
|
|
320
|
+
this.#queryCache.delete(firstKey);
|
|
321
|
+
}
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
this.#queryCache.set(key, {
|
|
325
|
+
result,
|
|
326
|
+
timestamp: Date.now(),
|
|
327
|
+
});
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
clearCache(): void {
|
|
331
|
+
this.#queryCache.clear();
|
|
332
|
+
}
|
|
333
|
+
|
|
334
|
+
getCacheStats(): any {
|
|
335
|
+
return {
|
|
336
|
+
size: this.#queryCache.size,
|
|
337
|
+
maxSize: this.#cacheConfig.maxSize,
|
|
338
|
+
ttlMs: this.#cacheConfig.ttlMs,
|
|
339
|
+
};
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
async healthCheck(): Promise<any> {
|
|
343
|
+
const health: any = {
|
|
344
|
+
status: "healthy",
|
|
345
|
+
timestamp: new Date().toISOString(),
|
|
346
|
+
initialized: this.#initialized,
|
|
347
|
+
checks: {},
|
|
348
|
+
};
|
|
349
|
+
|
|
350
|
+
try {
|
|
351
|
+
health.checks.mesh = await this.#mesh.stats(); // brain.ts has stats()
|
|
352
|
+
if (health.checks.mesh.isConnected === false) {
|
|
353
|
+
health.status = "degraded";
|
|
354
|
+
}
|
|
355
|
+
} catch (error: any) {
|
|
356
|
+
health.checks.mesh = {
|
|
357
|
+
status: "error",
|
|
358
|
+
error: error.message,
|
|
359
|
+
};
|
|
360
|
+
health.status = "unhealthy";
|
|
361
|
+
}
|
|
362
|
+
|
|
363
|
+
health.checks.cache = {
|
|
364
|
+
status: "up",
|
|
365
|
+
size: this.#queryCache.size,
|
|
366
|
+
maxSize: this.#cacheConfig.maxSize,
|
|
367
|
+
};
|
|
368
|
+
|
|
369
|
+
return health;
|
|
370
|
+
}
|
|
371
|
+
|
|
372
|
+
/**
|
|
373
|
+
* Start periodic cleanup timer to remove expired cache entries
|
|
374
|
+
* @private
|
|
375
|
+
*/
|
|
376
|
+
#startCleanupTimer(): void {
|
|
377
|
+
// Clear any existing timer
|
|
378
|
+
if (this.#cleanupTimer) {
|
|
379
|
+
clearInterval(this.#cleanupTimer);
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
// Run cleanup every 60 seconds
|
|
383
|
+
this.#cleanupTimer = setInterval(() => {
|
|
384
|
+
this.#cleanupExpired();
|
|
385
|
+
}, 60000);
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
/**
|
|
389
|
+
* Clean up expired cache entries
|
|
390
|
+
* @private
|
|
391
|
+
*/
|
|
392
|
+
#cleanupExpired(): void {
|
|
393
|
+
const now = Date.now();
|
|
394
|
+
const expiredKeys: string[] = [];
|
|
395
|
+
|
|
396
|
+
// Find expired entries
|
|
397
|
+
for (const [key, entry] of this.#queryCache.entries()) {
|
|
398
|
+
if (now - entry.timestamp > this.#cacheConfig.ttlMs) {
|
|
399
|
+
expiredKeys.push(key);
|
|
400
|
+
}
|
|
401
|
+
}
|
|
402
|
+
|
|
403
|
+
// Remove expired entries
|
|
404
|
+
for (const key of expiredKeys) {
|
|
405
|
+
this.#queryCache.delete(key);
|
|
406
|
+
}
|
|
407
|
+
|
|
408
|
+
if (
|
|
409
|
+
expiredKeys.length > 0 &&
|
|
410
|
+
(process.env.YAMO_DEBUG === "true" || !this.#config.silent)
|
|
411
|
+
) {
|
|
412
|
+
logger.debug(
|
|
413
|
+
{ count: expiredKeys.length },
|
|
414
|
+
"Cleaned up expired cache entries",
|
|
415
|
+
);
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
|
|
419
|
+
/**
|
|
420
|
+
* Dispose of resources (cleanup timer and cache)
|
|
421
|
+
* Call this when the MemoryContextManager is no longer needed
|
|
422
|
+
*/
|
|
423
|
+
dispose(): void {
|
|
424
|
+
if (this.#cleanupTimer) {
|
|
425
|
+
clearInterval(this.#cleanupTimer);
|
|
426
|
+
this.#cleanupTimer = null;
|
|
427
|
+
}
|
|
428
|
+
this.clearCache();
|
|
429
|
+
}
|
|
430
|
+
}
|
|
431
|
+
|
|
432
|
+
export default MemoryContextManager;
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* EmbeddingFactory - Multi-provider embedding with automatic fallback
|
|
3
|
+
* Manages primary and fallback embedding services
|
|
4
|
+
*/
|
|
5
|
+
import EmbeddingService, { ServiceConfig, ServiceStats } from "./service.js";
|
|
6
|
+
/** Aggregate statistics for the factory: primary plus each fallback service. */
export interface FactoryStats {
  configured: boolean;
  primary: ServiceStats | null;
  fallbacks: ServiceStats[];
}
/** Result of init(): model names of the registered services. */
export interface InitResult {
  success: boolean;
  primary: string | null;
  fallbacks: string[];
}
/**
 * Multi-provider embedding factory with automatic fallback: one primary
 * EmbeddingService plus an ordered list of fallback services tried when
 * the primary fails.
 */
declare class EmbeddingFactory {
  /** Highest-priority service, or null before configure(). */
  primaryService: EmbeddingService | null;
  /** Remaining services, in priority order. */
  fallbackServices: EmbeddingService[];
  /** True once configure() has been called. */
  configured: boolean;
  /** Service constructor used to instantiate entries (injectable for tests). */
  ServiceClass: typeof EmbeddingService;
  constructor(ServiceClass?: typeof EmbeddingService);
  /**
   * Configure embedding services with fallback chain
   * @param configs - Array of { modelType, modelName, priority, apiKey };
   *   lower priority values are tried first.
   * @returns Success status
   */
  configure(configs: ServiceConfig[]): {
    success: boolean;
  };
  /**
   * Initialize all configured services
   * @returns Initialization status with registered model names
   */
  init(): Promise<InitResult>;
  /**
   * Generate embedding with automatic fallback
   * @param text - Text to embed
   * @param options - Provider-specific options
   * @returns Embedding vector
   */
  embed(text: string, options?: any): Promise<number[]>;
  /**
   * Generate embeddings for batch of texts
   * @param texts - Texts to embed
   * @param options - Provider-specific options
   * @returns Array of embedding vectors
   */
  embedBatch(texts: string[], options?: any): Promise<number[][]>;
  /**
   * Get factory statistics
   * @returns Aggregated stats for the primary and all fallbacks
   */
  getStats(): FactoryStats;
  /**
   * Clear all caches
   */
  clearCache(): void;
}
export default EmbeddingFactory;
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* EmbeddingFactory - Multi-provider embedding with automatic fallback
|
|
3
|
+
* Manages primary and fallback embedding services
|
|
4
|
+
*/
|
|
5
|
+
import EmbeddingService from "./service.js";
|
|
6
|
+
import { ConfigurationError, EmbeddingError } from "../adapters/errors.js";
|
|
7
|
+
import { createLogger } from "../../utils/logger.js";
|
|
8
|
+
const logger = createLogger("embedding-factory");
|
|
9
|
+
/**
 * Multi-provider embedding factory: one primary EmbeddingService plus an
 * ordered list of fallback services tried when the primary fails.
 */
class EmbeddingFactory {
  primaryService; // highest-priority service, or null before configure()
  fallbackServices; // remaining services, in priority order
  configured; // true once configure() has run
  ServiceClass; // service constructor to instantiate (injectable for tests)

  /**
   * @param {typeof EmbeddingService} [ServiceClass] - Service implementation
   *   to instantiate; defaults to EmbeddingService.
   */
  constructor(ServiceClass = EmbeddingService) {
    this.primaryService = null;
    this.fallbackServices = [];
    this.configured = false;
    this.ServiceClass = ServiceClass;
  }

  /**
   * Configure embedding services with fallback chain
   * @param {Array} configs - Array of { modelType, modelName, priority, apiKey }
   * @returns {Object} Success status
   */
  configure(configs) {
    // Sort a copy (lower priority = tried first): Array#sort mutates in
    // place, and the caller's array must not be reordered as a side effect.
    const ordered = [...configs].sort(
      (a, b) => (a.priority || 0) - (b.priority || 0),
    );
    if (ordered.length > 0) {
      this.primaryService = new this.ServiceClass(ordered[0]);
    }
    if (ordered.length > 1) {
      this.fallbackServices = ordered
        .slice(1)
        .map((c) => new this.ServiceClass(c));
    }
    this.configured = true;
    return { success: true };
  }

  /**
   * Initialize all configured services
   * (fallbacks are initialized lazily, on first use).
   * @returns {Promise<Object>} Initialization status with model names
   * @throws {ConfigurationError} when configure() has not been called
   */
  async init() {
    if (!this.configured) {
      throw new ConfigurationError("EmbeddingFactory not configured. Call configure() first.");
    }
    // Initialize primary service eagerly; fallbacks stay lazy.
    if (this.primaryService && !this.primaryService.initialized) {
      await this.primaryService.init();
    }
    return {
      success: true,
      primary: this.primaryService ? this.primaryService.modelName : null,
      fallbacks: this.fallbackServices.map((s) => s.modelName),
    };
  }

  /**
   * Generate embedding with automatic fallback
   * @param {string} text - Text to embed
   * @param {Object} options - Options
   * @returns {Promise<number[]>} Embedding vector
   * @throws {ConfigurationError} when not configured
   * @throws {EmbeddingError} when every configured service fails
   */
  async embed(text, options = {}) {
    if (!this.configured || !this.primaryService) {
      throw new ConfigurationError("EmbeddingFactory not configured");
    }
    // Try primary service first.
    try {
      if (!this.primaryService.initialized) {
        await this.primaryService.init();
      }
      return await this.primaryService.embed(text, options);
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logger.warn({ err: error, primaryService: this.primaryService?.modelName }, "Primary service failed");
      // Try fallback services in priority order.
      for (const fallback of this.fallbackServices) {
        try {
          if (!fallback.initialized) {
            await fallback.init();
          }
          logger.info({ fallbackModel: fallback.modelName }, "Using fallback service");
          return await fallback.embed(text, options);
        } catch (fallbackError) {
          logger.warn({ err: fallbackError, fallbackModel: fallback.modelName }, "Fallback service failed");
        }
      }
      throw new EmbeddingError("All embedding services failed", {
        primaryError: errorMessage,
        fallbackCount: this.fallbackServices.length,
      });
    }
  }

  /**
   * Generate embeddings for batch of texts. On primary batch failure,
   * falls back to embedding each text individually via embed(), which
   * itself applies the fallback chain.
   * @param {string[]} texts - Texts to embed
   * @param {Object} options - Options
   * @returns {Promise<number[][]>} Array of embedding vectors
   */
  async embedBatch(texts, options = {}) {
    if (!this.configured || !this.primaryService) {
      throw new ConfigurationError("EmbeddingFactory not configured");
    }
    try {
      if (!this.primaryService.initialized) {
        await this.primaryService.init();
      }
      return await this.primaryService.embedBatch(texts, options);
    } catch (error) {
      logger.warn({
        err: error,
        primaryService: this.primaryService?.modelName,
        batchSize: texts.length,
      }, "Primary batch embedding failed, falling back to individual embeddings");
      // Sequential on purpose: per-item embed() already handles fallback,
      // and this avoids hammering a degraded provider in parallel.
      const results = [];
      for (const text of texts) {
        results.push(await this.embed(text, options));
      }
      return results;
    }
  }

  /**
   * Get factory statistics
   * @returns {Object} { configured, primary, fallbacks }
   */
  getStats() {
    const stats = {
      configured: this.configured,
      primary: this.primaryService?.getStats() || null,
      fallbacks: this.fallbackServices.map((s) => s.getStats()),
    };
    return stats;
  }

  /**
   * Clear all caches
   */
  clearCache() {
    this.primaryService?.clearCache();
    this.fallbackServices.forEach((s) => s.clearCache());
  }
}
export default EmbeddingFactory;
|