@mnemoai/core 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.ts +3395 -0
- package/openclaw.plugin.json +815 -0
- package/package.json +59 -0
- package/src/access-tracker.ts +341 -0
- package/src/adapters/README.md +78 -0
- package/src/adapters/chroma.ts +206 -0
- package/src/adapters/lancedb.ts +237 -0
- package/src/adapters/pgvector.ts +218 -0
- package/src/adapters/qdrant.ts +191 -0
- package/src/adaptive-retrieval.ts +90 -0
- package/src/audit-log.ts +238 -0
- package/src/chunker.ts +254 -0
- package/src/config.ts +271 -0
- package/src/decay-engine.ts +238 -0
- package/src/embedder.ts +735 -0
- package/src/extraction-prompts.ts +339 -0
- package/src/license.ts +258 -0
- package/src/llm-client.ts +125 -0
- package/src/mcp-server.ts +415 -0
- package/src/memory-categories.ts +71 -0
- package/src/memory-upgrader.ts +388 -0
- package/src/migrate.ts +364 -0
- package/src/mnemo.ts +142 -0
- package/src/noise-filter.ts +97 -0
- package/src/noise-prototypes.ts +164 -0
- package/src/observability.ts +81 -0
- package/src/query-tracker.ts +57 -0
- package/src/reflection-event-store.ts +98 -0
- package/src/reflection-item-store.ts +112 -0
- package/src/reflection-mapped-metadata.ts +84 -0
- package/src/reflection-metadata.ts +23 -0
- package/src/reflection-ranking.ts +33 -0
- package/src/reflection-retry.ts +181 -0
- package/src/reflection-slices.ts +265 -0
- package/src/reflection-store.ts +602 -0
- package/src/resonance-state.ts +85 -0
- package/src/retriever.ts +1510 -0
- package/src/scopes.ts +375 -0
- package/src/self-improvement-files.ts +143 -0
- package/src/semantic-gate.ts +121 -0
- package/src/session-recovery.ts +138 -0
- package/src/smart-extractor.ts +923 -0
- package/src/smart-metadata.ts +561 -0
- package/src/storage-adapter.ts +153 -0
- package/src/store.ts +1330 -0
- package/src/tier-manager.ts +189 -0
- package/src/tools.ts +1292 -0
- package/src/wal-recovery.ts +172 -0
- package/test/core.test.mjs +301 -0
|
@@ -0,0 +1,923 @@
|
|
|
1
|
+
// SPDX-License-Identifier: MIT
|
|
2
|
+
/**
|
|
3
|
+
* Smart Memory Extractor — LLM-powered extraction pipeline
|
|
4
|
+
* Replaces regex-triggered capture with intelligent 6-category extraction.
|
|
5
|
+
*
|
|
6
|
+
* Pipeline: conversation → LLM extract → candidates → dedup → persist
|
|
7
|
+
*
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import type { MemoryStore, MemorySearchResult } from "./store.js";
|
|
11
|
+
import type { Embedder } from "./embedder.js";
|
|
12
|
+
import type { LlmClient } from "./llm-client.js";
|
|
13
|
+
import {
|
|
14
|
+
buildExtractionPrompt,
|
|
15
|
+
buildChineseExtractionPrompt,
|
|
16
|
+
buildDedupPrompt,
|
|
17
|
+
buildMergePrompt,
|
|
18
|
+
} from "./extraction-prompts.js";
|
|
19
|
+
import {
|
|
20
|
+
type CandidateMemory,
|
|
21
|
+
type DedupDecision,
|
|
22
|
+
type DedupResult,
|
|
23
|
+
type ExtractionStats,
|
|
24
|
+
type MemoryCategory,
|
|
25
|
+
ALWAYS_MERGE_CATEGORIES,
|
|
26
|
+
MERGE_SUPPORTED_CATEGORIES,
|
|
27
|
+
MEMORY_CATEGORIES,
|
|
28
|
+
normalizeCategory,
|
|
29
|
+
} from "./memory-categories.js";
|
|
30
|
+
import { isNoise } from "./noise-filter.js";
|
|
31
|
+
import type { NoisePrototypeBank } from "./noise-prototypes.js";
|
|
32
|
+
import { buildSmartMetadata, parseSmartMetadata, stringifySmartMetadata, parseSupportInfo, updateSupportStats } from "./smart-metadata.js";
|
|
33
|
+
|
|
34
|
+
// ============================================================================
// Constants
// ============================================================================

// Minimum cosine-similarity score for an existing memory to count as a dedup
// candidate (passed to store.vectorSearch in deduplicate()).
const SIMILARITY_THRESHOLD = 0.7;
// How many of the similar memories are rendered into the LLM dedup prompt.
const MAX_SIMILAR_FOR_PROMPT = 3;
// Hard cap on candidates processed per extraction run (extras are dropped).
const MAX_MEMORIES_PER_EXTRACTION = 5;
// Decisions the dedup LLM may return; anything else falls back to "create".
const VALID_DECISIONS = new Set<string>(["create", "merge", "skip", "support", "contextualize", "contradict"]);
|
|
42
|
+
|
|
43
|
+
// ============================================================================
|
|
44
|
+
// CJK Detection
|
|
45
|
+
// ============================================================================
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* Detect whether a text is predominantly CJK (Chinese/Japanese/Korean).
|
|
49
|
+
* Returns true if CJK characters make up > 30% of non-whitespace characters.
|
|
50
|
+
*/
|
|
51
|
+
function isCjkDominant(text: string): boolean {
|
|
52
|
+
// CJK Unified Ideographs + CJK Extension A/B + CJK Compatibility + Kana + Hangul
|
|
53
|
+
const cjkRegex = /[\u4e00-\u9fff\u3400-\u4dbf\u3040-\u309f\u30a0-\u30ff\uac00-\ud7af\uf900-\ufaff]/g;
|
|
54
|
+
const nonWhitespace = text.replace(/\s/g, "");
|
|
55
|
+
if (nonWhitespace.length === 0) return false;
|
|
56
|
+
const cjkMatches = nonWhitespace.match(cjkRegex);
|
|
57
|
+
const cjkCount = cjkMatches ? cjkMatches.length : 0;
|
|
58
|
+
return cjkCount / nonWhitespace.length > 0.3;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
// ============================================================================
|
|
62
|
+
// Smart Extractor
|
|
63
|
+
// ============================================================================
|
|
64
|
+
|
|
65
|
+
export interface SmartExtractorConfig {
  /** User identifier substituted into the extraction prompt (defaults to "User"). */
  user?: string;
  /** Minimum conversation messages before extraction triggers. */
  extractMinMessages?: number;
  /** Maximum characters of conversation text to process (tail-truncated; defaults to 8000). */
  extractMaxChars?: number;
  /** Default scope for new memories (falls back to "global"). */
  defaultScope?: string;
  /** Logger function (defaults to console.log). */
  log?: (msg: string) => void;
  /** Debug logger function (defaults to a no-op). */
  debugLog?: (msg: string) => void;
  /** Optional embedding-based noise prototype bank for language-agnostic noise filtering. */
  noiseBank?: NoisePrototypeBank;
}
|
|
81
|
+
|
|
82
|
+
export interface ExtractPersistOptions {
  /** Target scope for newly created memories (falls back to config.defaultScope, then "global"). */
  scope?: string;
  /** Scopes visible to the current agent for dedup/merge; when empty, the target scope is used. */
  scopeFilter?: string[];
}
|
|
88
|
+
|
|
89
|
+
export class SmartExtractor {
|
|
90
|
+
// Resolved logger callbacks (config values or fallbacks chosen in the constructor).
private log: (msg: string) => void;
private debugLog: (msg: string) => void;

/**
 * @param store    Persistence layer for memories (vector search, get/update/store).
 * @param embedder Embedding provider used for dedup and noise filtering.
 * @param llm      LLM client used for extraction / dedup / merge prompts.
 * @param config   Optional tuning knobs; see SmartExtractorConfig.
 */
constructor(
  private store: MemoryStore,
  private embedder: Embedder,
  private llm: LlmClient,
  private config: SmartExtractorConfig = {},
) {
  // Default to console logging; debug logging is a no-op unless provided.
  this.log = config.log ?? ((msg: string) => console.log(msg));
  this.debugLog = config.debugLog ?? (() => { });
}
|
|
102
|
+
|
|
103
|
+
// --------------------------------------------------------------------------
|
|
104
|
+
// Main entry point
|
|
105
|
+
// --------------------------------------------------------------------------
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Extract memories from a conversation text and persist them.
|
|
109
|
+
* Returns extraction statistics.
|
|
110
|
+
*/
|
|
111
|
+
async extractAndPersist(
|
|
112
|
+
conversationText: string,
|
|
113
|
+
sessionKey: string = "unknown",
|
|
114
|
+
options: ExtractPersistOptions = {},
|
|
115
|
+
): Promise<ExtractionStats> {
|
|
116
|
+
const stats: ExtractionStats = { created: 0, merged: 0, skipped: 0 };
|
|
117
|
+
const targetScope = options.scope ?? this.config.defaultScope ?? "global";
|
|
118
|
+
const scopeFilter =
|
|
119
|
+
options.scopeFilter && options.scopeFilter.length > 0
|
|
120
|
+
? options.scopeFilter
|
|
121
|
+
: [targetScope];
|
|
122
|
+
|
|
123
|
+
// Step 1: LLM extraction
|
|
124
|
+
const candidates = await this.extractCandidates(conversationText);
|
|
125
|
+
|
|
126
|
+
if (candidates.length === 0) {
|
|
127
|
+
this.log("memory-pro: smart-extractor: no memories extracted");
|
|
128
|
+
// LLM returned zero candidates → strongest noise signal → feedback to noise bank
|
|
129
|
+
this.learnAsNoise(conversationText);
|
|
130
|
+
return stats;
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
this.log(
|
|
134
|
+
`memory-pro: smart-extractor: extracted ${candidates.length} candidate(s)`,
|
|
135
|
+
);
|
|
136
|
+
|
|
137
|
+
// Step 2: Process each candidate through dedup pipeline
|
|
138
|
+
for (const candidate of candidates.slice(0, MAX_MEMORIES_PER_EXTRACTION)) {
|
|
139
|
+
try {
|
|
140
|
+
await this.processCandidate(
|
|
141
|
+
candidate,
|
|
142
|
+
sessionKey,
|
|
143
|
+
stats,
|
|
144
|
+
targetScope,
|
|
145
|
+
scopeFilter,
|
|
146
|
+
);
|
|
147
|
+
} catch (err) {
|
|
148
|
+
this.log(
|
|
149
|
+
`memory-pro: smart-extractor: failed to process candidate [${candidate.category}]: ${String(err)}`,
|
|
150
|
+
);
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
return stats;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
// --------------------------------------------------------------------------
|
|
158
|
+
// Embedding Noise Pre-Filter
|
|
159
|
+
// --------------------------------------------------------------------------
|
|
160
|
+
|
|
161
|
+
/**
|
|
162
|
+
* Filter out texts that match noise prototypes by embedding similarity.
|
|
163
|
+
* Long texts (>300 chars) are passed through without checking.
|
|
164
|
+
* Only active when noiseBank is configured and initialized.
|
|
165
|
+
*/
|
|
166
|
+
async filterNoiseByEmbedding(texts: string[]): Promise<string[]> {
|
|
167
|
+
const noiseBank = this.config.noiseBank;
|
|
168
|
+
if (!noiseBank || !noiseBank.initialized) return texts;
|
|
169
|
+
|
|
170
|
+
const result: string[] = [];
|
|
171
|
+
for (const text of texts) {
|
|
172
|
+
// Very short texts lack semantic signal — skip noise check to avoid false positives
|
|
173
|
+
if (text.length <= 8) {
|
|
174
|
+
result.push(text);
|
|
175
|
+
continue;
|
|
176
|
+
}
|
|
177
|
+
// Long texts are unlikely to be pure noise queries
|
|
178
|
+
if (text.length > 300) {
|
|
179
|
+
result.push(text);
|
|
180
|
+
continue;
|
|
181
|
+
}
|
|
182
|
+
try {
|
|
183
|
+
const vec = await this.embedder.embed(text);
|
|
184
|
+
if (!vec || vec.length === 0 || !noiseBank.isNoise(vec)) {
|
|
185
|
+
result.push(text);
|
|
186
|
+
} else {
|
|
187
|
+
this.debugLog(
|
|
188
|
+
`mnemo: smart-extractor: embedding noise filtered: ${text.slice(0, 80)}`,
|
|
189
|
+
);
|
|
190
|
+
}
|
|
191
|
+
} catch {
|
|
192
|
+
// Embedding failed — pass text through
|
|
193
|
+
result.push(text);
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
return result;
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
/**
|
|
200
|
+
* Feed back conversation text to the noise prototype bank.
|
|
201
|
+
* Called when LLM extraction returns zero candidates (strongest noise signal).
|
|
202
|
+
*/
|
|
203
|
+
private async learnAsNoise(conversationText: string): Promise<void> {
|
|
204
|
+
const noiseBank = this.config.noiseBank;
|
|
205
|
+
if (!noiseBank || !noiseBank.initialized) return;
|
|
206
|
+
|
|
207
|
+
try {
|
|
208
|
+
const tail = conversationText.slice(-300);
|
|
209
|
+
const vec = await this.embedder.embed(tail);
|
|
210
|
+
if (vec && vec.length > 0) {
|
|
211
|
+
noiseBank.learn(vec);
|
|
212
|
+
this.debugLog("mnemo: smart-extractor: learned noise from zero-extraction");
|
|
213
|
+
}
|
|
214
|
+
} catch {
|
|
215
|
+
// Non-critical — silently skip
|
|
216
|
+
}
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
// --------------------------------------------------------------------------
|
|
220
|
+
// Step 1: LLM Extraction
|
|
221
|
+
// --------------------------------------------------------------------------
|
|
222
|
+
|
|
223
|
+
/**
 * Call LLM to extract candidate memories from conversation text.
 *
 * The conversation is tail-truncated to extractMaxChars (default 8000) and
 * routed to the Chinese prompt when CJK-dominant. The raw LLM output is then
 * validated: candidates with an unknown category, an abstract shorter than
 * 5 characters, or a noise-flagged abstract are dropped, with per-drop debug
 * logging and a final summary line.
 */
private async extractCandidates(
  conversationText: string,
): Promise<CandidateMemory[]> {
  const maxChars = this.config.extractMaxChars ?? 8000;
  // Keep the tail — the most recent messages carry the freshest facts.
  const truncated =
    conversationText.length > maxChars
      ? conversationText.slice(-maxChars)
      : conversationText;

  const user = this.config.user ?? "User";
  // Language routing: CJK-dominant text gets the Chinese extraction prompt.
  const prompt = isCjkDominant(truncated)
    ? buildChineseExtractionPrompt(truncated, user)
    : buildExtractionPrompt(truncated, user);

  const result = await this.llm.completeJson<{
    memories: Array<{
      category: string;
      abstract: string;
      overview: string;
      content: string;
    }>;
  }>(prompt, "extract-candidates");

  // completeJson returns null on unparseable output — treat as no candidates.
  if (!result) {
    this.debugLog(
      "mnemo: smart-extractor: extract-candidates returned null",
    );
    return [];
  }
  // Defensive shape check: the LLM may return JSON without a memories array.
  if (!result.memories || !Array.isArray(result.memories)) {
    this.debugLog(
      `mnemo: smart-extractor: extract-candidates returned unexpected shape keys=${Object.keys(result).join(",") || "(none)"}`,
    );
    return [];
  }

  this.debugLog(
    `mnemo: smart-extractor: extract-candidates raw memories=${result.memories.length}`,
  );

  // Validate and normalize candidates
  const candidates: CandidateMemory[] = [];
  let invalidCategoryCount = 0;
  let shortAbstractCount = 0;
  let noiseAbstractCount = 0;
  for (const raw of result.memories) {
    // normalizeCategory maps free-form LLM categories onto the known set;
    // falsy result means the category is unrecognized.
    const category = normalizeCategory(raw.category ?? "");
    if (!category) {
      invalidCategoryCount++;
      this.debugLog(
        `mnemo: smart-extractor: dropping candidate due to invalid category rawCategory=${JSON.stringify(raw.category ?? "")} abstract=${JSON.stringify((raw.abstract ?? "").trim().slice(0, 120))}`,
      );
      continue;
    }

    const abstract = (raw.abstract ?? "").trim();
    const overview = (raw.overview ?? "").trim();
    const content = (raw.content ?? "").trim();

    // Skip empty or noise
    if (!abstract || abstract.length < 5) {
      shortAbstractCount++;
      this.debugLog(
        `mnemo: smart-extractor: dropping candidate due to short abstract category=${category} abstract=${JSON.stringify(abstract)}`,
      );
      continue;
    }
    if (isNoise(abstract)) {
      noiseAbstractCount++;
      this.debugLog(
        `mnemo: smart-extractor: dropping candidate due to noise abstract category=${category} abstract=${JSON.stringify(abstract.slice(0, 120))}`,
      );
      continue;
    }

    candidates.push({ category, abstract, overview, content });
  }

  this.debugLog(
    `mnemo: smart-extractor: validation summary accepted=${candidates.length}, invalidCategory=${invalidCategoryCount}, shortAbstract=${shortAbstractCount}, noiseAbstract=${noiseAbstractCount}`,
  );

  return candidates;
}
|
|
310
|
+
|
|
311
|
+
// --------------------------------------------------------------------------
|
|
312
|
+
// Step 2: Dedup + Persist
|
|
313
|
+
// --------------------------------------------------------------------------
|
|
314
|
+
|
|
315
|
+
/**
 * Process a single candidate memory: dedup → merge/create → store.
 *
 * Outcomes are recorded by mutating `stats` in place. Always-merge
 * categories bypass the dedup pipeline entirely; every other candidate is
 * embedded and routed through the two-stage dedup decision.
 */
private async processCandidate(
  candidate: CandidateMemory,
  sessionKey: string,
  stats: ExtractionStats,
  targetScope: string,
  scopeFilter: string[],
): Promise<void> {
  // Profile always merges (skip dedup)
  if (ALWAYS_MERGE_CATEGORIES.has(candidate.category)) {
    await this.handleProfileMerge(
      candidate,
      sessionKey,
      targetScope,
      scopeFilter,
    );
    stats.merged++;
    return;
  }

  // Embed the candidate for vector dedup
  const embeddingText = `${candidate.abstract} ${candidate.content}`;
  const vector = await this.embedder.embed(embeddingText);
  if (!vector || vector.length === 0) {
    // Degraded mode: without a vector we cannot dedup — store directly.
    this.log("memory-pro: smart-extractor: embedding failed, storing as-is");
    await this.storeCandidate(candidate, vector || [], sessionKey, targetScope);
    stats.created++;
    return;
  }

  // Dedup pipeline
  const dedupResult = await this.deduplicate(candidate, vector, scopeFilter);

  switch (dedupResult.decision) {
    case "create":
      await this.storeCandidate(candidate, vector, sessionKey, targetScope);
      stats.created++;
      break;

    case "merge":
      if (
        dedupResult.matchId &&
        MERGE_SUPPORTED_CATEGORIES.has(candidate.category)
      ) {
        await this.handleMerge(
          candidate,
          dedupResult.matchId,
          scopeFilter,
          targetScope,
          dedupResult.contextLabel,
        );
        stats.merged++;
      } else {
        // Category doesn't support merge → create instead
        await this.storeCandidate(candidate, vector, sessionKey, targetScope);
        stats.created++;
      }
      break;

    case "skip":
      // Duplicate of an existing memory — discard the candidate entirely.
      this.log(
        `memory-pro: smart-extractor: skipped [${candidate.category}] ${candidate.abstract.slice(0, 60)}`,
      );
      stats.skipped++;
      break;

    case "support":
      // Candidate reinforces an existing memory: bump its support stats
      // rather than storing a near-duplicate.
      if (dedupResult.matchId) {
        await this.handleSupport(dedupResult.matchId, scopeFilter, { session: sessionKey, timestamp: Date.now() }, dedupResult.reason, dedupResult.contextLabel);
        stats.supported = (stats.supported ?? 0) + 1;
      } else {
        // No match to support — fall back to creating a new memory.
        await this.storeCandidate(candidate, vector, sessionKey, targetScope);
        stats.created++;
      }
      break;

    case "contextualize":
      // Candidate adds situational nuance: store it linked to the original.
      if (dedupResult.matchId) {
        await this.handleContextualize(candidate, vector, dedupResult.matchId, sessionKey, targetScope, scopeFilter, dedupResult.contextLabel);
        stats.created++;
      } else {
        await this.storeCandidate(candidate, vector, sessionKey, targetScope);
        stats.created++;
      }
      break;

    case "contradict":
      // Candidate contradicts an existing memory: demote/expire the old
      // entry and record the new one.
      if (dedupResult.matchId) {
        await this.handleContradict(candidate, vector, dedupResult.matchId, sessionKey, targetScope, scopeFilter, dedupResult.contextLabel);
        stats.created++;
      } else {
        await this.storeCandidate(candidate, vector, sessionKey, targetScope);
        stats.created++;
      }
      break;
  }
}
|
|
414
|
+
|
|
415
|
+
// --------------------------------------------------------------------------
|
|
416
|
+
// Dedup Pipeline (vector pre-filter + LLM decision)
|
|
417
|
+
// --------------------------------------------------------------------------
|
|
418
|
+
|
|
419
|
+
/**
|
|
420
|
+
* Two-stage dedup: vector similarity search → LLM decision.
|
|
421
|
+
*/
|
|
422
|
+
private async deduplicate(
|
|
423
|
+
candidate: CandidateMemory,
|
|
424
|
+
candidateVector: number[],
|
|
425
|
+
scopeFilter: string[],
|
|
426
|
+
): Promise<DedupResult> {
|
|
427
|
+
// Stage 1: Vector pre-filter — find similar memories
|
|
428
|
+
const similar = await this.store.vectorSearch(
|
|
429
|
+
candidateVector,
|
|
430
|
+
5,
|
|
431
|
+
SIMILARITY_THRESHOLD,
|
|
432
|
+
scopeFilter,
|
|
433
|
+
);
|
|
434
|
+
|
|
435
|
+
if (similar.length === 0) {
|
|
436
|
+
return { decision: "create", reason: "No similar memories found" };
|
|
437
|
+
}
|
|
438
|
+
|
|
439
|
+
// Stage 2: LLM decision
|
|
440
|
+
return this.llmDedupDecision(candidate, similar);
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
private async llmDedupDecision(
|
|
444
|
+
candidate: CandidateMemory,
|
|
445
|
+
similar: MemorySearchResult[],
|
|
446
|
+
): Promise<DedupResult> {
|
|
447
|
+
const topSimilar = similar.slice(0, MAX_SIMILAR_FOR_PROMPT);
|
|
448
|
+
const existingFormatted = topSimilar
|
|
449
|
+
.map((r, i) => {
|
|
450
|
+
// Extract L0 abstract from metadata if available, fallback to text
|
|
451
|
+
let metaObj: Record<string, unknown> = {};
|
|
452
|
+
try {
|
|
453
|
+
metaObj = JSON.parse(r.entry.metadata || "{}");
|
|
454
|
+
} catch { }
|
|
455
|
+
const abstract = (metaObj.l0_abstract as string) || r.entry.text;
|
|
456
|
+
const overview = (metaObj.l1_overview as string) || "";
|
|
457
|
+
return `${i + 1}. [${(metaObj.memory_category as string) || r.entry.category}] ${abstract}\n Overview: ${overview}\n Score: ${r.score.toFixed(3)}`;
|
|
458
|
+
})
|
|
459
|
+
.join("\n");
|
|
460
|
+
|
|
461
|
+
const prompt = buildDedupPrompt(
|
|
462
|
+
candidate.abstract,
|
|
463
|
+
candidate.overview,
|
|
464
|
+
candidate.content,
|
|
465
|
+
existingFormatted,
|
|
466
|
+
);
|
|
467
|
+
|
|
468
|
+
try {
|
|
469
|
+
const data = await this.llm.completeJson<{
|
|
470
|
+
decision: string;
|
|
471
|
+
reason: string;
|
|
472
|
+
match_index?: number;
|
|
473
|
+
}>(prompt, "dedup-decision");
|
|
474
|
+
|
|
475
|
+
if (!data) {
|
|
476
|
+
this.log(
|
|
477
|
+
"memory-pro: smart-extractor: dedup LLM returned unparseable response, defaulting to CREATE",
|
|
478
|
+
);
|
|
479
|
+
return { decision: "create", reason: "LLM response unparseable" };
|
|
480
|
+
}
|
|
481
|
+
|
|
482
|
+
const decision = (data.decision?.toLowerCase() ??
|
|
483
|
+
"create") as DedupDecision;
|
|
484
|
+
if (!VALID_DECISIONS.has(decision)) {
|
|
485
|
+
return {
|
|
486
|
+
decision: "create",
|
|
487
|
+
reason: `Unknown decision: ${data.decision}`,
|
|
488
|
+
};
|
|
489
|
+
}
|
|
490
|
+
|
|
491
|
+
// Resolve merge target from LLM's match_index (1-based)
|
|
492
|
+
const idx = data.match_index;
|
|
493
|
+
const matchEntry =
|
|
494
|
+
typeof idx === "number" && idx >= 1 && idx <= topSimilar.length
|
|
495
|
+
? topSimilar[idx - 1]
|
|
496
|
+
: topSimilar[0];
|
|
497
|
+
|
|
498
|
+
return {
|
|
499
|
+
decision,
|
|
500
|
+
reason: data.reason ?? "",
|
|
501
|
+
matchId: ["merge", "support", "contextualize", "contradict"].includes(decision) ? matchEntry?.entry.id : undefined,
|
|
502
|
+
contextLabel: typeof (data as any).context_label === "string" ? (data as any).context_label : undefined,
|
|
503
|
+
};
|
|
504
|
+
} catch (err) {
|
|
505
|
+
this.log(
|
|
506
|
+
`memory-pro: smart-extractor: dedup LLM failed: ${String(err)}`,
|
|
507
|
+
);
|
|
508
|
+
return { decision: "create", reason: `LLM failed: ${String(err)}` };
|
|
509
|
+
}
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
// --------------------------------------------------------------------------
|
|
513
|
+
// Merge Logic
|
|
514
|
+
// --------------------------------------------------------------------------
|
|
515
|
+
|
|
516
|
+
/**
 * Profile always-merge: read existing profile, merge with LLM, upsert.
 *
 * Finds the nearest existing memory, keeps it only if it is tagged as a
 * profile memory, and merges into it; otherwise a fresh profile entry is
 * created.
 */
private async handleProfileMerge(
  candidate: CandidateMemory,
  sessionKey: string,
  targetScope: string,
  scopeFilter: string[],
): Promise<void> {
  // Find existing profile memory by category
  const embeddingText = `${candidate.abstract} ${candidate.content}`;
  const vector = await this.embedder.embed(embeddingText);

  // Search for existing profile memories
  // NOTE(review): limit 1 with a low threshold (0.3) means the profile is
  // only found when it is the single nearest neighbour; the .find() below
  // suggests a larger limit may have been intended — confirm.
  const existing = await this.store.vectorSearch(
    vector || [],
    1,
    0.3,
    scopeFilter,
  );
  const profileMatch = existing.find((r) => {
    try {
      const meta = JSON.parse(r.entry.metadata || "{}");
      return meta.memory_category === "profile";
    } catch {
      // Unparseable metadata — cannot confirm it is a profile entry.
      return false;
    }
  });

  if (profileMatch) {
    await this.handleMerge(
      candidate,
      profileMatch.entry.id,
      scopeFilter,
      targetScope,
    );
  } else {
    // No existing profile — create new
    await this.storeCandidate(candidate, vector || [], sessionKey, targetScope);
  }
}
|
|
557
|
+
|
|
558
|
+
/**
 * Merge a candidate into an existing memory using LLM.
 *
 * Reads the existing memory's layered content (L0/L1/L2), asks the LLM to
 * merge it with the candidate, re-embeds the merged text, and updates the
 * existing entry in place. If the existing memory cannot be read, the
 * candidate is stored as a new memory instead.
 */
private async handleMerge(
  candidate: CandidateMemory,
  matchId: string,
  scopeFilter: string[],
  targetScope: string,
  contextLabel?: string,
): Promise<void> {
  let existingAbstract = "";
  let existingOverview = "";
  let existingContent = "";

  try {
    const existing = await this.store.getById(matchId, scopeFilter);
    if (existing) {
      const meta = parseSmartMetadata(existing.metadata, existing);
      // Fall back to the raw text when the layered fields are absent.
      existingAbstract = meta.l0_abstract || existing.text;
      existingOverview = meta.l1_overview || "";
      existingContent = meta.l2_content || existing.text;
    }
  } catch {
    // Fallback: store as new
    this.log(
      `memory-pro: smart-extractor: could not read existing memory ${matchId}, storing as new`,
    );
    const vector = await this.embedder.embed(
      `${candidate.abstract} ${candidate.content}`,
    );
    await this.storeCandidate(
      candidate,
      vector || [],
      "merge-fallback",
      targetScope,
    );
    return;
  }

  // Call LLM to merge
  const prompt = buildMergePrompt(
    existingAbstract,
    existingOverview,
    existingContent,
    candidate.abstract,
    candidate.overview,
    candidate.content,
    candidate.category,
  );

  const merged = await this.llm.completeJson<{
    abstract: string;
    overview: string;
    content: string;
  }>(prompt, "merge-memory");

  // LLM merge failed — leave the existing memory untouched.
  if (!merged) {
    this.log("memory-pro: smart-extractor: merge LLM failed, skipping merge");
    return;
  }

  // Re-embed the merged content
  const mergedText = `${merged.abstract} ${merged.content}`;
  const newVector = await this.embedder.embed(mergedText);

  // Update existing memory via store.update()
  const existing = await this.store.getById(matchId, scopeFilter);
  const metadata = stringifySmartMetadata(
    buildSmartMetadata(existing ?? { text: merged.abstract }, {
      l0_abstract: merged.abstract,
      l1_overview: merged.overview,
      l2_content: merged.content,
      memory_category: candidate.category,
      tier: "working",
      confidence: 0.8,
    }),
  );

  await this.store.update(
    matchId,
    {
      text: merged.abstract,
      vector: newVector,
      metadata,
    },
    scopeFilter,
  );

  // Update support stats on the merged memory
  // (a merge counts as one more "support" observation for this context).
  try {
    const updatedEntry = await this.store.getById(matchId, scopeFilter);
    if (updatedEntry) {
      const meta = parseSmartMetadata(updatedEntry.metadata, updatedEntry);
      const supportInfo = parseSupportInfo(meta.support_info);
      const updated = updateSupportStats(supportInfo, contextLabel, "support");
      const finalMetadata = stringifySmartMetadata({ ...meta, support_info: updated });
      await this.store.update(matchId, { metadata: finalMetadata }, scopeFilter);
    }
  } catch {
    // Non-critical: merge succeeded, support stats update is best-effort
  }

  this.log(
    `memory-pro: smart-extractor: merged [${candidate.category}]${contextLabel ? ` [${contextLabel}]` : ""} into ${matchId.slice(0, 8)}`,
  );
}
|
|
664
|
+
|
|
665
|
+
// --------------------------------------------------------------------------
|
|
666
|
+
// Context-Aware Handlers (support / contextualize / contradict)
|
|
667
|
+
// --------------------------------------------------------------------------
|
|
668
|
+
|
|
669
|
+
/**
|
|
670
|
+
* Handle SUPPORT: update support stats on existing memory for a specific context.
|
|
671
|
+
*/
|
|
672
|
+
private async handleSupport(
|
|
673
|
+
matchId: string,
|
|
674
|
+
scopeFilter: string[],
|
|
675
|
+
source: { session: string; timestamp: number },
|
|
676
|
+
reason: string,
|
|
677
|
+
contextLabel?: string,
|
|
678
|
+
): Promise<void> {
|
|
679
|
+
const existing = await this.store.getById(matchId, scopeFilter);
|
|
680
|
+
if (!existing) return;
|
|
681
|
+
|
|
682
|
+
const meta = parseSmartMetadata(existing.metadata, existing);
|
|
683
|
+
const supportInfo = parseSupportInfo(meta.support_info);
|
|
684
|
+
const updated = updateSupportStats(supportInfo, contextLabel, "support");
|
|
685
|
+
meta.support_info = updated;
|
|
686
|
+
|
|
687
|
+
await this.store.update(
|
|
688
|
+
matchId,
|
|
689
|
+
{ metadata: stringifySmartMetadata(meta) },
|
|
690
|
+
scopeFilter,
|
|
691
|
+
);
|
|
692
|
+
|
|
693
|
+
this.log(
|
|
694
|
+
`memory-pro: smart-extractor: support [${contextLabel || "general"}] on ${matchId.slice(0, 8)} — ${reason}`,
|
|
695
|
+
);
|
|
696
|
+
}
|
|
697
|
+
|
|
698
|
+
/**
 * Handle CONTEXTUALIZE: create a new entry that adds situational nuance,
 * linked to the original via a "contextualizes" relation in metadata.
 */
private async handleContextualize(
  candidate: CandidateMemory,
  vector: number[],
  matchId: string,
  sessionKey: string,
  targetScope: string,
  // NOTE(review): scopeFilter is accepted for signature parity with the
  // sibling handlers but is not referenced in this method.
  scopeFilter: string[],
  contextLabel?: string,
): Promise<void> {
  const storeCategory = this.mapToStoreCategory(candidate.category);
  // Fresh entry with its own layered content, tagged with the context label
  // and a relation pointing back at the memory it refines.
  const metadata = stringifySmartMetadata({
    l0_abstract: candidate.abstract,
    l1_overview: candidate.overview,
    l2_content: candidate.content,
    memory_category: candidate.category,
    tier: "working" as const,
    access_count: 0,
    confidence: 0.7,
    last_accessed_at: Date.now(),
    source_session: sessionKey,
    contexts: contextLabel ? [contextLabel] : [],
    relations: [{ type: "contextualizes", targetId: matchId }],
  });

  await this.store.store({
    text: candidate.abstract,
    vector,
    category: storeCategory,
    scope: targetScope,
    importance: this.getDefaultImportance(candidate.category),
    metadata,
  });

  this.log(
    `memory-pro: smart-extractor: contextualize [${contextLabel || "general"}] new entry linked to ${matchId.slice(0, 8)}`,
  );
}
|
|
739
|
+
|
|
740
|
+
/**
|
|
741
|
+
* Handle CONTRADICT: create contradicting entry + record contradiction evidence
|
|
742
|
+
* on the original memory's support stats.
|
|
743
|
+
*/
|
|
744
|
+
private async handleContradict(
|
|
745
|
+
candidate: CandidateMemory,
|
|
746
|
+
vector: number[],
|
|
747
|
+
matchId: string,
|
|
748
|
+
sessionKey: string,
|
|
749
|
+
targetScope: string,
|
|
750
|
+
scopeFilter: string[],
|
|
751
|
+
contextLabel?: string,
|
|
752
|
+
): Promise<void> {
|
|
753
|
+
const now = Date.now();
|
|
754
|
+
const nowIso = new Date(now).toISOString();
|
|
755
|
+
|
|
756
|
+
// 1. Demote + expire the contradicted memory
|
|
757
|
+
const existing = await this.store.getById(matchId, scopeFilter);
|
|
758
|
+
if (existing) {
|
|
759
|
+
const meta = parseSmartMetadata(existing.metadata, existing);
|
|
760
|
+
const supportInfo = parseSupportInfo(meta.support_info);
|
|
761
|
+
const updated = updateSupportStats(supportInfo, contextLabel, "contradict");
|
|
762
|
+
meta.support_info = updated;
|
|
763
|
+
meta.expired_at = nowIso;
|
|
764
|
+
meta.expired_reason = `contradicted by: ${candidate.abstract.slice(0, 120)}`;
|
|
765
|
+
meta.superseded_by_session = sessionKey;
|
|
766
|
+
await this.store.update(
|
|
767
|
+
matchId,
|
|
768
|
+
{
|
|
769
|
+
importance: Math.max(0.05, (existing.importance ?? 0.7) * 0.2),
|
|
770
|
+
metadata: stringifySmartMetadata(meta),
|
|
771
|
+
},
|
|
772
|
+
scopeFilter,
|
|
773
|
+
);
|
|
774
|
+
|
|
775
|
+
// 2. Expire in Graphiti (fire-and-forget)
|
|
776
|
+
if (process.env.GRAPHITI_ENABLED === "true") {
|
|
777
|
+
const graphitiBase = process.env.GRAPHITI_BASE_URL || "http://127.0.0.1:18799";
|
|
778
|
+
fetch(`${graphitiBase}/facts/expire`, {
|
|
779
|
+
method: "POST",
|
|
780
|
+
headers: { "Content-Type": "application/json" },
|
|
781
|
+
body: JSON.stringify({
|
|
782
|
+
text: existing.text,
|
|
783
|
+
expired_at: nowIso,
|
|
784
|
+
reason: `contradicted: ${candidate.abstract.slice(0, 80)}`,
|
|
785
|
+
}),
|
|
786
|
+
signal: AbortSignal.timeout(5000),
|
|
787
|
+
}).catch(() => {});
|
|
788
|
+
}
|
|
789
|
+
|
|
790
|
+
this.log(
|
|
791
|
+
`memory-pro: smart-extractor: expired old memory ${matchId.slice(0, 8)} (imp ${(existing.importance ?? 0.7).toFixed(2)}→${Math.max(0.05, (existing.importance ?? 0.7) * 0.2).toFixed(2)})`,
|
|
792
|
+
);
|
|
793
|
+
}
|
|
794
|
+
|
|
795
|
+
// 3. Store the new (contradicting) entry with supersedes relation
|
|
796
|
+
const storeCategory = this.mapToStoreCategory(candidate.category);
|
|
797
|
+
const metadata = stringifySmartMetadata({
|
|
798
|
+
l0_abstract: candidate.abstract,
|
|
799
|
+
l1_overview: candidate.overview,
|
|
800
|
+
l2_content: candidate.content,
|
|
801
|
+
memory_category: candidate.category,
|
|
802
|
+
tier: "working" as const,
|
|
803
|
+
access_count: 0,
|
|
804
|
+
confidence: 0.85,
|
|
805
|
+
last_accessed_at: now,
|
|
806
|
+
source_session: sessionKey,
|
|
807
|
+
contexts: contextLabel ? [contextLabel] : [],
|
|
808
|
+
relations: [
|
|
809
|
+
{ type: "contradicts", targetId: matchId },
|
|
810
|
+
{ type: "supersedes", targetId: matchId },
|
|
811
|
+
],
|
|
812
|
+
valid_from: nowIso,
|
|
813
|
+
});
|
|
814
|
+
|
|
815
|
+
await this.store.store({
|
|
816
|
+
text: candidate.abstract,
|
|
817
|
+
vector,
|
|
818
|
+
category: storeCategory,
|
|
819
|
+
scope: targetScope,
|
|
820
|
+
importance: Math.min(1.0, this.getDefaultImportance(candidate.category) + 0.1),
|
|
821
|
+
metadata,
|
|
822
|
+
});
|
|
823
|
+
|
|
824
|
+
this.log(
|
|
825
|
+
`memory-pro: smart-extractor: contradict [${contextLabel || "general"}] superseded ${matchId.slice(0, 8)} → new entry (imp ${(this.getDefaultImportance(candidate.category) + 0.1).toFixed(2)})`,
|
|
826
|
+
);
|
|
827
|
+
}
|
|
828
|
+
|
|
829
|
+
// --------------------------------------------------------------------------
|
|
830
|
+
// Store Helper
|
|
831
|
+
// --------------------------------------------------------------------------
|
|
832
|
+
|
|
833
|
+
/**
|
|
834
|
+
* Store a candidate memory as a new entry with L0/L1/L2 metadata.
|
|
835
|
+
*/
|
|
836
|
+
private async storeCandidate(
|
|
837
|
+
candidate: CandidateMemory,
|
|
838
|
+
vector: number[],
|
|
839
|
+
sessionKey: string,
|
|
840
|
+
targetScope: string,
|
|
841
|
+
): Promise<void> {
|
|
842
|
+
// Map 6-category to existing store categories for backward compatibility
|
|
843
|
+
const storeCategory = this.mapToStoreCategory(candidate.category);
|
|
844
|
+
|
|
845
|
+
const metadata = stringifySmartMetadata(
|
|
846
|
+
buildSmartMetadata(
|
|
847
|
+
{
|
|
848
|
+
text: candidate.abstract,
|
|
849
|
+
category: this.mapToStoreCategory(candidate.category),
|
|
850
|
+
},
|
|
851
|
+
{
|
|
852
|
+
l0_abstract: candidate.abstract,
|
|
853
|
+
l1_overview: candidate.overview,
|
|
854
|
+
l2_content: candidate.content,
|
|
855
|
+
memory_category: candidate.category,
|
|
856
|
+
tier: "working",
|
|
857
|
+
access_count: 0,
|
|
858
|
+
confidence: 0.7,
|
|
859
|
+
source_session: sessionKey,
|
|
860
|
+
},
|
|
861
|
+
),
|
|
862
|
+
);
|
|
863
|
+
|
|
864
|
+
await this.store.store({
|
|
865
|
+
text: candidate.abstract, // L0 used as the searchable text
|
|
866
|
+
vector,
|
|
867
|
+
category: storeCategory,
|
|
868
|
+
scope: targetScope,
|
|
869
|
+
importance: this.getDefaultImportance(candidate.category),
|
|
870
|
+
metadata,
|
|
871
|
+
});
|
|
872
|
+
|
|
873
|
+
this.log(
|
|
874
|
+
`memory-pro: smart-extractor: created [${candidate.category}] ${candidate.abstract.slice(0, 60)}`,
|
|
875
|
+
);
|
|
876
|
+
}
|
|
877
|
+
|
|
878
|
+
/**
 * Map 6-category to existing 5-category store type for backward compatibility.
 * Unknown categories fall back to "other".
 */
private mapToStoreCategory(
  category: MemoryCategory,
): "preference" | "fact" | "decision" | "entity" | "other" {
  // Lookup table equivalent of the category switch; anything not listed
  // maps to "other".
  const mapping: Record<string, "preference" | "fact" | "decision" | "entity" | "other"> = {
    profile: "fact",
    preferences: "preference",
    entities: "entity",
    events: "decision",
    cases: "fact",
    patterns: "other",
  };
  return mapping[category] ?? "other";
}
|
|
901
|
+
|
|
902
|
+
/**
 * Get the default importance score for a memory category.
 * Identity ("profile") ranks highest; unknown categories default to 0.5.
 */
private getDefaultImportance(category: MemoryCategory): number {
  const importanceByCategory: Record<string, number> = {
    profile: 0.9, // Identity is very important
    preferences: 0.8,
    entities: 0.7,
    events: 0.6,
    cases: 0.8, // Problem-solution pairs are high value
    patterns: 0.85, // Reusable processes are high value
  };
  return importanceByCategory[category] ?? 0.5;
}
|
|
923
|
+
}
|