@mnemoai/core 1.1.0 → 1.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.d.ts +2 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +7 -0
- package/dist/cli.js.map +7 -0
- package/dist/index.d.ts +136 -0
- package/dist/index.d.ts.map +1 -0
- package/{index.ts → dist/index.js} +537 -1333
- package/dist/index.js.map +7 -0
- package/dist/src/access-tracker.d.ts +97 -0
- package/dist/src/access-tracker.d.ts.map +1 -0
- package/dist/src/access-tracker.js +184 -0
- package/dist/src/access-tracker.js.map +7 -0
- package/dist/src/adapters/chroma.d.ts +31 -0
- package/dist/src/adapters/chroma.d.ts.map +1 -0
- package/{src/adapters/chroma.ts → dist/src/adapters/chroma.js} +45 -107
- package/dist/src/adapters/chroma.js.map +7 -0
- package/dist/src/adapters/lancedb.d.ts +29 -0
- package/dist/src/adapters/lancedb.d.ts.map +1 -0
- package/{src/adapters/lancedb.ts → dist/src/adapters/lancedb.js} +41 -109
- package/dist/src/adapters/lancedb.js.map +7 -0
- package/dist/src/adapters/pgvector.d.ts +33 -0
- package/dist/src/adapters/pgvector.d.ts.map +1 -0
- package/{src/adapters/pgvector.ts → dist/src/adapters/pgvector.js} +42 -104
- package/dist/src/adapters/pgvector.js.map +7 -0
- package/dist/src/adapters/qdrant.d.ts +34 -0
- package/dist/src/adapters/qdrant.d.ts.map +1 -0
- package/dist/src/adapters/qdrant.js +132 -0
- package/dist/src/adapters/qdrant.js.map +7 -0
- package/dist/src/adaptive-retrieval.d.ts +14 -0
- package/dist/src/adaptive-retrieval.d.ts.map +1 -0
- package/dist/src/adaptive-retrieval.js +52 -0
- package/dist/src/adaptive-retrieval.js.map +7 -0
- package/dist/src/audit-log.d.ts +56 -0
- package/dist/src/audit-log.d.ts.map +1 -0
- package/dist/src/audit-log.js +139 -0
- package/dist/src/audit-log.js.map +7 -0
- package/dist/src/chunker.d.ts +45 -0
- package/dist/src/chunker.d.ts.map +1 -0
- package/dist/src/chunker.js +157 -0
- package/dist/src/chunker.js.map +7 -0
- package/dist/src/config.d.ts +70 -0
- package/dist/src/config.d.ts.map +1 -0
- package/dist/src/config.js +142 -0
- package/dist/src/config.js.map +7 -0
- package/dist/src/decay-engine.d.ts +73 -0
- package/dist/src/decay-engine.d.ts.map +1 -0
- package/dist/src/decay-engine.js +119 -0
- package/dist/src/decay-engine.js.map +7 -0
- package/dist/src/embedder.d.ts +94 -0
- package/dist/src/embedder.d.ts.map +1 -0
- package/{src/embedder.ts → dist/src/embedder.js} +119 -317
- package/dist/src/embedder.js.map +7 -0
- package/dist/src/extraction-prompts.d.ts +12 -0
- package/dist/src/extraction-prompts.d.ts.map +1 -0
- package/dist/src/extraction-prompts.js +311 -0
- package/dist/src/extraction-prompts.js.map +7 -0
- package/dist/src/license.d.ts +29 -0
- package/dist/src/license.d.ts.map +1 -0
- package/{src/license.ts → dist/src/license.js} +42 -113
- package/dist/src/license.js.map +7 -0
- package/dist/src/llm-client.d.ts +23 -0
- package/dist/src/llm-client.d.ts.map +1 -0
- package/{src/llm-client.ts → dist/src/llm-client.js} +22 -55
- package/dist/src/llm-client.js.map +7 -0
- package/dist/src/logger.d.ts +33 -0
- package/dist/src/logger.d.ts.map +1 -0
- package/dist/src/logger.js +35 -0
- package/dist/src/logger.js.map +7 -0
- package/dist/src/mcp-server.d.ts +16 -0
- package/dist/src/mcp-server.d.ts.map +1 -0
- package/{src/mcp-server.ts → dist/src/mcp-server.js} +81 -181
- package/dist/src/mcp-server.js.map +7 -0
- package/dist/src/memory-categories.d.ts +40 -0
- package/dist/src/memory-categories.d.ts.map +1 -0
- package/dist/src/memory-categories.js +33 -0
- package/dist/src/memory-categories.js.map +7 -0
- package/dist/src/memory-upgrader.d.ts +71 -0
- package/dist/src/memory-upgrader.d.ts.map +1 -0
- package/dist/src/memory-upgrader.js +238 -0
- package/dist/src/memory-upgrader.js.map +7 -0
- package/dist/src/migrate.d.ts +47 -0
- package/dist/src/migrate.d.ts.map +1 -0
- package/{src/migrate.ts → dist/src/migrate.js} +57 -165
- package/dist/src/migrate.js.map +7 -0
- package/dist/src/mnemo.d.ts +67 -0
- package/dist/src/mnemo.d.ts.map +1 -0
- package/dist/src/mnemo.js +66 -0
- package/dist/src/mnemo.js.map +7 -0
- package/dist/src/noise-filter.d.ts +23 -0
- package/dist/src/noise-filter.d.ts.map +1 -0
- package/dist/src/noise-filter.js +62 -0
- package/dist/src/noise-filter.js.map +7 -0
- package/dist/src/noise-prototypes.d.ts +40 -0
- package/dist/src/noise-prototypes.d.ts.map +1 -0
- package/dist/src/noise-prototypes.js +116 -0
- package/dist/src/noise-prototypes.js.map +7 -0
- package/dist/src/observability.d.ts +16 -0
- package/dist/src/observability.d.ts.map +1 -0
- package/dist/src/observability.js +53 -0
- package/dist/src/observability.js.map +7 -0
- package/dist/src/query-tracker.d.ts +27 -0
- package/dist/src/query-tracker.d.ts.map +1 -0
- package/dist/src/query-tracker.js +32 -0
- package/dist/src/query-tracker.js.map +7 -0
- package/dist/src/reflection-event-store.d.ts +44 -0
- package/dist/src/reflection-event-store.d.ts.map +1 -0
- package/dist/src/reflection-event-store.js +50 -0
- package/dist/src/reflection-event-store.js.map +7 -0
- package/dist/src/reflection-item-store.d.ts +58 -0
- package/dist/src/reflection-item-store.d.ts.map +1 -0
- package/dist/src/reflection-item-store.js +69 -0
- package/dist/src/reflection-item-store.js.map +7 -0
- package/dist/src/reflection-mapped-metadata.d.ts +47 -0
- package/dist/src/reflection-mapped-metadata.d.ts.map +1 -0
- package/dist/src/reflection-mapped-metadata.js +40 -0
- package/dist/src/reflection-mapped-metadata.js.map +7 -0
- package/dist/src/reflection-metadata.d.ts +11 -0
- package/dist/src/reflection-metadata.d.ts.map +1 -0
- package/dist/src/reflection-metadata.js +24 -0
- package/dist/src/reflection-metadata.js.map +7 -0
- package/dist/src/reflection-ranking.d.ts +13 -0
- package/dist/src/reflection-ranking.d.ts.map +1 -0
- package/{src/reflection-ranking.ts → dist/src/reflection-ranking.js} +12 -21
- package/dist/src/reflection-ranking.js.map +7 -0
- package/dist/src/reflection-retry.d.ts +30 -0
- package/dist/src/reflection-retry.d.ts.map +1 -0
- package/{src/reflection-retry.ts → dist/src/reflection-retry.js} +24 -64
- package/dist/src/reflection-retry.js.map +7 -0
- package/dist/src/reflection-slices.d.ts +42 -0
- package/dist/src/reflection-slices.d.ts.map +1 -0
- package/{src/reflection-slices.ts → dist/src/reflection-slices.js} +60 -136
- package/dist/src/reflection-slices.js.map +7 -0
- package/dist/src/reflection-store.d.ts +85 -0
- package/dist/src/reflection-store.d.ts.map +1 -0
- package/dist/src/reflection-store.js +407 -0
- package/dist/src/reflection-store.js.map +7 -0
- package/dist/src/resonance-state.d.ts +19 -0
- package/dist/src/resonance-state.d.ts.map +1 -0
- package/{src/resonance-state.ts → dist/src/resonance-state.js} +13 -42
- package/dist/src/resonance-state.js.map +7 -0
- package/dist/src/retriever.d.ts +228 -0
- package/dist/src/retriever.d.ts.map +1 -0
- package/dist/src/retriever.js +1006 -0
- package/dist/src/retriever.js.map +7 -0
- package/dist/src/scopes.d.ts +58 -0
- package/dist/src/scopes.d.ts.map +1 -0
- package/dist/src/scopes.js +252 -0
- package/dist/src/scopes.js.map +7 -0
- package/dist/src/self-improvement-files.d.ts +20 -0
- package/dist/src/self-improvement-files.d.ts.map +1 -0
- package/{src/self-improvement-files.ts → dist/src/self-improvement-files.js} +24 -49
- package/dist/src/self-improvement-files.js.map +7 -0
- package/dist/src/semantic-gate.d.ts +24 -0
- package/dist/src/semantic-gate.d.ts.map +1 -0
- package/dist/src/semantic-gate.js +86 -0
- package/dist/src/semantic-gate.js.map +7 -0
- package/dist/src/session-recovery.d.ts +9 -0
- package/dist/src/session-recovery.d.ts.map +1 -0
- package/{src/session-recovery.ts → dist/src/session-recovery.js} +40 -57
- package/dist/src/session-recovery.js.map +7 -0
- package/dist/src/smart-extractor.d.ts +107 -0
- package/dist/src/smart-extractor.d.ts.map +1 -0
- package/{src/smart-extractor.ts → dist/src/smart-extractor.js} +130 -383
- package/dist/src/smart-extractor.js.map +7 -0
- package/dist/src/smart-metadata.d.ts +103 -0
- package/dist/src/smart-metadata.d.ts.map +1 -0
- package/dist/src/smart-metadata.js +361 -0
- package/dist/src/smart-metadata.js.map +7 -0
- package/dist/src/storage-adapter.d.ts +102 -0
- package/dist/src/storage-adapter.d.ts.map +1 -0
- package/dist/src/storage-adapter.js +22 -0
- package/dist/src/storage-adapter.js.map +7 -0
- package/dist/src/store.d.ts +108 -0
- package/dist/src/store.d.ts.map +1 -0
- package/dist/src/store.js +939 -0
- package/dist/src/store.js.map +7 -0
- package/dist/src/tier-manager.d.ts +57 -0
- package/dist/src/tier-manager.d.ts.map +1 -0
- package/dist/src/tier-manager.js +80 -0
- package/dist/src/tier-manager.js.map +7 -0
- package/dist/src/tools.d.ts +43 -0
- package/dist/src/tools.d.ts.map +1 -0
- package/dist/src/tools.js +1075 -0
- package/dist/src/tools.js.map +7 -0
- package/dist/src/wal-recovery.d.ts +30 -0
- package/dist/src/wal-recovery.d.ts.map +1 -0
- package/{src/wal-recovery.ts → dist/src/wal-recovery.js} +26 -79
- package/dist/src/wal-recovery.js.map +7 -0
- package/package.json +21 -2
- package/openclaw.plugin.json +0 -815
- package/src/access-tracker.ts +0 -341
- package/src/adapters/README.md +0 -78
- package/src/adapters/qdrant.ts +0 -191
- package/src/adaptive-retrieval.ts +0 -90
- package/src/audit-log.ts +0 -238
- package/src/chunker.ts +0 -254
- package/src/config.ts +0 -271
- package/src/decay-engine.ts +0 -238
- package/src/extraction-prompts.ts +0 -339
- package/src/memory-categories.ts +0 -71
- package/src/memory-upgrader.ts +0 -388
- package/src/mnemo.ts +0 -142
- package/src/noise-filter.ts +0 -97
- package/src/noise-prototypes.ts +0 -164
- package/src/observability.ts +0 -81
- package/src/query-tracker.ts +0 -57
- package/src/reflection-event-store.ts +0 -98
- package/src/reflection-item-store.ts +0 -112
- package/src/reflection-mapped-metadata.ts +0 -84
- package/src/reflection-metadata.ts +0 -23
- package/src/reflection-store.ts +0 -602
- package/src/retriever.ts +0 -1510
- package/src/scopes.ts +0 -375
- package/src/semantic-gate.ts +0 -121
- package/src/smart-metadata.ts +0 -561
- package/src/storage-adapter.ts +0 -153
- package/src/store.ts +0 -1330
- package/src/tier-manager.ts +0 -189
- package/src/tools.ts +0 -1292
- package/test/core.test.mjs +0 -301
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Memory Upgrader — Convert legacy memories to new smart memory format
|
|
3
|
+
*
|
|
4
|
+
* Legacy memories lack L0/L1/L2 metadata, memory_category (6-category),
|
|
5
|
+
* tier, access_count, and confidence fields. This module enriches them
|
|
6
|
+
* to enable unified memory lifecycle management (decay, tier promotion,
|
|
7
|
+
* smart dedup).
|
|
8
|
+
*
|
|
9
|
+
* Pipeline per memory:
|
|
10
|
+
* 1. Detect legacy format (missing `memory_category` in metadata)
|
|
11
|
+
* 2. Reverse-map 5-category → 6-category
|
|
12
|
+
* 3. Generate L0/L1/L2 via LLM (or fallback to simple rules)
|
|
13
|
+
* 4. Write enriched metadata back via store.update()
|
|
14
|
+
*/
|
|
15
|
+
import type { MemoryStore, MemoryEntry } from "./store.js";
|
|
16
|
+
import type { LlmClient } from "./llm-client.js";
|
|
17
|
+
/**
 * Tunables for a memory-upgrade run.
 * Options passed to `MemoryUpgrader.upgrade()` take precedence over those
 * supplied to the constructor (each field falls back via `??`).
 */
export interface UpgradeOptions {
    /** Only report counts without modifying data (default: false) */
    dryRun?: boolean;
    /** Number of memories to process per batch (default: 10) */
    batchSize?: number;
    /** Skip LLM calls; use simple text truncation for L0/L1 (default: false) */
    noLlm?: boolean;
    /** Maximum number of memories to upgrade (default: unlimited) */
    limit?: number;
    /** Scope filter — only upgrade memories in these scopes */
    scopeFilter?: string[];
    /** Logger function */
    log?: (msg: string) => void;
}
|
|
31
|
+
/**
 * Summary counters returned by `MemoryUpgrader.upgrade()`.
 * `skipped` counts memories already in the new format;
 * failed upgrades are reported in `errors` (one message per failure).
 */
export interface UpgradeResult {
    /** Total legacy memories found */
    totalLegacy: number;
    /** Successfully upgraded count */
    upgraded: number;
    /** Skipped (already new format) */
    skipped: number;
    /** Errors encountered */
    errors: string[];
}
|
|
41
|
+
/**
 * Migrates legacy memories (those whose metadata lacks `memory_category`)
 * to the smart-memory format: reverse-maps the old 5-category scheme to the
 * 6-category one, generates L0/L1/L2 summaries (LLM-assisted when an
 * `LlmClient` is provided, rule-based otherwise), and persists the enriched
 * metadata back through the store.
 */
export declare class MemoryUpgrader {
    private store;
    private llm;
    private options;
    private log;
    constructor(store: MemoryStore, llm: LlmClient | null, options?: UpgradeOptions);
    /**
     * Check if a memory entry is in legacy format (needs upgrade).
     * Legacy = no metadata, or metadata lacks `memory_category`.
     */
    isLegacyMemory(entry: MemoryEntry): boolean;
    /**
     * Scan and count legacy memories without modifying them.
     */
    countLegacy(scopeFilter?: string[]): Promise<{
        total: number;
        legacy: number;
        byCategory: Record<string, number>;
    }>;
    /**
     * Main upgrade entry point.
     * Scans all memories, filters legacy ones, and enriches them.
     */
    upgrade(options?: UpgradeOptions): Promise<UpgradeResult>;
    /**
     * Upgrade a single legacy memory entry.
     */
    private upgradeEntry;
}
|
|
70
|
+
export declare function createMemoryUpgrader(store: MemoryStore, llm: LlmClient | null, options?: UpgradeOptions): MemoryUpgrader;
|
|
71
|
+
//# sourceMappingURL=memory-upgrader.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"memory-upgrader.d.ts","sourceRoot":"","sources":["../../src/memory-upgrader.ts"],"names":[],"mappings":"AACA;;;;;;;;;;;;;GAaG;AAEH,OAAO,KAAK,EAAE,WAAW,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAC3D,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAUjD,MAAM,WAAW,cAAc;IAC7B,iEAAiE;IACjE,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,4DAA4D;IAC5D,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,4EAA4E;IAC5E,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,iEAAiE;IACjE,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,2DAA2D;IAC3D,WAAW,CAAC,EAAE,MAAM,EAAE,CAAC;IACvB,sBAAsB;IACtB,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,IAAI,CAAC;CAC7B;AAED,MAAM,WAAW,aAAa;IAC5B,kCAAkC;IAClC,WAAW,EAAE,MAAM,CAAC;IACpB,kCAAkC;IAClC,QAAQ,EAAE,MAAM,CAAC;IACjB,mCAAmC;IACnC,OAAO,EAAE,MAAM,CAAC;IAChB,yBAAyB;IACzB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB;AA8GD,qBAAa,cAAc;IAIvB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,GAAG;IACX,OAAO,CAAC,OAAO;IALjB,OAAO,CAAC,GAAG,CAAwB;gBAGzB,KAAK,EAAE,WAAW,EAClB,GAAG,EAAE,SAAS,GAAG,IAAI,EACrB,OAAO,GAAE,cAAmB;IAKtC;;;OAGG;IACH,cAAc,CAAC,KAAK,EAAE,WAAW,GAAG,OAAO;IAW3C;;OAEG;IACG,WAAW,CAAC,WAAW,CAAC,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC;QACjD,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC;QACf,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;KACpC,CAAC;IAeF;;;OAGG;IACG,OAAO,CAAC,OAAO,GAAE,cAAmB,GAAG,OAAO,CAAC,aAAa,CAAC;IAkFnE;;OAEG;YACW,YAAY;CA8E3B;AAMD,wBAAgB,oBAAoB,CAClC,KAAK,EAAE,WAAW,EAClB,GAAG,EAAE,SAAS,GAAG,IAAI,EACrB,OAAO,GAAE,cAAmB,GAC3B,cAAc,CAEhB"}
|
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
import { buildSmartMetadata, stringifySmartMetadata } from "./smart-metadata.js";
|
|
2
|
+
import { log } from "./logger.js";
|
|
3
|
+
/**
 * Reverse-map an old 5-category value to the new 6-category scheme.
 *
 * Ambiguous case: "fact" maps to either "profile" or "cases". A cheap
 * heuristic promotes short texts that look like personal-identity
 * statements to "profile"; everything else defaults to "cases" (the LLM
 * enrichment pass may still override via `resolved_category`).
 */
function reverseMapCategory(oldCategory, text) {
  switch (oldCategory) {
    case "preference":
      return "preferences";
    case "entity":
      return "entities";
    case "decision":
      return "events";
    case "other":
      return "patterns";
    case "fact":
      // FIX: the previous pattern wrapped the CJK alternatives in \b, but
      // \b is defined via \w and CJK characters are non-word chars in JS
      // regexes, so 叫我/我的/我是 could essentially never match inside
      // Chinese text. Keep \b for the English phrases only; match the CJK
      // phrases as plain substrings.
      if (
        (/\b(?:my |i am |i'm |name is )/i.test(text) ||
          /叫我|我的|我是/.test(text)) &&
        text.length < 200
      ) {
        return "profile";
      }
      return "cases";
    default:
      return "patterns";
  }
}
|
|
22
|
+
/**
 * Build the LLM prompt that asks for a 3-layer (L0/L1/L2) summary of one
 * legacy memory. The model is instructed to answer with bare JSON and may
 * resolve the ambiguous fact→profile/cases category via `resolved_category`.
 *
 * NOTE(review): the raw text is truncated to 2000 chars before embedding,
 * so very long memories lose their tail in the prompt — confirm acceptable.
 */
function buildUpgradePrompt(text, category) {
  return `You are a memory librarian. Given a raw memory text and its category, produce a structured 3-layer summary.

**Category**: ${category}

**Raw memory text**:
"""
${text.slice(0, 2e3)}
"""

Return ONLY valid JSON (no markdown fences):
{
  "l0_abstract": "One sentence (≤30 words) summarizing the core fact/preference/event",
  "l1_overview": "A structured markdown summary (2-5 bullet points)",
  "l2_content": "The full original text, cleaned up if needed",
  "resolved_category": "${category}"
}

Rules:
- l0_abstract must be a single concise sentence, suitable as a search index key
- l1_overview should use markdown bullet points to structure the information
- l2_content should preserve the original meaning; may clean up formatting
- resolved_category: if the text is clearly about personal identity/profile info (name, age, role, etc.), set to "profile"; if it's a reusable problem-solution pair, set to "cases"; otherwise keep "${category}"
- Respond in the SAME language as the raw memory text`;
}
|
|
47
|
+
/**
 * Rule-based (no-LLM) enrichment: derives the three summary layers from
 * the raw text alone.
 *
 * - L0: the leading sentence (up to the first ASCII or CJK terminator, or
 *   the whole text when none is found), capped at 100 chars and trimmed.
 * - L1: that same abstract rendered as a single markdown bullet.
 * - L2: the untouched full text.
 *
 * `category` is accepted for signature parity with the LLM path but is
 * not consulted here.
 */
function simpleEnrich(text, category) {
  const sentenceMatch = text.match(/^[^.!?。!?\n]+[.!?。!?]?/);
  const leadSentence = sentenceMatch ? sentenceMatch[0] : text;
  const abstract = leadSentence.slice(0, 100).trim();
  return {
    l0_abstract: abstract,
    l1_overview: "- " + abstract,
    l2_content: text
  };
}
|
|
57
|
+
/**
 * MemoryUpgrader — migrates legacy memories (metadata without
 * `memory_category`) to the smart-memory format.
 *
 * Per-memory pipeline (see upgradeEntry):
 *   1. reverse-map the old 5-category → new 6-category
 *   2. generate L0/L1/L2 summaries (LLM when available, rule-based fallback)
 *   3. persist the enriched metadata through store.update()
 */
class MemoryUpgrader {
  constructor(store, llm, options = {}) {
    this.store = store;
    this.llm = llm;
    this.options = options;
    // Default logger routes through the shared module logger.
    this.log = options.log ?? ((msg) => log.info(msg));
  }
  log;
  /**
   * Check if a memory entry is in legacy format (needs upgrade).
   * Legacy = no metadata, unparseable metadata JSON, or metadata lacking
   * `memory_category` (which only the new-format writers set).
   */
  isLegacyMemory(entry) {
    if (!entry.metadata) return true;
    try {
      const meta = JSON.parse(entry.metadata);
      return !meta.memory_category;
    } catch {
      return true;
    }
  }
  /**
   * Scan and count legacy memories without modifying them.
   * NOTE(review): list() is called with a hard cap of 10,000 entries, so
   * memories beyond the cap are silently excluded — confirm stores never
   * exceed this.
   */
  async countLegacy(scopeFilter) {
    const allMemories = await this.store.list(scopeFilter, void 0, 1e4, 0);
    let legacy = 0;
    const byCategory = {};
    for (const entry of allMemories) {
      if (this.isLegacyMemory(entry)) {
        legacy++;
        byCategory[entry.category] = (byCategory[entry.category] || 0) + 1;
      }
    }
    return { total: allMemories.length, legacy, byCategory };
  }
  /**
   * Main upgrade entry point.
   * Scans all memories, filters legacy ones, and enriches them in batches.
   * Call-site options take precedence over constructor-level options.
   */
  async upgrade(options = {}) {
    const batchSize = options.batchSize ?? this.options.batchSize ?? 10;
    const noLlm = options.noLlm ?? this.options.noLlm ?? false;
    const dryRun = options.dryRun ?? this.options.dryRun ?? false;
    const limit = options.limit ?? this.options.limit;
    const result = {
      totalLegacy: 0,
      upgraded: 0,
      skipped: 0,
      errors: []
    };
    this.log("memory-upgrader: scanning memories...");
    // Same 10,000-entry cap as countLegacy().
    const allMemories = await this.store.list(
      options.scopeFilter ?? this.options.scopeFilter,
      void 0,
      1e4,
      0
    );
    const legacyMemories = allMemories.filter((m) => this.isLegacyMemory(m));
    result.totalLegacy = legacyMemories.length;
    result.skipped = allMemories.length - legacyMemories.length;
    if (legacyMemories.length === 0) {
      this.log("memory-upgrader: no legacy memories found — all memories are already in new format");
      return result;
    }
    this.log(
      `memory-upgrader: found ${legacyMemories.length} legacy memories out of ${allMemories.length} total`
    );
    if (dryRun) {
      // Dry-run: report the per-(old)category breakdown and stop.
      const byCategory = {};
      for (const m of legacyMemories) {
        byCategory[m.category] = (byCategory[m.category] || 0) + 1;
      }
      this.log(
        `memory-upgrader: [DRY-RUN] would upgrade ${legacyMemories.length} memories`
      );
      this.log(`memory-upgrader: [DRY-RUN] breakdown: ${JSON.stringify(byCategory)}`);
      return result;
    }
    const toProcess = limit ? legacyMemories.slice(0, limit) : legacyMemories;
    for (let i = 0; i < toProcess.length; i += batchSize) {
      const batch = toProcess.slice(i, i + batchSize);
      this.log(
        `memory-upgrader: processing batch ${Math.floor(i / batchSize) + 1}/${Math.ceil(toProcess.length / batchSize)} (${batch.length} memories)`
      );
      for (const entry of batch) {
        try {
          await this.upgradeEntry(entry, noLlm);
          result.upgraded++;
        } catch (err) {
          // One bad entry must not abort the whole run; record and continue.
          const errMsg = `Failed to upgrade ${entry.id}: ${String(err)}`;
          result.errors.push(errMsg);
          this.log(`memory-upgrader: ERROR — ${errMsg}`);
        }
      }
      this.log(
        `memory-upgrader: progress — ${result.upgraded} upgraded, ${result.errors.length} errors`
      );
    }
    this.log(
      `memory-upgrader: upgrade complete — ${result.upgraded} upgraded, ${result.skipped} already new, ${result.errors.length} errors`
    );
    return result;
  }
  /**
   * Upgrade a single legacy memory entry.
   * FIX: the rule-based fallback was previously recomputed per field (up to
   * three extra simpleEnrich calls on the LLM path); it is a pure function
   * of (text, category), so it is now computed exactly once.
   */
  async upgradeEntry(entry, noLlm) {
    let newCategory = reverseMapCategory(entry.category, entry.text);
    const fallback = simpleEnrich(entry.text, newCategory);
    let enriched;
    if (!noLlm && this.llm) {
      try {
        const prompt = buildUpgradePrompt(entry.text, newCategory);
        const llmResult = await this.llm.completeJson(prompt);
        if (!llmResult) {
          throw new Error("LLM returned null");
        }
        // Field-wise fallback: keep whatever the LLM produced, patch holes.
        enriched = {
          l0_abstract: llmResult.l0_abstract || fallback.l0_abstract,
          l1_overview: llmResult.l1_overview || fallback.l1_overview,
          l2_content: llmResult.l2_content || entry.text
        };
        // The LLM may resolve the ambiguous fact→profile/cases mapping,
        // but only accept values from the known 6-category set.
        if (llmResult.resolved_category) {
          const validCategories = new Set([
            "profile",
            "preferences",
            "entities",
            "events",
            "cases",
            "patterns"
          ]);
          if (validCategories.has(llmResult.resolved_category)) {
            newCategory = llmResult.resolved_category;
          }
        }
      } catch (err) {
        this.log(
          `memory-upgrader: LLM enrichment failed for ${entry.id}, falling back to simple — ${String(err)}`
        );
        enriched = fallback;
      }
    } else {
      enriched = fallback;
    }
    // Merge whatever metadata already existed, tolerating malformed JSON.
    const existingMeta = entry.metadata ? (() => {
      try {
        return JSON.parse(entry.metadata);
      } catch {
        return {};
      }
    })() : {};
    const newMetadata = {
      ...buildSmartMetadata(
        { ...entry, metadata: JSON.stringify(existingMeta) },
        {
          l0_abstract: enriched.l0_abstract,
          l1_overview: enriched.l1_overview,
          l2_content: enriched.l2_content,
          memory_category: newCategory,
          tier: "working",
          access_count: 0,
          confidence: 0.7
        }
      ),
      upgraded_from: entry.category,
      upgraded_at: Date.now()
    };
    await this.store.update(entry.id, {
      // Replace text with the L0 abstract for better search indexing; the
      // full original text survives in metadata.l2_content — confirm all
      // retrieval paths read l2_content for full detail.
      text: enriched.l0_abstract,
      metadata: stringifySmartMetadata(newMetadata)
    });
  }
}
|
|
231
|
+
/**
 * Convenience factory for MemoryUpgrader; equivalent to calling the
 * constructor directly (kept for API symmetry with the package's other
 * create* factories).
 */
function createMemoryUpgrader(store, llm, options = {}) {
  return new MemoryUpgrader(store, llm, options);
}
|
|
234
|
+
export {
|
|
235
|
+
MemoryUpgrader,
|
|
236
|
+
createMemoryUpgrader
|
|
237
|
+
};
|
|
238
|
+
//# sourceMappingURL=memory-upgrader.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../src/memory-upgrader.ts"],
|
|
4
|
+
"sourcesContent": ["// SPDX-License-Identifier: LicenseRef-Mnemo-Pro\n/**\n * Memory Upgrader \u2014 Convert legacy memories to new smart memory format\n *\n * Legacy memories lack L0/L1/L2 metadata, memory_category (6-category),\n * tier, access_count, and confidence fields. This module enriches them\n * to enable unified memory lifecycle management (decay, tier promotion,\n * smart dedup).\n *\n * Pipeline per memory:\n * 1. Detect legacy format (missing `memory_category` in metadata)\n * 2. Reverse-map 5-category \u2192 6-category\n * 3. Generate L0/L1/L2 via LLM (or fallback to simple rules)\n * 4. Write enriched metadata back via store.update()\n */\n\nimport type { MemoryStore, MemoryEntry } from \"./store.js\";\nimport type { LlmClient } from \"./llm-client.js\";\nimport type { MemoryCategory } from \"./memory-categories.js\";\nimport type { MemoryTier } from \"./memory-categories.js\";\nimport { buildSmartMetadata, stringifySmartMetadata } from \"./smart-metadata.js\";\nimport { log } from \"./logger.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface UpgradeOptions {\n /** Only report counts without modifying data (default: false) */\n dryRun?: boolean;\n /** Number of memories to process per batch (default: 10) */\n batchSize?: number;\n /** Skip LLM calls; use simple text truncation for L0/L1 (default: false) */\n noLlm?: boolean;\n /** Maximum number of memories to upgrade (default: unlimited) */\n limit?: number;\n /** Scope filter \u2014 only upgrade memories in these scopes */\n scopeFilter?: string[];\n /** Logger function */\n log?: (msg: string) => void;\n}\n\nexport interface UpgradeResult {\n /** Total legacy memories found */\n totalLegacy: number;\n /** Successfully upgraded count */\n upgraded: number;\n /** Skipped (already new format) */\n skipped: number;\n /** Errors encountered */\n errors: 
string[];\n}\n\ninterface EnrichedMetadata {\n l0_abstract: string;\n l1_overview: string;\n l2_content: string;\n memory_category: MemoryCategory;\n tier: MemoryTier;\n access_count: number;\n confidence: number;\n last_accessed_at: number;\n upgraded_from: string; // original 5-category\n upgraded_at: number; // timestamp of upgrade\n}\n\n// ============================================================================\n// Reverse Category Mapping\n// ============================================================================\n\n/**\n * Reverse-map old 5-category \u2192 new 6-category.\n *\n * Ambiguous case: `fact` maps to both `profile` and `cases`.\n * Without LLM, defaults to `cases` (conservative).\n * With LLM, the enrichment prompt will determine the correct category.\n */\nfunction reverseMapCategory(\n oldCategory: MemoryEntry[\"category\"],\n text: string,\n): MemoryCategory {\n switch (oldCategory) {\n case \"preference\":\n return \"preferences\";\n case \"entity\":\n return \"entities\";\n case \"decision\":\n return \"events\";\n case \"other\":\n return \"patterns\";\n case \"fact\":\n // Heuristic: if text looks like personal identity info, map to profile\n if (\n /\\b(my |i am |i'm |name is |\u53EB\u6211|\u6211\u7684|\u6211\u662F)\\b/i.test(text) &&\n text.length < 200\n ) {\n return \"profile\";\n }\n return \"cases\";\n default:\n return \"patterns\";\n }\n}\n\n// ============================================================================\n// LLM Upgrade Prompt\n// ============================================================================\n\nfunction buildUpgradePrompt(text: string, category: MemoryCategory): string {\n return `You are a memory librarian. 
Given a raw memory text and its category, produce a structured 3-layer summary.\n\n**Category**: ${category}\n\n**Raw memory text**:\n\"\"\"\n${text.slice(0, 2000)}\n\"\"\"\n\nReturn ONLY valid JSON (no markdown fences):\n{\n \"l0_abstract\": \"One sentence (\u226430 words) summarizing the core fact/preference/event\",\n \"l1_overview\": \"A structured markdown summary (2-5 bullet points)\",\n \"l2_content\": \"The full original text, cleaned up if needed\",\n \"resolved_category\": \"${category}\"\n}\n\nRules:\n- l0_abstract must be a single concise sentence, suitable as a search index key\n- l1_overview should use markdown bullet points to structure the information\n- l2_content should preserve the original meaning; may clean up formatting\n- resolved_category: if the text is clearly about personal identity/profile info (name, age, role, etc.), set to \"profile\"; if it's a reusable problem-solution pair, set to \"cases\"; otherwise keep \"${category}\"\n- Respond in the SAME language as the raw memory text`;\n}\n\n// ============================================================================\n// Simple (No-LLM) Enrichment\n// ============================================================================\n\nfunction simpleEnrich(\n text: string,\n category: MemoryCategory,\n): Pick<EnrichedMetadata, \"l0_abstract\" | \"l1_overview\" | \"l2_content\"> {\n // L0: first sentence or first 80 chars\n const firstSentence = text.match(/^[^.!?\u3002\uFF01\uFF1F\\n]+[.!?\u3002\uFF01\uFF1F]?/)?.[0] || text;\n const l0 = firstSentence.slice(0, 100).trim();\n\n // L1: structured as a single bullet\n const l1 = `- ${l0}`;\n\n // L2: full text\n return {\n l0_abstract: l0,\n l1_overview: l1,\n l2_content: text,\n };\n}\n\n// ============================================================================\n// Memory Upgrader\n// ============================================================================\n\nexport class MemoryUpgrader {\n private log: (msg: string) => void;\n\n 
constructor(\n private store: MemoryStore,\n private llm: LlmClient | null,\n private options: UpgradeOptions = {},\n ) {\n this.log = options.log ?? ((msg: string) => log.info(msg));\n }\n\n /**\n * Check if a memory entry is in legacy format (needs upgrade).\n * Legacy = no metadata, or metadata lacks `memory_category`.\n */\n isLegacyMemory(entry: MemoryEntry): boolean {\n if (!entry.metadata) return true;\n try {\n const meta = JSON.parse(entry.metadata);\n // If it has memory_category, it was created by SmartExtractor \u2192 new format\n return !meta.memory_category;\n } catch {\n return true;\n }\n }\n\n /**\n * Scan and count legacy memories without modifying them.\n */\n async countLegacy(scopeFilter?: string[]): Promise<{\n total: number;\n legacy: number;\n byCategory: Record<string, number>;\n }> {\n const allMemories = await this.store.list(scopeFilter, undefined, 10000, 0);\n let legacy = 0;\n const byCategory: Record<string, number> = {};\n\n for (const entry of allMemories) {\n if (this.isLegacyMemory(entry)) {\n legacy++;\n byCategory[entry.category] = (byCategory[entry.category] || 0) + 1;\n }\n }\n\n return { total: allMemories.length, legacy, byCategory };\n }\n\n /**\n * Main upgrade entry point.\n * Scans all memories, filters legacy ones, and enriches them.\n */\n async upgrade(options: UpgradeOptions = {}): Promise<UpgradeResult> {\n const batchSize = options.batchSize ?? this.options.batchSize ?? 10;\n const noLlm = options.noLlm ?? this.options.noLlm ?? false;\n const dryRun = options.dryRun ?? this.options.dryRun ?? false;\n const limit = options.limit ?? this.options.limit;\n\n const result: UpgradeResult = {\n totalLegacy: 0,\n upgraded: 0,\n skipped: 0,\n errors: [],\n };\n\n // Load all memories\n this.log(\"memory-upgrader: scanning memories...\");\n const allMemories = await this.store.list(\n options.scopeFilter ?? 
this.options.scopeFilter,\n undefined,\n 10000,\n 0,\n );\n\n // Filter legacy memories\n const legacyMemories = allMemories.filter((m) => this.isLegacyMemory(m));\n result.totalLegacy = legacyMemories.length;\n result.skipped = allMemories.length - legacyMemories.length;\n\n if (legacyMemories.length === 0) {\n this.log(\"memory-upgrader: no legacy memories found \u2014 all memories are already in new format\");\n return result;\n }\n\n this.log(\n `memory-upgrader: found ${legacyMemories.length} legacy memories out of ${allMemories.length} total`,\n );\n\n if (dryRun) {\n const byCategory: Record<string, number> = {};\n for (const m of legacyMemories) {\n byCategory[m.category] = (byCategory[m.category] || 0) + 1;\n }\n this.log(\n `memory-upgrader: [DRY-RUN] would upgrade ${legacyMemories.length} memories`,\n );\n this.log(`memory-upgrader: [DRY-RUN] breakdown: ${JSON.stringify(byCategory)}`);\n return result;\n }\n\n // Process in batches\n const toProcess = limit\n ? legacyMemories.slice(0, limit)\n : legacyMemories;\n\n for (let i = 0; i < toProcess.length; i += batchSize) {\n const batch = toProcess.slice(i, i + batchSize);\n this.log(\n `memory-upgrader: processing batch ${Math.floor(i / batchSize) + 1}/${Math.ceil(toProcess.length / batchSize)} (${batch.length} memories)`,\n );\n\n for (const entry of batch) {\n try {\n await this.upgradeEntry(entry, noLlm);\n result.upgraded++;\n } catch (err) {\n const errMsg = `Failed to upgrade ${entry.id}: ${String(err)}`;\n result.errors.push(errMsg);\n this.log(`memory-upgrader: ERROR \u2014 ${errMsg}`);\n }\n }\n\n // Progress report\n this.log(\n `memory-upgrader: progress \u2014 ${result.upgraded} upgraded, ${result.errors.length} errors`,\n );\n }\n\n this.log(\n `memory-upgrader: upgrade complete \u2014 ${result.upgraded} upgraded, ${result.skipped} already new, ${result.errors.length} errors`,\n );\n return result;\n }\n\n /**\n * Upgrade a single legacy memory entry.\n */\n private async upgradeEntry(\n 
entry: MemoryEntry,\n noLlm: boolean,\n ): Promise<void> {\n // Step 1: Reverse-map category\n let newCategory = reverseMapCategory(entry.category, entry.text);\n\n // Step 2: Generate L0/L1/L2\n let enriched: Pick<EnrichedMetadata, \"l0_abstract\" | \"l1_overview\" | \"l2_content\">;\n\n if (!noLlm && this.llm) {\n try {\n const prompt = buildUpgradePrompt(entry.text, newCategory);\n const llmResult = await this.llm.completeJson<{\n l0_abstract: string;\n l1_overview: string;\n l2_content: string;\n resolved_category?: string;\n }>(prompt);\n\n if (!llmResult) {\n throw new Error(\"LLM returned null\");\n }\n\n enriched = {\n l0_abstract: llmResult.l0_abstract || simpleEnrich(entry.text, newCategory).l0_abstract,\n l1_overview: llmResult.l1_overview || simpleEnrich(entry.text, newCategory).l1_overview,\n l2_content: llmResult.l2_content || entry.text,\n };\n\n // LLM may have resolved the ambiguous fact\u2192profile/cases\n if (llmResult.resolved_category) {\n const validCategories = new Set([\n \"profile\", \"preferences\", \"entities\", \"events\", \"cases\", \"patterns\",\n ]);\n if (validCategories.has(llmResult.resolved_category)) {\n newCategory = llmResult.resolved_category as MemoryCategory;\n }\n }\n } catch (err) {\n this.log(\n `memory-upgrader: LLM enrichment failed for ${entry.id}, falling back to simple \u2014 ${String(err)}`,\n );\n enriched = simpleEnrich(entry.text, newCategory);\n }\n } else {\n enriched = simpleEnrich(entry.text, newCategory);\n }\n\n // Step 3: Build enriched metadata\n const existingMeta = entry.metadata ? 
(() => {\n try { return JSON.parse(entry.metadata!); } catch { return {}; }\n })() : {};\n\n const newMetadata: EnrichedMetadata = {\n ...buildSmartMetadata(\n { ...entry, metadata: JSON.stringify(existingMeta) },\n {\n l0_abstract: enriched.l0_abstract,\n l1_overview: enriched.l1_overview,\n l2_content: enriched.l2_content,\n memory_category: newCategory,\n tier: \"working\" as MemoryTier,\n access_count: 0,\n confidence: 0.7,\n },\n ),\n upgraded_from: entry.category,\n upgraded_at: Date.now(),\n };\n\n // Step 4: Update the memory entry\n await this.store.update(entry.id, {\n // Update text to L0 abstract for better search indexing\n text: enriched.l0_abstract,\n metadata: stringifySmartMetadata(newMetadata),\n });\n }\n}\n\n// ============================================================================\n// Factory\n// ============================================================================\n\nexport function createMemoryUpgrader(\n store: MemoryStore,\n llm: LlmClient | null,\n options: UpgradeOptions = {},\n): MemoryUpgrader {\n return new MemoryUpgrader(store, llm, options);\n}\n"],
|
|
5
|
+
"mappings": "AAoBA,SAAS,oBAAoB,8BAA8B;AAC3D,SAAS,WAAW;AAwDpB,SAAS,mBACP,aACA,MACgB;AAChB,UAAQ,aAAa;AAAA,IACnB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAEH,UACE,0CAA0C,KAAK,IAAI,KACnD,KAAK,SAAS,KACd;AACA,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAMA,SAAS,mBAAmB,MAAc,UAAkC;AAC1E,SAAO;AAAA;AAAA,gBAEO,QAAQ;AAAA;AAAA;AAAA;AAAA,EAItB,KAAK,MAAM,GAAG,GAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAQK,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wMAOsK,QAAQ;AAAA;AAEhN;AAMA,SAAS,aACP,MACA,UACsE;AAEtE,QAAM,gBAAgB,KAAK,MAAM,wBAAwB,IAAI,CAAC,KAAK;AACnE,QAAM,KAAK,cAAc,MAAM,GAAG,GAAG,EAAE,KAAK;AAG5C,QAAM,KAAK,KAAK,EAAE;AAGlB,SAAO;AAAA,IACL,aAAa;AAAA,IACb,aAAa;AAAA,IACb,YAAY;AAAA,EACd;AACF;AAMO,MAAM,eAAe;AAAA,EAG1B,YACU,OACA,KACA,UAA0B,CAAC,GACnC;AAHQ;AACA;AACA;AAER,SAAK,MAAM,QAAQ,QAAQ,CAAC,QAAgB,IAAI,KAAK,GAAG;AAAA,EAC1D;AAAA,EARQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAcR,eAAe,OAA6B;AAC1C,QAAI,CAAC,MAAM,SAAU,QAAO;AAC5B,QAAI;AACF,YAAM,OAAO,KAAK,MAAM,MAAM,QAAQ;AAEtC,aAAO,CAAC,KAAK;AAAA,IACf,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,aAIf;AACD,UAAM,cAAc,MAAM,KAAK,MAAM,KAAK,aAAa,QAAW,KAAO,CAAC;AAC1E,QAAI,SAAS;AACb,UAAM,aAAqC,CAAC;AAE5C,eAAW,SAAS,aAAa;AAC/B,UAAI,KAAK,eAAe,KAAK,GAAG;AAC9B;AACA,mBAAW,MAAM,QAAQ,KAAK,WAAW,MAAM,QAAQ,KAAK,KAAK;AAAA,MACnE;AAAA,IACF;AAEA,WAAO,EAAE,OAAO,YAAY,QAAQ,QAAQ,WAAW;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,UAA0B,CAAC,GAA2B;AAClE,UAAM,YAAY,QAAQ,aAAa,KAAK,QAAQ,aAAa;AACjE,UAAM,QAAQ,QAAQ,SAAS,KAAK,QAAQ,SAAS;AACrD,UAAM,SAAS,QAAQ,UAAU,KAAK,QAAQ,UAAU;AACxD,UAAM,QAAQ,QAAQ,SAAS,KAAK,QAAQ;AAE5C,UAAM,SAAwB;AAAA,MAC5B,aAAa;AAAA,MACb,UAAU;AAAA,MACV,SAAS;AAAA,MACT,QAAQ,CAAC;AAAA,IACX;AAGA,SAAK,IAAI,uCAAuC;AAChD,UAAM,cAAc,MAAM,KAAK,MAAM;AAAA,MACnC,QAAQ,eAAe,KAAK,QAAQ;AAAA,MACpC;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,UAAM,iBAAiB,YAAY,OAAO,CAAC,MAAM,KAAK,eAAe,CAAC,CAAC;AACvE,WAAO,cAAc,eAAe;AACpC,WAAO,UAAU,YAAY,SAAS,eAAe;AAErD,QAAI,eAAe,WAAW,GAAG;AAC/B,WAAK,IAAI,yFAAoF
;AAC7F,aAAO;AAAA,IACT;AAEA,SAAK;AAAA,MACH,0BAA0B,eAAe,MAAM,2BAA2B,YAAY,MAAM;AAAA,IAC9F;AAEA,QAAI,QAAQ;AACV,YAAM,aAAqC,CAAC;AAC5C,iBAAW,KAAK,gBAAgB;AAC9B,mBAAW,EAAE,QAAQ,KAAK,WAAW,EAAE,QAAQ,KAAK,KAAK;AAAA,MAC3D;AACA,WAAK;AAAA,QACH,4CAA4C,eAAe,MAAM;AAAA,MACnE;AACA,WAAK,IAAI,yCAAyC,KAAK,UAAU,UAAU,CAAC,EAAE;AAC9E,aAAO;AAAA,IACT;AAGA,UAAM,YAAY,QACd,eAAe,MAAM,GAAG,KAAK,IAC7B;AAEJ,aAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK,WAAW;AACpD,YAAM,QAAQ,UAAU,MAAM,GAAG,IAAI,SAAS;AAC9C,WAAK;AAAA,QACH,qCAAqC,KAAK,MAAM,IAAI,SAAS,IAAI,CAAC,IAAI,KAAK,KAAK,UAAU,SAAS,SAAS,CAAC,KAAK,MAAM,MAAM;AAAA,MAChI;AAEA,iBAAW,SAAS,OAAO;AACzB,YAAI;AACF,gBAAM,KAAK,aAAa,OAAO,KAAK;AACpC,iBAAO;AAAA,QACT,SAAS,KAAK;AACZ,gBAAM,SAAS,qBAAqB,MAAM,EAAE,KAAK,OAAO,GAAG,CAAC;AAC5D,iBAAO,OAAO,KAAK,MAAM;AACzB,eAAK,IAAI,iCAA4B,MAAM,EAAE;AAAA,QAC/C;AAAA,MACF;AAGA,WAAK;AAAA,QACH,oCAA+B,OAAO,QAAQ,cAAc,OAAO,OAAO,MAAM;AAAA,MAClF;AAAA,IACF;AAEA,SAAK;AAAA,MACH,4CAAuC,OAAO,QAAQ,cAAc,OAAO,OAAO,iBAAiB,OAAO,OAAO,MAAM;AAAA,IACzH;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,OACA,OACe;AAEf,QAAI,cAAc,mBAAmB,MAAM,UAAU,MAAM,IAAI;AAG/D,QAAI;AAEJ,QAAI,CAAC,SAAS,KAAK,KAAK;AACtB,UAAI;AACF,cAAM,SAAS,mBAAmB,MAAM,MAAM,WAAW;AACzD,cAAM,YAAY,MAAM,KAAK,IAAI,aAK9B,MAAM;AAET,YAAI,CAAC,WAAW;AACd,gBAAM,IAAI,MAAM,mBAAmB;AAAA,QACrC;AAEA,mBAAW;AAAA,UACT,aAAa,UAAU,eAAe,aAAa,MAAM,MAAM,WAAW,EAAE;AAAA,UAC5E,aAAa,UAAU,eAAe,aAAa,MAAM,MAAM,WAAW,EAAE;AAAA,UAC5E,YAAY,UAAU,cAAc,MAAM;AAAA,QAC5C;AAGA,YAAI,UAAU,mBAAmB;AAC/B,gBAAM,kBAAkB,oBAAI,IAAI;AAAA,YAC9B;AAAA,YAAW;AAAA,YAAe;AAAA,YAAY;AAAA,YAAU;AAAA,YAAS;AAAA,UAC3D,CAAC;AACD,cAAI,gBAAgB,IAAI,UAAU,iBAAiB,GAAG;AACpD,0BAAc,UAAU;AAAA,UAC1B;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,aAAK;AAAA,UACH,8CAA8C,MAAM,EAAE,mCAA8B,OAAO,GAAG,CAAC;AAAA,QACjG;AACA,mBAAW,aAAa,MAAM,MAAM,WAAW;AAAA,MACjD;AAAA,IACF,OAAO;AACL,iBAAW,aAAa,MAAM,MAAM,WAAW;AAAA,IACjD;AAGA,UAAM,eAAe,MAAM,YAAY,MAAM;AAC3C,UAAI;AAAE,eAAO,KAAK,MAAM,MAAM,QAAS;AAAA,MAAG,QAAQ;AAAE,eAAO,CAAC;AAAA,MAAG;AAAA,IACjE,GAAG,IAAI,CAAC;AAER,UAAM,cAAgC;AAAA,MACpC,GAAG;AAAA,QACD,EAAE,G
AAG,OAAO,UAAU,KAAK,UAAU,YAAY,EAAE;AAAA,QACnD;AAAA,UACE,aAAa,SAAS;AAAA,UACtB,aAAa,SAAS;AAAA,UACtB,YAAY,SAAS;AAAA,UACrB,iBAAiB;AAAA,UACjB,MAAM;AAAA,UACN,cAAc;AAAA,UACd,YAAY;AAAA,QACd;AAAA,MACF;AAAA,MACA,eAAe,MAAM;AAAA,MACrB,aAAa,KAAK,IAAI;AAAA,IACxB;AAGA,UAAM,KAAK,MAAM,OAAO,MAAM,IAAI;AAAA;AAAA,MAEhC,MAAM,SAAS;AAAA,MACf,UAAU,uBAAuB,WAAW;AAAA,IAC9C,CAAC;AAAA,EACH;AACF;AAMO,SAAS,qBACd,OACA,KACA,UAA0B,CAAC,GACX;AAChB,SAAO,IAAI,eAAe,OAAO,KAAK,OAAO;AAC/C;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Migration Utilities
|
|
3
|
+
* Migrates data from old memory-lancedb plugin to memory-lancedb-pro
|
|
4
|
+
*/
|
|
5
|
+
import type { MemoryStore } from "./store.js";
|
|
6
|
+
interface MigrationResult {
|
|
7
|
+
success: boolean;
|
|
8
|
+
migratedCount: number;
|
|
9
|
+
skippedCount: number;
|
|
10
|
+
errors: string[];
|
|
11
|
+
summary: string;
|
|
12
|
+
}
|
|
13
|
+
interface MigrationOptions {
|
|
14
|
+
sourceDbPath?: string;
|
|
15
|
+
dryRun?: boolean;
|
|
16
|
+
defaultScope?: string;
|
|
17
|
+
skipExisting?: boolean;
|
|
18
|
+
}
|
|
19
|
+
export declare class MemoryMigrator {
|
|
20
|
+
private targetStore;
|
|
21
|
+
constructor(targetStore: MemoryStore);
|
|
22
|
+
migrate(options?: MigrationOptions): Promise<MigrationResult>;
|
|
23
|
+
private findSourceDatabase;
|
|
24
|
+
private loadLegacyData;
|
|
25
|
+
private migrateEntries;
|
|
26
|
+
checkMigrationNeeded(sourceDbPath?: string): Promise<{
|
|
27
|
+
needed: boolean;
|
|
28
|
+
sourceFound: boolean;
|
|
29
|
+
sourceDbPath?: string;
|
|
30
|
+
entryCount?: number;
|
|
31
|
+
}>;
|
|
32
|
+
verifyMigration(sourceDbPath?: string): Promise<{
|
|
33
|
+
valid: boolean;
|
|
34
|
+
sourceCount: number;
|
|
35
|
+
targetCount: number;
|
|
36
|
+
issues: string[];
|
|
37
|
+
}>;
|
|
38
|
+
}
|
|
39
|
+
export declare function createMigrator(targetStore: MemoryStore): MemoryMigrator;
|
|
40
|
+
export declare function migrateFromLegacy(targetStore: MemoryStore, options?: MigrationOptions): Promise<MigrationResult>;
|
|
41
|
+
export declare function checkForLegacyData(): Promise<{
|
|
42
|
+
found: boolean;
|
|
43
|
+
paths: string[];
|
|
44
|
+
totalEntries: number;
|
|
45
|
+
}>;
|
|
46
|
+
export {};
|
|
47
|
+
//# sourceMappingURL=migrate.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrate.d.ts","sourceRoot":"","sources":["../../src/migrate.ts"],"names":[],"mappings":"AACA;;;GAGG;AAKH,OAAO,KAAK,EAAE,WAAW,EAAe,MAAM,YAAY,CAAC;AAkB3D,UAAU,eAAe;IACvB,OAAO,EAAE,OAAO,CAAC;IACjB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,MAAM,EAAE,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,UAAU,gBAAgB;IACxB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AAmCD,qBAAa,cAAc;IACb,OAAO,CAAC,WAAW;gBAAX,WAAW,EAAE,WAAW;IAEtC,OAAO,CAAC,OAAO,GAAE,gBAAqB,GAAG,OAAO,CAAC,eAAe,CAAC;YAsDzD,kBAAkB;YA2BlB,cAAc;YAyBd,cAAc;IA4DtB,oBAAoB,CAAC,YAAY,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QACzD,MAAM,EAAE,OAAO,CAAC;QAChB,WAAW,EAAE,OAAO,CAAC;QACrB,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB,CAAC;IA2BI,eAAe,CAAC,YAAY,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QACpD,KAAK,EAAE,OAAO,CAAC;QACf,WAAW,EAAE,MAAM,CAAC;QACpB,WAAW,EAAE,MAAM,CAAC;QACpB,MAAM,EAAE,MAAM,EAAE,CAAC;KAClB,CAAC;CAwCH;AAED,wBAAgB,cAAc,CAAC,WAAW,EAAE,WAAW,GAAG,cAAc,CAEvE;AAED,wBAAsB,iBAAiB,CACrC,WAAW,EAAE,WAAW,EACxB,OAAO,GAAE,gBAAqB,GAC7B,OAAO,CAAC,eAAe,CAAC,CAG1B;AAED,wBAAsB,kBAAkB,IAAI,OAAO,CAAC;IAClD,KAAK,EAAE,OAAO,CAAC;IACf,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC;CACtB,CAAC,CAyBD"}
|