@usewhisper/mcp-server 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +26 -24
  2. package/dist/autosubscribe-6EDKPBE2.js +4068 -0
  3. package/dist/autosubscribe-GHO6YR5A.js +4068 -0
  4. package/dist/autosubscribe-ISDETQIB.js +436 -0
  5. package/dist/autosubscribe-ISDETQIB.js.map +1 -0
  6. package/dist/chunk-3WGYBAYR.js +8387 -0
  7. package/dist/chunk-52VJYCZ7.js +455 -0
  8. package/dist/chunk-5KBZQHDL.js +189 -0
  9. package/dist/chunk-5KIJNY6Z.js +370 -0
  10. package/dist/chunk-7SN3CKDK.js +1076 -0
  11. package/dist/chunk-B3VWOHUA.js +271 -0
  12. package/dist/chunk-C57DHKTL.js +459 -0
  13. package/dist/chunk-EI5CE3EY.js +616 -0
  14. package/dist/chunk-FTWUJBAH.js +387 -0
  15. package/dist/chunk-FTWUJBAH.js.map +1 -0
  16. package/dist/chunk-H3HSKH2P.js +4841 -0
  17. package/dist/chunk-JO3ORBZD.js +616 -0
  18. package/dist/chunk-L6DXSM2U.js +457 -0
  19. package/dist/chunk-L6DXSM2U.js.map +1 -0
  20. package/dist/chunk-LMEYV4JD.js +368 -0
  21. package/dist/chunk-MEFLJ4PV.js +8385 -0
  22. package/dist/chunk-OBLI4FE4.js +276 -0
  23. package/dist/chunk-OBLI4FE4.js.map +1 -0
  24. package/dist/chunk-PPGYJJED.js +271 -0
  25. package/dist/chunk-QGM4M3NI.js +37 -0
  26. package/dist/chunk-T7KMSTWP.js +399 -0
  27. package/dist/chunk-TWEIYHI6.js +399 -0
  28. package/dist/chunk-UYWE7HSU.js +369 -0
  29. package/dist/chunk-UYWE7HSU.js.map +1 -0
  30. package/dist/chunk-X2DL2GWT.js +33 -0
  31. package/dist/chunk-X2DL2GWT.js.map +1 -0
  32. package/dist/chunk-X7HNNNJJ.js +1079 -0
  33. package/dist/consolidation-2GCKI4RE.js +220 -0
  34. package/dist/consolidation-4JOPW6BG.js +220 -0
  35. package/dist/consolidation-FOVQTWNQ.js +222 -0
  36. package/dist/consolidation-IFQ52E44.js +210 -0
  37. package/dist/consolidation-IFQ52E44.js.map +1 -0
  38. package/dist/context-sharing-4ITCNKG4.js +307 -0
  39. package/dist/context-sharing-6CCFIAKL.js +276 -0
  40. package/dist/context-sharing-6CCFIAKL.js.map +1 -0
  41. package/dist/context-sharing-GYKLXHZA.js +307 -0
  42. package/dist/context-sharing-PH64JTXS.js +308 -0
  43. package/dist/context-sharing-Y6LTZZOF.js +307 -0
  44. package/dist/cost-optimization-6OIKRSBV.js +196 -0
  45. package/dist/cost-optimization-6OIKRSBV.js.map +1 -0
  46. package/dist/cost-optimization-7DVSTL6R.js +307 -0
  47. package/dist/cost-optimization-BH5NAX33.js +287 -0
  48. package/dist/cost-optimization-BH5NAX33.js.map +1 -0
  49. package/dist/cost-optimization-F3L5BS5F.js +303 -0
  50. package/dist/ingest-2LPTWUUM.js +16 -0
  51. package/dist/ingest-7T5FAZNC.js +15 -0
  52. package/dist/ingest-EBNIE7XB.js +15 -0
  53. package/dist/ingest-FSHT5BCS.js +15 -0
  54. package/dist/ingest-QE2BTV72.js +15 -0
  55. package/dist/ingest-QE2BTV72.js.map +1 -0
  56. package/dist/oracle-3RLQF3DP.js +259 -0
  57. package/dist/oracle-FKRTQUUG.js +282 -0
  58. package/dist/oracle-J47QCSEW.js +263 -0
  59. package/dist/oracle-MDP5MZRC.js +257 -0
  60. package/dist/oracle-MDP5MZRC.js.map +1 -0
  61. package/dist/search-BLVHWLWC.js +14 -0
  62. package/dist/search-CZ5NYL5B.js +13 -0
  63. package/dist/search-CZ5NYL5B.js.map +1 -0
  64. package/dist/search-EG6TYWWW.js +13 -0
  65. package/dist/search-I22QQA7T.js +13 -0
  66. package/dist/search-T7H5G6DW.js +13 -0
  67. package/dist/server.d.ts +2 -0
  68. package/dist/server.js +914 -1503
  69. package/dist/server.js.map +1 -1
  70. package/package.json +6 -7
@@ -0,0 +1,220 @@
1
+ import {
2
+ db,
3
+ embedSingle
4
+ } from "./chunk-MEFLJ4PV.js";
5
+ import "./chunk-QGM4M3NI.js";
6
+
7
+ // ../src/engine/memory/consolidation.ts
8
+ import OpenAI from "openai";
9
// Module-level OpenAI client shared by mergeDuplicateMemories.
// NOTE(review): an unset OPENAI_API_KEY silently becomes "" here, so a
// missing key only fails at the first API call — confirm that deferred
// failure is intended rather than failing fast at startup.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY || ""
});
12
/**
 * Scan a project's active memories and group near-duplicates into clusters.
 *
 * Memories are fetched most-important-first (scan size clamped to [10, 100]),
 * then greedily clustered: each unclaimed memory becomes a cluster
 * representative and pulls in every later memory whose embedding similarity
 * meets the threshold.
 *
 * @param {{projectId: string, userId?: string, similarityThreshold?: number, limit?: number}} params
 * @returns {Promise<Array<{representative: object, duplicates: object[], similarity: number}>>}
 *   clusters that have at least one duplicate; `similarity` is the mean
 *   similarity over the duplicates.
 */
async function findDuplicateMemories(params) {
  const { projectId, userId, similarityThreshold = 0.95, limit = 50 } = params;
  // Clamp the scan size to [10, 100] regardless of the requested limit.
  const scanSize = Math.min(Math.max(limit, 10), 100);
  const memories = await db.memory.findMany({
    where: { projectId, userId, isActive: true, validUntil: null },
    orderBy: { importance: "desc" },
    take: scanSize
  });
  const clusters = [];
  const claimed = new Set();
  for (const [index, anchor] of memories.entries()) {
    if (claimed.has(anchor.id)) continue;
    // Only compare against memories later in the importance ordering, so
    // each pair is scored at most once.
    const candidates = memories.slice(index + 1);
    const scores = await calculateBatchSimilarity(
      anchor.id,
      candidates.map((candidate) => candidate.id)
    );
    const matches = [];
    candidates.forEach((candidate, position) => {
      if (claimed.has(candidate.id)) return;
      const score = scores[position];
      if (score >= similarityThreshold) {
        matches.push({ ...candidate, similarity: score });
        claimed.add(candidate.id);
      }
    });
    if (matches.length === 0) continue;
    const meanSimilarity =
      matches.reduce((total, match) => total + match.similarity, 0) / matches.length;
    clusters.push({
      representative: anchor,
      duplicates: matches,
      similarity: meanSimilarity
    });
    claimed.add(anchor.id);
  }
  return clusters;
}
58
/**
 * Compute embedding similarity (1 minus the pgvector `<=>` distance) between
 * one memory and a batch of other memories in a single SQL round trip.
 *
 * @param {string} memoryId - id of the reference memory (m1).
 * @param {string[]} otherIds - ids of the memories to compare against.
 * @returns {Promise<number[]>} similarities aligned with `otherIds`; ids
 *   missing from the result set map to 0.
 */
async function calculateBatchSimilarity(memoryId, otherIds) {
  if (otherIds.length === 0) return [];
  // Bug fix: the previous version interpolated a pre-built
  // `m2.id = $2 OR m2.id = $3 ...` string into the `$queryRaw` tagged
  // template. Prisma binds template interpolations as *values*, so the whole
  // condition collapsed into a single bound string and the hand-written `$n`
  // placeholders were never bound (the `placeholders` variable was dead code).
  // Build real positional placeholders and pass the values explicitly via
  // `$queryRawUnsafe` instead — every id is still parameterized, never
  // concatenated into the SQL text.
  const inList = otherIds.map((_, i) => `$${i + 2}`).join(", ");
  const rows = await db.$queryRawUnsafe(
    `SELECT 1 - (m1.embedding <=> m2.embedding) AS similarity,
            m2.id AS id
       FROM memories m1, memories m2
      WHERE m1.id = $1
        AND m2.id IN (${inList})`,
    memoryId,
    ...otherIds
  );
  const similarityMap = new Map(rows.map((r) => [r.id, r.similarity]));
  // Preserve the input order; `??` (not `||`) so a legitimate similarity of
  // 0 or a negative similarity is not clobbered.
  return otherIds.map((id) => similarityMap.get(id) ?? 0);
}
72
/**
 * Merge a cluster of near-duplicate memories into one consolidated memory
 * via the LLM, persist it, and deactivate the originals.
 *
 * @param {{representative: object, duplicates: object[]}} cluster
 * @returns {Promise<string>} id of the newly created merged memory.
 * @throws {Error} when the model returns no text or unparseable JSON.
 */
async function mergeDuplicateMemories(cluster) {
  const memories = [cluster.representative, ...cluster.duplicates];
  const prompt = `You are merging duplicate memories into a single, comprehensive memory.

**Memories to merge:**
${memories.map(
    (m, i) => `${i + 1}. "${m.content}" (confidence: ${m.confidence}, date: ${m.documentDate?.toISOString() || "unknown"})`
  ).join("\n")}

**Instructions:**
1. Combine all unique information from these memories
2. Resolve any contradictions by keeping the most recent or most confident information
3. Extract all unique entity mentions
4. Use the highest confidence score
5. Keep the most recent document date

Return JSON:
{
  "merged_content": "comprehensive merged memory",
  "entity_mentions": ["list", "of", "entities"],
  "confidence": 0.0-1.0,
  "reasoning": "brief explanation of how you merged"
}`;
  const response = await openai.chat.completions.create({
    model: "gpt-4o",
    max_tokens: 2048,
    temperature: 0,
    messages: [{ role: "user", content: prompt }],
    response_format: { type: "json_object" }
  });
  const text = response.choices[0]?.message?.content?.trim();
  if (!text) {
    throw new Error("Failed to merge memories");
  }
  // Accept either a fenced ```json block or a bare JSON object.
  const jsonMatch = text.match(/```json\n?([\s\S]*?)\n?```/) || text.match(/\{[\s\S]*\}/);
  const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text;
  const result = JSON.parse(jsonStr);
  const embedding = await embedSingle(result.merged_content);
  // Bug fix: the prompt promises "keep the most recent document date", but
  // the old code always copied the representative's dates. Pick the latest
  // non-null date across the whole cluster instead.
  const latestDate = (field) =>
    memories.reduce(
      (latest, m) => (m[field] && (!latest || m[field] > latest) ? m[field] : latest),
      null
    );
  const mergedMemory = await db.memory.create({
    data: {
      projectId: cluster.representative.projectId,
      orgId: cluster.representative.orgId,
      userId: cluster.representative.userId,
      sessionId: cluster.representative.sessionId,
      memoryType: cluster.representative.memoryType,
      content: result.merged_content,
      embedding,
      entityMentions: result.entity_mentions || [],
      // `??` (not `||`): a model-reported confidence of 0 is a real value,
      // not a missing one.
      confidence: result.confidence ?? cluster.representative.confidence,
      documentDate: latestDate("documentDate"),
      eventDate: latestDate("eventDate"),
      validFrom: new Date(),
      // `??` so an explicit importance of 0 is not bumped to 0.5.
      importance: Math.max(...memories.map((m) => m.importance ?? 0.5)),
      metadata: {
        mergedFrom: memories.map((m) => m.id),
        mergeReasoning: result.reasoning,
        mergedAt: new Date().toISOString()
      }
    }
  });
  // Deactivate every source memory in one statement instead of N sequential
  // updates — all rows receive identical values.
  await db.memory.updateMany({
    where: { id: { in: memories.map((m) => m.id) } },
    data: {
      isActive: false,
      validUntil: new Date(),
      supersededBy: mergedMemory.id
    }
  });
  return mergedMemory.id;
}
144
/**
 * Find duplicate-memory clusters in a project and merge each one, or just
 * report them when `dryRun` is set.
 *
 * @param {{projectId: string, userId?: string, similarityThreshold?: number, dryRun?: boolean}} params
 * @returns {Promise<{clustersFound: number, memoriesMerged: number, memoriesDeactivated: number}>}
 *   counts of clusters found, clusters merged, and source memories deactivated.
 */
async function consolidateMemories(params) {
  const { projectId, userId, similarityThreshold = 0.95, dryRun = false } = params;
  console.log(`\u{1F50D} Finding duplicate memories in project ${projectId}...`);
  const clusters = await findDuplicateMemories({ projectId, userId, similarityThreshold });
  console.log(`\u{1F4CA} Found ${clusters.length} memory clusters`);
  if (dryRun) {
    // Report-only mode: describe each cluster, merge nothing.
    for (const cluster of clusters) {
      console.log(`\nCluster (similarity: ${cluster.similarity.toFixed(2)}):`);
      console.log(`  Representative: "${cluster.representative.content}"`);
      console.log(`  Duplicates: ${cluster.duplicates.length}`);
      for (const duplicate of cluster.duplicates) {
        console.log(`    - "${duplicate.content}"`);
      }
    }
    return { clustersFound: clusters.length, memoriesMerged: 0, memoriesDeactivated: 0 };
  }
  let mergedCount = 0;
  let deactivatedCount = 0;
  for (const cluster of clusters) {
    const clusterSize = cluster.duplicates.length + 1;
    try {
      console.log(`\u{1F517} Merging cluster with ${clusterSize} memories...`);
      await mergeDuplicateMemories(cluster);
      mergedCount += 1;
      deactivatedCount += clusterSize;
      console.log(`\u2705 Merged successfully`);
    } catch (error) {
      // A failed merge must not abort the remaining clusters.
      console.error(`\u274C Failed to merge cluster:`, error);
    }
  }
  console.log(
    `\n\u2705 Consolidation complete: ${mergedCount} clusters merged, ${deactivatedCount} memories deactivated`
  );
  return {
    clustersFound: clusters.length,
    memoriesMerged: mergedCount,
    memoriesDeactivated: deactivatedCount
  };
}
192
/**
 * Scheduled entry point: run consolidation for every project in an org.
 * Per-project failures are logged and skipped so one bad project cannot
 * block the rest of the run.
 *
 * @param {string} orgId
 */
async function scheduledConsolidation(orgId) {
  console.log(`\u{1F504} Running scheduled consolidation for org ${orgId}...`);
  const projects = await db.project.findMany({ where: { orgId } });
  for (const project of projects) {
    try {
      const outcome = await consolidateMemories({
        projectId: project.id,
        // Slightly lower than the 0.95 default for scheduled runs.
        similarityThreshold: 0.92
      });
      if (outcome.memoriesMerged > 0) {
        console.log(`\u{1F4CA} Project ${project.name}: merged ${outcome.memoriesMerged} clusters`);
      }
    } catch (error) {
      console.error(`Failed to consolidate project ${project.name}:`, error);
    }
  }
  console.log("\u2705 Scheduled consolidation complete");
}
215
+ export {
216
+ consolidateMemories,
217
+ findDuplicateMemories,
218
+ mergeDuplicateMemories,
219
+ scheduledConsolidation
220
+ };
@@ -0,0 +1,220 @@
1
+ import {
2
+ db,
3
+ embedSingle
4
+ } from "./chunk-3WGYBAYR.js";
5
+ import "./chunk-QGM4M3NI.js";
6
+
7
+ // ../src/engine/memory/consolidation.ts
8
+ import OpenAI from "openai";
9
// Module-level OpenAI client shared by mergeDuplicateMemories.
// NOTE(review): an unset OPENAI_API_KEY silently becomes "" here, so a
// missing key only fails at the first API call — confirm that deferred
// failure is intended rather than failing fast at startup.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY || ""
});
12
/**
 * Scan a project's active memories and group near-duplicates into clusters.
 *
 * Memories are fetched most-important-first (scan size clamped to [10, 100]),
 * then greedily clustered: each unclaimed memory becomes a cluster
 * representative and pulls in every later memory whose embedding similarity
 * meets the threshold.
 *
 * @param {{projectId: string, userId?: string, similarityThreshold?: number, limit?: number}} params
 * @returns {Promise<Array<{representative: object, duplicates: object[], similarity: number}>>}
 *   clusters that have at least one duplicate; `similarity` is the mean
 *   similarity over the duplicates.
 */
async function findDuplicateMemories(params) {
  const { projectId, userId, similarityThreshold = 0.95, limit = 50 } = params;
  // Clamp the scan size to [10, 100] regardless of the requested limit.
  const scanSize = Math.min(Math.max(limit, 10), 100);
  const memories = await db.memory.findMany({
    where: { projectId, userId, isActive: true, validUntil: null },
    orderBy: { importance: "desc" },
    take: scanSize
  });
  const clusters = [];
  const claimed = new Set();
  for (const [index, anchor] of memories.entries()) {
    if (claimed.has(anchor.id)) continue;
    // Only compare against memories later in the importance ordering, so
    // each pair is scored at most once.
    const candidates = memories.slice(index + 1);
    const scores = await calculateBatchSimilarity(
      anchor.id,
      candidates.map((candidate) => candidate.id)
    );
    const matches = [];
    candidates.forEach((candidate, position) => {
      if (claimed.has(candidate.id)) return;
      const score = scores[position];
      if (score >= similarityThreshold) {
        matches.push({ ...candidate, similarity: score });
        claimed.add(candidate.id);
      }
    });
    if (matches.length === 0) continue;
    const meanSimilarity =
      matches.reduce((total, match) => total + match.similarity, 0) / matches.length;
    clusters.push({
      representative: anchor,
      duplicates: matches,
      similarity: meanSimilarity
    });
    claimed.add(anchor.id);
  }
  return clusters;
}
58
/**
 * Compute embedding similarity (1 minus the pgvector `<=>` distance) between
 * one memory and a batch of other memories in a single SQL round trip.
 *
 * @param {string} memoryId - id of the reference memory (m1).
 * @param {string[]} otherIds - ids of the memories to compare against.
 * @returns {Promise<number[]>} similarities aligned with `otherIds`; ids
 *   missing from the result set map to 0.
 */
async function calculateBatchSimilarity(memoryId, otherIds) {
  if (otherIds.length === 0) return [];
  // Bug fix: the previous version interpolated a pre-built
  // `m2.id = $2 OR m2.id = $3 ...` string into the `$queryRaw` tagged
  // template. Prisma binds template interpolations as *values*, so the whole
  // condition collapsed into a single bound string and the hand-written `$n`
  // placeholders were never bound (the `placeholders` variable was dead code).
  // Build real positional placeholders and pass the values explicitly via
  // `$queryRawUnsafe` instead — every id is still parameterized, never
  // concatenated into the SQL text.
  const inList = otherIds.map((_, i) => `$${i + 2}`).join(", ");
  const rows = await db.$queryRawUnsafe(
    `SELECT 1 - (m1.embedding <=> m2.embedding) AS similarity,
            m2.id AS id
       FROM memories m1, memories m2
      WHERE m1.id = $1
        AND m2.id IN (${inList})`,
    memoryId,
    ...otherIds
  );
  const similarityMap = new Map(rows.map((r) => [r.id, r.similarity]));
  // Preserve the input order; `??` (not `||`) so a legitimate similarity of
  // 0 or a negative similarity is not clobbered.
  return otherIds.map((id) => similarityMap.get(id) ?? 0);
}
72
/**
 * Merge a cluster of near-duplicate memories into one consolidated memory
 * via the LLM, persist it, and deactivate the originals.
 *
 * @param {{representative: object, duplicates: object[]}} cluster
 * @returns {Promise<string>} id of the newly created merged memory.
 * @throws {Error} when the model returns no text or unparseable JSON.
 */
async function mergeDuplicateMemories(cluster) {
  const memories = [cluster.representative, ...cluster.duplicates];
  const prompt = `You are merging duplicate memories into a single, comprehensive memory.

**Memories to merge:**
${memories.map(
    (m, i) => `${i + 1}. "${m.content}" (confidence: ${m.confidence}, date: ${m.documentDate?.toISOString() || "unknown"})`
  ).join("\n")}

**Instructions:**
1. Combine all unique information from these memories
2. Resolve any contradictions by keeping the most recent or most confident information
3. Extract all unique entity mentions
4. Use the highest confidence score
5. Keep the most recent document date

Return JSON:
{
  "merged_content": "comprehensive merged memory",
  "entity_mentions": ["list", "of", "entities"],
  "confidence": 0.0-1.0,
  "reasoning": "brief explanation of how you merged"
}`;
  const response = await openai.chat.completions.create({
    model: "gpt-4o",
    max_tokens: 2048,
    temperature: 0,
    messages: [{ role: "user", content: prompt }],
    response_format: { type: "json_object" }
  });
  const text = response.choices[0]?.message?.content?.trim();
  if (!text) {
    throw new Error("Failed to merge memories");
  }
  // Accept either a fenced ```json block or a bare JSON object.
  const jsonMatch = text.match(/```json\n?([\s\S]*?)\n?```/) || text.match(/\{[\s\S]*\}/);
  const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text;
  const result = JSON.parse(jsonStr);
  const embedding = await embedSingle(result.merged_content);
  // Bug fix: the prompt promises "keep the most recent document date", but
  // the old code always copied the representative's dates. Pick the latest
  // non-null date across the whole cluster instead.
  const latestDate = (field) =>
    memories.reduce(
      (latest, m) => (m[field] && (!latest || m[field] > latest) ? m[field] : latest),
      null
    );
  const mergedMemory = await db.memory.create({
    data: {
      projectId: cluster.representative.projectId,
      orgId: cluster.representative.orgId,
      userId: cluster.representative.userId,
      sessionId: cluster.representative.sessionId,
      memoryType: cluster.representative.memoryType,
      content: result.merged_content,
      embedding,
      entityMentions: result.entity_mentions || [],
      // `??` (not `||`): a model-reported confidence of 0 is a real value,
      // not a missing one.
      confidence: result.confidence ?? cluster.representative.confidence,
      documentDate: latestDate("documentDate"),
      eventDate: latestDate("eventDate"),
      validFrom: new Date(),
      // `??` so an explicit importance of 0 is not bumped to 0.5.
      importance: Math.max(...memories.map((m) => m.importance ?? 0.5)),
      metadata: {
        mergedFrom: memories.map((m) => m.id),
        mergeReasoning: result.reasoning,
        mergedAt: new Date().toISOString()
      }
    }
  });
  // Deactivate every source memory in one statement instead of N sequential
  // updates — all rows receive identical values.
  await db.memory.updateMany({
    where: { id: { in: memories.map((m) => m.id) } },
    data: {
      isActive: false,
      validUntil: new Date(),
      supersededBy: mergedMemory.id
    }
  });
  return mergedMemory.id;
}
144
/**
 * Find duplicate-memory clusters in a project and merge each one, or just
 * report them when `dryRun` is set.
 *
 * @param {{projectId: string, userId?: string, similarityThreshold?: number, dryRun?: boolean}} params
 * @returns {Promise<{clustersFound: number, memoriesMerged: number, memoriesDeactivated: number}>}
 *   counts of clusters found, clusters merged, and source memories deactivated.
 */
async function consolidateMemories(params) {
  const { projectId, userId, similarityThreshold = 0.95, dryRun = false } = params;
  console.log(`\u{1F50D} Finding duplicate memories in project ${projectId}...`);
  const clusters = await findDuplicateMemories({ projectId, userId, similarityThreshold });
  console.log(`\u{1F4CA} Found ${clusters.length} memory clusters`);
  if (dryRun) {
    // Report-only mode: describe each cluster, merge nothing.
    for (const cluster of clusters) {
      console.log(`\nCluster (similarity: ${cluster.similarity.toFixed(2)}):`);
      console.log(`  Representative: "${cluster.representative.content}"`);
      console.log(`  Duplicates: ${cluster.duplicates.length}`);
      for (const duplicate of cluster.duplicates) {
        console.log(`    - "${duplicate.content}"`);
      }
    }
    return { clustersFound: clusters.length, memoriesMerged: 0, memoriesDeactivated: 0 };
  }
  let mergedCount = 0;
  let deactivatedCount = 0;
  for (const cluster of clusters) {
    const clusterSize = cluster.duplicates.length + 1;
    try {
      console.log(`\u{1F517} Merging cluster with ${clusterSize} memories...`);
      await mergeDuplicateMemories(cluster);
      mergedCount += 1;
      deactivatedCount += clusterSize;
      console.log(`\u2705 Merged successfully`);
    } catch (error) {
      // A failed merge must not abort the remaining clusters.
      console.error(`\u274C Failed to merge cluster:`, error);
    }
  }
  console.log(
    `\n\u2705 Consolidation complete: ${mergedCount} clusters merged, ${deactivatedCount} memories deactivated`
  );
  return {
    clustersFound: clusters.length,
    memoriesMerged: mergedCount,
    memoriesDeactivated: deactivatedCount
  };
}
192
/**
 * Scheduled entry point: run consolidation for every project in an org.
 * Per-project failures are logged and skipped so one bad project cannot
 * block the rest of the run.
 *
 * @param {string} orgId
 */
async function scheduledConsolidation(orgId) {
  console.log(`\u{1F504} Running scheduled consolidation for org ${orgId}...`);
  const projects = await db.project.findMany({ where: { orgId } });
  for (const project of projects) {
    try {
      const outcome = await consolidateMemories({
        projectId: project.id,
        // Slightly lower than the 0.95 default for scheduled runs.
        similarityThreshold: 0.92
      });
      if (outcome.memoriesMerged > 0) {
        console.log(`\u{1F4CA} Project ${project.name}: merged ${outcome.memoriesMerged} clusters`);
      }
    } catch (error) {
      console.error(`Failed to consolidate project ${project.name}:`, error);
    }
  }
  console.log("\u2705 Scheduled consolidation complete");
}
215
+ export {
216
+ consolidateMemories,
217
+ findDuplicateMemories,
218
+ mergeDuplicateMemories,
219
+ scheduledConsolidation
220
+ };
@@ -0,0 +1,222 @@
1
+ import {
2
+ db,
3
+ embedSingle
4
+ } from "./chunk-3WGYBAYR.js";
5
+ import {
6
+ Anthropic
7
+ } from "./chunk-H3HSKH2P.js";
8
+ import "./chunk-QGM4M3NI.js";
9
+
10
+ // ../src/engine/memory/consolidation.ts
11
// Module-level Anthropic client shared by mergeDuplicateMemories.
// NOTE(review): an unset ANTHROPIC_API_KEY silently becomes "" here, so a
// missing key only fails at the first API call — confirm that deferred
// failure is intended rather than failing fast at startup.
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY || ""
});
14
/**
 * Scan a project's active memories and group near-duplicates into clusters.
 *
 * Memories are fetched most-important-first (scan size clamped to [10, 100]),
 * then greedily clustered: each unclaimed memory becomes a cluster
 * representative and pulls in every later memory whose embedding similarity
 * meets the threshold.
 *
 * @param {{projectId: string, userId?: string, similarityThreshold?: number, limit?: number}} params
 * @returns {Promise<Array<{representative: object, duplicates: object[], similarity: number}>>}
 *   clusters that have at least one duplicate; `similarity` is the mean
 *   similarity over the duplicates.
 */
async function findDuplicateMemories(params) {
  const { projectId, userId, similarityThreshold = 0.95, limit = 50 } = params;
  // Clamp the scan size to [10, 100] regardless of the requested limit.
  const scanSize = Math.min(Math.max(limit, 10), 100);
  const memories = await db.memory.findMany({
    where: { projectId, userId, isActive: true, validUntil: null },
    orderBy: { importance: "desc" },
    take: scanSize
  });
  const clusters = [];
  const claimed = new Set();
  for (const [index, anchor] of memories.entries()) {
    if (claimed.has(anchor.id)) continue;
    // Only compare against memories later in the importance ordering, so
    // each pair is scored at most once.
    const candidates = memories.slice(index + 1);
    const scores = await calculateBatchSimilarity(
      anchor.id,
      candidates.map((candidate) => candidate.id)
    );
    const matches = [];
    candidates.forEach((candidate, position) => {
      if (claimed.has(candidate.id)) return;
      const score = scores[position];
      if (score >= similarityThreshold) {
        matches.push({ ...candidate, similarity: score });
        claimed.add(candidate.id);
      }
    });
    if (matches.length === 0) continue;
    const meanSimilarity =
      matches.reduce((total, match) => total + match.similarity, 0) / matches.length;
    clusters.push({
      representative: anchor,
      duplicates: matches,
      similarity: meanSimilarity
    });
    claimed.add(anchor.id);
  }
  return clusters;
}
60
/**
 * Compute embedding similarity (1 minus the pgvector `<=>` distance) between
 * one memory and a batch of other memories in a single SQL round trip.
 *
 * @param {string} memoryId - id of the reference memory (m1).
 * @param {string[]} otherIds - ids of the memories to compare against.
 * @returns {Promise<number[]>} similarities aligned with `otherIds`; ids
 *   missing from the result set map to 0.
 */
async function calculateBatchSimilarity(memoryId, otherIds) {
  if (otherIds.length === 0) return [];
  // Bug fix: the previous version interpolated a pre-built
  // `m2.id = $2 OR m2.id = $3 ...` string into the `$queryRaw` tagged
  // template. Prisma binds template interpolations as *values*, so the whole
  // condition collapsed into a single bound string and the hand-written `$n`
  // placeholders were never bound (the `placeholders` variable was dead code).
  // Build real positional placeholders and pass the values explicitly via
  // `$queryRawUnsafe` instead — every id is still parameterized, never
  // concatenated into the SQL text.
  const inList = otherIds.map((_, i) => `$${i + 2}`).join(", ");
  const rows = await db.$queryRawUnsafe(
    `SELECT 1 - (m1.embedding <=> m2.embedding) AS similarity,
            m2.id AS id
       FROM memories m1, memories m2
      WHERE m1.id = $1
        AND m2.id IN (${inList})`,
    memoryId,
    ...otherIds
  );
  const similarityMap = new Map(rows.map((r) => [r.id, r.similarity]));
  // Preserve the input order; `??` (not `||`) so a legitimate similarity of
  // 0 or a negative similarity is not clobbered.
  return otherIds.map((id) => similarityMap.get(id) ?? 0);
}
74
/**
 * Merge a cluster of near-duplicate memories into one consolidated memory
 * via the LLM, persist it, and deactivate the originals.
 *
 * @param {{representative: object, duplicates: object[]}} cluster
 * @returns {Promise<string>} id of the newly created merged memory.
 * @throws {Error} when the model returns no text block or unparseable JSON.
 */
async function mergeDuplicateMemories(cluster) {
  const memories = [cluster.representative, ...cluster.duplicates];
  const prompt = `You are merging duplicate memories into a single, comprehensive memory.

**Memories to merge:**
${memories.map(
    (m, i) => `${i + 1}. "${m.content}" (confidence: ${m.confidence}, date: ${m.documentDate?.toISOString() || "unknown"})`
  ).join("\n")}

**Instructions:**
1. Combine all unique information from these memories
2. Resolve any contradictions by keeping the most recent or most confident information
3. Extract all unique entity mentions
4. Use the highest confidence score
5. Keep the most recent document date

Return JSON:
{
  "merged_content": "comprehensive merged memory",
  "entity_mentions": ["list", "of", "entities"],
  "confidence": 0.0-1.0,
  "reasoning": "brief explanation of how you merged"
}`;
  const response = await anthropic.messages.create({
    model: "claude-sonnet-4-5-20250929",
    max_tokens: 2048,
    temperature: 0,
    messages: [{ role: "user", content: prompt }]
  });
  // Pull the first text block from the response; non-text blocks are ignored.
  const textBlock = response.content.find((c) => c.type === "text");
  if (!textBlock || textBlock.type !== "text") {
    throw new Error("Failed to merge memories");
  }
  // Accept either a fenced ```json block or a bare JSON object.
  const jsonMatch =
    textBlock.text.match(/```json\n?([\s\S]*?)\n?```/) || textBlock.text.match(/\{[\s\S]*\}/);
  const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : textBlock.text;
  const result = JSON.parse(jsonStr);
  const embedding = await embedSingle(result.merged_content);
  // Bug fix: the prompt promises "keep the most recent document date", but
  // the old code always copied the representative's dates. Pick the latest
  // non-null date across the whole cluster instead.
  const latestDate = (field) =>
    memories.reduce(
      (latest, m) => (m[field] && (!latest || m[field] > latest) ? m[field] : latest),
      null
    );
  const mergedMemory = await db.memory.create({
    data: {
      projectId: cluster.representative.projectId,
      orgId: cluster.representative.orgId,
      userId: cluster.representative.userId,
      sessionId: cluster.representative.sessionId,
      memoryType: cluster.representative.memoryType,
      content: result.merged_content,
      embedding,
      entityMentions: result.entity_mentions || [],
      // `??` (not `||`): a model-reported confidence of 0 is a real value,
      // not a missing one.
      confidence: result.confidence ?? cluster.representative.confidence,
      documentDate: latestDate("documentDate"),
      eventDate: latestDate("eventDate"),
      validFrom: new Date(),
      // `??` so an explicit importance of 0 is not bumped to 0.5.
      importance: Math.max(...memories.map((m) => m.importance ?? 0.5)),
      metadata: {
        mergedFrom: memories.map((m) => m.id),
        mergeReasoning: result.reasoning,
        mergedAt: new Date().toISOString()
      }
    }
  });
  // Deactivate every source memory in one statement instead of N sequential
  // updates — all rows receive identical values.
  await db.memory.updateMany({
    where: { id: { in: memories.map((m) => m.id) } },
    data: {
      isActive: false,
      validUntil: new Date(),
      supersededBy: mergedMemory.id
    }
  });
  return mergedMemory.id;
}
146
/**
 * Find duplicate-memory clusters in a project and merge each one, or just
 * report them when `dryRun` is set.
 *
 * @param {{projectId: string, userId?: string, similarityThreshold?: number, dryRun?: boolean}} params
 * @returns {Promise<{clustersFound: number, memoriesMerged: number, memoriesDeactivated: number}>}
 *   counts of clusters found, clusters merged, and source memories deactivated.
 */
async function consolidateMemories(params) {
  const { projectId, userId, similarityThreshold = 0.95, dryRun = false } = params;
  console.log(`\u{1F50D} Finding duplicate memories in project ${projectId}...`);
  const clusters = await findDuplicateMemories({ projectId, userId, similarityThreshold });
  console.log(`\u{1F4CA} Found ${clusters.length} memory clusters`);
  if (dryRun) {
    // Report-only mode: describe each cluster, merge nothing.
    for (const cluster of clusters) {
      console.log(`\nCluster (similarity: ${cluster.similarity.toFixed(2)}):`);
      console.log(`  Representative: "${cluster.representative.content}"`);
      console.log(`  Duplicates: ${cluster.duplicates.length}`);
      for (const duplicate of cluster.duplicates) {
        console.log(`    - "${duplicate.content}"`);
      }
    }
    return { clustersFound: clusters.length, memoriesMerged: 0, memoriesDeactivated: 0 };
  }
  let mergedCount = 0;
  let deactivatedCount = 0;
  for (const cluster of clusters) {
    const clusterSize = cluster.duplicates.length + 1;
    try {
      console.log(`\u{1F517} Merging cluster with ${clusterSize} memories...`);
      await mergeDuplicateMemories(cluster);
      mergedCount += 1;
      deactivatedCount += clusterSize;
      console.log(`\u2705 Merged successfully`);
    } catch (error) {
      // A failed merge must not abort the remaining clusters.
      console.error(`\u274C Failed to merge cluster:`, error);
    }
  }
  console.log(
    `\n\u2705 Consolidation complete: ${mergedCount} clusters merged, ${deactivatedCount} memories deactivated`
  );
  return {
    clustersFound: clusters.length,
    memoriesMerged: mergedCount,
    memoriesDeactivated: deactivatedCount
  };
}
194
/**
 * Scheduled entry point: run consolidation for every project in an org.
 * Per-project failures are logged and skipped so one bad project cannot
 * block the rest of the run.
 *
 * @param {string} orgId
 */
async function scheduledConsolidation(orgId) {
  console.log(`\u{1F504} Running scheduled consolidation for org ${orgId}...`);
  const projects = await db.project.findMany({ where: { orgId } });
  for (const project of projects) {
    try {
      const outcome = await consolidateMemories({
        projectId: project.id,
        // Slightly lower than the 0.95 default for scheduled runs.
        similarityThreshold: 0.92
      });
      if (outcome.memoriesMerged > 0) {
        console.log(`\u{1F4CA} Project ${project.name}: merged ${outcome.memoriesMerged} clusters`);
      }
    } catch (error) {
      console.error(`Failed to consolidate project ${project.name}:`, error);
    }
  }
  console.log("\u2705 Scheduled consolidation complete");
}
217
+ export {
218
+ consolidateMemories,
219
+ findDuplicateMemories,
220
+ mergeDuplicateMemories,
221
+ scheduledConsolidation
222
+ };