@usewhisper/mcp-server 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +182 -154
  2. package/dist/autosubscribe-6EDKPBE2.js +4068 -4068
  3. package/dist/autosubscribe-GHO6YR5A.js +4068 -4068
  4. package/dist/autosubscribe-ISDETQIB.js +435 -435
  5. package/dist/chunk-3WGYBAYR.js +8387 -8387
  6. package/dist/chunk-52VJYCZ7.js +455 -455
  7. package/dist/chunk-5KBZQHDL.js +189 -189
  8. package/dist/chunk-5KIJNY6Z.js +370 -370
  9. package/dist/chunk-7SN3CKDK.js +1076 -1076
  10. package/dist/chunk-B3VWOHUA.js +271 -271
  11. package/dist/chunk-C57DHKTL.js +459 -459
  12. package/dist/chunk-EI5CE3EY.js +616 -616
  13. package/dist/chunk-FTWUJBAH.js +386 -386
  14. package/dist/chunk-H3HSKH2P.js +4841 -4841
  15. package/dist/chunk-JO3ORBZD.js +616 -616
  16. package/dist/chunk-L6DXSM2U.js +456 -456
  17. package/dist/chunk-LMEYV4JD.js +368 -368
  18. package/dist/chunk-MEFLJ4PV.js +8385 -8385
  19. package/dist/chunk-OBLI4FE4.js +275 -275
  20. package/dist/chunk-PPGYJJED.js +271 -271
  21. package/dist/chunk-QGM4M3NI.js +37 -37
  22. package/dist/chunk-T7KMSTWP.js +399 -399
  23. package/dist/chunk-TWEIYHI6.js +399 -399
  24. package/dist/chunk-UYWE7HSU.js +368 -368
  25. package/dist/chunk-X2DL2GWT.js +32 -32
  26. package/dist/chunk-X7HNNNJJ.js +1079 -1079
  27. package/dist/consolidation-2GCKI4RE.js +220 -220
  28. package/dist/consolidation-4JOPW6BG.js +220 -220
  29. package/dist/consolidation-FOVQTWNQ.js +222 -222
  30. package/dist/consolidation-IFQ52E44.js +209 -209
  31. package/dist/context-sharing-4ITCNKG4.js +307 -307
  32. package/dist/context-sharing-6CCFIAKL.js +275 -275
  33. package/dist/context-sharing-GYKLXHZA.js +307 -307
  34. package/dist/context-sharing-PH64JTXS.js +308 -308
  35. package/dist/context-sharing-Y6LTZZOF.js +307 -307
  36. package/dist/cost-optimization-6OIKRSBV.js +195 -195
  37. package/dist/cost-optimization-7DVSTL6R.js +307 -307
  38. package/dist/cost-optimization-BH5NAX33.js +286 -286
  39. package/dist/cost-optimization-F3L5BS5F.js +303 -303
  40. package/dist/ingest-2LPTWUUM.js +16 -16
  41. package/dist/ingest-7T5FAZNC.js +15 -15
  42. package/dist/ingest-EBNIE7XB.js +15 -15
  43. package/dist/ingest-FSHT5BCS.js +15 -15
  44. package/dist/ingest-QE2BTV72.js +14 -14
  45. package/dist/oracle-3RLQF3DP.js +259 -259
  46. package/dist/oracle-FKRTQUUG.js +282 -282
  47. package/dist/oracle-J47QCSEW.js +263 -263
  48. package/dist/oracle-MDP5MZRC.js +256 -256
  49. package/dist/search-BLVHWLWC.js +14 -14
  50. package/dist/search-CZ5NYL5B.js +12 -12
  51. package/dist/search-EG6TYWWW.js +13 -13
  52. package/dist/search-I22QQA7T.js +13 -13
  53. package/dist/search-T7H5G6DW.js +13 -13
  54. package/dist/server.d.ts +2 -2
  55. package/dist/server.js +1973 -169
  56. package/dist/server.js.map +1 -1
  57. package/package.json +51 -51
@@ -1,210 +1,210 @@
1
- import {
2
- db,
3
- embedSingle
4
- } from "./chunk-X2DL2GWT.js";
5
-
6
- // src/engine/memory/consolidation.ts
7
- import Anthropic from "@anthropic-ai/sdk";
8
- var anthropic = new Anthropic({
9
- apiKey: process.env.ANTHROPIC_API_KEY || ""
10
- });
11
- async function findDuplicateMemories(params) {
12
- const { projectId, userId, similarityThreshold = 0.95, limit = 100 } = params;
13
- const memories = await db.memory.findMany({
14
- where: {
15
- projectId,
16
- userId,
17
- isActive: true,
18
- validUntil: null
19
- // Only current versions
20
- },
21
- orderBy: {
22
- importance: "desc"
23
- },
24
- take: limit * 2
25
- // Get more to find duplicates
26
- });
27
- const clusters = [];
28
- const processed = /* @__PURE__ */ new Set();
29
- for (let i = 0; i < memories.length; i++) {
30
- const memory = memories[i];
31
- if (processed.has(memory.id)) continue;
32
- const similar = [];
33
- for (let j = i + 1; j < memories.length; j++) {
34
- const other = memories[j];
35
- if (processed.has(other.id)) continue;
36
- const similarity = await calculateSimilarity(memory.id, other.id);
37
- if (similarity >= similarityThreshold) {
38
- similar.push({ ...other, similarity });
39
- processed.add(other.id);
40
- }
41
- }
42
- if (similar.length > 0) {
43
- clusters.push({
44
- representative: memory,
45
- duplicates: similar,
46
- similarity: similar.reduce((sum, m) => sum + m.similarity, 0) / similar.length
47
- });
48
- processed.add(memory.id);
49
- }
50
- }
51
- return clusters;
52
- }
53
- async function calculateSimilarity(memoryId1, memoryId2) {
54
- const result = await db.$queryRaw`
55
- SELECT
56
- 1 - (m1.embedding <=> m2.embedding) as similarity
57
- FROM memories m1, memories m2
58
- WHERE m1.id = ${memoryId1} AND m2.id = ${memoryId2}
59
- `;
60
- return result[0]?.similarity || 0;
61
- }
62
- async function mergeDuplicateMemories(cluster) {
63
- const memories = [cluster.representative, ...cluster.duplicates];
64
- const prompt = `You are merging duplicate memories into a single, comprehensive memory.
65
-
66
- **Memories to merge:**
67
- ${memories.map(
68
- (m, i) => `${i + 1}. "${m.content}" (confidence: ${m.confidence}, date: ${m.documentDate?.toISOString() || "unknown"})`
69
- ).join("\n")}
70
-
71
- **Instructions:**
72
- 1. Combine all unique information from these memories
73
- 2. Resolve any contradictions by keeping the most recent or most confident information
74
- 3. Extract all unique entity mentions
75
- 4. Use the highest confidence score
76
- 5. Keep the most recent document date
77
-
78
- Return JSON:
79
- {
80
- "merged_content": "comprehensive merged memory",
81
- "entity_mentions": ["list", "of", "entities"],
82
- "confidence": 0.0-1.0,
83
- "reasoning": "brief explanation of how you merged"
84
- }`;
85
- const response = await anthropic.messages.create({
86
- model: "claude-sonnet-4.5",
87
- max_tokens: 2048,
88
- temperature: 0,
89
- messages: [{ role: "user", content: prompt }]
90
- });
91
- const text = response.content.find((c) => c.type === "text");
92
- if (!text || text.type !== "text") {
93
- throw new Error("Failed to merge memories");
94
- }
95
- const jsonMatch = text.text.match(/```json\n?([\s\S]*?)\n?```/) || text.text.match(/\{[\s\S]*\}/);
96
- const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text.text;
97
- const result = JSON.parse(jsonStr);
98
- const embedding = await embedSingle(result.merged_content);
99
- const mergedMemory = await db.memory.create({
100
- data: {
101
- projectId: cluster.representative.projectId,
102
- orgId: cluster.representative.orgId,
103
- userId: cluster.representative.userId,
104
- sessionId: cluster.representative.sessionId,
105
- memoryType: cluster.representative.memoryType,
106
- content: result.merged_content,
107
- embedding,
108
- entityMentions: result.entity_mentions || [],
109
- confidence: result.confidence || cluster.representative.confidence,
110
- documentDate: cluster.representative.documentDate,
111
- eventDate: cluster.representative.eventDate,
112
- validFrom: /* @__PURE__ */ new Date(),
113
- importance: Math.max(...memories.map((m) => m.importance || 0.5)),
114
- metadata: {
115
- mergedFrom: memories.map((m) => m.id),
116
- mergeReasoning: result.reasoning,
117
- mergedAt: (/* @__PURE__ */ new Date()).toISOString()
118
- }
119
- }
120
- });
121
- for (const memory of memories) {
122
- await db.memory.update({
123
- where: { id: memory.id },
124
- data: {
125
- isActive: false,
126
- validUntil: /* @__PURE__ */ new Date(),
127
- supersededBy: mergedMemory.id
128
- }
129
- });
130
- }
131
- return mergedMemory.id;
132
- }
133
- async function consolidateMemories(params) {
134
- const { projectId, userId, similarityThreshold = 0.95, dryRun = false } = params;
135
- console.log(`\u{1F50D} Finding duplicate memories in project ${projectId}...`);
136
- const clusters = await findDuplicateMemories({
137
- projectId,
138
- userId,
139
- similarityThreshold
140
- });
141
- console.log(`\u{1F4CA} Found ${clusters.length} memory clusters`);
142
- if (dryRun) {
143
- for (const cluster of clusters) {
144
- console.log(`
145
- Cluster (similarity: ${cluster.similarity.toFixed(2)}):`);
146
- console.log(` Representative: "${cluster.representative.content}"`);
147
- console.log(` Duplicates: ${cluster.duplicates.length}`);
148
- cluster.duplicates.forEach((d) => {
149
- console.log(` - "${d.content}"`);
150
- });
151
- }
152
- return {
153
- clustersFound: clusters.length,
154
- memoriesMerged: 0,
155
- memoriesDeactivated: 0
156
- };
157
- }
158
- let memoriesMerged = 0;
159
- let memoriesDeactivated = 0;
160
- for (const cluster of clusters) {
161
- try {
162
- console.log(`\u{1F517} Merging cluster with ${cluster.duplicates.length + 1} memories...`);
163
- await mergeDuplicateMemories(cluster);
164
- memoriesMerged++;
165
- memoriesDeactivated += cluster.duplicates.length + 1;
166
- console.log(`\u2705 Merged successfully`);
167
- } catch (error) {
168
- console.error(`\u274C Failed to merge cluster:`, error);
169
- }
170
- }
171
- console.log(
172
- `
173
- \u2705 Consolidation complete: ${memoriesMerged} clusters merged, ${memoriesDeactivated} memories deactivated`
174
- );
175
- return {
176
- clustersFound: clusters.length,
177
- memoriesMerged,
178
- memoriesDeactivated
179
- };
180
- }
181
- async function scheduledConsolidation(orgId) {
182
- console.log(`\u{1F504} Running scheduled consolidation for org ${orgId}...`);
183
- const projects = await db.project.findMany({
184
- where: { orgId }
185
- });
186
- for (const project of projects) {
187
- try {
188
- const result = await consolidateMemories({
189
- projectId: project.id,
190
- similarityThreshold: 0.92
191
- // Slightly lower for scheduled runs
192
- });
193
- if (result.memoriesMerged > 0) {
194
- console.log(
195
- `\u{1F4CA} Project ${project.name}: merged ${result.memoriesMerged} clusters`
196
- );
197
- }
198
- } catch (error) {
199
- console.error(`Failed to consolidate project ${project.name}:`, error);
200
- }
201
- }
202
- console.log("\u2705 Scheduled consolidation complete");
203
- }
204
- export {
205
- consolidateMemories,
206
- findDuplicateMemories,
207
- mergeDuplicateMemories,
208
- scheduledConsolidation
209
- };
1
+ import {
2
+ db,
3
+ embedSingle
4
+ } from "./chunk-X2DL2GWT.js";
5
+
6
+ // src/engine/memory/consolidation.ts
7
+ import Anthropic from "@anthropic-ai/sdk";
8
+ var anthropic = new Anthropic({
9
+ apiKey: process.env.ANTHROPIC_API_KEY || ""
10
+ });
11
/**
 * Scan a project's current, active memories and group near-duplicates into
 * clusters via pairwise embedding similarity.
 *
 * @param {object} params
 * @param {string} params.projectId - Project to scan.
 * @param {string} [params.userId] - Optional user scope for the scan.
 * @param {number} [params.similarityThreshold=0.95] - Minimum cosine similarity to count as a duplicate.
 * @param {number} [params.limit=100] - Base fetch budget; 2x this many memories are examined.
 * @returns {Promise<Array<{representative: object, duplicates: object[], similarity: number}>>}
 *   One entry per cluster; `similarity` is the mean similarity of the duplicates.
 */
async function findDuplicateMemories(params) {
  const { projectId, userId, similarityThreshold = 0.95, limit = 100 } = params;
  // Only current versions (validUntil null) of active memories, highest
  // importance first; over-fetch (2x limit) to widen the duplicate search.
  const memories = await db.memory.findMany({
    where: {
      projectId,
      userId,
      isActive: true,
      validUntil: null
    },
    orderBy: {
      importance: "desc"
    },
    take: limit * 2
  });
  const clusters = [];
  const claimed = new Set();
  for (const [idx, candidate] of memories.entries()) {
    if (claimed.has(candidate.id)) continue;
    // Greedy pass: compare the candidate against every later, unclaimed memory.
    const matches = [];
    for (const other of memories.slice(idx + 1)) {
      if (claimed.has(other.id)) continue;
      const similarity = await calculateSimilarity(candidate.id, other.id);
      if (similarity >= similarityThreshold) {
        matches.push({ ...other, similarity });
        claimed.add(other.id);
      }
    }
    if (matches.length > 0) {
      const meanSimilarity =
        matches.reduce((sum, m) => sum + m.similarity, 0) / matches.length;
      clusters.push({
        representative: candidate,
        duplicates: matches,
        similarity: meanSimilarity
      });
      claimed.add(candidate.id);
    }
  }
  return clusters;
}
53
/**
 * Cosine similarity between two stored memory embeddings, computed in the
 * database with the pgvector cosine-distance operator (`<=>`).
 *
 * @param {string} memoryId1 - First memory row id.
 * @param {string} memoryId2 - Second memory row id.
 * @returns {Promise<number>} Similarity in ~[0, 1]; 0 when either row is missing.
 *   NOTE(review): `|| 0` also maps a genuine 0/NULL similarity to 0 — harmless here.
 */
async function calculateSimilarity(memoryId1, memoryId2) {
  const rows = await db.$queryRaw`
    SELECT
      1 - (m1.embedding <=> m2.embedding) as similarity
    FROM memories m1, memories m2
    WHERE m1.id = ${memoryId1} AND m2.id = ${memoryId2}
  `;
  const [first] = rows;
  return first?.similarity || 0;
}
62
/**
 * Merge a duplicate-memory cluster into one consolidated memory.
 *
 * Asks the model to combine the cluster's contents, embeds the merged text,
 * inserts it as a new memory row, then deactivates every source memory
 * (representative included) and points it at the merged row via `supersededBy`.
 *
 * @param {{representative: object, duplicates: object[]}} cluster - Output of findDuplicateMemories.
 * @returns {Promise<string>} Id of the newly created merged memory.
 * @throws {Error} When the model returns no text block, unparseable JSON, or
 *   JSON without a usable `merged_content` string.
 */
async function mergeDuplicateMemories(cluster) {
  const memories = [cluster.representative, ...cluster.duplicates];
  const prompt = `You are merging duplicate memories into a single, comprehensive memory.

**Memories to merge:**
${memories.map(
    (m, i) => `${i + 1}. "${m.content}" (confidence: ${m.confidence}, date: ${m.documentDate?.toISOString() || "unknown"})`
  ).join("\n")}

**Instructions:**
1. Combine all unique information from these memories
2. Resolve any contradictions by keeping the most recent or most confident information
3. Extract all unique entity mentions
4. Use the highest confidence score
5. Keep the most recent document date

Return JSON:
{
  "merged_content": "comprehensive merged memory",
  "entity_mentions": ["list", "of", "entities"],
  "confidence": 0.0-1.0,
  "reasoning": "brief explanation of how you merged"
}`;
  const response = await anthropic.messages.create({
    model: "claude-sonnet-4.5",
    max_tokens: 2048,
    temperature: 0,
    messages: [{ role: "user", content: prompt }]
  });
  const text = response.content.find((c) => c.type === "text");
  if (!text || text.type !== "text") {
    throw new Error("Failed to merge memories");
  }
  // Accept a fenced ```json block or a bare JSON object anywhere in the reply.
  const jsonMatch = text.text.match(/```json\n?([\s\S]*?)\n?```/) || text.text.match(/\{[\s\S]*\}/);
  const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text.text;
  let result;
  try {
    result = JSON.parse(jsonStr);
  } catch (err) {
    // FIX: surface the malformed model output instead of a bare SyntaxError.
    throw new Error("Failed to parse memory merge response as JSON", { cause: err });
  }
  if (typeof result.merged_content !== "string" || result.merged_content.length === 0) {
    // FIX: validate before embedding/persisting an empty or missing merge.
    throw new Error("Memory merge response is missing merged_content");
  }
  const embedding = await embedSingle(result.merged_content);
  const mergedMemory = await db.memory.create({
    data: {
      projectId: cluster.representative.projectId,
      orgId: cluster.representative.orgId,
      userId: cluster.representative.userId,
      sessionId: cluster.representative.sessionId,
      memoryType: cluster.representative.memoryType,
      content: result.merged_content,
      embedding,
      entityMentions: result.entity_mentions || [],
      // FIX: ?? instead of || so an explicit confidence of 0 is preserved.
      confidence: result.confidence ?? cluster.representative.confidence,
      documentDate: cluster.representative.documentDate,
      eventDate: cluster.representative.eventDate,
      validFrom: new Date(),
      importance: Math.max(...memories.map((m) => m.importance || 0.5)),
      metadata: {
        mergedFrom: memories.map((m) => m.id),
        mergeReasoning: result.reasoning,
        mergedAt: new Date().toISOString()
      }
    }
  });
  // Retire every source memory and link it to its replacement.
  for (const memory of memories) {
    await db.memory.update({
      where: { id: memory.id },
      data: {
        isActive: false,
        validUntil: new Date(),
        supersededBy: mergedMemory.id
      }
    });
  }
  return mergedMemory.id;
}
133
/**
 * Find duplicate-memory clusters in a project and merge each one.
 *
 * @param {object} params
 * @param {string} params.projectId - Project to consolidate.
 * @param {string} [params.userId] - Optional user scope.
 * @param {number} [params.similarityThreshold=0.95] - Duplicate-detection threshold.
 * @param {boolean} [params.dryRun=false] - When true, only report clusters; no writes.
 * @returns {Promise<{clustersFound: number, memoriesMerged: number, memoriesDeactivated: number}>}
 */
async function consolidateMemories(params) {
  const { projectId, userId, similarityThreshold = 0.95, dryRun = false } = params;
  console.log(`\u{1F50D} Finding duplicate memories in project ${projectId}...`);
  const clusters = await findDuplicateMemories({
    projectId,
    userId,
    similarityThreshold
  });
  console.log(`\u{1F4CA} Found ${clusters.length} memory clusters`);
  if (dryRun) {
    // Report-only mode: print every cluster, touch nothing.
    clusters.forEach((cluster) => {
      console.log(`
Cluster (similarity: ${cluster.similarity.toFixed(2)}):`);
      console.log(`  Representative: "${cluster.representative.content}"`);
      console.log(`  Duplicates: ${cluster.duplicates.length}`);
      for (const dup of cluster.duplicates) {
        console.log(`    - "${dup.content}"`);
      }
    });
    return {
      clustersFound: clusters.length,
      memoriesMerged: 0,
      memoriesDeactivated: 0
    };
  }
  let memoriesMerged = 0;
  let memoriesDeactivated = 0;
  for (const cluster of clusters) {
    // Representative + duplicates are all deactivated by a successful merge.
    const clusterSize = cluster.duplicates.length + 1;
    try {
      console.log(`\u{1F517} Merging cluster with ${clusterSize} memories...`);
      await mergeDuplicateMemories(cluster);
      memoriesMerged += 1;
      memoriesDeactivated += clusterSize;
      console.log(`\u2705 Merged successfully`);
    } catch (error) {
      // A failed merge skips this cluster but never aborts the run.
      console.error(`\u274C Failed to merge cluster:`, error);
    }
  }
  console.log(
    `
\u2705 Consolidation complete: ${memoriesMerged} clusters merged, ${memoriesDeactivated} memories deactivated`
  );
  return {
    clustersFound: clusters.length,
    memoriesMerged,
    memoriesDeactivated
  };
}
181
/**
 * Run memory consolidation for every project in an organization.
 * Projects are processed sequentially; a failure in one project is logged
 * and does not stop the remaining projects.
 *
 * @param {string} orgId - Organization whose projects are consolidated.
 * @returns {Promise<void>}
 */
async function scheduledConsolidation(orgId) {
  console.log(`\u{1F504} Running scheduled consolidation for org ${orgId}...`);
  const projects = await db.project.findMany({
    where: { orgId }
  });
  for (const project of projects) {
    try {
      // 0.92 — slightly lower than the interactive default for scheduled runs.
      const result = await consolidateMemories({
        projectId: project.id,
        similarityThreshold: 0.92
      });
      if (result.memoriesMerged > 0) {
        console.log(
          `\u{1F4CA} Project ${project.name}: merged ${result.memoriesMerged} clusters`
        );
      }
    } catch (error) {
      console.error(`Failed to consolidate project ${project.name}:`, error);
    }
  }
  console.log("\u2705 Scheduled consolidation complete");
}
204
+ export {
205
+ consolidateMemories,
206
+ findDuplicateMemories,
207
+ mergeDuplicateMemories,
208
+ scheduledConsolidation
209
+ };
210
210
  //# sourceMappingURL=consolidation-IFQ52E44.js.map