@usewhisper/mcp-server 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +26 -24
  2. package/dist/autosubscribe-6EDKPBE2.js +4068 -0
  3. package/dist/autosubscribe-GHO6YR5A.js +4068 -0
  4. package/dist/autosubscribe-ISDETQIB.js +436 -0
  5. package/dist/autosubscribe-ISDETQIB.js.map +1 -0
  6. package/dist/chunk-3WGYBAYR.js +8387 -0
  7. package/dist/chunk-52VJYCZ7.js +455 -0
  8. package/dist/chunk-5KBZQHDL.js +189 -0
  9. package/dist/chunk-5KIJNY6Z.js +370 -0
  10. package/dist/chunk-7SN3CKDK.js +1076 -0
  11. package/dist/chunk-B3VWOHUA.js +271 -0
  12. package/dist/chunk-C57DHKTL.js +459 -0
  13. package/dist/chunk-EI5CE3EY.js +616 -0
  14. package/dist/chunk-FTWUJBAH.js +387 -0
  15. package/dist/chunk-FTWUJBAH.js.map +1 -0
  16. package/dist/chunk-H3HSKH2P.js +4841 -0
  17. package/dist/chunk-JO3ORBZD.js +616 -0
  18. package/dist/chunk-L6DXSM2U.js +457 -0
  19. package/dist/chunk-L6DXSM2U.js.map +1 -0
  20. package/dist/chunk-LMEYV4JD.js +368 -0
  21. package/dist/chunk-MEFLJ4PV.js +8385 -0
  22. package/dist/chunk-OBLI4FE4.js +276 -0
  23. package/dist/chunk-OBLI4FE4.js.map +1 -0
  24. package/dist/chunk-PPGYJJED.js +271 -0
  25. package/dist/chunk-QGM4M3NI.js +37 -0
  26. package/dist/chunk-T7KMSTWP.js +399 -0
  27. package/dist/chunk-TWEIYHI6.js +399 -0
  28. package/dist/chunk-UYWE7HSU.js +369 -0
  29. package/dist/chunk-UYWE7HSU.js.map +1 -0
  30. package/dist/chunk-X2DL2GWT.js +33 -0
  31. package/dist/chunk-X2DL2GWT.js.map +1 -0
  32. package/dist/chunk-X7HNNNJJ.js +1079 -0
  33. package/dist/consolidation-2GCKI4RE.js +220 -0
  34. package/dist/consolidation-4JOPW6BG.js +220 -0
  35. package/dist/consolidation-FOVQTWNQ.js +222 -0
  36. package/dist/consolidation-IFQ52E44.js +210 -0
  37. package/dist/consolidation-IFQ52E44.js.map +1 -0
  38. package/dist/context-sharing-4ITCNKG4.js +307 -0
  39. package/dist/context-sharing-6CCFIAKL.js +276 -0
  40. package/dist/context-sharing-6CCFIAKL.js.map +1 -0
  41. package/dist/context-sharing-GYKLXHZA.js +307 -0
  42. package/dist/context-sharing-PH64JTXS.js +308 -0
  43. package/dist/context-sharing-Y6LTZZOF.js +307 -0
  44. package/dist/cost-optimization-6OIKRSBV.js +196 -0
  45. package/dist/cost-optimization-6OIKRSBV.js.map +1 -0
  46. package/dist/cost-optimization-7DVSTL6R.js +307 -0
  47. package/dist/cost-optimization-BH5NAX33.js +287 -0
  48. package/dist/cost-optimization-BH5NAX33.js.map +1 -0
  49. package/dist/cost-optimization-F3L5BS5F.js +303 -0
  50. package/dist/ingest-2LPTWUUM.js +16 -0
  51. package/dist/ingest-7T5FAZNC.js +15 -0
  52. package/dist/ingest-EBNIE7XB.js +15 -0
  53. package/dist/ingest-FSHT5BCS.js +15 -0
  54. package/dist/ingest-QE2BTV72.js +15 -0
  55. package/dist/ingest-QE2BTV72.js.map +1 -0
  56. package/dist/oracle-3RLQF3DP.js +259 -0
  57. package/dist/oracle-FKRTQUUG.js +282 -0
  58. package/dist/oracle-J47QCSEW.js +263 -0
  59. package/dist/oracle-MDP5MZRC.js +257 -0
  60. package/dist/oracle-MDP5MZRC.js.map +1 -0
  61. package/dist/search-BLVHWLWC.js +14 -0
  62. package/dist/search-CZ5NYL5B.js +13 -0
  63. package/dist/search-CZ5NYL5B.js.map +1 -0
  64. package/dist/search-EG6TYWWW.js +13 -0
  65. package/dist/search-I22QQA7T.js +13 -0
  66. package/dist/search-T7H5G6DW.js +13 -0
  67. package/dist/server.d.ts +2 -0
  68. package/dist/server.js +914 -1503
  69. package/dist/server.js.map +1 -1
  70. package/package.json +6 -7
package/dist/consolidation-IFQ52E44.js
@@ -0,0 +1,210 @@
+ import {
+   db,
+   embedSingle
+ } from "./chunk-X2DL2GWT.js";
+
+ // src/engine/memory/consolidation.ts
+ import Anthropic from "@anthropic-ai/sdk";
+ var anthropic = new Anthropic({
+   apiKey: process.env.ANTHROPIC_API_KEY || ""
+ });
+ async function findDuplicateMemories(params) {
+   const { projectId, userId, similarityThreshold = 0.95, limit = 100 } = params;
+   const memories = await db.memory.findMany({
+     where: {
+       projectId,
+       userId,
+       isActive: true,
+       validUntil: null
+       // Only current versions
+     },
+     orderBy: {
+       importance: "desc"
+     },
+     take: limit * 2
+     // Get more to find duplicates
+   });
+   const clusters = [];
+   const processed = /* @__PURE__ */ new Set();
+   for (let i = 0; i < memories.length; i++) {
+     const memory = memories[i];
+     if (processed.has(memory.id)) continue;
+     const similar = [];
+     for (let j = i + 1; j < memories.length; j++) {
+       const other = memories[j];
+       if (processed.has(other.id)) continue;
+       const similarity = await calculateSimilarity(memory.id, other.id);
+       if (similarity >= similarityThreshold) {
+         similar.push({ ...other, similarity });
+         processed.add(other.id);
+       }
+     }
+     if (similar.length > 0) {
+       clusters.push({
+         representative: memory,
+         duplicates: similar,
+         similarity: similar.reduce((sum, m) => sum + m.similarity, 0) / similar.length
+       });
+       processed.add(memory.id);
+     }
+   }
+   return clusters;
+ }
+ async function calculateSimilarity(memoryId1, memoryId2) {
+   const result = await db.$queryRaw`
+     SELECT
+       1 - (m1.embedding <=> m2.embedding) as similarity
+     FROM memories m1, memories m2
+     WHERE m1.id = ${memoryId1} AND m2.id = ${memoryId2}
+   `;
+   return result[0]?.similarity || 0;
+ }
+ async function mergeDuplicateMemories(cluster) {
+   const memories = [cluster.representative, ...cluster.duplicates];
+   const prompt = `You are merging duplicate memories into a single, comprehensive memory.
+
+ **Memories to merge:**
+ ${memories.map(
+     (m, i) => `${i + 1}. "${m.content}" (confidence: ${m.confidence}, date: ${m.documentDate?.toISOString() || "unknown"})`
+   ).join("\n")}
+
+ **Instructions:**
+ 1. Combine all unique information from these memories
+ 2. Resolve any contradictions by keeping the most recent or most confident information
+ 3. Extract all unique entity mentions
+ 4. Use the highest confidence score
+ 5. Keep the most recent document date
+
+ Return JSON:
+ {
+   "merged_content": "comprehensive merged memory",
+   "entity_mentions": ["list", "of", "entities"],
+   "confidence": 0.0-1.0,
+   "reasoning": "brief explanation of how you merged"
+ }`;
+   const response = await anthropic.messages.create({
+     model: "claude-sonnet-4.5",
+     max_tokens: 2048,
+     temperature: 0,
+     messages: [{ role: "user", content: prompt }]
+   });
+   const text = response.content.find((c) => c.type === "text");
+   if (!text || text.type !== "text") {
+     throw new Error("Failed to merge memories");
+   }
+   const jsonMatch = text.text.match(/```json\n?([\s\S]*?)\n?```/) || text.text.match(/\{[\s\S]*\}/);
+   const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text.text;
+   const result = JSON.parse(jsonStr);
+   const embedding = await embedSingle(result.merged_content);
+   const mergedMemory = await db.memory.create({
+     data: {
+       projectId: cluster.representative.projectId,
+       orgId: cluster.representative.orgId,
+       userId: cluster.representative.userId,
+       sessionId: cluster.representative.sessionId,
+       memoryType: cluster.representative.memoryType,
+       content: result.merged_content,
+       embedding,
+       entityMentions: result.entity_mentions || [],
+       confidence: result.confidence || cluster.representative.confidence,
+       documentDate: cluster.representative.documentDate,
+       eventDate: cluster.representative.eventDate,
+       validFrom: /* @__PURE__ */ new Date(),
+       importance: Math.max(...memories.map((m) => m.importance || 0.5)),
+       metadata: {
+         mergedFrom: memories.map((m) => m.id),
+         mergeReasoning: result.reasoning,
+         mergedAt: (/* @__PURE__ */ new Date()).toISOString()
+       }
+     }
+   });
+   for (const memory of memories) {
+     await db.memory.update({
+       where: { id: memory.id },
+       data: {
+         isActive: false,
+         validUntil: /* @__PURE__ */ new Date(),
+         supersededBy: mergedMemory.id
+       }
+     });
+   }
+   return mergedMemory.id;
+ }
+ async function consolidateMemories(params) {
+   const { projectId, userId, similarityThreshold = 0.95, dryRun = false } = params;
+   console.log(`\u{1F50D} Finding duplicate memories in project ${projectId}...`);
+   const clusters = await findDuplicateMemories({
+     projectId,
+     userId,
+     similarityThreshold
+   });
+   console.log(`\u{1F4CA} Found ${clusters.length} memory clusters`);
+   if (dryRun) {
+     for (const cluster of clusters) {
+       console.log(`
+ Cluster (similarity: ${cluster.similarity.toFixed(2)}):`);
+       console.log(` Representative: "${cluster.representative.content}"`);
+       console.log(` Duplicates: ${cluster.duplicates.length}`);
+       cluster.duplicates.forEach((d) => {
+         console.log(` - "${d.content}"`);
+       });
+     }
+     return {
+       clustersFound: clusters.length,
+       memoriesMerged: 0,
+       memoriesDeactivated: 0
+     };
+   }
+   let memoriesMerged = 0;
+   let memoriesDeactivated = 0;
+   for (const cluster of clusters) {
+     try {
+       console.log(`\u{1F517} Merging cluster with ${cluster.duplicates.length + 1} memories...`);
+       await mergeDuplicateMemories(cluster);
+       memoriesMerged++;
+       memoriesDeactivated += cluster.duplicates.length + 1;
+       console.log(`\u2705 Merged successfully`);
+     } catch (error) {
+       console.error(`\u274C Failed to merge cluster:`, error);
+     }
+   }
+   console.log(
+     `
+ \u2705 Consolidation complete: ${memoriesMerged} clusters merged, ${memoriesDeactivated} memories deactivated`
+   );
+   return {
+     clustersFound: clusters.length,
+     memoriesMerged,
+     memoriesDeactivated
+   };
+ }
+ async function scheduledConsolidation(orgId) {
+   console.log(`\u{1F504} Running scheduled consolidation for org ${orgId}...`);
+   const projects = await db.project.findMany({
+     where: { orgId }
+   });
+   for (const project of projects) {
+     try {
+       const result = await consolidateMemories({
+         projectId: project.id,
+         similarityThreshold: 0.92
+         // Slightly lower for scheduled runs
+       });
+       if (result.memoriesMerged > 0) {
+         console.log(
+           `\u{1F4CA} Project ${project.name}: merged ${result.memoriesMerged} clusters`
+         );
+       }
+     } catch (error) {
+       console.error(`Failed to consolidate project ${project.name}:`, error);
+     }
+   }
+   console.log("\u2705 Scheduled consolidation complete");
+ }
+ export {
+   consolidateMemories,
+   findDuplicateMemories,
+   mergeDuplicateMemories,
+   scheduledConsolidation
+ };
+ //# sourceMappingURL=consolidation-IFQ52E44.js.map
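
The consolidation chunk above exports four functions. For orientation, a minimal sketch of how they might be driven, with the caveats that the hashed chunk path is build-internal rather than a documented entry point, the ids are placeholders, and a configured ANTHROPIC_API_KEY plus a pgvector-backed Prisma database are assumed:

// Hypothetical ESM driver; chunk path and ids are illustrative only.
import {
  consolidateMemories,
  scheduledConsolidation
} from "./consolidation-IFQ52E44.js";

// Dry run: report clusters of near-duplicates (pairwise cosine
// similarity >= 0.95 via pgvector's <=> operator) without writing.
const preview = await consolidateMemories({
  projectId: "proj_example", // placeholder id
  dryRun: true
});
// => { clustersFound, memoriesMerged: 0, memoriesDeactivated: 0 }

// Real run: each cluster is merged via claude-sonnet-4.5, and the
// source memories are deactivated with supersededBy pointers.
await consolidateMemories({ projectId: "proj_example" });

// Scheduled sweep over every project in an org, at the looser 0.92 threshold.
await scheduledConsolidation("org_example"); // placeholder id

Note the O(n²) shape of findDuplicateMemories: every surviving pair triggers a calculateSimilarity round trip to the database, which is presumably why the scan is capped at limit * 2 memories.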
package/dist/consolidation-IFQ52E44.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/engine/memory/consolidation.ts"],"sourcesContent":["/**\n * Memory Consolidation - Deduplicate and merge similar memories\n * Prevents memory bloat and improves search accuracy\n */\n\nimport Anthropic from \"@anthropic-ai/sdk\";\nimport { db } from \"../../db/index.js\";\nimport { embedSingle } from \"../embeddings.js\";\n\nconst anthropic = new Anthropic({\n apiKey: process.env.ANTHROPIC_API_KEY || \"\",\n});\n\ninterface MemoryCluster {\n representative: any; // Most important memory in cluster\n duplicates: any[]; // Similar memories to merge\n similarity: number; // Average similarity score\n}\n\n/**\n * Find duplicate/similar memories using vector similarity\n */\nexport async function findDuplicateMemories(params: {\n projectId: string;\n userId?: string;\n similarityThreshold?: number;\n limit?: number;\n}): Promise<MemoryCluster[]> {\n const { projectId, userId, similarityThreshold = 0.95, limit = 100 } = params;\n\n // Get active memories\n const memories = await db.memory.findMany({\n where: {\n projectId,\n userId,\n isActive: true,\n validUntil: null, // Only current versions\n },\n orderBy: {\n importance: \"desc\",\n },\n take: limit * 2, // Get more to find duplicates\n });\n\n const clusters: MemoryCluster[] = [];\n const processed = new Set<string>();\n\n for (let i = 0; i < memories.length; i++) {\n const memory = memories[i];\n\n if (processed.has(memory.id)) continue;\n\n // Find similar memories\n const similar: any[] = [];\n\n for (let j = i + 1; j < memories.length; j++) {\n const other = memories[j];\n\n if (processed.has(other.id)) continue;\n\n // Calculate cosine similarity (using raw SQL for vector ops)\n const similarity = await calculateSimilarity(memory.id, other.id);\n\n if (similarity >= similarityThreshold) {\n similar.push({ ...other, similarity });\n processed.add(other.id);\n }\n }\n\n if (similar.length > 0) {\n clusters.push({\n representative: memory,\n duplicates: similar,\n similarity: similar.reduce((sum, m) => sum + m.similarity, 0) / similar.length,\n });\n processed.add(memory.id);\n }\n }\n\n return clusters;\n}\n\n/**\n * Calculate cosine similarity between two memory embeddings\n */\nasync function calculateSimilarity(memoryId1: string, memoryId2: string): Promise<number> {\n const result = await db.$queryRaw<any[]>`\n SELECT\n 1 - (m1.embedding <=> m2.embedding) as similarity\n FROM memories m1, memories m2\n WHERE m1.id = ${memoryId1} AND m2.id = ${memoryId2}\n `;\n\n return result[0]?.similarity || 0;\n}\n\n/**\n * Merge duplicate memories using LLM\n */\nexport async function mergeDuplicateMemories(cluster: MemoryCluster): Promise<string> {\n const memories = [cluster.representative, ...cluster.duplicates];\n\n const prompt = `You are merging duplicate memories into a single, comprehensive memory.\n\n**Memories to merge:**\n${memories\n .map(\n (m, i) => `${i + 1}. \"${m.content}\" (confidence: ${m.confidence}, date: ${m.documentDate?.toISOString() || \"unknown\"})`\n )\n .join(\"\\n\")}\n\n**Instructions:**\n1. Combine all unique information from these memories\n2. Resolve any contradictions by keeping the most recent or most confident information\n3. Extract all unique entity mentions\n4. Use the highest confidence score\n5. Keep the most recent document date\n\nReturn JSON:\n{\n \"merged_content\": \"comprehensive merged memory\",\n \"entity_mentions\": [\"list\", \"of\", \"entities\"],\n \"confidence\": 0.0-1.0,\n \"reasoning\": \"brief explanation of how you merged\"\n}`;\n\n const response = await anthropic.messages.create({\n model: \"claude-sonnet-4.5\",\n max_tokens: 2048,\n temperature: 0.0,\n messages: [{ role: \"user\", content: prompt }],\n });\n\n const text = response.content.find((c) => c.type === \"text\");\n if (!text || text.type !== \"text\") {\n throw new Error(\"Failed to merge memories\");\n }\n\n const jsonMatch = text.text.match(/```json\\n?([\\s\\S]*?)\\n?```/) || text.text.match(/\\{[\\s\\S]*\\}/);\n const jsonStr = jsonMatch ? (jsonMatch[1] || jsonMatch[0]) : text.text;\n\n const result = JSON.parse(jsonStr);\n\n // Create merged memory\n const embedding = await embedSingle(result.merged_content);\n\n const mergedMemory = await db.memory.create({\n data: {\n projectId: cluster.representative.projectId,\n orgId: cluster.representative.orgId,\n userId: cluster.representative.userId,\n sessionId: cluster.representative.sessionId,\n memoryType: cluster.representative.memoryType,\n content: result.merged_content,\n embedding,\n entityMentions: result.entity_mentions || [],\n confidence: result.confidence || cluster.representative.confidence,\n documentDate: cluster.representative.documentDate,\n eventDate: cluster.representative.eventDate,\n validFrom: new Date(),\n importance: Math.max(...memories.map((m) => m.importance || 0.5)),\n metadata: {\n mergedFrom: memories.map((m) => m.id),\n mergeReasoning: result.reasoning,\n mergedAt: new Date().toISOString(),\n },\n },\n });\n\n // Deactivate old memories\n for (const memory of memories) {\n await db.memory.update({\n where: { id: memory.id },\n data: {\n isActive: false,\n validUntil: new Date(),\n supersededBy: mergedMemory.id,\n },\n });\n }\n\n return mergedMemory.id;\n}\n\n/**\n * Run consolidation job on a project\n */\nexport async function consolidateMemories(params: {\n projectId: string;\n userId?: string;\n similarityThreshold?: number;\n dryRun?: boolean;\n}): Promise<{\n clustersFound: number;\n memoriesMerged: number;\n memoriesDeactivated: number;\n}> {\n const { projectId, userId, similarityThreshold = 0.95, dryRun = false } = params;\n\n console.log(`🔍 Finding duplicate memories in project ${projectId}...`);\n\n const clusters = await findDuplicateMemories({\n projectId,\n userId,\n similarityThreshold,\n });\n\n console.log(`📊 Found ${clusters.length} memory clusters`);\n\n if (dryRun) {\n for (const cluster of clusters) {\n console.log(`\\nCluster (similarity: ${cluster.similarity.toFixed(2)}):`);\n console.log(` Representative: \"${cluster.representative.content}\"`);\n console.log(` Duplicates: ${cluster.duplicates.length}`);\n cluster.duplicates.forEach((d) => {\n console.log(` - \"${d.content}\"`);\n });\n }\n\n return {\n clustersFound: clusters.length,\n memoriesMerged: 0,\n memoriesDeactivated: 0,\n };\n }\n\n // Merge clusters\n let memoriesMerged = 0;\n let memoriesDeactivated = 0;\n\n for (const cluster of clusters) {\n try {\n console.log(`🔗 Merging cluster with ${cluster.duplicates.length + 1} memories...`);\n\n await mergeDuplicateMemories(cluster);\n\n memoriesMerged++;\n memoriesDeactivated += cluster.duplicates.length + 1; // All memories in cluster\n\n console.log(`✅ Merged successfully`);\n } catch (error) {\n console.error(`❌ Failed to merge cluster:`, error);\n }\n }\n\n console.log(\n `\\n✅ Consolidation complete: ${memoriesMerged} clusters merged, ${memoriesDeactivated} memories deactivated`\n );\n\n return {\n clustersFound: clusters.length,\n memoriesMerged,\n memoriesDeactivated,\n };\n}\n\n/**\n * Find memories that need consolidation (scheduled job)\n */\nexport async function scheduledConsolidation(orgId: string): Promise<void> {\n console.log(`🔄 Running scheduled consolidation for org ${orgId}...`);\n\n const projects = await db.project.findMany({\n where: { orgId },\n });\n\n for (const project of projects) {\n try {\n const result = await consolidateMemories({\n projectId: project.id,\n similarityThreshold: 0.92, // Slightly lower for scheduled runs\n });\n\n if (result.memoriesMerged > 0) {\n console.log(\n `📊 Project ${project.name}: merged ${result.memoriesMerged} clusters`\n );\n }\n } catch (error) {\n console.error(`Failed to consolidate project ${project.name}:`, error);\n }\n }\n\n console.log(\"✅ Scheduled consolidation complete\");\n}\n"],"mappings":";;;;;;AAKA,OAAO,eAAe;AAItB,IAAM,YAAY,IAAI,UAAU;AAAA,EAC9B,QAAQ,QAAQ,IAAI,qBAAqB;AAC3C,CAAC;AAWD,eAAsB,sBAAsB,QAKf;AAC3B,QAAM,EAAE,WAAW,QAAQ,sBAAsB,MAAM,QAAQ,IAAI,IAAI;AAGvE,QAAM,WAAW,MAAM,GAAG,OAAO,SAAS;AAAA,IACxC,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,UAAU;AAAA,MACV,YAAY;AAAA;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,YAAY;AAAA,IACd;AAAA,IACA,MAAM,QAAQ;AAAA;AAAA,EAChB,CAAC;AAED,QAAM,WAA4B,CAAC;AACnC,QAAM,YAAY,oBAAI,IAAY;AAElC,WAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,UAAM,SAAS,SAAS,CAAC;AAEzB,QAAI,UAAU,IAAI,OAAO,EAAE,EAAG;AAG9B,UAAM,UAAiB,CAAC;AAExB,aAAS,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AAC5C,YAAM,QAAQ,SAAS,CAAC;AAExB,UAAI,UAAU,IAAI,MAAM,EAAE,EAAG;AAG7B,YAAM,aAAa,MAAM,oBAAoB,OAAO,IAAI,MAAM,EAAE;AAEhE,UAAI,cAAc,qBAAqB;AACrC,gBAAQ,KAAK,EAAE,GAAG,OAAO,WAAW,CAAC;AACrC,kBAAU,IAAI,MAAM,EAAE;AAAA,MACxB;AAAA,IACF;AAEA,QAAI,QAAQ,SAAS,GAAG;AACtB,eAAS,KAAK;AAAA,QACZ,gBAAgB;AAAA,QAChB,YAAY;AAAA,QACZ,YAAY,QAAQ,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,YAAY,CAAC,IAAI,QAAQ;AAAA,MAC1E,CAAC;AACD,gBAAU,IAAI,OAAO,EAAE;AAAA,IACzB;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,oBAAoB,WAAmB,WAAoC;AACxF,QAAM,SAAS,MAAM,GAAG;AAAA;AAAA;AAAA;AAAA,oBAIN,SAAS,gBAAgB,SAAS;AAAA;AAGpD,SAAO,OAAO,CAAC,GAAG,cAAc;AAClC;AAKA,eAAsB,uBAAuB,SAAyC;AACpF,QAAM,WAAW,CAAC,QAAQ,gBAAgB,GAAG,QAAQ,UAAU;AAE/D,QAAM,SAAS;AAAA;AAAA;AAAA,EAGf,SACC;AAAA,IACC,CAAC,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE,OAAO,kBAAkB,EAAE,UAAU,WAAW,EAAE,cAAc,YAAY,KAAK,SAAS;AAAA,EACtH,EACC,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBX,QAAM,WAAW,MAAM,UAAU,SAAS,OAAO;AAAA,IAC/C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,aAAa;AAAA,IACb,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAC9C,CAAC;AAED,QAAM,OAAO,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAC3D,MAAI,CAAC,QAAQ,KAAK,SAAS,QAAQ;AACjC,UAAM,IAAI,MAAM,0BAA0B;AAAA,EAC5C;AAEA,QAAM,YAAY,KAAK,KAAK,MAAM,4BAA4B,KAAK,KAAK,KAAK,MAAM,aAAa;AAChG,QAAM,UAAU,YAAa,UAAU,CAAC,KAAK,UAAU,CAAC,IAAK,KAAK;AAElE,QAAM,SAAS,KAAK,MAAM,OAAO;AAGjC,QAAM,YAAY,MAAM,YAAY,OAAO,cAAc;AAEzD,QAAM,eAAe,MAAM,GAAG,OAAO,OAAO;AAAA,IAC1C,MAAM;AAAA,MACJ,WAAW,QAAQ,eAAe;AAAA,MAClC,OAAO,QAAQ,eAAe;AAAA,MAC9B,QAAQ,QAAQ,eAAe;AAAA,MAC/B,WAAW,QAAQ,eAAe;AAAA,MAClC,YAAY,QAAQ,eAAe;AAAA,MACnC,SAAS,OAAO;AAAA,MAChB;AAAA,MACA,gBAAgB,OAAO,mBAAmB,CAAC;AAAA,MAC3C,YAAY,OAAO,cAAc,QAAQ,eAAe;AAAA,MACxD,cAAc,QAAQ,eAAe;AAAA,MACrC,WAAW,QAAQ,eAAe;AAAA,MAClC,WAAW,oBAAI,KAAK;AAAA,MACpB,YAAY,KAAK,IAAI,GAAG,SAAS,IAAI,CAAC,MAAM,EAAE,cAAc,GAAG,CAAC;AAAA,MAChE,UAAU;AAAA,QACR,YAAY,SAAS,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,QACpC,gBAAgB,OAAO;AAAA,QACvB,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,MACnC;AAAA,IACF;AAAA,EACF,CAAC;AAGD,aAAW,UAAU,UAAU;AAC7B,UAAM,GAAG,OAAO,OAAO;AAAA,MACrB,OAAO,EAAE,IAAI,OAAO,GAAG;AAAA,MACvB,MAAM;AAAA,QACJ,UAAU;AAAA,QACV,YAAY,oBAAI,KAAK;AAAA,QACrB,cAAc,aAAa;AAAA,MAC7B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,aAAa;AACtB;AAKA,eAAsB,oBAAoB,QASvC;AACD,QAAM,EAAE,WAAW,QAAQ,sBAAsB,MAAM,SAAS,MAAM,IAAI;AAE1E,UAAQ,IAAI,mDAA4C,SAAS,KAAK;AAEtE,QAAM,WAAW,MAAM,sBAAsB;AAAA,IAC3C;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,UAAQ,IAAI,mBAAY,SAAS,MAAM,kBAAkB;AAEzD,MAAI,QAAQ;AACV,eAAW,WAAW,UAAU;AAC9B,cAAQ,IAAI;AAAA,uBAA0B,QAAQ,WAAW,QAAQ,CAAC,CAAC,IAAI;AACvE,cAAQ,IAAI,sBAAsB,QAAQ,eAAe,OAAO,GAAG;AACnE,cAAQ,IAAI,iBAAiB,QAAQ,WAAW,MAAM,EAAE;AACxD,cAAQ,WAAW,QAAQ,CAAC,MAAM;AAChC,gBAAQ,IAAI,UAAU,EAAE,OAAO,GAAG;AAAA,MACpC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,eAAe,SAAS;AAAA,MACxB,gBAAgB;AAAA,MAChB,qBAAqB;AAAA,IACvB;AAAA,EACF;AAGA,MAAI,iBAAiB;AACrB,MAAI,sBAAsB;AAE1B,aAAW,WAAW,UAAU;AAC9B,QAAI;AACF,cAAQ,IAAI,kCAA2B,QAAQ,WAAW,SAAS,CAAC,cAAc;AAElF,YAAM,uBAAuB,OAAO;AAEpC;AACA,6BAAuB,QAAQ,WAAW,SAAS;AAEnD,cAAQ,IAAI,4BAAuB;AAAA,IACrC,SAAS,OAAO;AACd,cAAQ,MAAM,mCAA8B,KAAK;AAAA,IACnD;AAAA,EACF;AAEA,UAAQ;AAAA,IACN;AAAA,iCAA+B,cAAc,qBAAqB,mBAAmB;AAAA,EACvF;AAEA,SAAO;AAAA,IACL,eAAe,SAAS;AAAA,IACxB;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAsB,uBAAuB,OAA8B;AACzE,UAAQ,IAAI,qDAA8C,KAAK,KAAK;AAEpE,QAAM,WAAW,MAAM,GAAG,QAAQ,SAAS;AAAA,IACzC,OAAO,EAAE,MAAM;AAAA,EACjB,CAAC;AAED,aAAW,WAAW,UAAU;AAC9B,QAAI;AACF,YAAM,SAAS,MAAM,oBAAoB;AAAA,QACvC,WAAW,QAAQ;AAAA,QACnB,qBAAqB;AAAA;AAAA,MACvB,CAAC;AAED,UAAI,OAAO,iBAAiB,GAAG;AAC7B,gBAAQ;AAAA,UACN,qBAAc,QAAQ,IAAI,YAAY,OAAO,cAAc;AAAA,QAC7D;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,iCAAiC,QAAQ,IAAI,KAAK,KAAK;AAAA,IACvE;AAAA,EACF;AAEA,UAAQ,IAAI,yCAAoC;AAClD;","names":[]}
@@ -0,0 +1,307 @@
+ import {
+   getSessionMemories
+ } from "./chunk-T7KMSTWP.js";
+ import "./chunk-EI5CE3EY.js";
+ import "./chunk-5KBZQHDL.js";
+ import {
+   db
+ } from "./chunk-3WGYBAYR.js";
+ import "./chunk-QGM4M3NI.js";
+
+ // ../node_modules/nanoid/index.js
+ import { webcrypto as crypto } from "crypto";
+
+ // ../node_modules/nanoid/url-alphabet/index.js
+ var urlAlphabet = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict";
+
+ // ../node_modules/nanoid/index.js
+ var POOL_SIZE_MULTIPLIER = 128;
+ var pool;
+ var poolOffset;
+ function fillPool(bytes) {
+   if (!pool || pool.length < bytes) {
+     pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER);
+     crypto.getRandomValues(pool);
+     poolOffset = 0;
+   } else if (poolOffset + bytes > pool.length) {
+     crypto.getRandomValues(pool);
+     poolOffset = 0;
+   }
+   poolOffset += bytes;
+ }
+ function nanoid(size = 21) {
+   fillPool(size |= 0);
+   let id = "";
+   for (let i = poolOffset - size; i < poolOffset; i++) {
+     id += urlAlphabet[pool[i] & 63];
+   }
+   return id;
+ }
+
+ // ../src/engine/context-sharing.ts
+ async function createSharedContext(params) {
+   const {
+     sessionId,
+     projectId,
+     orgId,
+     userId,
+     includeMemories = true,
+     includeMessages = true,
+     includeChunks = false,
+     expiryDays = 7,
+     metadata = {}
+   } = params;
+   const shareId = nanoid(12);
+   const session = await db.session.findUnique({
+     where: { id: sessionId }
+   });
+   if (!session) {
+     throw new Error("Session not found");
+   }
+   const shareData = {
+     sessionId,
+     projectId,
+     userId,
+     metadata
+   };
+   if (includeMemories) {
+     const memories = await getSessionMemories({
+       sessionId,
+       projectId,
+       limit: 200
+     });
+     shareData.memories = memories.map((m) => ({
+       id: m.id,
+       content: m.content,
+       type: m.memoryType,
+       entities: m.entityMentions,
+       confidence: m.confidence,
+       documentDate: m.documentDate?.toISOString(),
+       eventDate: m.eventDate?.toISOString()
+     }));
+   } else {
+     shareData.memories = [];
+   }
+   if (includeMessages) {
+     const messages = await db.message.findMany({
+       where: { sessionId },
+       orderBy: { createdAt: "asc" },
+       take: 500
+     });
+     shareData.messages = messages.map((m) => ({
+       role: m.role,
+       content: m.content,
+       createdAt: m.createdAt.toISOString()
+     }));
+   } else {
+     shareData.messages = [];
+   }
+   if (includeChunks) {
+     const chunkIds = shareData.memories.map((m) => m.sourceChunkId).filter(Boolean).slice(0, 50);
+     if (chunkIds.length > 0) {
+       const chunks = await db.chunk.findMany({
+         where: {
+           id: { in: chunkIds }
+         },
+         select: {
+           id: true,
+           content: true,
+           metadata: true,
+           chunkType: true
+         },
+         take: 50
+       });
+       shareData.chunks = chunks;
+     } else {
+       shareData.chunks = [];
+     }
+   } else {
+     shareData.chunks = [];
+   }
+   const expiresAt = expiryDays ? new Date(Date.now() + expiryDays * 24 * 60 * 60 * 1e3) : null;
+   await db.$executeRaw`
+     INSERT INTO shared_contexts (
+       id, session_id, project_id, org_id, user_id,
+       share_data, expires_at, created_at, access_count
+     ) VALUES (
+       ${shareId}, ${sessionId}, ${projectId}, ${orgId}, ${userId || null},
+       ${JSON.stringify(shareData)}::jsonb, ${expiresAt}, NOW(), 0
+     )
+     ON CONFLICT (id) DO NOTHING
+   `;
+   const baseUrl = process.env.BASE_URL || "http://localhost:4000";
+   const shareUrl = `${baseUrl}/shared/${shareId}`;
+   return {
+     id: shareId,
+     sessionId,
+     projectId,
+     userId,
+     shareUrl,
+     memories: shareData.memories,
+     messages: shareData.messages,
+     chunks: shareData.chunks,
+     metadata,
+     createdAt: /* @__PURE__ */ new Date(),
+     expiresAt,
+     accessCount: 0
+   };
+ }
+ async function loadSharedContext(shareId) {
+   const result = await db.$queryRaw`
+     SELECT * FROM shared_contexts
+     WHERE id = ${shareId}
+       AND (expires_at IS NULL OR expires_at > NOW())
+   `;
+   if (result.length === 0) {
+     return null;
+   }
+   const row = result[0];
+   await db.$executeRaw`
+     UPDATE shared_contexts
+     SET access_count = access_count + 1,
+         last_accessed_at = NOW()
+     WHERE id = ${shareId}
+   `;
+   const baseUrl = process.env.BASE_URL || "http://localhost:4000";
+   return {
+     id: row.id,
+     sessionId: row.session_id,
+     projectId: row.project_id,
+     userId: row.user_id,
+     shareUrl: `${baseUrl}/shared/${shareId}`,
+     memories: row.share_data.memories || [],
+     messages: row.share_data.messages || [],
+     chunks: row.share_data.chunks || [],
+     metadata: row.share_data.metadata || {},
+     createdAt: row.created_at,
+     expiresAt: row.expires_at,
+     accessCount: row.access_count
+   };
+ }
+ async function resumeFromSharedContext(params) {
+   const { shareId, projectId, orgId, userId, newSessionId } = params;
+   const sharedContext = await loadSharedContext(shareId);
+   if (!sharedContext) {
+     throw new Error("Shared context not found or expired");
+   }
+   const sessionId = newSessionId || nanoid();
+   await db.session.create({
+     data: {
+       id: sessionId,
+       projectId,
+       orgId,
+       userId,
+       title: `Resumed from ${sharedContext.id}`,
+       metadata: {
+         resumedFrom: shareId,
+         originalSessionId: sharedContext.sessionId,
+         ...sharedContext.metadata
+       }
+     }
+   });
+   let memoriesRestored = 0;
+   for (const memory of sharedContext.memories) {
+     try {
+       await db.memory.create({
+         data: {
+           projectId,
+           orgId,
+           userId,
+           sessionId,
+           memoryType: memory.type,
+           content: memory.content,
+           entityMentions: memory.entities || [],
+           confidence: memory.confidence || 0.8,
+           documentDate: memory.documentDate ? new Date(memory.documentDate) : null,
+           eventDate: memory.eventDate ? new Date(memory.eventDate) : null,
+           validFrom: /* @__PURE__ */ new Date(),
+           metadata: {
+             restoredFrom: shareId
+           }
+         }
+       });
+       memoriesRestored++;
+     } catch (error) {
+       console.error("Failed to restore memory:", error);
+     }
+   }
+   for (const msg of sharedContext.messages) {
+     try {
+       await db.message.create({
+         data: {
+           sessionId,
+           role: msg.role,
+           content: msg.content,
+           metadata: {
+             restoredFrom: shareId,
+             originalTimestamp: msg.createdAt
+           }
+         }
+       });
+     } catch (error) {
+       console.error("Failed to restore message:", error);
+     }
+   }
+   return {
+     sessionId,
+     memoriesRestored
+   };
+ }
+ async function cleanupExpiredContexts() {
+   const result = await db.$executeRaw`
+     DELETE FROM shared_contexts
+     WHERE expires_at IS NOT NULL
+       AND expires_at < NOW()
+   `;
+   return result;
+ }
+ async function listSharedContexts(params) {
+   const { userId, projectId, orgId, limit = 50 } = params;
+   const maxLimit = Math.min(limit, 100);
+   const whereClauses = [`org_id = ${orgId}`];
+   if (userId) whereClauses.push(`user_id = ${userId}`);
+   if (projectId) whereClauses.push(`project_id = ${projectId}`);
+   const results = await db.$queryRaw`
+     SELECT id, session_id, created_at, expires_at, access_count, share_data
+     FROM shared_contexts
+     WHERE ${db.Prisma.raw(whereClauses.join(" AND "))}
+     ORDER BY created_at DESC
+     LIMIT ${maxLimit}
+   `;
+   const baseUrl = process.env.BASE_URL || "http://localhost:4000";
+   return results.map((row) => ({
+     id: row.id,
+     shareUrl: `${baseUrl}/shared/${row.id}`,
+     createdAt: row.created_at,
+     expiresAt: row.expires_at,
+     accessCount: row.access_count,
+     metadata: row.share_data?.metadata || {}
+   }));
+ }
+ var SHARED_CONTEXTS_MIGRATION = `
+ CREATE TABLE IF NOT EXISTS shared_contexts (
+   id TEXT PRIMARY KEY,
+   session_id TEXT NOT NULL,
+   project_id TEXT NOT NULL,
+   org_id TEXT NOT NULL,
+   user_id TEXT,
+   share_data JSONB NOT NULL DEFAULT '{}',
+   expires_at TIMESTAMPTZ,
+   created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+   access_count INTEGER NOT NULL DEFAULT 0,
+   last_accessed_at TIMESTAMPTZ
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_shared_contexts_org ON shared_contexts(org_id);
+ CREATE INDEX IF NOT EXISTS idx_shared_contexts_user ON shared_contexts(user_id);
+ CREATE INDEX IF NOT EXISTS idx_shared_contexts_expires ON shared_contexts(expires_at);
+ CREATE INDEX IF NOT EXISTS idx_shared_contexts_session ON shared_contexts(session_id);
+ `;
+ export {
+   SHARED_CONTEXTS_MIGRATION,
+   cleanupExpiredContexts,
+   createSharedContext,
+   listSharedContexts,
+   loadSharedContext,
+   resumeFromSharedContext
+ };
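
Together these exports form a snapshot-and-restore loop for sessions. A minimal sketch of that flow, assuming the shared_contexts table has already been created from SHARED_CONTEXTS_MIGRATION; the chunk filename (this release ships several context-sharing-*.js variants) and all ids are placeholders:

// Hypothetical ESM flow; chunk path and ids are illustrative only.
import {
  createSharedContext,
  resumeFromSharedContext,
  cleanupExpiredContexts
} from "./context-sharing-4ITCNKG4.js";

// Snapshot a session's memories and messages behind a 12-char nanoid.
const share = await createSharedContext({
  sessionId: "sess_example", // must be an existing session, else this throws
  projectId: "proj_example", // placeholder id
  orgId: "org_example",      // placeholder id
  expiryDays: 7              // module default; 0 produces a non-expiring share
});
// share.shareUrl => `${BASE_URL}/shared/<shareId>`

// Later, possibly elsewhere: rebuild a fresh session from the snapshot.
// Memories and messages are re-created as new rows tagged restoredFrom.
const resumed = await resumeFromSharedContext({
  shareId: share.id,
  projectId: "proj_example",
  orgId: "org_example"
});
console.log(`Restored ${resumed.memoriesRestored} memories into ${resumed.sessionId}`);

// Periodic housekeeping: delete snapshots past their expires_at.
await cleanupExpiredContexts();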