@usewhisper/mcp-server 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +182 -154
  2. package/dist/autosubscribe-6EDKPBE2.js +4068 -4068
  3. package/dist/autosubscribe-GHO6YR5A.js +4068 -4068
  4. package/dist/autosubscribe-ISDETQIB.js +435 -435
  5. package/dist/chunk-3WGYBAYR.js +8387 -8387
  6. package/dist/chunk-52VJYCZ7.js +455 -455
  7. package/dist/chunk-5KBZQHDL.js +189 -189
  8. package/dist/chunk-5KIJNY6Z.js +370 -370
  9. package/dist/chunk-7SN3CKDK.js +1076 -1076
  10. package/dist/chunk-B3VWOHUA.js +271 -271
  11. package/dist/chunk-C57DHKTL.js +459 -459
  12. package/dist/chunk-EI5CE3EY.js +616 -616
  13. package/dist/chunk-FTWUJBAH.js +386 -386
  14. package/dist/chunk-H3HSKH2P.js +4841 -4841
  15. package/dist/chunk-JO3ORBZD.js +616 -616
  16. package/dist/chunk-L6DXSM2U.js +456 -456
  17. package/dist/chunk-LMEYV4JD.js +368 -368
  18. package/dist/chunk-MEFLJ4PV.js +8385 -8385
  19. package/dist/chunk-OBLI4FE4.js +275 -275
  20. package/dist/chunk-PPGYJJED.js +271 -271
  21. package/dist/chunk-QGM4M3NI.js +37 -37
  22. package/dist/chunk-T7KMSTWP.js +399 -399
  23. package/dist/chunk-TWEIYHI6.js +399 -399
  24. package/dist/chunk-UYWE7HSU.js +368 -368
  25. package/dist/chunk-X2DL2GWT.js +32 -32
  26. package/dist/chunk-X7HNNNJJ.js +1079 -1079
  27. package/dist/consolidation-2GCKI4RE.js +220 -220
  28. package/dist/consolidation-4JOPW6BG.js +220 -220
  29. package/dist/consolidation-FOVQTWNQ.js +222 -222
  30. package/dist/consolidation-IFQ52E44.js +209 -209
  31. package/dist/context-sharing-4ITCNKG4.js +307 -307
  32. package/dist/context-sharing-6CCFIAKL.js +275 -275
  33. package/dist/context-sharing-GYKLXHZA.js +307 -307
  34. package/dist/context-sharing-PH64JTXS.js +308 -308
  35. package/dist/context-sharing-Y6LTZZOF.js +307 -307
  36. package/dist/cost-optimization-6OIKRSBV.js +195 -195
  37. package/dist/cost-optimization-7DVSTL6R.js +307 -307
  38. package/dist/cost-optimization-BH5NAX33.js +286 -286
  39. package/dist/cost-optimization-F3L5BS5F.js +303 -303
  40. package/dist/ingest-2LPTWUUM.js +16 -16
  41. package/dist/ingest-7T5FAZNC.js +15 -15
  42. package/dist/ingest-EBNIE7XB.js +15 -15
  43. package/dist/ingest-FSHT5BCS.js +15 -15
  44. package/dist/ingest-QE2BTV72.js +14 -14
  45. package/dist/oracle-3RLQF3DP.js +259 -259
  46. package/dist/oracle-FKRTQUUG.js +282 -282
  47. package/dist/oracle-J47QCSEW.js +263 -263
  48. package/dist/oracle-MDP5MZRC.js +256 -256
  49. package/dist/search-BLVHWLWC.js +14 -14
  50. package/dist/search-CZ5NYL5B.js +12 -12
  51. package/dist/search-EG6TYWWW.js +13 -13
  52. package/dist/search-I22QQA7T.js +13 -13
  53. package/dist/search-T7H5G6DW.js +13 -13
  54. package/dist/server.d.ts +2 -2
  55. package/dist/server.js +1973 -169
  56. package/dist/server.js.map +1 -1
  57. package/package.json +51 -51
@@ -1,459 +1,459 @@
1
- import {
2
- detectRelations,
3
- extractEventDate,
4
- shouldInvalidateMemory
5
- } from "./chunk-5KIJNY6Z.js";
6
- import {
7
- db,
8
- embedSingle
9
- } from "./chunk-3WGYBAYR.js";
10
- import {
11
- Anthropic
12
- } from "./chunk-H3HSKH2P.js";
13
-
14
// ../src/engine/memory/extractor.ts

// Shared Anthropic client for LLM-based memory extraction.
// NOTE(review): falls back to an empty API key when ANTHROPIC_API_KEY is
// unset, so misconfiguration surfaces at request time, not at startup.
var anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY || ""
});

// System prompt for the extractor. Instructs the model to emit atomic,
// pronoun-free memories as a JSON array (schema appended per request in
// extractMemories). Do not edit casually: validateMemory's pronoun and
// vague-reference checks assume the model follows these rules.
var EXTRACTION_PROMPT = `You are an expert memory extraction system. Your job is to extract atomic, unambiguous memories from conversation chunks.

**Critical Rules:**
1. Each memory must be a SINGLE fact/preference/event/relationship
2. Resolve ALL pronouns (he/she/it/they/them) to actual names using context
3. Resolve ALL ambiguous references ("the company", "that project") to specific entities
4. Extract temporal information when events occurred (not when mentioned)
5. Be conservative - only extract high-confidence memories

**Memory Types:**
- factual: Objective facts ("John works at Google")
- preference: User preferences ("Sarah prefers dark mode")
- event: Events with timestamps ("Team met on Jan 15, 2024")
- relationship: Relationships ("Alex reports to Maria")
- opinion: Subjective views ("User thinks Python is easier than Rust")
- goal: Future intentions ("User wants to learn machine learning")
- instruction: Persistent instructions ("Always use formal tone with clients")

**Disambiguation:**
- Replace "he" \u2192 actual name using context
- Replace "she" \u2192 actual name using context
- Replace "it" \u2192 specific thing using context
- Replace "the company" \u2192 company name
- Replace "that project" \u2192 project name

**Example:**
Input: "He said he prefers using React. The project will launch next week."
Context: Previous message: "Alex joined the team yesterday. He's working on the dashboard project."

Bad Output:
- "He prefers React" \u274C (ambiguous)
- "The project launches next week" \u274C (what project?)

Good Output:
- content: "Alex prefers using React for development"
type: preference
entities: ["Alex", "React"]
eventDate: null

- content: "Dashboard project launch scheduled for [specific date if mentioned]"
type: event
entities: ["Dashboard project"]
eventDate: [calculated date] or null if not specific`;
61
/**
 * Ask Claude to extract atomic, pronoun-free memories from a text chunk.
 *
 * @param chunk - raw text to analyze
 * @param context - extraction context; must provide documentDate (Date);
 *   may provide previousMessages / entityContext (see buildContextString)
 * @returns array of { content, memoryType, entityMentions, eventDate,
 *   confidence, reasoning }; [] on any failure (API error, unparseable
 *   response) so callers can treat extraction as best-effort
 */
async function extractMemories(chunk, context) {
  const contextStr = buildContextString(context);
  const prompt = `${EXTRACTION_PROMPT}

${contextStr}

**Current chunk to analyze:**
${chunk}

**Document Date (when this was said):** ${context.documentDate.toISOString()}

Extract memories and return a JSON array. For each memory:
{
"content": "clear, unambiguous statement with no pronouns",
"memoryType": "factual|preference|event|relationship|opinion|goal|instruction",
"entityMentions": ["list", "of", "entities", "mentioned"],
"eventDate": "ISO date string or null",
"confidence": 0.0-1.0,
"reasoning": "brief explanation of extraction"
}

Return ONLY the JSON array, no other text.`;
  try {
    const response = await anthropic.messages.create({
      model: "claude-sonnet-4-5-20250929",
      // Fixed: was "claude-sonnet-4.5" (wrong format)
      max_tokens: 4096,
      temperature: 0,
      // Deterministic for extraction
      messages: [
        {
          role: "user",
          content: prompt
        }
      ]
    });
    const textContent = response.content.find((c) => c.type === "text");
    if (!textContent || textContent.type !== "text") {
      throw new Error("No text response from Claude");
    }
    const text = textContent.text.trim();
    // Accept either a fenced ```json block or a bare JSON array anywhere
    // in the response; fall back to parsing the whole text.
    const jsonMatch = text.match(/```json\n?([\s\S]*?)\n?```/) || text.match(/\[[\s\S]*\]/);
    const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text;
    const rawMemories = JSON.parse(jsonStr);
    if (!Array.isArray(rawMemories)) {
      console.error("Expected array of memories, got:", rawMemories);
      return [];
    }
    return rawMemories.map((m) => {
      // Fixed: `m.confidence || 0.7` silently promoted a legitimate
      // confidence of 0 to 0.7; only apply the default when the field
      // is missing or not numeric.
      const confidence = typeof m.confidence === "number" ? m.confidence : 0.7;
      // Fixed: guard against unparseable date strings producing an
      // Invalid Date object that would poison downstream comparisons.
      const parsedDate = m.eventDate ? new Date(m.eventDate) : null;
      const eventDate = parsedDate && !Number.isNaN(parsedDate.getTime()) ? parsedDate : null;
      return {
        content: m.content,
        memoryType: m.memoryType,
        entityMentions: m.entityMentions || [],
        eventDate,
        confidence,
        reasoning: m.reasoning
      };
    });
  } catch (error) {
    console.error("Memory extraction failed:", error);
    return [];
  }
}
122
/**
 * Render the extraction context (recent messages plus the pronoun map) as
 * a markdown-style string for the LLM prompt. Returns a fixed placeholder
 * when no context is available.
 */
function buildContextString(context) {
  const lines = [];
  const recent = context.previousMessages;
  if (recent && recent.length > 0) {
    lines.push("**Context from previous messages:**");
    // Only the last five messages are included to bound prompt size.
    lines.push(recent.slice(-5).join("\n"));
  }
  const entities = context.entityContext;
  if (entities && entities.size > 0) {
    lines.push("\n**Known entities:**");
    for (const [pronoun, name] of entities) {
      lines.push(`- "${pronoun}" refers to ${name}`);
    }
  }
  return lines.length === 0 ? "**Context:** None available" : lines.join("\n");
}
139
/**
 * Build a naive pronoun -> entity map from recent memories. Any mention
 * that looks like a capitalized proper name ("Alex", "Alex Smith") becomes
 * the referent for "he", "she", and "they"; later items in the input list
 * overwrite earlier ones.
 */
function buildEntityContext(recentMemories) {
  const pronounMap = new Map();
  const looksLikeProperName = /^[A-Z][a-z]+(?:\s[A-Z][a-z]+)*$/;
  const mentions = recentMemories.flatMap((memory) => memory.entityMentions);
  for (const candidate of mentions) {
    if (!looksLikeProperName.test(candidate)) {
      continue;
    }
    for (const pronoun of ["he", "she", "they"]) {
      pronounMap.set(pronoun, candidate);
    }
  }
  return pronounMap;
}
152
/**
 * Gatekeeper for extracted memories: reject low-confidence, trivially
 * short, or still-ambiguous content before it is persisted.
 *
 * @param memory - { confidence: number, content: string, ... }
 * @returns true when the memory is safe to store
 */
function validateMemory(memory) {
  if (memory.confidence < 0.6) {
    return false;
  }
  if (memory.content.length < 10) {
    return false;
  }
  // Reject unresolved third-person pronouns — the extractor is instructed
  // to resolve these to concrete names. Fixed: the original list had the
  // subject/possessive forms but omitted "him", "its", "hers", "theirs".
  const pronouns = /\b(he|she|it|him|its|hers|theirs|they|them|his|her|their)\b/i;
  if (pronouns.test(memory.content)) {
    console.warn("Memory contains unresolved pronouns:", memory.content);
    return false;
  }
  const vagueRefs = /\b(the company|that project|this thing|the system)\b/i;
  if (vagueRefs.test(memory.content)) {
    console.warn("Memory contains vague references:", memory.content);
    return false;
  }
  return true;
}
171
-
172
// ../src/engine/memory/ingest.ts

/**
 * Ingest a chat session: extract memories from the latest message,
 * persist them, detect relations against recent memories, and invalidate
 * superseded ones.
 *
 * Non-fatal problems are accumulated in `result.errors` rather than
 * thrown, so one bad memory does not abort the whole ingestion.
 *
 * @param params - { sessionId, projectId, orgId, userId, messages }
 * @returns { memoriesCreated, relationsCreated, memoriesInvalidated, errors }
 */
async function ingestSession(params) {
  const { sessionId, projectId, orgId, userId, messages } = params;
  const result = {
    memoriesCreated: 0,
    relationsCreated: 0,
    memoriesInvalidated: 0,
    errors: []
  };
  if (messages.length === 0) {
    return result;
  }
  try {
    // The last message is the one being ingested; everything before it is
    // prompt context only.
    const context = {
      sessionId,
      userId: userId || "unknown",
      projectId,
      orgId,
      documentDate: messages[messages.length - 1].timestamp,
      previousMessages: messages.slice(0, -1).map((m) => `${m.role}: ${m.content}`)
    };
    // Recent memories in this session seed the pronoun-resolution map.
    const recentMemories = await db.memory.findMany({
      where: {
        sessionId,
        projectId,
        isActive: true
      },
      orderBy: {
        createdAt: "desc"
      },
      take: 20,
      select: {
        content: true,
        entityMentions: true
      }
    });
    context.entityContext = buildEntityContext(recentMemories);
    const latestMessage = messages[messages.length - 1].content;
    const extractedMemories = await extractMemories(latestMessage, context);
    const validMemories = extractedMemories.filter(validateMemory);
    if (validMemories.length === 0) {
      return result;
    }
    // Candidate set for relation detection (contradiction/update links).
    const existingMemories = await db.memory.findMany({
      where: {
        projectId,
        userId,
        isActive: true
      },
      orderBy: {
        createdAt: "desc"
      },
      take: 100,
      // Check against last 100 memories
      select: {
        id: true,
        content: true,
        memoryType: true,
        entityMentions: true,
        documentDate: true
      }
    });
    for (const extracted of validMemories) {
      try {
        const relations = await detectRelations(
          {
            content: extracted.content,
            memoryType: extracted.memoryType,
            entityMentions: extracted.entityMentions
          },
          existingMemories
        );
        // Prefer the extractor's date; otherwise derive one from the text.
        const eventDate = extracted.eventDate || await extractEventDate(
          extracted.content,
          context.documentDate
        );
        const embedding = await embedSingle(extracted.content);
        const memory = await db.memory.create({
          data: {
            projectId,
            orgId,
            userId,
            sessionId,
            memoryType: extracted.memoryType,
            content: extracted.content,
            embedding,
            entityMentions: extracted.entityMentions,
            confidence: extracted.confidence,
            documentDate: context.documentDate,
            eventDate,
            validFrom: new Date(),
            metadata: {
              reasoning: extracted.reasoning,
              extractedFrom: "session_ingestion"
            }
          }
        });
        result.memoriesCreated++;
        for (const relation of relations) {
          try {
            await db.memoryRelation.create({
              data: {
                fromMemoryId: memory.id,
                toMemoryId: relation.toMemoryId,
                relationType: relation.relationType,
                confidence: relation.confidence,
                reasoning: relation.reasoning
              }
            });
            result.relationsCreated++;
            // Superseding relations (e.g. updates/contradictions) close out
            // the old memory and bump the new one's version.
            // NOTE(review): these are separate writes, not a transaction —
            // a crash mid-sequence can leave a half-invalidated memory.
            if (shouldInvalidateMemory(relation.relationType)) {
              await db.memory.update({
                where: { id: relation.toMemoryId },
                data: {
                  validUntil: new Date(),
                  supersededBy: memory.id
                }
              });
              const oldMemory = await db.memory.findUnique({
                where: { id: relation.toMemoryId },
                select: { version: true }
              });
              if (oldMemory) {
                await db.memory.update({
                  where: { id: memory.id },
                  data: { version: oldMemory.version + 1 }
                });
              }
              result.memoriesInvalidated++;
            }
          } catch (error) {
            result.errors.push(`Failed to create relation: ${error}`);
          }
        }
      } catch (error) {
        result.errors.push(`Failed to process memory: ${error}`);
      }
    }
    return result;
  } catch (error) {
    result.errors.push(`Ingestion failed: ${error}`);
    return result;
  }
}
316
/**
 * Extract memories from a single document chunk and persist them with
 * DOCUMENT scope. Errors are reported through the returned result rather
 * than thrown.
 *
 * @param params - { chunkId, chunkContent, projectId, orgId, documentDate, metadata }
 * @returns { memoriesCreated, relationsCreated, memoriesInvalidated, errors }
 */
async function ingestChunk(params) {
  const { chunkId, chunkContent, projectId, orgId, documentDate, metadata } = params;
  const outcome = {
    memoriesCreated: 0,
    relationsCreated: 0,
    memoriesInvalidated: 0,
    errors: []
  };
  try {
    // Chunks have no real session/user; a synthetic session id keeps the
    // extraction context shape uniform.
    const extractionContext = {
      sessionId: `chunk_${chunkId}`,
      userId: "system",
      projectId,
      orgId,
      documentDate
    };
    const candidates = await extractMemories(chunkContent, extractionContext);
    const accepted = candidates.filter(validateMemory);
    for (const candidate of accepted) {
      // Prefer the extractor's date; otherwise derive one from the text.
      const eventDate = candidate.eventDate || await extractEventDate(
        candidate.content,
        documentDate
      );
      const embedding = await embedSingle(candidate.content);
      await db.memory.create({
        data: {
          projectId,
          orgId,
          memoryType: candidate.memoryType,
          content: candidate.content,
          embedding,
          entityMentions: candidate.entityMentions,
          confidence: candidate.confidence,
          documentDate,
          eventDate,
          validFrom: new Date(),
          sourceChunkId: chunkId,
          scope: "DOCUMENT",
          // Document-level scope
          metadata: {
            ...metadata,
            reasoning: candidate.reasoning
          }
        }
      });
      outcome.memoriesCreated++;
    }
    return outcome;
  } catch (error) {
    outcome.errors.push(`Chunk ingestion failed: ${error}`);
    return outcome;
  }
}
369
/**
 * Ingest many chunks with bounded parallelism (10 concurrent ingestChunk
 * calls per wave) and merge the per-chunk results into one aggregate.
 *
 * @param params - { chunks, projectId, orgId, documentDate }
 * @returns aggregated { memoriesCreated, relationsCreated, memoriesInvalidated, errors }
 */
async function ingestChunksBatch(params) {
  const { chunks, projectId, orgId, documentDate } = params;
  const totals = {
    memoriesCreated: 0,
    relationsCreated: 0,
    memoriesInvalidated: 0,
    errors: []
  };
  const BATCH_SIZE = 10;
  for (let start = 0; start < chunks.length; start += BATCH_SIZE) {
    const wave = chunks.slice(start, start + BATCH_SIZE);
    const partials = await Promise.all(
      wave.map((chunk) =>
        ingestChunk({
          chunkId: chunk.id,
          chunkContent: chunk.content,
          projectId,
          orgId,
          documentDate,
          metadata: chunk.metadata
        })
      )
    );
    for (const partial of partials) {
      totals.memoriesCreated += partial.memoriesCreated;
      totals.relationsCreated += partial.relationsCreated;
      totals.memoriesInvalidated += partial.memoriesInvalidated;
      totals.errors.push(...partial.errors);
    }
  }
  return totals;
}
401
/**
 * Replace a memory's content via versioning: a new row supersedes the old
 * one and an "updates" relation links the two. The old row is closed out,
 * never deleted.
 *
 * @param params - { memoryId, newContent, reasoning }
 * @returns { newMemoryId, oldMemoryId }
 * @throws Error when the target memory does not exist
 */
async function updateMemory(params) {
  const { memoryId, newContent, reasoning } = params;
  const previous = await db.memory.findUnique({
    where: { id: memoryId }
  });
  if (!previous) {
    throw new Error("Memory not found");
  }
  const embedding = await embedSingle(newContent);
  // Carry everything forward except content/embedding; bump the version.
  const replacement = await db.memory.create({
    data: {
      projectId: previous.projectId,
      orgId: previous.orgId,
      userId: previous.userId,
      sessionId: previous.sessionId,
      memoryType: previous.memoryType,
      content: newContent,
      embedding,
      entityMentions: previous.entityMentions,
      confidence: previous.confidence,
      documentDate: previous.documentDate,
      eventDate: previous.eventDate,
      validFrom: new Date(),
      version: previous.version + 1,
      scope: previous.scope,
      metadata: {
        ...previous.metadata,
        updateReasoning: reasoning
      }
    }
  });
  // Close out the superseded row...
  await db.memory.update({
    where: { id: memoryId },
    data: {
      validUntil: new Date(),
      supersededBy: replacement.id
    }
  });
  // ...and record the lineage between the two versions.
  await db.memoryRelation.create({
    data: {
      fromMemoryId: replacement.id,
      toMemoryId: memoryId,
      relationType: "updates",
      confidence: 1,
      reasoning: reasoning || "Manual update"
    }
  });
  return {
    newMemoryId: replacement.id,
    oldMemoryId: memoryId
  };
}
453
-
454
// Public API of the memory ingestion module.
export {
  ingestSession,
  ingestChunk,
  ingestChunksBatch,
  updateMemory
};
1
+ import {
2
+ detectRelations,
3
+ extractEventDate,
4
+ shouldInvalidateMemory
5
+ } from "./chunk-5KIJNY6Z.js";
6
+ import {
7
+ db,
8
+ embedSingle
9
+ } from "./chunk-3WGYBAYR.js";
10
+ import {
11
+ Anthropic
12
+ } from "./chunk-H3HSKH2P.js";
13
+
14
// ../src/engine/memory/extractor.ts

// Shared Anthropic client for LLM-based memory extraction.
// NOTE(review): falls back to an empty API key when ANTHROPIC_API_KEY is
// unset, so misconfiguration surfaces at request time, not at startup.
var anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY || ""
});

// System prompt for the extractor. Instructs the model to emit atomic,
// pronoun-free memories as a JSON array (schema appended per request in
// extractMemories). Do not edit casually: validateMemory's pronoun and
// vague-reference checks assume the model follows these rules.
var EXTRACTION_PROMPT = `You are an expert memory extraction system. Your job is to extract atomic, unambiguous memories from conversation chunks.

**Critical Rules:**
1. Each memory must be a SINGLE fact/preference/event/relationship
2. Resolve ALL pronouns (he/she/it/they/them) to actual names using context
3. Resolve ALL ambiguous references ("the company", "that project") to specific entities
4. Extract temporal information when events occurred (not when mentioned)
5. Be conservative - only extract high-confidence memories

**Memory Types:**
- factual: Objective facts ("John works at Google")
- preference: User preferences ("Sarah prefers dark mode")
- event: Events with timestamps ("Team met on Jan 15, 2024")
- relationship: Relationships ("Alex reports to Maria")
- opinion: Subjective views ("User thinks Python is easier than Rust")
- goal: Future intentions ("User wants to learn machine learning")
- instruction: Persistent instructions ("Always use formal tone with clients")

**Disambiguation:**
- Replace "he" \u2192 actual name using context
- Replace "she" \u2192 actual name using context
- Replace "it" \u2192 specific thing using context
- Replace "the company" \u2192 company name
- Replace "that project" \u2192 project name

**Example:**
Input: "He said he prefers using React. The project will launch next week."
Context: Previous message: "Alex joined the team yesterday. He's working on the dashboard project."

Bad Output:
- "He prefers React" \u274C (ambiguous)
- "The project launches next week" \u274C (what project?)

Good Output:
- content: "Alex prefers using React for development"
type: preference
entities: ["Alex", "React"]
eventDate: null

- content: "Dashboard project launch scheduled for [specific date if mentioned]"
type: event
entities: ["Dashboard project"]
eventDate: [calculated date] or null if not specific`;
61
/**
 * Ask Claude to extract atomic, pronoun-free memories from a text chunk.
 *
 * @param chunk - raw text to analyze
 * @param context - extraction context; must provide documentDate (Date);
 *   may provide previousMessages / entityContext (see buildContextString)
 * @returns array of { content, memoryType, entityMentions, eventDate,
 *   confidence, reasoning }; [] on any failure (API error, unparseable
 *   response) so callers can treat extraction as best-effort
 */
async function extractMemories(chunk, context) {
  const contextStr = buildContextString(context);
  const prompt = `${EXTRACTION_PROMPT}

${contextStr}

**Current chunk to analyze:**
${chunk}

**Document Date (when this was said):** ${context.documentDate.toISOString()}

Extract memories and return a JSON array. For each memory:
{
"content": "clear, unambiguous statement with no pronouns",
"memoryType": "factual|preference|event|relationship|opinion|goal|instruction",
"entityMentions": ["list", "of", "entities", "mentioned"],
"eventDate": "ISO date string or null",
"confidence": 0.0-1.0,
"reasoning": "brief explanation of extraction"
}

Return ONLY the JSON array, no other text.`;
  try {
    const response = await anthropic.messages.create({
      model: "claude-sonnet-4-5-20250929",
      // Fixed: was "claude-sonnet-4.5" (wrong format)
      max_tokens: 4096,
      temperature: 0,
      // Deterministic for extraction
      messages: [
        {
          role: "user",
          content: prompt
        }
      ]
    });
    const textContent = response.content.find((c) => c.type === "text");
    if (!textContent || textContent.type !== "text") {
      throw new Error("No text response from Claude");
    }
    const text = textContent.text.trim();
    // Accept either a fenced ```json block or a bare JSON array anywhere
    // in the response; fall back to parsing the whole text.
    const jsonMatch = text.match(/```json\n?([\s\S]*?)\n?```/) || text.match(/\[[\s\S]*\]/);
    const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text;
    const rawMemories = JSON.parse(jsonStr);
    if (!Array.isArray(rawMemories)) {
      console.error("Expected array of memories, got:", rawMemories);
      return [];
    }
    return rawMemories.map((m) => {
      // Fixed: `m.confidence || 0.7` silently promoted a legitimate
      // confidence of 0 to 0.7; only apply the default when the field
      // is missing or not numeric.
      const confidence = typeof m.confidence === "number" ? m.confidence : 0.7;
      // Fixed: guard against unparseable date strings producing an
      // Invalid Date object that would poison downstream comparisons.
      const parsedDate = m.eventDate ? new Date(m.eventDate) : null;
      const eventDate = parsedDate && !Number.isNaN(parsedDate.getTime()) ? parsedDate : null;
      return {
        content: m.content,
        memoryType: m.memoryType,
        entityMentions: m.entityMentions || [],
        eventDate,
        confidence,
        reasoning: m.reasoning
      };
    });
  } catch (error) {
    console.error("Memory extraction failed:", error);
    return [];
  }
}
122
/**
 * Render the extraction context (recent messages plus the pronoun map) as
 * a markdown-style string for the LLM prompt. Returns a fixed placeholder
 * when no context is available.
 */
function buildContextString(context) {
  const lines = [];
  const recent = context.previousMessages;
  if (recent && recent.length > 0) {
    lines.push("**Context from previous messages:**");
    // Only the last five messages are included to bound prompt size.
    lines.push(recent.slice(-5).join("\n"));
  }
  const entities = context.entityContext;
  if (entities && entities.size > 0) {
    lines.push("\n**Known entities:**");
    for (const [pronoun, name] of entities) {
      lines.push(`- "${pronoun}" refers to ${name}`);
    }
  }
  return lines.length === 0 ? "**Context:** None available" : lines.join("\n");
}
139
/**
 * Build a naive pronoun -> entity map from recent memories. Any mention
 * that looks like a capitalized proper name ("Alex", "Alex Smith") becomes
 * the referent for "he", "she", and "they"; later items in the input list
 * overwrite earlier ones.
 */
function buildEntityContext(recentMemories) {
  const pronounMap = new Map();
  const looksLikeProperName = /^[A-Z][a-z]+(?:\s[A-Z][a-z]+)*$/;
  const mentions = recentMemories.flatMap((memory) => memory.entityMentions);
  for (const candidate of mentions) {
    if (!looksLikeProperName.test(candidate)) {
      continue;
    }
    for (const pronoun of ["he", "she", "they"]) {
      pronounMap.set(pronoun, candidate);
    }
  }
  return pronounMap;
}
152
/**
 * Gatekeeper for extracted memories: reject low-confidence, trivially
 * short, or still-ambiguous content before it is persisted.
 *
 * @param memory - { confidence: number, content: string, ... }
 * @returns true when the memory is safe to store
 */
function validateMemory(memory) {
  if (memory.confidence < 0.6) {
    return false;
  }
  if (memory.content.length < 10) {
    return false;
  }
  // Reject unresolved third-person pronouns — the extractor is instructed
  // to resolve these to concrete names. Fixed: the original list had the
  // subject/possessive forms but omitted "him", "its", "hers", "theirs".
  const pronouns = /\b(he|she|it|him|its|hers|theirs|they|them|his|her|their)\b/i;
  if (pronouns.test(memory.content)) {
    console.warn("Memory contains unresolved pronouns:", memory.content);
    return false;
  }
  const vagueRefs = /\b(the company|that project|this thing|the system)\b/i;
  if (vagueRefs.test(memory.content)) {
    console.warn("Memory contains vague references:", memory.content);
    return false;
  }
  return true;
}
171
+
172
// ../src/engine/memory/ingest.ts

/**
 * Ingest a chat session: extract memories from the latest message,
 * persist them, detect relations against recent memories, and invalidate
 * superseded ones.
 *
 * Non-fatal problems are accumulated in `result.errors` rather than
 * thrown, so one bad memory does not abort the whole ingestion.
 *
 * @param params - { sessionId, projectId, orgId, userId, messages }
 * @returns { memoriesCreated, relationsCreated, memoriesInvalidated, errors }
 */
async function ingestSession(params) {
  const { sessionId, projectId, orgId, userId, messages } = params;
  const result = {
    memoriesCreated: 0,
    relationsCreated: 0,
    memoriesInvalidated: 0,
    errors: []
  };
  if (messages.length === 0) {
    return result;
  }
  try {
    // The last message is the one being ingested; everything before it is
    // prompt context only.
    const context = {
      sessionId,
      userId: userId || "unknown",
      projectId,
      orgId,
      documentDate: messages[messages.length - 1].timestamp,
      previousMessages: messages.slice(0, -1).map((m) => `${m.role}: ${m.content}`)
    };
    // Recent memories in this session seed the pronoun-resolution map.
    const recentMemories = await db.memory.findMany({
      where: {
        sessionId,
        projectId,
        isActive: true
      },
      orderBy: {
        createdAt: "desc"
      },
      take: 20,
      select: {
        content: true,
        entityMentions: true
      }
    });
    context.entityContext = buildEntityContext(recentMemories);
    const latestMessage = messages[messages.length - 1].content;
    const extractedMemories = await extractMemories(latestMessage, context);
    const validMemories = extractedMemories.filter(validateMemory);
    if (validMemories.length === 0) {
      return result;
    }
    // Candidate set for relation detection (contradiction/update links).
    const existingMemories = await db.memory.findMany({
      where: {
        projectId,
        userId,
        isActive: true
      },
      orderBy: {
        createdAt: "desc"
      },
      take: 100,
      // Check against last 100 memories
      select: {
        id: true,
        content: true,
        memoryType: true,
        entityMentions: true,
        documentDate: true
      }
    });
    for (const extracted of validMemories) {
      try {
        const relations = await detectRelations(
          {
            content: extracted.content,
            memoryType: extracted.memoryType,
            entityMentions: extracted.entityMentions
          },
          existingMemories
        );
        // Prefer the extractor's date; otherwise derive one from the text.
        const eventDate = extracted.eventDate || await extractEventDate(
          extracted.content,
          context.documentDate
        );
        const embedding = await embedSingle(extracted.content);
        const memory = await db.memory.create({
          data: {
            projectId,
            orgId,
            userId,
            sessionId,
            memoryType: extracted.memoryType,
            content: extracted.content,
            embedding,
            entityMentions: extracted.entityMentions,
            confidence: extracted.confidence,
            documentDate: context.documentDate,
            eventDate,
            validFrom: new Date(),
            metadata: {
              reasoning: extracted.reasoning,
              extractedFrom: "session_ingestion"
            }
          }
        });
        result.memoriesCreated++;
        for (const relation of relations) {
          try {
            await db.memoryRelation.create({
              data: {
                fromMemoryId: memory.id,
                toMemoryId: relation.toMemoryId,
                relationType: relation.relationType,
                confidence: relation.confidence,
                reasoning: relation.reasoning
              }
            });
            result.relationsCreated++;
            // Superseding relations (e.g. updates/contradictions) close out
            // the old memory and bump the new one's version.
            // NOTE(review): these are separate writes, not a transaction —
            // a crash mid-sequence can leave a half-invalidated memory.
            if (shouldInvalidateMemory(relation.relationType)) {
              await db.memory.update({
                where: { id: relation.toMemoryId },
                data: {
                  validUntil: new Date(),
                  supersededBy: memory.id
                }
              });
              const oldMemory = await db.memory.findUnique({
                where: { id: relation.toMemoryId },
                select: { version: true }
              });
              if (oldMemory) {
                await db.memory.update({
                  where: { id: memory.id },
                  data: { version: oldMemory.version + 1 }
                });
              }
              result.memoriesInvalidated++;
            }
          } catch (error) {
            result.errors.push(`Failed to create relation: ${error}`);
          }
        }
      } catch (error) {
        result.errors.push(`Failed to process memory: ${error}`);
      }
    }
    return result;
  } catch (error) {
    result.errors.push(`Ingestion failed: ${error}`);
    return result;
  }
}
316
/**
 * Extract memories from a single document chunk and persist them with
 * DOCUMENT scope. Errors are reported through the returned result rather
 * than thrown.
 *
 * @param params - { chunkId, chunkContent, projectId, orgId, documentDate, metadata }
 * @returns { memoriesCreated, relationsCreated, memoriesInvalidated, errors }
 */
async function ingestChunk(params) {
  const { chunkId, chunkContent, projectId, orgId, documentDate, metadata } = params;
  const outcome = {
    memoriesCreated: 0,
    relationsCreated: 0,
    memoriesInvalidated: 0,
    errors: []
  };
  try {
    // Chunks have no real session/user; a synthetic session id keeps the
    // extraction context shape uniform.
    const extractionContext = {
      sessionId: `chunk_${chunkId}`,
      userId: "system",
      projectId,
      orgId,
      documentDate
    };
    const candidates = await extractMemories(chunkContent, extractionContext);
    const accepted = candidates.filter(validateMemory);
    for (const candidate of accepted) {
      // Prefer the extractor's date; otherwise derive one from the text.
      const eventDate = candidate.eventDate || await extractEventDate(
        candidate.content,
        documentDate
      );
      const embedding = await embedSingle(candidate.content);
      await db.memory.create({
        data: {
          projectId,
          orgId,
          memoryType: candidate.memoryType,
          content: candidate.content,
          embedding,
          entityMentions: candidate.entityMentions,
          confidence: candidate.confidence,
          documentDate,
          eventDate,
          validFrom: new Date(),
          sourceChunkId: chunkId,
          scope: "DOCUMENT",
          // Document-level scope
          metadata: {
            ...metadata,
            reasoning: candidate.reasoning
          }
        }
      });
      outcome.memoriesCreated++;
    }
    return outcome;
  } catch (error) {
    outcome.errors.push(`Chunk ingestion failed: ${error}`);
    return outcome;
  }
}
369
/**
 * Ingest many chunks with bounded parallelism (10 concurrent ingestChunk
 * calls per wave) and merge the per-chunk results into one aggregate.
 *
 * @param params - { chunks, projectId, orgId, documentDate }
 * @returns aggregated { memoriesCreated, relationsCreated, memoriesInvalidated, errors }
 */
async function ingestChunksBatch(params) {
  const { chunks, projectId, orgId, documentDate } = params;
  const totals = {
    memoriesCreated: 0,
    relationsCreated: 0,
    memoriesInvalidated: 0,
    errors: []
  };
  const BATCH_SIZE = 10;
  for (let start = 0; start < chunks.length; start += BATCH_SIZE) {
    const wave = chunks.slice(start, start + BATCH_SIZE);
    const partials = await Promise.all(
      wave.map((chunk) =>
        ingestChunk({
          chunkId: chunk.id,
          chunkContent: chunk.content,
          projectId,
          orgId,
          documentDate,
          metadata: chunk.metadata
        })
      )
    );
    for (const partial of partials) {
      totals.memoriesCreated += partial.memoriesCreated;
      totals.relationsCreated += partial.relationsCreated;
      totals.memoriesInvalidated += partial.memoriesInvalidated;
      totals.errors.push(...partial.errors);
    }
  }
  return totals;
}
401
/**
 * Replace a memory's content via versioning: a new row supersedes the old
 * one and an "updates" relation links the two. The old row is closed out,
 * never deleted.
 *
 * @param params - { memoryId, newContent, reasoning }
 * @returns { newMemoryId, oldMemoryId }
 * @throws Error when the target memory does not exist
 */
async function updateMemory(params) {
  const { memoryId, newContent, reasoning } = params;
  const previous = await db.memory.findUnique({
    where: { id: memoryId }
  });
  if (!previous) {
    throw new Error("Memory not found");
  }
  const embedding = await embedSingle(newContent);
  // Carry everything forward except content/embedding; bump the version.
  const replacement = await db.memory.create({
    data: {
      projectId: previous.projectId,
      orgId: previous.orgId,
      userId: previous.userId,
      sessionId: previous.sessionId,
      memoryType: previous.memoryType,
      content: newContent,
      embedding,
      entityMentions: previous.entityMentions,
      confidence: previous.confidence,
      documentDate: previous.documentDate,
      eventDate: previous.eventDate,
      validFrom: new Date(),
      version: previous.version + 1,
      scope: previous.scope,
      metadata: {
        ...previous.metadata,
        updateReasoning: reasoning
      }
    }
  });
  // Close out the superseded row...
  await db.memory.update({
    where: { id: memoryId },
    data: {
      validUntil: new Date(),
      supersededBy: replacement.id
    }
  });
  // ...and record the lineage between the two versions.
  await db.memoryRelation.create({
    data: {
      fromMemoryId: replacement.id,
      toMemoryId: memoryId,
      relationType: "updates",
      confidence: 1,
      reasoning: reasoning || "Manual update"
    }
  });
  return {
    newMemoryId: replacement.id,
    oldMemoryId: memoryId
  };
}
453
+
454
// Public API of the memory ingestion module.
export {
  ingestSession,
  ingestChunk,
  ingestChunksBatch,
  updateMemory
};