@usewhisper/mcp-server 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +26 -24
- package/dist/autosubscribe-6EDKPBE2.js +4068 -0
- package/dist/autosubscribe-GHO6YR5A.js +4068 -0
- package/dist/autosubscribe-ISDETQIB.js +436 -0
- package/dist/autosubscribe-ISDETQIB.js.map +1 -0
- package/dist/chunk-3WGYBAYR.js +8387 -0
- package/dist/chunk-52VJYCZ7.js +455 -0
- package/dist/chunk-5KBZQHDL.js +189 -0
- package/dist/chunk-5KIJNY6Z.js +370 -0
- package/dist/chunk-7SN3CKDK.js +1076 -0
- package/dist/chunk-B3VWOHUA.js +271 -0
- package/dist/chunk-C57DHKTL.js +459 -0
- package/dist/chunk-EI5CE3EY.js +616 -0
- package/dist/chunk-FTWUJBAH.js +387 -0
- package/dist/chunk-FTWUJBAH.js.map +1 -0
- package/dist/chunk-H3HSKH2P.js +4841 -0
- package/dist/chunk-JO3ORBZD.js +616 -0
- package/dist/chunk-L6DXSM2U.js +457 -0
- package/dist/chunk-L6DXSM2U.js.map +1 -0
- package/dist/chunk-LMEYV4JD.js +368 -0
- package/dist/chunk-MEFLJ4PV.js +8385 -0
- package/dist/chunk-OBLI4FE4.js +276 -0
- package/dist/chunk-OBLI4FE4.js.map +1 -0
- package/dist/chunk-PPGYJJED.js +271 -0
- package/dist/chunk-QGM4M3NI.js +37 -0
- package/dist/chunk-T7KMSTWP.js +399 -0
- package/dist/chunk-TWEIYHI6.js +399 -0
- package/dist/chunk-UYWE7HSU.js +369 -0
- package/dist/chunk-UYWE7HSU.js.map +1 -0
- package/dist/chunk-X2DL2GWT.js +33 -0
- package/dist/chunk-X2DL2GWT.js.map +1 -0
- package/dist/chunk-X7HNNNJJ.js +1079 -0
- package/dist/consolidation-2GCKI4RE.js +220 -0
- package/dist/consolidation-4JOPW6BG.js +220 -0
- package/dist/consolidation-FOVQTWNQ.js +222 -0
- package/dist/consolidation-IFQ52E44.js +210 -0
- package/dist/consolidation-IFQ52E44.js.map +1 -0
- package/dist/context-sharing-4ITCNKG4.js +307 -0
- package/dist/context-sharing-6CCFIAKL.js +276 -0
- package/dist/context-sharing-6CCFIAKL.js.map +1 -0
- package/dist/context-sharing-GYKLXHZA.js +307 -0
- package/dist/context-sharing-PH64JTXS.js +308 -0
- package/dist/context-sharing-Y6LTZZOF.js +307 -0
- package/dist/cost-optimization-6OIKRSBV.js +196 -0
- package/dist/cost-optimization-6OIKRSBV.js.map +1 -0
- package/dist/cost-optimization-7DVSTL6R.js +307 -0
- package/dist/cost-optimization-BH5NAX33.js +287 -0
- package/dist/cost-optimization-BH5NAX33.js.map +1 -0
- package/dist/cost-optimization-F3L5BS5F.js +303 -0
- package/dist/ingest-2LPTWUUM.js +16 -0
- package/dist/ingest-7T5FAZNC.js +15 -0
- package/dist/ingest-EBNIE7XB.js +15 -0
- package/dist/ingest-FSHT5BCS.js +15 -0
- package/dist/ingest-QE2BTV72.js +15 -0
- package/dist/ingest-QE2BTV72.js.map +1 -0
- package/dist/oracle-3RLQF3DP.js +259 -0
- package/dist/oracle-FKRTQUUG.js +282 -0
- package/dist/oracle-J47QCSEW.js +263 -0
- package/dist/oracle-MDP5MZRC.js +257 -0
- package/dist/oracle-MDP5MZRC.js.map +1 -0
- package/dist/search-BLVHWLWC.js +14 -0
- package/dist/search-CZ5NYL5B.js +13 -0
- package/dist/search-CZ5NYL5B.js.map +1 -0
- package/dist/search-EG6TYWWW.js +13 -0
- package/dist/search-I22QQA7T.js +13 -0
- package/dist/search-T7H5G6DW.js +13 -0
- package/dist/server.d.ts +2 -0
- package/dist/server.js +914 -1503
- package/dist/server.js.map +1 -1
- package/package.json +6 -7
|
@@ -0,0 +1,369 @@
|
|
|
1
|
+
// src/engine/memory/temporal.ts
|
|
2
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
3
|
+
// Module-level Anthropic client shared by the temporal helpers below
// (parseTemporalQuery, extractEventDate).
// Falls back to an empty API key so module load never throws; individual
// requests will fail with an auth error instead.
var anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY || ""
});
|
|
6
|
+
var TEMPORAL_PARSING_PROMPT = `You are an expert temporal query parser. Extract temporal constraints from user queries.
|
|
7
|
+
|
|
8
|
+
**Your job:**
|
|
9
|
+
1. Identify if the query has temporal constraints
|
|
10
|
+
2. Extract relative time references (today, yesterday, last week, etc.)
|
|
11
|
+
3. Extract absolute dates if mentioned
|
|
12
|
+
4. Calculate date ranges if applicable
|
|
13
|
+
|
|
14
|
+
**Relative Terms:**
|
|
15
|
+
- "today" \u2192 filter to documentDate = questionDate
|
|
16
|
+
- "yesterday" \u2192 documentDate = questionDate - 1 day
|
|
17
|
+
- "last week" \u2192 documentDate in range [questionDate - 7 days, questionDate]
|
|
18
|
+
- "last month" \u2192 documentDate in range [questionDate - 30 days, questionDate]
|
|
19
|
+
- "last year" \u2192 documentDate in range [questionDate - 365 days, questionDate]
|
|
20
|
+
- "this week" \u2192 current week
|
|
21
|
+
- "this month" \u2192 current month
|
|
22
|
+
|
|
23
|
+
**Examples:**
|
|
24
|
+
- "What did I say about vacation yesterday?" \u2192 relative: "yesterday"
|
|
25
|
+
- "Tell me about meetings last week" \u2192 relative: "last_week"
|
|
26
|
+
- "What happened on January 15?" \u2192 absoluteDate: "2024-01-15"
|
|
27
|
+
- "Show me everything from last month" \u2192 relative: "last_month"
|
|
28
|
+
- "What's my favorite color?" \u2192 no temporal constraint`;
|
|
29
|
+
// Parse temporal constraints ("yesterday", "last week", explicit dates) out of
// a natural-language query via an LLM call, then normalize the result into
// concrete Date objects.
//
// query        - the user's natural-language question
// questionDate - when the question was asked; anchor for relative terms
//
// Returns an object shaped like { hasTemporalConstraint, relative?,
// absoluteDate?, dateRange? }. On any failure (API error, non-JSON reply) it
// degrades to { hasTemporalConstraint: false } rather than throwing.
async function parseTemporalQuery(query, questionDate) {
  const prompt = `${TEMPORAL_PARSING_PROMPT}

**Query:** "${query}"
**Question asked on:** ${questionDate.toISOString()}

Extract temporal information and return JSON:
{
  "hasTemporalConstraint": boolean,
  "relative": "today|yesterday|last_week|last_month|last_year|this_week|this_month|null",
  "absoluteDate": "ISO date string or null",
  "dateRange": { "start": "ISO", "end": "ISO" } or null
}

Return ONLY the JSON, no other text.`;
  try {
    const response = await anthropic.messages.create({
      model: "claude-haiku-4.5",
      // Faster model for parsing
      max_tokens: 512,
      temperature: 0,
      messages: [{ role: "user", content: prompt }]
    });
    // The SDK returns a list of content blocks; only the text block matters.
    const textContent = response.content.find((c) => c.type === "text");
    if (!textContent || textContent.type !== "text") {
      return { hasTemporalConstraint: false };
    }
    const text = textContent.text.trim();
    // Accept either a ```json fenced block or a bare {...} object in the reply.
    const jsonMatch = text.match(/```json\n?([\s\S]*?)\n?```/) || text.match(/\{[\s\S]*\}/);
    const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text;
    const parsed = JSON.parse(jsonStr);
    if (parsed.relative) {
      // A relative keyword wins: compute a concrete window anchored at
      // questionDate, and expose its start as the absolute date.
      const range = calculateRelativeDateRange(parsed.relative, questionDate);
      parsed.dateRange = range;
      parsed.absoluteDate = range.start;
    } else if (parsed.absoluteDate) {
      parsed.absoluteDate = new Date(parsed.absoluteDate);
    }
    if (parsed.dateRange) {
      // Normalize range endpoints to Date objects (a no-op when the branch
      // above already produced Dates — new Date(Date) copies).
      parsed.dateRange = {
        start: new Date(parsed.dateRange.start),
        end: new Date(parsed.dateRange.end)
      };
    }
    return parsed;
  } catch (error) {
    // Best-effort by design: log and fall back to "no temporal constraint".
    console.error("Temporal parsing failed:", error);
    return { hasTemporalConstraint: false };
  }
}
|
|
79
|
+
// Translate a relative time keyword ("today", "last_week", ...) into a
// concrete { start, end } window anchored at `from`. Unrecognized keywords
// yield an effectively unbounded range (years 1970..2100) so callers apply
// no real constraint.
function calculateRelativeDateRange(relative, from) {
  const start = new Date(from);
  const end = new Date(from);
  // Clamp a date to the very beginning / very end of its calendar day.
  const toDayStart = (d) => d.setHours(0, 0, 0, 0);
  const toDayEnd = (d) => d.setHours(23, 59, 59, 999);
  if (relative === "today") {
    toDayStart(start);
    toDayEnd(end);
  } else if (relative === "yesterday") {
    start.setDate(start.getDate() - 1);
    toDayStart(start);
    end.setDate(end.getDate() - 1);
    toDayEnd(end);
  } else if (relative === "last_week") {
    // Rolling 7-day window ending today (not the previous calendar week).
    start.setDate(start.getDate() - 7);
    toDayStart(start);
    toDayEnd(end);
  } else if (relative === "this_week") {
    // Week starts on Monday; Sunday (getDay() === 0) rolls back six days.
    const weekday = start.getDay();
    start.setDate(start.getDate() + (weekday === 0 ? -6 : 1 - weekday));
    toDayStart(start);
    toDayEnd(end);
  } else if (relative === "last_month") {
    // Approximated as a rolling 30-day window (matches the parser prompt).
    start.setDate(start.getDate() - 30);
    toDayStart(start);
    toDayEnd(end);
  } else if (relative === "this_month") {
    start.setDate(1);
    toDayStart(start);
    toDayEnd(end);
  } else if (relative === "last_year") {
    start.setFullYear(start.getFullYear() - 1);
    toDayStart(start);
    toDayEnd(end);
  } else {
    // Unknown keyword: widen to a practically unbounded range.
    start.setFullYear(1970);
    end.setFullYear(2100);
  }
  return { start, end };
}
|
|
126
|
+
// Use an LLM to infer when the event described in `memoryContent` actually
// occurred (eventDate), as distinct from when it was recorded (documentDate).
//
// Returns a Date when the model reports a concrete event date, otherwise null
// — including on any API or JSON-parse failure; this helper never throws.
async function extractEventDate(memoryContent, documentDate) {
  const prompt = `Extract the event date from this memory.

**Important distinction:**
- documentDate: When this was said/written
- eventDate: When the event actually occurred/will occur

**Memory:** "${memoryContent}"
**Document Date (when this was said):** ${documentDate.toISOString()}

**Examples:**
- "User said they have a meeting tomorrow" \u2192 eventDate = documentDate + 1 day
- "User attended conference on Jan 15" \u2192 eventDate = Jan 15 of appropriate year
- "User's favorite color is blue" \u2192 eventDate = null (no event, just a fact)
- "Meeting happened yesterday" \u2192 eventDate = documentDate - 1 day

Return JSON:
{
  "hasEvent": boolean,
  "eventDate": "ISO date string or null",
  "reasoning": "brief explanation"
}`;
  try {
    const response = await anthropic.messages.create({
      model: "claude-haiku-4.5",
      max_tokens: 256,
      temperature: 0,
      messages: [{ role: "user", content: prompt }]
    });
    // Only the text content block is usable; anything else means no answer.
    const textContent = response.content.find((c) => c.type === "text");
    if (!textContent || textContent.type !== "text") {
      return null;
    }
    const text = textContent.text.trim();
    // Accept either a ```json fenced block or a bare {...} object in the reply.
    const jsonMatch = text.match(/```json\n?([\s\S]*?)\n?```/) || text.match(/\{[\s\S]*\}/);
    const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text;
    const result = JSON.parse(jsonStr);
    if (result.hasEvent && result.eventDate) {
      return new Date(result.eventDate);
    }
    return null;
  } catch (error) {
    // Best-effort by design: log and report "no event date".
    console.error("Event date extraction failed:", error);
    return null;
  }
}
|
|
172
|
+
// Exponential recency score in (0, 1]: a memory dated exactly at the question
// scores 1 and decays by a factor of e^(-decayFactor) per day of separation,
// in either direction (past or future).
function calculateTemporalRelevance(memoryDate, questionDate, decayFactor = 0.1) {
  const msPerDay = 1e3 * 60 * 60 * 24;
  const elapsedDays = Math.abs(questionDate.getTime() - memoryDate.getTime()) / msPerDay;
  return Math.exp(-decayFactor * elapsedDays);
}
|
|
179
|
+
|
|
180
|
+
// src/engine/memory/relations.ts
|
|
181
|
+
import Anthropic2 from "@anthropic-ai/sdk";
|
|
182
|
+
// Separate Anthropic client for relation detection (the bundler kept the two
// source modules' clients distinct: anthropic for temporal.ts, anthropic2 for
// relations.ts). Empty-key fallback keeps module load from throwing.
var anthropic2 = new Anthropic2({
  apiKey: process.env.ANTHROPIC_API_KEY || ""
});
|
|
185
|
+
var RELATION_DETECTION_PROMPT = `You are an expert at detecting relationships between memories in a knowledge graph.
|
|
186
|
+
|
|
187
|
+
**Relation Types:**
|
|
188
|
+
|
|
189
|
+
1. **updates** - New memory supersedes/replaces old memory (state mutation)
|
|
190
|
+
Example:
|
|
191
|
+
- Old: "User's favorite color is blue"
|
|
192
|
+
- New: "User's favorite color is green"
|
|
193
|
+
- Relation: updates (green replaces blue)
|
|
194
|
+
|
|
195
|
+
2. **extends** - New memory adds detail to existing memory without contradiction (refinement)
|
|
196
|
+
Example:
|
|
197
|
+
- Old: "John works at Google"
|
|
198
|
+
- New: "John works at Google as a Senior Engineer"
|
|
199
|
+
- Relation: extends (adds job title)
|
|
200
|
+
|
|
201
|
+
3. **derives** - New memory is inferred from existing memory/memories (inference)
|
|
202
|
+
Example:
|
|
203
|
+
- Memory 1: "User prefers dark mode"
|
|
204
|
+
- Memory 2: "User prefers high contrast"
|
|
205
|
+
- New: "User likely has vision preferences for accessibility"
|
|
206
|
+
- Relation: derives (inferred from both)
|
|
207
|
+
|
|
208
|
+
4. **contradicts** - New memory conflicts with existing memory (conflict detection)
|
|
209
|
+
Example:
|
|
210
|
+
- Old: "Meeting scheduled for 3pm"
|
|
211
|
+
- New: "Meeting scheduled for 4pm"
|
|
212
|
+
- Relation: contradicts (should trigger update)
|
|
213
|
+
|
|
214
|
+
5. **supports** - New memory provides evidence/support for existing memory
|
|
215
|
+
Example:
|
|
216
|
+
- Memory 1: "User is interested in ML"
|
|
217
|
+
- New: "User enrolled in ML course"
|
|
218
|
+
- Relation: supports (confirms interest)
|
|
219
|
+
|
|
220
|
+
**Important:**
|
|
221
|
+
- Only detect relations when there's a clear, meaningful connection
|
|
222
|
+
- Be conservative - if unsure, don't create a relation
|
|
223
|
+
- "updates" should invalidate the old memory (set validUntil)
|
|
224
|
+
- "extends" keeps the old memory valid but adds information
|
|
225
|
+
- "contradicts" should flag for review/resolution`;
|
|
226
|
+
// Detect knowledge-graph relations (updates/extends/derives/contradicts/
// supports) between a new memory and previously stored ones via an LLM pass
// over a locally pre-filtered candidate set.
//
// newMemory        - { content, memoryType, entityMentions }
// existingMemories - stored memories: { id, content, memoryType,
//                    entityMentions, documentDate }
//
// Returns an array of { toMemoryId, relationType, confidence, reasoning },
// keeping only high-confidence (>= 0.7) relations with a valid target index.
// Never throws: API or parse failures degrade to [].
async function detectRelations(newMemory, existingMemories) {
  if (existingMemories.length === 0) {
    return [];
  }
  // Cheap local pre-filter keeps the prompt small and reduces hallucinations.
  const relevantMemories = filterRelevantMemories(newMemory, existingMemories);
  if (relevantMemories.length === 0) {
    return [];
  }
  const prompt = `${RELATION_DETECTION_PROMPT}

**New memory:**
"${newMemory.content}"
Type: ${newMemory.memoryType}
Entities: ${newMemory.entityMentions.join(", ")}

**Existing memories to check against:**
${relevantMemories.map((m, i) => `${i}. "${m.content}" (Type: ${m.memoryType}, Date: ${m.documentDate?.toISOString() || "unknown"})`).join("\n")}

Analyze if the new memory relates to any existing memories.

Return a JSON array of relations:
[{
  "toMemoryIndex": 0,
  "relationType": "updates|extends|derives|contradicts|supports",
  "confidence": 0.0-1.0,
  "reasoning": "brief explanation why this relation exists"
}]

Return ONLY the JSON array. If no relations found, return [].`;
  try {
    const response = await anthropic2.messages.create({
      model: "claude-sonnet-4.5",
      max_tokens: 2048,
      temperature: 0,
      messages: [{ role: "user", content: prompt }]
    });
    const textContent = response.content.find((c) => c.type === "text");
    if (!textContent || textContent.type !== "text") {
      return [];
    }
    const text = textContent.text.trim();
    // Accept either a ```json fenced block or a bare [...] array in the reply.
    const jsonMatch = text.match(/```json\n?([\s\S]*?)\n?```/) || text.match(/\[[\s\S]*\]/);
    const jsonStr = jsonMatch ? jsonMatch[1] || jsonMatch[0] : text;
    const relations = JSON.parse(jsonStr);
    if (!Array.isArray(relations)) {
      return [];
    }
    // FIX: the model occasionally returns an out-of-range or non-integer
    // toMemoryIndex; previously relevantMemories[r.toMemoryIndex].id threw
    // inside this try, the catch below swallowed it, and ALL relations
    // (valid ones included) were discarded. Validate each entry so one bad
    // index no longer wipes out the whole result.
    const isUsable = (r) => r && typeof r.confidence === "number" && r.confidence >= 0.7 && Number.isInteger(r.toMemoryIndex) && r.toMemoryIndex >= 0 && r.toMemoryIndex < relevantMemories.length;
    return relations.filter(isUsable).map((r) => ({
      toMemoryId: relevantMemories[r.toMemoryIndex].id,
      relationType: r.relationType,
      confidence: r.confidence,
      reasoning: r.reasoning
    }));
  } catch (error) {
    // Best-effort by design: log and report "no relations".
    console.error("Relation detection failed:", error);
    return [];
  }
}
|
|
284
|
+
// Keep only stored memories plausibly related to the new one: either they
// mention a common entity, or (fallback) their contents share at least two
// significant keywords. Used to shrink the LLM prompt in detectRelations.
function filterRelevantMemories(newMemory, existingMemories) {
  const newEntities = new Set(newMemory.entityMentions);
  return existingMemories.filter((candidate) => {
    // Entity overlap is the strongest (and cheapest) relatedness signal.
    if (candidate.entityMentions.some((entity) => newEntities.has(entity))) {
      return true;
    }
    // Fallback: crude keyword-overlap check on the raw contents.
    const candidateWords = new Set(extractKeywords(candidate.content));
    const shared = extractKeywords(newMemory.content).filter((w) => candidateWords.has(w));
    return shared.length >= 2;
  });
}
|
|
298
|
+
// Crude keyword extraction: lowercase, split on non-word characters, drop
// short words (<= 3 chars) and common stop words, and keep at most the first
// ten survivors in document order.
function extractKeywords(text) {
  // Function words that carry no topical signal.
  const stopWords = new Set(
    "the a an is are was were be been being have has had do does did will would could should may might must can to of in for on at by from with about".split(" ")
  );
  const keywords = [];
  for (const word of text.toLowerCase().split(/\W+/)) {
    if (keywords.length === 10) {
      break; // cap reached — same result as .filter(...).slice(0, 10)
    }
    if (word.length > 3 && !stopWords.has(word)) {
      keywords.push(word);
    }
  }
  return keywords;
}
|
|
337
|
+
// Relations that supersede or conflict with the older memory should close out
// its validity window (set validUntil) rather than leave both versions active.
function shouldInvalidateMemory(relationType) {
  return ["updates", "contradicts"].includes(relationType);
}
|
|
340
|
+
// Build a bidirectional adjacency list from a flat relation list. Every edge
// is mirrored with an `inverse_`-prefixed relation type so the graph can be
// traversed from either endpoint.
function buildRelationGraph(relations) {
  const graph = new Map();
  // Append an edge to fromId's adjacency list, creating the list on demand.
  const addEdge = (fromId, toId, relationType) => {
    const edges = graph.get(fromId) ?? [];
    edges.push({ memoryId: toId, relationType });
    graph.set(fromId, edges);
  };
  for (const { fromMemoryId, toMemoryId, relationType } of relations) {
    addEdge(fromMemoryId, toMemoryId, relationType);
    addEdge(toMemoryId, fromMemoryId, `inverse_${relationType}`);
  }
  return graph;
}
|
|
360
|
+
|
|
361
|
+
export {
|
|
362
|
+
parseTemporalQuery,
|
|
363
|
+
extractEventDate,
|
|
364
|
+
calculateTemporalRelevance,
|
|
365
|
+
detectRelations,
|
|
366
|
+
shouldInvalidateMemory,
|
|
367
|
+
buildRelationGraph
|
|
368
|
+
};
|
|
369
|
+
//# sourceMappingURL=chunk-UYWE7HSU.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/engine/memory/temporal.ts","../../src/engine/memory/relations.ts"],"sourcesContent":["/**\n * SOTA Temporal Reasoning System\n * Handles temporal queries, relative dates, and event timeline reasoning\n * Key differentiator vs competitors (76.69% on LongMemEval temporal reasoning)\n */\n\nimport Anthropic from \"@anthropic-ai/sdk\";\nimport type { TemporalFilter } from \"./types.js\";\n\nconst anthropic = new Anthropic({\n apiKey: process.env.ANTHROPIC_API_KEY || \"\",\n});\n\nconst TEMPORAL_PARSING_PROMPT = `You are an expert temporal query parser. Extract temporal constraints from user queries.\n\n**Your job:**\n1. Identify if the query has temporal constraints\n2. Extract relative time references (today, yesterday, last week, etc.)\n3. Extract absolute dates if mentioned\n4. Calculate date ranges if applicable\n\n**Relative Terms:**\n- \"today\" → filter to documentDate = questionDate\n- \"yesterday\" → documentDate = questionDate - 1 day\n- \"last week\" → documentDate in range [questionDate - 7 days, questionDate]\n- \"last month\" → documentDate in range [questionDate - 30 days, questionDate]\n- \"last year\" → documentDate in range [questionDate - 365 days, questionDate]\n- \"this week\" → current week\n- \"this month\" → current month\n\n**Examples:**\n- \"What did I say about vacation yesterday?\" → relative: \"yesterday\"\n- \"Tell me about meetings last week\" → relative: \"last_week\"\n- \"What happened on January 15?\" → absoluteDate: \"2024-01-15\"\n- \"Show me everything from last month\" → relative: \"last_month\"\n- \"What's my favorite color?\" → no temporal constraint`;\n\nexport async function parseTemporalQuery(\n query: string,\n questionDate: Date\n): Promise<TemporalFilter> {\n const prompt = `${TEMPORAL_PARSING_PROMPT}\n\n**Query:** \"${query}\"\n**Question asked on:** ${questionDate.toISOString()}\n\nExtract temporal information and return JSON:\n{\n \"hasTemporalConstraint\": boolean,\n \"relative\": 
\"today|yesterday|last_week|last_month|last_year|this_week|this_month|null\",\n \"absoluteDate\": \"ISO date string or null\",\n \"dateRange\": { \"start\": \"ISO\", \"end\": \"ISO\" } or null\n}\n\nReturn ONLY the JSON, no other text.`;\n\n try {\n const response = await anthropic.messages.create({\n model: \"claude-haiku-4.5\", // Faster model for parsing\n max_tokens: 512,\n temperature: 0.0,\n messages: [{ role: \"user\", content: prompt }],\n });\n\n const textContent = response.content.find((c) => c.type === \"text\");\n if (!textContent || textContent.type !== \"text\") {\n return { hasTemporalConstraint: false };\n }\n\n const text = textContent.text.trim();\n const jsonMatch = text.match(/```json\\n?([\\s\\S]*?)\\n?```/) || text.match(/\\{[\\s\\S]*\\}/);\n const jsonStr = jsonMatch ? (jsonMatch[1] || jsonMatch[0]) : text;\n\n const parsed = JSON.parse(jsonStr);\n\n // Convert relative to absolute dates\n if (parsed.relative) {\n const range = calculateRelativeDateRange(parsed.relative, questionDate);\n parsed.dateRange = range;\n parsed.absoluteDate = range.start;\n } else if (parsed.absoluteDate) {\n parsed.absoluteDate = new Date(parsed.absoluteDate);\n }\n\n // Convert date strings to Date objects in range\n if (parsed.dateRange) {\n parsed.dateRange = {\n start: new Date(parsed.dateRange.start),\n end: new Date(parsed.dateRange.end),\n };\n }\n\n return parsed;\n } catch (error) {\n console.error(\"Temporal parsing failed:\", error);\n return { hasTemporalConstraint: false };\n }\n}\n\n/**\n * Calculate absolute date range from relative term\n */\nexport function calculateRelativeDateRange(\n relative: string,\n from: Date\n): { start: Date; end: Date } {\n const start = new Date(from);\n const end = new Date(from);\n\n switch (relative) {\n case \"today\":\n start.setHours(0, 0, 0, 0);\n end.setHours(23, 59, 59, 999);\n break;\n\n case \"yesterday\":\n start.setDate(start.getDate() - 1);\n start.setHours(0, 0, 0, 0);\n end.setDate(end.getDate() - 
1);\n end.setHours(23, 59, 59, 999);\n break;\n\n case \"last_week\":\n start.setDate(start.getDate() - 7);\n start.setHours(0, 0, 0, 0);\n end.setHours(23, 59, 59, 999);\n break;\n\n case \"this_week\":\n // Start of current week (Monday)\n const dayOfWeek = start.getDay();\n const diff = dayOfWeek === 0 ? -6 : 1 - dayOfWeek; // Adjust for Sunday\n start.setDate(start.getDate() + diff);\n start.setHours(0, 0, 0, 0);\n end.setHours(23, 59, 59, 999);\n break;\n\n case \"last_month\":\n start.setDate(start.getDate() - 30);\n start.setHours(0, 0, 0, 0);\n end.setHours(23, 59, 59, 999);\n break;\n\n case \"this_month\":\n start.setDate(1);\n start.setHours(0, 0, 0, 0);\n end.setHours(23, 59, 59, 999);\n break;\n\n case \"last_year\":\n start.setFullYear(start.getFullYear() - 1);\n start.setHours(0, 0, 0, 0);\n end.setHours(23, 59, 59, 999);\n break;\n\n default:\n // Default to no constraint\n start.setFullYear(1970);\n end.setFullYear(2100);\n }\n\n return { start, end };\n}\n\n/**\n * Extract event date from memory content using LLM\n * Differentiates between documentDate (when said) and eventDate (when occurred)\n */\nexport async function extractEventDate(\n memoryContent: string,\n documentDate: Date\n): Promise<Date | null> {\n const prompt = `Extract the event date from this memory.\n\n**Important distinction:**\n- documentDate: When this was said/written\n- eventDate: When the event actually occurred/will occur\n\n**Memory:** \"${memoryContent}\"\n**Document Date (when this was said):** ${documentDate.toISOString()}\n\n**Examples:**\n- \"User said they have a meeting tomorrow\" → eventDate = documentDate + 1 day\n- \"User attended conference on Jan 15\" → eventDate = Jan 15 of appropriate year\n- \"User's favorite color is blue\" → eventDate = null (no event, just a fact)\n- \"Meeting happened yesterday\" → eventDate = documentDate - 1 day\n\nReturn JSON:\n{\n \"hasEvent\": boolean,\n \"eventDate\": \"ISO date string or null\",\n \"reasoning\": \"brief 
explanation\"\n}`;\n\n try {\n const response = await anthropic.messages.create({\n model: \"claude-haiku-4.5\",\n max_tokens: 256,\n temperature: 0.0,\n messages: [{ role: \"user\", content: prompt }],\n });\n\n const textContent = response.content.find((c) => c.type === \"text\");\n if (!textContent || textContent.type !== \"text\") {\n return null;\n }\n\n const text = textContent.text.trim();\n const jsonMatch = text.match(/```json\\n?([\\s\\S]*?)\\n?```/) || text.match(/\\{[\\s\\S]*\\}/);\n const jsonStr = jsonMatch ? (jsonMatch[1] || jsonMatch[0]) : text;\n\n const result = JSON.parse(jsonStr);\n\n if (result.hasEvent && result.eventDate) {\n return new Date(result.eventDate);\n }\n\n return null;\n } catch (error) {\n console.error(\"Event date extraction failed:\", error);\n return null;\n }\n}\n\n/**\n * Check if a memory is valid at a given point in time\n * Uses validFrom/validUntil for version tracking\n */\nexport function isMemoryValidAt(\n memory: {\n validFrom: Date | null;\n validUntil: Date | null;\n },\n atTime: Date\n): boolean {\n if (!memory.validFrom) {\n return true; // No validity constraints\n }\n\n if (atTime < memory.validFrom) {\n return false; // Not yet valid\n }\n\n if (memory.validUntil && atTime > memory.validUntil) {\n return false; // No longer valid\n }\n\n return true;\n}\n\n/**\n * Get the current version of a memory at a given time\n * Handles knowledge updates and versioning\n */\nexport async function getMemoryVersionAt(\n memoryId: string,\n atTime: Date,\n db: any\n): Promise<any | null> {\n // Get all versions in the chain\n const versions = await db.memory.findMany({\n where: {\n OR: [\n { id: memoryId },\n { supersededBy: memoryId },\n ],\n },\n orderBy: {\n version: \"asc\",\n },\n });\n\n if (versions.length === 0) {\n return null;\n }\n\n // Find the version valid at the given time\n for (const version of versions.reverse()) {\n // Reverse to check newest first\n if (isMemoryValidAt(version, atTime)) {\n return 
version;\n }\n }\n\n return null;\n}\n\n/**\n * Temporal distance scoring\n * Boost recent memories, decay old ones\n */\nexport function calculateTemporalRelevance(\n memoryDate: Date,\n questionDate: Date,\n decayFactor: number = 0.1\n): number {\n const daysDiff = Math.abs(\n (questionDate.getTime() - memoryDate.getTime()) / (1000 * 60 * 60 * 24)\n );\n\n // Exponential decay: score = e^(-decay * days)\n const score = Math.exp(-decayFactor * daysDiff);\n\n return score;\n}\n\n/**\n * Build timeline of events from memories\n * Useful for \"what happened between X and Y\" queries\n */\nexport interface TimelineEvent {\n date: Date;\n memory: {\n id: string;\n content: string;\n memoryType: string;\n };\n}\n\nexport function buildTimeline(\n memories: Array<{\n id: string;\n content: string;\n memoryType: string;\n eventDate: Date | null;\n documentDate: Date | null;\n }>\n): TimelineEvent[] {\n const events: TimelineEvent[] = [];\n\n for (const memory of memories) {\n const date = memory.eventDate || memory.documentDate;\n if (date) {\n events.push({\n date,\n memory: {\n id: memory.id,\n content: memory.content,\n memoryType: memory.memoryType,\n },\n });\n }\n }\n\n // Sort by date\n events.sort((a, b) => a.date.getTime() - b.date.getTime());\n\n return events;\n}\n","/**\n * SOTA Memory Relation Detection\n * Detects relationships between memories (updates, extends, derives, contradicts)\n * Builds knowledge graph for temporal reasoning and version tracking\n */\n\nimport Anthropic from \"@anthropic-ai/sdk\";\nimport type { MemoryRelationship, RelationType } from \"./types.js\";\n\nconst anthropic = new Anthropic({\n apiKey: process.env.ANTHROPIC_API_KEY || \"\",\n});\n\nconst RELATION_DETECTION_PROMPT = `You are an expert at detecting relationships between memories in a knowledge graph.\n\n**Relation Types:**\n\n1. 
**updates** - New memory supersedes/replaces old memory (state mutation)\n Example:\n - Old: \"User's favorite color is blue\"\n - New: \"User's favorite color is green\"\n - Relation: updates (green replaces blue)\n\n2. **extends** - New memory adds detail to existing memory without contradiction (refinement)\n Example:\n - Old: \"John works at Google\"\n - New: \"John works at Google as a Senior Engineer\"\n - Relation: extends (adds job title)\n\n3. **derives** - New memory is inferred from existing memory/memories (inference)\n Example:\n - Memory 1: \"User prefers dark mode\"\n - Memory 2: \"User prefers high contrast\"\n - New: \"User likely has vision preferences for accessibility\"\n - Relation: derives (inferred from both)\n\n4. **contradicts** - New memory conflicts with existing memory (conflict detection)\n Example:\n - Old: \"Meeting scheduled for 3pm\"\n - New: \"Meeting scheduled for 4pm\"\n - Relation: contradicts (should trigger update)\n\n5. **supports** - New memory provides evidence/support for existing memory\n Example:\n - Memory 1: \"User is interested in ML\"\n - New: \"User enrolled in ML course\"\n - Relation: supports (confirms interest)\n\n**Important:**\n- Only detect relations when there's a clear, meaningful connection\n- Be conservative - if unsure, don't create a relation\n- \"updates\" should invalidate the old memory (set validUntil)\n- \"extends\" keeps the old memory valid but adds information\n- \"contradicts\" should flag for review/resolution`;\n\nexport async function detectRelations(\n newMemory: {\n content: string;\n memoryType: string;\n entityMentions: string[];\n },\n existingMemories: Array<{\n id: string;\n content: string;\n memoryType: string;\n entityMentions: string[];\n documentDate: Date | null;\n }>\n): Promise<MemoryRelationship[]> {\n if (existingMemories.length === 0) {\n return [];\n }\n\n // Filter to relevant memories (share entities or topics)\n const relevantMemories = filterRelevantMemories(newMemory, 
existingMemories);\n\n if (relevantMemories.length === 0) {\n return [];\n }\n\n const prompt = `${RELATION_DETECTION_PROMPT}\n\n**New memory:**\n\"${newMemory.content}\"\nType: ${newMemory.memoryType}\nEntities: ${newMemory.entityMentions.join(\", \")}\n\n**Existing memories to check against:**\n${relevantMemories.map((m, i) => `${i}. \"${m.content}\" (Type: ${m.memoryType}, Date: ${m.documentDate?.toISOString() || \"unknown\"})`).join(\"\\n\")}\n\nAnalyze if the new memory relates to any existing memories.\n\nReturn a JSON array of relations:\n[{\n \"toMemoryIndex\": 0,\n \"relationType\": \"updates|extends|derives|contradicts|supports\",\n \"confidence\": 0.0-1.0,\n \"reasoning\": \"brief explanation why this relation exists\"\n}]\n\nReturn ONLY the JSON array. If no relations found, return [].`;\n\n try {\n const response = await anthropic.messages.create({\n model: \"claude-sonnet-4.5\",\n max_tokens: 2048,\n temperature: 0.0,\n messages: [{ role: \"user\", content: prompt }],\n });\n\n const textContent = response.content.find((c) => c.type === \"text\");\n if (!textContent || textContent.type !== \"text\") {\n return [];\n }\n\n const text = textContent.text.trim();\n const jsonMatch = text.match(/```json\\n?([\\s\\S]*?)\\n?```/) || text.match(/\\[[\\s\\S]*\\]/);\n const jsonStr = jsonMatch ? 
(jsonMatch[1] || jsonMatch[0]) : text;\n\n const relations = JSON.parse(jsonStr);\n\n if (!Array.isArray(relations)) {\n return [];\n }\n\n return relations\n .filter((r: any) => r.confidence >= 0.7) // High confidence threshold\n .map((r: any) => ({\n toMemoryId: relevantMemories[r.toMemoryIndex].id,\n relationType: r.relationType as RelationType,\n confidence: r.confidence,\n reasoning: r.reasoning,\n }));\n } catch (error) {\n console.error(\"Relation detection failed:\", error);\n return [];\n }\n}\n\n/**\n * Filter memories that are likely related to the new memory\n * Reduces LLM calls and improves accuracy\n */\nfunction filterRelevantMemories(\n newMemory: {\n content: string;\n entityMentions: string[];\n },\n existingMemories: Array<{\n id: string;\n content: string;\n entityMentions: string[];\n }>\n): typeof existingMemories {\n return existingMemories.filter((existing) => {\n // Share at least one entity\n const sharedEntities = newMemory.entityMentions.some((entity) =>\n existing.entityMentions.includes(entity)\n );\n\n if (sharedEntities) {\n return true;\n }\n\n // Share significant keywords (simple keyword overlap)\n const newWords = extractKeywords(newMemory.content);\n const existingWords = extractKeywords(existing.content);\n\n const overlap = newWords.filter((w) => existingWords.includes(w));\n\n return overlap.length >= 2; // At least 2 shared keywords\n });\n}\n\n/**\n * Extract keywords from text (simple approach)\n */\nfunction extractKeywords(text: string): string[] {\n const stopWords = new Set([\n \"the\",\n \"a\",\n \"an\",\n \"is\",\n \"are\",\n \"was\",\n \"were\",\n \"be\",\n \"been\",\n \"being\",\n \"have\",\n \"has\",\n \"had\",\n \"do\",\n \"does\",\n \"did\",\n \"will\",\n \"would\",\n \"could\",\n \"should\",\n \"may\",\n \"might\",\n \"must\",\n \"can\",\n \"to\",\n \"of\",\n \"in\",\n \"for\",\n \"on\",\n \"at\",\n \"by\",\n \"from\",\n \"with\",\n \"about\",\n ]);\n\n return text\n .toLowerCase()\n .split(/\\W+/)\n 
.filter((word) => word.length > 3 && !stopWords.has(word))\n .slice(0, 10); // Top 10 keywords\n}\n\n/**\n * Determine if relation should invalidate old memory\n */\nexport function shouldInvalidateMemory(relationType: RelationType): boolean {\n return relationType === \"updates\" || relationType === \"contradicts\";\n}\n\n/**\n * Build relation graph for traversal\n * Returns adjacency list representation\n */\nexport function buildRelationGraph(\n relations: Array<{\n fromMemoryId: string;\n toMemoryId: string;\n relationType: string;\n }>\n): Map<string, Array<{ memoryId: string; relationType: string }>> {\n const graph = new Map<string, Array<{ memoryId: string; relationType: string }>>();\n\n for (const relation of relations) {\n // Forward edge (from → to)\n if (!graph.has(relation.fromMemoryId)) {\n graph.set(relation.fromMemoryId, []);\n }\n graph.get(relation.fromMemoryId)!.push({\n memoryId: relation.toMemoryId,\n relationType: relation.relationType,\n });\n\n // Backward edge (to → from) for bidirectional traversal\n if (!graph.has(relation.toMemoryId)) {\n graph.set(relation.toMemoryId, []);\n }\n graph.get(relation.toMemoryId)!.push({\n memoryId: relation.fromMemoryId,\n relationType: `inverse_${relation.relationType}`,\n });\n }\n\n return graph;\n}\n\n/**\n * Get version chain for a memory\n * Follows \"updates\" relations to find all versions\n */\nexport async function getVersionChain(\n memoryId: string,\n db: any\n): Promise<Array<{ id: string; version: number; content: string; validFrom: Date; validUntil: Date | null }>> {\n const versions = [];\n\n // Get current memory\n let currentMemory = await db.memory.findUnique({\n where: { id: memoryId },\n select: {\n id: true,\n content: true,\n version: true,\n validFrom: true,\n validUntil: true,\n supersededBy: true,\n },\n });\n\n if (!currentMemory) {\n return [];\n }\n\n // Walk backward through supersedes relation\n const seenIds = new Set<string>();\n while (currentMemory && 
!seenIds.has(currentMemory.id)) {\n seenIds.add(currentMemory.id);\n\n versions.unshift({\n id: currentMemory.id,\n version: currentMemory.version,\n content: currentMemory.content,\n validFrom: currentMemory.validFrom!,\n validUntil: currentMemory.validUntil,\n });\n\n // Find previous version\n const prev = await db.memory.findFirst({\n where: { supersededBy: currentMemory.id },\n select: {\n id: true,\n content: true,\n version: true,\n validFrom: true,\n validUntil: true,\n supersededBy: true,\n },\n });\n\n currentMemory = prev;\n }\n\n // Walk forward through supersededBy relation\n currentMemory = await db.memory.findUnique({\n where: { id: memoryId },\n });\n\n while (currentMemory?.supersededBy && !seenIds.has(currentMemory.supersededBy)) {\n seenIds.add(currentMemory.supersededBy);\n\n const next = await db.memory.findUnique({\n where: { id: currentMemory.supersededBy },\n select: {\n id: true,\n content: true,\n version: true,\n validFrom: true,\n validUntil: true,\n supersededBy: true,\n },\n });\n\n if (next) {\n versions.push({\n id: next.id,\n version: next.version,\n content: next.content,\n validFrom: next.validFrom!,\n validUntil: next.validUntil,\n });\n\n currentMemory = next;\n } else {\n break;\n }\n }\n\n return 
versions;\n}\n"],"mappings":";AAMA,OAAO,eAAe;AAGtB,IAAM,YAAY,IAAI,UAAU;AAAA,EAC9B,QAAQ,QAAQ,IAAI,qBAAqB;AAC3C,CAAC;AAED,IAAM,0BAA0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAwBhC,eAAsB,mBACpB,OACA,cACyB;AACzB,QAAM,SAAS,GAAG,uBAAuB;AAAA;AAAA,cAE7B,KAAK;AAAA,yBACM,aAAa,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAYjD,MAAI;AACF,UAAM,WAAW,MAAM,UAAU,SAAS,OAAO;AAAA,MAC/C,OAAO;AAAA;AAAA,MACP,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AAED,UAAM,cAAc,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAClE,QAAI,CAAC,eAAe,YAAY,SAAS,QAAQ;AAC/C,aAAO,EAAE,uBAAuB,MAAM;AAAA,IACxC;AAEA,UAAM,OAAO,YAAY,KAAK,KAAK;AACnC,UAAM,YAAY,KAAK,MAAM,4BAA4B,KAAK,KAAK,MAAM,aAAa;AACtF,UAAM,UAAU,YAAa,UAAU,CAAC,KAAK,UAAU,CAAC,IAAK;AAE7D,UAAM,SAAS,KAAK,MAAM,OAAO;AAGjC,QAAI,OAAO,UAAU;AACnB,YAAM,QAAQ,2BAA2B,OAAO,UAAU,YAAY;AACtE,aAAO,YAAY;AACnB,aAAO,eAAe,MAAM;AAAA,IAC9B,WAAW,OAAO,cAAc;AAC9B,aAAO,eAAe,IAAI,KAAK,OAAO,YAAY;AAAA,IACpD;AAGA,QAAI,OAAO,WAAW;AACpB,aAAO,YAAY;AAAA,QACjB,OAAO,IAAI,KAAK,OAAO,UAAU,KAAK;AAAA,QACtC,KAAK,IAAI,KAAK,OAAO,UAAU,GAAG;AAAA,MACpC;AAAA,IACF;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ,MAAM,4BAA4B,KAAK;AAC/C,WAAO,EAAE,uBAAuB,MAAM;AAAA,EACxC;AACF;AAKO,SAAS,2BACd,UACA,MAC4B;AAC5B,QAAM,QAAQ,IAAI,KAAK,IAAI;AAC3B,QAAM,MAAM,IAAI,KAAK,IAAI;AAEzB,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,YAAM,SAAS,GAAG,GAAG,GAAG,CAAC;AACzB,UAAI,SAAS,IAAI,IAAI,IAAI,GAAG;AAC5B;AAAA,IAEF,KAAK;AACH,YAAM,QAAQ,MAAM,QAAQ,IAAI,CAAC;AACjC,YAAM,SAAS,GAAG,GAAG,GAAG,CAAC;AACzB,UAAI,QAAQ,IAAI,QAAQ,IAAI,CAAC;AAC7B,UAAI,SAAS,IAAI,IAAI,IAAI,GAAG;AAC5B;AAAA,IAEF,KAAK;AACH,YAAM,QAAQ,MAAM,QAAQ,IAAI,CAAC;AACjC,YAAM,SAAS,GAAG,GAAG,GAAG,CAAC;AACzB,UAAI,SAAS,IAAI,IAAI,IAAI,GAAG;AAC5B;AAAA,IAEF,KAAK;AAEH,YAAM,YAAY,MAAM,OAAO;AAC/B,YAAM,OAAO,cAAc,IAAI,KAAK,IAAI;AACxC,YAAM,QAAQ,MAAM,QAAQ,IAAI,IAAI;AACpC,YAAM,SAAS,GAAG,GAAG,GAAG,CAAC;AACzB,UAAI,SAAS,IAAI,IAAI,IAAI,GAAG;AAC5B;AAAA,IAEF,KAAK;AACH,YAAM,QAAQ,MAAM,QAAQ,IAAI,EAAE;AAClC
,YAAM,SAAS,GAAG,GAAG,GAAG,CAAC;AACzB,UAAI,SAAS,IAAI,IAAI,IAAI,GAAG;AAC5B;AAAA,IAEF,KAAK;AACH,YAAM,QAAQ,CAAC;AACf,YAAM,SAAS,GAAG,GAAG,GAAG,CAAC;AACzB,UAAI,SAAS,IAAI,IAAI,IAAI,GAAG;AAC5B;AAAA,IAEF,KAAK;AACH,YAAM,YAAY,MAAM,YAAY,IAAI,CAAC;AACzC,YAAM,SAAS,GAAG,GAAG,GAAG,CAAC;AACzB,UAAI,SAAS,IAAI,IAAI,IAAI,GAAG;AAC5B;AAAA,IAEF;AAEE,YAAM,YAAY,IAAI;AACtB,UAAI,YAAY,IAAI;AAAA,EACxB;AAEA,SAAO,EAAE,OAAO,IAAI;AACtB;AAMA,eAAsB,iBACpB,eACA,cACsB;AACtB,QAAM,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,eAMF,aAAa;AAAA,0CACc,aAAa,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAelE,MAAI;AACF,UAAM,WAAW,MAAM,UAAU,SAAS,OAAO;AAAA,MAC/C,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AAED,UAAM,cAAc,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAClE,QAAI,CAAC,eAAe,YAAY,SAAS,QAAQ;AAC/C,aAAO;AAAA,IACT;AAEA,UAAM,OAAO,YAAY,KAAK,KAAK;AACnC,UAAM,YAAY,KAAK,MAAM,4BAA4B,KAAK,KAAK,MAAM,aAAa;AACtF,UAAM,UAAU,YAAa,UAAU,CAAC,KAAK,UAAU,CAAC,IAAK;AAE7D,UAAM,SAAS,KAAK,MAAM,OAAO;AAEjC,QAAI,OAAO,YAAY,OAAO,WAAW;AACvC,aAAO,IAAI,KAAK,OAAO,SAAS;AAAA,IAClC;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ,MAAM,iCAAiC,KAAK;AACpD,WAAO;AAAA,EACT;AACF;AAqEO,SAAS,2BACd,YACA,cACA,cAAsB,KACd;AACR,QAAM,WAAW,KAAK;AAAA,KACnB,aAAa,QAAQ,IAAI,WAAW,QAAQ,MAAM,MAAO,KAAK,KAAK;AAAA,EACtE;AAGA,QAAM,QAAQ,KAAK,IAAI,CAAC,cAAc,QAAQ;AAE9C,SAAO;AACT;;;AC1SA,OAAOA,gBAAe;AAGtB,IAAMC,aAAY,IAAID,WAAU;AAAA,EAC9B,QAAQ,QAAQ,IAAI,qBAAqB;AAC3C,CAAC;AAED,IAAM,4BAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA0ClC,eAAsB,gBACpB,WAKA,kBAO+B;AAC/B,MAAI,iBAAiB,WAAW,GAAG;AACjC,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,mBAAmB,uBAAuB,WAAW,gBAAgB;AAE3E,MAAI,iBAAiB,WAAW,GAAG;AACjC,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS,GAAG,yBAAyB;AAAA;AAAA;AAAA,GAG1C,UAAU,OAAO;AAAA,QACZ,UAAU,UAAU;AAAA,YAChB,UAAU,eAAe,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA,EAG7C,iBAAiB,IAAI,CAAC,GAAG,MAAM
,GAAG,CAAC,MAAM,EAAE,OAAO,YAAY,EAAE,UAAU,WAAW,EAAE,cAAc,YAAY,KAAK,SAAS,GAAG,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAc9I,MAAI;AACF,UAAM,WAAW,MAAMC,WAAU,SAAS,OAAO;AAAA,MAC/C,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,IAC9C,CAAC;AAED,UAAM,cAAc,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAClE,QAAI,CAAC,eAAe,YAAY,SAAS,QAAQ;AAC/C,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,OAAO,YAAY,KAAK,KAAK;AACnC,UAAM,YAAY,KAAK,MAAM,4BAA4B,KAAK,KAAK,MAAM,aAAa;AACtF,UAAM,UAAU,YAAa,UAAU,CAAC,KAAK,UAAU,CAAC,IAAK;AAE7D,UAAM,YAAY,KAAK,MAAM,OAAO;AAEpC,QAAI,CAAC,MAAM,QAAQ,SAAS,GAAG;AAC7B,aAAO,CAAC;AAAA,IACV;AAEA,WAAO,UACJ,OAAO,CAAC,MAAW,EAAE,cAAc,GAAG,EACtC,IAAI,CAAC,OAAY;AAAA,MAChB,YAAY,iBAAiB,EAAE,aAAa,EAAE;AAAA,MAC9C,cAAc,EAAE;AAAA,MAChB,YAAY,EAAE;AAAA,MACd,WAAW,EAAE;AAAA,IACf,EAAE;AAAA,EACN,SAAS,OAAO;AACd,YAAQ,MAAM,8BAA8B,KAAK;AACjD,WAAO,CAAC;AAAA,EACV;AACF;AAMA,SAAS,uBACP,WAIA,kBAKyB;AACzB,SAAO,iBAAiB,OAAO,CAAC,aAAa;AAE3C,UAAM,iBAAiB,UAAU,eAAe;AAAA,MAAK,CAAC,WACpD,SAAS,eAAe,SAAS,MAAM;AAAA,IACzC;AAEA,QAAI,gBAAgB;AAClB,aAAO;AAAA,IACT;AAGA,UAAM,WAAW,gBAAgB,UAAU,OAAO;AAClD,UAAM,gBAAgB,gBAAgB,SAAS,OAAO;AAEtD,UAAM,UAAU,SAAS,OAAO,CAAC,MAAM,cAAc,SAAS,CAAC,CAAC;AAEhE,WAAO,QAAQ,UAAU;AAAA,EAC3B,CAAC;AACH;AAKA,SAAS,gBAAgB,MAAwB;AAC/C,QAAM,YAAY,oBAAI,IAAI;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO,KACJ,YAAY,EACZ,MAAM,KAAK,EACX,OAAO,CAAC,SAAS,KAAK,SAAS,KAAK,CAAC,UAAU,IAAI,IAAI,CAAC,EACxD,MAAM,GAAG,EAAE;AAChB;AAKO,SAAS,uBAAuB,cAAqC;AAC1E,SAAO,iBAAiB,aAAa,iBAAiB;AACxD;AAMO,SAAS,mBACd,WAKgE;AAChE,QAAM,QAAQ,oBAAI,IAA+D;AAEjF,aAAW,YAAY,WAAW;AAEhC,QAAI,CAAC,MAAM,IAAI,SAAS,YAAY,GAAG;AACrC,YAAM,IAAI,SAAS,cAAc,CAAC,CAAC;AAA
A,IACrC;AACA,UAAM,IAAI,SAAS,YAAY,EAAG,KAAK;AAAA,MACrC,UAAU,SAAS;AAAA,MACnB,cAAc,SAAS;AAAA,IACzB,CAAC;AAGD,QAAI,CAAC,MAAM,IAAI,SAAS,UAAU,GAAG;AACnC,YAAM,IAAI,SAAS,YAAY,CAAC,CAAC;AAAA,IACnC;AACA,UAAM,IAAI,SAAS,UAAU,EAAG,KAAK;AAAA,MACnC,UAAU,SAAS;AAAA,MACnB,cAAc,WAAW,SAAS,YAAY;AAAA,IAChD,CAAC;AAAA,EACH;AAEA,SAAO;AACT;","names":["Anthropic","anthropic"]}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
// src/db/index.ts
|
|
2
|
+
import { PrismaClient } from "@prisma/client";
|
|
3
|
+
import { Prisma } from "@prisma/client";
|
|
4
|
+
var globalForPrisma = globalThis;
|
|
5
|
+
var db = globalForPrisma.prisma ?? new PrismaClient();
|
|
6
|
+
var prisma = db;
|
|
7
|
+
if (process.env.NODE_ENV !== "production") {
|
|
8
|
+
globalForPrisma.prisma = db;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
// src/engine/embeddings.ts
|
|
12
|
+
import OpenAI from "openai";
|
|
13
|
+
var openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
14
|
+
async function embed(texts) {
|
|
15
|
+
const res = await openai.embeddings.create({
|
|
16
|
+
model: "text-embedding-3-small",
|
|
17
|
+
input: texts,
|
|
18
|
+
dimensions: 1536
|
|
19
|
+
});
|
|
20
|
+
return res.data.map((d) => d.embedding);
|
|
21
|
+
}
|
|
22
|
+
async function embedSingle(text) {
|
|
23
|
+
const [embedding] = await embed([text]);
|
|
24
|
+
return embedding;
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
export {
|
|
28
|
+
db,
|
|
29
|
+
prisma,
|
|
30
|
+
embed,
|
|
31
|
+
embedSingle
|
|
32
|
+
};
|
|
33
|
+
//# sourceMappingURL=chunk-X2DL2GWT.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/db/index.ts","../../src/engine/embeddings.ts"],"sourcesContent":["import { PrismaClient } from \"@prisma/client\";\n\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\n\nexport const db = globalForPrisma.prisma ?? new PrismaClient();\nexport const prisma = db; // Alias for compatibility\n\nif (process.env.NODE_ENV !== \"production\") {\n globalForPrisma.prisma = db;\n}\n\n// Re-export Prisma types for convenience\nexport { Prisma } from \"@prisma/client\";\nexport type {\n User,\n Organization,\n OrganizationMember,\n Project,\n Source,\n Document,\n Chunk,\n Embedding,\n Entity,\n EntityRelation,\n Memory,\n MemoryRelation,\n Session,\n Message,\n ChunkMemory,\n SyncJob,\n Webhook,\n WebhookDelivery,\n SearchLog,\n Package,\n PackageVersion,\n ResearchSession,\n ResearchStep,\n UsageRecord,\n ApiKey,\n AuditLog,\n Integration,\n Scan,\n Finding,\n FixSession,\n} from \"@prisma/client\";\n","import OpenAI from \"openai\";\n\nconst openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\nexport async function embed(texts: string[]): Promise<number[][]> {\n const res = await openai.embeddings.create({\n model: \"text-embedding-3-small\",\n input: texts,\n dimensions: 1536,\n });\n\n return res.data.map((d) => d.embedding);\n}\n\nexport async function embedSingle(text: string): Promise<number[]> {\n const [embedding] = await embed([text]);\n return embedding;\n}\n"],"mappings":";AAAA,SAAS,oBAAoB;AAc7B,SAAS,cAAc;AAZvB,IAAM,kBAAkB;AAIjB,IAAM,KAAK,gBAAgB,UAAU,IAAI,aAAa;AACtD,IAAM,SAAS;AAEtB,IAAI,QAAQ,IAAI,aAAa,cAAc;AACzC,kBAAgB,SAAS;AAC3B;;;ACXA,OAAO,YAAY;AAEnB,IAAM,SAAS,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAEhE,eAAsB,MAAM,OAAsC;AAChE,QAAM,MAAM,MAAM,OAAO,WAAW,OAAO;AAAA,IACzC,OAAO;AAAA,IACP,OAAO;AAAA,IACP,YAAY;AAAA,EACd,CAAC;AAED,SAAO,IAAI,KAAK,IAAI,CAAC,MAAM,EAAE,SAAS;AACxC;AAEA,eAAsB,YAAY,MAAiC;AACjE,QAAM,CAAC,SAAS,IAAI,MAAM,MAAM,CAAC,IAAI,CAAC;AACtC,SAAO;AACT;","names":[]}
|