@usewhisper/mcp-server 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +26 -24
  2. package/dist/autosubscribe-6EDKPBE2.js +4068 -0
  3. package/dist/autosubscribe-GHO6YR5A.js +4068 -0
  4. package/dist/autosubscribe-ISDETQIB.js +436 -0
  5. package/dist/autosubscribe-ISDETQIB.js.map +1 -0
  6. package/dist/chunk-3WGYBAYR.js +8387 -0
  7. package/dist/chunk-52VJYCZ7.js +455 -0
  8. package/dist/chunk-5KBZQHDL.js +189 -0
  9. package/dist/chunk-5KIJNY6Z.js +370 -0
  10. package/dist/chunk-7SN3CKDK.js +1076 -0
  11. package/dist/chunk-B3VWOHUA.js +271 -0
  12. package/dist/chunk-C57DHKTL.js +459 -0
  13. package/dist/chunk-EI5CE3EY.js +616 -0
  14. package/dist/chunk-FTWUJBAH.js +387 -0
  15. package/dist/chunk-FTWUJBAH.js.map +1 -0
  16. package/dist/chunk-H3HSKH2P.js +4841 -0
  17. package/dist/chunk-JO3ORBZD.js +616 -0
  18. package/dist/chunk-L6DXSM2U.js +457 -0
  19. package/dist/chunk-L6DXSM2U.js.map +1 -0
  20. package/dist/chunk-LMEYV4JD.js +368 -0
  21. package/dist/chunk-MEFLJ4PV.js +8385 -0
  22. package/dist/chunk-OBLI4FE4.js +276 -0
  23. package/dist/chunk-OBLI4FE4.js.map +1 -0
  24. package/dist/chunk-PPGYJJED.js +271 -0
  25. package/dist/chunk-QGM4M3NI.js +37 -0
  26. package/dist/chunk-T7KMSTWP.js +399 -0
  27. package/dist/chunk-TWEIYHI6.js +399 -0
  28. package/dist/chunk-UYWE7HSU.js +369 -0
  29. package/dist/chunk-UYWE7HSU.js.map +1 -0
  30. package/dist/chunk-X2DL2GWT.js +33 -0
  31. package/dist/chunk-X2DL2GWT.js.map +1 -0
  32. package/dist/chunk-X7HNNNJJ.js +1079 -0
  33. package/dist/consolidation-2GCKI4RE.js +220 -0
  34. package/dist/consolidation-4JOPW6BG.js +220 -0
  35. package/dist/consolidation-FOVQTWNQ.js +222 -0
  36. package/dist/consolidation-IFQ52E44.js +210 -0
  37. package/dist/consolidation-IFQ52E44.js.map +1 -0
  38. package/dist/context-sharing-4ITCNKG4.js +307 -0
  39. package/dist/context-sharing-6CCFIAKL.js +276 -0
  40. package/dist/context-sharing-6CCFIAKL.js.map +1 -0
  41. package/dist/context-sharing-GYKLXHZA.js +307 -0
  42. package/dist/context-sharing-PH64JTXS.js +308 -0
  43. package/dist/context-sharing-Y6LTZZOF.js +307 -0
  44. package/dist/cost-optimization-6OIKRSBV.js +196 -0
  45. package/dist/cost-optimization-6OIKRSBV.js.map +1 -0
  46. package/dist/cost-optimization-7DVSTL6R.js +307 -0
  47. package/dist/cost-optimization-BH5NAX33.js +287 -0
  48. package/dist/cost-optimization-BH5NAX33.js.map +1 -0
  49. package/dist/cost-optimization-F3L5BS5F.js +303 -0
  50. package/dist/ingest-2LPTWUUM.js +16 -0
  51. package/dist/ingest-7T5FAZNC.js +15 -0
  52. package/dist/ingest-EBNIE7XB.js +15 -0
  53. package/dist/ingest-FSHT5BCS.js +15 -0
  54. package/dist/ingest-QE2BTV72.js +15 -0
  55. package/dist/ingest-QE2BTV72.js.map +1 -0
  56. package/dist/oracle-3RLQF3DP.js +259 -0
  57. package/dist/oracle-FKRTQUUG.js +282 -0
  58. package/dist/oracle-J47QCSEW.js +263 -0
  59. package/dist/oracle-MDP5MZRC.js +257 -0
  60. package/dist/oracle-MDP5MZRC.js.map +1 -0
  61. package/dist/search-BLVHWLWC.js +14 -0
  62. package/dist/search-CZ5NYL5B.js +13 -0
  63. package/dist/search-CZ5NYL5B.js.map +1 -0
  64. package/dist/search-EG6TYWWW.js +13 -0
  65. package/dist/search-I22QQA7T.js +13 -0
  66. package/dist/search-T7H5G6DW.js +13 -0
  67. package/dist/server.d.ts +2 -0
  68. package/dist/server.js +914 -1503
  69. package/dist/server.js.map +1 -1
  70. package/package.json +6 -7
package/dist/chunk-FTWUJBAH.js
@@ -0,0 +1,387 @@
+ import {
+   embed,
+   embedSingle,
+   prisma
+ } from "./chunk-X2DL2GWT.js";
+
+ // src/engine/chunker.ts
+ var CODE_EXTENSIONS = /* @__PURE__ */ new Set([
+   ".ts",
+   ".tsx",
+   ".js",
+   ".jsx",
+   ".py",
+   ".java",
+   ".go",
+   ".rb",
+   ".php",
+   ".cs",
+   ".rs",
+   ".swift",
+   ".kt",
+   ".scala",
+   ".c",
+   ".cpp",
+   ".h",
+   ".hpp",
+   ".sol",
+   ".vy"
+ ]);
+ var CONFIG_EXTENSIONS = /* @__PURE__ */ new Set([
+   ".json",
+   ".yaml",
+   ".yml",
+   ".toml",
+   ".ini",
+   ".env",
+   ".xml"
+ ]);
+ function detectChunkType(filePath, content) {
+   if (!filePath) return "text";
+   const ext = "." + filePath.split(".").pop()?.toLowerCase();
+   if (CODE_EXTENSIONS.has(ext)) return "code";
+   if (CONFIG_EXTENSIONS.has(ext)) return "config";
+   if (filePath.includes("schema") || filePath.includes("migration")) return "schema";
+   if (filePath.endsWith(".md") || filePath.endsWith(".mdx") || filePath.endsWith(".rst")) return "documentation";
+   if (filePath.includes("openapi") || filePath.includes("swagger")) return "api_spec";
+   return "text";
+ }
+ function chunkText(content, opts = {}) {
+   const { chunkSize = 1e3, chunkOverlap = 200, filePath, metadata = {} } = opts;
+   const chunkType = detectChunkType(filePath, content);
+   if (chunkType === "code") {
+     return chunkCode(content, { chunkSize, filePath, metadata });
+   }
+   return chunkBySize(content, { chunkSize, chunkOverlap, chunkType, metadata });
+ }
+ function chunkCode(content, opts) {
+   const { chunkSize, filePath, metadata = {} } = opts;
+   const lines = content.split("\n");
+   const chunks = [];
+   const boundaries = [
+     /^(export\s+)?(async\s+)?function\s+/,
+     /^(export\s+)?(default\s+)?class\s+/,
+     /^(export\s+)?const\s+\w+\s*=\s*(async\s+)?\(/,
+     /^(export\s+)?const\s+\w+\s*=\s*\{/,
+     /^(export\s+)?interface\s+/,
+     /^(export\s+)?type\s+/,
+     /^(export\s+)?enum\s+/,
+     /^def\s+/,
+     // Python
+     /^class\s+/,
+     // Python/Java
+     /^func\s+/,
+     // Go
+     /^pub\s+(fn|struct|enum|impl)/
+     // Rust
+   ];
+   let currentChunk = [];
+   let currentStart = 0;
+   for (let i = 0; i < lines.length; i++) {
+     const trimmed = lines[i].trimStart();
+     const isBoundary = boundaries.some((b) => b.test(trimmed));
+     if (isBoundary && currentChunk.length > 0) {
+       const chunkContent = currentChunk.join("\n").trim();
+       if (chunkContent.length > 0) {
+         chunks.push({
+           content: chunkContent,
+           chunkType: "code",
+           chunkIndex: chunks.length,
+           metadata: {
+             ...metadata,
+             filePath,
+             startLine: currentStart + 1,
+             endLine: i
+           }
+         });
+       }
+       currentChunk = [lines[i]];
+       currentStart = i;
+     } else {
+       currentChunk.push(lines[i]);
+     }
+     if (currentChunk.join("\n").length > chunkSize * 1.5) {
+       const chunkContent = currentChunk.join("\n").trim();
+       if (chunkContent.length > 0) {
+         chunks.push({
+           content: chunkContent,
+           chunkType: "code",
+           chunkIndex: chunks.length,
+           metadata: {
+             ...metadata,
+             filePath,
+             startLine: currentStart + 1,
+             endLine: i + 1
+           }
+         });
+       }
+       currentChunk = [];
+       currentStart = i + 1;
+     }
+   }
+   if (currentChunk.length > 0) {
+     const chunkContent = currentChunk.join("\n").trim();
+     if (chunkContent.length > 0) {
+       chunks.push({
+         content: chunkContent,
+         chunkType: "code",
+         chunkIndex: chunks.length,
+         metadata: {
+           ...metadata,
+           filePath,
+           startLine: currentStart + 1,
+           endLine: lines.length
+         }
+       });
+     }
+   }
+   return chunks;
+ }
+ function chunkBySize(content, opts) {
+   const { chunkSize, chunkOverlap, chunkType, metadata = {} } = opts;
+   const chunks = [];
+   const paragraphs = content.split(/\n\n+/);
+   let current = "";
+   for (const para of paragraphs) {
+     if ((current + "\n\n" + para).length > chunkSize && current.length > 0) {
+       chunks.push({
+         content: current.trim(),
+         chunkType,
+         chunkIndex: chunks.length,
+         metadata
+       });
+       const words = current.split(/\s+/);
+       const overlapWords = words.slice(-Math.floor(chunkOverlap / 5));
+       current = overlapWords.join(" ") + "\n\n" + para;
+     } else {
+       current = current ? current + "\n\n" + para : para;
+     }
+   }
+   if (current.trim().length > 0) {
+     chunks.push({
+       content: current.trim(),
+       chunkType,
+       chunkIndex: chunks.length,
+       metadata
+     });
+   }
+   return chunks;
+ }
+
+ // src/engine/extractor.ts
+ import OpenAI from "openai";
+ var openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
+ async function extractEntities(projectId, content, chunkType, metadata = {}, chunkId) {
+   if (content.length < 100) return { entities: 0, relations: 0 };
+   const isCode = ["code", "function", "class"].includes(chunkType);
+   const prompt = isCode ? `Analyze this code and extract entities and relationships.
+
+ Entities: functions, classes, interfaces, types, modules, variables, constants, API endpoints, services.
+ Relations: imports, exports, calls, implements, extends, depends_on, references, part_of.
+
+ Code:
+ \`\`\`
+ ${content.slice(0, 3e3)}
+ \`\`\`
+
+ Respond with JSON only:
+ {
+  "entities": [{"name": "...", "type": "function|class|interface|module|constant|api_endpoint|service", "description": "one line"}],
+  "relations": [{"from": "name", "fromType": "type", "to": "name", "toType": "type", "relation": "imports|calls|extends|implements|depends_on|references|part_of"}]
+ }` : `Analyze this text and extract key entities (concepts, people, tools, services, APIs, technologies) and their relationships.
+
+ Text:
+ ${content.slice(0, 3e3)}
+
+ Respond with JSON only:
+ {
+  "entities": [{"name": "...", "type": "concept|tool|service|api|technology|person|organization", "description": "one line"}],
+  "relations": [{"from": "name", "fromType": "type", "to": "name", "toType": "type", "relation": "references|depends_on|related_to|part_of|supersedes"}]
+ }`;
+   try {
+     const res = await openai.chat.completions.create({
+       model: "gpt-4.1-nano",
+       messages: [{ role: "user", content: prompt }],
+       temperature: 0,
+       max_tokens: 1e3,
+       response_format: { type: "json_object" }
+     });
+     const text = res.choices[0]?.message?.content?.trim() || "{}";
+     const parsed = JSON.parse(text);
+     const extractedEntities = parsed.entities || [];
+     const extractedRelations = parsed.relations || [];
+     let entityCount = 0;
+     let relationCount = 0;
+     const entityMap = /* @__PURE__ */ new Map();
+     for (const ent of extractedEntities.slice(0, 20)) {
+       if (!ent.name || !ent.type) continue;
+       const embedding = await embedSingle(`${ent.type}: ${ent.name} - ${ent.description || ""}`);
+       const entity = await prisma.entity.upsert({
+         where: {
+           projectId_name_entityType: {
+             projectId,
+             name: ent.name,
+             entityType: ent.type
+           }
+         },
+         update: {
+           description: ent.description,
+           sourceChunkId: chunkId,
+           embedding,
+           updatedAt: /* @__PURE__ */ new Date()
+         },
+         create: {
+           projectId,
+           name: ent.name,
+           entityType: ent.type,
+           description: ent.description,
+           metadata: { ...metadata, autoExtracted: true },
+           sourceChunkId: chunkId,
+           embedding
+         }
+       });
+       entityMap.set(`${ent.name}:${ent.type}`, entity.id);
+       entityCount++;
+     }
+     for (const rel of extractedRelations.slice(0, 30)) {
+       if (!rel.from || !rel.to || !rel.relation) continue;
+       const fromId = entityMap.get(`${rel.from}:${rel.fromType}`);
+       const toId = entityMap.get(`${rel.to}:${rel.toType}`);
+       if (!fromId || !toId) continue;
+       const validRelations = [
+         "imports",
+         "exports",
+         "calls",
+         "implements",
+         "extends",
+         "references",
+         "depends_on",
+         "related_to",
+         "part_of",
+         "contradicts",
+         "supersedes"
+       ];
+       if (!validRelations.includes(rel.relation)) continue;
+       await prisma.entityRelation.upsert({
+         where: {
+           fromEntityId_toEntityId_relationType: {
+             fromEntityId: fromId,
+             toEntityId: toId,
+             relationType: rel.relation
+           }
+         },
+         update: {
+           metadata: { autoExtracted: true }
+         },
+         create: {
+           projectId,
+           fromEntityId: fromId,
+           toEntityId: toId,
+           relationType: rel.relation,
+           metadata: { autoExtracted: true }
+         }
+       });
+       relationCount++;
+     }
+     return { entities: entityCount, relations: relationCount };
+   } catch {
+     return { entities: 0, relations: 0 };
+   }
+ }
+
+ // src/engine/ingest.ts
+ import { createHash } from "crypto";
+ import PQueue from "p-queue";
+ var queue = new PQueue({ concurrency: 3 });
+ var ENABLE_AUTO_EXTRACTION = process.env.DISABLE_AUTO_EXTRACTION !== "true";
+ async function ingestDocument(input) {
+   const { sourceId, projectId, externalId, title, content, metadata = {}, filePath } = input;
+   const contentHash = createHash("sha256").update(content).digest("hex");
+   const doc = await prisma.document.upsert({
+     where: {
+       sourceId_externalId: {
+         sourceId,
+         externalId
+       }
+     },
+     update: {
+       title,
+       content,
+       metadata,
+       contentHash,
+       updatedAt: /* @__PURE__ */ new Date()
+     },
+     create: {
+       sourceId,
+       projectId,
+       externalId,
+       title,
+       content,
+       metadata,
+       contentHash
+     }
+   });
+   await prisma.chunk.deleteMany({
+     where: { documentId: doc.id }
+   });
+   const textChunks = chunkText(content, {
+     filePath: filePath || externalId,
+     metadata: { ...metadata, title }
+   });
+   if (textChunks.length === 0) return doc;
+   const batchSize = 50;
+   const insertedChunkIds = [];
+   for (let i = 0; i < textChunks.length; i += batchSize) {
+     const batch = textChunks.slice(i, i + batchSize);
+     const embeddings = await embed(batch.map((c) => c.content));
+     const inserted = await prisma.$transaction(
+       batch.map(
+         (chunk, j) => prisma.chunk.create({
+           data: {
+             documentId: doc.id,
+             projectId,
+             content: chunk.content,
+             chunkType: chunk.chunkType,
+             chunkIndex: chunk.chunkIndex,
+             metadata: chunk.metadata,
+             embedding: embeddings[j],
+             tokenCount: Math.ceil(chunk.content.length / 4)
+           },
+           select: { id: true }
+         })
+       )
+     );
+     insertedChunkIds.push(...inserted.map((c) => c.id));
+   }
+   if (ENABLE_AUTO_EXTRACTION && !input.skipEntityExtraction) {
+     const chunksToExtract = textChunks.filter((c) => c.content.length > 200).slice(0, 5);
+     for (let i = 0; i < chunksToExtract.length; i++) {
+       const chunk = chunksToExtract[i];
+       const chunkId = insertedChunkIds[textChunks.indexOf(chunk)];
+       extractEntities(projectId, chunk.content, chunk.chunkType, metadata, chunkId).catch(() => {
+       });
+     }
+   }
+   const docCount = await prisma.document.count({
+     where: { sourceId }
+   });
+   const chunkCount = await prisma.chunk.count({
+     where: { documentId: doc.id }
+   });
+   await prisma.source.update({
+     where: { id: sourceId },
+     data: {
+       documentCount: docCount,
+       chunkCount,
+       lastSyncAt: /* @__PURE__ */ new Date(),
+       status: "READY",
+       updatedAt: /* @__PURE__ */ new Date()
+     }
+   });
+   return doc;
+ }
+
+ export {
+   ingestDocument
+ };
+ //# sourceMappingURL=chunk-FTWUJBAH.js.map
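
As an illustration of the chunker above: a minimal sketch of the objects chunkText returns, using the Chunk shape declared in src/engine/chunker.ts (visible in the source map below). The file name and values are invented for illustration; note that filePath is only copied into chunk metadata on the code path, so a documentation chunk carries just the metadata passed in.

// Illustrative sketch only. The Chunk interface is from src/engine/chunker.ts;
// the sample values are hypothetical, not output shipped with the package.
interface Chunk {
  content: string;
  metadata: Record<string, any>;
  chunkType: "code" | "documentation" | "api_spec" | "schema" | "config" | "text" | "comment";
  chunkIndex: number;
}

// For a hypothetical "docs/intro.md", detectChunkType returns "documentation",
// so the paragraph-splitting chunkBySize path runs (chunkSize 1000, overlap 200):
const example: Chunk = {
  content: "First paragraph.\n\nSecond paragraph.",
  chunkType: "documentation",
  chunkIndex: 0,
  metadata: { title: "Intro" },
};
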
package/dist/chunk-FTWUJBAH.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/engine/chunker.ts","../../src/engine/extractor.ts","../../src/engine/ingest.ts"],"sourcesContent":["export interface Chunk {\n content: string;\n metadata: Record<string, any>;\n chunkType: \"code\" | \"documentation\" | \"api_spec\" | \"schema\" | \"config\" | \"text\" | \"comment\";\n chunkIndex: number;\n}\n\nconst CODE_EXTENSIONS = new Set([\n \".ts\", \".tsx\", \".js\", \".jsx\", \".py\", \".java\", \".go\", \".rb\",\n \".php\", \".cs\", \".rs\", \".swift\", \".kt\", \".scala\", \".c\", \".cpp\",\n \".h\", \".hpp\", \".sol\", \".vy\",\n]);\n\nconst CONFIG_EXTENSIONS = new Set([\n \".json\", \".yaml\", \".yml\", \".toml\", \".ini\", \".env\", \".xml\",\n]);\n\nexport function detectChunkType(filePath?: string, content?: string): Chunk[\"chunkType\"] {\n if (!filePath) return \"text\";\n\n const ext = \".\" + filePath.split(\".\").pop()?.toLowerCase();\n\n if (CODE_EXTENSIONS.has(ext)) return \"code\";\n if (CONFIG_EXTENSIONS.has(ext)) return \"config\";\n if (filePath.includes(\"schema\") || filePath.includes(\"migration\")) return \"schema\";\n if (filePath.endsWith(\".md\") || filePath.endsWith(\".mdx\") || filePath.endsWith(\".rst\")) return \"documentation\";\n if (filePath.includes(\"openapi\") || filePath.includes(\"swagger\")) return \"api_spec\";\n\n return \"text\";\n}\n\nexport function chunkText(\n content: string,\n opts: {\n chunkSize?: number;\n chunkOverlap?: number;\n filePath?: string;\n metadata?: Record<string, any>;\n } = {}\n): Chunk[] {\n const { chunkSize = 1000, chunkOverlap = 200, filePath, metadata = {} } = opts;\n const chunkType = detectChunkType(filePath, content);\n\n // For code, try to split on function/class boundaries first\n if (chunkType === \"code\") {\n return chunkCode(content, { chunkSize, filePath, metadata });\n }\n\n // For everything else, split by paragraphs/sections then by size\n return chunkBySize(content, { chunkSize, chunkOverlap, chunkType, metadata });\n}\n\nfunction chunkCode(\n content: string,\n opts: { chunkSize: number; filePath?: string; metadata?: Record<string, any> }\n): Chunk[] {\n const { chunkSize, filePath, metadata = {} } = opts;\n const lines = content.split(\"\\n\");\n const chunks: Chunk[] = [];\n\n // Split on common code boundaries\n const boundaries = [\n /^(export\\s+)?(async\\s+)?function\\s+/,\n /^(export\\s+)?(default\\s+)?class\\s+/,\n /^(export\\s+)?const\\s+\\w+\\s*=\\s*(async\\s+)?\\(/,\n /^(export\\s+)?const\\s+\\w+\\s*=\\s*\\{/,\n /^(export\\s+)?interface\\s+/,\n /^(export\\s+)?type\\s+/,\n /^(export\\s+)?enum\\s+/,\n /^def\\s+/, // Python\n /^class\\s+/, // Python/Java\n /^func\\s+/, // Go\n /^pub\\s+(fn|struct|enum|impl)/, // Rust\n ];\n\n let currentChunk: string[] = [];\n let currentStart = 0;\n\n for (let i = 0; i < lines.length; i++) {\n const trimmed = lines[i].trimStart();\n const isBoundary = boundaries.some((b) => b.test(trimmed));\n\n if (isBoundary && currentChunk.length > 0) {\n const chunkContent = currentChunk.join(\"\\n\").trim();\n if (chunkContent.length > 0) {\n chunks.push({\n content: chunkContent,\n chunkType: \"code\",\n chunkIndex: chunks.length,\n metadata: {\n ...metadata,\n filePath,\n startLine: currentStart + 1,\n endLine: i,\n },\n });\n }\n currentChunk = [lines[i]];\n currentStart = i;\n } else {\n currentChunk.push(lines[i]);\n }\n\n // If chunk is too big, flush it\n if (currentChunk.join(\"\\n\").length > chunkSize * 1.5) {\n const chunkContent = currentChunk.join(\"\\n\").trim();\n if (chunkContent.length > 0) {\n chunks.push({\n 
content: chunkContent,\n chunkType: \"code\",\n chunkIndex: chunks.length,\n metadata: {\n ...metadata,\n filePath,\n startLine: currentStart + 1,\n endLine: i + 1,\n },\n });\n }\n currentChunk = [];\n currentStart = i + 1;\n }\n }\n\n // Flush remaining\n if (currentChunk.length > 0) {\n const chunkContent = currentChunk.join(\"\\n\").trim();\n if (chunkContent.length > 0) {\n chunks.push({\n content: chunkContent,\n chunkType: \"code\",\n chunkIndex: chunks.length,\n metadata: {\n ...metadata,\n filePath,\n startLine: currentStart + 1,\n endLine: lines.length,\n },\n });\n }\n }\n\n return chunks;\n}\n\nfunction chunkBySize(\n content: string,\n opts: {\n chunkSize: number;\n chunkOverlap: number;\n chunkType: Chunk[\"chunkType\"];\n metadata?: Record<string, any>;\n }\n): Chunk[] {\n const { chunkSize, chunkOverlap, chunkType, metadata = {} } = opts;\n const chunks: Chunk[] = [];\n\n // Split on double newlines (paragraphs) first\n const paragraphs = content.split(/\\n\\n+/);\n let current = \"\";\n\n for (const para of paragraphs) {\n if ((current + \"\\n\\n\" + para).length > chunkSize && current.length > 0) {\n chunks.push({\n content: current.trim(),\n chunkType,\n chunkIndex: chunks.length,\n metadata,\n });\n // Keep overlap\n const words = current.split(/\\s+/);\n const overlapWords = words.slice(-Math.floor(chunkOverlap / 5));\n current = overlapWords.join(\" \") + \"\\n\\n\" + para;\n } else {\n current = current ? current + \"\\n\\n\" + para : para;\n }\n }\n\n if (current.trim().length > 0) {\n chunks.push({\n content: current.trim(),\n chunkType,\n chunkIndex: chunks.length,\n metadata,\n });\n }\n\n return chunks;\n}\n","import OpenAI from \"openai\";\nimport { prisma } from \"../db/index.js\";\nimport { embedSingle } from \"./embeddings.js\";\n\nconst openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n// ═══════════════════════════════════════════════════════════════\n// ENTITY EXTRACTION (runs during ingestion)\n// ═══════════════════════════════════════════════════════════════\n\ninterface ExtractedEntity {\n name: string;\n type: string; // function, class, module, concept, api_endpoint, config, service, etc.\n description: string;\n}\n\ninterface ExtractedRelation {\n from: string;\n fromType: string;\n to: string;\n toType: string;\n relation: string; // imports, calls, extends, depends_on, etc.\n}\n\nexport async function extractEntities(\n projectId: string,\n content: string,\n chunkType: string,\n metadata: Record<string, any> = {},\n chunkId?: string\n): Promise<{ entities: number; relations: number }> {\n // Skip small chunks\n if (content.length < 100) return { entities: 0, relations: 0 };\n\n const isCode = [\"code\", \"function\", \"class\"].includes(chunkType);\n\n const prompt = isCode\n ? 
`Analyze this code and extract entities and relationships.\n\nEntities: functions, classes, interfaces, types, modules, variables, constants, API endpoints, services.\nRelations: imports, exports, calls, implements, extends, depends_on, references, part_of.\n\nCode:\n\\`\\`\\`\n${content.slice(0, 3000)}\n\\`\\`\\`\n\nRespond with JSON only:\n{\n \"entities\": [{\"name\": \"...\", \"type\": \"function|class|interface|module|constant|api_endpoint|service\", \"description\": \"one line\"}],\n \"relations\": [{\"from\": \"name\", \"fromType\": \"type\", \"to\": \"name\", \"toType\": \"type\", \"relation\": \"imports|calls|extends|implements|depends_on|references|part_of\"}]\n}`\n : `Analyze this text and extract key entities (concepts, people, tools, services, APIs, technologies) and their relationships.\n\nText:\n${content.slice(0, 3000)}\n\nRespond with JSON only:\n{\n \"entities\": [{\"name\": \"...\", \"type\": \"concept|tool|service|api|technology|person|organization\", \"description\": \"one line\"}],\n \"relations\": [{\"from\": \"name\", \"fromType\": \"type\", \"to\": \"name\", \"toType\": \"type\", \"relation\": \"references|depends_on|related_to|part_of|supersedes\"}]\n}`;\n\n try {\n const res = await openai.chat.completions.create({\n model: \"gpt-4.1-nano\",\n messages: [{ role: \"user\", content: prompt }],\n temperature: 0,\n max_tokens: 1000,\n response_format: { type: \"json_object\" },\n });\n\n const text = res.choices[0]?.message?.content?.trim() || \"{}\";\n const parsed = JSON.parse(text);\n\n const extractedEntities: ExtractedEntity[] = parsed.entities || [];\n const extractedRelations: ExtractedRelation[] = parsed.relations || [];\n\n let entityCount = 0;\n let relationCount = 0;\n\n // Upsert entities\n const entityMap = new Map<string, string>(); // name:type -> id\n\n for (const ent of extractedEntities.slice(0, 20)) {\n if (!ent.name || !ent.type) continue;\n\n const embedding = await embedSingle(`${ent.type}: ${ent.name} - ${ent.description || \"\"}`);\n\n const entity = await prisma.entity.upsert({\n where: {\n projectId_name_entityType: {\n projectId,\n name: ent.name,\n entityType: ent.type,\n },\n },\n update: {\n description: ent.description,\n sourceChunkId: chunkId,\n embedding,\n updatedAt: new Date(),\n } as any,\n create: {\n projectId,\n name: ent.name,\n entityType: ent.type,\n description: ent.description,\n metadata: { ...metadata, autoExtracted: true },\n sourceChunkId: chunkId,\n embedding,\n } as any,\n });\n\n entityMap.set(`${ent.name}:${ent.type}`, entity.id);\n entityCount++;\n }\n\n // Upsert relations\n for (const rel of extractedRelations.slice(0, 30)) {\n if (!rel.from || !rel.to || !rel.relation) continue;\n\n const fromId = entityMap.get(`${rel.from}:${rel.fromType}`);\n const toId = entityMap.get(`${rel.to}:${rel.toType}`);\n\n if (!fromId || !toId) continue;\n\n // Validate relation type\n const validRelations = [\n \"imports\", \"exports\", \"calls\", \"implements\", \"extends\",\n \"references\", \"depends_on\", \"related_to\", \"part_of\",\n \"contradicts\", \"supersedes\",\n ];\n if (!validRelations.includes(rel.relation)) continue;\n\n await prisma.entityRelation.upsert({\n where: {\n fromEntityId_toEntityId_relationType: {\n fromEntityId: fromId,\n toEntityId: toId,\n relationType: rel.relation,\n },\n },\n update: {\n metadata: { autoExtracted: true },\n },\n create: {\n projectId,\n fromEntityId: fromId,\n toEntityId: toId,\n relationType: rel.relation,\n metadata: { autoExtracted: true },\n },\n });\n\n relationCount++;\n 
}\n\n return { entities: entityCount, relations: relationCount };\n } catch {\n return { entities: 0, relations: 0 };\n }\n}\n\n// ═══════════════════════════════════════════════════════════════\n// MEMORY EXTRACTION (runs on conversation messages)\n// ═══════════════════════════════════════════════════════════════\n\ninterface ExtractedMemory {\n content: string;\n type: \"factual\" | \"episodic\" | \"semantic\" | \"procedural\";\n importance: number;\n}\n\n/**\n * Analyzes a conversation message (or batch) and extracts facts worth remembering.\n * Call this after adding messages to a conversation.\n */\nexport async function extractMemories(\n projectId: string,\n messages: { role: string; content: string }[],\n opts?: { userId?: string; sessionId?: string; agentId?: string }\n): Promise<{ memoriesCreated: number }> {\n if (messages.length === 0) return { memoriesCreated: 0 };\n\n const conversation = messages\n .map((m) => `[${m.role}]: ${m.content}`)\n .join(\"\\n\\n\");\n\n // Skip very short conversations\n if (conversation.length < 50) return { memoriesCreated: 0 };\n\n const prompt = `Analyze this conversation and extract important facts, preferences, decisions, or knowledge worth remembering for future interactions.\n\nRules:\n- Only extract truly useful information (not greetings, acknowledgments, etc.)\n- Each memory should be a standalone fact\n- Set importance 0-1 (1 = critical preference/decision, 0.3 = minor detail)\n- Type: factual (facts/preferences), episodic (what happened), semantic (general knowledge), procedural (how to do something)\n- If nothing worth remembering, return empty array\n\nConversation:\n${conversation.slice(0, 4000)}\n\nRespond with JSON only:\n{\n \"memories\": [\n {\"content\": \"standalone fact\", \"type\": \"factual|episodic|semantic|procedural\", \"importance\": 0.5}\n ]\n}`;\n\n try {\n const res = await openai.chat.completions.create({\n model: \"gpt-4.1-nano\",\n messages: [{ role: \"user\", content: prompt }],\n temperature: 0,\n max_tokens: 500,\n response_format: { type: \"json_object\" },\n });\n\n const text = res.choices[0]?.message?.content?.trim() || \"{}\";\n const parsed = JSON.parse(text);\n const extracted: ExtractedMemory[] = parsed.memories || [];\n\n let created = 0;\n\n for (const mem of extracted.slice(0, 10)) {\n if (!mem.content || mem.content.length < 10) continue;\n\n const embedding = await embedSingle(mem.content);\n\n const validTypes = [\"factual\", \"episodic\", \"semantic\", \"procedural\"];\n const memType = validTypes.includes(mem.type) ? 
mem.type : \"factual\";\n const importance = Math.max(0, Math.min(1, mem.importance || 0.5));\n\n await prisma.memory.create({\n data: {\n projectId,\n content: mem.content,\n memoryType: memType,\n importance,\n userId: opts?.userId,\n sessionId: opts?.sessionId,\n agentId: opts?.agentId,\n embedding,\n metadata: { autoExtracted: true },\n } as any,\n });\n\n created++;\n }\n\n return { memoriesCreated: created };\n } catch {\n return { memoriesCreated: 0 };\n }\n}\n","import { prisma } from \"../db/index.js\";\nimport { chunkText } from \"./chunker.js\";\nimport { embed } from \"./embeddings.js\";\nimport { extractEntities } from \"./extractor.js\";\nimport { createHash } from \"crypto\";\nimport PQueue from \"p-queue\";\n\nconst queue = new PQueue({ concurrency: 3 });\n\n// Entity extraction is async and non-blocking — failures don't break ingestion\nconst ENABLE_AUTO_EXTRACTION = process.env.DISABLE_AUTO_EXTRACTION !== \"true\";\n\nexport interface IngestDocumentInput {\n sourceId: string;\n projectId: string;\n externalId: string;\n title: string;\n content: string;\n metadata?: Record<string, any>;\n filePath?: string;\n skipEntityExtraction?: boolean;\n}\n\nexport async function ingestDocument(input: IngestDocumentInput) {\n const { sourceId, projectId, externalId, title, content, metadata = {}, filePath } = input;\n\n const contentHash = createHash(\"sha256\").update(content).digest(\"hex\");\n\n // Upsert document\n const doc = await prisma.document.upsert({\n where: {\n sourceId_externalId: {\n sourceId,\n externalId,\n },\n },\n update: {\n title,\n content,\n metadata,\n contentHash,\n updatedAt: new Date(),\n },\n create: {\n sourceId,\n projectId,\n externalId,\n title,\n content,\n metadata,\n contentHash,\n },\n });\n\n // Delete old chunks for this document\n await prisma.chunk.deleteMany({\n where: { documentId: doc.id },\n });\n\n // Chunk the content\n const textChunks = chunkText(content, {\n filePath: filePath || externalId,\n metadata: { ...metadata, title },\n });\n\n if (textChunks.length === 0) return doc;\n\n // Embed in batches of 50\n const batchSize = 50;\n const insertedChunkIds: string[] = [];\n\n for (let i = 0; i < textChunks.length; i += batchSize) {\n const batch = textChunks.slice(i, i + batchSize);\n const embeddings = await embed(batch.map((c) => c.content));\n\n const inserted = await prisma.$transaction(\n batch.map((chunk, j) =>\n prisma.chunk.create({\n data: {\n documentId: doc.id,\n projectId,\n content: chunk.content,\n chunkType: chunk.chunkType,\n chunkIndex: chunk.chunkIndex,\n metadata: chunk.metadata,\n embedding: embeddings[j],\n tokenCount: Math.ceil(chunk.content.length / 4),\n } as any,\n select: { id: true },\n })\n )\n );\n\n insertedChunkIds.push(...inserted.map((c) => c.id));\n }\n\n // Auto entity extraction (fire-and-forget, non-blocking)\n if (ENABLE_AUTO_EXTRACTION && !input.skipEntityExtraction) {\n // Only extract from the first few significant chunks to save tokens\n const chunksToExtract = textChunks\n .filter((c) => c.content.length > 200)\n .slice(0, 5);\n\n for (let i = 0; i < chunksToExtract.length; i++) {\n const chunk = chunksToExtract[i];\n const chunkId = insertedChunkIds[textChunks.indexOf(chunk)];\n\n // Don't await — let it run in background\n extractEntities(projectId, chunk.content, chunk.chunkType, metadata, chunkId).catch(() => {\n // Silently ignore extraction failures\n });\n }\n }\n\n // Update source counts\n const docCount = await prisma.document.count({\n where: { sourceId },\n });\n\n const chunkCount 
= await prisma.chunk.count({\n where: { documentId: doc.id },\n });\n\n await prisma.source.update({\n where: { id: sourceId },\n data: {\n documentCount: docCount,\n chunkCount,\n lastSyncAt: new Date(),\n status: \"READY\",\n updatedAt: new Date(),\n },\n });\n\n return doc;\n}\n\nexport async function ingestDocuments(inputs: IngestDocumentInput[]) {\n const results = await Promise.all(\n inputs.map((input) => queue.add(() => ingestDocument(input)))\n );\n return results;\n}\n"],"mappings":";;;;;;;AAOA,IAAM,kBAAkB,oBAAI,IAAI;AAAA,EAC9B;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAO;AAAA,EAAS;AAAA,EAAO;AAAA,EACrD;AAAA,EAAQ;AAAA,EAAO;AAAA,EAAO;AAAA,EAAU;AAAA,EAAO;AAAA,EAAU;AAAA,EAAM;AAAA,EACvD;AAAA,EAAM;AAAA,EAAQ;AAAA,EAAQ;AACxB,CAAC;AAED,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EAChC;AAAA,EAAS;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAQ;AACrD,CAAC;AAEM,SAAS,gBAAgB,UAAmB,SAAsC;AACvF,MAAI,CAAC,SAAU,QAAO;AAEtB,QAAM,MAAM,MAAM,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY;AAEzD,MAAI,gBAAgB,IAAI,GAAG,EAAG,QAAO;AACrC,MAAI,kBAAkB,IAAI,GAAG,EAAG,QAAO;AACvC,MAAI,SAAS,SAAS,QAAQ,KAAK,SAAS,SAAS,WAAW,EAAG,QAAO;AAC1E,MAAI,SAAS,SAAS,KAAK,KAAK,SAAS,SAAS,MAAM,KAAK,SAAS,SAAS,MAAM,EAAG,QAAO;AAC/F,MAAI,SAAS,SAAS,SAAS,KAAK,SAAS,SAAS,SAAS,EAAG,QAAO;AAEzE,SAAO;AACT;AAEO,SAAS,UACd,SACA,OAKI,CAAC,GACI;AACT,QAAM,EAAE,YAAY,KAAM,eAAe,KAAK,UAAU,WAAW,CAAC,EAAE,IAAI;AAC1E,QAAM,YAAY,gBAAgB,UAAU,OAAO;AAGnD,MAAI,cAAc,QAAQ;AACxB,WAAO,UAAU,SAAS,EAAE,WAAW,UAAU,SAAS,CAAC;AAAA,EAC7D;AAGA,SAAO,YAAY,SAAS,EAAE,WAAW,cAAc,WAAW,SAAS,CAAC;AAC9E;AAEA,SAAS,UACP,SACA,MACS;AACT,QAAM,EAAE,WAAW,UAAU,WAAW,CAAC,EAAE,IAAI;AAC/C,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAkB,CAAC;AAGzB,QAAM,aAAa;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,EACF;AAEA,MAAI,eAAyB,CAAC;AAC9B,MAAI,eAAe;AAEnB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,UAAU,MAAM,CAAC,EAAE,UAAU;AACnC,UAAM,aAAa,WAAW,KAAK,CAAC,MAAM,EAAE,KAAK,OAAO,CAAC;AAEzD,QAAI,cAAc,aAAa,SAAS,GAAG;AACzC,YAAM,eAAe,aAAa,KAAK,IAAI,EAAE,KAAK;AAClD,UAAI,aAAa,SAAS,GAAG;AAC3B,eAAO,KAAK;AAAA,UACV,SAAS;AAAA,UACT,WAAW;AAAA,UACX,YAAY,OAAO;AAAA,UACnB,UAAU;AAAA,YACR,GAAG;AAAA,YACH;AAAA,YACA,WAAW,eAAe;AAAA,YAC1B,SAAS;AAAA,UACX;AAAA,QACF,CAAC;AAAA,MACH;AACA,qBAAe,CAAC,MAAM,CAAC,CAAC;AACxB,qBAAe;AAAA,IACjB,OAAO;AACL,mBAAa,KAAK,MAAM,CAAC,CAAC;AAAA,IAC5B;AAGA,QAAI,aAAa,KAAK,IAAI,EAAE,SAAS,YAAY,KAAK;AACpD,YAAM,eAAe,aAAa,KAAK,IAAI,EAAE,KAAK;AAClD,UAAI,aAAa,SAAS,GAAG;AAC3B,eAAO,KAAK;AAAA,UACV,SAAS;AAAA,UACT,WAAW;AAAA,UACX,YAAY,OAAO;AAAA,UACnB,UAAU;AAAA,YACR,GAAG;AAAA,YACH;AAAA,YACA,WAAW,eAAe;AAAA,YAC1B,SAAS,IAAI;AAAA,UACf;AAAA,QACF,CAAC;AAAA,MACH;AACA,qBAAe,CAAC;AAChB,qBAAe,IAAI;AAAA,IACrB;AAAA,EACF;AAGA,MAAI,aAAa,SAAS,GAAG;AAC3B,UAAM,eAAe,aAAa,KAAK,IAAI,EAAE,KAAK;AAClD,QAAI,aAAa,SAAS,GAAG;AAC3B,aAAO,KAAK;AAAA,QACV,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY,OAAO;AAAA,QACnB,UAAU;AAAA,UACR,GAAG;AAAA,UACH;AAAA,UACA,WAAW,eAAe;AAAA,UAC1B,SAAS,MAAM;AAAA,QACjB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,YACP,SACA,MAMS;AACT,QAAM,EAAE,WAAW,cAAc,WAAW,WAAW,CAAC,EAAE,IAAI;AAC9D,QAAM,SAAkB,CAAC;AAGzB,QAAM,aAAa,QAAQ,MAAM,OAAO;AACxC,MAAI,UAAU;AAEd,aAAW,QAAQ,YAAY;AAC7B,SAAK,UAAU,SAAS,MAAM,SAAS,aAAa,QAAQ,SAAS,GAAG;AACtE,aAAO,KAAK;AAAA,QACV,SAAS,QAAQ,KAAK;AAAA,QACtB;AAAA,QACA,YAAY,OAAO;AAAA,QACnB;AAAA,MACF,CAAC;AAED,YAAM,QAAQ,QAAQ,MAAM,KAAK;AACjC,YAAM,eAAe,MAAM,MAAM,CAAC,KAAK,MAAM,eAAe,CAAC,CAAC;AAC9D,gBAAU,aAAa,KAAK,GAAG,IAAI,SAAS;AAAA,IAC9C,OAAO;AACL,gBAAU,UAAU,UAAU,SAAS,OAAO;AAAA,IAChD;AAAA,EACF;AAEA,MAAI,QAAQ,KAAK,EAAE,SAAS,GAAG;AAC7B,WAAO,KAAK;AAAA,MACV,SAA
S,QAAQ,KAAK;AAAA,MACtB;AAAA,MACA,YAAY,OAAO;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;;;AC5LA,OAAO,YAAY;AAInB,IAAM,SAAS,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAoBhE,eAAsB,gBACpB,WACA,SACA,WACA,WAAgC,CAAC,GACjC,SACkD;AAElD,MAAI,QAAQ,SAAS,IAAK,QAAO,EAAE,UAAU,GAAG,WAAW,EAAE;AAE7D,QAAM,SAAS,CAAC,QAAQ,YAAY,OAAO,EAAE,SAAS,SAAS;AAE/D,QAAM,SAAS,SACX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOJ,QAAQ,MAAM,GAAG,GAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAQlB;AAAA;AAAA;AAAA,EAGJ,QAAQ,MAAM,GAAG,GAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQtB,MAAI;AACF,UAAM,MAAM,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,MAC/C,OAAO;AAAA,MACP,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,MAC5C,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,iBAAiB,EAAE,MAAM,cAAc;AAAA,IACzC,CAAC;AAED,UAAM,OAAO,IAAI,QAAQ,CAAC,GAAG,SAAS,SAAS,KAAK,KAAK;AACzD,UAAM,SAAS,KAAK,MAAM,IAAI;AAE9B,UAAM,oBAAuC,OAAO,YAAY,CAAC;AACjE,UAAM,qBAA0C,OAAO,aAAa,CAAC;AAErE,QAAI,cAAc;AAClB,QAAI,gBAAgB;AAGpB,UAAM,YAAY,oBAAI,IAAoB;AAE1C,eAAW,OAAO,kBAAkB,MAAM,GAAG,EAAE,GAAG;AAChD,UAAI,CAAC,IAAI,QAAQ,CAAC,IAAI,KAAM;AAE5B,YAAM,YAAY,MAAM,YAAY,GAAG,IAAI,IAAI,KAAK,IAAI,IAAI,MAAM,IAAI,eAAe,EAAE,EAAE;AAEzF,YAAM,SAAS,MAAM,OAAO,OAAO,OAAO;AAAA,QACxC,OAAO;AAAA,UACL,2BAA2B;AAAA,YACzB;AAAA,YACA,MAAM,IAAI;AAAA,YACV,YAAY,IAAI;AAAA,UAClB;AAAA,QACF;AAAA,QACA,QAAQ;AAAA,UACN,aAAa,IAAI;AAAA,UACjB,eAAe;AAAA,UACf;AAAA,UACA,WAAW,oBAAI,KAAK;AAAA,QACtB;AAAA,QACA,QAAQ;AAAA,UACN;AAAA,UACA,MAAM,IAAI;AAAA,UACV,YAAY,IAAI;AAAA,UAChB,aAAa,IAAI;AAAA,UACjB,UAAU,EAAE,GAAG,UAAU,eAAe,KAAK;AAAA,UAC7C,eAAe;AAAA,UACf;AAAA,QACF;AAAA,MACF,CAAC;AAED,gBAAU,IAAI,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,OAAO,EAAE;AAClD;AAAA,IACF;AAGA,eAAW,OAAO,mBAAmB,MAAM,GAAG,EAAE,GAAG;AACjD,UAAI,CAAC,IAAI,QAAQ,CAAC,IAAI,MAAM,CAAC,IAAI,SAAU;AAE3C,YAAM,SAAS,UAAU,IAAI,GAAG,IAAI,IAAI,IAAI,IAAI,QAAQ,EAAE;AAC1D,YAAM,OAAO,UAAU,IAAI,GAAG,IAAI,EAAE,IAAI,IAAI,MAAM,EAAE;AAEpD,UAAI,CAAC,UAAU,CAAC,KAAM;AAGtB,YAAM,iBAAiB;AAAA,QACrB;AAAA,QAAW;AAAA,QAAW;AAAA,QAAS;AAAA,QAAc;AAAA,QAC7C;AAAA,QAAc;AAAA,QAAc;AAAA,QAAc;AAAA,QAC1C;AAAA,QAAe;AAAA,MACjB;AACA,UAAI,CAAC,eAAe,SAAS,IAAI,QAAQ,EAAG;AAE5C,YAAM,OAAO,eAAe,OAAO;AAAA,QACjC,OAAO;AAAA,UACL,sCAAsC;AAAA,YACpC,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,cAAc,IAAI;AAAA,UACpB;AAAA,QACF;AAAA,QACA,QAAQ;AAAA,UACN,UAAU,EAAE,eAAe,KAAK;AAAA,QAClC;AAAA,QACA,QAAQ;AAAA,UACN;AAAA,UACA,cAAc;AAAA,UACd,YAAY;AAAA,UACZ,cAAc,IAAI;AAAA,UAClB,UAAU,EAAE,eAAe,KAAK;AAAA,QAClC;AAAA,MACF,CAAC;AAED;AAAA,IACF;AAEA,WAAO,EAAE,UAAU,aAAa,WAAW,cAAc;AAAA,EAC3D,QAAQ;AACN,WAAO,EAAE,UAAU,GAAG,WAAW,EAAE;AAAA,EACrC;AACF;;;AC9JA,SAAS,kBAAkB;AAC3B,OAAO,YAAY;AAEnB,IAAM,QAAQ,IAAI,OAAO,EAAE,aAAa,EAAE,CAAC;AAG3C,IAAM,yBAAyB,QAAQ,IAAI,4BAA4B;AAavE,eAAsB,eAAe,OAA4B;AAC/D,QAAM,EAAE,UAAU,WAAW,YAAY,OAAO,SAAS,WAAW,CAAC,GAAG,SAAS,IAAI;AAErF,QAAM,cAAc,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAGrE,QAAM,MAAM,MAAM,OAAO,SAAS,OAAO;AAAA,IACvC,OAAO;AAAA,MACL,qBAAqB;AAAA,QACnB;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW,oBAAI,KAAK;AAAA,IACtB;AAAA,IACA,QAAQ;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAGD,QAAM,OAAO,MAAM,WAAW;AAAA,IAC5B,OAAO,EAAE,YAAY,IAAI,GAAG;AAAA,EAC9B,CAAC;AAGD,QAAM,aAAa,UAAU,SAAS;AAAA,IACpC,UAAU,YAAY;AAAA,IACtB,UAAU,EAAE,GAAG,UAAU,MAAM;AAAA,EACjC,CAAC;AAED,MAAI,WAAW,WAAW,EAAG,QAAO;AAGpC,QAAM,YAAY;AAClB,QAAM,mBAA6B,CAAC;AAEpC,WAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK,WAAW;AACrD,UAAM,QAAQ,WAAW,MAAM,GAAG,IAAI,SAAS;AAC/C,UAAM,aAAa,MAAM,MAAM,MAAM,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC;AAE1D,UAAM,WAAW,MAAM,OAAO;AAAA,MAC5B,MAAM;AAAA,QAAI,CAAC,OAAO,MAChB,OAAO,MAAM,OAAO;AAAA,UAClB,MAAM;AAAA,YACJ,YAAY,IA
AI;AAAA,YAChB;AAAA,YACA,SAAS,MAAM;AAAA,YACf,WAAW,MAAM;AAAA,YACjB,YAAY,MAAM;AAAA,YAClB,UAAU,MAAM;AAAA,YAChB,WAAW,WAAW,CAAC;AAAA,YACvB,YAAY,KAAK,KAAK,MAAM,QAAQ,SAAS,CAAC;AAAA,UAChD;AAAA,UACA,QAAQ,EAAE,IAAI,KAAK;AAAA,QACrB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,qBAAiB,KAAK,GAAG,SAAS,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AAAA,EACpD;AAGA,MAAI,0BAA0B,CAAC,MAAM,sBAAsB;AAEzD,UAAM,kBAAkB,WACrB,OAAO,CAAC,MAAM,EAAE,QAAQ,SAAS,GAAG,EACpC,MAAM,GAAG,CAAC;AAEb,aAAS,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK;AAC/C,YAAM,QAAQ,gBAAgB,CAAC;AAC/B,YAAM,UAAU,iBAAiB,WAAW,QAAQ,KAAK,CAAC;AAG1D,sBAAgB,WAAW,MAAM,SAAS,MAAM,WAAW,UAAU,OAAO,EAAE,MAAM,MAAM;AAAA,MAE1F,CAAC;AAAA,IACH;AAAA,EACF;AAGA,QAAM,WAAW,MAAM,OAAO,SAAS,MAAM;AAAA,IAC3C,OAAO,EAAE,SAAS;AAAA,EACpB,CAAC;AAED,QAAM,aAAa,MAAM,OAAO,MAAM,MAAM;AAAA,IAC1C,OAAO,EAAE,YAAY,IAAI,GAAG;AAAA,EAC9B,CAAC;AAED,QAAM,OAAO,OAAO,OAAO;AAAA,IACzB,OAAO,EAAE,IAAI,SAAS;AAAA,IACtB,MAAM;AAAA,MACJ,eAAe;AAAA,MACf;AAAA,MACA,YAAY,oBAAI,KAAK;AAAA,MACrB,QAAQ;AAAA,MACR,WAAW,oBAAI,KAAK;AAAA,IACtB;AAAA,EACF,CAAC;AAED,SAAO;AACT;","names":[]}
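
For context, a minimal usage sketch of the ingestion entry point bundled above. ingestDocument upserts the document, deletes its old chunks, re-chunks and embeds in batches of 50, fires background entity extraction on up to five chunks over 200 characters, then refreshes the source's counts. The import path, IDs, and content below are hypothetical placeholders; actually running this requires the package's Prisma schema and database and, unless extraction is skipped, an OPENAI_API_KEY.

// Hypothetical sketch, not part of the published package. The deep dist
// import only names where the export lives in this diff; all IDs and
// content are placeholders.
import { ingestDocument } from "@usewhisper/mcp-server/dist/chunk-FTWUJBAH.js";

async function demo() {
  const doc = await ingestDocument({
    sourceId: "source-id-placeholder",    // hypothetical Source row ID
    projectId: "project-id-placeholder",  // hypothetical Project row ID
    externalId: "docs/intro.md",          // per-source document key
    title: "Intro",
    content: "# Intro\n\nFirst paragraph.\n\nSecond paragraph.",
    filePath: "docs/intro.md",            // ".md" maps to chunkType "documentation"
    skipEntityExtraction: true,           // skip the background OpenAI calls
  });
  console.log("ingested:", doc.id);
}

demo().catch(console.error);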