@stackmemoryai/stackmemory 0.3.8 → 0.3.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core/context/recursive-context-manager.js +582 -0
- package/dist/core/context/recursive-context-manager.js.map +7 -0
- package/dist/core/execution/parallel-executor.js +254 -0
- package/dist/core/execution/parallel-executor.js.map +7 -0
- package/dist/integrations/anthropic/client.js +259 -0
- package/dist/integrations/anthropic/client.js.map +7 -0
- package/dist/integrations/claude-code/subagent-client.js +404 -0
- package/dist/integrations/claude-code/subagent-client.js.map +7 -0
- package/dist/skills/claude-skills.js +97 -0
- package/dist/skills/claude-skills.js.map +2 -2
- package/dist/skills/recursive-agent-orchestrator.js +559 -0
- package/dist/skills/recursive-agent-orchestrator.js.map +7 -0
- package/dist/skills/security-secrets-scanner.js +265 -0
- package/dist/skills/security-secrets-scanner.js.map +7 -0
- package/package.json +1 -1
|
@@ -0,0 +1,582 @@
|
|
|
1
|
+
import { logger } from "../monitoring/logger.js";
|
|
2
|
+
import * as fs from "fs";
|
|
3
|
+
import * as path from "path";
|
|
4
|
+
class RecursiveContextManager {
|
|
5
|
+
// Frame-stack manager; supplies getActiveStack() for recent-frame chunks.
dualStackManager;
// Query-driven retriever; supplies retrieve({ query, limit }) results.
contextRetriever;
// Context cache for sharing between agents.
// Keyed by `${agentType}-${Date.now()}`; stale entries (see
// getRelevantCachedChunks) are skipped rather than evicted.
sharedContextCache = /* @__PURE__ */ new Map();
// Agent-specific configurations: agent type -> { agent, maxTokens,
// priorityWeights, includeTypes, excludeTypes }.
agentConfigs;
|
|
11
|
+
/**
 * @param dualStackManager - frame-stack manager used for recent-frame chunks
 * @param contextRetriever - retriever used for query-based chunk search
 */
constructor(dualStackManager, contextRetriever) {
  this.dualStackManager = dualStackManager;
  this.contextRetriever = contextRetriever;
  // Build the static per-agent tuning table once up front.
  this.agentConfigs = this.initializeAgentConfigs();
}
|
|
16
|
+
/**
|
|
17
|
+
* Initialize agent-specific context configurations
|
|
18
|
+
*/
|
|
19
|
+
initializeAgentConfigs() {
|
|
20
|
+
const configs = /* @__PURE__ */ new Map();
|
|
21
|
+
configs.set("planning", {
|
|
22
|
+
agent: "planning",
|
|
23
|
+
maxTokens: 2e4,
|
|
24
|
+
priorityWeights: {
|
|
25
|
+
recent: 0.3,
|
|
26
|
+
relevant: 0.4,
|
|
27
|
+
dependency: 0.2,
|
|
28
|
+
error: 0.05,
|
|
29
|
+
test: 0.05
|
|
30
|
+
},
|
|
31
|
+
includeTypes: ["frame", "documentation", "config"],
|
|
32
|
+
excludeTypes: []
|
|
33
|
+
});
|
|
34
|
+
configs.set("code", {
|
|
35
|
+
agent: "code",
|
|
36
|
+
maxTokens: 3e4,
|
|
37
|
+
priorityWeights: {
|
|
38
|
+
recent: 0.2,
|
|
39
|
+
relevant: 0.5,
|
|
40
|
+
dependency: 0.2,
|
|
41
|
+
error: 0.05,
|
|
42
|
+
test: 0.05
|
|
43
|
+
},
|
|
44
|
+
includeTypes: ["code", "frame", "test"],
|
|
45
|
+
excludeTypes: ["documentation"]
|
|
46
|
+
});
|
|
47
|
+
configs.set("testing", {
|
|
48
|
+
agent: "testing",
|
|
49
|
+
maxTokens: 25e3,
|
|
50
|
+
priorityWeights: {
|
|
51
|
+
recent: 0.1,
|
|
52
|
+
relevant: 0.3,
|
|
53
|
+
dependency: 0.1,
|
|
54
|
+
error: 0.1,
|
|
55
|
+
test: 0.4
|
|
56
|
+
},
|
|
57
|
+
includeTypes: ["code", "test", "frame"],
|
|
58
|
+
excludeTypes: ["documentation", "config"]
|
|
59
|
+
});
|
|
60
|
+
configs.set("linting", {
|
|
61
|
+
agent: "linting",
|
|
62
|
+
maxTokens: 15e3,
|
|
63
|
+
priorityWeights: {
|
|
64
|
+
recent: 0.2,
|
|
65
|
+
relevant: 0.4,
|
|
66
|
+
dependency: 0.1,
|
|
67
|
+
error: 0.2,
|
|
68
|
+
test: 0.1
|
|
69
|
+
},
|
|
70
|
+
includeTypes: ["code", "config"],
|
|
71
|
+
excludeTypes: ["documentation", "test"]
|
|
72
|
+
});
|
|
73
|
+
configs.set("review", {
|
|
74
|
+
agent: "review",
|
|
75
|
+
maxTokens: 25e3,
|
|
76
|
+
priorityWeights: {
|
|
77
|
+
recent: 0.3,
|
|
78
|
+
relevant: 0.3,
|
|
79
|
+
dependency: 0.1,
|
|
80
|
+
error: 0.2,
|
|
81
|
+
test: 0.1
|
|
82
|
+
},
|
|
83
|
+
includeTypes: ["code", "test", "frame", "documentation"],
|
|
84
|
+
excludeTypes: []
|
|
85
|
+
});
|
|
86
|
+
configs.set("context", {
|
|
87
|
+
agent: "context",
|
|
88
|
+
maxTokens: 1e4,
|
|
89
|
+
priorityWeights: {
|
|
90
|
+
recent: 0.1,
|
|
91
|
+
relevant: 0.6,
|
|
92
|
+
dependency: 0.2,
|
|
93
|
+
error: 0.05,
|
|
94
|
+
test: 0.05
|
|
95
|
+
},
|
|
96
|
+
includeTypes: ["frame", "documentation"],
|
|
97
|
+
excludeTypes: []
|
|
98
|
+
});
|
|
99
|
+
configs.set("improve", {
|
|
100
|
+
agent: "improve",
|
|
101
|
+
maxTokens: 3e4,
|
|
102
|
+
priorityWeights: {
|
|
103
|
+
recent: 0.3,
|
|
104
|
+
relevant: 0.4,
|
|
105
|
+
dependency: 0.1,
|
|
106
|
+
error: 0.15,
|
|
107
|
+
test: 0.05
|
|
108
|
+
},
|
|
109
|
+
includeTypes: ["code", "test", "frame"],
|
|
110
|
+
excludeTypes: ["documentation"]
|
|
111
|
+
});
|
|
112
|
+
configs.set("publish", {
|
|
113
|
+
agent: "publish",
|
|
114
|
+
maxTokens: 15e3,
|
|
115
|
+
priorityWeights: {
|
|
116
|
+
recent: 0.4,
|
|
117
|
+
relevant: 0.2,
|
|
118
|
+
dependency: 0.1,
|
|
119
|
+
error: 0.2,
|
|
120
|
+
test: 0.1
|
|
121
|
+
},
|
|
122
|
+
includeTypes: ["config", "frame"],
|
|
123
|
+
excludeTypes: ["code", "test"]
|
|
124
|
+
});
|
|
125
|
+
return configs;
|
|
126
|
+
}
|
|
127
|
+
/**
 * Prepare context for a specific agent type.
 *
 * Pipeline: collect candidate chunks -> score and sort them with the
 * agent's priority weights -> greedily fit them into the token budget ->
 * attach the survivors to a shallow copy of baseContext as `chunks`.
 *
 * @param agentType - key into agentConfigs (e.g. "planning", "code")
 * @param baseContext - caller-supplied context; copied, never mutated
 * @param maxTokens - token budget for the selected chunks
 * @returns baseContext plus a `chunks` array of { type, content, metadata }
 * @throws Error when agentType has no registered configuration
 */
async prepareAgentContext(agentType, baseContext, maxTokens) {
  const config = this.agentConfigs.get(agentType);
  if (!config) {
    throw new Error(`Unknown agent type: ${agentType}`);
  }
  logger.debug(`Preparing context for ${agentType} agent`, { maxTokens });
  const chunks = await this.collectRelevantChunks(
    baseContext,
    config,
    maxTokens
  );
  // Highest-priority chunks first so the budget fitter keeps the best ones.
  const sortedChunks = this.prioritizeChunks(chunks, config.priorityWeights);
  const selectedChunks = this.fitChunksToTokenBudget(
    sortedChunks,
    maxTokens
  );
  const agentContext = {
    ...baseContext,
    chunks: selectedChunks.map((c) => ({
      type: c.type,
      content: c.content,
      metadata: c.metadata
    }))
  };
  // Cache the selection (keyed by agent + timestamp) so later agents can
  // reuse it via getRelevantCachedChunks().
  this.sharedContextCache.set(`${agentType}-${Date.now()}`, selectedChunks);
  logger.debug(`Prepared context for ${agentType}`, {
    chunksSelected: selectedChunks.length,
    totalSize: selectedChunks.reduce((sum, c) => sum + c.metadata.size, 0)
  });
  return agentContext;
}
|
|
161
|
+
/**
|
|
162
|
+
* Chunk large codebase for processing
|
|
163
|
+
*/
|
|
164
|
+
async chunkCodebase(rootPath, strategy) {
|
|
165
|
+
const chunks = [];
|
|
166
|
+
logger.info("Chunking codebase", { rootPath, strategy: strategy.type });
|
|
167
|
+
switch (strategy.type) {
|
|
168
|
+
case "file":
|
|
169
|
+
chunks.push(...await this.chunkByFile(rootPath, strategy));
|
|
170
|
+
break;
|
|
171
|
+
case "semantic":
|
|
172
|
+
chunks.push(...await this.chunkBySemantic(rootPath, strategy));
|
|
173
|
+
break;
|
|
174
|
+
case "size":
|
|
175
|
+
chunks.push(...await this.chunkBySize(rootPath, strategy));
|
|
176
|
+
break;
|
|
177
|
+
default:
|
|
178
|
+
throw new Error(`Unknown chunking strategy: ${strategy.type}`);
|
|
179
|
+
}
|
|
180
|
+
logger.info("Codebase chunked", {
|
|
181
|
+
totalChunks: chunks.length,
|
|
182
|
+
totalSize: chunks.reduce((sum, c) => sum + c.metadata.size, 0)
|
|
183
|
+
});
|
|
184
|
+
return chunks;
|
|
185
|
+
}
|
|
186
|
+
/**
|
|
187
|
+
* Chunk by file boundaries
|
|
188
|
+
*/
|
|
189
|
+
async chunkByFile(rootPath, strategy) {
|
|
190
|
+
const chunks = [];
|
|
191
|
+
const files = await this.walkDirectory(rootPath);
|
|
192
|
+
for (const file of files) {
|
|
193
|
+
const content = await fs.promises.readFile(file, "utf-8");
|
|
194
|
+
if (content.length > strategy.maxChunkSize) {
|
|
195
|
+
const fileChunks = this.splitLargeFile(file, content, strategy);
|
|
196
|
+
chunks.push(...fileChunks);
|
|
197
|
+
} else {
|
|
198
|
+
chunks.push({
|
|
199
|
+
id: `file-${path.basename(file)}`,
|
|
200
|
+
type: "code",
|
|
201
|
+
content,
|
|
202
|
+
metadata: {
|
|
203
|
+
filePath: file,
|
|
204
|
+
language: this.detectLanguage(file),
|
|
205
|
+
size: content.length,
|
|
206
|
+
score: 0.5
|
|
207
|
+
},
|
|
208
|
+
boundaries: {
|
|
209
|
+
start: 0,
|
|
210
|
+
end: content.length
|
|
211
|
+
}
|
|
212
|
+
});
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
return chunks;
|
|
216
|
+
}
|
|
217
|
+
/**
|
|
218
|
+
* Chunk by semantic boundaries (classes, functions)
|
|
219
|
+
*/
|
|
220
|
+
async chunkBySemantic(rootPath, strategy) {
|
|
221
|
+
const chunks = [];
|
|
222
|
+
const files = await this.walkDirectory(rootPath);
|
|
223
|
+
for (const file of files) {
|
|
224
|
+
const content = await fs.promises.readFile(file, "utf-8");
|
|
225
|
+
const language = this.detectLanguage(file);
|
|
226
|
+
const semanticUnits = this.extractSemanticUnits(content, language);
|
|
227
|
+
for (const unit of semanticUnits) {
|
|
228
|
+
if (unit.content.length <= strategy.maxChunkSize) {
|
|
229
|
+
chunks.push({
|
|
230
|
+
id: `semantic-${file}-${unit.name}`,
|
|
231
|
+
type: "code",
|
|
232
|
+
content: unit.content,
|
|
233
|
+
metadata: {
|
|
234
|
+
filePath: file,
|
|
235
|
+
language,
|
|
236
|
+
size: unit.content.length,
|
|
237
|
+
score: unit.importance
|
|
238
|
+
},
|
|
239
|
+
boundaries: {
|
|
240
|
+
start: unit.start,
|
|
241
|
+
end: unit.end
|
|
242
|
+
}
|
|
243
|
+
});
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
return chunks;
|
|
248
|
+
}
|
|
249
|
+
/**
 * Chunk by fixed size with overlap.
 *
 * Accumulates whole lines until the running chunk reaches maxChunkSize
 * characters, emits it, then restarts the window a few lines back so
 * consecutive chunks share trailing context.
 *
 * @param rootPath - directory to walk
 * @param strategy - { maxChunkSize, overlapSize } in characters
 * @returns chunks whose boundaries are LINE indices (unlike chunkByFile,
 *          where boundaries are character offsets)
 */
async chunkBySize(rootPath, strategy) {
  const chunks = [];
  const files = await this.walkDirectory(rootPath);
  for (const file of files) {
    const content = await fs.promises.readFile(file, "utf-8");
    const lines = content.split("\n");
    let currentChunk = "";
    let startLine = 0;
    for (let i = 0; i < lines.length; i++) {
      currentChunk += lines[i] + "\n";
      if (currentChunk.length >= strategy.maxChunkSize) {
        chunks.push({
          id: `size-${file}-${startLine}`,
          type: "code",
          content: currentChunk,
          metadata: {
            filePath: file,
            language: this.detectLanguage(file),
            size: currentChunk.length,
            score: 0.5
          },
          boundaries: {
            start: startLine,
            end: i,
            overlap: strategy.overlapSize
          }
        });
        // Restart the window overlapSize/50 lines back — assumes roughly
        // 50 characters per line (heuristic, not measured; TODO confirm).
        const overlapLines = Math.floor(strategy.overlapSize / 50);
        startLine = Math.max(0, i - overlapLines);
        currentChunk = lines.slice(startLine, i + 1).join("\n");
      }
    }
    // Flush whatever remains after the loop; whitespace-only tails skipped.
    if (currentChunk.trim()) {
      chunks.push({
        id: `size-${file}-${startLine}`,
        type: "code",
        content: currentChunk,
        metadata: {
          filePath: file,
          language: this.detectLanguage(file),
          size: currentChunk.length,
          score: 0.5
        },
        boundaries: {
          start: startLine,
          end: lines.length - 1
        }
      });
    }
  }
  return chunks;
}
|
|
304
|
+
/**
 * Collect relevant chunks for agent context.
 *
 * Gathers candidates from up to four sources, gated by the agent config:
 * recent frames, code files and test files named in baseContext, a
 * retriever search when baseContext.query is set, and the shared cache.
 * NOTE(review): maxTokens is accepted but never used here — the budget is
 * enforced later by fitChunksToTokenBudget(); confirm this is intentional.
 *
 * @param baseContext - may carry `files`, `testFiles`, and `query`
 * @param config - agent configuration (includeTypes, agent)
 * @param maxTokens - unused in this method (see note above)
 * @returns unsorted candidate chunks
 */
async collectRelevantChunks(baseContext, config, maxTokens) {
  const chunks = [];
  if (config.includeTypes.includes("frame")) {
    const recentFrames = await this.getRecentFrameChunks(10);
    chunks.push(...recentFrames);
  }
  if (config.includeTypes.includes("code") && baseContext.files) {
    const codeChunks = await this.getCodeChunks(baseContext.files);
    chunks.push(...codeChunks);
  }
  if (config.includeTypes.includes("test") && baseContext.testFiles) {
    const testChunks = await this.getTestChunks(baseContext.testFiles);
    chunks.push(...testChunks);
  }
  if (baseContext.query) {
    const searchResults = await this.contextRetriever.retrieve({
      query: baseContext.query,
      limit: 20
    });
    for (const result of searchResults) {
      chunks.push({
        id: `search-${result.frameId}`,
        type: "frame",
        content: result.content,
        metadata: {
          frameId: result.frameId,
          size: result.content.length,
          score: result.score,
          timestamp: new Date(result.timestamp)
        },
        boundaries: {}
      });
    }
  }
  // Fold in anything still fresh in the shared cross-agent cache.
  const cachedChunks = this.getRelevantCachedChunks(config.agent);
  chunks.push(...cachedChunks);
  return chunks;
}
|
|
345
|
+
/**
|
|
346
|
+
* Prioritize chunks based on agent weights
|
|
347
|
+
*/
|
|
348
|
+
prioritizeChunks(chunks, weights) {
|
|
349
|
+
return chunks.map((chunk) => {
|
|
350
|
+
let priority = 0;
|
|
351
|
+
if (chunk.metadata.timestamp) {
|
|
352
|
+
const age = Date.now() - chunk.metadata.timestamp.getTime();
|
|
353
|
+
const recentScore = Math.max(0, 1 - age / (24 * 60 * 60 * 1e3));
|
|
354
|
+
priority += recentScore * weights.recent;
|
|
355
|
+
}
|
|
356
|
+
priority += (chunk.metadata.score || 0.5) * weights.relevant;
|
|
357
|
+
if (chunk.type === "test") {
|
|
358
|
+
priority += weights.test;
|
|
359
|
+
}
|
|
360
|
+
if (chunk.metadata.filePath?.includes("error")) {
|
|
361
|
+
priority += weights.error;
|
|
362
|
+
}
|
|
363
|
+
return { ...chunk, priority };
|
|
364
|
+
}).sort((a, b) => b.priority - a.priority);
|
|
365
|
+
}
|
|
366
|
+
/**
|
|
367
|
+
* Fit chunks within token budget
|
|
368
|
+
*/
|
|
369
|
+
fitChunksToTokenBudget(chunks, maxTokens) {
|
|
370
|
+
const selected = [];
|
|
371
|
+
let totalTokens = 0;
|
|
372
|
+
const estimateTokens = (text) => Math.ceil(text.length / 4);
|
|
373
|
+
for (const chunk of chunks) {
|
|
374
|
+
const chunkTokens = estimateTokens(chunk.content);
|
|
375
|
+
if (totalTokens + chunkTokens <= maxTokens) {
|
|
376
|
+
selected.push(chunk);
|
|
377
|
+
totalTokens += chunkTokens;
|
|
378
|
+
} else if (selected.length === 0) {
|
|
379
|
+
const truncatedContent = chunk.content.slice(0, maxTokens * 4);
|
|
380
|
+
selected.push({
|
|
381
|
+
...chunk,
|
|
382
|
+
content: truncatedContent,
|
|
383
|
+
metadata: {
|
|
384
|
+
...chunk.metadata,
|
|
385
|
+
size: truncatedContent.length
|
|
386
|
+
}
|
|
387
|
+
});
|
|
388
|
+
break;
|
|
389
|
+
} else {
|
|
390
|
+
break;
|
|
391
|
+
}
|
|
392
|
+
}
|
|
393
|
+
return selected;
|
|
394
|
+
}
|
|
395
|
+
/**
|
|
396
|
+
* Helper methods
|
|
397
|
+
*/
|
|
398
|
+
async walkDirectory(dir) {
|
|
399
|
+
const files = [];
|
|
400
|
+
const entries = await fs.promises.readdir(dir, { withFileTypes: true });
|
|
401
|
+
for (const entry of entries) {
|
|
402
|
+
const fullPath = path.join(dir, entry.name);
|
|
403
|
+
if (entry.isDirectory()) {
|
|
404
|
+
if (!["node_modules", ".git", "dist", "build"].includes(entry.name)) {
|
|
405
|
+
files.push(...await this.walkDirectory(fullPath));
|
|
406
|
+
}
|
|
407
|
+
} else if (entry.isFile()) {
|
|
408
|
+
if (/\.(ts|tsx|js|jsx|py|java|go|rs|cpp|c|h)$/.test(entry.name)) {
|
|
409
|
+
files.push(fullPath);
|
|
410
|
+
}
|
|
411
|
+
}
|
|
412
|
+
}
|
|
413
|
+
return files;
|
|
414
|
+
}
|
|
415
|
+
detectLanguage(filePath) {
|
|
416
|
+
const ext = path.extname(filePath);
|
|
417
|
+
const langMap = {
|
|
418
|
+
".ts": "typescript",
|
|
419
|
+
".tsx": "typescript",
|
|
420
|
+
".js": "javascript",
|
|
421
|
+
".jsx": "javascript",
|
|
422
|
+
".py": "python",
|
|
423
|
+
".java": "java",
|
|
424
|
+
".go": "go",
|
|
425
|
+
".rs": "rust",
|
|
426
|
+
".cpp": "cpp",
|
|
427
|
+
".c": "c",
|
|
428
|
+
".h": "c"
|
|
429
|
+
};
|
|
430
|
+
return langMap[ext] || "unknown";
|
|
431
|
+
}
|
|
432
|
+
/**
 * Split an oversized file into fixed-size line-based parts.
 *
 * NOTE(review): linesPerChunk assumes ~50 characters per line, so actual
 * chunk sizes can exceed maxChunkSize for long lines — confirm acceptable.
 * NOTE(review): strategy.overlapSize is recorded in boundaries but no
 * content overlap is actually applied between parts here.
 *
 * @param filePath - origin file (used for ids/metadata)
 * @param content - full file text
 * @param strategy - { maxChunkSize, overlapSize }
 * @returns consecutive, non-overlapping chunks with line-index boundaries
 */
splitLargeFile(filePath, content, strategy) {
  const chunks = [];
  const lines = content.split("\n");
  // Estimated lines per chunk at ~50 chars/line.
  const linesPerChunk = Math.ceil(strategy.maxChunkSize / 50);
  for (let i = 0; i < lines.length; i += linesPerChunk) {
    const chunkLines = lines.slice(i, i + linesPerChunk);
    const chunkContent = chunkLines.join("\n");
    chunks.push({
      id: `file-${path.basename(filePath)}-part-${i}`,
      type: "code",
      content: chunkContent,
      metadata: {
        filePath,
        language: this.detectLanguage(filePath),
        size: chunkContent.length,
        score: 0.5
      },
      boundaries: {
        start: i,
        end: Math.min(i + linesPerChunk, lines.length),
        overlap: strategy.overlapSize
      }
    });
  }
  return chunks;
}
|
|
458
|
+
/**
 * Extract class and function units from source text via regex heuristics.
 *
 * Only TypeScript/JavaScript are handled; other languages return [].
 * NOTE(review): both patterns use `[^}]+` and therefore stop at the FIRST
 * closing brace — bodies containing nested braces are truncated. This is a
 * known limitation of the regex approach, not a parser.
 *
 * @param content - source text
 * @param language - output of detectLanguage()
 * @returns units of { name, content, start, end, importance }
 *          (classes score 0.8, functions 0.6)
 */
extractSemanticUnits(content, language) {
  const units = [];
  if (language === "typescript" || language === "javascript") {
    // Classes first (higher importance).
    const classRegex = /class\s+(\w+)[^{]*\{[^}]+\}/g;
    let match;
    while ((match = classRegex.exec(content)) !== null) {
      units.push({
        name: match[1],
        content: match[0],
        start: match.index,
        end: match.index + match[0].length,
        importance: 0.8
      });
    }
    // Function declarations and const/let arrow or function expressions.
    const funcRegex = /(?:function|const|let)\s+(\w+)\s*=?\s*(?:\([^)]*\)|\w+)\s*(?:=>|{)[^}]+}/g;
    while ((match = funcRegex.exec(content)) !== null) {
      units.push({
        name: match[1],
        content: match[0],
        start: match.index,
        end: match.index + match[0].length,
        importance: 0.6
      });
    }
  }
  return units;
}
|
|
485
|
+
/**
 * Build frame chunks from the most recent frames on the active stack.
 *
 * @param limit - number of trailing frames to keep
 * @returns chunks whose content is the pretty-printed frame JSON
 */
async getRecentFrameChunks(limit) {
  const activeStack = this.dualStackManager.getActiveStack();
  const frames = await activeStack.getAllFrames();
  // slice(-limit) keeps the newest `limit` frames in original order.
  return frames.slice(-limit).map((frame) => ({
    id: `frame-${frame.frameId}`,
    type: "frame",
    content: JSON.stringify(frame, null, 2),
    metadata: {
      frameId: frame.frameId,
      // Size is measured on the compact form, not the pretty content above.
      size: JSON.stringify(frame).length,
      score: 0.7,
      timestamp: new Date(frame.timestamp)
    },
    boundaries: {}
  }));
}
|
|
501
|
+
async getCodeChunks(files) {
|
|
502
|
+
const chunks = [];
|
|
503
|
+
for (const file of files) {
|
|
504
|
+
if (fs.existsSync(file)) {
|
|
505
|
+
const content = await fs.promises.readFile(file, "utf-8");
|
|
506
|
+
chunks.push({
|
|
507
|
+
id: `code-${path.basename(file)}`,
|
|
508
|
+
type: "code",
|
|
509
|
+
content,
|
|
510
|
+
metadata: {
|
|
511
|
+
filePath: file,
|
|
512
|
+
language: this.detectLanguage(file),
|
|
513
|
+
size: content.length,
|
|
514
|
+
score: 0.8
|
|
515
|
+
},
|
|
516
|
+
boundaries: {}
|
|
517
|
+
});
|
|
518
|
+
}
|
|
519
|
+
}
|
|
520
|
+
return chunks;
|
|
521
|
+
}
|
|
522
|
+
async getTestChunks(testFiles) {
|
|
523
|
+
const chunks = [];
|
|
524
|
+
for (const file of testFiles) {
|
|
525
|
+
if (fs.existsSync(file)) {
|
|
526
|
+
const content = await fs.promises.readFile(file, "utf-8");
|
|
527
|
+
chunks.push({
|
|
528
|
+
id: `test-${path.basename(file)}`,
|
|
529
|
+
type: "test",
|
|
530
|
+
content,
|
|
531
|
+
metadata: {
|
|
532
|
+
filePath: file,
|
|
533
|
+
language: this.detectLanguage(file),
|
|
534
|
+
size: content.length,
|
|
535
|
+
score: 0.7
|
|
536
|
+
},
|
|
537
|
+
boundaries: {}
|
|
538
|
+
});
|
|
539
|
+
}
|
|
540
|
+
}
|
|
541
|
+
return chunks;
|
|
542
|
+
}
|
|
543
|
+
getRelevantCachedChunks(agentType) {
|
|
544
|
+
const relevantChunks = [];
|
|
545
|
+
for (const [key, chunks] of this.sharedContextCache.entries()) {
|
|
546
|
+
const timestamp = parseInt(key.split("-").pop() || "0");
|
|
547
|
+
if (Date.now() - timestamp > 5 * 60 * 1e3) {
|
|
548
|
+
continue;
|
|
549
|
+
}
|
|
550
|
+
if (agentType === "review" || agentType === "improve") {
|
|
551
|
+
relevantChunks.push(...chunks.filter((c) => c.type === "code"));
|
|
552
|
+
}
|
|
553
|
+
}
|
|
554
|
+
return relevantChunks;
|
|
555
|
+
}
|
|
556
|
+
/**
 * Clear context cache.
 *
 * Drops every cached chunk list shared between agents and logs the action.
 */
clearCache() {
  this.sharedContextCache.clear();
  logger.debug("Context cache cleared");
}
|
|
563
|
+
/**
|
|
564
|
+
* Get cache statistics
|
|
565
|
+
*/
|
|
566
|
+
getCacheStats() {
|
|
567
|
+
const stats = {
|
|
568
|
+
cacheSize: this.sharedContextCache.size,
|
|
569
|
+
totalChunks: 0,
|
|
570
|
+
totalBytes: 0
|
|
571
|
+
};
|
|
572
|
+
for (const chunks of this.sharedContextCache.values()) {
|
|
573
|
+
stats.totalChunks += chunks.length;
|
|
574
|
+
stats.totalBytes += chunks.reduce((sum, c) => sum + c.metadata.size, 0);
|
|
575
|
+
}
|
|
576
|
+
return stats;
|
|
577
|
+
}
|
|
578
|
+
}
|
|
579
|
+
export {
|
|
580
|
+
RecursiveContextManager
|
|
581
|
+
};
|
|
582
|
+
//# sourceMappingURL=recursive-context-manager.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../../src/core/context/recursive-context-manager.ts"],
|
|
4
|
+
"sourcesContent": ["/**\n * Recursive Context Manager for RLM\n * \n * Handles context chunking, decomposition, and distribution\n * for recursive agent execution\n */\n\nimport { DualStackManager } from './dual-stack-manager.js';\nimport { ContextRetriever } from '../retrieval/context-retriever.js';\nimport { logger } from '../monitoring/logger.js';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { SubagentType } from '../../skills/recursive-agent-orchestrator.js';\n\nexport interface ContextChunk {\n id: string;\n type: 'code' | 'frame' | 'documentation' | 'test' | 'config';\n content: string;\n metadata: {\n filePath?: string;\n frameId?: string;\n language?: string;\n size: number;\n score: number;\n timestamp?: Date;\n };\n boundaries: {\n start?: number;\n end?: number;\n overlap?: number;\n };\n}\n\nexport interface ChunkingStrategy {\n type: 'file' | 'semantic' | 'size' | 'time';\n maxChunkSize: number;\n overlapSize: number;\n priorityThreshold: number;\n}\n\nexport interface AgentContextConfig {\n agent: SubagentType;\n maxTokens: number;\n priorityWeights: {\n recent: number;\n relevant: number;\n dependency: number;\n error: number;\n test: number;\n };\n includeTypes: string[];\n excludeTypes: string[];\n}\n\n/**\n * Manages context for recursive agent execution\n */\nexport class RecursiveContextManager {\n private dualStackManager: DualStackManager;\n private contextRetriever: ContextRetriever;\n \n // Context cache for sharing between agents\n private sharedContextCache: Map<string, ContextChunk[]> = new Map();\n \n // Agent-specific configurations\n private agentConfigs: Map<SubagentType, AgentContextConfig>;\n \n constructor(\n dualStackManager: DualStackManager,\n contextRetriever: ContextRetriever\n ) {\n this.dualStackManager = dualStackManager;\n this.contextRetriever = contextRetriever;\n this.agentConfigs = this.initializeAgentConfigs();\n }\n \n /**\n * Initialize agent-specific context configurations\n */\n private 
initializeAgentConfigs(): Map<SubagentType, AgentContextConfig> {\n const configs = new Map<SubagentType, AgentContextConfig>();\n \n // Planning agent needs broad context\n configs.set('planning', {\n agent: 'planning',\n maxTokens: 20000,\n priorityWeights: {\n recent: 0.3,\n relevant: 0.4,\n dependency: 0.2,\n error: 0.05,\n test: 0.05,\n },\n includeTypes: ['frame', 'documentation', 'config'],\n excludeTypes: [],\n });\n \n // Code agent needs implementation context\n configs.set('code', {\n agent: 'code',\n maxTokens: 30000,\n priorityWeights: {\n recent: 0.2,\n relevant: 0.5,\n dependency: 0.2,\n error: 0.05,\n test: 0.05,\n },\n includeTypes: ['code', 'frame', 'test'],\n excludeTypes: ['documentation'],\n });\n \n // Testing agent needs code and existing tests\n configs.set('testing', {\n agent: 'testing',\n maxTokens: 25000,\n priorityWeights: {\n recent: 0.1,\n relevant: 0.3,\n dependency: 0.1,\n error: 0.1,\n test: 0.4,\n },\n includeTypes: ['code', 'test', 'frame'],\n excludeTypes: ['documentation', 'config'],\n });\n \n // Linting agent needs code and config\n configs.set('linting', {\n agent: 'linting',\n maxTokens: 15000,\n priorityWeights: {\n recent: 0.2,\n relevant: 0.4,\n dependency: 0.1,\n error: 0.2,\n test: 0.1,\n },\n includeTypes: ['code', 'config'],\n excludeTypes: ['documentation', 'test'],\n });\n \n // Review agent needs comprehensive context\n configs.set('review', {\n agent: 'review',\n maxTokens: 25000,\n priorityWeights: {\n recent: 0.3,\n relevant: 0.3,\n dependency: 0.1,\n error: 0.2,\n test: 0.1,\n },\n includeTypes: ['code', 'test', 'frame', 'documentation'],\n excludeTypes: [],\n });\n \n // Context agent for searching\n configs.set('context', {\n agent: 'context',\n maxTokens: 10000,\n priorityWeights: {\n recent: 0.1,\n relevant: 0.6,\n dependency: 0.2,\n error: 0.05,\n test: 0.05,\n },\n includeTypes: ['frame', 'documentation'],\n excludeTypes: [],\n });\n \n // Improvement agent needs review context\n configs.set('improve', 
{\n agent: 'improve',\n maxTokens: 30000,\n priorityWeights: {\n recent: 0.3,\n relevant: 0.4,\n dependency: 0.1,\n error: 0.15,\n test: 0.05,\n },\n includeTypes: ['code', 'test', 'frame'],\n excludeTypes: ['documentation'],\n });\n \n // Publish agent needs build/config context\n configs.set('publish', {\n agent: 'publish',\n maxTokens: 15000,\n priorityWeights: {\n recent: 0.4,\n relevant: 0.2,\n dependency: 0.1,\n error: 0.2,\n test: 0.1,\n },\n includeTypes: ['config', 'frame'],\n excludeTypes: ['code', 'test'],\n });\n \n return configs;\n }\n \n /**\n * Prepare context for a specific agent type\n */\n async prepareAgentContext(\n agentType: SubagentType,\n baseContext: Record<string, any>,\n maxTokens: number\n ): Promise<Record<string, any>> {\n const config = this.agentConfigs.get(agentType);\n if (!config) {\n throw new Error(`Unknown agent type: ${agentType}`);\n }\n \n logger.debug(`Preparing context for ${agentType} agent`, { maxTokens });\n \n // Collect relevant chunks\n const chunks = await this.collectRelevantChunks(\n baseContext,\n config,\n maxTokens\n );\n \n // Sort by priority\n const sortedChunks = this.prioritizeChunks(chunks, config.priorityWeights);\n \n // Fit within token budget\n const selectedChunks = this.fitChunksToTokenBudget(\n sortedChunks,\n maxTokens\n );\n \n // Build agent context\n const agentContext: Record<string, any> = {\n ...baseContext,\n chunks: selectedChunks.map(c => ({\n type: c.type,\n content: c.content,\n metadata: c.metadata,\n })),\n };\n \n // Cache for potential reuse\n this.sharedContextCache.set(`${agentType}-${Date.now()}`, selectedChunks);\n \n logger.debug(`Prepared context for ${agentType}`, {\n chunksSelected: selectedChunks.length,\n totalSize: selectedChunks.reduce((sum, c) => sum + c.metadata.size, 0),\n });\n \n return agentContext;\n }\n \n /**\n * Chunk large codebase for processing\n */\n async chunkCodebase(\n rootPath: string,\n strategy: ChunkingStrategy\n ): Promise<ContextChunk[]> {\n 
const chunks: ContextChunk[] = [];\n \n logger.info('Chunking codebase', { rootPath, strategy: strategy.type });\n \n switch (strategy.type) {\n case 'file':\n chunks.push(...await this.chunkByFile(rootPath, strategy));\n break;\n \n case 'semantic':\n chunks.push(...await this.chunkBySemantic(rootPath, strategy));\n break;\n \n case 'size':\n chunks.push(...await this.chunkBySize(rootPath, strategy));\n break;\n \n default:\n throw new Error(`Unknown chunking strategy: ${strategy.type}`);\n }\n \n logger.info('Codebase chunked', {\n totalChunks: chunks.length,\n totalSize: chunks.reduce((sum, c) => sum + c.metadata.size, 0),\n });\n \n return chunks;\n }\n \n /**\n * Chunk by file boundaries\n */\n private async chunkByFile(\n rootPath: string,\n strategy: ChunkingStrategy\n ): Promise<ContextChunk[]> {\n const chunks: ContextChunk[] = [];\n const files = await this.walkDirectory(rootPath);\n \n for (const file of files) {\n const content = await fs.promises.readFile(file, 'utf-8');\n \n // Skip files larger than max chunk size\n if (content.length > strategy.maxChunkSize) {\n // Split large files\n const fileChunks = this.splitLargeFile(file, content, strategy);\n chunks.push(...fileChunks);\n } else {\n chunks.push({\n id: `file-${path.basename(file)}`,\n type: 'code',\n content,\n metadata: {\n filePath: file,\n language: this.detectLanguage(file),\n size: content.length,\n score: 0.5,\n },\n boundaries: {\n start: 0,\n end: content.length,\n },\n });\n }\n }\n \n return chunks;\n }\n \n /**\n * Chunk by semantic boundaries (classes, functions)\n */\n private async chunkBySemantic(\n rootPath: string,\n strategy: ChunkingStrategy\n ): Promise<ContextChunk[]> {\n const chunks: ContextChunk[] = [];\n const files = await this.walkDirectory(rootPath);\n \n for (const file of files) {\n const content = await fs.promises.readFile(file, 'utf-8');\n const language = this.detectLanguage(file);\n \n // Extract semantic units based on language\n const semanticUnits = 
this.extractSemanticUnits(content, language);\n \n for (const unit of semanticUnits) {\n if (unit.content.length <= strategy.maxChunkSize) {\n chunks.push({\n id: `semantic-${file}-${unit.name}`,\n type: 'code',\n content: unit.content,\n metadata: {\n filePath: file,\n language,\n size: unit.content.length,\n score: unit.importance,\n },\n boundaries: {\n start: unit.start,\n end: unit.end,\n },\n });\n }\n }\n }\n \n return chunks;\n }\n \n /**\n * Chunk by fixed size with overlap\n */\n private async chunkBySize(\n rootPath: string,\n strategy: ChunkingStrategy\n ): Promise<ContextChunk[]> {\n const chunks: ContextChunk[] = [];\n const files = await this.walkDirectory(rootPath);\n \n for (const file of files) {\n const content = await fs.promises.readFile(file, 'utf-8');\n const lines = content.split('\\n');\n \n let currentChunk = '';\n let startLine = 0;\n \n for (let i = 0; i < lines.length; i++) {\n currentChunk += lines[i] + '\\n';\n \n if (currentChunk.length >= strategy.maxChunkSize) {\n chunks.push({\n id: `size-${file}-${startLine}`,\n type: 'code',\n content: currentChunk,\n metadata: {\n filePath: file,\n language: this.detectLanguage(file),\n size: currentChunk.length,\n score: 0.5,\n },\n boundaries: {\n start: startLine,\n end: i,\n overlap: strategy.overlapSize,\n },\n });\n \n // Move window with overlap\n const overlapLines = Math.floor(strategy.overlapSize / 50); // Estimate lines\n startLine = Math.max(0, i - overlapLines);\n currentChunk = lines.slice(startLine, i + 1).join('\\n');\n }\n }\n \n // Add remaining chunk\n if (currentChunk.trim()) {\n chunks.push({\n id: `size-${file}-${startLine}`,\n type: 'code',\n content: currentChunk,\n metadata: {\n filePath: file,\n language: this.detectLanguage(file),\n size: currentChunk.length,\n score: 0.5,\n },\n boundaries: {\n start: startLine,\n end: lines.length - 1,\n },\n });\n }\n }\n \n return chunks;\n }\n \n /**\n * Collect relevant chunks for agent context\n */\n private async 
collectRelevantChunks(\n baseContext: Record<string, any>,\n config: AgentContextConfig,\n maxTokens: number\n ): Promise<ContextChunk[]> {\n const chunks: ContextChunk[] = [];\n \n // Get recent frames\n if (config.includeTypes.includes('frame')) {\n const recentFrames = await this.getRecentFrameChunks(10);\n chunks.push(...recentFrames);\n }\n \n // Get relevant code files\n if (config.includeTypes.includes('code') && baseContext.files) {\n const codeChunks = await this.getCodeChunks(baseContext.files);\n chunks.push(...codeChunks);\n }\n \n // Get test files\n if (config.includeTypes.includes('test') && baseContext.testFiles) {\n const testChunks = await this.getTestChunks(baseContext.testFiles);\n chunks.push(...testChunks);\n }\n \n // Search for relevant context\n if (baseContext.query) {\n const searchResults = await this.contextRetriever.retrieve({\n query: baseContext.query,\n limit: 20,\n });\n \n for (const result of searchResults) {\n chunks.push({\n id: `search-${result.frameId}`,\n type: 'frame',\n content: result.content,\n metadata: {\n frameId: result.frameId,\n size: result.content.length,\n score: result.score,\n timestamp: new Date(result.timestamp),\n },\n boundaries: {},\n });\n }\n }\n \n // Check shared cache for relevant chunks\n const cachedChunks = this.getRelevantCachedChunks(config.agent);\n chunks.push(...cachedChunks);\n \n return chunks;\n }\n \n /**\n * Prioritize chunks based on agent weights\n */\n private prioritizeChunks(\n chunks: ContextChunk[],\n weights: AgentContextConfig['priorityWeights']\n ): ContextChunk[] {\n return chunks\n .map(chunk => {\n let priority = 0;\n \n // Recent weight\n if (chunk.metadata.timestamp) {\n const age = Date.now() - chunk.metadata.timestamp.getTime();\n const recentScore = Math.max(0, 1 - age / (24 * 60 * 60 * 1000)); // Decay over 24h\n priority += recentScore * weights.recent;\n }\n \n // Relevance weight\n priority += (chunk.metadata.score || 0.5) * weights.relevant;\n \n // Type-specific 
weights\n if (chunk.type === 'test') {\n priority += weights.test;\n }\n if (chunk.metadata.filePath?.includes('error')) {\n priority += weights.error;\n }\n \n return { ...chunk, priority };\n })\n .sort((a, b) => (b as any).priority - (a as any).priority);\n }\n \n /**\n * Fit chunks within token budget\n */\n private fitChunksToTokenBudget(\n chunks: ContextChunk[],\n maxTokens: number\n ): ContextChunk[] {\n const selected: ContextChunk[] = [];\n let totalTokens = 0;\n \n // Rough token estimation (1 token \u2248 4 chars)\n const estimateTokens = (text: string) => Math.ceil(text.length / 4);\n \n for (const chunk of chunks) {\n const chunkTokens = estimateTokens(chunk.content);\n \n if (totalTokens + chunkTokens <= maxTokens) {\n selected.push(chunk);\n totalTokens += chunkTokens;\n } else if (selected.length === 0) {\n // Always include at least one chunk, truncated if necessary\n const truncatedContent = chunk.content.slice(0, maxTokens * 4);\n selected.push({\n ...chunk,\n content: truncatedContent,\n metadata: {\n ...chunk.metadata,\n size: truncatedContent.length,\n },\n });\n break;\n } else {\n break;\n }\n }\n \n return selected;\n }\n \n /**\n * Helper methods\n */\n \n private async walkDirectory(dir: string): Promise<string[]> {\n const files: string[] = [];\n const entries = await fs.promises.readdir(dir, { withFileTypes: true });\n \n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n \n if (entry.isDirectory()) {\n // Skip node_modules, .git, etc\n if (!['node_modules', '.git', 'dist', 'build'].includes(entry.name)) {\n files.push(...await this.walkDirectory(fullPath));\n }\n } else if (entry.isFile()) {\n // Include code files\n if (/\\.(ts|tsx|js|jsx|py|java|go|rs|cpp|c|h)$/.test(entry.name)) {\n files.push(fullPath);\n }\n }\n }\n \n return files;\n }\n \n private detectLanguage(filePath: string): string {\n const ext = path.extname(filePath);\n const langMap: Record<string, string> = {\n '.ts': 'typescript',\n 
'.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.py': 'python',\n '.java': 'java',\n '.go': 'go',\n '.rs': 'rust',\n '.cpp': 'cpp',\n '.c': 'c',\n '.h': 'c',\n };\n return langMap[ext] || 'unknown';\n }\n \n private splitLargeFile(\n filePath: string,\n content: string,\n strategy: ChunkingStrategy\n ): ContextChunk[] {\n const chunks: ContextChunk[] = [];\n const lines = content.split('\\n');\n const linesPerChunk = Math.ceil(strategy.maxChunkSize / 50); // Estimate\n \n for (let i = 0; i < lines.length; i += linesPerChunk) {\n const chunkLines = lines.slice(i, i + linesPerChunk);\n const chunkContent = chunkLines.join('\\n');\n \n chunks.push({\n id: `file-${path.basename(filePath)}-part-${i}`,\n type: 'code',\n content: chunkContent,\n metadata: {\n filePath,\n language: this.detectLanguage(filePath),\n size: chunkContent.length,\n score: 0.5,\n },\n boundaries: {\n start: i,\n end: Math.min(i + linesPerChunk, lines.length),\n overlap: strategy.overlapSize,\n },\n });\n }\n \n return chunks;\n }\n \n private extractSemanticUnits(\n content: string,\n language: string\n ): Array<{\n name: string;\n content: string;\n start: number;\n end: number;\n importance: number;\n }> {\n const units: Array<{\n name: string;\n content: string;\n start: number;\n end: number;\n importance: number;\n }> = [];\n \n // Simple regex-based extraction (would need proper AST parsing for production)\n if (language === 'typescript' || language === 'javascript') {\n // Extract classes\n const classRegex = /class\\s+(\\w+)[^{]*\\{[^}]+\\}/g;\n let match;\n while ((match = classRegex.exec(content)) !== null) {\n units.push({\n name: match[1],\n content: match[0],\n start: match.index,\n end: match.index + match[0].length,\n importance: 0.8,\n });\n }\n \n // Extract functions\n const funcRegex = /(?:function|const|let)\\s+(\\w+)\\s*=?\\s*(?:\\([^)]*\\)|\\w+)\\s*(?:=>|{)[^}]+}/g;\n while ((match = funcRegex.exec(content)) !== null) {\n units.push({\n name: 
match[1],\n content: match[0],\n start: match.index,\n end: match.index + match[0].length,\n importance: 0.6,\n });\n }\n }\n \n return units;\n }\n \n private async getRecentFrameChunks(limit: number): Promise<ContextChunk[]> {\n const activeStack = this.dualStackManager.getActiveStack();\n const frames = await activeStack.getAllFrames();\n \n return frames.slice(-limit).map(frame => ({\n id: `frame-${frame.frameId}`,\n type: 'frame',\n content: JSON.stringify(frame, null, 2),\n metadata: {\n frameId: frame.frameId,\n size: JSON.stringify(frame).length,\n score: 0.7,\n timestamp: new Date(frame.timestamp),\n },\n boundaries: {},\n }));\n }\n \n private async getCodeChunks(files: string[]): Promise<ContextChunk[]> {\n const chunks: ContextChunk[] = [];\n \n for (const file of files) {\n if (fs.existsSync(file)) {\n const content = await fs.promises.readFile(file, 'utf-8');\n chunks.push({\n id: `code-${path.basename(file)}`,\n type: 'code',\n content,\n metadata: {\n filePath: file,\n language: this.detectLanguage(file),\n size: content.length,\n score: 0.8,\n },\n boundaries: {},\n });\n }\n }\n \n return chunks;\n }\n \n private async getTestChunks(testFiles: string[]): Promise<ContextChunk[]> {\n const chunks: ContextChunk[] = [];\n \n for (const file of testFiles) {\n if (fs.existsSync(file)) {\n const content = await fs.promises.readFile(file, 'utf-8');\n chunks.push({\n id: `test-${path.basename(file)}`,\n type: 'test',\n content,\n metadata: {\n filePath: file,\n language: this.detectLanguage(file),\n size: content.length,\n score: 0.7,\n },\n boundaries: {},\n });\n }\n }\n \n return chunks;\n }\n \n private getRelevantCachedChunks(agentType: SubagentType): ContextChunk[] {\n const relevantChunks: ContextChunk[] = [];\n \n // Get chunks from cache that might be relevant\n for (const [key, chunks] of this.sharedContextCache.entries()) {\n // Skip very old cache entries\n const timestamp = parseInt(key.split('-').pop() || '0');\n if (Date.now() - timestamp > 
5 * 60 * 1000) { // 5 minutes\n continue;\n }\n \n // Add relevant chunks based on agent type\n if (agentType === 'review' || agentType === 'improve') {\n relevantChunks.push(...chunks.filter(c => c.type === 'code'));\n }\n }\n \n return relevantChunks;\n }\n \n /**\n * Clear context cache\n */\n clearCache(): void {\n this.sharedContextCache.clear();\n logger.debug('Context cache cleared');\n }\n \n /**\n * Get cache statistics\n */\n getCacheStats() {\n const stats = {\n cacheSize: this.sharedContextCache.size,\n totalChunks: 0,\n totalBytes: 0,\n };\n \n for (const chunks of this.sharedContextCache.values()) {\n stats.totalChunks += chunks.length;\n stats.totalBytes += chunks.reduce((sum, c) => sum + c.metadata.size, 0);\n }\n \n return stats;\n }\n}"],
|
|
5
|
+
"mappings": "AASA,SAAS,cAAc;AACvB,YAAY,QAAQ;AACpB,YAAY,UAAU;AA8Cf,MAAM,wBAAwB;AAAA,EAC3B;AAAA,EACA;AAAA;AAAA,EAGA,qBAAkD,oBAAI,IAAI;AAAA;AAAA,EAG1D;AAAA,EAER,YACE,kBACA,kBACA;AACA,SAAK,mBAAmB;AACxB,SAAK,mBAAmB;AACxB,SAAK,eAAe,KAAK,uBAAuB;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAAgE;AACtE,UAAM,UAAU,oBAAI,IAAsC;AAG1D,YAAQ,IAAI,YAAY;AAAA,MACtB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,iBAAiB;AAAA,QACf,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,MACR;AAAA,MACA,cAAc,CAAC,SAAS,iBAAiB,QAAQ;AAAA,MACjD,cAAc,CAAC;AAAA,IACjB,CAAC;AAGD,YAAQ,IAAI,QAAQ;AAAA,MAClB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,iBAAiB;AAAA,QACf,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,MACR;AAAA,MACA,cAAc,CAAC,QAAQ,SAAS,MAAM;AAAA,MACtC,cAAc,CAAC,eAAe;AAAA,IAChC,CAAC;AAGD,YAAQ,IAAI,WAAW;AAAA,MACrB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,iBAAiB;AAAA,QACf,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,MACR;AAAA,MACA,cAAc,CAAC,QAAQ,QAAQ,OAAO;AAAA,MACtC,cAAc,CAAC,iBAAiB,QAAQ;AAAA,IAC1C,CAAC;AAGD,YAAQ,IAAI,WAAW;AAAA,MACrB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,iBAAiB;AAAA,QACf,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,MACR;AAAA,MACA,cAAc,CAAC,QAAQ,QAAQ;AAAA,MAC/B,cAAc,CAAC,iBAAiB,MAAM;AAAA,IACxC,CAAC;AAGD,YAAQ,IAAI,UAAU;AAAA,MACpB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,iBAAiB;AAAA,QACf,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,MACR;AAAA,MACA,cAAc,CAAC,QAAQ,QAAQ,SAAS,eAAe;AAAA,MACvD,cAAc,CAAC;AAAA,IACjB,CAAC;AAGD,YAAQ,IAAI,WAAW;AAAA,MACrB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,iBAAiB;AAAA,QACf,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,MACR;AAAA,MACA,cAAc,CAAC,SAAS,eAAe;AAAA,MACvC,cAAc,CAAC;AAAA,IACjB,CAAC;AAGD,YAAQ,IAAI,WAAW;AAAA,MACrB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,iBAAiB;AAAA,QACf,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,MACR;AAAA,MACA,cAAc,CAAC,QAAQ,QAAQ,OAAO;AAAA,MACtC,cAAc,CAAC,eAAe;AAAA,IAChC,CAAC;AAGD,YAAQ,IAAI,WAAW;AAAA,MACrB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,iBAAiB;AAAA,QACf,QAAQ;A
AAA,QACR,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,MACR;AAAA,MACA,cAAc,CAAC,UAAU,OAAO;AAAA,MAChC,cAAc,CAAC,QAAQ,MAAM;AAAA,IAC/B,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,oBACJ,WACA,aACA,WAC8B;AAC9B,UAAM,SAAS,KAAK,aAAa,IAAI,SAAS;AAC9C,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,uBAAuB,SAAS,EAAE;AAAA,IACpD;AAEA,WAAO,MAAM,yBAAyB,SAAS,UAAU,EAAE,UAAU,CAAC;AAGtE,UAAM,SAAS,MAAM,KAAK;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,UAAM,eAAe,KAAK,iBAAiB,QAAQ,OAAO,eAAe;AAGzE,UAAM,iBAAiB,KAAK;AAAA,MAC1B;AAAA,MACA;AAAA,IACF;AAGA,UAAM,eAAoC;AAAA,MACxC,GAAG;AAAA,MACH,QAAQ,eAAe,IAAI,QAAM;AAAA,QAC/B,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,QACX,UAAU,EAAE;AAAA,MACd,EAAE;AAAA,IACJ;AAGA,SAAK,mBAAmB,IAAI,GAAG,SAAS,IAAI,KAAK,IAAI,CAAC,IAAI,cAAc;AAExE,WAAO,MAAM,wBAAwB,SAAS,IAAI;AAAA,MAChD,gBAAgB,eAAe;AAAA,MAC/B,WAAW,eAAe,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,SAAS,MAAM,CAAC;AAAA,IACvE,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,UACA,UACyB;AACzB,UAAM,SAAyB,CAAC;AAEhC,WAAO,KAAK,qBAAqB,EAAE,UAAU,UAAU,SAAS,KAAK,CAAC;AAEtE,YAAQ,SAAS,MAAM;AAAA,MACrB,KAAK;AACH,eAAO,KAAK,GAAG,MAAM,KAAK,YAAY,UAAU,QAAQ,CAAC;AACzD;AAAA,MAEF,KAAK;AACH,eAAO,KAAK,GAAG,MAAM,KAAK,gBAAgB,UAAU,QAAQ,CAAC;AAC7D;AAAA,MAEF,KAAK;AACH,eAAO,KAAK,GAAG,MAAM,KAAK,YAAY,UAAU,QAAQ,CAAC;AACzD;AAAA,MAEF;AACE,cAAM,IAAI,MAAM,8BAA8B,SAAS,IAAI,EAAE;AAAA,IACjE;AAEA,WAAO,KAAK,oBAAoB;AAAA,MAC9B,aAAa,OAAO;AAAA,MACpB,WAAW,OAAO,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,SAAS,MAAM,CAAC;AAAA,IAC/D,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,UACA,UACyB;AACzB,UAAM,SAAyB,CAAC;AAChC,UAAM,QAAQ,MAAM,KAAK,cAAc,QAAQ;AAE/C,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,MAAM,GAAG,SAAS,SAAS,MAAM,OAAO;AAGxD,UAAI,QAAQ,SAAS,SAAS,cAAc;AAE1C,cAAM,aAAa,KAAK,eAAe,MAAM,SAAS,QAAQ;AAC9D,eAAO,KAAK,GAAG,UAAU;AAAA,MAC3B,OAAO;AACL,eAAO,KAAK;AAAA,UACV,IAAI,QAAQ,KAAK,SAAS,IAAI,CAAC;AAAA,UAC/B,MAAM;AAAA,UACN;AAAA,UACA,UAAU;AAAA,YACR,UAAU;AAAA,YACV,UAAU,KAAK,eAAe,IAAI;AAAA,YAClC,MAAM,QAAQ;AAAA,YACd,OAAO;AAAA,UACT;AAAA,UACA,YAAY;AAAA,YACV,OAAO;AAAA,YACP,KAAK,QAAQ;AAAA,UACf;A
AAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,UACyB;AACzB,UAAM,SAAyB,CAAC;AAChC,UAAM,QAAQ,MAAM,KAAK,cAAc,QAAQ;AAE/C,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,MAAM,GAAG,SAAS,SAAS,MAAM,OAAO;AACxD,YAAM,WAAW,KAAK,eAAe,IAAI;AAGzC,YAAM,gBAAgB,KAAK,qBAAqB,SAAS,QAAQ;AAEjE,iBAAW,QAAQ,eAAe;AAChC,YAAI,KAAK,QAAQ,UAAU,SAAS,cAAc;AAChD,iBAAO,KAAK;AAAA,YACV,IAAI,YAAY,IAAI,IAAI,KAAK,IAAI;AAAA,YACjC,MAAM;AAAA,YACN,SAAS,KAAK;AAAA,YACd,UAAU;AAAA,cACR,UAAU;AAAA,cACV;AAAA,cACA,MAAM,KAAK,QAAQ;AAAA,cACnB,OAAO,KAAK;AAAA,YACd;AAAA,YACA,YAAY;AAAA,cACV,OAAO,KAAK;AAAA,cACZ,KAAK,KAAK;AAAA,YACZ;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,UACA,UACyB;AACzB,UAAM,SAAyB,CAAC;AAChC,UAAM,QAAQ,MAAM,KAAK,cAAc,QAAQ;AAE/C,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,MAAM,GAAG,SAAS,SAAS,MAAM,OAAO;AACxD,YAAM,QAAQ,QAAQ,MAAM,IAAI;AAEhC,UAAI,eAAe;AACnB,UAAI,YAAY;AAEhB,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,wBAAgB,MAAM,CAAC,IAAI;AAE3B,YAAI,aAAa,UAAU,SAAS,cAAc;AAChD,iBAAO,KAAK;AAAA,YACV,IAAI,QAAQ,IAAI,IAAI,SAAS;AAAA,YAC7B,MAAM;AAAA,YACN,SAAS;AAAA,YACT,UAAU;AAAA,cACR,UAAU;AAAA,cACV,UAAU,KAAK,eAAe,IAAI;AAAA,cAClC,MAAM,aAAa;AAAA,cACnB,OAAO;AAAA,YACT;AAAA,YACA,YAAY;AAAA,cACV,OAAO;AAAA,cACP,KAAK;AAAA,cACL,SAAS,SAAS;AAAA,YACpB;AAAA,UACF,CAAC;AAGD,gBAAM,eAAe,KAAK,MAAM,SAAS,cAAc,EAAE;AACzD,sBAAY,KAAK,IAAI,GAAG,IAAI,YAAY;AACxC,yBAAe,MAAM,MAAM,WAAW,IAAI,CAAC,EAAE,KAAK,IAAI;AAAA,QACxD;AAAA,MACF;AAGA,UAAI,aAAa,KAAK,GAAG;AACvB,eAAO,KAAK;AAAA,UACV,IAAI,QAAQ,IAAI,IAAI,SAAS;AAAA,UAC7B,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,YACR,UAAU;AAAA,YACV,UAAU,KAAK,eAAe,IAAI;AAAA,YAClC,MAAM,aAAa;AAAA,YACnB,OAAO;AAAA,UACT;AAAA,UACA,YAAY;AAAA,YACV,OAAO;AAAA,YACP,KAAK,MAAM,SAAS;AAAA,UACtB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBACZ,aACA,QACA,WACyB;AACzB,UAAM,SAAyB,CAAC;AAGhC,QAAI,OAAO,aAAa,SAAS,OAAO,GAAG;AACzC,YAAM,eAAe,MAAM,KAAK,qBAAqB,EAAE;AACvD,aAAO,KAAK,GAAG,YAAY;AAAA,IAC7B;AAGA,QAAI,OAAO,aAAa,SAAS,MAAM,KAAK,YAAY,
OAAO;AAC7D,YAAM,aAAa,MAAM,KAAK,cAAc,YAAY,KAAK;AAC7D,aAAO,KAAK,GAAG,UAAU;AAAA,IAC3B;AAGA,QAAI,OAAO,aAAa,SAAS,MAAM,KAAK,YAAY,WAAW;AACjE,YAAM,aAAa,MAAM,KAAK,cAAc,YAAY,SAAS;AACjE,aAAO,KAAK,GAAG,UAAU;AAAA,IAC3B;AAGA,QAAI,YAAY,OAAO;AACrB,YAAM,gBAAgB,MAAM,KAAK,iBAAiB,SAAS;AAAA,QACzD,OAAO,YAAY;AAAA,QACnB,OAAO;AAAA,MACT,CAAC;AAED,iBAAW,UAAU,eAAe;AAClC,eAAO,KAAK;AAAA,UACV,IAAI,UAAU,OAAO,OAAO;AAAA,UAC5B,MAAM;AAAA,UACN,SAAS,OAAO;AAAA,UAChB,UAAU;AAAA,YACR,SAAS,OAAO;AAAA,YAChB,MAAM,OAAO,QAAQ;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,WAAW,IAAI,KAAK,OAAO,SAAS;AAAA,UACtC;AAAA,UACA,YAAY,CAAC;AAAA,QACf,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,eAAe,KAAK,wBAAwB,OAAO,KAAK;AAC9D,WAAO,KAAK,GAAG,YAAY;AAE3B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,iBACN,QACA,SACgB;AAChB,WAAO,OACJ,IAAI,WAAS;AACZ,UAAI,WAAW;AAGf,UAAI,MAAM,SAAS,WAAW;AAC5B,cAAM,MAAM,KAAK,IAAI,IAAI,MAAM,SAAS,UAAU,QAAQ;AAC1D,cAAM,cAAc,KAAK,IAAI,GAAG,IAAI,OAAO,KAAK,KAAK,KAAK,IAAK;AAC/D,oBAAY,cAAc,QAAQ;AAAA,MACpC;AAGA,mBAAa,MAAM,SAAS,SAAS,OAAO,QAAQ;AAGpD,UAAI,MAAM,SAAS,QAAQ;AACzB,oBAAY,QAAQ;AAAA,MACtB;AACA,UAAI,MAAM,SAAS,UAAU,SAAS,OAAO,GAAG;AAC9C,oBAAY,QAAQ;AAAA,MACtB;AAEA,aAAO,EAAE,GAAG,OAAO,SAAS;AAAA,IAC9B,CAAC,EACA,KAAK,CAAC,GAAG,MAAO,EAAU,WAAY,EAAU,QAAQ;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA,EAKQ,uBACN,QACA,WACgB;AAChB,UAAM,WAA2B,CAAC;AAClC,QAAI,cAAc;AAGlB,UAAM,iBAAiB,CAAC,SAAiB,KAAK,KAAK,KAAK,SAAS,CAAC;AAElE,eAAW,SAAS,QAAQ;AAC1B,YAAM,cAAc,eAAe,MAAM,OAAO;AAEhD,UAAI,cAAc,eAAe,WAAW;AAC1C,iBAAS,KAAK,KAAK;AACnB,uBAAe;AAAA,MACjB,WAAW,SAAS,WAAW,GAAG;AAEhC,cAAM,mBAAmB,MAAM,QAAQ,MAAM,GAAG,YAAY,CAAC;AAC7D,iBAAS,KAAK;AAAA,UACZ,GAAG;AAAA,UACH,SAAS;AAAA,UACT,UAAU;AAAA,YACR,GAAG,MAAM;AAAA,YACT,MAAM,iBAAiB;AAAA,UACzB;AAAA,QACF,CAAC;AACD;AAAA,MACF,OAAO;AACL;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,cAAc,KAAgC;AAC1D,UAAM,QAAkB,CAAC;AACzB,UAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAEtE,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAE1C,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,CAAC,CAAC,gBAAgB,QAAQ,QAAQ,OAAO,EAAE,SAAS,MAAM,IAAI,GAA
G;AACnE,gBAAM,KAAK,GAAG,MAAM,KAAK,cAAc,QAAQ,CAAC;AAAA,QAClD;AAAA,MACF,WAAW,MAAM,OAAO,GAAG;AAEzB,YAAI,2CAA2C,KAAK,MAAM,IAAI,GAAG;AAC/D,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,eAAe,UAA0B;AAC/C,UAAM,MAAM,KAAK,QAAQ,QAAQ;AACjC,UAAM,UAAkC;AAAA,MACtC,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,SAAS;AAAA,MACT,OAAO;AAAA,MACP,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AACA,WAAO,QAAQ,GAAG,KAAK;AAAA,EACzB;AAAA,EAEQ,eACN,UACA,SACA,UACgB;AAChB,UAAM,SAAyB,CAAC;AAChC,UAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,UAAM,gBAAgB,KAAK,KAAK,SAAS,eAAe,EAAE;AAE1D,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,eAAe;AACpD,YAAM,aAAa,MAAM,MAAM,GAAG,IAAI,aAAa;AACnD,YAAM,eAAe,WAAW,KAAK,IAAI;AAEzC,aAAO,KAAK;AAAA,QACV,IAAI,QAAQ,KAAK,SAAS,QAAQ,CAAC,SAAS,CAAC;AAAA,QAC7C,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,UACA,UAAU,KAAK,eAAe,QAAQ;AAAA,UACtC,MAAM,aAAa;AAAA,UACnB,OAAO;AAAA,QACT;AAAA,QACA,YAAY;AAAA,UACV,OAAO;AAAA,UACP,KAAK,KAAK,IAAI,IAAI,eAAe,MAAM,MAAM;AAAA,UAC7C,SAAS,SAAS;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,qBACN,SACA,UAOC;AACD,UAAM,QAMD,CAAC;AAGN,QAAI,aAAa,gBAAgB,aAAa,cAAc;AAE1D,YAAM,aAAa;AACnB,UAAI;AACJ,cAAQ,QAAQ,WAAW,KAAK,OAAO,OAAO,MAAM;AAClD,cAAM,KAAK;AAAA,UACT,MAAM,MAAM,CAAC;AAAA,UACb,SAAS,MAAM,CAAC;AAAA,UAChB,OAAO,MAAM;AAAA,UACb,KAAK,MAAM,QAAQ,MAAM,CAAC,EAAE;AAAA,UAC5B,YAAY;AAAA,QACd,CAAC;AAAA,MACH;AAGA,YAAM,YAAY;AAClB,cAAQ,QAAQ,UAAU,KAAK,OAAO,OAAO,MAAM;AACjD,cAAM,KAAK;AAAA,UACT,MAAM,MAAM,CAAC;AAAA,UACb,SAAS,MAAM,CAAC;AAAA,UAChB,OAAO,MAAM;AAAA,UACb,KAAK,MAAM,QAAQ,MAAM,CAAC,EAAE;AAAA,UAC5B,YAAY;AAAA,QACd,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,qBAAqB,OAAwC;AACzE,UAAM,cAAc,KAAK,iBAAiB,eAAe;AACzD,UAAM,SAAS,MAAM,YAAY,aAAa;AAE9C,WAAO,OAAO,MAAM,CAAC,KAAK,EAAE,IAAI,YAAU;AAAA,MACxC,IAAI,SAAS,MAAM,OAAO;AAAA,MAC1B,MAAM;AAAA,MACN,SAAS,KAAK,UAAU,OAAO,MAAM,CAAC;AAAA,MACtC,UAAU;AAAA,QACR,SAAS,MAAM;AAAA,QACf,MAAM,KAAK,UAAU,KAAK,EAAE;AAAA,QAC5B,OAAO;AAAA,QACP,WAAW,IAAI,KAAK,MAAM,SAAS;AAAA,MAC
rC;AAAA,MACA,YAAY,CAAC;AAAA,IACf,EAAE;AAAA,EACJ;AAAA,EAEA,MAAc,cAAc,OAA0C;AACpE,UAAM,SAAyB,CAAC;AAEhC,eAAW,QAAQ,OAAO;AACxB,UAAI,GAAG,WAAW,IAAI,GAAG;AACvB,cAAM,UAAU,MAAM,GAAG,SAAS,SAAS,MAAM,OAAO;AACxD,eAAO,KAAK;AAAA,UACV,IAAI,QAAQ,KAAK,SAAS,IAAI,CAAC;AAAA,UAC/B,MAAM;AAAA,UACN;AAAA,UACA,UAAU;AAAA,YACR,UAAU;AAAA,YACV,UAAU,KAAK,eAAe,IAAI;AAAA,YAClC,MAAM,QAAQ;AAAA,YACd,OAAO;AAAA,UACT;AAAA,UACA,YAAY,CAAC;AAAA,QACf,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,cAAc,WAA8C;AACxE,UAAM,SAAyB,CAAC;AAEhC,eAAW,QAAQ,WAAW;AAC5B,UAAI,GAAG,WAAW,IAAI,GAAG;AACvB,cAAM,UAAU,MAAM,GAAG,SAAS,SAAS,MAAM,OAAO;AACxD,eAAO,KAAK;AAAA,UACV,IAAI,QAAQ,KAAK,SAAS,IAAI,CAAC;AAAA,UAC/B,MAAM;AAAA,UACN;AAAA,UACA,UAAU;AAAA,YACR,UAAU;AAAA,YACV,UAAU,KAAK,eAAe,IAAI;AAAA,YAClC,MAAM,QAAQ;AAAA,YACd,OAAO;AAAA,UACT;AAAA,UACA,YAAY,CAAC;AAAA,QACf,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,wBAAwB,WAAyC;AACvE,UAAM,iBAAiC,CAAC;AAGxC,eAAW,CAAC,KAAK,MAAM,KAAK,KAAK,mBAAmB,QAAQ,GAAG;AAE7D,YAAM,YAAY,SAAS,IAAI,MAAM,GAAG,EAAE,IAAI,KAAK,GAAG;AACtD,UAAI,KAAK,IAAI,IAAI,YAAY,IAAI,KAAK,KAAM;AAC1C;AAAA,MACF;AAGA,UAAI,cAAc,YAAY,cAAc,WAAW;AACrD,uBAAe,KAAK,GAAG,OAAO,OAAO,OAAK,EAAE,SAAS,MAAM,CAAC;AAAA,MAC9D;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,mBAAmB,MAAM;AAC9B,WAAO,MAAM,uBAAuB;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAgB;AACd,UAAM,QAAQ;AAAA,MACZ,WAAW,KAAK,mBAAmB;AAAA,MACnC,aAAa;AAAA,MACb,YAAY;AAAA,IACd;AAEA,eAAW,UAAU,KAAK,mBAAmB,OAAO,GAAG;AACrD,YAAM,eAAe,OAAO;AAC5B,YAAM,cAAc,OAAO,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,SAAS,MAAM,CAAC;AAAA,IACxE;AAEA,WAAO;AAAA,EACT;AACF;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|