scai 0.1.117 → 0.1.118
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/MainAgent.js +255 -0
- package/dist/agents/contextReviewStep.js +104 -0
- package/dist/agents/finalPlanGenStep.js +123 -0
- package/dist/agents/infoPlanGenStep.js +126 -0
- package/dist/agents/planGeneratorStep.js +118 -0
- package/dist/agents/planResolverStep.js +95 -0
- package/dist/agents/planTargetFilesStep.js +48 -0
- package/dist/agents/preFileSearchCheckStep.js +95 -0
- package/dist/agents/selectRelevantSourcesStep.js +100 -0
- package/dist/agents/semanticAnalysisStep.js +144 -0
- package/dist/agents/structuralAnalysisStep.js +46 -0
- package/dist/agents/transformPlanGenStep.js +107 -0
- package/dist/agents/understandIntentStep.js +72 -0
- package/dist/agents/validationAnalysisStep.js +87 -0
- package/dist/commands/AskCmd.js +47 -116
- package/dist/commands/ChangeLogUpdateCmd.js +11 -5
- package/dist/commands/CommitSuggesterCmd.js +50 -75
- package/dist/commands/DaemonCmd.js +119 -29
- package/dist/commands/IndexCmd.js +41 -24
- package/dist/commands/InspectCmd.js +0 -1
- package/dist/commands/ReadlineSingleton.js +18 -0
- package/dist/commands/ResetDbCmd.js +20 -21
- package/dist/commands/ReviewCmd.js +89 -54
- package/dist/commands/SummaryCmd.js +12 -18
- package/dist/commands/WorkflowCmd.js +41 -0
- package/dist/commands/factory.js +254 -0
- package/dist/config.js +67 -15
- package/dist/constants.js +20 -4
- package/dist/context.js +10 -11
- package/dist/daemon/daemonQueues.js +63 -0
- package/dist/daemon/daemonWorker.js +40 -63
- package/dist/daemon/generateSummaries.js +58 -0
- package/dist/daemon/runFolderCapsuleBatch.js +247 -0
- package/dist/daemon/runIndexingBatch.js +147 -0
- package/dist/daemon/runKgBatch.js +104 -0
- package/dist/db/fileIndex.js +168 -63
- package/dist/db/functionExtractors/extractFromJava.js +210 -6
- package/dist/db/functionExtractors/extractFromJs.js +173 -214
- package/dist/db/functionExtractors/extractFromTs.js +159 -160
- package/dist/db/functionExtractors/index.js +7 -5
- package/dist/db/schema.js +55 -20
- package/dist/db/sqlTemplates.js +50 -19
- package/dist/fileRules/builtins.js +31 -14
- package/dist/fileRules/codeAllowedExtensions.js +4 -0
- package/dist/fileRules/fileExceptions.js +0 -13
- package/dist/fileRules/ignoredExtensions.js +10 -0
- package/dist/index.js +128 -325
- package/dist/lib/generate.js +37 -14
- package/dist/lib/generateFolderCapsules.js +109 -0
- package/dist/lib/spinner.js +12 -5
- package/dist/modelSetup.js +0 -10
- package/dist/pipeline/modules/changeLogModule.js +16 -19
- package/dist/pipeline/modules/chunkManagerModule.js +24 -0
- package/dist/pipeline/modules/cleanupModule.js +96 -91
- package/dist/pipeline/modules/codeTransformModule.js +208 -0
- package/dist/pipeline/modules/commentModule.js +20 -11
- package/dist/pipeline/modules/commitSuggesterModule.js +36 -14
- package/dist/pipeline/modules/contextReviewModule.js +52 -0
- package/dist/pipeline/modules/fileReaderModule.js +72 -0
- package/dist/pipeline/modules/fileSearchModule.js +136 -0
- package/dist/pipeline/modules/finalAnswerModule.js +53 -0
- package/dist/pipeline/modules/gatherInfoModule.js +176 -0
- package/dist/pipeline/modules/generateTestsModule.js +63 -54
- package/dist/pipeline/modules/kgModule.js +26 -11
- package/dist/pipeline/modules/preserveCodeModule.js +91 -49
- package/dist/pipeline/modules/refactorModule.js +19 -7
- package/dist/pipeline/modules/repairTestsModule.js +44 -36
- package/dist/pipeline/modules/reviewModule.js +23 -13
- package/dist/pipeline/modules/summaryModule.js +27 -35
- package/dist/pipeline/modules/writeFileModule.js +86 -0
- package/dist/pipeline/registry/moduleRegistry.js +38 -93
- package/dist/pipeline/runModulePipeline.js +22 -19
- package/dist/scripts/dbcheck.js +143 -228
- package/dist/utils/buildContextualPrompt.js +245 -172
- package/dist/utils/debugContext.js +24 -0
- package/dist/utils/fileTree.js +16 -6
- package/dist/utils/loadRelevantFolderCapsules.js +64 -0
- package/dist/utils/log.js +2 -0
- package/dist/utils/normalizeData.js +23 -0
- package/dist/utils/planActions.js +60 -0
- package/dist/utils/promptBuilderHelper.js +67 -0
- package/dist/utils/promptLogHelper.js +52 -0
- package/dist/utils/sanitizeQuery.js +20 -8
- package/dist/utils/sleep.js +3 -0
- package/dist/utils/splitCodeIntoChunk.js +65 -32
- package/dist/utils/vscode.js +49 -0
- package/dist/workflow/workflowResolver.js +14 -0
- package/dist/workflow/workflowRunner.js +103 -0
- package/package.json +6 -5
- package/dist/agent/agentManager.js +0 -39
- package/dist/agent/workflowManager.js +0 -95
- package/dist/commands/ModulePipelineCmd.js +0 -31
- package/dist/daemon/daemonBatch.js +0 -186
- package/dist/fileRules/scoreFiles.js +0 -71
- package/dist/lib/generateEmbedding.js +0 -22
|
@@ -1,183 +1,256 @@
|
|
|
1
1
|
// src/utils/buildContextualPrompt.ts
|
|
2
|
+
import path from "path";
|
|
3
|
+
import fs from "fs";
|
|
2
4
|
import { getDbForRepo } from "../db/client.js";
|
|
3
|
-
import { generateFocusedFileTree } from "./fileTree.js";
|
|
4
|
-
|
|
5
|
+
import { generateFileTree, generateFocusedFileTree } from "./fileTree.js";
|
|
6
|
+
import { RELATED_FILES_LIMIT } from "../constants.js";
|
|
7
|
+
import { loadRelevantFolderCapsules } from "./loadRelevantFolderCapsules.js";
|
|
8
|
+
/* --- Constants --- */
// Upper bounds that keep the generated prompt context compact.
const MAX_FUNCTIONS = 50;    // max function rows pulled per file
const MAX_KG_NEIGHBORS = 50; // max KG edges/tags pulled per entity
const DEFAULT_KG_DEPTH = 3;  // default knowledge-graph traversal depth
|
|
12
|
+
/* --- Helpers --- */
/**
 * Look up the numeric `files.id` for a stored path.
 * @param {object} db - prepared-statement DB handle (prepare/get style).
 * @param {string} filePath - path exactly as stored in the `files` table.
 * @returns {number|undefined} row id, or undefined when missing or on DB error.
 */
function fileRowIdForPath(db, filePath) {
    try {
        const stmt = db.prepare(`SELECT id FROM files WHERE path = ?`);
        const match = stmt.get(filePath);
        return match ? match.id : undefined;
    }
    catch {
        // Table may be absent on a fresh index — treat as "not found".
        return undefined;
    }
}
|
|
24
|
+
/* ---------------- KG helpers ---------------- */
/**
 * Fetch up to `limit` knowledge-graph tag names attached to an entity.
 * @returns {string[]} tag names; [] when none exist or the graph tables error.
 */
function loadKgTags(db, entityUniqueId, limit = MAX_KG_NEIGHBORS) {
    const sql = `
      SELECT tm.name as tag
      FROM graph_entity_tags et
      JOIN graph_tags_master tm ON tm.id = et.tag_id
      WHERE et.entity_unique_id = ?
      LIMIT ?
    `;
    try {
        return db.prepare(sql).all(entityUniqueId, limit).map(({ tag }) => tag);
    }
    catch {
        return [];
    }
}
|
|
42
|
+
/**
 * Fetch up to `limit` outgoing KG edges for a source entity.
 * @returns {Array<{relation: string, target: string}>} edges; [] on error.
 */
function loadKgNeighbors(db, sourceUniqueId, limit = MAX_KG_NEIGHBORS) {
    const sql = `
      SELECT relation, target_unique_id as target
      FROM graph_edges
      WHERE source_unique_id = ?
      LIMIT ?
    `;
    try {
        const edges = db.prepare(sql).all(sourceUniqueId, limit);
        return edges.map(({ relation, target }) => ({ relation, target }));
    }
    catch {
        return [];
    }
}
|
|
58
|
+
/* ---------------- Functions / classes ---------------- */
/**
 * Load up to `limit` function rows for a file, ordered by start line.
 * @param {number|undefined} fileId - `files.id`; falsy short-circuits to [].
 * @returns {Array<{name: string|undefined, start: number, end: number}>}
 */
function loadFunctions(db, fileId, limit = MAX_FUNCTIONS) {
    if (!fileId)
        return [];
    const sql = `SELECT name, start_line, end_line
                 FROM functions
                 WHERE file_id = ?
                 ORDER BY start_line
                 LIMIT ?`;
    try {
        const rows = db.prepare(sql).all(fileId, limit);
        return rows.map(({ name, start_line, end_line }) => ({
            name: name ?? undefined,
            start: start_line,
            end: end_line,
        }));
    }
    catch {
        return [];
    }
}
|
|
80
|
+
/**
 * Load up to `limit` class rows for a file, ordered by start line.
 * Mirrors loadFunctions but reads from `graph_classes`.
 * @returns {Array<{name: string|undefined, start: number, end: number}>}
 */
function loadClasses(db, fileId, limit = 50) {
    if (!fileId)
        return [];
    const sql = `SELECT name, start_line, end_line
                 FROM graph_classes
                 WHERE file_id = ?
                 ORDER BY start_line
                 LIMIT ?`;
    try {
        const rows = db.prepare(sql).all(fileId, limit);
        return rows.map(({ name, start_line, end_line }) => ({
            name: name ?? undefined,
            start: start_line,
            end: end_line,
        }));
    }
    catch {
        return [];
    }
}
|
|
101
|
+
/* ---------------- Trees ---------------- */
/**
 * Best-effort wrapper around generateFocusedFileTree.
 * Returns undefined instead of throwing, and for empty tree output.
 */
function safeGenerateFocusedTree(filePath, depth = 2) {
    try {
        const tree = generateFocusedFileTree(filePath, depth);
        return tree ? tree : undefined;
    }
    catch {
        return undefined;
    }
}
|
|
110
|
+
/**
 * Best-effort file tree of the current working directory.
 * Returns undefined instead of throwing, and for empty tree output.
 */
function safeGenerateRepoTree(depth = 2) {
    try {
        const tree = generateFileTree(process.cwd(), depth);
        return tree ? tree : undefined;
    }
    catch {
        return undefined;
    }
}
|
|
119
|
+
/* ======================================================
   LIGHT CONTEXT
   ====================================================== */
/**
 * Build the lightweight "orientation" context for a query: repo tree,
 * deduplicated related file paths, and folder capsules — no file bodies.
 *
 * @param {object} args
 * @param {string} [args.query] - raw user query.
 * @param {Array<{path: string}>} [args.topFiles] - ranked candidate files.
 * @param {Array<{path: string}>} [args.relatedFiles] - additional related files.
 * @returns {Promise<object>} context object with `initContext` populated.
 */
export async function buildLightContext(args) {
    // NOTE(review): the previous revision grabbed a DB handle here
    // (`const db = getDbForRepo()`) but never used it in this function;
    // loadRelevantFolderCapsules acquires its own handle, so it is dropped.
    const safeTopFiles = Array.isArray(args.topFiles) ? args.topFiles : [];
    const safeRelated = Array.isArray(args.relatedFiles) ? args.relatedFiles : [];
    const ctx = {
        initContext: {
            userQuery: (args.query || "").trim(),
            repoTree: safeGenerateRepoTree(2),
            relatedFiles: [],
            folderCapsules: [],
        },
    };
    /* -------- Collect related file paths (deduplicated, order-preserving) -------- */
    const relatedPaths = [];
    const addPath = (p) => {
        // Skip falsy entries defensively; keep first-seen order.
        if (p && !relatedPaths.includes(p))
            relatedPaths.push(p);
    };
    for (const tf of safeTopFiles)
        addPath(tf.path);
    for (let i = 0; i < Math.min(RELATED_FILES_LIMIT, safeRelated.length); i++)
        addPath(safeRelated[i].path);
    /* -------- Add file references mentioned in the user query -------- */
    for (const ref of extractFileReferences(ctx.initContext.userQuery)) {
        const found = safeTopFiles.find(f => f.path.toLowerCase().includes(ref));
        if (found)
            addPath(found.path);
    }
    ctx.initContext.relatedFiles = relatedPaths;
    /* -------- Folder capsules (orientation layer) -------- */
    const folderCapsules = loadRelevantFolderCapsules(normalizeToFolders(relatedPaths));
    // Defensive: loadRelevantFolderCapsules already appends a root capsule,
    // but guarantee one here so downstream consumers can rely on '/'.
    if (!folderCapsules.some(c => c.path === '/')) {
        folderCapsules.unshift({
            path: '/',
            depth: 1,
            stats: { fileCount: 0, byType: {} },
            roles: [],
            concerns: [],
            keyFiles: [],
            dependencies: { importsFrom: [], usedBy: [] },
            confidence: 0.5,
        });
    }
    ctx.initContext.folderCapsules = folderCapsules;
    return ctx;
}
|
|
183
|
+
/* -------- Helper: get folders from filepaths -------- */
/**
 * Map a mixed list of file/folder paths to their containing folders.
 * Directories contribute themselves, files contribute their parent dir,
 * and paths that cannot be stat'ed are skipped. Result is deduplicated.
 * @param {string[]} paths
 * @returns {string[]} unique folder paths in first-seen order.
 */
function normalizeToFolders(paths) {
    const folders = new Set();
    for (const candidate of paths) {
        let info;
        try {
            info = fs.statSync(candidate);
        }
        catch {
            continue; // invalid/missing path — ignore
        }
        if (info.isDirectory()) {
            folders.add(candidate);
        }
        else if (info.isFile()) {
            folders.add(path.dirname(candidate));
        }
    }
    return [...folders];
}
|
|
202
|
+
/* -------- Helper: extract file references from query -------- */
/**
 * Pull filename-looking tokens (e.g. "src/app.ts") out of a query string.
 * Matches path-ish tokens ending in a 1–6 character extension; results are
 * lowercased and deduplicated.
 * @param {string} query
 * @returns {string[]}
 */
function extractFileReferences(query) {
    const filenameRegex = /[\w\-\/]+\.\w{1,6}/gi;
    const hits = query.match(filenameRegex) ?? [];
    return [...new Set(hits.map(s => s.toLowerCase()))];
}
|
|
211
|
+
/* ======================================================
   IN-DEPTH CONTEXT
   ====================================================== */
/**
 * Build the deep-phase context: per-file structural data (functions,
 * classes, KG tags/neighbors, focused tree) for every requested file.
 *
 * @param {object} opts
 * @param {string[]} [opts.filenames] - files to analyze in depth.
 * @param {number} [opts.kgDepth=DEFAULT_KG_DEPTH] - accepted for interface parity.
 * @param {string[]} [opts.relatedFiles] - related paths for the capsule layer.
 *   NOTE(review): passed straight to normalizeToFolders — confirm callers pass
 *   plain path strings here (the light phase works with {path} objects).
 * @param {string} [opts.query] - raw user query.
 * @returns {Promise<{initContext: object, workingFiles: object[]}>}
 */
export async function buildInDepthContext({ filenames, kgDepth = DEFAULT_KG_DEPTH, relatedFiles, query, }) {
    const db = getDbForRepo();
    const safeFilenames = Array.isArray(filenames) ? filenames : [];
    const safeRelated = Array.isArray(relatedFiles) ? relatedFiles : [];
    const initCtx = {
        userQuery: query?.trim() || "",
        repoTree: safeGenerateRepoTree(3),
        relatedFiles: safeRelated,
        folderCapsules: loadRelevantFolderCapsules(normalizeToFolders(safeRelated)),
    };
    const workingFiles = [];
    /* -------- Working files (deep phase only) -------- */
    for (const filePath of safeFilenames) {
        workingFiles.push(describeWorkingFile(db, filePath));
    }
    return { initContext: initCtx, workingFiles };
}
/** Assemble the structural snapshot for one working file. */
function describeWorkingFile(db, filePath) {
    const entry = { path: filePath };
    const fileId = fileRowIdForPath(db, filePath);
    if (typeof fileId === "number") {
        entry.functions = loadFunctions(db, fileId, MAX_FUNCTIONS);
        entry.classes = loadClasses(db, fileId, 200);
    }
    entry.kgTags = loadKgTags(db, filePath, MAX_KG_NEIGHBORS);
    const neighbors = loadKgNeighbors(db, filePath, MAX_KG_NEIGHBORS);
    if (neighbors.length) {
        entry.kgNeighborhood = neighbors.map((e) => `${e.relation}:${e.target}`);
        const imported = neighbors
            .filter((e) => e.relation === "imports")
            .map((e) => e.target);
        const exported = neighbors
            .filter((e) => e.relation === "exports")
            .map((e) => e.target);
        if (imported.length)
            entry.imports = imported.slice(0, 200);
        if (exported.length)
            entry.exports = exported.slice(0, 200);
    }
    entry.focusedTree = safeGenerateFocusedTree(filePath, 3);
    return entry;
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
// File: src/utils/debugContext.ts
|
|
2
|
+
import util from "util";
|
|
3
|
+
import chalk from "chalk";
|
|
4
|
+
/**
 * Print a labelled, colorized snapshot of the agent context and (by default)
 * halt the process — a manual breakpoint for agent pipeline debugging.
 *
 * @param {object} context - the StructuredContext to dump.
 * @param {object} [options]
 * @param {string} [options.step="unknown-step"] - label for the breakpoint.
 * @param {string} [options.note] - optional extra note printed under the step.
 * @param {boolean} [options.exit=true] - exit the process after dumping.
 * @param {number} [options.depth=6] - util.inspect nesting depth.
 */
export function debugContext(context, options = {}) {
    const { step = "unknown-step", note, exit = true, depth = 6 } = options;
    const inspectOptions = {
        depth,
        colors: true,
        compact: false,
        maxArrayLength: 20,
        breakLength: 120
    };
    console.log("\n");
    console.log(chalk.bgYellow.black(" 🛑 AGENT DEBUG BREAKPOINT "));
    console.log(chalk.yellow(`Step: ${step}`));
    if (note) {
        console.log(chalk.gray(`Note: ${note}`));
    }
    console.log("\n📦 StructuredContext snapshot:\n");
    console.log(util.inspect(context, inspectOptions));
    console.log("\n---------------------------------------------");
    if (exit) {
        console.log(chalk.red("🚨 Exiting process after debug breakpoint\n"));
        process.exit(0);
    }
}
|
package/dist/utils/fileTree.js
CHANGED
|
@@ -1,20 +1,29 @@
|
|
|
1
1
|
import fs from 'fs';
|
|
2
2
|
import path from 'path';
|
|
3
3
|
import { getIndexDir } from '../constants.js';
|
|
4
|
+
import { IGNORED_FOLDER_GLOBS } from '../fileRules/ignoredPaths.js';
|
|
5
|
+
/** Utility to check if a file/folder should be ignored. */
function isIgnored(fullPath) {
    const normalizedPath = fullPath.replace(/\\/g, '/');
    return IGNORED_FOLDER_GLOBS.some(pattern => {
        // Strip leading "**/" and trailing "/**" so only the literal core remains.
        const cleanPattern = pattern.replace(/^\*\*\/?/, '').replace(/\/\*\*$/, '');
        // Guard: a pattern that reduces to "" (e.g. "**") would make
        // includes('') match every path and ignore the whole tree.
        if (!cleanPattern)
            return false;
        // Match whole path segments rather than raw substrings, so a pattern
        // like "dist" ignores "src/dist/x" but not "src/distribution/x".
        return (`/${normalizedPath}/`).includes(`/${cleanPattern}/`);
    });
}
|
|
4
15
|
/**
|
|
5
16
|
* Generate a reduced file tree centered around the focus path, including nearby sibling folders.
|
|
6
17
|
*/
|
|
7
18
|
export function generateFocusedFileTree(focusPath, maxDepth = 2, siblingWindow = 2) {
|
|
8
19
|
const absoluteFocus = path.resolve(focusPath);
|
|
9
20
|
const fileOrDir = fs.statSync(absoluteFocus);
|
|
10
|
-
const targetDir = fileOrDir.isDirectory()
|
|
11
|
-
? absoluteFocus
|
|
12
|
-
: path.dirname(absoluteFocus);
|
|
21
|
+
const targetDir = fileOrDir.isDirectory() ? absoluteFocus : path.dirname(absoluteFocus);
|
|
13
22
|
const parentDir = path.dirname(targetDir);
|
|
14
23
|
const indexDir = getIndexDir();
|
|
15
24
|
const siblings = fs
|
|
16
25
|
.readdirSync(parentDir, { withFileTypes: true })
|
|
17
|
-
.filter(entry => entry.isDirectory())
|
|
26
|
+
.filter(entry => entry.isDirectory() && !isIgnored(path.join(parentDir, entry.name)))
|
|
18
27
|
.sort((a, b) => a.name.localeCompare(b.name));
|
|
19
28
|
const focusIndex = siblings.findIndex(entry => path.resolve(path.join(parentDir, entry.name)) === path.resolve(targetDir));
|
|
20
29
|
const start = Math.max(0, focusIndex - siblingWindow);
|
|
@@ -30,11 +39,12 @@ export function generateFocusedFileTree(focusPath, maxDepth = 2, siblingWindow =
|
|
|
30
39
|
});
|
|
31
40
|
return output;
|
|
32
41
|
}
|
|
33
|
-
function generateFileTree(dir, depth, highlightPath, prefix = '') {
|
|
42
|
+
export function generateFileTree(dir, depth, highlightPath, prefix = '') {
|
|
34
43
|
if (depth < 0)
|
|
35
44
|
return '';
|
|
36
45
|
let output = '';
|
|
37
|
-
const entries = fs.readdirSync(dir, { withFileTypes: true })
|
|
46
|
+
const entries = fs.readdirSync(dir, { withFileTypes: true })
|
|
47
|
+
.filter(entry => !isIgnored(path.join(dir, entry.name)));
|
|
38
48
|
const sorted = entries.sort((a, b) => Number(b.isDirectory()) - Number(a.isDirectory()));
|
|
39
49
|
sorted.forEach((entry, index) => {
|
|
40
50
|
const isLast = index === sorted.length - 1;
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
// src/utils/loadRelevantFolderCapsules.ts
|
|
2
|
+
import { getDbForRepo } from '../db/client.js';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import fs from 'fs';
|
|
5
|
+
/**
 * Load folder "capsules" (summarized folder metadata) for a set of folder
 * paths: stored capsules come from the DB, missing ones are synthesized from
 * the filesystem with low confidence, and a root ('/') capsule is always
 * present in the result.
 *
 * @param {string[]} paths - folder paths to describe.
 * @returns {object[]} capsule objects.
 */
export function loadRelevantFolderCapsules(paths) {
    const db = getDbForRepo();
    const capsules = [];
    const wanted = Array.isArray(paths) ? paths : [];
    // 1) Query stored capsules first.
    // Guard: an empty list would render "IN ()", a SQLite syntax error.
    if (wanted.length > 0) {
        const placeholders = wanted.map(() => '?').join(',');
        const stmt = db.prepare(`SELECT capsule_json FROM folder_capsules WHERE path IN (${placeholders})`);
        for (const row of stmt.all(...wanted)) {
            try {
                capsules.push(JSON.parse(row.capsule_json));
            }
            catch {
                // skip malformed JSON
            }
        }
    }
    // 2) Generate ephemeral capsules for paths with no stored capsule.
    const existingPaths = new Set(capsules.map(c => c.path));
    for (const p of wanted) {
        if (existingPaths.has(p))
            continue;
        if (!fs.existsSync(p))
            continue;
        // Guard: a non-directory path (a file) would make readdirSync throw.
        let st;
        try {
            st = fs.statSync(p);
        }
        catch {
            continue;
        }
        if (!st.isDirectory())
            continue;
        const files = fs.readdirSync(p, { withFileTypes: true })
            .filter(f => f.isFile())
            .map(f => f.name);
        const byType = {};
        for (const f of files) {
            const ext = path.extname(f).replace('.', '') || 'unknown';
            byType[ext] = (byType[ext] || 0) + 1;
        }
        capsules.push({
            path: p,
            depth: p.split('/').length,
            stats: { fileCount: files.length, byType },
            roles: [],
            concerns: [],
            keyFiles: files
                .filter(f => /index|main|cli|app|server|config/i.test(f))
                .slice(0, 5)
                .map(f => ({ path: path.join(p, f), reason: 'heuristic key file' })),
            dependencies: { importsFrom: [], usedBy: [] },
            confidence: 0.35,
        });
    }
    // 3) Always ensure a root capsule so consumers can rely on '/'.
    if (!capsules.some(c => c.path === '/')) {
        capsules.push({
            path: '/',
            depth: 1,
            stats: { fileCount: 0, byType: {} },
            roles: [],
            concerns: [],
            keyFiles: [],
            dependencies: { importsFrom: [], usedBy: [] },
            confidence: 0.5,
        });
    }
    return capsules;
}
|
package/dist/utils/log.js
CHANGED
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
import { LOG_PATH } from "../constants.js";
|
|
2
2
|
import fs from 'fs';
|
|
3
|
+
export const DEBUG = false;
|
|
4
|
+
export const SUMMARY_LENGTH = 120;
|
|
3
5
|
export function log(...args) {
|
|
4
6
|
const timestamp = new Date().toISOString();
|
|
5
7
|
const message = args.map(arg => typeof arg === 'string' ? arg : JSON.stringify(arg, null, 2)).join(' ');
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
/**
 * normalizeData ensures that ModuleIO.data is always returned as a usable object.
 *
 * Modules may output either a JSON string or a structured object. This helper
 * parses JSON strings into objects, passes objects through unchanged, and
 * falls back to an empty object for null/undefined or other scalar inputs.
 * It centralizes the parsing so individual modules never guess the format.
 *
 * @param {*} data - raw ModuleIO.data payload.
 * @returns {object} parsed/normalized data.
 * @throws {Error} when `data` is a string but not valid JSON.
 */
export function normalizeData(data) {
    if (data == null)
        return {};
    if (typeof data === "object")
        return data;
    if (typeof data !== "string")
        return {};
    try {
        return JSON.parse(data);
    }
    catch {
        throw new Error("Invalid JSON string in ModuleIO.data");
    }
}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
// src/utils/planActions.ts
/**
 * Catalogue of plannable agent actions, grouped by execution phase.
 * Each entry: { action, groups, description }.
 */
export const PLAN_ACTIONS = [
    // =====================================================
    // INFORMATION ACQUISITION PHASE
    // Purpose: gather raw information, no interpretation
    // =====================================================
    {
        action: "fileSearch",
        groups: ["info"],
        description: "Search the repository for files matching a query, filename, extension, or pattern. Use when the user references files or concepts not already present in context."
    },
    // =====================================================
    // ROUTING / PLANNING PHASE
    // Purpose: decide how execution should proceed
    // =====================================================
    /* {
      action: "routeTask",
      groups: ["routing"],
      description:
        "Decide execution strategy based on semantic intent and scope. " +
        "Determines whether to proceed with direct execution, recursive planning, or plan-only output. " +
        "Produces executionMode and optional subtask definitions."
    }, */
    // =====================================================
    // TRANSFORM PHASE
    // Purpose: produce concrete changes or artifacts
    // =====================================================
    {
        action: "codeTransform",
        groups: ["transform"],
        description: "Generate concrete code changes according to the execution plan and selected source files. This produces structured patches or full file rewrites, but does NOT write anything to disk. It should only be planned when actual code modifications are required, never for analysis purposes. Must complete successfully before any 'writeFile' steps are executed."
    },
    {
        action: "writeFile",
        groups: ["transform"],
        description: "Persist file changes to disk. This action MUST ONLY be planned if codeTransform has already produced output, and MUST come after codeTransform in the plan."
    },
    {
        action: "cleanup",
        groups: ["transform"],
        description: "Normalize or post-process generated output into valid, machine-consumable structures. Used when strict JSON or schema adherence is required."
    },
    // =====================================================
    // FINALIZE PHASE
    // Purpose: commit results and respond to the user
    // =====================================================
    {
        action: "finalAnswer",
        groups: ["finalize"],
        description: "Produce the final user-facing response, explanation, or report. Summarizes actions taken and results produced."
    },
];
|