deepdebug-local-agent 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dockerignore +24 -0
- package/.idea/deepdebug-local-agent.iml +12 -0
- package/.idea/modules.xml +8 -0
- package/.idea/vcs.xml +6 -0
- package/Dockerfile +46 -0
- package/cloudbuild.yaml +42 -0
- package/index.js +42 -0
- package/mcp-server.js +533 -0
- package/package.json +22 -0
- package/src/ai-engine.js +861 -0
- package/src/analyzers/config-analyzer.js +446 -0
- package/src/analyzers/controller-analyzer.js +429 -0
- package/src/analyzers/dto-analyzer.js +455 -0
- package/src/detectors/build-tool-detector.js +0 -0
- package/src/detectors/framework-detector.js +91 -0
- package/src/detectors/language-detector.js +89 -0
- package/src/detectors/multi-project-detector.js +191 -0
- package/src/detectors/service-detector.js +244 -0
- package/src/detectors.js +30 -0
- package/src/exec-utils.js +215 -0
- package/src/fs-utils.js +34 -0
- package/src/git/base-git-provider.js +384 -0
- package/src/git/git-provider-registry.js +110 -0
- package/src/git/github-provider.js +502 -0
- package/src/mcp-http-server.js +313 -0
- package/src/patch/patch-engine.js +339 -0
- package/src/patch-manager.js +816 -0
- package/src/patch.js +607 -0
- package/src/patch_bkp.js +154 -0
- package/src/ports.js +69 -0
- package/src/routes/workspace.route.js +528 -0
- package/src/runtimes/base-runtime.js +290 -0
- package/src/runtimes/java/gradle-runtime.js +378 -0
- package/src/runtimes/java/java-integrations.js +339 -0
- package/src/runtimes/java/maven-runtime.js +418 -0
- package/src/runtimes/node/node-integrations.js +247 -0
- package/src/runtimes/node/npm-runtime.js +466 -0
- package/src/runtimes/node/yarn-runtime.js +354 -0
- package/src/runtimes/runtime-registry.js +256 -0
- package/src/server-local.js +576 -0
- package/src/server.js +4565 -0
- package/src/utils/environment-diagnostics.js +666 -0
- package/src/utils/exec-utils.js +264 -0
- package/src/utils/fs-utils.js +218 -0
- package/src/workspace/detect-port.js +176 -0
- package/src/workspace/file-reader.js +54 -0
- package/src/workspace/git-client.js +0 -0
- package/src/workspace/process-manager.js +619 -0
- package/src/workspace/scanner.js +72 -0
- package/src/workspace-manager.js +172 -0
|
@@ -0,0 +1,313 @@
|
|
|
1
|
+
import express from "express";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import { spawn } from "child_process";
|
|
4
|
+
import { readFile, writeFile, listRecursive, exists } from "./fs-utils.js";
|
|
5
|
+
import { detectProject } from "./detectors.js";
|
|
6
|
+
import { applyUnifiedDiff } from "./patch.js";
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* MCP HTTP Bridge — Expõe as MCP tools como REST API.
|
|
10
|
+
*
|
|
11
|
+
* O Gateway Java chama estes endpoints para obter contexto do código
|
|
12
|
+
* ANTES de enviar o prompt ao Claude.
|
|
13
|
+
*
|
|
14
|
+
* Port: 5056 (separado do Express principal na 5055)
|
|
15
|
+
*
|
|
16
|
+
* Isto é mais prático que stdio MCP para comunicação Gateway ↔ Agent,
|
|
17
|
+
* e mantém compatibilidade com MCP tools para uso futuro com Claude Code.
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
// Directory names skipped by the listing and search endpoints — build output,
// dependency caches, and VCS/IDE metadata that would only add noise.
const IGNORE_DIRS = ["node_modules", ".git", "target", "build", "dist", ".idea", "__pycache__", "vendor", ".gradle"];
|
|
21
|
+
|
|
22
|
+
/**
 * MCP HTTP Bridge — exposes the MCP tools as a REST API.
 *
 * The Java Gateway calls these endpoints to gather code context BEFORE
 * sending the prompt to Claude. Listens on its own port (default 5056),
 * separate from the main Express server on 5055.
 *
 * @param {object} workspaceManager - Registry of open workspaces; must provide
 *   open(id, root), close(id), list(), resolveRoot(id) and a `count` property.
 * @param {number} [port=5056] - HTTP port to listen on.
 * @returns {object} The Express app (already listening).
 */
export function startMCPHttpServer(workspaceManager, port = 5056) {
  const app = express();
  app.use(express.json({ limit: "50mb" }));

  // True when `target` resolves to `root` itself or a path inside it.
  // A plain startsWith() check is unsafe: "/ws/app-evil".startsWith("/ws/app")
  // is true even though it is a sibling directory outside the workspace.
  const isInsideRoot = (root, target) => {
    const rel = path.relative(root, target);
    return rel === "" || (!rel.startsWith("..") && !path.isAbsolute(rel));
  };

  // ========================================
  // Health
  // ========================================
  app.get("/health", (_req, res) => {
    res.json({
      status: "ok",
      server: "deepdebug-mcp",
      workspaces: workspaceManager.count,
      openWorkspaces: workspaceManager.list().map(w => ({
        id: w.id, root: w.root, language: w.projectInfo?.language
      })),
    });
  });

  // ========================================
  // POST /mcp/open — Open workspace
  // ========================================
  app.post("/mcp/open", async (req, res) => {
    const { workspaceId, root } = req.body;
    if (!workspaceId || !root) {
      return res.status(400).json({ error: "workspaceId and root are required" });
    }
    try {
      const ws = await workspaceManager.open(workspaceId, root);
      res.json({ ok: true, workspace: ws });
    } catch (e) {
      res.status(400).json({ error: e.message });
    }
  });

  // ========================================
  // POST /mcp/close — Close workspace
  // ========================================
  app.post("/mcp/close", (req, res) => {
    const { workspaceId } = req.body;
    const closed = workspaceManager.close(workspaceId);
    res.json({ ok: closed });
  });

  // ========================================
  // GET /mcp/workspaces — List all open
  // ========================================
  app.get("/mcp/workspaces", (_req, res) => {
    res.json({ workspaces: workspaceManager.list() });
  });

  // ========================================
  // POST /mcp/read-file
  // ========================================
  app.post("/mcp/read-file", async (req, res) => {
    const { workspaceId, path: filePath } = req.body;
    try {
      const root = workspaceManager.resolveRoot(workspaceId);
      const fullPath = path.resolve(root, filePath);
      if (!isInsideRoot(root, fullPath)) {
        return res.status(403).json({ error: "Path outside workspace" });
      }
      if (!(await exists(fullPath))) {
        return res.status(404).json({ error: `File not found: ${filePath}` });
      }
      const content = await readFile(fullPath, "utf8");
      res.json({
        ok: true,
        path: filePath,
        content,
        size: Buffer.byteLength(content),
        lines: content.split("\n").length,
      });
    } catch (e) {
      res.status(400).json({ error: e.message });
    }
  });

  // ========================================
  // POST /mcp/read-files — Batch read
  // ========================================
  app.post("/mcp/read-files", async (req, res) => {
    const { workspaceId, paths } = req.body;
    if (!paths || !Array.isArray(paths)) {
      return res.status(400).json({ error: "paths array required" });
    }
    try {
      const root = workspaceManager.resolveRoot(workspaceId);
      const results = {};

      for (const filePath of paths.slice(0, 20)) { // Max 20 files per request
        const fullPath = path.resolve(root, filePath);
        if (!isInsideRoot(root, fullPath)) continue; // silently skip escapes
        try {
          const content = await readFile(fullPath, "utf8");
          results[filePath] = { ok: true, content, lines: content.split("\n").length };
        } catch {
          results[filePath] = { ok: false, error: "File not found" };
        }
      }

      res.json({ ok: true, files: results, count: Object.keys(results).length });
    } catch (e) {
      res.status(400).json({ error: e.message });
    }
  });

  // ========================================
  // POST /mcp/list-directory
  // ========================================
  app.post("/mcp/list-directory", async (req, res) => {
    const { workspaceId, path: dirPath = "", maxFiles = 500 } = req.body;
    try {
      const root = workspaceManager.resolveRoot(workspaceId);
      const targetDir = dirPath ? path.resolve(root, dirPath) : root;

      if (!isInsideRoot(root, targetDir)) {
        return res.status(403).json({ error: "Path outside workspace" });
      }

      const files = await listRecursive(targetDir, { maxFiles });
      const filtered = files.filter(f => !IGNORE_DIRS.some(ig => f.path.includes(ig)));

      res.json({
        ok: true,
        root: dirPath || "/",
        totalItems: filtered.length,
        items: filtered,
      });
    } catch (e) {
      res.status(400).json({ error: e.message });
    }
  });

  // ========================================
  // POST /mcp/search-code
  // ========================================
  app.post("/mcp/search-code", async (req, res) => {
    const { workspaceId, query, filePattern = "", maxResults = 50 } = req.body;
    if (!query) return res.status(400).json({ error: "query required" });

    try {
      const root = workspaceManager.resolveRoot(workspaceId);
      const results = await grepWorkspace(root, query, filePattern, maxResults);
      res.json({ ok: true, query, matches: results.length, results });
    } catch (e) {
      res.status(400).json({ error: e.message });
    }
  });

  // ========================================
  // POST /mcp/execute-command
  // ========================================
  app.post("/mcp/execute-command", async (req, res) => {
    const { workspaceId, command, timeout = 120 } = req.body;
    if (!command) return res.status(400).json({ error: "command required" });

    // Security: reject a few obviously destructive patterns. This is a
    // best-effort denylist, not a sandbox — the command still runs under sh.
    const BLOCKED = ["rm -rf /", "mkfs", "dd if=/dev", ":(){ :|:"];
    if (BLOCKED.some(b => command.includes(b))) {
      return res.status(403).json({ error: "Command blocked" });
    }

    try {
      const root = workspaceManager.resolveRoot(workspaceId);
      const result = await execInWorkspace(root, command, timeout);
      res.json({ ok: result.exitCode === 0, ...result });
    } catch (e) {
      res.status(400).json({ error: e.message });
    }
  });

  // ========================================
  // POST /mcp/project-info
  // ========================================
  app.post("/mcp/project-info", async (req, res) => {
    const { workspaceId } = req.body;
    try {
      const root = workspaceManager.resolveRoot(workspaceId);
      const projectInfo = await detectProject(root);
      const files = await listRecursive(root, { maxFiles: 200 });
      const filtered = files.filter(f => !IGNORE_DIRS.some(ig => f.path.includes(ig)));

      // Rough language histogram by file extension.
      const sourceExts = { java: 0, js: 0, ts: 0, py: 0, go: 0, cs: 0 };
      filtered.forEach(f => {
        if (f.type !== "file") return;
        const ext = f.path.split(".").pop();
        if (ext in sourceExts) sourceExts[ext]++;
      });

      res.json({
        ok: true,
        ...projectInfo,
        root,
        totalFiles: filtered.length,
        sourceFiles: sourceExts,
        topDirectories: [...new Set(filtered
          .filter(f => f.type === "dir")
          .map(f => f.path.split("/")[0])
        )].slice(0, 25),
      });
    } catch (e) {
      res.status(400).json({ error: e.message });
    }
  });

  // ========================================
  // POST /mcp/apply-patch
  // ========================================
  app.post("/mcp/apply-patch", async (req, res) => {
    const { workspaceId, diff } = req.body;
    if (!diff) return res.status(400).json({ error: "diff required" });

    try {
      const root = workspaceManager.resolveRoot(workspaceId);
      const result = await applyUnifiedDiff(root, diff);
      res.json({ ok: true, ...result });
    } catch (e) {
      res.status(400).json({ error: e.message });
    }
  });

  // ========================================
  // START SERVER
  // ========================================
  app.listen(port, () => {
    console.log(`🔌 MCP HTTP Bridge listening on http://localhost:${port}`);
  });

  return app;
}
|
|
252
|
+
|
|
253
|
+
// ========================================
|
|
254
|
+
// HELPERS
|
|
255
|
+
// ========================================
|
|
256
|
+
|
|
257
|
+
/**
 * Run `grep -rn` inside a workspace and parse the matches.
 *
 * Resolves to an array of { file, line, content } objects (or { raw } for
 * lines that don't match the expected grep output shape). Never rejects:
 * spawn failures and empty output both resolve to [].
 *
 * @param {string} root - Workspace root (grep cwd).
 * @param {string} query - Pattern passed to grep.
 * @param {string} filePattern - Optional --include glob (e.g. "*.java").
 * @param {number} maxResults - Cap on matches per file (-m) and total parsed.
 * @returns {Promise<Array<object>>}
 */
function grepWorkspace(root, query, filePattern, maxResults) {
  return new Promise((resolve) => {
    const args = [
      "-r", "-n",
      ...IGNORE_DIRS.flatMap(d => ["--exclude-dir", d]),
    ];
    if (filePattern) args.push("--include", filePattern);
    args.push("-m", String(maxResults), query, ".");

    const child = spawn("grep", args, { cwd: root });
    let stdout = "";
    let settled = false;

    // Settle-once guard: "error" and "close" can both fire.
    const finish = (value) => {
      if (settled) return;
      settled = true;
      clearTimeout(timer);
      resolve(value);
    };

    child.stdout.on("data", d => stdout += d.toString());

    // Kill a runaway search after 15s; "close" then settles with what we have.
    const timer = setTimeout(() => { try { child.kill(); } catch {} }, 15000);

    // If grep itself cannot be spawned, settle instead of hanging forever.
    child.on("error", () => finish([]));

    child.on("close", () => {
      if (!stdout.trim()) return finish([]);

      const results = stdout.trim().split("\n").slice(0, maxResults).map(line => {
        // grep output format: ./path/to/file.java:42: code line content
        const match = line.match(/^\.\/(.+?):(\d+):(.*)$/);
        if (match) {
          return { file: match[1], line: parseInt(match[2], 10), content: match[3].trim() };
        }
        return { raw: line };
      });
      finish(results);
    });
  });
}
|
|
288
|
+
|
|
289
|
+
/**
 * Execute a shell command inside a workspace with a hard timeout.
 *
 * Never rejects — always resolves to { exitCode, stdout, stderr, timedOut }.
 * stdout is middle-truncated past 10 KB; stderr is capped at 5 KB.
 *
 * @param {string} root - Working directory for the command.
 * @param {string} command - Shell command, run via `sh -c`.
 * @param {number} timeoutSec - Seconds before the process is SIGKILLed.
 * @returns {Promise<{exitCode:number, stdout:string, stderr:string, timedOut:boolean}>}
 */
function execInWorkspace(root, command, timeoutSec) {
  return new Promise((resolve) => {
    const child = spawn("sh", ["-c", command], { cwd: root });
    let stdout = "";
    let stderr = "";
    let settled = false;

    // Settle-once guard: timeout, "error" and "close" may all fire.
    const finish = (result) => {
      if (settled) return;
      settled = true;
      clearTimeout(timer);
      resolve(result);
    };

    child.stdout.on("data", d => stdout += d.toString());
    child.stderr.on("data", d => stderr += d.toString());

    const timer = setTimeout(() => {
      try { child.kill("SIGKILL"); } catch {}
      finish({ exitCode: -1, stdout, stderr, timedOut: true });
    }, timeoutSec * 1000);

    // Spawn failure (e.g. "sh" missing): report instead of hanging the caller.
    child.on("error", (err) => {
      finish({ exitCode: -1, stdout, stderr: err.message, timedOut: false });
    });

    child.on("close", (code) => {
      // Truncate long outputs, keeping the head and tail halves.
      const maxLen = 10000;
      if (stdout.length > maxLen) {
        stdout = stdout.substring(0, maxLen / 2) + "\n...(truncated)...\n" + stdout.substring(stdout.length - maxLen / 2);
      }
      finish({ exitCode: code, stdout, stderr: stderr.substring(0, 5000), timedOut: false });
    });
  });
}
|
|
@@ -0,0 +1,339 @@
|
|
|
1
|
+
import path from 'path';
|
|
2
|
+
import { readFile, writeFile, exists, ensureDir, copyFile } from '../utils/fs-utils.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* PatchEngine
|
|
6
|
+
*
|
|
7
|
+
* Sistema de aplicação de patches com backup e rollback.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Parseia unified diff
|
|
12
|
+
* @param {string} diffText - Texto do diff
|
|
13
|
+
* @returns {Patch[]}
|
|
14
|
+
*/
|
|
15
|
+
/**
 * Parse a unified diff into patch objects.
 *
 * Each patch is { oldFileName, newFileName, hunks } and each hunk is
 * { oldStart, oldLines, newStart, newLines, lines } where `lines` keeps the
 * leading ' ', '+' or '-' marker.
 *
 * Hunk lines are counted against the @@-header totals, so content that
 * happens to begin with "---", "+++" or "@@" inside a hunk is NOT misread
 * as a new file header (the original check-order bug).
 *
 * @param {string} diffText - Unified diff text.
 * @returns {Array<object>} Parsed patches (empty array for no headers).
 */
export function parsePatch(diffText) {
  // Strip the "---"/"+++" marker, a git "a/"/"b/" prefix, and any
  // tab-separated timestamp traditional diff appends to header lines.
  const stripName = (l) => l
    .replace(/^[-+]{3}\s+[ab]\//, '')
    .replace(/^[-+]{3}\s+/, '')
    .split('\t')[0];

  const patches = [];
  let currentPatch = null;
  let currentHunk = null;
  let oldRemaining = 0; // '-'/context lines the open hunk still expects
  let newRemaining = 0; // '+'/context lines the open hunk still expects

  for (const line of diffText.split('\n')) {
    // While a hunk is still expecting lines, classify strictly by marker.
    if (currentHunk && (oldRemaining > 0 || newRemaining > 0)) {
      const tag = line[0];
      if (tag === ' ' || line === '') {
        // Some producers emit a bare empty line for empty context lines;
        // normalize to " " so applyPatch sees a context marker.
        currentHunk.lines.push(line === '' ? ' ' : line);
        oldRemaining--;
        newRemaining--;
        continue;
      }
      if (tag === '-') {
        currentHunk.lines.push(line);
        oldRemaining--;
        continue;
      }
      if (tag === '+') {
        currentHunk.lines.push(line);
        newRemaining--;
        continue;
      }
      if (tag === '\\') continue; // "\ No newline at end of file"
      // Anything else terminates the hunk early; fall through to headers.
      currentHunk = null;
      oldRemaining = 0;
      newRemaining = 0;
    }

    if (line.startsWith('---')) {
      // Start of a new file section.
      if (currentPatch) patches.push(currentPatch);
      currentPatch = {
        oldFileName: stripName(line),
        newFileName: null,
        hunks: []
      };
    } else if (line.startsWith('+++') && currentPatch) {
      currentPatch.newFileName = stripName(line);
    } else if (line.startsWith('@@') && currentPatch) {
      const m = line.match(/@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@/);
      if (m) {
        currentHunk = {
          oldStart: parseInt(m[1], 10),
          oldLines: m[2] ? parseInt(m[2], 10) : 1,
          newStart: parseInt(m[3], 10),
          newLines: m[4] ? parseInt(m[4], 10) : 1,
          lines: []
        };
        currentPatch.hunks.push(currentHunk);
        oldRemaining = currentHunk.oldLines;
        newRemaining = currentHunk.newLines;
      }
    }
  }

  if (currentPatch) patches.push(currentPatch);
  return patches;
}
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Extrai nome do arquivo da linha --- ou +++
|
|
74
|
+
*/
|
|
75
|
+
/**
 * Extract the file name from a "---" or "+++" diff header line.
 *
 * Strips the three-character marker, a git-style "a/" or "b/" prefix, and
 * any tab-separated timestamp that traditional `diff -u` appends
 * (e.g. "--- a/x.c\t2024-01-01 00:00:00").
 *
 * @param {string} line - A header line beginning with "---" or "+++".
 * @returns {string} The bare file name (may be "/dev/null").
 */
function extractFileName(line) {
  const name = line.replace(/^[-+]{3}\s+[ab]\//, '').replace(/^[-+]{3}\s+/, '');
  return name.split('\t')[0];
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Aplica patch a um arquivo
|
|
81
|
+
* @param {string} original - Conteúdo original
|
|
82
|
+
* @param {Patch} patch - Patch a aplicar
|
|
83
|
+
* @returns {string|false}
|
|
84
|
+
*/
|
|
85
|
+
/**
 * Apply a parsed patch to file content.
 *
 * Fixes the original implementation's offset bug: hunk coordinates
 * (hunk.oldStart) refer to the ORIGINAL file, but context was verified at
 * `oldStart - 1 + offset`, so every hunk after the first was checked (and
 * applied) at shifted, wrong lines. This version walks the original lines
 * forward in original coordinates and builds the output directly.
 *
 * Context/deletion lines are matched exactly first, then leniently with
 * trimmed comparison (preserving the original's whitespace tolerance);
 * the actual original line is kept for fuzzy-matched context.
 *
 * @param {string} original - Original file content.
 * @param {object} patch - Parsed patch with `hunks` (from parsePatch).
 * @returns {string|false} Patched content, or false when a hunk's context
 *   does not match (a warning is logged).
 */
export function applyPatch(original, patch) {
  const src = original.split('\n');
  const out = [];
  let srcIdx = 0; // next unconsumed index into src (original coordinates)

  for (const hunk of patch.hunks) {
    const hunkStart = hunk.oldStart - 1;

    // Hunks must be in ascending, non-overlapping order.
    if (hunkStart < srcIdx) {
      console.warn(`⚠️ Hunk at line ${hunk.oldStart} doesn't match context`);
      return false;
    }

    // Copy the untouched region before this hunk verbatim.
    while (srcIdx < hunkStart) out.push(src[srcIdx++]);

    for (const line of hunk.lines) {
      const tag = line[0];
      const content = line.substring(1);

      if (tag === ' ' || tag === '-') {
        const actual = src[srcIdx];
        if (actual !== content && actual?.trim() !== content.trim()) {
          console.warn(`⚠️ Hunk at line ${hunk.oldStart} doesn't match context`);
          return false;
        }
        if (tag === ' ') out.push(actual); // keep the file's own whitespace
        srcIdx++;
      } else if (tag === '+') {
        out.push(content);
      }
    }
  }

  // Copy everything after the last hunk.
  while (srcIdx < src.length) out.push(src[srcIdx++]);

  return out.join('\n');
}
|
|
158
|
+
|
|
159
|
+
/**
|
|
160
|
+
* Aplica unified diff a um workspace
|
|
161
|
+
* @param {string} root - Raiz do workspace
|
|
162
|
+
* @param {string} diffText - Texto do diff
|
|
163
|
+
* @returns {Promise<PatchResult>}
|
|
164
|
+
*/
|
|
165
|
+
/**
 * Apply a unified diff against a workspace on disk.
 *
 * New files (old side "/dev/null") are created from the '+' lines of their
 * hunks; existing files are patched via applyPatch and rewritten.
 *
 * @param {string} root - Workspace root directory.
 * @param {string} diffText - Unified diff text.
 * @returns {Promise<object>} Summary: { target, bytes, results, patchCount }
 *   where `target`/`bytes` describe the first touched file.
 * @throws {Error} When the diff contains no patches, a target file is
 *   missing, or a hunk fails to apply.
 */
export async function applyUnifiedDiff(root, diffText) {
  const patches = parsePatch(diffText);
  if (patches.length === 0) {
    throw new Error('No patches found in diff');
  }

  const results = [];

  for (const patch of patches) {
    const targetRel = patch.newFileName?.replace(/^b\//, '')
      || patch.oldFileName?.replace(/^a\//, '');
    if (!targetRel || targetRel === '/dev/null') continue;

    const absolute = path.join(root, targetRel);
    const fileExists = await exists(absolute);

    if (!fileExists && patch.oldFileName !== '/dev/null') {
      throw new Error(`File not found: ${targetRel}`);
    }

    if (!fileExists) {
      // Brand-new file: its content is exactly the added lines of every hunk.
      const addedLines = [];
      for (const hunk of patch.hunks) {
        for (const raw of hunk.lines) {
          if (raw.startsWith('+')) addedLines.push(raw.substring(1));
        }
      }
      const content = addedLines.join('\n');

      await ensureDir(path.dirname(absolute));
      await writeFile(absolute, content, 'utf8');

      results.push({
        file: targetRel,
        action: 'created',
        bytes: Buffer.byteLength(content, 'utf8')
      });
      continue;
    }

    const before = await readFile(absolute, 'utf8');
    const after = applyPatch(before, patch);
    if (after === false) {
      throw new Error(`Failed to apply patch to ${targetRel}`);
    }

    await writeFile(absolute, after, 'utf8');
    results.push({
      file: targetRel,
      action: 'modified',
      bytes: Buffer.byteLength(after, 'utf8')
    });
  }

  const first = results[0] || { file: null, action: null, bytes: 0 };
  return {
    target: first.file,
    bytes: first.bytes,
    results,
    patchCount: results.length
  };
}
|
|
232
|
+
|
|
233
|
+
/**
|
|
234
|
+
* Cria backup de arquivos antes de aplicar patch
|
|
235
|
+
* @param {string} root - Raiz do workspace
|
|
236
|
+
* @param {string[]} files - Lista de arquivos
|
|
237
|
+
* @param {string} backupId - ID do backup
|
|
238
|
+
* @returns {Promise<BackupResult>}
|
|
239
|
+
*/
|
|
240
|
+
/**
 * Back up a set of workspace files before a patch is applied.
 *
 * Copies each existing file into `.insptech/backups/<backupId>/`, preserving
 * relative paths, then writes a manifest.json describing the snapshot.
 * Missing files are skipped silently.
 *
 * @param {string} root - Workspace root directory.
 * @param {string[]} files - Relative paths to back up.
 * @param {string} backupId - Identifier for this backup set.
 * @returns {Promise<{backupId:string, backupDir:string, files:string[]}>}
 */
export async function createBackup(root, files, backupId) {
  const backupDir = path.join(root, '.insptech', 'backups', backupId);
  await ensureDir(backupDir);

  const backedUp = [];
  for (const relPath of files) {
    const source = path.join(root, relPath);
    if (!(await exists(source))) continue; // nothing to snapshot

    const destination = path.join(backupDir, relPath);
    await ensureDir(path.dirname(destination));
    await copyFile(source, destination);
    backedUp.push(relPath);
  }

  // Manifest lets restoreBackup know exactly what was captured and when.
  const manifest = {
    id: backupId,
    timestamp: new Date().toISOString(),
    files: backedUp
  };
  await writeFile(
    path.join(backupDir, 'manifest.json'),
    JSON.stringify(manifest, null, 2),
    'utf8'
  );

  return { backupId, backupDir, files: backedUp };
}
|
|
276
|
+
|
|
277
|
+
/**
|
|
278
|
+
* Restaura backup
|
|
279
|
+
* @param {string} root - Raiz do workspace
|
|
280
|
+
* @param {string} backupId - ID do backup
|
|
281
|
+
* @returns {Promise<RestoreResult>}
|
|
282
|
+
*/
|
|
283
|
+
/**
 * Restore a backup created by createBackup.
 *
 * Reads the backup's manifest.json and copies each captured file back to its
 * original location under `root`. Files missing from the backup directory
 * are skipped silently.
 *
 * @param {string} root - Workspace root directory.
 * @param {string} backupId - Identifier of the backup to restore.
 * @returns {Promise<{backupId:string, files:string[]}>} Restored file list.
 * @throws {Error} When no backup directory exists for `backupId`.
 */
export async function restoreBackup(root, backupId) {
  const backupDir = path.join(root, '.insptech', 'backups', backupId);
  if (!(await exists(backupDir))) {
    throw new Error(`Backup not found: ${backupId}`);
  }

  const manifest = JSON.parse(
    await readFile(path.join(backupDir, 'manifest.json'), 'utf8')
  );

  const restored = [];
  for (const relPath of manifest.files) {
    const source = path.join(backupDir, relPath);
    if (!(await exists(source))) continue;

    await copyFile(source, path.join(root, relPath));
    restored.push(relPath);
  }

  return { backupId, files: restored };
}
|
|
310
|
+
|
|
311
|
+
/**
|
|
312
|
+
* Extrai arquivos afetados por um diff
|
|
313
|
+
* @param {string} diffText - Texto do diff
|
|
314
|
+
* @returns {string[]}
|
|
315
|
+
*/
|
|
316
|
+
/**
 * List every file path touched by a unified diff.
 *
 * Both sides of each patch are collected (git "a/"/"b/" prefixes stripped,
 * "/dev/null" excluded), deduplicated, and returned in first-seen order.
 *
 * @param {string} diffText - Unified diff text.
 * @returns {string[]} Unique affected file paths.
 */
export function extractAffectedFiles(diffText) {
  const affected = new Set();

  for (const { oldFileName, newFileName } of parsePatch(diffText)) {
    if (newFileName && newFileName !== '/dev/null') {
      affected.add(newFileName.replace(/^b\//, ''));
    }
    if (oldFileName && oldFileName !== '/dev/null') {
      affected.add(oldFileName.replace(/^a\//, ''));
    }
  }

  return [...affected];
}
|
|
331
|
+
|
|
332
|
+
// Aggregate default export so consumers can `import patchEngine from
// './patch-engine.js'` in addition to using the named exports above.
export default {
  parsePatch,
  applyPatch,
  applyUnifiedDiff,
  createBackup,
  restoreBackup,
  extractAffectedFiles
};
|