scai 0.1.103 → 0.1.104
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/CHANGELOG.md +5 -1
- package/dist/commands/SwitchCmd.js +1 -1
- package/dist/config.js +1 -1
- package/dist/context.js +27 -2
- package/dist/index.js +9 -6
- package/dist/pipeline/modules/cleanGeneratedTestsModule.js +21 -0
- package/dist/pipeline/modules/commentModule.js +4 -1
- package/dist/pipeline/modules/generateTestsModule.js +8 -5
- package/dist/pipeline/modules/preserveCodeModule.js +5 -1
- package/dist/pipeline/registry/moduleRegistry.js +4 -0
- package/dist/utils/checkModel.js +30 -0
- package/dist/utils/contentUtils.js +49 -0
- package/dist/utils/repoKey.js +1 -1
- package/dist/workflowManager.js +89 -0
- package/package.json +1 -1
- package/dist/agentManager.js +0 -47
- package/dist/utils/normalizePath.js +0 -23
package/dist/CHANGELOG.md
CHANGED
@@ -146,4 +146,8 @@ Type handling with the module pipeline
 
 • Fixed bug where entire block was returned as a single line for multi-line comments
 • Add multi-line comment handling with ~90% accuracy
-• Update CLI config file to use codellama:13b model and 4096 context length
+• Update CLI config file to use codellama:13b model and 4096 context length
+
+## 2025-08-30
+
+* Add new workflow management functionality to handle file writes.
package/dist/commands/SwitchCmd.js
CHANGED

@@ -1,7 +1,7 @@
 // File: src/commands/switch.ts
 import readline from 'readline';
 import { Config, writeConfig } from '../config.js';
-import { getRepoKeyForPath } from '../utils/normalizePath.js';
+import { getRepoKeyForPath } from '../utils/contentUtils.js';
 import chalk from 'chalk';
 export function runSwitchCommand(inputPathOrKey) {
     const config = Config.getRaw();
package/dist/config.js
CHANGED
@@ -2,7 +2,7 @@ import fs from 'fs';
 import path from 'path';
 import { CONFIG_PATH, SCAI_HOME, SCAI_REPOS } from './constants.js';
 import { getDbForRepo } from './db/client.js';
-import { normalizePath } from './utils/normalizePath.js';
+import { normalizePath } from './utils/contentUtils.js';
 import chalk from 'chalk';
 import { getHashedRepoKey } from './utils/repoKey.js';
 const defaultConfig = {
package/dist/context.js
CHANGED
@@ -1,10 +1,12 @@
 // context.ts
-import { readConfig, writeConfig } from "./config.js";
-import { normalizePath } from "./utils/normalizePath.js";
+import { readConfig, writeConfig, Config } from "./config.js";
+import { normalizePath } from "./utils/contentUtils.js";
 import { getHashedRepoKey } from "./utils/repoKey.js";
 import { getDbForRepo, getDbPathForRepo } from "./db/client.js";
 import fs from "fs";
 import chalk from "chalk";
+import { generate } from "./lib/generate.js"; // 👈 use your existing generate wrapper
+import { startModelProcess } from "./utils/checkModel.js";
 export async function updateContext() {
     const cwd = normalizePath(process.cwd());
     const cfg = readConfig();

@@ -54,6 +56,17 @@ export async function updateContext() {
     else if (isNewRepo || activeRepoChanged) {
         console.log(chalk.green("✅ Database present"));
     }
+    // ✅ NEW: Ensure model is available
+    if (ok) {
+        const modelReady = await ensureModelReady();
+        if (modelReady) {
+            console.log(chalk.green("✅ Model ready"));
+        }
+        else {
+            console.log(chalk.red("❌ Model not available"));
+            ok = false;
+        }
+    }
     // Final context status
     if (ok) {
         console.log(chalk.bold.green("\n✅ Context OK\n"));

@@ -63,3 +76,15 @@ export async function updateContext() {
     }
     return ok;
 }
+async function ensureModelReady() {
+    try {
+        // simple "ping" prompt that costs almost nothing
+        const res = await generate({ content: "ping" }, Config.getModel());
+        return Boolean(res?.content);
+    }
+    catch {
+        console.log(chalk.yellow("⚡ Model not responding. Attempting to start..."));
+        await startModelProcess();
+        return false;
+    }
+}
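The hunk above wires a model-availability check into `updateContext()`: a throwaway `generate({ content: "ping" })` call against the configured model, with `startModelProcess()` as the fallback when the call throws. Below is a minimal sketch of the same check-then-start pattern as a reusable helper; `waitForModel` and its retry loop are assumptions for illustration, not part of the shipped context.js, and the imports assume the dist layout shown in this diff.

```js
// Hypothetical helper reusing the package's own generate/startModelProcess.
import { generate } from "./lib/generate.js";
import { startModelProcess } from "./utils/checkModel.js";
import { Config } from "./config.js";

export async function waitForModel(retries = 3) {
    for (let attempt = 0; attempt < retries; attempt++) {
        try {
            // same cheap "ping" prompt that ensureModelReady() uses
            const res = await generate({ content: "ping" }, Config.getModel());
            if (res?.content) return true;
        }
        catch {
            // model not answering: try to spawn it, then loop and ping again
            await startModelProcess();
        }
    }
    return false;
}
```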
package/dist/index.js
CHANGED
@@ -26,13 +26,14 @@ import { runInteractiveSwitch } from "./commands/SwitchCmd.js";
 import { execSync } from "child_process";
 import { fileURLToPath } from "url";
 import { dirname, resolve } from "path";
-import { handleAgentRun } from './agentManager.js';
+import { handleAgentRun } from './workflowManager.js';
 import { addCommentsModule } from './pipeline/modules/commentModule.js';
 import { generateTestsModule } from './pipeline/modules/generateTestsModule.js';
 import { preserveCodeModule } from './pipeline/modules/preserveCodeModule.js';
 import { runInteractiveDelete } from './commands/DeleteIndex.js';
 import { resolveTargetsToFiles } from './utils/resolveTargetsToFiles.js';
 import { updateContext } from './context.js';
+import { cleanGeneratedTestsModule } from './pipeline/modules/cleanGeneratedTestsModule.js';
 // 🎛️ CLI Setup
 const cmd = new Command('scai')
     .version(version)

@@ -149,12 +150,14 @@ gen
     });
 });
 gen
-    .command(
-    .description(
-    .
-    .action(async (file) => {
+    .command("test <targets...>")
+    .description("Generate tests for the given file(s) or folder(s)")
+    .action(async (targets, options) => {
     await withContext(async () => {
-
+        const files = await resolveTargetsToFiles(targets);
+        for (const file of files) {
+            await handleAgentRun(file, [generateTestsModule, cleanGeneratedTestsModule]);
+        }
     });
 });
 // ⚙️ Group: Configuration settings
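Given the registration above, the new subcommand is presumably invoked as `scai gen test <file-or-dir> ...` (the exact spelling of the `gen` group is defined earlier in index.js and not shown in this hunk); each resolved file is run through generateTestsModule and then cleanGeneratedTestsModule via handleAgentRun.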
package/dist/pipeline/modules/cleanGeneratedTestsModule.js
ADDED

@@ -0,0 +1,21 @@
+import { normalizeText, stripMarkdownFences, isCodeLike } from "../../utils/contentUtils.js";
+export const cleanGeneratedTestsModule = {
+    name: "cleanGeneratedTestsModule",
+    description: "Removes markdown fences, prose, and explanations from generated test output, leaving only code",
+    async run(input) {
+        const { content, filepath } = input;
+        // normalize + strip markdown
+        const normalized = normalizeText(content);
+        const stripped = stripMarkdownFences(normalized);
+        // filter non-code lines
+        const lines = stripped.split("\n");
+        const codeLines = lines.filter(line => isCodeLike(line));
+        const cleanedCode = codeLines.join("\n");
+        return {
+            originalContent: content,
+            content: cleanedCode, // cleaned code for pipeline
+            filepath, // original file path
+            mode: "overwrite", // indicates overwrite existing file
+        };
+    }
+};
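For reference, a minimal sketch of running the new module on its own, assuming it is imported from the dist path added above and fed a typical fenced model reply (`sampleReply` and the file path are made up for illustration):

```js
import { cleanGeneratedTestsModule } from "./pipeline/modules/cleanGeneratedTestsModule.js";

// Made-up model output: prose plus a fenced code block (fence built at runtime
// only so this snippet nests cleanly in documentation).
const fence = "`".repeat(3);
const sampleReply = [
    "Here is a test for your class:",
    fence + "java",
    "import org.junit.jupiter.api.Test;",
    "class FooTest {",
    "    @Test",
    "    void works() {}",
    "}",
    fence,
].join("\n");

const out = await cleanGeneratedTestsModule.run({
    content: sampleReply,
    filepath: "src/FooTest.java", // hypothetical target path
});
console.log(out.mode);    // "overwrite"
console.log(out.content); // only the lines isCodeLike() accepts survive
```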
package/dist/pipeline/modules/commentModule.js
CHANGED

@@ -59,6 +59,9 @@ ${input.content}
 `.trim();
         const response = await generate({ content: prompt }, model);
         const contentToReturn = (response.content && response.content !== 'NO UPDATE') ? response.content : input.content;
-        return {
+        return {
+            content: contentToReturn,
+            mode: 'overwrite', // <-- declares that the original file should be overwritten
+        };
     },
 };
package/dist/pipeline/modules/generateTestsModule.js
CHANGED

@@ -1,4 +1,3 @@
-import fs from 'fs/promises';
 import path from 'path';
 import { generate } from '../../lib/generate.js';
 import { detectFileType } from '../../fileRules/detectFileType.js';

@@ -25,14 +24,18 @@ export const generateTestsModule = {
 --- CODE START ---
 ${content}
 --- CODE END ---
-
+`.trim();
         const response = await generate({ content: prompt }, model);
         if (!response)
             throw new Error('⚠️ No test code returned from model');
         const { dir, name } = path.parse(filepath);
         const testPath = path.join(dir, `${name}.test.ts`);
-
-
-
+        return {
+            originalContent: content,
+            content: response.content, // the test code
+            filepath, // original file path
+            newFilepath: testPath,
+            mode: "newFile" // where it *should* be written
+        };
     }
 };
package/dist/pipeline/modules/preserveCodeModule.js
CHANGED

@@ -144,6 +144,10 @@ export const preserveCodeModule = {
             const colored = type === "code" ? chalk.green(line) : chalk.yellow(line);
             console.log(`${i + 1}: ${colored} ${chalk.gray(`[${type}]`)}`);
         });
-        return {
+        return {
+            content: fixedLines.join("\n"),
+            filepath,
+            mode: "overwrite"
+        };
     }
 };
package/dist/pipeline/registry/moduleRegistry.js
CHANGED

@@ -4,6 +4,8 @@ import { summaryModule } from '../modules/summaryModule.js';
 import { generateTestsModule } from '../modules/generateTestsModule.js';
 import { commitSuggesterModule } from '../modules/commitSuggesterModule.js';
 import { changelogModule } from '../modules/changeLogModule.js';
+import { cleanGeneratedTestsModule } from '../modules/cleanGeneratedTestsModule.js';
+import { preserveCodeModule } from '../modules/preserveCodeModule.js';
 // Add more as needed...
 const builtInModules = {
     comments: addCommentsModule,

@@ -12,6 +14,8 @@ const builtInModules = {
     tests: generateTestsModule,
     suggest: commitSuggesterModule,
     changelog: changelogModule,
+    cleantTests: cleanGeneratedTestsModule,
+    cleanComments: preserveCodeModule
 };
 export function getModuleByName(name) {
     return builtInModules[name];
package/dist/utils/checkModel.js
ADDED

@@ -0,0 +1,30 @@
+import { spawn } from "child_process";
+let modelProcess = null;
+async function isModelRunning() {
+    try {
+        const res = await fetch("http://localhost:11434/health"); // whatever endpoint your model exposes
+        return res.ok;
+    }
+    catch {
+        return false;
+    }
+}
+export async function startModelProcess() {
+    if (await isModelRunning()) {
+        console.log("✅ Model already running");
+        return;
+    }
+    console.log("🚀 Starting model process...");
+    modelProcess = spawn("ollama", ["serve"], {
+        stdio: "inherit",
+    });
+    // Poll until the model is ready
+    for (let i = 0; i < 30; i++) {
+        if (await isModelRunning()) {
+            console.log("✅ Model is now running");
+            return;
+        }
+        await new Promise((res) => setTimeout(res, 1000));
+    }
+    throw new Error("❌ Model failed to start in time");
+}
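A hedged usage sketch: guard a `generate()` call with `startModelProcess()` before talking to the model. The import paths assume the dist layout in this diff, the model name is taken from the changelog entry above, and handling the "failed to start in time" error is left to the caller.

```js
import { startModelProcess } from "./utils/checkModel.js";
import { generate } from "./lib/generate.js";

try {
    // No-op if the health endpoint already answers; otherwise spawns `ollama serve`
    // and polls for up to ~30 seconds.
    await startModelProcess();
    const res = await generate({ content: "ping" }, "codellama:13b");
    console.log(res?.content ? "model answered" : "empty reply");
}
catch (err) {
    console.error("model unavailable:", err instanceof Error ? err.message : err);
}
```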
package/dist/utils/contentUtils.js
ADDED

@@ -0,0 +1,49 @@
+import os from 'os';
+import path from "path";
+/**
+ * Normalizes a path string for loose, fuzzy matching:
+ * - Lowercases
+ * - Removes slashes and backslashes
+ * - Removes whitespace
+ */
+export function normalizePathForLooseMatch(p) {
+    return p.toLowerCase().replace(/[\\/]/g, '').replace(/\s+/g, '');
+}
+// Helper to normalize and resolve paths to a consistent format (forward slashes)
+export function normalizePath(p) {
+    if (p.startsWith('~')) {
+        p = path.join(os.homedir(), p.slice(1));
+    }
+    return path.resolve(p).replace(/\\/g, '/');
+}
+export function getRepoKeyForPath(pathToMatch, config) {
+    const norm = normalizePath(pathToMatch);
+    return Object.entries(config.repos).find(([, val]) => normalizePath(val.indexDir) === norm)?.[0] || null;
+}
+export function normalizeText(txt) {
+    return txt.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
+}
+export function stripMarkdownFences(txt) {
+    return txt
+        .replace(/```[\w-]*\s*/g, "") // ``` or ```java
+        .replace(/```/g, ""); // closing ```
+}
+// Very naive classifier: decide if a line is "code-like"
+export function isCodeLike(line) {
+    const trimmed = line.trim();
+    if (!trimmed)
+        return false;
+    // obvious markdown / prose markers
+    if (/^(This|Here is|Note)\b/.test(trimmed))
+        return false;
+    if (/^\d+\./.test(trimmed))
+        return false; // bullet list
+    if (/^[-*] /.test(trimmed))
+        return false; // list
+    // allow imports, class, functions, braces, annotations, etc.
+    if (/^(import|export|public|private|protected|class|function|@Test|@Before)/.test(trimmed))
+        return true;
+    if (/[;{}()=]/.test(trimmed))
+        return true;
+    return false;
+}
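A small sketch of the path helpers, assuming the `config.repos[key].indexDir` shape implied by `getRepoKeyForPath`; the repo key and paths below are made up:

```js
import { normalizePath, getRepoKeyForPath } from "./utils/contentUtils.js";

// Illustrative config literal; the real one comes from readConfig()/Config.
const config = {
    repos: {
        "myrepo-a1b2c3": { indexDir: "~/projects/myrepo" }, // hypothetical entry
    },
};

console.log(normalizePath("~/projects/myrepo"));             // expanded, forward-slash absolute path
console.log(getRepoKeyForPath("~/projects/myrepo", config)); // "myrepo-a1b2c3"
console.log(getRepoKeyForPath("/somewhere/else", config));   // null (no indexDir matches)
```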
package/dist/utils/repoKey.js
CHANGED
@@ -1,6 +1,6 @@
 import crypto from 'crypto';
 import path from 'path';
-import { normalizePath } from './normalizePath.js';
+import { normalizePath } from './contentUtils.js';
 /**
  * Generate a stable unique key for a repo path.
  * Uses the basename plus a short hash of the full path.
package/dist/workflowManager.js
ADDED

@@ -0,0 +1,89 @@
+// agentManager.ts
+import fs from 'fs/promises';
+import chalk from 'chalk';
+import { runModulePipeline } from './pipeline/runModulePipeline.js';
+import { countTokens, splitCodeIntoChunks } from './utils/splitCodeIntoChunk.js';
+import { normalizePath } from './utils/contentUtils.js';
+export async function handleAgentRun(filepath, modules) {
+    try {
+        filepath = normalizePath(filepath);
+        let fileContent = await fs.readFile(filepath, 'utf-8');
+        // Immutable baseline for this file (stays until file changes)
+        const maxTokens = 1500;
+        const baseChunks = splitCodeIntoChunks(fileContent, maxTokens);
+        // Working chunks that flow through modules; stays index-aligned with baseChunks
+        let workingChunks = [...baseChunks];
+        for (const mod of modules) {
+            console.log(chalk.cyan(`\n⚙️ Running module: ${mod.name}`));
+            console.log(chalk.blue(`🧮 Tokens:`), chalk.yellow(countTokens(fileContent).toString()));
+            console.log(chalk.magenta(`📦 Chunks: ${workingChunks.length}`));
+            const processed = [];
+            let mode;
+            let newFilepath;
+            for (let i = 0; i < workingChunks.length; i++) {
+                const input = {
+                    originalContent: baseChunks[i], // immutable baseline for this file
+                    content: workingChunks[i], // current state for this slice
+                    filepath,
+                    chunkIndex: i,
+                    chunkCount: workingChunks.length,
+                };
+                const out = await runModulePipeline([mod], input);
+                if (!out.content?.trim()) {
+                    throw new Error(`⚠️ Empty result on chunk ${i + 1}`);
+                }
+                processed.push(out.content);
+                // Capture mode/path (should be consistent across chunks)
+                if (out.mode)
+                    mode = out.mode;
+                if (out.newFilepath)
+                    newFilepath = out.newFilepath;
+            }
+            const finalOutput = processed.join('\n\n');
+            // Apply output mode
+            switch (mode ?? 'overwrite') {
+                case 'overwrite':
+                    await fs.writeFile(filepath, finalOutput, 'utf-8');
+                    console.log(chalk.green(`✅ Overwritten: ${filepath}`));
+                    // keep baseChunks (baseline stays the same), keep alignment: do NOT re-chunk
+                    workingChunks = processed;
+                    fileContent = finalOutput;
+                    break;
+                case 'append':
+                    await fs.appendFile(filepath, finalOutput, 'utf-8');
+                    console.log(chalk.green(`✅ Appended: ${filepath}`));
+                    // appended file content diverges; keep alignment by using processed as new working
+                    workingChunks = processed;
+                    fileContent += finalOutput;
+                    break;
+                case 'newFile':
+                    if (!newFilepath)
+                        throw new Error(`newFile mode requires newFilepath`);
+                    await fs.writeFile(newFilepath, finalOutput, 'utf-8');
+                    console.log(chalk.green(`✅ New file: ${newFilepath}`));
+                    // File context changes → reset baseline and working to the new file
+                    filepath = newFilepath;
+                    fileContent = finalOutput;
+                    const reset = splitCodeIntoChunks(fileContent, maxTokens);
+                    // new baseline for the new file (e.g., generated tests before cleaning)
+                    for (let i = 0; i < reset.length; i++)
+                        ; // (no-op; just clarity)
+                    // Replace both arrays to keep them in sync for subsequent modules
+                    workingChunks = reset;
+                    // Important: also reset baseChunks to this new file’s content so the next module
+                    // (e.g., cleaner) sees the *generated tests* as its originalContent baseline.
+                    baseChunks.length = 0;
+                    baseChunks.push(...reset);
+                    break;
+                default:
+                    console.log(chalk.yellow(`⚠️ Unknown mode; skipping write`));
+                    // still move pipeline forward with processed
+                    workingChunks = processed;
+                    fileContent = finalOutput;
+            }
+        }
+    }
+    catch (err) {
+        console.error(chalk.red('❌ Error in agent run:'), err instanceof Error ? err.message : err);
+    }
+}
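The switch statement above defines the output contract a module hands back to `handleAgentRun`: `content` plus a `mode` of `overwrite`, `append`, or `newFile` (the latter also requiring `newFilepath`). Below is a minimal sketch of a custom module obeying that contract, assuming `runModulePipeline` calls each module's `run(input)` the way the built-in modules suggest; the module and target path are invented:

```js
import { handleAgentRun } from "./workflowManager.js";

const appendNoteModule = {
    name: "appendNoteModule", // hypothetical module
    description: "Appends a short note per chunk to the end of the file",
    async run(input) {
        return {
            content: `// note for chunk ${input.chunkIndex + 1}/${input.chunkCount}`,
            filepath: input.filepath,
            mode: "append", // handleAgentRun routes this to fs.appendFile
        };
    },
};

await handleAgentRun("src/example.ts", [appendNoteModule]); // hypothetical target file
```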
package/package.json
CHANGED
package/dist/agentManager.js
DELETED
@@ -1,47 +0,0 @@
-import fs from 'fs/promises';
-import chalk from 'chalk';
-import { runModulePipeline } from './pipeline/runModulePipeline.js';
-import { normalizePath } from './utils/normalizePath.js';
-import { readConfig } from './config.js';
-import { countTokens, splitCodeIntoChunks } from './utils/splitCodeIntoChunk.js';
-export async function handleAgentRun(filepath, modules) {
-    try {
-        filepath = normalizePath(filepath);
-        const content = await fs.readFile(filepath, 'utf-8');
-        const totalTokens = countTokens(content);
-        console.log(chalk.blue(`🧮 Total tokens in file:`), chalk.yellow(totalTokens.toString()));
-        const config = readConfig();
-        const maxTokens = 1500;
-        const chunks = splitCodeIntoChunks(content, maxTokens);
-        console.log(chalk.magenta(`📦 Split into ${chunks.length} chunks`));
-        const processedChunks = [];
-        for (const [i, chunk] of chunks.entries()) {
-            const chunkTokens = countTokens(chunk);
-            if (i === 0) {
-                console.log(chalk.cyan(`🔍 Processing ${chunks.length} chunks of file:`), chalk.white(filepath));
-            }
-            console.log(chalk.gray(` - Chunk ${i + 1} tokens:`), chalk.yellow(chunkTokens.toString()));
-            const chunkInput = {
-                originalContent: chunk,
-                content: chunk,
-                filepath,
-                chunkIndex: i,
-                chunkCount: chunks.length,
-            };
-            const response = await runModulePipeline(modules, chunkInput);
-            if (!response.content.trim()) {
-                throw new Error(`⚠️ Model returned empty result on chunk ${i + 1}`);
-            }
-            processedChunks.push(response.content);
-            //console.log(chalk.green(`✅ Finished chunk ${i + 1}/${chunks.length}`));
-        }
-        // Join all chunk outputs into one string
-        const finalOutput = processedChunks.join('\n\n');
-        // Overwrite original file here:
-        await fs.writeFile(filepath, finalOutput, 'utf-8');
-        console.log(chalk.green(`✅ Original file overwritten: ${filepath}`));
-    }
-    catch (err) {
-        console.error(chalk.red('❌ Error in agent run:'), err instanceof Error ? err.message : err);
-    }
-}
package/dist/utils/normalizePath.js
DELETED

@@ -1,23 +0,0 @@
-// src/utils/normalizePath.ts
-import os from 'os';
-import path from "path";
-/**
- * Normalizes a path string for loose, fuzzy matching:
- * - Lowercases
- * - Removes slashes and backslashes
- * - Removes whitespace
- */
-export function normalizePathForLooseMatch(p) {
-    return p.toLowerCase().replace(/[\\/]/g, '').replace(/\s+/g, '');
-}
-// Helper to normalize and resolve paths to a consistent format (forward slashes)
-export function normalizePath(p) {
-    if (p.startsWith('~')) {
-        p = path.join(os.homedir(), p.slice(1));
-    }
-    return path.resolve(p).replace(/\\/g, '/');
-}
-export function getRepoKeyForPath(pathToMatch, config) {
-    const norm = normalizePath(pathToMatch);
-    return Object.entries(config.repos).find(([, val]) => normalizePath(val.indexDir) === norm)?.[0] || null;
-}