scai 0.1.117 → 0.1.119

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. package/README.md +88 -503
  2. package/dist/agents/MainAgent.js +255 -0
  3. package/dist/agents/contextReviewStep.js +104 -0
  4. package/dist/agents/finalPlanGenStep.js +123 -0
  5. package/dist/agents/infoPlanGenStep.js +126 -0
  6. package/dist/agents/planGeneratorStep.js +118 -0
  7. package/dist/agents/planResolverStep.js +95 -0
  8. package/dist/agents/planTargetFilesStep.js +48 -0
  9. package/dist/agents/preFileSearchCheckStep.js +95 -0
  10. package/dist/agents/selectRelevantSourcesStep.js +100 -0
  11. package/dist/agents/semanticAnalysisStep.js +144 -0
  12. package/dist/agents/structuralAnalysisStep.js +46 -0
  13. package/dist/agents/transformPlanGenStep.js +107 -0
  14. package/dist/agents/understandIntentStep.js +72 -0
  15. package/dist/agents/validationAnalysisStep.js +87 -0
  16. package/dist/commands/AskCmd.js +47 -116
  17. package/dist/commands/ChangeLogUpdateCmd.js +11 -5
  18. package/dist/commands/CommitSuggesterCmd.js +50 -75
  19. package/dist/commands/DaemonCmd.js +119 -29
  20. package/dist/commands/IndexCmd.js +41 -24
  21. package/dist/commands/InspectCmd.js +0 -1
  22. package/dist/commands/ReadlineSingleton.js +18 -0
  23. package/dist/commands/ResetDbCmd.js +20 -21
  24. package/dist/commands/ReviewCmd.js +89 -54
  25. package/dist/commands/SummaryCmd.js +12 -18
  26. package/dist/commands/WorkflowCmd.js +41 -0
  27. package/dist/commands/factory.js +254 -0
  28. package/dist/config.js +67 -15
  29. package/dist/constants.js +20 -4
  30. package/dist/context.js +10 -11
  31. package/dist/daemon/daemonQueues.js +63 -0
  32. package/dist/daemon/daemonWorker.js +40 -63
  33. package/dist/daemon/generateSummaries.js +58 -0
  34. package/dist/daemon/runFolderCapsuleBatch.js +247 -0
  35. package/dist/daemon/runIndexingBatch.js +147 -0
  36. package/dist/daemon/runKgBatch.js +104 -0
  37. package/dist/db/fileIndex.js +168 -63
  38. package/dist/db/functionExtractors/extractFromJava.js +210 -6
  39. package/dist/db/functionExtractors/extractFromJs.js +173 -214
  40. package/dist/db/functionExtractors/extractFromTs.js +159 -160
  41. package/dist/db/functionExtractors/index.js +7 -5
  42. package/dist/db/schema.js +55 -20
  43. package/dist/db/sqlTemplates.js +50 -19
  44. package/dist/fileRules/builtins.js +31 -14
  45. package/dist/fileRules/codeAllowedExtensions.js +4 -0
  46. package/dist/fileRules/fileExceptions.js +0 -13
  47. package/dist/fileRules/ignoredExtensions.js +10 -0
  48. package/dist/index.js +128 -325
  49. package/dist/lib/generate.js +37 -14
  50. package/dist/lib/generateFolderCapsules.js +109 -0
  51. package/dist/lib/spinner.js +12 -5
  52. package/dist/modelSetup.js +1 -11
  53. package/dist/pipeline/modules/changeLogModule.js +16 -19
  54. package/dist/pipeline/modules/chunkManagerModule.js +24 -0
  55. package/dist/pipeline/modules/cleanupModule.js +95 -91
  56. package/dist/pipeline/modules/codeTransformModule.js +208 -0
  57. package/dist/pipeline/modules/commentModule.js +20 -11
  58. package/dist/pipeline/modules/commitSuggesterModule.js +36 -14
  59. package/dist/pipeline/modules/contextReviewModule.js +52 -0
  60. package/dist/pipeline/modules/fileReaderModule.js +72 -0
  61. package/dist/pipeline/modules/fileSearchModule.js +136 -0
  62. package/dist/pipeline/modules/finalAnswerModule.js +53 -0
  63. package/dist/pipeline/modules/gatherInfoModule.js +176 -0
  64. package/dist/pipeline/modules/generateTestsModule.js +63 -54
  65. package/dist/pipeline/modules/kgModule.js +26 -11
  66. package/dist/pipeline/modules/preserveCodeModule.js +91 -49
  67. package/dist/pipeline/modules/refactorModule.js +19 -7
  68. package/dist/pipeline/modules/repairTestsModule.js +44 -36
  69. package/dist/pipeline/modules/reviewModule.js +23 -13
  70. package/dist/pipeline/modules/summaryModule.js +27 -35
  71. package/dist/pipeline/modules/writeFileModule.js +86 -0
  72. package/dist/pipeline/registry/moduleRegistry.js +38 -93
  73. package/dist/pipeline/runModulePipeline.js +22 -19
  74. package/dist/scripts/dbcheck.js +143 -228
  75. package/dist/utils/buildContextualPrompt.js +245 -172
  76. package/dist/utils/debugContext.js +24 -0
  77. package/dist/utils/fileTree.js +16 -6
  78. package/dist/utils/loadRelevantFolderCapsules.js +64 -0
  79. package/dist/utils/log.js +2 -0
  80. package/dist/utils/normalizeData.js +23 -0
  81. package/dist/utils/planActions.js +60 -0
  82. package/dist/utils/promptBuilderHelper.js +67 -0
  83. package/dist/utils/promptLogHelper.js +52 -0
  84. package/dist/utils/sanitizeQuery.js +20 -8
  85. package/dist/utils/sleep.js +3 -0
  86. package/dist/utils/splitCodeIntoChunk.js +65 -32
  87. package/dist/utils/vscode.js +49 -0
  88. package/dist/workflow/workflowResolver.js +14 -0
  89. package/dist/workflow/workflowRunner.js +103 -0
  90. package/package.json +6 -5
  91. package/dist/agent/agentManager.js +0 -39
  92. package/dist/agent/workflowManager.js +0 -95
  93. package/dist/commands/ModulePipelineCmd.js +0 -31
  94. package/dist/daemon/daemonBatch.js +0 -186
  95. package/dist/fileRules/scoreFiles.js +0 -71
  96. package/dist/lib/generateEmbedding.js +0 -22

package/dist/utils/promptBuilderHelper.js ADDED
@@ -0,0 +1,67 @@
+ import chalk from "chalk";
+ import { buildLightContext } from "./buildContextualPrompt.js";
+ /**
+ * Responsible only for building the contextual prompt text (no file writes).
+ */
+ export async function buildContext(args) {
+ console.log(chalk.blueBright("\n📦 Building contextual prompt..."));
+ // ↳ Now returns a StructuredContext object
+ const ctx = await buildLightContext(args);
+ // Serialize to prompt string
+ const promptContent = serializeContext(ctx);
+ console.log(chalk.greenBright("✅ Prompt built successfully."));
+ console.log(chalk.cyan(`[PromptBuilder] Token estimate: ~${Math.round(promptContent.length / 4)} tokens`));
+ return promptContent;
+ }
+ function serializeContext(ctx) {
+ const out = [];
+ out.push(`# User Query\n${ctx.initContext?.userQuery}\n`);
+ if (ctx.initContext?.projectSummary) {
+ out.push(`# Project Summary\n${ctx.initContext?.projectSummary}\n`);
+ }
+ if (ctx.initContext?.moduleSummaries?.length) {
+ out.push(`# Module Summaries`);
+ for (const m of ctx.initContext?.moduleSummaries) {
+ out.push(`- **${m.path}**: ${m.summary}`);
+ }
+ out.push("");
+ }
+ if (ctx.workingFiles?.length) {
+ out.push(`# Working Files`);
+ for (const f of ctx.workingFiles) {
+ out.push(`\n## ${f.path}`);
+ if (f.summary)
+ out.push(`Summary: ${f.summary}`);
+ if (f.code)
+ out.push(`\nCode (truncated):\n${f.code.slice(0, 1000)}`);
+ }
+ }
+ if (ctx.analysis?.focus?.relevantFiles?.length) {
+ out.push(`# Focused Files (semantic analysis)`);
+ for (const path of ctx.analysis.focus.relevantFiles) {
+ out.push(`- ${path}`);
+ }
+ if (ctx.analysis.focus.rationale) {
+ out.push(`Rationale: ${ctx.analysis.focus.rationale}`);
+ }
+ }
+ if (ctx.analysis?.understanding) {
+ out.push(`# Semantic Understanding`);
+ if (ctx.analysis.understanding.problemStatement)
+ out.push(`Problem: ${ctx.analysis.understanding.problemStatement}`);
+ if (ctx.analysis.understanding.assumptions?.length)
+ out.push(`Assumptions: ${ctx.analysis.understanding.assumptions.join(", ")}`);
+ if (ctx.analysis.understanding.risks?.length)
+ out.push(`Risks: ${ctx.analysis.understanding.risks.join(", ")}`);
+ }
+ if (ctx.initContext?.repoTree) {
+ out.push(`# Repo Tree\n${ctx.initContext?.repoTree}\n`);
+ }
+ if (ctx.initContext?.kgSample?.length) {
+ out.push(`# Knowledge Graph Sample`);
+ for (const e of ctx.initContext?.kgSample) {
+ out.push(`- ${e.source} -> (${e.relation}) -> ${e.target}`);
+ }
+ }
+ return out.join("\n");
+ }
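
Usage note: a minimal sketch of how the new prompt builder might be invoked; the shape of `args` (a userQuery field is assumed here) and the import path are illustrative, not confirmed by this diff.

    // Hypothetical caller: build the serialized prompt and preview it.
    import { buildContext } from "./dist/utils/promptBuilderHelper.js";

    const prompt = await buildContext({ userQuery: "explain the indexing pipeline" });
    console.log(prompt.slice(0, 200)); // first part of the serialized sections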

package/dist/utils/promptLogHelper.js ADDED
@@ -0,0 +1,52 @@
+ import fs from "fs";
+ import path from "path";
+ import { log } from "../utils/log.js";
+ import { SCAI_HOME, PROMPT_LOG_PATH } from "../constants.js";
+ /**
+ * Ensures SCAI_HOME exists before writing logs.
+ */
+ function ensureHomeDir() {
+ if (!fs.existsSync(SCAI_HOME))
+ fs.mkdirSync(SCAI_HOME, { recursive: true });
+ }
+ /**
+ * Creates a formatted header for visual clarity in logs.
+ */
+ function formatHeader(title) {
+ const divider = "=".repeat(68);
+ return `\n\n${divider}\n📂 ${title}\n${divider}\n`;
+ }
+ /**
+ * Overwrites the prompt log with a new prompt.
+ */
+ export function logPrompt(prompt) {
+ try {
+ ensureHomeDir();
+ const entry = formatHeader("Prompt Updated") + prompt + "\n";
+ fs.writeFileSync(PROMPT_LOG_PATH, entry, "utf-8");
+ log(`📝 Prompt written to ${PROMPT_LOG_PATH}`);
+ }
+ catch (err) {
+ log("❌ Failed to write prompt log:", err);
+ }
+ }
+ /**
+ * Appends module input/output data to a separate log file.
+ * Automatically stringifies objects.
+ */
+ export function logInputOutput(stepName, type, content) {
+ const ioLogPath = path.join(SCAI_HOME, "input_output.log");
+ try {
+ ensureHomeDir();
+ const contentStr = typeof content === "string" ? content.trim() : JSON.stringify(content, null, 2);
+ const entry = formatHeader(`${type.toUpperCase()} | ${stepName}`) +
+ contentStr +
+ "\n" +
+ "=".repeat(68) +
+ "\n";
+ fs.appendFileSync(ioLogPath, entry, "utf-8");
+ }
+ catch (err) {
+ log(`❌ Failed to append ${type} for ${stepName}:`, err);
+ }
+ }
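
Usage note: a sketch of how the two log helpers might be called from a pipeline step; the step name and payload below are made up for illustration.

    import { logPrompt, logInputOutput } from "./dist/utils/promptLogHelper.js";

    // Overwrites the prompt log at PROMPT_LOG_PATH (the directory is created if missing).
    logPrompt("# User Query\nAdd unit tests for fileIndex.js");

    // Appends a formatted entry to <SCAI_HOME>/input_output.log.
    logInputOutput("summaryModule", "input", { file: "src/db/fileIndex.ts", tokens: 812 });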

package/dist/utils/sanitizeQuery.js CHANGED
@@ -1,27 +1,39 @@
  // src/utils/sanitizeQuery.ts
  import { STOP_WORDS } from '../fileRules/stopWords.js';
+ // Stage 1: normalize junk out of query
+ export function basicCleanup(raw) {
+ return raw
+ .replace(/["']/g, '') // remove quotes
+ .replace(/[()]/g, ' ') // replace parens with space
+ .replace(/\bOR\b|\bAND\b/gi, ' ') // strip boolean operators
+ .replace(/\w+:/g, '') // remove field prefixes like path:, ext:
+ .replace(/[<>=*]/g, ' ') // remove invalid operators
+ .replace(/\\/g, '') // remove stray backslashes
+ .replace(/<[^>]+>/g, '') // strip placeholders like <endpoint>
+ .replace(/\s+/g, ' ') // collapse whitespace
+ .trim();
+ }
+ // Stage 2: tokenize & prepare for FTS5
  export function sanitizeQueryForFts(input) {
- input = input.trim().toLowerCase();
- // If the whole input looks like a filename/path, quote it
+ input = basicCleanup(input)
+ .toLowerCase()
+ .replace(/\(([^)]+)\)/g, (_, inner) => inner.replace(/[|]/g, ' ')) // expand grouped ORs
+ .replace(/[|]/g, ' '); // handle standalone ORs
  if (/^[\w\-./]+$/.test(input) && !/\s/.test(input)) {
  return `"${input.replace(/"/g, '""')}"*`;
  }
  const tokens = input
  .split(/\s+/)
- .map(token => token.toLowerCase())
  .map(token => {
- // If the token looks like a filename/path, keep it quoted
  if (/[\w]+\.[a-z0-9]+$/.test(token)) {
  return `"${token.replace(/"/g, '""')}"`;
  }
- // Otherwise, clean it like normal
  return token
- .replace(/[^a-z0-9_*"]/gi, '') // remove all invalid FTS5 chars
+ .replace(/[^a-z0-9_*"]/gi, '')
  .replace(/'/g, "''");
  })
  .filter(token => token.length > 2 &&
- !STOP_WORDS.has(token.replace(/[*"]/g, '')) // check unquoted
- )
+ !STOP_WORDS.has(token.replace(/[*"]/g, '')))
  .map(token => (token.startsWith('"') ? token : token + '*'));
  return tokens.length > 0 ? tokens.join(' OR ') : '*';
  }
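
Worked example of the two-stage sanitization, assuming none of the surviving words are in STOP_WORDS (the query itself is hypothetical):

    import { basicCleanup, sanitizeQueryForFts } from "./dist/utils/sanitizeQuery.js";

    const raw = 'path:(daemon OR worker) "indexing"';
    console.log(basicCleanup(raw));        // "daemon worker indexing"
    console.log(sanitizeQueryForFts(raw)); // "daemon* OR worker* OR indexing*"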

package/dist/utils/sleep.js ADDED
@@ -0,0 +1,3 @@
+ export function sleep(ms) {
+ return new Promise(resolve => setTimeout(resolve, ms));
+ }

package/dist/utils/splitCodeIntoChunk.js CHANGED
@@ -1,49 +1,82 @@
  import { encode } from 'gpt-3-encoder';
- export function splitCodeIntoChunks(text, maxTokens) {
+ export function splitCodeIntoChunks(text, maxTokens = 1500, hardLimitMultiplier = 1.8) {
  const lines = text.split('\n');
  const chunks = [];
  let currentChunkLines = [];
  let currentTokens = 0;
  let inMultiComment = false;
- const start = '/*';
- const end = '*/';
+ let inFunction = false;
+ let inTryBlock = false;
+ let globalBraceDepth = 0;
+ let functionBraceDepth = 0;
+ let parenDepth = 0;
+ let bracketDepth = 0;
  for (const line of lines) {
  const trimmed = line.trim();
- // --- Track multi-line comments ---
- if (trimmed.includes(start) && !trimmed.includes(end)) {
- // Starts a block comment but does not end on the same line
+ // ---------- comments ----------
+ if (trimmed.includes('/*') && !trimmed.includes('*/'))
  inMultiComment = true;
- }
- else if (trimmed.includes(start) && trimmed.includes(end)) {
- // Inline comment: "/* ... */" on same line → ignore, don't toggle state
- // do nothing with inMultiComment
- }
- else if (trimmed.includes(end)) {
- // End of a block comment
+ if (trimmed.includes('*/'))
  inMultiComment = false;
+ // ---------- function start ----------
+ const isFunctionStart = trimmed.startsWith('function ') ||
+ trimmed.startsWith('async function ') ||
+ trimmed.startsWith('class ') ||
+ trimmed.match(/^\w+\s*=\s*\(.*\)\s*=>\s*{/);
+ if (!inFunction && isFunctionStart) {
+ inFunction = true;
+ functionBraceDepth = 0;
  }
- const lineTokens = encode(line + '\n').length;
- if (currentTokens + lineTokens > maxTokens) {
- // Split at natural points but never inside a multi-line comment
- let splitIndex = currentChunkLines.length;
- for (let i = currentChunkLines.length - 1; i >= 0; i--) {
- const t = currentChunkLines[i].trim();
- if (!inMultiComment &&
- (t === '' ||
- t.startsWith('function ') ||
- t.startsWith('class ') ||
- t.endsWith('}') ||
- t.endsWith(';'))) {
- splitIndex = i + 1;
- break;
- }
+ // ---------- try/catch ----------
+ if (trimmed.startsWith('try {'))
+ inTryBlock = true;
+ if (trimmed.startsWith('catch') || trimmed.startsWith('finally'))
+ inTryBlock = false;
+ // ---------- depth tracking ----------
+ for (const char of line) {
+ if (char === '{') {
+ globalBraceDepth++;
+ if (inFunction)
+ functionBraceDepth++;
+ }
+ else if (char === '}') {
+ globalBraceDepth = Math.max(0, globalBraceDepth - 1);
+ if (inFunction)
+ functionBraceDepth = Math.max(0, functionBraceDepth - 1);
+ }
+ else if (char === '(') {
+ parenDepth++;
+ }
+ else if (char === ')') {
+ parenDepth = Math.max(0, parenDepth - 1);
+ }
+ else if (char === '[') {
+ bracketDepth++;
+ }
+ else if (char === ']') {
+ bracketDepth = Math.max(0, bracketDepth - 1);
  }
- chunks.push(currentChunkLines.slice(0, splitIndex).join('\n'));
- currentChunkLines = currentChunkLines.slice(splitIndex);
- currentTokens = encode(currentChunkLines.join('\n')).length;
  }
+ // ---------- add line ----------
  currentChunkLines.push(line);
- currentTokens += lineTokens;
+ currentTokens += encode(line + '\n').length;
+ // ---------- split decision ----------
+ const softLimitReached = currentTokens >= maxTokens;
+ const hardLimitReached = currentTokens >= maxTokens * hardLimitMultiplier;
+ const safeToSplit = !inMultiComment &&
+ !inTryBlock &&
+ functionBraceDepth === 0 &&
+ parenDepth === 0 &&
+ bracketDepth === 0;
+ if ((softLimitReached && safeToSplit) || hardLimitReached) {
+ chunks.push(currentChunkLines.join('\n'));
+ currentChunkLines = [];
+ currentTokens = 0;
+ }
+ // ---------- function end ----------
+ if (inFunction && functionBraceDepth === 0) {
+ inFunction = false;
+ }
  }
  if (currentChunkLines.length > 0) {
  chunks.push(currentChunkLines.join('\n'));
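
Usage note: a sketch of the new soft/hard split limits; the file path is illustrative and actual chunk counts depend on gpt-3-encoder token counts.

    import fs from "fs";
    import { splitCodeIntoChunks } from "./dist/utils/splitCodeIntoChunk.js";

    const source = fs.readFileSync("src/db/fileIndex.ts", "utf-8"); // any large source file
    // Soft limit of 1500 tokens, with a hard cutoff at 1500 * 1.8 if no safe split point appears.
    const chunks = splitCodeIntoChunks(source, 1500, 1.8);
    console.log(`split into ${chunks.length} chunks`);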

package/dist/utils/vscode.js ADDED
@@ -0,0 +1,49 @@
+ import fs from "fs";
+ import os from "os";
+ import path from "path";
+ import { spawn } from "child_process";
+ /**
+ * Opens a visual diff in VS Code between the original and modified versions.
+ *
+ * @param filePath - The path or name of the file being diffed.
+ * @param diffContent - The unified diff text for that file.
+ */
+ export async function openDiffInVSCode(filePath, diffContent) {
+ try {
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "scai-diff-"));
+ // Temporary file paths
+ const baseName = path.basename(filePath);
+ const originalPath = path.join(tmpDir, `${baseName}_OLD.tmp`);
+ const modifiedPath = path.join(tmpDir, `${baseName}_NEW.tmp`);
+ // Extract file versions from unified diff
+ const oldLines = [];
+ const newLines = [];
+ for (const line of diffContent.split("\n")) {
+ if (line.startsWith("+") && !line.startsWith("+++")) {
+ newLines.push(line.slice(1));
+ }
+ else if (line.startsWith("-") && !line.startsWith("---")) {
+ oldLines.push(line.slice(1));
+ }
+ else if (!line.startsWith("@@")) {
+ // context lines appear in both versions
+ oldLines.push(line);
+ newLines.push(line);
+ }
+ }
+ fs.writeFileSync(originalPath, oldLines.join("\n"), "utf-8");
+ fs.writeFileSync(modifiedPath, newLines.join("\n"), "utf-8");
+ console.log(`\n🔍 Opening VS Code diff:`);
+ console.log(` ${originalPath}`);
+ console.log(`⇄ ${modifiedPath}\n`);
+ const child = spawn("code", ["--diff", originalPath, modifiedPath], {
+ stdio: "inherit",
+ });
+ child.on("error", () => {
+ console.error("❌ Failed to open diff in VS Code. Ensure the 'code' CLI is installed and in your PATH.");
+ });
+ }
+ catch (err) {
+ console.error("❌ Error while preparing diff for VS Code:", err);
+ }
+ }
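
Usage note: a sketch of feeding a hand-written unified diff to the VS Code helper; the diff text is fabricated, and the `code` CLI must be on PATH.

    import { openDiffInVSCode } from "./dist/utils/vscode.js";

    const diff = [
      "@@ -1,3 +1,4 @@",
      " export function sleep(ms) {",
      "+  // resolves after ms milliseconds",
      "   return new Promise(resolve => setTimeout(resolve, ms));",
      " }",
    ].join("\n");

    await openDiffInVSCode("src/utils/sleep.ts", diff);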

package/dist/workflow/workflowResolver.js ADDED
@@ -0,0 +1,14 @@
+ // src/agents/WorkflowResolver.ts
+ import { resolveModulesByNames } from "../pipeline/registry/moduleRegistry.js";
+ /**
+ * Simple resolver function to compute module order (honours before/after relationships).
+ * This replaces the previous Workflow class used only to resolve modules.
+ *
+ * Returns an array of PromptModuleMeta (the existing shape your registry returns).
+ */
+ export function resolveModules(goals) {
+ // Trim and validate names here (defensive)
+ const trimmed = goals.map((g) => (typeof g === "string" ? g.trim() : g));
+ const modules = resolveModulesByNames(trimmed);
+ return modules;
+ }
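
Usage note: a minimal call to the resolver; the module names here are placeholders, since the real set and before/after ordering come from the module registry.

    import { resolveModules } from "./dist/workflow/workflowResolver.js";

    const ordered = resolveModules(["comment", "review"]); // hypothetical module names
    console.log(ordered.map((m) => m.name));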

package/dist/workflow/workflowRunner.js ADDED
@@ -0,0 +1,103 @@
+ // File: src/workflow/workflowRunner.ts
+ import fs from "fs/promises";
+ import chalk from "chalk";
+ import { normalizePath } from "../utils/contentUtils.js";
+ /**
+ * runWorkflow - orchestrates running a modular I/O pipeline.
+ *
+ * Each module conforms to the `ModuleIO` interface:
+ * input: { query, content? }
+ * output: { data?, mode?, newFilepath? }
+ *
+ * Options:
+ * - modules: ordered modules to run (Module[])
+ * - filepath: optional file path to read/write
+ * - inputContent: optional string for stdin content
+ *
+ * Behaviours:
+ * - Sequentially runs each module on the full file content
+ * - Supports output modes: overwrite, append, newFile, skip
+ * - Stream mode (no filepath): prints final output to stdout
+ */
+ export async function runWorkflow(opts) {
+ const { modules } = opts;
+ let filepath = opts.filepath;
+ let fileContent = "";
+ try {
+ if (filepath) {
+ filepath = normalizePath(filepath);
+ await fs.access(filepath);
+ fileContent = await fs.readFile(filepath, "utf-8");
+ }
+ else {
+ fileContent = opts.inputContent ?? "";
+ }
+ for (const mod of modules) {
+ console.log(chalk.cyan(`\n⚙️ Running module: ${mod.name}`));
+ const io = {
+ query: `Process ${filepath ?? "<stdin>"} with ${mod.name}`,
+ content: fileContent,
+ };
+ const result = await mod.run(io);
+ if (!result || !result.data || !String(result.data).trim()) {
+ throw new Error(`⚠️ Empty result from module ${mod.name}`);
+ }
+ const output = typeof result.data === "string"
+ ? result.data
+ : JSON.stringify(result.data, null, 2);
+ const mode = result.mode ??
+ "overwrite"; // default mode if not specified by module
+ const newFilepath = result.newFilepath;
+ //
+ // === Handle stdout vs file modes ===
+ //
+ if (!filepath) {
+ // stdin mode: print directly to stdout
+ process.stdout.write(output);
+ fileContent = output;
+ continue;
+ }
+ //
+ // === File-backed mode handling (honor mode semantics) ===
+ //
+ switch (mode) {
+ case "overwrite":
+ await fs.writeFile(filepath, output, "utf-8");
+ console.log(chalk.green(`✅ Overwritten: ${filepath}`));
+ fileContent = output;
+ break;
+ case "append":
+ await fs.appendFile(filepath, output, "utf-8");
+ console.log(chalk.green(`✅ Appended: ${filepath}`));
+ fileContent += output;
+ break;
+ case "newFile":
+ if (!newFilepath)
+ throw new Error(`newFile mode requires a newFilepath`);
+ const resolvedNew = normalizePath(newFilepath);
+ await fs.writeFile(resolvedNew, output, "utf-8");
+ console.log(chalk.green(`✅ New file created: ${resolvedNew}`));
+ filepath = resolvedNew;
+ fileContent = output;
+ break;
+ case "skip":
+ console.log(chalk.gray(`⏭️ Skipped writing for module ${mod.name}`));
+ fileContent = output;
+ break;
+ default:
+ console.log(chalk.yellow(`⚠️ Unknown mode (${String(mode)}). Treating as overwrite.`));
+ await fs.writeFile(filepath, output, "utf-8");
+ fileContent = output;
+ break;
+ }
+ }
+ // Final stdout flush if running without filepath
+ if (!opts.filepath) {
+ process.stdout.write("\n");
+ }
+ }
+ catch (err) {
+ console.error(chalk.red("❌ Error in workflow run:"), err?.message ?? err);
+ throw err;
+ }
+ }
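
Usage note: a sketch of driving runWorkflow with a custom module that follows the ModuleIO shape documented above; the module and file path are invented for illustration.

    import { runWorkflow } from "./dist/workflow/workflowRunner.js";

    // Hypothetical module: returns data plus an output mode.
    const upperCaseModule = {
      name: "upperCase",
      async run({ content }) {
        return { data: (content ?? "").toUpperCase(), mode: "overwrite" };
      },
    };

    // File-backed run: the file must already exist; mode semantics are honoured.
    await runWorkflow({ modules: [upperCaseModule], filepath: "notes/todo.txt" });

    // Stream mode: no filepath, final output is written to stdout.
    await runWorkflow({ modules: [upperCaseModule], inputContent: "hello" });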
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "scai",
- "version": "0.1.117",
+ "version": "0.1.119",
  "type": "module",
  "bin": {
  "scai": "./dist/index.js"
@@ -35,7 +35,9 @@
  ],
  "scripts": {
  "build": "rm -rfd dist && tsc && chmod +x dist/index.js && git add .",
- "start": "node dist/index.js"
+ "start": "node dist/index.js",
+ "test": "vitest run",
+ "test:watch": "vitest watch"
  },
  "dependencies": {
  "@octokit/rest": "^22.0.0",
@@ -47,18 +49,17 @@
  "commander": "^11.0.0",
  "fast-glob": "^3.3.3",
  "gpt-3-encoder": "^1.1.4",
+ "java-parser": "^3.0.1",
  "proper-lockfile": "^4.1.2",
+ "shell-quote": "^1.8.3",
  "string-similarity-js": "^2.1.4",
  "ts-morph": "^26.0.0"
  },
  "devDependencies": {
  "@types/better-sqlite3": "^7.6.13",
  "@types/columnify": "^1.5.4",
- "@types/jest": "^30.0.0",
  "@types/node": "^24.2.1",
  "@types/proper-lockfile": "^4.1.4",
- "jest": "^30.0.2",
- "ts-jest": "^29.4.0",
  "ts-node": "^10.9.2",
  "typescript": "^5.8.3"
  },

package/dist/agent/agentManager.js DELETED
@@ -1,39 +0,0 @@
- // src/agent/agentManager.ts
- import chalk from "chalk";
- import fs from "fs/promises";
- import { resolveModuleOrder } from "../pipeline/registry/moduleRegistry.js";
- import { handleAgentRun } from "./workflowManager.js";
- // Minimal agent: resolves modules (with before/after dependencies) and delegates to handleAgentRun
- export class Agent {
- constructor(goals) {
- // Trim goal names to avoid whitespace issues
- this.goals = goals.map((g) => g.trim());
- }
- resolveModules(goals) {
- // Use the registry helper to get the correct order
- return resolveModuleOrder(goals);
- }
- async execute(filepath) {
- console.log(chalk.cyan(`🤖 Agent starting on: ${filepath}`));
- // Resolve modules (with before/after dependencies)
- const modules = this.resolveModules(this.goals);
- console.log(chalk.green("📋 Modules to run:"), modules.map((m) => m.name).join(" → "));
- try {
- // Check that the file exists before trying to read it
- await fs.access(filepath);
- // Read file content (optional, could be used by modules in workflow)
- await fs.readFile(filepath, "utf-8");
- }
- catch (err) {
- if (err.code === "ENOENT") {
- console.error(chalk.redBright("❌ Error:"), `File not found: ${chalk.yellow(filepath)}`);
- console.error(`Make sure the path is correct. (cwd: ${chalk.gray(process.cwd())})`);
- process.exit(1);
- }
- throw err; // rethrow for unexpected errors
- }
- // Delegate everything to handleAgentRun (like CLI commands do)
- await handleAgentRun(filepath, modules);
- console.log(chalk.green("✅ Agent finished!"));
- }
- }

package/dist/agent/workflowManager.js DELETED
@@ -1,95 +0,0 @@
- // agentManager.ts
- import fs from 'fs/promises';
- import chalk from 'chalk';
- import { runModulePipeline } from '../pipeline/runModulePipeline.js';
- import { countTokens, splitCodeIntoChunks } from '../utils/splitCodeIntoChunk.js';
- import { normalizePath } from '../utils/contentUtils.js';
- // basically handles all input (chunk if large), and writing of output (overwrite, append, new file)
- export async function handleAgentRun(filepath, modules) {
- try {
- filepath = normalizePath(filepath);
- let fileContent = await fs.readFile(filepath, 'utf-8');
- // Immutable baseline for this file (stays until file changes)
- const maxTokens = 1500;
- const baseChunks = splitCodeIntoChunks(fileContent, maxTokens);
- // Working chunks that flow through modules; stays index-aligned with baseChunks
- let workingChunks = [...baseChunks];
- for (const mod of modules) {
- console.log(chalk.cyan(`\n⚙️ Running module: ${mod.name}`));
- console.log(chalk.blue(`🧮 Tokens:`), chalk.yellow(countTokens(fileContent).toString()));
- console.log(chalk.magenta(`📦 Chunks: ${workingChunks.length}`));
- const processed = [];
- let mode;
- let newFilepath;
- for (let i = 0; i < workingChunks.length; i++) {
- const input = {
- originalContent: baseChunks[i], // immutable baseline for this file
- content: workingChunks[i], // current state for this slice
- filepath,
- chunkIndex: i,
- chunkCount: workingChunks.length,
- };
- const out = await runModulePipeline([mod], input);
- if (!out.content?.trim()) {
- throw new Error(`⚠️ Empty result on chunk ${i + 1}`);
- }
- processed.push(out.content);
- // Capture mode/path (should be consistent across chunks)
- if (out.mode)
- mode = out.mode;
- if (out.newFilepath)
- newFilepath = out.newFilepath;
- }
- const finalOutput = processed.join('\n\n');
- // Apply output mode
- switch (mode ?? 'overwrite') {
- case 'overwrite':
- await fs.writeFile(filepath, finalOutput, 'utf-8');
- console.log(chalk.green(`✅ Overwritten: ${filepath}`));
- // keep baseChunks (baseline stays the same), keep alignment: do NOT re-chunk
- workingChunks = processed;
- fileContent = finalOutput;
- break;
- case 'append':
- await fs.appendFile(filepath, finalOutput, 'utf-8');
- console.log(chalk.green(`✅ Appended: ${filepath}`));
- // appended file content diverges; keep alignment by using processed as new working
- workingChunks = processed;
- fileContent += finalOutput;
- break;
- case 'newFile':
- if (!newFilepath)
- throw new Error(`newFile mode requires newFilepath`);
- await fs.writeFile(newFilepath, finalOutput, 'utf-8');
- console.log(chalk.green(`✅ New file: ${newFilepath}`));
- // File context changes → reset baseline and working to the new file
- filepath = newFilepath;
- fileContent = finalOutput;
- const reset = splitCodeIntoChunks(fileContent, maxTokens);
- // new baseline for the new file (e.g., generated tests before cleaning)
- for (let i = 0; i < reset.length; i++)
- ; // (no-op; just clarity)
- // Replace both arrays to keep them in sync for subsequent modules
- workingChunks = reset;
- // Important: also reset baseChunks to this new file’s content so the next module
- // (e.g., cleaner) sees the *generated tests* as its originalContent baseline.
- baseChunks.length = 0;
- baseChunks.push(...reset);
- break;
- case 'skip':
- console.log(chalk.gray(`⏭️ Skipped writing for module ${mod.name}`));
- // don’t touch files, but keep chunks flowing
- workingChunks = processed;
- break;
- default:
- console.log(chalk.yellow(`⚠️ Unknown mode; skipping write`));
- // still move pipeline forward with processed
- workingChunks = processed;
- fileContent = finalOutput;
- }
- }
- }
- catch (err) {
- console.error(chalk.red('❌ Error in agent run:'), err instanceof Error ? err.message : err);
- }
- }

package/dist/commands/ModulePipelineCmd.js DELETED
@@ -1,31 +0,0 @@
- import { readFileSync } from 'fs';
- import { getModuleByName } from '../pipeline/registry/moduleRegistry.js';
- import { runModulePipeline } from '../pipeline/runModulePipeline.js';
- export async function runModulePipelineFromCLI(file, options) {
- if (!options.modules) {
- console.error('❌ No modules specified. Use --modules or -m.');
- process.exit(1);
- }
- const moduleNames = options.modules.split(',').map((m) => m.trim());
- // Read file content
- let fileContent = '';
- try {
- fileContent = readFileSync(file, 'utf-8');
- }
- catch (err) {
- console.error(`❌ Could not read file: ${file}`);
- process.exit(1);
- }
- const input = { content: fileContent, filepath: file };
- // Retrieve modules from the registry
- const resolvedModules = moduleNames.map((name) => {
- const module = getModuleByName(name);
- if (!module) {
- console.error(`❌ Failed to load module: ${name}`);
- process.exit(1);
- }
- return module;
- });
- // Run the module pipeline
- await runModulePipeline(resolvedModules, input);
- }