opencode-autognosis 2.0.1 → 2.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/activeset.js +2 -1
- package/dist/chunk-cards.js +7 -1
- package/dist/database.d.ts +56 -0
- package/dist/database.js +535 -0
- package/dist/git-worktree.js +5 -4
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -14
- package/dist/module-summaries.js +2 -1
- package/dist/performance-optimization.js +24 -9
- package/dist/services/logger.d.ts +4 -1
- package/dist/services/logger.js +39 -16
- package/dist/services/ollama.d.ts +11 -0
- package/dist/services/ollama.js +132 -0
- package/dist/system-tools.js +139 -33
- package/dist/testing-infrastructure.js +2 -1
- package/dist/unified-api.d.ts +3 -0
- package/dist/unified-api.js +160 -0
- package/package.json +5 -1
package/dist/index.js
CHANGED
@@ -1,20 +1,8 @@
-import {
-import { gitWorktreeTools } from "./git-worktree.js";
-import { testingTools } from "./testing-infrastructure.js";
-import { chunkCardsTools } from "./chunk-cards.js";
-import { activeSetTools } from "./activeset.js";
-import { moduleSummariesTools } from "./module-summaries.js";
-import { performanceTools } from "./performance-optimization.js";
+import { unifiedTools } from "./unified-api.js";
 export const AutognosisPlugin = async () => {
     return {
         tool: {
-            ...
-            ...gitWorktreeTools(),
-            ...testingTools(),
-            ...chunkCardsTools(),
-            ...activeSetTools(),
-            ...moduleSummariesTools(),
-            ...performanceTools(),
+            ...unifiedTools(),
         },
     };
 };
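All seven per-module tool factories are now funneled through a single `unifiedTools()` entry point. A minimal consumer sketch, assuming an ESM host with top-level await and the published package name as the import path (neither is shown in this diff):

    // Hypothetical usage: the plugin object now carries every tool via one spread.
    import { AutognosisPlugin } from "opencode-autognosis";

    const plugin = await AutognosisPlugin();
    console.log(Object.keys(plugin.tool)); // all tools, sourced from unifiedTools()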
package/dist/module-summaries.js
CHANGED
@@ -3,13 +3,14 @@ import * as fs from "node:fs/promises";
 import * as fsSync from "node:fs";
 import * as path from "node:path";
 import * as crypto from "node:crypto";
+import { Logger } from "./services/logger.js";
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
 const CHUNK_DIR = path.join(OPENCODE_DIR, "chunks");
 const MODULE_DIR = path.join(OPENCODE_DIR, "modules");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("ModuleSummaries", message, data);
 }
 // =============================================================================
 // HELPERS
package/dist/performance-optimization.js
CHANGED
@@ -5,7 +5,9 @@ import * as fsSync from "node:fs";
 import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
+import { getDb } from "./database.js";
 import { CHUNK_DIR, ensureChunkDir, calculateHash, calculateComplexity, parseFileAST, generateSummaryChunk, generateApiChunk, generateInvariantChunk, extractDependencies, extractSymbolsFromAST, extractSymbols } from "./chunk-cards.js";
+import { Logger } from "./services/logger.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -14,7 +16,7 @@ const PERF_DIR = path.join(OPENCODE_DIR, "performance");
 const METRICS_DIR = path.join(OPENCODE_DIR, "metrics");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("Performance", message, data);
 }
 // =============================================================================
 // HELPERS
@@ -431,29 +433,40 @@ export function performanceTools() {
             description: "Check status of background tasks and operations.",
             args: {
                 task_id: tool.schema.string().optional().describe("Specific task ID to check"),
-                task_type: tool.schema.enum(["indexing", "caching", "cleanup", "analysis"]).optional().describe("Filter by task type")
+                task_type: tool.schema.enum(["indexing", "caching", "cleanup", "analysis", "validation", "setup"]).optional().describe("Filter by task type")
             },
             async execute({ task_id, task_type }) {
                 log("Tool call: perf_background_status", { task_id, task_type });
                 try {
+                    const tasks = [];
+                    // 1. Check DB Jobs
+                    if (task_id) {
+                        const job = getDb().getJob(task_id);
+                        if (job)
+                            tasks.push(job);
+                    }
+                    else {
+                        const dbJobs = getDb().listJobs(task_type, 10);
+                        tasks.push(...dbJobs);
+                    }
+                    // 2. Check File-based tasks
                    await ensurePerfDirs();
                    const files = await fs.readdir(PERF_DIR);
-                   const tasks = [];
                    for (const file of files) {
                        if (file.startsWith('task-') && file.endsWith('.json')) {
                            try {
                                const taskPath = path.join(PERF_DIR, file);
                                const task = JSON.parse(await fs.readFile(taskPath, 'utf-8'));
-                               // Apply filters
                                if (task_id && task.id !== task_id)
                                    continue;
                                if (task_type && task.type !== task_type)
                                    continue;
-
-
-
-
+                               // Avoid duplication if already in DB (shouldn't happen with new ID scheme)
+                               if (!tasks.some(t => t.id === task.id)) {
+                                   tasks.push(task);
+                               }
                            }
+                           catch (error) { }
                        }
                    }
                    return JSON.stringify({
@@ -626,6 +639,8 @@ async function indexFile(filePath) {
            }
        };
        await fs.writeFile(cardPath, JSON.stringify(chunkCard, null, 2));
+       // Sync to SQLite Index
+       getDb().ingestChunkCard(chunkCard);
    }
 }
 catch (error) {
@@ -719,7 +734,7 @@ async function runBackgroundIndexing(taskId, indexingState) {
        }
    }
    catch (writeError) {
-
+       log("Failed to update task error state", writeError);
    }
 }
}
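The status tool now reads job state from two sources: the new SQLite job table (via getDb()) and the legacy task-*.json files, de-duplicating by id. A distilled sketch of that merge policy, assuming a task shape of { id, type, status } (the exact shape is not shown in this diff):

    // Sketch only: database jobs take precedence; file-based tasks are appended
    // when their id is not already present (mirrors the loop in the diff above).
    function mergeTasks(dbJobs, fileTasks) {
        const tasks = [...dbJobs];
        for (const task of fileTasks) {
            if (!tasks.some(t => t.id === task.id))
                tasks.push(task);
        }
        return tasks;
    }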
package/dist/services/logger.js
CHANGED
@@ -1,17 +1,40 @@
-import
-import
-
-const
-
-
-
-
-
-
-
-
-
-
-
-
+import * as fs from "node:fs";
+import * as path from "node:path";
+const PROJECT_ROOT = process.cwd();
+const LOG_DIR = path.join(PROJECT_ROOT, ".opencode", "logs");
+const LOG_FILE = path.join(LOG_DIR, "autognosis.log");
+// Ensure log directory exists
+try {
+    if (!fs.existsSync(LOG_DIR)) {
+        fs.mkdirSync(LOG_DIR, { recursive: true });
+    }
+}
+catch (e) {
+    // Ignore error if we can't create directory (e.g. read-only fs)
+}
+export class Logger {
+    static formatMessage(module, message, data) {
+        const timestamp = new Date().toISOString();
+        let dataStr = "";
+        if (data) {
+            try {
+                dataStr = typeof data === "string" ? data : JSON.stringify(data);
+            }
+            catch {
+                dataStr = "[Circular/Unserializable]";
+            }
+        }
+        return `[${timestamp}] [${module}] ${message} ${dataStr}\n`;
+    }
+    static log(module, message, data) {
+        const line = this.formatMessage(module, message, data);
+        try {
+            // Append to log file synchronously to ensure write
+            fs.appendFileSync(LOG_FILE, line);
+        }
+        catch (e) {
+            // Fallback: strictly avoid console.log/error to prevent TUI breakage.
+            // We essentially swallow the log if file write fails.
+        }
+    }
 }
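Every module's internal log() now delegates to this file-backed Logger, which appends synchronously and never touches the console (the in-code comments cite TUI breakage as the reason). A usage sketch; the "MyModule" tag is illustrative:

    import { Logger } from "./services/logger.js";

    // Appends a line of the form
    //   [2025-01-01T00:00:00.000Z] [MyModule] starting {"flag":true}
    // to .opencode/logs/autognosis.log; on write failure the entry is
    // silently dropped rather than falling back to console output.
    Logger.log("MyModule", "starting", { flag: true });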
package/dist/services/ollama.d.ts
ADDED
@@ -0,0 +1,11 @@
+export declare const DEFAULT_EMBEDDING_MODEL = "nomic-embed-text";
+export declare const OLLAMA_BASE_URL = "http://127.0.0.1:11434";
+export declare class OllamaService {
+    isInstalled(): Promise<boolean>;
+    isRunning(): Promise<boolean>;
+    install(): Promise<string>;
+    startServer(): Promise<void>;
+    pullModel(model?: string): Promise<void>;
+    getEmbedding(text: string, model?: string): Promise<number[]>;
+}
+export declare const ollama: OllamaService;
package/dist/services/ollama.js
ADDED
@@ -0,0 +1,132 @@
+import { exec, spawn } from "node:child_process";
+import { promisify } from "node:util";
+import * as fs from "node:fs";
+import * as path from "node:path";
+import { Logger } from "./logger.js";
+const execAsync = promisify(exec);
+export const DEFAULT_EMBEDDING_MODEL = "nomic-embed-text";
+export const OLLAMA_BASE_URL = "http://127.0.0.1:11434";
+export class OllamaService {
+    async isInstalled() {
+        try {
+            await execAsync("which ollama");
+            return true;
+        }
+        catch {
+            return false;
+        }
+    }
+    async isRunning() {
+        try {
+            const controller = new AbortController();
+            const timeoutId = setTimeout(() => controller.abort(), 1000);
+            const res = await fetch(`${OLLAMA_BASE_URL}/api/version`, { signal: controller.signal });
+            clearTimeout(timeoutId);
+            return res.ok;
+        }
+        catch {
+            return false;
+        }
+    }
+    async install() {
+        const platform = process.platform;
+        try {
+            if (platform === "darwin") {
+                // Try Homebrew first
+                try {
+                    await execAsync("which brew");
+                    await execAsync("brew install ollama");
+                    return "Installed via Homebrew";
+                }
+                catch {
+                    // Fallback to script
+                    await execAsync("curl -fsSL https://ollama.com/install.sh | sh");
+                    return "Installed via official script";
+                }
+            }
+            else if (platform === "linux") {
+                await execAsync("curl -fsSL https://ollama.com/install.sh | sh");
+                return "Installed via official script";
+            }
+            else {
+                throw new Error("Automatic installation only supported on macOS and Linux. Please install Ollama manually.");
+            }
+        }
+        catch (error) {
+            throw new Error(`Installation failed: ${error.message}`);
+        }
+    }
+    async startServer() {
+        if (await this.isRunning())
+            return;
+        // Start in background
+        const logFile = fs.openSync(path.join(process.cwd(), ".opencode", "ollama.log"), "a");
+        const child = spawn("ollama", ["serve"], {
+            detached: true,
+            stdio: ["ignore", logFile, logFile]
+        });
+        child.unref();
+        // Wait for it to come up
+        let attempts = 0;
+        while (attempts < 10) {
+            await new Promise(r => setTimeout(r, 1000));
+            if (await this.isRunning())
+                return;
+            attempts++;
+        }
+        throw new Error("Ollama server failed to start within 10 seconds");
+    }
+    async pullModel(model = DEFAULT_EMBEDDING_MODEL) {
+        // Check if exists
+        try {
+            const res = await fetch(`${OLLAMA_BASE_URL}/api/tags`);
+            const data = await res.json();
+            const models = data.models || [];
+            if (models.some((m) => m.name.includes(model))) {
+                return; // Already exists
+            }
+        }
+        catch { }
+        // Pull model (this blocks, usually handled via CLI)
+        // We'll use the API to pull so we can await it
+        const res = await fetch(`${OLLAMA_BASE_URL}/api/pull`, {
+            method: "POST",
+            body: JSON.stringify({ name: model }),
+        });
+        if (!res.ok)
+            throw new Error(`Failed to pull model ${model}`);
+        // Read stream to completion to ensure it's done
+        const reader = res.body?.getReader();
+        if (reader) {
+            while (true) {
+                const { done } = await reader.read();
+                if (done)
+                    break;
+            }
+        }
+    }
+    async getEmbedding(text, model = DEFAULT_EMBEDDING_MODEL) {
+        if (!text || !text.trim())
+            return [];
+        try {
+            const res = await fetch(`${OLLAMA_BASE_URL}/api/embeddings`, {
+                method: "POST",
+                body: JSON.stringify({
+                    model,
+                    prompt: text
+                })
+            });
+            if (!res.ok) {
+                const errText = await res.text();
+                throw new Error(`Ollama API error: ${res.status} ${errText}`);
+            }
+            const data = await res.json();
+            return data.embedding;
+        }
+        catch (error) {
+            Logger.log("Ollama", "Embedding failed", error);
+            return [];
+        }
+    }
+}
+export const ollama = new OllamaService();
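A sketch of the intended bootstrap-then-embed sequence, based only on the methods above (an ESM context with top-level await is assumed):

    import { ollama, DEFAULT_EMBEDDING_MODEL } from "./services/ollama.js";

    // Bootstrap: install if absent, start the detached server, ensure the model.
    if (!(await ollama.isInstalled()))
        await ollama.install();
    await ollama.startServer();
    await ollama.pullModel(DEFAULT_EMBEDDING_MODEL);

    // getEmbedding returns [] on any failure (see its catch block), so an
    // empty vector must be treated as "no embedding available".
    const vec = await ollama.getEmbedding("function add(a, b) { return a + b; }");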
package/dist/system-tools.js
CHANGED
@@ -5,14 +5,15 @@ import * as fsSync from "node:fs";
 import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
+import { Logger } from "./services/logger.js";
+import { getDb } from "./database.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
 const CACHE_DIR = path.join(OPENCODE_DIR, "cache");
 // Internal logging
 function log(message, data) {
-
-    console.error(`[Autognosis] ${message}`, data || '');
+    Logger.log("Autognosis", message, data);
 }
 // =============================================================================
 // HELPERS
@@ -94,7 +95,10 @@ async function maintainSymbolIndex() {
 // TOOLS
 // =============================================================================
 export function systemTools() {
-    let pendingInitToken = null;
+    let pendingInitToken = null;
+    const record = (planId, tool, args) => {
+        getDb().recordExecution(planId, tool, args, !!planId);
+    };
     return {
         autognosis_init: tool({
             description: "Initialize or check the Autognosis environment. Two-phase: 'plan' (default) generates a token, 'apply' executes it.",
@@ -104,6 +108,7 @@ export function systemTools() {
            },
            async execute({ mode, token }) {
                log("Tool call: autognosis_init", { mode });
+               record(undefined, "autognosis_init", { mode });
                if (mode === "plan") {
                    const checks = {
                        rg: await checkBinary("rg"),
@@ -139,10 +144,12 @@ export function systemTools() {
            args: {
                query: tool.schema.string(),
                mode: tool.schema.enum(["filename", "content"]).optional().default("filename"),
-               path: tool.schema.string().optional().default(".")
+               path: tool.schema.string().optional().default("."),
+               plan_id: tool.schema.string().optional().describe("Associated Plan ID")
            },
-           async execute({ query, mode, path: searchPath }) {
-               log("Tool call: fast_search", { query, mode, searchPath });
+           async execute({ query, mode, path: searchPath, plan_id }) {
+               log("Tool call: fast_search", { query, mode, searchPath, plan_id });
+               record(plan_id, "fast_search", { query, mode, searchPath });
                if (mode === "content") {
                    if (!(await checkBinary("rg")))
                        return "Error: 'rg' not installed.";
@@ -166,10 +173,12 @@ export function systemTools() {
            args: {
                file: tool.schema.string(),
                start_line: tool.schema.number(),
-               end_line: tool.schema.number()
+               end_line: tool.schema.number(),
+               plan_id: tool.schema.string().optional().describe("Associated Plan ID")
            },
-           async execute({ file, start_line, end_line }) {
-               log("Tool call: read_slice", { file, start_line, end_line });
+           async execute({ file, start_line, end_line, plan_id }) {
+               log("Tool call: read_slice", { file, start_line, end_line, plan_id });
+               record(plan_id, "read_slice", { file, start_line, end_line });
                const { stdout, stderr } = await runCmd(`sed -n '${start_line},${end_line}p;${end_line + 1}q' "${file}"`);
                if (stderr)
                    return `Error: ${stderr}`;
@@ -179,10 +188,12 @@ export function systemTools() {
        symbol_query: tool({
            description: "Query the symbol index. Rebuilds automatically if stale.",
            args: {
-               symbol: tool.schema.string()
+               symbol: tool.schema.string(),
+               plan_id: tool.schema.string().optional().describe("Associated Plan ID")
            },
-           async execute({ symbol }) {
-               log("Tool call: symbol_query", { symbol });
+           async execute({ symbol, plan_id }) {
+               log("Tool call: symbol_query", { symbol, plan_id });
+               record(plan_id, "symbol_query", { symbol });
                const maint = await maintainSymbolIndex();
                if (maint.status === "unavailable")
                    return JSON.stringify({ error: maint.reason });
@@ -194,10 +205,12 @@ export function systemTools() {
        jump_to_symbol: tool({
            description: "Jump to a symbol's definition by querying the index and reading the slice.",
            args: {
-               symbol: tool.schema.string()
+               symbol: tool.schema.string(),
+               plan_id: tool.schema.string().optional().describe("Associated Plan ID")
            },
-           async execute({ symbol }) {
-               log("Tool call: jump_to_symbol", { symbol });
+           async execute({ symbol, plan_id }) {
+               log("Tool call: jump_to_symbol", { symbol, plan_id });
+               record(plan_id, "jump_to_symbol", { symbol });
                const maint = await maintainSymbolIndex();
                if (maint.status !== "ok")
                    return JSON.stringify({ error: maint.reason });
@@ -223,34 +236,121 @@ export function systemTools() {
            async execute({ symbol, intent }) {
                log("Tool call: brief_fix_loop", { symbol, intent });
                const planId = `plan-${Date.now()}`;
-
+               record(planId, "brief_fix_loop", { symbol, intent });
+               const maint = await maintainSymbolIndex();
+               const tagsFile = path.join(CACHE_DIR, "tags");
+               const { stdout: tagLine } = await runCmd(`grep -P "^${symbol}\t" "${tagsFile}" | head -n 1`);
+               const locusFile = tagLine ? tagLine.split('\t')[1] : null;
+               let dependents = [];
+               let hotFiles = [];
+               if (locusFile) {
+                   dependents = getDb().findDependents(locusFile);
+                   hotFiles = getDb().getHotFiles('', 20);
+               }
+               const worklist = dependents.map(d => ({
+                   file: d,
+                   is_hot: hotFiles.some(h => h.path === d),
+                   reason: "Dependency impact"
+               }));
+               return JSON.stringify({
+                   plan_id: planId,
+                   symbol,
+                   intent,
+                   locus: { file: locusFile },
+                   worklist,
+                   status: "PLAN_GENERATED",
+                   metadata: {
+                       fingerprint: maint.status,
+                       generated_at: new Date().toISOString()
+                   }
+               }, null, 2);
            }
        }),
        prepare_patch: tool({
            description: "Generate a .diff artifact for the current changes.",
            args: {
-               message: tool.schema.string()
+               message: tool.schema.string(),
+               plan_id: tool.schema.string().optional().describe("Associated Plan ID")
            },
-           async execute({ message }) {
-               log("Tool call: prepare_patch", { message });
+           async execute({ message, plan_id }) {
+               log("Tool call: prepare_patch", { message, plan_id });
+               record(plan_id, "prepare_patch", { message });
                await ensureCache();
-               const
-               const
-
+               const patchId = `patch-${Date.now()}`;
+               const patchPath = path.join(CACHE_DIR, `${patchId}.diff`);
+               const { stdout: diff } = await runCmd("git diff");
+               if (!diff)
                    return "No changes to patch.";
-
-
+               const header = {
+                   patch_id: patchId,
+                   plan_id: plan_id || "adhoc",
+                   message,
+                   created_at: new Date().toISOString()
+               };
+               await fs.writeFile(patchPath, `// PATCH_METADATA: ${JSON.stringify(header)}\n\n${diff}`);
+               return JSON.stringify({ status: "SUCCESS", patch_id: patchId, path: patchPath }, null, 2);
            }
        }),
        validate_patch: tool({
-           description: "Validate a patch by applying it in a fresh worktree.",
+           description: "Validate a patch by applying it in a fresh worktree and running build (Background Job).",
            args: {
-               patch_path: tool.schema.string()
+               patch_path: tool.schema.string(),
+               plan_id: tool.schema.string().optional().describe("Associated Plan ID")
            },
-           async execute({ patch_path }) {
-               log("Tool call: validate_patch", { patch_path });
-
-
+           async execute({ patch_path, plan_id }) {
+               log("Tool call: validate_patch (background)", { patch_path, plan_id });
+               record(plan_id, "validate_patch", { patch_path });
+               const jobId = `job-validate-${Date.now()}`;
+               getDb().createJob(jobId, "validation", { patch_path, plan_id });
+               // Spawn background worker
+               (async () => {
+                   getDb().updateJob(jobId, { status: "running", progress: 10 });
+                   const tempWorktree = path.join(PROJECT_ROOT, ".opencode", "temp-" + jobId);
+                   try {
+                       await runCmd(`git worktree add -d "${tempWorktree}"`);
+                       getDb().updateJob(jobId, { progress: 30 });
+                       const content = await fs.readFile(patch_path, "utf-8");
+                       const parts = content.split('\n\n');
+                       const diffOnly = parts.length > 1 ? parts.slice(1).join('\n\n') : content;
+                       const tempDiff = path.join(tempWorktree, "valid.diff");
+                       await fs.writeFile(tempDiff, diffOnly);
+                       const { error: applyError } = await runCmd(`git apply "${tempDiff}"`, tempWorktree);
+                       if (applyError)
+                           throw new Error(`Apply failed: ${applyError.message}`);
+                       getDb().updateJob(jobId, { progress: 60 });
+                       let buildStatus = "SKIPPED";
+                       if (fsSync.existsSync(path.join(tempWorktree, "package.json"))) {
+                           const { error: buildError } = await runCmd("npm run build", tempWorktree);
+                           buildStatus = buildError ? "FAILED" : "SUCCESS";
+                       }
+                       else if (fsSync.existsSync(path.join(tempWorktree, "Package.swift"))) {
+                           const { error: buildError } = await runCmd("swift build", tempWorktree);
+                           buildStatus = buildError ? "FAILED" : "SUCCESS";
+                       }
+                       getDb().updateJob(jobId, {
+                           status: "completed",
+                           progress: 100,
+                           result: JSON.stringify({ apply: "OK", build: buildStatus })
+                       });
+                   }
+                   catch (error) {
+                       getDb().updateJob(jobId, { status: "failed", error: error.message });
+                   }
+                   finally {
+                       try {
+                           await runCmd(`git worktree remove -f "${tempWorktree}"`);
+                           if (fsSync.existsSync(tempWorktree))
+                               await fs.rm(tempWorktree, { recursive: true, force: true });
+                       }
+                       catch (e) { }
+                   }
+               })();
+               return JSON.stringify({
+                   status: "STARTED",
+                   message: "Validation started in background.",
+                   job_id: jobId,
+                   instruction: "Use perf_background_status to check progress."
+               }, null, 2);
            }
        }),
        finalize_plan: tool({
@@ -261,11 +361,17 @@ export function systemTools() {
            },
            async execute({ plan_id, outcome }) {
                log("Tool call: finalize_plan", { plan_id, outcome });
-
-               const
+               record(plan_id, "finalize_plan", { outcome });
+               const metrics = getDb().getPlanMetrics(plan_id);
+               const report = {
+                   plan_id,
+                   outcome,
+                   metrics,
+                   finished_at: new Date().toISOString()
+               };
                await fs.appendFile(path.join(CACHE_DIR, "gaps.jsonl"), JSON.stringify(report) + "\n");
                const deleted = await cleanCache();
-               return
+               return JSON.stringify({ status: "FINALIZED", report, cache_cleared: deleted }, null, 2);
            }
        })
    };
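The plan_id argument threads these tools into one tracked lifecycle. A hedged walkthrough of the intended sequence; the camelCase wrappers are hypothetical stand-ins for invoking each tool's execute handler through the opencode host:

    // 1. Plan: brief_fix_loop returns a plan_id plus a dependency worklist.
    const plan = JSON.parse(await briefFixLoop({ symbol: "parseFileAST", intent: "refactor" }));

    // 2. Snapshot: prepare_patch stamps the plan_id into the patch metadata header.
    const patch = JSON.parse(await preparePatch({ message: "refactor", plan_id: plan.plan_id }));

    // 3. Validate: runs as a background job; poll perf_background_status with job_id.
    const job = JSON.parse(await validatePatch({ patch_path: patch.path, plan_id: plan.plan_id }));

    // 4. Close out: finalize_plan reads the per-plan metrics accumulated by record().
    await finalizePlan({ plan_id: plan.plan_id, outcome: "success" });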
package/dist/testing-infrastructure.js
CHANGED
@@ -5,6 +5,7 @@ import * as fsSync from "node:fs";
 import * as path from "node:path";
 import { promisify } from "node:util";
 import * as crypto from "node:crypto";
+import { Logger } from "./services/logger.js";
 const execAsync = promisify(exec);
 const PROJECT_ROOT = process.cwd();
 const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
@@ -12,7 +13,7 @@ const TEST_DIR = path.join(OPENCODE_DIR, "tests");
 const BENCHMARK_DIR = path.join(OPENCODE_DIR, "benchmarks");
 // Internal logging
 function log(message, data) {
-
+    Logger.log("Testing", message, data);
 }
 // =============================================================================
 // HELPERS