@kody-ade/kody-engine-lite 0.1.121 → 0.1.123
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent-runner.d.ts +4 -0
- package/dist/agent-runner.js +122 -0
- package/dist/bin/cli.js +43 -24
- package/dist/ci/parse-inputs.d.ts +6 -0
- package/dist/ci/parse-inputs.js +76 -0
- package/dist/ci/parse-safety.d.ts +6 -0
- package/dist/ci/parse-safety.js +22 -0
- package/dist/cli/args.d.ts +13 -0
- package/dist/cli/args.js +42 -0
- package/dist/cli/litellm.d.ts +2 -0
- package/dist/cli/litellm.js +85 -0
- package/dist/cli/task-resolution.d.ts +2 -0
- package/dist/cli/task-resolution.js +41 -0
- package/dist/config.d.ts +49 -0
- package/dist/config.js +72 -0
- package/dist/context.d.ts +4 -0
- package/dist/context.js +83 -0
- package/dist/definitions.d.ts +3 -0
- package/dist/definitions.js +59 -0
- package/dist/entry.d.ts +1 -0
- package/dist/entry.js +236 -0
- package/dist/git-utils.d.ts +13 -0
- package/dist/git-utils.js +174 -0
- package/dist/github-api.d.ts +14 -0
- package/dist/github-api.js +114 -0
- package/dist/kody-utils.d.ts +1 -0
- package/dist/kody-utils.js +9 -0
- package/dist/learning/auto-learn.d.ts +2 -0
- package/dist/learning/auto-learn.js +169 -0
- package/dist/logger.d.ts +14 -0
- package/dist/logger.js +51 -0
- package/dist/memory.d.ts +1 -0
- package/dist/memory.js +20 -0
- package/dist/observer.d.ts +9 -0
- package/dist/observer.js +80 -0
- package/dist/pipeline/complexity.d.ts +3 -0
- package/dist/pipeline/complexity.js +12 -0
- package/dist/pipeline/executor-registry.d.ts +3 -0
- package/dist/pipeline/executor-registry.js +20 -0
- package/dist/pipeline/hooks.d.ts +17 -0
- package/dist/pipeline/hooks.js +110 -0
- package/dist/pipeline/questions.d.ts +2 -0
- package/dist/pipeline/questions.js +44 -0
- package/dist/pipeline/runner-selection.d.ts +2 -0
- package/dist/pipeline/runner-selection.js +13 -0
- package/dist/pipeline/state.d.ts +4 -0
- package/dist/pipeline/state.js +37 -0
- package/dist/pipeline.d.ts +3 -0
- package/dist/pipeline.js +213 -0
- package/dist/preflight.d.ts +1 -0
- package/dist/preflight.js +69 -0
- package/dist/retrospective.d.ts +26 -0
- package/dist/retrospective.js +211 -0
- package/dist/stages/agent.d.ts +2 -0
- package/dist/stages/agent.js +94 -0
- package/dist/stages/gate.d.ts +2 -0
- package/dist/stages/gate.js +32 -0
- package/dist/stages/review.d.ts +2 -0
- package/dist/stages/review.js +32 -0
- package/dist/stages/ship.d.ts +3 -0
- package/dist/stages/ship.js +154 -0
- package/dist/stages/verify.d.ts +2 -0
- package/dist/stages/verify.js +94 -0
- package/dist/types.d.ts +61 -0
- package/dist/types.js +1 -0
- package/dist/validators.d.ts +8 -0
- package/dist/validators.js +42 -0
- package/dist/verify-runner.d.ts +11 -0
- package/dist/verify-runner.js +110 -0
- package/package.json +3 -3
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { execFileSync } from "child_process";
|
|
2
|
+
import { logger } from "./logger.js";
|
|
3
|
+
// Max time to wait for a single `gh` CLI invocation before aborting.
const API_TIMEOUT_MS = 30_000;
// Issue labels managed by the pipeline; applied/removed as `kody:<label>`.
const LIFECYCLE_LABELS = ["planning", "building", "review", "done", "failed", "waiting", "low", "medium", "high"];
// Working directory for `gh` invocations; undefined means the process cwd.
let _ghCwd;
/**
 * Set the working directory used for all subsequent `gh` CLI calls.
 * @param {string} cwd - Directory of the git checkout `gh` should operate in.
 */
export function setGhCwd(cwd) {
    _ghCwd = cwd;
}
|
|
9
|
+
/**
 * Resolve the GitHub token used for `gh` calls.
 * A non-blank personal access token (GH_PAT) wins over the default GH_TOKEN.
 * @returns {string | undefined} The token, or undefined when none is configured.
 */
function ghToken() {
    const pat = process.env.GH_PAT?.trim();
    if (pat) {
        return pat;
    }
    return process.env.GH_TOKEN;
}
|
|
12
|
+
/**
 * Run the `gh` CLI with the given arguments and return trimmed stdout.
 * Injects the resolved token as GH_TOKEN and honors the cwd set via setGhCwd.
 * @param {string[]} args - Arguments passed to `gh`.
 * @param {{ input?: string }} [options] - Optional stdin payload.
 * @returns {string} Trimmed stdout of the command.
 */
function gh(args, options) {
    const token = ghToken();
    const env = { ...process.env };
    if (token) {
        env.GH_TOKEN = token;
    }
    const input = options?.input;
    const output = execFileSync("gh", args, {
        encoding: "utf-8",
        timeout: API_TIMEOUT_MS,
        cwd: _ghCwd,
        env,
        input,
        // When piping a body via stdin, all three streams are pipes;
        // otherwise stdin stays attached to the terminal.
        stdio: input ? ["pipe", "pipe", "pipe"] : ["inherit", "pipe", "pipe"],
    });
    return output.trim();
}
|
|
26
|
+
/**
 * Fetch an issue's title and body via the `gh` CLI.
 * @param {number} issueNumber - Issue to fetch.
 * @returns {{ body: string, title: string } | null} Parsed data, or null on failure.
 */
export function getIssue(issueNumber) {
    try {
        const json = gh(["issue", "view", String(issueNumber), "--json", "body,title"]);
        return JSON.parse(json);
    }
    catch (err) {
        logger.error(` Failed to get issue #${issueNumber}: ${err}`);
        return null;
    }
}
|
|
39
|
+
/**
 * Add a label to an issue. Failures are logged as warnings, never thrown.
 * @param {number} issueNumber
 * @param {string} label
 */
export function setLabel(issueNumber, label) {
    const argv = ["issue", "edit", String(issueNumber), "--add-label", label];
    try {
        gh(argv);
        logger.info(` Label added: ${label}`);
    }
    catch (err) {
        logger.warn(` Failed to set label ${label}: ${err}`);
    }
}
|
|
48
|
+
/**
 * Remove a label from an issue, best-effort.
 * @param {number} issueNumber
 * @param {string} label
 */
export function removeLabel(issueNumber, label) {
    const argv = ["issue", "edit", String(issueNumber), "--remove-label", label];
    try {
        gh(argv);
    }
    catch {
        // Label may not exist — ignore
    }
}
|
|
56
|
+
/**
 * Post a comment on an issue, passing the body through stdin so it is
 * never subject to shell quoting. Failures are logged, not thrown.
 * @param {number} issueNumber
 * @param {string} body - Markdown comment body.
 */
export function postComment(issueNumber, body) {
    const argv = ["issue", "comment", String(issueNumber), "--body-file", "-"];
    try {
        gh(argv, { input: body });
        logger.info(` Comment posted on #${issueNumber}`);
    }
    catch (err) {
        logger.warn(` Failed to post comment: ${err}`);
    }
}
|
|
65
|
+
/**
 * Open a pull request via the `gh` CLI; the body is piped through stdin.
 * @param {string} head - Source branch.
 * @param {string} base - Target branch.
 * @param {string} title - PR title.
 * @param {string} body - PR description (markdown).
 * @returns {{ number: number, url: string } | null} PR info, or null on failure.
 */
export function createPR(head, base, title, body) {
    try {
        // gh() already trims its output, so this is the final URL.
        const url = gh(
            ["pr", "create", "--head", head, "--base", base, "--title", title, "--body-file", "-"],
            { input: body },
        );
        // Parse the PR number out of the returned URL; 0 when unparsable.
        const match = /\/pull\/(\d+)$/.exec(url);
        const number = match ? Number.parseInt(match[1], 10) : 0;
        logger.info(` PR created: ${url}`);
        return { number, url };
    }
    catch (err) {
        logger.error(` Failed to create PR: ${err}`);
        return null;
    }
}
|
|
85
|
+
/**
 * Make `kody:<phase>` the issue's only lifecycle label: every other
 * lifecycle label is removed first, then the new one is applied.
 * @param {number} issueNumber
 * @param {string} phase - One of LIFECYCLE_LABELS; invalid values are ignored.
 */
export function setLifecycleLabel(issueNumber, phase) {
    if (!LIFECYCLE_LABELS.includes(phase)) {
        logger.warn(` Invalid lifecycle phase: ${phase}`);
        return;
    }
    // Clear every other lifecycle label so exactly one remains.
    const stale = LIFECYCLE_LABELS.filter((label) => label !== phase);
    if (stale.length > 0) {
        const removeArg = stale.map((label) => `kody:${label}`).join(",");
        try {
            gh(["issue", "edit", String(issueNumber), "--remove-label", removeArg]);
        }
        catch {
            // Labels may not exist — ignore
        }
    }
    // Add new label
    setLabel(issueNumber, `kody:${phase}`);
}
|
|
106
|
+
/**
 * Close an issue with the given state reason. Failures are logged, not thrown.
 * @param {number} issueNumber
 * @param {string} [reason="completed"] - Passed to `gh issue close --reason`.
 */
export function closeIssue(issueNumber, reason = "completed") {
    const argv = ["issue", "close", String(issueNumber), "--reason", reason];
    try {
        gh(argv);
        logger.info(` Issue #${issueNumber} closed: ${reason}`);
    }
    catch (err) {
        logger.warn(` Failed to close issue: ${err}`);
    }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function ensureTaskDir(taskId: string): string;
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import * as fs from "fs";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
/**
 * Ensure the per-task working directory `.tasks/<taskId>` exists under cwd.
 * @param {string} taskId
 * @returns {string} Absolute path to the task directory.
 */
export function ensureTaskDir(taskId) {
    const taskDir = path.join(process.cwd(), ".tasks", taskId);
    // mkdirSync with recursive:true is a no-op when the directory exists.
    fs.mkdirSync(taskDir, { recursive: true });
    return taskDir;
}
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
import * as fs from "fs";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
import { logger } from "../logger.js";
|
|
4
|
+
/**
 * Remove ANSI escape sequences from a string.
 *
 * The previous pattern only removed SGR color codes (`ESC[...m`); tool output
 * captured from CI also contains other CSI sequences (e.g. line-clear
 * `ESC[2K`, cursor moves), so match any alphabetic CSI final byte.
 * @param {string} str
 * @returns {string} Input with ANSI CSI sequences stripped.
 */
function stripAnsi(str) {
    // eslint-disable-next-line no-control-regex
    return str.replace(/\x1b\[[0-9;]*[A-Za-z]/g, "");
}
|
|
7
|
+
/**
 * Best-effort extraction of project conventions from the task's pipeline
 * artifacts (verify.md, review.md, task.json). Findings are appended to
 * .kody/memory/conventions.md; architecture detection is then seeded.
 * Never throws — learning must not fail the pipeline.
 * @param {{ projectDir: string, taskDir: string, taskId: string }} ctx
 */
export function autoLearn(ctx) {
    try {
        const memoryDir = path.join(ctx.projectDir, ".kody", "memory");
        fs.mkdirSync(memoryDir, { recursive: true });
        const timestamp = new Date().toISOString().slice(0, 10);
        const learnings = [
            ...learnFromVerify(ctx.taskDir),
            ...learnFromReview(ctx.taskDir),
            ...learnFromTaskJson(ctx.taskDir),
        ];
        if (learnings.length > 0) {
            const conventionsPath = path.join(memoryDir, "conventions.md");
            const entry = `\n## Learned ${timestamp} (task: ${ctx.taskId})\n${learnings.join("\n")}\n`;
            fs.appendFileSync(conventionsPath, entry);
            logger.info(`Auto-learned ${learnings.length} convention(s)`);
        }
        // Auto-detect architecture
        autoLearnArchitecture(ctx.projectDir, memoryDir, timestamp);
    }
    catch {
        // Auto-learn is best-effort — don't fail the pipeline
    }
}
// Scan verify.md (ANSI-stripped) for tool fingerprints.
function learnFromVerify(taskDir) {
    const verifyPath = path.join(taskDir, "verify.md");
    if (!fs.existsSync(verifyPath))
        return [];
    const verify = stripAnsi(fs.readFileSync(verifyPath, "utf-8"));
    const found = [];
    if (/vitest/i.test(verify))
        found.push("- Uses vitest for testing");
    if (/jest/i.test(verify))
        found.push("- Uses jest for testing");
    if (/eslint/i.test(verify))
        found.push("- Uses eslint for linting");
    if (/prettier/i.test(verify))
        found.push("- Uses prettier for formatting");
    if (/tsc\b/i.test(verify))
        found.push("- Uses TypeScript (tsc)");
    if (/jsdom/i.test(verify))
        found.push("- Test environment: jsdom");
    if (/node/i.test(verify) && /environment/i.test(verify))
        found.push("- Test environment: node");
    return found;
}
// Scan review.md for recurring review themes.
function learnFromReview(taskDir) {
    const reviewPath = path.join(taskDir, "review.md");
    if (!fs.existsSync(reviewPath))
        return [];
    const review = fs.readFileSync(reviewPath, "utf-8");
    const found = [];
    if (/\.js extension/i.test(review))
        found.push("- Imports use .js extensions (ESM)");
    if (/barrel export/i.test(review))
        found.push("- Uses barrel exports (index.ts)");
    if (/timezone/i.test(review))
        found.push("- Timezone handling is a concern in this codebase");
    if (/UTC/i.test(review))
        found.push("- Date operations should consider UTC vs local time");
    return found;
}
// Derive the directories the task touched from task.json's scope array.
function learnFromTaskJson(taskDir) {
    const taskJsonPath = path.join(taskDir, "task.json");
    if (!fs.existsSync(taskJsonPath))
        return [];
    try {
        const raw = stripAnsi(fs.readFileSync(taskJsonPath, "utf-8"));
        // Agents sometimes wrap the JSON in markdown fences — strip them.
        const cleaned = raw.replace(/^```json\s*\n?/m, "").replace(/\n?```\s*$/m, "");
        const task = JSON.parse(cleaned);
        if (Array.isArray(task.scope)) {
            const dirs = [...new Set(task.scope.map((s) => s.split("/").slice(0, -1).join("/")).filter(Boolean))];
            if (dirs.length > 0)
                return [`- Active directories: ${dirs.join(", ")}`];
        }
    }
    catch {
        // Ignore
    }
    return [];
}
|
|
77
|
+
/**
 * Seed .kody/memory/architecture.md with auto-detected project facts.
 * Runs only once: skipped when architecture.md already exists, so manual
 * edits are never overwritten. All detection is best-effort.
 * @param {string} projectDir - Repository root to inspect.
 * @param {string} memoryDir - .kody/memory directory (already created by caller).
 * @param {string} timestamp - Date stamp (YYYY-MM-DD) for the file header.
 */
function autoLearnArchitecture(projectDir, memoryDir, timestamp) {
    const archPath = path.join(memoryDir, "architecture.md");
    // Only auto-detect if architecture.md doesn't exist yet
    if (fs.existsSync(archPath))
        return;
    const detected = [];
    // Detect framework from package.json
    const pkgPath = path.join(projectDir, "package.json");
    if (fs.existsSync(pkgPath)) {
        try {
            const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
            // Only presence matters here; devDependencies win on key collision.
            const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
            // Frameworks — first match in this else-if chain wins.
            if (allDeps.next)
                detected.push(`- Framework: Next.js ${allDeps.next}`);
            else if (allDeps.react)
                detected.push(`- Framework: React ${allDeps.react}`);
            else if (allDeps.express)
                detected.push(`- Framework: Express ${allDeps.express}`);
            else if (allDeps.fastify)
                detected.push(`- Framework: Fastify ${allDeps.fastify}`);
            // Language
            if (allDeps.typescript)
                detected.push(`- Language: TypeScript ${allDeps.typescript}`);
            // Testing — vitest preferred over jest when both are present.
            if (allDeps.vitest)
                detected.push(`- Testing: vitest ${allDeps.vitest}`);
            else if (allDeps.jest)
                detected.push(`- Testing: jest ${allDeps.jest}`);
            // Linting
            if (allDeps.eslint)
                detected.push(`- Linting: eslint ${allDeps.eslint}`);
            // Database
            if (allDeps.prisma || allDeps["@prisma/client"])
                detected.push("- Database: Prisma ORM");
            if (allDeps.drizzle || allDeps["drizzle-orm"])
                detected.push("- Database: Drizzle ORM");
            if (allDeps.pg || allDeps.postgres)
                detected.push("- Database: PostgreSQL");
            // CMS
            if (allDeps.payload || allDeps["@payloadcms/next"])
                detected.push(`- CMS: Payload CMS`);
            // Module type
            if (pkg.type === "module")
                detected.push("- Module system: ESM");
            else
                detected.push("- Module system: CommonJS");
            // Package manager — inferred from whichever lockfile is present.
            if (fs.existsSync(path.join(projectDir, "pnpm-lock.yaml")))
                detected.push("- Package manager: pnpm");
            else if (fs.existsSync(path.join(projectDir, "yarn.lock")))
                detected.push("- Package manager: yarn");
            else if (fs.existsSync(path.join(projectDir, "package-lock.json")))
                detected.push("- Package manager: npm");
        }
        catch {
            // Ignore parse errors
        }
    }
    // Detect directory structure
    const topDirs = [];
    try {
        const entries = fs.readdirSync(projectDir, { withFileTypes: true });
        for (const entry of entries) {
            // Skip hidden directories and node_modules; keep everything else.
            if (entry.isDirectory() && !entry.name.startsWith(".") && entry.name !== "node_modules") {
                topDirs.push(entry.name);
            }
        }
        if (topDirs.length > 0)
            detected.push(`- Top-level directories: ${topDirs.join(", ")}`);
    }
    catch {
        // Ignore
    }
    // Detect src structure
    const srcDir = path.join(projectDir, "src");
    if (fs.existsSync(srcDir)) {
        try {
            const srcEntries = fs.readdirSync(srcDir, { withFileTypes: true });
            const srcDirs = srcEntries.filter((e) => e.isDirectory()).map((e) => e.name);
            if (srcDirs.length > 0)
                detected.push(`- src/ structure: ${srcDirs.join(", ")}`);
        }
        catch {
            // Ignore
        }
    }
    // Write nothing when nothing was detected, so a later run can try again.
    if (detected.length > 0) {
        const content = `# Architecture (auto-detected ${timestamp})\n\n## Overview\n${detected.join("\n")}\n`;
        fs.writeFileSync(archPath, content);
        logger.info(`Auto-detected architecture (${detected.length} items)`);
    }
}
|
package/dist/logger.d.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export declare const logger: {
|
|
2
|
+
debug: (msg: string) => void;
|
|
3
|
+
info: (msg: string) => void;
|
|
4
|
+
warn: (msg: string) => void;
|
|
5
|
+
error: (msg: string) => void;
|
|
6
|
+
};
|
|
7
|
+
export declare function createStageLogger(stage: string, taskId?: string): {
|
|
8
|
+
debug: (msg: string) => void;
|
|
9
|
+
info: (msg: string) => void;
|
|
10
|
+
warn: (msg: string) => void;
|
|
11
|
+
error: (msg: string) => void;
|
|
12
|
+
};
|
|
13
|
+
export declare function ciGroup(title: string): void;
|
|
14
|
+
export declare function ciGroupEnd(): void;
|
package/dist/logger.js
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
// True when running inside GitHub Actions (enables ::group:: workflow commands).
const isCI = !!process.env.GITHUB_ACTIONS;
// Numeric severity ranking; messages below the configured level are dropped.
const LEVELS = {
    debug: 0,
    info: 1,
    warn: 2,
    error: 3,
};
/**
 * Resolve the active log level from the LOG_LEVEL env var.
 *
 * Fix: the lookup was case-sensitive, so LOG_LEVEL=DEBUG was silently
 * treated as "info". Normalize to lowercase first; unknown or unset
 * values still fall back to "info".
 * @returns {number} Numeric threshold from LEVELS.
 */
function getLevel() {
    const env = process.env.LOG_LEVEL?.toLowerCase();
    return LEVELS[env ?? "info"] ?? LEVELS.info;
}
|
|
12
|
+
/**
 * HH:MM:SS (UTC) portion of the current time, used as the log-line prefix.
 * @returns {string}
 */
function timestamp() {
    const iso = new Date().toISOString();
    return iso.substring(11, 19);
}
|
|
15
|
+
/**
 * Emit a single log line when `level` meets the configured threshold.
 * Errors go to console.error, warnings to console.warn, the rest to console.log.
 * @param {"debug"|"info"|"warn"|"error"} level
 * @param {string} msg
 */
function log(level, msg) {
    if (LEVELS[level] < getLevel()) {
        return;
    }
    const line = `[${timestamp()}] ${level.toUpperCase().padEnd(5)} ${msg}`;
    switch (level) {
        case "error":
            console.error(line);
            break;
        case "warn":
            console.warn(line);
            break;
        default:
            console.log(line);
            break;
    }
}
|
|
29
|
+
/**
 * Shared module logger — thin named wrappers around log(), one per level.
 */
export const logger = {
    debug(msg) { log("debug", msg); },
    info(msg) { log("info", msg); },
    warn(msg) { log("warn", msg); },
    error(msg) { log("error", msg); },
};
|
|
35
|
+
/**
 * Build a logger whose messages are prefixed with the stage name
 * (and task id when provided).
 * @param {string} stage
 * @param {string} [taskId]
 * @returns {{ debug: Function, info: Function, warn: Function, error: Function }}
 */
export function createStageLogger(stage, taskId) {
    const tag = taskId ? `[${stage}:${taskId}]` : `[${stage}]`;
    const withTag = (msg) => `${tag} ${msg}`;
    return {
        debug: (msg) => logger.debug(withTag(msg)),
        info: (msg) => logger.info(withTag(msg)),
        warn: (msg) => logger.warn(withTag(msg)),
        error: (msg) => logger.error(withTag(msg)),
    };
}
|
|
44
|
+
/**
 * Open a collapsible log group in GitHub Actions output; no-op elsewhere.
 * @param {string} title
 */
export function ciGroup(title) {
    if (!isCI) {
        return;
    }
    process.stdout.write(`::group::${title}\n`);
}
/**
 * Close the most recently opened GitHub Actions log group; no-op elsewhere.
 */
export function ciGroupEnd() {
    if (!isCI) {
        return;
    }
    process.stdout.write(`::endgroup::\n`);
}
|
package/dist/memory.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function readProjectMemory(projectDir: string): string;
|
package/dist/memory.js
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import * as fs from "fs";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
/**
 * Concatenate all markdown files under <projectDir>/.kody/memory into a
 * single "# Project Memory" document, one "## <name>" section per file,
 * sorted by filename. Returns "" when the directory or content is absent.
 *
 * Fix: derive the section name with path.basename(file, ".md") instead of
 * file.replace(".md", ""), which removed the FIRST ".md" occurrence anywhere
 * in the name (e.g. "x.mdnotes.md" became "xnotes.md").
 * @param {string} projectDir - Repository root containing .kody/memory.
 * @returns {string} Combined memory document, or "" when empty.
 */
export function readProjectMemory(projectDir) {
    const memoryDir = path.join(projectDir, ".kody", "memory");
    if (!fs.existsSync(memoryDir))
        return "";
    const files = fs.readdirSync(memoryDir).filter((f) => f.endsWith(".md")).sort();
    if (files.length === 0)
        return "";
    const sections = [];
    for (const file of files) {
        const content = fs.readFileSync(path.join(memoryDir, file), "utf-8").trim();
        // Whitespace-only files contribute nothing.
        if (content) {
            // Strip only the trailing extension, not an interior ".md".
            sections.push(`## ${path.basename(file, ".md")}\n${content}`);
        }
    }
    if (sections.length === 0)
        return "";
    return `# Project Memory\n\n${sections.join("\n\n")}\n`;
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import type { AgentRunner } from "./types.js";
/** How a stage failure should be handled downstream. */
export type FailureClassification = "fixable" | "infrastructure" | "pre-existing" | "retry" | "abort";
/** Structured verdict produced by the diagnosis agent. */
export interface DiagnosisResult {
    classification: FailureClassification;
    /** One-sentence summary of what went wrong. */
    reason: string;
    /** Fix instructions (fixable) or required user action (infrastructure). */
    resolution: string;
}
export declare function diagnoseFailure(stageName: string, errorOutput: string, modifiedFiles: string[], runner: AgentRunner, model: string): Promise<DiagnosisResult>;
export declare function getModifiedFiles(projectDir: string): string[];
|
package/dist/observer.js
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { execFileSync } from "child_process";
|
|
2
|
+
import { logger } from "./logger.js";
|
|
3
|
+
const DIAGNOSIS_PROMPT = `You are a pipeline failure diagnosis agent. Analyze the error and classify it.
|
|
4
|
+
|
|
5
|
+
Output ONLY valid JSON. No markdown fences. No explanation.
|
|
6
|
+
|
|
7
|
+
{
|
|
8
|
+
"classification": "fixable | infrastructure | pre-existing | retry | abort",
|
|
9
|
+
"reason": "One sentence explaining what went wrong",
|
|
10
|
+
"resolution": "Specific instructions for fixing (if fixable) or what the user needs to do (if infrastructure)"
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
Classification rules:
|
|
14
|
+
- fixable: Error is in code that was just written/modified. The resolution should describe exactly what to change.
|
|
15
|
+
- infrastructure: External dependency not available (database, API, service). The resolution should say what the user needs to set up.
|
|
16
|
+
- pre-existing: Error exists in code that was NOT modified. Safe to skip. The resolution should note which files.
|
|
17
|
+
- retry: Transient error (network timeout, rate limit, flaky test). Worth retrying once.
|
|
18
|
+
- abort: Unrecoverable error (permission denied, corrupted state, out of disk). Pipeline should stop.
|
|
19
|
+
|
|
20
|
+
Error context:
|
|
21
|
+
`;
|
|
22
|
+
// Accepted values for DiagnosisResult.classification.
const VALID_CLASSIFICATIONS = ["fixable", "infrastructure", "pre-existing", "retry", "abort"];
/**
 * Ask the agent runner to classify a stage failure.
 * Falls back to "fixable" — so an autofix is attempted — whenever the agent
 * fails, times out, or returns malformed/unrecognized JSON.
 * @param {string} stageName - Pipeline stage that failed.
 * @param {string} errorOutput - Raw error text (only the tail is sent).
 * @param {string[]} modifiedFiles - Files touched by the build stage.
 * @param {AgentRunner} runner - Agent used to run the diagnosis prompt.
 * @param {string} model - Model identifier for the runner.
 * @returns {Promise<DiagnosisResult>}
 */
export async function diagnoseFailure(stageName, errorOutput, modifiedFiles, runner, model) {
    const fileSummary = modifiedFiles.length > 0
        ? `Files modified by build stage:\n${modifiedFiles.map((f) => `- ${f}`).join("\n")}`
        : "No files were modified (build may not have run yet).";
    const context = [
        `Stage: ${stageName}`,
        ``,
        `Error output:`,
        errorOutput.slice(-2000), // Last 2000 chars of error
        ``,
        fileSummary,
    ].join("\n");
    try {
        // 30s timeout — diagnosis should be fast.
        const result = await runner.run("diagnosis", DIAGNOSIS_PROMPT + context, model, 30_000, "");
        if (result.outcome === "completed" && result.output) {
            // Tolerate agents that wrap the JSON in markdown fences.
            const cleaned = result.output
                .replace(/^```json\s*\n?/m, "")
                .replace(/\n?```\s*$/m, "")
                .trim();
            const parsed = JSON.parse(cleaned);
            if (VALID_CLASSIFICATIONS.includes(parsed.classification)) {
                logger.info(` Diagnosis: ${parsed.classification} — ${parsed.reason}`);
                return {
                    classification: parsed.classification,
                    reason: parsed.reason ?? "Unknown reason",
                    resolution: parsed.resolution ?? "",
                };
            }
        }
    }
    catch (err) {
        logger.warn(` Diagnosis error: ${err instanceof Error ? err.message : err}`);
    }
    // Default: assume fixable (safest — will attempt autofix)
    logger.warn(" Diagnosis failed — defaulting to fixable");
    return {
        classification: "fixable",
        reason: "Could not diagnose failure",
        resolution: errorOutput.slice(-500),
    };
}
|
|
67
|
+
/**
 * List files changed relative to the previous commit (git diff HEAD~1).
 * Returns [] when git fails for any reason (no repo, no parent commit,
 * timeout, missing directory).
 * @param {string} projectDir - Repository to inspect.
 * @returns {string[]} Changed file paths, possibly empty.
 */
export function getModifiedFiles(projectDir) {
    try {
        const stdout = execFileSync("git", ["diff", "--name-only", "HEAD~1"], {
            encoding: "utf-8",
            cwd: projectDir,
            timeout: 5000,
            stdio: ["pipe", "pipe", "pipe"],
        });
        return stdout.trim().split("\n").filter(Boolean);
    }
    catch {
        return [];
    }
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
// Stages that can be skipped for each task complexity/risk level.
const COMPLEXITY_SKIP = {
    low: ["plan", "review", "review-fix"],
    medium: ["review-fix"],
    high: [],
};
/**
 * Drop the stages that are unnecessary for the given complexity.
 * Unknown complexity values skip nothing.
 * @param {{ name: string }[]} stages
 * @param {string} complexity
 * @returns {{ name: string }[]} Stages that should still run.
 */
export function filterByComplexity(stages, complexity) {
    const skip = COMPLEXITY_SKIP[complexity] ?? [];
    return stages.filter((s) => !skip.includes(s.name));
}
/**
 * True when `value` is a recognized complexity level.
 *
 * Fix: `value in COMPLEXITY_SKIP` also matched inherited prototype keys
 * (e.g. "toString"); use Object.hasOwn to test own properties only.
 * @param {string} value
 * @returns {boolean}
 */
export function isValidComplexity(value) {
    return Object.hasOwn(COMPLEXITY_SKIP, value);
}
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
import type { StageName, StageDefinition, StageResult, PipelineContext } from "../types.js";
|
|
2
|
+
export type StageExecutor = (ctx: PipelineContext, def: StageDefinition) => StageResult | Promise<StageResult>;
|
|
3
|
+
export declare function getExecutor(name: StageName): StageExecutor;
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { executeAgentStage } from "../stages/agent.js";
|
|
2
|
+
import { executeVerifyWithAutofix } from "../stages/verify.js";
|
|
3
|
+
import { executeReviewWithFix } from "../stages/review.js";
|
|
4
|
+
import { executeShipStage } from "../stages/ship.js";
|
|
5
|
+
// Maps each stage name to the function that executes it. Generic agent
// stages share executeAgentStage; verify/review/ship have dedicated loops.
const EXECUTOR_REGISTRY = {
    taskify: executeAgentStage,
    plan: executeAgentStage,
    build: executeAgentStage,
    verify: executeVerifyWithAutofix,
    review: executeReviewWithFix,
    "review-fix": executeAgentStage,
    ship: executeShipStage,
};
/**
 * Look up the executor registered for a stage.
 * @param {string} name - Stage name.
 * @returns {Function} The stage executor.
 * @throws {Error} When no executor is registered for `name`.
 */
export function getExecutor(name) {
    const found = EXECUTOR_REGISTRY[name];
    if (found === undefined) {
        throw new Error(`No executor registered for stage: ${name}`);
    }
    return found;
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { StageDefinition, PipelineStatus, PipelineContext } from "../types.js";
|
|
2
|
+
export declare function applyPreStageLabel(ctx: PipelineContext, def: StageDefinition): void;
|
|
3
|
+
/**
|
|
4
|
+
* Check for clarifying questions after taskify/plan.
|
|
5
|
+
* Returns the paused PipelineStatus if pipeline should stop, null to continue.
|
|
6
|
+
*/
|
|
7
|
+
export declare function checkQuestionsAfterStage(ctx: PipelineContext, def: StageDefinition, state: PipelineStatus): PipelineStatus | null;
|
|
8
|
+
/**
|
|
9
|
+
* Auto-detect complexity from task.json after taskify.
|
|
10
|
+
* Returns new complexity + activeStages if detected, null otherwise.
|
|
11
|
+
*/
|
|
12
|
+
export declare function autoDetectComplexity(ctx: PipelineContext, def: StageDefinition): {
|
|
13
|
+
complexity: "low" | "medium" | "high";
|
|
14
|
+
activeStages: StageDefinition[];
|
|
15
|
+
} | null;
|
|
16
|
+
export declare function commitAfterStage(ctx: PipelineContext, def: StageDefinition): void;
|
|
17
|
+
export declare function postSkippedStagesComment(ctx: PipelineContext, complexity: string, activeStages: StageDefinition[]): void;
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import * as fs from "fs";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
import { STAGES } from "../definitions.js";
|
|
4
|
+
import { setLifecycleLabel, setLabel, postComment } from "../github-api.js";
|
|
5
|
+
import { commitAll } from "../git-utils.js";
|
|
6
|
+
import { checkForQuestions } from "./questions.js";
|
|
7
|
+
import { filterByComplexity, isValidComplexity } from "./complexity.js";
|
|
8
|
+
import { writeState } from "./state.js";
|
|
9
|
+
import { logger } from "../logger.js";
|
|
10
|
+
// ─── Pre-stage ──────────────────────────────────────────────────────────────
|
|
11
|
+
/**
 * Mark the issue with a lifecycle label before certain stages run
 * (build → "building", review → "review"). Skipped for local runs
 * or when no issue is attached.
 * @param {PipelineContext} ctx
 * @param {StageDefinition} def
 */
export function applyPreStageLabel(ctx, def) {
    const { issueNumber, local } = ctx.input;
    if (!issueNumber || local) {
        return;
    }
    switch (def.name) {
        case "build":
            setLifecycleLabel(issueNumber, "building");
            break;
        case "review":
            setLifecycleLabel(issueNumber, "review");
            break;
        default:
            break;
    }
}
|
|
19
|
+
// ─── Post-stage (success) ───────────────────────────────────────────────────
|
|
20
|
+
/**
 * Check for clarifying questions after taskify/plan.
 * Returns the paused PipelineStatus if pipeline should stop, null to continue.
 * Skipped entirely for dry runs and for every other stage.
 * @param {PipelineContext} ctx
 * @param {StageDefinition} def
 * @param {PipelineStatus} state - Mutated in place when pausing.
 * @returns {PipelineStatus | null}
 */
export function checkQuestionsAfterStage(ctx, def, state) {
    const isQuestionStage = def.name === "taskify" || def.name === "plan";
    if (!isQuestionStage || ctx.input.dryRun) {
        return null;
    }
    if (!checkForQuestions(ctx, def.name)) {
        return null;
    }
    // The stage itself completed; the pipeline as a whole is parked as
    // "failed" with a pause marker until answers arrive on the issue.
    state.state = "failed";
    state.stages[def.name] = {
        ...state.stages[def.name],
        state: "completed",
        error: "paused: waiting for answers",
    };
    writeState(state, ctx.taskDir);
    logger.info(` Pipeline paused — questions posted on issue`);
    return state;
}
|
|
42
|
+
/**
 * Auto-detect complexity from task.json after taskify.
 * Returns new complexity + activeStages if detected, null otherwise.
 * Skipped when the caller pinned a complexity explicitly (ctx.input.complexity).
 * Side effect: mirrors the detected complexity and task type onto the issue
 * as labels (best-effort, non-local runs with an issue only).
 */
export function autoDetectComplexity(ctx, def) {
    if (def.name !== "taskify")
        return null;
    // Respect an explicitly requested complexity.
    if (ctx.input.complexity)
        return null;
    try {
        const taskJsonPath = path.join(ctx.taskDir, "task.json");
        if (!fs.existsSync(taskJsonPath))
            return null;
        const raw = fs.readFileSync(taskJsonPath, "utf-8");
        // Agents sometimes wrap the JSON in markdown fences — strip them.
        const cleaned = raw.replace(/^```json\s*\n?/m, "").replace(/\n?```\s*$/m, "");
        const taskJson = JSON.parse(cleaned);
        // risk_level doubles as the complexity key; ignore unknown values.
        if (!taskJson.risk_level || !isValidComplexity(taskJson.risk_level))
            return null;
        const complexity = taskJson.risk_level;
        const activeStages = filterByComplexity(STAGES, complexity);
        logger.info(` Complexity auto-detected: ${complexity} (${activeStages.map(s => s.name).join(" → ")})`);
        if (ctx.input.issueNumber && !ctx.input.local) {
            // Label updates are best-effort; detection still succeeds if they fail.
            try {
                setLifecycleLabel(ctx.input.issueNumber, complexity);
            }
            catch { /* ignore */ }
            if (taskJson.task_type) {
                try {
                    setLabel(ctx.input.issueNumber, `kody:${taskJson.task_type}`);
                }
                catch { /* ignore */ }
            }
        }
        return { complexity, activeStages };
    }
    catch {
        // Malformed task.json etc. — fall back to the default stage set.
        return null;
    }
}
|
|
81
|
+
/**
 * Commit working-tree changes after stages that modify code
 * (build and review-fix). No-op for dry runs or when no issue is
 * attached; commit failures (e.g. nothing to commit) are ignored.
 * @param {PipelineContext} ctx
 * @param {StageDefinition} def
 */
export function commitAfterStage(ctx, def) {
    if (ctx.input.dryRun || !ctx.input.issueNumber) {
        return;
    }
    const messages = {
        build: `feat(${ctx.taskId}): implement task`,
        "review-fix": `fix(${ctx.taskId}): address review`,
    };
    const message = messages[def.name];
    if (!message) {
        return;
    }
    try {
        commitAll(message, ctx.projectDir);
    }
    catch { /* ignore */ }
}
|
|
97
|
+
// ─── Skip logic ─────────────────────────────────────────────────────────────
|
|
98
|
+
/**
 * Tell the issue which stages were skipped for the detected complexity.
 * Best-effort: silent for local/dry runs, runs without an issue, or when
 * posting the comment fails.
 * @param {PipelineContext} ctx
 * @param {string} complexity
 * @param {StageDefinition[]} activeStages
 */
export function postSkippedStagesComment(ctx, complexity, activeStages) {
    if (!ctx.input.issueNumber || ctx.input.local || ctx.input.dryRun) {
        return;
    }
    const active = new Set(activeStages.map((s) => s.name));
    const skipped = STAGES.map((s) => s.name).filter((name) => !active.has(name));
    if (skipped.length === 0) {
        return;
    }
    try {
        postComment(ctx.input.issueNumber, `⚡ **Complexity: ${complexity}** — skipping ${skipped.join(", ")} (not needed for ${complexity}-risk tasks)`);
    }
    catch { /* ignore */ }
}
|