@kody-ade/kody-engine-lite 0.1.104 → 0.1.106

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73) hide show
  1. package/dist/bin/cli.js +626 -171
  2. package/kody.config.schema.json +5 -0
  3. package/package.json +1 -1
  4. package/prompts/taskify.md +0 -5
  5. package/templates/kody.yml +30 -103
  6. package/dist/agent-runner.d.ts +0 -4
  7. package/dist/agent-runner.js +0 -122
  8. package/dist/ci/parse-inputs.d.ts +0 -6
  9. package/dist/ci/parse-inputs.js +0 -76
  10. package/dist/ci/parse-safety.d.ts +0 -6
  11. package/dist/ci/parse-safety.js +0 -22
  12. package/dist/cli/args.d.ts +0 -13
  13. package/dist/cli/args.js +0 -42
  14. package/dist/cli/litellm.d.ts +0 -2
  15. package/dist/cli/litellm.js +0 -85
  16. package/dist/cli/task-resolution.d.ts +0 -2
  17. package/dist/cli/task-resolution.js +0 -41
  18. package/dist/config.d.ts +0 -49
  19. package/dist/config.js +0 -72
  20. package/dist/context.d.ts +0 -4
  21. package/dist/context.js +0 -83
  22. package/dist/definitions.d.ts +0 -3
  23. package/dist/definitions.js +0 -59
  24. package/dist/entry.d.ts +0 -1
  25. package/dist/entry.js +0 -236
  26. package/dist/git-utils.d.ts +0 -13
  27. package/dist/git-utils.js +0 -174
  28. package/dist/github-api.d.ts +0 -14
  29. package/dist/github-api.js +0 -114
  30. package/dist/kody-utils.d.ts +0 -1
  31. package/dist/kody-utils.js +0 -9
  32. package/dist/learning/auto-learn.d.ts +0 -2
  33. package/dist/learning/auto-learn.js +0 -169
  34. package/dist/logger.d.ts +0 -14
  35. package/dist/logger.js +0 -51
  36. package/dist/memory.d.ts +0 -1
  37. package/dist/memory.js +0 -20
  38. package/dist/observer.d.ts +0 -9
  39. package/dist/observer.js +0 -80
  40. package/dist/pipeline/complexity.d.ts +0 -3
  41. package/dist/pipeline/complexity.js +0 -12
  42. package/dist/pipeline/executor-registry.d.ts +0 -3
  43. package/dist/pipeline/executor-registry.js +0 -20
  44. package/dist/pipeline/hooks.d.ts +0 -17
  45. package/dist/pipeline/hooks.js +0 -110
  46. package/dist/pipeline/questions.d.ts +0 -2
  47. package/dist/pipeline/questions.js +0 -44
  48. package/dist/pipeline/runner-selection.d.ts +0 -2
  49. package/dist/pipeline/runner-selection.js +0 -13
  50. package/dist/pipeline/state.d.ts +0 -4
  51. package/dist/pipeline/state.js +0 -37
  52. package/dist/pipeline.d.ts +0 -3
  53. package/dist/pipeline.js +0 -213
  54. package/dist/preflight.d.ts +0 -1
  55. package/dist/preflight.js +0 -69
  56. package/dist/retrospective.d.ts +0 -26
  57. package/dist/retrospective.js +0 -211
  58. package/dist/stages/agent.d.ts +0 -2
  59. package/dist/stages/agent.js +0 -94
  60. package/dist/stages/gate.d.ts +0 -2
  61. package/dist/stages/gate.js +0 -32
  62. package/dist/stages/review.d.ts +0 -2
  63. package/dist/stages/review.js +0 -32
  64. package/dist/stages/ship.d.ts +0 -3
  65. package/dist/stages/ship.js +0 -154
  66. package/dist/stages/verify.d.ts +0 -2
  67. package/dist/stages/verify.js +0 -94
  68. package/dist/types.d.ts +0 -61
  69. package/dist/types.js +0 -1
  70. package/dist/validators.d.ts +0 -8
  71. package/dist/validators.js +0 -42
  72. package/dist/verify-runner.d.ts +0 -11
  73. package/dist/verify-runner.js +0 -110
package/dist/git-utils.js DELETED
@@ -1,174 +0,0 @@
1
- import { execFileSync } from "child_process";
2
- import { logger } from "./logger.js";
3
// Branches treated as "base" branches (never feature branches).
const BASE_BRANCHES = ["dev", "main", "master"];

// Lazily built copy of process.env with commit hooks disabled.
let _hookSafeEnv = null;

/** Returns a cached environment that disables husky/skip-hooks for scripted git calls. */
function getHookSafeEnv() {
    _hookSafeEnv ??= { ...process.env, HUSKY: "0", SKIP_HOOKS: "1" };
    return _hookSafeEnv;
}

/**
 * Runs `git <args>` synchronously and returns trimmed stdout.
 * Throws when git exits non-zero or the timeout (default 30s) elapses.
 */
function git(args, options) {
    const { timeout = 30_000, cwd, env = getHookSafeEnv() } = options ?? {};
    return execFileSync("git", args, {
        encoding: "utf-8",
        timeout,
        cwd,
        env,
        stdio: ["pipe", "pipe", "pipe"],
    }).trim();
}
20
/**
 * Builds a git branch name "<issueNumber>-<slug>" from an issue title.
 * The slug is lowercased, non-alphanumerics dropped, whitespace collapsed
 * to single hyphens, and truncated to 50 chars.
 *
 * Fixes: leading hyphens are now trimmed as well (a title starting with
 * punctuation previously produced "12--foo"), and a title with no usable
 * characters yields just the issue number instead of a trailing-dash "12-".
 */
export function deriveBranchName(issueNumber, title) {
    const slug = title
        .toLowerCase()
        .replace(/[^a-z0-9\s-]/g, "")
        .replace(/\s+/g, "-")
        .replace(/-+/g, "-")
        .slice(0, 50)
        .replace(/^-+|-+$/g, "");
    return slug ? `${issueNumber}-${slug}` : String(issueNumber);
}
30
/**
 * Resolves the repository's default branch. Tries, in order:
 *   1. the local symbolic ref for origin/HEAD (fast, offline),
 *   2. `git remote show origin` (needs network, 10s timeout),
 *   3. the hard-coded fallback "dev".
 */
export function getDefaultBranch(cwd) {
    try {
        return git(["symbolic-ref", "refs/remotes/origin/HEAD"], { cwd })
            .replace("refs/remotes/origin/", "");
    }
    catch {
        // symbolic ref not set locally — try the remote
    }
    try {
        const remoteInfo = git(["remote", "show", "origin"], { cwd, timeout: 10_000 });
        const headLine = remoteInfo.match(/HEAD branch:\s*(\S+)/);
        if (headLine)
            return headLine[1];
    }
    catch {
        // no network / no remote — use fallback
    }
    return "dev";
}

/** Returns the currently checked-out branch name ("" when detached). */
export function getCurrentBranch(cwd) {
    return git(["branch", "--show-current"], { cwd });
}
55
/**
 * Ensures a feature branch for the issue is checked out, creating it if needed.
 * Resolution order:
 *   1. already on a "<issue>-..." branch   -> stay put
 *   2. on some other feature branch        -> checkout default first (abort on failure)
 *   3. branch exists on origin             -> checkout + pull
 *   4. branch exists locally               -> checkout
 *   5. otherwise                           -> create from origin/<default> (or current HEAD)
 * Returns the name of the branch that is actually checked out afterwards.
 */
export function ensureFeatureBranch(issueNumber, title, cwd) {
    const current = getCurrentBranch(cwd);
    const branchName = deriveBranchName(issueNumber, title);
    // Already on the correct feature branch for this issue
    // (any "<issue>-..." branch counts, even if the slug differs)
    if (current === branchName || current.startsWith(`${issueNumber}-`)) {
        logger.info(` Already on feature branch: ${current}`);
        return current;
    }
    // On a different feature branch — switch to default first
    // ("" means detached HEAD; in that case we skip the switch)
    if (!BASE_BRANCHES.includes(current) && current !== "") {
        const defaultBranch = getDefaultBranch(cwd);
        logger.info(` Switching from ${current} to ${defaultBranch} before creating ${branchName}`);
        try {
            git(["checkout", defaultBranch], { cwd });
        }
        catch {
            // Cannot leave the current branch safely — give up and stay where we are
            logger.warn(` Failed to checkout ${defaultBranch}, aborting branch creation`);
            return current;
        }
    }
    // Fetch origin so the remote-existence checks below see fresh refs
    try {
        git(["fetch", "origin"], { cwd, timeout: 30_000 });
    }
    catch {
        logger.warn(" Failed to fetch origin");
    }
    // Check if branch exists on remote
    try {
        git(["rev-parse", "--verify", `origin/${branchName}`], { cwd });
        git(["checkout", branchName], { cwd });
        git(["pull", "origin", branchName], { cwd, timeout: 30_000 });
        logger.info(` Checked out existing remote branch: ${branchName}`);
        return branchName;
    }
    catch {
        // Branch doesn't exist on remote
    }
    // Check if branch exists locally
    try {
        git(["rev-parse", "--verify", branchName], { cwd });
        git(["checkout", branchName], { cwd });
        logger.info(` Checked out existing local branch: ${branchName}`);
        return branchName;
    }
    catch {
        // Branch doesn't exist locally either
    }
    // Create new branch tracking default branch
    const defaultBranch = getDefaultBranch(cwd);
    try {
        git(["checkout", "-b", branchName, `origin/${defaultBranch}`], { cwd });
    }
    catch {
        // If origin/default doesn't exist, create from current HEAD
        git(["checkout", "-b", branchName], { cwd });
    }
    logger.info(` Created new branch: ${branchName}`);
    return branchName;
}
115
/**
 * Merges origin/<default> into the current feature branch to keep it fresh.
 * No-op when already on the default branch. A fetch failure skips the sync;
 * a merge conflict is rolled back with `merge --abort` and logged — this
 * function never throws for sync problems.
 */
export function syncWithDefault(cwd) {
    const defaultBranch = getDefaultBranch(cwd);
    const current = getCurrentBranch(cwd);
    if (current === defaultBranch)
        return; // already on default, no merge needed
    // Fetch latest
    try {
        git(["fetch", "origin", defaultBranch], { cwd, timeout: 30_000 });
    }
    catch {
        logger.warn(" Failed to fetch latest from origin");
        return;
    }
    // Merge default into feature branch
    try {
        git(["merge", `origin/${defaultBranch}`, "--no-edit"], { cwd, timeout: 30_000 });
        logger.info(` Synced with origin/${defaultBranch}`);
    }
    catch {
        // Merge conflict — abort and warn
        try {
            git(["merge", "--abort"], { cwd });
        }
        catch { /* ignore */ }
        logger.warn(` Merge conflict with origin/${defaultBranch} — skipping sync`);
    }
}
142
/**
 * Stages everything and creates an unsigned commit with the given message.
 * Returns { success, hash, message }; success is false when the worktree
 * has nothing to commit.
 */
export function commitAll(message, cwd) {
    const pendingChanges = git(["status", "--porcelain"], { cwd });
    if (pendingChanges === "") {
        return { success: false, hash: "", message: "No changes to commit" };
    }
    git(["add", "."], { cwd });
    git(["commit", "--no-gpg-sign", "-m", message], { cwd });
    const shortHash = git(["rev-parse", "HEAD"], { cwd }).slice(0, 7);
    logger.info(` Committed: ${shortHash} ${message}`);
    return { success: true, hash: shortHash, message };
}

/** Pushes the current branch to origin, setting upstream (2-minute timeout). */
export function pushBranch(cwd) {
    git(["push", "-u", "origin", "HEAD"], { cwd, timeout: 120_000 });
    logger.info(" Pushed to origin");
}
158
/**
 * Lists files changed on HEAD relative to origin/<baseBranch>
 * (three-dot merge-base diff). Returns [] when the diff fails or is empty.
 */
export function getChangedFiles(baseBranch, cwd) {
    try {
        const names = git(["diff", "--name-only", `origin/${baseBranch}...HEAD`], { cwd });
        if (names === "")
            return [];
        return names.split("\n").filter(Boolean);
    }
    catch {
        return [];
    }
}

/** Full unified diff of HEAD against origin/<baseBranch>; "" on any failure. */
export function getDiff(baseBranch, cwd) {
    try {
        return git(["diff", `origin/${baseBranch}...HEAD`], { cwd });
    }
    catch {
        return "";
    }
}
@@ -1,14 +0,0 @@
1
/** Sets the working directory used for all subsequent `gh` CLI invocations. */
export declare function setGhCwd(cwd: string): void;
/** Fetches an issue's body and title via `gh issue view`; null on failure. */
export declare function getIssue(issueNumber: number): {
    body: string;
    title: string;
} | null;
/** Adds a label to an issue (best-effort; failures are logged, not thrown). */
export declare function setLabel(issueNumber: number, label: string): void;
/** Removes a label from an issue; a missing label is silently ignored. */
export declare function removeLabel(issueNumber: number, label: string): void;
/** Posts a comment on an issue, piping the body via stdin (best-effort). */
export declare function postComment(issueNumber: number, body: string): void;
/** Opens a PR from `head` into `base`; returns its number and URL, or null on failure. */
export declare function createPR(head: string, base: string, title: string, body: string): {
    number: number;
    url: string;
} | null;
/** Replaces any existing `kody:*` lifecycle label with `kody:<phase>`. */
export declare function setLifecycleLabel(issueNumber: number, phase: string): void;
/** Closes an issue with the given reason (default "completed"). */
export declare function closeIssue(issueNumber: number, reason?: "completed" | "not planned"): void;
@@ -1,114 +0,0 @@
1
- import { execFileSync } from "child_process";
2
- import { logger } from "./logger.js";
3
// Shared timeout for every gh CLI call.
const API_TIMEOUT_MS = 30_000;

// Every lifecycle label kody manages; used to clear stale state on an issue.
const LIFECYCLE_LABELS = ["planning", "building", "review", "done", "failed", "waiting", "low", "medium", "high"];

// Working directory for gh invocations (set once by the pipeline).
let _ghCwd;

/** Sets the working directory used for all subsequent `gh` calls. */
export function setGhCwd(cwd) {
    _ghCwd = cwd;
}

/** Prefers a trimmed GH_PAT over GH_TOKEN; may return undefined. */
function ghToken() {
    const pat = process.env.GH_PAT?.trim();
    return pat || process.env.GH_TOKEN;
}

/**
 * Runs `gh <args>` synchronously and returns trimmed stdout.
 * When `options.input` is provided it is piped to stdin (used for `--body-file -`).
 */
function gh(args, options) {
    const token = ghToken();
    const env = { ...process.env };
    if (token) {
        env.GH_TOKEN = token;
    }
    return execFileSync("gh", args, {
        encoding: "utf-8",
        timeout: API_TIMEOUT_MS,
        cwd: _ghCwd,
        env,
        input: options?.input,
        stdio: options?.input ? ["pipe", "pipe", "pipe"] : ["inherit", "pipe", "pipe"],
    }).trim();
}
26
/**
 * Fetches an issue's title and body as JSON via `gh issue view`.
 * Returns null (and logs the error) when the gh call or JSON parse fails.
 */
export function getIssue(issueNumber) {
    try {
        const json = gh(["issue", "view", String(issueNumber), "--json", "body,title"]);
        return JSON.parse(json);
    }
    catch (err) {
        logger.error(` Failed to get issue #${issueNumber}: ${err}`);
        return null;
    }
}
39
/** Adds a label to an issue; failures are logged as warnings, never thrown. */
export function setLabel(issueNumber, label) {
    try {
        gh(["issue", "edit", String(issueNumber), "--add-label", label]);
        logger.info(` Label added: ${label}`);
    }
    catch (err) {
        logger.warn(` Failed to set label ${label}: ${err}`);
    }
}
/** Removes a label from an issue; a missing label is not treated as an error. */
export function removeLabel(issueNumber, label) {
    try {
        gh(["issue", "edit", String(issueNumber), "--remove-label", label]);
    }
    catch {
        // Label may not exist — ignore
    }
}
/** Posts a comment, piping the body through stdin to avoid shell-quoting issues. */
export function postComment(issueNumber, body) {
    try {
        gh(["issue", "comment", String(issueNumber), "--body-file", "-"], { input: body });
        logger.info(` Comment posted on #${issueNumber}`);
    }
    catch (err) {
        logger.warn(` Failed to post comment: ${err}`);
    }
}
65
/**
 * Opens a pull request from `head` into `base`, piping the body via stdin.
 * Returns { number, url } — number is 0 when it cannot be parsed from the
 * URL — or null when `gh pr create` fails.
 */
export function createPR(head, base, title, body) {
    try {
        const url = gh(
            ["pr", "create", "--head", head, "--base", base, "--title", title, "--body-file", "-"],
            { input: body },
        ).trim();
        const numberMatch = url.match(/\/pull\/(\d+)$/);
        const number = numberMatch ? parseInt(numberMatch[1], 10) : 0;
        logger.info(` PR created: ${url}`);
        return { number, url };
    }
    catch (err) {
        logger.error(` Failed to create PR: ${err}`);
        return null;
    }
}
85
/**
 * Makes `kody:<phase>` the only kody lifecycle label on the issue:
 * removes every other `kody:*` label in one gh call, then adds the new one.
 * Unknown phases are rejected with a warning.
 */
export function setLifecycleLabel(issueNumber, phase) {
    if (!LIFECYCLE_LABELS.includes(phase)) {
        logger.warn(` Invalid lifecycle phase: ${phase}`);
        return;
    }
    const staleLabels = LIFECYCLE_LABELS
        .filter((label) => label !== phase)
        .map((label) => `kody:${label}`)
        .join(",");
    if (staleLabels) {
        try {
            gh(["issue", "edit", String(issueNumber), "--remove-label", staleLabels]);
        }
        catch {
            // Labels may not exist — ignore
        }
    }
    // Add the new lifecycle label
    setLabel(issueNumber, `kody:${phase}`);
}

/** Closes an issue via gh with the given reason (default "completed"). */
export function closeIssue(issueNumber, reason = "completed") {
    try {
        gh(["issue", "close", String(issueNumber), "--reason", reason]);
        logger.info(` Issue #${issueNumber} closed: ${reason}`);
    }
    catch (err) {
        logger.warn(` Failed to close issue: ${err}`);
    }
}
@@ -1 +0,0 @@
1
/** Creates (if needed) and returns the `.tasks/<taskId>` directory under the current working directory. */
export declare function ensureTaskDir(taskId: string): string;
@@ -1,9 +0,0 @@
1
- import * as fs from "fs";
2
- import * as path from "path";
3
/**
 * Ensures the per-task working directory `.tasks/<taskId>` exists under the
 * current working directory and returns its path.
 * Fix: uses a single recursive mkdir (which is idempotent) instead of the
 * previous exists-then-create pair, removing a needless TOCTOU race.
 */
export function ensureTaskDir(taskId) {
    const taskDir = path.join(process.cwd(), ".tasks", taskId);
    fs.mkdirSync(taskDir, { recursive: true });
    return taskDir;
}
@@ -1,2 +0,0 @@
1
import type { PipelineContext } from "../types.js";
/** Best-effort extraction of project conventions and architecture into `.kody/memory`; never throws. */
export declare function autoLearn(ctx: PipelineContext): void;
@@ -1,169 +0,0 @@
1
- import * as fs from "fs";
2
- import * as path from "path";
3
- import { logger } from "../logger.js";
4
/**
 * Strips ANSI escape sequences from a string.
 * Generalized from SGR-only ("\x1b[...m" color codes) to any CSI sequence
 * (final byte in @-~), so cursor-movement and erase-line codes that test
 * runners emit into captured output are removed as well.
 */
function stripAnsi(str) {
    return str.replace(/\x1b\[[0-?]*[ -\/]*[@-~]/g, "");
}
7
/**
 * Best-effort post-task learning. Scans the task's artifacts for convention
 * signals and appends anything found to .kody/memory/conventions.md:
 *   - verify.md  -> test/lint/format tooling (ANSI codes stripped first)
 *   - review.md  -> import style, barrel exports, timezone concerns
 *   - task.json  -> directories the task touched (its "scope" array)
 * Finally triggers one-time architecture detection. The whole body is wrapped
 * in a broad try/catch so learning can never fail the pipeline.
 */
export function autoLearn(ctx) {
    try {
        const memoryDir = path.join(ctx.projectDir, ".kody", "memory");
        if (!fs.existsSync(memoryDir)) {
            fs.mkdirSync(memoryDir, { recursive: true });
        }
        const learnings = [];
        const timestamp = new Date().toISOString().slice(0, 10); // YYYY-MM-DD
        // Extract from verify.md (strip ANSI codes first)
        const verifyPath = path.join(ctx.taskDir, "verify.md");
        if (fs.existsSync(verifyPath)) {
            const verify = stripAnsi(fs.readFileSync(verifyPath, "utf-8"));
            if (/vitest/i.test(verify))
                learnings.push("- Uses vitest for testing");
            if (/jest/i.test(verify))
                learnings.push("- Uses jest for testing");
            if (/eslint/i.test(verify))
                learnings.push("- Uses eslint for linting");
            if (/prettier/i.test(verify))
                learnings.push("- Uses prettier for formatting");
            if (/tsc\b/i.test(verify))
                learnings.push("- Uses TypeScript (tsc)");
            if (/jsdom/i.test(verify))
                learnings.push("- Test environment: jsdom");
            // NOTE(review): /node/i also matches "node_modules" etc. — this is a broad heuristic
            if (/node/i.test(verify) && /environment/i.test(verify))
                learnings.push("- Test environment: node");
        }
        // Extract from review.md
        const reviewPath = path.join(ctx.taskDir, "review.md");
        if (fs.existsSync(reviewPath)) {
            const review = fs.readFileSync(reviewPath, "utf-8");
            if (/\.js extension/i.test(review))
                learnings.push("- Imports use .js extensions (ESM)");
            if (/barrel export/i.test(review))
                learnings.push("- Uses barrel exports (index.ts)");
            if (/timezone/i.test(review))
                learnings.push("- Timezone handling is a concern in this codebase");
            if (/UTC/i.test(review))
                learnings.push("- Date operations should consider UTC vs local time");
        }
        // Extract from task.json
        const taskJsonPath = path.join(ctx.taskDir, "task.json");
        if (fs.existsSync(taskJsonPath)) {
            try {
                const raw = stripAnsi(fs.readFileSync(taskJsonPath, "utf-8"));
                // Agent output may wrap the JSON in a ```json fence — remove it before parsing
                const cleaned = raw.replace(/^```json\s*\n?/m, "").replace(/\n?```\s*$/m, "");
                const task = JSON.parse(cleaned);
                if (task.scope && Array.isArray(task.scope)) {
                    // Reduce scoped file paths to their parent directories, deduplicated
                    const dirs = [...new Set(task.scope.map((s) => s.split("/").slice(0, -1).join("/")).filter(Boolean))];
                    if (dirs.length > 0)
                        learnings.push(`- Active directories: ${dirs.join(", ")}`);
                }
            }
            catch {
                // Ignore
            }
        }
        if (learnings.length > 0) {
            const conventionsPath = path.join(memoryDir, "conventions.md");
            const entry = `\n## Learned ${timestamp} (task: ${ctx.taskId})\n${learnings.join("\n")}\n`;
            fs.appendFileSync(conventionsPath, entry);
            logger.info(`Auto-learned ${learnings.length} convention(s)`);
        }
        // Auto-detect architecture
        autoLearnArchitecture(ctx.projectDir, memoryDir, timestamp);
    }
    catch {
        // Auto-learn is best-effort — don't fail the pipeline
    }
}
77
/**
 * One-shot architecture detection. Writes .kody/memory/architecture.md from
 * package.json dependencies, lockfile presence, and directory layout.
 * Runs only when the file does not already exist, so human (or earlier
 * automated) edits are never clobbered.
 */
function autoLearnArchitecture(projectDir, memoryDir, timestamp) {
    const archPath = path.join(memoryDir, "architecture.md");
    // Only auto-detect if architecture.md doesn't exist yet
    if (fs.existsSync(archPath))
        return;
    const detected = [];
    // Detect framework from package.json
    const pkgPath = path.join(projectDir, "package.json");
    if (fs.existsSync(pkgPath)) {
        try {
            const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
            // deps and devDeps are merged — we only care about presence, not placement
            const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
            // Frameworks (first match wins: next implies react, so check next first)
            if (allDeps.next)
                detected.push(`- Framework: Next.js ${allDeps.next}`);
            else if (allDeps.react)
                detected.push(`- Framework: React ${allDeps.react}`);
            else if (allDeps.express)
                detected.push(`- Framework: Express ${allDeps.express}`);
            else if (allDeps.fastify)
                detected.push(`- Framework: Fastify ${allDeps.fastify}`);
            // Language
            if (allDeps.typescript)
                detected.push(`- Language: TypeScript ${allDeps.typescript}`);
            // Testing
            if (allDeps.vitest)
                detected.push(`- Testing: vitest ${allDeps.vitest}`);
            else if (allDeps.jest)
                detected.push(`- Testing: jest ${allDeps.jest}`);
            // Linting
            if (allDeps.eslint)
                detected.push(`- Linting: eslint ${allDeps.eslint}`);
            // Database
            if (allDeps.prisma || allDeps["@prisma/client"])
                detected.push("- Database: Prisma ORM");
            if (allDeps.drizzle || allDeps["drizzle-orm"])
                detected.push("- Database: Drizzle ORM");
            if (allDeps.pg || allDeps.postgres)
                detected.push("- Database: PostgreSQL");
            // CMS
            if (allDeps.payload || allDeps["@payloadcms/next"])
                detected.push(`- CMS: Payload CMS`);
            // Module type
            if (pkg.type === "module")
                detected.push("- Module system: ESM");
            else
                detected.push("- Module system: CommonJS");
            // Package manager (inferred from which lockfile is present)
            if (fs.existsSync(path.join(projectDir, "pnpm-lock.yaml")))
                detected.push("- Package manager: pnpm");
            else if (fs.existsSync(path.join(projectDir, "yarn.lock")))
                detected.push("- Package manager: yarn");
            else if (fs.existsSync(path.join(projectDir, "package-lock.json")))
                detected.push("- Package manager: npm");
        }
        catch {
            // Ignore parse errors
        }
    }
    // Detect directory structure (top-level, skipping dotdirs and node_modules)
    const topDirs = [];
    try {
        const entries = fs.readdirSync(projectDir, { withFileTypes: true });
        for (const entry of entries) {
            if (entry.isDirectory() && !entry.name.startsWith(".") && entry.name !== "node_modules") {
                topDirs.push(entry.name);
            }
        }
        if (topDirs.length > 0)
            detected.push(`- Top-level directories: ${topDirs.join(", ")}`);
    }
    catch {
        // Ignore
    }
    // Detect src structure
    const srcDir = path.join(projectDir, "src");
    if (fs.existsSync(srcDir)) {
        try {
            const srcEntries = fs.readdirSync(srcDir, { withFileTypes: true });
            const srcDirs = srcEntries.filter((e) => e.isDirectory()).map((e) => e.name);
            if (srcDirs.length > 0)
                detected.push(`- src/ structure: ${srcDirs.join(", ")}`);
        }
        catch {
            // Ignore
        }
    }
    if (detected.length > 0) {
        const content = `# Architecture (auto-detected ${timestamp})\n\n## Overview\n${detected.join("\n")}\n`;
        fs.writeFileSync(archPath, content);
        logger.info(`Auto-detected architecture (${detected.length} items)`);
    }
}
package/dist/logger.d.ts DELETED
@@ -1,14 +0,0 @@
1
/** Level-filtered console logger; threshold comes from LOG_LEVEL (default "info"). */
export declare const logger: {
    debug: (msg: string) => void;
    info: (msg: string) => void;
    warn: (msg: string) => void;
    error: (msg: string) => void;
};
/** Returns a logger whose messages are prefixed with `[stage]` or `[stage:taskId]`. */
export declare function createStageLogger(stage: string, taskId?: string): {
    debug: (msg: string) => void;
    info: (msg: string) => void;
    warn: (msg: string) => void;
    error: (msg: string) => void;
};
/** Opens a collapsible `::group::` in GitHub Actions logs (no-op elsewhere). */
export declare function ciGroup(title: string): void;
/** Closes the current `::group::` in GitHub Actions logs (no-op elsewhere). */
export declare function ciGroupEnd(): void;
package/dist/logger.js DELETED
@@ -1,51 +0,0 @@
1
- const isCI = !!process.env.GITHUB_ACTIONS;
2
- const LEVELS = {
3
- debug: 0,
4
- info: 1,
5
- warn: 2,
6
- error: 3,
7
- };
8
- function getLevel() {
9
- const env = process.env.LOG_LEVEL;
10
- return LEVELS[env ?? "info"] ?? LEVELS.info;
11
- }
12
- function timestamp() {
13
- return new Date().toISOString().slice(11, 19);
14
- }
15
- function log(level, msg) {
16
- if (LEVELS[level] < getLevel())
17
- return;
18
- const prefix = `[${timestamp()}] ${level.toUpperCase().padEnd(5)}`;
19
- if (level === "error") {
20
- console.error(`${prefix} ${msg}`);
21
- }
22
- else if (level === "warn") {
23
- console.warn(`${prefix} ${msg}`);
24
- }
25
- else {
26
- console.log(`${prefix} ${msg}`);
27
- }
28
- }
29
- export const logger = {
30
- debug: (msg) => log("debug", msg),
31
- info: (msg) => log("info", msg),
32
- warn: (msg) => log("warn", msg),
33
- error: (msg) => log("error", msg),
34
- };
35
- export function createStageLogger(stage, taskId) {
36
- const prefix = taskId ? `[${stage}:${taskId}]` : `[${stage}]`;
37
- return {
38
- debug: (msg) => logger.debug(`${prefix} ${msg}`),
39
- info: (msg) => logger.info(`${prefix} ${msg}`),
40
- warn: (msg) => logger.warn(`${prefix} ${msg}`),
41
- error: (msg) => logger.error(`${prefix} ${msg}`),
42
- };
43
- }
44
- export function ciGroup(title) {
45
- if (isCI)
46
- process.stdout.write(`::group::${title}\n`);
47
- }
48
- export function ciGroupEnd() {
49
- if (isCI)
50
- process.stdout.write(`::endgroup::\n`);
51
- }
package/dist/memory.d.ts DELETED
@@ -1 +0,0 @@
1
/** Concatenates all `.kody/memory/*.md` files into one "# Project Memory" document; "" when none exist. */
export declare function readProjectMemory(projectDir: string): string;
package/dist/memory.js DELETED
@@ -1,20 +0,0 @@
1
- import * as fs from "fs";
2
- import * as path from "path";
3
/**
 * Builds a single markdown document from every .md file in
 * `<projectDir>/.kody/memory`: one "## <name>" section per non-empty file,
 * in sorted filename order. Returns "" when the directory is missing or
 * nothing has content.
 * Fix: the section name now strips only the trailing ".md" — the previous
 * `file.replace(".md", "")` removed the FIRST occurrence anywhere in the
 * name, mangling files like "my.mdfile.md" into "myfile.md".
 */
export function readProjectMemory(projectDir) {
    const memoryDir = path.join(projectDir, ".kody", "memory");
    if (!fs.existsSync(memoryDir))
        return "";
    const files = fs.readdirSync(memoryDir).filter((f) => f.endsWith(".md")).sort();
    const sections = [];
    for (const file of files) {
        const content = fs.readFileSync(path.join(memoryDir, file), "utf-8").trim();
        if (content) {
            // slice(0, -3) drops exactly the ".md" suffix guaranteed by the filter
            sections.push(`## ${file.slice(0, -3)}\n${content}`);
        }
    }
    if (sections.length === 0)
        return "";
    return `# Project Memory\n\n${sections.join("\n\n")}\n`;
}
@@ -1,9 +0,0 @@
1
import type { AgentRunner } from "./types.js";
/** How a pipeline stage failure should be handled downstream. */
export type FailureClassification = "fixable" | "infrastructure" | "pre-existing" | "retry" | "abort";
/** Parsed output of the diagnosis agent. */
export interface DiagnosisResult {
    /** One of the five failure categories. */
    classification: FailureClassification;
    /** One-sentence explanation of what went wrong. */
    reason: string;
    /** Fix instructions (fixable) or required user action (infrastructure). */
    resolution: string;
}
/** Asks the agent runner to classify a stage failure; falls back to "fixable" when diagnosis itself fails. */
export declare function diagnoseFailure(stageName: string, errorOutput: string, modifiedFiles: string[], runner: AgentRunner, model: string): Promise<DiagnosisResult>;
/** Files changed in the last commit (`git diff HEAD~1`); [] on any git error. */
export declare function getModifiedFiles(projectDir: string): string[];
package/dist/observer.js DELETED
@@ -1,80 +0,0 @@
1
- import { execFileSync } from "child_process";
2
- import { logger } from "./logger.js";
3
// System prompt for the failure-diagnosis agent. The concrete error context is
// appended after the trailing "Error context:" line; the agent must reply with
// bare JSON (no markdown fences) matching the schema shown in the prompt.
const DIAGNOSIS_PROMPT = `You are a pipeline failure diagnosis agent. Analyze the error and classify it.

Output ONLY valid JSON. No markdown fences. No explanation.

{
  "classification": "fixable | infrastructure | pre-existing | retry | abort",
  "reason": "One sentence explaining what went wrong",
  "resolution": "Specific instructions for fixing (if fixable) or what the user needs to do (if infrastructure)"
}

Classification rules:
- fixable: Error is in code that was just written/modified. The resolution should describe exactly what to change.
- infrastructure: External dependency not available (database, API, service). The resolution should say what the user needs to set up.
- pre-existing: Error exists in code that was NOT modified. Safe to skip. The resolution should note which files.
- retry: Transient error (network timeout, rate limit, flaky test). Worth retrying once.
- abort: Unrecoverable error (permission denied, corrupted state, out of disk). Pipeline should stop.

Error context:
`;
22
/**
 * Asks the agent runner to classify a pipeline stage failure.
 * Sends the last 2000 chars of the error output plus the modified-file list
 * and expects bare JSON ({classification, reason, resolution}).
 * Any runner failure, parse error, or unknown classification degrades to the
 * safe default "fixable" so an autofix attempt can follow.
 */
export async function diagnoseFailure(stageName, errorOutput, modifiedFiles, runner, model) {
    const fileSummary = modifiedFiles.length > 0
        ? `Files modified by build stage:\n${modifiedFiles.map((f) => `- ${f}`).join("\n")}`
        : "No files were modified (build may not have run yet).";
    const context = [
        `Stage: ${stageName}`,
        ``,
        `Error output:`,
        errorOutput.slice(-2000), // keep only the tail — that's where the actual failure usually is
        ``,
        fileSummary,
    ].join("\n");
    try {
        // 30s timeout — diagnosis should be fast
        const result = await runner.run("diagnosis", DIAGNOSIS_PROMPT + context, model, 30_000, "");
        if (result.outcome === "completed" && result.output) {
            // The agent may still wrap its JSON in a ```json fence despite instructions
            const cleaned = result.output
                .replace(/^```json\s*\n?/m, "")
                .replace(/\n?```\s*$/m, "")
                .trim();
            const parsed = JSON.parse(cleaned);
            const validClassifications = [
                "fixable", "infrastructure", "pre-existing", "retry", "abort",
            ];
            if (validClassifications.includes(parsed.classification)) {
                logger.info(` Diagnosis: ${parsed.classification} — ${parsed.reason}`);
                return {
                    classification: parsed.classification,
                    reason: parsed.reason ?? "Unknown reason",
                    resolution: parsed.resolution ?? "",
                };
            }
        }
    }
    catch (err) {
        logger.warn(` Diagnosis error: ${err instanceof Error ? err.message : err}`);
    }
    // Default: assume fixable (safest — an autofix attempt will follow)
    logger.warn(" Diagnosis failed — defaulting to fixable");
    return {
        classification: "fixable",
        reason: "Could not diagnose failure",
        resolution: errorOutput.slice(-500),
    };
}
67
/**
 * Lists files changed by the most recent commit (diff against HEAD~1).
 * Returns [] when git fails for any reason (not a repo, initial commit,
 * timeout, etc.).
 */
export function getModifiedFiles(projectDir) {
    try {
        const stdout = execFileSync("git", ["diff", "--name-only", "HEAD~1"], {
            encoding: "utf-8",
            cwd: projectDir,
            timeout: 5000,
            stdio: ["pipe", "pipe", "pipe"],
        }).trim();
        return stdout === "" ? [] : stdout.split("\n").filter(Boolean);
    }
    catch {
        return [];
    }
}
@@ -1,3 +0,0 @@
1
import type { StageDefinition } from "../types.js";
/** Returns the subset of stages applicable at the given complexity level — presumably a whitelist per level; confirm against pipeline/complexity.js. */
export declare function filterByComplexity(stages: StageDefinition[], complexity: string): StageDefinition[];
/** True when `value` is a recognized complexity level. */
export declare function isValidComplexity(value: string): boolean;