@posthog/agent 1.29.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +57 -87
- package/dist/index.js +916 -2203
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
- package/src/acp-extensions.ts +0 -37
- package/src/adapters/claude/claude.ts +515 -107
- package/src/adapters/claude/tools.ts +178 -101
- package/src/adapters/connection.ts +95 -0
- package/src/agent.ts +50 -184
- package/src/file-manager.ts +1 -34
- package/src/git-manager.ts +2 -20
- package/src/posthog-api.ts +4 -4
- package/src/tools/registry.ts +5 -0
- package/src/tools/types.ts +6 -0
- package/src/types.ts +5 -25
- package/src/utils/gateway.ts +15 -0
- package/src/worktree-manager.ts +92 -46
- package/dist/templates/plan-template.md +0 -41
- package/src/agents/execution.ts +0 -37
- package/src/agents/planning.ts +0 -60
- package/src/agents/research.ts +0 -160
- package/src/prompt-builder.ts +0 -497
- package/src/template-manager.ts +0 -240
- package/src/templates/plan-template.md +0 -41
- package/src/workflow/config.ts +0 -53
- package/src/workflow/steps/build.ts +0 -135
- package/src/workflow/steps/finalize.ts +0 -241
- package/src/workflow/steps/plan.ts +0 -167
- package/src/workflow/steps/research.ts +0 -223
- package/src/workflow/types.ts +0 -62
- package/src/workflow/utils.ts +0 -53
package/src/template-manager.ts
DELETED
@@ -1,240 +0,0 @@
import { existsSync, promises as fs } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { Logger } from "./utils/logger";

const logger = new Logger({ prefix: "[TemplateManager]" });

export interface TemplateVariables {
  task_id: string;
  task_title: string;
  task_description?: string;
  date: string;
  repository?: string;
  [key: string]: string | undefined;
}

export class TemplateManager {
  private templatesDir: string;

  constructor() {
    const __filename = fileURLToPath(import.meta.url);
    const __dirname = dirname(__filename);

    // Exhaustive list of possible template locations
    const candidateDirs = [
      // Standard build output (dist/src/template-manager.js -> dist/templates)
      join(__dirname, "..", "templates"),

      // If preserveModules creates nested structure (dist/src/template-manager.js -> dist/src/templates)
      join(__dirname, "templates"),

      // Development scenarios (src/template-manager.ts -> src/templates)
      join(__dirname, "..", "..", "src", "templates"),

      // Package root templates directory
      join(__dirname, "..", "..", "templates"),

      // When node_modules symlink or installed (node_modules/@posthog/agent/dist/src/... -> node_modules/@posthog/agent/dist/templates)
      join(__dirname, "..", "..", "dist", "templates"),

      // When consumed from node_modules deep in tree
      join(__dirname, "..", "..", "..", "templates"),
      join(__dirname, "..", "..", "..", "dist", "templates"),
      join(__dirname, "..", "..", "..", "src", "templates"),

      // When bundled by Vite/Webpack (e.g., .vite/build/index.js -> node_modules/@posthog/agent/dist/templates)
      // Try to find node_modules from current location
      join(
        __dirname,
        "..",
        "node_modules",
        "@posthog",
        "agent",
        "dist",
        "templates",
      ),
      join(
        __dirname,
        "..",
        "..",
        "node_modules",
        "@posthog",
        "agent",
        "dist",
        "templates",
      ),
      join(
        __dirname,
        "..",
        "..",
        "..",
        "node_modules",
        "@posthog",
        "agent",
        "dist",
        "templates",
      ),
    ];

    const resolvedDir = candidateDirs.find((dir) => existsSync(dir));

    if (!resolvedDir) {
      logger.error("Could not find templates directory.");
      logger.error(`Current file: ${__filename}`);
      logger.error(`Current dir: ${__dirname}`);
      logger.error(
        `Tried: ${candidateDirs.map((d) => `\n - ${d} (exists: ${existsSync(d)})`).join("")}`,
      );
    }

    this.templatesDir = resolvedDir ?? candidateDirs[0];
  }

  private async loadTemplate(templateName: string): Promise<string> {
    try {
      const templatePath = join(this.templatesDir, templateName);
      return await fs.readFile(templatePath, "utf8");
    } catch (error) {
      throw new Error(
        `Failed to load template ${templateName} from ${this.templatesDir}: ${error}`,
      );
    }
  }

  private substituteVariables(
    template: string,
    variables: TemplateVariables,
  ): string {
    let result = template;

    for (const [key, value] of Object.entries(variables)) {
      if (value !== undefined) {
        const placeholder = new RegExp(`{{${key}}}`, "g");
        result = result.replace(placeholder, value);
      }
    }

    result = result.replace(/{{[^}]+}}/g, "[PLACEHOLDER]");

    return result;
  }

  async generatePlan(variables: TemplateVariables): Promise<string> {
    const template = await this.loadTemplate("plan-template.md");
    return this.substituteVariables(template, {
      ...variables,
      date: variables.date || new Date().toISOString().split("T")[0],
    });
  }

  async generateCustomFile(
    templateName: string,
    variables: TemplateVariables,
  ): Promise<string> {
    const template = await this.loadTemplate(templateName);
    return this.substituteVariables(template, {
      ...variables,
      date: variables.date || new Date().toISOString().split("T")[0],
    });
  }

  async createTaskStructure(
    taskId: string,
    taskTitle: string,
    options?: {
      includePlan?: boolean;
      additionalFiles?: Array<{
        name: string;
        template?: string;
        content?: string;
      }>;
    },
  ): Promise<
    Array<{
      name: string;
      content: string;
      type: "plan" | "context" | "reference" | "output";
    }>
  > {
    const files: Array<{
      name: string;
      content: string;
      type: "plan" | "context" | "reference" | "output";
    }> = [];

    const variables: TemplateVariables = {
      task_id: taskId,
      task_title: taskTitle,
      date: new Date().toISOString().split("T")[0],
    };

    // Generate plan file if requested
    if (options?.includePlan !== false) {
      const planContent = await this.generatePlan(variables);
      files.push({
        name: "plan.md",
        content: planContent,
        type: "plan",
      });
    }

    if (options?.additionalFiles) {
      for (const file of options.additionalFiles) {
        let content: string;

        if (file.template) {
          content = await this.generateCustomFile(file.template, variables);
        } else if (file.content) {
          content = this.substituteVariables(file.content, variables);
        } else {
          content = `# ${file.name}\n\nPlaceholder content for ${file.name}`;
        }

        files.push({
          name: file.name,
          content,
          type: file.name.includes("context") ? "context" : "reference",
        });
      }
    }

    return files;
  }

  generatePostHogReadme(): string {
    return `# PostHog Task Files

This directory contains task-related files generated by the PostHog Agent.

## Structure

Each task has its own subdirectory: \`.posthog/{task-id}/\`

### Common Files

- **plan.md** - Implementation plan generated during planning phase
- **Supporting files** - Any additional files added for task context
- **artifacts/** - Generated files, outputs, and temporary artifacts

### Usage

These files are:
- Version controlled alongside your code
- Used by the PostHog Agent for context
- Available for review in pull requests
- Organized by task ID for easy reference

### Gitignore

Customize \`.posthog/.gitignore\` to control which files are committed:
- Include plans and documentation by default
- Exclude temporary files and sensitive data
- Customize based on your team's needs

---

*Generated by PostHog Agent*
`;
  }
}
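For orientation, a minimal usage sketch of the removed TemplateManager follows; the call, task values, and file names are illustrative and not taken from the package.

```ts
// Hypothetical sketch, not from the package: how the removed TemplateManager was typically driven.
import { TemplateManager } from "./template-manager";

const templates = new TemplateManager();

// Renders plan.md from plan-template.md plus extra files with {{placeholders}} substituted;
// any placeholder without a matching variable becomes "[PLACEHOLDER]".
const files = await templates.createTaskStructure("task_123", "Add billing webhook", {
  includePlan: true,
  additionalFiles: [{ name: "context.md", content: "# Context for {{task_title}}" }],
});
// files[0] → { name: "plan.md", type: "plan", content: "# Implementation Plan: Add billing webhook\n…" }
// files[1] → { name: "context.md", type: "context", content: "# Context for Add billing webhook" }
```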
package/src/templates/plan-template.md
DELETED
@@ -1,41 +0,0 @@
# Implementation Plan: {{task_title}}

**Task ID:** {{task_id}}
**Generated:** {{date}}

## Summary

Brief description of what will be implemented and the overall approach.

## Implementation Steps

### 1. Analysis
- [ ] Identify relevant files and components
- [ ] Review existing patterns and constraints

### 2. Changes Required
- [ ] Files to create/modify
- [ ] Dependencies to add/update

### 3. Implementation
- [ ] Core functionality changes
- [ ] Tests and validation
- [ ] Documentation updates

## File Changes

### New Files
```
path/to/new/file.ts - Purpose
```

### Modified Files
```
path/to/existing/file.ts - Changes needed
```

## Considerations

- Key architectural decisions
- Potential risks and mitigation
- Testing approach
package/src/workflow/config.ts
DELETED
@@ -1,53 +0,0 @@
import { buildStep } from "./steps/build.js";
import { finalizeStep } from "./steps/finalize.js";
import { planStep } from "./steps/plan.js";
import { researchStep } from "./steps/research.js";
import type { WorkflowDefinition } from "./types.js";

const MODELS = {
  SONNET: "claude-sonnet-4-5",
  HAIKU: "claude-haiku-4-5",
};

export const TASK_WORKFLOW: WorkflowDefinition = [
  {
    id: "research",
    name: "Research",
    agent: "research",
    model: MODELS.HAIKU,
    permissionMode: "plan",
    commit: true,
    push: true,
    run: researchStep,
  },
  {
    id: "plan",
    name: "Plan",
    agent: "planning",
    model: MODELS.SONNET,
    permissionMode: "plan",
    commit: true,
    push: true,
    run: planStep,
  },
  {
    id: "build",
    name: "Build",
    agent: "execution",
    model: MODELS.SONNET,
    permissionMode: "acceptEdits",
    commit: true,
    push: true,
    run: buildStep,
  },
  {
    id: "finalize",
    name: "Finalize",
    agent: "system", // not used
    model: MODELS.HAIKU, // not used
    permissionMode: "plan", // not used
    commit: true,
    push: true,
    run: finalizeStep,
  },
];
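The executor that consumed TASK_WORKFLOW lived in the also-removed workflow types/agent code and is not shown in this diff; the loop below is only an assumed sketch of how the step definitions compose, with WorkflowContext as a stand-in name.

```ts
// Assumed sketch of a runner over the removed TASK_WORKFLOW; not the package's actual executor.
import { TASK_WORKFLOW } from "./config.js";
import type { WorkflowContext } from "./types.js"; // removed module; name and shape assumed

async function runTaskWorkflow(context: WorkflowContext): Promise<void> {
  for (const step of TASK_WORKFLOW) {
    // Each step runner receives its own definition plus the shared context,
    // writes details into context.stepResults[step.id], and returns a status.
    const result = await step.run({ step, context });
    if (result.status === "skipped") continue;
  }
}
```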
package/src/workflow/steps/build.ts
DELETED
@@ -1,135 +0,0 @@
import { query } from "@anthropic-ai/claude-agent-sdk";
import { POSTHOG_NOTIFICATIONS } from "../../acp-extensions.js";
import { EXECUTION_SYSTEM_PROMPT } from "../../agents/execution.js";
import { TodoManager } from "../../todo-manager.js";
import { PermissionMode } from "../../types.js";
import type { WorkflowStepRunner } from "../types.js";

export const buildStep: WorkflowStepRunner = async ({ step, context }) => {
  const {
    task,
    cwd,
    options,
    logger,
    promptBuilder,
    sessionId,
    mcpServers,
    gitManager,
    sendNotification,
  } = context;

  const stepLogger = logger.child("BuildStep");

  const latestRun = task.latest_run;
  const prExists =
    latestRun?.output && typeof latestRun.output === "object"
      ? (latestRun.output as Record<string, unknown>).pr_url
      : null;

  if (prExists) {
    stepLogger.info("PR already exists, skipping build phase", {
      taskId: task.id,
    });
    return { status: "skipped" };
  }

  stepLogger.info("Starting build phase", { taskId: task.id });
  await sendNotification(POSTHOG_NOTIFICATIONS.PHASE_START, {
    sessionId,
    phase: "build",
  });

  const executionPrompt = await promptBuilder.buildExecutionPrompt(task, cwd);
  const fullPrompt = `${EXECUTION_SYSTEM_PROMPT}\n\n${executionPrompt}`;

  const configuredPermissionMode =
    options.permissionMode ??
    (typeof step.permissionMode === "string"
      ? (step.permissionMode as PermissionMode)
      : step.permissionMode) ??
    PermissionMode.ACCEPT_EDITS;

  const baseOptions: Record<string, unknown> = {
    model: step.model,
    cwd,
    permissionMode: configuredPermissionMode,
    settingSources: ["local"],
    mcpServers,
    // Allow all tools for build phase - full read/write access needed for implementation
    allowedTools: [
      "Task",
      "Bash",
      "BashOutput",
      "KillBash",
      "Edit",
      "Read",
      "Write",
      "Glob",
      "Grep",
      "NotebookEdit",
      "WebFetch",
      "WebSearch",
      "ListMcpResources",
      "ReadMcpResource",
      "TodoWrite",
    ],
  };

  // Add fine-grained permission hook if provided
  if (options.canUseTool) {
    baseOptions.canUseTool = options.canUseTool;
  }

  const response = query({
    prompt: fullPrompt,
    options: { ...baseOptions, ...(options.queryOverrides || {}) },
  });

  // Track commits made during Claude Code execution
  const commitTracker = await gitManager.trackCommitsDuring();

  // Track todos from TodoWrite tool calls
  const todoManager = new TodoManager(context.fileManager, stepLogger);

  try {
    for await (const message of response) {
      const todoList = await todoManager.checkAndPersistFromMessage(
        message,
        task.id,
      );
      if (todoList) {
        await sendNotification(POSTHOG_NOTIFICATIONS.ARTIFACT, {
          sessionId,
          kind: "todos",
          content: todoList,
        });
      }
    }
  } catch (error) {
    stepLogger.error("Error during build step query", error);
    throw error;
  }

  // Finalize: commit any remaining changes and optionally push
  const { commitCreated, pushedBranch } = await commitTracker.finalize({
    commitMessage: `Implementation for ${task.title}`,
    push: step.push,
  });

  context.stepResults[step.id] = { commitCreated };

  if (!commitCreated) {
    stepLogger.warn("No changes to commit in build phase", { taskId: task.id });
  } else {
    stepLogger.info("Build commits finalized", {
      taskId: task.id,
      pushedBranch,
    });
  }

  await sendNotification(POSTHOG_NOTIFICATIONS.PHASE_COMPLETE, {
    sessionId,
    phase: "build",
  });
  return { status: "completed" };
};
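The WorkflowStepRunner context type was defined in the removed workflow/types.ts, which is not part of this diff; the interface below is only a loose reconstruction inferred from what buildStep destructures and mutates, and every field type is a guess.

```ts
// Assumed context shape, reverse-engineered from buildStep's destructuring; types are deliberately loose.
type LogFn = (message: string, meta?: unknown) => void;

interface AssumedStepContext {
  task: { id: string; title: string; latest_run?: { output?: unknown } };
  cwd: string;
  options: {
    permissionMode?: unknown;
    canUseTool?: unknown;
    queryOverrides?: Record<string, unknown>;
  };
  logger: { child(name: string): { info: LogFn; warn: LogFn; error: LogFn } };
  promptBuilder: { buildExecutionPrompt(task: unknown, cwd: string): Promise<string> };
  sessionId: string;
  mcpServers: unknown;
  gitManager: {
    trackCommitsDuring(): Promise<{
      finalize(opts: { commitMessage: string; push?: boolean }): Promise<{
        commitCreated: boolean;
        pushedBranch?: string;
      }>;
    }>;
  };
  fileManager: unknown;
  sendNotification: (event: string, payload: Record<string, unknown>) => Promise<void>;
  stepResults: Record<string, unknown>;
}
```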
package/src/workflow/steps/finalize.ts
DELETED
@@ -1,241 +0,0 @@
import type { LocalArtifact } from "../../file-manager.js";
import type { Task, TaskRunArtifact } from "../../types.js";
import type { WorkflowStepRunner } from "../types.js";
import { finalizeStepGitActions } from "../utils.js";

const MAX_SNIPPET_LENGTH = 1200;

export const finalizeStep: WorkflowStepRunner = async ({ step, context }) => {
  const { task, logger, fileManager, gitManager, posthogAPI, runId } = context;

  const stepLogger = logger.child("FinalizeStep");
  const artifacts = await fileManager.collectTaskArtifacts(task.id);
  let uploadedArtifacts: TaskRunArtifact[] | undefined;

  if (artifacts.length && posthogAPI && runId) {
    try {
      const payload = artifacts.map((artifact) => ({
        name: artifact.name,
        type: artifact.type,
        content: artifact.content,
        content_type: artifact.contentType,
      }));
      uploadedArtifacts = await posthogAPI.uploadTaskArtifacts(
        task.id,
        runId,
        payload,
      );
      stepLogger.info("Uploaded task artifacts to PostHog", {
        taskId: task.id,
        uploadedCount: uploadedArtifacts.length,
      });
    } catch (error) {
      stepLogger.warn("Failed to upload task artifacts", {
        taskId: task.id,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  } else {
    stepLogger.debug("Skipping artifact upload", {
      hasArtifacts: artifacts.length > 0,
      hasPostHogApi: Boolean(posthogAPI),
      runId,
    });
  }

  const prBody = buildPullRequestBody(task, artifacts, uploadedArtifacts);
  await fileManager.cleanupTaskDirectory(task.id);
  await gitManager.addAllPostHogFiles();

  // Commit the deletion of artifacts
  await finalizeStepGitActions(context, step, {
    commitMessage: `Cleanup task artifacts for ${task.title}`,
    allowEmptyCommit: true,
  });

  context.stepResults[step.id] = {
    prBody,
    uploadedArtifacts,
    artifactCount: artifacts.length,
  };

  return { status: "completed" };
};

function buildPullRequestBody(
  task: Task,
  artifacts: LocalArtifact[],
  uploaded?: TaskRunArtifact[],
): string {
  const lines: string[] = [];
  const taskSlug = (task as unknown as Record<string, unknown>).slug || task.id;

  lines.push("## Task context");
  lines.push(`- **Task**: ${taskSlug}`);
  lines.push(`- **Title**: ${task.title}`);
  lines.push(`- **Origin**: ${task.origin_product}`);

  if (task.description) {
    lines.push("");
    lines.push(`> ${task.description.trim().split("\n").join("\n> ")}`);
  }

  const usedFiles = new Set<string>();

  const contextArtifact = artifacts.find(
    (artifact) => artifact.name === "context.md",
  );
  if (contextArtifact) {
    lines.push("");
    lines.push("### Task prompt");
    lines.push(contextArtifact.content);
    usedFiles.add(contextArtifact.name);
  }

  const researchArtifact = artifacts.find(
    (artifact) => artifact.name === "research.json",
  );
  if (researchArtifact) {
    usedFiles.add(researchArtifact.name);
    const researchSection = formatResearchSection(researchArtifact.content);
    if (researchSection) {
      lines.push("");
      lines.push(researchSection);
    }
  }

  const planArtifact = artifacts.find(
    (artifact) => artifact.name === "plan.md",
  );
  if (planArtifact) {
    lines.push("");
    lines.push("### Implementation plan");
    lines.push(planArtifact.content);
    usedFiles.add(planArtifact.name);
  }

  const todoArtifact = artifacts.find(
    (artifact) => artifact.name === "todos.json",
  );
  if (todoArtifact) {
    const summary = summarizeTodos(todoArtifact.content);
    if (summary) {
      lines.push("");
      lines.push("### Todo list");
      lines.push(summary);
    }
    usedFiles.add(todoArtifact.name);
  }

  const remainingArtifacts = artifacts.filter(
    (artifact) => !usedFiles.has(artifact.name),
  );
  if (remainingArtifacts.length) {
    lines.push("");
    lines.push("### Additional artifacts");
    for (const artifact of remainingArtifacts) {
      lines.push(`#### ${artifact.name}`);
      lines.push(renderCodeFence(artifact.content));
    }
  }

  const artifactList =
    uploaded ??
    artifacts.map((artifact) => ({
      name: artifact.name,
      type: artifact.type,
    }));

  if (artifactList.length) {
    lines.push("");
    lines.push("### Uploaded artifacts");
    for (const artifact of artifactList) {
      const rawStoragePath =
        "storage_path" in artifact
          ? (artifact as Record<string, unknown>).storage_path
          : undefined;
      const storagePath =
        typeof rawStoragePath === "string" ? rawStoragePath : undefined;
      const storage =
        storagePath && storagePath.trim().length > 0
          ? ` – \`${storagePath.trim()}\``
          : "";
      lines.push(`- ${artifact.name} (${artifact.type})${storage}`);
    }
  }

  return lines.join("\n\n");
}

function renderCodeFence(content: string): string {
  const snippet = truncate(content, MAX_SNIPPET_LENGTH);
  return ["```", snippet, "```"].join("\n");
}

function truncate(value: string, maxLength: number): string {
  if (value.length <= maxLength) {
    return value;
  }
  return `${value.slice(0, maxLength)}\n…`;
}

function formatResearchSection(content: string): string | null {
  try {
    const parsed = JSON.parse(content);
    const sections: string[] = [];

    if (parsed.context) {
      sections.push("### Research summary");
      sections.push(parsed.context);
    }

    if (parsed.questions?.length) {
      sections.push("");
      sections.push("### Questions needing answers");
      for (const question of parsed.questions) {
        sections.push(`- ${question.question ?? question}`);
      }
    }

    if (parsed.answers?.length) {
      sections.push("");
      sections.push("### Answers provided");
      for (const answer of parsed.answers) {
        const questionId = answer.questionId
          ? ` (Q: ${answer.questionId})`
          : "";
        sections.push(
          `- ${answer.selectedOption || answer.customInput || "answer"}${questionId}`,
        );
      }
    }

    return sections.length ? sections.join("\n") : null;
  } catch {
    return null;
  }
}

function summarizeTodos(content: string): string | null {
  try {
    const data = JSON.parse(content);
    const total = data?.metadata?.total ?? data?.items?.length;
    const completed =
      data?.metadata?.completed ??
      data?.items?.filter(
        (item: { status?: string }) => item.status === "completed",
      ).length;

    const lines = [`Progress: ${completed}/${total} completed`];

    if (data?.items?.length) {
      for (const item of data.items) {
        lines.push(`- [${item.status}] ${item.content}`);
      }
    }

    return lines.join("\n");
  } catch {
    return null;
  }
}
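The artifact JSON these helpers parse is produced elsewhere in the removed workflow code; the example payloads below are illustrative reconstructions whose field names are read off summarizeTodos and formatResearchSection above, with all values made up.

```ts
// Illustrative payloads matching the shapes parsed above; values are made up.
const exampleTodos = JSON.stringify({
  metadata: { total: 3, completed: 1 },
  items: [
    { status: "completed", content: "Identify relevant files" },
    { status: "in_progress", content: "Implement the change" },
    { status: "pending", content: "Add tests" },
  ],
});
// summarizeTodos(exampleTodos) → "Progress: 1/3 completed\n- [completed] Identify relevant files\n…"

const exampleResearch = JSON.stringify({
  context: "Summary of the investigated area.",
  questions: [{ question: "Which API version should we target?" }],
  answers: [{ questionId: "q1", selectedOption: "v2" }],
});
// formatResearchSection(exampleResearch) renders "### Research summary", the questions, and the answers.
```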