@sireai/optimus 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +16 -0
- package/LICENSE +21 -0
- package/README.md +104 -0
- package/dist/cli/optimus.d.ts +2 -0
- package/dist/cli/optimus.js +2951 -0
- package/dist/cli/optimus.js.map +1 -0
- package/dist/cli/self-update.d.ts +49 -0
- package/dist/cli/self-update.js +264 -0
- package/dist/cli/self-update.js.map +1 -0
- package/dist/config/load-config.d.ts +3 -0
- package/dist/config/load-config.js +321 -0
- package/dist/config/load-config.js.map +1 -0
- package/dist/config/optimus-paths.d.ts +13 -0
- package/dist/config/optimus-paths.js +44 -0
- package/dist/config/optimus-paths.js.map +1 -0
- package/dist/index.d.ts +25 -0
- package/dist/index.js +27 -0
- package/dist/index.js.map +1 -0
- package/dist/integrations/jira/jira-cli.d.ts +1 -0
- package/dist/integrations/jira/jira-cli.js +278 -0
- package/dist/integrations/jira/jira-cli.js.map +1 -0
- package/dist/integrations/jira/jira-client.d.ts +99 -0
- package/dist/integrations/jira/jira-client.js +521 -0
- package/dist/integrations/jira/jira-client.js.map +1 -0
- package/dist/integrations/jira/jira-submit.d.ts +71 -0
- package/dist/integrations/jira/jira-submit.js +351 -0
- package/dist/integrations/jira/jira-submit.js.map +1 -0
- package/dist/problem-solving-core/codex/codex-auth-resolver.d.ts +23 -0
- package/dist/problem-solving-core/codex/codex-auth-resolver.js +136 -0
- package/dist/problem-solving-core/codex/codex-auth-resolver.js.map +1 -0
- package/dist/problem-solving-core/codex/codex-connectivity-checks.d.ts +6 -0
- package/dist/problem-solving-core/codex/codex-connectivity-checks.js +81 -0
- package/dist/problem-solving-core/codex/codex-connectivity-checks.js.map +1 -0
- package/dist/problem-solving-core/codex/codex-failure-classifier.d.ts +2 -0
- package/dist/problem-solving-core/codex/codex-failure-classifier.js +49 -0
- package/dist/problem-solving-core/codex/codex-failure-classifier.js.map +1 -0
- package/dist/problem-solving-core/codex/codex-global-config.d.ts +17 -0
- package/dist/problem-solving-core/codex/codex-global-config.js +100 -0
- package/dist/problem-solving-core/codex/codex-global-config.js.map +1 -0
- package/dist/problem-solving-core/codex/codex-preflight.d.ts +13 -0
- package/dist/problem-solving-core/codex/codex-preflight.js +142 -0
- package/dist/problem-solving-core/codex/codex-preflight.js.map +1 -0
- package/dist/problem-solving-core/codex/codex-provider-profile.d.ts +14 -0
- package/dist/problem-solving-core/codex/codex-provider-profile.js +68 -0
- package/dist/problem-solving-core/codex/codex-provider-profile.js.map +1 -0
- package/dist/problem-solving-core/codex/codex-required-env.d.ts +3 -0
- package/dist/problem-solving-core/codex/codex-required-env.js +21 -0
- package/dist/problem-solving-core/codex/codex-required-env.js.map +1 -0
- package/dist/problem-solving-core/codex/codex-runner.d.ts +37 -0
- package/dist/problem-solving-core/codex/codex-runner.js +926 -0
- package/dist/problem-solving-core/codex/codex-runner.js.map +1 -0
- package/dist/problem-solving-core/codex/evolution-skill-guard.d.ts +36 -0
- package/dist/problem-solving-core/codex/evolution-skill-guard.js +143 -0
- package/dist/problem-solving-core/codex/evolution-skill-guard.js.map +1 -0
- package/dist/problem-solving-core/codex/repo-memory-service.d.ts +24 -0
- package/dist/problem-solving-core/codex/repo-memory-service.js +114 -0
- package/dist/problem-solving-core/codex/repo-memory-service.js.map +1 -0
- package/dist/problem-solving-core/codex/skill-sync-service.d.ts +35 -0
- package/dist/problem-solving-core/codex/skill-sync-service.js +280 -0
- package/dist/problem-solving-core/codex/skill-sync-service.js.map +1 -0
- package/dist/task-environment/cancellation/task-abort-registry.d.ts +17 -0
- package/dist/task-environment/cancellation/task-abort-registry.js +51 -0
- package/dist/task-environment/cancellation/task-abort-registry.js.map +1 -0
- package/dist/task-environment/cancellation/task-cancellation-service.d.ts +25 -0
- package/dist/task-environment/cancellation/task-cancellation-service.js +54 -0
- package/dist/task-environment/cancellation/task-cancellation-service.js.map +1 -0
- package/dist/task-environment/cancellation/task-cleanup-service.d.ts +22 -0
- package/dist/task-environment/cancellation/task-cleanup-service.js +67 -0
- package/dist/task-environment/cancellation/task-cleanup-service.js.map +1 -0
- package/dist/task-environment/delivery/commit-message/bugfix-commit-message-template.d.ts +13 -0
- package/dist/task-environment/delivery/commit-message/bugfix-commit-message-template.js +83 -0
- package/dist/task-environment/delivery/commit-message/bugfix-commit-message-template.js.map +1 -0
- package/dist/task-environment/delivery/commit-message/commit-message-builder.d.ts +6 -0
- package/dist/task-environment/delivery/commit-message/commit-message-builder.js +15 -0
- package/dist/task-environment/delivery/commit-message/commit-message-builder.js.map +1 -0
- package/dist/task-environment/delivery/commit-message/commit-message-template-types.d.ts +16 -0
- package/dist/task-environment/delivery/commit-message/commit-message-template-types.js +2 -0
- package/dist/task-environment/delivery/commit-message/commit-message-template-types.js.map +1 -0
- package/dist/task-environment/delivery/feishu-analysis-doc-service.d.ts +50 -0
- package/dist/task-environment/delivery/feishu-analysis-doc-service.js +454 -0
- package/dist/task-environment/delivery/feishu-analysis-doc-service.js.map +1 -0
- package/dist/task-environment/delivery/feishu-card-renderer.d.ts +38 -0
- package/dist/task-environment/delivery/feishu-card-renderer.js +449 -0
- package/dist/task-environment/delivery/feishu-card-renderer.js.map +1 -0
- package/dist/task-environment/delivery/feishu-content/feishu-content-renderer.d.ts +34 -0
- package/dist/task-environment/delivery/feishu-content/feishu-content-renderer.js +201 -0
- package/dist/task-environment/delivery/feishu-content/feishu-content-renderer.js.map +1 -0
- package/dist/task-environment/delivery/feishu-content/feishu-copy-config.d.ts +27 -0
- package/dist/task-environment/delivery/feishu-content/feishu-copy-config.js +74 -0
- package/dist/task-environment/delivery/feishu-content/feishu-copy-config.js.map +1 -0
- package/dist/task-environment/delivery/feishu-notifier.d.ts +45 -0
- package/dist/task-environment/delivery/feishu-notifier.js +250 -0
- package/dist/task-environment/delivery/feishu-notifier.js.map +1 -0
- package/dist/task-environment/delivery/feishu-templates/analysis-message-template.d.ts +6 -0
- package/dist/task-environment/delivery/feishu-templates/analysis-message-template.js +39 -0
- package/dist/task-environment/delivery/feishu-templates/analysis-message-template.js.map +1 -0
- package/dist/task-environment/delivery/feishu-templates/bugfix-message-template.d.ts +6 -0
- package/dist/task-environment/delivery/feishu-templates/bugfix-message-template.js +40 -0
- package/dist/task-environment/delivery/feishu-templates/bugfix-message-template.js.map +1 -0
- package/dist/task-environment/delivery/feishu-templates/default-message-template.d.ts +6 -0
- package/dist/task-environment/delivery/feishu-templates/default-message-template.js +33 -0
- package/dist/task-environment/delivery/feishu-templates/default-message-template.js.map +1 -0
- package/dist/task-environment/delivery/feishu-templates/patch-message-template.d.ts +6 -0
- package/dist/task-environment/delivery/feishu-templates/patch-message-template.js +40 -0
- package/dist/task-environment/delivery/feishu-templates/patch-message-template.js.map +1 -0
- package/dist/task-environment/delivery/feishu-templates/template-registry.d.ts +2 -0
- package/dist/task-environment/delivery/feishu-templates/template-registry.js +11 -0
- package/dist/task-environment/delivery/feishu-templates/template-registry.js.map +1 -0
- package/dist/task-environment/delivery/feishu-templates/template-types.d.ts +20 -0
- package/dist/task-environment/delivery/feishu-templates/template-types.js +2 -0
- package/dist/task-environment/delivery/feishu-templates/template-types.js.map +1 -0
- package/dist/task-environment/delivery/task-delivery-dispatcher.d.ts +14 -0
- package/dist/task-environment/delivery/task-delivery-dispatcher.js +109 -0
- package/dist/task-environment/delivery/task-delivery-dispatcher.js.map +1 -0
- package/dist/task-environment/delivery/task-delivery-service.d.ts +33 -0
- package/dist/task-environment/delivery/task-delivery-service.js +432 -0
- package/dist/task-environment/delivery/task-delivery-service.js.map +1 -0
- package/dist/task-environment/delivery/task-publication-service.d.ts +97 -0
- package/dist/task-environment/delivery/task-publication-service.js +1369 -0
- package/dist/task-environment/delivery/task-publication-service.js.map +1 -0
- package/dist/task-environment/execution-addresses.d.ts +40 -0
- package/dist/task-environment/execution-addresses.js +63 -0
- package/dist/task-environment/execution-addresses.js.map +1 -0
- package/dist/task-environment/intake/cli-file-intake.d.ts +12 -0
- package/dist/task-environment/intake/cli-file-intake.js +56 -0
- package/dist/task-environment/intake/cli-file-intake.js.map +1 -0
- package/dist/task-environment/intake/manual-problem-intake.d.ts +3 -0
- package/dist/task-environment/intake/manual-problem-intake.js +57 -0
- package/dist/task-environment/intake/manual-problem-intake.js.map +1 -0
- package/dist/task-environment/intake/polling-problem-intake.d.ts +14 -0
- package/dist/task-environment/intake/polling-problem-intake.js +232 -0
- package/dist/task-environment/intake/polling-problem-intake.js.map +1 -0
- package/dist/task-environment/observability/logger.d.ts +76 -0
- package/dist/task-environment/observability/logger.js +604 -0
- package/dist/task-environment/observability/logger.js.map +1 -0
- package/dist/task-environment/observability/runtime-panel.d.ts +82 -0
- package/dist/task-environment/observability/runtime-panel.js +1008 -0
- package/dist/task-environment/observability/runtime-panel.js.map +1 -0
- package/dist/task-environment/observability/sound-notifier.d.ts +18 -0
- package/dist/task-environment/observability/sound-notifier.js +71 -0
- package/dist/task-environment/observability/sound-notifier.js.map +1 -0
- package/dist/task-environment/orchestration/execution-context-assembler.d.ts +41 -0
- package/dist/task-environment/orchestration/execution-context-assembler.js +464 -0
- package/dist/task-environment/orchestration/execution-context-assembler.js.map +1 -0
- package/dist/task-environment/orchestration/git-change-classifier.d.ts +19 -0
- package/dist/task-environment/orchestration/git-change-classifier.js +106 -0
- package/dist/task-environment/orchestration/git-change-classifier.js.map +1 -0
- package/dist/task-environment/orchestration/harness-registry.d.ts +27 -0
- package/dist/task-environment/orchestration/harness-registry.js +116 -0
- package/dist/task-environment/orchestration/harness-registry.js.map +1 -0
- package/dist/task-environment/orchestration/harness-resolver.d.ts +8 -0
- package/dist/task-environment/orchestration/harness-resolver.js +39 -0
- package/dist/task-environment/orchestration/harness-resolver.js.map +1 -0
- package/dist/task-environment/orchestration/task-orchestrator.d.ts +45 -0
- package/dist/task-environment/orchestration/task-orchestrator.js +1122 -0
- package/dist/task-environment/orchestration/task-orchestrator.js.map +1 -0
- package/dist/task-environment/orchestration/task-package-assembler.d.ts +4 -0
- package/dist/task-environment/orchestration/task-package-assembler.js +10 -0
- package/dist/task-environment/orchestration/task-package-assembler.js.map +1 -0
- package/dist/task-environment/orchestration/triage-agent.d.ts +54 -0
- package/dist/task-environment/orchestration/triage-agent.js +636 -0
- package/dist/task-environment/orchestration/triage-agent.js.map +1 -0
- package/dist/task-environment/orchestration/triage-runner.d.ts +65 -0
- package/dist/task-environment/orchestration/triage-runner.js +655 -0
- package/dist/task-environment/orchestration/triage-runner.js.map +1 -0
- package/dist/task-environment/publication-target.d.ts +12 -0
- package/dist/task-environment/publication-target.js +174 -0
- package/dist/task-environment/publication-target.js.map +1 -0
- package/dist/task-environment/runtime/blocking-event-queue.d.ts +7 -0
- package/dist/task-environment/runtime/blocking-event-queue.js +27 -0
- package/dist/task-environment/runtime/blocking-event-queue.js.map +1 -0
- package/dist/task-environment/runtime/optimus-runtime.d.ts +69 -0
- package/dist/task-environment/runtime/optimus-runtime.js +751 -0
- package/dist/task-environment/runtime/optimus-runtime.js.map +1 -0
- package/dist/task-environment/storage/sqlite-event-store.d.ts +52 -0
- package/dist/task-environment/storage/sqlite-event-store.js +288 -0
- package/dist/task-environment/storage/sqlite-event-store.js.map +1 -0
- package/dist/task-environment/storage/sqlite-task-store.d.ts +122 -0
- package/dist/task-environment/storage/sqlite-task-store.js +1182 -0
- package/dist/task-environment/storage/sqlite-task-store.js.map +1 -0
- package/dist/types.d.ts +629 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/embedded-skills/shared/repo-inspection/SKILL.md +9 -0
- package/embedded-skills/shared/repo-inspection/skill.json +5 -0
- package/embedded-skills/task/bugfix/android-debug-protocol/SKILL.md +10 -0
- package/embedded-skills/task/bugfix/android-debug-protocol/skill.json +6 -0
- package/harness/AGENTS.md +30 -0
- package/harness/CHECKLIST.md +44 -0
- package/harness/CONSTRAINTS.md +60 -0
- package/harness/FRAMEWORK.md +28 -0
- package/harness/GOAL.md +28 -0
- package/harness/HANDOFF.md +45 -0
- package/harness/TASK_PLAN.md +79 -0
- package/optimus.config.template.json +34 -0
- package/package.json +109 -0
- package/task-harnesses/bugfix/ACCEPT.md +47 -0
- package/task-harnesses/bugfix/CONSTRAINTS.md +46 -0
- package/task-harnesses/bugfix/CONTEXT.md +29 -0
- package/task-harnesses/bugfix/EVOLUTION.md +82 -0
- package/task-harnesses/bugfix/ROLE.md +29 -0
- package/task-harnesses/bugfix/STANDARD.md +250 -0
- package/task-harnesses/bugfix/manifest.json +13 -0
- package/task-harnesses/registry.json +8 -0
|
@@ -0,0 +1,1122 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { access, mkdir, mkdtemp, readdir, rename, rm, writeFile } from "node:fs/promises";
|
|
3
|
+
import { promisify } from "node:util";
|
|
4
|
+
import { basename, dirname, resolve, relative, sep, join } from "node:path";
|
|
5
|
+
import { getVisibleRepoDir } from "../execution-addresses.js";
|
|
6
|
+
import { ExecutionContextAssembler } from "./execution-context-assembler.js";
|
|
7
|
+
import { HarnessResolver } from "./harness-resolver.js";
|
|
8
|
+
import { OptimusLogger } from "../observability/logger.js";
|
|
9
|
+
import { EvolutionSkillGuard } from "../../problem-solving-core/codex/evolution-skill-guard.js";
|
|
10
|
+
import { RepoMemoryService } from "../../problem-solving-core/codex/repo-memory-service.js";
|
|
11
|
+
import { FeishuAnalysisDocService } from "../delivery/feishu-analysis-doc-service.js";
|
|
12
|
+
import { TaskDeliveryDispatcher } from "../delivery/task-delivery-dispatcher.js";
|
|
13
|
+
import { TaskDeliveryService } from "../delivery/task-delivery-service.js";
|
|
14
|
+
import { TaskPublicationService } from "../delivery/task-publication-service.js";
|
|
15
|
+
import { collectBusinessChangePaths, filterBusinessChangeEntries, parseGitStatusOutput, parseRepoProjectStatusReports } from "./git-change-classifier.js";
|
|
16
|
+
// Promisified execFile so child-process invocations can be awaited.
const execFileAsync = promisify(execFile);
// Max stdout/stderr buffer for child-process calls: 4 MiB.
// NOTE(review): consumers of this constant are outside this chunk — presumably
// passed as execFile's maxBuffer option; confirm against the rest of the file.
const EXEC_MAX_BUFFER = 4 * 1024 * 1024;
// Matches temporary branches owned by this orchestrator, e.g. "optimus/task-abc-123"
// (case-insensitive). NOTE(review): usage is not visible in this chunk — presumably
// used to decide which branches are safe to clean up; verify before relying on it.
const REPO_MANAGED_TEMP_BRANCH_PATTERN = /^optimus\/task-[a-z0-9-]+$/i;
|
|
19
|
+
/**
 * Resolve a task result path to an absolute, normalized path.
 *
 * Relative paths are resolved against the run's artifact directory; absolute
 * paths are returned normalized as-is. `path.resolve` processes segments
 * right-to-left and stops at the first absolute one, so a single call covers
 * both cases — and, unlike the previous `startsWith("/")` check, it also
 * recognizes Windows drive-absolute paths (e.g. "C:\\...").
 *
 * @param {string} resultPath - Path reported by the task run (absolute or relative).
 * @param {string} artifactDir - Base directory used for relative result paths.
 * @returns {string} Absolute, normalized result path.
 */
function resolveTaskResultPath(resultPath, artifactDir) {
    return resolve(artifactDir, resultPath);
}
|
|
25
|
+
export class TaskOrchestrator {
|
|
26
|
+
    // Persistence layer: task packages, runs, events, artifacts, and delivery
    // bundles (getTaskPackage, appendArtifacts, completeTaskRun, appendEvent, ...).
    store;
    // Executes the assembled execution request (codexRunner.runTask).
    codexRunner;
    // Runtime configuration; read for delivery channels and publication
    // dryRun/reviewMode, and passed to most collaborators below.
    config;
    // Structured lifecycle logger (OptimusLogger): info/warn/error plus
    // writeEvolutionSnapshot.
    logger;
    // Resolves the harness that matches a task package.
    harnessResolver;
    // Assembles the per-run execution context.
    executionContextAssembler;
    // Captures a baseline before the run and rejects disallowed skill changes
    // after it (prepare/evaluate).
    evolutionSkillGuard;
    // Persists repo memory after execution (see persistRepoMemoryIfChanged).
    repoMemoryService;
    // NOTE(review): dispatch happens at the "dispatch_delivery" stage, which is
    // outside this chunk — presumably routes the bundle to configured channels.
    taskDeliveryDispatcher;
    // Builds the delivery bundle from the finalized task, run, and artifacts.
    taskDeliveryService;
    // Publishes the analysis doc to Feishu when the "feishu" channel is enabled.
    feishuAnalysisDocService;
    // Publishes results (taskPublicationService.publish), honoring
    // dryRun/reviewMode from config.publication.
    taskPublicationService;
|
|
38
|
+
    /**
     * Wire the orchestrator's collaborators. Pure assignment/construction —
     * no I/O happens here.
     *
     * @param store - Persistence layer used throughout run(); also handed to the
     *   context assembler and (as refStore) to the Feishu analysis-doc service.
     * @param codexRunner - Runner that executes the prepared request.
     * @param config - Runtime configuration, shared with most collaborators.
     */
    constructor(store, codexRunner, config) {
        this.store = store;
        this.codexRunner = codexRunner;
        this.config = config;
        this.logger = new OptimusLogger(config);
        this.harnessResolver = new HarnessResolver(config);
        this.executionContextAssembler = new ExecutionContextAssembler(store, config);
        this.evolutionSkillGuard = new EvolutionSkillGuard(config);
        this.repoMemoryService = new RepoMemoryService(config);
        this.taskDeliveryDispatcher = new TaskDeliveryDispatcher(config);
        this.taskDeliveryService = new TaskDeliveryService();
        // The doc service reads/writes refs through the same store as the orchestrator.
        this.feishuAnalysisDocService = new FeishuAnalysisDocService({ refStore: store });
        // Publication service reuses the orchestrator's logger rather than config.
        this.taskPublicationService = new TaskPublicationService(this.logger);
    }
|
|
52
|
+
// Resolve the queued task into execution inputs, run it once, then persist the terminal outcome.
|
|
53
|
+
async run(task) {
|
|
54
|
+
await this.logger.info("task.orchestrator.started", {
|
|
55
|
+
taskId: task.taskId,
|
|
56
|
+
taskType: task.taskType,
|
|
57
|
+
runId: task.activeRunId,
|
|
58
|
+
stage: "orchestrator_start",
|
|
59
|
+
status: task.status
|
|
60
|
+
});
|
|
61
|
+
const taskPackage = await this.store.getTaskPackage(task.taskPackageId);
|
|
62
|
+
if (!taskPackage) {
|
|
63
|
+
throw new Error(`Task package not found for task ${task.taskId}`);
|
|
64
|
+
}
|
|
65
|
+
const harness = this.harnessResolver.resolve(taskPackage);
|
|
66
|
+
const startedAt = Date.now();
|
|
67
|
+
const runId = task.activeRunId;
|
|
68
|
+
const sourceEventId = task.sourceEventId;
|
|
69
|
+
let finalStatus = "failed";
|
|
70
|
+
let finalSummary = "Task failed before completion.";
|
|
71
|
+
let finalFailureCategory;
|
|
72
|
+
let preparedContext;
|
|
73
|
+
let evolutionGuardState;
|
|
74
|
+
let completionArtifacts = [];
|
|
75
|
+
let cleanupPending = false;
|
|
76
|
+
try {
|
|
77
|
+
await this.logger.info("task.orchestrator.harness_resolved", {
|
|
78
|
+
taskId: task.taskId,
|
|
79
|
+
taskType: task.taskType,
|
|
80
|
+
runId: task.activeRunId,
|
|
81
|
+
stage: "harness_resolved"
|
|
82
|
+
});
|
|
83
|
+
const context = await this.prepareExecutionContext(task, harness, taskPackage, task.abortSignal);
|
|
84
|
+
preparedContext = context;
|
|
85
|
+
cleanupPending = Boolean(context.cleanupRequired && context.cleanup);
|
|
86
|
+
evolutionGuardState = await this.evolutionSkillGuard.prepare(task.taskType);
|
|
87
|
+
await this.logger.info("skills.evolution.prepare", {
|
|
88
|
+
taskId: task.taskId,
|
|
89
|
+
taskType: task.taskType,
|
|
90
|
+
runId,
|
|
91
|
+
allowedEvolutionSkillDir: evolutionGuardState.allowedTaskDir,
|
|
92
|
+
baselineSkillIds: evolutionGuardState.baselineSkillIds,
|
|
93
|
+
baselineFileCount: evolutionGuardState.baselineFileCount
|
|
94
|
+
});
|
|
95
|
+
const executionRequest = await this.prepareExecutionRequest(task, context, taskPackage, harness);
|
|
96
|
+
const result = await this.codexRunner.runTask(executionRequest, context);
|
|
97
|
+
await this.persistRepoMemoryIfChanged(task, context);
|
|
98
|
+
this.validateTaskResultContract(result, context);
|
|
99
|
+
const completedAt = new Date().toISOString();
|
|
100
|
+
if (result.status === "completed") {
|
|
101
|
+
if (context.addresses.mode === "copy") {
|
|
102
|
+
await this.validateExecutionIsolation(context);
|
|
103
|
+
}
|
|
104
|
+
const patchArtifactPath = await this.writePatchArtifactIfNeeded(context);
|
|
105
|
+
if (patchArtifactPath) {
|
|
106
|
+
completionArtifacts.push({
|
|
107
|
+
kind: "patch_diff",
|
|
108
|
+
path: patchArtifactPath,
|
|
109
|
+
createdAt: completedAt
|
|
110
|
+
});
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
if (result.resultPath) {
|
|
114
|
+
const resolvedResultPath = resolveTaskResultPath(result.resultPath, context.addresses.artifactDir);
|
|
115
|
+
completionArtifacts.unshift({
|
|
116
|
+
kind: "result_md",
|
|
117
|
+
path: resolvedResultPath,
|
|
118
|
+
createdAt: completedAt
|
|
119
|
+
});
|
|
120
|
+
}
|
|
121
|
+
if (completionArtifacts.length > 0) {
|
|
122
|
+
await this.store.appendArtifacts(task.taskId, completionArtifacts);
|
|
123
|
+
}
|
|
124
|
+
finalStatus = result.status === "completed" ? "completed" : "failed";
|
|
125
|
+
finalSummary = result.status === "completed"
|
|
126
|
+
? `Task completed with result file ${result.resultPath ? resolveTaskResultPath(result.resultPath, context.addresses.artifactDir) : "unknown"}.`
|
|
127
|
+
: result.error?.message ?? "Task failed without an explicit error.";
|
|
128
|
+
finalFailureCategory = result.status === "failed"
|
|
129
|
+
? (result.error?.category ?? result.diagnostics?.failureCategory ?? "unknown")
|
|
130
|
+
: result.diagnostics?.failureCategory;
|
|
131
|
+
if (executionRequest.task.status !== finalStatus) {
|
|
132
|
+
await context.onStatusUpdate?.({ status: finalStatus, detail: finalSummary });
|
|
133
|
+
}
|
|
134
|
+
const durationMs = Date.now() - startedAt;
|
|
135
|
+
if (result.diagnostics?.usedFallback) {
|
|
136
|
+
await this.logger.warn("task.fallback", {
|
|
137
|
+
taskId: task.taskId,
|
|
138
|
+
taskType: task.taskType,
|
|
139
|
+
sourceEventId,
|
|
140
|
+
runId,
|
|
141
|
+
status: finalStatus,
|
|
142
|
+
durationMs,
|
|
143
|
+
reason: result.diagnostics.fallbackReason ?? "unknown",
|
|
144
|
+
failureCategory: result.diagnostics.failureCategory ?? "unknown"
|
|
145
|
+
});
|
|
146
|
+
}
|
|
147
|
+
if (finalStatus === "completed" && evolutionGuardState) {
|
|
148
|
+
const evolutionChanges = await this.evolutionSkillGuard.evaluate(evolutionGuardState);
|
|
149
|
+
await this.logger.info("skills.evolution.checked", {
|
|
150
|
+
taskId: task.taskId,
|
|
151
|
+
taskType: task.taskType,
|
|
152
|
+
runId,
|
|
153
|
+
allowedEvolutionSkillDir: preparedContext?.allowedEvolutionSkillDir ?? evolutionGuardState.allowedTaskDir,
|
|
154
|
+
baselineSkillIds: evolutionChanges.baselineSkillIds,
|
|
155
|
+
currentSkillIds: evolutionChanges.currentSkillIds,
|
|
156
|
+
baselineFileCount: evolutionChanges.baselineFileCount,
|
|
157
|
+
currentFileCount: evolutionChanges.currentFileCount,
|
|
158
|
+
createdSkillIds: evolutionChanges.createdSkillIds,
|
|
159
|
+
updatedSkillIds: evolutionChanges.updatedSkillIds,
|
|
160
|
+
violationCount: evolutionChanges.violations.length
|
|
161
|
+
});
|
|
162
|
+
if (evolutionChanges.violations.length > 0) {
|
|
163
|
+
await this.logger.error("skills.evolution.rejected", {
|
|
164
|
+
taskId: task.taskId,
|
|
165
|
+
taskType: task.taskType,
|
|
166
|
+
runId,
|
|
167
|
+
allowedEvolutionSkillDir: preparedContext?.allowedEvolutionSkillDir ?? evolutionGuardState.allowedTaskDir,
|
|
168
|
+
violations: evolutionChanges.violations,
|
|
169
|
+
createdSkillIds: evolutionChanges.createdSkillIds,
|
|
170
|
+
updatedSkillIds: evolutionChanges.updatedSkillIds
|
|
171
|
+
});
|
|
172
|
+
throw new Error(`Evolution skill guard rejected changes: ${evolutionChanges.violations.join("; ")}`);
|
|
173
|
+
}
|
|
174
|
+
await this.logger.writeEvolutionSnapshot({
|
|
175
|
+
ok: evolutionChanges.violations.length === 0,
|
|
176
|
+
taskId: task.taskId,
|
|
177
|
+
taskType: task.taskType,
|
|
178
|
+
runId,
|
|
179
|
+
createdSkillIds: evolutionChanges.createdSkillIds,
|
|
180
|
+
updatedSkillIds: evolutionChanges.updatedSkillIds,
|
|
181
|
+
violations: evolutionChanges.violations,
|
|
182
|
+
allowedEvolutionSkillDir: preparedContext?.allowedEvolutionSkillDir ?? null,
|
|
183
|
+
updatedAt: new Date().toISOString()
|
|
184
|
+
});
|
|
185
|
+
if (evolutionChanges.createdSkillIds.length > 0 || evolutionChanges.updatedSkillIds.length > 0) {
|
|
186
|
+
await this.logger.info("skills.evolution.applied", {
|
|
187
|
+
taskId: task.taskId,
|
|
188
|
+
taskType: task.taskType,
|
|
189
|
+
runId,
|
|
190
|
+
createdSkillIds: evolutionChanges.createdSkillIds,
|
|
191
|
+
updatedSkillIds: evolutionChanges.updatedSkillIds,
|
|
192
|
+
allowedEvolutionSkillDir: preparedContext?.allowedEvolutionSkillDir ?? null
|
|
193
|
+
});
|
|
194
|
+
}
|
|
195
|
+
else {
|
|
196
|
+
await this.logger.info("skills.evolution.noop", {
|
|
197
|
+
taskId: task.taskId,
|
|
198
|
+
taskType: task.taskType,
|
|
199
|
+
runId,
|
|
200
|
+
allowedEvolutionSkillDir: preparedContext?.allowedEvolutionSkillDir ?? null,
|
|
201
|
+
baselineSkillIds: evolutionChanges.baselineSkillIds,
|
|
202
|
+
currentSkillIds: evolutionChanges.currentSkillIds,
|
|
203
|
+
baselineFileCount: evolutionChanges.baselineFileCount,
|
|
204
|
+
currentFileCount: evolutionChanges.currentFileCount
|
|
205
|
+
});
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
await this.logger.info(this.resolveTerminalLogEvent(finalStatus), {
|
|
209
|
+
taskId: task.taskId,
|
|
210
|
+
taskType: task.taskType,
|
|
211
|
+
sourceEventId,
|
|
212
|
+
runId,
|
|
213
|
+
status: finalStatus,
|
|
214
|
+
durationMs,
|
|
215
|
+
...(finalFailureCategory ? { failureCategory: finalFailureCategory } : {}),
|
|
216
|
+
artifactCount: completionArtifacts.length
|
|
217
|
+
});
|
|
218
|
+
}
|
|
219
|
+
catch (error) {
|
|
220
|
+
finalFailureCategory = this.resolveFailureCategory(error);
|
|
221
|
+
const errorMessage = error instanceof Error ? error.message : "Unknown task execution error";
|
|
222
|
+
const canceledByRuntime = task.abortSignal?.aborted === true;
|
|
223
|
+
finalStatus = canceledByRuntime ? "canceled" : "failed";
|
|
224
|
+
finalSummary = canceledByRuntime
|
|
225
|
+
? `Task canceled: ${String(task.abortSignal?.reason ?? errorMessage)}`
|
|
226
|
+
: errorMessage;
|
|
227
|
+
if (canceledByRuntime) {
|
|
228
|
+
await this.logger.warn("task.canceled", {
|
|
229
|
+
taskId: task.taskId,
|
|
230
|
+
taskType: task.taskType,
|
|
231
|
+
sourceEventId,
|
|
232
|
+
runId,
|
|
233
|
+
status: finalStatus,
|
|
234
|
+
reason: finalSummary,
|
|
235
|
+
failureCategory: finalFailureCategory
|
|
236
|
+
});
|
|
237
|
+
}
|
|
238
|
+
else {
|
|
239
|
+
await this.logger.error("task.failed", {
|
|
240
|
+
taskId: task.taskId,
|
|
241
|
+
taskType: task.taskType,
|
|
242
|
+
sourceEventId,
|
|
243
|
+
runId,
|
|
244
|
+
status: finalStatus,
|
|
245
|
+
reason: finalSummary,
|
|
246
|
+
failureCategory: finalFailureCategory
|
|
247
|
+
});
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
finally {
|
|
251
|
+
await this.logger.info("task.orchestrator.finished", {
|
|
252
|
+
taskId: task.taskId,
|
|
253
|
+
taskType: task.taskType,
|
|
254
|
+
runId,
|
|
255
|
+
stage: "orchestrator_finish",
|
|
256
|
+
status: finalStatus,
|
|
257
|
+
failureCategory: finalFailureCategory
|
|
258
|
+
});
|
|
259
|
+
await this.store.completeTaskRun(task.taskId, finalStatus, finalSummary, this.buildCompleteTaskRunOptions(finalFailureCategory));
|
|
260
|
+
try {
|
|
261
|
+
const postRunStartedAt = Date.now();
|
|
262
|
+
let postRunStage = "load_final_state";
|
|
263
|
+
await this.logger.info("task.postrun.started", {
|
|
264
|
+
taskId: task.taskId,
|
|
265
|
+
taskType: task.taskType,
|
|
266
|
+
runId,
|
|
267
|
+
status: finalStatus
|
|
268
|
+
});
|
|
269
|
+
try {
|
|
270
|
+
const persistedArtifacts = await this.store.listTaskArtifacts(task.taskId);
|
|
271
|
+
const finalizedTask = await this.store.getTask(task.taskId);
|
|
272
|
+
const finalizedRun = runId
|
|
273
|
+
? (await this.store.listTaskRuns()).find((item) => item.runId === runId && item.taskId === task.taskId)
|
|
274
|
+
: undefined;
|
|
275
|
+
postRunStage = "build_delivery_bundle";
|
|
276
|
+
const deliveryBundleBuildStartedAt = Date.now();
|
|
277
|
+
await this.logger.info("task.delivery_bundle.build_started", {
|
|
278
|
+
taskId: task.taskId,
|
|
279
|
+
taskType: task.taskType,
|
|
280
|
+
runId,
|
|
281
|
+
artifactCount: persistedArtifacts.length
|
|
282
|
+
});
|
|
283
|
+
let deliveryBundle = await this.taskDeliveryService.buildBundle({
|
|
284
|
+
task: finalizedTask ?? { ...task, status: finalStatus },
|
|
285
|
+
...(taskPackage ? {
|
|
286
|
+
taskPackage: {
|
|
287
|
+
title: taskPackage.title,
|
|
288
|
+
...(taskPackage.sourceRef ? { sourceRef: taskPackage.sourceRef } : {}),
|
|
289
|
+
...(taskPackage.input.metadata ? { metadata: taskPackage.input.metadata } : {})
|
|
290
|
+
}
|
|
291
|
+
} : {}),
|
|
292
|
+
...(preparedContext ? { context: preparedContext } : {}),
|
|
293
|
+
...(finalizedRun ? { latestRun: finalizedRun } : {}),
|
|
294
|
+
artifacts: persistedArtifacts
|
|
295
|
+
});
|
|
296
|
+
await this.logger.info("task.delivery_bundle.build_completed", {
|
|
297
|
+
taskId: task.taskId,
|
|
298
|
+
taskType: task.taskType,
|
|
299
|
+
runId,
|
|
300
|
+
durationMs: Date.now() - deliveryBundleBuildStartedAt,
|
|
301
|
+
outcome: deliveryBundle.outcome,
|
|
302
|
+
hasPatch: Boolean(deliveryBundle.artifacts.patchDiff),
|
|
303
|
+
resultMd: deliveryBundle.artifacts.resultMd ?? null
|
|
304
|
+
});
|
|
305
|
+
if (this.config.delivery.enabled && this.config.delivery.channels.includes("feishu")) {
|
|
306
|
+
postRunStage = "upload_analysis_doc";
|
|
307
|
+
const analysisDocStartedAt = Date.now();
|
|
308
|
+
await this.logger.info("task.delivery_analysis_doc.started", {
|
|
309
|
+
taskId: task.taskId,
|
|
310
|
+
taskType: task.taskType,
|
|
311
|
+
runId,
|
|
312
|
+
outcome: deliveryBundle.outcome,
|
|
313
|
+
hasExistingAnalysisDocUrl: Boolean(deliveryBundle.summary.analysisDocUrl?.trim())
|
|
314
|
+
});
|
|
315
|
+
const analysisDocPublish = await this.feishuAnalysisDocService.publishIfNeeded(deliveryBundle);
|
|
316
|
+
deliveryBundle = analysisDocPublish.bundle;
|
|
317
|
+
if (analysisDocPublish.status === "uploaded") {
|
|
318
|
+
await this.logger.info("task.delivery_analysis_doc.uploaded", {
|
|
319
|
+
taskId: task.taskId,
|
|
320
|
+
taskType: task.taskType,
|
|
321
|
+
runId,
|
|
322
|
+
durationMs: Date.now() - analysisDocStartedAt,
|
|
323
|
+
url: analysisDocPublish.url ?? null
|
|
324
|
+
});
|
|
325
|
+
}
|
|
326
|
+
else if (analysisDocPublish.status === "skipped") {
|
|
327
|
+
await this.logger.info("task.delivery_analysis_doc.skipped", {
|
|
328
|
+
taskId: task.taskId,
|
|
329
|
+
taskType: task.taskType,
|
|
330
|
+
runId,
|
|
331
|
+
durationMs: Date.now() - analysisDocStartedAt,
|
|
332
|
+
reason: analysisDocPublish.reason ?? "unknown"
|
|
333
|
+
});
|
|
334
|
+
}
|
|
335
|
+
else if (analysisDocPublish.status === "failed") {
|
|
336
|
+
await this.logger.warn("task.delivery_analysis_doc.failed", {
|
|
337
|
+
taskId: task.taskId,
|
|
338
|
+
taskType: task.taskType,
|
|
339
|
+
runId,
|
|
340
|
+
durationMs: Date.now() - analysisDocStartedAt,
|
|
341
|
+
reason: analysisDocPublish.reason ?? "unknown"
|
|
342
|
+
});
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
postRunStage = "persist_delivery_bundle";
|
|
346
|
+
await this.store.appendTaskDeliveryBundle(deliveryBundle);
|
|
347
|
+
await this.logger.info("task.delivery_bundle.created", {
|
|
348
|
+
taskId: task.taskId,
|
|
349
|
+
taskType: task.taskType,
|
|
350
|
+
runId,
|
|
351
|
+
outcome: deliveryBundle.outcome,
|
|
352
|
+
publicationAction: deliveryBundle.publication?.action ?? null,
|
|
353
|
+
publicationReason: deliveryBundle.publication?.reason ?? null,
|
|
354
|
+
hasPatch: Boolean(deliveryBundle.artifacts.patchDiff)
|
|
355
|
+
});
|
|
356
|
+
for (const warning of deliveryBundle.warnings ?? []) {
|
|
357
|
+
await this.store.appendEvent(task.taskId, "task.delivery_bundle.warning", warning, this.buildTaskEventOptions(runId, sourceEventId));
|
|
358
|
+
await this.logger.warn("task.delivery_bundle.warning", {
|
|
359
|
+
taskId: task.taskId,
|
|
360
|
+
taskType: task.taskType,
|
|
361
|
+
runId,
|
|
362
|
+
warning,
|
|
363
|
+
resultMd: deliveryBundle.artifacts.resultMd ?? null
|
|
364
|
+
});
|
|
365
|
+
}
|
|
366
|
+
postRunStage = "publish";
|
|
367
|
+
const publicationStartedAt = Date.now();
|
|
368
|
+
await this.logger.info("task.publication.started", {
|
|
369
|
+
taskId: task.taskId,
|
|
370
|
+
taskType: task.taskType,
|
|
371
|
+
runId,
|
|
372
|
+
dryRun: this.config.publication.dryRun,
|
|
373
|
+
reviewMode: this.config.publication.reviewMode,
|
|
374
|
+
publicationAction: deliveryBundle.publication?.action ?? null,
|
|
375
|
+
publicationReason: deliveryBundle.publication?.reason ?? null
|
|
376
|
+
});
|
|
377
|
+
const publicationResult = await this.taskPublicationService.publish({
|
|
378
|
+
bundle: deliveryBundle,
|
|
379
|
+
...(preparedContext ? { context: preparedContext } : {}),
|
|
380
|
+
dryRun: this.config.publication.dryRun,
|
|
381
|
+
reviewMode: this.config.publication.reviewMode
|
|
382
|
+
});
|
|
383
|
+
await this.logger.info("task.publication.completed", {
|
|
384
|
+
taskId: task.taskId,
|
|
385
|
+
taskType: task.taskType,
|
|
386
|
+
runId,
|
|
387
|
+
durationMs: Date.now() - publicationStartedAt,
|
|
388
|
+
attemptCount: publicationResult.attempts.length,
|
|
389
|
+
artifactCount: publicationResult.artifacts.length
|
|
390
|
+
});
|
|
391
|
+
if (publicationResult.artifacts.length > 0) {
|
|
392
|
+
await this.store.appendArtifacts(task.taskId, publicationResult.artifacts);
|
|
393
|
+
}
|
|
394
|
+
await this.store.appendTaskPublicationAttempts(publicationResult.attempts);
|
|
395
|
+
for (const attempt of publicationResult.attempts) {
|
|
396
|
+
await this.logger.info("task.publication_attempt.recorded", {
|
|
397
|
+
taskId: attempt.taskId,
|
|
398
|
+
taskType: task.taskType,
|
|
399
|
+
runId: attempt.runId ?? runId,
|
|
400
|
+
publicationMode: attempt.mode,
|
|
401
|
+
publicationStatus: attempt.status,
|
|
402
|
+
summary: attempt.summary,
|
|
403
|
+
...(attempt.artifactPath ? { artifactPath: attempt.artifactPath } : {}),
|
|
404
|
+
...(attempt.error ? { reason: attempt.error } : {})
|
|
405
|
+
});
|
|
406
|
+
}
|
|
407
|
+
postRunStage = "dispatch_delivery";
|
|
408
|
+
const deliveryDispatchStartedAt = Date.now();
|
|
409
|
+
await this.logger.info("task.delivery_dispatch.started", {
|
|
410
|
+
taskId: task.taskId,
|
|
411
|
+
taskType: task.taskType,
|
|
412
|
+
runId,
|
|
413
|
+
channels: this.config.delivery.channels
|
|
414
|
+
});
|
|
415
|
+
const deliveryDispatch = await this.taskDeliveryDispatcher.dispatch({
|
|
416
|
+
bundle: deliveryBundle,
|
|
417
|
+
publicationAttempts: publicationResult.attempts
|
|
418
|
+
});
|
|
419
|
+
await this.logger.info("task.delivery_dispatch.completed", {
|
|
420
|
+
taskId: task.taskId,
|
|
421
|
+
taskType: task.taskType,
|
|
422
|
+
runId,
|
|
423
|
+
durationMs: Date.now() - deliveryDispatchStartedAt,
|
|
424
|
+
attemptCount: deliveryDispatch.attempts.length
|
|
425
|
+
});
|
|
426
|
+
await this.store.appendTaskDeliveryAttempts(deliveryDispatch.attempts);
|
|
427
|
+
for (const attempt of deliveryDispatch.attempts) {
|
|
428
|
+
await this.logger.info("task.delivery_attempt.recorded", {
|
|
429
|
+
taskId: attempt.taskId,
|
|
430
|
+
taskType: task.taskType,
|
|
431
|
+
runId: attempt.runId ?? runId,
|
|
432
|
+
channel: attempt.channel,
|
|
433
|
+
deliveryStatus: attempt.status,
|
|
434
|
+
summary: attempt.summary,
|
|
435
|
+
...(attempt.error ? { reason: attempt.error } : {})
|
|
436
|
+
});
|
|
437
|
+
}
|
|
438
|
+
await this.logger.info("task.postrun.completed", {
|
|
439
|
+
taskId: task.taskId,
|
|
440
|
+
taskType: task.taskType,
|
|
441
|
+
runId,
|
|
442
|
+
durationMs: Date.now() - postRunStartedAt
|
|
443
|
+
});
|
|
444
|
+
}
|
|
445
|
+
catch (postRunError) {
|
|
446
|
+
await this.logger.error("task.postrun.failed", {
|
|
447
|
+
taskId: task.taskId,
|
|
448
|
+
taskType: task.taskType,
|
|
449
|
+
runId,
|
|
450
|
+
stage: postRunStage,
|
|
451
|
+
durationMs: Date.now() - postRunStartedAt,
|
|
452
|
+
reason: postRunError instanceof Error ? postRunError.message : String(postRunError)
|
|
453
|
+
});
|
|
454
|
+
throw postRunError;
|
|
455
|
+
}
|
|
456
|
+
}
|
|
457
|
+
finally {
|
|
458
|
+
if (cleanupPending && preparedContext?.cleanupRequired && preparedContext.cleanup) {
|
|
459
|
+
try {
|
|
460
|
+
const cleanupLogContext = {
|
|
461
|
+
taskId: task.taskId,
|
|
462
|
+
taskType: task.taskType
|
|
463
|
+
};
|
|
464
|
+
if (runId) {
|
|
465
|
+
cleanupLogContext.runId = runId;
|
|
466
|
+
}
|
|
467
|
+
const visibleRepoDir = getVisibleRepoDir(preparedContext.addresses);
|
|
468
|
+
if (visibleRepoDir) {
|
|
469
|
+
cleanupLogContext.addressesVisibleRepoDir = visibleRepoDir;
|
|
470
|
+
}
|
|
471
|
+
cleanupLogContext.addressesResolvedExecutionMode = preparedContext.addresses.mode;
|
|
472
|
+
await this.logger.info("repo.inplace.cleanup_started", cleanupLogContext);
|
|
473
|
+
await preparedContext.cleanup();
|
|
474
|
+
await this.logger.info("repo.inplace.cleanup_completed", cleanupLogContext);
|
|
475
|
+
}
|
|
476
|
+
catch (cleanupError) {
|
|
477
|
+
finalStatus = "failed";
|
|
478
|
+
finalFailureCategory = "environment_error";
|
|
479
|
+
finalSummary = cleanupError instanceof Error
|
|
480
|
+
? `In-place cleanup failed: ${cleanupError.message}`
|
|
481
|
+
: "In-place cleanup failed.";
|
|
482
|
+
const cleanupFailureLogContext = {
|
|
483
|
+
taskId: task.taskId,
|
|
484
|
+
taskType: task.taskType,
|
|
485
|
+
reason: finalSummary,
|
|
486
|
+
failureCategory: finalFailureCategory
|
|
487
|
+
};
|
|
488
|
+
if (runId) {
|
|
489
|
+
cleanupFailureLogContext.runId = runId;
|
|
490
|
+
}
|
|
491
|
+
const visibleRepoDir = getVisibleRepoDir(preparedContext.addresses);
|
|
492
|
+
if (visibleRepoDir) {
|
|
493
|
+
cleanupFailureLogContext.addressesVisibleRepoDir = visibleRepoDir;
|
|
494
|
+
}
|
|
495
|
+
cleanupFailureLogContext.addressesResolvedExecutionMode = preparedContext.addresses.mode;
|
|
496
|
+
await this.logger.error("repo.inplace.cleanup_failed", cleanupFailureLogContext);
|
|
497
|
+
}
|
|
498
|
+
}
|
|
499
|
+
if (preparedContext?.addresses.mode === "copy") {
|
|
500
|
+
try {
|
|
501
|
+
await this.cleanupCopyWorkspace(preparedContext);
|
|
502
|
+
await this.logger.info("repo.copy.cleanup_completed", {
|
|
503
|
+
taskId: task.taskId,
|
|
504
|
+
taskType: task.taskType,
|
|
505
|
+
runId,
|
|
506
|
+
addressesWorkspaceDir: preparedContext.addresses.workspaceDir,
|
|
507
|
+
addressesArtifactDir: preparedContext.addresses.artifactDir
|
|
508
|
+
});
|
|
509
|
+
}
|
|
510
|
+
catch (cleanupError) {
|
|
511
|
+
finalStatus = "failed";
|
|
512
|
+
finalFailureCategory = "environment_error";
|
|
513
|
+
finalSummary = cleanupError instanceof Error
|
|
514
|
+
? `Copy workspace cleanup failed: ${cleanupError.message}`
|
|
515
|
+
: "Copy workspace cleanup failed.";
|
|
516
|
+
await this.logger.error("repo.copy.cleanup_failed", {
|
|
517
|
+
taskId: task.taskId,
|
|
518
|
+
taskType: task.taskType,
|
|
519
|
+
runId,
|
|
520
|
+
reason: finalSummary,
|
|
521
|
+
failureCategory: finalFailureCategory,
|
|
522
|
+
addressesWorkspaceDir: preparedContext.addresses.workspaceDir,
|
|
523
|
+
addressesArtifactDir: preparedContext.addresses.artifactDir
|
|
524
|
+
});
|
|
525
|
+
}
|
|
526
|
+
}
|
|
527
|
+
}
|
|
528
|
+
}
|
|
529
|
+
}
|
|
530
|
+
resolveTerminalLogEvent(status) {
|
|
531
|
+
if (status === "failed") {
|
|
532
|
+
return "task.failed";
|
|
533
|
+
}
|
|
534
|
+
if (status === "needs_human") {
|
|
535
|
+
return "task.needs_human";
|
|
536
|
+
}
|
|
537
|
+
return "task.completed";
|
|
538
|
+
}
|
|
539
|
+
async cleanupResidualRepoManagedTempBranches(task, repoRoot) {
|
|
540
|
+
await this.logger.info("repo.inplace.precheck.temp_branch_cleanup_started", {
|
|
541
|
+
taskId: task.taskId,
|
|
542
|
+
taskType: task.taskType,
|
|
543
|
+
runId: task.activeRunId,
|
|
544
|
+
addressesWorkspaceDir: repoRoot
|
|
545
|
+
});
|
|
546
|
+
const projectDirs = await this.listRepoManagedProjectDirs(repoRoot);
|
|
547
|
+
let cleanedBranchCount = 0;
|
|
548
|
+
for (const projectDir of projectDirs) {
|
|
549
|
+
cleanedBranchCount += await this.cleanupRepoManagedProjectBranches(task, projectDir);
|
|
550
|
+
}
|
|
551
|
+
await this.logger.info("repo.inplace.precheck.temp_branch_cleanup_completed", {
|
|
552
|
+
taskId: task.taskId,
|
|
553
|
+
taskType: task.taskType,
|
|
554
|
+
runId: task.activeRunId,
|
|
555
|
+
addressesWorkspaceDir: repoRoot,
|
|
556
|
+
projectCount: projectDirs.length,
|
|
557
|
+
cleanedBranchCount
|
|
558
|
+
});
|
|
559
|
+
}
|
|
560
|
+
async listRepoManagedProjectDirs(repoRoot) {
|
|
561
|
+
try {
|
|
562
|
+
const output = await this.readCommand("repo", ["list", "-p"], { cwd: repoRoot });
|
|
563
|
+
return output
|
|
564
|
+
.split(/\r?\n/u)
|
|
565
|
+
.map((line) => line.trim())
|
|
566
|
+
.filter((line) => line.length > 0)
|
|
567
|
+
.map((line) => join(repoRoot, line));
|
|
568
|
+
}
|
|
569
|
+
catch (error) {
|
|
570
|
+
const discoveredRepos = await this.findNestedGitRepositories(repoRoot);
|
|
571
|
+
await this.logger.warn("repo.inplace.precheck.temp_branch_cleanup_fallback", {
|
|
572
|
+
repoRoot,
|
|
573
|
+
reason: error instanceof Error ? error.message : String(error),
|
|
574
|
+
projectCount: discoveredRepos.length
|
|
575
|
+
});
|
|
576
|
+
return discoveredRepos;
|
|
577
|
+
}
|
|
578
|
+
}
|
|
579
|
+
async findNestedGitRepositories(rootPath) {
|
|
580
|
+
const discovered = new Set();
|
|
581
|
+
const visit = async (candidatePath) => {
|
|
582
|
+
try {
|
|
583
|
+
await access(join(candidatePath, ".git"));
|
|
584
|
+
discovered.add(candidatePath);
|
|
585
|
+
return;
|
|
586
|
+
}
|
|
587
|
+
catch {
|
|
588
|
+
// Continue recursive discovery.
|
|
589
|
+
}
|
|
590
|
+
const entries = await readdir(candidatePath, { withFileTypes: true }).catch(() => []);
|
|
591
|
+
for (const entry of entries) {
|
|
592
|
+
if (!entry.isDirectory() || entry.name.startsWith(".")) {
|
|
593
|
+
continue;
|
|
594
|
+
}
|
|
595
|
+
await visit(join(candidatePath, entry.name));
|
|
596
|
+
}
|
|
597
|
+
};
|
|
598
|
+
await visit(rootPath);
|
|
599
|
+
discovered.delete(rootPath);
|
|
600
|
+
return [...discovered].sort((left, right) => left.localeCompare(right));
|
|
601
|
+
}
|
|
602
|
+
async cleanupRepoManagedProjectBranches(task, projectDir) {
|
|
603
|
+
const currentBranch = await this.readCommand("git", ["-C", projectDir, "rev-parse", "--abbrev-ref", "HEAD"])
|
|
604
|
+
.then((value) => value.trim())
|
|
605
|
+
.catch(() => "HEAD");
|
|
606
|
+
const branchListing = await this.readCommand("git", ["-C", projectDir, "branch", "--format=%(refname:short)"]);
|
|
607
|
+
const staleBranches = branchListing
|
|
608
|
+
.split(/\r?\n/u)
|
|
609
|
+
.map((line) => line.trim())
|
|
610
|
+
.filter((line) => REPO_MANAGED_TEMP_BRANCH_PATTERN.test(line));
|
|
611
|
+
if (staleBranches.length === 0) {
|
|
612
|
+
return 0;
|
|
613
|
+
}
|
|
614
|
+
let cleaned = 0;
|
|
615
|
+
for (const branchName of staleBranches) {
|
|
616
|
+
if (branchName === currentBranch) {
|
|
617
|
+
await this.detachFromResidualTempBranch(projectDir, branchName);
|
|
618
|
+
}
|
|
619
|
+
await this.runCommand("git", ["-C", projectDir, "branch", "-D", branchName]);
|
|
620
|
+
cleaned += 1;
|
|
621
|
+
await this.logger.info("repo.inplace.precheck.temp_branch_removed", {
|
|
622
|
+
taskId: task.taskId,
|
|
623
|
+
taskType: task.taskType,
|
|
624
|
+
runId: task.activeRunId,
|
|
625
|
+
projectDir,
|
|
626
|
+
branchName,
|
|
627
|
+
wasCurrentBranch: branchName === currentBranch
|
|
628
|
+
});
|
|
629
|
+
}
|
|
630
|
+
return cleaned;
|
|
631
|
+
}
|
|
632
|
+
async detachFromResidualTempBranch(projectDir, branchName) {
|
|
633
|
+
const upstreamRef = await this.readCommand("git", ["-C", projectDir, "rev-parse", "--abbrev-ref", "--symbolic-full-name", `${branchName}@{upstream}`])
|
|
634
|
+
.then((value) => value.trim())
|
|
635
|
+
.catch(() => undefined);
|
|
636
|
+
if (upstreamRef) {
|
|
637
|
+
await this.runCommand("git", ["-C", projectDir, "checkout", "--detach", upstreamRef]);
|
|
638
|
+
return;
|
|
639
|
+
}
|
|
640
|
+
const parentCommit = await this.readCommand("git", ["-C", projectDir, "rev-parse", `${branchName}^`])
|
|
641
|
+
.then((value) => value.trim())
|
|
642
|
+
.catch(() => undefined);
|
|
643
|
+
if (parentCommit) {
|
|
644
|
+
await this.runCommand("git", ["-C", projectDir, "checkout", "--detach", parentCommit]);
|
|
645
|
+
return;
|
|
646
|
+
}
|
|
647
|
+
throw new Error(`Unable to detach from residual temporary branch ${branchName} in ${projectDir}.`);
|
|
648
|
+
}
|
|
649
|
+
// Assembles and records the execution context for a task run.
// Sequence (order matters — the store transition must precede the ready event,
// and both must precede returning the context to the caller):
//   1. log the in-place precheck start;
//   2. assemble the base context from the execution context assembler;
//   3. for in-place, repo-managed workspaces, purge residual temp branches
//      left by earlier runs;
//   4. transition the task to "preparing" with resolved address metadata;
//   5. append an "execution.context.ready" event;
//   6. log the resolved execution mode (and, for in-place, precheck pass);
//   7. return the base context extended with the optional abort signal and an
//      onStatusUpdate callback that forwards runner updates into the store.
async prepareExecutionContext(task, harness, taskPackage, abortSignal) {
    await this.logger.info("repo.inplace.precheck_started", {
        taskId: task.taskId,
        taskType: task.taskType,
        runId: task.activeRunId,
        stage: "execution_precheck"
    });
    const baseContext = await this.executionContextAssembler.assemble(task, harness, taskPackage);
    // Residual temp branches from a prior interrupted run are cleaned before
    // this run touches the in-place workspace.
    if (baseContext.addresses.mode === "inplace" && await this.isRepoManagedWorkspace(baseContext.addresses.workspaceDir)) {
        await this.cleanupResidualRepoManagedTempBranches(task, baseContext.addresses.workspaceDir);
    }
    const visibleRepoDir = getVisibleRepoDir(baseContext.addresses);
    // Options for the "preparing" transition; optional fields are only added
    // when present so the stored record carries no null placeholders.
    const transitionOptions = {
        runStatus: "bootstrapping",
        health: "waiting_start",
        eventType: "execution.context.ready",
        heartbeatAt: new Date().toISOString(),
        addresses: {
            workspaceDir: baseContext.addresses.workspaceDir,
            ...(visibleRepoDir ? { visibleRepoDir } : {}),
            ...(baseContext.configuredExecutionMode ? { configuredExecutionMode: baseContext.configuredExecutionMode } : {}),
            resolvedExecutionMode: baseContext.addresses.mode
        },
        sandboxMode: baseContext.sandboxMode,
        approvalPolicy: baseContext.approvalPolicy
    };
    if (baseContext.workspaceSkillMountDir) {
        transitionOptions.workspaceSkillMountDir = baseContext.workspaceSkillMountDir;
    }
    if (baseContext.mountedSkills && baseContext.mountedSkills.length > 0) {
        transitionOptions.mountedSkills = baseContext.mountedSkills;
    }
    if (task.activeRunId) {
        transitionOptions.runId = task.activeRunId;
    }
    if (task.sourceEventId) {
        transitionOptions.sourceEventId = task.sourceEventId;
    }
    await this.store.transitionTask(task.taskId, "preparing", `Execution context prepared at ${baseContext.addresses.workspaceDir}.`, transitionOptions);
    await this.store.appendEvent(task.taskId, "execution.context.ready", `Execution harness resolved with workspace ${baseContext.addresses.workspaceDir}, visible repo ${visibleRepoDir ?? "hidden"}, resolved mode ${baseContext.addresses.mode}, repo memory ${baseContext.repoMemory?.exists ? "loaded" : "missing"}.`, this.buildTaskEventOptions(task.activeRunId, task.sourceEventId));
    // Log payload mirroring the transition metadata; again, optional fields
    // are appended conditionally below rather than logged as null.
    const executionLogContext = {
        taskId: task.taskId,
        taskType: task.taskType,
        taskRootDir: baseContext.taskRootDir,
        addressesWorkspaceDir: baseContext.addresses.workspaceDir,
        addressesArtifactDir: baseContext.addresses.artifactDir,
        cleanupRequired: baseContext.cleanupRequired ?? false,
        workspaceSkillMountDir: baseContext.workspaceSkillMountDir ?? null,
        mountedSkillCount: baseContext.mountedSkills?.length ?? 0,
        repoMemoryExists: baseContext.repoMemory?.exists ?? false,
        repoMemoryFilePath: baseContext.repoMemory?.memoryFilePath ?? null
    };
    if (task.activeRunId) {
        executionLogContext.runId = task.activeRunId;
    }
    if (visibleRepoDir) {
        executionLogContext.addressesVisibleRepoDir = visibleRepoDir;
    }
    executionLogContext.addressesResolvedExecutionMode = baseContext.addresses.mode;
    if (baseContext.configuredExecutionMode) {
        executionLogContext.addressesConfiguredExecutionMode = baseContext.configuredExecutionMode;
    }
    await this.logger.info("repo.execution_mode_resolved", executionLogContext);
    if (baseContext.addresses.mode === "inplace") {
        await this.logger.info("repo.inplace.precheck_passed", {
            taskId: task.taskId,
            taskType: task.taskType,
            runId: task.activeRunId,
            addressesVisibleRepoDir: visibleRepoDir,
            addressesResolvedExecutionMode: baseContext.addresses.mode
        });
    }
    return {
        ...baseContext,
        ...(abortSignal ? { abortSignal } : {}),
        // Runner-facing callback: translates each status update into a store
        // transition, copying only the fields the update actually carries.
        onStatusUpdate: async (update) => {
            const transitionOptions = {
                ...this.buildTaskEventOptions(task.activeRunId, task.sourceEventId),
                ...(update.runStatus ? { runStatus: update.runStatus } : {}),
                ...(update.health ? { health: update.health } : {}),
                ...(update.eventType ? { eventType: update.eventType } : {}),
                ...(update.sdkThreadId ? { sdkThreadId: update.sdkThreadId } : {}),
                ...(update.agentMessagePreview ? { agentMessagePreview: update.agentMessagePreview } : {}),
                ...(update.incrementProgressCounter ? { incrementProgressCounter: true } : {}),
                ...(update.incrementCommandCount ? { incrementCommandCount: true } : {}),
                ...(update.incrementFileChangeCount ? { incrementFileChangeCount: true } : {}),
                ...(update.heartbeatAt ? { heartbeatAt: update.heartbeatAt } : {})
            };
            await this.store.transitionTask(task.taskId, update.status, update.detail, transitionOptions);
        }
    };
}
|
|
741
|
+
// Request preparation records execution intent before the runner starts streaming SDK events.
|
|
742
|
+
async prepareExecutionRequest(task, context, taskPackage, harness) {
|
|
743
|
+
await this.store.appendEvent(task.taskId, "task.package.ready", "Task package loaded for execution.", this.buildTaskEventOptions(task.activeRunId, task.sourceEventId));
|
|
744
|
+
await context.onStatusUpdate?.({
|
|
745
|
+
status: "running",
|
|
746
|
+
detail: "Codex execution started.",
|
|
747
|
+
runStatus: "sdk_starting",
|
|
748
|
+
health: "waiting_start",
|
|
749
|
+
eventType: "execution.started",
|
|
750
|
+
heartbeatAt: new Date().toISOString()
|
|
751
|
+
});
|
|
752
|
+
await this.logger.info("task.execution.started", {
|
|
753
|
+
taskId: task.taskId,
|
|
754
|
+
taskType: task.taskType,
|
|
755
|
+
sourceEventId: task.sourceEventId,
|
|
756
|
+
runId: task.activeRunId,
|
|
757
|
+
status: "running",
|
|
758
|
+
taskRootDir: context.taskRootDir,
|
|
759
|
+
addressesWorkspaceDir: context.addresses.workspaceDir,
|
|
760
|
+
addressesArtifactDir: context.addresses.artifactDir,
|
|
761
|
+
sandboxMode: context.sandboxMode,
|
|
762
|
+
approvalPolicy: context.approvalPolicy,
|
|
763
|
+
workspaceSkillMountDir: context.workspaceSkillMountDir ?? null,
|
|
764
|
+
mountedSkillCount: context.mountedSkills?.length ?? 0,
|
|
765
|
+
repoMemoryExists: context.repoMemory?.exists ?? false,
|
|
766
|
+
repoMemoryFilePath: context.repoMemory?.memoryFilePath ?? null
|
|
767
|
+
});
|
|
768
|
+
return {
|
|
769
|
+
task: {
|
|
770
|
+
...task,
|
|
771
|
+
status: "running"
|
|
772
|
+
},
|
|
773
|
+
taskPackage,
|
|
774
|
+
harness
|
|
775
|
+
};
|
|
776
|
+
}
|
|
777
|
+
// Orchestrator re-validates terminal task results even if the underlying runner was swapped out in tests, fallback mode, or future providers.
|
|
778
|
+
// This keeps harness delivery guarantees enforced at the scheduling boundary instead of trusting every runner implementation to do it correctly.
|
|
779
|
+
async persistRepoMemoryIfChanged(task, context) {
|
|
780
|
+
if (!context.repoMemory) {
|
|
781
|
+
return;
|
|
782
|
+
}
|
|
783
|
+
const { readFile } = await import("node:fs/promises");
|
|
784
|
+
const { createHash } = await import("node:crypto");
|
|
785
|
+
const currentContent = await readFile(context.repoMemory.memoryFilePath, "utf8").catch(() => undefined);
|
|
786
|
+
if (typeof currentContent !== "string") {
|
|
787
|
+
return;
|
|
788
|
+
}
|
|
789
|
+
const currentHash = createHash("sha256").update(currentContent, "utf8").digest("hex");
|
|
790
|
+
if (context.repoMemory.baselineContentHash && currentHash === context.repoMemory.baselineContentHash) {
|
|
791
|
+
return;
|
|
792
|
+
}
|
|
793
|
+
const persistedMemory = await this.repoMemoryService.persist({
|
|
794
|
+
taskType: context.repoMemory.taskType,
|
|
795
|
+
repoAlias: context.repoMemory.repoAlias,
|
|
796
|
+
content: currentContent,
|
|
797
|
+
sourceTaskId: task.taskId
|
|
798
|
+
});
|
|
799
|
+
context.repoMemory = persistedMemory;
|
|
800
|
+
await this.logger.writeRepoMemorySnapshot({
|
|
801
|
+
ok: true,
|
|
802
|
+
taskId: task.taskId,
|
|
803
|
+
taskType: task.taskType,
|
|
804
|
+
repoAlias: persistedMemory.repoAlias,
|
|
805
|
+
memoryFilePath: persistedMemory.memoryFilePath,
|
|
806
|
+
updatedAt: new Date().toISOString()
|
|
807
|
+
});
|
|
808
|
+
await this.logger.info("repo.memory.updated", {
|
|
809
|
+
taskId: task.taskId,
|
|
810
|
+
taskType: task.taskType,
|
|
811
|
+
runId: task.activeRunId,
|
|
812
|
+
repoAlias: context.repoMemory.repoAlias,
|
|
813
|
+
repoMemoryFilePath: context.repoMemory.memoryFilePath
|
|
814
|
+
});
|
|
815
|
+
}
|
|
816
|
+
validateTaskResultContract(result, context) {
|
|
817
|
+
if (result.status === "failed") {
|
|
818
|
+
if (!result.error) {
|
|
819
|
+
const error = new Error("Task result contract mismatch: failed result must include error.");
|
|
820
|
+
error.failureCategory = "schema_error";
|
|
821
|
+
throw error;
|
|
822
|
+
}
|
|
823
|
+
return;
|
|
824
|
+
}
|
|
825
|
+
if (!result.resultPath) {
|
|
826
|
+
const error = new Error("Task result contract mismatch: completed result must include resultPath.");
|
|
827
|
+
error.failureCategory = "schema_error";
|
|
828
|
+
throw error;
|
|
829
|
+
}
|
|
830
|
+
const resolvedArtifactDir = resolve(context.addresses.artifactDir);
|
|
831
|
+
const resolvedResultPath = resolveTaskResultPath(result.resultPath, context.addresses.artifactDir);
|
|
832
|
+
const relativeResultPath = relative(resolvedArtifactDir, resolvedResultPath);
|
|
833
|
+
const isWithinArtifactDir = relativeResultPath.length > 0 && !relativeResultPath.startsWith(`..${sep}`) && relativeResultPath !== "..";
|
|
834
|
+
if (!isWithinArtifactDir) {
|
|
835
|
+
const error = new Error(`Task result contract mismatch: resultPath ${result.resultPath} is outside artifactDir ${context.addresses.artifactDir}.`);
|
|
836
|
+
error.failureCategory = "schema_error";
|
|
837
|
+
throw error;
|
|
838
|
+
}
|
|
839
|
+
}
|
|
840
|
+
// Copy-mode tasks must land their business edits inside the task workspace and must not leak edits back into the source repository.
|
|
841
|
+
async validateExecutionIsolation(context) {
|
|
842
|
+
if (context.addresses.mode !== "copy") {
|
|
843
|
+
return;
|
|
844
|
+
}
|
|
845
|
+
const sourceRepoChanges = await this.readGitStatusEntries(context.addresses.sourceDir);
|
|
846
|
+
const sourceRepoBusinessChanges = filterBusinessChangeEntries(sourceRepoChanges);
|
|
847
|
+
if (sourceRepoBusinessChanges.length > 0) {
|
|
848
|
+
const error = new Error(`Copy-mode execution isolation failed: source repository ${context.addresses.sourceDir} was modified during task execution: ${sourceRepoBusinessChanges.map((entry) => entry.raw).join(" | ")}.`);
|
|
849
|
+
error.failureCategory = "environment_error";
|
|
850
|
+
throw error;
|
|
851
|
+
}
|
|
852
|
+
}
|
|
853
|
+
// Runtime emits a stable patch artifact whenever a copy-mode task changed business files.
|
|
854
|
+
// This keeps downstream review artifacts aligned with the actual repository diff instead of relying on result prose.
|
|
855
|
+
async writePatchArtifactIfNeeded(context) {
|
|
856
|
+
const patchSourceDir = context.addresses.workspaceDir;
|
|
857
|
+
let patchContent = "";
|
|
858
|
+
if (await this.isRepoManagedWorkspace(patchSourceDir)) {
|
|
859
|
+
const workspaceReports = await this.readRepoWorkspaceStatusReports(patchSourceDir);
|
|
860
|
+
const workspaceBusinessReports = workspaceReports
|
|
861
|
+
.map((report) => ({
|
|
862
|
+
projectPath: report.projectPath,
|
|
863
|
+
entries: filterBusinessChangeEntries(report.entries)
|
|
864
|
+
}))
|
|
865
|
+
.filter((report) => report.entries.length > 0);
|
|
866
|
+
if (workspaceBusinessReports.length === 0) {
|
|
867
|
+
return undefined;
|
|
868
|
+
}
|
|
869
|
+
patchContent = await this.buildRepoManagedBusinessPatchContent(patchSourceDir, workspaceBusinessReports);
|
|
870
|
+
}
|
|
871
|
+
else {
|
|
872
|
+
const workspaceChanges = await this.readGitStatusEntries(patchSourceDir);
|
|
873
|
+
const workspaceBusinessChanges = filterBusinessChangeEntries(workspaceChanges);
|
|
874
|
+
if (workspaceBusinessChanges.length === 0) {
|
|
875
|
+
return undefined;
|
|
876
|
+
}
|
|
877
|
+
patchContent = await this.buildBusinessPatchContent(patchSourceDir, workspaceBusinessChanges);
|
|
878
|
+
}
|
|
879
|
+
await mkdir(context.addresses.artifactDir, { recursive: true });
|
|
880
|
+
const patchArtifactPath = join(context.addresses.artifactDir, "patch.diff");
|
|
881
|
+
await writeFile(patchArtifactPath, patchContent, "utf8");
|
|
882
|
+
return patchArtifactPath;
|
|
883
|
+
}
|
|
884
|
+
// Tears down a copy-mode task workspace while preserving the artifact tree.
// Only acts when the workspace directory IS the task root (otherwise nothing
// is removed). Sequence: snapshot the root's entries, move the artifact root
// to a sibling temp dir, remove the workspace (git worktree remove, falling
// back to a plain recursive delete), recreate the root, and move the
// artifacts back. The finally block scrubs leftover entries, .git/.gitignore,
// and the temp dir regardless of how the try body ended.
async cleanupCopyWorkspace(context) {
    if (context.addresses.mode !== "copy") {
        return;
    }
    const workspaceDir = context.addresses.workspaceDir;
    const taskRootDir = context.taskRootDir;
    // Safety: only clean when workspace and task root are the same directory.
    if (resolve(workspaceDir) !== resolve(taskRootDir)) {
        return;
    }
    // Artifact layout: artifactDir sits one level under an artifact root that
    // is a direct child of the task root (derived from dirname/basename here).
    const artifactRootName = basename(dirname(context.addresses.artifactDir));
    const artifactRootDir = join(taskRootDir, artifactRootName);
    // Temp dir is created NEXT TO the task root so rename() stays on-device.
    const tempRoot = await mkdtemp(join(dirname(taskRootDir), `${basename(taskRootDir)}-preserve-`));
    const preservedArtifactRoot = join(tempRoot, artifactRootName);
    // Entries are listed BEFORE any mutation so the finally block knows what
    // existed in the original workspace.
    const workspaceEntries = await readdir(taskRootDir, { withFileTypes: true });
    try {
        await rename(artifactRootDir, preservedArtifactRoot);
        try {
            // NOTE(review): runs `worktree remove` with -C set to the worktree
            // being removed; git may require running from the main repo —
            // the catch below covers that case with a plain delete. Confirm.
            await execFileAsync("git", ["-C", workspaceDir, "worktree", "remove", "--force", workspaceDir]);
        }
        catch {
            await rm(taskRootDir, { recursive: true, force: true });
        }
        await mkdir(taskRootDir, { recursive: true });
        await rename(preservedArtifactRoot, artifactRootDir);
    }
    finally {
        // Best-effort scrub: remove every original entry except the artifact
        // root (already restored above) plus git metadata, then the temp dir.
        for (const entry of workspaceEntries) {
            if (entry.name === artifactRootName || entry.name === ".git" || entry.name === ".gitignore") {
                continue;
            }
            await rm(join(taskRootDir, entry.name), { recursive: true, force: true });
        }
        await rm(join(taskRootDir, ".git"), { recursive: true, force: true });
        await rm(join(taskRootDir, ".gitignore"), { force: true });
        await rm(tempRoot, { recursive: true, force: true });
    }
}
|
|
921
|
+
async readGitStatusEntries(repoPath) {
|
|
922
|
+
const { stdout } = await execFileAsync("git", ["-C", repoPath, "status", "--porcelain=v1", "--untracked-files=all"]);
|
|
923
|
+
return parseGitStatusOutput(stdout);
|
|
924
|
+
}
|
|
925
|
+
async readRepoWorkspaceStatusReports(rootPath) {
|
|
926
|
+
const { stdout } = await execFileAsync("repo", [
|
|
927
|
+
"forall",
|
|
928
|
+
"-pc",
|
|
929
|
+
"git status --porcelain=v1 --untracked-files=all"
|
|
930
|
+
], {
|
|
931
|
+
cwd: rootPath,
|
|
932
|
+
maxBuffer: 20 * 1024 * 1024
|
|
933
|
+
});
|
|
934
|
+
return parseRepoProjectStatusReports(stdout);
|
|
935
|
+
}
|
|
936
|
+
async buildBusinessPatchContent(repoPath, entries) {
|
|
937
|
+
const trackedPaths = collectBusinessChangePaths(entries.filter((entry) => !entry.isUntracked));
|
|
938
|
+
const untrackedPaths = collectBusinessChangePaths(entries.filter((entry) => entry.isUntracked));
|
|
939
|
+
const chunks = [];
|
|
940
|
+
if (trackedPaths.length > 0) {
|
|
941
|
+
const { stdout } = await execFileAsync("git", [
|
|
942
|
+
"-C",
|
|
943
|
+
repoPath,
|
|
944
|
+
"diff",
|
|
945
|
+
"--binary",
|
|
946
|
+
"--",
|
|
947
|
+
...trackedPaths
|
|
948
|
+
], { maxBuffer: 20 * 1024 * 1024 });
|
|
949
|
+
if (stdout.trim()) {
|
|
950
|
+
chunks.push(stdout.trimEnd());
|
|
951
|
+
}
|
|
952
|
+
}
|
|
953
|
+
for (const relativePath of untrackedPaths) {
|
|
954
|
+
const diff = await this.buildUntrackedFilePatch(repoPath, relativePath);
|
|
955
|
+
if (diff.trim()) {
|
|
956
|
+
chunks.push(diff.trimEnd());
|
|
957
|
+
}
|
|
958
|
+
}
|
|
959
|
+
return chunks.length > 0 ? `${chunks.join("\n")}\n` : "";
|
|
960
|
+
}
|
|
961
|
+
async buildRepoManagedBusinessPatchContent(rootPath, reports) {
|
|
962
|
+
const chunks = [];
|
|
963
|
+
for (const report of reports) {
|
|
964
|
+
const projectDir = report.projectPath === "." ? rootPath : join(rootPath, report.projectPath);
|
|
965
|
+
const projectPatch = await this.buildBusinessPatchContent(projectDir, report.entries);
|
|
966
|
+
if (!projectPatch.trim()) {
|
|
967
|
+
continue;
|
|
968
|
+
}
|
|
969
|
+
chunks.push(this.prefixPatchPaths(projectPatch, report.projectPath).trimEnd());
|
|
970
|
+
}
|
|
971
|
+
return chunks.length > 0 ? `${chunks.join("\n")}\n` : "";
|
|
972
|
+
}
|
|
973
|
+
prefixPatchPaths(patchContent, projectPath) {
|
|
974
|
+
const normalizedProjectPath = projectPath.replace(/\\/gu, "/").replace(/^\.\/+/u, "").replace(/\/$/u, "");
|
|
975
|
+
if (!normalizedProjectPath || normalizedProjectPath === ".") {
|
|
976
|
+
return patchContent;
|
|
977
|
+
}
|
|
978
|
+
const prefix = (value) => `${normalizedProjectPath}/${value}`;
|
|
979
|
+
return patchContent
|
|
980
|
+
.split(/\r?\n/gu)
|
|
981
|
+
.map((line) => {
|
|
982
|
+
if (line.startsWith("diff --git a/")) {
|
|
983
|
+
const match = line.match(/^diff --git a\/(.+) b\/(.+)$/u);
|
|
984
|
+
const fromPath = match?.[1];
|
|
985
|
+
const toPath = match?.[2];
|
|
986
|
+
if (fromPath && toPath) {
|
|
987
|
+
return `diff --git a/${prefix(fromPath)} b/${prefix(toPath)}`;
|
|
988
|
+
}
|
|
989
|
+
}
|
|
990
|
+
if (line.startsWith("--- a/")) {
|
|
991
|
+
return `--- a/${prefix(line.slice("--- a/".length))}`;
|
|
992
|
+
}
|
|
993
|
+
if (line.startsWith("+++ b/")) {
|
|
994
|
+
return `+++ b/${prefix(line.slice("+++ b/".length))}`;
|
|
995
|
+
}
|
|
996
|
+
if (line.startsWith("rename from ")) {
|
|
997
|
+
return `rename from ${prefix(line.slice("rename from ".length))}`;
|
|
998
|
+
}
|
|
999
|
+
if (line.startsWith("rename to ")) {
|
|
1000
|
+
return `rename to ${prefix(line.slice("rename to ".length))}`;
|
|
1001
|
+
}
|
|
1002
|
+
if (line.startsWith("copy from ")) {
|
|
1003
|
+
return `copy from ${prefix(line.slice("copy from ".length))}`;
|
|
1004
|
+
}
|
|
1005
|
+
if (line.startsWith("copy to ")) {
|
|
1006
|
+
return `copy to ${prefix(line.slice("copy to ".length))}`;
|
|
1007
|
+
}
|
|
1008
|
+
if (line.startsWith("Binary files a/")) {
|
|
1009
|
+
const match = line.match(/^Binary files a\/(.+) and b\/(.+) differ$/u);
|
|
1010
|
+
const fromPath = match?.[1];
|
|
1011
|
+
const toPath = match?.[2];
|
|
1012
|
+
if (fromPath && toPath) {
|
|
1013
|
+
return `Binary files a/${prefix(fromPath)} and b/${prefix(toPath)} differ`;
|
|
1014
|
+
}
|
|
1015
|
+
}
|
|
1016
|
+
return line;
|
|
1017
|
+
})
|
|
1018
|
+
.join("\n");
|
|
1019
|
+
}
|
|
1020
|
+
async buildUntrackedFilePatch(repoPath, relativePath) {
|
|
1021
|
+
try {
|
|
1022
|
+
const { stdout } = await execFileAsync("git", [
|
|
1023
|
+
"diff",
|
|
1024
|
+
"--binary",
|
|
1025
|
+
"--no-index",
|
|
1026
|
+
"--",
|
|
1027
|
+
"/dev/null",
|
|
1028
|
+
relativePath
|
|
1029
|
+
], {
|
|
1030
|
+
cwd: repoPath,
|
|
1031
|
+
maxBuffer: 20 * 1024 * 1024
|
|
1032
|
+
});
|
|
1033
|
+
return stdout;
|
|
1034
|
+
}
|
|
1035
|
+
catch (error) {
|
|
1036
|
+
if (typeof error === "object" && error && "stdout" in error) {
|
|
1037
|
+
return String(error.stdout ?? "");
|
|
1038
|
+
}
|
|
1039
|
+
throw error;
|
|
1040
|
+
}
|
|
1041
|
+
}
|
|
1042
|
+
async isRepoManagedWorkspace(rootPath) {
|
|
1043
|
+
try {
|
|
1044
|
+
await access(join(rootPath, ".repo"));
|
|
1045
|
+
return true;
|
|
1046
|
+
}
|
|
1047
|
+
catch {
|
|
1048
|
+
return false;
|
|
1049
|
+
}
|
|
1050
|
+
}
|
|
1051
|
+
async runCommand(command, args, options) {
|
|
1052
|
+
try {
|
|
1053
|
+
await execFileAsync(command, args, {
|
|
1054
|
+
cwd: options?.cwd,
|
|
1055
|
+
encoding: "utf8",
|
|
1056
|
+
maxBuffer: EXEC_MAX_BUFFER
|
|
1057
|
+
});
|
|
1058
|
+
}
|
|
1059
|
+
catch (error) {
|
|
1060
|
+
throw new Error(this.renderExecError(command, args, error));
|
|
1061
|
+
}
|
|
1062
|
+
}
|
|
1063
|
+
async readCommand(command, args, options) {
|
|
1064
|
+
try {
|
|
1065
|
+
const { stdout, stderr } = await execFileAsync(command, args, {
|
|
1066
|
+
cwd: options?.cwd,
|
|
1067
|
+
encoding: "utf8",
|
|
1068
|
+
maxBuffer: EXEC_MAX_BUFFER
|
|
1069
|
+
});
|
|
1070
|
+
return `${String(stdout)}${String(stderr)}`.trim();
|
|
1071
|
+
}
|
|
1072
|
+
catch (error) {
|
|
1073
|
+
throw new Error(this.renderExecError(command, args, error));
|
|
1074
|
+
}
|
|
1075
|
+
}
|
|
1076
|
+
/**
 * Formats a child-process failure into one human-readable line:
 * `<command> <args> failed[: <message>][ | <stdout stderr>]`.
 *
 * @param {string} command - Executable that failed.
 * @param {string[]} args - Arguments it was invoked with.
 * @param {unknown} error - The rejection from execFile (or anything else).
 * @returns {string} A single-line description of the failure.
 */
renderExecError(command, args, error) {
    const commandLine = `${command} ${args.join(" ")}`;
    if (error && typeof error === "object") {
        const failure = error;
        // Fold stdout/stderr (when present) into one trimmed output blob.
        const output = [failure.stdout, failure.stderr].filter(Boolean).join(" ").trim();
        const messagePart = failure.message ? `: ${failure.message}` : "";
        const outputPart = output ? ` | ${output}` : "";
        return `${commandLine} failed${messagePart}${outputPart}`;
    }
    return `${commandLine} failed: ${String(error)}`;
}
|
|
1084
|
+
/**
 * Maps an arbitrary error to a task-run failure category.
 *
 * Resolution order:
 *   1. An explicit truthy `failureCategory` attached to the error by
 *      upstream code wins.
 *   2. Error messages mentioning a timeout map to "timeout".
 *   3. Everything else (including abort/aborted messages, which were
 *      previously special-cased to the same value) maps to "unknown".
 *
 * Fix: the original contained two back-to-back, identical
 * `error instanceof Error` guards; the second one's abort branch returned
 * "unknown", which the final fallback already returns — dead code. The
 * guards are merged and the redundant branch removed; behavior is
 * unchanged for every input.
 *
 * @param {unknown} error - The failure raised by a task run.
 * @returns {string} The failure category (e.g. "timeout", "unknown").
 */
resolveFailureCategory(error) {
    // Prefer a category explicitly attached by the code that raised the error.
    if (typeof error === "object" && error && "failureCategory" in error) {
        const failureCategory = error.failureCategory;
        if (failureCategory) {
            return failureCategory;
        }
    }
    if (error instanceof Error) {
        const normalized = error.message.toLowerCase();
        if (normalized.includes("timeout") || normalized.includes("timed out")) {
            return "timeout";
        }
        // Abort/aborted messages intentionally fall through to "unknown".
    }
    return "unknown";
}
|
|
1105
|
+
buildTaskEventOptions(runId, sourceEventId) {
|
|
1106
|
+
const options = {};
|
|
1107
|
+
if (runId) {
|
|
1108
|
+
options.runId = runId;
|
|
1109
|
+
}
|
|
1110
|
+
if (sourceEventId) {
|
|
1111
|
+
options.sourceEventId = sourceEventId;
|
|
1112
|
+
}
|
|
1113
|
+
return options;
|
|
1114
|
+
}
|
|
1115
|
+
buildCompleteTaskRunOptions(failureCategory) {
|
|
1116
|
+
if (!failureCategory) {
|
|
1117
|
+
return {};
|
|
1118
|
+
}
|
|
1119
|
+
return { failureCategory };
|
|
1120
|
+
}
|
|
1121
|
+
}
|
|
1122
|
+
//# sourceMappingURL=task-orchestrator.js.map
|