task-while 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +322 -0
- package/bin/task-while.mjs +22 -0
- package/package.json +72 -0
- package/src/agents/claude.ts +175 -0
- package/src/agents/codex.ts +231 -0
- package/src/agents/provider-options.ts +45 -0
- package/src/agents/types.ts +69 -0
- package/src/batch/config.ts +109 -0
- package/src/batch/discovery.ts +35 -0
- package/src/batch/provider.ts +79 -0
- package/src/commands/batch.ts +266 -0
- package/src/commands/run.ts +270 -0
- package/src/core/engine-helpers.ts +114 -0
- package/src/core/engine-outcomes.ts +166 -0
- package/src/core/engine.ts +223 -0
- package/src/core/orchestrator-helpers.ts +52 -0
- package/src/core/orchestrator-integrate-resume.ts +149 -0
- package/src/core/orchestrator-review-resume.ts +228 -0
- package/src/core/orchestrator-task-attempt.ts +257 -0
- package/src/core/orchestrator.ts +99 -0
- package/src/core/runtime.ts +175 -0
- package/src/core/task-topology.ts +85 -0
- package/src/index.ts +121 -0
- package/src/prompts/implementer.ts +18 -0
- package/src/prompts/reviewer.ts +26 -0
- package/src/runtime/fs-runtime.ts +209 -0
- package/src/runtime/git.ts +137 -0
- package/src/runtime/github-pr-snapshot-decode.ts +307 -0
- package/src/runtime/github-pr-snapshot-queries.ts +137 -0
- package/src/runtime/github-pr-snapshot.ts +139 -0
- package/src/runtime/github.ts +232 -0
- package/src/runtime/path-layout.ts +13 -0
- package/src/runtime/workspace-resolver.ts +125 -0
- package/src/schema/index.ts +127 -0
- package/src/schema/model.ts +233 -0
- package/src/schema/shared.ts +93 -0
- package/src/task-sources/openspec/cli-json.ts +79 -0
- package/src/task-sources/openspec/context-files.ts +121 -0
- package/src/task-sources/openspec/parse-tasks-md.ts +57 -0
- package/src/task-sources/openspec/session.ts +235 -0
- package/src/task-sources/openspec/source.ts +59 -0
- package/src/task-sources/registry.ts +22 -0
- package/src/task-sources/spec-kit/parse-tasks-md.ts +48 -0
- package/src/task-sources/spec-kit/session.ts +174 -0
- package/src/task-sources/spec-kit/source.ts +30 -0
- package/src/task-sources/types.ts +47 -0
- package/src/types.ts +29 -0
- package/src/utils/fs.ts +31 -0
- package/src/workflow/config.ts +127 -0
- package/src/workflow/direct-preset.ts +44 -0
- package/src/workflow/finalize-task-checkbox.ts +24 -0
- package/src/workflow/preset.ts +86 -0
- package/src/workflow/pull-request-preset.ts +312 -0
- package/src/workflow/remote-reviewer.ts +243 -0
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
import { readFile } from 'node:fs/promises'
|
|
2
|
+
import path from 'node:path'
|
|
3
|
+
|
|
4
|
+
import Ajv from 'ajv'
|
|
5
|
+
import * as fsExtra from 'fs-extra'
|
|
6
|
+
import { z } from 'zod'
|
|
7
|
+
|
|
8
|
+
import { loadBatchConfig, type BatchConfig } from '../batch/config'
|
|
9
|
+
import { discoverBatchFiles } from '../batch/discovery'
|
|
10
|
+
import {
|
|
11
|
+
createBatchStructuredOutputProvider,
|
|
12
|
+
type BatchStructuredOutputProvider,
|
|
13
|
+
} from '../batch/provider'
|
|
14
|
+
import { parseWithSchema, uniqueStringArray } from '../schema/shared'
|
|
15
|
+
import { writeJsonAtomic } from '../utils/fs'
|
|
16
|
+
|
|
17
|
+
// Zod schema for the persisted batch state file (state.json).
// All three buckets are validated as de-duplicated string arrays;
// .strict() rejects unknown keys so a hand-edited or corrupted file
// fails loudly instead of being silently accepted.
const batchStateSchema = z
  .object({
    failed: uniqueStringArray('failed'),
    inProgress: uniqueStringArray('inProgress'),
    pending: uniqueStringArray('pending'),
  })
  .strict()

// Schema for results.json: any plain (non-array, non-null) object keyed
// by file path. Values are intentionally left unvalidated here — each
// per-file output is checked against the user-supplied JSON schema via
// Ajv inside runBatchCommand.
const batchResultsSchema = z.custom<Record<string, unknown>>(
  (value) =>
    typeof value === 'object' && value !== null && !Array.isArray(value),
  {
    message: 'results must be an object',
  },
)
|
|
32
|
+
|
|
33
|
+
// Tri-bucket batch progress, persisted to state.json between runs.
export interface BatchState {
  failed: string[] // files whose last attempt threw or failed schema validation
  inProgress: string[] // files picked up but not finished (crash-recovery marker)
  pending: string[] // files waiting to be processed, in queue order
}

// Arguments accepted by runBatchCommand.
export interface RunBatchCommandInput {
  configPath: string // path to the batch config file
  cwd?: string // resolution base; defaults to process.cwd()
  verbose?: boolean // when true, per-file failures are written to stderr
}

// Final snapshot returned once the batch loop exits.
export interface RunBatchCommandResult {
  config: BatchConfig
  failedFiles: string[] // files still marked failed at exit
  processedFiles: string[] // files completed during this invocation
  results: Record<string, unknown> // validated outputs keyed by file path
  state: BatchState
}
|
|
52
|
+
|
|
53
|
+
function createEmptyState(): BatchState {
|
|
54
|
+
return {
|
|
55
|
+
failed: [],
|
|
56
|
+
inProgress: [],
|
|
57
|
+
pending: [],
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
function unique(items: string[]) {
|
|
62
|
+
return [...new Set(items)]
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
async function readJsonFileIfExists(filePath: string) {
|
|
66
|
+
const exists = await fsExtra.pathExists(filePath)
|
|
67
|
+
if (!exists) {
|
|
68
|
+
return null
|
|
69
|
+
}
|
|
70
|
+
const raw = await readFile(filePath, 'utf8')
|
|
71
|
+
const value: unknown = JSON.parse(raw)
|
|
72
|
+
return value
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
async function loadBatchState(filePath: string) {
|
|
76
|
+
const value = await readJsonFileIfExists(filePath)
|
|
77
|
+
if (value === null) {
|
|
78
|
+
return createEmptyState()
|
|
79
|
+
}
|
|
80
|
+
return parseWithSchema(batchStateSchema, value)
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
async function loadBatchResults(filePath: string) {
|
|
84
|
+
const value = await readJsonFileIfExists(filePath)
|
|
85
|
+
if (value === null) {
|
|
86
|
+
return {}
|
|
87
|
+
}
|
|
88
|
+
return parseWithSchema(batchResultsSchema, value)
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
function mergeBatchState(input: {
|
|
92
|
+
discoveredFiles: string[]
|
|
93
|
+
results: Record<string, unknown>
|
|
94
|
+
state: BatchState
|
|
95
|
+
}): BatchState {
|
|
96
|
+
const discovered = new Set(input.discoveredFiles)
|
|
97
|
+
const completed = new Set(Object.keys(input.results))
|
|
98
|
+
const failed = unique(input.state.failed).filter(
|
|
99
|
+
(filePath) => discovered.has(filePath) && !completed.has(filePath),
|
|
100
|
+
)
|
|
101
|
+
const failedSet = new Set(failed)
|
|
102
|
+
const pending = unique([
|
|
103
|
+
...input.state.inProgress,
|
|
104
|
+
...input.state.pending,
|
|
105
|
+
]).filter(
|
|
106
|
+
(filePath) =>
|
|
107
|
+
discovered.has(filePath) &&
|
|
108
|
+
!completed.has(filePath) &&
|
|
109
|
+
!failedSet.has(filePath),
|
|
110
|
+
)
|
|
111
|
+
const pendingSet = new Set(pending)
|
|
112
|
+
|
|
113
|
+
for (const filePath of input.discoveredFiles) {
|
|
114
|
+
if (
|
|
115
|
+
completed.has(filePath) ||
|
|
116
|
+
failedSet.has(filePath) ||
|
|
117
|
+
pendingSet.has(filePath)
|
|
118
|
+
) {
|
|
119
|
+
continue
|
|
120
|
+
}
|
|
121
|
+
pending.push(filePath)
|
|
122
|
+
pendingSet.add(filePath)
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
return {
|
|
126
|
+
failed,
|
|
127
|
+
inProgress: [],
|
|
128
|
+
pending,
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
function removeFile(items: string[], filePath: string) {
|
|
133
|
+
return items.filter((item) => item !== filePath)
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
function writeBatchFailure(filePath: string, error: unknown) {
|
|
137
|
+
process.stderr.write(
|
|
138
|
+
`[batch] failed ${filePath}: ${
|
|
139
|
+
error instanceof Error ? error.message : String(error)
|
|
140
|
+
}\n`,
|
|
141
|
+
)
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
async function recycleFailedFiles(
|
|
145
|
+
statePath: string,
|
|
146
|
+
state: BatchState,
|
|
147
|
+
): Promise<BatchState> {
|
|
148
|
+
if (state.pending.length !== 0 || state.failed.length === 0) {
|
|
149
|
+
return state
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
const nextState: BatchState = {
|
|
153
|
+
failed: [],
|
|
154
|
+
inProgress: [],
|
|
155
|
+
pending: [...state.failed],
|
|
156
|
+
}
|
|
157
|
+
await writeJsonAtomic(statePath, nextState)
|
|
158
|
+
return nextState
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
function createProvider(config: BatchConfig): BatchStructuredOutputProvider {
|
|
162
|
+
if (config.provider === 'codex') {
|
|
163
|
+
return createBatchStructuredOutputProvider({
|
|
164
|
+
provider: 'codex',
|
|
165
|
+
...(config.effort ? { effort: config.effort } : {}),
|
|
166
|
+
...(config.model ? { model: config.model } : {}),
|
|
167
|
+
workspaceRoot: config.configDir,
|
|
168
|
+
})
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
return createBatchStructuredOutputProvider({
|
|
172
|
+
provider: 'claude',
|
|
173
|
+
...(config.effort ? { effort: config.effort } : {}),
|
|
174
|
+
...(config.model ? { model: config.model } : {}),
|
|
175
|
+
workspaceRoot: config.configDir,
|
|
176
|
+
})
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
/**
 * Run the batch: discover input files, reconcile persisted state, then
 * process files one at a time through the structured-output provider,
 * validating each output against the user-supplied JSON schema.
 *
 * state.json and results.json are rewritten atomically after every
 * transition, so an interrupted run can resume from disk.
 */
export async function runBatchCommand(
  input: RunBatchCommandInput,
): Promise<RunBatchCommandResult> {
  const cwd = input.cwd ?? process.cwd()
  const config = await loadBatchConfig({
    configPath: input.configPath,
    cwd,
  })

  // The batch's own bookkeeping files must never be treated as inputs.
  const statePath = path.join(config.configDir, 'state.json')
  const resultsPath = path.join(config.configDir, 'results.json')
  const excludedFiles = new Set([config.configPath, statePath, resultsPath])
  const discoveredFiles = await discoverBatchFiles({
    baseDir: config.configDir,
    excludedFiles,
    patterns: config.glob,
  })
  const results = await loadBatchResults(resultsPath)
  // Reconcile whatever was persisted with the files present now.
  let state: BatchState = mergeBatchState({
    discoveredFiles,
    results,
    state: await loadBatchState(statePath),
  })
  await writeJsonAtomic(statePath, state)
  await writeJsonAtomic(resultsPath, results)
  // strict: false — user-authored schemas may use relaxed keywords.
  const ajv = new Ajv({
    allErrors: true,
    strict: false,
  })
  const validateOutput = ajv.compile(config.schema)
  const processedFiles: string[] = []
  // Provider creation is deferred until the first file actually runs.
  let provider: BatchStructuredOutputProvider | null = null

  // NOTE(review): there is no retry cap — if every remaining file keeps
  // failing, recycleFailedFiles re-queues them and this loop never
  // terminates. Confirm whether an attempt limit is intended.
  while (state.pending.length !== 0 || state.failed.length !== 0) {
    if (state.pending.length === 0) {
      // Pending drained but failures remain: queue one retry wave.
      state = await recycleFailedFiles(statePath, state)
      continue
    }
    const filePath = state.pending[0]!
    // Move the file into inProgress and persist before touching it.
    state = {
      ...state,
      inProgress: unique([...state.inProgress, filePath]),
      pending: state.pending.slice(1),
    }
    await writeJsonAtomic(statePath, state)

    try {
      provider ??= createProvider(config)
      const absoluteFilePath = path.join(config.configDir, filePath)
      const content = await readFile(absoluteFilePath, 'utf8')
      const output = await provider.runFile({
        content,
        filePath,
        outputSchema: config.schema,
        prompt: config.prompt,
      })
      if (!validateOutput(output)) {
        // Schema violations are routed through the same failure path.
        throw new Error(ajv.errorsText(validateOutput.errors))
      }
      // Persist the result before clearing inProgress, so a crash
      // between the two writes errs on the side of keeping the result.
      results[filePath] = output
      await writeJsonAtomic(resultsPath, results)
      state = {
        ...state,
        inProgress: removeFile(state.inProgress, filePath),
      }
      await writeJsonAtomic(statePath, state)
      processedFiles.push(filePath)
    } catch (error) {
      if (input.verbose) {
        writeBatchFailure(filePath, error)
      }
      // Park the file in failed; it may be recycled once pending drains.
      state = {
        failed: unique([...state.failed, filePath]),
        inProgress: removeFile(state.inProgress, filePath),
        pending: state.pending,
      }
      await writeJsonAtomic(statePath, state)
    }
  }

  return {
    config,
    failedFiles: state.failed,
    processedFiles,
    results,
    state,
  }
}
|
|
@@ -0,0 +1,270 @@
|
|
|
1
|
+
import {
|
|
2
|
+
createClaudeProvider,
|
|
3
|
+
type ClaudeAgentEvent,
|
|
4
|
+
type ClaudeAgentEventHandler,
|
|
5
|
+
} from '../agents/claude'
|
|
6
|
+
import {
|
|
7
|
+
createCodexProvider,
|
|
8
|
+
type CodexThreadEvent,
|
|
9
|
+
type CodexThreadEventHandler,
|
|
10
|
+
} from '../agents/codex'
|
|
11
|
+
import { providerOptionsEqual } from '../agents/provider-options'
|
|
12
|
+
import { runWorkflow, type WorkflowRunResult } from '../core/orchestrator'
|
|
13
|
+
import { buildTaskTopology } from '../core/task-topology'
|
|
14
|
+
import { createOrchestratorRuntime } from '../runtime/fs-runtime'
|
|
15
|
+
import { openTaskSource } from '../task-sources/registry'
|
|
16
|
+
import {
|
|
17
|
+
loadWorkflowConfig,
|
|
18
|
+
type WorkflowConfig,
|
|
19
|
+
type WorkflowProvider,
|
|
20
|
+
type WorkflowRoleConfig,
|
|
21
|
+
} from '../workflow/config'
|
|
22
|
+
import {
|
|
23
|
+
createDirectWorkflowPreset,
|
|
24
|
+
createPullRequestWorkflowPreset,
|
|
25
|
+
type WorkflowRuntime,
|
|
26
|
+
} from '../workflow/preset'
|
|
27
|
+
import { createCodexRemoteReviewerProvider } from '../workflow/remote-reviewer'
|
|
28
|
+
|
|
29
|
+
import type {
|
|
30
|
+
ImplementerProvider,
|
|
31
|
+
RemoteReviewerProvider,
|
|
32
|
+
ReviewerProvider,
|
|
33
|
+
WorkflowRoleProviders,
|
|
34
|
+
} from '../agents/types'
|
|
35
|
+
import type { WorkspaceContext } from '../types'
|
|
36
|
+
|
|
37
|
+
// Options accepted by the run command.
export interface RunCommandOptions {
  config?: WorkflowConfig // pre-loaded config; loaded from the workspace when omitted
  untilTaskId?: string // stop marker, resolved through the task source
  verbose?: boolean // stream agent events to stderr
}

// Deferred workflow runner returned by loadWorkflowExecution.
export type WorkflowExecutionRunner = () => Promise<WorkflowRunResult>

// A fully wired workflow, ready to execute on demand.
export interface WorkflowExecution {
  config: WorkflowConfig
  execute: WorkflowExecutionRunner
  workflow: WorkflowRuntime
}

// Inputs needed to wire providers and presets for a workspace.
export interface ResolveWorkflowRuntimeInput {
  config: WorkflowConfig
  context: WorkspaceContext
  options: RunCommandOptions
}

// Maps a role config to a concrete local agent provider.
export type ProviderResolver = (
  role: WorkflowRoleConfig,
) => ImplementerProvider & ReviewerProvider

// Maps a provider name to a remote (pull-request) reviewer.
export type RemoteReviewerResolver = (
  providerName: WorkflowProvider,
) => RemoteReviewerProvider
|
|
64
|
+
|
|
65
|
+
function writeCodexEvent(event: CodexThreadEvent) {
|
|
66
|
+
const itemType =
|
|
67
|
+
event.type === 'item.completed' ||
|
|
68
|
+
event.type === 'item.started' ||
|
|
69
|
+
event.type === 'item.updated'
|
|
70
|
+
? event.item.type
|
|
71
|
+
: null
|
|
72
|
+
process.stderr.write(
|
|
73
|
+
`[codex] ${event.type}${itemType ? ` ${itemType}` : ''}\n`,
|
|
74
|
+
)
|
|
75
|
+
if (
|
|
76
|
+
event.type === 'item.completed' &&
|
|
77
|
+
event.item.type === 'agent_message' &&
|
|
78
|
+
event.item.text?.trim()
|
|
79
|
+
) {
|
|
80
|
+
process.stderr.write(`[codex] message ${event.item.text.trim()}\n`)
|
|
81
|
+
}
|
|
82
|
+
if (event.type === 'error') {
|
|
83
|
+
process.stderr.write(`[codex] error ${event.message}\n`)
|
|
84
|
+
}
|
|
85
|
+
if (event.type === 'turn.failed') {
|
|
86
|
+
process.stderr.write(`[codex] error ${event.error.message}\n`)
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
function createCodexEventHandler(
|
|
91
|
+
verbose: boolean | undefined,
|
|
92
|
+
): CodexThreadEventHandler | undefined {
|
|
93
|
+
if (!verbose) {
|
|
94
|
+
return undefined
|
|
95
|
+
}
|
|
96
|
+
return writeCodexEvent
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
function writeClaudeEvent(event: ClaudeAgentEvent) {
|
|
100
|
+
const detail = event.type === 'text' ? ` ${event.delta}` : ''
|
|
101
|
+
process.stderr.write(`[claude] ${event.type}${detail}\n`)
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
function createClaudeEventHandler(
|
|
105
|
+
verbose: boolean | undefined,
|
|
106
|
+
): ClaudeAgentEventHandler | undefined {
|
|
107
|
+
if (!verbose) {
|
|
108
|
+
return undefined
|
|
109
|
+
}
|
|
110
|
+
return writeClaudeEvent
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
function createProviderResolver(
|
|
114
|
+
context: WorkspaceContext,
|
|
115
|
+
verbose: boolean | undefined,
|
|
116
|
+
): ProviderResolver {
|
|
117
|
+
const cache = new Map<
|
|
118
|
+
WorkflowProvider,
|
|
119
|
+
ImplementerProvider & ReviewerProvider
|
|
120
|
+
>()
|
|
121
|
+
return (role: WorkflowRoleConfig) => {
|
|
122
|
+
const cached = cache.get(role.provider)
|
|
123
|
+
if (cached) {
|
|
124
|
+
return cached
|
|
125
|
+
}
|
|
126
|
+
let provider: ImplementerProvider & ReviewerProvider
|
|
127
|
+
if (role.provider === 'claude') {
|
|
128
|
+
const onEvent = createClaudeEventHandler(verbose)
|
|
129
|
+
provider = createClaudeProvider({
|
|
130
|
+
...(role.effort ? { effort: role.effort } : {}),
|
|
131
|
+
...(role.model ? { model: role.model } : {}),
|
|
132
|
+
workspaceRoot: context.workspaceRoot,
|
|
133
|
+
...(onEvent ? { onEvent } : {}),
|
|
134
|
+
})
|
|
135
|
+
} else {
|
|
136
|
+
const onEvent = createCodexEventHandler(verbose)
|
|
137
|
+
provider = createCodexProvider({
|
|
138
|
+
...(role.effort ? { effort: role.effort } : {}),
|
|
139
|
+
...(role.model ? { model: role.model } : {}),
|
|
140
|
+
workspaceRoot: context.workspaceRoot,
|
|
141
|
+
...(onEvent ? { onEvent } : {}),
|
|
142
|
+
})
|
|
143
|
+
}
|
|
144
|
+
cache.set(role.provider, provider)
|
|
145
|
+
return provider
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
function createRemoteReviewerResolver(): RemoteReviewerResolver {
|
|
150
|
+
const cache = new Map<WorkflowProvider, RemoteReviewerProvider>()
|
|
151
|
+
return (providerName: WorkflowProvider) => {
|
|
152
|
+
const cached = cache.get(providerName)
|
|
153
|
+
if (cached) {
|
|
154
|
+
return cached
|
|
155
|
+
}
|
|
156
|
+
if (providerName === 'claude') {
|
|
157
|
+
throw new Error(
|
|
158
|
+
'claude remote reviewer is not implemented in pull-request mode',
|
|
159
|
+
)
|
|
160
|
+
}
|
|
161
|
+
const provider = createCodexRemoteReviewerProvider()
|
|
162
|
+
cache.set(providerName, provider)
|
|
163
|
+
return provider
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
/**
 * Wire concrete agent providers into a workflow preset based on the
 * configured mode.
 *
 * pull-request mode: the reviewer is a remote (PR-based) provider and
 * only the implementer is a local agent.
 * direct mode: both roles are local agents resolved through one cached
 * resolver; a shared provider with diverging model/effort is rejected
 * because the cache keys on provider name only.
 */
function resolveWorkflowRuntime(
  input: ResolveWorkflowRuntimeInput,
): WorkflowRuntime {
  const resolveProvider = createProviderResolver(
    input.context,
    input.options.verbose,
  )
  const implementerRole = input.config.workflow.roles.implementer
  const reviewerRole = input.config.workflow.roles.reviewer

  if (input.config.workflow.mode === 'pull-request') {
    // NOTE(review): the matching-options check below is skipped in this
    // mode; only the implementer uses the local resolver here, so no
    // cache collision can occur.
    const resolveRemoteReviewer = createRemoteReviewerResolver()
    const reviewer = resolveRemoteReviewer(reviewerRole.provider)
    const implementer = resolveProvider(implementerRole)
    const roles: WorkflowRoleProviders = {
      implementer,
      reviewer,
    }

    return {
      roles,
      preset: createPullRequestWorkflowPreset({
        reviewer,
      }),
    }
  }

  // Direct mode: a shared provider would silently reuse whichever
  // instance was created first, so diverging options are an error.
  if (
    implementerRole.provider === reviewerRole.provider &&
    !providerOptionsEqual(implementerRole, reviewerRole)
  ) {
    throw new Error(
      `direct workflow roles implementer and reviewer must use matching model and effort when sharing provider ${implementerRole.provider}`,
    )
  }

  const implementer = resolveProvider(implementerRole)
  const reviewer = resolveProvider(reviewerRole)
  const roles: WorkflowRoleProviders = {
    implementer,
    reviewer,
  }

  return {
    roles,
    preset: createDirectWorkflowPreset({
      reviewer,
    }),
  }
}
|
|
217
|
+
|
|
218
|
+
/**
 * Assemble everything needed to run the workflow for a workspace:
 * config, role providers, task source, runtime, and task graph.
 * Returns a deferred `execute` so callers can inspect the resolved
 * config/workflow before running anything.
 */
export async function loadWorkflowExecution(
  context: WorkspaceContext,
  options: RunCommandOptions = {},
): Promise<WorkflowExecution> {
  // An explicitly supplied config wins; otherwise load from the root.
  const config =
    options.config ?? (await loadWorkflowConfig(context.workspaceRoot))
  const workflow = resolveWorkflowRuntime({
    config,
    context,
    options,
  })
  const taskSource = await openTaskSource(config.task.source, {
    featureDir: context.featureDir,
    featureId: context.featureId,
    workspaceRoot: context.workspaceRoot,
  })
  const runtime = createOrchestratorRuntime({
    featureDir: context.featureDir,
    taskSource,
    workspaceRoot: context.workspaceRoot,
  })
  // Refuse to run on top of uncommitted changes.
  await runtime.git.requireCleanWorktree()
  const graph = buildTaskTopology(
    taskSource,
    context.featureId,
    config.task.maxIterations,
  )
  // Optional early-stop marker, resolved through the task source.
  const untilTaskHandle = options.untilTaskId
    ? taskSource.resolveTaskSelector(options.untilTaskId)
    : undefined
  const workflowInput = {
    graph,
    runtime,
    workflow,
    // Conditional spread keeps the key absent (not undefined) when unset.
    ...(untilTaskHandle ? { untilTaskHandle } : {}),
  }

  return {
    config,
    workflow,
    async execute() {
      return runWorkflow(workflowInput)
    },
  }
}
|
|
263
|
+
|
|
264
|
+
export async function runCommand(
|
|
265
|
+
context: WorkspaceContext,
|
|
266
|
+
options: RunCommandOptions = {},
|
|
267
|
+
) {
|
|
268
|
+
const execution = await loadWorkflowExecution(context, options)
|
|
269
|
+
return execution.execute()
|
|
270
|
+
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
PendingTaskState,
|
|
3
|
+
ReviewFinding,
|
|
4
|
+
ReviewOutput,
|
|
5
|
+
ReviewVerdict,
|
|
6
|
+
TaskGraph,
|
|
7
|
+
TaskState,
|
|
8
|
+
TaskTopologyEntry,
|
|
9
|
+
WorkflowState,
|
|
10
|
+
} from '../types'
|
|
11
|
+
|
|
12
|
+
export function cloneState(state: WorkflowState): WorkflowState {
|
|
13
|
+
return structuredClone(state)
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export function createBaseTaskState(): PendingTaskState {
|
|
17
|
+
return {
|
|
18
|
+
attempt: 0,
|
|
19
|
+
generation: 1,
|
|
20
|
+
invalidatedBy: null,
|
|
21
|
+
lastFindings: [],
|
|
22
|
+
status: 'pending',
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export function getTask(
|
|
27
|
+
graph: TaskGraph,
|
|
28
|
+
taskHandle: string,
|
|
29
|
+
): TaskTopologyEntry {
|
|
30
|
+
const task = graph.tasks.find((item) => item.handle === taskHandle)
|
|
31
|
+
if (!task) {
|
|
32
|
+
throw new Error(`Unknown task: ${taskHandle}`)
|
|
33
|
+
}
|
|
34
|
+
return task
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
export function getTaskState(
|
|
38
|
+
state: WorkflowState,
|
|
39
|
+
taskHandle: string,
|
|
40
|
+
): TaskState {
|
|
41
|
+
const taskState = state.tasks[taskHandle]
|
|
42
|
+
if (!taskState) {
|
|
43
|
+
throw new Error(`Missing state for task ${taskHandle}`)
|
|
44
|
+
}
|
|
45
|
+
return taskState
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export function canStartTask(
|
|
49
|
+
graph: TaskGraph,
|
|
50
|
+
state: WorkflowState,
|
|
51
|
+
taskHandle: string,
|
|
52
|
+
) {
|
|
53
|
+
const task = getTask(graph, taskHandle)
|
|
54
|
+
return task.dependsOn.every(
|
|
55
|
+
(dependency) => state.tasks[dependency]?.status === 'done',
|
|
56
|
+
)
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
export function getMaxIterations(graph: TaskGraph) {
|
|
60
|
+
return graph.maxIterations
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
export function withReviewMetadata(
|
|
64
|
+
taskState: TaskState,
|
|
65
|
+
input: WithReviewMetadataInput,
|
|
66
|
+
) {
|
|
67
|
+
const next = {
|
|
68
|
+
attempt: taskState.attempt,
|
|
69
|
+
generation: taskState.generation,
|
|
70
|
+
invalidatedBy: taskState.invalidatedBy,
|
|
71
|
+
lastFindings: taskState.lastFindings,
|
|
72
|
+
...(taskState.lastReviewVerdict
|
|
73
|
+
? { lastReviewVerdict: taskState.lastReviewVerdict }
|
|
74
|
+
: {}),
|
|
75
|
+
}
|
|
76
|
+
if (input.findings) {
|
|
77
|
+
next.lastFindings = input.findings
|
|
78
|
+
}
|
|
79
|
+
if (input.reviewVerdict) {
|
|
80
|
+
next.lastReviewVerdict = input.reviewVerdict
|
|
81
|
+
}
|
|
82
|
+
return next
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
// Optional overrides applied by withReviewMetadata; absent fields keep
// the task state's existing values.
export interface WithReviewMetadataInput {
  findings?: ReviewFinding[]
  reviewVerdict?: ReviewVerdict
}

// Input for the zero-findings gate check (shouldPassZeroGate).
export interface ZeroGateInput {
  review: ReviewOutput
}
|
|
93
|
+
|
|
94
|
+
export function shouldPassZeroGate(input: ZeroGateInput) {
|
|
95
|
+
return (
|
|
96
|
+
input.review.verdict === 'pass' &&
|
|
97
|
+
input.review.findings.length === 0 &&
|
|
98
|
+
input.review.acceptanceChecks.every((check) => check.status === 'pass')
|
|
99
|
+
)
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
export function collectDescendants(
|
|
103
|
+
graph: TaskGraph,
|
|
104
|
+
taskHandle: string,
|
|
105
|
+
seen = new Set<string>(),
|
|
106
|
+
) {
|
|
107
|
+
for (const task of graph.tasks) {
|
|
108
|
+
if (task.dependsOn.includes(taskHandle) && !seen.has(task.handle)) {
|
|
109
|
+
seen.add(task.handle)
|
|
110
|
+
collectDescendants(graph, task.handle, seen)
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
return seen
|
|
114
|
+
}
|