task-while 0.0.2 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +34 -34
  2. package/package.json +2 -2
  3. package/src/adapters/fs/harness-store.ts +84 -0
  4. package/src/agents/claude.ts +159 -9
  5. package/src/agents/codex.ts +68 -4
  6. package/src/agents/event-log.ts +160 -15
  7. package/src/batch/discovery.ts +1 -1
  8. package/src/commands/batch.ts +152 -155
  9. package/src/commands/run-branch-helpers.ts +81 -0
  10. package/src/commands/run-providers.ts +77 -0
  11. package/src/commands/run.ts +121 -177
  12. package/src/core/create-runtime-ports.ts +118 -0
  13. package/src/core/runtime.ts +15 -36
  14. package/src/harness/in-memory-store.ts +45 -0
  15. package/src/harness/kernel.ts +226 -0
  16. package/src/harness/state.ts +47 -0
  17. package/src/harness/store.ts +26 -0
  18. package/src/harness/workflow-builders.ts +87 -0
  19. package/src/harness/workflow-program.ts +86 -0
  20. package/src/ports/agent.ts +17 -0
  21. package/src/ports/code-host.ts +23 -0
  22. package/src/programs/batch.ts +139 -0
  23. package/src/programs/run-direct.ts +209 -0
  24. package/src/programs/run-pr-transitions.ts +81 -0
  25. package/src/programs/run-pr.ts +290 -0
  26. package/src/programs/shared-steps.ts +252 -0
  27. package/src/schedulers/scheduler.ts +208 -0
  28. package/src/session/session.ts +127 -0
  29. package/src/workflow/config.ts +15 -0
  30. package/src/core/engine-helpers.ts +0 -114
  31. package/src/core/engine-outcomes.ts +0 -166
  32. package/src/core/engine.ts +0 -223
  33. package/src/core/orchestrator-helpers.ts +0 -52
  34. package/src/core/orchestrator-integrate-resume.ts +0 -149
  35. package/src/core/orchestrator-review-resume.ts +0 -228
  36. package/src/core/orchestrator-task-attempt.ts +0 -257
  37. package/src/core/orchestrator.ts +0 -99
  38. package/src/runtime/fs-runtime.ts +0 -209
  39. package/src/workflow/direct-preset.ts +0 -44
  40. package/src/workflow/preset.ts +0 -86
  41. package/src/workflow/pull-request-preset.ts +0 -312
@@ -1,34 +1,179 @@
1
1
  import type { ClaudeAgentEvent, ClaudeAgentEventHandler } from './claude'
2
2
  import type { CodexThreadEvent, CodexThreadEventHandler } from './codex'
3
3
 
4
+ function formatInline(value: string) {
5
+ return value.trim().replaceAll('\n', String.raw`\n`)
6
+ }
7
+
8
+ function formatJson(value: unknown) {
9
+ try {
10
+ return JSON.stringify(value)
11
+ } catch {
12
+ return String(value)
13
+ }
14
+ }
15
+
4
16
  function writeCodexEvent(event: CodexThreadEvent) {
5
- const itemType =
17
+ if (
6
18
  event.type === 'item.completed' ||
7
19
  event.type === 'item.started' ||
8
20
  event.type === 'item.updated'
9
- ? event.item.type
10
- : null
11
- process.stderr.write(
12
- `[codex] ${event.type}${itemType ? ` ${itemType}` : ''}\n`,
13
- )
14
- if (
15
- event.type === 'item.completed' &&
16
- event.item.type === 'agent_message' &&
17
- event.item.text?.trim()
18
21
  ) {
19
- process.stderr.write(`[codex] message ${event.item.text.trim()}\n`)
22
+ const item = event.item
23
+
24
+ if (item.type === 'reasoning') {
25
+ const text = formatInline(item.text)
26
+ if (text) {
27
+ process.stderr.write(`[codex] thinking ${text}\n`)
28
+ }
29
+ return
30
+ }
31
+
32
+ if (item.type === 'command_execution') {
33
+ if (event.type === 'item.started') {
34
+ process.stderr.write(`[codex] exec ${formatInline(item.command)}\n`)
35
+ return
36
+ }
37
+ if (event.type === 'item.completed') {
38
+ process.stderr.write(
39
+ `[codex] exec ${item.status} exit=${item.exit_code ?? 'unknown'} ${formatInline(item.command)}\n`,
40
+ )
41
+ const output = formatInline(item.aggregated_output)
42
+ if (output) {
43
+ process.stderr.write(`[codex] output ${output}\n`)
44
+ }
45
+ return
46
+ }
47
+ }
48
+
49
+ if (item.type === 'mcp_tool_call') {
50
+ const target = `${item.server}.${item.tool}`
51
+ if (event.type === 'item.started') {
52
+ process.stderr.write(
53
+ `[codex] tool ${target} ${formatJson(item.arguments)}\n`,
54
+ )
55
+ return
56
+ }
57
+ if (event.type === 'item.completed') {
58
+ const detail =
59
+ item.status === 'failed'
60
+ ? ` error=${item.error?.message ?? 'unknown'}`
61
+ : ''
62
+ process.stderr.write(`[codex] tool ${item.status} ${target}${detail}\n`)
63
+ return
64
+ }
65
+ }
66
+
67
+ if (item.type === 'file_change' && event.type === 'item.completed') {
68
+ const files = item.changes.map((change) => change.path).join(', ')
69
+ process.stderr.write(`[codex] files ${item.status} ${files}\n`)
70
+ return
71
+ }
72
+
73
+ if (item.type === 'web_search') {
74
+ process.stderr.write(`[codex] search ${formatInline(item.query)}\n`)
75
+ return
76
+ }
77
+
78
+ if (item.type === 'todo_list') {
79
+ for (const todo of item.items) {
80
+ process.stderr.write(
81
+ `[codex] todo ${todo.completed ? '[x]' : '[ ]'} ${formatInline(todo.text)}\n`,
82
+ )
83
+ }
84
+ return
85
+ }
86
+
87
+ if (item.type === 'error') {
88
+ process.stderr.write(`[codex] error ${formatInline(item.message)}\n`)
89
+ return
90
+ }
91
+
92
+ if (item.type === 'agent_message' && event.type === 'item.completed') {
93
+ const text = formatInline(item.text)
94
+ if (text) {
95
+ process.stderr.write(`[codex] message ${text}\n`)
96
+ }
97
+ return
98
+ }
99
+ }
100
+
101
+ if (event.type === 'turn.completed') {
102
+ process.stderr.write(
103
+ `[codex] result tokens in=${event.usage.input_tokens} out=${event.usage.output_tokens} cached=${event.usage.cached_input_tokens}\n`,
104
+ )
105
+ return
20
106
  }
107
+
21
108
  if (event.type === 'error') {
22
- process.stderr.write(`[codex] error ${event.message}\n`)
109
+ process.stderr.write(`[codex] error ${formatInline(event.message)}\n`)
110
+ return
23
111
  }
112
+
24
113
  if (event.type === 'turn.failed') {
25
- process.stderr.write(`[codex] error ${event.error.message}\n`)
114
+ process.stderr.write(`[codex] error ${formatInline(event.error.message)}\n`)
26
115
  }
27
116
  }
28
117
 
29
118
  function writeClaudeEvent(event: ClaudeAgentEvent) {
30
- const detail = event.type === 'text' ? ` ${event.delta}` : ''
31
- process.stderr.write(`[claude] ${event.type}${detail}\n`)
119
+ if (event.type === 'system.init') {
120
+ const tools = event.tools.length !== 0 ? event.tools.join(',') : '-'
121
+ const skills = event.skills.length !== 0 ? event.skills.join(',') : '-'
122
+ const mcp =
123
+ event.mcpServers.length !== 0
124
+ ? event.mcpServers
125
+ .map((server) => `${server.name}:${server.status}`)
126
+ .join(',')
127
+ : '-'
128
+ process.stderr.write(
129
+ `[claude] init model=${event.model} permission=${event.permissionMode} tools=${tools} skills=${skills} mcp=${mcp}\n`,
130
+ )
131
+ return
132
+ }
133
+
134
+ if (event.type === 'task.started') {
135
+ process.stderr.write(`[claude] task ${formatInline(event.description)}\n`)
136
+ return
137
+ }
138
+
139
+ if (event.type === 'tool.progress') {
140
+ process.stderr.write(
141
+ `[claude] tool ${event.toolName} ${event.elapsedTimeSeconds}s\n`,
142
+ )
143
+ return
144
+ }
145
+
146
+ if (event.type === 'tool.summary') {
147
+ process.stderr.write(
148
+ `[claude] tool-summary ${formatInline(event.summary)}\n`,
149
+ )
150
+ return
151
+ }
152
+
153
+ if (event.type === 'task.progress') {
154
+ const detail = event.summary ?? event.description
155
+ process.stderr.write(
156
+ `[claude] progress ${formatInline(event.lastToolName ?? '-')} ${formatInline(detail)}\n`,
157
+ )
158
+ return
159
+ }
160
+
161
+ if (event.type === 'text') {
162
+ const text = formatInline(event.delta)
163
+ if (text) {
164
+ process.stderr.write(`[claude] text ${text}\n`)
165
+ }
166
+ return
167
+ }
168
+
169
+ if (event.type === 'result') {
170
+ process.stderr.write(
171
+ `[claude] result ${event.subtype} turns=${event.numTurns} duration=${event.durationMs}ms\n`,
172
+ )
173
+ return
174
+ }
175
+
176
+ process.stderr.write(`[claude] error ${formatInline(event.message)}\n`)
32
177
  }
33
178
 
34
179
  export function createCodexEventHandler(
@@ -24,7 +24,7 @@ export async function discoverBatchFiles(
24
24
  absolute: false,
25
25
  cwd: input.baseDir,
26
26
  dot: true,
27
- ignore: ['.git/**', 'node_modules/**'],
27
+ ignore: ['.git/**', '.while/**', 'node_modules/**'],
28
28
  nodir: true,
29
29
  posix: true,
30
30
  })
@@ -5,23 +5,21 @@ import Ajv from 'ajv'
5
5
  import * as fsExtra from 'fs-extra'
6
6
  import { z } from 'zod'
7
7
 
8
+ import { createFsHarnessStore } from '../adapters/fs/harness-store'
8
9
  import { loadBatchConfig, type BatchConfig } from '../batch/config'
9
10
  import { discoverBatchFiles } from '../batch/discovery'
10
11
  import {
11
12
  createBatchStructuredOutputProvider,
12
13
  type BatchStructuredOutputProvider,
13
14
  } from '../batch/provider'
14
- import { parseWithSchema, uniqueStringArray } from '../schema/shared'
15
+ import { runKernel } from '../harness/kernel'
16
+ import { createBatchProgram } from '../programs/batch'
17
+ import { createRuntimePaths } from '../runtime/path-layout'
18
+ import { createBatchRetryScheduler } from '../schedulers/scheduler'
19
+ import { parseWithSchema } from '../schema/shared'
20
+ import { runSession, SessionEventType } from '../session/session'
15
21
  import { writeJsonAtomic } from '../utils/fs'
16
22
 
17
- const batchStateSchema = z
18
- .object({
19
- failed: uniqueStringArray('failed'),
20
- inProgress: uniqueStringArray('inProgress'),
21
- pending: uniqueStringArray('pending'),
22
- })
23
- .strict()
24
-
25
23
  const batchResultsSchema = z.custom<Record<string, unknown>>(
26
24
  (value) =>
27
25
  typeof value === 'object' && value !== null && !Array.isArray(value),
@@ -30,12 +28,6 @@ const batchResultsSchema = z.custom<Record<string, unknown>>(
30
28
  },
31
29
  )
32
30
 
33
- export interface BatchState {
34
- failed: string[]
35
- inProgress: string[]
36
- pending: string[]
37
- }
38
-
39
31
  export interface RunBatchCommandInput {
40
32
  configPath: string
41
33
  cwd?: string
@@ -47,19 +39,39 @@ export interface RunBatchCommandResult {
47
39
  failedFiles: string[]
48
40
  processedFiles: string[]
49
41
  results: Record<string, unknown>
50
- state: BatchState
42
+ resultsFilePath: string
51
43
  }
52
44
 
53
- function createEmptyState(): BatchState {
54
- return {
55
- failed: [],
56
- inProgress: [],
57
- pending: [],
45
+ function writeBatchVerboseLine(verbose: boolean | undefined, line: string) {
46
+ if (!verbose) {
47
+ return
58
48
  }
49
+ process.stderr.write(`[batch] ${line}\n`)
59
50
  }
60
51
 
61
- function unique(items: string[]) {
62
- return [...new Set(items)]
52
+ function readSessionProgress(detail: unknown) {
53
+ if (typeof detail !== 'object' || detail === null) {
54
+ return null
55
+ }
56
+ const progress = (detail as { progress?: unknown }).progress
57
+ if (typeof progress !== 'object' || progress === null) {
58
+ return null
59
+ }
60
+
61
+ const blocked =
62
+ typeof (progress as { blocked?: unknown }).blocked === 'number'
63
+ ? (progress as { blocked: number }).blocked
64
+ : 0
65
+ const completed =
66
+ typeof (progress as { completed?: unknown }).completed === 'number'
67
+ ? (progress as { completed: number }).completed
68
+ : 0
69
+ const suspended =
70
+ typeof (progress as { suspended?: unknown }).suspended === 'number'
71
+ ? (progress as { suspended: number }).suspended
72
+ : 0
73
+
74
+ return { blocked, completed, suspended }
63
75
  }
64
76
 
65
77
  async function readJsonFileIfExists(filePath: string) {
@@ -72,14 +84,6 @@ async function readJsonFileIfExists(filePath: string) {
72
84
  return value
73
85
  }
74
86
 
75
- async function loadBatchState(filePath: string) {
76
- const value = await readJsonFileIfExists(filePath)
77
- if (value === null) {
78
- return createEmptyState()
79
- }
80
- return parseWithSchema(batchStateSchema, value)
81
- }
82
-
83
87
  async function loadBatchResults(filePath: string) {
84
88
  const value = await readJsonFileIfExists(filePath)
85
89
  if (value === null) {
@@ -88,76 +92,6 @@ async function loadBatchResults(filePath: string) {
88
92
  return parseWithSchema(batchResultsSchema, value)
89
93
  }
90
94
 
91
- function mergeBatchState(input: {
92
- discoveredFiles: string[]
93
- results: Record<string, unknown>
94
- state: BatchState
95
- }): BatchState {
96
- const discovered = new Set(input.discoveredFiles)
97
- const completed = new Set(Object.keys(input.results))
98
- const failed = unique(input.state.failed).filter(
99
- (filePath) => discovered.has(filePath) && !completed.has(filePath),
100
- )
101
- const failedSet = new Set(failed)
102
- const pending = unique([
103
- ...input.state.inProgress,
104
- ...input.state.pending,
105
- ]).filter(
106
- (filePath) =>
107
- discovered.has(filePath) &&
108
- !completed.has(filePath) &&
109
- !failedSet.has(filePath),
110
- )
111
- const pendingSet = new Set(pending)
112
-
113
- for (const filePath of input.discoveredFiles) {
114
- if (
115
- completed.has(filePath) ||
116
- failedSet.has(filePath) ||
117
- pendingSet.has(filePath)
118
- ) {
119
- continue
120
- }
121
- pending.push(filePath)
122
- pendingSet.add(filePath)
123
- }
124
-
125
- return {
126
- failed,
127
- inProgress: [],
128
- pending,
129
- }
130
- }
131
-
132
- function removeFile(items: string[], filePath: string) {
133
- return items.filter((item) => item !== filePath)
134
- }
135
-
136
- function writeBatchFailure(filePath: string, error: unknown) {
137
- process.stderr.write(
138
- `[batch] failed ${filePath}: ${
139
- error instanceof Error ? error.message : String(error)
140
- }\n`,
141
- )
142
- }
143
-
144
- async function recycleFailedFiles(
145
- statePath: string,
146
- state: BatchState,
147
- ): Promise<BatchState> {
148
- if (state.pending.length !== 0 || state.failed.length === 0) {
149
- return state
150
- }
151
-
152
- const nextState: BatchState = {
153
- failed: [],
154
- inProgress: [],
155
- pending: [...state.failed],
156
- }
157
- await writeJsonAtomic(statePath, nextState)
158
- return nextState
159
- }
160
-
161
95
  function createProvider(
162
96
  config: BatchConfig,
163
97
  verbose: boolean | undefined,
@@ -181,6 +115,17 @@ function createProvider(
181
115
  })
182
116
  }
183
117
 
118
+ function createOutputValidator(schema: Record<string, unknown>) {
119
+ const ajv = new Ajv({ strict: false })
120
+ const validate = ajv.compile(schema)
121
+ return (value: unknown) => {
122
+ if (validate(value)) {
123
+ return
124
+ }
125
+ throw new Error(ajv.errorsText(validate.errors))
126
+ }
127
+ }
128
+
184
129
  export async function runBatchCommand(
185
130
  input: RunBatchCommandInput,
186
131
  ): Promise<RunBatchCommandResult> {
@@ -190,82 +135,134 @@ export async function runBatchCommand(
190
135
  cwd,
191
136
  })
192
137
 
193
- const statePath = path.join(config.configDir, 'state.json')
194
138
  const resultsPath = path.join(config.configDir, 'results.json')
195
- const excludedFiles = new Set([config.configPath, statePath, resultsPath])
139
+ const excludedFiles = new Set([config.configPath, resultsPath])
196
140
  const discoveredFiles = await discoverBatchFiles({
197
141
  baseDir: config.configDir,
198
142
  excludedFiles,
199
143
  patterns: config.glob,
200
144
  })
201
145
  const results = await loadBatchResults(resultsPath)
202
- let state: BatchState = mergeBatchState({
203
- discoveredFiles,
146
+ await writeJsonAtomic(resultsPath, results)
147
+
148
+ const provider = createProvider(config, input.verbose)
149
+ const validateOutput = createOutputValidator(config.schema)
150
+ const harnessDir = createRuntimePaths(config.configDir).runtimeDir
151
+ const store = createFsHarnessStore(harnessDir)
152
+ const protocol = 'batch'
153
+
154
+ const program = createBatchProgram({
155
+ configDir: config.configDir,
156
+ maxRetries: 3,
157
+ outputSchema: config.schema,
158
+ prompt: config.prompt,
159
+ provider,
204
160
  results,
205
- state: await loadBatchState(statePath),
161
+ resultsPath,
162
+ validateOutput,
206
163
  })
207
- await writeJsonAtomic(statePath, state)
208
- await writeJsonAtomic(resultsPath, results)
209
- const ajv = new Ajv({
210
- allErrors: true,
211
- strict: false,
164
+
165
+ const scheduler = createBatchRetryScheduler({
166
+ files: discoveredFiles,
167
+ protocol,
168
+ results,
169
+ store,
212
170
  })
213
- const validateOutput = ajv.compile(config.schema)
171
+
214
172
  const processedFiles: string[] = []
215
- let provider: BatchStructuredOutputProvider | null = null
173
+ const totalFiles = discoveredFiles.length
174
+ let blockedCount = 0
175
+ let completedCount = 0
176
+ let suspendedCount = 0
216
177
 
217
- while (state.pending.length !== 0 || state.failed.length !== 0) {
218
- if (state.pending.length === 0) {
219
- state = await recycleFailedFiles(statePath, state)
178
+ for await (const event of runSession({
179
+ config: {},
180
+ scheduler,
181
+ kernel: {
182
+ run: (subjectId) =>
183
+ runKernel({
184
+ config: { prompt: config.prompt, schema: config.schema },
185
+ program,
186
+ protocol,
187
+ store,
188
+ subjectId,
189
+ }),
190
+ },
191
+ })) {
192
+ if (event.type === SessionEventType.SessionStarted) {
193
+ const progress = readSessionProgress(event.detail)
194
+ if (progress) {
195
+ blockedCount = progress.blocked
196
+ completedCount = progress.completed
197
+ suspendedCount = progress.suspended
198
+ }
199
+ writeBatchVerboseLine(
200
+ input.verbose,
201
+ `resume total=${totalFiles} completed=${completedCount} blocked=${blockedCount} suspended=${suspendedCount}`,
202
+ )
220
203
  continue
221
204
  }
222
- const filePath = state.pending[0]!
223
- state = {
224
- ...state,
225
- inProgress: unique([...state.inProgress, filePath]),
226
- pending: state.pending.slice(1),
205
+
206
+ if (event.type === SessionEventType.SubjectStarted) {
207
+ writeBatchVerboseLine(
208
+ input.verbose,
209
+ `start completed=${completedCount}/${totalFiles} file=${event.subjectId}`,
210
+ )
211
+ continue
227
212
  }
228
- await writeJsonAtomic(statePath, state)
229
213
 
230
- try {
231
- provider ??= createProvider(config, input.verbose)
232
- const absoluteFilePath = path.join(config.configDir, filePath)
233
- const content = await readFile(absoluteFilePath, 'utf8')
234
- const output = await provider.runFile({
235
- content,
236
- filePath,
237
- outputSchema: config.schema,
238
- prompt: config.prompt,
239
- })
240
- if (!validateOutput(output)) {
241
- throw new Error(ajv.errorsText(validateOutput.errors))
242
- }
243
- results[filePath] = output
244
- await writeJsonAtomic(resultsPath, results)
245
- state = {
246
- ...state,
247
- inProgress: removeFile(state.inProgress, filePath),
248
- }
249
- await writeJsonAtomic(statePath, state)
250
- processedFiles.push(filePath)
251
- } catch (error) {
252
- if (input.verbose) {
253
- writeBatchFailure(filePath, error)
254
- }
255
- state = {
256
- failed: unique([...state.failed, filePath]),
257
- inProgress: removeFile(state.inProgress, filePath),
258
- pending: state.pending,
259
- }
260
- await writeJsonAtomic(statePath, state)
214
+ if (event.type === SessionEventType.SubjectResumed) {
215
+ suspendedCount = Math.max(0, suspendedCount - 1)
216
+ writeBatchVerboseLine(
217
+ input.verbose,
218
+ `resume-file completed=${completedCount}/${totalFiles} file=${event.subjectId}`,
219
+ )
220
+ continue
221
+ }
222
+
223
+ if (event.type === SessionEventType.SubjectDone) {
224
+ completedCount += 1
225
+ processedFiles.push(event.subjectId)
226
+ writeBatchVerboseLine(
227
+ input.verbose,
228
+ `done completed=${completedCount}/${totalFiles} file=${event.subjectId}`,
229
+ )
230
+ continue
231
+ }
232
+
233
+ if (event.type === SessionEventType.SubjectBlocked) {
234
+ blockedCount += 1
235
+ writeBatchVerboseLine(
236
+ input.verbose,
237
+ `blocked completed=${completedCount}/${totalFiles} file=${event.subjectId}`,
238
+ )
239
+ continue
240
+ }
241
+
242
+ if (event.type === SessionEventType.SubjectSuspended) {
243
+ suspendedCount += 1
244
+ writeBatchVerboseLine(
245
+ input.verbose,
246
+ `suspended completed=${completedCount}/${totalFiles} file=${event.subjectId}`,
247
+ )
248
+ continue
249
+ }
250
+
251
+ if (event.type === SessionEventType.SessionDone) {
252
+ writeBatchVerboseLine(
253
+ input.verbose,
254
+ `session-done total=${totalFiles} completed=${completedCount} blocked=${blockedCount} suspended=${suspendedCount}`,
255
+ )
261
256
  }
262
257
  }
263
258
 
259
+ const sets = await scheduler.rebuild()
260
+
264
261
  return {
265
262
  config,
266
- failedFiles: state.failed,
263
+ failedFiles: [...sets.blocked],
267
264
  processedFiles,
268
265
  results,
269
- state,
266
+ resultsFilePath: resultsPath,
270
267
  }
271
268
  }
@@ -0,0 +1,81 @@
1
+ import type { GitPort } from '../core/runtime'
2
+ import type { CodeHostPort } from '../ports/code-host'
3
+ import type { TaskSourceSession } from '../task-sources/types'
4
+
5
+ export const sleep = (ms: number) =>
6
+ new Promise<void>((resolve) => setTimeout(resolve, ms))
7
+
8
+ export function toTaskBranchName(commitSubject: string) {
9
+ const slug = commitSubject
10
+ .replace(/^Task\s+/i, '')
11
+ .toLowerCase()
12
+ .replace(/[^a-z0-9]+/g, '-')
13
+ .replace(/^-+|-+$/g, '')
14
+ return `task/${slug}`
15
+ }
16
+
17
+ export async function ensureTaskBranch(
18
+ git: GitPort,
19
+ branchName: string,
20
+ restoreFromRemote: boolean,
21
+ ) {
22
+ const currentBranch = await git.getCurrentBranch()
23
+ if (currentBranch === branchName) {
24
+ return
25
+ }
26
+ try {
27
+ await git.checkoutBranch(branchName)
28
+ } catch {
29
+ if (restoreFromRemote) {
30
+ await git.checkoutRemoteBranch(branchName)
31
+ return
32
+ }
33
+ await git.checkoutBranch(branchName, {
34
+ create: true,
35
+ startPoint: 'main',
36
+ })
37
+ }
38
+ }
39
+
40
+ export async function runPrCheckpoint(
41
+ ports: { codeHost: CodeHostPort; git: GitPort },
42
+ taskSource: TaskSourceSession,
43
+ input: { iteration: number; subjectId: string },
44
+ ): Promise<{ checkpointStartedAt: string; prNumber: number }> {
45
+ const commitSubject = taskSource.buildCommitSubject(input.subjectId)
46
+ const branchName = toTaskBranchName(commitSubject)
47
+ const existingPr = await ports.codeHost.findOpenPullRequestByHeadBranch({
48
+ headBranch: branchName,
49
+ })
50
+
51
+ await ensureTaskBranch(ports.git, branchName, existingPr !== null)
52
+
53
+ const checkpointMessage = `checkpoint: ${commitSubject} (attempt ${input.iteration})`
54
+ const headSubject = await ports.git.getHeadSubject()
55
+ if (headSubject !== checkpointMessage) {
56
+ await ports.git.commitTask({ message: checkpointMessage })
57
+ }
58
+
59
+ await ports.git.pushBranch(branchName)
60
+
61
+ let pullRequest = existingPr
62
+ if (!pullRequest) {
63
+ pullRequest = await ports.codeHost.createPullRequest({
64
+ baseBranch: 'main',
65
+ body: `Task: ${commitSubject}\nManaged by task-while.`,
66
+ headBranch: branchName,
67
+ title: commitSubject,
68
+ })
69
+ }
70
+
71
+ const checkpointStartedAt = await ports.git.getHeadTimestamp()
72
+ return { checkpointStartedAt, prNumber: pullRequest.number }
73
+ }
74
+
75
+ export async function cleanupBranch(git: GitPort, branchName: string) {
76
+ try {
77
+ await git.checkoutBranch('main')
78
+ await git.pullFastForward('main')
79
+ await git.deleteLocalBranch(branchName)
80
+ } catch {}
81
+ }