task-while 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +322 -0
- package/bin/task-while.mjs +22 -0
- package/package.json +72 -0
- package/src/agents/claude.ts +175 -0
- package/src/agents/codex.ts +231 -0
- package/src/agents/provider-options.ts +45 -0
- package/src/agents/types.ts +69 -0
- package/src/batch/config.ts +109 -0
- package/src/batch/discovery.ts +35 -0
- package/src/batch/provider.ts +79 -0
- package/src/commands/batch.ts +266 -0
- package/src/commands/run.ts +270 -0
- package/src/core/engine-helpers.ts +114 -0
- package/src/core/engine-outcomes.ts +166 -0
- package/src/core/engine.ts +223 -0
- package/src/core/orchestrator-helpers.ts +52 -0
- package/src/core/orchestrator-integrate-resume.ts +149 -0
- package/src/core/orchestrator-review-resume.ts +228 -0
- package/src/core/orchestrator-task-attempt.ts +257 -0
- package/src/core/orchestrator.ts +99 -0
- package/src/core/runtime.ts +175 -0
- package/src/core/task-topology.ts +85 -0
- package/src/index.ts +121 -0
- package/src/prompts/implementer.ts +18 -0
- package/src/prompts/reviewer.ts +26 -0
- package/src/runtime/fs-runtime.ts +209 -0
- package/src/runtime/git.ts +137 -0
- package/src/runtime/github-pr-snapshot-decode.ts +307 -0
- package/src/runtime/github-pr-snapshot-queries.ts +137 -0
- package/src/runtime/github-pr-snapshot.ts +139 -0
- package/src/runtime/github.ts +232 -0
- package/src/runtime/path-layout.ts +13 -0
- package/src/runtime/workspace-resolver.ts +125 -0
- package/src/schema/index.ts +127 -0
- package/src/schema/model.ts +233 -0
- package/src/schema/shared.ts +93 -0
- package/src/task-sources/openspec/cli-json.ts +79 -0
- package/src/task-sources/openspec/context-files.ts +121 -0
- package/src/task-sources/openspec/parse-tasks-md.ts +57 -0
- package/src/task-sources/openspec/session.ts +235 -0
- package/src/task-sources/openspec/source.ts +59 -0
- package/src/task-sources/registry.ts +22 -0
- package/src/task-sources/spec-kit/parse-tasks-md.ts +48 -0
- package/src/task-sources/spec-kit/session.ts +174 -0
- package/src/task-sources/spec-kit/source.ts +30 -0
- package/src/task-sources/types.ts +47 -0
- package/src/types.ts +29 -0
- package/src/utils/fs.ts +31 -0
- package/src/workflow/config.ts +127 -0
- package/src/workflow/direct-preset.ts +44 -0
- package/src/workflow/finalize-task-checkbox.ts +24 -0
- package/src/workflow/preset.ts +86 -0
- package/src/workflow/pull-request-preset.ts +312 -0
- package/src/workflow/remote-reviewer.ts +243 -0
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import {
|
|
2
|
+
parseReviewThreadCommentsPage,
|
|
3
|
+
parseSnapshotPage,
|
|
4
|
+
} from './github-pr-snapshot-decode'
|
|
5
|
+
import {
|
|
6
|
+
buildReviewThreadCommentsArgs,
|
|
7
|
+
buildSnapshotArgs,
|
|
8
|
+
} from './github-pr-snapshot-queries'
|
|
9
|
+
|
|
10
|
+
import type {
|
|
11
|
+
PullRequestDiscussionComment,
|
|
12
|
+
PullRequestReaction,
|
|
13
|
+
PullRequestReviewSummary,
|
|
14
|
+
PullRequestReviewThread,
|
|
15
|
+
PullRequestSnapshot,
|
|
16
|
+
} from '../core/runtime'
|
|
17
|
+
|
|
18
|
+
/** Runs the `gh` CLI with the given args and resolves with its stdout. */
export type RunGh = (args: string[]) => Promise<string>

/** Arguments for getPullRequestSnapshotViaGraphQL. */
export interface GetPullRequestSnapshotViaGraphQLInput {
  // Repository owner login (the "owner" half of "owner/repo").
  owner: string
  pullRequestNumber: number
  // Repository name without the owner prefix.
  repo: string
  // Injected gh runner; lets callers control environment and cwd.
  runGh: RunGh
}
|
|
26
|
+
|
|
27
|
+
/**
 * Fetches a complete snapshot of a pull request — changed files, discussion
 * comments, reactions, review summaries, and review threads — by paging
 * through the GitHub GraphQL API until every collection is exhausted.
 *
 * Pagination state is tracked per collection: `cursors` holds the last seen
 * end cursor, and `includes` records whether another page remains. Each
 * outer-loop iteration requests only the collections that still have pages,
 * so finished collections stop being queried. Review-thread comments are
 * themselves paginated, so an inner loop drains each thread's comment pages
 * before the thread is appended to the result.
 */
export async function getPullRequestSnapshotViaGraphQL(
  input: GetPullRequestSnapshotViaGraphQLInput,
): Promise<PullRequestSnapshot> {
  // Accumulators for each collection across pages.
  const changedFiles: string[] = []
  const discussionComments: PullRequestDiscussionComment[] = []
  const reactions: PullRequestReaction[] = []
  const reviewSummaries: PullRequestReviewSummary[] = []
  const reviewThreads: PullRequestReviewThread[] = []
  // Resume cursors; null means "start from the first page".
  const cursors = {
    comments: null as null | string,
    files: null as null | string,
    reactions: null as null | string,
    reviews: null as null | string,
    reviewThreads: null as null | string,
  }
  // Whether each collection still has pages to fetch; all start true.
  const includes = {
    comments: true,
    files: true,
    reactions: true,
    reviews: true,
    reviewThreads: true,
  }

  // Keep querying while any collection has a page remaining.
  while (
    includes.comments ||
    includes.files ||
    includes.reactions ||
    includes.reviews ||
    includes.reviewThreads
  ) {
    // Build a single GraphQL request covering all still-active collections,
    // then decode it; the second argument tells the parser which sections
    // were requested so it knows what to expect in the payload.
    const page = parseSnapshotPage(
      await input.runGh(
        buildSnapshotArgs({
          commentsAfter: cursors.comments,
          filesAfter: cursors.files,
          includeComments: includes.comments,
          includeFiles: includes.files,
          includeReactions: includes.reactions,
          includeReviews: includes.reviews,
          includeReviewThreads: includes.reviewThreads,
          number: input.pullRequestNumber,
          owner: input.owner,
          reactionsAfter: cursors.reactions,
          repo: input.repo,
          reviewsAfter: cursors.reviews,
          reviewThreadsAfter: cursors.reviewThreads,
        }),
      ),
      {
        comments: includes.comments,
        files: includes.files,
        reactions: includes.reactions,
        reviews: includes.reviews,
        reviewThreads: includes.reviewThreads,
      },
    )

    // For each returned section: append nodes, advance the cursor, and
    // record whether another page exists.
    if (page.comments) {
      discussionComments.push(...page.comments.nodes)
      cursors.comments = page.comments.pageInfo.endCursor
      includes.comments = page.comments.pageInfo.hasNextPage
    }
    if (page.files) {
      changedFiles.push(...page.files.nodes)
      cursors.files = page.files.pageInfo.endCursor
      includes.files = page.files.pageInfo.hasNextPage
    }
    if (page.reactions) {
      reactions.push(...page.reactions.nodes)
      cursors.reactions = page.reactions.pageInfo.endCursor
      includes.reactions = page.reactions.pageInfo.hasNextPage
    }
    if (page.reviews) {
      reviewSummaries.push(...page.reviews.nodes)
      cursors.reviews = page.reviews.pageInfo.endCursor
      includes.reviews = page.reviews.pageInfo.hasNextPage
    }
    if (page.reviewThreads) {
      for (const thread of page.reviewThreads.nodes) {
        // Thread comments have their own pagination: start from the
        // comments embedded in the snapshot page, then follow the thread's
        // comment cursor until exhausted.
        const comments = [...thread.comments]
        let pageInfo = thread.commentsPageInfo
        while (pageInfo.hasNextPage) {
          const nextPage = parseReviewThreadCommentsPage(
            await input.runGh(
              buildReviewThreadCommentsArgs({
                after: pageInfo.endCursor,
                threadId: thread.id,
              }),
            ),
          )
          comments.push(...nextPage.nodes)
          pageInfo = nextPage.pageInfo
        }
        reviewThreads.push({
          id: thread.id,
          comments,
          isOutdated: thread.isOutdated,
          isResolved: thread.isResolved,
        })
      }
      cursors.reviewThreads = page.reviewThreads.pageInfo.endCursor
      includes.reviewThreads = page.reviewThreads.pageInfo.hasNextPage
    }
  }

  return {
    // filter(Boolean) drops any empty path strings from the file list.
    changedFiles: changedFiles.filter(Boolean),
    discussionComments,
    reactions,
    reviewSummaries,
    reviewThreads,
  }
}
|
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
import { execa } from 'execa'
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
getPullRequestSnapshotViaGraphQL,
|
|
5
|
+
type RunGh,
|
|
6
|
+
} from './github-pr-snapshot'
|
|
7
|
+
|
|
8
|
+
import type {
|
|
9
|
+
CreatePullRequestInput,
|
|
10
|
+
FindMergedPullRequestByHeadBranchInput,
|
|
11
|
+
FindOpenPullRequestByHeadBranchInput,
|
|
12
|
+
GetPullRequestSnapshotInput,
|
|
13
|
+
GitHubPort,
|
|
14
|
+
MergedPullRequestRef,
|
|
15
|
+
PullRequestRef,
|
|
16
|
+
PullRequestSnapshot,
|
|
17
|
+
SquashMergePullRequestInput,
|
|
18
|
+
} from '../core/runtime'
|
|
19
|
+
|
|
20
|
+
async function defaultRunGh(args: string[], cwd: string) {
|
|
21
|
+
const env = process.env.GITHUB_BOT_TOKEN
|
|
22
|
+
? {
|
|
23
|
+
...process.env,
|
|
24
|
+
GH_TOKEN: process.env.GITHUB_BOT_TOKEN,
|
|
25
|
+
}
|
|
26
|
+
: process.env
|
|
27
|
+
const result = await execa('gh', args, { cwd, env })
|
|
28
|
+
return result.stdout.trim()
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
function asArray<T>(value: unknown): T[] {
|
|
32
|
+
return Array.isArray(value) ? (value as T[]) : []
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
function asNumber(value: unknown) {
|
|
36
|
+
return typeof value === 'number' && Number.isFinite(value) ? value : 0
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
function asString(value: unknown) {
|
|
40
|
+
return typeof value === 'string' ? value : ''
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Loose shape of a gh payload carrying an owner login (e.g. headRepositoryOwner).
interface LoginLike {
  login?: unknown
}

// Loose shape of a gh payload's mergeCommit object.
interface MergeCommitLike {
  oid?: unknown
}

// Loose shape of `gh repo view --json nameWithOwner` output.
interface RepoViewPayload {
  nameWithOwner?: unknown
}
|
|
54
|
+
|
|
55
|
+
function asOwnerLogin(value: unknown) {
|
|
56
|
+
if (typeof value === 'string') {
|
|
57
|
+
return value
|
|
58
|
+
}
|
|
59
|
+
if (value && typeof value === 'object' && 'login' in value) {
|
|
60
|
+
return asString((value as LoginLike).login)
|
|
61
|
+
}
|
|
62
|
+
return ''
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
 * GitHubPort implementation backed by the `gh` CLI.
 *
 * Every GitHub interaction goes through `runGh`, which defaults to invoking
 * `gh` in the workspace root (honoring GITHUB_BOT_TOKEN via defaultRunGh)
 * but can be injected, e.g. for tests.
 */
export class GitHubRuntime implements GitHubPort {
  private readonly runGh: RunGh

  public constructor(
    workspaceRoot: string,
    runGh?: RunGh,
    // Optional pinned "owner/repo"; when absent it is discovered via gh.
    private readonly repoName?: string,
  ) {
    this.runGh = runGh ?? ((args) => defaultRunGh(args, workspaceRoot))
  }

  // Returns "owner/repo", preferring the explicitly configured name and
  // falling back to `gh repo view` for the current workspace.
  private async resolveRepo() {
    if (this.repoName) {
      return this.repoName
    }
    const payload = JSON.parse(
      await this.runGh(['repo', 'view', '--json', 'nameWithOwner']),
    ) as RepoViewPayload
    return asString(payload.nameWithOwner)
  }

  // Returns just the owner half of "owner/repo"; throws on a malformed name.
  private async resolveRepoOwner() {
    const repo = await this.resolveRepo()
    const [owner] = repo.split('/')
    if (!owner) {
      throw new Error(`Invalid GitHub repository name: ${repo}`)
    }
    return owner
  }

  /**
   * Creates a PR via `gh pr create`, then re-queries the open PR for the
   * head branch to obtain its number/title/url (gh's create output is not
   * parsed directly). Throws if the created PR cannot be found afterwards.
   */
  public async createPullRequest(
    input: CreatePullRequestInput,
  ): Promise<PullRequestRef> {
    await this.runGh([
      'pr',
      'create',
      '--base',
      input.baseBranch,
      '--head',
      input.headBranch,
      '--title',
      input.title,
      '--body',
      input.body,
    ])
    const created = await this.findOpenPullRequestByHeadBranch({
      headBranch: input.headBranch,
    })
    if (!created) {
      throw new Error(
        `Could not resolve pull request after creating branch ${input.headBranch}`,
      )
    }
    return created
  }

  /**
   * Finds a merged PR whose head branch matches exactly and whose head
   * repository owner matches this repo's owner (filters out same-named
   * branches on forks). Returns null when no such PR exists; throws when a
   * matching PR lacks a merge commit SHA.
   */
  public async findMergedPullRequestByHeadBranch(
    input: FindMergedPullRequestByHeadBranchInput,
  ): Promise<MergedPullRequestRef | null> {
    const owner = await this.resolveRepoOwner()
    const payload = JSON.parse(
      await this.runGh([
        'pr',
        'list',
        '--head',
        input.headBranch,
        '--state',
        'merged',
        '--json',
        'number,title,url,mergeCommit,headRefName,headRepositoryOwner',
      ]),
    )
    const pullRequests = asArray<Record<string, unknown>>(payload)
    // Re-check headRefName even though --head was passed: gh's --head match
    // is re-verified here, plus the owner check excludes forks.
    const match =
      pullRequests.find(
        (candidate) =>
          asString(candidate.headRefName) === input.headBranch &&
          asOwnerLogin(candidate.headRepositoryOwner) === owner,
      ) ?? null
    if (!match) {
      return null
    }
    const mergeCommit = match.mergeCommit
    const mergeCommitSha =
      mergeCommit && typeof mergeCommit === 'object' && 'oid' in mergeCommit
        ? asString((mergeCommit as MergeCommitLike).oid)
        : ''
    if (!mergeCommitSha) {
      throw new Error(
        `Merged pull request for branch ${input.headBranch} is missing mergeCommit`,
      )
    }
    return {
      mergeCommitSha,
      number: asNumber(match.number),
      title: asString(match.title),
      url: asString(match.url),
    }
  }

  /**
   * Finds an open PR whose head branch matches exactly and whose head
   * repository owner matches this repo's owner. Returns null when absent.
   */
  public async findOpenPullRequestByHeadBranch(
    input: FindOpenPullRequestByHeadBranchInput,
  ): Promise<null | PullRequestRef> {
    const owner = await this.resolveRepoOwner()
    const payload = JSON.parse(
      await this.runGh([
        'pr',
        'list',
        '--head',
        input.headBranch,
        '--state',
        'open',
        '--json',
        'number,title,url,headRefName,headRepositoryOwner',
      ]),
    )
    const pullRequests = asArray<Record<string, unknown>>(payload)
    const match =
      pullRequests.find(
        (candidate) =>
          asString(candidate.headRefName) === input.headBranch &&
          asOwnerLogin(candidate.headRepositoryOwner) === owner,
      ) ?? null
    if (!match) {
      return null
    }
    return {
      number: asNumber(match.number),
      title: asString(match.title),
      url: asString(match.url),
    }
  }

  /** Delegates to the GraphQL snapshot fetcher with owner/repo split out. */
  public async getPullRequestSnapshot(
    input: GetPullRequestSnapshotInput,
  ): Promise<PullRequestSnapshot> {
    const repo = await this.resolveRepo()
    const [owner, repoName] = repo.split('/')
    if (!owner || !repoName) {
      throw new Error(`Invalid GitHub repository name: ${repo}`)
    }
    return getPullRequestSnapshotViaGraphQL({
      owner,
      pullRequestNumber: input.pullRequestNumber,
      repo: repoName,
      runGh: this.runGh,
    })
  }

  /**
   * Squash-merges a PR via the REST merge endpoint and returns the resulting
   * commit SHA (read from the response's `sha` field).
   */
  public async squashMergePullRequest(input: SquashMergePullRequestInput) {
    const repo = await this.resolveRepo()
    const commitSha = asString(
      JSON.parse(
        await this.runGh([
          'api',
          `repos/${repo}/pulls/${input.pullRequestNumber}/merge`,
          '--method',
          'PUT',
          '-f',
          'merge_method=squash',
          '-f',
          `commit_title=${input.subject}`,
        ]),
      ).sha,
    )
    return { commitSha }
  }
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import path from 'node:path'
|
|
2
|
+
|
|
3
|
+
export function createRuntimePaths(featureDir: string) {
|
|
4
|
+
const runtimeDir = path.join(featureDir, '.while')
|
|
5
|
+
return {
|
|
6
|
+
events: path.join(runtimeDir, 'events.jsonl'),
|
|
7
|
+
graph: path.join(runtimeDir, 'graph.json'),
|
|
8
|
+
report: path.join(runtimeDir, 'report.json'),
|
|
9
|
+
runtimeDir,
|
|
10
|
+
state: path.join(runtimeDir, 'state.json'),
|
|
11
|
+
tasksDir: path.join(runtimeDir, 'tasks'),
|
|
12
|
+
}
|
|
13
|
+
}
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import path from 'node:path'
|
|
2
|
+
|
|
3
|
+
import { execa } from 'execa'
|
|
4
|
+
import * as fsExtra from 'fs-extra'
|
|
5
|
+
|
|
6
|
+
import type { WorkspaceContext } from '../types'
|
|
7
|
+
|
|
8
|
+
export interface ResolveWorkspaceContextInput {
|
|
9
|
+
cwd: string
|
|
10
|
+
feature?: string | undefined
|
|
11
|
+
taskSource?: string | undefined
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
function resolveFeatureRoot(workspaceRoot: string, taskSource: string) {
|
|
15
|
+
if (taskSource === 'openspec') {
|
|
16
|
+
return path.join(workspaceRoot, 'openspec', 'changes')
|
|
17
|
+
}
|
|
18
|
+
return path.join(workspaceRoot, 'specs')
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
async function resolveWorkspaceRoot(cwd: string, taskSource: string) {
|
|
22
|
+
const workspaceRoot = path.resolve(cwd)
|
|
23
|
+
const featureRoot = resolveFeatureRoot(workspaceRoot, taskSource)
|
|
24
|
+
const featureRootExists = await fsExtra.pathExists(featureRoot)
|
|
25
|
+
if (!featureRootExists) {
|
|
26
|
+
if (taskSource === 'openspec') {
|
|
27
|
+
throw new Error(
|
|
28
|
+
'Current working directory must contain an openspec/changes/ directory. Run task-while from the workspace root.',
|
|
29
|
+
)
|
|
30
|
+
}
|
|
31
|
+
throw new Error(
|
|
32
|
+
'Current working directory must contain a specs/ directory. Run task-while from the workspace root.',
|
|
33
|
+
)
|
|
34
|
+
}
|
|
35
|
+
return workspaceRoot
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
async function readFeatureDirs(workspaceRoot: string, taskSource: string) {
|
|
39
|
+
const featureRoot = resolveFeatureRoot(workspaceRoot, taskSource)
|
|
40
|
+
const entries = await fsExtra.readdir(featureRoot, {
|
|
41
|
+
withFileTypes: true,
|
|
42
|
+
})
|
|
43
|
+
return entries
|
|
44
|
+
.filter((entry) => {
|
|
45
|
+
if (!entry.isDirectory()) {
|
|
46
|
+
return false
|
|
47
|
+
}
|
|
48
|
+
if (taskSource === 'openspec' && entry.name === 'archive') {
|
|
49
|
+
return false
|
|
50
|
+
}
|
|
51
|
+
return true
|
|
52
|
+
})
|
|
53
|
+
.map((entry) => entry.name)
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
async function detectGitBranch(workspaceRoot: string) {
|
|
57
|
+
try {
|
|
58
|
+
const { stdout } = await execa(
|
|
59
|
+
'git',
|
|
60
|
+
['rev-parse', '--abbrev-ref', 'HEAD'],
|
|
61
|
+
{
|
|
62
|
+
cwd: workspaceRoot,
|
|
63
|
+
},
|
|
64
|
+
)
|
|
65
|
+
return stdout.trim()
|
|
66
|
+
} catch {
|
|
67
|
+
return null
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
function matchFeatureByPrefix(featureDirs: string[], branch: string) {
|
|
72
|
+
const prefixMatch = branch.match(/^(\d{3}|\d{8}-\d{6})-/)
|
|
73
|
+
const prefix = prefixMatch?.[1]
|
|
74
|
+
if (!prefix) {
|
|
75
|
+
return null
|
|
76
|
+
}
|
|
77
|
+
return featureDirs.find((feature) => feature.startsWith(`${prefix}-`)) ?? null
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
export async function resolveWorkspaceContext(
|
|
81
|
+
input: ResolveWorkspaceContextInput,
|
|
82
|
+
): Promise<WorkspaceContext> {
|
|
83
|
+
const taskSource = input.taskSource ?? 'spec-kit'
|
|
84
|
+
const workspaceRoot = await resolveWorkspaceRoot(input.cwd, taskSource)
|
|
85
|
+
|
|
86
|
+
const featureDirs = await readFeatureDirs(workspaceRoot, taskSource)
|
|
87
|
+
let featureId = input.feature ?? null
|
|
88
|
+
|
|
89
|
+
if (!featureId && taskSource === 'spec-kit') {
|
|
90
|
+
const branch = await detectGitBranch(workspaceRoot)
|
|
91
|
+
if (branch) {
|
|
92
|
+
featureId = matchFeatureByPrefix(featureDirs, branch)
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
if (!featureId) {
|
|
97
|
+
if (featureDirs.length === 1) {
|
|
98
|
+
featureId = featureDirs[0] ?? null
|
|
99
|
+
} else {
|
|
100
|
+
throw new Error('Unable to determine feature. Pass --feature explicitly.')
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
if (!featureId) {
|
|
105
|
+
throw new Error('Unable to determine feature. Pass --feature explicitly.')
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
const featureDir = path.join(
|
|
109
|
+
resolveFeatureRoot(workspaceRoot, taskSource),
|
|
110
|
+
featureId,
|
|
111
|
+
)
|
|
112
|
+
const featureExists = await fsExtra.pathExists(featureDir)
|
|
113
|
+
if (!featureExists) {
|
|
114
|
+
if (taskSource === 'openspec') {
|
|
115
|
+
throw new Error(`OpenSpec change directory does not exist: ${featureId}`)
|
|
116
|
+
}
|
|
117
|
+
throw new Error(`Feature directory does not exist: ${featureId}`)
|
|
118
|
+
}
|
|
119
|
+
return {
|
|
120
|
+
featureDir,
|
|
121
|
+
featureId,
|
|
122
|
+
runtimeDir: path.join(featureDir, '.while'),
|
|
123
|
+
workspaceRoot,
|
|
124
|
+
}
|
|
125
|
+
}
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
import { zodToJsonSchema } from 'zod-to-json-schema'
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
finalReportSchema,
|
|
5
|
+
implementArtifactSchema,
|
|
6
|
+
implementOutputSchemaInternal,
|
|
7
|
+
integrateArtifactSchema,
|
|
8
|
+
reviewArtifactSchema,
|
|
9
|
+
reviewOutputSchemaInternal,
|
|
10
|
+
taskGraphSchema,
|
|
11
|
+
workflowEventSchema,
|
|
12
|
+
workflowStateSchema,
|
|
13
|
+
type acceptanceCheckSchema,
|
|
14
|
+
type blockedTaskStateSchema,
|
|
15
|
+
type doneTaskStateSchema,
|
|
16
|
+
type finalReportTaskSchema,
|
|
17
|
+
type pendingTaskStateSchema,
|
|
18
|
+
type replanTaskStateSchema,
|
|
19
|
+
type reviewFindingSchema,
|
|
20
|
+
type reworkTaskStateSchema,
|
|
21
|
+
type runningTaskStateSchema,
|
|
22
|
+
type taskStateSchema,
|
|
23
|
+
type taskTopologyEntrySchema,
|
|
24
|
+
} from './model'
|
|
25
|
+
import { parseWithSchema } from './shared'
|
|
26
|
+
|
|
27
|
+
import type { z } from 'zod'
|
|
28
|
+
|
|
29
|
+
export * from './model'
|
|
30
|
+
export * from './shared'
|
|
31
|
+
|
|
32
|
+
function ensureReviewSemantics(result: ReviewOutput) {
|
|
33
|
+
if (result.verdict === 'pass' && result.findings.length !== 0) {
|
|
34
|
+
throw new Error('Review verdict pass requires empty findings')
|
|
35
|
+
}
|
|
36
|
+
if (
|
|
37
|
+
result.verdict === 'pass' &&
|
|
38
|
+
result.acceptanceChecks.some((check) => check.status !== 'pass')
|
|
39
|
+
) {
|
|
40
|
+
throw new Error(
|
|
41
|
+
'Review verdict pass requires all acceptance checks to pass',
|
|
42
|
+
)
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
function toOpenAiOutputSchema(
|
|
47
|
+
schema:
|
|
48
|
+
| typeof implementOutputSchemaInternal
|
|
49
|
+
| typeof reviewOutputSchemaInternal,
|
|
50
|
+
) {
|
|
51
|
+
return zodToJsonSchema(schema, {
|
|
52
|
+
$refStrategy: 'none',
|
|
53
|
+
target: 'openAi',
|
|
54
|
+
}) as Record<string, unknown>
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// JSON Schema variants of the internal zod output schemas, pre-converted to
// the OpenAI structured-output dialect for use in agent requests.
export const implementOutputSchema = toOpenAiOutputSchema(
  implementOutputSchemaInternal,
)
export const reviewOutputSchema = toOpenAiOutputSchema(
  reviewOutputSchemaInternal,
)
|
|
63
|
+
|
|
64
|
+
/** Validates an implement-output payload; throws via parseWithSchema on mismatch. */
export function validateImplementOutput(value: unknown) {
  return parseWithSchema(implementOutputSchemaInternal, value)
}
|
|
67
|
+
|
|
68
|
+
export function validateReviewOutput(value: unknown) {
|
|
69
|
+
const result = parseWithSchema(reviewOutputSchemaInternal, value)
|
|
70
|
+
ensureReviewSemantics(result)
|
|
71
|
+
return result
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
// Thin validation wrappers: each parses `value` against its schema via
// parseWithSchema, returning the typed result or throwing on mismatch.

/** Validates a task-graph payload. */
export function validateTaskGraph(value: unknown) {
  return parseWithSchema(taskGraphSchema, value)
}

/** Validates a workflow-state payload. */
export function validateWorkflowState(value: unknown) {
  return parseWithSchema(workflowStateSchema, value)
}

/** Validates an implement-artifact payload. */
export function validateImplementArtifact(value: unknown) {
  return parseWithSchema(implementArtifactSchema, value)
}

/** Validates an integrate-artifact payload. */
export function validateIntegrateArtifact(value: unknown) {
  return parseWithSchema(integrateArtifactSchema, value)
}

/** Validates a review-artifact payload. */
export function validateReviewArtifact(value: unknown) {
  return parseWithSchema(reviewArtifactSchema, value)
}

/** Validates a workflow-event payload. */
export function validateWorkflowEvent(value: unknown) {
  return parseWithSchema(workflowEventSchema, value)
}

/** Validates a final-report payload. */
export function validateFinalReport(value: unknown) {
  return parseWithSchema(finalReportSchema, value)
}
|
|
101
|
+
|
|
102
|
+
// Convenience type aliases inferred from the zod schemas in ./model, so
// consumers can import the types without touching the schema objects.
export type AcceptanceCheck = z.infer<typeof acceptanceCheckSchema>
export type BlockedTaskState = z.infer<typeof blockedTaskStateSchema>
export type DoneTaskState = z.infer<typeof doneTaskStateSchema>
export type FinalReport = z.infer<typeof finalReportSchema>
export type FinalReportTask = z.infer<typeof finalReportTaskSchema>
export type IntegrateArtifact = z.infer<typeof integrateArtifactSchema>
export type ImplementArtifact = z.infer<typeof implementArtifactSchema>
export type ImplementOutput = z.infer<typeof implementOutputSchemaInternal>
export type PendingTaskState = z.infer<typeof pendingTaskStateSchema>
export type ReviewArtifact = z.infer<typeof reviewArtifactSchema>
export type ReviewFinding = z.infer<typeof reviewFindingSchema>
export type ReviewOutput = z.infer<typeof reviewOutputSchemaInternal>
export type ReviewVerdict = z.infer<
  typeof reviewOutputSchemaInternal
>['verdict']
export type ReplanTaskState = z.infer<typeof replanTaskStateSchema>
export type ReworkTaskState = z.infer<typeof reworkTaskStateSchema>
export type RunningStage = z.infer<typeof runningTaskStateSchema>['stage']
export type RunningTaskState = z.infer<typeof runningTaskStateSchema>
export type TaskTopologyEntry = z.infer<typeof taskTopologyEntrySchema>
export type TaskGraph = z.infer<typeof taskGraphSchema>
export type TaskState = z.infer<typeof taskStateSchema>
export type TaskStatus = z.infer<typeof taskStateSchema>['status']
export type WorkflowEvent = z.infer<typeof workflowEventSchema>
export type WorkflowEventType = z.infer<typeof workflowEventSchema>['type']
export type WorkflowState = z.infer<typeof workflowStateSchema>
|