agentweaver 0.1.5 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Dockerfile.codex +56 -0
- package/README.md +38 -15
- package/dist/artifacts.js +38 -8
- package/dist/executors/configs/fetch-gitlab-review-config.js +3 -0
- package/dist/executors/fetch-gitlab-review-executor.js +25 -0
- package/dist/flow-state.js +134 -0
- package/dist/gitlab.js +153 -0
- package/dist/index.js +397 -250
- package/dist/interactive-ui.js +170 -42
- package/dist/pipeline/declarative-flow-runner.js +28 -0
- package/dist/pipeline/flow-specs/auto.json +530 -392
- package/dist/pipeline/flow-specs/bug-analyze.json +149 -0
- package/dist/pipeline/flow-specs/gitlab-review.json +347 -0
- package/dist/pipeline/flow-specs/implement.json +0 -9
- package/dist/pipeline/flow-specs/plan.json +133 -0
- package/dist/pipeline/flow-specs/review-fix.json +2 -11
- package/dist/pipeline/flow-specs/review-project.json +243 -0
- package/dist/pipeline/flow-specs/run-go-linter-loop.json +155 -0
- package/dist/pipeline/flow-specs/run-go-tests-loop.json +155 -0
- package/dist/pipeline/flow-specs/run-linter-loop.json +17 -11
- package/dist/pipeline/flow-specs/run-tests-loop.json +17 -11
- package/dist/pipeline/flow-specs/task-describe.json +25 -0
- package/dist/pipeline/node-registry.js +28 -1
- package/dist/pipeline/nodes/fetch-gitlab-review-node.js +34 -0
- package/dist/pipeline/nodes/gitlab-review-artifacts-node.js +105 -0
- package/dist/pipeline/nodes/jira-issue-check-node.js +53 -0
- package/dist/pipeline/nodes/local-script-check-node.js +81 -0
- package/dist/pipeline/nodes/review-findings-form-node.js +14 -14
- package/dist/pipeline/prompt-registry.js +5 -5
- package/dist/pipeline/registry.js +2 -0
- package/dist/pipeline/value-resolver.js +7 -1
- package/dist/prompts.js +11 -4
- package/dist/scope.js +118 -0
- package/dist/structured-artifacts.js +33 -0
- package/docker-compose.yml +445 -0
- package/package.json +8 -3
- package/run_go_coverage.sh +113 -0
- package/run_go_linter.sh +89 -0
- package/run_go_tests.sh +83 -0
- package/verify_build.sh +105 -0
- package/dist/executors/claude-summary-executor.js +0 -31
- package/dist/executors/configs/claude-summary-config.js +0 -8
- package/dist/pipeline/flow-runner.js +0 -13
- package/dist/pipeline/flow-specs/test-fix.json +0 -24
- package/dist/pipeline/flow-specs/test-linter-fix.json +0 -24
- package/dist/pipeline/flow-specs/test.json +0 -19
- package/dist/pipeline/flow-types.js +0 -1
- package/dist/pipeline/flows/implement-flow.js +0 -47
- package/dist/pipeline/flows/plan-flow.js +0 -42
- package/dist/pipeline/flows/review-fix-flow.js +0 -62
- package/dist/pipeline/flows/review-flow.js +0 -124
- package/dist/pipeline/flows/test-fix-flow.js +0 -12
- package/dist/pipeline/flows/test-flow.js +0 -32
- package/dist/pipeline/nodes/claude-summary-node.js +0 -38
- package/dist/pipeline/nodes/implement-codex-node.js +0 -16
- package/dist/pipeline/nodes/task-summary-node.js +0 -42
package/Dockerfile.codex
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
FROM golang:1.25.5-bookworm
|
|
2
|
+
|
|
3
|
+
ARG GOLANGCI_LINT_VERSION=v2.7.2
|
|
4
|
+
ARG MOCKGEN_VERSION=v1.6.0
|
|
5
|
+
ARG SWAG_VERSION=latest
|
|
6
|
+
ARG PROTOC_GEN_GO_VERSION=latest
|
|
7
|
+
ARG PROTOC_GEN_GO_GRPC_VERSION=latest
|
|
8
|
+
|
|
9
|
+
RUN apt-get update \
|
|
10
|
+
&& apt-get install -y --no-install-recommends \
|
|
11
|
+
ca-certificates \
|
|
12
|
+
nodejs \
|
|
13
|
+
npm \
|
|
14
|
+
curl \
|
|
15
|
+
jq \
|
|
16
|
+
less \
|
|
17
|
+
file \
|
|
18
|
+
make \
|
|
19
|
+
procps \
|
|
20
|
+
ripgrep \
|
|
21
|
+
git \
|
|
22
|
+
openssh-client \
|
|
23
|
+
docker.io \
|
|
24
|
+
protobuf-compiler \
|
|
25
|
+
unzip \
|
|
26
|
+
zip \
|
|
27
|
+
findutils \
|
|
28
|
+
&& update-ca-certificates \
|
|
29
|
+
&& rm -rf /var/lib/apt/lists/*
|
|
30
|
+
|
|
31
|
+
RUN if ! getent group 1000 >/dev/null; then groupadd -g 1000 codex; fi \
|
|
32
|
+
&& if ! getent passwd 1000 >/dev/null; then useradd -u 1000 -g 1000 -d /codex-home/home -M -s /bin/bash codex; fi
|
|
33
|
+
|
|
34
|
+
RUN npm install -g @openai/codex@latest \
|
|
35
|
+
&& npm cache clean --force
|
|
36
|
+
|
|
37
|
+
RUN GOBIN=/usr/local/bin go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@${GOLANGCI_LINT_VERSION} \
|
|
38
|
+
&& GOBIN=/usr/local/bin go install github.com/golang/mock/mockgen@${MOCKGEN_VERSION} \
|
|
39
|
+
&& GOBIN=/usr/local/bin go install github.com/swaggo/swag/cmd/swag@${SWAG_VERSION} \
|
|
40
|
+
&& GOBIN=/usr/local/bin go install google.golang.org/protobuf/cmd/protoc-gen-go@${PROTOC_GEN_GO_VERSION} \
|
|
41
|
+
&& GOBIN=/usr/local/bin go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@${PROTOC_GEN_GO_GRPC_VERSION} \
|
|
42
|
+
&& ln -sf /usr/local/go/bin/go /usr/bin/go \
|
|
43
|
+
&& ln -sf /usr/local/go/bin/gofmt /usr/bin/gofmt
|
|
44
|
+
|
|
45
|
+
ENV PATH="/usr/local/go/bin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
|
|
46
|
+
|
|
47
|
+
COPY verify_build.sh /usr/local/bin/verify_build.sh
|
|
48
|
+
COPY run_go_tests.sh /usr/local/bin/run_go_tests.sh
|
|
49
|
+
COPY run_go_linter.sh /usr/local/bin/run_go_linter.sh
|
|
50
|
+
COPY run_go_coverage.sh /usr/local/bin/run_go_coverage.sh
|
|
51
|
+
RUN chmod +x /usr/local/bin/verify_build.sh /usr/local/bin/run_go_tests.sh /usr/local/bin/run_go_linter.sh /usr/local/bin/run_go_coverage.sh
|
|
52
|
+
|
|
53
|
+
WORKDIR /workspace
|
|
54
|
+
|
|
55
|
+
ENTRYPOINT ["codex"]
|
|
56
|
+
CMD ["--dangerously-bypass-approvals-and-sandbox"]
|
package/README.md
CHANGED
|
@@ -1,19 +1,21 @@
|
|
|
1
1
|
# AgentWeaver
|
|
2
2
|
|
|
3
|
-
`AgentWeaver` is a TypeScript/Node.js CLI for engineering workflows around Jira, Codex, and Claude.
|
|
3
|
+
`AgentWeaver` is a TypeScript/Node.js CLI for engineering workflows around Jira, GitLab review artifacts, Codex, and Claude.
|
|
4
4
|
|
|
5
5
|
It orchestrates a flow like:
|
|
6
6
|
|
|
7
|
-
`plan -> implement ->
|
|
7
|
+
`plan -> implement -> run-go-linter-loop -> run-go-tests-loop -> review -> review-fix`
|
|
8
8
|
|
|
9
9
|
The package is designed to run as an npm CLI and includes an interactive terminal UI built on `neo-blessed`.
|
|
10
10
|
|
|
11
11
|
## What It Does
|
|
12
12
|
|
|
13
13
|
- Fetches a Jira issue by key or browse URL
|
|
14
|
+
- Fetches GitLab merge request review comments into reusable markdown and JSON artifacts
|
|
14
15
|
- Generates workflow artifacts such as design, implementation plan, QA plan, bug analysis, reviews, and summaries
|
|
15
|
-
- Machine-readable JSON artifacts are stored under `.agentweaver
|
|
16
|
-
-
|
|
16
|
+
- Machine-readable JSON artifacts are stored under `.agentweaver/scopes/<scope-key>/.artifacts/` and act as the source of truth between workflow steps; Markdown artifacts remain for human inspection
|
|
17
|
+
- Workflow artifacts are isolated by scope; for Jira-driven flows the scope key defaults to the Jira task key, otherwise it defaults to `<git-branch>--<worktree-hash>`
|
|
18
|
+
- Runs workflow stages like `bug-analyze`, `bug-fix`, `mr-description`, `plan`, `task-describe`, `implement`, `review`, `review-fix`, `run-go-tests-loop`, `run-go-linter-loop`, and `auto`
|
|
17
19
|
- Persists compact `auto` pipeline state on disk so runs can resume without storing large agent outputs
|
|
18
20
|
- Uses Docker runtime services for isolated Codex execution and build verification
|
|
19
21
|
|
|
@@ -22,9 +24,9 @@ The package is designed to run as an npm CLI and includes an interactive termina
|
|
|
22
24
|
The CLI now uses an executor + node + declarative flow architecture.
|
|
23
25
|
|
|
24
26
|
- `src/index.ts` remains the CLI entrypoint and high-level orchestration layer
|
|
25
|
-
- `src/executors/` contains first-class executors for external actions such as Jira fetch, local Codex, Docker-based build verification, Claude, Claude summaries, and process execution
|
|
27
|
+
- `src/executors/` contains first-class executors for external actions such as Jira fetch, GitLab review fetch, local Codex, Docker-based build verification, Claude, Claude summaries, and process execution
|
|
26
28
|
- `src/pipeline/nodes/` contains reusable runtime nodes built on top of executors
|
|
27
|
-
- `src/pipeline/flow-specs/` contains declarative JSON flow specs for `preflight`, `bug-analyze`, `bug-fix`, `mr-description`, `plan`, `task-describe`, `implement`, `review`, `review-fix`, `
|
|
29
|
+
- `src/pipeline/flow-specs/` contains declarative JSON flow specs for `preflight`, `bug-analyze`, `bug-fix`, `gitlab-review`, `mr-description`, `plan`, `task-describe`, `implement`, `review`, `review-fix`, `run-go-tests-loop`, `run-go-linter-loop`, and `auto`
|
|
28
30
|
- `src/runtime/` contains shared runtime services such as command resolution, Docker runtime environment setup, and subprocess execution
|
|
29
31
|
|
|
30
32
|
This keeps command handlers focused on choosing a flow and providing parameters instead of assembling prompts and subprocess wiring inline.
|
|
@@ -43,8 +45,9 @@ This keeps command handlers focused on choosing a flow and providing parameters
|
|
|
43
45
|
- `docker-compose.yml` — runtime services for Codex and build verification
|
|
44
46
|
- `Dockerfile.codex` — container image for Codex runtime
|
|
45
47
|
- `verify_build.sh` — aggregated verification entrypoint used by `verify-build`
|
|
46
|
-
- `
|
|
47
|
-
- `
|
|
48
|
+
- `run_go_tests.sh` — isolated Go test verification entrypoint
|
|
49
|
+
- `run_go_linter.sh` — isolated Go generate + lint verification entrypoint
|
|
50
|
+
- `run_go_coverage.sh` — isolated Go coverage verification entrypoint
|
|
48
51
|
- `package.json` — npm package metadata and scripts
|
|
49
52
|
- `tsconfig.json` — TypeScript configuration
|
|
50
53
|
|
|
@@ -86,6 +89,7 @@ Required:
|
|
|
86
89
|
Common optional variables:
|
|
87
90
|
|
|
88
91
|
- `JIRA_BASE_URL` — required when you pass only an issue key like `DEMO-123`
|
|
92
|
+
- `GITLAB_TOKEN` — personal access token for `gitlab-review`
|
|
89
93
|
- `AGENTWEAVER_HOME` — path to the AgentWeaver installation directory
|
|
90
94
|
- `DOCKER_COMPOSE_BIN` — override compose command, for example `docker compose`
|
|
91
95
|
- `CODEX_BIN` — override `codex` executable path
|
|
@@ -115,14 +119,19 @@ Direct CLI usage:
|
|
|
115
119
|
|
|
116
120
|
```bash
|
|
117
121
|
agentweaver plan DEMO-3288
|
|
122
|
+
agentweaver plan
|
|
118
123
|
agentweaver bug-analyze DEMO-3288
|
|
119
124
|
agentweaver bug-fix DEMO-3288
|
|
125
|
+
agentweaver gitlab-review DEMO-3288
|
|
120
126
|
agentweaver mr-description DEMO-3288
|
|
121
127
|
agentweaver task-describe DEMO-3288
|
|
122
128
|
agentweaver implement DEMO-3288
|
|
129
|
+
agentweaver review
|
|
123
130
|
agentweaver review DEMO-3288
|
|
124
|
-
agentweaver
|
|
125
|
-
agentweaver run-
|
|
131
|
+
agentweaver review --scope release-prep
|
|
132
|
+
agentweaver run-go-tests-loop DEMO-3288
|
|
133
|
+
agentweaver run-go-tests-loop
|
|
134
|
+
agentweaver run-go-linter-loop DEMO-3288
|
|
126
135
|
agentweaver auto DEMO-3288
|
|
127
136
|
```
|
|
128
137
|
|
|
@@ -130,10 +139,13 @@ From source checkout:
|
|
|
130
139
|
|
|
131
140
|
```bash
|
|
132
141
|
node dist/index.js plan DEMO-3288
|
|
142
|
+
node dist/index.js plan
|
|
133
143
|
node dist/index.js bug-analyze DEMO-3288
|
|
134
144
|
node dist/index.js bug-fix DEMO-3288
|
|
145
|
+
node dist/index.js gitlab-review DEMO-3288
|
|
135
146
|
node dist/index.js mr-description DEMO-3288
|
|
136
147
|
node dist/index.js task-describe DEMO-3288
|
|
148
|
+
node dist/index.js review
|
|
137
149
|
node dist/index.js auto DEMO-3288
|
|
138
150
|
```
|
|
139
151
|
|
|
@@ -141,6 +153,7 @@ Interactive mode:
|
|
|
141
153
|
|
|
142
154
|
```bash
|
|
143
155
|
agentweaver DEMO-3288
|
|
156
|
+
agentweaver
|
|
144
157
|
```
|
|
145
158
|
|
|
146
159
|
When you run from a working project directory, set `AGENTWEAVER_HOME` to the AgentWeaver installation:
|
|
@@ -161,6 +174,9 @@ agentweaver auto-reset DEMO-3288
|
|
|
161
174
|
Notes:
|
|
162
175
|
|
|
163
176
|
- `--verbose` streams child process `stdout/stderr` in direct CLI mode
|
|
177
|
+
- task-only commands such as `plan` and `auto` ask for Jira task via interactive `user-input` when it is omitted
|
|
178
|
+
- scope-flexible commands such as `review`, `review-fix`, `run-go-tests-loop`, and `run-go-linter-loop` use the current git branch by default when Jira task is omitted
|
|
179
|
+
- `--scope <name>` lets you override the default project scope name
|
|
164
180
|
- the interactive `Activity` pane is intentionally structured: it shows launch separators, prompts, summaries, and short status messages instead of raw Codex/Claude logs by default
|
|
165
181
|
|
|
166
182
|
## Interactive TUI
|
|
@@ -189,15 +205,16 @@ Activity pane behavior:
|
|
|
189
205
|
|
|
190
206
|
## Docker Runtime
|
|
191
207
|
|
|
192
|
-
Docker is used as an isolated execution environment for Codex
|
|
208
|
+
Docker is used as an isolated execution environment for Codex-related runtime scenarios that still require container orchestration.
|
|
193
209
|
|
|
194
210
|
Main services:
|
|
195
211
|
|
|
196
212
|
- `codex` — interactive Codex container
|
|
197
213
|
- `codex-exec` — non-interactive `codex exec`
|
|
198
214
|
- `verify-build` — project verification script inside container
|
|
199
|
-
- `run-tests` — isolated `
|
|
200
|
-
- `run-linter` — isolated `
|
|
215
|
+
- `run-go-tests` — isolated `run_go_tests.sh` execution inside container
|
|
216
|
+
- `run-go-linter` — isolated `run_go_linter.sh` execution inside container
|
|
217
|
+
- `run-go-coverage` — isolated `run_go_coverage.sh` execution inside container
|
|
201
218
|
- `codex-login` — interactive login container
|
|
202
219
|
- `dockerd` — internal Docker daemon for testcontainers/build flows
|
|
203
220
|
|
|
@@ -230,13 +247,19 @@ PROJECT_DIR="$PWD" docker compose -f "$AGENTWEAVER_HOME/docker-compose.yml" run
|
|
|
230
247
|
Tests only:
|
|
231
248
|
|
|
232
249
|
```bash
|
|
233
|
-
PROJECT_DIR="$PWD" docker compose -f "$AGENTWEAVER_HOME/docker-compose.yml" run --rm run-tests
|
|
250
|
+
PROJECT_DIR="$PWD" docker compose -f "$AGENTWEAVER_HOME/docker-compose.yml" run --rm run-go-tests
|
|
234
251
|
```
|
|
235
252
|
|
|
236
253
|
Linter only:
|
|
237
254
|
|
|
238
255
|
```bash
|
|
239
|
-
PROJECT_DIR="$PWD" docker compose -f "$AGENTWEAVER_HOME/docker-compose.yml" run --rm run-linter
|
|
256
|
+
PROJECT_DIR="$PWD" docker compose -f "$AGENTWEAVER_HOME/docker-compose.yml" run --rm run-go-linter
|
|
257
|
+
```
|
|
258
|
+
|
|
259
|
+
Coverage only:
|
|
260
|
+
|
|
261
|
+
```bash
|
|
262
|
+
PROJECT_DIR="$PWD" docker compose -f "$AGENTWEAVER_HOME/docker-compose.yml" run --rm run-go-coverage
|
|
240
263
|
```
|
|
241
264
|
|
|
242
265
|
## Development
|
package/dist/artifacts.js
CHANGED
|
@@ -5,23 +5,41 @@ import { TaskRunnerError } from "./errors.js";
|
|
|
5
5
|
export const REVIEW_FILE_RE = /^review-(.+)-(\d+)\.md$/;
|
|
6
6
|
export const REVIEW_REPLY_FILE_RE = /^review-reply-(.+)-(\d+)\.md$/;
|
|
7
7
|
export const READY_TO_MERGE_FILE = "ready-to-merge.md";
|
|
8
|
-
export function
|
|
9
|
-
return path.join(process.cwd(),
|
|
8
|
+
export function scopesRootDir() {
|
|
9
|
+
return path.join(process.cwd(), ".agentweaver", "scopes");
|
|
10
10
|
}
|
|
11
|
-
export function
|
|
12
|
-
|
|
11
|
+
export function scopeWorkspaceDir(scopeKey) {
|
|
12
|
+
return path.join(scopesRootDir(), scopeKey);
|
|
13
|
+
}
|
|
14
|
+
export function ensureScopeWorkspaceDir(scopeKey) {
|
|
15
|
+
const workspaceDir = scopeWorkspaceDir(scopeKey);
|
|
13
16
|
mkdirSync(workspaceDir, { recursive: true });
|
|
14
|
-
mkdirSync(
|
|
17
|
+
mkdirSync(scopeArtifactsDir(scopeKey), { recursive: true });
|
|
15
18
|
return workspaceDir;
|
|
16
19
|
}
|
|
20
|
+
export function scopeWorkspaceFile(scopeKey, fileName) {
|
|
21
|
+
return path.join(scopeWorkspaceDir(scopeKey), fileName);
|
|
22
|
+
}
|
|
23
|
+
export function scopeArtifactsDir(scopeKey) {
|
|
24
|
+
return path.join(scopeWorkspaceDir(scopeKey), ".artifacts");
|
|
25
|
+
}
|
|
26
|
+
export function scopeArtifactsFile(scopeKey, fileName) {
|
|
27
|
+
return path.join(scopeArtifactsDir(scopeKey), fileName);
|
|
28
|
+
}
|
|
29
|
+
export function taskWorkspaceDir(taskKey) {
|
|
30
|
+
return scopeWorkspaceDir(taskKey);
|
|
31
|
+
}
|
|
32
|
+
export function ensureTaskWorkspaceDir(taskKey) {
|
|
33
|
+
return ensureScopeWorkspaceDir(taskKey);
|
|
34
|
+
}
|
|
17
35
|
export function taskWorkspaceFile(taskKey, fileName) {
|
|
18
|
-
return
|
|
36
|
+
return scopeWorkspaceFile(taskKey, fileName);
|
|
19
37
|
}
|
|
20
38
|
export function taskArtifactsDir(taskKey) {
|
|
21
|
-
return
|
|
39
|
+
return scopeArtifactsDir(taskKey);
|
|
22
40
|
}
|
|
23
41
|
export function taskArtifactsFile(taskKey, fileName) {
|
|
24
|
-
return
|
|
42
|
+
return scopeArtifactsFile(taskKey, fileName);
|
|
25
43
|
}
|
|
26
44
|
export function artifactFile(prefix, taskKey, iteration) {
|
|
27
45
|
return taskWorkspaceFile(taskKey, `${prefix}-${taskKey}-${iteration}.md`);
|
|
@@ -89,9 +107,21 @@ export function mrDescriptionFile(taskKey) {
|
|
|
89
107
|
export function mrDescriptionJsonFile(taskKey) {
|
|
90
108
|
return taskArtifactsFile(taskKey, `mr-description-${taskKey}.json`);
|
|
91
109
|
}
|
|
110
|
+
export function gitlabReviewFile(taskKey) {
|
|
111
|
+
return taskWorkspaceFile(taskKey, `gitlab-review-${taskKey}.md`);
|
|
112
|
+
}
|
|
113
|
+
export function gitlabReviewJsonFile(taskKey) {
|
|
114
|
+
return taskArtifactsFile(taskKey, `gitlab-review-${taskKey}.json`);
|
|
115
|
+
}
|
|
116
|
+
export function gitlabReviewInputJsonFile(taskKey) {
|
|
117
|
+
return taskArtifactsFile(taskKey, `gitlab-review-input-${taskKey}.json`);
|
|
118
|
+
}
|
|
92
119
|
export function autoStateFile(taskKey) {
|
|
93
120
|
return taskArtifactsFile(taskKey, `.agentweaver-state-${taskKey}.json`);
|
|
94
121
|
}
|
|
122
|
+
export function flowStateFile(scopeKey, flowId) {
|
|
123
|
+
return scopeArtifactsFile(scopeKey, `.agentweaver-flow-state-${flowId}.json`);
|
|
124
|
+
}
|
|
95
125
|
export function planArtifacts(taskKey) {
|
|
96
126
|
return [designFile(taskKey), designJsonFile(taskKey), planFile(taskKey), planJsonFile(taskKey), qaFile(taskKey), qaJsonFile(taskKey)];
|
|
97
127
|
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { fetchGitLabReviewExecutorDefaultConfig } from "./configs/fetch-gitlab-review-config.js";
import { buildGitLabReviewFetchTarget, fetchGitLabReview } from "../gitlab.js";
/**
 * Executor that downloads GitLab merge-request review comments and stores
 * them as markdown + JSON artifacts under the scope workspace.
 */
export const fetchGitLabReviewExecutor = {
    kind: "fetch-gitlab-review",
    version: 1,
    defaultConfig: fetchGitLabReviewExecutorDefaultConfig,
    async execute(context, input) {
        const target = buildGitLabReviewFetchTarget(input.mergeRequestUrl);
        if (context.verbose) {
            // Emit one diagnostic line per resolved coordinate/destination.
            const details = [
                `GitLab MR URL: ${target.mergeRequestUrl}`,
                `GitLab project path: ${target.projectPath}`,
                `GitLab merge request IID: ${target.mergeRequestIid}`,
                `GitLab discussions API URL: ${target.discussionsApiUrl}`,
                `Saving GitLab review markdown to: ${input.outputFile}`,
                `Saving GitLab review JSON to: ${input.outputJsonFile}`,
            ];
            for (const line of details) {
                context.ui.writeStdout(`${line}\n`);
            }
        }
        const artifact = await fetchGitLabReview(input.mergeRequestUrl, input.outputFile, input.outputJsonFile);
        return {
            outputFile: input.outputFile,
            outputJsonFile: input.outputJsonFile,
            mergeRequestUrl: artifact.merge_request_url,
            commentsCount: artifact.comments.length,
        };
    },
};
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import { existsSync, readFileSync, rmSync, writeFileSync } from "node:fs";
|
|
2
|
+
import { ensureScopeWorkspaceDir, flowStateFile } from "./artifacts.js";
|
|
3
|
+
import { TaskRunnerError } from "./errors.js";
|
|
4
|
+
const FLOW_STATE_SCHEMA_VERSION = 1;
|
|
5
|
+
/** Current timestamp in ISO-8601 form, used for state bookkeeping. */
function nowIso8601() {
    return new Date().toISOString();
}
/**
 * Reduces a full in-memory execution state to the compact shape persisted
 * on disk: identity, status, timing, and small step outputs are kept;
 * optional fields are omitted entirely when unset so the file stays small.
 */
export function stripExecutionStatePayload(executionState) {
    const compactStep = (step) => {
        const slim = { id: step.id, status: step.status };
        if (step.outputs) {
            slim.outputs = step.outputs;
        }
        if (step.value !== undefined) {
            slim.value = step.value;
        }
        if (step.startedAt) {
            slim.startedAt = step.startedAt;
        }
        if (step.finishedAt) {
            slim.finishedAt = step.finishedAt;
        }
        if (step.stopFlow !== undefined) {
            slim.stopFlow = step.stopFlow;
        }
        return slim;
    };
    const compactPhase = (phase) => {
        const slim = { id: phase.id, status: phase.status, repeatVars: { ...phase.repeatVars } };
        if (phase.startedAt) {
            slim.startedAt = phase.startedAt;
        }
        if (phase.finishedAt) {
            slim.finishedAt = phase.finishedAt;
        }
        slim.steps = phase.steps.map(compactStep);
        return slim;
    };
    const compact = {
        flowKind: executionState.flowKind,
        flowVersion: executionState.flowVersion,
        terminated: executionState.terminated,
    };
    if (executionState.terminationReason) {
        compact.terminationReason = executionState.terminationReason;
    }
    compact.phases = executionState.phases.map(compactPhase);
    return compact;
}
|
|
32
|
+
/**
 * Builds a fresh on-disk flow run state envelope for the given scope/flow,
 * embedding a compacted copy of the execution state.
 */
export function createFlowRunState(scopeKey, flowId, executionState) {
    const state = {
        schemaVersion: FLOW_STATE_SCHEMA_VERSION,
        flowId,
        scopeKey,
        status: "pending",
        currentStep: null,
        updatedAt: nowIso8601(),
        executionState: stripExecutionStatePayload(executionState),
    };
    return state;
}
|
|
43
|
+
/**
 * Loads and validates the persisted flow state for a scope/flow pair.
 * Returns null when no state file exists on disk.
 * @throws TaskRunnerError on unreadable JSON, an unexpected shape, a wrong
 *   schema version, or when the file belongs to a different flow.
 */
export function loadFlowRunState(scopeKey, flowId) {
    const filePath = flowStateFile(scopeKey, flowId);
    if (!existsSync(filePath)) {
        return null;
    }
    let parsed;
    try {
        parsed = JSON.parse(readFileSync(filePath, "utf8"));
    }
    catch (error) {
        throw new TaskRunnerError(`Failed to parse flow state file ${filePath}: ${error.message}`);
    }
    if (!parsed || typeof parsed !== "object") {
        throw new TaskRunnerError(`Invalid flow state file format: ${filePath}`);
    }
    if (parsed.schemaVersion !== FLOW_STATE_SCHEMA_VERSION) {
        throw new TaskRunnerError(`Unsupported flow state schema in ${filePath}: ${parsed.schemaVersion}`);
    }
    if (parsed.flowId !== flowId) {
        throw new TaskRunnerError(`Flow state ${filePath} belongs to flow '${parsed.flowId}', expected '${flowId}'`);
    }
    return parsed;
}
|
|
67
|
+
/**
 * Persists the flow run state to disk. Mutates the passed state by
 * refreshing `updatedAt`, ensures the scope workspace exists, and writes
 * a re-compacted copy of the embedded execution state.
 */
export function saveFlowRunState(state) {
    state.updatedAt = nowIso8601();
    ensureScopeWorkspaceDir(state.scopeKey);
    const serializable = {
        ...state,
        executionState: stripExecutionStatePayload(state.executionState),
    };
    const filePath = flowStateFile(state.scopeKey, state.flowId);
    writeFileSync(filePath, `${JSON.stringify(serializable, null, 2)}\n`, "utf8");
}
|
|
75
|
+
/**
 * Deletes the persisted flow state file for a scope/flow pair.
 * @returns {boolean} true when a file was removed, false if none existed.
 */
export function resetFlowRunState(scopeKey, flowId) {
    const filePath = flowStateFile(scopeKey, flowId);
    const exists = existsSync(filePath);
    if (exists) {
        rmSync(filePath);
    }
    return exists;
}
|
|
83
|
+
/**
 * Decides whether a persisted flow state can be resumed: the run must not
 * be terminated or completed, and must either be mid-run/blocked or have
 * at least one step that already made progress.
 */
export function hasResumableFlowState(state) {
    if (!state || state.executionState.terminated || state.status === "completed") {
        return false;
    }
    if (state.status === "running" || state.status === "blocked") {
        return true;
    }
    const madeProgress = (step) => step.status === "done" || step.status === "running";
    return state.executionState.phases.some((phase) => phase.steps.some(madeProgress));
}
|
|
98
|
+
/**
 * Resets an interrupted step for resume: a step caught mid-run loses its
 * partial results (finish time, outputs, value, stop flag) and returns to
 * pending; all other statuses pass through unchanged.
 */
function normalizeStepState(step) {
    if (step.status !== "running") {
        return step;
    }
    const cleared = { ...step, status: "pending" };
    delete cleared.finishedAt;
    delete cleared.outputs;
    delete cleared.value;
    delete cleared.stopFlow;
    return cleared;
}
/**
 * Resets an interrupted phase for resume: its steps are normalized first,
 * and a phase caught mid-run drops its finish time and returns to pending.
 */
function normalizePhaseState(phase) {
    const steps = phase.steps.map(normalizeStepState);
    if (phase.status !== "running") {
        return { ...phase, steps };
    }
    const reset = { ...phase, status: "pending", steps };
    delete reset.finishedAt;
    return reset;
}
|
|
123
|
+
/**
 * Mutates a loaded flow state so it can be re-run: the overall status goes
 * back to pending, stale error/step markers are cleared, the termination
 * reason is dropped, and every phase or step that was interrupted mid-run
 * is rolled back to pending. Returns the same (mutated) state object.
 */
export function prepareFlowStateForResume(state) {
    state.status = "pending";
    state.lastError = null;
    state.currentStep = null;
    // Rebuild the execution state without a termination marker.
    const { terminationReason, ...execution } = state.executionState;
    state.executionState = {
        ...execution,
        terminated: false,
        phases: execution.phases.map(normalizePhaseState),
    };
    return state;
}
|
package/dist/gitlab.js
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import { mkdirSync } from "node:fs";
|
|
2
|
+
import { writeFile } from "node:fs/promises";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { TaskRunnerError } from "./errors.js";
|
|
5
|
+
const MERGE_REQUEST_PATH_RE = /^(?<projectPath>.+?)\/-\/merge_requests\/(?<iid>\d+)(?:\/.*)?$/;
/** Trims the value and removes any trailing slashes. */
function normalizeUrl(value) {
    return value.trim().replace(/\/+$/, "");
}
/** Strips leading and trailing slashes from a project path segment. */
function normalizeProjectPath(value) {
    return value.replace(/^\/+/, "").replace(/\/+$/, "");
}
/**
 * Parses a GitLab merge request browse URL into its API coordinates:
 * API base URL, canonical MR URL, project path, and MR IID.
 * @throws TaskRunnerError when the URL is malformed or not an MR URL.
 */
export function parseGitLabMergeRequestUrl(mergeRequestUrl) {
    const invalidUrlError = () => new TaskRunnerError("Expected GitLab merge request URL like https://gitlab.example.com/group/project/-/merge_requests/123");
    let parsed;
    try {
        parsed = new URL(normalizeUrl(mergeRequestUrl));
    }
    catch {
        throw invalidUrlError();
    }
    const groups = MERGE_REQUEST_PATH_RE.exec(parsed.pathname)?.groups;
    const projectPath = normalizeProjectPath(groups?.projectPath ?? "");
    const iidRaw = groups?.iid;
    if (!projectPath || !iidRaw) {
        throw invalidUrlError();
    }
    const origin = `${parsed.protocol}//${parsed.host}`;
    return {
        apiBaseUrl: `${origin}/api/v4`,
        mergeRequestUrl: `${origin}${parsed.pathname}`,
        projectPath,
        mergeRequestIid: Number.parseInt(iidRaw, 10),
    };
}
|
|
33
|
+
/**
 * Extends parsed MR coordinates with the REST endpoint URL for the MR's
 * discussion threads (project path is URL-encoded for the API).
 */
export function buildGitLabReviewFetchTarget(mergeRequestUrl) {
    const ref = parseGitLabMergeRequestUrl(mergeRequestUrl);
    const encodedProject = encodeURIComponent(ref.projectPath);
    const discussionsApiUrl = `${ref.apiBaseUrl}/projects/${encodedProject}/merge_requests/${ref.mergeRequestIid}/discussions`;
    return { ...ref, discussionsApiUrl };
}
|
|
40
|
+
/**
 * Fetches a single page (100 items) of MR discussions from the GitLab API.
 * Returns the discussions plus the next page number, or null when this is
 * the last page (GitLab signals that with an empty `x-next-page` header).
 * @throws TaskRunnerError on any non-2xx response.
 */
async function fetchDiscussionPage(target, page, token) {
    const apiUrl = `${target.discussionsApiUrl}?per_page=100&page=${page}`;
    const response = await fetch(apiUrl, {
        headers: {
            "PRIVATE-TOKEN": token,
            Accept: "application/json",
        },
    });
    if (!response.ok) {
        const details = [
            `Failed to fetch GitLab merge request discussions: HTTP ${response.status}`,
            `MR URL: ${target.mergeRequestUrl}`,
            `GitLab project path: ${target.projectPath}`,
            `GitLab merge request IID: ${target.mergeRequestIid}`,
            `GitLab discussions API URL: ${apiUrl}`,
        ];
        throw new TaskRunnerError(details.join("\n"));
    }
    const rawNextPage = response.headers.get("x-next-page")?.trim();
    let nextPage = null;
    if (rawNextPage) {
        const parsedPage = Number.parseInt(rawNextPage, 10);
        nextPage = Number.isNaN(parsedPage) ? null : parsedPage;
    }
    const discussions = await response.json();
    return { discussions, nextPage };
}
|
|
62
|
+
/**
 * Walks every page of MR discussions and returns them as one flat array.
 */
async function fetchMergeRequestDiscussions(target, token) {
    const all = [];
    let page = 1;
    do {
        const { discussions, nextPage } = await fetchDiscussionPage(target, page, token);
        all.push(...discussions);
        page = nextPage;
    } while (page);
    return all;
}
|
|
74
|
+
/**
 * Flattens GitLab discussion threads into a list of plain review-comment
 * records. System notes, notes with empty bodies, and discussions without
 * an id are dropped; missing authors/timestamps get safe fallbacks.
 */
function normalizeDiscussionNotes(discussions) {
    const result = [];
    for (const discussion of discussions) {
        const discussionId = String(discussion.id ?? "");
        if (!discussionId) {
            continue;
        }
        for (const note of discussion.notes ?? []) {
            const hasBody = typeof note.body === "string" && note.body.trim().length > 0;
            if (!hasBody || note.system === true) {
                continue;
            }
            const position = note.position;
            result.push({
                id: String(note.id ?? `${discussionId}-${note.created_at ?? "unknown"}`),
                discussion_id: discussionId,
                body: note.body?.trim() ?? "",
                author: note.author?.username?.trim() || note.author?.name?.trim() || "unknown",
                created_at: note.created_at ?? new Date(0).toISOString(),
                system: Boolean(note.system),
                resolvable: Boolean(note.resolvable),
                resolved: Boolean(note.resolved),
                file_path: position?.new_path ?? position?.old_path ?? null,
                new_line: typeof position?.new_line === "number" ? position.new_line : null,
                old_line: typeof position?.old_line === "number" ? position.old_line : null,
            });
        }
    }
    return result;
}
|
|
98
|
+
/**
 * Renders the fetched review artifact as human-readable markdown: a header
 * with MR metadata followed by one section per comment.
 */
function buildGitLabReviewMarkdown(artifact) {
    const header = [
        "# GitLab Review",
        "",
        `- MR: ${artifact.merge_request_url}`,
        `- Project: ${artifact.project_path}`,
        `- IID: ${artifact.merge_request_iid}`,
        `- Fetched at: ${artifact.fetched_at}`,
        `- Comments: ${artifact.comments.length}`,
        "",
    ];
    if (artifact.comments.length === 0) {
        // User-facing fallback message kept verbatim.
        return [...header, "Код-ревью комментариев не найдено."].join("\n");
    }
    const sections = artifact.comments.map((comment, index) => {
        const section = [
            `## Comment ${index + 1}`,
            `- Author: ${comment.author}`,
            `- Created at: ${comment.created_at}`,
            `- Discussion: ${comment.discussion_id}`,
        ];
        if (comment.file_path) {
            const parts = [comment.file_path, comment.new_line ?? comment.old_line];
            section.push(`- Location: ${parts.filter((item) => item !== null).join(":")}`);
        }
        if (comment.resolvable) {
            section.push(`- Resolved: ${comment.resolved ? "yes" : "no"}`);
        }
        section.push("", comment.body, "");
        return section;
    });
    return [...header, ...sections.flat()].join("\n");
}
|
|
131
|
+
/**
 * Fetches review comments for a GitLab MR and writes two artifacts: a JSON
 * file (machine-readable source of truth) and a markdown file (for human
 * inspection). Requires the GITLAB_TOKEN environment variable.
 * @returns the in-memory artifact object that was written.
 * @throws TaskRunnerError when the token is missing or the API call fails.
 */
export async function fetchGitLabReview(mergeRequestUrl, outputFile, outputJsonFile) {
    const token = process.env.GITLAB_TOKEN?.trim();
    if (!token) {
        throw new TaskRunnerError("GITLAB_TOKEN is required for gitlab-review flow.");
    }
    const target = buildGitLabReviewFetchTarget(mergeRequestUrl);
    const discussions = await fetchMergeRequestDiscussions(target, token);
    const comments = normalizeDiscussionNotes(discussions);
    const hasComments = comments.length > 0;
    const artifact = {
        summary: hasComments ? `Fetched ${comments.length} GitLab review comments.` : "No GitLab review comments found.",
        merge_request_url: target.mergeRequestUrl,
        project_path: target.projectPath,
        merge_request_iid: target.mergeRequestIid,
        fetched_at: new Date().toISOString(),
        comments,
    };
    // Make sure both destination directories exist before writing.
    for (const destination of [outputFile, outputJsonFile]) {
        mkdirSync(path.dirname(destination), { recursive: true });
    }
    await writeFile(outputJsonFile, `${JSON.stringify(artifact, null, 2)}\n`, "utf8");
    await writeFile(outputFile, `${buildGitLabReviewMarkdown(artifact)}\n`, "utf8");
    return artifact;
}
|