kodevu 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +112 -0
- package/config.example.json +11 -0
- package/package.json +28 -0
- package/src/config.js +109 -0
- package/src/git-client.js +221 -0
- package/src/index.js +47 -0
- package/src/review-runner.js +353 -0
- package/src/shell.js +79 -0
- package/src/svn-client.js +180 -0
- package/src/vcs-client.js +178 -0
package/README.md
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
# Kodevu
|
|
2
|
+
|
|
3
|
+
A Node.js tool that polls new SVN revisions or Git commits, fetches each change diff directly from the repository, sends the diff to a supported reviewer CLI, and writes the result to Markdown files.
|
|
4
|
+
|
|
5
|
+
## Workflow
|
|
6
|
+
|
|
7
|
+
1. Detect the configured repository type, or use the explicit `vcs` setting.
|
|
8
|
+
2. Read the latest change from `target`.
|
|
9
|
+
3. Find changes that have not been reviewed yet.
|
|
10
|
+
4. For each change:
|
|
11
|
+
- load metadata and changed paths from SVN or Git
|
|
12
|
+
- generate a unified diff for that single revision or commit
|
|
13
|
+
- send the diff and change metadata to the configured reviewer CLI
|
|
14
|
+
- allow the reviewer to inspect related local repository files in read-only mode when a local workspace is available
|
|
15
|
+
- write the result to `reports/`
|
|
16
|
+
5. Update `data/state.json` so the same change is not reviewed twice.
|
|
17
|
+
|
|
18
|
+
## Setup
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
npm install
|
|
22
|
+
copy config.example.json config.json
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
Then edit `config.json` and set `target`.
|
|
26
|
+
|
|
27
|
+
Install as a CLI package:
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
npm install -g kodevu
|
|
31
|
+
copy config.example.json config.json
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
## Run
|
|
35
|
+
|
|
36
|
+
Run one cycle:
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
npm run once
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
Or run directly as the published CLI:
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
kodevu --once
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
Use `npx` without installing globally:
|
|
49
|
+
|
|
50
|
+
```bash
|
|
51
|
+
npx kodevu --once --config ./config.json
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
Start the scheduler:
|
|
55
|
+
|
|
56
|
+
```bash
|
|
57
|
+
npm start
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
Published CLI form:
|
|
61
|
+
|
|
62
|
+
```bash
|
|
63
|
+
kodevu --config ./config.json
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
Use a custom config path:
|
|
67
|
+
|
|
68
|
+
```bash
|
|
69
|
+
node src/index.js --config ./config.json --once
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
Equivalent `npx` usage:
|
|
73
|
+
|
|
74
|
+
```bash
|
|
75
|
+
npx kodevu --config ./config.json --once
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
## Config
|
|
79
|
+
|
|
80
|
+
- `target`: required repository target
|
|
81
|
+
- `vcs`: `auto`, `svn`, or `git`; default `auto`
|
|
82
|
+
- `reviewer`: `codex` or `gemini`; default `codex`
|
|
83
|
+
- `pollCron`: cron schedule, default every 10 minutes
|
|
84
|
+
- `reviewPrompt`: saved into the report as review context
|
|
85
|
+
- `outputDir`: report output directory; default `./reports`
|
|
86
|
+
- `commandTimeoutMs`: timeout for a single review command execution in milliseconds
|
|
87
|
+
- `bootstrapToLatest`: if no state exists, start by reviewing only the current latest change instead of replaying the full history
|
|
88
|
+
- `maxRevisionsPerRun`: cap the number of pending changes per polling cycle
|
|
89
|
+
|
|
90
|
+
Internal defaults:
|
|
91
|
+
|
|
92
|
+
- review state is always stored in `./data/state.json`
|
|
93
|
+
- the tool always invokes `git`, `svn`, and the configured reviewer CLI from `PATH`
|
|
94
|
+
- command output is decoded as `utf8`
|
|
95
|
+
|
|
96
|
+
## Target Rules
|
|
97
|
+
|
|
98
|
+
- For SVN, `target` can be a working copy path or repository URL.
|
|
99
|
+
- For Git, `target` must be a local repository path or a subdirectory inside a local repository.
|
|
100
|
+
- When `vcs` is `auto`, the tool tries Git first for existing local paths, then falls back to SVN.
|
|
101
|
+
- Legacy `svnTarget` is still accepted for backward compatibility.
|
|
102
|
+
|
|
103
|
+
## Notes
|
|
104
|
+
|
|
105
|
+
- `reviewer: "codex"` uses `codex exec` with the diff embedded in the prompt.
|
|
106
|
+
- `reviewer: "gemini"` uses `gemini -p` in non-interactive mode.
|
|
107
|
+
- For Git targets and local SVN working copies, the reviewer command runs from the repository workspace so it can inspect related files beyond the diff when needed.
|
|
108
|
+
- For remote SVN URLs without a local working copy, the review still relies on the diff and change metadata only.
|
|
109
|
+
- SVN reports keep the `r123.md` naming style.
|
|
110
|
+
- Git reports are written as `git-<full-commit-hash>.md`.
|
|
111
|
+
- `data/state.json` stores per-project checkpoints keyed by repository identity; only the v2 multi-project structure is supported.
|
|
112
|
+
- If the reviewer command exits non-zero or times out, the report is still written, but the state is not advanced so the change can be retried later.
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
{
|
|
2
|
+
"target": "C:/path/to/your/repository-or-subdirectory",
|
|
3
|
+
"vcs": "auto",
|
|
4
|
+
"reviewer": "codex",
|
|
5
|
+
"pollCron": "*/10 * * * *",
|
|
6
|
+
"reviewPrompt": "请严格审查当前变更,优先指出 bug、回归风险、兼容性问题、安全问题、边界条件缺陷和缺失测试。请使用简体中文输出 Markdown;如果没有明确缺陷,请写“未发现明确缺陷”,并补充剩余风险。",
|
|
7
|
+
"outputDir": "./reports",
|
|
8
|
+
"commandTimeoutMs": 600000,
|
|
9
|
+
"bootstrapToLatest": true,
|
|
10
|
+
"maxRevisionsPerRun": 5
|
|
11
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "kodevu",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "Poll SVN revisions or Git commits, send each change diff to a reviewer CLI, and write Markdown review reports.",
|
|
6
|
+
"bin": {
|
|
7
|
+
"kodevu": "./src/index.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"src",
|
|
11
|
+
"README.md",
|
|
12
|
+
"config.example.json"
|
|
13
|
+
],
|
|
14
|
+
"scripts": {
|
|
15
|
+
"start": "node src/index.js",
|
|
16
|
+
"once": "node src/index.js --once",
|
|
17
|
+
"check": "node --check src/index.js && node --check src/config.js && node --check src/review-runner.js && node --check src/svn-client.js && node --check src/git-client.js && node --check src/vcs-client.js && node --check src/shell.js"
|
|
18
|
+
},
|
|
19
|
+
"engines": {
|
|
20
|
+
"node": ">=20"
|
|
21
|
+
},
|
|
22
|
+
"dependencies": {
|
|
23
|
+
"cross-spawn": "^7.0.6",
|
|
24
|
+
"fast-xml-parser": "^5.2.5",
|
|
25
|
+
"iconv-lite": "^0.6.3",
|
|
26
|
+
"node-cron": "^4.2.1"
|
|
27
|
+
}
|
|
28
|
+
}
|
package/src/config.js
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
// Built-in defaults merged under the user's config.json in loadConfig().
// Every field can be overridden by the config file; "target" is required and
// deliberately empty here so loadConfig can reject a missing value.
const defaultConfig = {
  vcs: "auto",               // "auto" | "svn" | "git"
  reviewer: "codex",         // "codex" | "gemini"
  target: "",                // required; validated in loadConfig
  pollCron: "*/10 * * * *",  // poll every 10 minutes
  outputDir: "./reports",
  commandTimeoutMs: 600000,  // 10 minutes per reviewer invocation
  reviewPrompt:
    "请严格审查当前变更,优先指出 bug、回归风险、兼容性问题、安全问题、边界条件缺陷和缺失测试。请使用简体中文输出 Markdown;如果没有明确缺陷,请写“未发现明确缺陷”,并补充剩余风险。",
  bootstrapToLatest: true,   // first run reviews only the latest change
  maxRevisionsPerRun: 20     // cap pending changes per polling cycle
};
|
|
16
|
+
|
|
17
|
+
/**
 * Parse process.argv-style CLI arguments.
 *
 * Supported flags: `--once`, `--help`/`-h`, `--config`/`-c <path>`.
 * Unknown arguments are ignored for forward compatibility.
 *
 * @param {string[]} argv - Arguments after the script name (process.argv.slice(2)).
 * @returns {{configPath: string, once: boolean, help: boolean}}
 * @throws {Error} When `--config`/`-c` is given without a following path.
 *   (Previously this silently stored `undefined`, which later surfaced as a
 *   confusing `path.resolve(undefined)` TypeError inside loadConfig.)
 */
export function parseCliArgs(argv) {
  const args = {
    configPath: "config.json",
    once: false,
    help: false
  };

  for (let index = 0; index < argv.length; index += 1) {
    const value = argv[index];

    if (value === "--once") {
      args.once = true;
      continue;
    }

    if (value === "--help" || value === "-h") {
      args.help = true;
      continue;
    }

    if (value === "--config" || value === "-c") {
      const configPath = argv[index + 1];

      if (configPath === undefined) {
        throw new Error(`Missing value for ${value}; expected a config file path.`);
      }

      args.configPath = configPath;
      index += 1; // consume the flag's value
      continue;
    }
  }

  return args;
}
|
|
46
|
+
|
|
47
|
+
/**
 * Load and validate config.json, merging user values over defaultConfig.
 *
 * All relative paths in the config are resolved against the config file's
 * directory (not the process CWD), so the tool behaves the same regardless
 * of where it is launched from.
 *
 * @param {string} configPath - Path to the JSON config file.
 * @returns {Promise<object>} The validated config, augmented with absolute
 *   `configPath`, `baseDir`, `outputDir`, and `stateFilePath`.
 * @throws {Error} On a missing target, an unknown vcs/reviewer value, or a
 *   non-positive `maxRevisionsPerRun` / `commandTimeoutMs`.
 */
export async function loadConfig(configPath) {
  const absoluteConfigPath = path.resolve(configPath);
  const raw = await fs.readFile(absoluteConfigPath, "utf8");
  const config = {
    ...defaultConfig,
    ...JSON.parse(raw)
  };

  // Legacy alias: early versions were SVN-only and used "svnTarget".
  if (!config.target && config.svnTarget) {
    config.target = config.svnTarget;
  }

  if (!config.target) {
    throw new Error(`Missing required config field "target" (or legacy "svnTarget") in ${absoluteConfigPath}`);
  }

  // Normalize case before validating the enumerations below.
  config.vcs = String(config.vcs || "auto").toLowerCase();
  config.reviewer = String(config.reviewer || "codex").toLowerCase();

  if (!["auto", "svn", "git"].includes(config.vcs)) {
    throw new Error(`"vcs" must be one of "auto", "svn", or "git" in ${absoluteConfigPath}`);
  }

  if (!["codex", "gemini"].includes(config.reviewer)) {
    throw new Error(`"reviewer" must be one of "codex" or "gemini" in ${absoluteConfigPath}`);
  }

  config.configPath = absoluteConfigPath;
  config.baseDir = path.dirname(absoluteConfigPath);
  config.outputDir = path.resolve(config.baseDir, config.outputDir);
  // State location is an internal default, not user-configurable.
  config.stateFilePath = path.resolve(config.baseDir, "./data/state.json");
  config.maxRevisionsPerRun = Number(config.maxRevisionsPerRun);
  config.commandTimeoutMs = Number(config.commandTimeoutMs);

  if (!Number.isInteger(config.maxRevisionsPerRun) || config.maxRevisionsPerRun <= 0) {
    throw new Error(`"maxRevisionsPerRun" must be a positive integer in ${absoluteConfigPath}`);
  }

  if (!Number.isInteger(config.commandTimeoutMs) || config.commandTimeoutMs <= 0) {
    throw new Error(`"commandTimeoutMs" must be a positive integer in ${absoluteConfigPath}`);
  }

  return config;
}
|
|
91
|
+
|
|
92
|
+
// Print CLI usage to stdout. The help text is a single template literal so
// the layout below is exactly what the user sees.
export function printHelp() {
  console.log(`Kodevu

Usage:
  kodevu [--config config.json] [--once]
  npx kodevu [--config config.json] [--once]

Options:
  --config, -c   Path to config json. Default: ./config.json
  --once         Run one polling cycle and exit
  --help, -h     Show help

Config highlights:
  vcs        auto | svn | git
  reviewer   codex | gemini
  target     Repository target path (Git) or SVN working copy / URL
`);
}
|
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { runCommand } from "./shell.js";
|
|
4
|
+
|
|
5
|
+
const GIT_COMMAND = "git";
|
|
6
|
+
const COMMAND_ENCODING = "utf8";
|
|
7
|
+
|
|
8
|
+
// Convert an OS-specific file path into forward-slash ("POSIX") form, e.g.
// for use as a git pathspec on Windows.
function toPosixPath(filePath) {
  const segments = filePath.split(path.sep);
  return segments.join("/");
}
|
|
11
|
+
|
|
12
|
+
// Split command output into trimmed, non-empty lines (handles CRLF and LF).
function splitLines(text) {
  const lines = [];

  for (const rawLine of text.split(/\r?\n/)) {
    const trimmed = rawLine.trim();

    if (trimmed) {
      lines.push(trimmed);
    }
  }

  return lines;
}
|
|
18
|
+
|
|
19
|
+
// Build the trailing pathspec arguments for git commands; no arguments when
// the target is the repository root (empty pathspec).
function buildPathArgs(targetInfo) {
  if (!targetInfo.targetPathspec) {
    return [];
  }

  return ["--", targetInfo.targetPathspec];
}
|
|
22
|
+
|
|
23
|
+
// Stat a path, returning null instead of throwing when it does not exist
// (or is otherwise inaccessible).
async function statPath(targetPath) {
  return fs.stat(targetPath).catch(() => null);
}
|
|
30
|
+
|
|
31
|
+
// Run a git subcommand through the shared shell helper, decoding output as
// UTF-8 unless the caller overrides the encoding.
async function runGit(config, args, options = {}) {
  const mergedOptions = {
    encoding: COMMAND_ENCODING,
    ...options
  };

  return runCommand(GIT_COMMAND, args, mergedOptions);
}
|
|
37
|
+
|
|
38
|
+
/**
 * Resolve the Git repository context for the configured target path.
 *
 * @param {object} config - Loaded config; `target` is resolved against `baseDir`.
 * @returns {Promise<object>} repoRootPath, requestedTargetPath, targetDisplay,
 *   targetPathspec (empty string when the target IS the repo root), branchName,
 *   and a stable stateKey identifying repo+subpath for checkpoint tracking.
 * @throws {Error} When the target path does not exist, or (via runGit) when it
 *   is not inside a Git repository.
 */
export async function getTargetInfo(config) {
  const requestedTargetPath = path.resolve(config.baseDir, config.target);
  const targetStat = await statPath(requestedTargetPath);

  if (!targetStat) {
    throw new Error(`Git target path does not exist: ${requestedTargetPath}`);
  }

  // For a file target, run git from its containing directory.
  const lookupCwd = targetStat.isDirectory() ? requestedTargetPath : path.dirname(requestedTargetPath);
  const topLevelResult = await runGit(config, ["rev-parse", "--show-toplevel"], {
    cwd: lookupCwd,
    trim: true
  });
  const repoRootPath = path.resolve(topLevelResult.stdout);
  // Forward-slash relative path doubles as a git pathspec; "" means repo root.
  const relativeTargetPath = toPosixPath(path.relative(repoRootPath, requestedTargetPath));
  const branchResult = await runGit(config, ["rev-parse", "--abbrev-ref", "HEAD"], {
    cwd: repoRootPath,
    trim: true
  });

  return {
    repoRootPath,
    requestedTargetPath,
    targetDisplay: requestedTargetPath,
    targetPathspec: relativeTargetPath ? relativeTargetPath : "",
    // Detached HEAD makes --abbrev-ref print "HEAD"; empty output falls back too.
    branchName: branchResult.stdout || "HEAD",
    stateKey: `git:${repoRootPath}:${relativeTargetPath || "."}`
  };
}
|
|
67
|
+
|
|
68
|
+
// Resolve the newest commit hash on HEAD that touches the configured target.
// Throws when no commit is found (e.g. empty repository or untouched subdir).
export async function getLatestCommit(config, targetInfo) {
  const logArgs = ["log", "--format=%H", "-n", "1", "HEAD", ...buildPathArgs(targetInfo)];
  const logResult = await runGit(config, logArgs, {
    cwd: targetInfo.repoRootPath,
    trim: true
  });
  const [latestCommit] = splitLines(logResult.stdout);

  if (!latestCommit) {
    throw new Error(`Unable to determine the latest Git commit for ${targetInfo.targetDisplay}`);
  }

  return latestCommit;
}
|
|
82
|
+
|
|
83
|
+
// Decide whether a saved checkpoint commit is still usable: it must exist in
// the repository AND be an ancestor of the latest commit (history may have
// been rewritten or the repo replaced). An empty checkpoint is valid — it
// just means nothing has been reviewed yet.
export async function isValidCheckpoint(config, targetInfo, checkpointCommit, latestCommit) {
  if (!checkpointCommit) {
    return true;
  }

  const gitOptions = {
    cwd: targetInfo.repoRootPath,
    allowFailure: true,
    trim: true
  };

  const existsCheck = await runGit(config, ["cat-file", "-e", `${checkpointCommit}^{commit}`], gitOptions);

  if (existsCheck.code !== 0) {
    return false;
  }

  const ancestorCheck = await runGit(
    config,
    ["merge-base", "--is-ancestor", checkpointCommit, latestCommit],
    gitOptions
  );

  return ancestorCheck.code === 0;
}
|
|
106
|
+
|
|
107
|
+
// List commits after `startExclusive` up to and including `endInclusive`,
// oldest first, capped at `limit`. Without a checkpoint the full history of
// `endInclusive` is considered.
export async function getPendingCommits(config, targetInfo, startExclusive, endInclusive, limit) {
  const rangeArgs = startExclusive ? [endInclusive, `^${startExclusive}`] : [endInclusive];
  const revListArgs = ["rev-list", "--reverse", ...rangeArgs, ...buildPathArgs(targetInfo)];

  const revListResult = await runGit(config, revListArgs, {
    cwd: targetInfo.repoRootPath,
    trim: true
  });

  return splitLines(revListResult.stdout).slice(0, limit);
}
|
|
125
|
+
|
|
126
|
+
// Produce the unified diff introduced by a single commit, restricted to the
// configured target pathspec. Rename/copy detection is on, and external diff
// drivers are bypassed so the output is plain textual diff.
export async function getCommitDiff(config, targetInfo, commitHash) {
  const showArgs = [
    "show",
    "--format=",
    "--find-renames",
    "--find-copies",
    "--no-ext-diff",
    commitHash,
    ...buildPathArgs(targetInfo)
  ];

  const showResult = await runGit(config, showArgs, {
    cwd: targetInfo.repoRootPath,
    trim: false
  });

  return showResult.stdout;
}
|
|
143
|
+
|
|
144
|
+
// Parse NUL-delimited `--name-status -z` output into changed-path records.
// Rename/copy entries (status "R<score>" / "C<score>") consume two paths —
// the old path followed by the new path; all other statuses consume one.
function parseNameStatus(stdout) {
  const entries = stdout.split("\0").filter(Boolean);
  const changedPaths = [];
  let index = 0;

  while (index < entries.length) {
    const status = entries[index];

    if (!status) {
      index += 1;
      continue;
    }

    const action = status[0];

    if (status.startsWith("R") || status.startsWith("C")) {
      const previousPath = entries[index + 1];
      const relativePath = entries[index + 2];

      if (relativePath) {
        changedPaths.push({
          action,
          relativePath,
          previousPath: previousPath || null
        });
      }

      index += 3;
    } else {
      const relativePath = entries[index + 1];

      if (relativePath) {
        changedPaths.push({
          action,
          relativePath,
          previousPath: null
        });
      }

      index += 2;
    }
  }

  return changedPaths;
}
|
|
188
|
+
|
|
189
|
+
// Load commit metadata (hash, author, ISO author date, full message) plus the
// list of changed paths for one commit, scoped to the configured target.
export async function getCommitDetails(config, targetInfo, commitHash) {
  const gitOptions = { cwd: targetInfo.repoRootPath, trim: false };

  // %x00 = NUL field separators; %B (raw body) comes last so it may span lines.
  const metaResult = await runGit(
    config,
    ["show", "--no-patch", "--format=%H%x00%an%x00%aI%x00%B", commitHash],
    gitOptions
  );
  const metaFields = metaResult.stdout.split("\0");
  const hash = metaFields[0] ?? "";
  const author = metaFields[1] ?? "";
  const date = metaFields[2] ?? "";
  const message = metaFields.slice(3).join("\0").trim();

  const changedFilesResult = await runGit(
    config,
    [
      "diff-tree",
      "--no-commit-id",
      "--name-status",
      "-r",
      "--root",
      "-z",
      "-M",
      "-C",
      commitHash,
      ...buildPathArgs(targetInfo)
    ],
    gitOptions
  );

  return {
    commitHash: hash.trim() || commitHash,
    author: author.trim() || "unknown",
    date: date.trim(),
    message,
    changedPaths: parseNameStatus(changedFilesResult.stdout)
  };
}
|
package/src/index.js
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
#!/usr/bin/env node

import cron from "node-cron";
import { loadConfig, parseCliArgs, printHelp } from "./config.js";
import { runReviewCycle } from "./review-runner.js";

const cliArgs = parseCliArgs(process.argv.slice(2));

if (cliArgs.help) {
  printHelp();
  process.exit(0);
}

const config = await loadConfig(cliArgs.configPath);

// Re-entrancy guard: a cron tick can fire while a previous cycle is running.
let running = false;

// Run one review cycle. Errors are logged (not rethrown) so the scheduler
// keeps running; process.exitCode is set so --once mode exits non-zero.
async function runOnce() {
  if (running) {
    console.log("A review cycle is already running, skipping this trigger.");
    return;
  }

  running = true;

  try {
    await runReviewCycle(config);
  } catch (error) {
    console.error(error?.stack || String(error));
    process.exitCode = 1;
  } finally {
    running = false;
  }
}

// --once mode: single cycle, then exit with that cycle's status.
if (cliArgs.once) {
  await runOnce();
  process.exit(process.exitCode || 0);
}

// Scheduler mode: run immediately, then on the configured cron cadence.
await runOnce();

console.log(`Scheduler started. Cron: ${config.pollCron}`);

cron.schedule(config.pollCron, () => {
  // Fire-and-forget: runOnce handles its own errors and re-entrancy.
  void runOnce();
});
|
|
@@ -0,0 +1,353 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { runCommand } from "./shell.js";
|
|
5
|
+
import { resolveRepositoryContext } from "./vcs-client.js";
|
|
6
|
+
|
|
7
|
+
// Reviewer CLI adapters, keyed by the validated `config.reviewer` value.
// Each adapter invokes one external review tool and normalises its outcome to
// { ...execResult, message }, where `message` is the reviewer's final answer
// and the exec result carries `code` / `timedOut` for the report.
const REVIEWERS = {
  codex: {
    displayName: "Codex",
    responseSectionTitle: "Codex Response",
    emptyResponseText: "_No final response returned from codex exec._",
    async run(config, workingDir, promptText, diffText) {
      // codex writes its final message to a file; use a throwaway temp dir so
      // concurrent runs cannot collide on the output path.
      const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "kodevu-"));
      const outputFile = path.join(tempDir, "codex-last-message.md");
      const args = [
        "exec",
        "--skip-git-repo-check",
        "--sandbox",
        "read-only",              // reviewer may read the workspace, never write
        "--color",
        "never",                  // keep the captured output free of ANSI codes
        "--output-last-message",
        outputFile,
        "-"                       // read the prompt from stdin
      ];

      try {
        const execResult = await runCommand("codex", args, {
          cwd: workingDir,
          input: [promptText, "Unified diff:", diffText].join("\n\n"),
          allowFailure: true,     // a failing run still yields a report
          timeoutMs: config.commandTimeoutMs
        });

        let message = "";

        // Prefer the dedicated last-message file; fall back to stdout when
        // codex exited before writing it.
        try {
          message = await fs.readFile(outputFile, "utf8");
        } catch {
          message = execResult.stdout;
        }

        return {
          ...execResult,
          message
        };
      } finally {
        await fs.rm(tempDir, { recursive: true, force: true });
      }
    }
  },
  gemini: {
    displayName: "Gemini",
    responseSectionTitle: "Gemini Response",
    emptyResponseText: "_No final response returned from gemini._",
    async run(config, workingDir, promptText, diffText) {
      // gemini takes the prompt via -p (non-interactive) and the diff on stdin.
      const execResult = await runCommand("gemini", ["-p", promptText], {
        cwd: workingDir,
        input: ["Unified diff:", diffText].join("\n\n"),
        allowFailure: true,
        timeoutMs: config.commandTimeoutMs
      });

      return {
        ...execResult,
        message: execResult.stdout
      };
    }
  }
};
|
|
71
|
+
|
|
72
|
+
// Create a directory and any missing parents; a no-op when it already exists.
async function ensureDir(targetPath) {
  const mkdirOptions = { recursive: true };
  await fs.mkdir(targetPath, mkdirOptions);
}
|
|
75
|
+
|
|
76
|
+
// True when the path is accessible to this process; false otherwise.
async function pathExists(targetPath) {
  return fs.access(targetPath).then(
    () => true,
    () => false
  );
}
|
|
84
|
+
|
|
85
|
+
// Read and validate the persisted review state. A missing file yields a
// fresh empty v2 state instead of an error.
async function loadState(stateFile) {
  const exists = await pathExists(stateFile);

  if (!exists) {
    return { version: 2, projects: {} };
  }

  const raw = await fs.readFile(stateFile, "utf8");
  const parsed = JSON.parse(raw);
  return normalizeStateFile(parsed);
}
|
|
93
|
+
|
|
94
|
+
// Persist state as pretty-printed JSON with a trailing newline, creating the
// parent directory when needed.
async function saveState(stateFile, state) {
  await ensureDir(path.dirname(stateFile));
  const serialized = `${JSON.stringify(state, null, 2)}\n`;
  await fs.writeFile(stateFile, serialized, "utf8");
}
|
|
98
|
+
|
|
99
|
+
// Validate a parsed state file and return a clean { version, projects }
// shape. Only the v2 multi-project structure is accepted; anything else
// (wrong type, wrong version, missing/invalid projects map) throws.
function normalizeStateFile(state) {
  const isPlainObject = (value) =>
    value !== null && typeof value === "object" && !Array.isArray(value);

  if (!isPlainObject(state)) {
    throw new Error('State file must be a JSON object with shape {"version":2,"projects":{...}}.');
  }

  if (state.version !== 2) {
    throw new Error('State file version must be 2.');
  }

  if (!isPlainObject(state.projects)) {
    throw new Error('State file must contain a "projects" object.');
  }

  return {
    version: 2,
    projects: state.projects
  };
}
|
|
117
|
+
|
|
118
|
+
// Look up this target's saved checkpoint entry; unknown targets (or a state
// file without a projects map) resolve to an empty object.
function getProjectState(stateFile, targetInfo) {
  const entry = stateFile.projects?.[targetInfo.stateKey];
  return entry === undefined || entry === null ? {} : entry;
}
|
|
121
|
+
|
|
122
|
+
// Return a new state-file object with this target's entry replaced; the
// input state file is not mutated.
function updateProjectState(stateFile, targetInfo, projectState) {
  const existingProjects = stateFile.projects || {};

  return {
    version: 2,
    projects: {
      ...existingProjects,
      [targetInfo.stateKey]: projectState
    }
  };
}
|
|
131
|
+
|
|
132
|
+
// Write a UTF-8 text file, creating its parent directory first.
async function writeTextFile(filePath, contents) {
  const parentDir = path.dirname(filePath);
  await ensureDir(parentDir);
  await fs.writeFile(filePath, contents, "utf8");
}
|
|
136
|
+
|
|
137
|
+
// Render changed paths as a Markdown bullet list; renames/copies also show
// their source path.
function formatChangedPaths(changedPaths) {
  if (changedPaths.length === 0) {
    return "_No changed files captured._";
  }

  const bullets = changedPaths.map((item) => {
    const fromNote = item.previousPath ? ` (from ${item.previousPath})` : "";
    return `- \`${item.action}\` ${item.relativePath}${fromNote}`;
  });

  return bullets.join("\n");
}
|
|
149
|
+
|
|
150
|
+
// Pick the directory the reviewer CLI runs from: the Git repo root or SVN
// working copy when one is available, otherwise the config file's directory.
function getReviewWorkspaceRoot(config, backend, targetInfo) {
  switch (backend.kind) {
    case "git":
      if (targetInfo.repoRootPath) {
        return targetInfo.repoRootPath;
      }
      break;
    case "svn":
      if (targetInfo.workingCopyPath) {
        return targetInfo.workingCopyPath;
      }
      break;
    default:
      break;
  }

  return config.baseDir;
}
|
|
161
|
+
|
|
162
|
+
// Assemble the full prompt sent to the reviewer CLI: the user-configured
// review instructions, workspace-access guidance, and the change metadata.
// The diff itself is delivered separately (on stdin) by the reviewer adapter.
function buildPrompt(config, backend, targetInfo, details) {
  const fileList = details.changedPaths.map((item) => `${item.action} ${item.relativePath}`).join("\n");
  const workspaceRoot = getReviewWorkspaceRoot(config, backend, targetInfo);
  // Git targets always have a local clone; SVN only has local files when a
  // working copy path was detected.
  const canReadRelatedFiles = backend.kind === "git" || Boolean(targetInfo.workingCopyPath);

  return [
    config.reviewPrompt,
    canReadRelatedFiles
      ? `You are running inside a read-only workspace rooted at: ${workspaceRoot}`
      : "No local repository workspace is available for this review run.",
    canReadRelatedFiles
      ? "Besides the diff below, you may read other related files in the workspace when needed to understand call sites, shared utilities, configuration, tests, or data flow. Do not modify files or rely on shell commands."
      : "Review primarily from the diff below. Do not assume access to other local files, shell commands, or repository history.",
    "Use plain text file references like path/to/file.js:123. Do not emit clickable workspace links.",
    "Write the final review in Simplified Chinese.",
    `Repository Type: ${backend.displayName}`,
    `Change ID: ${details.displayId}`,
    `Author: ${details.author}`,
    `Date: ${details.date || "unknown"}`,
    `Changed files:\n${fileList || "(none)"}`,
    `Commit message:\n${details.message || "(empty)"}`
  ].join("\n\n");
}
|
|
185
|
+
|
|
186
|
+
// Render the final Markdown report for one reviewed change: metadata header,
// changed files, commit message, the exact prompt that was used, the raw
// diff, and the reviewer's response.
function buildReport(config, backend, targetInfo, details, diffText, reviewer, reviewerResult) {
  const lines = [
    `# ${backend.displayName} Review Report: ${details.displayId}`,
    "",
    `- Repository Type: \`${backend.displayName}\``,
    `- Target: \`${targetInfo.targetDisplay || config.target}\``,
    `- Change ID: \`${details.displayId}\``,
    `- Author: \`${details.author}\``,
    `- Commit Date: \`${details.date || "unknown"}\``,
    `- Generated At: \`${new Date().toISOString()}\``,
    `- Reviewer: \`${reviewer.displayName}\``,
    `- Reviewer Exit Code: \`${reviewerResult.code}\``,
    `- Reviewer Timed Out: \`${reviewerResult.timedOut ? "yes" : "no"}\``,
    "",
    "## Changed Files",
    "",
    formatChangedPaths(details.changedPaths),
    "",
    "## Commit Message",
    "",
    details.message ? "```text\n" + details.message + "\n```" : "_Empty_",
    "",
    "## Review Context",
    "",
    // The same prompt text that was sent to the reviewer, for reproducibility.
    "```text",
    buildPrompt(config, backend, targetInfo, details),
    "```",
    "",
    "## Diff",
    "",
    "```diff",
    diffText.trim() || "(empty diff)",
    "```",
    "",
    `## ${reviewer.responseSectionTitle}`,
    "",
    reviewerResult.message?.trim() ? reviewerResult.message.trim() : reviewer.emptyResponseText
  ];

  return `${lines.join("\n")}\n`;
}
|
|
227
|
+
|
|
228
|
+
// Execute the configured reviewer adapter against one change and return both
// the adapter definition and its raw execution result.
async function runReviewerPrompt(config, backend, targetInfo, details, diffText) {
  const reviewer = REVIEWERS[config.reviewer];
  const workspaceRoot = getReviewWorkspaceRoot(config, backend, targetInfo);
  const promptText = buildPrompt(config, backend, targetInfo, details);
  const result = await reviewer.run(config, workspaceRoot, promptText, diffText);

  return { reviewer, result };
}
|
|
237
|
+
|
|
238
|
+
// Extract the last-reviewed change id from saved project state, discarding
// state that was recorded for a different VCS kind or a different target key.
function readLastReviewedId(state, backend, targetInfo) {
  const vcsMismatch = Boolean(state.vcs) && state.vcs !== backend.kind;
  const targetMismatch = Boolean(state.targetKey) && state.targetKey !== targetInfo.stateKey;

  if (vcsMismatch || targetMismatch) {
    return null;
  }

  return backend.fromStateValue(state);
}
|
|
249
|
+
|
|
250
|
+
// Build the persisted checkpoint record for a just-reviewed change, letting
// the backend append any extra bookkeeping fields via extendState.
function buildStateSnapshot(backend, targetInfo, changeId) {
  const snapshot = {
    vcs: backend.kind,
    targetKey: targetInfo.stateKey,
    lastReviewedId: backend.toStateValue(changeId),
    updatedAt: new Date().toISOString()
  };

  return backend.extendState(snapshot, changeId);
}
|
|
260
|
+
|
|
261
|
+
// Review a single change end-to-end: load its details, short-circuit with a
// stub report when no files under the target changed, otherwise run the
// reviewer and write the full Markdown report.
// Throws AFTER the report is written when the reviewer failed or timed out,
// so the caller does not advance the checkpoint and the change is retried on
// the next cycle while the failed report remains available for inspection.
async function reviewChange(config, backend, targetInfo, changeId) {
  const details = await backend.getChangeDetails(config, targetInfo, changeId);

  // Nothing under the configured target changed: write a stub report and
  // treat the change as successfully handled.
  if (details.changedPaths.length === 0) {
    const skippedReport = [
      `# ${backend.displayName} Review Report: ${details.displayId}`,
      "",
      "No file changes were captured for this change under the configured target."
    ].join("\n");

    const outputFile = path.join(config.outputDir, backend.getReportFileName(changeId));
    await writeTextFile(outputFile, `${skippedReport}\n`);
    return { success: true, outputFile };
  }

  const diffText = await backend.getChangeDiff(config, targetInfo, changeId);
  const { reviewer, result: reviewerResult } = await runReviewerPrompt(config, backend, targetInfo, details, diffText);
  const report = buildReport(config, backend, targetInfo, details, diffText, reviewer, reviewerResult);
  const outputFile = path.join(config.outputDir, backend.getReportFileName(changeId));
  await writeTextFile(outputFile, report);

  if (reviewerResult.code !== 0 || reviewerResult.timedOut) {
    throw new Error(`${reviewer.displayName} failed for ${details.displayId}; report written to ${outputFile}`);
  }

  return { success: true, outputFile, details };
}
|
|
288
|
+
|
|
289
|
+
// Render a list of change ids as a comma-separated human-readable string.
function formatChangeList(backend, changeIds) {
  const formatted = [];

  for (const changeId of changeIds) {
    formatted.push(backend.formatChangeId(changeId));
  }

  return formatted.join(", ");
}
|
|
292
|
+
|
|
293
|
+
/**
 * Run one polling cycle: find changes newer than the saved checkpoint,
 * review each one, and persist the checkpoint after every reviewed change.
 *
 * @param {object} config - Loaded configuration (outputDir, stateFilePath,
 *   bootstrapToLatest, maxRevisionsPerRun, target, ...).
 * @returns {Promise<void>} Resolves when the cycle completes; returns early
 *   when there is nothing new to review.
 */
export async function runReviewCycle(config) {
  await ensureDir(config.outputDir);

  // Detect SVN vs Git and resolve repository coordinates for the target.
  const { backend, targetInfo } = await resolveRepositoryContext(config);
  const latestChangeId = await backend.getLatestChangeId(config, targetInfo);
  const stateFile = await loadState(config.stateFilePath);
  const projectState = getProjectState(stateFile, targetInfo);
  let lastReviewedId = readLastReviewedId(projectState, backend, targetInfo);

  if (lastReviewedId) {
    // Git history can be rewritten; the backend decides whether the saved
    // checkpoint still belongs to the current history (SVN always says yes).
    const checkpointIsValid = await backend.isValidCheckpoint(config, targetInfo, lastReviewedId, latestChangeId);

    if (!checkpointIsValid) {
      console.log("Saved review state no longer matches repository history. Resetting checkpoint.");
      lastReviewedId = null;
    }
  }

  let changeIdsToReview = [];

  if (!lastReviewedId && config.bootstrapToLatest) {
    // First run (or reset checkpoint): review only the newest change instead
    // of replaying the entire repository history.
    changeIdsToReview = [latestChangeId];
    console.log(`Initialized state to review the latest ${backend.changeName} ${backend.formatChangeId(latestChangeId)} first.`);
  } else {
    changeIdsToReview = await backend.getPendingChangeIds(
      config,
      targetInfo,
      lastReviewedId,
      latestChangeId,
      config.maxRevisionsPerRun
    );
  }

  if (changeIdsToReview.length === 0) {
    const lastKnownId = lastReviewedId ? backend.formatChangeId(lastReviewedId) : "(none)";
    console.log(`No new ${backend.changeName}s. Last reviewed: ${lastKnownId}`);
    return;
  }

  console.log(`Reviewing ${backend.displayName} ${backend.changeName}s ${formatChangeList(backend, changeIdsToReview)}`);

  for (const changeId of changeIdsToReview) {
    const result = await reviewChange(config, backend, targetInfo, changeId);
    console.log(`Reviewed ${backend.formatChangeId(changeId)}: ${result.outputFile}`);
    // Persist the checkpoint after EACH change so an interrupted run never
    // re-reviews work that already produced a report.
    const nextProjectState = buildStateSnapshot(backend, targetInfo, changeId);
    await saveState(config.stateFilePath, updateProjectState(stateFile, targetInfo, nextProjectState));
    // Keep the in-memory copy in sync with what was just written to disk.
    stateFile.projects[targetInfo.stateKey] = nextProjectState;
  }

  // Probe for at most one more pending change past the last reviewed id,
  // just to report whether a backlog remains (capped by maxRevisionsPerRun above).
  const remainingChanges = await backend.getPendingChangeIds(
    config,
    targetInfo,
    changeIdsToReview[changeIdsToReview.length - 1],
    latestChangeId,
    1
  );

  if (remainingChanges.length > 0) {
    console.log(`Backlog remains. Latest ${backend.changeName} is ${backend.formatChangeId(latestChangeId)}.`);
  }
}
|
package/src/shell.js
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import spawn from "cross-spawn";
|
|
2
|
+
import iconv from "iconv-lite";
|
|
3
|
+
|
|
4
|
+
/**
 * Run an external command and collect its decoded output.
 *
 * @param {string} command - Executable to run (resolved via cross-spawn).
 * @param {string[]} [args] - Command-line arguments.
 * @param {object} [options]
 * @param {string} [options.cwd] - Working directory for the child process.
 * @param {object} [options.env] - Extra environment variables, merged over process.env.
 * @param {string|Buffer} [options.input] - Data written to the child's stdin before it is closed.
 * @param {string} [options.encoding="utf8"] - iconv-lite encoding used to decode stdout/stderr.
 * @param {boolean} [options.allowFailure=false] - When false, a non-zero exit or timeout rejects.
 * @param {number} [options.timeoutMs=0] - Kill the child with SIGTERM after this many ms (0 = no limit).
 * @param {boolean} [options.trim=false] - Trim decoded stdout/stderr.
 * @returns {Promise<{code: number, timedOut: boolean, stdout: string, stderr: string}>}
 * @throws {Error} When the command fails (or times out) and allowFailure is false;
 *   the full result object is attached as `error.result`.
 */
export async function runCommand(command, args = [], options = {}) {
  const {
    cwd,
    env,
    input,
    encoding = "utf8",
    allowFailure = false,
    timeoutMs = 0,
    trim = false
  } = options;

  return await new Promise((resolve, reject) => {
    const child = spawn(command, args, {
      cwd,
      env: {
        ...process.env,
        ...env
      },
      stdio: ["pipe", "pipe", "pipe"]
    });

    const stdoutChunks = [];
    const stderrChunks = [];
    let timedOut = false;
    let timer = null;

    // Buffer raw bytes; decoding happens once at the end so multi-byte
    // sequences split across chunks are handled correctly.
    child.stdout.on("data", (chunk) => {
      stdoutChunks.push(Buffer.from(chunk));
    });

    child.stderr.on("data", (chunk) => {
      stderrChunks.push(Buffer.from(chunk));
    });

    // FIX: if the child exits before stdin is fully written (or never reads
    // it), the write raises EPIPE; without a listener that becomes an
    // unhandled 'error' event and crashes the whole process.
    child.stdin.on("error", () => {});

    child.on("error", (error) => {
      // FIX: previously the timeout timer was left running after a spawn
      // failure, which could fire kill() on a dead child and keep the event
      // loop alive until the timer expired.
      if (timer) {
        clearTimeout(timer);
      }
      reject(error);
    });

    child.on("close", (code) => {
      if (timer) {
        clearTimeout(timer);
      }

      const stdout = iconv.decode(Buffer.concat(stdoutChunks), encoding);
      const stderr = iconv.decode(Buffer.concat(stderrChunks), encoding);
      const result = {
        // `code` is null when the child was killed by a signal (e.g. timeout).
        code: code ?? 1,
        timedOut,
        stdout: trim ? stdout.trim() : stdout,
        stderr: trim ? stderr.trim() : stderr
      };

      if ((result.code !== 0 || result.timedOut) && !allowFailure) {
        const error = new Error(
          `Command failed: ${command} ${args.join(" ")}\n${result.stderr || result.stdout}`.trim()
        );
        error.result = result;
        reject(error);
        return;
      }

      resolve(result);
    });

    if (input) {
      child.stdin.write(input);
    }

    child.stdin.end();

    if (timeoutMs > 0) {
      timer = setTimeout(() => {
        timedOut = true;
        child.kill("SIGTERM");
      }, timeoutMs);
    }
  });
}
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { XMLParser } from "fast-xml-parser";
|
|
3
|
+
import { runCommand } from "./shell.js";
|
|
4
|
+
|
|
5
|
+
// Executable name used for every Subversion invocation.
const SVN_COMMAND = "svn";
// Encoding passed to runCommand when decoding svn's stdout/stderr.
const COMMAND_ENCODING = "utf8";

// Shared parser for `svn ... --xml` output. Attributes are kept and exposed
// without a prefix, so e.g. <logentry revision="5"> parses to an object with
// a plain `revision` property.
const xmlParser = new XMLParser({
  ignoreAttributes: false,
  attributeNamePrefix: ""
});
|
|
12
|
+
|
|
13
|
+
// Normalize fast-xml-parser output: a repeated XML element parses to an
// array, a single occurrence to a bare value, a missing one to undefined.
// This collapses all three shapes into an array.
function asArray(value) {
  if (Array.isArray(value)) {
    return value;
  }

  return value ? [value] : [];
}
|
|
20
|
+
|
|
21
|
+
// Ensure a repository path is root-relative ("/trunk/src");
// falsy input maps to the repository root "/".
function normalizeRepoPath(repoPath) {
  if (!repoPath) {
    return "/";
  }

  if (repoPath.startsWith("/")) {
    return repoPath;
  }

  return `/${repoPath}`;
}
|
|
28
|
+
|
|
29
|
+
// Convert an absolute repository URL into a root-relative repo path.
// Throws when the URL does not live under the repository root.
function repoPathFromUrl(rootUrl, url) {
  if (!url.startsWith(rootUrl)) {
    throw new Error(`URL ${url} is not under repository root ${rootUrl}`);
  }

  // The remainder is always non-empty ("/" when url === rootUrl), so only the
  // leading slash needs guaranteeing (inlined normalizeRepoPath for this case).
  const suffix = url.slice(rootUrl.length) || "/";
  return suffix.startsWith("/") ? suffix : `/${suffix}`;
}
|
|
37
|
+
|
|
38
|
+
// Resolve the configured SVN target (repository URL or working copy) into the
// repository coordinates the rest of the backend relies on.
export async function getTargetInfo(config) {
  const infoResult = await runCommand(SVN_COMMAND, ["info", "--xml", config.target], {
    encoding: COMMAND_ENCODING,
    trim: true
  });

  const entry = xmlParser.parse(infoResult.stdout)?.info?.entry;
  if (!entry?.url || !entry?.repository?.root) {
    throw new Error(`Unable to read svn info for target ${config.target}`);
  }

  const repoRootUrl = entry.repository.root;
  const targetUrl = entry.url;

  // Working-copy targets expose their root via wc-info; for a plain local
  // path given as an absolute path, fall back to the target itself.
  const workingCopyPath =
    entry["wc-info"]?.["wcroot-abspath"] || (path.isAbsolute(config.target) ? config.target : null);

  return {
    repoRootUrl,
    targetUrl,
    targetRepoPath: repoPathFromUrl(repoRootUrl, targetUrl),
    targetDisplay: config.target,
    stateKey: `svn:${targetUrl}`,
    workingCopyPath
  };
}
|
|
64
|
+
|
|
65
|
+
// Prefer the resolved repository URL for remote svn commands; fall back to
// the raw configured target when target info is unavailable.
function getRemoteTarget(targetInfo, config) {
  const resolvedUrl = targetInfo?.targetUrl;
  return resolvedUrl || config.target;
}
|
|
68
|
+
|
|
69
|
+
// Fetch the newest revision number visible for the target.
// `-r HEAD:1 -l 1` asks svn for exactly one log entry, the latest.
export async function getLatestRevision(config, targetInfo) {
  const logArgs = ["log", "--xml", "-r", "HEAD:1", "-l", "1", getRemoteTarget(targetInfo, config)];
  const result = await runCommand(SVN_COMMAND, logArgs, {
    encoding: COMMAND_ENCODING,
    trim: true
  });

  const entry = xmlParser.parse(result.stdout)?.log?.logentry;
  const revision = Number(entry?.revision);

  if (!Number.isInteger(revision)) {
    throw new Error(`Unable to determine latest SVN revision for ${config.target}`);
  }

  return revision;
}
|
|
85
|
+
|
|
86
|
+
// List the revisions after `startExclusive` up to `endInclusive`, ascending,
// capped at `limit` entries. Without a prior checkpoint the scan starts at r1.
export async function getPendingRevisions(config, targetInfo, startExclusive, endInclusive, limit) {
  const firstCandidate = Number.isInteger(startExclusive) ? startExclusive + 1 : 1;

  if (endInclusive < firstCandidate) {
    return [];
  }

  // --quiet: only revision metadata is needed here, not messages or paths.
  const logArgs = [
    "log",
    "--xml",
    "--quiet",
    "-r",
    `${firstCandidate}:${endInclusive}`,
    getRemoteTarget(targetInfo, config)
  ];
  const result = await runCommand(SVN_COMMAND, logArgs, {
    encoding: COMMAND_ENCODING,
    trim: true
  });

  const entries = asArray(xmlParser.parse(result.stdout)?.log?.logentry);

  return entries
    .map((entry) => Number(entry?.revision))
    .filter((revision) => Number.isInteger(revision))
    .sort((a, b) => a - b)
    .slice(0, limit);
}
|
|
113
|
+
|
|
114
|
+
// Produce a git-style unified diff for exactly one revision of the target.
// --internal-diff avoids any user-configured external diff tool;
// --ignore-properties keeps property churn out of the review.
export async function getRevisionDiff(config, revision) {
  const diffArgs = [
    "diff",
    "--git",
    "--internal-diff",
    "--ignore-properties",
    "-c",
    String(revision),
    config.target
  ];
  const result = await runCommand(SVN_COMMAND, diffArgs, {
    encoding: COMMAND_ENCODING,
    trim: false
  });

  return result.stdout;
}
|
|
123
|
+
|
|
124
|
+
// True when `repoPath` is the target path itself or lives below it.
// A root target ("/") contains every path.
function isPathInsideTarget(targetRepoPath, repoPath) {
  if (targetRepoPath === "/") {
    return true;
  }

  if (repoPath === targetRepoPath) {
    return true;
  }

  return repoPath.startsWith(`${targetRepoPath}/`);
}
|
|
131
|
+
|
|
132
|
+
// Convert a root-relative repo path into a path relative to the review
// target. The target itself maps to its own basename; for a root target the
// leading slashes are simply stripped.
function toRelativePath(targetRepoPath, repoPath) {
  if (repoPath === targetRepoPath) {
    return path.posix.basename(repoPath);
  }

  const prefixLength = targetRepoPath === "/" ? 0 : targetRepoPath.length;
  return repoPath.slice(prefixLength).replace(/^\/+/, "");
}
|
|
143
|
+
|
|
144
|
+
/**
 * Load author, date, message, and the changed file paths for one revision,
 * restricted to paths under the configured target.
 *
 * @param {object} config - Loaded configuration (target, ...).
 * @param {object} targetInfo - Result of getTargetInfo (targetRepoPath, targetUrl).
 * @param {number} revision - Revision number to inspect.
 * @returns {Promise<{revision: number, author: string, date: string, message: string, changedPaths: object[]}>}
 * @throws {Error} When svn log returns no entry for the revision.
 */
export async function getRevisionDetails(config, targetInfo, revision) {
  // `-v` includes the changed-paths list in the log output.
  const result = await runCommand(
    SVN_COMMAND,
    ["log", "--xml", "-v", "-c", String(revision), getRemoteTarget(targetInfo, config)],
    { encoding: COMMAND_ENCODING, trim: true }
  );
  const parsed = xmlParser.parse(result.stdout);
  const entry = parsed?.log?.logentry;

  if (!entry?.revision) {
    throw new Error(`Unable to load SVN log for revision r${revision}`);
  }

  const changedPaths = asArray(entry.paths?.path)
    .map((item) => {
      // A <path> element with attributes parses to an object whose element
      // text is under "#text"; presumably a bare element parses to a plain
      // string, hence the `|| item` fallback — TODO confirm against parser.
      const repoPath = normalizeRepoPath(item["#text"] || item);
      return {
        // Defaults when attributes are absent: treat as a modified file.
        action: item.action || "M",
        kind: item.kind || "unknown",
        repoPath,
        relativePath: toRelativePath(targetInfo.targetRepoPath, repoPath),
        copyFromPath: item["copyfrom-path"] ? normalizeRepoPath(item["copyfrom-path"]) : null,
        copyFromRev: item["copyfrom-rev"] ? Number(item["copyfrom-rev"]) : null
      };
    })
    // Drop paths outside the configured target subtree.
    .filter((item) => isPathInsideTarget(targetInfo.targetRepoPath, item.repoPath))
    .filter((item) => item.relativePath.length > 0)
    // Keep files; "unknown" is kept too so entries that omit `kind` survive.
    .filter((item) => item.kind === "file" || item.kind === "unknown");

  return {
    revision: Number(entry.revision),
    author: entry.author || "unknown",
    date: entry.date || "",
    message: entry.msg || "",
    changedPaths
  };
}
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import * as gitClient from "./git-client.js";
|
|
4
|
+
import * as svnClient from "./svn-client.js";
|
|
5
|
+
|
|
6
|
+
// A target starting with a URI scheme ("http://", "svn+ssh://", ...) is a
// remote URL rather than a local filesystem path.
function isLikelyUrl(value) {
  const schemePattern = /^[a-z][a-z0-9+.-]*:\/\//i;
  return schemePattern.test(value);
}
|
|
9
|
+
|
|
10
|
+
// Resolve to true when `targetPath` is accessible on disk, false otherwise.
async function pathExists(targetPath) {
  return fs.access(targetPath).then(
    () => true,
    () => false
  );
}
|
|
18
|
+
|
|
19
|
+
// Build the SVN backend: adapts the svn-client functions to the generic
// backend interface consumed by the review runner.
function createSvnBackend() {
  return {
    kind: "svn",
    displayName: "SVN",
    changeName: "revision",
    formatChangeId: (revision) => `r${revision}`,
    getReportFileName: (revision) => `r${revision}.md`,
    getTargetInfo: (config) => svnClient.getTargetInfo(config),
    getLatestChangeId: (config, targetInfo) => svnClient.getLatestRevision(config, targetInfo),
    getPendingChangeIds: (config, targetInfo, startExclusive, endInclusive, limit) =>
      svnClient.getPendingRevisions(config, targetInfo, startExclusive, endInclusive, limit),
    getChangeDiff: (config, targetInfo, revision) => svnClient.getRevisionDiff(config, revision),
    // Normalize the svn-specific details into the generic change shape.
    async getChangeDetails(config, targetInfo, revision) {
      const details = await svnClient.getRevisionDetails(config, targetInfo, revision);
      return {
        id: revision,
        displayId: `r${details.revision}`,
        author: details.author,
        date: details.date,
        message: details.message,
        changedPaths: details.changedPaths
      };
    },
    // SVN revision numbers only grow, so a saved checkpoint is always valid.
    isValidCheckpoint: async () => true,
    toStateValue: (revision) => Number(revision),
    // Accept both the generic field and the alternate svn-specific field name.
    fromStateValue(state) {
      if (Number.isInteger(state.lastReviewedId)) {
        return state.lastReviewedId;
      }
      if (Number.isInteger(state.lastReviewedRevision)) {
        return state.lastReviewedRevision;
      }
      return null;
    },
    extendState: (state, revision) => ({
      ...state,
      lastReviewedRevision: Number(revision)
    })
  };
}
|
|
79
|
+
|
|
80
|
+
// Build the Git backend: adapts the git-client functions to the generic
// backend interface consumed by the review runner.
function createGitBackend() {
  return {
    kind: "git",
    displayName: "Git",
    changeName: "commit",
    formatChangeId: (commitHash) => commitHash.slice(0, 12),
    getReportFileName: (commitHash) => `git-${commitHash}.md`,
    getTargetInfo: (config) => gitClient.getTargetInfo(config),
    getLatestChangeId: (config, targetInfo) => gitClient.getLatestCommit(config, targetInfo),
    getPendingChangeIds: (config, targetInfo, startExclusive, endInclusive, limit) =>
      gitClient.getPendingCommits(config, targetInfo, startExclusive, endInclusive, limit),
    getChangeDiff: (config, targetInfo, commitHash) =>
      gitClient.getCommitDiff(config, targetInfo, commitHash),
    // Normalize the git-specific details into the generic change shape.
    async getChangeDetails(config, targetInfo, commitHash) {
      const details = await gitClient.getCommitDetails(config, targetInfo, commitHash);
      return {
        id: commitHash,
        displayId: commitHash.slice(0, 12),
        author: details.author,
        date: details.date,
        message: details.message,
        changedPaths: details.changedPaths
      };
    },
    // Checkpoint validation is delegated to git-client (see isValidCheckpoint there).
    isValidCheckpoint: (config, targetInfo, checkpointCommit, latestCommit) =>
      gitClient.isValidCheckpoint(config, targetInfo, checkpointCommit, latestCommit),
    toStateValue: (commitHash) => String(commitHash),
    // Accept both the generic field and the alternate git-specific field name.
    fromStateValue(state) {
      if (typeof state.lastReviewedId === "string" && state.lastReviewedId) {
        return state.lastReviewedId;
      }
      if (typeof state.lastReviewedCommit === "string" && state.lastReviewedCommit) {
        return state.lastReviewedCommit;
      }
      return null;
    },
    extendState: (state, commitHash) => ({
      ...state,
      lastReviewedCommit: String(commitHash)
    })
  };
}
|
|
140
|
+
|
|
141
|
+
// Singleton backend instances, keyed by the value accepted for `config.vcs`.
const backends = {
  svn: createSvnBackend(),
  git: createGitBackend()
};
|
|
145
|
+
|
|
146
|
+
// Pick the VCS backend for this run. An explicit `config.vcs` of "svn" or
// "git" wins; otherwise a local (non-URL) target that exists on disk is
// probed as a Git repository first, falling back to SVN.
export async function resolveRepositoryContext(config) {
  const withTargetInfo = async (backend) => ({
    backend,
    targetInfo: await backend.getTargetInfo(config)
  });

  if (config.vcs === "svn" || config.vcs === "git") {
    return await withTargetInfo(backends[config.vcs]);
  }

  const candidateTargetPath = path.resolve(config.baseDir, config.target);

  if (!isLikelyUrl(config.target) && (await pathExists(candidateTargetPath))) {
    try {
      return await withTargetInfo(backends.git);
    } catch {
      // Not a Git repository; fall through to SVN auto-detection.
    }
  }

  return await withTargetInfo(backends.svn);
}
|