@agentv/core 4.10.0 → 4.11.2-next.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3WGHC7LC.js +149 -0
- package/dist/chunk-3WGHC7LC.js.map +1 -0
- package/dist/{chunk-BWHUWLGW.js → chunk-5POFMJJ7.js} +1 -1
- package/dist/chunk-5POFMJJ7.js.map +1 -0
- package/dist/chunk-SDIANPEY.js +181 -0
- package/dist/chunk-SDIANPEY.js.map +1 -0
- package/dist/docker-workspace-RPPXBT27.js +9 -0
- package/dist/docker-workspace-RPPXBT27.js.map +1 -0
- package/dist/evaluation/validation/index.cjs +70 -3
- package/dist/evaluation/validation/index.cjs.map +1 -1
- package/dist/evaluation/validation/index.js +71 -4
- package/dist/evaluation/validation/index.js.map +1 -1
- package/dist/exec-AR6JUUN5.js +9 -0
- package/dist/exec-AR6JUUN5.js.map +1 -0
- package/dist/index.cjs +1264 -468
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +191 -5
- package/dist/index.d.ts +191 -5
- package/dist/index.js +780 -342
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
- package/dist/chunk-BWHUWLGW.js.map +0 -1
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
// src/evaluation/workspace/docker-workspace.ts
var DefaultCommandExecutor = class {
  /**
   * Run `argv` as a child process, optionally piping `options.stdin` to it.
   * Delegates to the runtime exec helper, which is imported lazily so this
   * chunk stays cheap to load when Docker is never used.
   *
   * @param argv - Full command line as an array (no shell involved).
   * @param options - Optional `{ timeoutMs, stdin }`.
   * @returns Promise of `{ stdout, stderr, exitCode }`.
   */
  async exec(argv, options = {}) {
    const stdinText = options.stdin ?? "";
    const execModule = await import("./exec-AR6JUUN5.js");
    return execModule.execFileWithStdin(argv, stdinText, { timeoutMs: options.timeoutMs });
  }
};
|
|
10
|
+
// Default overall operation timeout, in seconds (30 minutes).
var DEFAULT_TIMEOUT_S = 1800;
/**
 * Build a git argv for a checkout target, prefixing `-C <path>` when the
 * target pins a repo directory; otherwise run git in the current directory.
 */
function buildGitCommand(target, args) {
  const repoPath = target?.path;
  return repoPath ? ["git", "-C", repoPath, ...args] : ["git", ...args];
}
|
|
17
|
+
var DockerWorkspaceProvider = class {
  config;
  executor;
  timeoutMs;
  /**
   * @param config - Workspace config: `image`, optional `memory`, `cpus`, `timeout` (seconds).
   * @param executor - Command executor; defaults to the real child-process executor.
   */
  constructor(config, executor) {
    this.config = config;
    this.executor = executor ?? new DefaultCommandExecutor();
    const timeoutSeconds = config.timeout ?? DEFAULT_TIMEOUT_S;
    this.timeoutMs = timeoutSeconds * 1e3;
  }
  /** Check whether the Docker CLI is available on the host. */
  async isDockerAvailable() {
    try {
      const probe = await this.executor.exec(
        ["docker", "version", "--format", "{{.Server.Version}}"],
        { timeoutMs: 1e4 }
      );
      return probe.exitCode === 0;
    } catch {
      // Any spawn failure (docker binary missing, etc.) means "not available".
      return false;
    }
  }
  /** Pull the configured Docker image. No-op if already cached locally. */
  async pullImage() {
    const { image } = this.config;
    // A successful `image inspect` means the image is already local — skip the pull.
    const cached = await this.executor.exec(["docker", "image", "inspect", image], {
      timeoutMs: 1e4
    });
    if (cached.exitCode === 0) {
      return;
    }
    const pulled = await this.executor.exec(["docker", "pull", image], {
      timeoutMs: this.timeoutMs
    });
    if (pulled.exitCode !== 0) {
      throw new Error(`docker pull failed (exit ${pulled.exitCode}): ${pulled.stderr.trim()}`);
    }
  }
  /** Create a stopped container from the configured image with resource limits. Returns container ID. */
  async createContainer() {
    const { image, memory, cpus } = this.config;
    const argv = ["docker", "create"];
    if (memory) {
      argv.push(`--memory=${memory}`);
    }
    if (cpus !== void 0) {
      argv.push(`--cpus=${cpus}`);
    }
    // `sleep infinity` keeps the container alive so commands can be exec'd into it later.
    argv.push(image, "sleep", "infinity");
    const created = await this.executor.exec(argv, { timeoutMs: 3e4 });
    if (created.exitCode !== 0) {
      throw new Error(`docker create failed (exit ${created.exitCode}): ${created.stderr.trim()}`);
    }
    return created.stdout.trim();
  }
  /** Start a previously created container. */
  async startContainer(containerId) {
    const started = await this.executor.exec(["docker", "start", containerId], {
      timeoutMs: 3e4
    });
    if (started.exitCode !== 0) {
      throw new Error(`docker start failed (exit ${started.exitCode}): ${started.stderr.trim()}`);
    }
  }
  /**
   * Reset the container checkout to the specified target refs, if any.
   * This is used for SWE-bench images where the repo state must match the
   * dataset's base snapshot before grading begins.
   */
  async resetContainerCheckout(containerId, repoCheckouts) {
    if (!repoCheckouts || repoCheckouts.length === 0) {
      return;
    }
    for (const target of repoCheckouts) {
      // Step 1: hard-reset the repo inside the container to the pinned ref.
      const resetResult = await this.execInContainer({
        containerId,
        command: buildGitCommand(target, ["reset", "--hard", target.ref])
      });
      if (resetResult.exitCode !== 0) {
        throw new Error(
          `docker git reset failed (exit ${resetResult.exitCode}): ${resetResult.stderr.trim()}`
        );
      }
      // Step 2: read back HEAD so we can prove the reset actually landed.
      const verifyResult = await this.execInContainer({
        containerId,
        command: buildGitCommand(target, ["rev-parse", "HEAD"]),
        timeoutMs: 3e4
      });
      if (verifyResult.exitCode !== 0) {
        throw new Error(
          `docker checkout verification failed (exit ${verifyResult.exitCode}): ${verifyResult.stderr.trim()}`
        );
      }
      const head = verifyResult.stdout.trim();
      if (head !== target.ref) {
        throw new Error(
          `docker checkout verification failed: expected ${target.ref} but found ${head || "<empty>"}`
        );
      }
    }
  }
  /** Copy a local file or directory into a running container. */
  async copyToContainer(containerId, localPath, containerPath) {
    const copied = await this.executor.exec(
      ["docker", "cp", localPath, `${containerId}:${containerPath}`],
      { timeoutMs: 6e4 }
    );
    if (copied.exitCode !== 0) {
      throw new Error(`docker cp failed (exit ${copied.exitCode}): ${copied.stderr.trim()}`);
    }
  }
  /**
   * Execute a command inside a running container.
   * If stdin is provided, it is piped via `docker exec -i`.
   */
  async execInContainer(options) {
    const { containerId, command, timeoutMs, stdin } = options;
    const interactive = stdin !== void 0;
    const argv = interactive
      ? ["docker", "exec", "-i", containerId, ...command]
      : ["docker", "exec", containerId, ...command];
    return this.executor.exec(argv, {
      timeoutMs: timeoutMs ?? this.timeoutMs,
      stdin
    });
  }
  /** Force-remove a container (always succeeds, even if container doesn't exist). */
  async removeContainer(containerId) {
    try {
      await this.executor.exec(["docker", "rm", "-f", containerId], {
        timeoutMs: 3e4
      });
    } catch {
      // Best-effort cleanup — never throw from removal.
    }
  }
  /** Full lifecycle: create → start → exec → cleanup. Convenience for single-command grading. */
  async runGraderInContainer(options) {
    const containerId = await this.createContainer();
    try {
      await this.startContainer(containerId);
      await this.resetContainerCheckout(containerId, options.repoCheckouts);
      for (const file of options.copyFiles ?? []) {
        await this.copyToContainer(containerId, file.localPath, file.containerPath);
      }
      return await this.execInContainer({
        containerId,
        command: options.command,
        stdin: options.stdin
      });
    } finally {
      // Container is always removed, even when grading throws.
      await this.removeContainer(containerId);
    }
  }
};
|
|
176
|
+
|
|
177
|
+
export {
|
|
178
|
+
DefaultCommandExecutor,
|
|
179
|
+
DockerWorkspaceProvider
|
|
180
|
+
};
|
|
181
|
+
//# sourceMappingURL=chunk-SDIANPEY.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/evaluation/workspace/docker-workspace.ts"],"sourcesContent":["/**\n * Docker workspace provider — manages Docker container lifecycle for eval grading.\n *\n * Flow: pull image → create container → copy files in → exec grader → parse output → destroy container.\n * All Docker commands use `execFile` (no shell) for security.\n *\n * To add a new Docker command: add a method that calls `this.exec(...)` with the appropriate argv.\n *\n * Design decisions:\n * - CommandExecutor interface for testability (mock `execFile` in tests)\n * - Always `docker rm -f` in cleanup, even on errors (try/finally)\n * - Lazy-loaded: non-Docker evals never import this module\n */\n\nimport type { DockerWorkspaceConfig } from '../types.js';\nimport type { RepoCheckoutTarget } from './repo-checkout.js';\n\n/** Result of a command execution */\nexport interface ExecResult {\n readonly stdout: string;\n readonly stderr: string;\n readonly exitCode: number;\n}\n\n/** Abstraction over process execution for testability */\nexport interface CommandExecutor {\n exec(\n argv: readonly string[],\n options?: { timeoutMs?: number; stdin?: string },\n ): Promise<ExecResult>;\n}\n\n/**\n * Default command executor using Bun.spawn / Node child_process.\n * Mirrors the pattern in runtime/exec.ts.\n */\nexport class DefaultCommandExecutor implements CommandExecutor {\n async exec(\n argv: readonly string[],\n options: { timeoutMs?: number; stdin?: string } = {},\n ): Promise<ExecResult> {\n const { execFileWithStdin } = await import('../../runtime/exec.js');\n return execFileWithStdin(argv, options.stdin ?? 
'', {\n timeoutMs: options.timeoutMs,\n });\n }\n}\n\n/** Options for creating a Docker container */\nexport interface CreateContainerOptions {\n readonly image: string;\n readonly memory?: string;\n readonly cpus?: number;\n}\n\n/** Options for executing a command inside a container */\nexport interface ExecInContainerOptions {\n readonly containerId: string;\n readonly command: readonly string[];\n readonly timeoutMs?: number;\n readonly stdin?: string;\n}\n\nconst DEFAULT_TIMEOUT_S = 1800;\n\nfunction buildGitCommand(\n target: RepoCheckoutTarget | undefined,\n args: readonly string[],\n): string[] {\n if (!target?.path) {\n return ['git', ...args];\n }\n return ['git', '-C', target.path, ...args];\n}\n\n/**\n * Manages Docker container lifecycle for workspace-based evaluations.\n *\n * Usage:\n * const docker = new DockerWorkspaceProvider(config);\n * await docker.pullImage();\n * const containerId = await docker.createContainer();\n * try {\n * await docker.copyToContainer(containerId, localPath, containerPath);\n * const output = await docker.execInContainer({ containerId, command: [...] });\n * // parse output...\n * } finally {\n * await docker.removeContainer(containerId);\n * }\n */\nexport class DockerWorkspaceProvider {\n private readonly config: DockerWorkspaceConfig;\n private readonly executor: CommandExecutor;\n private readonly timeoutMs: number;\n\n constructor(config: DockerWorkspaceConfig, executor?: CommandExecutor) {\n this.config = config;\n this.executor = executor ?? new DefaultCommandExecutor();\n this.timeoutMs = (config.timeout ?? DEFAULT_TIMEOUT_S) * 1000;\n }\n\n /** Check whether the Docker CLI is available on the host. */\n async isDockerAvailable(): Promise<boolean> {\n try {\n const result = await this.executor.exec(\n ['docker', 'version', '--format', '{{.Server.Version}}'],\n {\n timeoutMs: 10_000,\n },\n );\n return result.exitCode === 0;\n } catch {\n return false;\n }\n }\n\n /** Pull the configured Docker image. 
No-op if already cached locally. */\n async pullImage(): Promise<void> {\n // Skip pull if image already exists locally (e.g. locally-built images)\n const inspectResult = await this.executor.exec(\n ['docker', 'image', 'inspect', this.config.image],\n {\n timeoutMs: 10_000,\n },\n );\n if (inspectResult.exitCode === 0) {\n return; // Image exists locally, no pull needed\n }\n\n const result = await this.executor.exec(['docker', 'pull', this.config.image], {\n timeoutMs: this.timeoutMs,\n });\n if (result.exitCode !== 0) {\n throw new Error(`docker pull failed (exit ${result.exitCode}): ${result.stderr.trim()}`);\n }\n }\n\n /** Create a stopped container from the configured image with resource limits. Returns container ID. */\n async createContainer(): Promise<string> {\n const argv: string[] = ['docker', 'create'];\n\n if (this.config.memory) {\n argv.push(`--memory=${this.config.memory}`);\n }\n if (this.config.cpus !== undefined) {\n argv.push(`--cpus=${this.config.cpus}`);\n }\n\n // Keep the container alive with a long sleep so we can exec into it\n argv.push(this.config.image, 'sleep', 'infinity');\n\n const result = await this.executor.exec(argv, { timeoutMs: 30_000 });\n if (result.exitCode !== 0) {\n throw new Error(`docker create failed (exit ${result.exitCode}): ${result.stderr.trim()}`);\n }\n return result.stdout.trim();\n }\n\n /** Start a previously created container. 
*/\n async startContainer(containerId: string): Promise<void> {\n const result = await this.executor.exec(['docker', 'start', containerId], {\n timeoutMs: 30_000,\n });\n if (result.exitCode !== 0) {\n throw new Error(`docker start failed (exit ${result.exitCode}): ${result.stderr.trim()}`);\n }\n }\n\n /**\n * Reset the container checkout to the specified target refs, if any.\n * This is used for SWE-bench images where the repo state must match the\n * dataset's base snapshot before grading begins.\n */\n async resetContainerCheckout(\n containerId: string,\n repoCheckouts?: readonly RepoCheckoutTarget[],\n ): Promise<void> {\n if (!repoCheckouts || repoCheckouts.length === 0) {\n return;\n }\n\n for (const target of repoCheckouts) {\n const resetResult = await this.execInContainer({\n containerId,\n command: buildGitCommand(target, ['reset', '--hard', target.ref]),\n });\n if (resetResult.exitCode !== 0) {\n throw new Error(\n `docker git reset failed (exit ${resetResult.exitCode}): ${resetResult.stderr.trim()}`,\n );\n }\n\n const verifyResult = await this.execInContainer({\n containerId,\n command: buildGitCommand(target, ['rev-parse', 'HEAD']),\n timeoutMs: 30_000,\n });\n if (verifyResult.exitCode !== 0) {\n throw new Error(\n `docker checkout verification failed (exit ${verifyResult.exitCode}): ${verifyResult.stderr.trim()}`,\n );\n }\n\n const head = verifyResult.stdout.trim();\n if (head !== target.ref) {\n throw new Error(\n `docker checkout verification failed: expected ${target.ref} but found ${head || '<empty>'}`,\n );\n }\n }\n }\n\n /** Copy a local file or directory into a running container. 
*/\n async copyToContainer(\n containerId: string,\n localPath: string,\n containerPath: string,\n ): Promise<void> {\n const result = await this.executor.exec(\n ['docker', 'cp', localPath, `${containerId}:${containerPath}`],\n { timeoutMs: 60_000 },\n );\n if (result.exitCode !== 0) {\n throw new Error(`docker cp failed (exit ${result.exitCode}): ${result.stderr.trim()}`);\n }\n }\n\n /**\n * Execute a command inside a running container.\n * If stdin is provided, it is piped via `docker exec -i`.\n */\n async execInContainer(options: ExecInContainerOptions): Promise<ExecResult> {\n const { containerId, command, timeoutMs, stdin } = options;\n const argv: string[] = ['docker', 'exec'];\n\n if (stdin !== undefined) {\n argv.push('-i');\n }\n\n argv.push(containerId, ...command);\n\n return this.executor.exec(argv, {\n timeoutMs: timeoutMs ?? this.timeoutMs,\n stdin,\n });\n }\n\n /** Force-remove a container (always succeeds, even if container doesn't exist). */\n async removeContainer(containerId: string): Promise<void> {\n try {\n await this.executor.exec(['docker', 'rm', '-f', containerId], {\n timeoutMs: 30_000,\n });\n } catch {\n // Best-effort cleanup — don't throw on removal failure\n }\n }\n\n /** Full lifecycle: create → start → exec → cleanup. Convenience for single-command grading. 
*/\n async runGraderInContainer(options: {\n readonly command: readonly string[];\n readonly stdin?: string;\n readonly copyFiles?: ReadonlyArray<{ localPath: string; containerPath: string }>;\n readonly repoCheckouts?: readonly RepoCheckoutTarget[];\n }): Promise<ExecResult> {\n const containerId = await this.createContainer();\n try {\n await this.startContainer(containerId);\n await this.resetContainerCheckout(containerId, options.repoCheckouts);\n\n if (options.copyFiles) {\n for (const file of options.copyFiles) {\n await this.copyToContainer(containerId, file.localPath, file.containerPath);\n }\n }\n\n return await this.execInContainer({\n containerId,\n command: options.command,\n stdin: options.stdin,\n });\n } finally {\n await this.removeContainer(containerId);\n }\n }\n}\n"],"mappings":";AAoCO,IAAM,yBAAN,MAAwD;AAAA,EAC7D,MAAM,KACJ,MACA,UAAkD,CAAC,GAC9B;AACrB,UAAM,EAAE,kBAAkB,IAAI,MAAM,OAAO,oBAAuB;AAClE,WAAO,kBAAkB,MAAM,QAAQ,SAAS,IAAI;AAAA,MAClD,WAAW,QAAQ;AAAA,IACrB,CAAC;AAAA,EACH;AACF;AAiBA,IAAM,oBAAoB;AAE1B,SAAS,gBACP,QACA,MACU;AACV,MAAI,CAAC,QAAQ,MAAM;AACjB,WAAO,CAAC,OAAO,GAAG,IAAI;AAAA,EACxB;AACA,SAAO,CAAC,OAAO,MAAM,OAAO,MAAM,GAAG,IAAI;AAC3C;AAiBO,IAAM,0BAAN,MAA8B;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YAAY,QAA+B,UAA4B;AACrE,SAAK,SAAS;AACd,SAAK,WAAW,YAAY,IAAI,uBAAuB;AACvD,SAAK,aAAa,OAAO,WAAW,qBAAqB;AAAA,EAC3D;AAAA;AAAA,EAGA,MAAM,oBAAsC;AAC1C,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS;AAAA,QACjC,CAAC,UAAU,WAAW,YAAY,qBAAqB;AAAA,QACvD;AAAA,UACE,WAAW;AAAA,QACb;AAAA,MACF;AACA,aAAO,OAAO,aAAa;AAAA,IAC7B,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,YAA2B;AAE/B,UAAM,gBAAgB,MAAM,KAAK,SAAS;AAAA,MACxC,CAAC,UAAU,SAAS,WAAW,KAAK,OAAO,KAAK;AAAA,MAChD;AAAA,QACE,WAAW;AAAA,MACb;AAAA,IACF;AACA,QAAI,cAAc,aAAa,GAAG;AAChC;AAAA,IACF;AAEA,UAAM,SAAS,MAAM,KAAK,SAAS,KAAK,CAAC,UAAU,QAAQ,KAAK,OAAO,KAAK,GAAG;AAAA,MAC7E,WAAW,KAAK;AAAA,IAClB,CAAC;AACD,QAAI,OAAO,aAAa,GAAG;AACzB,YAAM,IAAI,MAAM,4BAA4B,OAAO,QAAQ,MAAM,OAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACzF;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,k
BAAmC;AACvC,UAAM,OAAiB,CAAC,UAAU,QAAQ;AAE1C,QAAI,KAAK,OAAO,QAAQ;AACtB,WAAK,KAAK,YAAY,KAAK,OAAO,MAAM,EAAE;AAAA,IAC5C;AACA,QAAI,KAAK,OAAO,SAAS,QAAW;AAClC,WAAK,KAAK,UAAU,KAAK,OAAO,IAAI,EAAE;AAAA,IACxC;AAGA,SAAK,KAAK,KAAK,OAAO,OAAO,SAAS,UAAU;AAEhD,UAAM,SAAS,MAAM,KAAK,SAAS,KAAK,MAAM,EAAE,WAAW,IAAO,CAAC;AACnE,QAAI,OAAO,aAAa,GAAG;AACzB,YAAM,IAAI,MAAM,8BAA8B,OAAO,QAAQ,MAAM,OAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IAC3F;AACA,WAAO,OAAO,OAAO,KAAK;AAAA,EAC5B;AAAA;AAAA,EAGA,MAAM,eAAe,aAAoC;AACvD,UAAM,SAAS,MAAM,KAAK,SAAS,KAAK,CAAC,UAAU,SAAS,WAAW,GAAG;AAAA,MACxE,WAAW;AAAA,IACb,CAAC;AACD,QAAI,OAAO,aAAa,GAAG;AACzB,YAAM,IAAI,MAAM,6BAA6B,OAAO,QAAQ,MAAM,OAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IAC1F;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,uBACJ,aACA,eACe;AACf,QAAI,CAAC,iBAAiB,cAAc,WAAW,GAAG;AAChD;AAAA,IACF;AAEA,eAAW,UAAU,eAAe;AAClC,YAAM,cAAc,MAAM,KAAK,gBAAgB;AAAA,QAC7C;AAAA,QACA,SAAS,gBAAgB,QAAQ,CAAC,SAAS,UAAU,OAAO,GAAG,CAAC;AAAA,MAClE,CAAC;AACD,UAAI,YAAY,aAAa,GAAG;AAC9B,cAAM,IAAI;AAAA,UACR,iCAAiC,YAAY,QAAQ,MAAM,YAAY,OAAO,KAAK,CAAC;AAAA,QACtF;AAAA,MACF;AAEA,YAAM,eAAe,MAAM,KAAK,gBAAgB;AAAA,QAC9C;AAAA,QACA,SAAS,gBAAgB,QAAQ,CAAC,aAAa,MAAM,CAAC;AAAA,QACtD,WAAW;AAAA,MACb,CAAC;AACD,UAAI,aAAa,aAAa,GAAG;AAC/B,cAAM,IAAI;AAAA,UACR,6CAA6C,aAAa,QAAQ,MAAM,aAAa,OAAO,KAAK,CAAC;AAAA,QACpG;AAAA,MACF;AAEA,YAAM,OAAO,aAAa,OAAO,KAAK;AACtC,UAAI,SAAS,OAAO,KAAK;AACvB,cAAM,IAAI;AAAA,UACR,iDAAiD,OAAO,GAAG,cAAc,QAAQ,SAAS;AAAA,QAC5F;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,gBACJ,aACA,WACA,eACe;AACf,UAAM,SAAS,MAAM,KAAK,SAAS;AAAA,MACjC,CAAC,UAAU,MAAM,WAAW,GAAG,WAAW,IAAI,aAAa,EAAE;AAAA,MAC7D,EAAE,WAAW,IAAO;AAAA,IACtB;AACA,QAAI,OAAO,aAAa,GAAG;AACzB,YAAM,IAAI,MAAM,0BAA0B,OAAO,QAAQ,MAAM,OAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACvF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBAAgB,SAAsD;AAC1E,UAAM,EAAE,aAAa,SAAS,WAAW,MAAM,IAAI;AACnD,UAAM,OAAiB,CAAC,UAAU,MAAM;AAExC,QAAI,UAAU,QAAW;AACvB,WAAK,KAAK,IAAI;AAAA,IAChB;AAEA,SAAK,KAAK,aAAa,GAAG,OAAO;AAEjC,WAAO,KAAK,SAAS,KAAK,MAAM;AAAA,MAC9B,WAAW,aAAa,KAAK;AAAA,MAC7B;AAAA,IACF,CAAC;AAAA,EAC
H;AAAA;AAAA,EAGA,MAAM,gBAAgB,aAAoC;AACxD,QAAI;AACF,YAAM,KAAK,SAAS,KAAK,CAAC,UAAU,MAAM,MAAM,WAAW,GAAG;AAAA,QAC5D,WAAW;AAAA,MACb,CAAC;AAAA,IACH,QAAQ;AAAA,IAER;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,qBAAqB,SAKH;AACtB,UAAM,cAAc,MAAM,KAAK,gBAAgB;AAC/C,QAAI;AACF,YAAM,KAAK,eAAe,WAAW;AACrC,YAAM,KAAK,uBAAuB,aAAa,QAAQ,aAAa;AAEpE,UAAI,QAAQ,WAAW;AACrB,mBAAW,QAAQ,QAAQ,WAAW;AACpC,gBAAM,KAAK,gBAAgB,aAAa,KAAK,WAAW,KAAK,aAAa;AAAA,QAC5E;AAAA,MACF;AAEA,aAAO,MAAM,KAAK,gBAAgB;AAAA,QAChC;AAAA,QACA,SAAS,QAAQ;AAAA,QACjB,OAAO,QAAQ;AAAA,MACjB,CAAC;AAAA,IACH,UAAE;AACA,YAAM,KAAK,gBAAgB,WAAW;AAAA,IACxC;AAAA,EACF;AACF;","names":[]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
|
@@ -568,12 +568,21 @@ function validateWorkspaceRepoConfig(workspace, filePath, errors) {
|
|
|
568
568
|
const hooks = workspace.hooks;
|
|
569
569
|
const afterEachHook = isObject(hooks) ? hooks.after_each : void 0;
|
|
570
570
|
const isolation = workspace.isolation;
|
|
571
|
+
const docker = workspace.docker;
|
|
571
572
|
if (Array.isArray(repos)) {
|
|
572
573
|
for (const repo of repos) {
|
|
573
574
|
if (!isObject(repo)) continue;
|
|
574
575
|
const source = repo.source;
|
|
575
576
|
const checkout = repo.checkout;
|
|
576
577
|
const clone = repo.clone;
|
|
578
|
+
if (!isObject(source) && !isObject(docker)) {
|
|
579
|
+
errors.push({
|
|
580
|
+
severity: "error",
|
|
581
|
+
filePath,
|
|
582
|
+
location: `workspace.repos[path=${repo.path ?? "(none)"}]`,
|
|
583
|
+
message: "repos[].source is required for non-Docker workspaces. Source-less repos are only valid when workspace.docker is configured (repo exists inside the container)."
|
|
584
|
+
});
|
|
585
|
+
}
|
|
577
586
|
if (isObject(source) && isObject(checkout)) {
|
|
578
587
|
const sourceType = source.type;
|
|
579
588
|
const resolve = checkout.resolve;
|
|
@@ -581,8 +590,8 @@ function validateWorkspaceRepoConfig(workspace, filePath, errors) {
|
|
|
581
590
|
errors.push({
|
|
582
591
|
severity: "warning",
|
|
583
592
|
filePath,
|
|
584
|
-
location: `workspace.repos[path=${repo.path}]`,
|
|
585
|
-
message: "checkout.resolve has no effect for a local source. Use source.type to choose where the repo comes from; keep checkout.ref or checkout.ancestor only when pinning a local source."
|
|
593
|
+
location: `workspace.repos[path=${repo.path ?? "(none)"}]`,
|
|
594
|
+
message: "checkout.resolve has no effect for a local source. Use source.type to choose where the repo comes from; keep checkout.ref, checkout.base_commit, or checkout.ancestor only when pinning a local source."
|
|
586
595
|
});
|
|
587
596
|
}
|
|
588
597
|
}
|
|
@@ -593,7 +602,7 @@ function validateWorkspaceRepoConfig(workspace, filePath, errors) {
|
|
|
593
602
|
errors.push({
|
|
594
603
|
severity: "warning",
|
|
595
604
|
filePath,
|
|
596
|
-
location: `workspace.repos[path=${repo.path}]`,
|
|
605
|
+
location: `workspace.repos[path=${repo.path ?? "(none)"}]`,
|
|
597
606
|
message: `clone.depth (${depth}) may be insufficient for checkout.ancestor (${ancestor}). Recommend depth >= ${ancestor + 1}.`
|
|
598
607
|
});
|
|
599
608
|
}
|
|
@@ -1567,11 +1576,69 @@ async function validateConfigFile(filePath) {
|
|
|
1567
1576
|
});
|
|
1568
1577
|
}
|
|
1569
1578
|
}
|
|
1579
|
+
const results = config.results;
|
|
1580
|
+
if (results !== void 0) {
|
|
1581
|
+
if (typeof results !== "object" || results === null || Array.isArray(results)) {
|
|
1582
|
+
errors.push({
|
|
1583
|
+
severity: "error",
|
|
1584
|
+
filePath,
|
|
1585
|
+
location: "results",
|
|
1586
|
+
message: "Field 'results' must be an object"
|
|
1587
|
+
});
|
|
1588
|
+
} else {
|
|
1589
|
+
const exportConfig = results.export;
|
|
1590
|
+
if (exportConfig !== void 0) {
|
|
1591
|
+
if (typeof exportConfig !== "object" || exportConfig === null || Array.isArray(exportConfig)) {
|
|
1592
|
+
errors.push({
|
|
1593
|
+
severity: "error",
|
|
1594
|
+
filePath,
|
|
1595
|
+
location: "results.export",
|
|
1596
|
+
message: "Field 'results.export' must be an object"
|
|
1597
|
+
});
|
|
1598
|
+
} else {
|
|
1599
|
+
const exportRecord = exportConfig;
|
|
1600
|
+
if (typeof exportRecord.repo !== "string" || exportRecord.repo.trim().length === 0) {
|
|
1601
|
+
errors.push({
|
|
1602
|
+
severity: "error",
|
|
1603
|
+
filePath,
|
|
1604
|
+
location: "results.export.repo",
|
|
1605
|
+
message: "Field 'results.export.repo' must be a non-empty string"
|
|
1606
|
+
});
|
|
1607
|
+
}
|
|
1608
|
+
if (typeof exportRecord.path !== "string" || exportRecord.path.trim().length === 0) {
|
|
1609
|
+
errors.push({
|
|
1610
|
+
severity: "error",
|
|
1611
|
+
filePath,
|
|
1612
|
+
location: "results.export.path",
|
|
1613
|
+
message: "Field 'results.export.path' must be a non-empty string"
|
|
1614
|
+
});
|
|
1615
|
+
}
|
|
1616
|
+
if (exportRecord.auto_push !== void 0 && typeof exportRecord.auto_push !== "boolean") {
|
|
1617
|
+
errors.push({
|
|
1618
|
+
severity: "error",
|
|
1619
|
+
filePath,
|
|
1620
|
+
location: "results.export.auto_push",
|
|
1621
|
+
message: "Field 'results.export.auto_push' must be a boolean"
|
|
1622
|
+
});
|
|
1623
|
+
}
|
|
1624
|
+
if (exportRecord.branch_prefix !== void 0 && (typeof exportRecord.branch_prefix !== "string" || exportRecord.branch_prefix.trim().length === 0)) {
|
|
1625
|
+
errors.push({
|
|
1626
|
+
severity: "error",
|
|
1627
|
+
filePath,
|
|
1628
|
+
location: "results.export.branch_prefix",
|
|
1629
|
+
message: "Field 'results.export.branch_prefix' must be a non-empty string"
|
|
1630
|
+
});
|
|
1631
|
+
}
|
|
1632
|
+
}
|
|
1633
|
+
}
|
|
1634
|
+
}
|
|
1635
|
+
}
|
|
1570
1636
|
const allowedFields = /* @__PURE__ */ new Set([
|
|
1571
1637
|
"$schema",
|
|
1572
1638
|
"eval_patterns",
|
|
1573
1639
|
"required_version",
|
|
1574
1640
|
"execution",
|
|
1641
|
+
"results",
|
|
1575
1642
|
"studio"
|
|
1576
1643
|
]);
|
|
1577
1644
|
const unexpectedFields = Object.keys(config).filter((key) => !allowedFields.has(key));
|