@kata.dev/challenge-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +104 -0
- package/bin/challenge.js +36 -0
- package/package.json +30 -0
- package/src/commands/init.js +137 -0
- package/src/commands/login.js +33 -0
- package/src/commands/pack.js +100 -0
- package/src/commands/publish.js +170 -0
- package/src/commands/validate.js +77 -0
- package/src/lib/artifacts.js +123 -0
- package/src/lib/config.js +49 -0
- package/src/lib/helpers.js +83 -0
- package/src/lib/http.js +70 -0
- package/src/lib/runtime-deps.js +179 -0
package/README.md
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
# @kata.dev/challenge-cli
|
|
2
|
+
|
|
3
|
+
CLI for authoring, packing, validating, and publishing Eval Engine challenges.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npx @kata.dev/challenge-cli --help
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
Or install globally:
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
npm install -g @kata.dev/challenge-cli
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## Quick Start
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
# 1. Save your API credentials
|
|
21
|
+
npx @kata.dev/challenge-cli login --api https://eval.example.com --token <your-token>
|
|
22
|
+
|
|
23
|
+
# 2. Scaffold a new challenge
|
|
24
|
+
npx @kata.dev/challenge-cli init --slug hello-express
|
|
25
|
+
|
|
26
|
+
# 3. Author your challenge (edit files in hello-express/)
|
|
27
|
+
|
|
28
|
+
# 4. Pack artifacts
|
|
29
|
+
npx @kata.dev/challenge-cli pack --dir ./hello-express
|
|
30
|
+
|
|
31
|
+
# 5. Validate locally
|
|
32
|
+
npx @kata.dev/challenge-cli validate --manifest ./hello-express/dist/cks-manifest.json
|
|
33
|
+
|
|
34
|
+
# 6. Publish to the API
|
|
35
|
+
npx @kata.dev/challenge-cli publish --manifest ./hello-express/dist/cks-manifest.json
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
## Commands
|
|
39
|
+
|
|
40
|
+
### `login`
|
|
41
|
+
|
|
42
|
+
Save API URL and authentication token to `~/.challengerc.json`:
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
challenge login --api https://eval.example.com --token <token>
|
|
46
|
+
challenge login --api https://eval.example.com --token <token> --signing-secret <secret>
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
### `init`
|
|
50
|
+
|
|
51
|
+
Scaffold a new challenge directory:
|
|
52
|
+
|
|
53
|
+
```bash
|
|
54
|
+
challenge init --slug my-challenge
|
|
55
|
+
challenge init --slug my-challenge --dir ./challenges
|
|
56
|
+
challenge init # interactive mode (prompts for slug)
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
### `pack`
|
|
60
|
+
|
|
61
|
+
Pack challenge artifacts into uploadable bundles:
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
challenge pack --dir ./my-challenge
|
|
65
|
+
challenge pack --dir ./my-challenge --out ./my-challenge/dist --runtime-mode auto
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
Runtime modes:
|
|
69
|
+
- `auto` (default) — Builds `node_modules` from `artifacts/runtime_manifest/package.json`
|
|
70
|
+
- `manual` — Uses `artifacts/runtime_deps/` directly
|
|
71
|
+
|
|
72
|
+
### `validate`
|
|
73
|
+
|
|
74
|
+
Validate packed artifacts locally:
|
|
75
|
+
|
|
76
|
+
```bash
|
|
77
|
+
challenge validate --manifest ./my-challenge/dist/cks-manifest.json
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
### `publish`
|
|
81
|
+
|
|
82
|
+
Publish a challenge to the Eval Engine API:
|
|
83
|
+
|
|
84
|
+
```bash
|
|
85
|
+
challenge publish --manifest ./my-challenge/dist/cks-manifest.json
|
|
86
|
+
challenge publish --manifest ./my-challenge/dist/cks-manifest.json --api https://eval.example.com --token <token>
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
## Configuration
|
|
90
|
+
|
|
91
|
+
Credentials are stored in `~/.challengerc.json`:
|
|
92
|
+
|
|
93
|
+
```json
|
|
94
|
+
{
|
|
95
|
+
"apiUrl": "https://eval.example.com",
|
|
96
|
+
"token": "eyJ...",
|
|
97
|
+
"signingSecret": "your-signing-secret"
|
|
98
|
+
}
|
|
99
|
+
```
|
|
100
|
+
|
|
101
|
+
You can also use environment variables:
|
|
102
|
+
- `CKS_API_BASE_URL` — API URL
|
|
103
|
+
- `CKS_API_TOKEN` — Auth token
|
|
104
|
+
- `CHALLENGE_ARTIFACT_SIGNING_SECRET` — Signing secret for validation
|
package/bin/challenge.js
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
#!/usr/bin/env node

import { Command } from 'commander';
import chalk from 'chalk';
import { readFileSync } from 'node:fs';
import { fileURLToPath } from 'node:url';
import { dirname, join } from 'node:path';
import { registerLoginCommand } from '../src/commands/login.js';
import { registerInitCommand } from '../src/commands/init.js';
import { registerPackCommand } from '../src/commands/pack.js';
import { registerValidateCommand } from '../src/commands/validate.js';
import { registerPublishCommand } from '../src/commands/publish.js';

// Locate our own package.json so --version reports the installed version.
const packageDir = dirname(fileURLToPath(import.meta.url));
const pkg = JSON.parse(readFileSync(join(packageDir, '..', 'package.json'), 'utf8'));

const cli = new Command();

cli
  .name('challenge')
  .description('CLI for authoring, packing, validating, and publishing Eval Engine challenges')
  .version(pkg.version);

// Each command module attaches its own subcommand to the shared program.
for (const register of [
  registerLoginCommand,
  registerInitCommand,
  registerPackCommand,
  registerValidateCommand,
  registerPublishCommand,
]) {
  register(cli);
}

cli.parseAsync(process.argv).catch((err) => {
  console.error(chalk.red('Error:'), err.message);
  if (process.env.DEBUG) {
    console.error(err.stack);
  }
  process.exit(1);
});
|
package/package.json
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@kata.dev/challenge-cli",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "CLI for authoring, packing, validating, and publishing Eval Engine challenges",
|
|
6
|
+
"bin": {
|
|
7
|
+
"challenge": "bin/challenge.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"bin/",
|
|
11
|
+
"src/"
|
|
12
|
+
],
|
|
13
|
+
"engines": {
|
|
14
|
+
"node": ">=18"
|
|
15
|
+
},
|
|
16
|
+
"keywords": [
|
|
17
|
+
"eval-engine",
|
|
18
|
+
"challenge",
|
|
19
|
+
"cli",
|
|
20
|
+
"authoring"
|
|
21
|
+
],
|
|
22
|
+
"license": "MIT",
|
|
23
|
+
"dependencies": {
|
|
24
|
+
"chalk": "^5.4.1",
|
|
25
|
+
"commander": "^13.1.0",
|
|
26
|
+
"ora": "^8.2.0",
|
|
27
|
+
"prompts": "^2.4.2",
|
|
28
|
+
"tar": "^7.4.3"
|
|
29
|
+
}
|
|
30
|
+
}
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import prompts from 'prompts';
|
|
5
|
+
import { ARTIFACT_TYPES } from '../lib/artifacts.js';
|
|
6
|
+
import { assertSlug, ensureDir, writeJson } from '../lib/helpers.js';
|
|
7
|
+
|
|
8
|
+
/**
 * Determine the challenge slug, preferring the --slug flag and falling
 * back to an interactive prompt.
 * @param {string|undefined} flagSlug - Value of the --slug option, if given.
 * @returns {Promise<string>} A validated slug.
 * @throws {Error} When the prompt is cancelled or left empty.
 */
async function resolveSlug(flagSlug) {
  if (flagSlug) {
    return flagSlug;
  }

  const answers = await prompts({
    type: 'text',
    name: 'slug',
    message: 'Challenge slug (lowercase, dashes only):',
    validate: (v) => /^[a-z0-9-]+$/.test(v) || 'Must be lowercase alphanumeric with dashes',
  });

  // prompts resolves with an empty object when the user aborts (Ctrl-C).
  if (!answers.slug) {
    throw new Error('Challenge slug is required.');
  }
  return answers.slug;
}
|
|
23
|
+
|
|
24
|
+
/**
 * Register `challenge init`: scaffolds a new challenge directory with
 * challenge.json, per-artifact folders, starter code, and an example test.
 * @param {import('commander').Command} program - Root CLI program.
 */
export function registerInitCommand(program) {
  program
    .command('init')
    .description('Scaffold a new challenge authoring directory')
    .option('--slug <slug>', 'Challenge slug (lowercase, dashes)')
    .option('--dir <baseDir>', 'Parent directory to create the challenge in', process.cwd())
    .action(async (opts) => {
      const slug = await resolveSlug(opts.slug);
      assertSlug(slug);

      const baseDir = path.resolve(opts.dir, slug);
      if (fs.existsSync(baseDir)) {
        throw new Error(`Directory already exists: ${baseDir}`);
      }

      // One folder per artifact type, all under <challenge>/artifacts/.
      const artifactsRoot = path.join(baseDir, 'artifacts');
      const artifactRoots = {
        tests: path.join(artifactsRoot, 'tests'),
        starter: path.join(artifactsRoot, 'starter'),
        runtime_deps: path.join(artifactsRoot, 'runtime_deps'),
        runtime_manifest: path.join(artifactsRoot, 'runtime_manifest'),
      };

      ensureDir(baseDir);
      for (const dir of Object.values(artifactRoots)) {
        ensureDir(dir);
      }

      // Challenge metadata consumed later by pack/validate/publish.
      writeJson(path.join(baseDir, 'challenge.json'), {
        slug,
        title: slug,
        owner: 'team-evals',
        version: 1,
        runner: 'express-supertest',
        entry: 'src/app.js',
        timeoutSec: 10,
        allowedDependencies: ['express@5.2.1'],
        allowedPaths: ['src/'],
        requiredFiles: ['src/app.js'],
        description: 'Describe challenge behavior and grading expectations.',
      });

      // Example hidden test exercising the starter app.
      const exampleTest = `const request = require('supertest');
const app = require('./src/app');

describe('GET /', () => {
  it('should respond with 200', async () => {
    const res = await request(app).get('/');
    expect(res.status).toBe(200);
  });
});
`;
      fs.writeFileSync(path.join(artifactRoots.tests, 'example.test.js'), exampleTest, 'utf8');

      // Minimal Express starter the author fills in.
      const starterSrcDir = path.join(artifactRoots.starter, 'src');
      ensureDir(starterSrcDir);
      const starterApp = `const express = require('express');
const app = express();

// TODO: Implement your routes here

module.exports = app;
`;
      fs.writeFileSync(path.join(starterSrcDir, 'app.js'), starterApp, 'utf8');

      // Manifest of dependencies the hidden tests need at runtime.
      writeJson(path.join(artifactRoots.runtime_manifest, 'package.json'), {
        name: `${slug}-runtime-deps`,
        private: true,
        version: '1.0.0',
        description: 'Dependencies needed by hidden challenge tests at runtime.',
        dependencies: {},
      });

      // Drop a README into each artifact folder that does not already have one.
      // NOTE: ARTIFACT_TYPES does not include runtime_manifest, so that folder
      // intentionally gets no README here.
      for (const type of ARTIFACT_TYPES) {
        const artifactDir = artifactRoots[type];
        if (!artifactDir) continue;
        const readmePath = path.join(artifactDir, 'README.md');
        if (!fs.existsSync(readmePath)) {
          fs.writeFileSync(
            readmePath,
            `# ${type}\n\nPlace files for the \`${type}\` artifact in this folder.\n`,
            'utf8'
          );
        }
      }

      console.log();
      console.log(`${chalk.green('✔')} Challenge scaffolded at ${chalk.bold(baseDir)}`);
      console.log();
      console.log(chalk.dim(' Structure:'));
      console.log(chalk.dim(` ${slug}/`));
      console.log(chalk.dim(' ├── challenge.json'));
      console.log(chalk.dim(' └── artifacts/'));
      console.log(chalk.dim(' ├── tests/'));
      console.log(chalk.dim(' ├── starter/'));
      console.log(chalk.dim(' ├── runtime_deps/'));
      console.log(chalk.dim(' └── runtime_manifest/'));
      console.log();
      console.log(` Next: edit ${chalk.cyan('challenge.json')} and add tests, then run:`);
      console.log(` ${chalk.cyan(`npx @kata.dev/challenge-cli pack --dir ./${slug}`)}`);
    });
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import chalk from 'chalk';
|
|
2
|
+
import { saveConfig, loadConfig } from '../lib/config.js';
|
|
3
|
+
|
|
4
|
+
/**
 * Register `challenge login`: persists the API URL, auth token, and an
 * optional signing secret to the user's ~/.challengerc.json.
 * @param {import('commander').Command} program - Root CLI program.
 */
export function registerLoginCommand(program) {
  program
    .command('login')
    .description('Save API credentials to ~/.challengerc.json')
    .requiredOption('--api <url>', 'Eval Engine API base URL')
    .requiredOption('--token <token>', 'Authentication token (JWT)')
    .option('--signing-secret <secret>', 'Challenge artifact signing secret (optional, for local validation)')
    .action(async (opts) => {
      // Merge over any existing config so unrelated keys survive the update.
      const config = {
        ...loadConfig(),
        apiUrl: opts.api.replace(/\/$/, ''),
        token: opts.token,
      };

      if (opts.signingSecret) {
        config.signingSecret = opts.signingSecret;
      }

      const configPath = saveConfig(config);

      console.log(`${chalk.green('✔')} Credentials saved to ${chalk.dim(configPath)}`);
      console.log(` API: ${chalk.cyan(config.apiUrl)}`);
      // Only echo a token prefix so full credentials never hit the terminal.
      console.log(` Token: ${chalk.dim(config.token.slice(0, 20) + '…')}`);
      if (config.signingSecret) {
        console.log(` Signing secret: ${chalk.dim('configured')}`);
      }
    });
}
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import ora from 'ora';
|
|
5
|
+
import { REQUIRED_ARTIFACT_TYPES, packArtifact } from '../lib/artifacts.js';
|
|
6
|
+
import {
|
|
7
|
+
readJson,
|
|
8
|
+
writeJson,
|
|
9
|
+
assertSlug,
|
|
10
|
+
assertPinnedAllowedDependencies,
|
|
11
|
+
resolveRuntimeMode,
|
|
12
|
+
} from '../lib/helpers.js';
|
|
13
|
+
import { resolveRuntimeDepsSourceDir } from '../lib/runtime-deps.js';
|
|
14
|
+
import { resolveSigningSecret } from '../lib/config.js';
|
|
15
|
+
|
|
16
|
+
/**
 * Register `challenge pack`: tars, hashes, and HMAC-signs each required
 * artifact, then writes a cks-manifest.json describing the bundle.
 * @param {import('commander').Command} program - Root CLI program.
 */
export function registerPackCommand(program) {
  program
    .command('pack')
    .description('Pack challenge artifacts into uploadable bundles')
    .requiredOption('--dir <challengeDir>', 'Path to the challenge directory')
    .option('--out <outDir>', 'Output directory (defaults to <challengeDir>/dist)')
    .option('--runtime-mode <mode>', 'Runtime deps mode: auto (default) or manual', 'auto')
    .option('--signing-secret <secret>', 'Artifact signing secret (falls back to config)')
    .action(async (opts) => {
      const challengeDir = path.resolve(opts.dir);
      const outDir = path.resolve(opts.out || path.join(challengeDir, 'dist'));
      const runtimeMode = resolveRuntimeMode(opts.runtimeMode);

      // Fail fast before doing any work if we cannot sign the bundles.
      const signingSecret = resolveSigningSecret(opts.signingSecret);
      if (!signingSecret) {
        throw new Error(
          'Signing secret is required. Set it via --signing-secret, CHALLENGE_ARTIFACT_SIGNING_SECRET env var, or run "challenge login --signing-secret <secret>".'
        );
      }

      const challengeJsonPath = path.join(challengeDir, 'challenge.json');
      if (!fs.existsSync(challengeJsonPath)) {
        throw new Error(`challenge.json not found: ${challengeJsonPath}`);
      }

      const metadata = readJson(challengeJsonPath);
      assertSlug(metadata.slug);
      assertPinnedAllowedDependencies(metadata.allowedDependencies);

      console.log(chalk.bold(`\nPacking ${metadata.slug}@${metadata.version}\n`));

      const artifacts = {};
      for (const type of REQUIRED_ARTIFACT_TYPES) {
        const spinner = ora(`Packing ${type}…`).start();
        try {
          // runtime_deps may be built on the fly (auto mode); everything
          // else is read straight out of artifacts/<type>.
          let sourceDir;
          if (type === 'runtime_deps') {
            sourceDir = await resolveRuntimeDepsSourceDir({
              challengeDir,
              runtimeMode,
              log: (msg) => { spinner.text = msg; },
            });
          } else {
            sourceDir = path.join(challengeDir, 'artifacts', type);
          }

          if (!fs.existsSync(sourceDir)) {
            throw new Error(`Missing artifact source: ${sourceDir}`);
          }

          const outputFile = path.join(outDir, `${type}.tar.gz`);
          const packed = await packArtifact(sourceDir, outputFile, signingSecret);

          artifacts[type] = {
            sha256: packed.sha256,
            signature: packed.signature,
            sizeBytes: packed.sizeBytes,
            file: path.basename(outputFile),
          };

          const sizeKb = (packed.sizeBytes / 1024).toFixed(1);
          spinner.succeed(`${type} ${chalk.dim(`${sizeKb} KB sha256:${packed.sha256.slice(0, 12)}…`)}`);
        } catch (err) {
          spinner.fail(`${type} failed`);
          throw err;
        }
      }

      // Manifest ties metadata + artifact digests together for validate/publish.
      const manifestPath = path.join(outDir, 'cks-manifest.json');
      writeJson(manifestPath, {
        cksVersion: '1',
        generatedAt: new Date().toISOString(),
        metadata,
        artifacts,
      });

      console.log();
      console.log(`${chalk.green('✔')} Manifest: ${chalk.cyan(manifestPath)}`);
      console.log();
      console.log(' Next steps:');
      const relManifest = path.relative(process.cwd(), manifestPath);
      console.log(` ${chalk.cyan(`npx @kata.dev/challenge-cli validate --manifest ${relManifest}`)}`);
      console.log(` ${chalk.cyan(`npx @kata.dev/challenge-cli publish --manifest ${relManifest}`)}`);
    });
}
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import ora from 'ora';
|
|
5
|
+
import { REQUIRED_ARTIFACT_TYPES } from '../lib/artifacts.js';
|
|
6
|
+
import { readJson, assertSlug, assertPinnedAllowedDependencies } from '../lib/helpers.js';
|
|
7
|
+
import { fetchWithTimeout, requestJson } from '../lib/http.js';
|
|
8
|
+
import { resolveApiUrl, resolveToken } from '../lib/config.js';
|
|
9
|
+
|
|
10
|
+
/**
 * Create the challenge record on the API if it does not already exist.
 * An HTTP 409 from the create endpoint means "already exists" and is
 * treated as success; any other error propagates.
 * @param {string} apiBase - API base URL (no trailing slash).
 * @param {string} token - Auth token.
 * @param {object} metadata - Parsed challenge metadata (slug/title/owner).
 */
async function ensureChallengeExists(apiBase, token, metadata) {
  const body = {
    slug: metadata.slug,
    title: metadata.title || metadata.slug,
    owner: metadata.owner || 'team-evals',
  };

  try {
    await requestJson(`${apiBase}/admin/challenges`, { method: 'POST', token, body });
  } catch (err) {
    // 409 = already exists, that's fine for this upsert-style call.
    if (err.status !== 409) {
      throw err;
    }
  }
}
|
|
26
|
+
|
|
27
|
+
/**
 * Upload every required artifact for a challenge version, sequentially:
 * request a presigned upload ticket, then send the local tarball to it.
 * @param {object} args
 * @param {string} args.apiBase - API base URL (no trailing slash).
 * @param {string} args.token - Auth token.
 * @param {string} args.versionId - Challenge version being populated.
 * @param {object} args.manifest - Parsed cks-manifest.json.
 * @param {string} args.manifestPath - Manifest location; artifact files are siblings.
 * @param {Function} args.onProgress - Called with (type, 'uploading'|'done').
 */
async function uploadArtifacts({ apiBase, token, versionId, manifest, manifestPath, onProgress }) {
  const manifestDir = path.dirname(manifestPath);

  for (const type of REQUIRED_ARTIFACT_TYPES) {
    onProgress(type, 'uploading');

    const artifact = manifest.artifacts[type];
    const artifactPath = path.resolve(manifestDir, artifact.file);

    // Ask the API for a presigned destination for this artifact.
    const uploadTicket = await requestJson(
      `${apiBase}/admin/challenge-versions/${versionId}/artifacts/${type}/presign-upload`,
      {
        method: 'POST',
        token,
        body: { sha256: artifact.sha256, sizeBytes: artifact.sizeBytes },
      }
    );

    // The ticket may dictate method/headers; otherwise default to a gzip PUT.
    const uploadResponse = await fetchWithTimeout(uploadTicket.upload.url, {
      method: uploadTicket.upload.method || 'PUT',
      headers: uploadTicket.upload.headers || { 'content-type': 'application/gzip' },
      body: fs.readFileSync(artifactPath),
    });

    if (!uploadResponse.ok) {
      throw new Error(`Artifact upload failed for ${type}: HTTP ${uploadResponse.status}`);
    }

    onProgress(type, 'done');
  }
}
|
|
61
|
+
|
|
62
|
+
/**
 * Register `challenge publish`: ensures the challenge exists on the API,
 * creates a version, uploads artifacts, triggers server-side validation,
 * and finally publishes the version.
 * @param {import('commander').Command} program - Root CLI program.
 */
export function registerPublishCommand(program) {
  program
    .command('publish')
    .description('Publish a challenge to the Eval Engine API')
    .requiredOption('--manifest <path>', 'Path to cks-manifest.json')
    .option('--api <url>', 'API base URL (falls back to config)')
    .option('--token <token>', 'Auth token (falls back to config)')
    .action(async (opts) => {
      const manifestPath = path.resolve(opts.manifest);
      const apiBase = resolveApiUrl(opts.api);
      const token = resolveToken(opts.token);

      const manifest = readJson(manifestPath);
      const metadata = manifest.metadata || {};
      assertSlug(metadata.slug);
      assertPinnedAllowedDependencies(metadata.allowedDependencies);

      console.log(chalk.bold(`\nPublishing ${metadata.slug}@${metadata.version}\n`));
      console.log(chalk.dim(` API: ${apiBase}\n`));

      // Step 1: ensure the challenge record exists (409 is treated as success).
      let spinner = ora('Creating challenge (or confirming exists)…').start();
      try {
        await ensureChallengeExists(apiBase, token, metadata);
        spinner.succeed('Challenge exists');
      } catch (err) {
        spinner.fail('Failed to create challenge');
        throw err;
      }

      // Step 2: create the new version from the manifest metadata.
      spinner = ora('Creating version…').start();
      let version;
      try {
        version = await requestJson(`${apiBase}/admin/challenges/${metadata.slug}/versions`, {
          method: 'POST',
          token,
          body: {
            version: metadata.version,
            runner: metadata.runner,
            entry: metadata.entry,
            timeoutSec: metadata.timeoutSec,
            allowedDependencies: metadata.allowedDependencies,
            allowedPaths: metadata.allowedPaths,
            requiredFiles: metadata.requiredFiles,
            description: metadata.description,
          },
        });
        spinner.succeed(`Version created (id: ${chalk.dim(version.id)})`);
      } catch (err) {
        spinner.fail('Failed to create version');
        throw err;
      }

      // Step 3: upload each packed artifact found next to the manifest.
      spinner = ora('Uploading artifacts…').start();
      try {
        await uploadArtifacts({
          apiBase,
          token,
          versionId: version.id,
          manifest,
          manifestPath,
          onProgress: (type, status) => {
            if (status === 'uploading') {
              spinner.text = `Uploading ${type}…`;
            }
          },
        });
        spinner.succeed('Artifacts uploaded');
      } catch (err) {
        spinner.fail('Upload failed');
        throw err;
      }

      // Step 4: ask the server to re-verify hashes and signatures.
      spinner = ora('Server-side validation…').start();
      try {
        await requestJson(`${apiBase}/admin/challenge-versions/${version.id}/validate`, {
          method: 'POST',
          token,
        });
        spinner.succeed('Validation passed');
      } catch (err) {
        spinner.fail('Validation failed');
        throw err;
      }

      // Step 5: flip the version live.
      spinner = ora('Publishing…').start();
      try {
        const published = await requestJson(`${apiBase}/admin/challenge-versions/${version.id}/publish`, {
          method: 'POST',
          token,
        });
        spinner.succeed('Published');

        console.log();
        console.log(`${chalk.green('✔')} ${chalk.bold(metadata.slug)}@${metadata.version} is now live!`);
        console.log(chalk.dim(` challengeVersionId: ${published.challengeVersionId}`));
      } catch (err) {
        spinner.fail('Publish failed');
        throw err;
      }
    });
}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import ora from 'ora';
|
|
5
|
+
import {
|
|
6
|
+
REQUIRED_ARTIFACT_TYPES,
|
|
7
|
+
computeSha256Hex,
|
|
8
|
+
signSha256Hex,
|
|
9
|
+
validateTarGzBuffer,
|
|
10
|
+
} from '../lib/artifacts.js';
|
|
11
|
+
import { readJson } from '../lib/helpers.js';
|
|
12
|
+
import { resolveSigningSecret } from '../lib/config.js';
|
|
13
|
+
|
|
14
|
+
/**
 * Register `challenge validate`: re-checks each packed artifact against
 * the manifest — SHA-256 digest, HMAC signature, and tar safety limits.
 * @param {import('commander').Command} program - Root CLI program.
 */
export function registerValidateCommand(program) {
  program
    .command('validate')
    .description('Validate packed artifacts locally')
    .requiredOption('--manifest <path>', 'Path to cks-manifest.json')
    .option('--signing-secret <secret>', 'Artifact signing secret (falls back to config)')
    .action(async (opts) => {
      const manifestPath = path.resolve(opts.manifest);

      const signingSecret = resolveSigningSecret(opts.signingSecret);
      if (!signingSecret) {
        throw new Error(
          'Signing secret is required for validation. Set it via --signing-secret, CHALLENGE_ARTIFACT_SIGNING_SECRET env var, or run "challenge login --signing-secret <secret>".'
        );
      }

      const manifest = readJson(manifestPath);
      const slug = manifest.metadata?.slug || 'unknown';
      console.log(chalk.bold(`\nValidating ${slug}@${manifest.metadata?.version || '?'}\n`));

      const manifestDir = path.dirname(manifestPath);
      for (const type of REQUIRED_ARTIFACT_TYPES) {
        const spinner = ora(`Checking ${type}…`).start();

        const artifact = manifest.artifacts && manifest.artifacts[type];
        if (!artifact) {
          spinner.fail(`${type}: missing from manifest`);
          throw new Error(`Manifest missing artifact: ${type}`);
        }

        const artifactPath = path.resolve(manifestDir, artifact.file);
        if (!fs.existsSync(artifactPath)) {
          spinner.fail(`${type}: file not found`);
          throw new Error(`Artifact file does not exist: ${artifactPath}`);
        }

        const buffer = fs.readFileSync(artifactPath);

        // 1) Content hash must match what was recorded at pack time.
        const actualSha = computeSha256Hex(buffer);
        if (actualSha !== artifact.sha256) {
          spinner.fail(`${type}: SHA-256 mismatch`);
          throw new Error(`SHA mismatch for ${type}. expected=${artifact.sha256} actual=${actualSha}`);
        }

        // 2) HMAC over the hash must verify against the signing secret.
        const actualSig = signSha256Hex(actualSha, signingSecret);
        if (actualSig !== artifact.signature) {
          spinner.fail(`${type}: signature mismatch`);
          throw new Error(`Signature mismatch for ${type}. expected=${artifact.signature} actual=${actualSig}`);
        }

        // 3) Archive must be safe to extract (no traversal, links, or bombs).
        spinner.text = `Validating ${type} tar contents…`;
        const { entryCount, totalSize } = await validateTarGzBuffer({ buffer });

        const sizeKb = (totalSize / 1024).toFixed(1);
        spinner.succeed(`${type} ${chalk.dim(`${entryCount} entries, ${sizeKb} KB extracted`)}`);
      }

      console.log();
      console.log(`${chalk.green('✔')} All artifacts validated successfully`);
    });
}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import crypto from 'node:crypto';
|
|
2
|
+
import fs from 'node:fs';
|
|
3
|
+
import os from 'node:os';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import { create as tarCreate, list as tarList } from 'tar';
|
|
6
|
+
|
|
7
|
+
// Artifact folder names a challenge ships under artifacts/.
export const ARTIFACT_TYPES = ['tests', 'starter', 'runtime_deps'];

// Every one of these must be packed, validated, and uploaded for a
// version to be complete (currently identical to ARTIFACT_TYPES).
export const REQUIRED_ARTIFACT_TYPES = [...ARTIFACT_TYPES];
|
|
9
|
+
|
|
10
|
+
/**
 * Hex-encoded SHA-256 digest of a buffer.
 * @param {Buffer} buffer - Raw bytes to hash.
 * @returns {string} Lowercase hex digest.
 * @throws {Error} When the argument is not a Buffer.
 */
export function computeSha256Hex(buffer) {
  if (!Buffer.isBuffer(buffer)) {
    throw new Error('Buffer required to compute SHA-256.');
  }
  const hash = crypto.createHash('sha256');
  hash.update(buffer);
  return hash.digest('hex');
}
|
|
16
|
+
|
|
17
|
+
/**
 * HMAC-SHA256 signature over a hex digest string.
 * @param {string} sha256Hex - Hex digest to sign.
 * @param {string} secret - Shared signing secret.
 * @returns {string} Lowercase hex HMAC.
 * @throws {Error} When either argument is missing or not a string.
 */
export function signSha256Hex(sha256Hex, secret) {
  if (typeof sha256Hex !== 'string' || sha256Hex.length === 0) {
    throw new Error('sha256Hex is required to compute signature.');
  }
  if (typeof secret !== 'string' || secret.length === 0) {
    throw new Error('Signing secret is required to compute signature.');
  }
  return crypto.createHmac('sha256', secret).update(sha256Hex).digest('hex');
}
|
|
26
|
+
|
|
27
|
+
/**
 * Reject tar entry paths that could escape the extraction root.
 * Throws on empty paths, absolute paths (POSIX or Windows drive-letter),
 * and any `..` traversal segment; returns undefined for safe paths.
 * @param {string} entryPath - Path as recorded in the tar entry.
 * @throws {Error} When the path is empty, absolute, or traverses upward.
 */
export function assertSafeTarPath(entryPath) {
  // Treat backslashes as separators so Windows-style entries are checked too.
  const normalized = String(entryPath || '').replace(/\\/g, '/');

  if (normalized === '') {
    throw new Error('Invalid tar entry path: empty path.');
  }
  if (normalized.startsWith('/') || /^[A-Za-z]:/.test(normalized)) {
    throw new Error(`Unsafe tar entry path: ${entryPath}`);
  }
  if (normalized.split('/').includes('..')) {
    throw new Error(`Unsafe tar entry path traversal: ${entryPath}`);
  }

  // Belt-and-braces: also reject anything that normalizes to an upward path.
  const cleaned = path.posix.normalize(normalized);
  if (cleaned === '..' || cleaned.startsWith('../') || cleaned.includes('/../')) {
    throw new Error(`Unsafe tar entry path traversal: ${entryPath}`);
  }
}
|
|
44
|
+
|
|
45
|
+
/**
 * Materialize a buffer as a temp archive file, run `fn(archivePath, tmpRoot)`,
 * and always remove the temp directory afterwards.
 * @param {Buffer} buffer - Archive bytes to write to disk.
 * @param {Function} fn - Async callback receiving the archive path and temp root.
 * @returns {Promise<*>} Whatever `fn` resolves to.
 */
async function withTempArchive(buffer, fn) {
  const { mkdtemp, writeFile, rm } = fs.promises;
  const tmpRoot = await mkdtemp(path.join(os.tmpdir(), 'challenge-cli-artifact-'));
  try {
    const archivePath = path.join(tmpRoot, 'artifact.tar.gz');
    await writeFile(archivePath, buffer);
    return await fn(archivePath, tmpRoot);
  } finally {
    // `force` ignores an already-removed tree; cleanup runs even when fn throws.
    await rm(tmpRoot, { recursive: true, force: true });
  }
}
|
|
55
|
+
|
|
56
|
+
/**
 * Scan a .tar.gz buffer without extracting it, enforcing safety limits:
 * bounded entry count, bounded declared extracted size, safe entry paths,
 * and no symlink/hardlink entries.
 * @param {object} args
 * @param {Buffer} args.buffer - Gzipped tar archive bytes.
 * @param {number} [args.maxEntries=2000] - Maximum allowed entry count.
 * @param {number} [args.maxExtractedBytes=104857600] - Max declared total size (100 MiB).
 * @returns {Promise<{entryCount: number, totalSize: number}>} Scan summary.
 * @throws {Error} On a non-Buffer input or any safety-limit violation.
 */
export async function validateTarGzBuffer({ buffer, maxEntries = 2000, maxExtractedBytes = 104857600 }) {
  if (!Buffer.isBuffer(buffer)) {
    throw new Error('Artifact tar validation requires a Buffer.');
  }

  const stats = { entryCount: 0, totalSize: 0 };
  // tar's onentry callback cannot abort the listing, so remember the first
  // failure here and rethrow once the scan completes.
  let firstError = null;

  const inspectEntry = (entry) => {
    if (firstError) return;
    try {
      stats.entryCount += 1;
      if (stats.entryCount > maxEntries) {
        throw new Error(`Tar entry count exceeds maximum (${maxEntries}).`);
      }
      assertSafeTarPath(entry.path);
      if (entry.type === 'SymbolicLink' || entry.type === 'Link') {
        throw new Error(`Symlink/hardlink entries are not allowed: ${entry.path}`);
      }
      stats.totalSize += Number.isFinite(entry.size) ? entry.size : 0;
      if (stats.totalSize > maxExtractedBytes) {
        throw new Error(`Tar extracted size exceeds maximum (${maxExtractedBytes} bytes).`);
      }
    } catch (err) {
      firstError = err;
    }
  };

  await withTempArchive(buffer, (archivePath) =>
    tarList({ file: archivePath, strict: true, onentry: inspectEntry })
  );

  if (firstError) {
    throw firstError;
  }
  return { entryCount: stats.entryCount, totalSize: stats.totalSize };
}
|
|
98
|
+
|
|
99
|
+
/**
 * Packs `sourceDir` into a deterministic gzipped tarball at `outputFile`,
 * then hashes and signs the archive bytes.
 *
 * @param {string} sourceDir - Directory whose contents are archived.
 * @param {string} outputFile - Destination .tar.gz path (parents created).
 * @param {string} signingSecret - HMAC secret passed to signSha256Hex.
 * @returns {Promise<{ file: string, sha256: string, signature: string, sizeBytes: number }>}
 */
export async function packArtifact(sourceDir, outputFile, signingSecret) {
  // This function is already async, so use the promise-based fs API instead
  // of the original *Sync calls that blocked the event loop.
  await fs.promises.mkdir(path.dirname(outputFile), { recursive: true });

  await tarCreate(
    {
      gzip: true,
      cwd: sourceDir,
      file: outputFile,
      portable: true, // strip platform-specific metadata
      noMtime: true, // omit mtimes so identical trees produce identical bytes
    },
    ['.']
  );

  const buffer = await fs.promises.readFile(outputFile);
  const sha256 = computeSha256Hex(buffer);
  const signature = signSha256Hex(sha256, signingSecret);

  return {
    file: outputFile,
    sha256,
    signature,
    sizeBytes: buffer.length,
  };
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import os from 'node:os';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
|
|
5
|
+
// Per-user CLI configuration file, stored in the home directory.
const CONFIG_FILENAME = '.challengerc.json';

/** @returns {string} Absolute path of the per-user config file. */
function getConfigPath() {
  const home = os.homedir();
  return path.join(home, CONFIG_FILENAME);
}
|
|
10
|
+
|
|
11
|
+
/**
 * Reads the saved CLI config. Returns {} when the file is missing or
 * unparseable — a corrupt config file must never crash the CLI.
 *
 * @returns {object} Parsed config, or an empty object.
 */
export function loadConfig() {
  const configPath = getConfigPath();
  if (!fs.existsSync(configPath)) {
    return {};
  }
  try {
    const raw = fs.readFileSync(configPath, 'utf8');
    return JSON.parse(raw);
  } catch {
    // Deliberate best-effort: unreadable/invalid JSON degrades to defaults.
    return {};
  }
}
|
|
22
|
+
|
|
23
|
+
/**
 * Persists `config` as pretty-printed JSON (trailing newline included).
 *
 * @param {object} config - Values to write.
 * @returns {string} The path of the file that was written.
 */
export function saveConfig(config) {
  const target = getConfigPath();
  const serialized = `${JSON.stringify(config, null, 2)}\n`;
  fs.writeFileSync(target, serialized, 'utf8');
  return target;
}
|
|
28
|
+
|
|
29
|
+
/**
 * Resolves the API base URL with precedence:
 * CLI flag > CKS_API_BASE_URL env > saved config > localhost default.
 * A single trailing slash is stripped from the result.
 *
 * @param {string} [flagValue] - Value from the --api flag, if given.
 * @returns {string} Normalized base URL.
 */
export function resolveApiUrl(flagValue) {
  const raw =
    flagValue ||
    process.env.CKS_API_BASE_URL ||
    loadConfig().apiUrl ||
    'http://127.0.0.1:3000';
  return raw.replace(/\/$/, '');
}
|
|
37
|
+
|
|
38
|
+
/**
 * Resolves the API token: CLI flag > CKS_API_TOKEN env > saved config > null.
 *
 * @param {string} [flagValue] - Value from the --token flag, if given.
 * @returns {string|null}
 */
export function resolveToken(flagValue) {
  if (flagValue) {
    return flagValue;
  }
  if (process.env.CKS_API_TOKEN) {
    return process.env.CKS_API_TOKEN;
  }
  return loadConfig().token || null;
}
|
|
41
|
+
|
|
42
|
+
/**
 * Resolves the artifact signing secret:
 * CLI flag > CHALLENGE_ARTIFACT_SIGNING_SECRET env > saved config > null.
 *
 * @param {string} [flagValue] - Value from the CLI flag, if given.
 * @returns {string|null}
 */
export function resolveSigningSecret(flagValue) {
  if (flagValue) {
    return flagValue;
  }
  const fromEnv = process.env.CHALLENGE_ARTIFACT_SIGNING_SECRET;
  if (fromEnv) {
    return fromEnv;
  }
  return loadConfig().signingSecret || null;
}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import crypto from 'node:crypto';
|
|
3
|
+
|
|
4
|
+
// Validation patterns shared by the helpers in this module.
const SLUG_PATTERN = /^[a-z0-9-]+$/;
const PACKAGE_NAME_PATTERN = /^(?:@[-a-z0-9~][a-z0-9-._~]*\/)?[a-z0-9~][a-z0-9-._~]*$/i;
const EXACT_VERSION_PATTERN = /^\d+\.\d+\.\d+(?:-[0-9A-Za-z-.]+)?(?:\+[0-9A-Za-z-.]+)?$/;

/**
 * Throws unless `slug` is a non-empty lowercase alphanumeric-and-dash string.
 *
 * @param {string} slug - Candidate challenge slug.
 * @throws {Error} When the slug is empty or contains invalid characters.
 */
export function assertSlug(slug) {
  const isValid = Boolean(slug) && SLUG_PATTERN.test(slug);
  if (isValid) {
    return;
  }
  throw new Error(`Invalid challenge slug: "${slug}". Must be lowercase alphanumeric with dashes.`);
}
|
|
13
|
+
|
|
14
|
+
/**
 * Validates that `allowedDependencies` is a non-empty list of "name@x.y.z"
 * specs: valid npm names, exact (pinned) versions, and no conflicting
 * duplicate entries for the same package.
 *
 * @param {string[]} allowedDependencies - Specs from challenge.json.
 * @throws {Error} On any malformed, unpinned, or conflicting entry.
 */
export function assertPinnedAllowedDependencies(allowedDependencies) {
  if (!Array.isArray(allowedDependencies) || allowedDependencies.length === 0) {
    throw new Error('challenge.json allowedDependencies must be a non-empty array.');
  }

  // Same rules as the module-level patterns, inlined so each check reads locally.
  const namePattern = /^(?:@[-a-z0-9~][a-z0-9-._~]*\/)?[a-z0-9~][a-z0-9-._~]*$/i;
  const versionPattern = /^\d+\.\d+\.\d+(?:-[0-9A-Za-z-.]+)?(?:\+[0-9A-Za-z-.]+)?$/;

  const pinnedVersions = new Map();
  for (const rawSpec of allowedDependencies) {
    if (typeof rawSpec !== 'string' || rawSpec.trim() === '') {
      throw new Error('allowedDependencies entries must be non-empty strings.');
    }

    const spec = rawSpec.trim();
    // lastIndexOf: scoped names ("@scope/pkg@1.2.3") must split on the
    // version separator, not the leading scope "@".
    const atIndex = spec.lastIndexOf('@');
    if (atIndex <= 0) {
      throw new Error(`Dependency "${spec}" must use package@x.y.z format.`);
    }

    const name = spec.slice(0, atIndex).trim();
    const version = spec.slice(atIndex + 1).trim();
    if (!namePattern.test(name)) {
      throw new Error(`Invalid dependency name in allowedDependencies: ${name}`);
    }
    if (!versionPattern.test(version)) {
      throw new Error(`Dependency "${spec}" must pin an exact version (x.y.z).`);
    }

    const previous = pinnedVersions.get(name);
    if (previous && previous !== version) {
      throw new Error(`Conflicting allowed dependency versions for ${name}.`);
    }
    pinnedVersions.set(name, version);
  }
}
|
|
47
|
+
|
|
48
|
+
/**
 * Parses the JSON file at `filePath` (UTF-8).
 *
 * @param {string} filePath
 * @returns {*} The parsed value.
 */
export function readJson(filePath) {
  const text = fs.readFileSync(filePath, 'utf8');
  return JSON.parse(text);
}
|
|
51
|
+
|
|
52
|
+
/**
 * Writes `payload` to `filePath` as pretty-printed JSON with a trailing newline.
 *
 * @param {string} filePath
 * @param {*} payload - Any JSON-serializable value.
 */
export function writeJson(filePath, payload) {
  const text = `${JSON.stringify(payload, null, 2)}\n`;
  fs.writeFileSync(filePath, text, 'utf8');
}
|
|
55
|
+
|
|
56
|
+
/**
 * Ensures `dirPath` exists, creating intermediate directories as needed.
 * No-op when the directory already exists (recursive mkdir does not throw).
 *
 * @param {string} dirPath
 */
export function ensureDir(dirPath) {
  fs.mkdirSync(dirPath, { recursive: true });
}
|
|
59
|
+
|
|
60
|
+
/**
 * SHA-256 digest of `value` as lowercase hex.
 *
 * @param {Buffer|string} value - Bytes (or UTF-8 string) to hash.
 * @returns {string} 64-character hex digest.
 */
export function hashBuffer(value) {
  const hasher = crypto.createHash('sha256');
  hasher.update(value);
  return hasher.digest('hex');
}
|
|
63
|
+
|
|
64
|
+
/**
 * Returns the first path in `paths` that exists on disk, or null.
 *
 * @param {string[]} paths - Candidate paths, checked in order.
 * @returns {string|null}
 */
export function findFirstExistingFile(paths) {
  const found = paths.find((candidate) => fs.existsSync(candidate));
  return found ?? null;
}
|
|
72
|
+
|
|
73
|
+
// Valid values for the runtime-deps build mode.
const RUNTIME_MODE_VALUES = new Set(['auto', 'manual']);

/**
 * Normalizes `mode` to lowercase, defaulting to 'auto' when absent.
 *
 * @param {string} [mode]
 * @returns {'auto'|'manual'}
 * @throws {Error} When the value is not a supported mode.
 */
export function resolveRuntimeMode(mode) {
  const normalized = (mode || 'auto').toLowerCase();
  if (RUNTIME_MODE_VALUES.has(normalized)) {
    return normalized;
  }
  throw new Error(
    `Invalid runtime mode: ${mode}. Supported values: ${Array.from(RUNTIME_MODE_VALUES).join(', ')}`
  );
}
|
package/src/lib/http.js
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
// Default per-request timeout; override via CKS_HTTP_TIMEOUT_MS.
const DEFAULT_HTTP_TIMEOUT_MS = 30_000;

/**
 * Returns the configured HTTP timeout in milliseconds, falling back to the
 * default when the env var is unset, non-numeric, or non-positive.
 *
 * @returns {number}
 */
function resolveHttpTimeoutMs() {
  const fromEnv = Number.parseInt(process.env.CKS_HTTP_TIMEOUT_MS || '', 10);
  return Number.isFinite(fromEnv) && fromEnv > 0 ? fromEnv : DEFAULT_HTTP_TIMEOUT_MS;
}
|
|
10
|
+
|
|
11
|
+
/**
 * fetch() with a timeout from resolveHttpTimeoutMs().
 *
 * Bug fix: the original spread `{ ...options, signal: controller.signal }`
 * silently discarded a caller-supplied `options.signal`, and any abort was
 * reported as a timeout. Caller aborts are now forwarded to the internal
 * controller and rethrown as-is; only real timeouts get the timeout message.
 *
 * @param {string} url
 * @param {RequestInit} [options] - Standard fetch options; `signal` is honored.
 * @returns {Promise<Response>}
 * @throws {Error} Descriptive timeout error, or the underlying fetch error.
 */
export async function fetchWithTimeout(url, options = {}) {
  const timeoutMs = resolveHttpTimeoutMs();
  const controller = new AbortController();
  let timedOut = false;
  const timeoutId = setTimeout(() => {
    timedOut = true;
    controller.abort();
  }, timeoutMs);

  // Forward a caller-provided signal so both timeout and caller can cancel.
  const callerSignal = options.signal;
  if (callerSignal) {
    if (callerSignal.aborted) {
      controller.abort(callerSignal.reason);
    } else {
      callerSignal.addEventListener(
        'abort',
        () => controller.abort(callerSignal.reason),
        { once: true }
      );
    }
  }

  try {
    return await fetch(url, {
      ...options,
      signal: controller.signal,
    });
  } catch (err) {
    if (timedOut && err && (err.name === 'AbortError' || err.code === 'ABORT_ERR')) {
      const method = options.method || 'GET';
      throw new Error(`Request timed out after ${timeoutMs}ms: ${method} ${url}`);
    }
    throw err;
  } finally {
    clearTimeout(timeoutId);
  }
}
|
|
31
|
+
|
|
32
|
+
/**
 * Sends a JSON request and returns the parsed JSON response body.
 *
 * Adds a Bearer authorization header when `token` is set. An empty response
 * body parses as {}. Errors carry `status`, plus `payload` (HTTP failure) or
 * `responseText` (JSON parse failure, truncated to 2000 chars).
 *
 * @param {string} url
 * @param {{ method: string, body?: *, token?: string }} opts
 * @returns {Promise<object>}
 * @throws {Error} On non-2xx responses or unparseable response bodies.
 */
export async function requestJson(url, { method, body, token }) {
  const headers = {
    'content-type': 'application/json',
  };
  if (token) {
    headers.authorization = `Bearer ${token}`;
  }

  const response = await fetchWithTimeout(url, {
    method,
    headers,
    body: body == null ? undefined : JSON.stringify(body),
  });

  const rawBody = await response.text();
  let payload = {};
  const trimmed = rawBody ? rawBody.trim() : '';
  if (trimmed !== '') {
    try {
      payload = JSON.parse(rawBody);
    } catch (err) {
      const parseError = new Error(
        `Invalid JSON response from ${method} ${url}: ${err.message}`
      );
      parseError.status = response.status;
      parseError.responseText = rawBody.slice(0, 2000);
      throw parseError;
    }
  }

  if (response.ok) {
    return payload;
  }

  // Prefer the server's own error string when it supplied one.
  const fallbackMessage = `Request failed: ${method} ${url} (${response.status})`;
  const error = new Error(payload && payload.error ? payload.error : fallbackMessage);
  error.status = response.status;
  error.payload = payload;
  throw error;
}
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import os from 'node:os';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import { spawn } from 'node:child_process';
|
|
5
|
+
import { hashBuffer, findFirstExistingFile, writeJson } from './helpers.js';
|
|
6
|
+
|
|
7
|
+
/**
 * Quotes one argument for Windows command-line parsing (cmd.exe /c via
 * CreateProcess).
 *
 * Fixes over the original:
 * - drops the pointless `/g` flag on `.test` (a global regex is stateful
 *   via lastIndex and a latent hazard if ever hoisted);
 * - an empty argument is emitted as "" instead of vanishing entirely;
 * - backslash runs before an embedded quote are doubled and the quote
 *   escaped, and a trailing backslash run is doubled so it cannot escape
 *   the closing quote (MSVCRT argv rules).
 *
 * @param {*} arg - Argument value; coerced to string.
 * @returns {string} The argument, quoted if needed.
 */
function quoteWindowsArg(arg) {
  const value = String(arg);
  // Fast path: non-empty and nothing that needs quoting.
  if (value !== '' && !/[ \t"]/.test(value)) {
    return value;
  }
  // Double any run of backslashes that precedes a quote, then escape the quote.
  let escaped = value.replace(/(\\*)"/g, '$1$1\\"');
  // Double a trailing run of backslashes so it can't escape the closing quote.
  escaped = escaped.replace(/(\\+)$/, '$1$1');
  return `"${escaped}"`;
}
|
|
12
|
+
|
|
13
|
+
/**
 * Runs `command` with `args` in `cwd`, capturing stdout and stderr.
 *
 * Resolves with { stdout, stderr } on exit code 0; rejects with an Error
 * containing the combined output otherwise. On Windows the command line is
 * routed through cmd.exe with each argument quoted.
 *
 * @param {string} command
 * @param {string[]} args
 * @param {{ cwd: string }} opts
 * @returns {Promise<{ stdout: string, stderr: string }>}
 */
function runCommand(command, args, { cwd }) {
  const onWindows = process.platform === 'win32';
  const executable = onWindows ? 'cmd.exe' : command;
  const executableArgs = onWindows
    ? ['/d', '/s', '/c', [command, ...args].map(quoteWindowsArg).join(' ')]
    : args;

  return new Promise((resolve, reject) => {
    const child = spawn(executable, executableArgs, {
      cwd,
      shell: false, // quoting is handled above; never hand the string to a shell
      env: { ...process.env, npm_config_loglevel: 'error' },
      stdio: ['ignore', 'pipe', 'pipe'],
    });

    const outBuffers = [];
    const errBuffers = [];
    child.stdout.on('data', (chunk) => outBuffers.push(chunk));
    child.stderr.on('data', (chunk) => errBuffers.push(chunk));
    child.on('error', reject);

    child.on('close', (code) => {
      const stdout = Buffer.concat(outBuffers).toString('utf8');
      const stderr = Buffer.concat(errBuffers).toString('utf8');
      if (code === 0) {
        resolve({ stdout, stderr });
        return;
      }
      const details = [
        `Command failed: ${command} ${args.join(' ')}`,
        stdout ? `stdout:\n${stdout}` : null,
        stderr ? `stderr:\n${stderr}` : null,
      ]
        .filter(Boolean)
        .join('\n');
      reject(new Error(details));
    });
  });
}
|
|
56
|
+
|
|
57
|
+
/**
 * Derives a cache key for a runtime-deps build from the manifest contents
 * (package.json plus lockfile, if present) and the host platform, arch, and
 * Node major version — installed trees are keyed per environment.
 *
 * @param {string} manifestDir - Directory containing package.json.
 * @returns {string} Hex SHA-256 cache key.
 * @throws {Error} When package.json is missing.
 */
function computeRuntimeDepsCacheKey(manifestDir) {
  const packageJsonPath = path.join(manifestDir, 'package.json');
  if (!fs.existsSync(packageJsonPath)) {
    throw new Error(`Runtime manifest package.json not found: ${packageJsonPath}`);
  }

  const lockFilePath = findFirstExistingFile([
    path.join(manifestDir, 'package-lock.json'),
    path.join(manifestDir, 'npm-shrinkwrap.json'),
  ]);

  const nodeMajor = process.versions.node.split('.')[0];
  const parts = [
    `platform:${process.platform}`,
    `arch:${process.arch}`,
    `node-major:${nodeMajor}`,
    fs.readFileSync(packageJsonPath),
    lockFilePath ? fs.readFileSync(lockFilePath) : 'no-lockfile',
  ];

  const asBuffers = parts.map((part) =>
    Buffer.isBuffer(part) ? part : Buffer.from(String(part), 'utf8')
  );
  return hashBuffer(Buffer.concat(asBuffers));
}
|
|
84
|
+
|
|
85
|
+
/**
 * Installs the runtime manifest's production dependencies into a per-key
 * cache directory under <challengeDir>/.cache/runtime-deps and returns the
 * cache directory path. Cached results are reused when the key matches.
 *
 * Fix: the original copied node_modules straight into the final cache
 * directory, so a crash mid-copy left a half-populated entry that later
 * runs would trust. The entry is now built in a staging directory and
 * renamed into place — rename is atomic on the same filesystem, so readers
 * see either a complete entry or none.
 *
 * @param {{ challengeDir: string, manifestDir: string, log: (msg: string) => void }} opts
 * @returns {Promise<string>} The cache directory containing node_modules.
 * @throws {Error} When package.json is missing or the npm install fails.
 */
async function buildRuntimeDepsFromManifest({ challengeDir, manifestDir, log }) {
  const packageJsonPath = path.join(manifestDir, 'package.json');
  if (!fs.existsSync(packageJsonPath)) {
    throw new Error(`Runtime manifest package.json not found: ${packageJsonPath}`);
  }

  const lockFilePath = findFirstExistingFile([
    path.join(manifestDir, 'package-lock.json'),
    path.join(manifestDir, 'npm-shrinkwrap.json'),
  ]);
  const cacheKey = computeRuntimeDepsCacheKey(manifestDir);
  const cacheRoot = path.join(challengeDir, '.cache', 'runtime-deps');
  const cacheDir = path.join(cacheRoot, cacheKey);
  const cachedNodeModules = path.join(cacheDir, 'node_modules');

  if (fs.existsSync(cachedNodeModules)) {
    log(`Using cached runtime deps (key: ${cacheKey.slice(0, 12)}…)`);
    return cacheDir;
  }

  const tempBuildRoot = await fs.promises.mkdtemp(
    path.join(os.tmpdir(), 'challenge-cli-runtime-deps-')
  );

  try {
    // Install in an isolated copy of the manifest so npm cannot touch the
    // author's working tree.
    const tempManifestDir = path.join(tempBuildRoot, 'manifest');
    await fs.promises.mkdir(tempManifestDir, { recursive: true });
    await fs.promises.copyFile(packageJsonPath, path.join(tempManifestDir, 'package.json'));
    if (lockFilePath) {
      await fs.promises.copyFile(
        lockFilePath,
        path.join(tempManifestDir, path.basename(lockFilePath))
      );
    }

    // npm ci requires a lockfile; fall back to npm install otherwise.
    const installArgs = lockFilePath
      ? ['ci', '--omit=dev', '--ignore-scripts']
      : ['install', '--omit=dev', '--ignore-scripts'];

    log(`Building runtime deps (${lockFilePath ? 'npm ci' : 'npm install'})…`);
    await runCommand('npm', installArgs, { cwd: tempManifestDir });

    const builtNodeModules = path.join(tempManifestDir, 'node_modules');
    if (!fs.existsSync(builtNodeModules)) {
      // A manifest with zero prod deps yields no node_modules; cache an empty one.
      await fs.promises.mkdir(builtNodeModules, { recursive: true });
    }

    // Stage the cache entry next to its final location, then rename it into
    // place so a partially-copied entry can never be observed.
    await fs.promises.mkdir(cacheRoot, { recursive: true });
    const stagingDir = path.join(cacheRoot, `.staging-${cacheKey}-${process.pid}`);
    await fs.promises.rm(stagingDir, { recursive: true, force: true });
    await fs.promises.mkdir(stagingDir, { recursive: true });
    fs.cpSync(builtNodeModules, path.join(stagingDir, 'node_modules'), { recursive: true });
    writeJson(path.join(stagingDir, 'build-metadata.json'), {
      cacheKey,
      builtAt: new Date().toISOString(),
      packageJsonSha256: hashBuffer(fs.readFileSync(packageJsonPath)),
      lockFile: lockFilePath ? path.basename(lockFilePath) : null,
      lockFileSha256: lockFilePath ? hashBuffer(fs.readFileSync(lockFilePath)) : null,
      platform: process.platform,
      arch: process.arch,
      nodeVersion: process.versions.node,
    });

    try {
      await fs.promises.rename(stagingDir, cacheDir);
    } catch (err) {
      // A concurrent build may have published the same key first; that copy
      // is equivalent, so discard ours and use it.
      if (!fs.existsSync(cachedNodeModules)) {
        throw err;
      }
      await fs.promises.rm(stagingDir, { recursive: true, force: true });
    }

    return cacheDir;
  } finally {
    await fs.promises.rm(tempBuildRoot, { recursive: true, force: true });
  }
}
|
|
150
|
+
|
|
151
|
+
/**
 * Decides where the runtime_deps artifact content comes from.
 *
 * manual mode: artifacts/runtime_deps must exist and is used as-is.
 * auto mode: builds from artifacts/runtime_manifest/package.json when
 * present, otherwise falls back to a pre-built artifacts/runtime_deps
 * directory; throws when neither source exists.
 *
 * @param {{ challengeDir: string, runtimeMode: string, log?: (msg: string) => void }} opts
 * @returns {Promise<string>} Directory to pack as the runtime_deps artifact.
 * @throws {Error} When the required source directory is missing.
 */
export async function resolveRuntimeDepsSourceDir({ challengeDir, runtimeMode, log = () => { } }) {
  const manualRuntimeDepsDir = path.join(challengeDir, 'artifacts', 'runtime_deps');
  const runtimeManifestDir = path.join(challengeDir, 'artifacts', 'runtime_manifest');
  const runtimeManifestPackageJson = path.join(runtimeManifestDir, 'package.json');

  const hasManualDir = fs.existsSync(manualRuntimeDepsDir);

  if (runtimeMode === 'manual') {
    if (!hasManualDir) {
      throw new Error(`Missing artifact source directory: ${manualRuntimeDepsDir}`);
    }
    return manualRuntimeDepsDir;
  }

  if (fs.existsSync(runtimeManifestPackageJson)) {
    return buildRuntimeDepsFromManifest({ challengeDir, manifestDir: runtimeManifestDir, log });
  }

  if (hasManualDir) {
    return manualRuntimeDepsDir;
  }

  throw new Error(
    [
      `Missing runtime deps sources for challenge at ${challengeDir}.`,
      'Expected either:',
      ` - ${runtimeManifestPackageJson} (auto mode)`,
      ` - ${manualRuntimeDepsDir} (manual mode)`,
    ].join('\n')
  );
}
|