@livingdata/pipex 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +261 -0
- package/dist/cli/index.js +126 -0
- package/dist/cli/pipeline-loader.js +87 -0
- package/dist/cli/pipeline-runner.js +193 -0
- package/dist/cli/reporter.js +78 -0
- package/dist/cli/state.js +89 -0
- package/dist/cli/types.js +1 -0
- package/dist/engine/docker-executor.js +96 -0
- package/dist/engine/docker-runtime.js +65 -0
- package/dist/engine/executor.js +16 -0
- package/dist/engine/index.js +3 -0
- package/dist/engine/runtime.js +2 -0
- package/dist/engine/types.js +1 -0
- package/dist/engine/workspace.js +264 -0
- package/dist/index.js +40 -0
- package/dist/reporter.js +13 -0
- package/package.json +40 -0
package/dist/cli/reporter.js
ADDED

@@ -0,0 +1,78 @@
import pino from 'pino';
import chalk from 'chalk';
import ora from 'ora';
/**
 * Reporter that outputs structured JSON logs via pino.
 * Suitable for CI/CD environments and log aggregation.
 */
export class ConsoleReporter {
    logger = pino({ level: 'info' });
    state(workspaceId, event, stepId, meta) {
        this.logger.info({ workspaceId, event, stepId, ...meta });
    }
    log(workspaceId, stepId, stream, line) {
        this.logger.info({ workspaceId, stepId, stream, line });
    }
    result(workspaceId, stepId, result) {
        this.logger.info({ workspaceId, stepId, result });
    }
}
/**
 * Reporter with interactive terminal UI using spinners and colors.
 * Suitable for local development and manual execution.
 */
export class InteractiveReporter {
    spinner;
    stepSpinners = new Map();
    state(workspaceId, event, stepId, meta) {
        if (event === 'PIPELINE_START') {
            console.log(chalk.bold(`\n▶ Pipeline: ${chalk.cyan(workspaceId)}\n`));
        }
        if (event === 'STEP_STARTING' && stepId) {
            const spinner = ora({ text: stepId, prefixText: ' ' }).start();
            this.stepSpinners.set(stepId, spinner);
        }
        if (event === 'STEP_SKIPPED' && stepId) {
            const spinner = this.stepSpinners.get(stepId);
            if (spinner) {
                spinner.stopAndPersist({ symbol: chalk.gray('⊙'), text: chalk.gray(`${stepId} (cached)`) });
                this.stepSpinners.delete(stepId);
            }
            else {
                // Step was skipped before spinner was created
                console.log(` ${chalk.gray('⊙')} ${chalk.gray(`${stepId} (cached)`)}`);
            }
        }
        if (event === 'STEP_FINISHED' && stepId) {
            const spinner = this.stepSpinners.get(stepId);
            if (spinner) {
                spinner.stopAndPersist({ symbol: chalk.green('✓'), text: chalk.green(stepId) });
                this.stepSpinners.delete(stepId);
            }
        }
        if (event === 'STEP_FAILED' && stepId) {
            const spinner = this.stepSpinners.get(stepId);
            const exitCode = meta?.exitCode;
            if (spinner) {
                const exitInfo = exitCode === undefined ? '' : ` (exit ${exitCode})`;
                spinner.stopAndPersist({
                    symbol: chalk.red('✗'),
                    text: chalk.red(`${stepId}${exitInfo}`)
                });
                this.stepSpinners.delete(stepId);
            }
        }
        if (event === 'PIPELINE_FINISHED') {
            console.log(chalk.bold.green('\n✓ Pipeline completed\n'));
        }
        if (event === 'PIPELINE_FAILED') {
            console.log(chalk.bold.red('\n✗ Pipeline failed\n'));
        }
    }
    log(_workspaceId, _stepId, _stream, _line) {
        // Suppress logs in interactive mode
    }
    result(_workspaceId, _stepId, _result) {
        // Results shown via state updates
    }
}
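The two reporters share one interface, so a caller can swap them freely. A minimal selection sketch, not part of the package; the deep import path and the TTY heuristic are assumptions for illustration:

```js
import process from 'node:process';
// Illustrative deep import; only the engine layer is re-exported from the package root.
import { ConsoleReporter, InteractiveReporter } from '@livingdata/pipex/dist/cli/reporter.js';

// Spinners only make sense on a real terminal; fall back to JSON logs in CI.
const reporter = process.stdout.isTTY ? new InteractiveReporter() : new ConsoleReporter();

reporter.state('my-workspace', 'PIPELINE_START');
reporter.state('my-workspace', 'STEP_STARTING', 'build');
reporter.state('my-workspace', 'STEP_FINISHED', 'build');
reporter.state('my-workspace', 'PIPELINE_FINISHED');
```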
package/dist/cli/state.js
ADDED

@@ -0,0 +1,89 @@
import { readFile, writeFile } from 'node:fs/promises';
import { join } from 'node:path';
import { createHash } from 'node:crypto';
/**
 * Manages caching state for pipeline execution.
 *
 * The StateManager computes fingerprints for steps and tracks which
 * artifact was produced by each step. This enables cache hits when
 * a step's configuration hasn't changed.
 *
 * ## Fingerprint Algorithm
 *
 * A step fingerprint is computed as:
 * ```
 * SHA256(image + JSON(cmd) + JSON(sorted env) + JSON(sorted inputArtifactIds) + JSON(sorted mounts))
 * ```
 *
 * A step is re-executed when:
 * - The fingerprint changes (image, cmd, env, inputs, or mounts modified)
 * - The artifact no longer exists (manually deleted)
 *
 * ## Cache Propagation
 *
 * Changes propagate through dependencies. If step A is modified,
 * all steps depending on A are invalidated automatically (via inputArtifactIds).
 */
export class StateManager {
    static fingerprint(config) {
        const hash = createHash('sha256');
        hash.update(config.image);
        hash.update(JSON.stringify(config.cmd));
        if (config.env) {
            hash.update(JSON.stringify(Object.entries(config.env).sort((a, b) => a[0].localeCompare(b[0]))));
        }
        if (config.inputArtifactIds) {
            hash.update(JSON.stringify([...config.inputArtifactIds].sort((a, b) => a.localeCompare(b))));
        }
        if (config.mounts && config.mounts.length > 0) {
            const sorted = [...config.mounts].sort((a, b) => a.containerPath.localeCompare(b.containerPath));
            hash.update(JSON.stringify(sorted));
        }
        return hash.digest('hex');
    }
    state = { steps: {} };
    path;
    /**
     * Creates a state manager for the given workspace.
     * @param workspaceRoot - Absolute path to workspace directory
     */
    constructor(workspaceRoot) {
        this.path = join(workspaceRoot, 'state.json');
    }
    /**
     * Loads cached state from state.json.
     * If the file doesn't exist, initializes with empty state.
     */
    async load() {
        try {
            const content = await readFile(this.path, 'utf8');
            Object.assign(this.state, JSON.parse(content));
        }
        catch {
            this.state.steps = {};
        }
    }
    /**
     * Persists current state to state.json.
     */
    async save() {
        await writeFile(this.path, JSON.stringify(this.state, null, 2), 'utf8');
    }
    /**
     * Retrieves cached state for a step.
     * @param stepId - Step identifier
     * @returns Cached state if available, undefined otherwise
     */
    getStep(stepId) {
        return this.state.steps[stepId];
    }
    /**
     * Updates cached state for a step.
     * @param stepId - Step identifier
     * @param artifactId - Artifact produced by the step
     * @param fingerprint - Step configuration fingerprint
     */
    setStep(stepId, artifactId, fingerprint) {
        this.state.steps[stepId] = { artifactId, fingerprint };
    }
}
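Given the documented re-execution rules, a runner can decide a cache hit from two checks: the stored fingerprint still matches, and the recorded artifact still exists on disk. A sketch under those assumptions; `isCacheHit` is a hypothetical helper, not part of the package:

```js
// Illustrative deep import; the CLI layer is not re-exported from the package root.
import { StateManager } from '@livingdata/pipex/dist/cli/state.js';

// Hypothetical helper mirroring the rules documented above:
// re-run when the fingerprint changed or the artifact was deleted.
async function isCacheHit(stateManager, stepConfig, stepId, workspace) {
    const cached = stateManager.getStep(stepId);
    if (!cached) return false; // step never ran
    if (cached.fingerprint !== StateManager.fingerprint(stepConfig)) return false; // config changed
    const artifacts = await workspace.listArtifacts();
    return artifacts.includes(cached.artifactId); // artifact must still be on disk
}
```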
package/dist/cli/types.js
ADDED

@@ -0,0 +1 @@
export {};
package/dist/engine/docker-executor.js
ADDED

@@ -0,0 +1,96 @@
import process from 'node:process';
import { execa } from 'execa';
import { createInterface } from 'node:readline';
import { ContainerExecutor } from './executor.js';
/**
 * Build a minimal environment for the Docker CLI process.
 * Only PATH, HOME, and DOCKER_* are kept — everything else is stripped
 * so that host secrets (API keys, tokens, credentials) never leak,
 * even if a `-e KEY` (without value) were accidentally added.
 */
function dockerCliEnv() {
    const env = {};
    for (const [key, value] of Object.entries(process.env)) {
        if (value !== undefined && (key === 'PATH' || key === 'HOME' || key.startsWith('DOCKER_'))) {
            env[key] = value;
        }
    }
    return env;
}
export class DockerCliExecutor extends ContainerExecutor {
    env = dockerCliEnv();
    async check() {
        try {
            await execa('docker', ['--version'], { env: this.env });
        }
        catch {
            throw new Error('Docker CLI not found. Please install Docker.');
        }
    }
    async run(workspace, request, onLogLine) {
        const startedAt = new Date();
        const args = ['run', '--name', request.name, '--network', request.network];
        if (request.env) {
            for (const [key, value] of Object.entries(request.env)) {
                args.push('-e', `${key}=${value}`);
            }
        }
        // Mount inputs (committed artifacts, read-only)
        for (const input of request.inputs) {
            const hostPath = workspace.artifactPath(input.artifactId);
            args.push('-v', `${hostPath}:${input.containerPath}:ro`);
        }
        // Mount caches (persistent, read-write)
        if (request.caches) {
            for (const cache of request.caches) {
                const hostPath = workspace.cachePath(cache.name);
                args.push('-v', `${hostPath}:${cache.containerPath}:rw`);
            }
        }
        // Mount host bind mounts (always read-only)
        if (request.mounts) {
            for (const mount of request.mounts) {
                args.push('-v', `${mount.hostPath}:${mount.containerPath}:ro`);
            }
        }
        // Mount output (staging artifact, read-write)
        const outputHostPath = workspace.stagingPath(request.output.stagingArtifactId);
        args.push('-v', `${outputHostPath}:${request.output.containerPath}:rw`, request.image, ...request.cmd);
        let exitCode = 0;
        let error;
        try {
            const proc = execa('docker', args, {
                env: this.env,
                reject: false,
                timeout: request.timeoutSec ? request.timeoutSec * 1000 : undefined
            });
            if (proc.stdout) {
                const rl = createInterface({ input: proc.stdout });
                rl.on('line', line => {
                    onLogLine({ stream: 'stdout', line });
                });
            }
            if (proc.stderr) {
                const rl = createInterface({ input: proc.stderr });
                rl.on('line', line => {
                    onLogLine({ stream: 'stderr', line });
                });
            }
            const result = await proc;
            exitCode = result.exitCode ?? 0;
        }
        catch (error_) {
            exitCode = 1;
            error = error_ instanceof Error ? error_.message : String(error_);
        }
        finally {
            try {
                await execa('docker', ['rm', '-f', request.name], { env: this.env, reject: false });
            }
            catch {
                // Best effort cleanup
            }
        }
        return { exitCode, startedAt, finishedAt: new Date(), error };
    }
}
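End to end, `run()` turns a request into a single `docker run` invocation. A usage sketch modeled on the package's own index.js example; the workspace path, step name, and assembled command shown in the comment are illustrative:

```js
import { Workspace, DockerCliExecutor } from '@livingdata/pipex';

const ws = await Workspace.create('/tmp/workdir'); // ID auto-generated
const executor = new DockerCliExecutor();
await executor.check();

await ws.prepareCache('pnpm-store');
const outputId = ws.generateArtifactId();
await ws.prepareArtifact(outputId);

// For this request, run() assembles roughly:
//   docker run --name step-build --network none \
//     -v /tmp/workdir/<wsId>/caches/pnpm-store:/root/.local/share/pnpm/store:rw \
//     -v /tmp/workdir/<wsId>/staging/<outputId>:/output:rw \
//     node:20-alpine pnpm build
const result = await executor.run(ws, {
    name: 'step-build',
    image: 'node:20-alpine',
    cmd: ['pnpm', 'build'],
    inputs: [],
    output: { stagingArtifactId: outputId, containerPath: '/output' },
    caches: [{ name: 'pnpm-store', containerPath: '/root/.local/share/pnpm/store' }],
    network: 'none'
}, log => console.log(`[${log.stream}] ${log.line}`));

// Commit on success, discard on failure, per the workspace lifecycle.
await (result.exitCode === 0 ? ws.commitArtifact(outputId) : ws.discardArtifact(outputId));
```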
package/dist/engine/docker-runtime.js
ADDED

@@ -0,0 +1,65 @@
import { execa } from 'execa';
import { createInterface } from 'node:readline';
import { ContainerRuntime } from './runtime.js';
export class DockerCliRuntime extends ContainerRuntime {
    async check() {
        try {
            await execa('docker', ['--version']);
        }
        catch {
            throw new Error('Docker CLI not found. Please install Docker.');
        }
    }
    async run(workspace, request, onLogLine) {
        const startedAt = new Date();
        const args = ['run', '--name', request.name, '--network', request.network];
        if (request.env) {
            for (const [key, value] of Object.entries(request.env)) {
                args.push('-e', `${key}=${value}`);
            }
        }
        // Mount inputs (committed artifacts, read-only)
        for (const input of request.inputs) {
            const hostPath = workspace.artifactPath(input.artifactId);
            args.push('-v', `${hostPath}:${input.containerPath}:ro`);
        }
        // Mount output (staging artifact, read-write)
        const outputHostPath = workspace.stagingPath(request.output.stagingArtifactId);
        args.push('-v', `${outputHostPath}:${request.output.containerPath}:rw`, request.image, ...request.cmd);
        let exitCode = 0;
        let error;
        try {
            const proc = execa('docker', args, {
                reject: false,
                timeout: request.timeoutSec ? request.timeoutSec * 1000 : undefined
            });
            if (proc.stdout) {
                const rl = createInterface({ input: proc.stdout });
                rl.on('line', line => {
                    onLogLine({ stream: 'stdout', line });
                });
            }
            if (proc.stderr) {
                const rl = createInterface({ input: proc.stderr });
                rl.on('line', line => {
                    onLogLine({ stream: 'stderr', line });
                });
            }
            const result = await proc;
            exitCode = result.exitCode ?? 0;
        }
        catch (error_) {
            exitCode = 1;
            error = error_ instanceof Error ? error_.message : String(error_);
        }
        finally {
            try {
                await execa('docker', ['rm', '-f', request.name], { reject: false });
            }
            catch {
                // Best effort cleanup
            }
        }
        return { exitCode, startedAt, finishedAt: new Date(), error };
    }
}
package/dist/engine/executor.js
ADDED

@@ -0,0 +1,16 @@
/**
 * Abstract interface for executing containers.
 *
 * Implementations:
 * - `DockerCliExecutor`: Uses Docker CLI
 * - Future: PodmanExecutor, KubernetesExecutor, etc.
 *
 * The executor is responsible for:
 * - Running containers with specified configuration
 * - Mounting input artifacts as read-only volumes
 * - Mounting output staging directory as read-write volume
 * - Streaming logs in real-time
 * - Cleaning up containers after execution
 */
export class ContainerExecutor {
}
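The base class is an extension point: any backend that honors the responsibilities listed above can slot in. A hypothetical skeleton; `PodmanCliExecutor` is not in the package, and the assumption here is that Podman's CLI is largely argument-compatible with Docker's for the flags this package uses:

```js
import { ContainerExecutor } from '@livingdata/pipex'; // re-exported from the package root

// Hypothetical alternative backend, sketched against the contract above.
class PodmanCliExecutor extends ContainerExecutor {
    async check() {
        // Probe for the CLI (e.g. spawn `podman --version`) and throw if missing.
    }

    async run(workspace, request, onLogLine) {
        // 1. Assemble `podman run` args the way DockerCliExecutor does
        //    (name, network, -e env, :ro input mounts, :rw output mount).
        // 2. Stream stdout/stderr lines to onLogLine({ stream, line }).
        // 3. Always remove the container, then return
        //    { exitCode, startedAt, finishedAt, error }.
        throw new Error('not implemented');
    }
}
```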
package/dist/engine/types.js
ADDED

@@ -0,0 +1 @@
export {};
package/dist/engine/workspace.js
ADDED

@@ -0,0 +1,264 @@
import { access, mkdir, readdir, rename, rm } from 'node:fs/promises';
import { randomUUID } from 'node:crypto';
import { join } from 'node:path';
/**
 * Isolated execution environment for container runs.
 *
 * A workspace provides:
 * - **staging/**: Temporary write location during execution
 * - **artifacts/**: Committed outputs (immutable, read-only)
 * - **caches/**: Persistent read-write caches (shared across steps)
 * - **state.json**: Managed by orchestration layer (e.g., CLI) for caching
 *
 * ## Artifact Lifecycle
 *
 * 1. `prepareArtifact()` creates `staging/{artifactId}/`
 * 2. Container writes to `staging/{artifactId}/` (mounted as `/output`)
 * 3. Success: `commitArtifact()` atomically moves to `artifacts/{artifactId}/`
 *    OR Failure: `discardArtifact()` deletes `staging/{artifactId}/`
 *
 * Artifacts are immutable once committed.
 *
 * ## Cache Lifecycle
 *
 * 1. `prepareCache()` creates `caches/{name}/` if it doesn't exist
 * 2. Container writes/reads from `caches/{name}/` during execution
 * 3. Cache persists across executions (never deleted automatically)
 *
 * Caches are mutable and shared between steps.
 *
 * @example
 * ```typescript
 * const ws = await Workspace.create('/tmp/workdir', 'my-workspace')
 * const artifactId = ws.generateArtifactId()
 * await ws.prepareArtifact(artifactId)
 * await ws.prepareCache('pnpm-store')
 * // ... container execution ...
 * await ws.commitArtifact(artifactId) // On success
 * // OR await ws.discardArtifact(artifactId) // On failure
 * ```
 */
export class Workspace {
    id;
    root;
    /**
     * Generates a unique workspace identifier.
     * @returns Workspace ID in format: `{timestamp}-{uuid-prefix}`
     */
    static generateWorkspaceId() {
        return Workspace.generateId();
    }
    /**
     * Creates a new workspace with staging, artifacts, and caches directories.
     * @param workdirRoot - Root directory for all workspaces
     * @param id - Optional workspace ID (auto-generated if omitted)
     * @returns Newly created workspace
     */
    static async create(workdirRoot, id) {
        const workspaceId = id ?? Workspace.generateWorkspaceId();
        const root = join(workdirRoot, workspaceId);
        await mkdir(join(root, 'staging'), { recursive: true });
        await mkdir(join(root, 'artifacts'), { recursive: true });
        await mkdir(join(root, 'caches'), { recursive: true });
        return new Workspace(workspaceId, root);
    }
    /**
     * Opens an existing workspace.
     * @param workdirRoot - Root directory for all workspaces
     * @param id - Workspace ID
     * @returns Existing workspace
     * @throws If workspace does not exist
     */
    static async open(workdirRoot, id) {
        const root = join(workdirRoot, id);
        await access(root);
        return new Workspace(id, root);
    }
    /**
     * Lists all workspace IDs under the given root directory.
     * @param workdirRoot - Root directory for all workspaces
     * @returns Sorted array of workspace names (directories)
     */
    static async list(workdirRoot) {
        try {
            const entries = await readdir(workdirRoot, { withFileTypes: true });
            return entries.filter(e => e.isDirectory()).map(e => e.name).sort();
        }
        catch {
            return [];
        }
    }
    /**
     * Removes a workspace directory.
     * @param workdirRoot - Root directory for all workspaces
     * @param id - Workspace ID to remove
     * @throws If the workspace ID is invalid
     */
    static async remove(workdirRoot, id) {
        if (id.includes('..') || id.includes('/')) {
            throw new Error(`Invalid workspace ID: ${id}`);
        }
        await rm(join(workdirRoot, id), { recursive: true, force: true });
    }
    /**
     * Generates a unique identifier using timestamp and UUID.
     * @returns Unique ID in format: `{timestamp}-{uuid-prefix}`
     * @internal
     */
    static generateId() {
        return `${Date.now()}-${randomUUID().slice(0, 8)}`;
    }
    constructor(id, root) {
        this.id = id;
        this.root = root;
    }
    /**
     * Generates a unique artifact identifier.
     * @returns Artifact ID in format: `{timestamp}-{uuid-prefix}`
     */
    generateArtifactId() {
        return Workspace.generateId();
    }
    /**
     * Returns the staging directory path for an artifact.
     * Staging is used for temporary writes during execution.
     * @param artifactId - Artifact identifier
     * @returns Absolute path to staging directory
     * @throws If artifact ID is invalid
     */
    stagingPath(artifactId) {
        this.validateArtifactId(artifactId);
        return join(this.root, 'staging', artifactId);
    }
    /**
     * Returns the committed artifact directory path.
     * Artifacts are immutable once committed.
     * @param artifactId - Artifact identifier
     * @returns Absolute path to artifact directory
     * @throws If artifact ID is invalid
     */
    artifactPath(artifactId) {
        this.validateArtifactId(artifactId);
        return join(this.root, 'artifacts', artifactId);
    }
    /**
     * Prepares a staging directory for a new artifact.
     * @param artifactId - Artifact identifier
     * @returns Absolute path to the created staging directory
     */
    async prepareArtifact(artifactId) {
        const path = this.stagingPath(artifactId);
        await mkdir(path, { recursive: true });
        return path;
    }
    /**
     * Commits a staging artifact to the artifacts directory.
     * Uses atomic rename operation for consistency.
     * @param artifactId - Artifact identifier
     */
    async commitArtifact(artifactId) {
        await rename(this.stagingPath(artifactId), this.artifactPath(artifactId));
    }
    /**
     * Discards a staging artifact (on execution failure).
     * @param artifactId - Artifact identifier
     */
    async discardArtifact(artifactId) {
        await rm(this.stagingPath(artifactId), { recursive: true, force: true });
    }
    /**
     * Removes all staging directories.
     * Should be called on workspace initialization to clean up incomplete artifacts.
     */
    async cleanupStaging() {
        const stagingDir = join(this.root, 'staging');
        try {
            const entries = await readdir(stagingDir, { withFileTypes: true });
            for (const entry of entries) {
                if (entry.isDirectory()) {
                    await rm(join(stagingDir, entry.name), { recursive: true, force: true });
                }
            }
        }
        catch {
            // Staging directory doesn't exist yet
        }
    }
    /**
     * Lists all committed artifact IDs in this workspace.
     * @returns Array of artifact IDs (directory names in artifacts/)
     */
    async listArtifacts() {
        try {
            const entries = await readdir(join(this.root, 'artifacts'), { withFileTypes: true });
            return entries.filter(e => e.isDirectory()).map(e => e.name);
        }
        catch {
            return [];
        }
    }
    /**
     * Returns the cache directory path.
     * Caches are persistent read-write directories shared across steps.
     * @param cacheName - Cache identifier (e.g., "pnpm-store")
     * @returns Absolute path to cache directory
     * @throws If cache name is invalid
     */
    cachePath(cacheName) {
        this.validateCacheName(cacheName);
        return join(this.root, 'caches', cacheName);
    }
    /**
     * Prepares a cache directory.
     * Creates the directory if it doesn't exist.
     * @param cacheName - Cache identifier
     * @returns Absolute path to the cache directory
     */
    async prepareCache(cacheName) {
        const path = this.cachePath(cacheName);
        await mkdir(path, { recursive: true });
        return path;
    }
    /**
     * Lists all cache directories in the workspace.
     * @returns Array of cache names
     */
    async listCaches() {
        try {
            const entries = await readdir(join(this.root, 'caches'), { withFileTypes: true });
            return entries.filter(e => e.isDirectory()).map(e => e.name);
        }
        catch {
            return [];
        }
    }
    /**
     * Validates an artifact ID to prevent path traversal attacks.
     * @param id - Artifact identifier to validate
     * @throws If the artifact ID contains invalid characters or path traversal attempts
     * @internal
     */
    validateArtifactId(id) {
        if (!/^[\w-]+$/.test(id)) {
            throw new Error(`Invalid artifact ID: ${id}. Must contain only alphanumeric characters, dashes, and underscores.`);
        }
        if (id.includes('..')) {
            throw new Error(`Invalid artifact ID: ${id}. Path traversal is not allowed.`);
        }
    }
    /**
     * Validates a cache name to prevent path traversal attacks.
     * Same rules as artifact IDs: alphanumeric, dashes, underscores only.
     * @param name - Cache name to validate
     * @throws If the cache name contains invalid characters or path traversal attempts
     * @internal
     */
    validateCacheName(name) {
        if (!/^[\w-]+$/.test(name)) {
            throw new Error(`Invalid cache name: ${name}. Must contain only alphanumeric characters, dashes, and underscores.`);
        }
        if (name.includes('..')) {
            throw new Error(`Invalid cache name: ${name}. Path traversal is not allowed.`);
        }
    }
}
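Because commits are atomic renames, a crashed run can never leave a partial directory in artifacts/; incomplete outputs stay in staging/. A recovery sketch on startup, assuming the workspace already exists:

```js
import { Workspace } from '@livingdata/pipex';

const ws = await Workspace.open('/tmp/workdir', 'my-workspace'); // throws if absent
await ws.cleanupStaging(); // drop half-written outputs from interrupted runs
console.log(await ws.listArtifacts()); // only fully committed artifacts remain
console.log(await ws.listCaches()); // caches persist across runs
```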
package/dist/index.js
ADDED

@@ -0,0 +1,40 @@
/**
 * Engine layer exports for programmatic use.
 *
 * The engine provides low-level primitives for container execution
 * and artifact management, decoupled from pipeline orchestration.
 *
 * For CLI usage, see src/cli/index.ts
 *
 * @example
 * ```typescript
 * import {Workspace, DockerCliExecutor} from '@livingdata/pipex'
 *
 * const ws = await Workspace.create('/tmp/workdir', 'my-workspace')
 * const executor = new DockerCliExecutor()
 * await executor.check()
 *
 * // Prepare cache
 * await ws.prepareCache('pnpm-store')
 *
 * const artifactId = ws.generateArtifactId()
 * await ws.prepareArtifact(artifactId)
 *
 * await executor.run(ws, {
 *   name: 'my-container',
 *   image: 'node:20-alpine',
 *   cmd: ['pnpm', 'install'],
 *   inputs: [],
 *   output: {stagingArtifactId: artifactId, containerPath: '/output'},
 *   caches: [{
 *     name: 'pnpm-store',
 *     containerPath: '/root/.local/share/pnpm/store'
 *   }],
 *   network: 'none'
 * }, (log) => console.log(log))
 *
 * await ws.commitArtifact(artifactId)
 * ```
 */
// Export engine layer for programmatic use
export { Workspace, ContainerExecutor, DockerCliExecutor } from './engine/index.js';
package/dist/reporter.js
ADDED

@@ -0,0 +1,13 @@
import pino from 'pino';
export class ConsoleReporter {
    logger = pino({ level: 'info' });
    state(workspaceId, event, stepId, meta) {
        this.logger.info({ workspaceId, event, stepId, ...meta });
    }
    log(workspaceId, stepId, stream, line) {
        this.logger.info({ workspaceId, stepId, stream, line });
    }
    result(workspaceId, stepId, result) {
        this.logger.info({ workspaceId, stepId, result });
    }
}