@livingdata/pipex 0.0.8 → 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/README.md +186 -16
  2. package/dist/__tests__/errors.js +162 -0
  3. package/dist/__tests__/helpers.js +41 -0
  4. package/dist/__tests__/types.js +8 -0
  5. package/dist/cli/__tests__/condition.js +23 -0
  6. package/dist/cli/__tests__/dag.js +154 -0
  7. package/dist/cli/__tests__/pipeline-loader.js +267 -0
  8. package/dist/cli/__tests__/pipeline-runner.js +257 -0
  9. package/dist/cli/__tests__/state-persistence.js +80 -0
  10. package/dist/cli/__tests__/state.js +58 -0
  11. package/dist/cli/__tests__/step-runner.js +116 -0
  12. package/dist/cli/commands/bundle.js +35 -0
  13. package/dist/cli/commands/cat.js +54 -0
  14. package/dist/cli/commands/clean.js +22 -0
  15. package/dist/cli/commands/exec.js +89 -0
  16. package/dist/cli/commands/export.js +32 -0
  17. package/dist/cli/commands/inspect.js +58 -0
  18. package/dist/cli/commands/list.js +39 -0
  19. package/dist/cli/commands/logs.js +54 -0
  20. package/dist/cli/commands/prune.js +26 -0
  21. package/dist/cli/commands/rm-step.js +41 -0
  22. package/dist/cli/commands/rm.js +27 -0
  23. package/dist/cli/commands/run-bundle.js +59 -0
  24. package/dist/cli/commands/run.js +44 -0
  25. package/dist/cli/commands/show.js +108 -0
  26. package/dist/cli/condition.js +11 -0
  27. package/dist/cli/dag.js +143 -0
  28. package/dist/cli/index.js +24 -105
  29. package/dist/cli/interactive-reporter.js +227 -0
  30. package/dist/cli/pipeline-loader.js +10 -110
  31. package/dist/cli/pipeline-runner.js +256 -111
  32. package/dist/cli/reporter.js +2 -107
  33. package/dist/cli/state.js +30 -9
  34. package/dist/cli/step-loader.js +25 -0
  35. package/dist/cli/step-resolver.js +111 -0
  36. package/dist/cli/step-runner.js +226 -0
  37. package/dist/cli/utils.js +3 -0
  38. package/dist/core/__tests__/bundle.js +663 -0
  39. package/dist/core/__tests__/condition.js +23 -0
  40. package/dist/core/__tests__/dag.js +154 -0
  41. package/dist/core/__tests__/env-file.test.js +41 -0
  42. package/dist/core/__tests__/event-aggregator.js +244 -0
  43. package/dist/core/__tests__/pipeline-loader.js +267 -0
  44. package/dist/core/__tests__/pipeline-runner.js +257 -0
  45. package/dist/core/__tests__/state-persistence.js +80 -0
  46. package/dist/core/__tests__/state.js +58 -0
  47. package/dist/core/__tests__/step-runner.js +118 -0
  48. package/dist/core/__tests__/stream-reporter.js +142 -0
  49. package/dist/core/__tests__/transport.js +50 -0
  50. package/dist/core/__tests__/utils.js +40 -0
  51. package/dist/core/bundle.js +130 -0
  52. package/dist/core/condition.js +11 -0
  53. package/dist/core/dag.js +143 -0
  54. package/dist/core/env-file.js +6 -0
  55. package/dist/core/event-aggregator.js +114 -0
  56. package/dist/core/index.js +14 -0
  57. package/dist/core/pipeline-loader.js +81 -0
  58. package/dist/core/pipeline-runner.js +360 -0
  59. package/dist/core/reporter.js +11 -0
  60. package/dist/core/state.js +110 -0
  61. package/dist/core/step-loader.js +25 -0
  62. package/dist/core/step-resolver.js +117 -0
  63. package/dist/core/step-runner.js +225 -0
  64. package/dist/core/stream-reporter.js +41 -0
  65. package/dist/core/transport.js +9 -0
  66. package/dist/core/utils.js +56 -0
  67. package/dist/engine/__tests__/workspace.js +288 -0
  68. package/dist/engine/docker-executor.js +32 -6
  69. package/dist/engine/index.js +1 -0
  70. package/dist/engine/workspace.js +164 -66
  71. package/dist/errors.js +122 -0
  72. package/dist/index.js +3 -0
  73. package/dist/kits/__tests__/index.js +23 -0
  74. package/dist/kits/builtin/__tests__/node.js +74 -0
  75. package/dist/kits/builtin/__tests__/python.js +67 -0
  76. package/dist/kits/builtin/__tests__/shell.js +74 -0
  77. package/dist/kits/builtin/node.js +10 -5
  78. package/dist/kits/builtin/python.js +10 -5
  79. package/dist/kits/builtin/shell.js +2 -1
  80. package/dist/kits/index.js +2 -1
  81. package/package.json +6 -3
  82. package/dist/cli/types.js +0 -3
  83. package/dist/engine/docker-runtime.js +0 -65
  84. package/dist/engine/runtime.js +0 -2
  85. package/dist/kits/bash.js +0 -19
  86. package/dist/kits/builtin/bash.js +0 -19
  87. package/dist/kits/node.js +0 -56
  88. package/dist/kits/python.js +0 -51
  89. package/dist/kits/types.js +0 -1
  90. package/dist/reporter.js +0 -13
@@ -0,0 +1,118 @@
1
// Integration tests for StepRunner: artifact commit, meta.json, log capture,
// caching, force re-run, ephemeral runs, failure, and allowFailure.
// Every test is defined via `dockerTest`, which degrades to `test.skip`
// when no Docker daemon is available, so the suite is safe on CI hosts
// without Docker.
import { randomUUID } from 'node:crypto';
import { readFile } from 'node:fs/promises';
import { join } from 'node:path';
import test from 'ava';
import { Workspace } from '../../engine/workspace.js';
import { DockerCliExecutor } from '../../engine/docker-executor.js';
import { ContainerCrashError } from '../../errors.js';
import { StateManager } from '../state.js';
import { StepRunner } from '../step-runner.js';
import { createTmpDir, isDockerAvailable, noopReporter, recordingReporter } from '../../__tests__/helpers.js';
const hasDocker = isDockerAvailable();
const dockerTest = hasDocker ? test : test.skip;
// -- helpers -----------------------------------------------------------------
// Minimal valid step definition; individual tests override id/cmd as needed.
function makeStep(overrides) {
    return {
        image: 'alpine:3.20',
        cmd: ['sh', '-c', 'echo hello'],
        ...overrides
    };
}
// Fresh workspace + loaded state + job descriptor per test (no shared state
// between tests, so they can run concurrently under ava).
async function setupWorkspace() {
    const tmpDir = await createTmpDir();
    const workspace = await Workspace.create(tmpDir, 'test-ws');
    const state = new StateManager(workspace.root);
    await state.load();
    const job = { workspaceId: workspace.id, jobId: randomUUID() };
    return { workspace, state, tmpDir, job };
}
// -- minimal execution -------------------------------------------------------
dockerTest('minimal step writes artifact and returns exitCode 0', async (t) => {
    const { workspace, state, job } = await setupWorkspace();
    const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
    const step = makeStep({ id: 'greet', cmd: ['sh', '-c', 'echo hello > /output/greeting.txt'] });
    const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', job });
    t.is(result.exitCode, 0);
    t.truthy(result.runId);
    // Artifact exists in committed run
    const content = await readFile(join(workspace.runArtifactsPath(result.runId), 'greeting.txt'), 'utf8');
    t.is(content.trim(), 'hello');
});
dockerTest('meta.json exists with correct fields', async (t) => {
    const { workspace, state, job } = await setupWorkspace();
    const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
    const step = makeStep({ id: 'meta-test', cmd: ['sh', '-c', 'echo ok > /output/out.txt'] });
    const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', job });
    const metaPath = join(workspace.runPath(result.runId), 'meta.json');
    const meta = JSON.parse(await readFile(metaPath, 'utf8'));
    t.is(meta.runId, result.runId);
    t.is(meta.exitCode, 0);
    t.is(meta.image, 'alpine:3.20');
    t.deepEqual(meta.cmd, ['sh', '-c', 'echo ok > /output/out.txt']);
});
// -- log capture -------------------------------------------------------------
dockerTest('stdout and stderr are captured to log files', async (t) => {
    const { workspace, state, job } = await setupWorkspace();
    const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
    const step = makeStep({ id: 'logs', cmd: ['sh', '-c', 'echo out-line && echo err-line >&2'] });
    const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', job });
    const stdout = await readFile(join(workspace.runPath(result.runId), 'stdout.log'), 'utf8');
    const stderr = await readFile(join(workspace.runPath(result.runId), 'stderr.log'), 'utf8');
    t.true(stdout.includes('out-line'));
    t.true(stderr.includes('err-line'));
});
// -- cache hit ---------------------------------------------------------------
dockerTest('second run of same step is cached (STEP_SKIPPED)', async (t) => {
    const { workspace, state, job } = await setupWorkspace();
    const { reporter, events } = recordingReporter();
    const runner = new StepRunner(new DockerCliExecutor(), reporter);
    const step = makeStep({ id: 'cached', cmd: ['sh', '-c', 'echo hi > /output/x.txt'] });
    const first = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', job });
    const second = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', job });
    t.is(first.runId, second.runId);
    const skipped = events.find(e => e.event === 'STEP_SKIPPED');
    t.truthy(skipped);
    t.is(skipped.reason, 'cached');
});
// -- force -------------------------------------------------------------------
dockerTest('force: true produces new runId', async (t) => {
    const { workspace, state, job } = await setupWorkspace();
    const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
    const step = makeStep({ id: 'force-test', cmd: ['sh', '-c', 'echo data > /output/f.txt'] });
    const first = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', job });
    const second = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', force: true, job });
    t.not(first.runId, second.runId);
    t.is(second.exitCode, 0);
});
// -- ephemeral ---------------------------------------------------------------
dockerTest('ephemeral: true returns exitCode but no runId', async (t) => {
    const { workspace, state, job } = await setupWorkspace();
    const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
    const step = makeStep({ id: 'ephemeral', cmd: ['sh', '-c', 'echo temp > /output/t.txt'] });
    const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', ephemeral: true, job });
    t.is(result.exitCode, 0);
    t.is(result.runId, undefined);
    // No run committed
    const runs = await workspace.listRuns();
    t.is(runs.length, 0);
});
// -- failure -----------------------------------------------------------------
dockerTest('non-zero exit throws ContainerCrashError', async (t) => {
    const { workspace, state, job } = await setupWorkspace();
    const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
    const step = makeStep({ id: 'fail', cmd: ['sh', '-c', 'exit 1'] });
    const error = await t.throwsAsync(async () => runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', job }));
    t.true(error instanceof ContainerCrashError);
});
// -- allowFailure ------------------------------------------------------------
dockerTest('allowFailure: true commits run with non-zero exitCode', async (t) => {
    const { workspace, state, job } = await setupWorkspace();
    const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
    const step = makeStep({ id: 'allow-fail', cmd: ['sh', '-c', 'exit 1'], allowFailure: true });
    const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', job });
    t.truthy(result.runId);
    t.is(result.exitCode, 1);
    // Run was committed
    const runs = await workspace.listRuns();
    t.true(runs.includes(result.runId));
});
@@ -0,0 +1,142 @@
1
// Unit tests for StreamReporter (wraps a transport, assigns seq/timestamp,
// filters out STEP_LOG events) and CompositeReporter (fan-out to N reporters).
// Pure in-memory — no Docker or filesystem dependencies.
import test from 'ava';
import { StreamReporter, CompositeReporter } from '../stream-reporter.js';
import { InMemoryTransport } from '../transport.js';
const jobId = 'test-job-1';
const workspaceId = 'ws-1';
test('publishes non-log events to transport', t => {
    const transport = new InMemoryTransport();
    const reporter = new StreamReporter(transport);
    reporter.emit({ event: 'PIPELINE_START', workspaceId, jobId, pipelineName: 'test', steps: [] });
    t.is(transport.messages.length, 1);
    t.is(transport.messages[0].event.event, 'PIPELINE_START');
    t.is(transport.messages[0].type, 'PIPELINE_START');
    t.is(transport.messages[0].version, 1);
});
test('ignores STEP_LOG events', t => {
    const transport = new InMemoryTransport();
    const reporter = new StreamReporter(transport);
    reporter.emit({
        event: 'STEP_LOG',
        workspaceId,
        jobId,
        step: { id: 's1', displayName: 's1' },
        stream: 'stdout',
        line: 'hello'
    });
    t.is(transport.messages.length, 0);
});
test('sequence numbers are monotonically increasing', t => {
    const transport = new InMemoryTransport();
    const reporter = new StreamReporter(transport);
    reporter.emit({ event: 'PIPELINE_START', workspaceId, jobId, pipelineName: 'test', steps: [] });
    reporter.emit({ event: 'STEP_STARTING', workspaceId, jobId, step: { id: 's1', displayName: 's1' } });
    reporter.emit({ event: 'PIPELINE_FINISHED', workspaceId, jobId, totalArtifactSize: 0 });
    t.is(transport.messages[0].seq, 0);
    t.is(transport.messages[1].seq, 1);
    t.is(transport.messages[2].seq, 2);
});
test('timestamp is set on each message', t => {
    const transport = new InMemoryTransport();
    const reporter = new StreamReporter(transport);
    reporter.emit({ event: 'PIPELINE_START', workspaceId, jobId, pipelineName: 'test', steps: [] });
    t.truthy(transport.messages[0].timestamp);
    // ISO-8601 prefix check only — exact time is nondeterministic.
    t.regex(transport.messages[0].timestamp, /^\d{4}-\d{2}-\d{2}T/);
});
test('flush calls transport.flush', async (t) => {
    let flushed = false;
    // Hand-rolled transport stub: records publishes and flags flush.
    const transport = {
        messages: [],
        async publish(message) {
            this.messages.push(message);
        },
        async flush() {
            flushed = true;
        }
    };
    const reporter = new StreamReporter(transport);
    await reporter.flush();
    t.true(flushed);
});
test('CompositeReporter delegates to all reporters', t => {
    const events1 = [];
    const events2 = [];
    const r1 = {
        emit(e) {
            events1.push(e);
        }
    };
    const r2 = {
        emit(e) {
            events2.push(e);
        }
    };
    const composite = new CompositeReporter(r1, r2);
    composite.emit({ event: 'PIPELINE_START', workspaceId, jobId, pipelineName: 'test', steps: [] });
    t.is(events1.length, 1);
    t.is(events2.length, 1);
});
test('CompositeReporter with no reporters does not throw', t => {
    const composite = new CompositeReporter();
    t.notThrows(() => {
        composite.emit({ event: 'PIPELINE_START', workspaceId, jobId, pipelineName: 'test', steps: [] });
    });
});
test('STEP_LOG interleaved with other events does not affect sequence', t => {
    const transport = new InMemoryTransport();
    const reporter = new StreamReporter(transport);
    reporter.emit({ event: 'PIPELINE_START', workspaceId, jobId, pipelineName: 'test', steps: [] });
    reporter.emit({ event: 'STEP_LOG', workspaceId, jobId, step: { id: 's1', displayName: 's1' }, stream: 'stdout', line: 'ignored' });
    reporter.emit({ event: 'STEP_LOG', workspaceId, jobId, step: { id: 's1', displayName: 's1' }, stream: 'stderr', line: 'also ignored' });
    reporter.emit({ event: 'STEP_STARTING', workspaceId, jobId, step: { id: 's1', displayName: 's1' } });
    t.is(transport.messages.length, 2);
    t.is(transport.messages[0].seq, 0);
    t.is(transport.messages[1].seq, 1);
});
test('event data is preserved in transport message', t => {
    const transport = new InMemoryTransport();
    const reporter = new StreamReporter(transport);
    const step = { id: 's1', displayName: 'Step One' };
    reporter.emit({ event: 'STEP_FINISHED', workspaceId, jobId, step, runId: 'run-42', durationMs: 1234, artifactSize: 5678 });
    const published = transport.messages[0];
    t.is(published.type, 'STEP_FINISHED');
    const ev = published.event;
    t.is(ev.event, 'STEP_FINISHED');
    if (ev.event === 'STEP_FINISHED') {
        t.is(ev.runId, 'run-42');
        t.is(ev.durationMs, 1234);
        t.is(ev.artifactSize, 5678);
        t.deepEqual(ev.step, step);
    }
});
test('flush succeeds when transport has no flush method', async (t) => {
    const transport = new InMemoryTransport();
    const reporter = new StreamReporter(transport);
    await t.notThrowsAsync(async () => reporter.flush());
});
test('all non-log event types pass through StreamReporter', t => {
    const transport = new InMemoryTransport();
    const reporter = new StreamReporter(transport);
    const step = { id: 's1', displayName: 's1' };
    reporter.emit({ event: 'PIPELINE_START', workspaceId, jobId, pipelineName: 'test', steps: [step] });
    reporter.emit({ event: 'STEP_STARTING', workspaceId, jobId, step });
    reporter.emit({ event: 'STEP_SKIPPED', workspaceId, jobId, step, reason: 'cached' });
    reporter.emit({ event: 'STEP_FINISHED', workspaceId, jobId, step });
    reporter.emit({ event: 'STEP_FAILED', workspaceId, jobId, step, exitCode: 1 });
    reporter.emit({ event: 'STEP_RETRYING', workspaceId, jobId, step, attempt: 1, maxRetries: 3 });
    reporter.emit({ event: 'STEP_WOULD_RUN', workspaceId, jobId, step });
    reporter.emit({ event: 'PIPELINE_FINISHED', workspaceId, jobId, totalArtifactSize: 0 });
    reporter.emit({ event: 'PIPELINE_FAILED', workspaceId, jobId });
    t.is(transport.messages.length, 9);
    const types = transport.messages.map(m => m.type);
    t.deepEqual(types, [
        'PIPELINE_START',
        'STEP_STARTING',
        'STEP_SKIPPED',
        'STEP_FINISHED',
        'STEP_FAILED',
        'STEP_RETRYING',
        'STEP_WOULD_RUN',
        'PIPELINE_FINISHED',
        'PIPELINE_FAILED'
    ]);
});
@@ -0,0 +1,50 @@
1
// Unit tests for InMemoryTransport: ordered publish, clear, and message
// preservation. Pure in-memory; no external dependencies.
import test from 'ava';
import { InMemoryTransport } from '../transport.js';
// Build a well-formed transport message; `overrides` can replace any field.
function makeMessage(seq, overrides) {
    const event = {
        event: 'PIPELINE_START',
        workspaceId: 'ws-1',
        jobId: 'job-1',
        pipelineName: 'test',
        steps: []
    };
    return {
        seq,
        timestamp: new Date().toISOString(),
        version: 1,
        type: 'PIPELINE_START',
        event,
        ...overrides
    };
}
test('publish stores messages in order', async (t) => {
    const transport = new InMemoryTransport();
    await transport.publish(makeMessage(0));
    await transport.publish(makeMessage(1));
    t.is(transport.messages.length, 2);
    t.is(transport.messages[0].seq, 0);
    t.is(transport.messages[1].seq, 1);
});
test('clear removes all messages', async (t) => {
    const transport = new InMemoryTransport();
    await transport.publish(makeMessage(0));
    await transport.publish(makeMessage(1));
    transport.clear();
    t.is(transport.messages.length, 0);
});
test('publish after clear accumulates fresh', async (t) => {
    const transport = new InMemoryTransport();
    await transport.publish(makeMessage(0));
    transport.clear();
    await transport.publish(makeMessage(5));
    t.is(transport.messages.length, 1);
    t.is(transport.messages[0].seq, 5);
});
test('message content is preserved through publish', async (t) => {
    const transport = new InMemoryTransport();
    const message = makeMessage(0);
    await transport.publish(message);
    t.deepEqual(transport.messages[0], message);
    t.is(transport.messages[0].event.event, 'PIPELINE_START');
    t.is(transport.messages[0].version, 1);
});
@@ -0,0 +1,40 @@
1
// Unit tests for resolveHostPath: host paths are resolved relative to the
// pipeline root but must never escape the process working directory
// (ValidationError otherwise). Tests derive expectations from process.cwd()
// so they are location-independent.
import process from 'node:process';
import { join } from 'node:path';
import test from 'ava';
import { ValidationError } from '../../errors.js';
import { resolveHostPath } from '../utils.js';
test('resolveHostPath: resolves path within cwd', t => {
    const cwd = process.cwd();
    const pipelineRoot = join(cwd, 'pipelines');
    const result = resolveHostPath(pipelineRoot, '../scripts');
    t.is(result, join(cwd, 'scripts'));
});
test('resolveHostPath: resolves sibling directory with ..', t => {
    const cwd = process.cwd();
    const pipelineRoot = join(cwd, 'a', 'b');
    const result = resolveHostPath(pipelineRoot, '../c');
    t.is(result, join(cwd, 'a', 'c'));
});
test('resolveHostPath: allows path equal to cwd', t => {
    const cwd = process.cwd();
    const pipelineRoot = join(cwd, 'sub');
    const result = resolveHostPath(pipelineRoot, '..');
    t.is(result, cwd);
});
test('resolveHostPath: simple relative path without ..', t => {
    const cwd = process.cwd();
    const pipelineRoot = join(cwd, 'pipelines');
    const result = resolveHostPath(pipelineRoot, 'data');
    t.is(result, join(cwd, 'pipelines', 'data'));
});
test('resolveHostPath: throws when path escapes cwd', t => {
    const cwd = process.cwd();
    const error = t.throws(() => resolveHostPath(cwd, '../outside'), { instanceOf: ValidationError });
    t.truthy(error?.message.includes('outside the working directory'));
});
test('resolveHostPath: throws when deeply escaping cwd', t => {
    const cwd = process.cwd();
    const pipelineRoot = join(cwd, 'a');
    const error = t.throws(() => resolveHostPath(pipelineRoot, '../../outside'), { instanceOf: ValidationError });
    t.truthy(error?.message.includes('outside the working directory'));
});
@@ -0,0 +1,130 @@
1
+ import { Buffer } from 'node:buffer';
2
+ import { readFile, mkdir, writeFile, symlink, rm, stat } from 'node:fs/promises';
3
+ import { tmpdir } from 'node:os';
4
+ import { dirname, join, normalize, resolve, sep } from 'node:path';
5
+ import { pipeline } from 'node:stream/promises';
6
+ import { Readable } from 'node:stream';
7
+ import { buffer as streamToBuffer } from 'node:stream/consumers';
8
+ import ignore from 'ignore';
9
+ import * as tar from 'tar';
10
+ import { BundleError } from '../errors.js';
11
+ import { PipelineLoader } from './pipeline-loader.js';
12
// Hard cap on the compressed archive size; buildBundle throws BundleError
// when the result exceeds this.
const MAX_BUNDLE_SIZE = 50 * 1024 * 1024; // 50 MB
// Entries always excluded from bundles, merged with the pipeline root's
// .gitignore by buildIgnoreFilter.
const DEFAULT_IGNORES = [
    '.git',
    'node_modules',
    '__pycache__',
    '.DS_Store',
    '*.pyc',
    '.env'
];
// Schema version written into the bundle's manifest.json.
const MANIFEST_VERSION = 1;
22
// Normalize a path and strip a single trailing separator so equivalent
// mount/source specs ("a/b" vs "a/b/") dedupe to the same entry.
function normalizePath(p) {
    const normalized = normalize(p);
    if (normalized.endsWith(sep)) {
        return normalized.slice(0, -1);
    }
    return normalized;
}
26
/**
 * Gather every host path referenced by the pipeline's step mounts and
 * sources, normalized, deduped, and sorted for deterministic output.
 */
export function collectDependencies(pipeline) {
    const hostPaths = new Set();
    for (const step of pipeline.steps) {
        for (const mount of step.mounts ?? []) {
            hostPaths.add(normalizePath(mount.host));
        }
        for (const source of step.sources ?? []) {
            hostPaths.add(normalizePath(source.host));
        }
    }
    return [...hostPaths].sort();
}
42
/**
 * Build a predicate deciding whether a bundle-relative path should be
 * excluded from the archive. Combines DEFAULT_IGNORES with the pipeline
 * root's .gitignore when present.
 *
 * @param {string} pipelineRoot Directory containing the pipeline file.
 * @returns {Promise<(path: string) => boolean>} true when the path must be skipped.
 */
export async function buildIgnoreFilter(pipelineRoot) {
    const ig = ignore().add(DEFAULT_IGNORES);
    try {
        const gitignore = await readFile(resolve(pipelineRoot, '.gitignore'), 'utf8');
        ig.add(gitignore);
    }
    // Best-effort: a missing/unreadable .gitignore simply contributes nothing.
    catch { }
    return (path) => {
        // The archive root ('') must never be ignored or nothing gets packed.
        if (path === '') {
            return false;
        }
        // Test both variants to handle directory-only patterns (e.g. "dist/")
        // which only match when the path has a trailing slash.
        return ig.ignores(path) || ig.ignores(path + '/');
    };
}
58
/**
 * Build a gzipped tar bundle containing the pipeline manifest plus every
 * host dependency (mounts/sources) it references.
 *
 * Archive layout:
 *   manifest.json  — { version: MANIFEST_VERSION, pipeline }
 *   <dep>/...      — each dependency, relative to the pipeline root
 *
 * @param {string} pipelineFilePath Path to the pipeline definition file.
 * @returns {Promise<Buffer>} The gzipped tar archive.
 * @throws BundleError when a dependency is missing or the archive exceeds
 *         MAX_BUNDLE_SIZE; loader/IO errors propagate unchanged.
 */
export async function buildBundle(pipelineFilePath) {
    const absolutePath = resolve(pipelineFilePath);
    const pipelineRoot = dirname(absolutePath);
    const loader = new PipelineLoader();
    const pipeline = await loader.load(absolutePath);
    const deps = collectDependencies(pipeline);
    // Verify all dependencies exist
    for (const dep of deps) {
        const depPath = resolve(pipelineRoot, dep);
        try {
            await stat(depPath);
        }
        catch {
            throw new BundleError(`Dependency not found: ${dep}`);
        }
    }
    const shouldIgnore = await buildIgnoreFilter(pipelineRoot);
    const manifest = { version: MANIFEST_VERSION, pipeline };
    const manifestJson = JSON.stringify(manifest, null, 2) + '\n';
    // Use a staging directory with symlinks to combine manifest + deps in a single tar
    // NOTE(review): Date.now + Math.random is best-effort uniqueness; an
    // fs.mkdtemp-based temp dir would be collision-proof — confirm before changing.
    const stagingDir = join(tmpdir(), `pipex-bundle-${Date.now()}-${Math.random().toString(36).slice(2)}`);
    await mkdir(stagingDir, { recursive: true });
    try {
        // Write manifest
        await writeFile(join(stagingDir, 'manifest.json'), manifestJson);
        // Symlink dependencies into staging
        for (const dep of deps) {
            const target = join(stagingDir, dep);
            await mkdir(dirname(target), { recursive: true });
            await symlink(resolve(pipelineRoot, dep), target);
        }
        // Create tar archive
        const entries = ['manifest.json', ...deps];
        const stream = tar.create({
            cwd: stagingDir,
            gzip: true,
            // follow: archive the symlink targets' contents, not the links.
            follow: true,
            filter(path) {
                return !shouldIgnore(path);
            }
        }, entries);
        const archiveBuffer = await streamToBuffer(stream);
        if (archiveBuffer.length > MAX_BUNDLE_SIZE) {
            throw new BundleError(`Bundle size (${archiveBuffer.length} bytes) exceeds maximum of ${MAX_BUNDLE_SIZE} bytes`);
        }
        // NOTE(review): streamToBuffer already yields a Buffer; Buffer.from
        // makes a defensive copy here — verify the copy is intentional.
        return Buffer.from(archiveBuffer);
    }
    finally {
        // Always remove staging, even when archiving failed.
        await rm(stagingDir, { recursive: true, force: true });
    }
}
109
/**
 * Extract a bundle archive into targetDir and return the pipeline
 * definition stored in its manifest.json.
 *
 * @param archive Gzipped tar bytes (Buffer or iterable of chunks).
 * @param {string} targetDir Destination directory (created if absent).
 * @throws BundleError when manifest.json is missing, not valid JSON, or
 *         lacks a `pipeline` entry.
 */
export async function extractBundle(archive, targetDir) {
    await mkdir(targetDir, { recursive: true });
    await pipeline(Readable.from(archive), tar.extract({ cwd: targetDir }));
    const manifestPath = resolve(targetDir, 'manifest.json');
    let rawManifest;
    try {
        rawManifest = await readFile(manifestPath, 'utf8');
    }
    catch {
        throw new BundleError('Invalid bundle: manifest.json not found');
    }
    let manifest;
    try {
        manifest = JSON.parse(rawManifest);
    }
    catch {
        throw new BundleError('Invalid bundle: manifest.json is not valid JSON');
    }
    if (!manifest.pipeline) {
        throw new BundleError('Invalid bundle: manifest.json missing pipeline');
    }
    return manifest.pipeline;
}
@@ -0,0 +1,11 @@
1
+ import jexlModule from 'jexl';
2
// Use a dedicated Jexl instance rather than the module's shared default export.
const jexl = new jexlModule.Jexl();
/**
 * Evaluate a Jexl `expression` against `context` and coerce the result to a
 * boolean. Any evaluation error is treated as `false` rather than propagated.
 * NOTE(review): this fail-closed behavior also silently masks typos in
 * condition expressions — confirm callers rely on it before changing.
 *
 * @param {string} expression Jexl expression.
 * @param {object} context Values visible to the expression.
 * @returns {Promise<boolean>}
 */
export async function evaluateCondition(expression, context) {
    try {
        const result = await jexl.eval(expression, context);
        return Boolean(result);
    }
    catch {
        return false;
    }
}
@@ -0,0 +1,143 @@
1
+ import { CyclicDependencyError, ValidationError } from '../errors.js';
2
/**
 * Build a dependency graph from resolved steps.
 * Maps each step id to the set of step ids its inputs reference.
 */
export function buildGraph(steps) {
    const graph = new Map();
    for (const step of steps) {
        const dependencies = new Set((step.inputs ?? []).map((input) => input.step));
        graph.set(step.id, dependencies);
    }
    return graph;
}
16
/**
 * Validate graph: check for missing refs (non-optional) and cycles.
 *
 * @param {Map<string, Set<string>>} graph From buildGraph.
 * @param steps The resolved steps (used to honor `optional` inputs).
 * @throws ValidationError when a non-optional reference is unknown.
 * @throws CyclicDependencyError when the graph contains a cycle.
 */
export function validateGraph(graph, steps) {
    validateReferences(graph, steps);
    detectCycles(graph);
}
21
/**
 * Throw ValidationError when a step references an unknown dependency,
 * unless that input was declared optional.
 */
function validateReferences(graph, steps) {
    // "stepId:depId" pairs that are allowed to be absent from the graph.
    const optionalPairs = new Set();
    for (const step of steps) {
        for (const input of step.inputs ?? []) {
            if (input.optional) {
                optionalPairs.add(`${step.id}:${input.step}`);
            }
        }
    }
    for (const [stepId, deps] of graph) {
        for (const dep of deps) {
            if (graph.has(dep)) {
                continue;
            }
            if (!optionalPairs.has(`${stepId}:${dep}`)) {
                throw new ValidationError(`Step '${stepId}' references unknown step '${dep}'`);
            }
        }
    }
}
40
/**
 * Throw CyclicDependencyError when the graph contains a dependency cycle.
 *
 * Kahn's algorithm. The previous implementation rescanned the entire graph
 * for every dequeued node (O(V*E)); here a reverse-adjacency (dependents)
 * map is built once so the walk is O(V+E). In-degree counts only
 * dependencies that exist in the graph, so missing optional inputs never
 * block the walk (matching computeInDegree's semantics).
 */
function detectCycles(graph) {
    const inDeg = new Map();
    const dependents = new Map();
    for (const id of graph.keys()) {
        inDeg.set(id, 0);
        dependents.set(id, []);
    }
    for (const [id, deps] of graph) {
        for (const dep of deps) {
            if (graph.has(dep)) {
                inDeg.set(id, inDeg.get(id) + 1);
                dependents.get(dep).push(id);
            }
        }
    }
    const queue = [];
    for (const [id, deg] of inDeg) {
        if (deg === 0) {
            queue.push(id);
        }
    }
    let processed = 0;
    while (queue.length > 0) {
        const current = queue.shift();
        processed++;
        for (const dependent of dependents.get(current)) {
            const remainingDeg = inDeg.get(dependent) - 1;
            inDeg.set(dependent, remainingDeg);
            if (remainingDeg === 0) {
                queue.push(dependent);
            }
        }
    }
    // Any node never reaching in-degree 0 is part of (or behind) a cycle.
    if (processed < graph.size) {
        throw new CyclicDependencyError('Pipeline contains a dependency cycle');
    }
}
66
/** Compute in-degree for each node (number of existing deps). */
function computeInDegree(graph) {
    const inDeg = new Map();
    for (const [id, deps] of graph) {
        // Only count dependencies that are actual nodes of the graph.
        const existing = [...deps].filter((dep) => graph.has(dep));
        inDeg.set(id, existing.length);
    }
    return inDeg;
}
80
/**
 * Return steps grouped by topological level (parallelizable groups): every
 * id in a level depends only on ids from earlier levels.
 *
 * The previous implementation rescanned the whole graph for each completed
 * node; this builds a reverse-adjacency (dependents) map once, making each
 * pass O(V+E). Behavior is unchanged: in-degree counts only dependencies
 * present in the graph, level membership/order follows graph key order, and
 * if a cycle survives validation the loop stops early, omitting the cyclic
 * nodes.
 */
export function topologicalLevels(graph) {
    const inDeg = new Map();
    const dependents = new Map();
    for (const id of graph.keys()) {
        inDeg.set(id, 0);
        dependents.set(id, []);
    }
    for (const [id, deps] of graph) {
        for (const dep of deps) {
            if (graph.has(dep)) {
                inDeg.set(id, inDeg.get(id) + 1);
                dependents.get(dep).push(id);
            }
        }
    }
    const levels = [];
    const remaining = new Set(graph.keys());
    while (remaining.size > 0) {
        const level = [...remaining].filter((id) => inDeg.get(id) === 0);
        if (level.length === 0) {
            break; // Cycle — should not happen after validateGraph
        }
        levels.push(level);
        for (const id of level) {
            remaining.delete(id);
            for (const dependent of dependents.get(id)) {
                if (remaining.has(dependent)) {
                    inDeg.set(dependent, inDeg.get(dependent) - 1);
                }
            }
        }
    }
    return levels;
}
107
/** BFS backward from targets to collect all ancestors + targets. */
export function subgraph(graph, targets) {
    const reached = new Set();
    const frontier = [...targets];
    while (frontier.length > 0) {
        const id = frontier.shift();
        if (reached.has(id)) {
            continue;
        }
        reached.add(id);
        for (const dep of graph.get(id) ?? []) {
            if (!reached.has(dep)) {
                frontier.push(dep);
            }
        }
    }
    return reached;
}
128
/** Return steps that no other step depends on (leaf/terminal nodes). */
export function leafNodes(graph) {
    // Every id that appears as someone's dependency is not a leaf.
    const referenced = new Set();
    for (const deps of graph.values()) {
        for (const dep of deps) {
            referenced.add(dep);
        }
    }
    return [...graph.keys()].filter((id) => !referenced.has(id));
}
@@ -0,0 +1,6 @@
1
+ import { readFile } from 'node:fs/promises';
2
+ import { parse } from 'dotenv';
3
/**
 * Read a dotenv-format file and return its key/value pairs as a plain object.
 *
 * @param {string} filePath Path to the .env file.
 * @returns {Promise<Record<string, string>>} Parsed variables.
 * @throws Propagates fs errors (e.g. ENOENT) when the file cannot be read.
 */
export async function loadEnvFile(filePath) {
    const content = await readFile(filePath, 'utf8');
    return parse(content);
}