@ucdjs/pipelines-executor 0.0.1-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +27 -0
- package/dist/index.d.mts +87 -0
- package/dist/index.mjs +869 -0
- package/package.json +55 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025-PRESENT Lucas Nørgård
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

package/README.md
ADDED
@@ -0,0 +1,27 @@
+# @ucdjs/pipelines-executor
+
+[![npm version][npm-version-src]][npm-version-href]
+[![npm downloads][npm-downloads-src]][npm-downloads-href]
+[![codecov][codecov-src]][codecov-href]
+
+> [!IMPORTANT]
+> This is an internal package. It may change without warning and is not subject to semantic versioning. Use at your own risk.
+
+A collection of core pipeline functionalities for the UCD project.
+
+## Installation
+
+```bash
+npm install @ucdjs/pipelines-executor
+```
+
+## 📄 License
+
+Published under [MIT License](./LICENSE).
+
+[npm-version-src]: https://img.shields.io/npm/v/@ucdjs/pipelines-executor?style=flat&colorA=18181B&colorB=4169E1
+[npm-version-href]: https://npmjs.com/package/@ucdjs/pipelines-executor
+[npm-downloads-src]: https://img.shields.io/npm/dm/@ucdjs/pipelines-executor?style=flat&colorA=18181B&colorB=4169E1
+[npm-downloads-href]: https://npmjs.com/package/@ucdjs/pipelines-executor
+[codecov-src]: https://img.shields.io/codecov/c/gh/ucdjs/ucd?style=flat&colorA=18181B&colorB=4169E1
+[codecov-href]: https://codecov.io/gh/ucdjs/ucd

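The README stops at installation. Based on the type declarations shipped in `dist/index.d.mts` (shown below), a minimal usage sketch could look like the following; the pipeline definitions themselves come from `@ucdjs/pipelines-core`, and the version string is only a placeholder:

```ts
import type { AnyPipelineDefinition } from "@ucdjs/pipelines-core";
import { createMemoryCacheStore, createPipelineExecutor } from "@ucdjs/pipelines-executor";

// Assumed to be defined elsewhere with @ucdjs/pipelines-core.
declare const pipelines: AnyPipelineDefinition[];

const executor = createPipelineExecutor({
  cacheStore: createMemoryCacheStore(),
  onEvent: (event) => {
    // Lifecycle events (pipeline:start, parse:end, cache:hit, ...) flow through here.
    console.log(event.type);
  },
});

// "16.0.0" is a made-up version; cache defaults to true.
const results = await executor.run(pipelines, { versions: ["16.0.0"], cache: true });
for (const result of results) {
  console.log(result.id, result.status, result.summary.totalOutputs);
}
```
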
package/dist/index.d.mts
ADDED
@@ -0,0 +1,87 @@
+import { AnyPipelineDefinition, PipelineError, PipelineEvent, PipelineGraph } from "@ucdjs/pipelines-core";
+import { PipelineArtifactDefinition } from "@ucdjs/pipelines-artifacts";
+
+//#region src/cache.d.ts
+interface CacheKey {
+routeId: string;
+version: string;
+inputHash: string;
+artifactHashes: Record<string, string>;
+}
+interface CacheEntry<TOutput = unknown> {
+key: CacheKey;
+output: TOutput[];
+producedArtifacts: Record<string, unknown>;
+createdAt: string;
+meta?: Record<string, unknown>;
+}
+declare function serializeCacheKey(key: CacheKey): string;
+interface CacheStore {
+get: (key: CacheKey) => Promise<CacheEntry | undefined>;
+set: (entry: CacheEntry) => Promise<void>;
+has: (key: CacheKey) => Promise<boolean>;
+delete: (key: CacheKey) => Promise<boolean>;
+clear: () => Promise<void>;
+stats?: () => Promise<CacheStats>;
+}
+interface CacheStats {
+entries: number;
+sizeBytes?: number;
+hits?: number;
+misses?: number;
+}
+interface CacheOptions {
+enabled?: boolean;
+hashFn?: (content: string) => string;
+}
+declare function createMemoryCacheStore(): CacheStore;
+declare function defaultHashFn(content: string): string;
+declare function hashArtifact(value: unknown): string;
+//#endregion
+//#region src/types.d.ts
+interface PipelineSummary {
+versions: string[];
+totalFiles: number;
+matchedFiles: number;
+skippedFiles: number;
+fallbackFiles: number;
+totalOutputs: number;
+durationMs: number;
+}
+type ExecutionStatus = "running" | "completed" | "failed";
+interface PipelineExecutionResult {
+id: string;
+data: unknown[];
+graph: PipelineGraph;
+errors: PipelineError[];
+summary: PipelineSummary;
+status: ExecutionStatus;
+}
+interface PipelineExecutorOptions {
+artifacts?: PipelineArtifactDefinition[];
+cacheStore?: CacheStore;
+onEvent?: (event: PipelineEvent) => void | Promise<void>;
+}
+interface PipelineExecutorRunOptions {
+cache?: boolean;
+versions?: string[];
+}
+interface PipelineExecutor {
+run: (pipelines: AnyPipelineDefinition[], options?: PipelineExecutorRunOptions) => Promise<PipelineExecutionResult[]>;
+}
+//#endregion
+//#region src/executor.d.ts
+declare function createPipelineExecutor(options: PipelineExecutorOptions): PipelineExecutor;
+//#endregion
+//#region src/log-context.d.ts
+interface PipelineExecutionContext {
+executionId: string;
+spanId?: string;
+event?: PipelineEvent;
+}
+declare function getPipelineExecutionContext(): PipelineExecutionContext | undefined;
+declare function runWithPipelineExecutionContext<T>(context: PipelineExecutionContext, fn: () => T | Promise<T>): T | Promise<T>;
+declare function withPipelineSpan<T>(spanId: string, fn: () => T | Promise<T>): T | Promise<T>;
+declare function withPipelineEvent<T>(event: PipelineEvent, fn: () => T | Promise<T>): T | Promise<T>;
+//#endregion
+export { type CacheEntry, type CacheKey, type CacheOptions, type CacheStats, type CacheStore, type ExecutionStatus, type PipelineExecutionResult, type PipelineExecutor, type PipelineExecutorOptions, type PipelineExecutorRunOptions, type PipelineSummary, createMemoryCacheStore, createPipelineExecutor, defaultHashFn, getPipelineExecutionContext, hashArtifact, runWithPipelineExecutionContext, serializeCacheKey, withPipelineEvent, withPipelineSpan };

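The `CacheStore` contract above is what `createMemoryCacheStore` implements; any other backend only needs the same five methods plus the optional `stats`. A minimal sketch of a custom store, again keyed by `serializeCacheKey` (the Map stands in for whatever persistent storage would actually be used):

```ts
import type { CacheEntry, CacheKey, CacheStore } from "@ucdjs/pipelines-executor";
import { serializeCacheKey } from "@ucdjs/pipelines-executor";

// Sketch only: a Map stands in for a real persistent backend (filesystem, KV store, ...).
export function createMapBackedCacheStore(): CacheStore {
  const entries = new Map<string, CacheEntry>();
  return {
    async get(key: CacheKey) {
      return entries.get(serializeCacheKey(key));
    },
    async set(entry: CacheEntry) {
      entries.set(serializeCacheKey(entry.key), entry);
    },
    async has(key: CacheKey) {
      return entries.has(serializeCacheKey(key));
    },
    async delete(key: CacheKey) {
      return entries.delete(serializeCacheKey(key));
    },
    async clear() {
      entries.clear();
    },
    // stats is optional on the interface; entry count is the only guaranteed field.
    async stats() {
      return { entries: entries.size };
    },
  };
}
```

Passed as `cacheStore` to `createPipelineExecutor`, such a store participates in the same `cache:hit` / `cache:miss` / `cache:store` events as the built-in memory store.
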
package/dist/index.mjs
ADDED
@@ -0,0 +1,869 @@
+import { AsyncLocalStorage } from "node:async_hooks";
+import { applyTransforms, getExecutionLayers, resolveMultipleSourceFiles } from "@ucdjs/pipelines-core";
+import { PipelineGraphBuilder } from "@ucdjs/pipelines-graph";
+import { isGlobalArtifact } from "@ucdjs/pipelines-artifacts";
+
+//#region src/cache.ts
+function serializeCacheKey(key) {
+const artifactHashStr = Object.entries(key.artifactHashes).sort(([a], [b]) => a.localeCompare(b)).map(([id, hash]) => `${id}:${hash}`).join(",");
+return `${key.routeId}|${key.version}|${key.inputHash}|${artifactHashStr}`;
+}
+function createMemoryCacheStore() {
+const cache = /* @__PURE__ */ new Map();
+let hits = 0;
+let misses = 0;
+return {
+async get(key) {
+const serialized = serializeCacheKey(key);
+const entry = cache.get(serialized);
+if (entry) hits++;
+else misses++;
+return entry;
+},
+async set(entry) {
+const serialized = serializeCacheKey(entry.key);
+cache.set(serialized, entry);
+},
+async has(key) {
+const serialized = serializeCacheKey(key);
+return cache.has(serialized);
+},
+async delete(key) {
+const serialized = serializeCacheKey(key);
+return cache.delete(serialized);
+},
+async clear() {
+cache.clear();
+hits = 0;
+misses = 0;
+},
+async stats() {
+return {
+entries: cache.size,
+hits,
+misses
+};
+}
+};
+}
+function defaultHashFn(content) {
+let hash = 5381;
+for (let i = 0; i < content.length; i++) hash = (hash << 5) + hash ^ content.charCodeAt(i);
+return (hash >>> 0).toString(16).padStart(8, "0");
+}
+function hashArtifact(value) {
+if (value === null || value === void 0) return "null";
+if (typeof value === "string") return defaultHashFn(value);
+if (value instanceof Map) return defaultHashFn(Array.from(value.entries()).sort(([a], [b]) => String(a).localeCompare(String(b))).map(([k, v]) => `${String(k)}=${String(v)}`).join(";"));
+if (value instanceof Set) return defaultHashFn(Array.from(value).map(String).sort().join(";"));
+if (Array.isArray(value)) return defaultHashFn(JSON.stringify(value));
+if (typeof value === "object") return defaultHashFn(JSON.stringify(value));
+return defaultHashFn(String(value));
+}
+
+//#endregion
+//#region src/log-context.ts
+const storage = new AsyncLocalStorage();
+function getPipelineExecutionContext() {
+return storage.getStore();
+}
+function runWithPipelineExecutionContext(context, fn) {
+return storage.run(context, fn);
+}
+function withPipelineSpan(spanId, fn) {
+const current = storage.getStore();
+if (!current) return fn();
+return storage.run({
+...current,
+spanId
+}, fn);
+}
+function withPipelineEvent(event, fn) {
+const current = storage.getStore();
+if (!current) return fn();
+return storage.run({
+...current,
+spanId: event.spanId,
+event
+}, fn);
+}
+
+//#endregion
+//#region src/executor/events.ts
+function createEventEmitter(options) {
+const { onEvent } = options;
+let eventCounter = 0;
+const nextEventId = () => `evt_${Date.now()}_${++eventCounter}`;
+let spanCounter = 0;
+const nextSpanId = () => `span_${Date.now()}_${++spanCounter}`;
+const emit = async (event) => {
+const fullEvent = {
+...event,
+id: event.id ?? nextEventId(),
+spanId: event.spanId ?? nextSpanId()
+};
+await withPipelineEvent(fullEvent, async () => {
+await onEvent?.(fullEvent);
+});
+};
+return {
+emit,
+nextEventId,
+nextSpanId
+};
+}
+async function emitWithSpan(spanId, fn) {
+await withPipelineSpan(spanId, fn);
+}
+
+//#endregion
+//#region src/executor/cache-helpers.ts
+async function tryLoadCachedResult(options) {
+const { cacheStore, routeId, version, fileContent, artifactsMap } = options;
+const partialKey = {
+routeId,
+version,
+inputHash: defaultHashFn(fileContent),
+artifactHashes: {}
+};
+const cachedEntry = await cacheStore.get(partialKey);
+if (!cachedEntry) return {
+result: null,
+hit: false
+};
+const currentArtifactHashes = {};
+for (const id of Object.keys(cachedEntry.key.artifactHashes)) if (id in artifactsMap) currentArtifactHashes[id] = hashArtifact(artifactsMap[id]);
+if (!Object.keys(cachedEntry.key.artifactHashes).every((id) => currentArtifactHashes[id] === cachedEntry.key.artifactHashes[id])) return {
+result: null,
+hit: false
+};
+return {
+result: {
+outputs: cachedEntry.output,
+emittedArtifacts: cachedEntry.producedArtifacts,
+consumedArtifactIds: Object.keys(cachedEntry.key.artifactHashes)
+},
+hit: true
+};
+}
+async function buildCacheKey(routeId, version, fileContent, artifactsMap, consumedArtifactIds) {
+const artifactHashes = {};
+for (const id of consumedArtifactIds) if (id in artifactsMap) artifactHashes[id] = hashArtifact(artifactsMap[id]);
+return {
+routeId,
+version,
+inputHash: defaultHashFn(fileContent),
+artifactHashes
+};
+}
+async function storeCacheEntry(options) {
+const { cacheStore, cacheKey, outputs, emittedArtifacts } = options;
+const cacheEntry = {
+key: cacheKey,
+output: outputs,
+producedArtifacts: emittedArtifacts,
+createdAt: (/* @__PURE__ */ new Date()).toISOString()
+};
+await cacheStore.set(cacheEntry);
+}
+
+//#endregion
+//#region src/executor/processing-queue.ts
+function createProcessingQueue(concurrency) {
+const queue = [];
+let running = 0;
+let resolveIdle = null;
+const runNext = async () => {
+if (running >= concurrency || queue.length === 0) {
+if (running === 0 && queue.length === 0 && resolveIdle) resolveIdle();
+return;
+}
+running++;
+const task = queue.shift();
+try {
+await task();
+} finally {
+running--;
+runNext();
+}
+};
+return {
+add: async (task) => {
+queue.push(task);
+runNext();
+},
+drain: () => {
+if (running === 0 && queue.length === 0) return Promise.resolve();
+return new Promise((resolve) => {
+resolveIdle = resolve;
+});
+}
+};
+}
+
+//#endregion
+//#region src/executor/source-adapter.ts
+function createSourceAdapter(pipeline) {
+if (pipeline.inputs.length === 0) throw new Error("Pipeline requires at least one input source");
+const backends = /* @__PURE__ */ new Map();
+for (const input of pipeline.inputs) backends.set(input.id, input.backend);
+return {
+listFiles: async (version) => resolveMultipleSourceFiles(pipeline.inputs, version),
+readFile: async (file) => {
+const sourceFile = file;
+if ("source" in sourceFile && sourceFile.source) {
+const backend = backends.get(sourceFile.source.id);
+if (backend) return backend.readFile(file);
+}
+const firstBackend = backends.values().next().value;
+if (firstBackend) return firstBackend.readFile(file);
+throw new Error(`No backend found for file: ${file.path}`);
+}
+};
+}
+function createParseContext(file, source) {
+let cachedContent = null;
+return {
+file,
+readContent: async () => {
+if (cachedContent === null) cachedContent = await source.readFile(file);
+return cachedContent;
+},
+async *readLines() {
+const lines = (await source.readFile(file)).split(/\r?\n/);
+for (const line of lines) yield line;
+},
+isComment: (line) => line.startsWith("#") || line.trim() === ""
+};
+}
+
+//#endregion
+//#region src/executor/route-execution.ts
+function createRouteResolveContext(options) {
+const { version, file, routeId, artifactsMap, emittedArtifacts, emitsDefinition, onArtifactEmit, onArtifactGet } = options;
+return {
+version,
+file,
+getArtifact: (key) => {
+if (!(key in artifactsMap)) throw new Error(`Artifact "${key}" not found. Make sure a route that produces this artifact runs before route "${routeId}".`);
+onArtifactGet?.(key);
+return artifactsMap[key];
+},
+emitArtifact: (id, value) => {
+if (emitsDefinition) {
+const def = emitsDefinition[id];
+if (def) {
+const result = def.schema.safeParse(value);
+if (!result.success) throw new Error(`Artifact "${id}" validation failed: ${result.error.message}`);
+}
+}
+emittedArtifacts[id] = value;
+onArtifactEmit?.(id, value);
+},
+normalizeEntries: (entries) => {
+return entries.sort((a, b) => {
+const aStart = a.range?.split("..")[0] ?? a.codePoint ?? "";
+const bStart = b.range?.split("..")[0] ?? b.codePoint ?? "";
+return aStart.localeCompare(bStart);
+});
+},
+now: () => (/* @__PURE__ */ new Date()).toISOString()
+};
+}
+async function processRoute(options) {
+const { file, route, artifactsMap, source, version, emit, spanId } = options;
+const parseStartTime = performance.now();
+const parseSpanId = spanId();
+await withPipelineSpan(parseSpanId, async () => {
+await emit({
+type: "parse:start",
+file,
+routeId: route.id,
+spanId: parseSpanId,
+timestamp: performance.now()
+});
+});
+const parseCtx = createParseContext(file, source);
+let rows = route.parser(parseCtx);
+const collectedRows = [];
+const filteredRows = filterRows(rows, file, route.filter, collectedRows);
+if (route.transforms && route.transforms.length > 0) rows = applyTransforms({
+version,
+file
+}, filteredRows, route.transforms);
+else rows = filteredRows;
+await withPipelineSpan(parseSpanId, async () => {
+await emit({
+type: "parse:end",
+file,
+routeId: route.id,
+rowCount: collectedRows.length,
+durationMs: performance.now() - parseStartTime,
+spanId: parseSpanId,
+timestamp: performance.now()
+});
+});
+const resolveStartTime = performance.now();
+const resolveSpanId = spanId();
+await withPipelineSpan(resolveSpanId, async () => {
+await emit({
+type: "resolve:start",
+file,
+routeId: route.id,
+spanId: resolveSpanId,
+timestamp: performance.now()
+});
+});
+const emittedArtifacts = {};
+const consumedArtifactIds = [];
+const resolveCtx = createRouteResolveContext({
+version,
+file,
+routeId: route.id,
+artifactsMap,
+emittedArtifacts,
+emitsDefinition: route.emits,
+onArtifactEmit: async (id) => {
+await withPipelineSpan(resolveSpanId, async () => {
+await emit({
+type: "artifact:produced",
+artifactId: `${route.id}:${id}`,
+routeId: route.id,
+version,
+spanId: resolveSpanId,
+timestamp: performance.now()
+});
+});
+},
+onArtifactGet: async (id) => {
+if (!consumedArtifactIds.includes(id)) {
+consumedArtifactIds.push(id);
+await withPipelineSpan(resolveSpanId, async () => {
+await emit({
+type: "artifact:consumed",
+artifactId: id,
+routeId: route.id,
+version,
+spanId: resolveSpanId,
+timestamp: performance.now()
+});
+});
+}
+}
+});
+const outputs = await route.resolver(resolveCtx, rows);
+const outputArray = Array.isArray(outputs) ? outputs : [outputs];
+await withPipelineSpan(resolveSpanId, async () => {
+await emit({
+type: "resolve:end",
+file,
+routeId: route.id,
+outputCount: outputArray.length,
+durationMs: performance.now() - resolveStartTime,
+spanId: resolveSpanId,
+timestamp: performance.now()
+});
+});
+return {
+outputs: outputArray,
+emittedArtifacts,
+consumedArtifactIds
+};
+}
+async function processFallback(options) {
+const { file, fallback, artifactsMap, source, version, emit, spanId } = options;
+const parseStartTime = performance.now();
+const parseSpanId = spanId();
+await withPipelineSpan(parseSpanId, async () => {
+await emit({
+type: "parse:start",
+file,
+routeId: "__fallback__",
+spanId: parseSpanId,
+timestamp: performance.now()
+});
+});
+const parseCtx = createParseContext(file, source);
+const rows = fallback.parser(parseCtx);
+const collectedRows = [];
+const filteredRows = filterRows(rows, file, fallback.filter, collectedRows);
+await withPipelineSpan(parseSpanId, async () => {
+await emit({
+type: "parse:end",
+file,
+routeId: "__fallback__",
+rowCount: collectedRows.length,
+durationMs: performance.now() - parseStartTime,
+spanId: parseSpanId,
+timestamp: performance.now()
+});
+});
+const resolveStartTime = performance.now();
+const resolveSpanId = spanId();
+await withPipelineSpan(resolveSpanId, async () => {
+await emit({
+type: "resolve:start",
+file,
+routeId: "__fallback__",
+spanId: resolveSpanId,
+timestamp: performance.now()
+});
+});
+const emittedArtifacts = {};
+const resolveCtx = {
+version,
+file,
+getArtifact: (id) => {
+if (!(id in artifactsMap)) throw new Error(`Artifact "${String(id)}" not found.`);
+return artifactsMap[id];
+},
+emitArtifact: (id, value) => {
+emittedArtifacts[id] = value;
+},
+normalizeEntries: (entries) => {
+return entries.sort((a, b) => {
+const aStart = a.range?.split("..")[0] ?? a.codePoint ?? "";
+const bStart = b.range?.split("..")[0] ?? b.codePoint ?? "";
+return aStart.localeCompare(bStart);
+});
+},
+now: () => (/* @__PURE__ */ new Date()).toISOString()
+};
+const outputs = await fallback.resolver(resolveCtx, filteredRows);
+const outputArray = Array.isArray(outputs) ? outputs : [outputs];
+await withPipelineSpan(resolveSpanId, async () => {
+await emit({
+type: "resolve:end",
+file,
+routeId: "__fallback__",
+outputCount: outputArray.length,
+durationMs: performance.now() - resolveStartTime,
+spanId: resolveSpanId,
+timestamp: performance.now()
+});
+});
+return outputArray;
+}
+async function* filterRows(rows, file, filter, collector) {
+for await (const row of rows) {
+collector.push(row);
+if (!filter) {
+yield row;
+continue;
+}
+if (filter({
+file,
+row: { property: row.property }
+})) yield row;
+}
+}
+function recordEmittedArtifacts(options) {
+const { routeId, emittedArtifacts, routeEmits, artifactsMap, globalArtifactsMap } = options;
+for (const [artifactName, artifactValue] of Object.entries(emittedArtifacts)) {
+const prefixedKey = `${routeId}:${artifactName}`;
+const artifactDef = routeEmits?.[artifactName];
+if (artifactDef && isGlobalArtifact(artifactDef)) globalArtifactsMap[prefixedKey] = artifactValue;
+else artifactsMap[prefixedKey] = artifactValue;
+}
+}
+
+//#endregion
+//#region src/executor/run-pipeline.ts
+async function runPipeline(options) {
+const { pipeline, runOptions = {}, cacheStore, artifacts: globalArtifacts, events } = options;
+const { cache: enableCache = true, versions: runVersions } = runOptions;
+const useCache = enableCache && cacheStore != null;
+const versionsToRun = runVersions ?? pipeline.versions;
+const source = createSourceAdapter(pipeline);
+const graph = new PipelineGraphBuilder();
+const startTime = performance.now();
+const outputs = [];
+const errors = [];
+let totalFiles = 0;
+let matchedFiles = 0;
+let skippedFiles = 0;
+let fallbackFiles = 0;
+const dag = pipeline.dag;
+const pipelineSpanId = events.nextSpanId();
+await emitWithSpan(pipelineSpanId, () => events.emit({
+type: "pipeline:start",
+versions: versionsToRun,
+spanId: pipelineSpanId,
+timestamp: performance.now()
+}));
+for (const version of versionsToRun) {
+const versionStartTime = performance.now();
+const versionSpanId = events.nextSpanId();
+await emitWithSpan(versionSpanId, () => events.emit({
+type: "version:start",
+version,
+spanId: versionSpanId,
+timestamp: performance.now()
+}));
+const sourceNodeId = graph.addSourceNode(version);
+const artifactsMap = {};
+const globalArtifactsMap = {};
+let versionFiles = null;
+async function listVersionFiles() {
+if (!versionFiles) versionFiles = await source.listFiles(version);
+return versionFiles;
+}
+for (const artifactDef of globalArtifacts) {
+const artifactStartTime = performance.now();
+const artifactSpanId = events.nextSpanId();
+await emitWithSpan(artifactSpanId, () => events.emit({
+type: "artifact:start",
+artifactId: artifactDef.id,
+version,
+spanId: artifactSpanId,
+timestamp: performance.now()
+}));
+const artifactNodeId = graph.addArtifactNode(artifactDef.id, version);
+graph.addEdge(sourceNodeId, artifactNodeId, "provides");
+try {
+let rows;
+if (artifactDef.filter && artifactDef.parser) {
+const files = await listVersionFiles();
+for (const file of files) if (artifactDef.filter({ file })) {
+rows = artifactDef.parser(createParseContext(file, source));
+break;
+}
+}
+const value = await artifactDef.build({ version }, rows);
+artifactsMap[artifactDef.id] = value;
+} catch (err) {
+const pipelineError = {
+scope: "artifact",
+message: err instanceof Error ? err.message : String(err),
+error: err,
+artifactId: artifactDef.id,
+version
+};
+errors.push(pipelineError);
+await emitWithSpan(artifactSpanId, () => events.emit({
+type: "error",
+error: pipelineError,
+spanId: artifactSpanId,
+timestamp: performance.now()
+}));
+}
+await emitWithSpan(artifactSpanId, () => events.emit({
+type: "artifact:end",
+artifactId: artifactDef.id,
+version,
+durationMs: performance.now() - artifactStartTime,
+spanId: artifactSpanId,
+timestamp: performance.now()
+}));
+}
+const files = await listVersionFiles();
+totalFiles += files.length;
+const filesToProcess = pipeline.include ? files.filter((file) => pipeline.include({ file })) : files;
+const executionLayers = getExecutionLayers(dag);
+const processedFiles = /* @__PURE__ */ new Set();
+for (const layer of executionLayers) {
+const processingQueue = createProcessingQueue(pipeline.concurrency);
+const layerRoutes = pipeline.routes.filter((route) => layer.includes(route.id));
+for (const route of layerRoutes) {
+const matchingFiles = filesToProcess.filter((file) => {
+const filterCtx = {
+file,
+source: file.source
+};
+return route.filter(filterCtx);
+});
+for (const file of matchingFiles) {
+processedFiles.add(file.path);
+await processingQueue.add(async () => {
+const fileNodeId = graph.addFileNode(file);
+graph.addEdge(sourceNodeId, fileNodeId, "provides");
+matchedFiles++;
+const routeNodeId = graph.addRouteNode(route.id, version);
+graph.addEdge(fileNodeId, routeNodeId, "matched");
+const fileSpanId = events.nextSpanId();
+await emitWithSpan(fileSpanId, () => events.emit({
+type: "file:matched",
+file,
+routeId: route.id,
+spanId: fileSpanId,
+timestamp: performance.now()
+}));
+await withPipelineSpan(fileSpanId, async () => {
+try {
+const routeCacheEnabled = useCache && route.cache !== false;
+let result = null;
+let fileContent;
+if (routeCacheEnabled && cacheStore) {
+fileContent = await source.readFile(file);
+const combinedArtifacts = {
+...artifactsMap,
+...globalArtifactsMap
+};
+const cachedResult = await tryLoadCachedResult({
+cacheStore,
+routeId: route.id,
+version,
+fileContent,
+artifactsMap: combinedArtifacts
+});
+if (cachedResult.hit && cachedResult.result) {
+result = cachedResult.result;
+await events.emit({
+type: "cache:hit",
+routeId: route.id,
+file,
+version,
+spanId: fileSpanId,
+timestamp: performance.now()
+});
+} else await events.emit({
+type: "cache:miss",
+routeId: route.id,
+file,
+version,
+spanId: fileSpanId,
+timestamp: performance.now()
+});
+}
+if (!result) {
+result = await processRoute({
+file,
+route,
+artifactsMap: {
+...artifactsMap,
+...globalArtifactsMap
+},
+source,
+version,
+emit: (event) => events.emit({
+...event,
+spanId: fileSpanId
+}),
+spanId: events.nextSpanId
+});
+if (routeCacheEnabled && cacheStore) {
+fileContent ??= await source.readFile(file);
+const cacheArtifacts = {
+...artifactsMap,
+...globalArtifactsMap
+};
+await storeCacheEntry({
+cacheStore,
+cacheKey: await buildCacheKey(route.id, version, fileContent, cacheArtifacts, result.consumedArtifactIds),
+outputs: result.outputs,
+emittedArtifacts: result.emittedArtifacts
+});
+await events.emit({
+type: "cache:store",
+routeId: route.id,
+file,
+version,
+spanId: fileSpanId,
+timestamp: performance.now()
+});
+}
+}
+recordEmittedArtifacts({
+routeId: route.id,
+emittedArtifacts: result.emittedArtifacts,
+routeEmits: route.emits,
+artifactsMap,
+globalArtifactsMap
+});
+for (const output of result.outputs) {
+const outputIndex = outputs.length;
+outputs.push(output);
+const outputNodeId = graph.addOutputNode(outputIndex, version, output.property);
+graph.addEdge(routeNodeId, outputNodeId, "resolved");
+}
+} catch (err) {
+const pipelineError = {
+scope: "route",
+message: err instanceof Error ? err.message : String(err),
+error: err,
+file,
+routeId: route.id,
+version
+};
+errors.push(pipelineError);
+await events.emit({
+type: "error",
+error: pipelineError,
+spanId: fileSpanId,
+timestamp: performance.now()
+});
+}
+});
+});
+}
+}
+await processingQueue.drain();
+}
+for (const file of filesToProcess) {
+if (processedFiles.has(file.path)) continue;
+if (pipeline.fallback) if (!pipeline.fallback.filter || pipeline.fallback.filter({ file })) {
+fallbackFiles++;
+const fileNodeId = graph.addFileNode(file);
+graph.addEdge(sourceNodeId, fileNodeId, "provides");
+const fallbackSpanId = events.nextSpanId();
+await emitWithSpan(fallbackSpanId, () => events.emit({
+type: "file:fallback",
+file,
+spanId: fallbackSpanId,
+timestamp: performance.now()
+}));
+try {
+const fallbackOutputs = await withPipelineSpan(fallbackSpanId, () => processFallback({
+file,
+fallback: pipeline.fallback,
+artifactsMap: {
+...artifactsMap,
+...globalArtifactsMap
+},
+source,
+version,
+emit: (event) => events.emit({
+...event,
+spanId: fallbackSpanId
+}),
+spanId: events.nextSpanId
+}));
+for (const output of fallbackOutputs) {
+const outputIndex = outputs.length;
+outputs.push(output);
+const outputNodeId = graph.addOutputNode(outputIndex, version, output.property);
+graph.addEdge(fileNodeId, outputNodeId, "resolved");
+}
+} catch (err) {
+const pipelineError = {
+scope: "file",
+message: err instanceof Error ? err.message : String(err),
+error: err,
+file,
+version
+};
+errors.push(pipelineError);
+await emitWithSpan(fallbackSpanId, () => events.emit({
+type: "error",
+error: pipelineError,
+spanId: fallbackSpanId,
+timestamp: performance.now()
+}));
+}
+} else {
+skippedFiles++;
+await emitWithSpan(versionSpanId, () => events.emit({
+type: "file:skipped",
+file,
+reason: "filtered",
+spanId: versionSpanId,
+timestamp: performance.now()
+}));
+}
+else {
+skippedFiles++;
+if (pipeline.strict) {
+const pipelineError = {
+scope: "file",
+message: `No matching route for file: ${file.path}`,
+file,
+version
+};
+errors.push(pipelineError);
+await emitWithSpan(versionSpanId, () => events.emit({
+type: "error",
+error: pipelineError,
+spanId: versionSpanId,
+timestamp: performance.now()
+}));
+} else await emitWithSpan(versionSpanId, () => events.emit({
+type: "file:skipped",
+file,
+reason: "no-match",
+spanId: versionSpanId,
+timestamp: performance.now()
+}));
+}
+}
+await emitWithSpan(versionSpanId, () => events.emit({
+type: "version:end",
+version,
+durationMs: performance.now() - versionStartTime,
+spanId: versionSpanId,
+timestamp: performance.now()
+}));
+}
+const durationMs = performance.now() - startTime;
+await emitWithSpan(pipelineSpanId, () => events.emit({
+type: "pipeline:end",
+durationMs,
+spanId: pipelineSpanId,
+timestamp: performance.now()
+}));
+const summary = {
+versions: versionsToRun,
+totalFiles,
+matchedFiles,
+skippedFiles,
+fallbackFiles,
+totalOutputs: outputs.length,
+durationMs
+};
+const status = errors.length === 0 ? "completed" : "failed";
+return {
+id: pipeline.id,
+data: outputs,
+graph: graph.build(),
+errors,
+summary,
+status
+};
+}
+
+//#endregion
+//#region src/executor.ts
+function createPipelineExecutor(options) {
+const { artifacts: globalArtifacts = [], cacheStore, onEvent } = options;
+const events = createEventEmitter({ onEvent });
+const run = async (pipelinesToRun, runOptions = {}) => {
+const results = [];
+for (const pipeline of pipelinesToRun) try {
+results.push(await runPipeline({
+pipeline,
+runOptions,
+cacheStore,
+artifacts: globalArtifacts,
+events
+}));
+} catch (err) {
+results.push({
+id: pipeline.id,
+data: [],
+graph: {
+nodes: [],
+edges: []
+},
+errors: [{
+scope: "pipeline",
+message: err instanceof Error ? err.message : String(err),
+error: err
+}],
+summary: {
+versions: pipeline.versions,
+totalFiles: 0,
+matchedFiles: 0,
+skippedFiles: 0,
+fallbackFiles: 0,
+totalOutputs: 0,
+durationMs: 0
+},
+status: "failed"
+});
+}
+return results;
+};
+return { run };
+}
+
+//#endregion
+export { createMemoryCacheStore, createPipelineExecutor, defaultHashFn, getPipelineExecutionContext, hashArtifact, runWithPipelineExecutionContext, serializeCacheKey, withPipelineEvent, withPipelineSpan };

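The `src/log-context.ts` region of the bundle wraps `AsyncLocalStorage`, so code called while the executor is running (for example a logger used inside an `onEvent` handler) can read the ambient execution context. A small illustrative sketch, with made-up `executionId` and span values:

```ts
import {
  getPipelineExecutionContext,
  runWithPipelineExecutionContext,
  withPipelineSpan,
} from "@ucdjs/pipelines-executor";

// Illustrative only: a logger that picks up the ambient execution context if one is set.
function log(message: string) {
  const ctx = getPipelineExecutionContext();
  console.log(ctx ? `[${ctx.executionId}${ctx.spanId ? `/${ctx.spanId}` : ""}] ${message}` : message);
}

await runWithPipelineExecutionContext({ executionId: "exec_1" }, async () => {
  log("outside any span"); // "[exec_1] outside any span"
  await withPipelineSpan("span_demo", async () => {
    log("inside a span"); // "[exec_1/span_demo] inside a span"
  });
});
```

Inside the executor, `withPipelineEvent` performs the same enrichment around every `onEvent` call, so the context also carries the current event and its span id.
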
package/package.json
ADDED
@@ -0,0 +1,55 @@
+{
+"name": "@ucdjs/pipelines-executor",
+"version": "0.0.1-beta.1",
+"type": "module",
+"author": {
+"name": "Lucas Nørgård",
+"email": "lucasnrgaard@gmail.com",
+"url": "https://luxass.dev"
+},
+"license": "MIT",
+"homepage": "https://github.com/ucdjs/ucd",
+"repository": {
+"type": "git",
+"url": "git+https://github.com/ucdjs/ucd.git",
+"directory": "packages/pipelines/pipeline-executor"
+},
+"bugs": {
+"url": "https://github.com/ucdjs/ucd/issues"
+},
+"exports": {
+".": "./dist/index.mjs",
+"./package.json": "./package.json"
+},
+"types": "./dist/index.d.mts",
+"files": [
+"dist"
+],
+"engines": {
+"node": ">=22.18"
+},
+"dependencies": {
+"@ucdjs/pipelines-core": "0.0.1-beta.1",
+"@ucdjs/pipelines-artifacts": "0.0.1-beta.1",
+"@ucdjs/pipelines-graph": "0.0.1-beta.1"
+},
+"devDependencies": {
+"@luxass/eslint-config": "7.2.0",
+"eslint": "10.0.0",
+"publint": "0.3.17",
+"tsdown": "0.20.3",
+"typescript": "5.9.3",
+"@ucdjs-tooling/tsdown-config": "1.0.0",
+"@ucdjs-tooling/tsconfig": "1.0.0"
+},
+"publishConfig": {
+"access": "public"
+},
+"scripts": {
+"build": "tsdown --tsconfig=./tsconfig.build.json",
+"dev": "tsdown --watch",
+"clean": "git clean -xdf dist node_modules",
+"lint": "eslint .",
+"typecheck": "tsc --noEmit -p tsconfig.build.json"
+}
+}