@ucdjs/pipelines-loader 0.0.1-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +27 -0
- package/dist/bundle-dlu7M3TU.mjs +320 -0
- package/dist/chunk-DQk6qfdC.mjs +18 -0
- package/dist/gitlab-C8zDC1_j.d.mts +32 -0
- package/dist/index.d.mts +16 -0
- package/dist/index.mjs +75 -0
- package/dist/insecure.d.mts +10 -0
- package/dist/insecure.mjs +30 -0
- package/dist/remote.d.mts +16 -0
- package/dist/remote.mjs +77 -0
- package/dist/types-Br8gGmsN.d.mts +41 -0
- package/package.json +62 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025-PRESENT Lucas Nørgård
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
# @ucdjs/pipelines-loader
|
|
2
|
+
|
|
3
|
+
[![npm version][npm-version-src]][npm-version-href]
|
|
4
|
+
[![npm downloads][npm-downloads-src]][npm-downloads-href]
|
|
5
|
+
[![codecov][codecov-src]][codecov-href]
|
|
6
|
+
|
|
7
|
+
> [!IMPORTANT]
|
|
8
|
+
> This is an internal package. It may change without warning and is not subject to semantic versioning. Use at your own risk.
|
|
9
|
+
|
|
10
|
+
A collection of core pipeline functionalities for the UCD project.
|
|
11
|
+
|
|
12
|
+
## Installation
|
|
13
|
+
|
|
14
|
+
```bash
|
|
15
|
+
npm install @ucdjs/pipelines-loader
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## 📄 License
|
|
19
|
+
|
|
20
|
+
Published under [MIT License](./LICENSE).
|
|
21
|
+
|
|
22
|
+
[npm-version-src]: https://img.shields.io/npm/v/@ucdjs/pipelines-loader?style=flat&colorA=18181B&colorB=4169E1
|
|
23
|
+
[npm-version-href]: https://npmjs.com/package/@ucdjs/pipelines-loader
|
|
24
|
+
[npm-downloads-src]: https://img.shields.io/npm/dm/@ucdjs/pipelines-loader?style=flat&colorA=18181B&colorB=4169E1
|
|
25
|
+
[npm-downloads-href]: https://npmjs.com/package/@ucdjs/pipelines-loader
|
|
26
|
+
[codecov-src]: https://img.shields.io/codecov/c/gh/ucdjs/ucd?style=flat&colorA=18181B&colorB=4169E1
|
|
27
|
+
[codecov-href]: https://codecov.io/gh/ucdjs/ucd
|
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
import { t as __exportAll } from "./chunk-DQk6qfdC.mjs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { build } from "rolldown";
|
|
4
|
+
import { parseSync } from "oxc-parser";
|
|
5
|
+
import { readFile } from "node:fs/promises";
|
|
6
|
+
import { transform } from "oxc-transform";
|
|
7
|
+
|
|
8
|
+
//#region src/bundler/identifiers.ts
|
|
9
|
+
/**
 * Tells whether a specifier begins with a URL scheme (e.g. "https:",
 * "github:", "data:").
 * @param {string} value - Candidate import specifier.
 * @returns {boolean} True when the string starts with a scheme prefix.
 */
function isUrlLike(value) {
	const SCHEME_PREFIX = /^[a-z][a-z+.-]*:/i;
	return SCHEME_PREFIX.test(value);
}
|
|
12
|
+
/**
 * Parses a "github://owner/repo?ref=...&path=..." (or gitlab://) identifier
 * into its parts.
 * @param {string} identifier - Candidate remote identifier.
 * @returns {{provider: string, owner: string, repo: string, ref: string, path: string} | null}
 *   Parsed parts, or null when the identifier is not a recognized remote scheme.
 * @throws {Error} When the scheme matches but owner or repo is missing.
 */
function parseRemoteIdentifier(identifier) {
	const isRemote = identifier.startsWith("github://") || identifier.startsWith("gitlab://");
	if (!isRemote) return null;
	const parsed = new URL(identifier);
	// "github:" → "github"; the WHATWG URL puts the owner in the host slot
	// and the repo in the path for these custom schemes.
	const provider = parsed.protocol.slice(0, -1);
	const owner = parsed.hostname;
	const repo = parsed.pathname.replace(/^\/+/, "");
	if (!owner || !repo) {
		throw new Error(`Invalid remote identifier: ${identifier}`);
	}
	const ref = parsed.searchParams.get("ref") ?? "HEAD";
	const filePath = parsed.searchParams.get("path") ?? "";
	return { provider, owner, repo, ref, path: filePath };
}
|
|
27
|
+
/**
 * Serializes a parsed remote descriptor back into its canonical
 * "provider://owner/repo?ref=...&path=..." identifier form.
 * @param {{provider: string, owner: string, repo: string, ref: string, path: string}} remote
 * @returns {string} Canonical remote identifier.
 */
function formatRemoteIdentifier(remote) {
	const { provider, owner, repo, ref, path: filePath } = remote;
	const url = new URL(`${provider}://${owner}/${repo}`);
	url.searchParams.set("ref", ref);
	url.searchParams.set("path", filePath);
	return url.toString();
}
|
|
33
|
+
/**
 * Convenience wrapper building a remote identifier from loose parts,
 * defaulting the ref to "HEAD" when not supplied.
 * @param {string} provider - "github" or "gitlab".
 * @param {string} owner - Repository owner/namespace.
 * @param {string} repo - Repository name.
 * @param {string | undefined} ref - Git ref; defaults to "HEAD".
 * @param {string} filePath - Path inside the repository.
 * @returns {string} Canonical remote identifier.
 */
function formatRemoteIdentifierFromParts(provider, owner, repo, ref, filePath) {
	const remote = {
		provider,
		owner,
		repo,
		ref: ref ?? "HEAD",
		path: filePath
	};
	return formatRemoteIdentifier(remote);
}
|
|
42
|
+
|
|
43
|
+
//#endregion
|
|
44
|
+
//#region src/bundler/errors.ts
|
|
45
|
+
/**
 * Error raised when a remote (or local) module cannot be found. The resolver
 * catches this type specifically to try the next candidate identifier.
 */
var RemoteNotFoundError = class extends Error {
	constructor(message) {
		super(message);
		// Own-property assignment, equivalent to the class-field initializer.
		this.name = "RemoteNotFoundError";
	}
};
|
|
51
|
+
|
|
52
|
+
//#endregion
|
|
53
|
+
//#region src/bundler/parse.ts
|
|
54
|
+
/**
 * Parses a module and collects every import specifier it references:
 * static imports, re-exports (`export ... from`), and dynamic `import()`
 * calls with a string-literal argument.
 * @param {string} source - Module source text.
 * @param {string | undefined} identifier - Name used for diagnostics; defaults to "<inline>".
 * @returns {string[]} Deduplicated specifiers in first-seen order.
 * @throws {Error} When the source cannot be parsed as an ES module.
 */
function getStaticImportSpecifiers(source, identifier) {
	const label = identifier ?? "<inline>";
	let parsed;
	try {
		parsed = parseSync(label, source, { sourceType: "module" });
	} catch (error) {
		const message = error instanceof Error ? error.message : String(error);
		throw new Error(`Failed to parse module ${label}: ${message}`);
	}
	const specifiers = new Set();
	const record = (literal) => {
		if (literal?.value) specifiers.add(literal.value);
	};
	const walk = (value) => {
		if (!value) return;
		if (Array.isArray(value)) {
			for (const item of value) walk(item);
			return;
		}
		if (typeof value !== "object") return;
		switch (value.type) {
			case "ImportDeclaration":
			case "ExportAllDeclaration":
			case "ExportNamedDeclaration":
				record(value.source);
				break;
			case "ImportExpression": {
				// oxc exposes the target as `source`; fall back to `argument`.
				const target = value.source ?? value.argument;
				if (target?.type === "StringLiteral") record(target);
				break;
			}
		}
		// Recurse into children, skipping back-references to parents.
		for (const [key, child] of Object.entries(value)) {
			if (key !== "parent") walk(child);
		}
	};
	walk(parsed.program);
	return [...specifiers];
}
|
|
89
|
+
|
|
90
|
+
//#endregion
|
|
91
|
+
//#region src/bundler/resolve.ts
|
|
92
|
+
// Resolution candidates are tried in this order for extensionless imports.
const EXTENSIONS = [
	".ts",
	".mts",
	".js",
	".mjs"
];
/**
 * Ensures an import specifier is a plain relative path ("./x" or "../x").
 * URL-like specifiers and bare package names are rejected.
 * @param {string} specifier - Import specifier to validate.
 * @throws {Error} When the specifier is not relative.
 */
function assertRelativeSpecifier(specifier) {
	const isRelative = specifier.startsWith("./") || specifier.startsWith("../");
	if (isUrlLike(specifier) || !isRelative) {
		throw new Error(`Unsupported import specifier: ${specifier}`);
	}
}
|
|
102
|
+
/**
 * Removes a single trailing "/" from a string, if present.
 * @param {string} value - Input string.
 * @returns {string} The string without its final slash.
 */
function stripTrailingSlash(value) {
	if (!value.endsWith("/")) return value;
	return value.slice(0, -1);
}
|
|
105
|
+
/**
 * Appends a suffix (typically a file extension) to an identifier, keeping
 * the identifier's shape intact: remote identifiers get the suffix on their
 * `path` query component, URLs on their pathname, and plain paths at the end.
 * @param {string} identifier - Remote identifier, URL, or filesystem path.
 * @param {string} suffix - Text to append (e.g. ".ts").
 * @returns {string} Identifier with the suffix applied.
 */
function appendSuffix(identifier, suffix) {
	const remote = parseRemoteIdentifier(identifier);
	if (remote) {
		return formatRemoteIdentifier({
			...remote,
			path: `${remote.path}${suffix}`
		});
	}
	if (!isUrlLike(identifier)) return `${identifier}${suffix}`;
	const url = new URL(identifier);
	url.pathname += suffix;
	return url.toString();
}
|
|
118
|
+
/**
 * Resolves a relative import specifier against its importer's identifier.
 * Remote identifiers resolve within the repository (posix semantics),
 * URLs resolve per WHATWG URL rules, and plain paths via the local filesystem.
 * @param {string} specifier - Relative specifier ("./x", "../y").
 * @param {string} parentIdentifier - Identifier of the importing module.
 * @returns {string} Absolute identifier of the target module.
 */
function resolveRelativeSpecifier(specifier, parentIdentifier) {
	const remote = parseRemoteIdentifier(parentIdentifier);
	if (remote) {
		const baseDir = remote.path ? path.posix.dirname(remote.path) : "";
		const joined = path.posix.join(baseDir, specifier);
		// Normalize and drop any leading slashes so the path stays repo-relative.
		const cleanPath = path.posix.normalize(joined).replace(/^\/+/, "");
		return formatRemoteIdentifier({
			...remote,
			path: cleanPath
		});
	}
	if (isUrlLike(parentIdentifier)) {
		const base = new URL(parentIdentifier);
		return new URL(specifier, base).toString();
	}
	return path.resolve(path.dirname(parentIdentifier), specifier);
}
|
|
135
|
+
/**
 * Returns the file extension of a specifier (posix semantics), or "" when
 * the specifier has none.
 * @param {string} specifier - Import specifier or path.
 * @returns {string} Extension including the dot, or the empty string.
 */
function getSpecifierExtension(specifier) {
	const { extname } = path.posix;
	return extname(specifier);
}
|
|
138
|
+
/**
 * Builds the ordered list of identifiers to try for a specifier. A specifier
 * that already has an extension yields a single candidate; otherwise each
 * known extension is tried directly and then as an "index" file.
 * @param {string} specifier - Relative import specifier.
 * @param {string} parentIdentifier - Identifier of the importing module.
 * @returns {string[]} Candidate identifiers, most-preferred first.
 */
function buildCandidateIdentifiers(specifier, parentIdentifier) {
	const resolvedBase = resolveRelativeSpecifier(specifier, parentIdentifier);
	if (getSpecifierExtension(specifier) !== "") return [resolvedBase];
	const base = stripTrailingSlash(resolvedBase);
	const direct = EXTENSIONS.map((ext) => appendSuffix(base, ext));
	const asIndex = EXTENSIONS.map((ext) => appendSuffix(`${base}/index`, ext));
	return [...direct, ...asIndex];
}
|
|
147
|
+
|
|
148
|
+
//#endregion
|
|
149
|
+
//#region src/remote/github.ts
|
|
150
|
+
// Lazy namespace object for the GitHub remote helpers (rolldown runtime glue).
// Re-exported as `github` from dist/remote.mjs; each property is a getter, so
// the underlying functions are resolved at access time rather than eagerly.
var github_exports = /* @__PURE__ */ __exportAll({
	fetchFile: () => fetchFile$1,
	listFiles: () => listFiles$1
});
|
|
154
|
+
const GITHUB_API_BASE = "https://api.github.com";
const GITHUB_ACCEPT_HEADER = "application/vnd.github.v3+json";
/**
 * Lists blob paths in a GitHub repository tree via the Git Trees API.
 * @param {{owner: string, repo: string, ref?: string, path?: string}} repoRef
 * @param {{customFetch?: typeof fetch}} [options] - Injectable fetch for testing/auth.
 * @returns {Promise<{files: string[], truncated: boolean}>} Blob paths under
 *   `path` (all when empty) and GitHub's truncation flag for huge trees.
 * @throws {Error} On a non-OK API response.
 */
async function listFiles$1(repoRef, options = {}) {
	const { owner, repo, ref = "HEAD", path = "" } = repoRef;
	const { customFetch: doFetch = fetch } = options;
	const url = `${GITHUB_API_BASE}/repos/${owner}/${repo}/git/trees/${ref}?recursive=1`;
	const response = await doFetch(url, { headers: { Accept: GITHUB_ACCEPT_HEADER } });
	if (!response.ok) {
		throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
	}
	const data = await response.json();
	const prefix = path ? `${path}/` : "";
	const files = [];
	for (const entry of data.tree) {
		if (entry.type === "blob" && entry.path.startsWith(prefix)) files.push(entry.path);
	}
	return { files, truncated: data.truncated };
}
/**
 * Fetches one file's content from GitHub via the Contents API.
 * @param {{owner: string, repo: string, ref?: string}} repoRef
 * @param {string} filePath - Path inside the repository.
 * @param {{customFetch?: typeof fetch}} [options]
 * @returns {Promise<string>} Decoded UTF-8 file content.
 * @throws {RemoteNotFoundError} When the API returns 404.
 * @throws {Error} On other API failures or an unexpected content encoding.
 */
async function fetchFile$1(repoRef, filePath, options = {}) {
	const { owner, repo, ref = "HEAD" } = repoRef;
	const { customFetch: doFetch = fetch } = options;
	const url = `${GITHUB_API_BASE}/repos/${owner}/${repo}/contents/${encodeURIComponent(filePath)}?ref=${ref}`;
	const response = await doFetch(url, { headers: { Accept: GITHUB_ACCEPT_HEADER } });
	if (!response.ok) {
		if (response.status === 404) {
			throw new RemoteNotFoundError(`GitHub file not found: ${filePath}`);
		}
		throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
	}
	const data = await response.json();
	// Contents API returns base64 for regular files; anything else is unexpected.
	if (data.encoding !== "base64") {
		throw new Error(`Unexpected encoding: ${data.encoding}`);
	}
	return Buffer.from(data.content, "base64").toString("utf-8");
}
|
|
180
|
+
|
|
181
|
+
//#endregion
|
|
182
|
+
//#region src/remote/gitlab.ts
|
|
183
|
+
// Lazy namespace object for the GitLab remote helpers (rolldown runtime glue).
// Re-exported as `gitlab` from dist/remote.mjs; each property is a getter, so
// the underlying functions are resolved at access time rather than eagerly.
var gitlab_exports = /* @__PURE__ */ __exportAll({
	fetchFile: () => fetchFile,
	listFiles: () => listFiles
});
|
|
187
|
+
const GITLAB_API_BASE = "https://gitlab.com/api/v4";
/**
 * Encodes an "owner/repo" pair as a single URL-encoded project-id path
 * segment, as required by GitLab's REST API.
 */
function encodeProjectPath(owner, repo) {
	return encodeURIComponent(`${owner}/${repo}`);
}
/**
 * Lists blob paths in a GitLab repository tree, following the API's
 * `x-next-page` pagination header.
 * @param {{owner: string, repo: string, ref?: string, path?: string}} repoRef
 * @param {{customFetch?: typeof fetch}} [options] - Injectable fetch for testing/auth.
 * @returns {Promise<{files: string[], truncated: boolean}>} Blob paths and a
 *   flag set when pagination had to stop early on a malformed page header.
 * @throws {Error} On a non-OK API response.
 */
async function listFiles(repoRef, options = {}) {
	const { owner, repo, ref, path } = repoRef;
	// Encode the ref so branch names with reserved characters (e.g. "a&b",
	// "feature#x") cannot corrupt the query string; plain refs like "HEAD"
	// or "main" are unchanged. GitLab expects URL-encoded parameters.
	const refValue = encodeURIComponent(ref ?? "HEAD");
	const encodedPath = encodeURIComponent(path ?? "");
	const { customFetch = fetch } = options;
	const projectId = encodeProjectPath(owner, repo);
	const files = [];
	let truncated = false;
	let page = 1;
	while (true) {
		const response = await customFetch(`${GITLAB_API_BASE}/projects/${projectId}/repository/tree?recursive=true&ref=${refValue}&path=${encodedPath}&per_page=100&page=${page}`);
		if (!response.ok) {
			throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
		}
		const data = await response.json();
		for (const item of data) {
			if (item.type === "blob") files.push(item.path);
		}
		const nextPage = response.headers.get("x-next-page");
		if (!nextPage) break;
		const nextPageNumber = Number(nextPage);
		// Defensive: a malformed or non-advancing page header would otherwise
		// loop forever; report the listing as truncated instead.
		if (!Number.isFinite(nextPageNumber) || nextPageNumber <= page) {
			truncated = true;
			break;
		}
		page = nextPageNumber;
	}
	return { files, truncated };
}
/**
 * Fetches one file's raw content from a GitLab repository.
 * @param {{owner: string, repo: string, ref?: string}} repoRef
 * @param {string} filePath - Path inside the repository.
 * @param {{customFetch?: typeof fetch}} [options]
 * @returns {Promise<string>} Raw file content.
 * @throws {RemoteNotFoundError} When the API returns 404.
 * @throws {Error} On other API failures.
 */
async function fetchFile(repoRef, filePath, options = {}) {
	const { owner, repo, ref } = repoRef;
	// Same encoding rationale as in listFiles above.
	const refValue = encodeURIComponent(ref ?? "HEAD");
	const { customFetch = fetch } = options;
	const response = await customFetch(`${GITLAB_API_BASE}/projects/${encodeProjectPath(owner, repo)}/repository/files/${encodeURIComponent(filePath)}/raw?ref=${refValue}`);
	if (!response.ok) {
		if (response.status === 404) {
			throw new RemoteNotFoundError(`GitLab file not found: ${filePath}`);
		}
		throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
	}
	return response.text();
}
|
|
231
|
+
|
|
232
|
+
//#endregion
|
|
233
|
+
//#region src/bundler/source.ts
|
|
234
|
+
/**
 * Loads a module's source text by identifier: remote identifiers go through
 * the matching provider API, plain paths through the local filesystem.
 * @param {string} identifier - Remote identifier or local file path.
 * @param {typeof fetch} customFetch - Fetch implementation for remote loads.
 * @returns {Promise<string>} Module source text.
 * @throws {RemoteNotFoundError} When the local file does not exist (ENOENT).
 * @throws {Error} For URL-like identifiers that are not remote identifiers.
 */
async function loadRemoteSource(identifier, customFetch) {
	const remote = parseRemoteIdentifier(identifier);
	if (remote) {
		const repoRef = {
			owner: remote.owner,
			repo: remote.repo,
			ref: remote.ref
		};
		return remote.provider === "github"
			? fetchFile$1(repoRef, remote.path, { customFetch })
			: fetchFile(repoRef, remote.path, { customFetch });
	}
	// Non-remote URLs (http:, data:, ...) are not loadable here.
	if (isUrlLike(identifier)) {
		throw new Error(`Unsupported import specifier: ${identifier}`);
	}
	try {
		return await readFile(identifier, "utf-8");
	} catch (error) {
		const isMissing = error instanceof Error && "code" in error && error.code === "ENOENT";
		if (isMissing) {
			throw new RemoteNotFoundError(`Module not found: ${identifier}`);
		}
		throw error;
	}
}
|
|
253
|
+
/**
 * Transforms a module's source (TypeScript → JavaScript) with oxc-transform.
 * The filename passed to the transformer is derived from the identifier so
 * the right loader kicks in (.ts vs .js).
 * @param {string} identifier - Remote identifier or local path.
 * @param {string} source - Module source text.
 * @returns {Promise<string>} Transformed JavaScript.
 * @throws {Error} When the transformer reports parse errors.
 */
async function compileModuleSource(identifier, source) {
	let filename;
	try {
		const url = new URL(identifier);
		// Remote identifiers carry the real file path in ?path=; plain URLs
		// fall back to their pathname.
		filename = url.searchParams.get("path") ?? (url.pathname || identifier);
	} catch {
		// Not URL-shaped (e.g. an absolute local path) — use it verbatim.
		filename = identifier;
	}
	const result = await transform(filename, source, { sourceType: "module" });
	const issues = result.errors ?? [];
	if (issues.length > 0) {
		const message = issues.map((error) => error.message).join("\n");
		throw new Error(`Failed to parse module ${identifier}: ${message}`);
	}
	return result.code;
}
|
|
268
|
+
|
|
269
|
+
//#endregion
|
|
270
|
+
//#region src/bundler/bundle.ts
|
|
271
|
+
/**
 * Builds the rolldown plugin that resolves and loads remote modules.
 * Relative specifiers are resolved to candidate identifiers; the first one
 * whose source can be fetched wins and is cached for the load hook.
 * @param {{identifier: string, content: string, customFetch?: typeof fetch}} input
 * @returns {import("rolldown").Plugin} Plugin with resolveId/load hooks.
 */
function createRemotePlugin(input) {
	const customFetch = input.customFetch ?? fetch;
	const moduleCache = new Map();
	const resolveId = async (specifier, importer) => {
		// The entry module has no importer and resolves to itself.
		if (!importer) return input.identifier;
		assertRelativeSpecifier(specifier);
		for (const candidate of buildCandidateIdentifiers(specifier, importer)) {
			try {
				const source = await loadRemoteSource(candidate, customFetch);
				moduleCache.set(candidate, source);
				return candidate;
			} catch (err) {
				// Not-found means "try the next extension"; anything else is fatal.
				if (!(err instanceof RemoteNotFoundError)) throw err;
			}
		}
		throw new Error(`Module not found: ${specifier}`);
	};
	const load = async (id) => {
		if (id === input.identifier) return compileModuleSource(id, input.content);
		const source = moduleCache.get(id) ?? await loadRemoteSource(id, customFetch);
		const code = await compileModuleSource(id, source);
		moduleCache.set(id, source);
		return code;
	};
	return {
		name: "pipeline-remote-loader",
		resolveId,
		load
	};
}
|
|
299
|
+
/**
 * Bundles a module (and its relative imports, fetched on demand) into a
 * single ESM chunk via rolldown. All static import specifiers are validated
 * up front so unsupported ones fail before any network work.
 * @param {{identifier: string, content: string, customFetch?: typeof fetch}} input
 * @returns {Promise<string>} Bundled ESM code.
 * @throws {Error} When a specifier is unsupported or no chunk is produced.
 */
async function bundleRemoteModule(input) {
	for (const specifier of getStaticImportSpecifiers(input.content, input.identifier)) {
		assertRelativeSpecifier(specifier);
	}
	const result = await build({
		input: input.identifier,
		plugins: [createRemotePlugin(input)],
		write: false,
		output: { format: "esm" }
	});
	const outputs = Array.isArray(result) ? result : [result];
	const chunk = outputs
		.flatMap((output) => output.output ?? [])
		.find((item) => item.type === "chunk");
	if (chunk?.type !== "chunk") {
		throw new Error("Failed to bundle remote module");
	}
	return chunk.code;
}
|
|
312
|
+
/**
 * Wraps JavaScript code in a base64 data: URL so it can be dynamically
 * imported without touching the filesystem.
 * @param {string} code - JavaScript source.
 * @returns {string} data:text/javascript;base64,... URL.
 */
function createDataUrl(code) {
	const encoded = Buffer.from(code, "utf-8").toString("base64");
	return `data:text/javascript;base64,${encoded}`;
}
|
|
315
|
+
/**
 * Produces the canonical module identifier for a local file: its absolute
 * path (resolved against the current working directory when relative).
 * @param {string} filePath - Local file path.
 * @returns {string} Absolute path identifier.
 */
function identifierForLocalFile(filePath) {
	const absolute = path.resolve(filePath);
	return absolute;
}
|
|
318
|
+
|
|
319
|
+
//#endregion
|
|
320
|
+
export { gitlab_exports as a, github_exports as c, fetchFile as i, listFiles$1 as l, createDataUrl as n, listFiles as o, identifierForLocalFile as r, fetchFile$1 as s, bundleRemoteModule as t, formatRemoteIdentifierFromParts as u };
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
//#region \0rolldown/runtime.js
|
|
2
|
+
var __defProp = Object.defineProperty;
/**
 * Rolldown runtime helper: builds a namespace-like object from a map of
 * getter thunks. Each property becomes an enumerable accessor, and unless
 * `no_symbols` is set the object is tagged as a "Module".
 * @param {Record<string, () => unknown>} all - Property name → getter thunk.
 * @param {boolean} [no_symbols] - Skip the Symbol.toStringTag marker.
 * @returns {object} Namespace object.
 */
var __exportAll = (all, no_symbols) => {
	const target = {};
	for (const name in all) {
		__defProp(target, name, {
			get: all[name],
			enumerable: true
		});
	}
	if (!no_symbols) {
		__defProp(target, Symbol.toStringTag, { value: "Module" });
	}
	return target;
};
|
|
16
|
+
|
|
17
|
+
//#endregion
|
|
18
|
+
export { __exportAll as t };
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
//#region src/remote/types.d.ts
// Result of listing repository files from a remote provider.
interface RemoteFileList {
	// Blob (file) paths relative to the repository root.
	files: string[];
	// True when the provider could not return the complete listing.
	truncated: boolean;
}
// Options shared by all remote API calls.
interface RemoteRequestOptions {
	// Injectable fetch implementation (testing, auth, proxies).
	customFetch?: typeof fetch;
}
// Namespace surface for the GitHub helpers (re-exported as `github`).
declare namespace github_d_exports {
	export { fetchFile$1 as fetchFile, listFiles$1 as listFiles };
}
// Reference to a GitHub repository; ref defaults to "HEAD", path to "".
interface GitHubRepoRef {
	owner: string;
	repo: string;
	ref?: string;
	path?: string;
}
// Lists blob paths in a GitHub repository tree.
declare function listFiles$1(repoRef: GitHubRepoRef, options?: RemoteRequestOptions): Promise<RemoteFileList>;
// Fetches a single file's content from GitHub; rejects with RemoteNotFoundError on 404.
declare function fetchFile$1(repoRef: GitHubRepoRef, filePath: string, options?: RemoteRequestOptions): Promise<string>;
// Namespace surface for the GitLab helpers (re-exported as `gitlab`).
declare namespace gitlab_d_exports {
	export { GitLabRepoRef, fetchFile, listFiles };
}
// Reference to a GitLab repository; ref defaults to "HEAD", path to "".
interface GitLabRepoRef {
	owner: string;
	repo: string;
	ref?: string;
	path?: string;
}
// Lists blob paths in a GitLab repository tree (follows pagination).
declare function listFiles(repoRef: GitLabRepoRef, options?: RemoteRequestOptions): Promise<RemoteFileList>;
// Fetches a single file's raw content from GitLab; rejects with RemoteNotFoundError on 404.
declare function fetchFile(repoRef: GitLabRepoRef, filePath: string, options?: RemoteRequestOptions): Promise<string>;
//#endregion
export { github_d_exports as n, RemoteFileList as r, gitlab_d_exports as t };
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { a as LocalSource, i as LoadedPipelineFile, n as GitLabSource, o as PipelineLoadError, r as LoadPipelinesResult, s as PipelineSource, t as GitHubSource } from "./types-Br8gGmsN.mjs";
import { FindRemotePipelineFilesOptions, LoadRemotePipelinesOptions, findRemotePipelineFiles, loadRemotePipelines } from "./remote.mjs";

//#region src/loader.d.ts
// Dynamically imports a local pipeline module and collects every export
// recognized as a pipeline definition.
declare function loadPipelineFile(filePath: string): Promise<LoadedPipelineFile>;
// Options for loadPipelinesFromPaths.
interface LoadPipelinesOptions {
	// When true, the first failing file rejects the whole load with a
	// wrapped error; otherwise failures are collected in `errors`.
	throwOnError?: boolean;
}
declare function loadPipelinesFromPaths(filePaths: string[], options?: LoadPipelinesOptions): Promise<LoadPipelinesResult>;
// Options for findPipelineFiles.
interface FindPipelineFilesOptions {
	// Glob pattern(s); defaults to "**/*.ucd-pipeline.ts".
	patterns?: string | string[];
	// Directory to search from; defaults to process.cwd().
	cwd?: string;
}
// Globs for pipeline files, skipping node_modules/dist/build/.git;
// returns absolute paths.
declare function findPipelineFiles(options?: FindPipelineFilesOptions): Promise<string[]>;
//#endregion
export { type FindPipelineFilesOptions, type FindRemotePipelineFilesOptions, type GitHubSource, type GitLabSource, type LoadPipelinesOptions, type LoadPipelinesResult, type LoadRemotePipelinesOptions, type LoadedPipelineFile, type LocalSource, type PipelineLoadError, type PipelineSource, findPipelineFiles, findRemotePipelineFiles, loadPipelineFile, loadPipelinesFromPaths, loadRemotePipelines };
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import "./bundle-dlu7M3TU.mjs";
|
|
2
|
+
import { findRemotePipelineFiles, loadRemotePipelines } from "./remote.mjs";
|
|
3
|
+
import { pathToFileURL } from "node:url";
|
|
4
|
+
import { isPipelineDefinition } from "@ucdjs/pipelines-core";
|
|
5
|
+
import { glob } from "tinyglobby";
|
|
6
|
+
|
|
7
|
+
//#region src/loader.ts
|
|
8
|
+
/**
 * Dynamically imports a local pipeline module and collects every export
 * that is a pipeline definition.
 * @param {string} filePath - File path or file:// URL of the module.
 * @returns {Promise<{filePath: string, pipelines: unknown[], exportNames: string[]}>}
 *   The original path, collected pipelines, and their export names (parallel arrays).
 */
async function loadPipelineFile(filePath) {
	// Paths need converting to file:// URLs before dynamic import.
	const specifier = filePath.startsWith("file://") ? filePath : pathToFileURL(filePath).href;
	const module = await import(specifier);
	const pipelines = [];
	const exportNames = [];
	for (const [name, value] of Object.entries(module)) {
		if (!isPipelineDefinition(value)) continue;
		pipelines.push(value);
		exportNames.push(name);
	}
	return {
		filePath,
		pipelines,
		exportNames
	};
}
|
|
22
|
+
/**
 * Loads pipeline definitions from multiple files in parallel.
 * With `throwOnError`, the first failure rejects with a wrapped error;
 * otherwise failures are collected per-file in `errors`.
 * @param {string[]} filePaths - Files to load.
 * @param {{throwOnError?: boolean}} [options]
 * @returns {Promise<{pipelines: unknown[], files: object[], errors: {filePath: string, error: Error}[]}>}
 */
async function loadPipelinesFromPaths(filePaths, options = {}) {
	const { throwOnError = false } = options;
	if (throwOnError) {
		const tasks = filePaths.map((filePath) =>
			loadPipelineFile(filePath).catch((err) => {
				const error = err instanceof Error ? err : new Error(String(err));
				throw new Error(`Failed to load pipeline file: ${filePath}`, { cause: error });
			})
		);
		const results = await Promise.all(tasks);
		return {
			pipelines: results.flatMap((r) => r.pipelines),
			files: results,
			errors: []
		};
	}
	const settled = await Promise.allSettled(filePaths.map((fp) => loadPipelineFile(fp)));
	const files = [];
	const errors = [];
	settled.forEach((result, i) => {
		if (result.status === "fulfilled") {
			files.push(result.value);
			return;
		}
		const reason = result.reason;
		errors.push({
			filePath: filePaths[i],
			error: reason instanceof Error ? reason : new Error(String(reason))
		});
	});
	return {
		pipelines: files.flatMap((f) => f.pipelines),
		files,
		errors
	};
}
|
|
56
|
+
/**
 * Globs the working tree for pipeline definition files, skipping common
 * build/dependency directories. Returns absolute paths.
 * @param {{patterns?: string | string[], cwd?: string}} [options] -
 *   Glob pattern(s) (default "**\/*.ucd-pipeline.ts") and search root
 *   (default process.cwd()).
 * @returns {Promise<string[]>} Absolute paths of matching files.
 */
async function findPipelineFiles(options = {}) {
	const resolvedCwd = options.cwd ?? process.cwd();
	const requested = options.patterns;
	const patterns = requested
		? (Array.isArray(requested) ? requested : [requested])
		: ["**/*.ucd-pipeline.ts"];
	return glob(patterns, {
		cwd: resolvedCwd,
		ignore: [
			"node_modules/**",
			"**/node_modules/**",
			"**/dist/**",
			"**/build/**",
			"**/.git/**"
		],
		absolute: true,
		onlyFiles: true
	});
}
|
|
73
|
+
|
|
74
|
+
//#endregion
|
|
75
|
+
export { findPipelineFiles, findRemotePipelineFiles, loadPipelineFile, loadPipelinesFromPaths, loadRemotePipelines };
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { i as LoadedPipelineFile } from "./types-Br8gGmsN.mjs";

//#region src/insecure.d.ts
// Options for loadPipelineFromContent.
interface LoadPipelineFromContentOptions {
	// Module identifier used for resolving relative imports; defaults to the
	// absolute path of `filename`.
	identifier?: string;
	// Injectable fetch implementation for remote imports.
	customFetch?: typeof fetch;
}
// Bundles and dynamically executes arbitrary pipeline source code
// ("insecure" because the content is evaluated), returning its pipeline exports.
declare function loadPipelineFromContent(content: string, filename: string, options?: LoadPipelineFromContentOptions): Promise<LoadedPipelineFile>;
//#endregion
export { LoadPipelineFromContentOptions, loadPipelineFromContent };
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { n as createDataUrl, r as identifierForLocalFile, t as bundleRemoteModule } from "./bundle-dlu7M3TU.mjs";
|
|
2
|
+
import { isPipelineDefinition } from "@ucdjs/pipelines-core";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
|
|
5
|
+
//#region src/insecure.ts
|
|
6
|
+
/**
 * Bundles pipeline source text (resolving its relative imports) and executes
 * it via a data: URL import, then collects all pipeline-definition exports.
 * The content is evaluated — hence "insecure"; only use with trusted input.
 * @param {string} content - Module source text.
 * @param {string} filename - Logical filename, reported back as `filePath`.
 * @param {{identifier?: string, customFetch?: typeof fetch}} [options]
 * @returns {Promise<{filePath: string, pipelines: unknown[], exportNames: string[]}>}
 */
async function loadPipelineFromContent(content, filename, options = {}) {
	const identifier = options.identifier ?? identifierForLocalFile(path.resolve(filename));
	const bundled = await bundleRemoteModule({
		content,
		identifier,
		customFetch: options.customFetch
	});
	const module = await import(createDataUrl(bundled));
	const pipelines = [];
	const exportNames = [];
	for (const [name, value] of Object.entries(module)) {
		// The default export is intentionally ignored; only named exports count.
		if (name === "default" || !isPipelineDefinition(value)) continue;
		pipelines.push(value);
		exportNames.push(name);
	}
	return {
		filePath: filename,
		pipelines,
		exportNames
	};
}
|
|
28
|
+
|
|
29
|
+
//#endregion
|
|
30
|
+
export { loadPipelineFromContent };
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { n as GitLabSource, r as LoadPipelinesResult, t as GitHubSource } from "./types-Br8gGmsN.mjs";
import { n as github_d_exports, r as RemoteFileList, t as gitlab_d_exports } from "./gitlab-C8zDC1_j.mjs";

//#region src/remote.d.ts
// Options for findRemotePipelineFiles.
interface FindRemotePipelineFilesOptions {
	// Glob pattern matched against repository file paths;
	// defaults to "**/*.ucd-pipeline.ts".
	pattern?: string;
	// Injectable fetch implementation (testing, auth).
	customFetch?: typeof fetch;
}
// Lists repository files matching the pattern from GitHub or GitLab.
declare function findRemotePipelineFiles(source: GitHubSource | GitLabSource, options?: FindRemotePipelineFilesOptions): Promise<RemoteFileList>;
// Options for loadRemotePipelines.
interface LoadRemotePipelinesOptions {
	// When true, the first failing file rejects the whole load; otherwise
	// failures are collected per-file in the result's `errors`.
	throwOnError?: boolean;
	// Injectable fetch implementation (testing, auth).
	customFetch?: typeof fetch;
}
// Fetches, bundles, and executes the given repository files, collecting
// their pipeline-definition exports.
declare function loadRemotePipelines(source: GitHubSource | GitLabSource, filePaths: string[], options?: LoadRemotePipelinesOptions): Promise<LoadPipelinesResult>;
//#endregion
export { FindRemotePipelineFilesOptions, LoadRemotePipelinesOptions, findRemotePipelineFiles, github_d_exports as github, gitlab_d_exports as gitlab, loadRemotePipelines };
|
package/dist/remote.mjs
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import { a as gitlab_exports, c as github_exports, i as fetchFile$1, l as listFiles, o as listFiles$1, s as fetchFile, u as formatRemoteIdentifierFromParts } from "./bundle-dlu7M3TU.mjs";
|
|
2
|
+
import { loadPipelineFromContent } from "./insecure.mjs";
|
|
3
|
+
import picomatch from "picomatch";
|
|
4
|
+
|
|
5
|
+
//#region src/remote.ts
|
|
6
|
+
/**
 * Lists repository files matching a glob pattern from a GitHub or GitLab
 * source. (Note: in this bundle the minified alias `listFiles` is the GitHub
 * helper and `listFiles$1` the GitLab one.)
 * @param {{type: "github" | "gitlab", owner: string, repo: string, ref?: string, path?: string}} source
 * @param {{pattern?: string, customFetch?: typeof fetch}} [options]
 * @returns {Promise<{files: string[], truncated: boolean}>} Matching paths
 *   and the provider's truncation flag.
 */
async function findRemotePipelineFiles(source, options = {}) {
	const { pattern = "**/*.ucd-pipeline.ts", customFetch = fetch } = options;
	const { owner, repo, ref, path } = source;
	const repoRef = {
		owner,
		repo,
		ref,
		path
	};
	const fileList = source.type === "github"
		? await listFiles(repoRef, { customFetch })
		: await listFiles$1(repoRef, { customFetch });
	const isMatch = picomatch(pattern, { dot: true });
	const files = fileList.files.filter((file) => isMatch(file));
	return {
		files,
		truncated: fileList.truncated
	};
}
|
|
24
|
+
// Thin local alias for readability at the call sites below; delegates to the
// shared identifier formatter from the bundler chunk (defaults ref to "HEAD").
function buildRemoteIdentifier(provider, owner, repo, ref, filePath) {
	return formatRemoteIdentifierFromParts(provider, owner, repo, ref, filePath);
}
|
|
27
|
+
/**
 * Fetches, bundles, and executes the given repository files, collecting
 * their pipeline-definition exports. With `throwOnError`, the first failure
 * rejects with a wrapped error; otherwise failures are collected per-file.
 * (Note: in this bundle the minified alias `fetchFile` is the GitHub helper
 * and `fetchFile$1` the GitLab one.)
 * @param {{type: "github" | "gitlab", owner: string, repo: string, ref?: string}} source
 * @param {string[]} filePaths - Repository-relative file paths to load.
 * @param {{throwOnError?: boolean, customFetch?: typeof fetch}} [options]
 * @returns {Promise<{pipelines: unknown[], files: object[], errors: {filePath: string, error: Error}[]}>}
 */
async function loadRemotePipelines(source, filePaths, options = {}) {
	const { throwOnError = false, customFetch = fetch } = options;
	const { owner, repo, ref, type } = source;
	const repoRef = {
		owner,
		repo,
		ref
	};
	const fetchContent = (filePath) => type === "github"
		? fetchFile(repoRef, filePath, { customFetch })
		: fetchFile$1(repoRef, filePath, { customFetch });
	const loadOne = async (filePath) => {
		const content = await fetchContent(filePath);
		return loadPipelineFromContent(content, filePath, {
			identifier: buildRemoteIdentifier(type, owner, repo, ref, filePath),
			customFetch
		});
	};
	if (throwOnError) {
		const tasks = filePaths.map((filePath) =>
			loadOne(filePath).catch((err) => {
				const error = err instanceof Error ? err : new Error(String(err));
				throw new Error(`Failed to load pipeline file: ${filePath}`, { cause: error });
			})
		);
		const results = await Promise.all(tasks);
		return {
			pipelines: results.flatMap((r) => r.pipelines),
			files: results,
			errors: []
		};
	}
	const settled = await Promise.allSettled(filePaths.map((filePath) => loadOne(filePath)));
	const files = [];
	const errors = [];
	for (const [i, result] of settled.entries()) {
		if (result.status === "fulfilled") {
			files.push(result.value);
			continue;
		}
		const reason = result.reason;
		errors.push({
			filePath: filePaths[i],
			error: reason instanceof Error ? reason : new Error(String(reason))
		});
	}
	return {
		pipelines: files.flatMap((f) => f.pipelines),
		files,
		errors
	};
}
|
|
75
|
+
|
|
76
|
+
//#endregion
|
|
77
|
+
export { findRemotePipelineFiles, github_exports as github, gitlab_exports as gitlab, loadRemotePipelines };
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { PipelineDefinition } from "@ucdjs/pipelines-core";

//#region src/types.d.ts
// Result of loading one pipeline module (local or remote).
interface LoadedPipelineFile {
	// Path or remote identifier the module was loaded from.
	filePath: string;
	// Pipeline definitions exported by the module.
	pipelines: PipelineDefinition[];
	// Export names parallel to `pipelines` (same order, same length).
	exportNames: string[];
}
// Aggregated result of loading many pipeline files.
interface LoadPipelinesResult {
	// All pipelines across every successfully loaded file.
	pipelines: PipelineDefinition[];
	// Per-file load results for files that loaded successfully.
	files: LoadedPipelineFile[];
	// Per-file failures (empty when throwOnError was used).
	errors: PipelineLoadError[];
}
// A single file's load failure.
interface PipelineLoadError {
	filePath: string;
	error: Error;
}
// Pipeline source hosted on GitHub.
interface GitHubSource {
	type: "github";
	// Caller-chosen identifier for this source.
	id: string;
	owner: string;
	repo: string;
	// Git ref; defaults to "HEAD" when omitted.
	ref?: string;
	// Subdirectory to scope file listings to.
	path?: string;
}
// Pipeline source hosted on GitLab.
interface GitLabSource {
	type: "gitlab";
	// Caller-chosen identifier for this source.
	id: string;
	owner: string;
	repo: string;
	// Git ref; defaults to "HEAD" when omitted.
	ref?: string;
	// Subdirectory to scope file listings to.
	path?: string;
}
// Pipeline source on the local filesystem.
interface LocalSource {
	type: "local";
	// Caller-chosen identifier for this source.
	id: string;
	// Directory to search from.
	cwd: string;
}
// Discriminated union over all supported source kinds (discriminant: `type`).
type PipelineSource = LocalSource | GitHubSource | GitLabSource;
//#endregion
export { LocalSource as a, LoadedPipelineFile as i, GitLabSource as n, PipelineLoadError as o, LoadPipelinesResult as r, PipelineSource as s, GitHubSource as t };
|
package/package.json
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@ucdjs/pipelines-loader",
|
|
3
|
+
"version": "0.0.1-beta.1",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"author": {
|
|
6
|
+
"name": "Lucas Nørgård",
|
|
7
|
+
"email": "lucasnrgaard@gmail.com",
|
|
8
|
+
"url": "https://luxass.dev"
|
|
9
|
+
},
|
|
10
|
+
"license": "MIT",
|
|
11
|
+
"homepage": "https://github.com/ucdjs/ucd",
|
|
12
|
+
"repository": {
|
|
13
|
+
"type": "git",
|
|
14
|
+
"url": "git+https://github.com/ucdjs/ucd.git",
|
|
15
|
+
"directory": "packages/pipelines/pipeline-loader"
|
|
16
|
+
},
|
|
17
|
+
"bugs": {
|
|
18
|
+
"url": "https://github.com/ucdjs/ucd/issues"
|
|
19
|
+
},
|
|
20
|
+
"exports": {
|
|
21
|
+
".": "./dist/index.mjs",
|
|
22
|
+
"./insecure": "./dist/insecure.mjs",
|
|
23
|
+
"./remote": "./dist/remote.mjs",
|
|
24
|
+
"./package.json": "./package.json"
|
|
25
|
+
},
|
|
26
|
+
"types": "./dist/index.d.mts",
|
|
27
|
+
"files": [
|
|
28
|
+
"dist"
|
|
29
|
+
],
|
|
30
|
+
"engines": {
|
|
31
|
+
"node": ">=22.18"
|
|
32
|
+
},
|
|
33
|
+
"dependencies": {
|
|
34
|
+
"oxc-parser": "0.112.0",
|
|
35
|
+
"oxc-transform": "0.112.0",
|
|
36
|
+
"picomatch": "4.0.3",
|
|
37
|
+
"rolldown": "1.0.0-rc.4",
|
|
38
|
+
"tinyglobby": "0.2.15",
|
|
39
|
+
"@ucdjs/pipelines-core": "0.0.1-beta.1"
|
|
40
|
+
},
|
|
41
|
+
"devDependencies": {
|
|
42
|
+
"@luxass/eslint-config": "7.2.0",
|
|
43
|
+
"@types/picomatch": "4.0.2",
|
|
44
|
+
"eslint": "10.0.0",
|
|
45
|
+
"publint": "0.3.17",
|
|
46
|
+
"tsdown": "0.20.3",
|
|
47
|
+
"typescript": "5.9.3",
|
|
48
|
+
"vitest-testdirs": "4.4.2",
|
|
49
|
+
"@ucdjs-tooling/tsconfig": "1.0.0",
|
|
50
|
+
"@ucdjs-tooling/tsdown-config": "1.0.0"
|
|
51
|
+
},
|
|
52
|
+
"publishConfig": {
|
|
53
|
+
"access": "public"
|
|
54
|
+
},
|
|
55
|
+
"scripts": {
|
|
56
|
+
"build": "tsdown --tsconfig=./tsconfig.build.json",
|
|
57
|
+
"dev": "tsdown --watch",
|
|
58
|
+
"clean": "git clean -xdf dist node_modules",
|
|
59
|
+
"lint": "eslint .",
|
|
60
|
+
"typecheck": "tsc --noEmit -p tsconfig.build.json"
|
|
61
|
+
}
|
|
62
|
+
}
|