@ucdjs/pipelines-loader 0.0.1-beta.5 → 0.0.1-beta.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/gitlab-BeZb8tDi.mjs +108 -0
- package/dist/index.d.mts +46 -4
- package/dist/index.mjs +110 -10
- package/dist/internal.d.mts +31 -0
- package/dist/internal.mjs +3 -0
- package/dist/{types-Br8gGmsN.d.mts → types-C7EhTWo6.d.mts} +8 -1
- package/package.json +9 -12
- package/dist/bundle-dlu7M3TU.mjs +0 -320
- package/dist/chunk-DQk6qfdC.mjs +0 -18
- package/dist/gitlab-C8zDC1_j.d.mts +0 -32
- package/dist/insecure.d.mts +0 -10
- package/dist/insecure.mjs +0 -30
- package/dist/remote.d.mts +0 -16
- package/dist/remote.mjs +0 -77
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { mkdir, writeFile } from "node:fs/promises";
|
|
3
|
+
import { getRepositoryCacheDir } from "@ucdjs-internal/shared/config";
|
|
4
|
+
import { parseTarGzip } from "nanotar";
|
|
5
|
+
|
|
6
|
+
//#region src/cache/github.ts
|
|
7
|
+
const GITHUB_API_BASE = "https://api.github.com";
|
|
8
|
+
const GITHUB_ACCEPT_HEADER = "application/vnd.github.v3+json";
|
|
9
|
+
/**
|
|
10
|
+
* Resolve a ref (branch/tag) to a commit SHA
|
|
11
|
+
*/
|
|
12
|
+
/**
 * Resolve a GitHub ref (branch, tag, or "HEAD") to a commit SHA.
 *
 * @param {{ owner: string, repo: string, ref?: string }} repoRef - repository coordinates; `ref` defaults to "HEAD"
 * @param {{ customFetch?: typeof fetch }} [options] - optional fetch override (e.g. for testing)
 * @returns {Promise<string>} the commit SHA reported by the GitHub commits API
 * @throws {Error} when the GitHub API responds with a non-2xx status
 */
async function resolveGitHubRef(repoRef, options = {}) {
  const { owner, repo, ref = "HEAD" } = repoRef;
  const { customFetch: doFetch = fetch } = options;
  const endpoint = `${GITHUB_API_BASE}/repos/${owner}/${repo}/commits/${ref}`;
  const response = await doFetch(endpoint, { headers: { Accept: GITHUB_ACCEPT_HEADER } });
  if (!response.ok) {
    throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
  }
  const payload = await response.json();
  return payload.sha;
}
|
|
19
|
+
/**
|
|
20
|
+
* Download and extract a GitHub repository archive
|
|
21
|
+
*/
|
|
22
|
+
/**
 * Download a GitHub repository tarball at a specific commit and extract it
 * into the shared repository cache directory.
 *
 * The ref is first resolved to a commit SHA so the cache key is stable; when
 * a cache directory for that SHA already exists the download is skipped.
 *
 * @param {{ owner: string, repo: string, ref?: string }} repoRef - repository coordinates; `ref` defaults to "HEAD"
 * @param {{ customFetch?: typeof fetch }} [options] - optional fetch override (e.g. for testing)
 * @returns {Promise<string>} path of the populated cache directory
 * @throws {Error} on API/download failure, an empty archive, or an unsafe archive entry path
 */
async function downloadGitHubRepo(repoRef, options = {}) {
  const { owner, repo } = repoRef;
  const { customFetch = fetch } = options;
  const commitSha = await resolveGitHubRef(repoRef, options);
  const cacheDir = getRepositoryCacheDir("github", owner, repo, commitSha);
  try {
    // Cache hit: the SHA-keyed directory already exists, so reuse it.
    // A failing existsSync check deliberately falls through to a re-download.
    if ((await import("node:fs")).existsSync(cacheDir)) return cacheDir;
  } catch {}
  // fix: reuse the module-level GITHUB_ACCEPT_HEADER constant instead of a duplicated literal
  const response = await customFetch(`${GITHUB_API_BASE}/repos/${owner}/${repo}/tarball/${commitSha}`, { headers: { Accept: GITHUB_ACCEPT_HEADER } });
  if (!response.ok) throw new Error(`Failed to download GitHub archive: ${response.status} ${response.statusText}`);
  await mkdir(cacheDir, { recursive: true });
  const files = await parseTarGzip(await response.arrayBuffer());
  // GitHub tarballs nest all entries under a single "<owner>-<repo>-<sha>/" root folder.
  const rootPrefix = files[0]?.name.split("/")[0];
  if (!rootPrefix) throw new Error("Invalid archive: no files found");
  // fix: hoist loop-invariant values out of the per-entry loop
  const resolvedCacheDir = path.resolve(cacheDir);
  const upSegment = `..${path.sep}`;
  for (const file of files) {
    if (file.type === "directory" || !file.data) continue;
    // Strip the synthetic root folder to get the in-repo relative path.
    const relativePath = file.name.slice(rootPrefix.length + 1);
    if (!relativePath) continue;
    let safeRelativePath = path.normalize(relativePath);
    safeRelativePath = safeRelativePath.replace(/^([/\\])+/, "");
    if (!safeRelativePath) continue;
    // Reject any ".." traversal segments before touching the filesystem.
    if (safeRelativePath === ".." || safeRelativePath.startsWith(upSegment) || safeRelativePath.includes(`${path.sep}..${path.sep}`) || safeRelativePath.endsWith(`${path.sep}..`)) throw new Error(`Invalid archive entry path (path traversal detected): ${file.name}`);
    const outputPath = path.join(cacheDir, safeRelativePath);
    const resolvedOutputPath = path.resolve(outputPath);
    // Defense in depth: the fully resolved path must stay inside the cache dir.
    if (resolvedOutputPath !== resolvedCacheDir && !resolvedOutputPath.startsWith(resolvedCacheDir + path.sep)) throw new Error(`Invalid archive entry path (outside cache dir): ${file.name}`);
    await mkdir(path.dirname(resolvedOutputPath), { recursive: true });
    await writeFile(resolvedOutputPath, file.data);
  }
  return cacheDir;
}
|
|
54
|
+
|
|
55
|
+
//#endregion
|
|
56
|
+
//#region src/cache/gitlab.ts
|
|
57
|
+
const GITLAB_API_BASE = "https://gitlab.com/api/v4";
|
|
58
|
+
/**
 * Build the URL-encoded "owner/repo" project identifier expected by the
 * GitLab REST API (the separating slash becomes "%2F").
 *
 * @param {string} owner - namespace/group name
 * @param {string} repo - project name
 * @returns {string} the encoded project path
 */
function encodeProjectPath(owner, repo) {
  const projectPath = [owner, repo].join("/");
  return encodeURIComponent(projectPath);
}
|
|
61
|
+
/**
|
|
62
|
+
* Resolve a ref (branch/tag) to a commit SHA
|
|
63
|
+
*/
|
|
64
|
+
/**
 * Resolve a GitLab ref (branch, tag, or "HEAD") to a commit SHA.
 *
 * @param {{ owner: string, repo: string, ref?: string }} repoRef - repository coordinates; `ref` defaults to "HEAD"
 * @param {{ customFetch?: typeof fetch }} [options] - optional fetch override (e.g. for testing)
 * @returns {Promise<string>} the commit SHA (`id`) reported by the GitLab commits API
 * @throws {Error} when the GitLab API responds with a non-2xx status
 */
async function resolveGitLabRef(repoRef, options = {}) {
  const { owner, repo, ref = "HEAD" } = repoRef;
  const { customFetch = fetch } = options;
  // fix: the original `ref === "HEAD" ? "HEAD" : ref` ternary always evaluated to `ref`
  // NOTE(review): refs containing "/" (e.g. "feature/x") are interpolated un-encoded
  // here; GitLab generally expects URL-encoded refs — confirm before relying on such refs.
  const response = await customFetch(`${GITLAB_API_BASE}/projects/${encodeProjectPath(owner, repo)}/repository/commits/${ref}`);
  if (!response.ok) throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
  return (await response.json()).id;
}
|
|
71
|
+
/**
|
|
72
|
+
* Download and extract a GitLab repository archive
|
|
73
|
+
*/
|
|
74
|
+
/**
 * Download a GitLab repository archive at a specific commit and extract it
 * into the shared repository cache directory.
 *
 * The ref is first resolved to a commit SHA so the cache key is stable; when
 * a cache directory for that SHA already exists the download is skipped.
 *
 * @param {{ owner: string, repo: string, ref?: string }} repoRef - repository coordinates; `ref` defaults to "HEAD"
 * @param {{ customFetch?: typeof fetch }} [options] - optional fetch override (e.g. for testing)
 * @returns {Promise<string>} path of the populated cache directory
 * @throws {Error} on API/download failure, an empty archive, or an unsafe archive entry path
 */
async function downloadGitLabRepo(repoRef, options = {}) {
  const { owner, repo } = repoRef;
  const { customFetch = fetch } = options;
  const commitSha = await resolveGitLabRef(repoRef, options);
  const cacheDir = getRepositoryCacheDir("gitlab", owner, repo, commitSha);
  try {
    // Cache hit: the SHA-keyed directory already exists, so reuse it.
    // A failing existsSync check deliberately falls through to a re-download.
    if ((await import("node:fs")).existsSync(cacheDir)) return cacheDir;
  } catch {}
  const response = await customFetch(`${GITLAB_API_BASE}/projects/${encodeProjectPath(owner, repo)}/repository/archive.tar.gz?sha=${commitSha}`);
  if (!response.ok) throw new Error(`Failed to download GitLab archive: ${response.status} ${response.statusText}`);
  await mkdir(cacheDir, { recursive: true });
  const files = await parseTarGzip(await response.arrayBuffer());
  // GitLab archives nest all entries under a single "<repo>-<sha>/" root folder.
  const rootPrefix = files[0]?.name.split("/")[0];
  if (!rootPrefix) throw new Error("Invalid archive: no files found");
  // fix: hoist loop-invariant values out of the per-entry loop
  const resolvedCacheDir = path.resolve(cacheDir);
  const upSegment = `..${path.sep}`;
  for (const file of files) {
    if (file.type === "directory" || !file.data) continue;
    // Strip the synthetic root folder to get the in-repo relative path.
    const relativePath = file.name.slice(rootPrefix.length + 1);
    if (!relativePath) continue;
    let safeRelativePath = path.normalize(relativePath);
    safeRelativePath = safeRelativePath.replace(/^([/\\])+/, "");
    if (!safeRelativePath) continue;
    // Reject any ".." traversal segments before touching the filesystem.
    if (safeRelativePath === ".." || safeRelativePath.startsWith(upSegment) || safeRelativePath.includes(`${path.sep}..${path.sep}`) || safeRelativePath.endsWith(`${path.sep}..`)) throw new Error(`Invalid archive entry path (path traversal detected): ${file.name}`);
    const outputPath = path.join(cacheDir, safeRelativePath);
    const resolvedOutputPath = path.resolve(outputPath);
    // Defense in depth: the fully resolved path must stay inside the cache dir.
    if (resolvedOutputPath !== resolvedCacheDir && !resolvedOutputPath.startsWith(resolvedCacheDir + path.sep)) throw new Error(`Invalid archive entry path (outside cache dir): ${file.name}`);
    await mkdir(path.dirname(resolvedOutputPath), { recursive: true });
    await writeFile(resolvedOutputPath, file.data);
  }
  return cacheDir;
}
|
|
106
|
+
|
|
107
|
+
//#endregion
|
|
108
|
+
export { downloadGitHubRepo as n, downloadGitLabRepo as t };
|
package/dist/index.d.mts
CHANGED
|
@@ -1,7 +1,30 @@
|
|
|
1
|
-
import { a as LocalSource, i as LoadedPipelineFile, n as GitLabSource, o as PipelineLoadError, r as LoadPipelinesResult, s as PipelineSource, t as GitHubSource } from "./types-
|
|
2
|
-
import { FindRemotePipelineFilesOptions, LoadRemotePipelinesOptions, findRemotePipelineFiles, loadRemotePipelines } from "./remote.mjs";
|
|
1
|
+
import { a as LocalSource, c as RemoteFileList, i as LoadedPipelineFile, l as RemoteRequestOptions, n as GitLabSource, o as PipelineLoadError, r as LoadPipelinesResult, s as PipelineSource, t as GitHubSource } from "./types-C7EhTWo6.mjs";
|
|
3
2
|
|
|
4
3
|
//#region src/loader.d.ts
|
|
4
|
+
type FindPipelineSource = {
|
|
5
|
+
type: "local";
|
|
6
|
+
cwd: string;
|
|
7
|
+
} | {
|
|
8
|
+
type: "github";
|
|
9
|
+
owner: string;
|
|
10
|
+
repo: string;
|
|
11
|
+
ref?: string;
|
|
12
|
+
path?: string;
|
|
13
|
+
} | {
|
|
14
|
+
type: "gitlab";
|
|
15
|
+
owner: string;
|
|
16
|
+
repo: string;
|
|
17
|
+
ref?: string;
|
|
18
|
+
path?: string;
|
|
19
|
+
};
|
|
20
|
+
/**
|
|
21
|
+
* Load a pipeline file from a local path or remote URL.
|
|
22
|
+
*
|
|
23
|
+
* Supports:
|
|
24
|
+
* - Local file paths
|
|
25
|
+
* - github://owner/repo?ref=branch&path=file.ts
|
|
26
|
+
* - gitlab://owner/repo?ref=branch&path=file.ts
|
|
27
|
+
*/
|
|
5
28
|
declare function loadPipelineFile(filePath: string): Promise<LoadedPipelineFile>;
|
|
6
29
|
interface LoadPipelinesOptions {
|
|
7
30
|
throwOnError?: boolean;
|
|
@@ -9,8 +32,27 @@ interface LoadPipelinesOptions {
|
|
|
9
32
|
declare function loadPipelinesFromPaths(filePaths: string[], options?: LoadPipelinesOptions): Promise<LoadPipelinesResult>;
|
|
10
33
|
interface FindPipelineFilesOptions {
|
|
11
34
|
patterns?: string | string[];
|
|
12
|
-
|
|
35
|
+
source?: FindPipelineSource;
|
|
13
36
|
}
|
|
37
|
+
/**
|
|
38
|
+
* Find pipeline files in a local directory or remote repository.
|
|
39
|
+
*
|
|
40
|
+
* Examples:
|
|
41
|
+
* ```typescript
|
|
42
|
+
* // Local directory
|
|
43
|
+
* findPipelineFiles({ source: { type: "local", cwd: "./pipelines" } })
|
|
44
|
+
*
|
|
45
|
+
* // GitHub repository
|
|
46
|
+
* findPipelineFiles({
|
|
47
|
+
* source: { type: "github", owner: "ucdjs", repo: "demo-pipelines", ref: "main" }
|
|
48
|
+
* })
|
|
49
|
+
*
|
|
50
|
+
* // GitLab repository
|
|
51
|
+
* findPipelineFiles({
|
|
52
|
+
* source: { type: "gitlab", owner: "mygroup", repo: "demo", ref: "main" }
|
|
53
|
+
* })
|
|
54
|
+
* ```
|
|
55
|
+
*/
|
|
14
56
|
declare function findPipelineFiles(options?: FindPipelineFilesOptions): Promise<string[]>;
|
|
15
57
|
//#endregion
|
|
16
|
-
export { type FindPipelineFilesOptions, type
|
|
58
|
+
export { type FindPipelineFilesOptions, type FindPipelineSource, type GitHubSource, type GitLabSource, type LoadPipelinesOptions, type LoadPipelinesResult, type LoadedPipelineFile, type LocalSource, type PipelineLoadError, type PipelineSource, type RemoteFileList, type RemoteRequestOptions, findPipelineFiles, loadPipelineFile, loadPipelinesFromPaths };
|
package/dist/index.mjs
CHANGED
|
@@ -1,17 +1,84 @@
|
|
|
1
|
-
import "./
|
|
2
|
-
import
|
|
3
|
-
import { pathToFileURL } from "node:url";
|
|
1
|
+
import { n as downloadGitHubRepo, t as downloadGitLabRepo } from "./gitlab-BeZb8tDi.mjs";
|
|
2
|
+
import path from "node:path";
|
|
4
3
|
import { isPipelineDefinition } from "@ucdjs/pipelines-core";
|
|
5
4
|
import { glob } from "tinyglobby";
|
|
5
|
+
import { build } from "rolldown";
|
|
6
6
|
|
|
7
|
+
//#region src/bundle.ts
|
|
8
|
+
/**
 * Bundle a module entry point with rolldown into a single in-memory ESM chunk
 * and return its generated source code.
 *
 * @param {string} entryPath - path of the module to bundle
 * @returns {Promise<string>} the bundled JavaScript source of the first emitted chunk
 * @throws {Error} when the build produces no chunk output
 */
async function bundleModule(entryPath) {
  const result = await build({
    input: entryPath,
    write: false,
    output: { format: "esm" },
  });
  const outputs = Array.isArray(result) ? result : [result];
  for (const output of outputs) {
    for (const item of output.output ?? []) {
      // Return the first real code chunk; assets are skipped.
      if (item.type === "chunk") return item.code;
    }
  }
  throw new Error("Failed to bundle module");
}
|
|
18
|
+
/**
 * Encode ESM source code as a base64 `data:` URL so it can be passed
 * directly to a dynamic `import()`.
 *
 * @param {string} code - JavaScript source text
 * @returns {string} a `data:text/javascript;base64,...` URL
 */
function createDataUrl(code) {
  const encoded = Buffer.from(code, "utf-8").toString("base64");
  return ["data:text/javascript;base64,", encoded].join("");
}
|
|
21
|
+
|
|
22
|
+
//#endregion
|
|
7
23
|
//#region src/loader.ts
|
|
24
|
+
/**
|
|
25
|
+
* Parse a github:// or gitlab:// URL
|
|
26
|
+
*/
|
|
27
|
+
/**
 * Parse a `github://owner/repo?ref=branch&path=file.ts` or
 * `gitlab://owner/repo?ref=branch&path=file.ts` URL.
 *
 * @param {string} url - candidate repository URL
 * @returns {{ type: "github" | "gitlab", owner: string, repo: string, ref: string, filePath: string } | null}
 *   parsed parts, or `null` when the URL matches neither scheme (both `ref`
 *   and `path` query parameters are required, in that order)
 */
function parseRepoUrl(url) {
  const schemes = [
    { prefix: "github://", type: "github", pattern: /^github:\/\/([^/]+)\/([^?]+)\?ref=([^&]+)&path=(.+)$/ },
    { prefix: "gitlab://", type: "gitlab", pattern: /^gitlab:\/\/([^/]+)\/([^?]+)\?ref=([^&]+)&path=(.+)$/ },
  ];
  for (const { prefix, type, pattern } of schemes) {
    if (!url.startsWith(prefix)) continue;
    const match = url.match(pattern);
    if (match && match[1] && match[2] && match[3] && match[4]) {
      const [, owner, repo, ref, filePath] = match;
      return { type, owner, repo, ref, filePath };
    }
  }
  return null;
}
|
|
50
|
+
/**
|
|
51
|
+
* Load a pipeline file from a local path or remote URL.
|
|
52
|
+
*
|
|
53
|
+
* Supports:
|
|
54
|
+
* - Local file paths
|
|
55
|
+
* - github://owner/repo?ref=branch&path=file.ts
|
|
56
|
+
* - gitlab://owner/repo?ref=branch&path=file.ts
|
|
57
|
+
*/
|
|
8
58
|
async function loadPipelineFile(filePath) {
|
|
9
|
-
|
|
59
|
+
let resolvedPath;
|
|
60
|
+
const repoInfo = parseRepoUrl(filePath);
|
|
61
|
+
if (repoInfo) {
|
|
62
|
+
const cacheDir = repoInfo.type === "github" ? await downloadGitHubRepo({
|
|
63
|
+
owner: repoInfo.owner,
|
|
64
|
+
repo: repoInfo.repo,
|
|
65
|
+
ref: repoInfo.ref
|
|
66
|
+
}) : await downloadGitLabRepo({
|
|
67
|
+
owner: repoInfo.owner,
|
|
68
|
+
repo: repoInfo.repo,
|
|
69
|
+
ref: repoInfo.ref
|
|
70
|
+
});
|
|
71
|
+
resolvedPath = path.join(cacheDir, repoInfo.filePath);
|
|
72
|
+
} else resolvedPath = path.resolve(filePath);
|
|
73
|
+
const module = await import(createDataUrl(await bundleModule(resolvedPath)));
|
|
10
74
|
const pipelines = [];
|
|
11
75
|
const exportNames = [];
|
|
12
|
-
for (const [name, value] of Object.entries(module))
|
|
13
|
-
|
|
14
|
-
|
|
76
|
+
for (const [name, value] of Object.entries(module)) {
|
|
77
|
+
if (name === "default") continue;
|
|
78
|
+
if (isPipelineDefinition(value)) {
|
|
79
|
+
pipelines.push(value);
|
|
80
|
+
exportNames.push(name);
|
|
81
|
+
}
|
|
15
82
|
}
|
|
16
83
|
return {
|
|
17
84
|
filePath,
|
|
@@ -53,12 +120,45 @@ async function loadPipelinesFromPaths(filePaths, options = {}) {
|
|
|
53
120
|
errors
|
|
54
121
|
};
|
|
55
122
|
}
|
|
123
|
+
/**
|
|
124
|
+
* Find pipeline files in a local directory or remote repository.
|
|
125
|
+
*
|
|
126
|
+
* Examples:
|
|
127
|
+
* ```typescript
|
|
128
|
+
* // Local directory
|
|
129
|
+
* findPipelineFiles({ source: { type: "local", cwd: "./pipelines" } })
|
|
130
|
+
*
|
|
131
|
+
* // GitHub repository
|
|
132
|
+
* findPipelineFiles({
|
|
133
|
+
* source: { type: "github", owner: "ucdjs", repo: "demo-pipelines", ref: "main" }
|
|
134
|
+
* })
|
|
135
|
+
*
|
|
136
|
+
* // GitLab repository
|
|
137
|
+
* findPipelineFiles({
|
|
138
|
+
* source: { type: "gitlab", owner: "mygroup", repo: "demo", ref: "main" }
|
|
139
|
+
* })
|
|
140
|
+
* ```
|
|
141
|
+
*/
|
|
56
142
|
async function findPipelineFiles(options = {}) {
|
|
57
143
|
let patterns = ["**/*.ucd-pipeline.ts"];
|
|
58
|
-
const resolvedCwd = options.cwd ?? process.cwd();
|
|
59
144
|
if (options.patterns) patterns = Array.isArray(options.patterns) ? options.patterns : [options.patterns];
|
|
145
|
+
let cwd;
|
|
146
|
+
if (options.source) {
|
|
147
|
+
const source = options.source;
|
|
148
|
+
if (source.type === "local") cwd = source.cwd;
|
|
149
|
+
else if (source.type === "github") cwd = await downloadGitHubRepo({
|
|
150
|
+
owner: source.owner,
|
|
151
|
+
repo: source.repo,
|
|
152
|
+
ref: source.ref
|
|
153
|
+
});
|
|
154
|
+
else cwd = await downloadGitLabRepo({
|
|
155
|
+
owner: source.owner,
|
|
156
|
+
repo: source.repo,
|
|
157
|
+
ref: source.ref
|
|
158
|
+
});
|
|
159
|
+
} else cwd = process.cwd();
|
|
60
160
|
return glob(patterns, {
|
|
61
|
-
cwd
|
|
161
|
+
cwd,
|
|
62
162
|
ignore: [
|
|
63
163
|
"node_modules/**",
|
|
64
164
|
"**/node_modules/**",
|
|
@@ -72,4 +172,4 @@ async function findPipelineFiles(options = {}) {
|
|
|
72
172
|
}
|
|
73
173
|
|
|
74
174
|
//#endregion
|
|
75
|
-
export { findPipelineFiles,
|
|
175
|
+
export { findPipelineFiles, loadPipelineFile, loadPipelinesFromPaths };
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { l as RemoteRequestOptions } from "./types-C7EhTWo6.mjs";
|
|
2
|
+
|
|
3
|
+
//#region src/cache/github.d.ts
|
|
4
|
+
interface GitHubRepoRef {
|
|
5
|
+
owner: string;
|
|
6
|
+
repo: string;
|
|
7
|
+
ref?: string;
|
|
8
|
+
}
|
|
9
|
+
/**
|
|
10
|
+
* Resolve a ref (branch/tag) to a commit SHA
|
|
11
|
+
*/
|
|
12
|
+
/**
|
|
13
|
+
* Download and extract a GitHub repository archive
|
|
14
|
+
*/
|
|
15
|
+
declare function downloadGitHubRepo(repoRef: GitHubRepoRef, options?: RemoteRequestOptions): Promise<string>;
|
|
16
|
+
//#endregion
|
|
17
|
+
//#region src/cache/gitlab.d.ts
|
|
18
|
+
interface GitLabRepoRef {
|
|
19
|
+
owner: string;
|
|
20
|
+
repo: string;
|
|
21
|
+
ref?: string;
|
|
22
|
+
}
|
|
23
|
+
/**
|
|
24
|
+
* Resolve a ref (branch/tag) to a commit SHA
|
|
25
|
+
*/
|
|
26
|
+
/**
|
|
27
|
+
* Download and extract a GitLab repository archive
|
|
28
|
+
*/
|
|
29
|
+
declare function downloadGitLabRepo(repoRef: GitLabRepoRef, options?: RemoteRequestOptions): Promise<string>;
|
|
30
|
+
//#endregion
|
|
31
|
+
export { downloadGitHubRepo, downloadGitLabRepo };
|
|
@@ -37,5 +37,12 @@ interface LocalSource {
|
|
|
37
37
|
cwd: string;
|
|
38
38
|
}
|
|
39
39
|
type PipelineSource = LocalSource | GitHubSource | GitLabSource;
|
|
40
|
+
interface RemoteFileList {
|
|
41
|
+
files: string[];
|
|
42
|
+
truncated: boolean;
|
|
43
|
+
}
|
|
44
|
+
interface RemoteRequestOptions {
|
|
45
|
+
customFetch?: typeof fetch;
|
|
46
|
+
}
|
|
40
47
|
//#endregion
|
|
41
|
-
export { LocalSource as a, LoadedPipelineFile as i, GitLabSource as n, PipelineLoadError as o, LoadPipelinesResult as r, PipelineSource as s, GitHubSource as t };
|
|
48
|
+
export { LocalSource as a, RemoteFileList as c, LoadedPipelineFile as i, RemoteRequestOptions as l, GitLabSource as n, PipelineLoadError as o, LoadPipelinesResult as r, PipelineSource as s, GitHubSource as t };
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@ucdjs/pipelines-loader",
|
|
3
|
-
"version": "0.0.1-beta.
|
|
3
|
+
"version": "0.0.1-beta.7",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"author": {
|
|
6
6
|
"name": "Lucas Nørgård",
|
|
@@ -19,8 +19,7 @@
|
|
|
19
19
|
},
|
|
20
20
|
"exports": {
|
|
21
21
|
".": "./dist/index.mjs",
|
|
22
|
-
"./
|
|
23
|
-
"./remote": "./dist/remote.mjs",
|
|
22
|
+
"./internal": "./dist/internal.mjs",
|
|
24
23
|
"./package.json": "./package.json"
|
|
25
24
|
},
|
|
26
25
|
"types": "./dist/index.d.mts",
|
|
@@ -28,20 +27,18 @@
|
|
|
28
27
|
"dist"
|
|
29
28
|
],
|
|
30
29
|
"engines": {
|
|
31
|
-
"node": ">=
|
|
30
|
+
"node": ">=24.13"
|
|
32
31
|
},
|
|
33
32
|
"dependencies": {
|
|
34
|
-
"
|
|
35
|
-
"
|
|
36
|
-
"picomatch": "4.0.3",
|
|
37
|
-
"rolldown": "1.0.0-rc.4",
|
|
33
|
+
"nanotar": "0.3.0",
|
|
34
|
+
"rolldown": "1.0.0-rc.6",
|
|
38
35
|
"tinyglobby": "0.2.15",
|
|
39
|
-
"@ucdjs/pipelines-core": "0.0.1-beta.
|
|
36
|
+
"@ucdjs/pipelines-core": "0.0.1-beta.7",
|
|
37
|
+
"@ucdjs-internal/shared": "0.1.1-beta.7"
|
|
40
38
|
},
|
|
41
39
|
"devDependencies": {
|
|
42
|
-
"@luxass/eslint-config": "7.2.
|
|
43
|
-
"
|
|
44
|
-
"eslint": "10.0.0",
|
|
40
|
+
"@luxass/eslint-config": "7.2.1",
|
|
41
|
+
"eslint": "10.0.2",
|
|
45
42
|
"publint": "0.3.17",
|
|
46
43
|
"tsdown": "0.20.3",
|
|
47
44
|
"typescript": "5.9.3",
|
package/dist/bundle-dlu7M3TU.mjs
DELETED
|
@@ -1,320 +0,0 @@
|
|
|
1
|
-
import { t as __exportAll } from "./chunk-DQk6qfdC.mjs";
|
|
2
|
-
import path from "node:path";
|
|
3
|
-
import { build } from "rolldown";
|
|
4
|
-
import { parseSync } from "oxc-parser";
|
|
5
|
-
import { readFile } from "node:fs/promises";
|
|
6
|
-
import { transform } from "oxc-transform";
|
|
7
|
-
|
|
8
|
-
//#region src/bundler/identifiers.ts
|
|
9
|
-
function isUrlLike(value) {
|
|
10
|
-
return /^[a-z][a-z+.-]*:/i.test(value);
|
|
11
|
-
}
|
|
12
|
-
function parseRemoteIdentifier(identifier) {
|
|
13
|
-
if (!identifier.startsWith("github://") && !identifier.startsWith("gitlab://")) return null;
|
|
14
|
-
const url = new URL(identifier);
|
|
15
|
-
const provider = url.protocol.replace(":", "");
|
|
16
|
-
const owner = url.hostname;
|
|
17
|
-
const repo = url.pathname.replace(/^\/+/, "");
|
|
18
|
-
if (!owner || !repo) throw new Error(`Invalid remote identifier: ${identifier}`);
|
|
19
|
-
return {
|
|
20
|
-
provider,
|
|
21
|
-
owner,
|
|
22
|
-
repo,
|
|
23
|
-
ref: url.searchParams.get("ref") ?? "HEAD",
|
|
24
|
-
path: url.searchParams.get("path") ?? ""
|
|
25
|
-
};
|
|
26
|
-
}
|
|
27
|
-
function formatRemoteIdentifier(remote) {
|
|
28
|
-
const url = new URL(`${remote.provider}://${remote.owner}/${remote.repo}`);
|
|
29
|
-
url.searchParams.set("ref", remote.ref);
|
|
30
|
-
url.searchParams.set("path", remote.path);
|
|
31
|
-
return url.toString();
|
|
32
|
-
}
|
|
33
|
-
function formatRemoteIdentifierFromParts(provider, owner, repo, ref, filePath) {
|
|
34
|
-
return formatRemoteIdentifier({
|
|
35
|
-
provider,
|
|
36
|
-
owner,
|
|
37
|
-
repo,
|
|
38
|
-
ref: ref ?? "HEAD",
|
|
39
|
-
path: filePath
|
|
40
|
-
});
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
//#endregion
|
|
44
|
-
//#region src/bundler/errors.ts
|
|
45
|
-
var RemoteNotFoundError = class extends Error {
|
|
46
|
-
name = "RemoteNotFoundError";
|
|
47
|
-
constructor(message) {
|
|
48
|
-
super(message);
|
|
49
|
-
}
|
|
50
|
-
};
|
|
51
|
-
|
|
52
|
-
//#endregion
|
|
53
|
-
//#region src/bundler/parse.ts
|
|
54
|
-
function getStaticImportSpecifiers(source, identifier) {
|
|
55
|
-
let parsed;
|
|
56
|
-
try {
|
|
57
|
-
parsed = parseSync(identifier ?? "<inline>", source, { sourceType: "module" });
|
|
58
|
-
} catch (error) {
|
|
59
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
60
|
-
throw new Error(`Failed to parse module ${identifier ?? "<inline>"}: ${message}`);
|
|
61
|
-
}
|
|
62
|
-
const specifiers = /* @__PURE__ */ new Set();
|
|
63
|
-
const visit = (value) => {
|
|
64
|
-
if (!value) return;
|
|
65
|
-
if (Array.isArray(value)) {
|
|
66
|
-
for (const item of value) visit(item);
|
|
67
|
-
return;
|
|
68
|
-
}
|
|
69
|
-
if (typeof value !== "object") return;
|
|
70
|
-
const node = value;
|
|
71
|
-
if (node.type === "ImportDeclaration") {
|
|
72
|
-
const sourceNode = node.source;
|
|
73
|
-
if (sourceNode?.value) specifiers.add(sourceNode.value);
|
|
74
|
-
} else if (node.type === "ExportAllDeclaration" || node.type === "ExportNamedDeclaration") {
|
|
75
|
-
const sourceNode = node.source;
|
|
76
|
-
if (sourceNode?.value) specifiers.add(sourceNode.value);
|
|
77
|
-
} else if (node.type === "ImportExpression") {
|
|
78
|
-
const sourceNode = node.source ?? node.argument;
|
|
79
|
-
if (sourceNode?.type === "StringLiteral" && sourceNode.value) specifiers.add(sourceNode.value);
|
|
80
|
-
}
|
|
81
|
-
for (const [key, child] of Object.entries(node)) {
|
|
82
|
-
if (key === "parent") continue;
|
|
83
|
-
visit(child);
|
|
84
|
-
}
|
|
85
|
-
};
|
|
86
|
-
visit(parsed.program);
|
|
87
|
-
return Array.from(specifiers);
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
//#endregion
|
|
91
|
-
//#region src/bundler/resolve.ts
|
|
92
|
-
const EXTENSIONS = [
|
|
93
|
-
".ts",
|
|
94
|
-
".mts",
|
|
95
|
-
".js",
|
|
96
|
-
".mjs"
|
|
97
|
-
];
|
|
98
|
-
function assertRelativeSpecifier(specifier) {
|
|
99
|
-
if (isUrlLike(specifier)) throw new Error(`Unsupported import specifier: ${specifier}`);
|
|
100
|
-
if (!specifier.startsWith("./") && !specifier.startsWith("../")) throw new Error(`Unsupported import specifier: ${specifier}`);
|
|
101
|
-
}
|
|
102
|
-
function stripTrailingSlash(value) {
|
|
103
|
-
return value.endsWith("/") ? value.slice(0, -1) : value;
|
|
104
|
-
}
|
|
105
|
-
function appendSuffix(identifier, suffix) {
|
|
106
|
-
const remote = parseRemoteIdentifier(identifier);
|
|
107
|
-
if (remote) return formatRemoteIdentifier({
|
|
108
|
-
...remote,
|
|
109
|
-
path: `${remote.path}${suffix}`
|
|
110
|
-
});
|
|
111
|
-
if (isUrlLike(identifier)) {
|
|
112
|
-
const url = new URL(identifier);
|
|
113
|
-
url.pathname = `${url.pathname}${suffix}`;
|
|
114
|
-
return url.toString();
|
|
115
|
-
}
|
|
116
|
-
return `${identifier}${suffix}`;
|
|
117
|
-
}
|
|
118
|
-
function resolveRelativeSpecifier(specifier, parentIdentifier) {
|
|
119
|
-
const remote = parseRemoteIdentifier(parentIdentifier);
|
|
120
|
-
if (remote) {
|
|
121
|
-
const parentDir = remote.path ? path.posix.dirname(remote.path) : "";
|
|
122
|
-
const cleanPath = path.posix.normalize(path.posix.join(parentDir, specifier)).replace(/^\/+/, "");
|
|
123
|
-
return formatRemoteIdentifier({
|
|
124
|
-
...remote,
|
|
125
|
-
path: cleanPath
|
|
126
|
-
});
|
|
127
|
-
}
|
|
128
|
-
if (isUrlLike(parentIdentifier)) {
|
|
129
|
-
const base = new URL(parentIdentifier);
|
|
130
|
-
return new URL(specifier, base).toString();
|
|
131
|
-
}
|
|
132
|
-
const parentDir = path.dirname(parentIdentifier);
|
|
133
|
-
return path.resolve(parentDir, specifier);
|
|
134
|
-
}
|
|
135
|
-
function getSpecifierExtension(specifier) {
|
|
136
|
-
return path.posix.extname(specifier);
|
|
137
|
-
}
|
|
138
|
-
function buildCandidateIdentifiers(specifier, parentIdentifier) {
|
|
139
|
-
const resolvedBase = resolveRelativeSpecifier(specifier, parentIdentifier);
|
|
140
|
-
if (getSpecifierExtension(specifier) !== "") return [resolvedBase];
|
|
141
|
-
const normalizedBase = stripTrailingSlash(resolvedBase);
|
|
142
|
-
const candidates = [];
|
|
143
|
-
for (const ext of EXTENSIONS) candidates.push(appendSuffix(normalizedBase, ext));
|
|
144
|
-
for (const ext of EXTENSIONS) candidates.push(appendSuffix(`${normalizedBase}/index`, ext));
|
|
145
|
-
return candidates;
|
|
146
|
-
}
|
|
147
|
-
|
|
148
|
-
//#endregion
|
|
149
|
-
//#region src/remote/github.ts
|
|
150
|
-
var github_exports = /* @__PURE__ */ __exportAll({
|
|
151
|
-
fetchFile: () => fetchFile$1,
|
|
152
|
-
listFiles: () => listFiles$1
|
|
153
|
-
});
|
|
154
|
-
const GITHUB_API_BASE = "https://api.github.com";
|
|
155
|
-
const GITHUB_ACCEPT_HEADER = "application/vnd.github.v3+json";
|
|
156
|
-
async function listFiles$1(repoRef, options = {}) {
|
|
157
|
-
const { owner, repo, ref = "HEAD", path = "" } = repoRef;
|
|
158
|
-
const { customFetch = fetch } = options;
|
|
159
|
-
const response = await customFetch(`${GITHUB_API_BASE}/repos/${owner}/${repo}/git/trees/${ref}?recursive=1`, { headers: { Accept: GITHUB_ACCEPT_HEADER } });
|
|
160
|
-
if (!response.ok) throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
|
|
161
|
-
const data = await response.json();
|
|
162
|
-
const prefix = path ? `${path}/` : "";
|
|
163
|
-
return {
|
|
164
|
-
files: data.tree.filter((item) => item.type === "blob" && item.path.startsWith(prefix)).map((item) => item.path),
|
|
165
|
-
truncated: data.truncated
|
|
166
|
-
};
|
|
167
|
-
}
|
|
168
|
-
async function fetchFile$1(repoRef, filePath, options = {}) {
|
|
169
|
-
const { owner, repo, ref = "HEAD" } = repoRef;
|
|
170
|
-
const { customFetch = fetch } = options;
|
|
171
|
-
const response = await customFetch(`${GITHUB_API_BASE}/repos/${owner}/${repo}/contents/${encodeURIComponent(filePath)}?ref=${ref}`, { headers: { Accept: GITHUB_ACCEPT_HEADER } });
|
|
172
|
-
if (!response.ok) {
|
|
173
|
-
if (response.status === 404) throw new RemoteNotFoundError(`GitHub file not found: ${filePath}`);
|
|
174
|
-
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
|
|
175
|
-
}
|
|
176
|
-
const data = await response.json();
|
|
177
|
-
if (data.encoding !== "base64") throw new Error(`Unexpected encoding: ${data.encoding}`);
|
|
178
|
-
return Buffer.from(data.content, "base64").toString("utf-8");
|
|
179
|
-
}
|
|
180
|
-
|
|
181
|
-
//#endregion
|
|
182
|
-
//#region src/remote/gitlab.ts
|
|
183
|
-
var gitlab_exports = /* @__PURE__ */ __exportAll({
|
|
184
|
-
fetchFile: () => fetchFile,
|
|
185
|
-
listFiles: () => listFiles
|
|
186
|
-
});
|
|
187
|
-
const GITLAB_API_BASE = "https://gitlab.com/api/v4";
|
|
188
|
-
function encodeProjectPath(owner, repo) {
|
|
189
|
-
return encodeURIComponent(`${owner}/${repo}`);
|
|
190
|
-
}
|
|
191
|
-
async function listFiles(repoRef, options = {}) {
|
|
192
|
-
const { owner, repo, ref, path } = repoRef;
|
|
193
|
-
const refValue = ref ?? "HEAD";
|
|
194
|
-
const pathValue = path ?? "";
|
|
195
|
-
const { customFetch = fetch } = options;
|
|
196
|
-
const projectId = encodeProjectPath(owner, repo);
|
|
197
|
-
const encodedPath = encodeURIComponent(pathValue);
|
|
198
|
-
const files = [];
|
|
199
|
-
let truncated = false;
|
|
200
|
-
async function fetchPage(page) {
|
|
201
|
-
const response = await customFetch(`${GITLAB_API_BASE}/projects/${projectId}/repository/tree?recursive=true&ref=${refValue}&path=${encodedPath}&per_page=100&page=${page}`);
|
|
202
|
-
if (!response.ok) throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
|
|
203
|
-
const data = await response.json();
|
|
204
|
-
files.push(...data.filter((item) => item.type === "blob").map((item) => item.path));
|
|
205
|
-
const nextPage = response.headers.get("x-next-page");
|
|
206
|
-
if (!nextPage) return;
|
|
207
|
-
const nextPageNumber = Number(nextPage);
|
|
208
|
-
if (!Number.isFinite(nextPageNumber) || nextPageNumber <= page) {
|
|
209
|
-
truncated = true;
|
|
210
|
-
return;
|
|
211
|
-
}
|
|
212
|
-
await fetchPage(nextPageNumber);
|
|
213
|
-
}
|
|
214
|
-
await fetchPage(1);
|
|
215
|
-
return {
|
|
216
|
-
files,
|
|
217
|
-
truncated
|
|
218
|
-
};
|
|
219
|
-
}
|
|
220
|
-
async function fetchFile(repoRef, filePath, options = {}) {
|
|
221
|
-
const { owner, repo, ref } = repoRef;
|
|
222
|
-
const refValue = ref ?? "HEAD";
|
|
223
|
-
const { customFetch = fetch } = options;
|
|
224
|
-
const response = await customFetch(`${GITLAB_API_BASE}/projects/${encodeProjectPath(owner, repo)}/repository/files/${encodeURIComponent(filePath)}/raw?ref=${refValue}`);
|
|
225
|
-
if (!response.ok) {
|
|
226
|
-
if (response.status === 404) throw new RemoteNotFoundError(`GitLab file not found: ${filePath}`);
|
|
227
|
-
throw new Error(`GitLab API error: ${response.status} ${response.statusText}`);
|
|
228
|
-
}
|
|
229
|
-
return response.text();
|
|
230
|
-
}
|
|
231
|
-
|
|
232
|
-
//#endregion
|
|
233
|
-
//#region src/bundler/source.ts
|
|
234
|
-
async function loadRemoteSource(identifier, customFetch) {
|
|
235
|
-
const remote = parseRemoteIdentifier(identifier);
|
|
236
|
-
if (!remote) {
|
|
237
|
-
if (isUrlLike(identifier)) throw new Error(`Unsupported import specifier: ${identifier}`);
|
|
238
|
-
try {
|
|
239
|
-
return await readFile(identifier, "utf-8");
|
|
240
|
-
} catch (error) {
|
|
241
|
-
if (error instanceof Error && "code" in error && error.code === "ENOENT") throw new RemoteNotFoundError(`Module not found: ${identifier}`);
|
|
242
|
-
throw error;
|
|
243
|
-
}
|
|
244
|
-
}
|
|
245
|
-
const repoRef = {
|
|
246
|
-
owner: remote.owner,
|
|
247
|
-
repo: remote.repo,
|
|
248
|
-
ref: remote.ref
|
|
249
|
-
};
|
|
250
|
-
if (remote.provider === "github") return fetchFile$1(repoRef, remote.path, { customFetch });
|
|
251
|
-
return fetchFile(repoRef, remote.path, { customFetch });
|
|
252
|
-
}
|
|
253
|
-
/**
 * Compile a module's source via `transform`, deriving a human-readable
 * filename from the identifier (the `path` query param or URL pathname
 * when the identifier is URL-shaped) so diagnostics stay legible.
 *
 * @throws {Error} when the transformer reports parse errors.
 */
async function compileModuleSource(identifier, source) {
	let filename = identifier;
	try {
		const url = new URL(identifier);
		filename = url.searchParams.get("path") ?? (url.pathname || identifier);
	} catch {
		// Identifier is not URL-shaped (e.g. a local path); keep it as-is.
	}
	const result = await transform(filename, source, { sourceType: "module" });
	if (result.errors && result.errors.length > 0) {
		const combined = result.errors.map((err) => err.message).join("\n");
		throw new Error(`Failed to parse module ${identifier}: ${combined}`);
	}
	return result.code;
}
|
|
268
|
-
|
|
269
|
-
//#endregion
|
|
270
|
-
//#region src/bundler/bundle.ts
|
|
271
|
-
/**
 * Create a bundler plugin that resolves relative specifiers against remote
 * (or local) module identifiers and serves their compiled sources, caching
 * fetched source text in memory so resolveId and load share one download.
 */
function createRemotePlugin(input) {
	const customFetch = input.customFetch ?? fetch;
	const sourceCache = /* @__PURE__ */ new Map();
	return {
		name: "pipeline-remote-loader",
		async resolveId(specifier, importer) {
			// The entry module has no importer; map it onto the root identifier.
			if (!importer) return input.identifier;
			assertRelativeSpecifier(specifier);
			for (const candidate of buildCandidateIdentifiers(specifier, importer)) {
				try {
					const source = await loadRemoteSource(candidate, customFetch);
					sourceCache.set(candidate, source);
					return candidate;
				} catch (err) {
					// A miss just means "try the next candidate"; anything else is fatal.
					if (!(err instanceof RemoteNotFoundError)) throw err;
				}
			}
			throw new Error(`Module not found: ${specifier}`);
		},
		async load(id) {
			if (id === input.identifier) return compileModuleSource(id, input.content);
			const source = sourceCache.get(id) ?? await loadRemoteSource(id, customFetch);
			// Compile first: a failed compile must not populate the cache.
			const code = await compileModuleSource(id, source);
			sourceCache.set(id, source);
			return code;
		}
	};
}
|
|
299
|
-
/**
 * Bundle a (possibly remote) module into a single ESM chunk and return its code.
 * All static imports are validated as relative specifiers up front, so we fail
 * before invoking the bundler at all.
 *
 * @throws {Error} when the build produces no chunk output.
 */
async function bundleRemoteModule(input) {
	for (const specifier of getStaticImportSpecifiers(input.content, input.identifier)) {
		assertRelativeSpecifier(specifier);
	}
	const buildResult = await build({
		input: input.identifier,
		plugins: [createRemotePlugin(input)],
		write: false,
		output: { format: "esm" }
	});
	// The build API may return one result or an array; scan for the first chunk.
	const results = Array.isArray(buildResult) ? buildResult : [buildResult];
	for (const result of results) {
		for (const item of result.output ?? []) {
			if (item.type === "chunk") return item.code;
		}
	}
	throw new Error("Failed to bundle remote module");
}
|
|
312
|
-
/**
 * Encode JavaScript source as a base64 `data:` URL suitable for dynamic import().
 *
 * @param {string} code - the module source to embed.
 * @returns {string} a `data:text/javascript;base64,...` URL.
 */
function createDataUrl(code) {
	const payload = Buffer.from(code, "utf-8").toString("base64");
	return `data:text/javascript;base64,${payload}`;
}
|
|
315
|
-
/**
 * Canonical module identifier for a local file: its absolute path,
 * so the same file always maps to one cache/identity key.
 *
 * @param {string} filePath - relative or absolute path.
 * @returns {string} the absolute path.
 */
function identifierForLocalFile(filePath) {
	const absolute = path.resolve(filePath);
	return absolute;
}
|
|
318
|
-
|
|
319
|
-
//#endregion
// NOTE: the single-letter aliases below are bundler-generated (rolldown chunk
// exports); remote.mjs and insecure.mjs re-import them and restore readable names.
export { gitlab_exports as a, github_exports as c, fetchFile as i, listFiles$1 as l, createDataUrl as n, listFiles as o, identifierForLocalFile as r, fetchFile$1 as s, bundleRemoteModule as t, formatRemoteIdentifierFromParts as u };
|
package/dist/chunk-DQk6qfdC.mjs
DELETED
|
@@ -1,18 +0,0 @@
|
|
|
1
|
-
//#region \0rolldown/runtime.js
|
|
2
|
-
/**
 * Bundler runtime helper: build a namespace object from a map of getter
 * functions. Each key of `all` becomes an enumerable live getter on the
 * returned object; unless `no_symbols` is truthy, the object is tagged
 * with Symbol.toStringTag = "Module".
 */
var __defProp = Object.defineProperty;
var __exportAll = (all, no_symbols) => {
	const target = {};
	for (const key in all) {
		__defProp(target, key, {
			get: all[key],
			enumerable: true
		});
	}
	if (!no_symbols) {
		__defProp(target, Symbol.toStringTag, { value: "Module" });
	}
	return target;
};
|
|
16
|
-
|
|
17
|
-
//#endregion
|
|
18
|
-
export { __exportAll as t };
|
|
@@ -1,32 +0,0 @@
|
|
|
1
|
-
//#region src/remote/types.d.ts
|
|
2
|
-
/** Result of enumerating files in a remote repository. */
interface RemoteFileList {
	/** Repository-relative file paths. */
	files: string[];
	/** Whether the provider reported the listing as truncated (incomplete). */
	truncated: boolean;
}
/** Options shared by all remote request helpers. */
interface RemoteRequestOptions {
	/** Fetch implementation override (testing, auth, proxies); defaults to global fetch. */
	customFetch?: typeof fetch;
}
/** GitHub helpers, re-exported under their readable (un-mangled) names. */
declare namespace github_d_exports {
	export { fetchFile$1 as fetchFile, listFiles$1 as listFiles };
}
/** Coordinates identifying a GitHub repository (optionally narrowed to a ref/path). */
interface GitHubRepoRef {
	owner: string;
	repo: string;
	/** Branch, tag, or commit; the implementations default this to "HEAD". */
	ref?: string;
	/** Optional path within the repository. */
	path?: string;
}
declare function listFiles$1(repoRef: GitHubRepoRef, options?: RemoteRequestOptions): Promise<RemoteFileList>;
declare function fetchFile$1(repoRef: GitHubRepoRef, filePath: string, options?: RemoteRequestOptions): Promise<string>;
/** GitLab helpers, re-exported under their readable names. */
declare namespace gitlab_d_exports {
	export { GitLabRepoRef, fetchFile, listFiles };
}
/** Coordinates identifying a GitLab repository (optionally narrowed to a ref/path). */
interface GitLabRepoRef {
	owner: string;
	repo: string;
	/** Branch, tag, or commit; the implementations default this to "HEAD". */
	ref?: string;
	/** Optional path within the repository. */
	path?: string;
}
declare function listFiles(repoRef: GitLabRepoRef, options?: RemoteRequestOptions): Promise<RemoteFileList>;
declare function fetchFile(repoRef: GitLabRepoRef, filePath: string, options?: RemoteRequestOptions): Promise<string>;
//#endregion
export { github_d_exports as n, RemoteFileList as r, gitlab_d_exports as t };
|
package/dist/insecure.d.mts
DELETED
|
@@ -1,10 +0,0 @@
|
|
|
1
|
-
import { i as LoadedPipelineFile } from "./types-Br8gGmsN.mjs";
|
|
2
|
-
|
|
3
|
-
//#region src/insecure.d.ts
|
|
4
|
-
/** Options for {@link loadPipelineFromContent}. */
interface LoadPipelineFromContentOptions {
	/**
	 * Module identifier used to resolve relative imports inside `content`;
	 * defaults to the resolved absolute local path of the filename argument.
	 */
	identifier?: string;
	/** Fetch implementation override for remote imports; defaults to global fetch. */
	customFetch?: typeof fetch;
}
declare function loadPipelineFromContent(content: string, filename: string, options?: LoadPipelineFromContentOptions): Promise<LoadedPipelineFile>;
//#endregion
export { LoadPipelineFromContentOptions, loadPipelineFromContent };
|
package/dist/insecure.mjs
DELETED
|
@@ -1,30 +0,0 @@
|
|
|
1
|
-
import { n as createDataUrl, r as identifierForLocalFile, t as bundleRemoteModule } from "./bundle-dlu7M3TU.mjs";
|
|
2
|
-
import { isPipelineDefinition } from "@ucdjs/pipelines-core";
|
|
3
|
-
import path from "node:path";
|
|
4
|
-
|
|
5
|
-
//#region src/insecure.ts
|
|
6
|
-
/**
 * Bundle in-memory pipeline source, evaluate it via a base64 data: URL
 * dynamic import, and collect every named export that is a pipeline
 * definition. The `default` export is intentionally skipped.
 *
 * @returns an object with the original file path, the collected pipeline
 *          definitions, and the export names they were bound to.
 */
async function loadPipelineFromContent(content, filename, options = {}) {
	const identifier = options.identifier ?? identifierForLocalFile(path.resolve(filename));
	const bundled = await bundleRemoteModule({
		content,
		identifier,
		customFetch: options.customFetch
	});
	const loaded = await import(createDataUrl(bundled));
	const pipelines = [];
	const exportNames = [];
	for (const [name, value] of Object.entries(loaded)) {
		if (name === "default" || !isPipelineDefinition(value)) continue;
		pipelines.push(value);
		exportNames.push(name);
	}
	return {
		filePath: filename,
		pipelines,
		exportNames
	};
}
|
|
28
|
-
|
|
29
|
-
//#endregion
|
|
30
|
-
export { loadPipelineFromContent };
|
package/dist/remote.d.mts
DELETED
|
@@ -1,16 +0,0 @@
|
|
|
1
|
-
import { n as GitLabSource, r as LoadPipelinesResult, t as GitHubSource } from "./types-Br8gGmsN.mjs";
|
|
2
|
-
import { n as github_d_exports, r as RemoteFileList, t as gitlab_d_exports } from "./gitlab-C8zDC1_j.mjs";
|
|
3
|
-
|
|
4
|
-
//#region src/remote.d.ts
|
|
5
|
-
/** Options for {@link findRemotePipelineFiles}. */
interface FindRemotePipelineFilesOptions {
	/**
	 * Glob applied against repository file paths; the implementation
	 * defaults to matching any "*.ucd-pipeline.ts" file, recursively.
	 */
	pattern?: string;
	/** Fetch implementation override; defaults to global fetch. */
	customFetch?: typeof fetch;
}
declare function findRemotePipelineFiles(source: GitHubSource | GitLabSource, options?: FindRemotePipelineFilesOptions): Promise<RemoteFileList>;
/** Options for {@link loadRemotePipelines}. */
interface LoadRemotePipelinesOptions {
	/**
	 * When true, fail fast: the first file that cannot be fetched/loaded
	 * rejects the whole call. When false (default), per-file failures are
	 * collected into the result's `errors` array instead.
	 */
	throwOnError?: boolean;
	/** Fetch implementation override; defaults to global fetch. */
	customFetch?: typeof fetch;
}
declare function loadRemotePipelines(source: GitHubSource | GitLabSource, filePaths: string[], options?: LoadRemotePipelinesOptions): Promise<LoadPipelinesResult>;
//#endregion
export { FindRemotePipelineFilesOptions, LoadRemotePipelinesOptions, findRemotePipelineFiles, github_d_exports as github, gitlab_d_exports as gitlab, loadRemotePipelines };
|
package/dist/remote.mjs
DELETED
|
@@ -1,77 +0,0 @@
|
|
|
1
|
-
import { a as gitlab_exports, c as github_exports, i as fetchFile$1, l as listFiles, o as listFiles$1, s as fetchFile, u as formatRemoteIdentifierFromParts } from "./bundle-dlu7M3TU.mjs";
|
|
2
|
-
import { loadPipelineFromContent } from "./insecure.mjs";
|
|
3
|
-
import picomatch from "picomatch";
|
|
4
|
-
|
|
5
|
-
//#region src/remote.ts
|
|
6
|
-
/**
 * List pipeline files in a remote repository, filtered by a glob pattern.
 * Dispatches to the GitHub or GitLab lister based on `source.type`
 * (in this module, `listFiles` is the GitHub lister and `listFiles$1`
 * the GitLab one, due to bundler alias re-imports).
 */
async function findRemotePipelineFiles(source, options = {}) {
	const { pattern = "**/*.ucd-pipeline.ts", customFetch = fetch } = options;
	const repoRef = {
		owner: source.owner,
		repo: source.repo,
		ref: source.ref,
		path: source.path
	};
	const fileList = source.type === "github"
		? await listFiles(repoRef, { customFetch })
		: await listFiles$1(repoRef, { customFetch });
	// dot: true so dotfiles/dot-directories are not silently excluded.
	const matches = picomatch(pattern, { dot: true });
	return {
		files: fileList.files.filter((file) => matches(file)),
		truncated: fileList.truncated
	};
}
|
|
24
|
-
/**
 * Build the canonical identifier for a file inside a remote repository.
 * Pure delegation to formatRemoteIdentifierFromParts; kept as a named
 * seam for readability at call sites.
 */
function buildRemoteIdentifier(provider, owner, repo, ref, filePath) {
	const parts = [provider, owner, repo, ref, filePath];
	return formatRemoteIdentifierFromParts(...parts);
}
|
|
27
|
-
/**
 * Fetch and load pipeline definitions from a list of files in a remote
 * repository. Two modes:
 *  - throwOnError: fail-fast via Promise.all; the first failure rejects,
 *    wrapped with the offending file path and the original error as `cause`.
 *  - default: Promise.allSettled collects per-file errors into `errors`
 *    while successful files still contribute pipelines.
 *
 * NOTE: in this module the bundler aliases are inverted relative to the
 * chunk: `fetchFile` is the GitHub fetcher and `fetchFile$1` the GitLab
 * one (see the alias re-imports at the top of the file).
 */
async function loadRemotePipelines(source, filePaths, options = {}) {
	const { throwOnError = false, customFetch = fetch } = options;
	const { owner, repo, ref, type } = source;
	const repoRef = {
		owner,
		repo,
		ref
	};
	if (throwOnError) {
		// Fail-fast path: any single rejection aborts the whole Promise.all.
		const wrapped = filePaths.map((filePath) => (type === "github" ? fetchFile(repoRef, filePath, { customFetch }) : fetchFile$1(repoRef, filePath, { customFetch })).then((content) => loadPipelineFromContent(content, filePath, {
			identifier: buildRemoteIdentifier(type, owner, repo, ref, filePath),
			customFetch
		})).catch((err) => {
			// Normalize non-Error rejections, then rethrow with the file path attached.
			const error = err instanceof Error ? err : new Error(String(err));
			throw new Error(`Failed to load pipeline file: ${filePath}`, { cause: error });
		}));
		const results = await Promise.all(wrapped);
		// errors is always empty here by construction: any error already threw.
		return {
			pipelines: results.flatMap((r) => r.pipelines),
			files: results,
			errors: []
		};
	}
	// Best-effort path: settle every file independently.
	const settled = await Promise.allSettled(filePaths.map(async (filePath) => {
		return loadPipelineFromContent(type === "github" ? await fetchFile(repoRef, filePath, { customFetch }) : await fetchFile$1(repoRef, filePath, { customFetch }), filePath, {
			identifier: buildRemoteIdentifier(type, owner, repo, ref, filePath),
			customFetch
		});
	}));
	const files = [];
	const errors = [];
	// allSettled preserves input order, so index i maps back to filePaths[i].
	for (const [i, result] of settled.entries()) {
		if (result.status === "fulfilled") {
			files.push(result.value);
			continue;
		}
		const error = result.reason instanceof Error ? result.reason : new Error(String(result.reason));
		errors.push({
			filePath: filePaths[i],
			error
		});
	}
	return {
		pipelines: files.flatMap((f) => f.pipelines),
		files,
		errors
	};
}
|
|
75
|
-
|
|
76
|
-
//#endregion
|
|
77
|
-
export { findRemotePipelineFiles, github_exports as github, gitlab_exports as gitlab, loadRemotePipelines };
|