giget 1.2.3 → 1.2.5
This diff shows the published contents of the two package versions as they appear in their respective public registries and is provided for informational purposes only.
- package/README.md +14 -2
- package/dist/cli.cjs +5 -80
- package/dist/cli.d.cts +0 -1
- package/dist/cli.d.mts +0 -1
- package/dist/cli.d.ts +0 -1
- package/dist/cli.mjs +3 -78
- package/dist/index.cjs +15 -360
- package/dist/index.mjs +12 -361
- package/dist/shared/giget.BgKdRmJH.mjs +361 -0
- package/dist/shared/giget.C0XVJdqO.cjs +365 -0
- package/package.json +21 -22
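
Most of this release is a build-output reorganization: dist/index.mjs and the dist/cli.* files no longer inline the implementation, but re-export it from the new shared chunks (giget.BgKdRmJH.mjs and giget.C0XVJdqO.cjs) listed above. Judging by the re-export line in the diff below, the public API surface (downloadTemplate, registryProvider, startShell) is unchanged, so consumer code along the lines of the following sketch should behave the same on 1.2.3 and 1.2.5. This is a minimal sketch only; the template source string and target directory are illustrative and not taken from this diff.

// Minimal consumer sketch: the new index.mjs re-exports the same three
// symbols the old file exported directly, so existing imports keep resolving.
import { downloadTemplate } from "giget";

const { source, dir } = await downloadTemplate("github:unjs/template", {
  dir: "./my-project",  // extraction target (options.dir in the diffed code)
  preferOffline: true,  // reuse an already-cached tarball when one exists
});

console.log(`Extracted ${source} into ${dir}`);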
package/dist/index.mjs
CHANGED

@@ -1,361 +1,12 @@
-
-import
-import
-import
-import
-import
-import
-import
-import
-import
-import
-
-async function download(url, filePath, options = {}) {
-  const infoPath = filePath + ".json";
-  const info = JSON.parse(
-    await readFile(infoPath, "utf8").catch(() => "{}")
-  );
-  const headResponse = await sendFetch(url, {
-    method: "HEAD",
-    headers: options.headers
-  }).catch(() => void 0);
-  const etag = headResponse?.headers.get("etag");
-  if (info.etag === etag && existsSync(filePath)) {
-    return;
-  }
-  if (typeof etag === "string") {
-    info.etag = etag;
-  }
-  const response = await sendFetch(url, { headers: options.headers });
-  if (response.status >= 400) {
-    throw new Error(
-      `Failed to download ${url}: ${response.status} ${response.statusText}`
-    );
-  }
-  const stream = createWriteStream(filePath);
-  await promisify(pipeline)(response.body, stream);
-  await writeFile(infoPath, JSON.stringify(info), "utf8");
-}
-const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w./@-]+)?/;
-function parseGitURI(input) {
-  const m = input.match(inputRegex)?.groups || {};
-  return {
-    repo: m.repo,
-    subdir: m.subdir || "/",
-    ref: m.ref ? m.ref.slice(1) : "main"
-  };
-}
-function debug(...args) {
-  if (process.env.DEBUG) {
-    console.debug("[giget]", ...args);
-  }
-}
-async function sendFetch(url, options = {}) {
-  if (options.headers?.["sec-fetch-mode"]) {
-    options.mode = options.headers["sec-fetch-mode"];
-  }
-  const res = await fetch(url, {
-    ...options,
-    headers: normalizeHeaders(options.headers)
-  }).catch((error) => {
-    throw new Error(`Failed to download ${url}: ${error}`, { cause: error });
-  });
-  if (options.validateStatus && res.status >= 400) {
-    throw new Error(`Failed to fetch ${url}: ${res.status} ${res.statusText}`);
-  }
-  return res;
-}
-function cacheDirectory() {
-  return process.env.XDG_CACHE_HOME ? resolve(process.env.XDG_CACHE_HOME, "giget") : resolve(homedir(), ".cache/giget");
-}
-function normalizeHeaders(headers = {}) {
-  const normalized = {};
-  for (const [key, value] of Object.entries(headers)) {
-    if (!value) {
-      continue;
-    }
-    normalized[key.toLowerCase()] = value;
-  }
-  return normalized;
-}
-function currentShell() {
-  if (process.env.SHELL) {
-    return process.env.SHELL;
-  }
-  if (process.platform === "win32") {
-    return "cmd.exe";
-  }
-  return "/bin/bash";
-}
-function startShell(cwd) {
-  cwd = resolve(cwd);
-  const shell = currentShell();
-  console.info(
-    `(experimental) Opening shell in ${relative(process.cwd(), cwd)}...`
-  );
-  spawnSync(shell, [], {
-    cwd,
-    shell: true,
-    stdio: "inherit"
-  });
-}
-
-const http = async (input, options) => {
-  if (input.endsWith(".json")) {
-    return await _httpJSON(input, options);
-  }
-  const url = new URL(input);
-  let name = basename(url.pathname);
-  try {
-    const head = await sendFetch(url.href, {
-      method: "HEAD",
-      validateStatus: true,
-      headers: {
-        authorization: options.auth ? `Bearer ${options.auth}` : void 0
-      }
-    });
-    const _contentType = head.headers.get("content-type") || "";
-    if (_contentType.includes("application/json")) {
-      return await _httpJSON(input, options);
-    }
-    const filename = head.headers.get("content-disposition")?.match(/filename="?(.+)"?/)?.[1];
-    if (filename) {
-      name = filename.split(".")[0];
-    }
-  } catch (error) {
-    debug(`Failed to fetch HEAD for ${url.href}:`, error);
-  }
-  return {
-    name: `${name}-${url.href.slice(0, 8)}`,
-    version: "",
-    subdir: "",
-    tar: url.href,
-    defaultDir: name,
-    headers: {
-      Authorization: options.auth ? `Bearer ${options.auth}` : void 0
-    }
-  };
-};
-const _httpJSON = async (input, options) => {
-  const result = await sendFetch(input, {
-    validateStatus: true,
-    headers: {
-      authorization: options.auth ? `Bearer ${options.auth}` : void 0
-    }
-  });
-  const info = await result.json();
-  if (!info.tar || !info.name) {
-    throw new Error(
-      `Invalid template info from ${input}. name or tar fields are missing!`
-    );
-  }
-  return info;
-};
-const github = (input, options) => {
-  const parsed = parseGitURI(input);
-  const githubAPIURL = process.env.GIGET_GITHUB_URL || "https://api.github.com";
-  return {
-    name: parsed.repo.replace("/", "-"),
-    version: parsed.ref,
-    subdir: parsed.subdir,
-    headers: {
-      Authorization: options.auth ? `Bearer ${options.auth}` : void 0,
-      Accept: "application/vnd.github+json",
-      "X-GitHub-Api-Version": "2022-11-28"
-    },
-    url: `${githubAPIURL.replace("api.github.com", "github.com")}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-    tar: `${githubAPIURL}/repos/${parsed.repo}/tarball/${parsed.ref}`
-  };
-};
-const gitlab = (input, options) => {
-  const parsed = parseGitURI(input);
-  const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
-  return {
-    name: parsed.repo.replace("/", "-"),
-    version: parsed.ref,
-    subdir: parsed.subdir,
-    headers: {
-      authorization: options.auth ? `Bearer ${options.auth}` : void 0,
-      // https://gitlab.com/gitlab-org/gitlab/-/commit/50c11f278d18fe1f3fb12eb595067216bb58ade2
-      "sec-fetch-mode": "same-origin"
-    },
-    url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-    tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
-  };
-};
-const bitbucket = (input, options) => {
-  const parsed = parseGitURI(input);
-  return {
-    name: parsed.repo.replace("/", "-"),
-    version: parsed.ref,
-    subdir: parsed.subdir,
-    headers: {
-      authorization: options.auth ? `Bearer ${options.auth}` : void 0
-    },
-    url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
-    tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
-  };
-};
-const sourcehut = (input, options) => {
-  const parsed = parseGitURI(input);
-  return {
-    name: parsed.repo.replace("/", "-"),
-    version: parsed.ref,
-    subdir: parsed.subdir,
-    headers: {
-      authorization: options.auth ? `Bearer ${options.auth}` : void 0
-    },
-    url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
-    tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
-  };
-};
-const providers = {
-  http,
-  https: http,
-  github,
-  gh: github,
-  gitlab,
-  bitbucket,
-  sourcehut
-};
-
-const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
-const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options = {}) => {
-  return async (input) => {
-    const start = Date.now();
-    const registryURL = `${registryEndpoint}/${input}.json`;
-    const result = await sendFetch(registryURL, {
-      headers: {
-        authorization: options.auth ? `Bearer ${options.auth}` : void 0
-      }
-    });
-    if (result.status >= 400) {
-      throw new Error(
-        `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
-      );
-    }
-    const info = await result.json();
-    if (!info.tar || !info.name) {
-      throw new Error(
-        `Invalid template info from ${registryURL}. name or tar fields are missing!`
-      );
-    }
-    debug(
-      `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
-    );
-    return info;
-  };
-};
-
-const sourceProtoRe = /^([\w-.]+):/;
-async function downloadTemplate(input, options = {}) {
-  options = defu(
-    {
-      registry: process.env.GIGET_REGISTRY,
-      auth: process.env.GIGET_AUTH
-    },
-    options
-  );
-  const registry = options.registry === false ? void 0 : registryProvider(options.registry, { auth: options.auth });
-  let providerName = options.provider || (registry ? "registry" : "github");
-  let source = input;
-  const sourceProvierMatch = input.match(sourceProtoRe);
-  if (sourceProvierMatch) {
-    providerName = sourceProvierMatch[1];
-    source = input.slice(sourceProvierMatch[0].length);
-    if (providerName === "http" || providerName === "https") {
-      source = input;
-    }
-  }
-  const provider = options.providers?.[providerName] || providers[providerName] || registry;
-  if (!provider) {
-    throw new Error(`Unsupported provider: ${providerName}`);
-  }
-  const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
-    throw new Error(
-      `Failed to download template from ${providerName}: ${error.message}`
-    );
-  });
-  if (!template) {
-    throw new Error(`Failed to resolve template from ${providerName}`);
-  }
-  template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
-  template.defaultDir = (template.defaultDir || template.name).replace(
-    /[^\da-z-]/gi,
-    "-"
-  );
-  const temporaryDirectory = resolve(
-    cacheDirectory(),
-    providerName,
-    template.name
-  );
-  const tarPath = resolve(
-    temporaryDirectory,
-    (template.version || template.name) + ".tar.gz"
-  );
-  if (options.preferOffline && existsSync(tarPath)) {
-    options.offline = true;
-  }
-  if (!options.offline) {
-    await mkdir(dirname(tarPath), { recursive: true });
-    const s2 = Date.now();
-    await download(template.tar, tarPath, {
-      headers: {
-        Authorization: options.auth ? `Bearer ${options.auth}` : void 0,
-        ...normalizeHeaders(template.headers)
-      }
-    }).catch((error) => {
-      if (!existsSync(tarPath)) {
-        throw error;
-      }
-      debug("Download error. Using cached version:", error);
-      options.offline = true;
-    });
-    debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
-  }
-  if (!existsSync(tarPath)) {
-    throw new Error(
-      `Tarball not found: ${tarPath} (offline: ${options.offline})`
-    );
-  }
-  const cwd = resolve(options.cwd || ".");
-  const extractPath = resolve(cwd, options.dir || template.defaultDir);
-  if (options.forceClean) {
-    await rm(extractPath, { recursive: true, force: true });
-  }
-  if (!options.force && existsSync(extractPath) && readdirSync(extractPath).length > 0) {
-    throw new Error(`Destination ${extractPath} already exists.`);
-  }
-  await mkdir(extractPath, { recursive: true });
-  const s = Date.now();
-  const subdir = template.subdir?.replace(/^\//, "") || "";
-  await extract({
-    file: tarPath,
-    cwd: extractPath,
-    onentry(entry) {
-      entry.path = entry.path.split("/").splice(1).join("/");
-      if (subdir) {
-        if (entry.path.startsWith(subdir + "/")) {
-          entry.path = entry.path.slice(subdir.length);
-        } else {
-          entry.path = "";
-        }
-      }
-    }
-  });
-  debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
-  if (options.install) {
-    debug("Installing dependencies...");
-    await installDependencies({
-      cwd: extractPath,
-      silent: options.silent
-    });
-  }
-  return {
-    ...template,
-    source,
-    dir: extractPath
-  };
-}
-
-export { downloadTemplate, registryProvider, startShell };
+export { d as downloadTemplate, r as registryProvider, s as startShell } from './shared/giget.BgKdRmJH.mjs';
+import 'node:fs/promises';
+import 'node:fs';
+import 'tar';
+import 'pathe';
+import 'defu';
+import 'nypm';
+import 'node:stream';
+import 'node:child_process';
+import 'node:os';
+import 'node:util';
+import 'node-fetch-native/proxy';
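
The 361 removed lines above are not deleted from the package: the line counts of the new shared chunks (+361 for giget.BgKdRmJH.mjs, +365 for giget.C0XVJdqO.cjs) suggest the implementation moved there largely verbatim, with index.mjs and the CLI entries now importing a single copy. For readers comparing behavior across the two versions, the source-string parsing performed by that relocated code is summarized in the standalone sketch below, which copies sourceProtoRe, inputRegex, and parseGitURI from the removed lines; the example input is illustrative only.

// Standalone sketch of the input parsing shown in the removed lines above:
// an optional "provider:" prefix, then "owner/repo(/subdir)(#ref)".
const sourceProtoRe = /^([\w-.]+):/;
const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w./@-]+)?/;

function parseGitURI(input) {
  const m = input.match(inputRegex)?.groups || {};
  return {
    repo: m.repo,
    subdir: m.subdir || "/",
    ref: m.ref ? m.ref.slice(1) : "main"
  };
}

const input = "gitlab:unjs/template/packages/core#v1.0.0"; // illustrative input
const providerMatch = input.match(sourceProtoRe);           // ["gitlab:", "gitlab"]
const parsed = parseGitURI(input.slice(providerMatch[0].length));
// parsed -> { repo: "unjs/template", subdir: "/packages/core", ref: "v1.0.0" }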