giget 1.1.2 → 1.2.0

This diff compares the contents of two publicly released package versions as published to their respective registries. It is provided for informational purposes only.
@@ -1,287 +0,0 @@
- import { readFile, writeFile, rm, mkdir } from 'node:fs/promises';
- import { existsSync, createWriteStream, readdirSync } from 'node:fs';
- import { extract } from 'tar';
- import { resolve, relative, dirname } from 'pathe';
- import { defu } from 'defu';
- import { pipeline } from 'node:stream';
- import { spawnSync } from 'node:child_process';
- import { homedir } from 'node:os';
- import { promisify } from 'node:util';
- import { fetch } from 'node-fetch-native';
- import createHttpsProxyAgent from 'https-proxy-agent';
-
- async function download(url, filePath, options = {}) {
-   const infoPath = filePath + ".json";
-   const info = JSON.parse(
-     await readFile(infoPath, "utf8").catch(() => "{}")
-   );
-   const headResponse = await sendFetch(url, {
-     method: "HEAD",
-     headers: options.headers
-   }).catch(() => void 0);
-   const etag = headResponse?.headers.get("etag");
-   if (info.etag === etag && existsSync(filePath)) {
-     return;
-   }
-   info.etag = etag;
-   const response = await sendFetch(url, { headers: options.headers });
-   if (response.status >= 400) {
-     throw new Error(
-       `Failed to download ${url}: ${response.status} ${response.statusText}`
-     );
-   }
-   const stream = createWriteStream(filePath);
-   await promisify(pipeline)(response.body, stream);
-   await writeFile(infoPath, JSON.stringify(info), "utf8");
- }
- const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w.-]+)?/;
- function parseGitURI(input) {
-   const m = input.match(inputRegex)?.groups;
-   return {
-     repo: m.repo,
-     subdir: m.subdir || "/",
-     ref: m.ref ? m.ref.slice(1) : "main"
-   };
- }
- function debug(...arguments_) {
-   if (process.env.DEBUG) {
-     console.debug("[giget]", ...arguments_);
-   }
- }
- async function sendFetch(url, options = {}) {
-   if (!options.agent) {
-     const proxyEnv = process.env.HTTPS_PROXY || process.env.https_proxy || process.env.HTTP_PROXY || process.env.http_proxy;
-     if (proxyEnv) {
-       options.agent = createHttpsProxyAgent(proxyEnv);
-     }
-   }
-   if (options?.headers) {
-     options.headers = normalizeHeaders(options.headers);
-   }
-   return await fetch(url, options);
- }
- function cacheDirectory() {
-   return process.env.XDG_CACHE_HOME ? resolve(process.env.XDG_CACHE_HOME, "giget") : resolve(homedir(), ".cache/giget");
- }
- function normalizeHeaders(headers = {}) {
-   const normalized = {};
-   for (const [key, value] of Object.entries(headers)) {
-     if (!value) {
-       continue;
-     }
-     normalized[key.toLowerCase()] = value;
-   }
-   return normalized;
- }
- function currentShell() {
-   if (process.env.SHELL) {
-     return process.env.SHELL;
-   }
-   if (process.platform === "win32") {
-     return "cmd.exe";
-   }
-   return "/bin/bash";
- }
- function startShell(cwd) {
-   cwd = resolve(cwd);
-   const shell = currentShell();
-   console.info(
-     `(experimental) Opening shell in ${relative(process.cwd(), cwd)}...`
-   );
-   spawnSync(shell, [], {
-     cwd,
-     shell: true,
-     stdio: "inherit"
-   });
- }
-
- const github = (input, options) => {
-   const parsed = parseGitURI(input);
-   const github2 = process.env.GIGET_GITHUB_URL || "https://github.com";
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: {
-       authorization: options.auth ? `Bearer ${options.auth}` : void 0
-     },
-     url: `${github2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-     tar: `${github2}/${parsed.repo}/archive/${parsed.ref}.tar.gz`
-   };
- };
- const gitlab = (input, options) => {
-   const parsed = parseGitURI(input);
-   const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: {
-       authorization: options.auth ? `Bearer ${options.auth}` : void 0
-     },
-     url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-     tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
-   };
- };
- const bitbucket = (input, options) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: {
-       authorization: options.auth ? `Bearer ${options.auth}` : void 0
-     },
-     url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
-     tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
-   };
- };
- const sourcehut = (input, options) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: {
-       authorization: options.auth ? `Bearer ${options.auth}` : void 0
-     },
-     url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
-     tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
-   };
- };
- const providers = {
-   github,
-   gh: github,
-   gitlab,
-   bitbucket,
-   sourcehut
- };
-
- const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
- const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options) => {
-   options = options || {};
-   return async (input) => {
-     const start = Date.now();
-     const registryURL = `${registryEndpoint}/${input}.json`;
-     const result = await sendFetch(registryURL, {
-       headers: {
-         authorization: options.auth ? `Bearer ${options.auth}` : void 0
-       }
-     });
-     if (result.status >= 400) {
-       throw new Error(
-         `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
-       );
-     }
-     const info = await result.json();
-     if (!info.tar || !info.name) {
-       throw new Error(
-         `Invalid template info from ${registryURL}. name or tar fields are missing!`
-       );
-     }
-     debug(
-       `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
-     );
-     return info;
-   };
- };
-
- const sourceProtoRe = /^([\w-.]+):/;
- async function downloadTemplate(input, options = {}) {
-   options = defu(
-     {
-       registry: process.env.GIGET_REGISTRY,
-       auth: process.env.GIGET_AUTH
-     },
-     options
-   );
-   const registry = options.registry !== false ? registryProvider(options.registry, { auth: options.auth }) : void 0;
-   let providerName = options.provider || (registryProvider ? "registry" : "github");
-   let source = input;
-   const sourceProvierMatch = input.match(sourceProtoRe);
-   if (sourceProvierMatch) {
-     providerName = sourceProvierMatch[1];
-     source = input.slice(sourceProvierMatch[0].length);
-   }
-   const provider = options.providers?.[providerName] || providers[providerName] || registry;
-   if (!provider) {
-     throw new Error(`Unsupported provider: ${providerName}`);
-   }
-   const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
-     throw new Error(
-       `Failed to download template from ${providerName}: ${error.message}`
-     );
-   });
-   template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
-   template.defaultDir = (template.defaultDir || template.name).replace(
-     /[^\da-z-]/gi,
-     "-"
-   );
-   const cwd = resolve(options.cwd || ".");
-   const extractPath = resolve(cwd, options.dir || template.defaultDir);
-   if (options.forceClean) {
-     await rm(extractPath, { recursive: true, force: true });
-   }
-   if (!options.force && existsSync(extractPath) && readdirSync(extractPath).length > 0) {
-     throw new Error(`Destination ${extractPath} already exists.`);
-   }
-   await mkdir(extractPath, { recursive: true });
-   const temporaryDirectory = resolve(
-     cacheDirectory(),
-     options.provider,
-     template.name
-   );
-   const tarPath = resolve(
-     temporaryDirectory,
-     (template.version || template.name) + ".tar.gz"
-   );
-   if (options.preferOffline && existsSync(tarPath)) {
-     options.offline = true;
-   }
-   if (!options.offline) {
-     await mkdir(dirname(tarPath), { recursive: true });
-     const s2 = Date.now();
-     await download(template.tar, tarPath, {
-       headers: {
-         authorization: options.auth ? `Bearer ${options.auth}` : void 0,
-         ...normalizeHeaders(template.headers)
-       }
-     }).catch((error) => {
-       if (!existsSync(tarPath)) {
-         throw error;
-       }
-       debug("Download error. Using cached version:", error);
-       options.offline = true;
-     });
-     debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
-   }
-   if (!existsSync(tarPath)) {
-     throw new Error(
-       `Tarball not found: ${tarPath} (offline: ${options.offline})`
-     );
-   }
-   const s = Date.now();
-   const subdir = template.subdir?.replace(/^\//, "") || "";
-   await extract({
-     file: tarPath,
-     cwd: extractPath,
-     onentry(entry) {
-       entry.path = entry.path.split("/").splice(1).join("/");
-       if (subdir) {
-         if (entry.path.startsWith(subdir + "/")) {
-           entry.path = entry.path.slice(subdir.length);
-         } else {
-           entry.path = "";
-         }
-       }
-     }
-   });
-   debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
-   return {
-     ...template,
-     source,
-     dir: extractPath
-   };
- }
-
- export { downloadTemplate as d, registryProvider as r, startShell as s };
@@ -1,291 +0,0 @@
- 'use strict';
-
- const promises = require('node:fs/promises');
- const node_fs = require('node:fs');
- const tar = require('tar');
- const pathe = require('pathe');
- const defu = require('defu');
- const node_stream = require('node:stream');
- const node_child_process = require('node:child_process');
- const node_os = require('node:os');
- const node_util = require('node:util');
- const nodeFetchNative = require('node-fetch-native');
- const createHttpsProxyAgent = require('https-proxy-agent');
-
- async function download(url, filePath, options = {}) {
-   const infoPath = filePath + ".json";
-   const info = JSON.parse(
-     await promises.readFile(infoPath, "utf8").catch(() => "{}")
-   );
-   const headResponse = await sendFetch(url, {
-     method: "HEAD",
-     headers: options.headers
-   }).catch(() => void 0);
-   const etag = headResponse?.headers.get("etag");
-   if (info.etag === etag && node_fs.existsSync(filePath)) {
-     return;
-   }
-   info.etag = etag;
-   const response = await sendFetch(url, { headers: options.headers });
-   if (response.status >= 400) {
-     throw new Error(
-       `Failed to download ${url}: ${response.status} ${response.statusText}`
-     );
-   }
-   const stream = node_fs.createWriteStream(filePath);
-   await node_util.promisify(node_stream.pipeline)(response.body, stream);
-   await promises.writeFile(infoPath, JSON.stringify(info), "utf8");
- }
- const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w.-]+)?/;
- function parseGitURI(input) {
-   const m = input.match(inputRegex)?.groups;
-   return {
-     repo: m.repo,
-     subdir: m.subdir || "/",
-     ref: m.ref ? m.ref.slice(1) : "main"
-   };
- }
- function debug(...arguments_) {
-   if (process.env.DEBUG) {
-     console.debug("[giget]", ...arguments_);
-   }
- }
- async function sendFetch(url, options = {}) {
-   if (!options.agent) {
-     const proxyEnv = process.env.HTTPS_PROXY || process.env.https_proxy || process.env.HTTP_PROXY || process.env.http_proxy;
-     if (proxyEnv) {
-       options.agent = createHttpsProxyAgent(proxyEnv);
-     }
-   }
-   if (options?.headers) {
-     options.headers = normalizeHeaders(options.headers);
-   }
-   return await nodeFetchNative.fetch(url, options);
- }
- function cacheDirectory() {
-   return process.env.XDG_CACHE_HOME ? pathe.resolve(process.env.XDG_CACHE_HOME, "giget") : pathe.resolve(node_os.homedir(), ".cache/giget");
- }
- function normalizeHeaders(headers = {}) {
-   const normalized = {};
-   for (const [key, value] of Object.entries(headers)) {
-     if (!value) {
-       continue;
-     }
-     normalized[key.toLowerCase()] = value;
-   }
-   return normalized;
- }
- function currentShell() {
-   if (process.env.SHELL) {
-     return process.env.SHELL;
-   }
-   if (process.platform === "win32") {
-     return "cmd.exe";
-   }
-   return "/bin/bash";
- }
- function startShell(cwd) {
-   cwd = pathe.resolve(cwd);
-   const shell = currentShell();
-   console.info(
-     `(experimental) Opening shell in ${pathe.relative(process.cwd(), cwd)}...`
-   );
-   node_child_process.spawnSync(shell, [], {
-     cwd,
-     shell: true,
-     stdio: "inherit"
-   });
- }
-
- const github = (input, options) => {
-   const parsed = parseGitURI(input);
-   const github2 = process.env.GIGET_GITHUB_URL || "https://github.com";
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: {
-       authorization: options.auth ? `Bearer ${options.auth}` : void 0
-     },
-     url: `${github2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-     tar: `${github2}/${parsed.repo}/archive/${parsed.ref}.tar.gz`
-   };
- };
- const gitlab = (input, options) => {
-   const parsed = parseGitURI(input);
-   const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: {
-       authorization: options.auth ? `Bearer ${options.auth}` : void 0
-     },
-     url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-     tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
-   };
- };
- const bitbucket = (input, options) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: {
-       authorization: options.auth ? `Bearer ${options.auth}` : void 0
-     },
-     url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
-     tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
-   };
- };
- const sourcehut = (input, options) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: {
-       authorization: options.auth ? `Bearer ${options.auth}` : void 0
-     },
-     url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
-     tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
-   };
- };
- const providers = {
-   github,
-   gh: github,
-   gitlab,
-   bitbucket,
-   sourcehut
- };
-
- const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
- const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options) => {
-   options = options || {};
-   return async (input) => {
-     const start = Date.now();
-     const registryURL = `${registryEndpoint}/${input}.json`;
-     const result = await sendFetch(registryURL, {
-       headers: {
-         authorization: options.auth ? `Bearer ${options.auth}` : void 0
-       }
-     });
-     if (result.status >= 400) {
-       throw new Error(
-         `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
-       );
-     }
-     const info = await result.json();
-     if (!info.tar || !info.name) {
-       throw new Error(
-         `Invalid template info from ${registryURL}. name or tar fields are missing!`
-       );
-     }
-     debug(
-       `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
-     );
-     return info;
-   };
- };
-
- const sourceProtoRe = /^([\w-.]+):/;
- async function downloadTemplate(input, options = {}) {
-   options = defu.defu(
-     {
-       registry: process.env.GIGET_REGISTRY,
-       auth: process.env.GIGET_AUTH
-     },
-     options
-   );
-   const registry = options.registry !== false ? registryProvider(options.registry, { auth: options.auth }) : void 0;
-   let providerName = options.provider || (registryProvider ? "registry" : "github");
-   let source = input;
-   const sourceProvierMatch = input.match(sourceProtoRe);
-   if (sourceProvierMatch) {
-     providerName = sourceProvierMatch[1];
-     source = input.slice(sourceProvierMatch[0].length);
-   }
-   const provider = options.providers?.[providerName] || providers[providerName] || registry;
-   if (!provider) {
-     throw new Error(`Unsupported provider: ${providerName}`);
-   }
-   const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
-     throw new Error(
-       `Failed to download template from ${providerName}: ${error.message}`
-     );
-   });
-   template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
-   template.defaultDir = (template.defaultDir || template.name).replace(
-     /[^\da-z-]/gi,
-     "-"
-   );
-   const cwd = pathe.resolve(options.cwd || ".");
-   const extractPath = pathe.resolve(cwd, options.dir || template.defaultDir);
-   if (options.forceClean) {
-     await promises.rm(extractPath, { recursive: true, force: true });
-   }
-   if (!options.force && node_fs.existsSync(extractPath) && node_fs.readdirSync(extractPath).length > 0) {
-     throw new Error(`Destination ${extractPath} already exists.`);
-   }
-   await promises.mkdir(extractPath, { recursive: true });
-   const temporaryDirectory = pathe.resolve(
-     cacheDirectory(),
-     options.provider,
-     template.name
-   );
-   const tarPath = pathe.resolve(
-     temporaryDirectory,
-     (template.version || template.name) + ".tar.gz"
-   );
-   if (options.preferOffline && node_fs.existsSync(tarPath)) {
-     options.offline = true;
-   }
-   if (!options.offline) {
-     await promises.mkdir(pathe.dirname(tarPath), { recursive: true });
-     const s2 = Date.now();
-     await download(template.tar, tarPath, {
-       headers: {
-         authorization: options.auth ? `Bearer ${options.auth}` : void 0,
-         ...normalizeHeaders(template.headers)
-       }
-     }).catch((error) => {
-       if (!node_fs.existsSync(tarPath)) {
-         throw error;
-       }
-       debug("Download error. Using cached version:", error);
-       options.offline = true;
-     });
-     debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
-   }
-   if (!node_fs.existsSync(tarPath)) {
-     throw new Error(
-       `Tarball not found: ${tarPath} (offline: ${options.offline})`
-     );
-   }
-   const s = Date.now();
-   const subdir = template.subdir?.replace(/^\//, "") || "";
-   await tar.extract({
-     file: tarPath,
-     cwd: extractPath,
-     onentry(entry) {
-       entry.path = entry.path.split("/").splice(1).join("/");
-       if (subdir) {
-         if (entry.path.startsWith(subdir + "/")) {
-           entry.path = entry.path.slice(subdir.length);
-         } else {
-           entry.path = "";
-         }
-       }
-     }
-   });
-   debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
-   return {
-     ...template,
-     source,
-     dir: extractPath
-   };
- }
-
- exports.downloadTemplate = downloadTemplate;
- exports.registryProvider = registryProvider;
- exports.startShell = startShell;
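
For reference, both removed builds expose the same API (`downloadTemplate`, `registryProvider`, `startShell`). A minimal usage sketch against the public `giget` entry point, assuming it re-exports `downloadTemplate` as in these builds; the source string and target directory below are illustrative only:

// Usage sketch (not part of the diff); options mirror those read by downloadTemplate above.
import { downloadTemplate } from "giget";

const { source, dir } = await downloadTemplate("github:unjs/template", {
  dir: "./my-project", // extraction target (options.dir)
  preferOffline: true, // reuse a previously cached tarball when present
  auth: process.env.GIGET_AUTH // forwarded as a Bearer authorization header when set
});

console.log(`Extracted ${source} to ${dir}`);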