giget 1.1.1 → 1.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,12 +1,295 @@
- export { d as downloadTemplate, r as registryProvider, s as startShell } from './shared/giget.c3d868f5.mjs';
- import 'node:fs/promises';
- import 'node:fs';
- import 'tar';
- import 'pathe';
- import 'defu';
- import 'node:stream';
- import 'node:child_process';
- import 'node:os';
- import 'node:util';
- import 'node-fetch-native';
- import 'https-proxy-agent';
+ import { readFile, writeFile, rm, mkdir } from 'node:fs/promises';
+ import { existsSync, createWriteStream, readdirSync } from 'node:fs';
+ import { extract } from 'tar';
+ import { resolve, relative, dirname } from 'pathe';
+ import { defu } from 'defu';
+ import { pipeline } from 'node:stream';
+ import { spawnSync } from 'node:child_process';
+ import { homedir } from 'node:os';
+ import { promisify } from 'node:util';
+ import { fetch } from 'node-fetch-native';
+
+ async function download(url, filePath, options = {}) {
+ const infoPath = filePath + ".json";
+ const info = JSON.parse(
+ await readFile(infoPath, "utf8").catch(() => "{}")
+ );
+ const headResponse = await sendFetch(url, {
+ method: "HEAD",
+ headers: options.headers
+ }).catch(() => void 0);
+ const etag = headResponse?.headers.get("etag");
+ if (info.etag === etag && existsSync(filePath)) {
+ return;
+ }
+ if (typeof etag === "string") {
+ info.etag = etag;
+ }
+ const response = await sendFetch(url, { headers: options.headers });
+ if (response.status >= 400) {
+ throw new Error(
+ `Failed to download ${url}: ${response.status} ${response.statusText}`
+ );
+ }
+ const stream = createWriteStream(filePath);
+ await promisify(pipeline)(response.body, stream);
+ await writeFile(infoPath, JSON.stringify(info), "utf8");
+ }
+ const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w./-]+)?/;
+ function parseGitURI(input) {
+ const m = input.match(inputRegex)?.groups || {};
+ return {
+ repo: m.repo,
+ subdir: m.subdir || "/",
+ ref: m.ref ? m.ref.slice(1) : "main"
+ };
+ }
+ function debug(...args) {
+ if (process.env.DEBUG) {
+ console.debug("[giget]", ...args);
+ }
+ }
+ async function sendFetch(url, options = {}) {
+ if (!options.agent) {
+ const proxyEnv = process.env.HTTPS_PROXY || process.env.https_proxy || process.env.HTTP_PROXY || process.env.http_proxy;
+ if (proxyEnv) {
+ const HttpsProxyAgent = await import('https-proxy-agent').then(
+ (r) => r.HttpsProxyAgent || r.default
+ );
+ options.agent = new HttpsProxyAgent(proxyEnv);
+ }
+ }
+ return await fetch(url, {
+ ...options,
+ headers: normalizeHeaders(options.headers)
+ });
+ }
+ function cacheDirectory() {
+ return process.env.XDG_CACHE_HOME ? resolve(process.env.XDG_CACHE_HOME, "giget") : resolve(homedir(), ".cache/giget");
+ }
+ function normalizeHeaders(headers = {}) {
+ const normalized = {};
+ for (const [key, value] of Object.entries(headers)) {
+ if (!value) {
+ continue;
+ }
+ normalized[key.toLowerCase()] = value;
+ }
+ return normalized;
+ }
+ function currentShell() {
+ if (process.env.SHELL) {
+ return process.env.SHELL;
+ }
+ if (process.platform === "win32") {
+ return "cmd.exe";
+ }
+ return "/bin/bash";
+ }
+ function startShell(cwd) {
+ cwd = resolve(cwd);
+ const shell = currentShell();
+ console.info(
+ `(experimental) Opening shell in ${relative(process.cwd(), cwd)}...`
+ );
+ spawnSync(shell, [], {
+ cwd,
+ shell: true,
+ stdio: "inherit"
+ });
+ }
+
+ const github = (input, options) => {
+ const parsed = parseGitURI(input);
+ const githubAPIURL = process.env.GIGET_GITHUB_URL || "https://api.github.com";
+ return {
+ name: parsed.repo.replace("/", "-"),
+ version: parsed.ref,
+ subdir: parsed.subdir,
+ headers: {
+ Authorization: options.auth ? `Bearer ${options.auth}` : void 0,
+ Accept: "application/vnd.github+json",
+ "X-GitHub-Api-Version": "2022-11-28"
+ },
+ url: `${githubAPIURL.replace("api.github.com", "github.com")}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
+ tar: `${githubAPIURL}/repos/${parsed.repo}/tarball/${parsed.ref}`
+ };
+ };
+ const gitlab = (input, options) => {
+ const parsed = parseGitURI(input);
+ const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
+ return {
+ name: parsed.repo.replace("/", "-"),
+ version: parsed.ref,
+ subdir: parsed.subdir,
+ headers: {
+ authorization: options.auth ? `Bearer ${options.auth}` : void 0
+ },
+ url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
+ tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
+ };
+ };
+ const bitbucket = (input, options) => {
+ const parsed = parseGitURI(input);
+ return {
+ name: parsed.repo.replace("/", "-"),
+ version: parsed.ref,
+ subdir: parsed.subdir,
+ headers: {
+ authorization: options.auth ? `Bearer ${options.auth}` : void 0
+ },
+ url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
+ tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
+ };
+ };
+ const sourcehut = (input, options) => {
+ const parsed = parseGitURI(input);
+ return {
+ name: parsed.repo.replace("/", "-"),
+ version: parsed.ref,
+ subdir: parsed.subdir,
+ headers: {
+ authorization: options.auth ? `Bearer ${options.auth}` : void 0
+ },
+ url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
+ tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
+ };
+ };
+ const providers = {
+ github,
+ gh: github,
+ gitlab,
+ bitbucket,
+ sourcehut
+ };
+
+ const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
+ const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options = {}) => {
+ return async (input) => {
+ const start = Date.now();
+ const registryURL = `${registryEndpoint}/${input}.json`;
+ const result = await sendFetch(registryURL, {
+ headers: {
+ authorization: options.auth ? `Bearer ${options.auth}` : void 0
+ }
+ });
+ if (result.status >= 400) {
+ throw new Error(
+ `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
+ );
+ }
+ const info = await result.json();
+ if (!info.tar || !info.name) {
+ throw new Error(
+ `Invalid template info from ${registryURL}. name or tar fields are missing!`
+ );
+ }
+ debug(
+ `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
+ );
+ return info;
+ };
+ };
+
+ const sourceProtoRe = /^([\w-.]+):/;
+ async function downloadTemplate(input, options = {}) {
+ options = defu(
+ {
+ registry: process.env.GIGET_REGISTRY,
+ auth: process.env.GIGET_AUTH
+ },
+ options
+ );
+ const registry = options.registry === false ? void 0 : registryProvider(options.registry, { auth: options.auth });
+ let providerName = options.provider || (registry ? "registry" : "github");
+ let source = input;
+ const sourceProvierMatch = input.match(sourceProtoRe);
+ if (sourceProvierMatch) {
+ providerName = sourceProvierMatch[1];
+ source = input.slice(sourceProvierMatch[0].length);
+ }
+ const provider = options.providers?.[providerName] || providers[providerName] || registry;
+ if (!provider) {
+ throw new Error(`Unsupported provider: ${providerName}`);
+ }
+ const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
+ throw new Error(
+ `Failed to download template from ${providerName}: ${error.message}`
+ );
+ });
+ if (!template) {
+ throw new Error(`Failed to resolve template from ${providerName}`);
+ }
+ template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
+ template.defaultDir = (template.defaultDir || template.name).replace(
+ /[^\da-z-]/gi,
+ "-"
+ );
+ const cwd = resolve(options.cwd || ".");
+ const extractPath = resolve(cwd, options.dir || template.defaultDir);
+ if (options.forceClean) {
+ await rm(extractPath, { recursive: true, force: true });
+ }
+ if (!options.force && existsSync(extractPath) && readdirSync(extractPath).length > 0) {
+ throw new Error(`Destination ${extractPath} already exists.`);
+ }
+ await mkdir(extractPath, { recursive: true });
+ const temporaryDirectory = resolve(
+ cacheDirectory(),
+ providerName,
+ template.name
+ );
+ const tarPath = resolve(
+ temporaryDirectory,
+ (template.version || template.name) + ".tar.gz"
+ );
+ if (options.preferOffline && existsSync(tarPath)) {
+ options.offline = true;
+ }
+ if (!options.offline) {
+ await mkdir(dirname(tarPath), { recursive: true });
+ const s2 = Date.now();
+ await download(template.tar, tarPath, {
+ headers: {
+ Authorization: options.auth ? `Bearer ${options.auth}` : void 0,
+ ...normalizeHeaders(template.headers)
+ }
+ }).catch((error) => {
+ if (!existsSync(tarPath)) {
+ throw error;
+ }
+ debug("Download error. Using cached version:", error);
+ options.offline = true;
+ });
+ debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
+ }
+ if (!existsSync(tarPath)) {
+ throw new Error(
+ `Tarball not found: ${tarPath} (offline: ${options.offline})`
+ );
+ }
+ const s = Date.now();
+ const subdir = template.subdir?.replace(/^\//, "") || "";
+ await extract({
+ file: tarPath,
+ cwd: extractPath,
+ onentry(entry) {
+ entry.path = entry.path.split("/").splice(1).join("/");
+ if (subdir) {
+ if (entry.path.startsWith(subdir + "/")) {
+ entry.path = entry.path.slice(subdir.length);
+ } else {
+ entry.path = "";
+ }
+ }
+ }
+ });
+ debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
+ return {
+ ...template,
+ source,
+ dir: extractPath
+ };
+ }
+
+ export { downloadTemplate, registryProvider, startShell };
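For orientation, a minimal usage sketch of the exports above. The repository name and target directory are hypothetical; the option names (dir, forceClean) and the { source, dir } return shape follow the downloadTemplate code in this diff.

import { downloadTemplate } from 'giget';

// Illustrative only: 'github:unjs/template' and 'my-app' are placeholder inputs.
// Auth and registry overrides are read from GIGET_AUTH / GIGET_REGISTRY, as in the code above.
const { source, dir } = await downloadTemplate('github:unjs/template', {
  dir: 'my-app',
  forceClean: true
});
console.log(`Extracted ${source} to ${dir}`);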
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "giget",
- "version": "1.1.1",
+ "version": "1.1.3",
  "description": "Download templates and git repositories with pleasure!",
  "repository": "unjs/giget",
  "license": "MIT",
@@ -34,26 +34,26 @@
  "test": "pnpm lint && vitest run --coverage"
  },
  "dependencies": {
- "colorette": "^2.0.19",
+ "colorette": "^2.0.20",
  "defu": "^6.1.2",
- "https-proxy-agent": "^5.0.1",
+ "https-proxy-agent": "^7.0.2",
  "mri": "^1.2.0",
- "node-fetch-native": "^1.0.2",
- "pathe": "^1.1.0",
- "tar": "^6.1.13"
+ "node-fetch-native": "^1.4.0",
+ "pathe": "^1.1.1",
+ "tar": "^6.2.0"
  },
  "devDependencies": {
- "@types/node": "^18.13.0",
- "@types/tar": "^6.1.4",
- "@vitest/coverage-c8": "^0.28.5",
- "changelogen": "^0.4.1",
- "eslint": "^8.34.0",
- "eslint-config-unjs": "^0.1.0",
- "jiti": "^1.17.1",
- "prettier": "^2.8.4",
- "typescript": "^4.9.5",
- "unbuild": "^1.1.2",
- "vitest": "^0.28.5"
+ "@types/node": "^20.8.2",
+ "@types/tar": "^6.1.6",
+ "@vitest/coverage-v8": "^0.34.6",
+ "changelogen": "^0.5.5",
+ "eslint": "^8.50.0",
+ "eslint-config-unjs": "^0.2.1",
+ "jiti": "^1.20.0",
+ "prettier": "^3.0.3",
+ "typescript": "^5.2.2",
+ "unbuild": "^2.0.0",
+ "vitest": "^0.34.6"
  },
- "packageManager": "pnpm@7.27.0"
- }
+ "packageManager": "pnpm@8.8.0"
+ }
@@ -1,287 +0,0 @@
- import { readFile, writeFile, rm, mkdir } from 'node:fs/promises';
- import { existsSync, createWriteStream, readdirSync } from 'node:fs';
- import { extract } from 'tar';
- import { resolve, relative, dirname } from 'pathe';
- import { defu } from 'defu';
- import { pipeline } from 'node:stream';
- import { spawnSync } from 'node:child_process';
- import { homedir } from 'node:os';
- import { promisify } from 'node:util';
- import { fetch } from 'node-fetch-native';
- import createHttpsProxyAgent from 'https-proxy-agent';
-
- async function download(url, filePath, options = {}) {
- const infoPath = filePath + ".json";
- const info = JSON.parse(
- await readFile(infoPath, "utf8").catch(() => "{}")
- );
- const headResponse = await sendFetch(url, {
- method: "HEAD",
- headers: options.headers
- }).catch(() => void 0);
- const etag = headResponse?.headers.get("etag");
- if (info.etag === etag && existsSync(filePath)) {
- return;
- }
- info.etag = etag;
- const response = await sendFetch(url, { headers: options.headers });
- if (response.status >= 400) {
- throw new Error(
- `Failed to download ${url}: ${response.status} ${response.statusText}`
- );
- }
- const stream = createWriteStream(filePath);
- await promisify(pipeline)(response.body, stream);
- await writeFile(infoPath, JSON.stringify(info), "utf8");
- }
- const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w.-]+)?/;
- function parseGitURI(input) {
- const m = input.match(inputRegex)?.groups;
- return {
- repo: m.repo,
- subdir: m.subdir || "/",
- ref: m.ref ? m.ref.slice(1) : "main"
- };
- }
- function debug(...arguments_) {
- if (process.env.DEBUG) {
- console.debug("[giget]", ...arguments_);
- }
- }
- async function sendFetch(url, options = {}) {
- if (!options.agent) {
- const proxyEnv = process.env.HTTPS_PROXY || process.env.https_proxy || process.env.HTTP_PROXY || process.env.http_proxy;
- if (proxyEnv) {
- options.agent = createHttpsProxyAgent(proxyEnv);
- }
- }
- if (options?.headers) {
- options.headers = normalizeHeaders(options.headers);
- }
- return await fetch(url, options);
- }
- function cacheDirectory() {
- return process.env.XDG_CACHE_HOME ? resolve(process.env.XDG_CACHE_HOME, "giget") : resolve(homedir(), ".cache/giget");
- }
- function normalizeHeaders(headers) {
- const normalized = {};
- for (const [key, value] of Object.entries(headers)) {
- if (!value) {
- continue;
- }
- normalized[key.toLowerCase()] = value;
- }
- return normalized;
- }
- function currentShell() {
- if (process.env.SHELL) {
- return process.env.SHELL;
- }
- if (process.platform === "win32") {
- return "cmd.exe";
- }
- return "/bin/bash";
- }
- function startShell(cwd) {
- cwd = resolve(cwd);
- const shell = currentShell();
- console.info(
- `(experimental) Opening shell in ${relative(process.cwd(), cwd)}...`
- );
- spawnSync(shell, [], {
- cwd,
- shell: true,
- stdio: "inherit"
- });
- }
-
- const github = (input, options) => {
- const parsed = parseGitURI(input);
- const github2 = process.env.GIGET_GITHUB_URL || "https://github.com";
- return {
- name: parsed.repo.replace("/", "-"),
- version: parsed.ref,
- subdir: parsed.subdir,
- headers: {
- authorization: options.auth ? `Bearer ${options.auth}` : void 0
- },
- url: `${github2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
- tar: `${github2}/${parsed.repo}/archive/${parsed.ref}.tar.gz`
- };
- };
- const gitlab = (input, options) => {
- const parsed = parseGitURI(input);
- const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
- return {
- name: parsed.repo.replace("/", "-"),
- version: parsed.ref,
- subdir: parsed.subdir,
- headers: {
- authorization: options.auth ? `Bearer ${options.auth}` : void 0
- },
- url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
- tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
- };
- };
- const bitbucket = (input, options) => {
- const parsed = parseGitURI(input);
- return {
- name: parsed.repo.replace("/", "-"),
- version: parsed.ref,
- subdir: parsed.subdir,
- headers: {
- authorization: options.auth ? `Bearer ${options.auth}` : void 0
- },
- url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
- tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
- };
- };
- const sourcehut = (input, options) => {
- const parsed = parseGitURI(input);
- return {
- name: parsed.repo.replace("/", "-"),
- version: parsed.ref,
- subdir: parsed.subdir,
- headers: {
- authorization: options.auth ? `Bearer ${options.auth}` : void 0
- },
- url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
- tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
- };
- };
- const providers = {
- github,
- gh: github,
- gitlab,
- bitbucket,
- sourcehut
- };
-
- const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
- const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options) => {
- options = options || {};
- return async (input) => {
- const start = Date.now();
- const registryURL = `${registryEndpoint}/${input}.json`;
- const result = await sendFetch(registryURL, {
- headers: {
- authorization: options.auth ? `Bearer ${options.auth}` : void 0
- }
- });
- if (result.status >= 400) {
- throw new Error(
- `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
- );
- }
- const info = await result.json();
- if (!info.tar || !info.name) {
- throw new Error(
- `Invalid template info from ${registryURL}. name or tar fields are missing!`
- );
- }
- debug(
- `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
- );
- return info;
- };
- };
-
- const sourceProtoRe = /^([\w-.]+):/;
- async function downloadTemplate(input, options = {}) {
- options = defu(
- {
- registry: process.env.GIGET_REGISTRY,
- auth: process.env.GIGET_AUTH
- },
- options
- );
- const registry = options.registry !== false ? registryProvider(options.registry, { auth: options.auth }) : void 0;
- let providerName = options.provider || (registryProvider ? "registry" : "github");
- let source = input;
- const sourceProvierMatch = input.match(sourceProtoRe);
- if (sourceProvierMatch) {
- providerName = sourceProvierMatch[1];
- source = input.slice(sourceProvierMatch[0].length);
- }
- const provider = options.providers?.[providerName] || providers[providerName] || registry;
- if (!provider) {
- throw new Error(`Unsupported provider: ${providerName}`);
- }
- const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
- throw new Error(
- `Failed to download template from ${providerName}: ${error.message}`
- );
- });
- template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
- template.defaultDir = (template.defaultDir || template.name).replace(
- /[^\da-z-]/gi,
- "-"
- );
- const cwd = resolve(options.cwd || ".");
- const extractPath = resolve(cwd, options.dir || template.defaultDir);
- if (options.forceClean) {
- await rm(extractPath, { recursive: true, force: true });
- }
- if (!options.force && existsSync(extractPath) && readdirSync(extractPath).length > 0) {
- throw new Error(`Destination ${extractPath} already exists.`);
- }
- await mkdir(extractPath, { recursive: true });
- const temporaryDirectory = resolve(
- cacheDirectory(),
- options.provider,
- template.name
- );
- const tarPath = resolve(
- temporaryDirectory,
- (template.version || template.name) + ".tar.gz"
- );
- if (options.preferOffline && existsSync(tarPath)) {
- options.offline = true;
- }
- if (!options.offline) {
- await mkdir(dirname(tarPath), { recursive: true });
- const s2 = Date.now();
- await download(template.tar, tarPath, {
- headers: {
- authorization: options.auth ? `Bearer ${options.auth}` : void 0,
- ...normalizeHeaders(template.headers)
- }
- }).catch((error) => {
- if (!existsSync(tarPath)) {
- throw error;
- }
- debug("Download error. Using cached version:", error);
- options.offline = true;
- });
- debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
- }
- if (!existsSync(tarPath)) {
- throw new Error(
- `Tarball not found: ${tarPath} (offline: ${options.offline})`
- );
- }
- const s = Date.now();
- const subdir = template.subdir?.replace(/^\//, "") || "";
- await extract({
- file: tarPath,
- cwd: extractPath,
- onentry(entry) {
- entry.path = entry.path.split("/").splice(1).join("/");
- if (subdir) {
- if (entry.path.startsWith(subdir + "/")) {
- entry.path = entry.path.slice(subdir.length);
- } else {
- entry.path = "";
- }
- }
- }
- });
- debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
- return {
- ...template,
- source,
- dir: extractPath
- };
- }
-
- export { downloadTemplate as d, registryProvider as r, startShell as s };