giget 0.1.7 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
  MIT License

- Copyright (c) 2022 - UnJS
+ Copyright (c) Pooya Parsa <pooya@pi0.io>

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
package/README.md CHANGED
@@ -36,7 +36,7 @@ npx giget@latest <template> [<dir>] [...options]

  ### Options

- - `--force`: Clone to exsiting directory even if exists.
+ - `--force`: Clone to existing directory even if exists.
  - `--offline`: Do not attempt to download and use cached version.
  - `--prefer-offline`: Use cache if exists otherwise try to download.
  - `--force-clean`: ⚠️ Remove any existing directory or file recusively before cloning.
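For reference, the flags above compose with the command form shown in the hunk header; a forced, cache-preferring clone of the unjs/template starter into a made-up directory would look like:

    npx giget@latest gh:unjs/template my-project --force --prefer-offline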
@@ -89,10 +89,10 @@ A custom registry should provide an endpoint with dynamic path `/:template.json`
  - `tar` (required) Link to the tar download link.
  - `defaultDir`: (optional) Default cloning directory.
  - `url`: (optional) Webpage of the template.
- - `subpath`: (optional) Subpath inside the tar file.
+ - `subdir`: (optional) Directory inside the tar file.
  - `headers`: (optional) Custom headers to send while downloading template.
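Putting the fields above together, a registry entry is just a static JSON document; a hypothetical entry (all values invented for illustration) could look like:

    // Hypothetical registry entry, e.g. served from <registry>/my-template.json;
    // every value below is a placeholder, not part of the giget docs.
    const templateInfo = {
      name: "my-template",
      tar: "https://example.com/my-template-main.tar.gz",
      defaultDir: "my-template",
      url: "https://example.com/my-template",
      subdir: "/template",
      headers: { Authorization: "Bearer <token>" }
    };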

- Because of the simplicity, you can even use a github repository as template registry but also you can build something more powerful by bringing your own API.
+ Because of the simplicity, you can even use a GitHub repository as template registry but also you can build something more powerful by bringing your own API.

  ## Usage (Programmatic)

@@ -130,12 +130,12 @@ const { source, dir } = await downloadTemplate('github:unjs/template')
  **Options:**

  - `source`: (string) Input source in format of `[provider]:repo[/subpath][#ref]`.
- - `dir`: (string) Destination directory to clone to. If not provided, `user-name` will be used relative to the current directory.
  - `options`: (object) Options are usually inferred from the input string. You can customize them.
+ - `dir`: (string) Destination directory to clone to. If not provided, `user-name` will be used relative to the current directory.
  - `provider`: (string) Either `github`, `gitlab`, `bitbucket` or `sourcehut`. The default is `github`.
  - `repo`: (string) Name of repository in format of `{username}/{reponame}`.
  - `ref`: (string) Git ref (branch or commit or tag). The default value is `main`.
- - `subdirpath`: (string) subdir of the repo to clone from. The default value is none.
+ - `subdir`: (string) Directory of the repo to clone from. The default value is none.
  - `force`: (boolean) Extract to the exisiting dir even if already exsists.
  - `forceClean`: (boolean) ⚠️ Clean ups any existing directory or file before cloning.
  - `offline`: (boolean) Do not attempt to download and use cached version.
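The renamed `subdir` option above lines up with the `[provider]:repo[/subpath][#ref]` source format; a minimal programmatic sketch (destination directory and auth handling are illustrative only):

    // Sketch only: directory name and auth handling are placeholders.
    import { downloadTemplate } from "giget";

    const { source, dir } = await downloadTemplate("github:unjs/template", {
      dir: "./my-app",                // hypothetical destination
      forceClean: false,
      registry: false,                // skip the built-in template registry
      auth: process.env.GIGET_AUTH
    });
    console.log(`Cloned ${source} into ${dir}`);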
@@ -188,7 +188,7 @@ const { source, dir } = await downloadRepo('themes:test', { providers: { themes

  ## Related projects

- Giget wouldn't be possible without inspering from former projects. In comparation giget does not depend on any local command which increases stability and performance, supports custom template providers, auth and many more features out of the box.
+ Giget wouldn't be possible without inspiration from former projects. In comparison, giget does not depend on any local command which increases stability and performance, supports custom template providers, auth and many more features out of the box.

  - https://github.com/samsonjs/gitter
  - https://github.com/tiged/tiged
package/dist/cli.cjs CHANGED
@@ -4,53 +4,64 @@
  const node_path = require('node:path');
  const mri = require('mri');
  const colorette = require('colorette');
- const giget = require('./shared/giget.3522139c.cjs');
+ const giget = require('./shared/giget.51477975.cjs');
  require('node:fs/promises');
- require('node:os');
  require('node:fs');
  require('tar');
  require('pathe');
  require('defu');
  require('node:stream');
  require('node:child_process');
+ require('node:os');
  require('node:util');
  require('node-fetch-native');
-
- function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e["default"] : e; }
-
- const mri__default = /*#__PURE__*/_interopDefaultLegacy(mri);
+ require('https-proxy-agent');

  async function main() {
-   const args = mri__default(process.argv.slice(2), {
-     boolean: ["help", "force", "force-clean", "offline", "prefer-offline", "shell", "verbose"],
+   const arguments_ = mri(process.argv.slice(2), {
+     boolean: [
+       "help",
+       "force",
+       "force-clean",
+       "offline",
+       "prefer-offline",
+       "shell",
+       "verbose"
+     ],
      string: ["registry", "cwd", "auth"]
    });
-   const input = args._[0];
-   const dir = args._[1];
-   if (!input || args.help || args.h) {
-     console.error("Usage: npx getgit@latest <input> [<dir>] [--force] [--force-clean] [--offline] [--prefer-offline] [--shell] [--registry] [--no-registry] [--verbose] [--cwd] [--auth]");
+   const input = arguments_._[0];
+   const dir = arguments_._[1];
+   if (!input || arguments_.help || arguments_.h) {
+     console.error(
+       "Usage: npx giget@latest <input> [<dir>] [--force] [--force-clean] [--offline] [--prefer-offline] [--shell] [--registry] [--no-registry] [--verbose] [--cwd] [--auth]"
+     );
      process.exit(1);
    }
-   if (args.verbose) {
+   if (arguments_.verbose) {
      process.env.DEBUG = process.env.DEBUG || "true";
    }
    const r = await giget.downloadTemplate(input, {
      dir,
-     force: args.force,
-     forceClean: args["force-clean"],
-     offline: args.offline,
-     registry: args.registry,
-     cwd: args.cwd,
-     auth: args.auth
+     force: arguments_.force,
+     forceClean: arguments_["force-clean"],
+     offline: arguments_.offline,
+     registry: arguments_.registry,
+     cwd: arguments_.cwd,
+     auth: arguments_.auth
    });
-   console.log(`\u2728 Successfully cloned ${colorette.cyan(r.name || r.url)} to ${colorette.cyan(node_path.relative(process.cwd(), r.dir))}
- `);
-   if (args.shell) {
+   console.log(
+     `\u2728 Successfully cloned ${colorette.cyan(r.name || r.url)} to ${colorette.cyan(
+       node_path.relative(process.cwd(), r.dir)
+     )}
+ `
+   );
+   if (arguments_.shell) {
      giget.startShell(r.dir);
    }
    process.exit(0);
  }
- main().catch((err) => {
-   console.error(err);
+ main().catch((error) => {
+   console.error(error);
    process.exit(1);
  });
package/dist/cli.d.ts CHANGED
@@ -1 +1,2 @@

+ export { }
package/dist/cli.mjs CHANGED
@@ -2,49 +2,64 @@
  import { relative } from 'node:path';
  import mri from 'mri';
  import { cyan } from 'colorette';
- import { d as downloadTemplate, s as startShell } from './shared/giget.dd19862e.mjs';
+ import { d as downloadTemplate, s as startShell } from './shared/giget.6c52cb03.mjs';
  import 'node:fs/promises';
- import 'node:os';
  import 'node:fs';
  import 'tar';
  import 'pathe';
  import 'defu';
  import 'node:stream';
  import 'node:child_process';
+ import 'node:os';
  import 'node:util';
  import 'node-fetch-native';
+ import 'https-proxy-agent';

  async function main() {
-   const args = mri(process.argv.slice(2), {
-     boolean: ["help", "force", "force-clean", "offline", "prefer-offline", "shell", "verbose"],
+   const arguments_ = mri(process.argv.slice(2), {
+     boolean: [
+       "help",
+       "force",
+       "force-clean",
+       "offline",
+       "prefer-offline",
+       "shell",
+       "verbose"
+     ],
      string: ["registry", "cwd", "auth"]
    });
-   const input = args._[0];
-   const dir = args._[1];
-   if (!input || args.help || args.h) {
-     console.error("Usage: npx getgit@latest <input> [<dir>] [--force] [--force-clean] [--offline] [--prefer-offline] [--shell] [--registry] [--no-registry] [--verbose] [--cwd] [--auth]");
+   const input = arguments_._[0];
+   const dir = arguments_._[1];
+   if (!input || arguments_.help || arguments_.h) {
+     console.error(
+       "Usage: npx giget@latest <input> [<dir>] [--force] [--force-clean] [--offline] [--prefer-offline] [--shell] [--registry] [--no-registry] [--verbose] [--cwd] [--auth]"
+     );
      process.exit(1);
    }
-   if (args.verbose) {
+   if (arguments_.verbose) {
      process.env.DEBUG = process.env.DEBUG || "true";
    }
    const r = await downloadTemplate(input, {
      dir,
-     force: args.force,
-     forceClean: args["force-clean"],
-     offline: args.offline,
-     registry: args.registry,
-     cwd: args.cwd,
-     auth: args.auth
+     force: arguments_.force,
+     forceClean: arguments_["force-clean"],
+     offline: arguments_.offline,
+     registry: arguments_.registry,
+     cwd: arguments_.cwd,
+     auth: arguments_.auth
    });
-   console.log(`\u2728 Successfully cloned ${cyan(r.name || r.url)} to ${cyan(relative(process.cwd(), r.dir))}
- `);
-   if (args.shell) {
+   console.log(
+     `\u2728 Successfully cloned ${cyan(r.name || r.url)} to ${cyan(
+       relative(process.cwd(), r.dir)
+     )}
+ `
+   );
+   if (arguments_.shell) {
      startShell(r.dir);
    }
    process.exit(0);
  }
- main().catch((err) => {
-   console.error(err);
+ main().catch((error) => {
+   console.error(error);
    process.exit(1);
  });
package/dist/index.cjs CHANGED
@@ -1,18 +1,17 @@
  'use strict';

- Object.defineProperty(exports, '__esModule', { value: true });
-
- const giget = require('./shared/giget.3522139c.cjs');
+ const giget = require('./shared/giget.51477975.cjs');
  require('node:fs/promises');
- require('node:os');
  require('node:fs');
  require('tar');
  require('pathe');
  require('defu');
  require('node:stream');
  require('node:child_process');
+ require('node:os');
  require('node:util');
  require('node-fetch-native');
+ require('https-proxy-agent');



package/dist/index.d.ts CHANGED
@@ -1,5 +1,5 @@
  interface GitInfo {
-   provider: 'github' | 'gitlab' | 'bitbucket' | 'sourcehut';
+   provider: "github" | "gitlab" | "bitbucket" | "sourcehut";
    repo: string;
    subdir: string;
    ref: string;
@@ -16,7 +16,7 @@ interface TemplateInfo {
    dir?: never;
    [key: string]: any;
  }
- declare type TemplateProvider = (input: string, options: {
+ type TemplateProvider = (input: string, options: {
    auth?: string;
  }) => TemplateInfo | Promise<TemplateInfo> | null;

@@ -32,13 +32,15 @@ interface DownloadTemplateOptions {
    cwd?: string;
    auth?: string;
  }
- declare type DownloadTemplateResult = Omit<TemplateInfo, 'dir' | 'source'> & {
+ type DownloadTemplateResult = Omit<TemplateInfo, "dir" | "source"> & {
    dir: string;
    source: string;
  };
- declare function downloadTemplate(input: string, opts?: DownloadTemplateOptions): Promise<DownloadTemplateResult>;
+ declare function downloadTemplate(input: string, options?: DownloadTemplateOptions): Promise<DownloadTemplateResult>;

- declare const registryProvider: (registryEndpoint?: string) => TemplateProvider;
+ declare const registryProvider: (registryEndpoint?: string, options?: {
+   auth?: string;
+ }) => TemplateProvider;

  declare function startShell(cwd: string): void;
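The widened `registryProvider` signature above (it now also accepts an `auth` option) can be combined with the `providers` option of `downloadTemplate`; a brief sketch with placeholder endpoint and token:

    // Sketch based on the typings above; endpoint URL and token are placeholders.
    import { downloadTemplate, registryProvider } from "giget";

    const themes = registryProvider("https://example.com/templates", {
      auth: process.env.THEMES_TOKEN // assumed to hold a registry token
    });
    await downloadTemplate("themes:test", { providers: { themes } });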
 
package/dist/index.mjs CHANGED
@@ -1,11 +1,12 @@
- export { d as downloadTemplate, r as registryProvider, s as startShell } from './shared/giget.dd19862e.mjs';
+ export { d as downloadTemplate, r as registryProvider, s as startShell } from './shared/giget.6c52cb03.mjs';
  import 'node:fs/promises';
- import 'node:os';
  import 'node:fs';
  import 'tar';
  import 'pathe';
  import 'defu';
  import 'node:stream';
  import 'node:child_process';
+ import 'node:os';
  import 'node:util';
  import 'node-fetch-native';
+ import 'https-proxy-agent';
package/dist/shared/giget.51477975.cjs ADDED
@@ -0,0 +1,274 @@
+ 'use strict';
+
+ const promises = require('node:fs/promises');
+ const node_fs = require('node:fs');
+ const tar = require('tar');
+ const pathe = require('pathe');
+ const defu = require('defu');
+ const node_stream = require('node:stream');
+ const node_child_process = require('node:child_process');
+ const node_os = require('node:os');
+ const node_util = require('node:util');
+ const nodeFetchNative = require('node-fetch-native');
+ const createHttpsProxyAgent = require('https-proxy-agent');
+
+ async function download(url, filePath, options = {}) {
+   const infoPath = filePath + ".json";
+   const info = JSON.parse(
+     await promises.readFile(infoPath, "utf8").catch(() => "{}")
+   );
+   const headResponse = await sendFetch(url, {
+     method: "HEAD",
+     headers: options.headers
+   }).catch(() => void 0);
+   const etag = headResponse?.headers.get("etag");
+   if (info.etag === etag && node_fs.existsSync(filePath)) {
+     return;
+   }
+   info.etag = etag;
+   const response = await sendFetch(url, { headers: options.headers });
+   if (response.status >= 400) {
+     throw new Error(
+       `Failed to download ${url}: ${response.status} ${response.statusText}`
+     );
+   }
+   const stream = node_fs.createWriteStream(filePath);
+   await node_util.promisify(node_stream.pipeline)(response.body, stream);
+   await promises.writeFile(infoPath, JSON.stringify(info), "utf8");
+ }
+ const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w.-]+)?/;
+ function parseGitURI(input) {
+   const m = input.match(inputRegex)?.groups;
+   return {
+     repo: m.repo,
+     subdir: m.subdir || "/",
+     ref: m.ref ? m.ref.slice(1) : "main"
+   };
+ }
+ function debug(...arguments_) {
+   if (process.env.DEBUG) {
+     console.debug("[giget]", ...arguments_);
+   }
+ }
+ async function sendFetch(url, options) {
+   const proxy = process.env.HTTPS_PROXY || process.env.https_proxy || process.env.HTTP_PROXY || process.env.http_proxy;
+   const requestOptions = proxy ? { agent: createHttpsProxyAgent(proxy), ...options } : options;
+   return await nodeFetchNative.fetch(url, requestOptions);
+ }
+ function cacheDirectory() {
+   return process.env.XDG_CACHE_HOME ? pathe.resolve(process.env.XDG_CACHE_HOME, "giget") : pathe.resolve(node_os.homedir(), ".cache/giget");
+ }
+ function currentShell() {
+   if (process.env.SHELL) {
+     return process.env.SHELL;
+   }
+   if (process.platform === "win32") {
+     return "cmd.exe";
+   }
+   return "/bin/bash";
+ }
+ function startShell(cwd) {
+   cwd = pathe.resolve(cwd);
+   const shell = currentShell();
+   console.info(
+     `(experimental) Opening shell in ${pathe.relative(process.cwd(), cwd)}...`
+   );
+   node_child_process.spawnSync(shell, [], {
+     cwd,
+     shell: true,
+     stdio: "inherit"
+   });
+ }
+
+ const github = (input, options) => {
+   const parsed = parseGitURI(input);
+   const github2 = process.env.GIGET_GITHUB_URL || "https://github.com";
+   return {
+     name: parsed.repo.replace("/", "-"),
+     version: parsed.ref,
+     subdir: parsed.subdir,
+     headers: {
+       Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+     },
+     url: `${github2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
+     tar: `${github2}/${parsed.repo}/archive/${parsed.ref}.tar.gz`
+   };
+ };
+ const gitlab = (input, options) => {
+   const parsed = parseGitURI(input);
+   const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
+   return {
+     name: parsed.repo.replace("/", "-"),
+     version: parsed.ref,
+     subdir: parsed.subdir,
+     headers: {
+       Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+     },
+     url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
+     tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
+   };
+ };
+ const bitbucket = (input, options) => {
+   const parsed = parseGitURI(input);
+   return {
+     name: parsed.repo.replace("/", "-"),
+     version: parsed.ref,
+     subdir: parsed.subdir,
+     headers: {
+       Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+     },
+     url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
+     tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
+   };
+ };
+ const sourcehut = (input, options) => {
+   const parsed = parseGitURI(input);
+   return {
+     name: parsed.repo.replace("/", "-"),
+     version: parsed.ref,
+     subdir: parsed.subdir,
+     headers: {
+       Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+     },
+     url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
+     tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
+   };
+ };
+ const providers = {
+   github,
+   gh: github,
+   gitlab,
+   bitbucket,
+   sourcehut
+ };
+
+ const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
+ const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options) => {
+   options = options || {};
+   return async (input) => {
+     const start = Date.now();
+     const registryURL = `${registryEndpoint}/${input}.json`;
+     const result = await sendFetch(registryURL, {
+       headers: {
+         Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+       }
+     });
+     if (result.status >= 400) {
+       throw new Error(
+         `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
+       );
+     }
+     const info = await result.json();
+     if (!info.tar || !info.name) {
+       throw new Error(
+         `Invalid template info from ${registryURL}. name or tar fields are missing!`
+       );
+     }
+     debug(
+       `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
+     );
+     return info;
+   };
+ };
+
+ const sourceProtoRe = /^([\w-.]+):/;
+ async function downloadTemplate(input, options = {}) {
+   options = defu.defu(
+     {
+       registry: process.env.GIGET_REGISTRY,
+       auth: process.env.GIGET_AUTH
+     },
+     options
+   );
+   const registry = options.registry !== false ? registryProvider(options.registry, { auth: options.auth }) : void 0;
+   let providerName = options.provider || (registryProvider ? "registry" : "github");
+   let source = input;
+   const sourceProvierMatch = input.match(sourceProtoRe);
+   if (sourceProvierMatch) {
+     providerName = sourceProvierMatch[1];
+     source = input.slice(sourceProvierMatch[0].length);
+   }
+   const provider = options.providers?.[providerName] || providers[providerName] || registry;
+   if (!provider) {
+     throw new Error(`Unsupported provider: ${providerName}`);
+   }
+   const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
+     throw new Error(
+       `Failed to download template from ${providerName}: ${error.message}`
+     );
+   });
+   template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
+   template.defaultDir = (template.defaultDir || template.name).replace(
+     /[^\da-z-]/gi,
+     "-"
+   );
+   const cwd = pathe.resolve(options.cwd || ".");
+   const extractPath = pathe.resolve(cwd, options.dir || template.defaultDir);
+   if (options.forceClean) {
+     await promises.rm(extractPath, { recursive: true, force: true });
+   }
+   if (!options.force && node_fs.existsSync(extractPath) && node_fs.readdirSync(extractPath).length > 0) {
+     throw new Error(`Destination ${extractPath} already exists.`);
+   }
+   await promises.mkdir(extractPath, { recursive: true });
+   const temporaryDirectory = pathe.resolve(
+     cacheDirectory(),
+     options.provider,
+     template.name
+   );
+   const tarPath = pathe.resolve(
+     temporaryDirectory,
+     (template.version || template.name) + ".tar.gz"
+   );
+   if (options.preferOffline && node_fs.existsSync(tarPath)) {
+     options.offline = true;
+   }
+   if (!options.offline) {
+     await promises.mkdir(pathe.dirname(tarPath), { recursive: true });
+     const s2 = Date.now();
+     await download(template.tar, tarPath, {
+       headers: {
+         authorization: options.auth ? `Bearer ${options.auth}` : void 0,
+         ...template.headers
+       }
+     }).catch((error) => {
+       if (!node_fs.existsSync(tarPath)) {
+         throw error;
+       }
+       debug("Download error. Using cached version:", error);
+       options.offline = true;
+     });
+     debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
+   }
+   if (!node_fs.existsSync(tarPath)) {
+     throw new Error(
+       `Tarball not found: ${tarPath} (offline: ${options.offline})`
+     );
+   }
+   const s = Date.now();
+   const subdir = template.subdir?.replace(/^\//, "") || "";
+   await tar.extract({
+     file: tarPath,
+     cwd: extractPath,
+     onentry(entry) {
+       entry.path = entry.path.split("/").splice(1).join("/");
+       if (subdir) {
+         if (entry.path.startsWith(subdir + "/")) {
+           entry.path = entry.path.slice(subdir.length);
+         } else {
+           entry.path = "";
+         }
+       }
+     }
+   });
+   debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
+   return {
+     ...template,
+     source,
+     dir: extractPath
+   };
+ }
+
+ exports.downloadTemplate = downloadTemplate;
+ exports.registryProvider = registryProvider;
+ exports.startShell = startShell;
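A practical effect of this new chunk: `sendFetch` attaches an `https-proxy-agent` agent to every request whenever a proxy environment variable is set, so template downloads follow HTTPS_PROXY/HTTP_PROXY without extra options. A minimal sketch (proxy URL and destination are placeholders):

    // Placeholder proxy URL and destination; giget picks the proxy up from the
    // environment via the sendFetch helper shown above.
    process.env.HTTPS_PROXY = "http://proxy.example.com:8080";
    const { downloadTemplate } = await import("giget");
    await downloadTemplate("gh:unjs/template", { dir: ".tmp/behind-proxy" });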
package/dist/shared/giget.6c52cb03.mjs ADDED
@@ -0,0 +1,270 @@
+ import { readFile, writeFile, rm, mkdir } from 'node:fs/promises';
+ import { existsSync, createWriteStream, readdirSync } from 'node:fs';
+ import { extract } from 'tar';
+ import { resolve, relative, dirname } from 'pathe';
+ import { defu } from 'defu';
+ import { pipeline } from 'node:stream';
+ import { spawnSync } from 'node:child_process';
+ import { homedir } from 'node:os';
+ import { promisify } from 'node:util';
+ import { fetch } from 'node-fetch-native';
+ import createHttpsProxyAgent from 'https-proxy-agent';
+
+ async function download(url, filePath, options = {}) {
+   const infoPath = filePath + ".json";
+   const info = JSON.parse(
+     await readFile(infoPath, "utf8").catch(() => "{}")
+   );
+   const headResponse = await sendFetch(url, {
+     method: "HEAD",
+     headers: options.headers
+   }).catch(() => void 0);
+   const etag = headResponse?.headers.get("etag");
+   if (info.etag === etag && existsSync(filePath)) {
+     return;
+   }
+   info.etag = etag;
+   const response = await sendFetch(url, { headers: options.headers });
+   if (response.status >= 400) {
+     throw new Error(
+       `Failed to download ${url}: ${response.status} ${response.statusText}`
+     );
+   }
+   const stream = createWriteStream(filePath);
+   await promisify(pipeline)(response.body, stream);
+   await writeFile(infoPath, JSON.stringify(info), "utf8");
+ }
+ const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w.-]+)?/;
+ function parseGitURI(input) {
+   const m = input.match(inputRegex)?.groups;
+   return {
+     repo: m.repo,
+     subdir: m.subdir || "/",
+     ref: m.ref ? m.ref.slice(1) : "main"
+   };
+ }
+ function debug(...arguments_) {
+   if (process.env.DEBUG) {
+     console.debug("[giget]", ...arguments_);
+   }
+ }
+ async function sendFetch(url, options) {
+   const proxy = process.env.HTTPS_PROXY || process.env.https_proxy || process.env.HTTP_PROXY || process.env.http_proxy;
+   const requestOptions = proxy ? { agent: createHttpsProxyAgent(proxy), ...options } : options;
+   return await fetch(url, requestOptions);
+ }
+ function cacheDirectory() {
+   return process.env.XDG_CACHE_HOME ? resolve(process.env.XDG_CACHE_HOME, "giget") : resolve(homedir(), ".cache/giget");
+ }
+ function currentShell() {
+   if (process.env.SHELL) {
+     return process.env.SHELL;
+   }
+   if (process.platform === "win32") {
+     return "cmd.exe";
+   }
+   return "/bin/bash";
+ }
+ function startShell(cwd) {
+   cwd = resolve(cwd);
+   const shell = currentShell();
+   console.info(
+     `(experimental) Opening shell in ${relative(process.cwd(), cwd)}...`
+   );
+   spawnSync(shell, [], {
+     cwd,
+     shell: true,
+     stdio: "inherit"
+   });
+ }
+
+ const github = (input, options) => {
+   const parsed = parseGitURI(input);
+   const github2 = process.env.GIGET_GITHUB_URL || "https://github.com";
+   return {
+     name: parsed.repo.replace("/", "-"),
+     version: parsed.ref,
+     subdir: parsed.subdir,
+     headers: {
+       Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+     },
+     url: `${github2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
+     tar: `${github2}/${parsed.repo}/archive/${parsed.ref}.tar.gz`
+   };
+ };
+ const gitlab = (input, options) => {
+   const parsed = parseGitURI(input);
+   const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
+   return {
+     name: parsed.repo.replace("/", "-"),
+     version: parsed.ref,
+     subdir: parsed.subdir,
+     headers: {
+       Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+     },
+     url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
+     tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
+   };
+ };
+ const bitbucket = (input, options) => {
+   const parsed = parseGitURI(input);
+   return {
+     name: parsed.repo.replace("/", "-"),
+     version: parsed.ref,
+     subdir: parsed.subdir,
+     headers: {
+       Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+     },
+     url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
+     tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
+   };
+ };
+ const sourcehut = (input, options) => {
+   const parsed = parseGitURI(input);
+   return {
+     name: parsed.repo.replace("/", "-"),
+     version: parsed.ref,
+     subdir: parsed.subdir,
+     headers: {
+       Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+     },
+     url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
+     tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
+   };
+ };
+ const providers = {
+   github,
+   gh: github,
+   gitlab,
+   bitbucket,
+   sourcehut
+ };
+
+ const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
+ const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options) => {
+   options = options || {};
+   return async (input) => {
+     const start = Date.now();
+     const registryURL = `${registryEndpoint}/${input}.json`;
+     const result = await sendFetch(registryURL, {
+       headers: {
+         Authorization: options.auth ? `Bearer ${options.auth}` : void 0
+       }
+     });
+     if (result.status >= 400) {
+       throw new Error(
+         `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
+       );
+     }
+     const info = await result.json();
+     if (!info.tar || !info.name) {
+       throw new Error(
+         `Invalid template info from ${registryURL}. name or tar fields are missing!`
+       );
+     }
+     debug(
+       `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
+     );
+     return info;
+   };
+ };
+
+ const sourceProtoRe = /^([\w-.]+):/;
+ async function downloadTemplate(input, options = {}) {
+   options = defu(
+     {
+       registry: process.env.GIGET_REGISTRY,
+       auth: process.env.GIGET_AUTH
+     },
+     options
+   );
+   const registry = options.registry !== false ? registryProvider(options.registry, { auth: options.auth }) : void 0;
+   let providerName = options.provider || (registryProvider ? "registry" : "github");
+   let source = input;
+   const sourceProvierMatch = input.match(sourceProtoRe);
+   if (sourceProvierMatch) {
+     providerName = sourceProvierMatch[1];
+     source = input.slice(sourceProvierMatch[0].length);
+   }
+   const provider = options.providers?.[providerName] || providers[providerName] || registry;
+   if (!provider) {
+     throw new Error(`Unsupported provider: ${providerName}`);
+   }
+   const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
+     throw new Error(
+       `Failed to download template from ${providerName}: ${error.message}`
+     );
+   });
+   template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
+   template.defaultDir = (template.defaultDir || template.name).replace(
+     /[^\da-z-]/gi,
+     "-"
+   );
+   const cwd = resolve(options.cwd || ".");
+   const extractPath = resolve(cwd, options.dir || template.defaultDir);
+   if (options.forceClean) {
+     await rm(extractPath, { recursive: true, force: true });
+   }
+   if (!options.force && existsSync(extractPath) && readdirSync(extractPath).length > 0) {
+     throw new Error(`Destination ${extractPath} already exists.`);
+   }
+   await mkdir(extractPath, { recursive: true });
+   const temporaryDirectory = resolve(
+     cacheDirectory(),
+     options.provider,
+     template.name
+   );
+   const tarPath = resolve(
+     temporaryDirectory,
+     (template.version || template.name) + ".tar.gz"
+   );
+   if (options.preferOffline && existsSync(tarPath)) {
+     options.offline = true;
+   }
+   if (!options.offline) {
+     await mkdir(dirname(tarPath), { recursive: true });
+     const s2 = Date.now();
+     await download(template.tar, tarPath, {
+       headers: {
+         authorization: options.auth ? `Bearer ${options.auth}` : void 0,
+         ...template.headers
+       }
+     }).catch((error) => {
+       if (!existsSync(tarPath)) {
+         throw error;
+       }
+       debug("Download error. Using cached version:", error);
+       options.offline = true;
+     });
+     debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
+   }
+   if (!existsSync(tarPath)) {
+     throw new Error(
+       `Tarball not found: ${tarPath} (offline: ${options.offline})`
+     );
+   }
+   const s = Date.now();
+   const subdir = template.subdir?.replace(/^\//, "") || "";
+   await extract({
+     file: tarPath,
+     cwd: extractPath,
+     onentry(entry) {
+       entry.path = entry.path.split("/").splice(1).join("/");
+       if (subdir) {
+         if (entry.path.startsWith(subdir + "/")) {
+           entry.path = entry.path.slice(subdir.length);
+         } else {
+           entry.path = "";
+         }
+       }
+     }
+   });
+   debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
+   return {
+     ...template,
+     source,
+     dir: extractPath
+   };
+ }
+
+ export { downloadTemplate as d, registryProvider as r, startShell as s };
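Both chunks share the `parseGitURI` helper, which splits the input into `repo`, `subdir` and `ref`; two worked examples (inputs chosen for illustration):

    // Illustration of the input format handled by parseGitURI above (example inputs):
    // "unjs/template"           -> { repo: "unjs/template", subdir: "/", ref: "main" }
    // "unjs/giget/templates#v1" -> { repo: "unjs/giget", subdir: "/templates", ref: "v1" }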
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "giget",
-   "version": "0.1.7",
+   "version": "1.1.0",
    "description": "Download templates and git repositories with pleasure!",
    "repository": "unjs/giget",
    "license": "MIT",
@@ -12,6 +12,7 @@
    "exports": {
      ".": {
        "import": "./dist/index.mjs",
+       "types": "./dist/index.d.ts",
        "require": "./dist/index.cjs"
      }
    },
@@ -21,34 +22,38 @@
    "files": [
      "dist"
    ],
-   "dependencies": {
-     "colorette": "^2.0.19",
-     "defu": "^6.1.0",
-     "mri": "^1.2.0",
-     "node-fetch-native": "^0.1.4",
-     "pathe": "^0.3.8",
-     "tar": "^6.1.11"
-   },
-   "devDependencies": {
-     "@nuxtjs/eslint-config-typescript": "^11.0.0",
-     "@types/node": "^18.7.16",
-     "@types/tar": "^6.1.2",
-     "@vitest/coverage-c8": "^0.23.4",
-     "eslint": "^8.23.1",
-     "jiti": "^1.16.0",
-     "standard-version": "^9.5.0",
-     "typescript": "^4.8.3",
-     "unbuild": "^0.8.11",
-     "vitest": "^0.23.4"
-   },
-   "packageManager": "pnpm@7.12.0",
    "scripts": {
      "build": "unbuild",
      "dev": "vitest dev",
      "giget": "jiti ./src/cli.ts",
-     "lint": "eslint --ext .ts,.js,.mjs,.cjs .",
+     "lint": "eslint --ext .ts,.js,.mjs,.cjs . && prettier -c src test",
+     "lint:fix": "eslint --ext .ts,.js,.mjs,.cjs . --fix && prettier -w src test",
+     "prepack": "unbuild",
      "play": "pnpm giget --force-clean --verbose unjs .tmp/clone",
-     "release": "pnpm test && standard-version && git push --follow-tags && pnpm publish",
+     "release": "pnpm test && changelogen --release && npm publish && git push --follow-tags",
      "test": "pnpm lint && vitest run --coverage"
-   }
- }
+   },
+   "dependencies": {
+     "colorette": "^2.0.19",
+     "defu": "^6.1.2",
+     "https-proxy-agent": "^5.0.1",
+     "mri": "^1.2.0",
+     "node-fetch-native": "^1.0.2",
+     "pathe": "^1.1.0",
+     "tar": "^6.1.13"
+   },
+   "devDependencies": {
+     "@types/node": "^18.13.0",
+     "@types/tar": "^6.1.4",
+     "@vitest/coverage-c8": "^0.28.5",
+     "changelogen": "^0.4.1",
+     "eslint": "^8.34.0",
+     "eslint-config-unjs": "^0.1.0",
+     "jiti": "^1.17.0",
+     "prettier": "^2.8.4",
+     "typescript": "^4.9.5",
+     "unbuild": "^1.1.1",
+     "vitest": "^0.28.5"
+   },
+   "packageManager": "pnpm@7.27.0"
+ }
package/dist/shared/giget.3522139c.cjs DELETED
@@ -1,217 +0,0 @@
- 'use strict';
-
- const promises = require('node:fs/promises');
- const node_os = require('node:os');
- const node_fs = require('node:fs');
- const tar = require('tar');
- const pathe = require('pathe');
- const defu = require('defu');
- const node_stream = require('node:stream');
- const node_child_process = require('node:child_process');
- const node_util = require('node:util');
- const fetch = require('node-fetch-native');
-
- function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e["default"] : e; }
-
- const fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
-
- async function download(url, filePath, opts = {}) {
-   const infoPath = filePath + ".json";
-   const info = JSON.parse(await promises.readFile(infoPath, "utf8").catch(() => "{}"));
-   const headRes = await fetch.fetch(url, { method: "HEAD", headers: opts.headers }).catch(() => null);
-   const etag = headRes?.headers.get("etag");
-   if (info.etag === etag && node_fs.existsSync(filePath)) {
-     return;
-   }
-   info.etag = etag;
-   const res = await fetch.fetch(url, { headers: opts.headers });
-   if (res.status >= 400) {
-     throw new Error(`Failed to download ${url}: ${res.status} ${res.statusText}`);
-   }
-   const stream = node_fs.createWriteStream(filePath);
-   await node_util.promisify(node_stream.pipeline)(res.body, stream);
-   await promises.writeFile(infoPath, JSON.stringify(info), "utf8");
- }
- const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w.-]+)?/;
- function parseGitURI(input) {
-   const m = input.match(inputRegex)?.groups;
-   return {
-     repo: m.repo,
-     subdir: m.subdir || "/",
-     ref: m.ref ? m.ref.substring(1) : "main"
-   };
- }
- function debug(...args) {
-   if (process.env.DEBUG) {
-     console.debug("[giget]", ...args);
-   }
- }
- function currentShell() {
-   if (process.env.SHELL) {
-     return process.env.SHELL;
-   }
-   if (process.platform === "win32") {
-     return "cmd.exe";
-   }
-   return "/bin/bash";
- }
- function startShell(cwd) {
-   cwd = pathe.resolve(cwd);
-   const shell = currentShell();
-   console.info(`(experimental) Opening shell in ${pathe.relative(process.cwd(), cwd)}...`);
-   node_child_process.spawnSync(shell, [], {
-     cwd,
-     shell: true,
-     stdio: "inherit"
-   });
- }
-
- const github = (input, opts) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: { Authorization: opts.auth ? `Bearer ${opts.auth}` : void 0 },
-     url: `https://github.com/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-     tar: `https://github.com/${parsed.repo}/archive/${parsed.ref}.tar.gz`
-   };
- };
- const gitlab = (input, opts) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: { Authorization: opts.auth ? `Bearer ${opts.auth}` : void 0 },
-     url: `https://gitlab.com/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-     tar: `https://gitlab.com/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
-   };
- };
- const bitbucket = (input, opts) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: { Authorization: opts.auth ? `Bearer ${opts.auth}` : void 0 },
-     url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
-     tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
-   };
- };
- const sourcehut = (input, opts) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: { Authorization: opts.auth ? `Bearer ${opts.auth}` : void 0 },
-     url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
-     tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
-   };
- };
- const providers = {
-   github,
-   gh: github,
-   gitlab,
-   bitbucket,
-   sourcehut
- };
-
- const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
- const registryProvider = (registryEndpoint = DEFAULT_REGISTRY) => {
-   return async (input) => {
-     const start = Date.now();
-     const registryURL = `${registryEndpoint}/${input}.json`;
-     const res = await fetch__default(registryURL);
-     if (res.status >= 400) {
-       throw new Error(`Failed to download ${input} template info from ${registryURL}: ${res.status} ${res.statusText}`);
-     }
-     const info = await res.json();
-     if (!info.tar || !info.name) {
-       throw new Error(`Invalid template info from ${registryURL}. name or tar fields are missing!`);
-     }
-     debug(`Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`);
-     return info;
-   };
- };
-
- const sourceProtoRe = /^([\w-.]+):/;
- async function downloadTemplate(input, opts = {}) {
-   opts = defu.defu({
-     registry: process.env.GIGET_REGISTRY,
-     auth: process.env.GIGET_AUTH
-   }, opts);
-   const registry = opts.registry !== false ? registryProvider(opts.registry) : null;
-   let providerName = opts.provider || (registryProvider ? "registry" : "github");
-   let source = input;
-   const sourceProvierMatch = input.match(sourceProtoRe);
-   if (sourceProvierMatch) {
-     providerName = sourceProvierMatch[1];
-     source = input.substring(sourceProvierMatch[0].length);
-   }
-   const provider = opts.providers?.[providerName] || providers[providerName] || registry;
-   if (!provider) {
-     throw new Error(`Unsupported provider: ${providerName}`);
-   }
-   const template = await Promise.resolve().then(() => provider(source, { auth: opts.auth })).catch((err) => {
-     throw new Error(`Failed to download template from ${providerName}: ${err.message}`);
-   });
-   template.name = (template.name || "template").replace(/[^a-z0-9-]/gi, "-");
-   template.defaultDir = (template.defaultDir || template.name).replace(/[^a-z0-9-]/gi, "-");
-   const cwd = pathe.resolve(opts.cwd || ".");
-   const extractPath = pathe.resolve(cwd, opts.dir || template.defaultDir);
-   if (opts.forceClean) {
-     await promises.rm(extractPath, { recursive: true, force: true });
-   }
-   if (!opts.force && node_fs.existsSync(extractPath) && node_fs.readdirSync(extractPath).length) {
-     throw new Error(`Destination ${extractPath} already exists.`);
-   }
-   await promises.mkdir(extractPath, { recursive: true });
-   const tmpDir = pathe.resolve(node_os.homedir(), ".giget", opts.provider, template.name);
-   const tarPath = pathe.resolve(tmpDir, (template.version || template.name) + ".tar.gz");
-   if (opts.preferOffline && node_fs.existsSync(tarPath)) {
-     opts.offline = true;
-   }
-   if (!opts.offline) {
-     await promises.mkdir(pathe.dirname(tarPath), { recursive: true });
-     const s2 = Date.now();
-     await download(template.tar, tarPath, { headers: template.headers }).catch((err) => {
-       if (!node_fs.existsSync(tarPath)) {
-         throw err;
-       }
-       debug("Download error. Using cached version:", err);
-       opts.offline = true;
-     });
-     debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
-   }
-   if (!node_fs.existsSync(tarPath)) {
-     throw new Error(`Tarball not found: ${tarPath} (offline: ${opts.offline})`);
-   }
-   const s = Date.now();
-   const subdir = template.subdir?.replace(/^\//, "") || "";
-   await tar.extract({
-     file: tarPath,
-     cwd: extractPath,
-     onentry(entry) {
-       entry.path = entry.path.split("/").splice(1).join("/");
-       if (subdir) {
-         if (entry.path.startsWith(subdir + "/")) {
-           entry.path = entry.path.substring(subdir.length);
-         } else {
-           entry.path = "";
-         }
-       }
-     }
-   });
-   debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
-   return {
-     ...template,
-     source,
-     dir: extractPath
-   };
- }
-
- exports.downloadTemplate = downloadTemplate;
- exports.registryProvider = registryProvider;
- exports.startShell = startShell;
package/dist/shared/giget.dd19862e.mjs DELETED
@@ -1,209 +0,0 @@
- import { readFile, writeFile, rm, mkdir } from 'node:fs/promises';
- import { homedir } from 'node:os';
- import { existsSync, createWriteStream, readdirSync } from 'node:fs';
- import { extract } from 'tar';
- import { resolve, relative, dirname } from 'pathe';
- import { defu } from 'defu';
- import { pipeline } from 'node:stream';
- import { spawnSync } from 'node:child_process';
- import { promisify } from 'node:util';
- import fetch$1, { fetch } from 'node-fetch-native';
-
- async function download(url, filePath, opts = {}) {
-   const infoPath = filePath + ".json";
-   const info = JSON.parse(await readFile(infoPath, "utf8").catch(() => "{}"));
-   const headRes = await fetch(url, { method: "HEAD", headers: opts.headers }).catch(() => null);
-   const etag = headRes?.headers.get("etag");
-   if (info.etag === etag && existsSync(filePath)) {
-     return;
-   }
-   info.etag = etag;
-   const res = await fetch(url, { headers: opts.headers });
-   if (res.status >= 400) {
-     throw new Error(`Failed to download ${url}: ${res.status} ${res.statusText}`);
-   }
-   const stream = createWriteStream(filePath);
-   await promisify(pipeline)(res.body, stream);
-   await writeFile(infoPath, JSON.stringify(info), "utf8");
- }
- const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w.-]+)?/;
- function parseGitURI(input) {
-   const m = input.match(inputRegex)?.groups;
-   return {
-     repo: m.repo,
-     subdir: m.subdir || "/",
-     ref: m.ref ? m.ref.substring(1) : "main"
-   };
- }
- function debug(...args) {
-   if (process.env.DEBUG) {
-     console.debug("[giget]", ...args);
-   }
- }
- function currentShell() {
-   if (process.env.SHELL) {
-     return process.env.SHELL;
-   }
-   if (process.platform === "win32") {
-     return "cmd.exe";
-   }
-   return "/bin/bash";
- }
- function startShell(cwd) {
-   cwd = resolve(cwd);
-   const shell = currentShell();
-   console.info(`(experimental) Opening shell in ${relative(process.cwd(), cwd)}...`);
-   spawnSync(shell, [], {
-     cwd,
-     shell: true,
-     stdio: "inherit"
-   });
- }
-
- const github = (input, opts) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: { Authorization: opts.auth ? `Bearer ${opts.auth}` : void 0 },
-     url: `https://github.com/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-     tar: `https://github.com/${parsed.repo}/archive/${parsed.ref}.tar.gz`
-   };
- };
- const gitlab = (input, opts) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: { Authorization: opts.auth ? `Bearer ${opts.auth}` : void 0 },
-     url: `https://gitlab.com/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
-     tar: `https://gitlab.com/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
-   };
- };
- const bitbucket = (input, opts) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: { Authorization: opts.auth ? `Bearer ${opts.auth}` : void 0 },
-     url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
-     tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
-   };
- };
- const sourcehut = (input, opts) => {
-   const parsed = parseGitURI(input);
-   return {
-     name: parsed.repo.replace("/", "-"),
-     version: parsed.ref,
-     subdir: parsed.subdir,
-     headers: { Authorization: opts.auth ? `Bearer ${opts.auth}` : void 0 },
-     url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
-     tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
-   };
- };
- const providers = {
-   github,
-   gh: github,
-   gitlab,
-   bitbucket,
-   sourcehut
- };
-
- const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
- const registryProvider = (registryEndpoint = DEFAULT_REGISTRY) => {
-   return async (input) => {
-     const start = Date.now();
-     const registryURL = `${registryEndpoint}/${input}.json`;
-     const res = await fetch$1(registryURL);
-     if (res.status >= 400) {
-       throw new Error(`Failed to download ${input} template info from ${registryURL}: ${res.status} ${res.statusText}`);
-     }
-     const info = await res.json();
-     if (!info.tar || !info.name) {
-       throw new Error(`Invalid template info from ${registryURL}. name or tar fields are missing!`);
-     }
-     debug(`Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`);
-     return info;
-   };
- };
-
- const sourceProtoRe = /^([\w-.]+):/;
- async function downloadTemplate(input, opts = {}) {
-   opts = defu({
-     registry: process.env.GIGET_REGISTRY,
-     auth: process.env.GIGET_AUTH
-   }, opts);
-   const registry = opts.registry !== false ? registryProvider(opts.registry) : null;
-   let providerName = opts.provider || (registryProvider ? "registry" : "github");
-   let source = input;
-   const sourceProvierMatch = input.match(sourceProtoRe);
-   if (sourceProvierMatch) {
-     providerName = sourceProvierMatch[1];
-     source = input.substring(sourceProvierMatch[0].length);
-   }
-   const provider = opts.providers?.[providerName] || providers[providerName] || registry;
-   if (!provider) {
-     throw new Error(`Unsupported provider: ${providerName}`);
-   }
-   const template = await Promise.resolve().then(() => provider(source, { auth: opts.auth })).catch((err) => {
-     throw new Error(`Failed to download template from ${providerName}: ${err.message}`);
-   });
-   template.name = (template.name || "template").replace(/[^a-z0-9-]/gi, "-");
-   template.defaultDir = (template.defaultDir || template.name).replace(/[^a-z0-9-]/gi, "-");
-   const cwd = resolve(opts.cwd || ".");
-   const extractPath = resolve(cwd, opts.dir || template.defaultDir);
-   if (opts.forceClean) {
-     await rm(extractPath, { recursive: true, force: true });
-   }
-   if (!opts.force && existsSync(extractPath) && readdirSync(extractPath).length) {
-     throw new Error(`Destination ${extractPath} already exists.`);
-   }
-   await mkdir(extractPath, { recursive: true });
-   const tmpDir = resolve(homedir(), ".giget", opts.provider, template.name);
-   const tarPath = resolve(tmpDir, (template.version || template.name) + ".tar.gz");
-   if (opts.preferOffline && existsSync(tarPath)) {
-     opts.offline = true;
-   }
-   if (!opts.offline) {
-     await mkdir(dirname(tarPath), { recursive: true });
-     const s2 = Date.now();
-     await download(template.tar, tarPath, { headers: template.headers }).catch((err) => {
-       if (!existsSync(tarPath)) {
-         throw err;
-       }
-       debug("Download error. Using cached version:", err);
-       opts.offline = true;
-     });
-     debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
-   }
-   if (!existsSync(tarPath)) {
-     throw new Error(`Tarball not found: ${tarPath} (offline: ${opts.offline})`);
-   }
-   const s = Date.now();
-   const subdir = template.subdir?.replace(/^\//, "") || "";
-   await extract({
-     file: tarPath,
-     cwd: extractPath,
-     onentry(entry) {
-       entry.path = entry.path.split("/").splice(1).join("/");
-       if (subdir) {
-         if (entry.path.startsWith(subdir + "/")) {
-           entry.path = entry.path.substring(subdir.length);
-         } else {
-           entry.path = "";
-         }
-       }
-     }
-   });
-   debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
-   return {
-     ...template,
-     source,
-     dir: extractPath
-   };
- }
-
- export { downloadTemplate as d, registryProvider as r, startShell as s };