@metamask-previews/foundryup 0.0.0-preview-c1fef6e5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/CHANGELOG.md +10 -0
  2. package/LICENSE +20 -0
  3. package/README.md +47 -0
  4. package/dist/cli.cjs +10 -0
  5. package/dist/cli.cjs.map +1 -0
  6. package/dist/cli.d.cts +3 -0
  7. package/dist/cli.d.cts.map +1 -0
  8. package/dist/cli.d.mts +3 -0
  9. package/dist/cli.d.mts.map +1 -0
  10. package/dist/cli.mjs +8 -0
  11. package/dist/cli.mjs.map +1 -0
  12. package/dist/download.cjs +68 -0
  13. package/dist/download.cjs.map +1 -0
  14. package/dist/download.d.cts +18 -0
  15. package/dist/download.d.cts.map +1 -0
  16. package/dist/download.d.mts +18 -0
  17. package/dist/download.d.mts.map +1 -0
  18. package/dist/download.mjs +64 -0
  19. package/dist/download.mjs.map +1 -0
  20. package/dist/extract.cjs +184 -0
  21. package/dist/extract.cjs.map +1 -0
  22. package/dist/extract.d.cts +15 -0
  23. package/dist/extract.d.cts.map +1 -0
  24. package/dist/extract.d.mts +15 -0
  25. package/dist/extract.d.mts.map +1 -0
  26. package/dist/extract.mjs +180 -0
  27. package/dist/extract.mjs.map +1 -0
  28. package/dist/index.cjs +116 -0
  29. package/dist/index.cjs.map +1 -0
  30. package/dist/index.d.cts +11 -0
  31. package/dist/index.d.cts.map +1 -0
  32. package/dist/index.d.mts +11 -0
  33. package/dist/index.d.mts.map +1 -0
  34. package/dist/index.mjs +109 -0
  35. package/dist/index.mjs.map +1 -0
  36. package/dist/options.cjs +135 -0
  37. package/dist/options.cjs.map +1 -0
  38. package/dist/options.d.cts +52 -0
  39. package/dist/options.d.cts.map +1 -0
  40. package/dist/options.d.mts +52 -0
  41. package/dist/options.d.mts.map +1 -0
  42. package/dist/options.mjs +127 -0
  43. package/dist/options.mjs.map +1 -0
  44. package/dist/types.cjs +30 -0
  45. package/dist/types.cjs.map +1 -0
  46. package/dist/types.d.cts +72 -0
  47. package/dist/types.d.cts.map +1 -0
  48. package/dist/types.d.mts +72 -0
  49. package/dist/types.d.mts.map +1 -0
  50. package/dist/types.mjs +27 -0
  51. package/dist/types.mjs.map +1 -0
  52. package/dist/utils.cjs +104 -0
  53. package/dist/utils.cjs.map +1 -0
  54. package/dist/utils.d.cts +44 -0
  55. package/dist/utils.d.cts.map +1 -0
  56. package/dist/utils.d.mts +44 -0
  57. package/dist/utils.d.mts.map +1 -0
  58. package/dist/utils.mjs +95 -0
  59. package/dist/utils.mjs.map +1 -0
  60. package/package.json +66 -0
package/CHANGELOG.md ADDED
@@ -0,0 +1,10 @@
1
+ # Changelog
2
+
3
+ All notable changes to this project will be documented in this file.
4
+
5
+ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
+ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
+
8
+ ## [Unreleased]
9
+
10
+ [Unreleased]: https://github.com/MetaMask/core/
package/LICENSE ADDED
@@ -0,0 +1,20 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 MetaMask
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,47 @@
1
+ # `@metamask/foundryup`
2
+
3
+ foundryup
4
+
5
+ ## Installation
6
+
7
+ `yarn add @metamask/foundryup`
8
+
9
+ or
10
+
11
+ `npm install @metamask/foundryup`
12
+
13
+ ## Usage
14
+
15
+ Once installed into a package you can do `yarn bin mm-foundryup`.
16
+
17
+ This will install the latest version of Foundry things by default.
18
+
19
+ Try `yarn bin mm-foundryup --help` for more options.
20
+
21
+ Once you have the binaries installed, you have to figure out how to get to them.
22
+
23
+ Probably best to just add each as a `package.json` script:
24
+
25
+ ```json
26
+ "scripts": {
27
+ "anvil": "node_modules/.bin/anvil",
28
+ }
29
+ ```
30
+
31
+ Kind of weird, but it seems to work okay. You can probably use `npx anvil` in place of `node_modules/.bin/anvil`, but
32
+ getting it to work in all scenarios (cross platform and in CI) wasn't straightforward. `yarn bin anvil` doesn't work
33
+ in yarn v4 because it isn't a bin of `@metamask/foundryup`, so yarn pretends it doesn't exist.
34
+
35
+ This all needs to work.
36
+
37
+ ---
38
+
39
+ You can try it here in the monorepo by running `yarn workspace @metamask/foundryup anvil`.
40
+
41
+
42
+
43
+ ## Contributing
44
+
45
+ This package is part of a monorepo. Instructions for contributing can be found in the [monorepo README](https://github.com/MetaMask/core#readme).
46
+
47
+
package/dist/cli.cjs ADDED
@@ -0,0 +1,10 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ Object.defineProperty(exports, "__esModule", { value: true });
4
+ const node_process_1 = require("node:process");
5
+ const _1 = require(".//index.cjs");
6
+ (0, _1.downloadAndInstallFoundryBinaries)().catch((error) => {
7
+ console.error('Error:', error);
8
+ (0, node_process_1.exit)(1);
9
+ });
10
+ //# sourceMappingURL=cli.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cli.cjs","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";;;AAEA,+CAAoC;AACpC,mCAAuD;AAEvD,IAAA,oCAAiC,GAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;IAChD,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAC/B,IAAA,mBAAI,EAAC,CAAC,CAAC,CAAC;AACZ,CAAC,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\nimport { exit } from \"node:process\";\nimport { downloadAndInstallFoundryBinaries } from \"./\";\n\ndownloadAndInstallFoundryBinaries().catch((error) => {\n console.error('Error:', error);\n exit(1);\n});"]}
package/dist/cli.d.cts ADDED
@@ -0,0 +1,3 @@
1
+ #!/usr/bin/env node
2
+ export {};
3
+ //# sourceMappingURL=cli.d.cts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cli.d.cts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":""}
package/dist/cli.d.mts ADDED
@@ -0,0 +1,3 @@
1
+ #!/usr/bin/env node
2
+ export {};
3
+ //# sourceMappingURL=cli.d.mts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cli.d.mts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":""}
package/dist/cli.mjs ADDED
@@ -0,0 +1,8 @@
1
+ #!/usr/bin/env node
2
+ import { exit } from "node:process";
3
+ import { downloadAndInstallFoundryBinaries } from ".//index.mjs";
4
+ downloadAndInstallFoundryBinaries().catch((error) => {
5
+ console.error('Error:', error);
6
+ exit(1);
7
+ });
8
+ //# sourceMappingURL=cli.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cli.mjs","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,IAAI,EAAE,qBAAqB;AACpC,OAAO,EAAE,iCAAiC,EAAE,qBAAW;AAEvD,iCAAiC,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;IAChD,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAC/B,IAAI,CAAC,CAAC,CAAC,CAAC;AACZ,CAAC,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\nimport { exit } from \"node:process\";\nimport { downloadAndInstallFoundryBinaries } from \"./\";\n\ndownloadAndInstallFoundryBinaries().catch((error) => {\n console.error('Error:', error);\n exit(1);\n});"]}
@@ -0,0 +1,68 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.startDownload = void 0;
4
+ const node_http_1 = require("node:http");
5
+ const node_https_1 = require("node:https");
6
+ const node_stream_1 = require("node:stream");
7
+ const promises_1 = require("node:stream/promises");
8
+ class DownloadStream extends node_stream_1.Stream.PassThrough {
9
+ async response() {
10
+ return new Promise((resolve, reject) => {
11
+ this.once('response', resolve);
12
+ this.once('error', reject);
13
+ });
14
+ }
15
+ }
16
+ /**
17
+ * Starts a download from the given URL.
18
+ *
19
+ * @param url - The URL to download from
20
+ * @param options - The download options
21
+ * @param redirects - The number of redirects that have occurred
22
+ * @returns A stream of the download
23
+ */
24
+ function startDownload(url, options = {}, redirects = 0) {
25
+ const MAX_REDIRECTS = options.maxRedirects ?? 5;
26
+ const request = url.protocol === 'http:' ? node_http_1.request : node_https_1.request;
27
+ const stream = new DownloadStream();
28
+ request(url, options, async (response) => {
29
+ stream.once('close', () => {
30
+ response.destroy();
31
+ });
32
+ const { statusCode, statusMessage, headers } = response;
33
+ // handle redirects
34
+ if (statusCode &&
35
+ statusCode >= 300 &&
36
+ statusCode < 400 &&
37
+ headers.location) {
38
+ if (redirects >= MAX_REDIRECTS) {
39
+ stream.emit('error', new Error('Too many redirects'));
40
+ response.destroy();
41
+ }
42
+ else {
43
+ // note: we don't emit a response until we're done redirecting, because
44
+ // handlers only expect it to be emitted once.
45
+ await (0, promises_1.pipeline)(startDownload(new URL(headers.location, url), options, redirects + 1)
46
+ // emit the response event to the stream
47
+ .once('response', stream.emit.bind(stream, 'response')), stream).catch(stream.emit.bind(stream, 'error'));
48
+ response.destroy();
49
+ }
50
+ }
51
+ // check for HTTP errors
52
+ else if (!statusCode || statusCode < 200 || statusCode >= 300) {
53
+ stream.emit('error', new Error(`Request to ${url} failed. Status Code: ${statusCode} - ${statusMessage}`));
54
+ response.destroy();
55
+ }
56
+ else {
57
+ // resolve with response stream
58
+ stream.emit('response', response);
59
+ response.once('error', stream.emit.bind(stream, 'error'));
60
+ await (0, promises_1.pipeline)(response, stream).catch(stream.emit.bind(stream, 'error'));
61
+ }
62
+ })
63
+ .once('error', stream.emit.bind(stream, 'error'))
64
+ .end();
65
+ return stream;
66
+ }
67
+ exports.startDownload = startDownload;
68
+ //# sourceMappingURL=download.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"download.cjs","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";;;AAAA,yCAAyE;AACzE,2CAAqD;AACrD,6CAAqC;AACrC,mDAAgD;AAGhD,MAAM,cAAe,SAAQ,oBAAM,CAAC,WAAW;IAC7C,KAAK,CAAC,QAAQ;QACZ,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;YAC/B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC,CAAC;IACL,CAAC;CACF;AAED;;;;;;;GAOG;AAEH,SAAgB,aAAa,CAC3B,GAAQ,EACR,UAA2B,EAAE,EAC7B,YAAoB,CAAC;IAErB,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,IAAI,CAAC,CAAC;IAChD,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,mBAAW,CAAC,CAAC,CAAC,oBAAY,CAAC;IACtE,MAAM,MAAM,GAAG,IAAI,cAAc,EAAE,CAAC;IACpC,OAAO,CAAC,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,EAAE;QACvC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE;YACxB,QAAQ,CAAC,OAAO,EAAE,CAAC;QACrB,CAAC,CAAC,CAAC;QAEH,MAAM,EAAE,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,GAAG,QAAQ,CAAC;QACxD,mBAAmB;QACnB,IACE,UAAU;YACV,UAAU,IAAI,GAAG;YACjB,UAAU,GAAG,GAAG;YAChB,OAAO,CAAC,QAAQ,EAChB;YACA,IAAI,SAAS,IAAI,aAAa,EAAE;gBAC9B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC,CAAC;gBACtD,QAAQ,CAAC,OAAO,EAAE,CAAC;aACpB;iBAAM;gBACL,uEAAuE;gBACvE,8CAA8C;gBAC9C,MAAM,IAAA,mBAAQ,EACZ,aAAa,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,OAAO,EAAE,SAAS,GAAG,CAAC,CAAC;oBACnE,wCAAwC;qBACvC,IAAI,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC,EACzD,MAAM,CACP,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;gBAC3C,QAAQ,CAAC,OAAO,EAAE,CAAC;aACpB;SACF;QAED,wBAAwB;aACnB,IAAI,CAAC,UAAU,IAAI,UAAU,GAAG,GAAG,IAAI,UAAU,IAAI,GAAG,EAAE;YAC7D,MAAM,CAAC,IAAI,CACT,OAAO,EACP,IAAI,KAAK,CACP,cAAc,GAAG,yBAAyB,UAAU,MAAM,aAAa,EAAE,CAC1E,CACF,CAAC;YACF,QAAQ,CAAC,OAAO,EAAE,CAAC;SACpB;aAAM;YACL,+BAA+B;YAC/B,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;YAElC,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;YAC1D,MAAM,IAAA,mBAAQ,EAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OA
AO,CAAC,CAAC,CAAC;SAC3E;IACH,CAAC,CAAC;SACC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;SAChD,GAAG,EAAE,CAAC;IACT,OAAO,MAAM,CAAC;AAChB,CAAC;AAzDD,sCAyDC","sourcesContent":["import { request as httpRequest, type IncomingMessage } from 'node:http';\nimport { request as httpsRequest } from 'node:https';\nimport { Stream } from 'node:stream';\nimport { pipeline } from 'node:stream/promises';\nimport type { DownloadOptions } from './types';\n\nclass DownloadStream extends Stream.PassThrough {\n async response(): Promise<IncomingMessage> {\n return new Promise((resolve, reject) => {\n this.once('response', resolve);\n this.once('error', reject);\n });\n }\n}\n\n/**\n * Starts a download from the given URL.\n *\n * @param url - The URL to download from\n * @param options - The download options\n * @param redirects - The number of redirects that have occurred\n * @returns A stream of the download\n */\n\nexport function startDownload(\n url: URL,\n options: DownloadOptions = {},\n redirects: number = 0,\n) {\n const MAX_REDIRECTS = options.maxRedirects ?? 5;\n const request = url.protocol === 'http:' ? 
httpRequest : httpsRequest;\n const stream = new DownloadStream();\n request(url, options, async (response) => {\n stream.once('close', () => {\n response.destroy();\n });\n\n const { statusCode, statusMessage, headers } = response;\n // handle redirects\n if (\n statusCode &&\n statusCode >= 300 &&\n statusCode < 400 &&\n headers.location\n ) {\n if (redirects >= MAX_REDIRECTS) {\n stream.emit('error', new Error('Too many redirects'));\n response.destroy();\n } else {\n // note: we don't emit a response until we're done redirecting, because\n // handlers only expect it to be emitted once.\n await pipeline(\n startDownload(new URL(headers.location, url), options, redirects + 1)\n // emit the response event to the stream\n .once('response', stream.emit.bind(stream, 'response')),\n stream,\n ).catch(stream.emit.bind(stream, 'error'));\n response.destroy();\n }\n }\n\n // check for HTTP errors\n else if (!statusCode || statusCode < 200 || statusCode >= 300) {\n stream.emit(\n 'error',\n new Error(\n `Request to ${url} failed. Status Code: ${statusCode} - ${statusMessage}`,\n ),\n );\n response.destroy();\n } else {\n // resolve with response stream\n stream.emit('response', response);\n\n response.once('error', stream.emit.bind(stream, 'error'));\n await pipeline(response, stream).catch(stream.emit.bind(stream, 'error'));\n }\n })\n .once('error', stream.emit.bind(stream, 'error'))\n .end();\n return stream;\n}\n"]}
@@ -0,0 +1,18 @@
1
+ /// <reference types="node" />
2
+ import { type IncomingMessage } from "node:http";
3
+ import { Stream } from "node:stream";
4
+ import type { DownloadOptions } from "./types.cjs";
5
+ declare class DownloadStream extends Stream.PassThrough {
6
+ response(): Promise<IncomingMessage>;
7
+ }
8
+ /**
9
+ * Starts a download from the given URL.
10
+ *
11
+ * @param url - The URL to download from
12
+ * @param options - The download options
13
+ * @param redirects - The number of redirects that have occurred
14
+ * @returns A stream of the download
15
+ */
16
+ export declare function startDownload(url: URL, options?: DownloadOptions, redirects?: number): DownloadStream;
17
+ export {};
18
+ //# sourceMappingURL=download.d.cts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"download.d.cts","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";AAAA,OAAO,EAA0B,KAAK,eAAe,EAAE,kBAAkB;AAEzE,OAAO,EAAE,MAAM,EAAE,oBAAoB;AAErC,OAAO,KAAK,EAAE,eAAe,EAAE,oBAAgB;AAE/C,cAAM,cAAe,SAAQ,MAAM,CAAC,WAAW;IACvC,QAAQ,IAAI,OAAO,CAAC,eAAe,CAAC;CAM3C;AAED;;;;;;;GAOG;AAEH,wBAAgB,aAAa,CAC3B,GAAG,EAAE,GAAG,EACR,OAAO,GAAE,eAAoB,EAC7B,SAAS,GAAE,MAAU,kBAsDtB"}
@@ -0,0 +1,18 @@
1
+ /// <reference types="node" />
2
+ import { type IncomingMessage } from "node:http";
3
+ import { Stream } from "node:stream";
4
+ import type { DownloadOptions } from "./types.mjs";
5
+ declare class DownloadStream extends Stream.PassThrough {
6
+ response(): Promise<IncomingMessage>;
7
+ }
8
+ /**
9
+ * Starts a download from the given URL.
10
+ *
11
+ * @param url - The URL to download from
12
+ * @param options - The download options
13
+ * @param redirects - The number of redirects that have occurred
14
+ * @returns A stream of the download
15
+ */
16
+ export declare function startDownload(url: URL, options?: DownloadOptions, redirects?: number): DownloadStream;
17
+ export {};
18
+ //# sourceMappingURL=download.d.mts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"download.d.mts","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";AAAA,OAAO,EAA0B,KAAK,eAAe,EAAE,kBAAkB;AAEzE,OAAO,EAAE,MAAM,EAAE,oBAAoB;AAErC,OAAO,KAAK,EAAE,eAAe,EAAE,oBAAgB;AAE/C,cAAM,cAAe,SAAQ,MAAM,CAAC,WAAW;IACvC,QAAQ,IAAI,OAAO,CAAC,eAAe,CAAC;CAM3C;AAED;;;;;;;GAOG;AAEH,wBAAgB,aAAa,CAC3B,GAAG,EAAE,GAAG,EACR,OAAO,GAAE,eAAoB,EAC7B,SAAS,GAAE,MAAU,kBAsDtB"}
@@ -0,0 +1,64 @@
1
+ import { request as httpRequest } from "node:http";
2
+ import { request as httpsRequest } from "node:https";
3
+ import { Stream } from "node:stream";
4
+ import { pipeline } from "node:stream/promises";
5
+ class DownloadStream extends Stream.PassThrough {
6
+ async response() {
7
+ return new Promise((resolve, reject) => {
8
+ this.once('response', resolve);
9
+ this.once('error', reject);
10
+ });
11
+ }
12
+ }
13
+ /**
14
+ * Starts a download from the given URL.
15
+ *
16
+ * @param url - The URL to download from
17
+ * @param options - The download options
18
+ * @param redirects - The number of redirects that have occurred
19
+ * @returns A stream of the download
20
+ */
21
+ export function startDownload(url, options = {}, redirects = 0) {
22
+ const MAX_REDIRECTS = options.maxRedirects ?? 5;
23
+ const request = url.protocol === 'http:' ? httpRequest : httpsRequest;
24
+ const stream = new DownloadStream();
25
+ request(url, options, async (response) => {
26
+ stream.once('close', () => {
27
+ response.destroy();
28
+ });
29
+ const { statusCode, statusMessage, headers } = response;
30
+ // handle redirects
31
+ if (statusCode &&
32
+ statusCode >= 300 &&
33
+ statusCode < 400 &&
34
+ headers.location) {
35
+ if (redirects >= MAX_REDIRECTS) {
36
+ stream.emit('error', new Error('Too many redirects'));
37
+ response.destroy();
38
+ }
39
+ else {
40
+ // note: we don't emit a response until we're done redirecting, because
41
+ // handlers only expect it to be emitted once.
42
+ await pipeline(startDownload(new URL(headers.location, url), options, redirects + 1)
43
+ // emit the response event to the stream
44
+ .once('response', stream.emit.bind(stream, 'response')), stream).catch(stream.emit.bind(stream, 'error'));
45
+ response.destroy();
46
+ }
47
+ }
48
+ // check for HTTP errors
49
+ else if (!statusCode || statusCode < 200 || statusCode >= 300) {
50
+ stream.emit('error', new Error(`Request to ${url} failed. Status Code: ${statusCode} - ${statusMessage}`));
51
+ response.destroy();
52
+ }
53
+ else {
54
+ // resolve with response stream
55
+ stream.emit('response', response);
56
+ response.once('error', stream.emit.bind(stream, 'error'));
57
+ await pipeline(response, stream).catch(stream.emit.bind(stream, 'error'));
58
+ }
59
+ })
60
+ .once('error', stream.emit.bind(stream, 'error'))
61
+ .end();
62
+ return stream;
63
+ }
64
+ //# sourceMappingURL=download.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"download.mjs","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,WAAW,EAAwB,kBAAkB;AACzE,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,mBAAmB;AACrD,OAAO,EAAE,MAAM,EAAE,oBAAoB;AACrC,OAAO,EAAE,QAAQ,EAAE,6BAA6B;AAGhD,MAAM,cAAe,SAAQ,MAAM,CAAC,WAAW;IAC7C,KAAK,CAAC,QAAQ;QACZ,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;YAC/B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC,CAAC;IACL,CAAC;CACF;AAED;;;;;;;GAOG;AAEH,MAAM,UAAU,aAAa,CAC3B,GAAQ,EACR,UAA2B,EAAE,EAC7B,YAAoB,CAAC;IAErB,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,IAAI,CAAC,CAAC;IAChD,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC;IACtE,MAAM,MAAM,GAAG,IAAI,cAAc,EAAE,CAAC;IACpC,OAAO,CAAC,GAAG,EAAE,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,EAAE;QACvC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE;YACxB,QAAQ,CAAC,OAAO,EAAE,CAAC;QACrB,CAAC,CAAC,CAAC;QAEH,MAAM,EAAE,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,GAAG,QAAQ,CAAC;QACxD,mBAAmB;QACnB,IACE,UAAU;YACV,UAAU,IAAI,GAAG;YACjB,UAAU,GAAG,GAAG;YAChB,OAAO,CAAC,QAAQ,EAChB;YACA,IAAI,SAAS,IAAI,aAAa,EAAE;gBAC9B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC,CAAC;gBACtD,QAAQ,CAAC,OAAO,EAAE,CAAC;aACpB;iBAAM;gBACL,uEAAuE;gBACvE,8CAA8C;gBAC9C,MAAM,QAAQ,CACZ,aAAa,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,OAAO,EAAE,SAAS,GAAG,CAAC,CAAC;oBACnE,wCAAwC;qBACvC,IAAI,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC,EACzD,MAAM,CACP,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;gBAC3C,QAAQ,CAAC,OAAO,EAAE,CAAC;aACpB;SACF;QAED,wBAAwB;aACnB,IAAI,CAAC,UAAU,IAAI,UAAU,GAAG,GAAG,IAAI,UAAU,IAAI,GAAG,EAAE;YAC7D,MAAM,CAAC,IAAI,CACT,OAAO,EACP,IAAI,KAAK,CACP,cAAc,GAAG,yBAAyB,UAAU,MAAM,aAAa,EAAE,CAC1E,CACF,CAAC;YACF,QAAQ,CAAC,OAAO,EAAE,CAAC;SACpB;aAAM;YACL,+BAA+B;YAC/B,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;YAElC,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;YAC1D,MAAM,
QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;SAC3E;IACH,CAAC,CAAC;SACC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;SAChD,GAAG,EAAE,CAAC;IACT,OAAO,MAAM,CAAC;AAChB,CAAC","sourcesContent":["import { request as httpRequest, type IncomingMessage } from 'node:http';\nimport { request as httpsRequest } from 'node:https';\nimport { Stream } from 'node:stream';\nimport { pipeline } from 'node:stream/promises';\nimport type { DownloadOptions } from './types';\n\nclass DownloadStream extends Stream.PassThrough {\n async response(): Promise<IncomingMessage> {\n return new Promise((resolve, reject) => {\n this.once('response', resolve);\n this.once('error', reject);\n });\n }\n}\n\n/**\n * Starts a download from the given URL.\n *\n * @param url - The URL to download from\n * @param options - The download options\n * @param redirects - The number of redirects that have occurred\n * @returns A stream of the download\n */\n\nexport function startDownload(\n url: URL,\n options: DownloadOptions = {},\n redirects: number = 0,\n) {\n const MAX_REDIRECTS = options.maxRedirects ?? 5;\n const request = url.protocol === 'http:' ? 
httpRequest : httpsRequest;\n const stream = new DownloadStream();\n request(url, options, async (response) => {\n stream.once('close', () => {\n response.destroy();\n });\n\n const { statusCode, statusMessage, headers } = response;\n // handle redirects\n if (\n statusCode &&\n statusCode >= 300 &&\n statusCode < 400 &&\n headers.location\n ) {\n if (redirects >= MAX_REDIRECTS) {\n stream.emit('error', new Error('Too many redirects'));\n response.destroy();\n } else {\n // note: we don't emit a response until we're done redirecting, because\n // handlers only expect it to be emitted once.\n await pipeline(\n startDownload(new URL(headers.location, url), options, redirects + 1)\n // emit the response event to the stream\n .once('response', stream.emit.bind(stream, 'response')),\n stream,\n ).catch(stream.emit.bind(stream, 'error'));\n response.destroy();\n }\n }\n\n // check for HTTP errors\n else if (!statusCode || statusCode < 200 || statusCode >= 300) {\n stream.emit(\n 'error',\n new Error(\n `Request to ${url} failed. Status Code: ${statusCode} - ${statusMessage}`,\n ),\n );\n response.destroy();\n } else {\n // resolve with response stream\n stream.emit('response', response);\n\n response.once('error', stream.emit.bind(stream, 'error'));\n await pipeline(response, stream).catch(stream.emit.bind(stream, 'error'));\n }\n })\n .once('error', stream.emit.bind(stream, 'error'))\n .end();\n return stream;\n}\n"]}
@@ -0,0 +1,184 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.extractFrom = void 0;
4
+ const strict_1 = require("node:assert/strict");
5
+ const node_crypto_1 = require("node:crypto");
6
+ const node_fs_1 = require("node:fs");
7
+ const promises_1 = require("node:fs/promises");
8
+ const node_http_1 = require("node:http");
9
+ const node_https_1 = require("node:https");
10
+ const node_path_1 = require("node:path");
11
+ const promises_2 = require("node:stream/promises");
12
+ const minipass_1 = require("minipass");
13
+ const tar_1 = require("tar");
14
+ const unzipper_1 = require("unzipper");
15
+ const utils_1 = require("./utils.cjs");
16
+ const download_1 = require("./download.cjs");
17
+ const types_1 = require("./types.cjs");
18
+ /**
19
+ * Extracts the binaries from the given URL and writes them to the destination.
20
+ *
21
+ * @param url - The URL of the archive to extract the binaries from
22
+ * @param binaries - The list of binaries to extract
23
+ * @param dir - The destination directory
24
+ * @param checksums - The checksums to verify the binaries against
25
+ * @returns The list of binaries extracted
26
+ */
27
+ async function extractFrom(url, binaries, dir, checksums) {
28
+ const extract = url.pathname.toLowerCase().endsWith(types_1.Extension.Tar)
29
+ ? extractFromTar
30
+ : extractFromZip;
31
+ // write all files to a temporary directory first, then rename to the final
32
+ // destination to avoid accidental partial extraction. We don't use
33
+ // `os.tmpdir` for this because `rename` will fail if the directories are on
34
+ // different file systems.
35
+ const tempDir = `${dir}.downloading`;
36
+ const rmOpts = { recursive: true, maxRetries: 3, force: true };
37
+ try {
38
+ // clean up any previous in-progress downloads
39
+ await (0, promises_1.rm)(tempDir, rmOpts);
40
+ // make the temporary directory to extract the binaries to
41
+ await (0, promises_1.mkdir)(tempDir, { recursive: true });
42
+ const downloads = await extract(url, binaries, tempDir, checksums?.algorithm);
43
+ (0, strict_1.ok)(downloads.length === binaries.length, 'Failed to extract all binaries');
44
+ const paths = [];
45
+ for (const { path, binary, checksum } of downloads) {
46
+ if (checksums) {
47
+ (0, utils_1.say)(`verifying checksum for ${binary}`);
48
+ const expected = checksums.binaries[binary];
49
+ if (checksum === expected) {
50
+ (0, utils_1.say)(`checksum verified for ${binary}`);
51
+ }
52
+ else {
53
+ throw new Error(`checksum mismatch for ${binary}, expected ${expected}, got ${checksum}`);
54
+ }
55
+ }
56
+ // add the *final* path to the list of binaries
57
+ paths.push((0, node_path_1.join)(dir, (0, node_path_1.relative)(tempDir, path)));
58
+ }
59
+ // this directory shouldn't exist, but if two simultaneous `yarn foundryup`
60
+ // processes are running, it might. Last process wins, so we remove other
61
+ // `dir`s just in case.
62
+ await (0, promises_1.rm)(dir, rmOpts);
63
+ // everything has been extracted; move the files to their final destination
64
+ await (0, promises_1.rename)(tempDir, dir);
65
+ // return the list of extracted binaries
66
+ return paths;
67
+ }
68
+ catch (error) {
69
+ // if things fail for any reason try to clean up a bit. it is very important
70
+ // to not leave `dir` behind, as its existence is a signal that the binaries
71
+ // are installed.
72
+ const rmErrors = (await Promise.allSettled([(0, promises_1.rm)(tempDir, rmOpts), (0, promises_1.rm)(dir, rmOpts)]))
73
+ .filter((r) => r.status === 'rejected')
74
+ .map((r) => r.reason);
75
+ // if we failed to clean up, create an aggregate error message
76
+ if (rmErrors.length) {
77
+ throw new AggregateError([error, ...rmErrors], 'This is a bug; you should report it.');
78
+ }
79
+ throw error;
80
+ }
81
+ }
82
+ exports.extractFrom = extractFrom;
83
+ /**
84
+ * Extracts the binaries from a tar archive.
85
+ *
86
+ * @param url - The URL of the archive to extract the binaries from
87
+ * @param binaries - The list of binaries to extract
88
+ * @param dir - The destination directory
89
+ * @param checksumAlgorithm - The checksum algorithm to use
90
+ * @returns The list of binaries extracted
91
+ */
92
+ async function extractFromTar(url, binaries, dir, checksumAlgorithm) {
93
+ const downloads = [];
94
+ await (0, promises_2.pipeline)((0, download_1.startDownload)(url), (0, tar_1.extract)({
95
+ cwd: dir,
96
+ transform: (entry) => {
97
+ const absolutePath = entry.absolute;
98
+ if (!absolutePath) {
99
+ throw new Error('Missing absolute path for entry');
100
+ }
101
+ if (checksumAlgorithm) {
102
+ const hash = (0, node_crypto_1.createHash)(checksumAlgorithm);
103
+ const passThrough = new minipass_1.Minipass({ async: true });
104
+ passThrough.pipe(hash);
105
+ passThrough.on('end', () => {
106
+ downloads.push({
107
+ path: absolutePath,
108
+ binary: entry.path,
109
+ checksum: hash.digest('hex'),
110
+ });
111
+ });
112
+ return passThrough;
113
+ }
114
+ // When no checksum is needed, record the entry and return undefined
115
+ // to use the original stream without transformation
116
+ downloads.push({
117
+ path: absolutePath,
118
+ binary: entry.path,
119
+ });
120
+ return undefined;
121
+ },
122
+ }, binaries));
123
+ return downloads;
124
+ }
125
+ /**
126
+ * Extracts the binaries from a zip archive.
127
+ *
128
+ * @param url - The URL of the archive to extract the binaries from
129
+ * @param binaries - The list of binaries to extract
130
+ * @param dir - The destination directory
131
+ * @param checksumAlgorithm - The checksum algorithm to use
132
+ * @returns The list of binaries extracted
133
+ */
134
+ async function extractFromZip(url, binaries, dir, checksumAlgorithm) {
135
+ const agent = new (url.protocol === 'http:' ? node_http_1.Agent : node_https_1.Agent)({
136
+ keepAlive: true,
137
+ });
138
+ const source = {
139
+ async size() {
140
+ const download = (0, download_1.startDownload)(url, { agent, method: 'HEAD' });
141
+ const response = await download.response();
142
+ const contentLength = response.headers['content-length'];
143
+ return contentLength ? parseInt(contentLength, 10) : 0;
144
+ },
145
+ stream(offset, bytes) {
146
+ const options = {
147
+ agent,
148
+ headers: {
149
+ range: `bytes=${offset}-${bytes ? offset + bytes : ''}`,
150
+ },
151
+ };
152
+ return (0, download_1.startDownload)(url, options);
153
+ },
154
+ };
155
+ const { files } = await unzipper_1.Open.custom(source, {});
156
+ const filtered = files.filter(({ path }) => binaries.includes((0, node_path_1.basename)(path, (0, node_path_1.extname)(path))));
157
+ return await Promise.all(filtered.map(async ({ path, stream }) => {
158
+ const dest = (0, node_path_1.join)(dir, path);
159
+ const entry = stream();
160
+ const destStream = (0, node_fs_1.createWriteStream)(dest);
161
+ const binary = (0, node_path_1.basename)(path, (0, node_path_1.extname)(path));
162
+ if (checksumAlgorithm) {
163
+ const hash = (0, node_crypto_1.createHash)(checksumAlgorithm);
164
+ const hashStream = async function* (entryStream) {
165
+ for await (const chunk of entryStream) {
166
+ hash.update(chunk);
167
+ yield chunk;
168
+ }
169
+ };
170
+ await (0, promises_2.pipeline)(entry, hashStream, destStream);
171
+ return {
172
+ path: dest,
173
+ binary,
174
+ checksum: hash.digest('hex'),
175
+ };
176
+ }
177
+ await (0, promises_2.pipeline)(entry, destStream);
178
+ return {
179
+ path: dest,
180
+ binary,
181
+ };
182
+ }));
183
+ }
184
+ //# sourceMappingURL=extract.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"extract.cjs","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":";;;AAAA,+CAAwC;AACxC,6CAAyC;AACzC,qCAA4C;AAC5C,+CAAqD;AACrD,yCAA+C;AAC/C,2CAAiD;AACjD,yCAA8D;AAC9D,mDAAgD;AAChD,uCAAoC;AACpC,6BAA4C;AAC5C,uCAAyD;AACzD,uCAA8B;AAC9B,6CAA2C;AAC3C,uCAA4C;AAE5C;;;;;;;;GAQG;AAEI,KAAK,UAAU,WAAW,CAC/B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,SAAyE;IAEzE,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,iBAAS,CAAC,GAAG,CAAC;QAChE,CAAC,CAAC,cAAc;QAChB,CAAC,CAAC,cAAc,CAAC;IACnB,2EAA2E;IAC3E,mEAAmE;IACnE,4EAA4E;IAC5E,0BAA0B;IAC1B,MAAM,OAAO,GAAG,GAAG,GAAG,cAAc,CAAC;IACrC,MAAM,MAAM,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;IAC/D,IAAI;QACF,8CAA8C;QAC9C,MAAM,IAAA,aAAE,EAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC1B,0DAA0D;QAC1D,MAAM,IAAA,gBAAK,EAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC1C,MAAM,SAAS,GAAG,MAAM,OAAO,CAC7B,GAAG,EACH,QAAQ,EACR,OAAO,EACP,SAAS,EAAE,SAAS,CACrB,CAAC;QACF,IAAA,WAAE,EAAC,SAAS,CAAC,MAAM,KAAK,QAAQ,CAAC,MAAM,EAAE,gCAAgC,CAAC,CAAC;QAE3E,MAAM,KAAK,GAAa,EAAE,CAAC;QAC3B,KAAK,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,EAAE;YAClD,IAAI,SAAS,EAAE;gBACb,IAAA,WAAG,EAAC,0BAA0B,MAAM,EAAE,CAAC,CAAC;gBACxC,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;gBAC5C,IAAI,QAAQ,KAAK,QAAQ,EAAE;oBACzB,IAAA,WAAG,EAAC,yBAAyB,MAAM,EAAE,CAAC,CAAC;iBACxC;qBAAM;oBACL,MAAM,IAAI,KAAK,CACb,yBAAyB,MAAM,cAAc,QAAQ,SAAS,QAAQ,EAAE,CACzE,CAAC;iBACH;aACF;YACD,+CAA+C;YAC/C,KAAK,CAAC,IAAI,CAAC,IAAA,gBAAI,EAAC,GAAG,EAAE,IAAA,oBAAQ,EAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC;SAChD;QAED,2EAA2E;QAC3E,yEAAyE;QACzE,uBAAuB;QACvB,MAAM,IAAA,aAAE,EAAC,GAAG,EAAE,MAAM,CAAC,CAAC;QACtB,2EAA2E;QAC3E,MAAM,IAAA,iBAAM,EAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAC3B,wCAAwC;QACxC,OAAO,KAAK,CAAC;KACd;IAAC,OAAO,KAAK,EAAE;QACd,4EAA4E;QAC5E,4EAA4E;QAC5E,iBAAiB;QACjB,MAAM,QAAQ,GAAG,CACf,MAAM,OAAO,CAAC,UAAU,CAAC,CAAC,IAAA,aAAE,EAAC,OAAO,EAAE,MAAM,CAAC,EAAE,IAAA,aAAE,EAAC,GAAG,EAAE,MAAM,CAAC,CAAC,CAAC,CACjE;aACE,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,KAAK
,UAAU,CAAC;aACtC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAA2B,CAAC,MAAM,CAAC,CAAC;QAEnD,8DAA8D;QAC9D,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,MAAM,IAAI,cAAc,CACtB,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,EACpB,sCAAsC,CACvC,CAAC;SACH;QACD,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AAxED,kCAwEC;AACD;;;;;;;;GAQG;AAEH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,SAAS,GAIT,EAAE,CAAC;IACT,MAAM,IAAA,mBAAQ,EACZ,IAAA,wBAAa,EAAC,GAAG,CAAC,EAClB,IAAA,aAAU,EACR;QACE,GAAG,EAAE,GAAG;QACR,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE;YACnB,MAAM,YAAY,GAAG,KAAK,CAAC,QAAQ,CAAC;YACpC,IAAI,CAAC,YAAY,EAAE;gBACjB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,IAAI,iBAAiB,EAAE;gBACrB,MAAM,IAAI,GAAG,IAAA,wBAAU,EAAC,iBAAiB,CAAC,CAAC;gBAC3C,MAAM,WAAW,GAAG,IAAI,mBAAQ,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;gBAClD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACvB,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBACzB,SAAS,CAAC,IAAI,CAAC;wBACb,IAAI,EAAE,YAAY;wBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;wBAC5B,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;qBAC7B,CAAC,CAAC;gBACL,CAAC,CAAC,CAAC;gBACH,OAAO,WAAW,CAAC;aACpB;YAED,oEAAoE;YACpE,oDAAoD;YACpD,SAAS,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,YAAY;gBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;aAC7B,CAAC,CAAC;YACH,OAAO,SAAS,CAAC;QACnB,CAAC;KACF,EACD,QAAQ,CACT,CACF,CAAC;IACF,OAAO,SAAS,CAAC;AACnB,CAAC;AACD;;;;;;;;GAQG;AAEH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iBAAS,CAAC,CAAC,CAAC,kBAAU,CAAC,CAAC;QACpE,SAAS,EAAE,IAAI;KAChB,CAAC,CAAC;IACH,MAAM,MAAM,GAAW;QACrB,KAAK,CAAC,IAAI;YACR,MAAM,QAAQ,GAAG,IAAA,wBAAa,EAAC,GAAG,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;YAC/D,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAC;YAC3C,MAAM,aAAa,GAAG,QAAQ,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;YACzD,OAAO,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACzD,CAAC;QACD,MAAM,CAAC,MAAc,EAAE,KAAa;YAClC,MAAM,OAAO,GAAG;gBACd,KAAK;gBACL,OAAO,EAAE;oBACP,KAAK,EAAE,SAAS,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;iBACxD;aA
CF,CAAC;YACF,OAAO,IAAA,wBAAa,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;IAEF,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,eAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CACzC,QAAQ,CAAC,QAAQ,CAAC,IAAA,oBAAQ,EAAC,IAAI,EAAE,IAAA,mBAAO,EAAC,IAAI,CAAC,CAAW,CAAC,CAC3D,CAAC;IACF,OAAO,MAAM,OAAO,CAAC,GAAG,CACtB,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE;QACtC,MAAM,IAAI,GAAG,IAAA,gBAAI,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,EAAE,CAAC;QACvB,MAAM,UAAU,GAAG,IAAA,2BAAiB,EAAC,IAAI,CAAC,CAAC;QAC3C,MAAM,MAAM,GAAG,IAAA,oBAAQ,EAAC,IAAI,EAAE,IAAA,mBAAO,EAAC,IAAI,CAAC,CAAW,CAAC;QACvD,IAAI,iBAAiB,EAAE;YACrB,MAAM,IAAI,GAAG,IAAA,wBAAU,EAAC,iBAAiB,CAAC,CAAC;YAC3C,MAAM,UAAU,GAAG,KAAK,SAAS,CAAC,EAAE,WAAkB;gBACpD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,WAAW,EAAE;oBACrC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACnB,MAAM,KAAK,CAAC;iBACb;YACH,CAAC,CAAC;YACF,MAAM,IAAA,mBAAQ,EAAC,KAAK,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC;YAC9C,OAAO;gBACL,IAAI,EAAE,IAAI;gBACV,MAAM;gBACN,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;aAC7B,CAAC;SACH;QACD,MAAM,IAAA,mBAAQ,EAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,IAAI;YACV,MAAM;SACP,CAAC;IACJ,CAAC,CAAC,CACH,CAAC;AACJ,CAAC","sourcesContent":["import { ok } from 'node:assert/strict';\nimport { createHash } from 'node:crypto';\nimport { createWriteStream } from 'node:fs';\nimport { rename, mkdir, rm } from 'node:fs/promises';\nimport { Agent as HttpAgent } from 'node:http';\nimport { Agent as HttpsAgent } from 'node:https';\nimport { join, basename, extname, relative } from 'node:path';\nimport { pipeline } from 'node:stream/promises';\nimport { Minipass } from 'minipass';\nimport { extract as extractTar } from 'tar';\nimport { Open, type Source, type Entry } from 'unzipper';\nimport { say } from './utils';\nimport { startDownload } from './download';\nimport { Extension, Binary } from './types';\n\n/**\n * Extracts the binaries from the given URL and writes them to the destination.\n 
*\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksums - The checksums to verify the binaries against\n * @returns The list of binaries extracted\n */\n\nexport async function extractFrom(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksums: { algorithm: string; binaries: Record<Binary, string> } | null,\n) {\n const extract = url.pathname.toLowerCase().endsWith(Extension.Tar)\n ? extractFromTar\n : extractFromZip;\n // write all files to a temporary directory first, then rename to the final\n // destination to avoid accidental partial extraction. We don't use\n // `os.tmpdir` for this because `rename` will fail if the directories are on\n // different file systems.\n const tempDir = `${dir}.downloading`;\n const rmOpts = { recursive: true, maxRetries: 3, force: true };\n try {\n // clean up any previous in-progress downloads\n await rm(tempDir, rmOpts);\n // make the temporary directory to extract the binaries to\n await mkdir(tempDir, { recursive: true });\n const downloads = await extract(\n url,\n binaries,\n tempDir,\n checksums?.algorithm,\n );\n ok(downloads.length === binaries.length, 'Failed to extract all binaries');\n\n const paths: string[] = [];\n for (const { path, binary, checksum } of downloads) {\n if (checksums) {\n say(`verifying checksum for ${binary}`);\n const expected = checksums.binaries[binary];\n if (checksum === expected) {\n say(`checksum verified for ${binary}`);\n } else {\n throw new Error(\n `checksum mismatch for ${binary}, expected ${expected}, got ${checksum}`,\n );\n }\n }\n // add the *final* path to the list of binaries\n paths.push(join(dir, relative(tempDir, path)));\n }\n\n // this directory shouldn't exist, but if two simultaneous `yarn foundryup`\n // processes are running, it might. 
Last process wins, so we remove other\n // `dir`s just in case.\n await rm(dir, rmOpts);\n // everything has been extracted; move the files to their final destination\n await rename(tempDir, dir);\n // return the list of extracted binaries\n return paths;\n } catch (error) {\n // if things fail for any reason try to clean up a bit. it is very important\n // to not leave `dir` behind, as its existence is a signal that the binaries\n // are installed.\n const rmErrors = (\n await Promise.allSettled([rm(tempDir, rmOpts), rm(dir, rmOpts)])\n )\n .filter((r) => r.status === 'rejected')\n .map((r) => (r as PromiseRejectedResult).reason);\n\n // if we failed to clean up, create an aggregate error message\n if (rmErrors.length) {\n throw new AggregateError(\n [error, ...rmErrors],\n 'This is a bug; you should report it.',\n );\n }\n throw error;\n }\n}\n/**\n * Extracts the binaries from a tar archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\nasync function extractFromTar(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const downloads: {\n path: string;\n binary: Binary;\n checksum?: string;\n }[] = [];\n await pipeline(\n startDownload(url),\n extractTar(\n {\n cwd: dir,\n transform: (entry) => {\n const absolutePath = entry.absolute;\n if (!absolutePath) {\n throw new Error('Missing absolute path for entry');\n }\n\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const passThrough = new Minipass({ async: true });\n passThrough.pipe(hash);\n passThrough.on('end', () => {\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n checksum: hash.digest('hex'),\n });\n });\n return passThrough;\n }\n\n // When no checksum is needed, record the entry and 
return undefined\n // to use the original stream without transformation\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n });\n return undefined;\n },\n },\n binaries,\n ),\n );\n return downloads;\n}\n/**\n * Extracts the binaries from a zip archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\nasync function extractFromZip(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const agent = new (url.protocol === 'http:' ? HttpAgent : HttpsAgent)({\n keepAlive: true,\n });\n const source: Source = {\n async size() {\n const download = startDownload(url, { agent, method: 'HEAD' });\n const response = await download.response();\n const contentLength = response.headers['content-length'];\n return contentLength ? parseInt(contentLength, 10) : 0;\n },\n stream(offset: number, bytes: number) {\n const options = {\n agent,\n headers: {\n range: `bytes=${offset}-${bytes ? 
offset + bytes : ''}`,\n },\n };\n return startDownload(url, options);\n },\n };\n\n const { files } = await Open.custom(source, {});\n const filtered = files.filter(({ path }) =>\n binaries.includes(basename(path, extname(path)) as Binary),\n );\n return await Promise.all(\n filtered.map(async ({ path, stream }) => {\n const dest = join(dir, path);\n const entry = stream();\n const destStream = createWriteStream(dest);\n const binary = basename(path, extname(path)) as Binary;\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const hashStream = async function* (entryStream: Entry) {\n for await (const chunk of entryStream) {\n hash.update(chunk);\n yield chunk;\n }\n };\n await pipeline(entry, hashStream, destStream);\n return {\n path: dest,\n binary,\n checksum: hash.digest('hex'),\n };\n }\n await pipeline(entry, destStream);\n return {\n path: dest,\n binary,\n };\n }),\n );\n}\n"]}
@@ -0,0 +1,15 @@
1
+ import { Binary } from "./types.cjs";
2
+ /**
3
+ * Extracts the binaries from the given URL and writes them to the destination.
4
+ *
5
+ * @param url - The URL of the archive to extract the binaries from
6
+ * @param binaries - The list of binaries to extract
7
+ * @param dir - The destination directory
8
+ * @param checksums - The checksums to verify the binaries against
9
+ * @returns The list of binaries extracted
10
+ */
11
+ export declare function extractFrom(url: URL, binaries: Binary[], dir: string, checksums: {
12
+ algorithm: string;
13
+ binaries: Record<Binary, string>;
14
+ } | null): Promise<string[]>;
15
+ //# sourceMappingURL=extract.d.cts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"extract.d.cts","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAaA,OAAO,EAAa,MAAM,EAAE,oBAAgB;AAE5C;;;;;;;;GAQG;AAEH,wBAAsB,WAAW,CAC/B,GAAG,EAAE,GAAG,EACR,QAAQ,EAAE,MAAM,EAAE,EAClB,GAAG,EAAE,MAAM,EACX,SAAS,EAAE;IAAE,SAAS,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,GAAG,IAAI,qBAoE1E"}
@@ -0,0 +1,15 @@
1
+ import { Binary } from "./types.mjs";
2
+ /**
3
+ * Extracts the binaries from the given URL and writes them to the destination.
4
+ *
5
+ * @param url - The URL of the archive to extract the binaries from
6
+ * @param binaries - The list of binaries to extract
7
+ * @param dir - The destination directory
8
+ * @param checksums - The checksums to verify the binaries against
9
+ * @returns The list of binaries extracted
10
+ */
11
+ export declare function extractFrom(url: URL, binaries: Binary[], dir: string, checksums: {
12
+ algorithm: string;
13
+ binaries: Record<Binary, string>;
14
+ } | null): Promise<string[]>;
15
+ //# sourceMappingURL=extract.d.mts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"extract.d.mts","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAaA,OAAO,EAAa,MAAM,EAAE,oBAAgB;AAE5C;;;;;;;;GAQG;AAEH,wBAAsB,WAAW,CAC/B,GAAG,EAAE,GAAG,EACR,QAAQ,EAAE,MAAM,EAAE,EAClB,GAAG,EAAE,MAAM,EACX,SAAS,EAAE;IAAE,SAAS,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,GAAG,IAAI,qBAoE1E"}