@metamask-previews/foundryup 0.0.0-preview-c1fef6e5 → 1.0.0-preview-d2bf8ff
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -1
- package/README.md +0 -4
- package/dist/cli.cjs +16 -3
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.mjs +16 -3
- package/dist/cli.mjs.map +1 -1
- package/dist/download.cjs +11 -3
- package/dist/download.cjs.map +1 -1
- package/dist/download.d.cts +8 -0
- package/dist/download.d.cts.map +1 -1
- package/dist/download.d.mts +8 -0
- package/dist/download.d.mts.map +1 -1
- package/dist/download.mjs +11 -3
- package/dist/download.mjs.map +1 -1
- package/dist/extract.cjs +20 -2
- package/dist/extract.cjs.map +1 -1
- package/dist/extract.d.cts +10 -1
- package/dist/extract.d.cts.map +1 -1
- package/dist/extract.d.mts +10 -1
- package/dist/extract.d.mts.map +1 -1
- package/dist/extract.mjs +21 -3
- package/dist/extract.mjs.map +1 -1
- package/dist/index.cjs +48 -5
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +46 -3
- package/dist/index.d.cts.map +1 -1
- package/dist/index.d.mts +46 -3
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +51 -9
- package/dist/index.mjs.map +1 -1
- package/dist/options.cjs +24 -2
- package/dist/options.cjs.map +1 -1
- package/dist/options.d.cts +9 -0
- package/dist/options.d.cts.map +1 -1
- package/dist/options.d.mts +9 -0
- package/dist/options.d.mts.map +1 -1
- package/dist/options.mjs +24 -2
- package/dist/options.mjs.map +1 -1
- package/dist/types.cjs.map +1 -1
- package/dist/types.d.cts +4 -2
- package/dist/types.d.cts.map +1 -1
- package/dist/types.d.mts +4 -2
- package/dist/types.d.mts.map +1 -1
- package/dist/types.mjs.map +1 -1
- package/dist/utils.cjs +9 -3
- package/dist/utils.cjs.map +1 -1
- package/dist/utils.d.cts +10 -4
- package/dist/utils.d.cts.map +1 -1
- package/dist/utils.d.mts +10 -4
- package/dist/utils.d.mts.map +1 -1
- package/dist/utils.mjs +10 -4
- package/dist/utils.mjs.map +1 -1
- package/package.json +15 -9
package/CHANGELOG.md
CHANGED
@@ -7,4 +7,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
-[
+## [1.0.0]
+
+### Added
+
+- Initial release of the foundryup package ([#5810](https://github.com/MetaMask/core/pull/5810), [#5909](https://github.com/MetaMask/core/pull/5909))
+- `foundryup` is a cross-platform tool that installs and manages Foundry binaries with MetaMask-specific defaults for use in development and end-to-end testing workflows. Features included:
+  - CLI tool for managing Foundry binaries in MetaMask's development environment
+  - Support for downloading and installing `forge`, `anvil`, `cast`, and `chisel` binaries
+  - Cross-platform support for Linux, macOS, and Windows with both amd64 and arm64 architectures
+  - Binary integrity verification using SHA-256 checksums
+  - Intelligent binary installation with automatic symlink creation (falls back to copy if symlink fails)
+  - Configurable binary caching with local storage support
+  - Cache management commands for cleaning downloaded binaries
+  - Automatic version detection and management of Foundry releases
+
+[Unreleased]: https://github.com/MetaMask/core/compare/@metamask/foundryup@1.0.0...HEAD
+[1.0.0]: https://github.com/MetaMask/core/releases/tag/@metamask/foundryup@1.0.0
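The "automatic symlink creation (falls back to copy if symlink fails)" behavior described above can be sketched in a few lines. This is an illustration only, not the package's implementation; the helper name and paths are made up.

```ts
import { copyFile, symlink } from 'node:fs/promises';

// Hypothetical helper: link a cached binary into the install location and
// fall back to copying when symlinking is not permitted (for example on
// Windows without the required privilege).
async function installBinary(cachedPath: string, destination: string): Promise<void> {
  try {
    await symlink(cachedPath, destination);
  } catch {
    await copyFile(cachedPath, destination);
  }
}
```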
package/README.md
CHANGED
@@ -38,10 +38,6 @@ This all needs to work.
 
 You can try it here in the monorepo by running `yarn workspace @metamask/foundryup anvil`.
 
-
-
 ## Contributing
 
 This package is part of a monorepo. Instructions for contributing can be found in the [monorepo README](https://github.com/MetaMask/core#readme).
-
-
package/dist/cli.cjs
CHANGED
@@ -1,10 +1,23 @@
 #!/usr/bin/env node
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-
-
+/**
+ * CLI entry point for Foundryup.
+ *
+ * This script downloads and installs Foundry binaries.
+ * If an error occurs, it logs the error and exits with code 1.
+ */
+const _1 = require("./index.cjs");
+/**
+ * Run the main installation process and handle errors.
+ */
 (0, _1.downloadAndInstallFoundryBinaries)().catch((error) => {
+    /**
+     * Log any error that occurs during installation and exit with code 1.
+     *
+     * @param error - The error thrown during installation.
+     */
     console.error('Error:', error);
-
+    process.exit(1);
 });
 //# sourceMappingURL=cli.cjs.map
package/dist/cli.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"cli.cjs","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";;;AAEA
+
{"version":3,"file":"cli.cjs","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";;;AAEA;;;;;GAKG;AACH,kCAAsD;AAEtD;;GAEG;AACH,IAAA,oCAAiC,GAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;IAClD;;;;OAIG;IACH,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAC/B,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\n/**\n * CLI entry point for Foundryup.\n *\n * This script downloads and installs Foundry binaries.\n * If an error occurs, it logs the error and exits with code 1.\n */\nimport { downloadAndInstallFoundryBinaries } from '.';\n\n/**\n * Run the main installation process and handle errors.\n */\ndownloadAndInstallFoundryBinaries().catch((error) => {\n /**\n * Log any error that occurs during installation and exit with code 1.\n *\n * @param error - The error thrown during installation.\n */\n console.error('Error:', error);\n process.exit(1);\n});\n"]}
package/dist/cli.mjs
CHANGED
@@ -1,8 +1,21 @@
 #!/usr/bin/env node
-
-
+/**
+ * CLI entry point for Foundryup.
+ *
+ * This script downloads and installs Foundry binaries.
+ * If an error occurs, it logs the error and exits with code 1.
+ */
+import { downloadAndInstallFoundryBinaries } from "./index.mjs";
+/**
+ * Run the main installation process and handle errors.
+ */
 downloadAndInstallFoundryBinaries().catch((error) => {
+    /**
+     * Log any error that occurs during installation and exit with code 1.
+     *
+     * @param error - The error thrown during installation.
+     */
     console.error('Error:', error);
-    exit(1);
+    process.exit(1);
 });
 //# sourceMappingURL=cli.mjs.map
package/dist/cli.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"cli.mjs","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";AAEA
+
{"version":3,"file":"cli.mjs","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";AAEA;;;;;GAKG;AACH,OAAO,EAAE,iCAAiC,EAAE,oBAAU;AAEtD;;GAEG;AACH,iCAAiC,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;IAClD;;;;OAIG;IACH,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAC/B,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\n/**\n * CLI entry point for Foundryup.\n *\n * This script downloads and installs Foundry binaries.\n * If an error occurs, it logs the error and exits with code 1.\n */\nimport { downloadAndInstallFoundryBinaries } from '.';\n\n/**\n * Run the main installation process and handle errors.\n */\ndownloadAndInstallFoundryBinaries().catch((error) => {\n /**\n * Log any error that occurs during installation and exit with code 1.\n *\n * @param error - The error thrown during installation.\n */\n console.error('Error:', error);\n process.exit(1);\n});\n"]}
package/dist/download.cjs
CHANGED
@@ -5,7 +5,15 @@ const node_http_1 = require("node:http");
 const node_https_1 = require("node:https");
 const node_stream_1 = require("node:stream");
 const promises_1 = require("node:stream/promises");
+/**
+ * A PassThrough stream that emits a 'response' event when the HTTP(S) response is available.
+ */
 class DownloadStream extends node_stream_1.Stream.PassThrough {
+    /**
+     * Returns a promise that resolves with the HTTP(S) IncomingMessage response.
+     *
+     * @returns The HTTP(S) response stream.
+     */
     async response() {
         return new Promise((resolve, reject) => {
             this.once('response', resolve);
@@ -25,7 +33,7 @@ function startDownload(url, options = {}, redirects = 0) {
     const MAX_REDIRECTS = options.maxRedirects ?? 5;
     const request = url.protocol === 'http:' ? node_http_1.request : node_https_1.request;
     const stream = new DownloadStream();
-    request(url, options,
+    request(url, options, (response) => {
         stream.once('close', () => {
             response.destroy();
         });
@@ -42,7 +50,7 @@ function startDownload(url, options = {}, redirects = 0) {
             else {
                 // note: we don't emit a response until we're done redirecting, because
                 // handlers only expect it to be emitted once.
-
+                (0, promises_1.pipeline)(startDownload(new URL(headers.location, url), options, redirects + 1)
                 // emit the response event to the stream
                     .once('response', stream.emit.bind(stream, 'response')), stream).catch(stream.emit.bind(stream, 'error'));
                 response.destroy();
@@ -57,7 +65,7 @@
             // resolve with response stream
             stream.emit('response', response);
             response.once('error', stream.emit.bind(stream, 'error'));
-
+            (0, promises_1.pipeline)(response, stream).catch(stream.emit.bind(stream, 'error'));
         }
     })
         .once('error', stream.emit.bind(stream, 'error'))
package/dist/download.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"download.cjs","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";;;AAAA,yCAAyE;AACzE,2CAAqD;AACrD,6CAAqC;AACrC,mDAAgD;
+
{"version":3,"file":"download.cjs","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";;;AAAA,yCAAyE;AACzE,2CAAqD;AACrD,6CAAqC;AACrC,mDAAgD;AAIhD;;GAEG;AACH,MAAM,cAAe,SAAQ,oBAAM,CAAC,WAAW;IAC7C;;;;OAIG;IACH,KAAK,CAAC,QAAQ;QACZ,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;YAC/B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC,CAAC;IACL,CAAC;CACF;AAED;;;;;;;GAOG;AACH,SAAgB,aAAa,CAC3B,GAAQ,EACR,UAA2B,EAAE,EAC7B,YAAoB,CAAC;IAErB,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,IAAI,CAAC,CAAC;IAChD,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,mBAAW,CAAC,CAAC,CAAC,oBAAY,CAAC;IACtE,MAAM,MAAM,GAAG,IAAI,cAAc,EAAE,CAAC;IACpC,OAAO,CAAC,GAAG,EAAE,OAAO,EAAE,CAAC,QAAQ,EAAE,EAAE;QACjC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE;YACxB,QAAQ,CAAC,OAAO,EAAE,CAAC;QACrB,CAAC,CAAC,CAAC;QAEH,MAAM,EAAE,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,GAAG,QAAQ,CAAC;QACxD,mBAAmB;QACnB,IACE,UAAU;YACV,UAAU,IAAI,GAAG;YACjB,UAAU,GAAG,GAAG;YAChB,OAAO,CAAC,QAAQ,EAChB;YACA,IAAI,SAAS,IAAI,aAAa,EAAE;gBAC9B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC,CAAC;gBACtD,QAAQ,CAAC,OAAO,EAAE,CAAC;aACpB;iBAAM;gBACL,uEAAuE;gBACvE,8CAA8C;gBAC9C,IAAA,mBAAQ,EACN,aAAa,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,OAAO,EAAE,SAAS,GAAG,CAAC,CAAC;oBACnE,wCAAwC;qBACvC,IAAI,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC,EACzD,MAAM,CACP,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;gBAC3C,QAAQ,CAAC,OAAO,EAAE,CAAC;aACpB;SACF;QAED,wBAAwB;aACnB,IAAI,CAAC,UAAU,IAAI,UAAU,GAAG,GAAG,IAAI,UAAU,IAAI,GAAG,EAAE;YAC7D,MAAM,CAAC,IAAI,CACT,OAAO,EACP,IAAI,KAAK,CACP,cAAc,GAAG,yBAAyB,UAAU,MAAM,aAAa,EAAE,CAC1E,CACF,CAAC;YACF,QAAQ,CAAC,OAAO,EAAE,CAAC;SACpB;aAAM;YACL,+BAA+B;YAC/B,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;YAElC,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;YAC1D,IAAA,mBAAQ,EAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;SACrE;IACH,CAAC,CAAC;SACC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;SAChD,GAAG,EAAE,CAAC;IACT,OAAO,MAAM,CAAC;AAChB,CAAC;AAzDD,sCAyDC","sourcesContent":["import { request as httpRequest, type IncomingMessage } from 'node:http';\nimport { request as httpsRequest } from 'node:https';\nimport { Stream } from 'node:stream';\nimport { pipeline } from 'node:stream/promises';\n\nimport type { DownloadOptions } from './types';\n\n/**\n * A PassThrough stream that emits a 'response' event when the HTTP(S) response is available.\n */\nclass DownloadStream extends Stream.PassThrough {\n /**\n * Returns a promise that resolves with the HTTP(S) IncomingMessage response.\n *\n * @returns The HTTP(S) response stream.\n */\n async response(): Promise<IncomingMessage> {\n return new Promise((resolve, reject) => {\n this.once('response', resolve);\n this.once('error', reject);\n });\n }\n}\n\n/**\n * Starts a download from the given URL.\n *\n * @param url - The URL to download from\n * @param options - The download options\n * @param redirects - The number of redirects that have occurred\n * @returns A stream of the download\n */\nexport function startDownload(\n url: URL,\n options: DownloadOptions = {},\n redirects: number = 0,\n) {\n const MAX_REDIRECTS = options.maxRedirects ?? 5;\n const request = url.protocol === 'http:' ? 
httpRequest : httpsRequest;\n const stream = new DownloadStream();\n request(url, options, (response) => {\n stream.once('close', () => {\n response.destroy();\n });\n\n const { statusCode, statusMessage, headers } = response;\n // handle redirects\n if (\n statusCode &&\n statusCode >= 300 &&\n statusCode < 400 &&\n headers.location\n ) {\n if (redirects >= MAX_REDIRECTS) {\n stream.emit('error', new Error('Too many redirects'));\n response.destroy();\n } else {\n // note: we don't emit a response until we're done redirecting, because\n // handlers only expect it to be emitted once.\n pipeline(\n startDownload(new URL(headers.location, url), options, redirects + 1)\n // emit the response event to the stream\n .once('response', stream.emit.bind(stream, 'response')),\n stream,\n ).catch(stream.emit.bind(stream, 'error'));\n response.destroy();\n }\n }\n\n // check for HTTP errors\n else if (!statusCode || statusCode < 200 || statusCode >= 300) {\n stream.emit(\n 'error',\n new Error(\n `Request to ${url} failed. Status Code: ${statusCode} - ${statusMessage}`,\n ),\n );\n response.destroy();\n } else {\n // resolve with response stream\n stream.emit('response', response);\n\n response.once('error', stream.emit.bind(stream, 'error'));\n pipeline(response, stream).catch(stream.emit.bind(stream, 'error'));\n }\n })\n .once('error', stream.emit.bind(stream, 'error'))\n .end();\n return stream;\n}\n"]}
package/dist/download.d.cts
CHANGED
@@ -2,7 +2,15 @@
 import { type IncomingMessage } from "node:http";
 import { Stream } from "node:stream";
 import type { DownloadOptions } from "./types.cjs";
+/**
+ * A PassThrough stream that emits a 'response' event when the HTTP(S) response is available.
+ */
 declare class DownloadStream extends Stream.PassThrough {
+    /**
+     * Returns a promise that resolves with the HTTP(S) IncomingMessage response.
+     *
+     * @returns The HTTP(S) response stream.
+     */
     response(): Promise<IncomingMessage>;
 }
 /**
package/dist/download.d.cts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"download.d.cts","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";AAAA,OAAO,EAA0B,KAAK,eAAe,EAAE,kBAAkB;AAEzE,OAAO,EAAE,MAAM,EAAE,oBAAoB;
+
{"version":3,"file":"download.d.cts","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";AAAA,OAAO,EAA0B,KAAK,eAAe,EAAE,kBAAkB;AAEzE,OAAO,EAAE,MAAM,EAAE,oBAAoB;AAGrC,OAAO,KAAK,EAAE,eAAe,EAAE,oBAAgB;AAE/C;;GAEG;AACH,cAAM,cAAe,SAAQ,MAAM,CAAC,WAAW;IAC7C;;;;OAIG;IACG,QAAQ,IAAI,OAAO,CAAC,eAAe,CAAC;CAM3C;AAED;;;;;;;GAOG;AACH,wBAAgB,aAAa,CAC3B,GAAG,EAAE,GAAG,EACR,OAAO,GAAE,eAAoB,EAC7B,SAAS,GAAE,MAAU,kBAsDtB"}
package/dist/download.d.mts
CHANGED
@@ -2,7 +2,15 @@
 import { type IncomingMessage } from "node:http";
 import { Stream } from "node:stream";
 import type { DownloadOptions } from "./types.mjs";
+/**
+ * A PassThrough stream that emits a 'response' event when the HTTP(S) response is available.
+ */
 declare class DownloadStream extends Stream.PassThrough {
+    /**
+     * Returns a promise that resolves with the HTTP(S) IncomingMessage response.
+     *
+     * @returns The HTTP(S) response stream.
+     */
     response(): Promise<IncomingMessage>;
 }
 /**
package/dist/download.d.mts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"download.d.mts","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";AAAA,OAAO,EAA0B,KAAK,eAAe,EAAE,kBAAkB;AAEzE,OAAO,EAAE,MAAM,EAAE,oBAAoB;
+
{"version":3,"file":"download.d.mts","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":";AAAA,OAAO,EAA0B,KAAK,eAAe,EAAE,kBAAkB;AAEzE,OAAO,EAAE,MAAM,EAAE,oBAAoB;AAGrC,OAAO,KAAK,EAAE,eAAe,EAAE,oBAAgB;AAE/C;;GAEG;AACH,cAAM,cAAe,SAAQ,MAAM,CAAC,WAAW;IAC7C;;;;OAIG;IACG,QAAQ,IAAI,OAAO,CAAC,eAAe,CAAC;CAM3C;AAED;;;;;;;GAOG;AACH,wBAAgB,aAAa,CAC3B,GAAG,EAAE,GAAG,EACR,OAAO,GAAE,eAAoB,EAC7B,SAAS,GAAE,MAAU,kBAsDtB"}
package/dist/download.mjs
CHANGED
@@ -2,7 +2,15 @@ import { request as httpRequest } from "node:http";
 import { request as httpsRequest } from "node:https";
 import { Stream } from "node:stream";
 import { pipeline } from "node:stream/promises";
+/**
+ * A PassThrough stream that emits a 'response' event when the HTTP(S) response is available.
+ */
 class DownloadStream extends Stream.PassThrough {
+    /**
+     * Returns a promise that resolves with the HTTP(S) IncomingMessage response.
+     *
+     * @returns The HTTP(S) response stream.
+     */
     async response() {
         return new Promise((resolve, reject) => {
             this.once('response', resolve);
@@ -22,7 +30,7 @@ export function startDownload(url, options = {}, redirects = 0) {
     const MAX_REDIRECTS = options.maxRedirects ?? 5;
     const request = url.protocol === 'http:' ? httpRequest : httpsRequest;
     const stream = new DownloadStream();
-    request(url, options,
+    request(url, options, (response) => {
         stream.once('close', () => {
             response.destroy();
         });
@@ -39,7 +47,7 @@ export function startDownload(url, options = {}, redirects = 0) {
             else {
                 // note: we don't emit a response until we're done redirecting, because
                 // handlers only expect it to be emitted once.
-
+                pipeline(startDownload(new URL(headers.location, url), options, redirects + 1)
                 // emit the response event to the stream
                     .once('response', stream.emit.bind(stream, 'response')), stream).catch(stream.emit.bind(stream, 'error'));
                 response.destroy();
@@ -54,7 +62,7 @@ export function startDownload(url, options = {}, redirects = 0) {
             // resolve with response stream
             stream.emit('response', response);
             response.once('error', stream.emit.bind(stream, 'error'));
-
+            pipeline(response, stream).catch(stream.emit.bind(stream, 'error'));
         }
     })
         .once('error', stream.emit.bind(stream, 'error'))
package/dist/download.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"download.mjs","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,WAAW,EAAwB,kBAAkB;AACzE,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,mBAAmB;AACrD,OAAO,EAAE,MAAM,EAAE,oBAAoB;AACrC,OAAO,EAAE,QAAQ,EAAE,6BAA6B;
+
{"version":3,"file":"download.mjs","sourceRoot":"","sources":["../src/download.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,WAAW,EAAwB,kBAAkB;AACzE,OAAO,EAAE,OAAO,IAAI,YAAY,EAAE,mBAAmB;AACrD,OAAO,EAAE,MAAM,EAAE,oBAAoB;AACrC,OAAO,EAAE,QAAQ,EAAE,6BAA6B;AAIhD;;GAEG;AACH,MAAM,cAAe,SAAQ,MAAM,CAAC,WAAW;IAC7C;;;;OAIG;IACH,KAAK,CAAC,QAAQ;QACZ,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;YAC/B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC,CAAC;IACL,CAAC;CACF;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,aAAa,CAC3B,GAAQ,EACR,UAA2B,EAAE,EAC7B,YAAoB,CAAC;IAErB,MAAM,aAAa,GAAG,OAAO,CAAC,YAAY,IAAI,CAAC,CAAC;IAChD,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,YAAY,CAAC;IACtE,MAAM,MAAM,GAAG,IAAI,cAAc,EAAE,CAAC;IACpC,OAAO,CAAC,GAAG,EAAE,OAAO,EAAE,CAAC,QAAQ,EAAE,EAAE;QACjC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE;YACxB,QAAQ,CAAC,OAAO,EAAE,CAAC;QACrB,CAAC,CAAC,CAAC;QAEH,MAAM,EAAE,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,GAAG,QAAQ,CAAC;QACxD,mBAAmB;QACnB,IACE,UAAU;YACV,UAAU,IAAI,GAAG;YACjB,UAAU,GAAG,GAAG;YAChB,OAAO,CAAC,QAAQ,EAChB;YACA,IAAI,SAAS,IAAI,aAAa,EAAE;gBAC9B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC,CAAC;gBACtD,QAAQ,CAAC,OAAO,EAAE,CAAC;aACpB;iBAAM;gBACL,uEAAuE;gBACvE,8CAA8C;gBAC9C,QAAQ,CACN,aAAa,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,OAAO,EAAE,SAAS,GAAG,CAAC,CAAC;oBACnE,wCAAwC;qBACvC,IAAI,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC,EACzD,MAAM,CACP,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;gBAC3C,QAAQ,CAAC,OAAO,EAAE,CAAC;aACpB;SACF;QAED,wBAAwB;aACnB,IAAI,CAAC,UAAU,IAAI,UAAU,GAAG,GAAG,IAAI,UAAU,IAAI,GAAG,EAAE;YAC7D,MAAM,CAAC,IAAI,CACT,OAAO,EACP,IAAI,KAAK,CACP,cAAc,GAAG,yBAAyB,UAAU,MAAM,aAAa,EAAE,CAC1E,CACF,CAAC;YACF,QAAQ,CAAC,OAAO,EAAE,CAAC;SACpB;aAAM;YACL,+BAA+B;YAC/B,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;YAElC,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;YAC1D,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;SACrE;IACH,CAAC,CAAC;SACC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;SAChD,GAAG,EAAE,CAAC;IACT,OAAO,MAAM,CAAC;AAChB,CAAC","sourcesContent":["import { request as httpRequest, type IncomingMessage } from 'node:http';\nimport { request as httpsRequest } from 'node:https';\nimport { Stream } from 'node:stream';\nimport { pipeline } from 'node:stream/promises';\n\nimport type { DownloadOptions } from './types';\n\n/**\n * A PassThrough stream that emits a 'response' event when the HTTP(S) response is available.\n */\nclass DownloadStream extends Stream.PassThrough {\n /**\n * Returns a promise that resolves with the HTTP(S) IncomingMessage response.\n *\n * @returns The HTTP(S) response stream.\n */\n async response(): Promise<IncomingMessage> {\n return new Promise((resolve, reject) => {\n this.once('response', resolve);\n this.once('error', reject);\n });\n }\n}\n\n/**\n * Starts a download from the given URL.\n *\n * @param url - The URL to download from\n * @param options - The download options\n * @param redirects - The number of redirects that have occurred\n * @returns A stream of the download\n */\nexport function startDownload(\n url: URL,\n options: DownloadOptions = {},\n redirects: number = 0,\n) {\n const MAX_REDIRECTS = options.maxRedirects ?? 5;\n const request = url.protocol === 'http:' ? 
httpRequest : httpsRequest;\n const stream = new DownloadStream();\n request(url, options, (response) => {\n stream.once('close', () => {\n response.destroy();\n });\n\n const { statusCode, statusMessage, headers } = response;\n // handle redirects\n if (\n statusCode &&\n statusCode >= 300 &&\n statusCode < 400 &&\n headers.location\n ) {\n if (redirects >= MAX_REDIRECTS) {\n stream.emit('error', new Error('Too many redirects'));\n response.destroy();\n } else {\n // note: we don't emit a response until we're done redirecting, because\n // handlers only expect it to be emitted once.\n pipeline(\n startDownload(new URL(headers.location, url), options, redirects + 1)\n // emit the response event to the stream\n .once('response', stream.emit.bind(stream, 'response')),\n stream,\n ).catch(stream.emit.bind(stream, 'error'));\n response.destroy();\n }\n }\n\n // check for HTTP errors\n else if (!statusCode || statusCode < 200 || statusCode >= 300) {\n stream.emit(\n 'error',\n new Error(\n `Request to ${url} failed. Status Code: ${statusCode} - ${statusMessage}`,\n ),\n );\n response.destroy();\n } else {\n // resolve with response stream\n stream.emit('response', response);\n\n response.once('error', stream.emit.bind(stream, 'error'));\n pipeline(response, stream).catch(stream.emit.bind(stream, 'error'));\n }\n })\n .once('error', stream.emit.bind(stream, 'error'))\n .end();\n return stream;\n}\n"]}
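Per the jsdoc and declarations above, `startDownload` returns a `DownloadStream` (a `PassThrough`) whose `response()` promise resolves with the underlying `IncomingMessage` once headers are available. A minimal usage sketch, with a made-up URL and output path:

```ts
import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

import { startDownload } from './download';

async function fetchArchive(): Promise<void> {
  // Illustrative URL and destination only.
  const stream = startDownload(new URL('https://example.com/foundry.tar.gz'));
  // Response headers (e.g. content-length) arrive before the body finishes.
  const response = await stream.response();
  console.log('content-length:', response.headers['content-length']);
  await pipeline(stream, createWriteStream('foundry.tar.gz'));
}
```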
package/dist/extract.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.extractFrom = void 0;
+const minipass_1 = require("minipass");
 const strict_1 = require("node:assert/strict");
 const node_crypto_1 = require("node:crypto");
 const node_fs_1 = require("node:fs");
@@ -9,12 +10,20 @@ const node_http_1 = require("node:http");
 const node_https_1 = require("node:https");
 const node_path_1 = require("node:path");
 const promises_2 = require("node:stream/promises");
-const minipass_1 = require("minipass");
 const tar_1 = require("tar");
 const unzipper_1 = require("unzipper");
-const utils_1 = require("./utils.cjs");
 const download_1 = require("./download.cjs");
 const types_1 = require("./types.cjs");
+const utils_1 = require("./utils.cjs");
+/**
+ * Extracts the binaries from the given URL and writes them to the destination.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksums - The checksums to verify the binaries against
+ * @returns The list of binaries extracted
+ */
 /**
  * Extracts the binaries from the given URL and writes them to the destination.
  *
@@ -80,6 +89,15 @@ async function extractFrom(url, binaries, dir, checksums) {
     }
 }
 exports.extractFrom = extractFrom;
+/**
+ * Extracts the binaries from a tar archive.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksumAlgorithm - The checksum algorithm to use
+ * @returns The list of binaries extracted
+ */
 /**
  * Extracts the binaries from a tar archive.
  *
package/dist/extract.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"extract.cjs","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":";;;AAAA,+CAAwC;AACxC,6CAAyC;AACzC,qCAA4C;AAC5C,+CAAqD;AACrD,yCAA+C;AAC/C,2CAAiD;AACjD,yCAA8D;AAC9D,mDAAgD;AAChD,uCAAoC;AACpC,6BAA4C;AAC5C,uCAAyD;AACzD,uCAA8B;AAC9B,6CAA2C;AAC3C,uCAA4C;AAE5C;;;;;;;;GAQG;AAEI,KAAK,UAAU,WAAW,CAC/B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,SAAyE;IAEzE,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,iBAAS,CAAC,GAAG,CAAC;QAChE,CAAC,CAAC,cAAc;QAChB,CAAC,CAAC,cAAc,CAAC;IACnB,2EAA2E;IAC3E,mEAAmE;IACnE,4EAA4E;IAC5E,0BAA0B;IAC1B,MAAM,OAAO,GAAG,GAAG,GAAG,cAAc,CAAC;IACrC,MAAM,MAAM,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;IAC/D,IAAI;QACF,8CAA8C;QAC9C,MAAM,IAAA,aAAE,EAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC1B,0DAA0D;QAC1D,MAAM,IAAA,gBAAK,EAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC1C,MAAM,SAAS,GAAG,MAAM,OAAO,CAC7B,GAAG,EACH,QAAQ,EACR,OAAO,EACP,SAAS,EAAE,SAAS,CACrB,CAAC;QACF,IAAA,WAAE,EAAC,SAAS,CAAC,MAAM,KAAK,QAAQ,CAAC,MAAM,EAAE,gCAAgC,CAAC,CAAC;QAE3E,MAAM,KAAK,GAAa,EAAE,CAAC;QAC3B,KAAK,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,EAAE;YAClD,IAAI,SAAS,EAAE;gBACb,IAAA,WAAG,EAAC,0BAA0B,MAAM,EAAE,CAAC,CAAC;gBACxC,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;gBAC5C,IAAI,QAAQ,KAAK,QAAQ,EAAE;oBACzB,IAAA,WAAG,EAAC,yBAAyB,MAAM,EAAE,CAAC,CAAC;iBACxC;qBAAM;oBACL,MAAM,IAAI,KAAK,CACb,yBAAyB,MAAM,cAAc,QAAQ,SAAS,QAAQ,EAAE,CACzE,CAAC;iBACH;aACF;YACD,+CAA+C;YAC/C,KAAK,CAAC,IAAI,CAAC,IAAA,gBAAI,EAAC,GAAG,EAAE,IAAA,oBAAQ,EAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC;SAChD;QAED,2EAA2E;QAC3E,yEAAyE;QACzE,uBAAuB;QACvB,MAAM,IAAA,aAAE,EAAC,GAAG,EAAE,MAAM,CAAC,CAAC;QACtB,2EAA2E;QAC3E,MAAM,IAAA,iBAAM,EAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAC3B,wCAAwC;QACxC,OAAO,KAAK,CAAC;KACd;IAAC,OAAO,KAAK,EAAE;QACd,4EAA4E;QAC5E,4EAA4E;QAC5E,iBAAiB;QACjB,MAAM,QAAQ,GAAG,CACf,MAAM,OAAO,CAAC,UAAU,CAAC,CAAC,IAAA,aAAE,EAAC,OAAO,EAAE,MAAM,CAAC,EAAE,IAAA,aAAE,EAAC,GAAG,EAAE,MAAM,CAAC,CAAC,CAAC,CACjE;aACE,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,KAAK,UAAU,CAAC;aACtC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAA2B,CAAC,MAAM,CAAC,CAAC;QAEnD,8DAA8D;QAC9D,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,MAAM,IAAI,cAAc,CACtB,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,EACpB,sCAAsC,CACvC,CAAC;SACH;QACD,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AAxED,kCAwEC;AACD;;;;;;;;GAQG;AAEH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,SAAS,GAIT,EAAE,CAAC;IACT,MAAM,IAAA,mBAAQ,EACZ,IAAA,wBAAa,EAAC,GAAG,CAAC,EAClB,IAAA,aAAU,EACR;QACE,GAAG,EAAE,GAAG;QACR,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE;YACnB,MAAM,YAAY,GAAG,KAAK,CAAC,QAAQ,CAAC;YACpC,IAAI,CAAC,YAAY,EAAE;gBACjB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,IAAI,iBAAiB,EAAE;gBACrB,MAAM,IAAI,GAAG,IAAA,wBAAU,EAAC,iBAAiB,CAAC,CAAC;gBAC3C,MAAM,WAAW,GAAG,IAAI,mBAAQ,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;gBAClD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACvB,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBACzB,SAAS,CAAC,IAAI,CAAC;wBACb,IAAI,EAAE,YAAY;wBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;wBAC5B,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;qBAC7B,CAAC,CAAC;gBACL,CAAC,CAAC,CAAC;gBACH,OAAO,WAAW,CAAC;aACpB;YAED,oEAAoE;YACpE,oDAAoD;YACpD,SAAS,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,YAAY;gBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;aAC7B,CAAC,CAAC;YACH,OAAO,SAAS,CAAC;QACnB,CAAC;KACF,EACD,QAAQ,CACT,CACF,CAAC;IACF,OAAO,SAAS,CAAC;AACnB,CAAC;AACD;;;;;;;;GAQG;AAEH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iBAAS,CAAC,CAAC,CAAC,kBAAU,CAAC,CAAC;QACpE,SAAS,EAAE,IAAI;KAChB,CAAC,CAAC;IACH,MAAM,MAAM,GAAW;QACrB,KAAK,CAAC,IAAI;YACR,MAAM,QAAQ,GAAG,IAAA,wBAAa,E
AAC,GAAG,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;YAC/D,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAC;YAC3C,MAAM,aAAa,GAAG,QAAQ,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;YACzD,OAAO,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACzD,CAAC;QACD,MAAM,CAAC,MAAc,EAAE,KAAa;YAClC,MAAM,OAAO,GAAG;gBACd,KAAK;gBACL,OAAO,EAAE;oBACP,KAAK,EAAE,SAAS,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;iBACxD;aACF,CAAC;YACF,OAAO,IAAA,wBAAa,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;IAEF,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,eAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CACzC,QAAQ,CAAC,QAAQ,CAAC,IAAA,oBAAQ,EAAC,IAAI,EAAE,IAAA,mBAAO,EAAC,IAAI,CAAC,CAAW,CAAC,CAC3D,CAAC;IACF,OAAO,MAAM,OAAO,CAAC,GAAG,CACtB,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE;QACtC,MAAM,IAAI,GAAG,IAAA,gBAAI,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,EAAE,CAAC;QACvB,MAAM,UAAU,GAAG,IAAA,2BAAiB,EAAC,IAAI,CAAC,CAAC;QAC3C,MAAM,MAAM,GAAG,IAAA,oBAAQ,EAAC,IAAI,EAAE,IAAA,mBAAO,EAAC,IAAI,CAAC,CAAW,CAAC;QACvD,IAAI,iBAAiB,EAAE;YACrB,MAAM,IAAI,GAAG,IAAA,wBAAU,EAAC,iBAAiB,CAAC,CAAC;YAC3C,MAAM,UAAU,GAAG,KAAK,SAAS,CAAC,EAAE,WAAkB;gBACpD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,WAAW,EAAE;oBACrC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACnB,MAAM,KAAK,CAAC;iBACb;YACH,CAAC,CAAC;YACF,MAAM,IAAA,mBAAQ,EAAC,KAAK,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC;YAC9C,OAAO;gBACL,IAAI,EAAE,IAAI;gBACV,MAAM;gBACN,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;aAC7B,CAAC;SACH;QACD,MAAM,IAAA,mBAAQ,EAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,IAAI;YACV,MAAM;SACP,CAAC;IACJ,CAAC,CAAC,CACH,CAAC;AACJ,CAAC","sourcesContent":["import { ok } from 'node:assert/strict';\nimport { createHash } from 'node:crypto';\nimport { createWriteStream } from 'node:fs';\nimport { rename, mkdir, rm } from 'node:fs/promises';\nimport { Agent as HttpAgent } from 'node:http';\nimport { Agent as HttpsAgent } from 'node:https';\nimport { join, basename, extname, relative } from 'node:path';\nimport { pipeline } from 'node:stream/promises';\nimport { Minipass } from 'minipass';\nimport { extract as extractTar } from 'tar';\nimport { Open, type Source, type Entry } from 'unzipper';\nimport { say } from './utils';\nimport { startDownload } from './download';\nimport { Extension, Binary } from './types';\n\n/**\n * Extracts the binaries from the given URL and writes them to the destination.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksums - The checksums to verify the binaries against\n * @returns The list of binaries extracted\n */\n\nexport async function extractFrom(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksums: { algorithm: string; binaries: Record<Binary, string> } | null,\n) {\n const extract = url.pathname.toLowerCase().endsWith(Extension.Tar)\n ? extractFromTar\n : extractFromZip;\n // write all files to a temporary directory first, then rename to the final\n // destination to avoid accidental partial extraction. 
We don't use\n // `os.tmpdir` for this because `rename` will fail if the directories are on\n // different file systems.\n const tempDir = `${dir}.downloading`;\n const rmOpts = { recursive: true, maxRetries: 3, force: true };\n try {\n // clean up any previous in-progress downloads\n await rm(tempDir, rmOpts);\n // make the temporary directory to extract the binaries to\n await mkdir(tempDir, { recursive: true });\n const downloads = await extract(\n url,\n binaries,\n tempDir,\n checksums?.algorithm,\n );\n ok(downloads.length === binaries.length, 'Failed to extract all binaries');\n\n const paths: string[] = [];\n for (const { path, binary, checksum } of downloads) {\n if (checksums) {\n say(`verifying checksum for ${binary}`);\n const expected = checksums.binaries[binary];\n if (checksum === expected) {\n say(`checksum verified for ${binary}`);\n } else {\n throw new Error(\n `checksum mismatch for ${binary}, expected ${expected}, got ${checksum}`,\n );\n }\n }\n // add the *final* path to the list of binaries\n paths.push(join(dir, relative(tempDir, path)));\n }\n\n // this directory shouldn't exist, but if two simultaneous `yarn foundryup`\n // processes are running, it might. Last process wins, so we remove other\n // `dir`s just in case.\n await rm(dir, rmOpts);\n // everything has been extracted; move the files to their final destination\n await rename(tempDir, dir);\n // return the list of extracted binaries\n return paths;\n } catch (error) {\n // if things fail for any reason try to clean up a bit. it is very important\n // to not leave `dir` behind, as its existence is a signal that the binaries\n // are installed.\n const rmErrors = (\n await Promise.allSettled([rm(tempDir, rmOpts), rm(dir, rmOpts)])\n )\n .filter((r) => r.status === 'rejected')\n .map((r) => (r as PromiseRejectedResult).reason);\n\n // if we failed to clean up, create an aggregate error message\n if (rmErrors.length) {\n throw new AggregateError(\n [error, ...rmErrors],\n 'This is a bug; you should report it.',\n );\n }\n throw error;\n }\n}\n/**\n * Extracts the binaries from a tar archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\nasync function extractFromTar(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const downloads: {\n path: string;\n binary: Binary;\n checksum?: string;\n }[] = [];\n await pipeline(\n startDownload(url),\n extractTar(\n {\n cwd: dir,\n transform: (entry) => {\n const absolutePath = entry.absolute;\n if (!absolutePath) {\n throw new Error('Missing absolute path for entry');\n }\n\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const passThrough = new Minipass({ async: true });\n passThrough.pipe(hash);\n passThrough.on('end', () => {\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n checksum: hash.digest('hex'),\n });\n });\n return passThrough;\n }\n\n // When no checksum is needed, record the entry and return undefined\n // to use the original stream without transformation\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n });\n return undefined;\n },\n },\n binaries,\n ),\n );\n return downloads;\n}\n/**\n * Extracts the binaries from a zip archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * 
@param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\nasync function extractFromZip(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const agent = new (url.protocol === 'http:' ? HttpAgent : HttpsAgent)({\n keepAlive: true,\n });\n const source: Source = {\n async size() {\n const download = startDownload(url, { agent, method: 'HEAD' });\n const response = await download.response();\n const contentLength = response.headers['content-length'];\n return contentLength ? parseInt(contentLength, 10) : 0;\n },\n stream(offset: number, bytes: number) {\n const options = {\n agent,\n headers: {\n range: `bytes=${offset}-${bytes ? offset + bytes : ''}`,\n },\n };\n return startDownload(url, options);\n },\n };\n\n const { files } = await Open.custom(source, {});\n const filtered = files.filter(({ path }) =>\n binaries.includes(basename(path, extname(path)) as Binary),\n );\n return await Promise.all(\n filtered.map(async ({ path, stream }) => {\n const dest = join(dir, path);\n const entry = stream();\n const destStream = createWriteStream(dest);\n const binary = basename(path, extname(path)) as Binary;\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const hashStream = async function* (entryStream: Entry) {\n for await (const chunk of entryStream) {\n hash.update(chunk);\n yield chunk;\n }\n };\n await pipeline(entry, hashStream, destStream);\n return {\n path: dest,\n binary,\n checksum: hash.digest('hex'),\n };\n }\n await pipeline(entry, destStream);\n return {\n path: dest,\n binary,\n };\n }),\n );\n}\n"]}
+
{"version":3,"file":"extract.cjs","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":";;;AAAA,uCAAoC;AACpC,+CAAwC;AACxC,6CAAyC;AACzC,qCAA4C;AAC5C,+CAAqD;AACrD,yCAA+C;AAC/C,2CAAiD;AACjD,yCAA8D;AAC9D,mDAAgD;AAChD,6BAA4C;AAC5C,uCAAyD;AAEzD,6CAA2C;AAC3C,uCAAiD;AACjD,uCAA8B;AAE9B;;;;;;;;GAQG;AAEH;;;;;;;;GAQG;AACI,KAAK,UAAU,WAAW,CAC/B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,SAAyE;IAEzE,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,iBAAS,CAAC,GAAG,CAAC;QAChE,CAAC,CAAC,cAAc;QAChB,CAAC,CAAC,cAAc,CAAC;IACnB,2EAA2E;IAC3E,mEAAmE;IACnE,4EAA4E;IAC5E,0BAA0B;IAC1B,MAAM,OAAO,GAAG,GAAG,GAAG,cAAc,CAAC;IACrC,MAAM,MAAM,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;IAC/D,IAAI;QACF,8CAA8C;QAC9C,MAAM,IAAA,aAAE,EAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC1B,0DAA0D;QAC1D,MAAM,IAAA,gBAAK,EAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC1C,MAAM,SAAS,GAAG,MAAM,OAAO,CAC7B,GAAG,EACH,QAAQ,EACR,OAAO,EACP,SAAS,EAAE,SAAS,CACrB,CAAC;QACF,IAAA,WAAE,EAAC,SAAS,CAAC,MAAM,KAAK,QAAQ,CAAC,MAAM,EAAE,gCAAgC,CAAC,CAAC;QAE3E,MAAM,KAAK,GAAa,EAAE,CAAC;QAC3B,KAAK,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,EAAE;YAClD,IAAI,SAAS,EAAE;gBACb,IAAA,WAAG,EAAC,0BAA0B,MAAM,EAAE,CAAC,CAAC;gBACxC,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;gBAC5C,IAAI,QAAQ,KAAK,QAAQ,EAAE;oBACzB,IAAA,WAAG,EAAC,yBAAyB,MAAM,EAAE,CAAC,CAAC;iBACxC;qBAAM;oBACL,MAAM,IAAI,KAAK,CACb,yBAAyB,MAAM,cAAc,QAAQ,SAAS,QAAQ,EAAE,CACzE,CAAC;iBACH;aACF;YACD,+CAA+C;YAC/C,KAAK,CAAC,IAAI,CAAC,IAAA,gBAAI,EAAC,GAAG,EAAE,IAAA,oBAAQ,EAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC;SAChD;QAED,2EAA2E;QAC3E,yEAAyE;QACzE,uBAAuB;QACvB,MAAM,IAAA,aAAE,EAAC,GAAG,EAAE,MAAM,CAAC,CAAC;QACtB,2EAA2E;QAC3E,MAAM,IAAA,iBAAM,EAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAC3B,wCAAwC;QACxC,OAAO,KAAK,CAAC;KACd;IAAC,OAAO,KAAK,EAAE;QACd,4EAA4E;QAC5E,4EAA4E;QAC5E,iBAAiB;QACjB,MAAM,QAAQ,GAAG,CACf,MAAM,OAAO,CAAC,UAAU,CAAC,CAAC,IAAA,aAAE,EAAC,OAAO,EAAE,MAAM,CAAC,EAAE,IAAA,aAAE,EAAC,GAAG,EAAE,MAAM,CAAC,CAAC,CAAC,CACjE;aACE,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,KAAK,UAAU,CAAC;aACtC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAA2B,CAAC,MAAM,CAAC,CAAC;QAEnD,8DAA8D;QAC9D,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,MAAM,IAAI,cAAc,CACtB,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,EACpB,sCAAsC,CACvC,CAAC;SACH;QACD,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AAxED,kCAwEC;AACD;;;;;;;;GAQG;AAEH;;;;;;;;GAQG;AACH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,SAAS,GAIT,EAAE,CAAC;IACT,MAAM,IAAA,mBAAQ,EACZ,IAAA,wBAAa,EAAC,GAAG,CAAC,EAClB,IAAA,aAAU,EACR;QACE,GAAG,EAAE,GAAG;QACR,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE;YACnB,MAAM,YAAY,GAAG,KAAK,CAAC,QAAQ,CAAC;YACpC,IAAI,CAAC,YAAY,EAAE;gBACjB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,IAAI,iBAAiB,EAAE;gBACrB,MAAM,IAAI,GAAG,IAAA,wBAAU,EAAC,iBAAiB,CAAC,CAAC;gBAC3C,MAAM,WAAW,GAAG,IAAI,mBAAQ,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;gBAClD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACvB,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBACzB,SAAS,CAAC,IAAI,CAAC;wBACb,IAAI,EAAE,YAAY;wBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;wBAC5B,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;qBAC7B,CAAC,CAAC;gBACL,CAAC,CAAC,CAAC;gBACH,OAAO,WAAW,CAAC;aACpB;YAED,oEAAoE;YACpE,oDAAoD;YACpD,SAAS,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,YAAY;gBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;aAC7B,CAAC,CAAC;YACH,OAAO,SAAS,CAAC;QACnB,CAAC;KACF,EACD,QAAQ,CACT,CACF,CAAC;IACF,OAAO,SAAS,CAAC;AACnB,CAAC;AACD;;;;;;;;GAQG;AACH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iBAAS,CAAC,CAAC,CAAC,kBAAU,CAAC,CAAC;QACpE,SAAS,EAAE,IAAI;KAChB,CAAC,CAAC;IACH,MAAM,MAAM,GAAW;QACrB,KAAK,CAAC,IAA
I;YACR,MAAM,QAAQ,GAAG,IAAA,wBAAa,EAAC,GAAG,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;YAC/D,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAC;YAC3C,MAAM,aAAa,GAAG,QAAQ,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;YACzD,OAAO,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACzD,CAAC;QACD,MAAM,CAAC,MAAc,EAAE,KAAa;YAClC,MAAM,OAAO,GAAG;gBACd,KAAK;gBACL,OAAO,EAAE;oBACP,KAAK,EAAE,SAAS,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;iBACxD;aACF,CAAC;YACF,OAAO,IAAA,wBAAa,EAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;IAEF,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,eAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CACzC,QAAQ,CAAC,QAAQ,CAAC,IAAA,oBAAQ,EAAC,IAAI,EAAE,IAAA,mBAAO,EAAC,IAAI,CAAC,CAAW,CAAC,CAC3D,CAAC;IACF,OAAO,MAAM,OAAO,CAAC,GAAG,CACtB,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE;QACtC,MAAM,IAAI,GAAG,IAAA,gBAAI,EAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,EAAE,CAAC;QACvB,MAAM,UAAU,GAAG,IAAA,2BAAiB,EAAC,IAAI,CAAC,CAAC;QAC3C,MAAM,MAAM,GAAG,IAAA,oBAAQ,EAAC,IAAI,EAAE,IAAA,mBAAO,EAAC,IAAI,CAAC,CAAW,CAAC;QACvD,IAAI,iBAAiB,EAAE;YACrB,MAAM,IAAI,GAAG,IAAA,wBAAU,EAAC,iBAAiB,CAAC,CAAC;YAC3C,MAAM,UAAU,GAAG,KAAK,SAAS,CAAC,EAAE,WAAkB;gBACpD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,WAAW,EAAE;oBACrC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACnB,MAAM,KAAK,CAAC;iBACb;YACH,CAAC,CAAC;YACF,MAAM,IAAA,mBAAQ,EAAC,KAAK,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC;YAC9C,OAAO;gBACL,IAAI,EAAE,IAAI;gBACV,MAAM;gBACN,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;aAC7B,CAAC;SACH;QACD,MAAM,IAAA,mBAAQ,EAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,IAAI;YACV,MAAM;SACP,CAAC;IACJ,CAAC,CAAC,CACH,CAAC;AACJ,CAAC","sourcesContent":["import { Minipass } from 'minipass';\nimport { ok } from 'node:assert/strict';\nimport { createHash } from 'node:crypto';\nimport { createWriteStream } from 'node:fs';\nimport { rename, mkdir, rm } from 'node:fs/promises';\nimport { Agent as HttpAgent } from 'node:http';\nimport { Agent as HttpsAgent } from 'node:https';\nimport { join, basename, extname, relative } from 'node:path';\nimport { pipeline } from 'node:stream/promises';\nimport { extract as extractTar } from 'tar';\nimport { Open, type Source, type Entry } from 'unzipper';\n\nimport { startDownload } from './download';\nimport { Extension, type Binary } from './types';\nimport { say } from './utils';\n\n/**\n * Extracts the binaries from the given URL and writes them to the destination.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksums - The checksums to verify the binaries against\n * @returns The list of binaries extracted\n */\n\n/**\n * Extracts the binaries from the given URL and writes them to the destination.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksums - The checksums to verify the binaries against\n * @returns The list of binaries extracted\n */\nexport async function extractFrom(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksums: { algorithm: string; binaries: Record<Binary, string> } | null,\n) {\n const extract = url.pathname.toLowerCase().endsWith(Extension.Tar)\n ? extractFromTar\n : extractFromZip;\n // write all files to a temporary directory first, then rename to the final\n // destination to avoid accidental partial extraction. 
We don't use\n // `os.tmpdir` for this because `rename` will fail if the directories are on\n // different file systems.\n const tempDir = `${dir}.downloading`;\n const rmOpts = { recursive: true, maxRetries: 3, force: true };\n try {\n // clean up any previous in-progress downloads\n await rm(tempDir, rmOpts);\n // make the temporary directory to extract the binaries to\n await mkdir(tempDir, { recursive: true });\n const downloads = await extract(\n url,\n binaries,\n tempDir,\n checksums?.algorithm,\n );\n ok(downloads.length === binaries.length, 'Failed to extract all binaries');\n\n const paths: string[] = [];\n for (const { path, binary, checksum } of downloads) {\n if (checksums) {\n say(`verifying checksum for ${binary}`);\n const expected = checksums.binaries[binary];\n if (checksum === expected) {\n say(`checksum verified for ${binary}`);\n } else {\n throw new Error(\n `checksum mismatch for ${binary}, expected ${expected}, got ${checksum}`,\n );\n }\n }\n // add the *final* path to the list of binaries\n paths.push(join(dir, relative(tempDir, path)));\n }\n\n // this directory shouldn't exist, but if two simultaneous `yarn foundryup`\n // processes are running, it might. Last process wins, so we remove other\n // `dir`s just in case.\n await rm(dir, rmOpts);\n // everything has been extracted; move the files to their final destination\n await rename(tempDir, dir);\n // return the list of extracted binaries\n return paths;\n } catch (error) {\n // if things fail for any reason try to clean up a bit. it is very important\n // to not leave `dir` behind, as its existence is a signal that the binaries\n // are installed.\n const rmErrors = (\n await Promise.allSettled([rm(tempDir, rmOpts), rm(dir, rmOpts)])\n )\n .filter((r) => r.status === 'rejected')\n .map((r) => (r as PromiseRejectedResult).reason);\n\n // if we failed to clean up, create an aggregate error message\n if (rmErrors.length) {\n throw new AggregateError(\n [error, ...rmErrors],\n 'This is a bug; you should report it.',\n );\n }\n throw error;\n }\n}\n/**\n * Extracts the binaries from a tar archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\n/**\n * Extracts the binaries from a tar archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\nasync function extractFromTar(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const downloads: {\n path: string;\n binary: Binary;\n checksum?: string;\n }[] = [];\n await pipeline(\n startDownload(url),\n extractTar(\n {\n cwd: dir,\n transform: (entry) => {\n const absolutePath = entry.absolute;\n if (!absolutePath) {\n throw new Error('Missing absolute path for entry');\n }\n\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const passThrough = new Minipass({ async: true });\n passThrough.pipe(hash);\n passThrough.on('end', () => {\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n checksum: hash.digest('hex'),\n });\n });\n return passThrough;\n }\n\n // When no checksum is needed, record the entry and return undefined\n // to 
use the original stream without transformation\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n });\n return undefined;\n },\n },\n binaries,\n ),\n );\n return downloads;\n}\n/**\n * Extracts the binaries from a zip archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\nasync function extractFromZip(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const agent = new (url.protocol === 'http:' ? HttpAgent : HttpsAgent)({\n keepAlive: true,\n });\n const source: Source = {\n async size() {\n const download = startDownload(url, { agent, method: 'HEAD' });\n const response = await download.response();\n const contentLength = response.headers['content-length'];\n return contentLength ? parseInt(contentLength, 10) : 0;\n },\n stream(offset: number, bytes: number) {\n const options = {\n agent,\n headers: {\n range: `bytes=${offset}-${bytes ? offset + bytes : ''}`,\n },\n };\n return startDownload(url, options);\n },\n };\n\n const { files } = await Open.custom(source, {});\n const filtered = files.filter(({ path }) =>\n binaries.includes(basename(path, extname(path)) as Binary),\n );\n return await Promise.all(\n filtered.map(async ({ path, stream }) => {\n const dest = join(dir, path);\n const entry = stream();\n const destStream = createWriteStream(dest);\n const binary = basename(path, extname(path)) as Binary;\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const hashStream = async function* (entryStream: Entry) {\n for await (const chunk of entryStream) {\n hash.update(chunk);\n yield chunk;\n }\n };\n await pipeline(entry, hashStream, destStream);\n return {\n path: dest,\n binary,\n checksum: hash.digest('hex'),\n };\n }\n await pipeline(entry, destStream);\n return {\n path: dest,\n binary,\n };\n }),\n );\n}\n"]}
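The declaration above shows `extractFrom(url, binaries, dir, checksums)`, where `checksums` is either `null` or an `{ algorithm, binaries }` object mapping each binary name to its expected digest. A hedged call sketch, assuming `Binary` covers the four binary names listed in the changelog and using placeholder values:

```ts
import { extractFrom } from './extract';
import { type Binary } from './types';

async function installBinaries(cacheDir: string): Promise<void> {
  // Placeholder release URL; foundryup resolves the real one from the
  // requested Foundry version and the host platform/architecture.
  const archiveUrl = new URL(
    'https://example.com/foundry_nightly_linux_amd64.tar.gz',
  );
  const binaries = ['anvil', 'forge'] as Binary[];
  // Passing `null` skips verification; an object such as
  // `{ algorithm: 'sha256', binaries: { anvil: '<digest>', ... } }` enables
  // the SHA-256 check described in the changelog.
  const paths = await extractFrom(archiveUrl, binaries, cacheDir, null);
  console.log('extracted binaries:', paths);
}
```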
package/dist/extract.d.cts
CHANGED
@@ -1,4 +1,13 @@
-import { Binary } from "./types.cjs";
+import { type Binary } from "./types.cjs";
+/**
+ * Extracts the binaries from the given URL and writes them to the destination.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksums - The checksums to verify the binaries against
+ * @returns The list of binaries extracted
+ */
 /**
  * Extracts the binaries from the given URL and writes them to the destination.
  *
package/dist/extract.d.cts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"extract.d.cts","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAaA,OAAO,EAAa,MAAM,EAAE,oBAAgB;
+
{"version":3,"file":"extract.d.cts","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAaA,OAAO,EAAa,KAAK,MAAM,EAAE,oBAAgB;AAGjD;;;;;;;;GAQG;AAEH;;;;;;;;GAQG;AACH,wBAAsB,WAAW,CAC/B,GAAG,EAAE,GAAG,EACR,QAAQ,EAAE,MAAM,EAAE,EAClB,GAAG,EAAE,MAAM,EACX,SAAS,EAAE;IAAE,SAAS,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,GAAG,IAAI,qBAoE1E"}
package/dist/extract.d.mts
CHANGED
@@ -1,4 +1,13 @@
-import { Binary } from "./types.mjs";
+import { type Binary } from "./types.mjs";
+/**
+ * Extracts the binaries from the given URL and writes them to the destination.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksums - The checksums to verify the binaries against
+ * @returns The list of binaries extracted
+ */
 /**
  * Extracts the binaries from the given URL and writes them to the destination.
  *
package/dist/extract.d.mts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"extract.d.mts","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAaA,OAAO,EAAa,MAAM,EAAE,oBAAgB;
+
{"version":3,"file":"extract.d.mts","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAaA,OAAO,EAAa,KAAK,MAAM,EAAE,oBAAgB;AAGjD;;;;;;;;GAQG;AAEH;;;;;;;;GAQG;AACH,wBAAsB,WAAW,CAC/B,GAAG,EAAE,GAAG,EACR,QAAQ,EAAE,MAAM,EAAE,EAClB,GAAG,EAAE,MAAM,EACX,SAAS,EAAE;IAAE,SAAS,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,GAAG,IAAI,qBAoE1E"}
package/dist/extract.mjs
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { Minipass } from "minipass";
|
|
1
2
|
import { ok } from "node:assert/strict";
|
|
2
3
|
import { createHash } from "node:crypto";
|
|
3
4
|
import { createWriteStream } from "node:fs";
|
|
@@ -6,12 +7,20 @@ import { Agent as HttpAgent } from "node:http";
 import { Agent as HttpsAgent } from "node:https";
 import { join, basename, extname, relative } from "node:path";
 import { pipeline } from "node:stream/promises";
-import { Minipass } from "minipass";
 import { extract as extractTar } from "tar";
 import { Open } from "unzipper";
-import { say } from "./utils.mjs";
 import { startDownload } from "./download.mjs";
-import { Extension
+import { Extension } from "./types.mjs";
+import { say } from "./utils.mjs";
+/**
+ * Extracts the binaries from the given URL and writes them to the destination.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksums - The checksums to verify the binaries against
+ * @returns The list of binaries extracted
+ */
 /**
  * Extracts the binaries from the given URL and writes them to the destination.
  *
@@ -76,6 +85,15 @@ export async function extractFrom(url, binaries, dir, checksums) {
     throw error;
   }
 }
+/**
+ * Extracts the binaries from a tar archive.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksumAlgorithm - The checksum algorithm to use
+ * @returns The list of binaries extracted
+ */
 /**
  * Extracts the binaries from a tar archive.
  *
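The new comment block describes `extractFromTar`, and the bundled source map shows how those checksums are produced: when an algorithm is supplied, each tar entry is routed through a `Minipass` pass-through that is also piped into a `node:crypto` hash, so the digest is ready as soon as the entry finishes streaming to disk. A condensed sketch of that pattern; the `'sha256'` algorithm named in the comment is an assumption, since the real value comes from the release's checksum manifest.

```typescript
import { createHash } from 'node:crypto';
import { Minipass } from 'minipass';

// Pass-through hashing as used in the tar `transform` hook: the entry's
// bytes flow on to their destination unchanged while a hash consumes the
// same stream, and the digest is reported once the entry ends.
function hashingPassThrough(
  algorithm: string, // e.g. 'sha256' (assumed)
  onDigest: (digest: string) => void,
): Minipass {
  const hash = createHash(algorithm);
  const passThrough = new Minipass({ async: true });
  passThrough.pipe(hash);
  passThrough.on('end', () => onDigest(hash.digest('hex')));
  return passThrough;
}
```

Hashing while the entry streams avoids re-reading each extracted binary from disk just to verify it.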
package/dist/extract.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"extract.mjs","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,2BAA2B;AACxC,OAAO,EAAE,UAAU,EAAE,oBAAoB;AACzC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB;AAC5C,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,yBAAyB;AACrD,OAAO,EAAE,KAAK,IAAI,SAAS,EAAE,kBAAkB;AAC/C,OAAO,EAAE,KAAK,IAAI,UAAU,EAAE,mBAAmB;AACjD,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,QAAQ,EAAE,kBAAkB;AAC9D,OAAO,EAAE,QAAQ,EAAE,6BAA6B;AAChD,OAAO,EAAE,QAAQ,EAAE,iBAAiB;AACpC,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,YAAY;AAC5C,OAAO,EAAE,IAAI,EAA2B,iBAAiB;AACzD,OAAO,EAAE,GAAG,EAAE,oBAAgB;AAC9B,OAAO,EAAE,aAAa,EAAE,uBAAmB;AAC3C,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,oBAAgB;AAE5C;;;;;;;;GAQG;AAEH,MAAM,CAAC,KAAK,UAAU,WAAW,CAC/B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,SAAyE;IAEzE,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC;QAChE,CAAC,CAAC,cAAc;QAChB,CAAC,CAAC,cAAc,CAAC;IACnB,2EAA2E;IAC3E,mEAAmE;IACnE,4EAA4E;IAC5E,0BAA0B;IAC1B,MAAM,OAAO,GAAG,GAAG,GAAG,cAAc,CAAC;IACrC,MAAM,MAAM,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;IAC/D,IAAI;QACF,8CAA8C;QAC9C,MAAM,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC1B,0DAA0D;QAC1D,MAAM,KAAK,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC1C,MAAM,SAAS,GAAG,MAAM,OAAO,CAC7B,GAAG,EACH,QAAQ,EACR,OAAO,EACP,SAAS,EAAE,SAAS,CACrB,CAAC;QACF,EAAE,CAAC,SAAS,CAAC,MAAM,KAAK,QAAQ,CAAC,MAAM,EAAE,gCAAgC,CAAC,CAAC;QAE3E,MAAM,KAAK,GAAa,EAAE,CAAC;QAC3B,KAAK,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,EAAE;YAClD,IAAI,SAAS,EAAE;gBACb,GAAG,CAAC,0BAA0B,MAAM,EAAE,CAAC,CAAC;gBACxC,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;gBAC5C,IAAI,QAAQ,KAAK,QAAQ,EAAE;oBACzB,GAAG,CAAC,yBAAyB,MAAM,EAAE,CAAC,CAAC;iBACxC;qBAAM;oBACL,MAAM,IAAI,KAAK,CACb,yBAAyB,MAAM,cAAc,QAAQ,SAAS,QAAQ,EAAE,CACzE,CAAC;iBACH;aACF;YACD,+CAA+C;YAC/C,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC;SAChD;QAED,2EAA2E;QAC3E,yEAAyE;QACzE,uBAAuB;QACvB,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;QACtB,2EAA2E;QAC3E,MAAM,MAAM,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAC3B,wCAAwC;QACxC,OAAO,KAAK,CAAC;KACd;IAAC,OAAO,KAAK,EAAE;QACd,4EAA4E;QAC5E,4EAA4E;QAC5E,iBAAiB;QACjB,MAAM,QAAQ,GAAG,CACf,MAAM,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,EAAE,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC,CAAC,CACjE;aACE,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,KAAK,UAAU,CAAC;aACtC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAA2B,CAAC,MAAM,CAAC,CAAC;QAEnD,8DAA8D;QAC9D,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,MAAM,IAAI,cAAc,CACtB,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,EACpB,sCAAsC,CACvC,CAAC;SACH;QACD,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AACD;;;;;;;;GAQG;AAEH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,SAAS,GAIT,EAAE,CAAC;IACT,MAAM,QAAQ,CACZ,aAAa,CAAC,GAAG,CAAC,EAClB,UAAU,CACR;QACE,GAAG,EAAE,GAAG;QACR,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE;YACnB,MAAM,YAAY,GAAG,KAAK,CAAC,QAAQ,CAAC;YACpC,IAAI,CAAC,YAAY,EAAE;gBACjB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,IAAI,iBAAiB,EAAE;gBACrB,MAAM,IAAI,GAAG,UAAU,CAAC,iBAAiB,CAAC,CAAC;gBAC3C,MAAM,WAAW,GAAG,IAAI,QAAQ,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;gBAClD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACvB,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBACzB,SAAS,CAAC,IAAI,CAAC;wBACb,IAAI,EAAE,YAAY;wBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;wBAC5B,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;qBAC7B,CAAC,CAAC;gBACL,CAAC,CAAC,CAAC;gBACH,OAAO,WAAW,CAAC;aACpB;YAED,oEAAoE;YACpE,oDAAoD;YACpD,SAAS,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,YAAY;gBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;aAC7B,CAAC,CAAC;YACH,OAAO,SAAS,CAAC;QACnB,CAAC;KACF,EACD,QAAQ,CACT,CACF,CAAC;IACF,OAAO,SAAS,CAAC;AACnB,CAAC;AACD;;;;;;;;GAQG;AA
EH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC;QACpE,SAAS,EAAE,IAAI;KAChB,CAAC,CAAC;IACH,MAAM,MAAM,GAAW;QACrB,KAAK,CAAC,IAAI;YACR,MAAM,QAAQ,GAAG,aAAa,CAAC,GAAG,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;YAC/D,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAC;YAC3C,MAAM,aAAa,GAAG,QAAQ,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;YACzD,OAAO,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACzD,CAAC;QACD,MAAM,CAAC,MAAc,EAAE,KAAa;YAClC,MAAM,OAAO,GAAG;gBACd,KAAK;gBACL,OAAO,EAAE;oBACP,KAAK,EAAE,SAAS,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;iBACxD;aACF,CAAC;YACF,OAAO,aAAa,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;IAEF,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CACzC,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAW,CAAC,CAC3D,CAAC;IACF,OAAO,MAAM,OAAO,CAAC,GAAG,CACtB,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE;QACtC,MAAM,IAAI,GAAG,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,EAAE,CAAC;QACvB,MAAM,UAAU,GAAG,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAC3C,MAAM,MAAM,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAW,CAAC;QACvD,IAAI,iBAAiB,EAAE;YACrB,MAAM,IAAI,GAAG,UAAU,CAAC,iBAAiB,CAAC,CAAC;YAC3C,MAAM,UAAU,GAAG,KAAK,SAAS,CAAC,EAAE,WAAkB;gBACpD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,WAAW,EAAE;oBACrC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACnB,MAAM,KAAK,CAAC;iBACb;YACH,CAAC,CAAC;YACF,MAAM,QAAQ,CAAC,KAAK,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC;YAC9C,OAAO;gBACL,IAAI,EAAE,IAAI;gBACV,MAAM;gBACN,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;aAC7B,CAAC;SACH;QACD,MAAM,QAAQ,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,IAAI;YACV,MAAM;SACP,CAAC;IACJ,CAAC,CAAC,CACH,CAAC;AACJ,CAAC","sourcesContent":["import { ok } from 'node:assert/strict';\nimport { createHash } from 'node:crypto';\nimport { createWriteStream } from 'node:fs';\nimport { rename, mkdir, rm } from 'node:fs/promises';\nimport { Agent as HttpAgent } from 'node:http';\nimport { Agent as HttpsAgent } from 'node:https';\nimport { join, basename, extname, relative } from 'node:path';\nimport { pipeline } from 'node:stream/promises';\nimport { Minipass } from 'minipass';\nimport { extract as extractTar } from 'tar';\nimport { Open, type Source, type Entry } from 'unzipper';\nimport { say } from './utils';\nimport { startDownload } from './download';\nimport { Extension, Binary } from './types';\n\n/**\n * Extracts the binaries from the given URL and writes them to the destination.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksums - The checksums to verify the binaries against\n * @returns The list of binaries extracted\n */\n\nexport async function extractFrom(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksums: { algorithm: string; binaries: Record<Binary, string> } | null,\n) {\n const extract = url.pathname.toLowerCase().endsWith(Extension.Tar)\n ? extractFromTar\n : extractFromZip;\n // write all files to a temporary directory first, then rename to the final\n // destination to avoid accidental partial extraction. 
We don't use\n // `os.tmpdir` for this because `rename` will fail if the directories are on\n // different file systems.\n const tempDir = `${dir}.downloading`;\n const rmOpts = { recursive: true, maxRetries: 3, force: true };\n try {\n // clean up any previous in-progress downloads\n await rm(tempDir, rmOpts);\n // make the temporary directory to extract the binaries to\n await mkdir(tempDir, { recursive: true });\n const downloads = await extract(\n url,\n binaries,\n tempDir,\n checksums?.algorithm,\n );\n ok(downloads.length === binaries.length, 'Failed to extract all binaries');\n\n const paths: string[] = [];\n for (const { path, binary, checksum } of downloads) {\n if (checksums) {\n say(`verifying checksum for ${binary}`);\n const expected = checksums.binaries[binary];\n if (checksum === expected) {\n say(`checksum verified for ${binary}`);\n } else {\n throw new Error(\n `checksum mismatch for ${binary}, expected ${expected}, got ${checksum}`,\n );\n }\n }\n // add the *final* path to the list of binaries\n paths.push(join(dir, relative(tempDir, path)));\n }\n\n // this directory shouldn't exist, but if two simultaneous `yarn foundryup`\n // processes are running, it might. Last process wins, so we remove other\n // `dir`s just in case.\n await rm(dir, rmOpts);\n // everything has been extracted; move the files to their final destination\n await rename(tempDir, dir);\n // return the list of extracted binaries\n return paths;\n } catch (error) {\n // if things fail for any reason try to clean up a bit. it is very important\n // to not leave `dir` behind, as its existence is a signal that the binaries\n // are installed.\n const rmErrors = (\n await Promise.allSettled([rm(tempDir, rmOpts), rm(dir, rmOpts)])\n )\n .filter((r) => r.status === 'rejected')\n .map((r) => (r as PromiseRejectedResult).reason);\n\n // if we failed to clean up, create an aggregate error message\n if (rmErrors.length) {\n throw new AggregateError(\n [error, ...rmErrors],\n 'This is a bug; you should report it.',\n );\n }\n throw error;\n }\n}\n/**\n * Extracts the binaries from a tar archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\nasync function extractFromTar(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const downloads: {\n path: string;\n binary: Binary;\n checksum?: string;\n }[] = [];\n await pipeline(\n startDownload(url),\n extractTar(\n {\n cwd: dir,\n transform: (entry) => {\n const absolutePath = entry.absolute;\n if (!absolutePath) {\n throw new Error('Missing absolute path for entry');\n }\n\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const passThrough = new Minipass({ async: true });\n passThrough.pipe(hash);\n passThrough.on('end', () => {\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n checksum: hash.digest('hex'),\n });\n });\n return passThrough;\n }\n\n // When no checksum is needed, record the entry and return undefined\n // to use the original stream without transformation\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n });\n return undefined;\n },\n },\n binaries,\n ),\n );\n return downloads;\n}\n/**\n * Extracts the binaries from a zip archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * 
@param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\nasync function extractFromZip(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const agent = new (url.protocol === 'http:' ? HttpAgent : HttpsAgent)({\n keepAlive: true,\n });\n const source: Source = {\n async size() {\n const download = startDownload(url, { agent, method: 'HEAD' });\n const response = await download.response();\n const contentLength = response.headers['content-length'];\n return contentLength ? parseInt(contentLength, 10) : 0;\n },\n stream(offset: number, bytes: number) {\n const options = {\n agent,\n headers: {\n range: `bytes=${offset}-${bytes ? offset + bytes : ''}`,\n },\n };\n return startDownload(url, options);\n },\n };\n\n const { files } = await Open.custom(source, {});\n const filtered = files.filter(({ path }) =>\n binaries.includes(basename(path, extname(path)) as Binary),\n );\n return await Promise.all(\n filtered.map(async ({ path, stream }) => {\n const dest = join(dir, path);\n const entry = stream();\n const destStream = createWriteStream(dest);\n const binary = basename(path, extname(path)) as Binary;\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const hashStream = async function* (entryStream: Entry) {\n for await (const chunk of entryStream) {\n hash.update(chunk);\n yield chunk;\n }\n };\n await pipeline(entry, hashStream, destStream);\n return {\n path: dest,\n binary,\n checksum: hash.digest('hex'),\n };\n }\n await pipeline(entry, destStream);\n return {\n path: dest,\n binary,\n };\n }),\n );\n}\n"]}
+
{"version":3,"file":"extract.mjs","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,iBAAiB;AACpC,OAAO,EAAE,EAAE,EAAE,2BAA2B;AACxC,OAAO,EAAE,UAAU,EAAE,oBAAoB;AACzC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB;AAC5C,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,yBAAyB;AACrD,OAAO,EAAE,KAAK,IAAI,SAAS,EAAE,kBAAkB;AAC/C,OAAO,EAAE,KAAK,IAAI,UAAU,EAAE,mBAAmB;AACjD,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,QAAQ,EAAE,kBAAkB;AAC9D,OAAO,EAAE,QAAQ,EAAE,6BAA6B;AAChD,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,YAAY;AAC5C,OAAO,EAAE,IAAI,EAA2B,iBAAiB;AAEzD,OAAO,EAAE,aAAa,EAAE,uBAAmB;AAC3C,OAAO,EAAE,SAAS,EAAe,oBAAgB;AACjD,OAAO,EAAE,GAAG,EAAE,oBAAgB;AAE9B;;;;;;;;GAQG;AAEH;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,WAAW,CAC/B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,SAAyE;IAEzE,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC;QAChE,CAAC,CAAC,cAAc;QAChB,CAAC,CAAC,cAAc,CAAC;IACnB,2EAA2E;IAC3E,mEAAmE;IACnE,4EAA4E;IAC5E,0BAA0B;IAC1B,MAAM,OAAO,GAAG,GAAG,GAAG,cAAc,CAAC;IACrC,MAAM,MAAM,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;IAC/D,IAAI;QACF,8CAA8C;QAC9C,MAAM,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC1B,0DAA0D;QAC1D,MAAM,KAAK,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC1C,MAAM,SAAS,GAAG,MAAM,OAAO,CAC7B,GAAG,EACH,QAAQ,EACR,OAAO,EACP,SAAS,EAAE,SAAS,CACrB,CAAC;QACF,EAAE,CAAC,SAAS,CAAC,MAAM,KAAK,QAAQ,CAAC,MAAM,EAAE,gCAAgC,CAAC,CAAC;QAE3E,MAAM,KAAK,GAAa,EAAE,CAAC;QAC3B,KAAK,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,EAAE;YAClD,IAAI,SAAS,EAAE;gBACb,GAAG,CAAC,0BAA0B,MAAM,EAAE,CAAC,CAAC;gBACxC,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;gBAC5C,IAAI,QAAQ,KAAK,QAAQ,EAAE;oBACzB,GAAG,CAAC,yBAAyB,MAAM,EAAE,CAAC,CAAC;iBACxC;qBAAM;oBACL,MAAM,IAAI,KAAK,CACb,yBAAyB,MAAM,cAAc,QAAQ,SAAS,QAAQ,EAAE,CACzE,CAAC;iBACH;aACF;YACD,+CAA+C;YAC/C,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC;SAChD;QAED,2EAA2E;QAC3E,yEAAyE;QACzE,uBAAuB;QACvB,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;QACtB,2EAA2E;QAC3E,MAAM,MAAM,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAC3B,wCAAwC;QACxC,OAAO,KAAK,CAAC;KACd;IAAC,OAAO,KAAK,EAAE;QACd,4EAA4E;QAC5E,4EAA4E;QAC5E,iBAAiB;QACjB,MAAM,QAAQ,GAAG,CACf,MAAM,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,EAAE,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC,CAAC,CACjE;aACE,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,KAAK,UAAU,CAAC;aACtC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAA2B,CAAC,MAAM,CAAC,CAAC;QAEnD,8DAA8D;QAC9D,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,MAAM,IAAI,cAAc,CACtB,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,EACpB,sCAAsC,CACvC,CAAC;SACH;QACD,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AACD;;;;;;;;GAQG;AAEH;;;;;;;;GAQG;AACH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,SAAS,GAIT,EAAE,CAAC;IACT,MAAM,QAAQ,CACZ,aAAa,CAAC,GAAG,CAAC,EAClB,UAAU,CACR;QACE,GAAG,EAAE,GAAG;QACR,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE;YACnB,MAAM,YAAY,GAAG,KAAK,CAAC,QAAQ,CAAC;YACpC,IAAI,CAAC,YAAY,EAAE;gBACjB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,IAAI,iBAAiB,EAAE;gBACrB,MAAM,IAAI,GAAG,UAAU,CAAC,iBAAiB,CAAC,CAAC;gBAC3C,MAAM,WAAW,GAAG,IAAI,QAAQ,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;gBAClD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACvB,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBACzB,SAAS,CAAC,IAAI,CAAC;wBACb,IAAI,EAAE,YAAY;wBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;wBAC5B,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;qBAC7B,CAAC,CAAC;gBACL,CAAC,CAAC,CAAC;gBACH,OAAO,WAAW,CAAC;aACpB;YAED,oEAAoE;YACpE,oDAAoD;YACpD,SAAS,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,YAAY;gBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;aAC7B,CAAC,CAAC;YACH,OAAO,SAAS,CAAC;QACnB,CAAC;KACF,EACD,QAAQ,CACT,CACF,CAAC;IACF,OAAO,SAAS,CAAC;AACnB,
CAAC;AACD;;;;;;;;GAQG;AACH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC;QACpE,SAAS,EAAE,IAAI;KAChB,CAAC,CAAC;IACH,MAAM,MAAM,GAAW;QACrB,KAAK,CAAC,IAAI;YACR,MAAM,QAAQ,GAAG,aAAa,CAAC,GAAG,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;YAC/D,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAC;YAC3C,MAAM,aAAa,GAAG,QAAQ,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;YACzD,OAAO,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACzD,CAAC;QACD,MAAM,CAAC,MAAc,EAAE,KAAa;YAClC,MAAM,OAAO,GAAG;gBACd,KAAK;gBACL,OAAO,EAAE;oBACP,KAAK,EAAE,SAAS,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;iBACxD;aACF,CAAC;YACF,OAAO,aAAa,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;IAEF,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CACzC,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAW,CAAC,CAC3D,CAAC;IACF,OAAO,MAAM,OAAO,CAAC,GAAG,CACtB,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE;QACtC,MAAM,IAAI,GAAG,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,EAAE,CAAC;QACvB,MAAM,UAAU,GAAG,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAC3C,MAAM,MAAM,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAW,CAAC;QACvD,IAAI,iBAAiB,EAAE;YACrB,MAAM,IAAI,GAAG,UAAU,CAAC,iBAAiB,CAAC,CAAC;YAC3C,MAAM,UAAU,GAAG,KAAK,SAAS,CAAC,EAAE,WAAkB;gBACpD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,WAAW,EAAE;oBACrC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACnB,MAAM,KAAK,CAAC;iBACb;YACH,CAAC,CAAC;YACF,MAAM,QAAQ,CAAC,KAAK,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC;YAC9C,OAAO;gBACL,IAAI,EAAE,IAAI;gBACV,MAAM;gBACN,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;aAC7B,CAAC;SACH;QACD,MAAM,QAAQ,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,IAAI;YACV,MAAM;SACP,CAAC;IACJ,CAAC,CAAC,CACH,CAAC;AACJ,CAAC","sourcesContent":["import { Minipass } from 'minipass';\nimport { ok } from 'node:assert/strict';\nimport { createHash } from 'node:crypto';\nimport { createWriteStream } from 'node:fs';\nimport { rename, mkdir, rm } from 'node:fs/promises';\nimport { Agent as HttpAgent } from 'node:http';\nimport { Agent as HttpsAgent } from 'node:https';\nimport { join, basename, extname, relative } from 'node:path';\nimport { pipeline } from 'node:stream/promises';\nimport { extract as extractTar } from 'tar';\nimport { Open, type Source, type Entry } from 'unzipper';\n\nimport { startDownload } from './download';\nimport { Extension, type Binary } from './types';\nimport { say } from './utils';\n\n/**\n * Extracts the binaries from the given URL and writes them to the destination.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksums - The checksums to verify the binaries against\n * @returns The list of binaries extracted\n */\n\n/**\n * Extracts the binaries from the given URL and writes them to the destination.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksums - The checksums to verify the binaries against\n * @returns The list of binaries extracted\n */\nexport async function extractFrom(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksums: { algorithm: string; binaries: Record<Binary, string> } | null,\n) {\n const extract = 
url.pathname.toLowerCase().endsWith(Extension.Tar)\n ? extractFromTar\n : extractFromZip;\n // write all files to a temporary directory first, then rename to the final\n // destination to avoid accidental partial extraction. We don't use\n // `os.tmpdir` for this because `rename` will fail if the directories are on\n // different file systems.\n const tempDir = `${dir}.downloading`;\n const rmOpts = { recursive: true, maxRetries: 3, force: true };\n try {\n // clean up any previous in-progress downloads\n await rm(tempDir, rmOpts);\n // make the temporary directory to extract the binaries to\n await mkdir(tempDir, { recursive: true });\n const downloads = await extract(\n url,\n binaries,\n tempDir,\n checksums?.algorithm,\n );\n ok(downloads.length === binaries.length, 'Failed to extract all binaries');\n\n const paths: string[] = [];\n for (const { path, binary, checksum } of downloads) {\n if (checksums) {\n say(`verifying checksum for ${binary}`);\n const expected = checksums.binaries[binary];\n if (checksum === expected) {\n say(`checksum verified for ${binary}`);\n } else {\n throw new Error(\n `checksum mismatch for ${binary}, expected ${expected}, got ${checksum}`,\n );\n }\n }\n // add the *final* path to the list of binaries\n paths.push(join(dir, relative(tempDir, path)));\n }\n\n // this directory shouldn't exist, but if two simultaneous `yarn foundryup`\n // processes are running, it might. Last process wins, so we remove other\n // `dir`s just in case.\n await rm(dir, rmOpts);\n // everything has been extracted; move the files to their final destination\n await rename(tempDir, dir);\n // return the list of extracted binaries\n return paths;\n } catch (error) {\n // if things fail for any reason try to clean up a bit. it is very important\n // to not leave `dir` behind, as its existence is a signal that the binaries\n // are installed.\n const rmErrors = (\n await Promise.allSettled([rm(tempDir, rmOpts), rm(dir, rmOpts)])\n )\n .filter((r) => r.status === 'rejected')\n .map((r) => (r as PromiseRejectedResult).reason);\n\n // if we failed to clean up, create an aggregate error message\n if (rmErrors.length) {\n throw new AggregateError(\n [error, ...rmErrors],\n 'This is a bug; you should report it.',\n );\n }\n throw error;\n }\n}\n/**\n * Extracts the binaries from a tar archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\n/**\n * Extracts the binaries from a tar archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\nasync function extractFromTar(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const downloads: {\n path: string;\n binary: Binary;\n checksum?: string;\n }[] = [];\n await pipeline(\n startDownload(url),\n extractTar(\n {\n cwd: dir,\n transform: (entry) => {\n const absolutePath = entry.absolute;\n if (!absolutePath) {\n throw new Error('Missing absolute path for entry');\n }\n\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const passThrough = new Minipass({ async: true });\n passThrough.pipe(hash);\n passThrough.on('end', () => 
{\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n checksum: hash.digest('hex'),\n });\n });\n return passThrough;\n }\n\n // When no checksum is needed, record the entry and return undefined\n // to use the original stream without transformation\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n });\n return undefined;\n },\n },\n binaries,\n ),\n );\n return downloads;\n}\n/**\n * Extracts the binaries from a zip archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\nasync function extractFromZip(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const agent = new (url.protocol === 'http:' ? HttpAgent : HttpsAgent)({\n keepAlive: true,\n });\n const source: Source = {\n async size() {\n const download = startDownload(url, { agent, method: 'HEAD' });\n const response = await download.response();\n const contentLength = response.headers['content-length'];\n return contentLength ? parseInt(contentLength, 10) : 0;\n },\n stream(offset: number, bytes: number) {\n const options = {\n agent,\n headers: {\n range: `bytes=${offset}-${bytes ? offset + bytes : ''}`,\n },\n };\n return startDownload(url, options);\n },\n };\n\n const { files } = await Open.custom(source, {});\n const filtered = files.filter(({ path }) =>\n binaries.includes(basename(path, extname(path)) as Binary),\n );\n return await Promise.all(\n filtered.map(async ({ path, stream }) => {\n const dest = join(dir, path);\n const entry = stream();\n const destStream = createWriteStream(dest);\n const binary = basename(path, extname(path)) as Binary;\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const hashStream = async function* (entryStream: Entry) {\n for await (const chunk of entryStream) {\n hash.update(chunk);\n yield chunk;\n }\n };\n await pipeline(entry, hashStream, destStream);\n return {\n path: dest,\n binary,\n checksum: hash.digest('hex'),\n };\n }\n await pipeline(entry, destStream);\n return {\n path: dest,\n binary,\n };\n }),\n );\n}\n"]}
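The updated map also carries the zip path, `extractFromZip`, which hands `unzipper`'s `Open.custom` a random-access source: `size()` issues a `HEAD` request and reads `Content-Length`, and `stream()` issues ranged `GET`s, so only the central directory and the requested entries are fetched. A condensed sketch of that source object, with a placeholder archive URL:

```typescript
import { Agent as HttpsAgent } from 'node:https';
import { Open, type Source } from 'unzipper';
import { startDownload } from './download.mjs';

// Placeholder archive URL; a real caller derives this from the release info.
const url = new URL('https://example.invalid/foundry_nightly_windows_amd64.zip');
const agent = new HttpsAgent({ keepAlive: true });

const source: Source = {
  // Total archive size via a HEAD request's Content-Length header.
  async size() {
    const download = startDownload(url, { agent, method: 'HEAD' });
    const response = await download.response();
    const contentLength = response.headers['content-length'];
    return contentLength ? parseInt(contentLength, 10) : 0;
  },
  // Byte windows via HTTP range requests, one per read unzipper performs.
  stream(offset: number, bytes: number) {
    return startDownload(url, {
      agent,
      headers: { range: `bytes=${offset}-${bytes ? offset + bytes : ''}` },
    });
  },
};

// List entries without downloading the whole archive.
const { files } = await Open.custom(source, {});
console.log(files.map((file) => file.path));
```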