@vltpkg/tar 0.0.0-0.1730239248325

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,15 @@
1
+ Copyright (c) vlt technology, Inc.
2
+
3
+ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
4
+
5
+ 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
6
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
7
+ Subject to the terms and conditions of this license, each copyright holder and contributor hereby grants to those receiving rights under this license a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except for failure to satisfy the conditions of this license) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer this software, where such license applies only to those patent claims, already acquired or hereafter acquired, licensable by such copyright holder or contributor that are necessarily infringed by:
8
+
9
+ (a) their Contribution(s) (the licensed copyrights of copyright holders and non-copyrightable additions of contributors, in source or binary form) alone; or
10
+ (b) combination of their Contribution(s) with the work of authorship to which such Contribution(s) was added by such copyright holder or contributor, if, at the time the Contribution is added, such addition causes such combination to be necessarily infringed. The patent license shall not apply to any other combinations which include the Contribution.
11
+ Except as expressly stated above, no rights or licenses from any copyright holder or contributor is granted under this license, whether expressly, by implication, estoppel or otherwise.
12
+
13
+ DISCLAIMER
14
+
15
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package/README.md ADDED
@@ -0,0 +1,86 @@
1
+ # `@vltpkg/tar`
2
+
3
+ If you are unpacking anything other than npm packages, in
4
+ precisely the way that [vlt](https://vlt.sh) does, then this is
5
+ probably not what you're looking for.
6
+
7
+ For a very complete and battle-tested generic tar implementation
8
+ in JavaScript, see [node-tar](http://npm.im/tar).
9
+
10
+ ## USAGE
11
+
12
+ ### unpack(tarData, targetFolder)
13
+
14
+ Pass your gzip-compressed or raw uncompressed JavaScript package
15
+ tarball in a Buffer (or Uint8Array, or ArrayBuffer slice) into
16
+ the function, along with the folder you want to drop it in.
17
+
18
+ It will unpack as fast as possible, using synchronous I/O.
19
+
20
+ ```js
21
+ import { unpack } from '@vltpkg/tar'
22
+ import { readFileSync } from 'node:fs'
23
+ import { gunzipSync } from 'node:zlib'
24
+
25
+ const gzipped = readFileSync('my-package.tgz')
26
+ const unzipped = gunzipSync(gzipped)
27
+
28
+ unpack(unzipped, 'node_modules/target')
29
+ ```
30
+
31
+ ### `class Pool`
32
+
33
+ Create a pool of worker threads which will service unpack
34
+ requests in parallel.
35
+
36
+ New requests that get added will be assigned to workers as those
37
+ workers become available. When a worker completes its task, and
38
+ there are no more tasks to assign, it is terminated. If not
39
+ enough workers are available to service requests, then new ones
40
+ will be spawned.
41
+
42
+ #### `pool.jobs`
43
+
44
+ The number of worker threads that will be created.
45
+
46
+ #### `pool.workers`
47
+
48
+ Set of currently active worker threads.
49
+
50
+ #### `pool.queue`
51
+
52
+ Queue of requests awaiting an available worker.
53
+
54
+ #### `pool.pending`
55
+
56
+ Unpack requests that have been assigned to a worker, but not yet
57
+ completed.
58
+
59
+ #### `pool.unpack(tarData: Buffer, target: string) => Promise<void>`
60
+
61
+ Unpack the supplied Buffer of data into the target folder,
62
+ using synchronous I/O in a worker thread.
63
+
64
+ Promise resolves when this unpack request has been completed.
65
+
66
+ ## CAVEATS
67
+
68
+ As stated above, **this is not a general purpose tar
69
+ implementation**. It does not handle symbolic links at all (those
70
+ aren't allowed in JavaScript packages). It does not respect
71
+ uid/gid ownership flags. All files are created with a mode of `0o644` (or
72
+ `0o666` on Windows - technically it's `0o666` xor'ed with the
73
+ system umask, so that'll often be `0o664` on linux systems). All
74
+ directories are created with a mode of `0o755` by default (with
75
+ the same windows/umask caveats as files, so maybe that's `0o775`
76
+ or some such.) All ctime/mtime/atime/birthtime values will just
77
+ be left as the current time.
78
+
79
+ It does not do any of the binary linking or other stuff that a
80
+ package manager will need to do. It _just_ does the unpack, as
81
+ ruthlessly fast as possible, and that's all.
82
+
83
+ Synchronous IO is used because that's faster and requires less
84
+ CPU utilization. The `Pool` class manages multiple worker threads
85
+ doing this work, so faster sync IO for the whole thing is much
86
+ more optimal.
@@ -0,0 +1,2 @@
1
+ export declare const findTarDir: (path: string | undefined, tarDir?: string) => string | undefined;
2
+ //# sourceMappingURL=find-tar-dir.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"find-tar-dir.d.ts","sourceRoot":"","sources":["../../src/find-tar-dir.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,UAAU,SACf,MAAM,GAAG,SAAS,WACf,MAAM,uBAkBhB,CAAA"}
@@ -0,0 +1,23 @@
1
+ // usually this will be 'package/', but could also be anything
2
+ // eg, github tarballs are ${user}-${project}-${committish}
3
+ // if it starts with `./` then all entries must as well.
4
+ export const findTarDir = (path, tarDir) => {
5
+ if (tarDir !== undefined)
6
+ return tarDir;
7
+ if (!path)
8
+ return undefined;
9
+ const i = path.indexOf('/', path.startsWith('./') ? 2 : 0);
10
+ if (i === -1)
11
+ return undefined;
12
+ const chomp = path.substring(0, i);
13
+ if (chomp === '.' ||
14
+ chomp === '..' ||
15
+ chomp === '' ||
16
+ chomp === './.' ||
17
+ chomp === './..' ||
18
+ chomp === './') {
19
+ return undefined;
20
+ }
21
+ return chomp + '/';
22
+ };
23
+ //# sourceMappingURL=find-tar-dir.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"find-tar-dir.js","sourceRoot":"","sources":["../../src/find-tar-dir.ts"],"names":[],"mappings":"AAAA,8DAA8D;AAC9D,2DAA2D;AAC3D,wDAAwD;AACxD,MAAM,CAAC,MAAM,UAAU,GAAG,CACxB,IAAwB,EACxB,MAAe,EACf,EAAE;IACF,IAAI,MAAM,KAAK,SAAS;QAAE,OAAO,MAAM,CAAA;IACvC,IAAI,CAAC,IAAI;QAAE,OAAO,SAAS,CAAA;IAC3B,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1D,IAAI,CAAC,KAAK,CAAC,CAAC;QAAE,OAAO,SAAS,CAAA;IAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;IAClC,IACE,KAAK,KAAK,GAAG;QACb,KAAK,KAAK,IAAI;QACd,KAAK,KAAK,EAAE;QACZ,KAAK,KAAK,KAAK;QACf,KAAK,KAAK,MAAM;QAChB,KAAK,KAAK,IAAI,EACd,CAAC;QACD,OAAO,SAAS,CAAA;IAClB,CAAC;IACD,OAAO,KAAK,GAAG,GAAG,CAAA;AACpB,CAAC,CAAA","sourcesContent":["// usually this will be 'package/', but could also be anything\n// eg, github tarballs are ${user}-${project}-${committish}\n// if it starts with `./` then all entries must as well.\nexport const findTarDir = (\n path: string | undefined,\n tarDir?: string,\n) => {\n if (tarDir !== undefined) return tarDir\n if (!path) return undefined\n const i = path.indexOf('/', path.startsWith('./') ? 2 : 0)\n if (i === -1) return undefined\n const chomp = path.substring(0, i)\n if (\n chomp === '.' ||\n chomp === '..' ||\n chomp === '' ||\n chomp === './.' ||\n chomp === './..' ||\n chomp === './'\n ) {\n return undefined\n }\n return chomp + '/'\n}\n"]}
@@ -0,0 +1,4 @@
1
+ export * from './unpack.js';
2
+ export * from './pool.js';
3
+ export * from './unpack-request.js';
4
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,cAAc,WAAW,CAAA;AACzB,cAAc,qBAAqB,CAAA"}
@@ -0,0 +1,4 @@
1
+ export * from './unpack.js';
2
+ export * from './pool.js';
3
+ export * from './unpack-request.js';
4
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,cAAc,WAAW,CAAA;AACzB,cAAc,qBAAqB,CAAA","sourcesContent":["export * from './unpack.js'\nexport * from './pool.js'\nexport * from './unpack-request.js'\n"]}
@@ -0,0 +1,3 @@
1
+ {
2
+ "type": "module"
3
+ }
@@ -0,0 +1,41 @@
1
+ import { UnpackRequest } from './unpack-request.js';
2
+ import { Worker } from './worker.js';
3
+ export * from './worker.js';
4
+ /**
5
+ * Automatically expanding/contracting set of workers to maximize parallelism
6
+ * of unpack operations up to 1 less than the number of CPUs (or 1).
7
+ *
8
+ * `pool.unpack(tarData, target)` will perform the unpack operation
9
+ * synchronously, in one of these workers, and returns a promise when the
10
+ * worker has confirmed completion of the task.
11
+ */
12
+ export declare class Pool {
13
+ #private;
14
+ /**
15
+ * Number of workers to employ. Defaults to 1 less than the number of
16
+ * CPUs, or 1.
17
+ */
18
+ jobs: number;
19
+ /**
20
+ * Set of currently active worker threads
21
+ */
22
+ workers: Set<Worker>;
23
+ /**
24
+ * Queue of requests awaiting an available worker
25
+ */
26
+ queue: UnpackRequest[];
27
+ /**
28
+ * Requests that have been assigned to a worker, but have not yet
29
+ * been confirmed completed.
30
+ */
31
+ pending: Map<number, UnpackRequest>;
32
+ /**
33
+ * Provide the tar data to be unpacked, and the location where it's to be
34
+ * placed. Will create a new worker up to the `jobs` value, and then start
35
+ * pushing in the queue for workers to pick up as they become available.
36
+ *
37
+ * Returned promise resolves when the provided tarball has been extracted.
38
+ */
39
+ unpack(tarData: Buffer, target: string): Promise<void>;
40
+ }
41
+ //# sourceMappingURL=pool.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../../src/pool.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAIL,MAAM,EACP,MAAM,aAAa,CAAA;AAEpB,cAAc,aAAa,CAAA;AAE3B;;;;;;;GAOG;AACH,qBAAa,IAAI;;IACf;;;OAGG;IAEH,IAAI,EAAE,MAAM,CAAmD;IAC/D;;OAEG;IACH,OAAO,cAAoB;IAC3B;;OAEG;IACH,KAAK,EAAE,aAAa,EAAE,CAAK;IAC3B;;;OAGG;IACH,OAAO,6BAAmC;IAsC1C;;;;;;OAMG;IACG,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;CAU7C"}
@@ -0,0 +1,88 @@
1
+ import { error } from '@vltpkg/error-cause';
2
+ import os from 'os';
3
+ import { UnpackRequest } from './unpack-request.js';
4
+ import { isResponseOK, Worker, } from './worker.js';
5
+ export * from './worker.js';
6
+ /**
7
+ * Automatically expanding/contracting set of workers to maximize parallelism
8
+ * of unpack operations up to 1 less than the number of CPUs (or 1).
9
+ *
10
+ * `pool.unpack(tarData, target)` will perform the unpack operation
11
+ * synchronously, in one of these workers, and returns a promise when the
12
+ * worker has confirmed completion of the task.
13
+ */
14
+ export class Pool {
15
+ /**
16
+ * Number of workers to employ. Defaults to 1 less than the number of
17
+ * CPUs, or 1.
18
+ */
19
+ /* c8 ignore next */
20
+ jobs = 8 * (Math.max(os.availableParallelism(), 2) - 1);
21
+ /**
22
+ * Set of currently active worker threads
23
+ */
24
+ workers = new Set();
25
+ /**
26
+ * Queue of requests awaiting an available worker
27
+ */
28
+ queue = [];
29
+ /**
30
+ * Requests that have been assigned to a worker, but have not yet
31
+ * been confirmed completed.
32
+ */
33
+ pending = new Map();
34
+ // handle a message from the worker
35
+ #onMessage(w, m) {
36
+ const { id } = m;
37
+ // a request has been met or failed, report and either
38
+ // pick up the next item in the queue, or terminate worker
39
+ const ur = this.pending.get(id);
40
+ /* c8 ignore next */
41
+ if (!ur)
42
+ return;
43
+ if (isResponseOK(m)) {
44
+ ur.resolve();
45
+ /* c8 ignore start - nearly impossible in normal circumstances */
46
+ }
47
+ else {
48
+ const er = m.error instanceof Error ? m.error.message : String(m.error);
49
+ const cause = { found: m };
50
+ if (m.error instanceof Error)
51
+ cause.cause = m.error;
52
+ ur.reject(error(er || 'failed without error message', cause));
53
+ }
54
+ /* c8 ignore stop */
55
+ const next = this.queue.shift();
56
+ if (!next) {
57
+ this.workers.delete(w);
58
+ }
59
+ else {
60
+ void w.process(next);
61
+ }
62
+ }
63
+ // create a new worker
64
+ #createWorker(req) {
65
+ const w = new Worker((m) => this.#onMessage(w, m));
66
+ this.workers.add(w);
67
+ void w.process(req);
68
+ }
69
+ /**
70
+ * Provide the tar data to be unpacked, and the location where it's to be
71
+ * placed. Will create a new worker up to the `jobs` value, and then start
72
+ * pushing in the queue for workers to pick up as they become available.
73
+ *
74
+ * Returned promise resolves when the provided tarball has been extracted.
75
+ */
76
+ async unpack(tarData, target) {
77
+ const ur = new UnpackRequest(tarData, target);
78
+ this.pending.set(ur.id, ur);
79
+ if (this.workers.size < this.jobs) {
80
+ this.#createWorker(ur);
81
+ }
82
+ else {
83
+ this.queue.push(ur);
84
+ }
85
+ return ur.promise;
86
+ }
87
+ }
88
+ //# sourceMappingURL=pool.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pool.js","sourceRoot":"","sources":["../../src/pool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAoB,MAAM,qBAAqB,CAAA;AAC7D,OAAO,EAAE,MAAM,IAAI,CAAA;AACnB,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EACL,YAAY,EAGZ,MAAM,GACP,MAAM,aAAa,CAAA;AAEpB,cAAc,aAAa,CAAA;AAE3B;;;;;;;GAOG;AACH,MAAM,OAAO,IAAI;IACf;;;OAGG;IACH,oBAAoB;IACpB,IAAI,GAAW,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,oBAAoB,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;IAC/D;;OAEG;IACH,OAAO,GAAG,IAAI,GAAG,EAAU,CAAA;IAC3B;;OAEG;IACH,KAAK,GAAoB,EAAE,CAAA;IAC3B;;;OAGG;IACH,OAAO,GAAG,IAAI,GAAG,EAAyB,CAAA;IAE1C,mCAAmC;IACnC,UAAU,CAAC,CAAS,EAAE,CAA6B;QACjD,MAAM,EAAE,EAAE,EAAE,GAAG,CAAC,CAAA;QAChB,sDAAsD;QACtD,0DAA0D;QAC1D,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QAC/B,oBAAoB;QACpB,IAAI,CAAC,EAAE;YAAE,OAAM;QACf,IAAI,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC;YACpB,EAAE,CAAC,OAAO,EAAE,CAAA;YACZ,iEAAiE;QACnE,CAAC;aAAM,CAAC;YACN,MAAM,EAAE,GACN,CAAC,CAAC,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;YAC9D,MAAM,KAAK,GAAqB,EAAE,KAAK,EAAE,CAAC,EAAE,CAAA;YAC5C,IAAI,CAAC,CAAC,KAAK,YAAY,KAAK;gBAAE,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,CAAA;YACnD,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,IAAI,8BAA8B,EAAE,KAAK,CAAC,CAAC,CAAA;QAC/D,CAAC;QACD,oBAAoB;QACpB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAA;QAC/B,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACxB,CAAC;aAAM,CAAC;YACN,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAA;QACtB,CAAC;IACH,CAAC;IAED,sBAAsB;IACtB,aAAa,CAAC,GAAkB;QAC9B,MAAM,CAAC,GAAW,IAAI,MAAM,CAAC,CAAC,CAA6B,EAAE,EAAE,CAC7D,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CACtB,CAAA;QACD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACnB,KAAK,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;IACrB,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,MAAM,CAAC,OAAe,EAAE,MAAc;QAC1C,MAAM,EAAE,GAAG,IAAI,aAAa,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;QAC7C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;QAC3B,IAAI,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IA
AI,CAAC,IAAI,EAAE,CAAC;YAClC,IAAI,CAAC,aAAa,CAAC,EAAE,CAAC,CAAA;QACxB,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACrB,CAAC;QACD,OAAO,EAAE,CAAC,OAAO,CAAA;IACnB,CAAC;CACF","sourcesContent":["import { error, ErrorCauseObject } from '@vltpkg/error-cause'\nimport os from 'os'\nimport { UnpackRequest } from './unpack-request.js'\nimport {\n isResponseOK,\n ResponseError,\n ResponseOK,\n Worker,\n} from './worker.js'\n\nexport * from './worker.js'\n\n/**\n * Automatically expanding/contracting set of workers to maximize parallelism\n * of unpack operations up to 1 less than the number of CPUs (or 1).\n *\n * `pool.unpack(tarData, target)` will perform the unpack operation\n * synchronously, in one of these workers, and returns a promise when the\n * worker has confirmed completion of the task.\n */\nexport class Pool {\n /**\n * Number of workers to emplly. Defaults to 1 less than the number of\n * CPUs, or 1.\n */\n /* c8 ignore next */\n jobs: number = 8 * (Math.max(os.availableParallelism(), 2) - 1)\n /**\n * Set of currently active worker threads\n */\n workers = new Set<Worker>()\n /**\n * Queue of requests awaiting an available worker\n */\n queue: UnpackRequest[] = []\n /**\n * Requests that have been assigned to a worker, but have not yet\n * been confirmed completed.\n */\n pending = new Map<number, UnpackRequest>()\n\n // handle a message from the worker\n #onMessage(w: Worker, m: ResponseError | ResponseOK) {\n const { id } = m\n // a request has been met or failed, report and either\n // pick up the next item in the queue, or terminate worker\n const ur = this.pending.get(id)\n /* c8 ignore next */\n if (!ur) return\n if (isResponseOK(m)) {\n ur.resolve()\n /* c8 ignore start - nearly impossible in normal circumstances */\n } else {\n const er =\n m.error instanceof Error ? 
m.error.message : String(m.error)\n const cause: ErrorCauseObject = { found: m }\n if (m.error instanceof Error) cause.cause = m.error\n ur.reject(error(er || 'failed without error message', cause))\n }\n /* c8 ignore stop */\n const next = this.queue.shift()\n if (!next) {\n this.workers.delete(w)\n } else {\n void w.process(next)\n }\n }\n\n // create a new worker\n #createWorker(req: UnpackRequest) {\n const w: Worker = new Worker((m: ResponseError | ResponseOK) =>\n this.#onMessage(w, m),\n )\n this.workers.add(w)\n void w.process(req)\n }\n\n /**\n * Provide the tardata to be unpacked, and the location where it's to be\n * placed. Will create a new worker up to the `jobs` value, and then start\n * pushing in the queue for workers to pick up as they become available.\n *\n * Returned promise resolves when the provided tarball has been extracted.\n */\n async unpack(tarData: Buffer, target: string) {\n const ur = new UnpackRequest(tarData, target)\n this.pending.set(ur.id, ur)\n if (this.workers.size < this.jobs) {\n this.#createWorker(ur)\n } else {\n this.queue.push(ur)\n }\n return ur.promise\n }\n}\n"]}
@@ -0,0 +1,10 @@
1
+ export declare class UnpackRequest {
2
+ id: number;
3
+ tarData: Buffer;
4
+ target: string;
5
+ resolve: () => void;
6
+ reject: (reason?: any) => void;
7
+ promise: Promise<void>;
8
+ constructor(tarData: Buffer, target: string);
9
+ }
10
+ //# sourceMappingURL=unpack-request.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"unpack-request.d.ts","sourceRoot":"","sources":["../../src/unpack-request.ts"],"names":[],"mappings":"AACA,qBAAa,aAAa;IACxB,EAAE,EAAE,MAAM,CAAO;IACjB,OAAO,EAAE,MAAM,CAAA;IACf,MAAM,EAAE,MAAM,CAAA;IACd,OAAO,EAAG,MAAM,IAAI,CAAA;IACpB,MAAM,EAAG,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,IAAI,CAAA;IAC/B,OAAO,gBAGL;gBACU,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;CAI5C"}
@@ -0,0 +1,17 @@
1
+ let ID = 1;
2
+ export class UnpackRequest {
3
+ id = ID++;
4
+ tarData;
5
+ target;
6
+ resolve;
7
+ reject;
8
+ promise = new Promise((res, rej) => {
9
+ this.resolve = res;
10
+ this.reject = rej;
11
+ });
12
+ constructor(tarData, target) {
13
+ this.tarData = tarData;
14
+ this.target = target;
15
+ }
16
+ }
17
+ //# sourceMappingURL=unpack-request.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"unpack-request.js","sourceRoot":"","sources":["../../src/unpack-request.ts"],"names":[],"mappings":"AAAA,IAAI,EAAE,GAAG,CAAC,CAAA;AACV,MAAM,OAAO,aAAa;IACxB,EAAE,GAAW,EAAE,EAAE,CAAA;IACjB,OAAO,CAAQ;IACf,MAAM,CAAQ;IACd,OAAO,CAAa;IACpB,MAAM,CAAyB;IAC/B,OAAO,GAAG,IAAI,OAAO,CAAO,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;QACvC,IAAI,CAAC,OAAO,GAAG,GAAG,CAAA;QAClB,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;IACnB,CAAC,CAAC,CAAA;IACF,YAAY,OAAe,EAAE,MAAc;QACzC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;CACF","sourcesContent":["let ID = 1\nexport class UnpackRequest {\n id: number = ID++\n tarData: Buffer\n target: string\n resolve!: () => void\n reject!: (reason?: any) => void\n promise = new Promise<void>((res, rej) => {\n this.resolve = res\n this.reject = rej\n })\n constructor(tarData: Buffer, target: string) {\n this.tarData = tarData\n this.target = target\n }\n}\n"]}
@@ -0,0 +1,2 @@
1
+ export declare const unpack: (tarData: Buffer, target: string) => Promise<void>;
2
+ //# sourceMappingURL=unpack.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"unpack.d.ts","sourceRoot":"","sources":["../../src/unpack.ts"],"names":[],"mappings":"AA+EA,eAAO,MAAM,MAAM,YACR,MAAM,UACP,MAAM,KACb,OAAO,CAAC,IAAI,CAMd,CAAA"}
@@ -0,0 +1,179 @@
1
+ import { error } from '@vltpkg/error-cause';
2
+ import { randomBytes } from 'crypto';
3
+ import { lstat, mkdir, rename, writeFile } from 'fs/promises';
4
+ import { basename, dirname, parse, resolve } from 'path';
5
+ import { rimraf } from 'rimraf';
6
+ import { Header } from 'tar/header';
7
+ import { Pax } from 'tar/pax';
8
+ import { unzip as unzipCB } from 'zlib';
9
+ import { findTarDir } from './find-tar-dir.js';
10
+ const unzip = async (input) => new Promise((res, rej) =>
11
+ /* c8 ignore start */
12
+ unzipCB(input, (er, result) => (er ? rej(er) : res(result))));
13
+ const exists = async (path) => {
14
+ try {
15
+ await lstat(path);
16
+ return true;
17
+ }
18
+ catch {
19
+ return false;
20
+ }
21
+ };
22
+ let id = 1;
23
+ const tmp = randomBytes(6).toString('hex') + '.';
24
+ const tmpSuffix = () => tmp + String(id++);
25
+ const checkFs = (h, tarDir) => {
26
+ /* c8 ignore start - impossible */
27
+ if (!h.path)
28
+ return false;
29
+ if (!tarDir)
30
+ return false;
31
+ /* c8 ignore stop */
32
+ h.path = h.path.replace(/[\\/]+/g, '/');
33
+ const parsed = parse(h.path);
34
+ if (parsed.root)
35
+ return false;
36
+ const p = h.path.replace(/\\/, '/');
37
+ // any .. at the beginning, end, or middle = no good
38
+ if (/(\/|)^\.\.(\/|$)/.test(p))
39
+ return false;
40
+ // packages should always be in a 'package' tarDir in the archive
41
+ if (!p.startsWith(tarDir))
42
+ return false;
43
+ return true;
44
+ };
45
+ const write = async (path, body, executable = false) => {
46
+ await mkdirp(dirname(path));
47
+ // if the mode is world-executable, then make it executable
48
+ // this is needed for some packages that have a file that is
49
+ // not a declared bin, but still used as a cli executable.
50
+ await writeFile(path, body, {
51
+ mode: executable ? 0o777 : 0o666,
52
+ });
53
+ };
54
+ const made = new Set();
55
+ const making = new Map();
56
+ const mkdirp = async (d) => {
57
+ if (!made.has(d)) {
58
+ const m = making.get(d) ??
59
+ mkdir(d, { recursive: true, mode: 0o777 }).then(() => making.delete(d));
60
+ making.set(d, m);
61
+ await m;
62
+ made.add(d);
63
+ }
64
+ };
65
+ export const unpack = async (tarData, target) => {
66
+ const isGzip = tarData[0] === 0x1f && tarData[1] === 0x8b;
67
+ await unpackUnzipped(isGzip ? await unzip(tarData) : tarData, target);
68
+ };
69
+ const unpackUnzipped = async (buffer, target) => {
70
+ /* c8 ignore start */
71
+ const isGzip = buffer[0] === 0x1f && buffer[1] === 0x8b;
72
+ if (isGzip) {
73
+ throw error('still gzipped after unzipping', {
74
+ found: isGzip,
75
+ wanted: false,
76
+ });
77
+ }
78
+ /* c8 ignore stop */
79
+ // another real quick gutcheck before we get started
80
+ if (buffer.length % 512 !== 0) {
81
+ throw error('Invalid tarball: length not divisible by 512', {
82
+ found: buffer.length,
83
+ });
84
+ }
85
+ if (buffer.length < 1024) {
86
+ throw error('Invalid tarball: not terminated by 1024 null bytes', { found: buffer.length });
87
+ }
88
+ // make sure the last kb is all zeros
89
+ for (let i = buffer.length - 1024; i < buffer.length; i++) {
90
+ if (buffer[i] !== 0) {
91
+ throw error('Invalid tarball: not terminated by 1024 null bytes', { found: buffer.subarray(i, i + 10) });
92
+ }
93
+ }
94
+ const tmp = dirname(target) + '/.' + basename(target) + '.' + tmpSuffix();
95
+ const og = tmp + '.ORIGINAL';
96
+ await Promise.all([rimraf(tmp), rimraf(og)]);
97
+ let succeeded = false;
98
+ try {
99
+ let tarDir = undefined;
100
+ let offset = 0;
101
+ let h;
102
+ let ex = undefined;
103
+ let gex = undefined;
104
+ while (offset < buffer.length &&
105
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
106
+ (h = new Header(buffer, offset, ex, gex)) &&
107
+ !h.nullBlock) {
108
+ offset += 512;
109
+ ex = undefined;
110
+ gex = undefined;
111
+ const size = h.size ?? 0;
112
+ const body = buffer.subarray(offset, offset + size);
113
+ // skip invalid headers
114
+ if (!h.cksumValid)
115
+ continue;
116
+ offset += 512 * Math.ceil(size / 512);
117
+ // TODO: tarDir might not be named "package/"
118
+ // find the first tarDir in the first entry, and use that.
119
+ switch (h.type) {
120
+ case 'File':
121
+ if (!tarDir)
122
+ tarDir = findTarDir(h.path, tarDir);
123
+ /* c8 ignore next */
124
+ if (!tarDir)
125
+ continue;
126
+ if (!checkFs(h, tarDir))
127
+ continue;
128
+ await write(resolve(tmp, h.path.substring(tarDir.length)), body,
129
+ // if it's world-executable, it's an executable
130
+ // otherwise, make it read-only.
131
+ 1 === ((h.mode ?? 0x666) & 1));
132
+ break;
133
+ case 'Directory':
134
+ /* c8 ignore next 2 */
135
+ if (!tarDir)
136
+ tarDir = findTarDir(h.path, tarDir);
137
+ if (!tarDir)
138
+ continue;
139
+ if (!checkFs(h, tarDir))
140
+ continue;
141
+ await mkdirp(resolve(tmp, h.path.substring(tarDir.length)));
142
+ break;
143
+ case 'GlobalExtendedHeader':
144
+ gex = Pax.parse(body.toString(), gex, true);
145
+ break;
146
+ case 'ExtendedHeader':
147
+ case 'OldExtendedHeader':
148
+ ex = Pax.parse(body.toString(), ex, false);
149
+ break;
150
+ case 'NextFileHasLongPath':
151
+ case 'OldGnuLongPath':
152
+ ex ??= Object.create(null);
153
+ ex.path = body.toString().replace(/\0.*/, '');
154
+ break;
155
+ }
156
+ }
157
+ const targetExists = await exists(target);
158
+ if (targetExists)
159
+ await rename(target, og);
160
+ await rename(tmp, target);
161
+ if (targetExists)
162
+ await rimraf(og);
163
+ succeeded = true;
164
+ }
165
+ finally {
166
+ // do not handle error or obscure throw site, just do the cleanup
167
+ // if it didn't complete successfully.
168
+ if (!succeeded) {
169
+ /* c8 ignore start */
170
+ if (await exists(og)) {
171
+ await rimraf(target);
172
+ await rename(og, target);
173
+ }
174
+ /* c8 ignore stop */
175
+ await rimraf(tmp);
176
+ }
177
+ }
178
+ };
179
+ //# sourceMappingURL=unpack.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"unpack.js","sourceRoot":"","sources":["../../src/unpack.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAA;AAC3C,OAAO,EAAE,WAAW,EAAE,MAAM,QAAQ,CAAA;AACpC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,aAAa,CAAA;AAC7D,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACxD,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAc,MAAM,YAAY,CAAA;AAC/C,OAAO,EAAE,GAAG,EAAE,MAAM,SAAS,CAAA;AAC7B,OAAO,EAAE,KAAK,IAAI,OAAO,EAAE,MAAM,MAAM,CAAA;AACvC,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAA;AAE9C,MAAM,KAAK,GAAG,KAAK,EAAE,KAAa,EAAE,EAAE,CACpC,IAAI,OAAO,CACT,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;AACX,qBAAqB;AACrB,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAE/D,CAAA;AAEH,MAAM,MAAM,GAAG,KAAK,EAAE,IAAY,EAAoB,EAAE;IACtD,IAAI,CAAC;QACH,MAAM,KAAK,CAAC,IAAI,CAAC,CAAA;QACjB,OAAO,IAAI,CAAA;IACb,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,IAAI,EAAE,GAAG,CAAC,CAAA;AACV,MAAM,GAAG,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,GAAG,CAAA;AAChD,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,GAAG,GAAG,MAAM,CAAC,EAAE,EAAE,CAAC,CAAA;AAE1C,MAAM,OAAO,GAAG,CACd,CAAS,EACT,MAA0B,EACM,EAAE;IAClC,kCAAkC;IAClC,IAAI,CAAC,CAAC,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IACzB,IAAI,CAAC,MAAM;QAAE,OAAO,KAAK,CAAA;IACzB,oBAAoB;IACpB,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;IACvC,MAAM,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IAC7B,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACnC,oDAAoD;IACpD,IAAI,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC;QAAE,OAAO,KAAK,CAAA;IAC5C,iEAAiE;IACjE,IAAI,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC;QAAE,OAAO,KAAK,CAAA;IACvC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,KAAK,EACjB,IAAY,EACZ,IAAY,EACZ,UAAU,GAAG,KAAK,EAClB,EAAE;IACF,MAAM,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3B,2DAA2D;IAC3D,4DAA4D;IAC5D,0DAA0D;IAC1D,MAAM,SAAS,CAAC
,IAAI,EAAE,IAAI,EAAE;QAC1B,IAAI,EAAE,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK;KACjC,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,IAAI,GAAG,EAAU,CAAA;AAC9B,MAAM,MAAM,GAAG,IAAI,GAAG,EAA4B,CAAA;AAClD,MAAM,MAAM,GAAG,KAAK,EAAE,CAAS,EAAE,EAAE;IACjC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;QACjB,MAAM,CAAC,GACL,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;YACb,KAAK,CAAC,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CACnD,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CACjB,CAAA;QACH,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAChB,MAAM,CAAC,CAAA;QACP,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACb,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,KAAK,EACzB,OAAe,EACf,MAAc,EACC,EAAE;IACjB,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACzD,MAAM,cAAc,CAClB,MAAM,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,EACvC,MAAM,CACP,CAAA;AACH,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,MAAc,EACd,MAAc,EACC,EAAE;IACjB,qBAAqB;IACrB,MAAM,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACvD,IAAI,MAAM,EAAE,CAAC;QACX,MAAM,KAAK,CAAC,+BAA+B,EAAE;YAC3C,KAAK,EAAE,MAAM;YACb,MAAM,EAAE,KAAK;SACd,CAAC,CAAA;IACJ,CAAC;IACD,oBAAoB;IAEpB,oDAAoD;IACpD,IAAI,MAAM,CAAC,MAAM,GAAG,GAAG,KAAK,CAAC,EAAE,CAAC;QAC9B,MAAM,KAAK,CAAC,8CAA8C,EAAE;YAC1D,KAAK,EAAE,MAAM,CAAC,MAAM;SACrB,CAAC,CAAA;IACJ,CAAC;IACD,IAAI,MAAM,CAAC,MAAM,GAAG,IAAI,EAAE,CAAC;QACzB,MAAM,KAAK,CACT,oDAAoD,EACpD,EAAE,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,CACzB,CAAA;IACH,CAAC;IACD,qCAAqC;IACrC,KAAK,IAAI,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,IAAI,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;YACpB,MAAM,KAAK,CACT,oDAAoD,EACpD,EAAE,KAAK,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,EAAE,CACtC,CAAA;QACH,CAAC;IACH,CAAC;IAED,MAAM,GAAG,GACP,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,SAAS,EAAE,CAAA;IAC/D,MAAM,EAAE,GAAG,GAAG,GAAG,WAAW,CAAA;IAC5B,MAAM,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,GAA
G,CAAC,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;IAE5C,IAAI,SAAS,GAAG,KAAK,CAAA;IACrB,IAAI,CAAC;QACH,IAAI,MAAM,GAAuB,SAAS,CAAA;QAC1C,IAAI,MAAM,GAAG,CAAC,CAAA;QACd,IAAI,CAAS,CAAA;QACb,IAAI,EAAE,GAA2B,SAAS,CAAA;QAC1C,IAAI,GAAG,GAA2B,SAAS,CAAA;QAC3C,OACE,MAAM,GAAG,MAAM,CAAC,MAAM;YACtB,uEAAuE;YACvE,CAAC,CAAC,GAAG,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,EAAE,EAAE,GAAG,CAAC,CAAC;YACzC,CAAC,CAAC,CAAC,SAAS,EACZ,CAAC;YACD,MAAM,IAAI,GAAG,CAAA;YACb,EAAE,GAAG,SAAS,CAAA;YACd,GAAG,GAAG,SAAS,CAAA;YACf,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAA;YACxB,MAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,CAAA;YACnD,uBAAuB;YACvB,IAAI,CAAC,CAAC,CAAC,UAAU;gBAAE,SAAQ;YAC3B,MAAM,IAAI,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC,CAAA;YAErC,6CAA6C;YAC7C,0DAA0D;YAC1D,QAAQ,CAAC,CAAC,IAAI,EAAE,CAAC;gBACf,KAAK,MAAM;oBACT,IAAI,CAAC,MAAM;wBAAE,MAAM,GAAG,UAAU,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;oBAChD,oBAAoB;oBACpB,IAAI,CAAC,MAAM;wBAAE,SAAQ;oBACrB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,MAAM,CAAC;wBAAE,SAAQ;oBACjC,MAAM,KAAK,CACT,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAC7C,IAAI;oBACJ,+CAA+C;oBAC/C,gCAAgC;oBAChC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAC9B,CAAA;oBACD,MAAK;gBAEP,KAAK,WAAW;oBACd,sBAAsB;oBACtB,IAAI,CAAC,MAAM;wBAAE,MAAM,GAAG,UAAU,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;oBAChD,IAAI,CAAC,MAAM;wBAAE,SAAQ;oBACrB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,MAAM,CAAC;wBAAE,SAAQ;oBACjC,MAAM,MAAM,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;oBAC3D,MAAK;gBAEP,KAAK,sBAAsB;oBACzB,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,GAAG,EAAE,IAAI,CAAC,CAAA;oBAC3C,MAAK;gBAEP,KAAK,gBAAgB,CAAC;gBACtB,KAAK,mBAAmB;oBACtB,EAAE,GAAG,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,EAAE,KAAK,CAAC,CAAA;oBAC1C,MAAK;gBAEP,KAAK,qBAAqB,CAAC;gBAC3B,KAAK,gBAAgB;oBACnB,EAAE,KAAK,MAAM,CAAC,MAAM,CAAC,IAAI,CAAe,CAAA;oBACxC,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAAA;oBAC7C,MAAK;Y
ACT,CAAC;QACH,CAAC;QAED,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,CAAA;QACzC,IAAI,YAAY;YAAE,MAAM,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAA;QAC1C,MAAM,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;QACzB,IAAI,YAAY;YAAE,MAAM,MAAM,CAAC,EAAE,CAAC,CAAA;QAClC,SAAS,GAAG,IAAI,CAAA;IAClB,CAAC;YAAS,CAAC;QACT,iEAAiE;QACjE,sCAAsC;QACtC,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,qBAAqB;YACrB,IAAI,MAAM,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC;gBACrB,MAAM,MAAM,CAAC,MAAM,CAAC,CAAA;gBACpB,MAAM,MAAM,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;YAC1B,CAAC;YACD,oBAAoB;YACpB,MAAM,MAAM,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC;IACH,CAAC;AACH,CAAC,CAAA","sourcesContent":["import { error } from '@vltpkg/error-cause'\nimport { randomBytes } from 'crypto'\nimport { lstat, mkdir, rename, writeFile } from 'fs/promises'\nimport { basename, dirname, parse, resolve } from 'path'\nimport { rimraf } from 'rimraf'\nimport { Header, HeaderData } from 'tar/header'\nimport { Pax } from 'tar/pax'\nimport { unzip as unzipCB } from 'zlib'\nimport { findTarDir } from './find-tar-dir.js'\n\nconst unzip = async (input: Buffer) =>\n new Promise<Buffer>(\n (res, rej) =>\n /* c8 ignore start */\n unzipCB(input, (er, result) => (er ? rej(er) : res(result))),\n /* c8 ignore stop */\n )\n\nconst exists = async (path: string): Promise<boolean> => {\n try {\n await lstat(path)\n return true\n } catch {\n return false\n }\n}\n\nlet id = 1\nconst tmp = randomBytes(6).toString('hex') + '.'\nconst tmpSuffix = () => tmp + String(id++)\n\nconst checkFs = (\n h: Header,\n tarDir: string | undefined,\n): h is Header & { path: string } => {\n /* c8 ignore start - impossible */\n if (!h.path) return false\n if (!tarDir) return false\n /* c8 ignore stop */\n h.path = h.path.replace(/[\\\\/]+/g, '/')\n const parsed = parse(h.path)\n if (parsed.root) return false\n const p = h.path.replace(/\\\\/, '/')\n // any .. 
at the beginning, end, or middle = no good\n if (/(\\/|)^\\.\\.(\\/|$)/.test(p)) return false\n // packages should always be in a 'package' tarDir in the archive\n if (!p.startsWith(tarDir)) return false\n return true\n}\n\nconst write = async (\n path: string,\n body: Buffer,\n executable = false,\n) => {\n await mkdirp(dirname(path))\n // if the mode is world-executable, then make it executable\n // this is needed for some packages that have a file that is\n // not a declared bin, but still used as a cli executable.\n await writeFile(path, body, {\n mode: executable ? 0o777 : 0o666,\n })\n}\n\nconst made = new Set<string>()\nconst making = new Map<string, Promise<boolean>>()\nconst mkdirp = async (d: string) => {\n if (!made.has(d)) {\n const m =\n making.get(d) ??\n mkdir(d, { recursive: true, mode: 0o777 }).then(() =>\n making.delete(d),\n )\n making.set(d, m)\n await m\n made.add(d)\n }\n}\n\nexport const unpack = async (\n tarData: Buffer,\n target: string,\n): Promise<void> => {\n const isGzip = tarData[0] === 0x1f && tarData[1] === 0x8b\n await unpackUnzipped(\n isGzip ? 
await unzip(tarData) : tarData,\n target,\n )\n}\n\nconst unpackUnzipped = async (\n buffer: Buffer,\n target: string,\n): Promise<void> => {\n /* c8 ignore start */\n const isGzip = buffer[0] === 0x1f && buffer[1] === 0x8b\n if (isGzip) {\n throw error('still gzipped after unzipping', {\n found: isGzip,\n wanted: false,\n })\n }\n /* c8 ignore stop */\n\n // another real quick gutcheck before we get started\n if (buffer.length % 512 !== 0) {\n throw error('Invalid tarball: length not divisible by 512', {\n found: buffer.length,\n })\n }\n if (buffer.length < 1024) {\n throw error(\n 'Invalid tarball: not terminated by 1024 null bytes',\n { found: buffer.length },\n )\n }\n // make sure the last kb is all zeros\n for (let i = buffer.length - 1024; i < buffer.length; i++) {\n if (buffer[i] !== 0) {\n throw error(\n 'Invalid tarball: not terminated by 1024 null bytes',\n { found: buffer.subarray(i, i + 10) },\n )\n }\n }\n\n const tmp =\n dirname(target) + '/.' + basename(target) + '.' + tmpSuffix()\n const og = tmp + '.ORIGINAL'\n await Promise.all([rimraf(tmp), rimraf(og)])\n\n let succeeded = false\n try {\n let tarDir: string | undefined = undefined\n let offset = 0\n let h: Header\n let ex: HeaderData | undefined = undefined\n let gex: HeaderData | undefined = undefined\n while (\n offset < buffer.length &&\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n (h = new Header(buffer, offset, ex, gex)) &&\n !h.nullBlock\n ) {\n offset += 512\n ex = undefined\n gex = undefined\n const size = h.size ?? 
0\n const body = buffer.subarray(offset, offset + size)\n // skip invalid headers\n if (!h.cksumValid) continue\n offset += 512 * Math.ceil(size / 512)\n\n // TODO: tarDir might not be named \"package/\"\n // find the first tarDir in the first entry, and use that.\n switch (h.type) {\n case 'File':\n if (!tarDir) tarDir = findTarDir(h.path, tarDir)\n /* c8 ignore next */\n if (!tarDir) continue\n if (!checkFs(h, tarDir)) continue\n await write(\n resolve(tmp, h.path.substring(tarDir.length)),\n body,\n // if it's world-executable, it's an executable\n // otherwise, make it read-only.\n 1 === ((h.mode ?? 0x666) & 1),\n )\n break\n\n case 'Directory':\n /* c8 ignore next 2 */\n if (!tarDir) tarDir = findTarDir(h.path, tarDir)\n if (!tarDir) continue\n if (!checkFs(h, tarDir)) continue\n await mkdirp(resolve(tmp, h.path.substring(tarDir.length)))\n break\n\n case 'GlobalExtendedHeader':\n gex = Pax.parse(body.toString(), gex, true)\n break\n\n case 'ExtendedHeader':\n case 'OldExtendedHeader':\n ex = Pax.parse(body.toString(), ex, false)\n break\n\n case 'NextFileHasLongPath':\n case 'OldGnuLongPath':\n ex ??= Object.create(null) as HeaderData\n ex.path = body.toString().replace(/\\0.*/, '')\n break\n }\n }\n\n const targetExists = await exists(target)\n if (targetExists) await rename(target, og)\n await rename(tmp, target)\n if (targetExists) await rimraf(og)\n succeeded = true\n } finally {\n // do not handle error or obscure throw site, just do the cleanup\n // if it didn't complete successfully.\n if (!succeeded) {\n /* c8 ignore start */\n if (await exists(og)) {\n await rimraf(target)\n await rename(og, target)\n }\n /* c8 ignore stop */\n await rimraf(tmp)\n }\n }\n}\n"]}
@@ -0,0 +1,20 @@
1
+ import { UnpackRequest } from './unpack-request.js';
2
+ export type ResponseError = {
3
+ id: number;
4
+ error: unknown;
5
+ };
6
+ export type ResponseOK = {
7
+ id: number;
8
+ ok: true;
9
+ };
10
+ export declare const isResponseOK: (o: any) => o is ResponseOK;
11
+ /**
12
+ * Basically just a queue of unpack requests,
13
+ * to keep them throttled to a reasonable amount of parallelism
14
+ */
15
+ export declare class Worker {
16
+ onMessage: (m: ResponseError | ResponseOK) => void;
17
+ constructor(onMessage: (m: ResponseError | ResponseOK) => void);
18
+ process(req: UnpackRequest): Promise<void>;
19
+ }
20
+ //# sourceMappingURL=worker.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"worker.d.ts","sourceRoot":"","sources":["../../src/worker.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AAGnD,MAAM,MAAM,aAAa,GAAG;IAAE,EAAE,EAAE,MAAM,CAAC;IAAC,KAAK,EAAE,OAAO,CAAA;CAAE,CAAA;AAC1D,MAAM,MAAM,UAAU,GAAG;IAAE,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,IAAI,CAAA;CAAE,CAAA;AAEjD,eAAO,MAAM,YAAY,MAAO,GAAG,KAAG,CAAC,IAAI,UAI5B,CAAA;AAEf;;;GAGG;AACH,qBAAa,MAAM;IACjB,SAAS,EAAE,CAAC,CAAC,EAAE,aAAa,GAAG,UAAU,KAAK,IAAI,CAAA;gBAEtC,SAAS,EAAE,CAAC,CAAC,EAAE,aAAa,GAAG,UAAU,KAAK,IAAI;IAIxD,OAAO,CAAC,GAAG,EAAE,aAAa;CAWjC"}
@@ -0,0 +1,28 @@
1
+ import { unpack } from './unpack.js';
2
+ export const isResponseOK = (o) => !!o &&
3
+ typeof o === 'object' &&
4
+ typeof o.id === 'number' &&
5
+ o.ok === true;
6
+ /**
7
+ * Basically just a queue of unpack requests,
8
+ * to keep them throttled to a reasonable amount of parallelism
9
+ */
10
+ export class Worker {
11
+ onMessage;
12
+ constructor(onMessage) {
13
+ this.onMessage = onMessage;
14
+ }
15
+ async process(req) {
16
+ const { target, tarData, id } = req;
17
+ try {
18
+ await unpack(tarData, target);
19
+ const m = { id, ok: true };
20
+ this.onMessage(m);
21
+ }
22
+ catch (error) {
23
+ const m = { id, error };
24
+ this.onMessage(m);
25
+ }
26
+ }
27
+ }
28
+ //# sourceMappingURL=worker.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"worker.js","sourceRoot":"","sources":["../../src/worker.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAKpC,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,CAAM,EAAmB,EAAE,CACtD,CAAC,CAAC,CAAC;IACH,OAAO,CAAC,KAAK,QAAQ;IACrB,OAAO,CAAC,CAAC,EAAE,KAAK,QAAQ;IACxB,CAAC,CAAC,EAAE,KAAK,IAAI,CAAA;AAEf;;;GAGG;AACH,MAAM,OAAO,MAAM;IACjB,SAAS,CAAyC;IAElD,YAAY,SAAkD;QAC5D,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;IAC5B,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,GAAkB;QAC9B,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,EAAE,EAAE,GAAG,GAAG,CAAA;QACnC,IAAI,CAAC;YACH,MAAM,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;YAC7B,MAAM,CAAC,GAAe,EAAE,EAAE,EAAE,EAAE,EAAE,IAAI,EAAE,CAAA;YACtC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,CAAC,GAAkB,EAAE,EAAE,EAAE,KAAK,EAAE,CAAA;YACtC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnB,CAAC;IACH,CAAC;CACF","sourcesContent":["import { UnpackRequest } from './unpack-request.js'\nimport { unpack } from './unpack.js'\n\nexport type ResponseError = { id: number; error: unknown }\nexport type ResponseOK = { id: number; ok: true }\n\nexport const isResponseOK = (o: any): o is ResponseOK =>\n !!o &&\n typeof o === 'object' &&\n typeof o.id === 'number' &&\n o.ok === true\n\n/**\n * Basically just a queue of unpack requests,\n * to keep them throttled to a reasonable amount of parallelism\n */\nexport class Worker {\n onMessage: (m: ResponseError | ResponseOK) => void\n\n constructor(onMessage: (m: ResponseError | ResponseOK) => void) {\n this.onMessage = onMessage\n }\n\n async process(req: UnpackRequest) {\n const { target, tarData, id } = req\n try {\n await unpack(tarData, target)\n const m: ResponseOK = { id, ok: true }\n this.onMessage(m)\n } catch (error) {\n const m: ResponseError = { id, error }\n this.onMessage(m)\n }\n }\n}\n"]}
package/package.json ADDED
@@ -0,0 +1,90 @@
1
+ {
2
+ "name": "@vltpkg/tar",
3
+ "description": "An extremely limited and very fast tar extractor",
4
+ "version": "0.0.0-0.1730239248325",
5
+ "tshy": {
6
+ "selfLink": false,
7
+ "dialects": [
8
+ "esm"
9
+ ],
10
+ "exports": {
11
+ "./package.json": "./package.json",
12
+ ".": "./src/index.ts",
13
+ "./pool": "./src/pool.ts",
14
+ "./unpack": "./src/unpack.ts",
15
+ "./unpack-request": "./src/unpack-request.ts"
16
+ }
17
+ },
18
+ "dependencies": {
19
+ "rimraf": "^6.0.1",
20
+ "tar": "^7.4.3",
21
+ "@vltpkg/error-cause": "0.0.0-0.1730239248325"
22
+ },
23
+ "devDependencies": {
24
+ "@eslint/js": "^9.8.0",
25
+ "@types/eslint__js": "^8.42.3",
26
+ "@types/node": "^22.4.1",
27
+ "@types/tar": "^6.1.13",
28
+ "eslint": "^9.8.0",
29
+ "mutate-fs": "^2.1.1",
30
+ "pacote": "^18.0.6",
31
+ "prettier": "^3.3.2",
32
+ "tap": "^21.0.1",
33
+ "tshy": "^3.0.2",
34
+ "typescript": "^5.5.4",
35
+ "typescript-eslint": "^8.0.1",
36
+ "@vltpkg/benchmark": "0.0.0-0"
37
+ },
38
+ "license": "BSD-2-Clause-Patent",
39
+ "engines": {
40
+ "node": "20 || >=22"
41
+ },
42
+ "tap": {
43
+ "extends": "../../tap-config.yaml"
44
+ },
45
+ "prettier": "../../.prettierrc.js",
46
+ "module": "./dist/esm/index.js",
47
+ "type": "module",
48
+ "exports": {
49
+ "./package.json": "./package.json",
50
+ ".": {
51
+ "import": {
52
+ "types": "./dist/esm/index.d.ts",
53
+ "default": "./dist/esm/index.js"
54
+ }
55
+ },
56
+ "./pool": {
57
+ "import": {
58
+ "types": "./dist/esm/pool.d.ts",
59
+ "default": "./dist/esm/pool.js"
60
+ }
61
+ },
62
+ "./unpack": {
63
+ "import": {
64
+ "types": "./dist/esm/unpack.d.ts",
65
+ "default": "./dist/esm/unpack.js"
66
+ }
67
+ },
68
+ "./unpack-request": {
69
+ "import": {
70
+ "types": "./dist/esm/unpack-request.d.ts",
71
+ "default": "./dist/esm/unpack-request.js"
72
+ }
73
+ }
74
+ },
75
+ "files": [
76
+ "dist"
77
+ ],
78
+ "scripts": {
79
+ "prebenchmark": "pnpm vlt-benchmark-download-fixtures",
80
+ "benchmark": "node scripts/benchmark.js",
81
+ "format": "prettier --write . --log-level warn --ignore-path ../../.prettierignore --cache",
82
+ "format:check": "prettier --check . --ignore-path ../../.prettierignore --cache",
83
+ "lint": "eslint . --fix",
84
+ "lint:check": "eslint .",
85
+ "presnap": "tshy",
86
+ "snap": "tap",
87
+ "pretest": "tshy",
88
+ "test": "tap"
89
+ }
90
+ }