@vertesia/memory 0.24.0-dev.202601221707
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +13 -0
- package/README.md +473 -0
- package/lib/cjs/Builder.js +186 -0
- package/lib/cjs/Builder.js.map +1 -0
- package/lib/cjs/ContentObject.js +114 -0
- package/lib/cjs/ContentObject.js.map +1 -0
- package/lib/cjs/ContentSource.js +82 -0
- package/lib/cjs/ContentSource.js.map +1 -0
- package/lib/cjs/MemoryPack.js +228 -0
- package/lib/cjs/MemoryPack.js.map +1 -0
- package/lib/cjs/MemoryPackBuilder.js +47 -0
- package/lib/cjs/MemoryPackBuilder.js.map +1 -0
- package/lib/cjs/commands/copy.js +53 -0
- package/lib/cjs/commands/copy.js.map +1 -0
- package/lib/cjs/commands/exec.js +82 -0
- package/lib/cjs/commands/exec.js.map +1 -0
- package/lib/cjs/index.js +28 -0
- package/lib/cjs/index.js.map +1 -0
- package/lib/cjs/package.json +3 -0
- package/lib/cjs/utils/cmdline.js +90 -0
- package/lib/cjs/utils/cmdline.js.map +1 -0
- package/lib/cjs/utils/rewrite.js +166 -0
- package/lib/cjs/utils/rewrite.js.map +1 -0
- package/lib/cjs/utils/stream.js +27 -0
- package/lib/cjs/utils/stream.js.map +1 -0
- package/lib/cjs/utils/tar.js +185 -0
- package/lib/cjs/utils/tar.js.map +1 -0
- package/lib/esm/Builder.js +178 -0
- package/lib/esm/Builder.js.map +1 -0
- package/lib/esm/ContentObject.js +103 -0
- package/lib/esm/ContentObject.js.map +1 -0
- package/lib/esm/ContentSource.js +75 -0
- package/lib/esm/ContentSource.js.map +1 -0
- package/lib/esm/MemoryPack.js +218 -0
- package/lib/esm/MemoryPack.js.map +1 -0
- package/lib/esm/MemoryPackBuilder.js +43 -0
- package/lib/esm/MemoryPackBuilder.js.map +1 -0
- package/lib/esm/commands/copy.js +50 -0
- package/lib/esm/commands/copy.js.map +1 -0
- package/lib/esm/commands/exec.js +75 -0
- package/lib/esm/commands/exec.js.map +1 -0
- package/lib/esm/index.js +7 -0
- package/lib/esm/index.js.map +1 -0
- package/lib/esm/utils/cmdline.js +86 -0
- package/lib/esm/utils/cmdline.js.map +1 -0
- package/lib/esm/utils/rewrite.js +161 -0
- package/lib/esm/utils/rewrite.js.map +1 -0
- package/lib/esm/utils/stream.js +23 -0
- package/lib/esm/utils/stream.js.map +1 -0
- package/lib/esm/utils/tar.js +175 -0
- package/lib/esm/utils/tar.js.map +1 -0
- package/lib/tsconfig.tsbuildinfo +1 -0
- package/lib/types/Builder.d.ts +72 -0
- package/lib/types/Builder.d.ts.map +1 -0
- package/lib/types/ContentObject.d.ts +43 -0
- package/lib/types/ContentObject.d.ts.map +1 -0
- package/lib/types/ContentSource.d.ts +32 -0
- package/lib/types/ContentSource.d.ts.map +1 -0
- package/lib/types/MemoryPack.d.ts +46 -0
- package/lib/types/MemoryPack.d.ts.map +1 -0
- package/lib/types/MemoryPackBuilder.d.ts +18 -0
- package/lib/types/MemoryPackBuilder.d.ts.map +1 -0
- package/lib/types/commands/copy.d.ts +8 -0
- package/lib/types/commands/copy.d.ts.map +1 -0
- package/lib/types/commands/exec.d.ts +7 -0
- package/lib/types/commands/exec.d.ts.map +1 -0
- package/lib/types/index.d.ts +14 -0
- package/lib/types/index.d.ts.map +1 -0
- package/lib/types/utils/cmdline.d.ts +10 -0
- package/lib/types/utils/cmdline.d.ts.map +1 -0
- package/lib/types/utils/rewrite.d.ts +38 -0
- package/lib/types/utils/rewrite.d.ts.map +1 -0
- package/lib/types/utils/stream.d.ts +9 -0
- package/lib/types/utils/stream.d.ts.map +1 -0
- package/lib/types/utils/tar.d.ts +40 -0
- package/lib/types/utils/tar.d.ts.map +1 -0
- package/package.json +53 -0
- package/src/Builder.ts +239 -0
- package/src/ContentObject.ts +114 -0
- package/src/ContentSource.ts +88 -0
- package/src/MemoryPack.ts +233 -0
- package/src/MemoryPackBuilder.ts +55 -0
- package/src/builder.test.ts +214 -0
- package/src/commands/copy.ts +53 -0
- package/src/commands/exec.test.ts +22 -0
- package/src/commands/exec.ts +83 -0
- package/src/index.ts +14 -0
- package/src/utils/cmdline.test.ts +32 -0
- package/src/utils/cmdline.ts +92 -0
- package/src/utils/rewrite.test.ts +65 -0
- package/src/utils/rewrite.ts +167 -0
- package/src/utils/stream.test.ts +13 -0
- package/src/utils/stream.ts +27 -0
- package/src/utils/tar.test.ts +48 -0
- package/src/utils/tar.ts +203 -0
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
import { basename, dirname, extname, join } from "path";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* The path argument is the empty string when mapping streams or buffers not related to a file system file.
|
|
5
|
+
*/
|
|
6
|
+
export type PathMapperFn = ((path: string, index: number) => string);
|
|
7
|
+
|
|
8
|
+
export function createPathRewrite(path: string): PathMapperFn {
|
|
9
|
+
let truncPath: (path: string) => string;
|
|
10
|
+
let basePath: string = '';
|
|
11
|
+
let index = path.indexOf('!');
|
|
12
|
+
if (index > -1) {
|
|
13
|
+
basePath = path.substring(0, index);
|
|
14
|
+
if (!basePath.endsWith('/')) {
|
|
15
|
+
basePath += '/';
|
|
16
|
+
}
|
|
17
|
+
truncPath = (path: string) => {
|
|
18
|
+
return path.substring(basePath.length);
|
|
19
|
+
}
|
|
20
|
+
path = path.substring(index + 1);
|
|
21
|
+
} else {
|
|
22
|
+
truncPath = (path: string) => {
|
|
23
|
+
return basename(path);
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
if (path === '*') {
|
|
27
|
+
// preserve path
|
|
28
|
+
return truncPath;
|
|
29
|
+
} else if (path.endsWith("/*")) {
|
|
30
|
+
const prefix = path.slice(0, -2);
|
|
31
|
+
return (path: string) => {
|
|
32
|
+
path = truncPath(path);
|
|
33
|
+
return join(prefix, path);
|
|
34
|
+
}
|
|
35
|
+
} else {
|
|
36
|
+
// use path builder
|
|
37
|
+
return buildPathRewrite(path, truncPath);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
// Matches one placeholder at a time: %d/ (dirname + slash), %e or .%e
// (extension), and %f / %n / %i / %p (name, basename, index, flattened path).
// NOTE(review): module-level regex with the /g flag — its lastIndex is shared
// state. Safe here because every call runs exec() to exhaustion (resetting
// lastIndex to 0), but reentrant use would misbehave; confirm single-threaded use.
const RX_PARTS = /(%d\/)|(\.?%e)|(%[fnip])/g;
/**
 * Compiles a '%' placeholder pattern into a PathMapperFn.
 * The pattern is split into literal segments and placeholder resolvers; at
 * mapping time each part is evaluated against the truncated input path and
 * the results are concatenated.
 * If the pattern contains no placeholder, every input maps to the same fixed path.
 */
function buildPathRewrite(path: string, truncPath: (path: string) => string): PathMapperFn {
    let parts: ((path: Path, index: number) => string)[] = [];
    let m: RegExpExecArray | null;
    let lastIndex = 0;
    while (m = RX_PARTS.exec(path)) {
        if (m.index > lastIndex) {
            // literal text between the previous placeholder and this one
            const literal = path.substring(lastIndex, m.index);
            parts.push(() => literal);
        }
        if (m[1]) { // %d/ -> directory part followed by '/', or nothing when no directory
            parts.push((path: Path) => path.dirname ? path.dirname + '/' : '');
        } else if (m[2]) { // .?%e -> extension, with or without the leading dot
            if (m[2][0] === '.') {
                parts.push((path: Path) => path.extname || '');
            } else {
                parts.push((path: Path) => path.extname ? path.extname.slice(1) : ''); // extension without dot
            }
        } else if (m[3]) {
            switch (m[3]) {
                case '%f': // file name including extension
                    parts.push((path: Path) => path.name);
                    break;
                case '%n': // file name without extension
                    parts.push((path: Path) => path.basename);
                    break;
                case '%p': // stringify the path by replacing / with _
                    parts.push((path: Path) => {
                        let p = path.value;
                        if (p.startsWith('/')) {
                            p = p.substring(1);
                        }
                        return p.replaceAll('/', '_');
                    });
                    break;
                case '%i': // index of the entry in the batch
                    parts.push((_path: Path, index: number) => String(index));
                    break;
                default: throw new Error(`Bug: should never happen`);
            }
        }
        lastIndex = m.index + m[0].length;
    }
    if (!parts.length) {
        // no placeholders: constant mapping to the pattern itself
        return () => path;
    } else {
        if (lastIndex < path.length) {
            // trailing literal after the last placeholder
            const literal = path.substring(lastIndex);
            parts.push(() => literal);
        }
        return (path: string, index: number) => {
            const pathObj = new Path(truncPath(path));
            const out = [];
            for (const part of parts) {
                out.push(part(pathObj, index));
            }
            return out.join('');
        }
    }
}
|
+
|
|
103
|
+
export class Path {
|
|
104
|
+
_name?: string;
|
|
105
|
+
_extname?: string;
|
|
106
|
+
_dirname?: string;
|
|
107
|
+
_basename?: string;
|
|
108
|
+
|
|
109
|
+
/**
|
|
110
|
+
* The complete path value
|
|
111
|
+
*/
|
|
112
|
+
value: string
|
|
113
|
+
|
|
114
|
+
/**
|
|
115
|
+
* The file name (the last portion of the path). Includes the extension if present.
|
|
116
|
+
*/
|
|
117
|
+
get name(): string {
|
|
118
|
+
if (!this._name) {
|
|
119
|
+
this._name = basename(this.value);
|
|
120
|
+
}
|
|
121
|
+
return this._name;
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
/**
|
|
125
|
+
* The extension of the file including the leading '.'.
|
|
126
|
+
* An empty string if the file has no extension.
|
|
127
|
+
*/
|
|
128
|
+
get extname(): string {
|
|
129
|
+
if (!this._extname) {
|
|
130
|
+
this._extname = extname(this.value);
|
|
131
|
+
}
|
|
132
|
+
return this._extname;
|
|
133
|
+
}
|
|
134
|
+
/**
|
|
135
|
+
* The directory portion of the path. Doesn't include the trailing slash.
|
|
136
|
+
* If no directory is present, returns an empty string.
|
|
137
|
+
*/
|
|
138
|
+
get dirname(): string {
|
|
139
|
+
if (!this._dirname) {
|
|
140
|
+
this._dirname = dirname(this.value);
|
|
141
|
+
if (this._dirname === '.') {
|
|
142
|
+
this._dirname = '';
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
return this._dirname;
|
|
146
|
+
}
|
|
147
|
+
/**
|
|
148
|
+
* The path without the extension
|
|
149
|
+
*/
|
|
150
|
+
get basename(): string {
|
|
151
|
+
if (!this._basename) {
|
|
152
|
+
this._basename = this.extname ? this.name.slice(0, -this.extname.length) : this.name;
|
|
153
|
+
}
|
|
154
|
+
return this._basename;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
constructor(value: string) {
|
|
158
|
+
this.value = value;
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
/**
|
|
162
|
+
* Return the complete path value (same as `value`)
|
|
163
|
+
*/
|
|
164
|
+
toString(): string {
|
|
165
|
+
return this.value;
|
|
166
|
+
}
|
|
167
|
+
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { describe, test, expect } from "vitest";
import { BufferWritableStream } from "./stream";

// Verifies that BufferWritableStream concatenates written chunks in order.
// NOTE(review): getText() is read synchronously right after end(); this relies
// on Node invoking _final() synchronously when no writes are pending — confirm
// this holds across the supported Node versions.
describe("BufferWritableStream", () => {
    test("write buffer", () => {
        const stream = new BufferWritableStream();
        stream.write(Buffer.from("hello"));
        stream.write(Buffer.from(" "));
        stream.write(Buffer.from("world"));
        stream.end();
        expect(stream.getText()).toBe("hello world");
    })
})
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { Writable } from "stream";
|
|
2
|
+
|
|
3
|
+
export class BufferWritableStream extends Writable {
|
|
4
|
+
chunks: Buffer[] = []
|
|
5
|
+
buffer: Buffer | undefined;
|
|
6
|
+
|
|
7
|
+
// _write method is required to handle the incoming data
|
|
8
|
+
_write(chunk: Buffer, _encoding: BufferEncoding, callback: (error?: Error | null) => void) {
|
|
9
|
+
this.chunks.push(chunk); // Collect the chunk into the array
|
|
10
|
+
callback(); // Indicate that the write is complete
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
// Optional _final method is called when the stream is ending
|
|
14
|
+
_final(callback: (error?: Error | null) => void) {
|
|
15
|
+
this.buffer = Buffer.concat(this.chunks); // Concatenate the collected chunks into a buffer
|
|
16
|
+
callback(); // Indicate the stream is finished
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
// Method to get the final buffer when the stream is closed
|
|
20
|
+
getBuffer() {
|
|
21
|
+
return this.buffer;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
getText(encoding: BufferEncoding = "utf-8") {
|
|
25
|
+
return this.buffer?.toString(encoding);
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import { describe, test, expect, beforeAll, afterAll } from "vitest";
|
|
2
|
+
import { loadTarIndex, TarBuilder, TarIndex } from "./tar";
|
|
3
|
+
import { readFileSync, stat, statSync, unlinkSync } from "fs";
|
|
4
|
+
|
|
5
|
+
const tarFile = `test-${Date.now()}.tar`;
|
|
6
|
+
afterAll(() => {
|
|
7
|
+
unlinkSync(tarFile);
|
|
8
|
+
});
|
|
9
|
+
|
|
10
|
+
describe("Indexed tar format", () => {
|
|
11
|
+
const builder = new TarBuilder(tarFile);
|
|
12
|
+
test("build tar", async () => {
|
|
13
|
+
builder.add("file1.txt", Buffer.from("hello world!"));
|
|
14
|
+
builder.add("file2.txt", Buffer.from("bonjour monde!"));
|
|
15
|
+
builder.add("app/package.json", readFileSync("./package.json"));
|
|
16
|
+
await builder.build();
|
|
17
|
+
const stats = statSync(tarFile);
|
|
18
|
+
expect(stats.isFile()).toBeTruthy();
|
|
19
|
+
});
|
|
20
|
+
|
|
21
|
+
test("read tar", async () => {
|
|
22
|
+
const index = await loadTarIndex(tarFile) as TarIndex;
|
|
23
|
+
expect(index).toBeDefined();
|
|
24
|
+
expect(Object.keys(index.entries).length).toBe(3);
|
|
25
|
+
const file1 = index.get('file1.txt');
|
|
26
|
+
const file2 = index.get('file2.txt');
|
|
27
|
+
const file3 = index.get('app/package.json');
|
|
28
|
+
expect(file1).toBeDefined();
|
|
29
|
+
expect(file2).toBeDefined();
|
|
30
|
+
expect(file3).toBeDefined();
|
|
31
|
+
expect(file1!.size).toBe(12);
|
|
32
|
+
expect(file2!.size).toBe(14);
|
|
33
|
+
expect(file3!.size).toBeGreaterThan(0);
|
|
34
|
+
|
|
35
|
+
const content1 = await index.getContent('file1.txt');
|
|
36
|
+
const content2 = await index.getContent('file2.txt');
|
|
37
|
+
const content3 = await index.getContent('app/package.json');
|
|
38
|
+
|
|
39
|
+
expect(content1!.toString()).toBe("hello world!");
|
|
40
|
+
expect(content2!.toString()).toBe("bonjour monde!");
|
|
41
|
+
const pkg = JSON.parse(content3!.toString());
|
|
42
|
+
expect(pkg).toBeDefined();
|
|
43
|
+
expect(pkg).toHaveProperty("name");
|
|
44
|
+
expect(pkg).toHaveProperty("version");
|
|
45
|
+
expect(pkg).toHaveProperty("dependencies");
|
|
46
|
+
});
|
|
47
|
+
|
|
48
|
+
});
|
package/src/utils/tar.ts
ADDED
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import { FileHandle, open } from "fs/promises";
|
|
3
|
+
import { pipeline } from "stream/promises";
|
|
4
|
+
import tar from "tar-stream";
|
|
5
|
+
import zlib from "zlib";
|
|
6
|
+
|
|
7
|
+
/**
 * A named archive entry whose content can be loaded lazily.
 * NOTE(review): not referenced elsewhere in this file — presumably implemented
 * by consumers of the package; verify before removing.
 */
export interface TarEntry {
    name: string;
    getContent(): Promise<Buffer>;
}
|
|
12
|
+
export class TarBuilder {
|
|
13
|
+
pack: tar.Pack;
|
|
14
|
+
indexData: string[] = [];
|
|
15
|
+
currentOffset = 0;
|
|
16
|
+
tarPromise: Promise<unknown>;
|
|
17
|
+
|
|
18
|
+
constructor(file: string) {
|
|
19
|
+
const pack = tar.pack(); // Create a new tar stream
|
|
20
|
+
this.pack = pack;
|
|
21
|
+
// Open the output file as a write stream
|
|
22
|
+
const outputStream = fs.createWriteStream(file);
|
|
23
|
+
if (file.endsWith('.gz')) {
|
|
24
|
+
this.tarPromise = pipeline(pack, zlib.createGzip(), outputStream);
|
|
25
|
+
} else {
|
|
26
|
+
this.tarPromise = pipeline(pack, outputStream);
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
async add(name: string, content?: Buffer) {
|
|
32
|
+
name = normalizePath(name);
|
|
33
|
+
// Calculate header size, 512 bytes for tar headers
|
|
34
|
+
const headerSize = 512;
|
|
35
|
+
const contentSize = content ? Buffer.byteLength(content) : 0;
|
|
36
|
+
const entryHeaderOffset = this.currentOffset;
|
|
37
|
+
|
|
38
|
+
// Store the index entry
|
|
39
|
+
// entry data offset is always at header offset + 512 bytes
|
|
40
|
+
if (contentSize > 0) { // do not index directories
|
|
41
|
+
this.indexData.push(`${name}:${entryHeaderOffset},${contentSize}`);
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
// Add the file entry to the tar stream
|
|
45
|
+
this.pack.entry({ name, size: contentSize }, content);
|
|
46
|
+
|
|
47
|
+
// Update the offset
|
|
48
|
+
this.currentOffset += headerSize + contentSize;
|
|
49
|
+
// Tar files are padded to 512-byte boundaries
|
|
50
|
+
if (contentSize % 512 !== 0) {
|
|
51
|
+
this.currentOffset += 512 - (contentSize % 512);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
async build() {
|
|
56
|
+
const pack = this.pack;
|
|
57
|
+
// Convert index data to string and calculate its size
|
|
58
|
+
const indexContent = this.indexData.join('\n') + '\n';
|
|
59
|
+
const indexContentSize = Buffer.byteLength(indexContent);
|
|
60
|
+
|
|
61
|
+
// Add the .index entry to the tar
|
|
62
|
+
pack.entry({ name: '.index', size: indexContentSize }, indexContent);
|
|
63
|
+
|
|
64
|
+
pack.finalize(); // Finalize the tar stream
|
|
65
|
+
|
|
66
|
+
await this.tarPromise;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
destroy() {
|
|
70
|
+
this.pack.destroy();
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
export async function loadTarIndex(tarFile: string) {
|
|
76
|
+
const fd = await open(tarFile, 'r');
|
|
77
|
+
try {
|
|
78
|
+
return await readTarIndex(fd);
|
|
79
|
+
} catch (err) {
|
|
80
|
+
await fd.close();
|
|
81
|
+
throw err;
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
/**
 * Scans a tar file backwards for the `.index` entry header and, when found,
 * reads the index content and wraps it in a TarIndex (which takes ownership
 * of `fd`). Returns null when no index entry is present.
 * NOTE(review): a 512-aligned content block that happens to start with the
 * bytes ".index\0" would be mistaken for the header — presumed acceptable
 * because TarBuilder always writes .index last; confirm.
 */
async function readTarIndex(fd: FileHandle) {
    const stats = await fd.stat();
    const size = stats.size;
    // we want to find the index header.
    // we read the last chunks of 512 until we find the file name followed by a 0 char.
    // the tar file ends with a segment of 1024 bytes of 0 so we need to skip that.
    // we pick a size for the buffer to also include the file size entry from the header. So the buffer should be
    // of 100 + 8 + 8 + 8 + 12 bytes = 124 + 12 bytes = 136 bytes
    // the file size will be located at offset 124 and is 12 bytes long
    // skip 1024 0 bytes then skip another 1024 bytes to find the first possible location of the index header (512 bytes for content and 512 bytes for the header)
    let offset = size - 1024 - 1024;
    const buffer = Buffer.alloc(512);
    while (offset >= 0) {
        // read one candidate 512-byte header block
        await fd.read(buffer, 0, 512, offset);
        // remove the 0 byte padding
        const fileName = buffer.toString('utf-8', 0, 100);
        if (fileName.startsWith('.index\0')) {
            // we found the index header
            const indexSize = getHeaderFileSize(buffer);
            // entry content always starts 512 bytes after its header
            const indexDataOffset = offset + 512;
            const indexDataEnd = indexDataOffset + indexSize;
            // sanity check: index content must fit before the 1024-byte zero trailer
            if (indexDataEnd > size - 1024) {
                throw new Error('Invalid index data offsets: [' + indexDataOffset + ':' + indexDataEnd + ']');
            }
            const dataBuffer = Buffer.alloc(indexSize);
            await fd.read(dataBuffer, 0, indexSize, indexDataOffset);
            const indexContent = dataBuffer.toString('utf-8');
            return new TarIndex(fd, indexContent);
        }
        // step back one tar block and try again
        offset -= 512;
    }
    return null;
}
+
|
|
119
|
+
/**
 * Location of a single entry inside the tar archive.
 */
export interface TarEntryIndex {
    // byte offset of the entry's 512-byte header from the start of the archive
    offset: number,
    // content size in bytes (excludes the header and the 512-byte padding)
    size: number
}
export class TarIndex {
|
|
124
|
+
entries: Record<string, TarEntryIndex> = {};
|
|
125
|
+
headerBuffer = Buffer.alloc(512);
|
|
126
|
+
/**
|
|
127
|
+
* @param fd the tar file descriptor
|
|
128
|
+
* @param content the index content
|
|
129
|
+
*/
|
|
130
|
+
constructor(public fd: FileHandle, content: string) {
|
|
131
|
+
const lines = content.split('\n');
|
|
132
|
+
for (const line of lines) {
|
|
133
|
+
if (line) {
|
|
134
|
+
const [name, value] = line.split(':');
|
|
135
|
+
const [offsetStr, sizeStr] = value.split(',');
|
|
136
|
+
const offset = parseInt(offsetStr);
|
|
137
|
+
const size = parseInt(sizeStr);
|
|
138
|
+
this.entries[name] = { offset, size };
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
getPaths() {
|
|
144
|
+
return Object.keys(this.entries);
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
getSortedPaths() {
|
|
148
|
+
return Object.keys(this.entries).sort();
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
get(name: string) {
|
|
152
|
+
return this.entries[name];
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
async getContentAt(offset: number, size: number) {
|
|
156
|
+
const buffer = Buffer.alloc(size);
|
|
157
|
+
await this.fd.read(buffer, 0, size, offset + 512);
|
|
158
|
+
return buffer;
|
|
159
|
+
}
|
|
160
|
+
async getContent(name: string) {
|
|
161
|
+
const entry = this.entries[name];
|
|
162
|
+
if (entry) {
|
|
163
|
+
return this.getContentAt(entry.offset, entry.size);
|
|
164
|
+
} else {
|
|
165
|
+
return null;
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
getReadStream(name: string, encoding?: BufferEncoding) {
|
|
170
|
+
const entry = this.entries[name];
|
|
171
|
+
if (entry) {
|
|
172
|
+
const offset = entry.offset + 512;
|
|
173
|
+
return this.fd.createReadStream({
|
|
174
|
+
encoding,
|
|
175
|
+
start: entry.offset,
|
|
176
|
+
end: offset + entry.size
|
|
177
|
+
})
|
|
178
|
+
} else {
|
|
179
|
+
return null;
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
async close() {
|
|
184
|
+
await this.fd.close();
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
function getHeaderFileSize(buffer: Buffer) {
|
|
191
|
+
const octalSize = buffer.toString('ascii', 124, 136).trim();
|
|
192
|
+
return parseInt(octalSize, 8);
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
export function normalizePath(path: string) {
|
|
196
|
+
if (path.startsWith('/')) {
|
|
197
|
+
path = path.slice(1);
|
|
198
|
+
}
|
|
199
|
+
if (path.endsWith('/')) {
|
|
200
|
+
path = path.slice(-1);
|
|
201
|
+
}
|
|
202
|
+
return path;
|
|
203
|
+
}
|