@ismail-elkorchi/bytefold 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +48 -0
- package/SPEC.md +285 -0
- package/dist/abort.d.ts +3 -0
- package/dist/abort.d.ts.map +1 -0
- package/dist/abort.js +33 -0
- package/dist/abort.js.map +1 -0
- package/dist/archive/errors.d.ts +34 -0
- package/dist/archive/errors.d.ts.map +1 -0
- package/dist/archive/errors.js +45 -0
- package/dist/archive/errors.js.map +1 -0
- package/dist/archive/httpArchiveErrors.d.ts +2 -0
- package/dist/archive/httpArchiveErrors.d.ts.map +1 -0
- package/dist/archive/httpArchiveErrors.js +25 -0
- package/dist/archive/httpArchiveErrors.js.map +1 -0
- package/dist/archive/index.d.ts +47 -0
- package/dist/archive/index.d.ts.map +1 -0
- package/dist/archive/index.js +1490 -0
- package/dist/archive/index.js.map +1 -0
- package/dist/archive/types.d.ts +91 -0
- package/dist/archive/types.d.ts.map +1 -0
- package/dist/archive/types.js +2 -0
- package/dist/archive/types.js.map +1 -0
- package/dist/archive/xzPreflight.d.ts +13 -0
- package/dist/archive/xzPreflight.d.ts.map +1 -0
- package/dist/archive/xzPreflight.js +44 -0
- package/dist/archive/xzPreflight.js.map +1 -0
- package/dist/archive/zipPreflight.d.ts +18 -0
- package/dist/archive/zipPreflight.d.ts.map +1 -0
- package/dist/archive/zipPreflight.js +50 -0
- package/dist/archive/zipPreflight.js.map +1 -0
- package/dist/binary.d.ts +12 -0
- package/dist/binary.d.ts.map +1 -0
- package/dist/binary.js +59 -0
- package/dist/binary.js.map +1 -0
- package/dist/bun/index.d.ts +19 -0
- package/dist/bun/index.d.ts.map +1 -0
- package/dist/bun/index.js +427 -0
- package/dist/bun/index.js.map +1 -0
- package/dist/compress/errors.d.ts +30 -0
- package/dist/compress/errors.d.ts.map +1 -0
- package/dist/compress/errors.js +40 -0
- package/dist/compress/errors.js.map +1 -0
- package/dist/compress/index.d.ts +12 -0
- package/dist/compress/index.d.ts.map +1 -0
- package/dist/compress/index.js +339 -0
- package/dist/compress/index.js.map +1 -0
- package/dist/compress/types.d.ts +41 -0
- package/dist/compress/types.d.ts.map +1 -0
- package/dist/compress/types.js +2 -0
- package/dist/compress/types.js.map +1 -0
- package/dist/compression/bzip2.d.ts +9 -0
- package/dist/compression/bzip2.d.ts.map +1 -0
- package/dist/compression/bzip2.js +546 -0
- package/dist/compression/bzip2.js.map +1 -0
- package/dist/compression/codecs.d.ts +6 -0
- package/dist/compression/codecs.d.ts.map +1 -0
- package/dist/compression/codecs.js +82 -0
- package/dist/compression/codecs.js.map +1 -0
- package/dist/compression/deflate64.d.ts +3 -0
- package/dist/compression/deflate64.d.ts.map +1 -0
- package/dist/compression/deflate64.js +549 -0
- package/dist/compression/deflate64.js.map +1 -0
- package/dist/compression/node-backend.d.ts +9 -0
- package/dist/compression/node-backend.d.ts.map +1 -0
- package/dist/compression/node-backend.js +103 -0
- package/dist/compression/node-backend.js.map +1 -0
- package/dist/compression/registry.d.ts +10 -0
- package/dist/compression/registry.d.ts.map +1 -0
- package/dist/compression/registry.js +30 -0
- package/dist/compression/registry.js.map +1 -0
- package/dist/compression/streams.d.ts +31 -0
- package/dist/compression/streams.d.ts.map +1 -0
- package/dist/compression/streams.js +147 -0
- package/dist/compression/streams.js.map +1 -0
- package/dist/compression/types.d.ts +19 -0
- package/dist/compression/types.d.ts.map +1 -0
- package/dist/compression/types.js +2 -0
- package/dist/compression/types.js.map +1 -0
- package/dist/compression/xz.d.ts +21 -0
- package/dist/compression/xz.d.ts.map +1 -0
- package/dist/compression/xz.js +1455 -0
- package/dist/compression/xz.js.map +1 -0
- package/dist/compression/xzFilters.d.ts +14 -0
- package/dist/compression/xzFilters.d.ts.map +1 -0
- package/dist/compression/xzFilters.js +736 -0
- package/dist/compression/xzFilters.js.map +1 -0
- package/dist/compression/xzIndexPreflight.d.ts +20 -0
- package/dist/compression/xzIndexPreflight.d.ts.map +1 -0
- package/dist/compression/xzIndexPreflight.js +371 -0
- package/dist/compression/xzIndexPreflight.js.map +1 -0
- package/dist/compression/xzScan.d.ts +15 -0
- package/dist/compression/xzScan.d.ts.map +1 -0
- package/dist/compression/xzScan.js +310 -0
- package/dist/compression/xzScan.js.map +1 -0
- package/dist/cp437.d.ts +2 -0
- package/dist/cp437.d.ts.map +1 -0
- package/dist/cp437.js +31 -0
- package/dist/cp437.js.map +1 -0
- package/dist/crc32.d.ts +7 -0
- package/dist/crc32.d.ts.map +1 -0
- package/dist/crc32.js +37 -0
- package/dist/crc32.js.map +1 -0
- package/dist/crc64.d.ts +6 -0
- package/dist/crc64.d.ts.map +1 -0
- package/dist/crc64.js +32 -0
- package/dist/crc64.js.map +1 -0
- package/dist/crypto/ctr.d.ts +11 -0
- package/dist/crypto/ctr.d.ts.map +1 -0
- package/dist/crypto/ctr.js +56 -0
- package/dist/crypto/ctr.js.map +1 -0
- package/dist/crypto/sha256.d.ts +16 -0
- package/dist/crypto/sha256.d.ts.map +1 -0
- package/dist/crypto/sha256.js +152 -0
- package/dist/crypto/sha256.js.map +1 -0
- package/dist/crypto/winzip-aes.d.ts +17 -0
- package/dist/crypto/winzip-aes.d.ts.map +1 -0
- package/dist/crypto/winzip-aes.js +98 -0
- package/dist/crypto/winzip-aes.js.map +1 -0
- package/dist/crypto/zipcrypto.d.ts +23 -0
- package/dist/crypto/zipcrypto.d.ts.map +1 -0
- package/dist/crypto/zipcrypto.js +99 -0
- package/dist/crypto/zipcrypto.js.map +1 -0
- package/dist/deno/index.d.ts +19 -0
- package/dist/deno/index.d.ts.map +1 -0
- package/dist/deno/index.js +422 -0
- package/dist/deno/index.js.map +1 -0
- package/dist/dosTime.d.ts +7 -0
- package/dist/dosTime.d.ts.map +1 -0
- package/dist/dosTime.js +21 -0
- package/dist/dosTime.js.map +1 -0
- package/dist/errorContext.d.ts +2 -0
- package/dist/errorContext.d.ts.map +1 -0
- package/dist/errorContext.js +24 -0
- package/dist/errorContext.js.map +1 -0
- package/dist/errors.d.ts +46 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/errors.js +51 -0
- package/dist/errors.js.map +1 -0
- package/dist/extraFields.d.ts +29 -0
- package/dist/extraFields.d.ts.map +1 -0
- package/dist/extraFields.js +201 -0
- package/dist/extraFields.js.map +1 -0
- package/dist/generated/unicodeCaseFolding.d.ts +4 -0
- package/dist/generated/unicodeCaseFolding.d.ts.map +1 -0
- package/dist/generated/unicodeCaseFolding.js +1594 -0
- package/dist/generated/unicodeCaseFolding.js.map +1 -0
- package/dist/http/errors.d.ts +26 -0
- package/dist/http/errors.d.ts.map +1 -0
- package/dist/http/errors.js +33 -0
- package/dist/http/errors.js.map +1 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +7 -0
- package/dist/index.js.map +1 -0
- package/dist/limits.d.ts +22 -0
- package/dist/limits.d.ts.map +1 -0
- package/dist/limits.js +39 -0
- package/dist/limits.js.map +1 -0
- package/dist/node/index.d.ts +13 -0
- package/dist/node/index.d.ts.map +1 -0
- package/dist/node/index.js +448 -0
- package/dist/node/index.js.map +1 -0
- package/dist/node/zip/RandomAccess.d.ts +12 -0
- package/dist/node/zip/RandomAccess.d.ts.map +1 -0
- package/dist/node/zip/RandomAccess.js +38 -0
- package/dist/node/zip/RandomAccess.js.map +1 -0
- package/dist/node/zip/Sink.d.ts +17 -0
- package/dist/node/zip/Sink.d.ts.map +1 -0
- package/dist/node/zip/Sink.js +45 -0
- package/dist/node/zip/Sink.js.map +1 -0
- package/dist/node/zip/ZipReader.d.ts +51 -0
- package/dist/node/zip/ZipReader.d.ts.map +1 -0
- package/dist/node/zip/ZipReader.js +1540 -0
- package/dist/node/zip/ZipReader.js.map +1 -0
- package/dist/node/zip/ZipWriter.d.ts +21 -0
- package/dist/node/zip/ZipWriter.d.ts.map +1 -0
- package/dist/node/zip/ZipWriter.js +196 -0
- package/dist/node/zip/ZipWriter.js.map +1 -0
- package/dist/node/zip/entryStream.d.ts +22 -0
- package/dist/node/zip/entryStream.d.ts.map +1 -0
- package/dist/node/zip/entryStream.js +241 -0
- package/dist/node/zip/entryStream.js.map +1 -0
- package/dist/node/zip/entryWriter.d.ts +54 -0
- package/dist/node/zip/entryWriter.d.ts.map +1 -0
- package/dist/node/zip/entryWriter.js +512 -0
- package/dist/node/zip/entryWriter.js.map +1 -0
- package/dist/node/zip/index.d.ts +8 -0
- package/dist/node/zip/index.d.ts.map +1 -0
- package/dist/node/zip/index.js +5 -0
- package/dist/node/zip/index.js.map +1 -0
- package/dist/reader/RandomAccess.d.ts +55 -0
- package/dist/reader/RandomAccess.d.ts.map +1 -0
- package/dist/reader/RandomAccess.js +528 -0
- package/dist/reader/RandomAccess.js.map +1 -0
- package/dist/reader/ZipReader.d.ts +89 -0
- package/dist/reader/ZipReader.d.ts.map +1 -0
- package/dist/reader/ZipReader.js +1359 -0
- package/dist/reader/ZipReader.js.map +1 -0
- package/dist/reader/centralDirectory.d.ts +40 -0
- package/dist/reader/centralDirectory.d.ts.map +1 -0
- package/dist/reader/centralDirectory.js +311 -0
- package/dist/reader/centralDirectory.js.map +1 -0
- package/dist/reader/entryStream.d.ts +22 -0
- package/dist/reader/entryStream.d.ts.map +1 -0
- package/dist/reader/entryStream.js +122 -0
- package/dist/reader/entryStream.js.map +1 -0
- package/dist/reader/eocd.d.ts +22 -0
- package/dist/reader/eocd.d.ts.map +1 -0
- package/dist/reader/eocd.js +184 -0
- package/dist/reader/eocd.js.map +1 -0
- package/dist/reader/httpZipErrors.d.ts +4 -0
- package/dist/reader/httpZipErrors.d.ts.map +1 -0
- package/dist/reader/httpZipErrors.js +48 -0
- package/dist/reader/httpZipErrors.js.map +1 -0
- package/dist/reader/localHeader.d.ts +15 -0
- package/dist/reader/localHeader.d.ts.map +1 -0
- package/dist/reader/localHeader.js +37 -0
- package/dist/reader/localHeader.js.map +1 -0
- package/dist/reportSchema.d.ts +3 -0
- package/dist/reportSchema.d.ts.map +1 -0
- package/dist/reportSchema.js +3 -0
- package/dist/reportSchema.js.map +1 -0
- package/dist/streams/adapters.d.ts +10 -0
- package/dist/streams/adapters.d.ts.map +1 -0
- package/dist/streams/adapters.js +54 -0
- package/dist/streams/adapters.js.map +1 -0
- package/dist/streams/buffer.d.ts +5 -0
- package/dist/streams/buffer.d.ts.map +1 -0
- package/dist/streams/buffer.js +44 -0
- package/dist/streams/buffer.js.map +1 -0
- package/dist/streams/crcTransform.d.ts +15 -0
- package/dist/streams/crcTransform.d.ts.map +1 -0
- package/dist/streams/crcTransform.js +30 -0
- package/dist/streams/crcTransform.js.map +1 -0
- package/dist/streams/emit.d.ts +7 -0
- package/dist/streams/emit.d.ts.map +1 -0
- package/dist/streams/emit.js +13 -0
- package/dist/streams/emit.js.map +1 -0
- package/dist/streams/limits.d.ts +16 -0
- package/dist/streams/limits.d.ts.map +1 -0
- package/dist/streams/limits.js +39 -0
- package/dist/streams/limits.js.map +1 -0
- package/dist/streams/measure.d.ts +5 -0
- package/dist/streams/measure.d.ts.map +1 -0
- package/dist/streams/measure.js +9 -0
- package/dist/streams/measure.js.map +1 -0
- package/dist/streams/progress.d.ts +8 -0
- package/dist/streams/progress.d.ts.map +1 -0
- package/dist/streams/progress.js +69 -0
- package/dist/streams/progress.js.map +1 -0
- package/dist/streams/web.d.ts +5 -0
- package/dist/streams/web.d.ts.map +1 -0
- package/dist/streams/web.js +33 -0
- package/dist/streams/web.js.map +1 -0
- package/dist/tar/TarReader.d.ts +41 -0
- package/dist/tar/TarReader.d.ts.map +1 -0
- package/dist/tar/TarReader.js +930 -0
- package/dist/tar/TarReader.js.map +1 -0
- package/dist/tar/TarWriter.d.ts +25 -0
- package/dist/tar/TarWriter.d.ts.map +1 -0
- package/dist/tar/TarWriter.js +307 -0
- package/dist/tar/TarWriter.js.map +1 -0
- package/dist/tar/index.d.ts +4 -0
- package/dist/tar/index.d.ts.map +1 -0
- package/dist/tar/index.js +3 -0
- package/dist/tar/index.js.map +1 -0
- package/dist/tar/types.d.ts +67 -0
- package/dist/tar/types.d.ts.map +1 -0
- package/dist/tar/types.js +2 -0
- package/dist/tar/types.js.map +1 -0
- package/dist/text/caseFold.d.ts +7 -0
- package/dist/text/caseFold.d.ts.map +1 -0
- package/dist/text/caseFold.js +45 -0
- package/dist/text/caseFold.js.map +1 -0
- package/dist/types.d.ts +190 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/dist/web/index.d.ts +11 -0
- package/dist/web/index.d.ts.map +1 -0
- package/dist/web/index.js +95 -0
- package/dist/web/index.js.map +1 -0
- package/dist/writer/Sink.d.ts +21 -0
- package/dist/writer/Sink.d.ts.map +1 -0
- package/dist/writer/Sink.js +24 -0
- package/dist/writer/Sink.js.map +1 -0
- package/dist/writer/ZipWriter.d.ts +27 -0
- package/dist/writer/ZipWriter.d.ts.map +1 -0
- package/dist/writer/ZipWriter.js +153 -0
- package/dist/writer/ZipWriter.js.map +1 -0
- package/dist/writer/centralDirectoryWriter.d.ts +8 -0
- package/dist/writer/centralDirectoryWriter.d.ts.map +1 -0
- package/dist/writer/centralDirectoryWriter.js +77 -0
- package/dist/writer/centralDirectoryWriter.js.map +1 -0
- package/dist/writer/entryWriter.d.ts +54 -0
- package/dist/writer/entryWriter.d.ts.map +1 -0
- package/dist/writer/entryWriter.js +327 -0
- package/dist/writer/entryWriter.js.map +1 -0
- package/dist/writer/finalize.d.ts +10 -0
- package/dist/writer/finalize.d.ts.map +1 -0
- package/dist/writer/finalize.js +56 -0
- package/dist/writer/finalize.js.map +1 -0
- package/dist/zip/index.d.ts +8 -0
- package/dist/zip/index.d.ts.map +1 -0
- package/dist/zip/index.js +5 -0
- package/dist/zip/index.js.map +1 -0
- package/jsr.json +41 -0
- package/package.json +117 -0
- package/schemas/audit-report.schema.json +38 -0
- package/schemas/capabilities-report.schema.json +25 -0
- package/schemas/detection-report.schema.json +23 -0
- package/schemas/error.schema.json +22 -0
- package/schemas/normalize-report.schema.json +47 -0
|
@@ -0,0 +1,1490 @@
|
|
|
1
|
+
import { ArchiveError } from './errors.js';
|
|
2
|
+
import { throwIfAborted } from '../abort.js';
|
|
3
|
+
import { readAllBytes } from '../streams/buffer.js';
|
|
4
|
+
import { readableFromAsyncIterable, readableFromBytes } from '../streams/web.js';
|
|
5
|
+
import { createCompressTransform } from '../compression/streams.js';
|
|
6
|
+
import { readBzip2BlockSize } from '../compression/bzip2.js';
|
|
7
|
+
import { scanXzResourceRequirements } from '../compression/xzScan.js';
|
|
8
|
+
import { createDecompressor, getCompressionCapabilities } from '../compress/index.js';
|
|
9
|
+
import { CompressionError } from '../compress/errors.js';
|
|
10
|
+
import { crc32 } from '../crc32.js';
|
|
11
|
+
import { BlobRandomAccess } from '../reader/RandomAccess.js';
|
|
12
|
+
import { ZipReader } from '../reader/ZipReader.js';
|
|
13
|
+
import { ZipWriter } from '../writer/ZipWriter.js';
|
|
14
|
+
import { BYTEFOLD_REPORT_SCHEMA_VERSION } from '../reportSchema.js';
|
|
15
|
+
import { TarReader } from '../tar/TarReader.js';
|
|
16
|
+
import { TarWriter } from '../tar/TarWriter.js';
|
|
17
|
+
import { DEFAULT_RESOURCE_LIMITS } from '../limits.js';
|
|
18
|
+
import { resolveXzDictionaryLimit, resolveXzIndexLimits } from './xzPreflight.js';
|
|
19
|
+
import { isZipSignature, preflightZip, resolveZipPreflightLimits, shouldPreflightZip } from './zipPreflight.js';
|
|
20
|
+
import { decodeNullTerminatedUtf8 } from '../binary.js';
|
|
21
|
+
/**
 * Open an archive with auto-detection (or a forced format).
 *
 * Detection order when `options.format` is `'auto'`:
 *   1. Blob inputs may be opened as ZIP via random access (preflight path).
 *   2. A filename hint (`options.filename`) — medium confidence.
 *   3. Magic bytes of the buffered input — high confidence.
 * The returned reader carries a `detection` report describing how the
 * format was chosen.
 *
 * NOTE(review): `__zipReader`/`__zipDetection` are internal re-entry options
 * set by openZipFromRandomAccess — callers should not pass them.
 */
export async function openArchive(input, options) {
    const internal = options;
    // Internal re-entry path: a ZipReader was already constructed from a
    // random-access source; just wrap it and attach the detection report.
    if (internal?.__zipReader) {
        const detection = internal.__zipDetection;
        const inputKind = detection?.inputKind ?? resolveInputKind(input, options?.inputKind);
        const notes = detection?.notes ?? ['Format inferred from magic bytes'];
        const confidence = detection?.confidence ?? 'high';
        // Only forward options the caller actually set, so the wrapper can
        // distinguish "no options" from "explicit options".
        const openOptions = {
            ...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
            ...(options?.password !== undefined ? { password: options.password } : {}),
            ...(options?.signal ? { signal: options.signal } : {})
        };
        const reader = new ZipArchiveReader(internal.__zipReader, Object.keys(openOptions).length > 0 ? openOptions : undefined);
        reader.detection = buildDetectionReport(inputKind, 'zip', confidence, notes);
        return reader;
    }
    const formatOption = options?.format ?? 'auto';
    // Blobs get a chance to be opened as ZIP without buffering the whole
    // payload; a non-ZIP blob falls through to the buffered path below.
    if (isBlobInput(input)) {
        const openedZip = await maybeOpenZipFromBlob(input, formatOption, options);
        if (openedZip)
            return openedZip;
    }
    const inputKind = resolveInputKind(input, options?.inputKind);
    const data = await resolveInput(input, options);
    const notes = [];
    let confidence = 'high';
    let format;
    if (formatOption !== 'auto') {
        // Caller forced the format; skip detection entirely.
        format = formatOption;
        notes.push('Format forced by options.format');
    }
    else {
        // Filename hint is weaker evidence than magic bytes.
        const hinted = formatFromFilename(options?.filename);
        if (hinted) {
            format = hinted;
            confidence = 'medium';
            notes.push('Format inferred from filename');
        }
        else {
            format = detectFormat(data);
            if (!format) {
                throw new ArchiveError('ARCHIVE_UNSUPPORTED_FORMAT', 'Unable to detect archive format');
            }
            notes.push('Format inferred from magic bytes');
        }
    }
    // openWithFormat may refine the format (e.g. gz payload containing tar)
    // and contributes its own notes to the detection report.
    const result = await openWithFormat(format, data, options);
    const report = buildDetectionReport(inputKind, result.format, confidence, [...notes, ...result.notes]);
    const reader = result.reader;
    reader.detection = report;
    return reader;
}
|
|
74
|
+
/**
 * Attempt to open a Blob input as ZIP via random access.
 *
 * Returns a reader when the input is (or is forced/hinted to be) ZIP,
 * `undefined` when auto-detection should fall back to the buffered path,
 * and rethrows after closing the random-access reader on failure.
 */
async function maybeOpenZipFromBlob(input, formatOption, options) {
    const filename = options?.filename;
    // Forced zip format or a .zip filename hint: go straight to preflight.
    if (shouldPreflightZip(formatOption, filename)) {
        const detection = buildZipDetection('blob', formatOption === 'zip' ? 'forced' : 'filename');
        return openZipFromRandomAccess(new BlobRandomAccess(input), detection, options, filename);
    }
    // A non-auto, non-zip format was requested; this helper does not apply.
    if (formatOption !== 'auto')
        return undefined;
    const reader = new BlobRandomAccess(input);
    try {
        // Sniff the first four bytes for a ZIP local-file signature.
        const signature = await reader.read(0n, 4, options?.signal);
        if (!isZipSignature(signature)) {
            await reader.close();
            return undefined;
        }
        const detection = buildZipDetection('blob', 'magic');
        // Ownership of `reader` transfers to openZipFromRandomAccess, which
        // closes it on failure.
        return openZipFromRandomAccess(reader, detection, options, filename);
    }
    catch (err) {
        // Best-effort cleanup; the original error is what matters.
        await reader.close().catch(() => { });
        throw err;
    }
}
|
|
97
|
+
/**
 * Build the partial detection metadata for a ZIP opened from random access.
 * `source` records how the format decision was made: 'forced' (options.format),
 * 'filename' (extension hint, medium confidence), or anything else —
 * treated as magic-byte detection (high confidence).
 */
function buildZipDetection(inputKind, source) {
    switch (source) {
        case 'forced':
            return { inputKind, confidence: 'high', notes: ['Format forced by options.format'] };
        case 'filename':
            return { inputKind, confidence: 'medium', notes: ['Format inferred from filename'] };
        default:
            return { inputKind, confidence: 'high', notes: ['Format inferred from magic bytes'] };
    }
}
|
|
106
|
+
/**
 * Resolve the effective strict flag for ZIP preflight.
 *
 * An explicit `options.isStrict` always wins; otherwise strictness defaults
 * to true for every profile except 'compat'.
 */
function resolveZipStrict(options) {
    const profile = options?.profile ?? 'strict';
    // Idiomatic boolean instead of `profile === 'compat' ? false : true`.
    return options?.isStrict ?? (profile !== 'compat');
}
|
|
111
|
+
/**
 * Merge top-level open options into the ZIP-specific options bag.
 * Starts from a shallow copy of `options.zip` and lets explicitly set
 * top-level values (profile, isStrict, limits, password) override it.
 */
function buildZipReaderOptions(options) {
    const merged = { ...(options?.zip ?? {}) };
    for (const key of ['profile', 'isStrict', 'limits', 'password']) {
        const value = options?.[key];
        // Only values the caller actually provided override the zip bag.
        if (value !== undefined) {
            merged[key] = value;
        }
    }
    return merged;
}
|
|
124
|
+
/**
 * Open a ZIP archive from a random-access source.
 *
 * Runs the ZIP preflight checks, constructs a ZipReader, then re-enters
 * openArchive through its internal `__zipReader` path (the empty byte
 * array is a dummy input — the reader is already built). Closes `reader`
 * and rethrows if anything fails.
 */
async function openZipFromRandomAccess(reader, detection, options, filename) {
    try {
        const limits = resolveZipPreflightLimits(options?.limits, options?.profile);
        // Validate structure/limits before committing to a full reader.
        await preflightZip(reader, {
            strict: resolveZipStrict(options),
            limits,
            ...(options?.signal ? { signal: options.signal } : {})
        });
        const zipOptions = buildZipReaderOptions(options);
        const zipReader = await ZipReader.fromRandomAccess(reader, zipOptions);
        // Re-enter openArchive: it wraps the prepared ZipReader and attaches
        // the detection report. Explicit caller options take precedence over
        // the values derived here.
        return openArchive(new Uint8Array(0), {
            ...options,
            __zipReader: zipReader,
            __zipDetection: detection,
            ...(options?.inputKind ? {} : { inputKind: detection.inputKind }),
            ...(options?.filename ? {} : filename ? { filename } : {})
        });
    }
    catch (err) {
        // Best-effort close; surface the original failure.
        await reader.close().catch(() => { });
        throw err;
    }
}
|
|
147
|
+
/**
 * Create an archive writer for a specific format.
 *
 * 'zip' and 'tar' write directly to `writable`; 'tgz'/'tar.gz', 'tar.zst'
 * and 'tar.br' wrap a tar writer in a compression transform; 'gz', 'zst'
 * and 'br' produce single-entry compressed stream writers. BZip2 and XZ
 * are read-only formats and throw ARCHIVE_UNSUPPORTED_FORMAT.
 */
export function createArchiveWriter(format, writable, options) {
    switch (format) {
        case 'zip': {
            const writer = ZipWriter.toWritable(writable, options?.zip);
            return {
                format: 'zip',
                add: (name, source, addOptions) => writer.add(name, source, addOptions),
                close: () => writer.close()
            };
        }
        case 'tar': {
            const writer = TarWriter.toWritable(writable, options?.tar);
            return {
                format: 'tar',
                add: (name, source, addOptions) => writer.add(name, source, addOptions),
                close: () => writer.close()
            };
        }
        case 'tgz':
        case 'tar.gz':
            ensureCompressionWriteSupported('gzip');
            // Keep the caller-supplied spelling ('tgz' vs 'tar.gz').
            return createCompressedTarWriter(format, 'gzip', writable, options);
        case 'tar.zst':
            ensureCompressionWriteSupported('zstd');
            return createCompressedTarWriter('tar.zst', 'zstd', writable, options);
        case 'tar.br':
            ensureCompressionWriteSupported('brotli');
            return createCompressedTarWriter('tar.br', 'brotli', writable, options);
        case 'tar.bz2':
        case 'bz2':
            throw new ArchiveError('ARCHIVE_UNSUPPORTED_FORMAT', 'BZip2 compression is not supported for writing');
        case 'tar.xz':
        case 'xz':
            throw new ArchiveError('ARCHIVE_UNSUPPORTED_FORMAT', 'XZ compression is not supported for writing');
        case 'gz':
            ensureCompressionWriteSupported('gzip');
            return createCompressedStreamWriter('gz', 'gzip', writable, options);
        case 'zst':
            ensureCompressionWriteSupported('zstd');
            return createCompressedStreamWriter('zst', 'zstd', writable, options);
        case 'br':
            ensureCompressionWriteSupported('brotli');
            return createCompressedStreamWriter('br', 'brotli', writable, options);
        default:
            throw new ArchiveError('ARCHIVE_UNSUPPORTED_FORMAT', `Unsupported writer format: ${format}`);
    }
}
|
|
197
|
+
/**
 * Throw COMPRESSION_UNSUPPORTED_ALGORITHM unless the current runtime can
 * compress with the given algorithm.
 */
function ensureCompressionWriteSupported(algorithm) {
    const capabilities = getCompressionCapabilities();
    // Missing entry or no compress support both count as unsupported.
    if (!capabilities.algorithms[algorithm]?.compress) {
        throw new CompressionError('COMPRESSION_UNSUPPORTED_ALGORITHM', `Compression algorithm ${algorithm} is not supported in this runtime`, { algorithm });
    }
}
|
|
204
|
+
/**
 * Create a tar writer whose output is piped through a compression
 * transform before reaching `writable`.
 *
 * The transform, the pipe, and the inner TarWriter are created lazily on
 * the first add()/close() call; `initPromise` caches the single in-flight
 * initialization so concurrent calls share one pipeline. close() awaits
 * the pipe so all compressed bytes are flushed before it resolves.
 */
function createCompressedTarWriter(format, algorithm, writable, options) {
    let initPromise = null;
    const init = async () => {
        if (!initPromise) {
            // Assign the promise synchronously so a second caller reuses it
            // instead of building a second pipeline.
            initPromise = (async () => {
                const transform = await createCompressTransform({
                    algorithm,
                    ...(options?.tar?.signal ? { signal: options.tar.signal } : {}),
                    ...(options?.compression?.level !== undefined ? { level: options.compression.level } : {}),
                    ...(options?.compression?.quality !== undefined ? { quality: options.compression.quality } : {})
                });
                // Start draining compressed output immediately; the promise
                // settles when the pipeline finishes (awaited in close()).
                const pipe = transform.readable.pipeTo(writable, {
                    ...(options?.tar?.signal ? { signal: options.tar.signal } : {})
                });
                const writer = TarWriter.toWritable(transform.writable, options?.tar);
                return { writer, pipe };
            })();
        }
        return initPromise;
    };
    return {
        format,
        add: async (name, source, addOptions) => {
            const { writer } = await init();
            await writer.add(name, source, addOptions);
        },
        close: async () => {
            const { writer, pipe } = await init();
            // Close the tar layer first, then wait for the compressed
            // stream to fully drain into `writable`.
            await writer.close();
            await pipe;
        }
    };
}
|
|
237
|
+
/**
 * Create a single-entry writer that compresses one payload into `writable`.
 *
 * Exactly one add() is permitted (the entry name is ignored); a second
 * add() throws ARCHIVE_UNSUPPORTED_FEATURE. close() without a prior add()
 * compresses an empty payload so the output is still a valid stream.
 */
function createCompressedStreamWriter(format, algorithm, writable, options) {
    let hasEntry = false;
    let completion = null;
    // Compress `source` end-to-end into `writable`.
    const pump = async (source) => {
        const payload = sourceToReadable(source);
        const codec = await createCompressTransform({
            algorithm,
            ...(options?.compression?.level !== undefined ? { level: options.compression.level } : {}),
            ...(options?.compression?.quality !== undefined ? { quality: options.compression.quality } : {})
        });
        // Start draining before feeding so the transform never backs up.
        const drain = codec.readable.pipeTo(writable);
        const feed = payload.pipeTo(codec.writable);
        await Promise.all([feed, drain]);
    };
    return {
        format,
        add: async (_name, source) => {
            if (hasEntry) {
                throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Compressed stream writers accept a single entry');
            }
            hasEntry = true;
            completion = pump(source);
            await completion;
        },
        close: async () => {
            // No entry was ever added: emit a compressed empty payload.
            completion ??= pump(new Uint8Array(0));
            await completion;
        }
    };
}
|
|
269
|
+
/**
 * Normalize an entry source into a ReadableStream.
 * Accepts nothing (empty stream), Uint8Array, ArrayBuffer, a
 * ReadableStream (passed through), or any async iterable of chunks.
 */
function sourceToReadable(input) {
    if (!input) {
        return readableFromBytes(new Uint8Array(0));
    }
    if (input instanceof Uint8Array) {
        return readableFromBytes(input);
    }
    if (input instanceof ArrayBuffer) {
        return readableFromBytes(new Uint8Array(input));
    }
    // Streams pass through untouched; everything else is assumed to be an
    // async iterable of byte chunks.
    return isReadableStream(input) ? input : readableFromAsyncIterable(input);
}
|
|
280
|
+
async function openWithFormat(format, data, options) {
|
|
281
|
+
const internalOptions = options;
|
|
282
|
+
const notes = [];
|
|
283
|
+
if (format === 'zip') {
|
|
284
|
+
const zipOptions = { ...(options?.zip ?? {}) };
|
|
285
|
+
const profile = options?.profile;
|
|
286
|
+
if (profile !== undefined)
|
|
287
|
+
zipOptions.profile = profile;
|
|
288
|
+
if (options?.isStrict !== undefined)
|
|
289
|
+
zipOptions.isStrict = options.isStrict;
|
|
290
|
+
if (options?.limits !== undefined)
|
|
291
|
+
zipOptions.limits = options.limits;
|
|
292
|
+
if (options?.password !== undefined)
|
|
293
|
+
zipOptions.password = options.password;
|
|
294
|
+
const reader = await ZipReader.fromUint8Array(data, zipOptions);
|
|
295
|
+
const openOptions = {
|
|
296
|
+
...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
|
|
297
|
+
...(options?.password !== undefined ? { password: options.password } : {}),
|
|
298
|
+
...(options?.signal ? { signal: options.signal } : {})
|
|
299
|
+
};
|
|
300
|
+
return {
|
|
301
|
+
reader: new ZipArchiveReader(reader, Object.keys(openOptions).length > 0 ? openOptions : undefined),
|
|
302
|
+
format: 'zip',
|
|
303
|
+
notes
|
|
304
|
+
};
|
|
305
|
+
}
|
|
306
|
+
if (format === 'tar') {
|
|
307
|
+
const tarOptions = { ...(options?.tar ?? {}) };
|
|
308
|
+
if (options?.profile !== undefined)
|
|
309
|
+
tarOptions.profile = options.profile;
|
|
310
|
+
if (options?.isStrict !== undefined)
|
|
311
|
+
tarOptions.isStrict = options.isStrict;
|
|
312
|
+
if (options?.limits !== undefined)
|
|
313
|
+
tarOptions.limits = options.limits;
|
|
314
|
+
const reader = await TarReader.fromUint8Array(data, tarOptions);
|
|
315
|
+
const auditDefaults = {
|
|
316
|
+
...(options?.profile !== undefined ? { profile: options.profile } : {}),
|
|
317
|
+
...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
|
|
318
|
+
...(options?.limits !== undefined ? { limits: options.limits } : {})
|
|
319
|
+
};
|
|
320
|
+
return {
|
|
321
|
+
reader: new TarArchiveReader(reader, Object.keys(auditDefaults).length > 0 ? auditDefaults : undefined, 'tar'),
|
|
322
|
+
format: 'tar',
|
|
323
|
+
notes
|
|
324
|
+
};
|
|
325
|
+
}
|
|
326
|
+
if (format === 'gz' || format === 'tgz' || format === 'tar.gz') {
|
|
327
|
+
const header = parseGzipHeader(data);
|
|
328
|
+
const decompressed = await gunzipToBytes(data, options);
|
|
329
|
+
if (format !== 'gz' || detectFormat(decompressed) === 'tar') {
|
|
330
|
+
if (format === 'gz') {
|
|
331
|
+
notes.push('TAR layer detected inside gzip payload');
|
|
332
|
+
}
|
|
333
|
+
const tarOptions = {
|
|
334
|
+
...(options?.profile !== undefined ? { profile: options.profile } : {}),
|
|
335
|
+
...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
|
|
336
|
+
...(options?.limits !== undefined ? { limits: options.limits } : {})
|
|
337
|
+
};
|
|
338
|
+
const tarReader = await TarReader.fromUint8Array(decompressed, tarOptions);
|
|
339
|
+
const auditDefaults = { ...tarOptions };
|
|
340
|
+
return {
|
|
341
|
+
reader: new TarArchiveReader(tarReader, Object.keys(auditDefaults).length > 0 ? auditDefaults : undefined, 'tgz'),
|
|
342
|
+
format: 'tgz',
|
|
343
|
+
notes
|
|
344
|
+
};
|
|
345
|
+
}
|
|
346
|
+
const name = inferGzipEntryName(header, options?.filename);
|
|
347
|
+
return { reader: new GzipArchiveReader(decompressed, header, name), format: 'gz', notes };
|
|
348
|
+
}
|
|
349
|
+
if (format === 'zst' || format === 'tar.zst') {
|
|
350
|
+
const decompressed = await decompressToBytes(data, 'zstd', options);
|
|
351
|
+
if (format === 'tar.zst' || detectFormat(decompressed) === 'tar') {
|
|
352
|
+
if (format !== 'tar.zst') {
|
|
353
|
+
notes.push('TAR layer detected inside zstd payload');
|
|
354
|
+
}
|
|
355
|
+
const tarOptions = {
|
|
356
|
+
...(options?.profile !== undefined ? { profile: options.profile } : {}),
|
|
357
|
+
...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
|
|
358
|
+
...(options?.limits !== undefined ? { limits: options.limits } : {})
|
|
359
|
+
};
|
|
360
|
+
const tarReader = await TarReader.fromUint8Array(decompressed, tarOptions);
|
|
361
|
+
const auditDefaults = { ...tarOptions };
|
|
362
|
+
return {
|
|
363
|
+
reader: new TarArchiveReader(tarReader, Object.keys(auditDefaults).length > 0 ? auditDefaults : undefined, 'tar.zst'),
|
|
364
|
+
format: 'tar.zst',
|
|
365
|
+
notes
|
|
366
|
+
};
|
|
367
|
+
}
|
|
368
|
+
const name = inferZstdEntryName(options?.filename);
|
|
369
|
+
return { reader: new CompressedArchiveReader(decompressed, 'zstd', name), format: 'zst', notes };
|
|
370
|
+
}
|
|
371
|
+
if (format === 'br' || format === 'tar.br') {
|
|
372
|
+
const decompressed = await decompressToBytes(data, 'brotli', options);
|
|
373
|
+
if (format === 'tar.br' || detectFormat(decompressed) === 'tar') {
|
|
374
|
+
if (format !== 'tar.br') {
|
|
375
|
+
notes.push('TAR layer detected inside brotli payload');
|
|
376
|
+
}
|
|
377
|
+
const tarOptions = {
|
|
378
|
+
...(options?.profile !== undefined ? { profile: options.profile } : {}),
|
|
379
|
+
...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
|
|
380
|
+
...(options?.limits !== undefined ? { limits: options.limits } : {})
|
|
381
|
+
};
|
|
382
|
+
const tarReader = await TarReader.fromUint8Array(decompressed, tarOptions);
|
|
383
|
+
const auditDefaults = { ...tarOptions };
|
|
384
|
+
return {
|
|
385
|
+
reader: new TarArchiveReader(tarReader, Object.keys(auditDefaults).length > 0 ? auditDefaults : undefined, 'tar.br'),
|
|
386
|
+
format: 'tar.br',
|
|
387
|
+
notes
|
|
388
|
+
};
|
|
389
|
+
}
|
|
390
|
+
const name = inferBrotliEntryName(options?.filename);
|
|
391
|
+
return { reader: new CompressedArchiveReader(decompressed, 'brotli', name), format: 'br', notes };
|
|
392
|
+
}
|
|
393
|
+
if (format === 'bz2' || format === 'tar.bz2') {
|
|
394
|
+
const preflight = (() => {
|
|
395
|
+
const size = readBzip2BlockSize(data);
|
|
396
|
+
return size !== undefined
|
|
397
|
+
? {
|
|
398
|
+
algorithm: 'bzip2',
|
|
399
|
+
requiredBlockSize: size,
|
|
400
|
+
preflightComplete: false
|
|
401
|
+
}
|
|
402
|
+
: undefined;
|
|
403
|
+
})();
|
|
404
|
+
enforceResourceLimits(preflight, options?.limits, options?.profile);
|
|
405
|
+
const decompressed = await decompressToBytes(data, 'bzip2', options);
|
|
406
|
+
if (format === 'tar.bz2' || detectFormat(decompressed) === 'tar') {
|
|
407
|
+
if (format !== 'tar.bz2') {
|
|
408
|
+
notes.push('TAR layer detected inside bzip2 payload');
|
|
409
|
+
}
|
|
410
|
+
const tarOptions = {
|
|
411
|
+
...(options?.profile !== undefined ? { profile: options.profile } : {}),
|
|
412
|
+
...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
|
|
413
|
+
...(options?.limits !== undefined ? { limits: options.limits } : {})
|
|
414
|
+
};
|
|
415
|
+
const tarReader = await TarReader.fromUint8Array(decompressed, tarOptions);
|
|
416
|
+
const auditDefaults = { ...tarOptions };
|
|
417
|
+
return {
|
|
418
|
+
reader: new TarArchiveReader(tarReader, Object.keys(auditDefaults).length > 0 ? auditDefaults : undefined, 'tar.bz2', preflight),
|
|
419
|
+
format: 'tar.bz2',
|
|
420
|
+
notes
|
|
421
|
+
};
|
|
422
|
+
}
|
|
423
|
+
const name = inferBzip2EntryName(options?.filename);
|
|
424
|
+
return { reader: new CompressedArchiveReader(decompressed, 'bzip2', name, preflight), format: 'bz2', notes };
|
|
425
|
+
}
|
|
426
|
+
if (format === 'xz' || format === 'tar.xz') {
|
|
427
|
+
const profile = options?.profile ?? 'strict';
|
|
428
|
+
const checkType = readXzCheckType(data);
|
|
429
|
+
if (checkType !== undefined && !isSupportedXzCheck(checkType) && profile === 'compat') {
|
|
430
|
+
notes.push(`XZ check type ${formatXzCheck(checkType)} is not verified in compat profile`);
|
|
431
|
+
}
|
|
432
|
+
const seekablePreflight = internalOptions?.__preflight;
|
|
433
|
+
const preflight = (() => {
|
|
434
|
+
const indexLimits = resolveXzIndexLimits(options?.limits, profile);
|
|
435
|
+
const scan = scanXzResourceRequirements(data, {
|
|
436
|
+
...(options?.signal ? { signal: options.signal } : {}),
|
|
437
|
+
maxIndexBytes: indexLimits.maxIndexBytes,
|
|
438
|
+
maxIndexRecords: indexLimits.maxIndexRecords
|
|
439
|
+
});
|
|
440
|
+
if (scan) {
|
|
441
|
+
const merged = {
|
|
442
|
+
algorithm: 'xz',
|
|
443
|
+
requiredDictionaryBytes: scan.maxDictionaryBytes,
|
|
444
|
+
requiredIndexRecords: scan.requiredIndexRecords,
|
|
445
|
+
requiredIndexBytes: scan.requiredIndexBytes
|
|
446
|
+
};
|
|
447
|
+
if (seekablePreflight?.preflightComplete === false) {
|
|
448
|
+
merged.preflightComplete = false;
|
|
449
|
+
if (seekablePreflight.preflightBlockHeaders !== undefined) {
|
|
450
|
+
merged.preflightBlockHeaders = seekablePreflight.preflightBlockHeaders;
|
|
451
|
+
}
|
|
452
|
+
if (seekablePreflight.preflightBlockLimit !== undefined) {
|
|
453
|
+
merged.preflightBlockLimit = seekablePreflight.preflightBlockLimit;
|
|
454
|
+
}
|
|
455
|
+
}
|
|
456
|
+
return merged;
|
|
457
|
+
}
|
|
458
|
+
return seekablePreflight;
|
|
459
|
+
})();
|
|
460
|
+
enforceResourceLimits(preflight, options?.limits, profile);
|
|
461
|
+
const decompressed = await decompressToBytes(data, 'xz', options);
|
|
462
|
+
if (format === 'tar.xz' || detectFormat(decompressed) === 'tar') {
|
|
463
|
+
if (format !== 'tar.xz') {
|
|
464
|
+
notes.push('TAR layer detected inside xz payload');
|
|
465
|
+
}
|
|
466
|
+
const tarOptions = {
|
|
467
|
+
...(options?.profile !== undefined ? { profile: options.profile } : {}),
|
|
468
|
+
...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
|
|
469
|
+
...(options?.limits !== undefined ? { limits: options.limits } : {})
|
|
470
|
+
};
|
|
471
|
+
const tarReader = await TarReader.fromUint8Array(decompressed, tarOptions);
|
|
472
|
+
const auditDefaults = { ...tarOptions };
|
|
473
|
+
return {
|
|
474
|
+
reader: new TarArchiveReader(tarReader, Object.keys(auditDefaults).length > 0 ? auditDefaults : undefined, 'tar.xz', preflight),
|
|
475
|
+
format: 'tar.xz',
|
|
476
|
+
notes
|
|
477
|
+
};
|
|
478
|
+
}
|
|
479
|
+
const name = inferXzEntryName(options?.filename);
|
|
480
|
+
return { reader: new CompressedArchiveReader(decompressed, 'xz', name, preflight), format: 'xz', notes };
|
|
481
|
+
}
|
|
482
|
+
throw new ArchiveError('ARCHIVE_UNSUPPORTED_FORMAT', `Unsupported format: ${format}`);
|
|
483
|
+
}
|
|
484
|
+
/**
 * Normalizes any supported archive input into an in-memory Uint8Array.
 *
 * Accepts raw bytes, an ArrayBuffer, a Blob, or a readable stream; byte caps
 * derived from the caller's limits and the abort signal are applied while
 * reading Blob/stream inputs.
 */
async function resolveInput(input, options) {
    if (input instanceof Uint8Array)
        return input;
    if (input instanceof ArrayBuffer)
        return new Uint8Array(input);
    const maxBytes = resolveInputMaxBytes(options);
    const signal = options?.signal;
    if (isBlobInput(input)) {
        return readBlobBytes(input, {
            ...(signal ? { signal } : {}),
            ...(maxBytes !== undefined ? { maxBytes } : {})
        });
    }
    // Anything else is treated as a readable stream.
    const readOptions = {
        ...(signal ? { signal } : {}),
        ...(maxBytes !== undefined ? { maxBytes } : {})
    };
    return readAllBytes(input, readOptions);
}
|
|
503
|
+
/**
 * Derives the byte cap to apply while reading the raw input.
 *
 * The first configured limit wins: an explicit input cap, then either of the
 * two decompressed-total limit spellings reused as an upper bound on the
 * compressed input itself. Returns undefined when no limit is configured.
 */
function resolveInputMaxBytes(options) {
    const limits = options?.limits;
    if (!limits)
        return undefined;
    for (const key of ['maxInputBytes', 'maxTotalDecompressedBytes', 'maxTotalUncompressedBytes']) {
        if (limits[key] !== undefined)
            return limits[key];
    }
    return undefined;
}
|
|
515
|
+
/**
 * Materializes a Blob into a Uint8Array.
 *
 * Rejects oversized blobs up front using the reported size — the BigInt
 * comparison supports bigint-valued caps — and re-checks the abort signal
 * after the async read completes.
 */
async function readBlobBytes(input, options) {
    const signal = options?.signal;
    throwIfAborted(signal);
    const maxBytes = options?.maxBytes;
    if (maxBytes !== undefined && BigInt(input.size) > toBigInt(maxBytes)) {
        throw new RangeError('Stream exceeds maximum allowed size');
    }
    const buffer = await input.arrayBuffer();
    throwIfAborted(signal);
    return new Uint8Array(buffer);
}
|
|
524
|
+
/**
 * Resolves the maximum allowed bzip2 block-size level (1-9).
 *
 * A numeric caller-provided value is floored and clamped into the valid
 * range; anything else falls back to the package default.
 */
function resolveBzip2BlockLimit(limits) {
    const requested = limits?.maxBzip2BlockSize;
    if (typeof requested !== 'number' || !Number.isFinite(requested)) {
        return DEFAULT_RESOURCE_LIMITS.maxBzip2BlockSize;
    }
    return Math.min(9, Math.max(1, Math.floor(requested)));
}
|
|
531
|
+
// resolveXzIndexLimits and resolveXzDictionaryLimit live in src/archive/xzPreflight.ts
|
|
532
|
+
/**
 * Checks a decompression preflight summary against the configured resource
 * limits and returns the first violation found.
 *
 * @param {object|null|undefined} preflight - Resource requirements gathered
 *   before decompression (`algorithm` plus algorithm-specific `required*`
 *   fields); may be absent.
 * @param {object|undefined} limits - Caller-supplied resource limits.
 * @param {string|undefined} profile - Safety profile forwarded to the XZ
 *   limit resolvers.
 * @returns {{issue: object, context: object}|null} The violation as an audit
 *   issue plus its string-valued context object, or null when no limit is
 *   exceeded (or no preflight data is available).
 */
function resolveResourceLimitIssue(preflight, limits, profile) {
    if (!preflight)
        return null;
    // All violations share the same issue shape; build it in one place
    // instead of repeating the literal for every limit kind.
    const violation = (message, context) => ({
        issue: {
            code: 'COMPRESSION_RESOURCE_LIMIT',
            severity: 'error',
            message,
            details: context
        },
        context
    });
    if (preflight.algorithm === 'bzip2' && preflight.requiredBlockSize !== undefined) {
        const limit = resolveBzip2BlockLimit(limits);
        if (preflight.requiredBlockSize > limit) {
            return violation(`BZip2 block size ${preflight.requiredBlockSize} exceeds limit`, {
                algorithm: 'bzip2',
                requiredBlockSize: String(preflight.requiredBlockSize),
                limitBlockSize: String(limit)
            });
        }
        return null;
    }
    if (preflight.algorithm === 'xz') {
        const indexLimits = resolveXzIndexLimits(limits, profile);
        if (preflight.requiredIndexRecords !== undefined &&
            preflight.requiredIndexRecords > indexLimits.maxIndexRecords) {
            return violation(`XZ index record count ${preflight.requiredIndexRecords} exceeds limit`, {
                algorithm: 'xz',
                requiredIndexRecords: String(preflight.requiredIndexRecords),
                limitIndexRecords: String(indexLimits.maxIndexRecords)
            });
        }
        if (preflight.requiredIndexBytes !== undefined && preflight.requiredIndexBytes > indexLimits.maxIndexBytes) {
            return violation(`XZ index size ${preflight.requiredIndexBytes} exceeds limit`, {
                algorithm: 'xz',
                requiredIndexBytes: String(preflight.requiredIndexBytes),
                limitIndexBytes: String(indexLimits.maxIndexBytes)
            });
        }
        if (preflight.requiredDictionaryBytes !== undefined) {
            // The dictionary limit is compared as BigInt to avoid precision
            // loss on very large requested dictionary sizes.
            const limit = resolveXzDictionaryLimit(limits, profile);
            if (BigInt(preflight.requiredDictionaryBytes) > limit) {
                return violation(`XZ dictionary size ${preflight.requiredDictionaryBytes} exceeds limit`, {
                    algorithm: 'xz',
                    requiredDictionaryBytes: String(preflight.requiredDictionaryBytes),
                    limitDictionaryBytes: limit.toString()
                });
            }
        }
    }
    return null;
}
|
|
612
|
+
/**
 * Throws a CompressionError when the preflight demands more resources than
 * the configured limits allow; returns silently otherwise.
 */
function enforceResourceLimits(preflight, limits, profile) {
    const violation = resolveResourceLimitIssue(preflight, limits, profile);
    if (violation && preflight) {
        throw new CompressionError('COMPRESSION_RESOURCE_LIMIT', violation.issue.message, {
            algorithm: preflight.algorithm,
            context: violation.context
        });
    }
}
|
|
621
|
+
/**
 * Non-throwing counterpart of enforceResourceLimits: records a limit
 * violation on the audit report as an error and flips `ok` to false.
 * Always returns the (mutated) report for chaining.
 */
function appendResourceLimitIssue(report, preflight, limits, profile) {
    const violation = resolveResourceLimitIssue(preflight, limits, profile);
    if (violation) {
        report.issues.push(violation.issue);
        report.summary.errors += 1;
        report.ok = false;
    }
    return report;
}
|
|
630
|
+
/**
 * Records an advisory issue on the audit report when a resource preflight
 * reported itself incomplete. bzip2 incompleteness is a warning (concatenated
 * members are not scanned); xz incompleteness is informational only.
 * Returns the (possibly mutated) report.
 */
function appendResourcePreflightIssue(report, preflight) {
    // Only an explicit `preflightComplete: false` triggers an issue.
    if (preflight?.preflightComplete !== false)
        return report;
    const context = { algorithm: preflight.algorithm };
    let severity;
    let message;
    switch (preflight.algorithm) {
        case 'bzip2':
            severity = 'warning';
            message = 'Resource preflight does not scan concatenated bzip2 members';
            break;
        case 'xz':
            severity = 'info';
            message = 'Resource preflight did not scan all XZ block headers';
            if (preflight.preflightBlockHeaders !== undefined) {
                context.requiredBlockHeaders = String(preflight.preflightBlockHeaders);
            }
            if (preflight.preflightBlockLimit !== undefined) {
                context.limitBlockHeaders = String(preflight.preflightBlockLimit);
            }
            break;
        default:
            severity = 'warning';
            message = 'Resource preflight is incomplete';
    }
    report.issues.push({
        code: 'COMPRESSION_RESOURCE_PREFLIGHT_INCOMPLETE',
        severity,
        message,
        details: context
    });
    if (severity === 'warning')
        report.summary.warnings += 1;
    return report;
}
|
|
659
|
+
function detectFormat(data) {
|
|
660
|
+
if (data.length >= 2 && data[0] === 0x1f && data[1] === 0x8b) {
|
|
661
|
+
return 'gz';
|
|
662
|
+
}
|
|
663
|
+
if (data.length >= 4 && data[0] === 0x42 && data[1] === 0x5a && data[2] === 0x68) {
|
|
664
|
+
const level = data[3] ?? 0;
|
|
665
|
+
if (level >= 0x31 && level <= 0x39)
|
|
666
|
+
return 'bz2';
|
|
667
|
+
}
|
|
668
|
+
if (data.length >= 4 && isZstdHeader(data)) {
|
|
669
|
+
return 'zst';
|
|
670
|
+
}
|
|
671
|
+
if (data.length >= 6 && isXzHeader(data)) {
|
|
672
|
+
return 'xz';
|
|
673
|
+
}
|
|
674
|
+
if (data.length >= 4 && data[0] === 0x50 && data[1] === 0x4b) {
|
|
675
|
+
const sig = ((data[2] ?? 0) << 8) | (data[3] ?? 0);
|
|
676
|
+
if (sig === 0x0304 || sig === 0x0506 || sig === 0x0708) {
|
|
677
|
+
return 'zip';
|
|
678
|
+
}
|
|
679
|
+
}
|
|
680
|
+
if (data.length >= 512 && isTarHeader(data.subarray(0, 512))) {
|
|
681
|
+
return 'tar';
|
|
682
|
+
}
|
|
683
|
+
return undefined;
|
|
684
|
+
}
|
|
685
|
+
/**
 * Maps a filename extension to a format tag (case-insensitive).
 *
 * Suffixes are checked most-specific first so that ".tar.gz" wins over
 * ".gz" and ".tbz" wins over ".bz". Returns undefined for unknown or
 * missing filenames.
 */
function formatFromFilename(filename) {
    if (!filename)
        return undefined;
    const lower = filename.toLowerCase();
    const suffixToFormat = [
        ['.tar.gz', 'tgz'], ['.tgz', 'tgz'],
        ['.tar.bz2', 'tar.bz2'], ['.tbz2', 'tar.bz2'], ['.tbz', 'tar.bz2'],
        ['.tar.zst', 'tar.zst'], ['.tzst', 'tar.zst'],
        ['.tar.br', 'tar.br'], ['.tbr', 'tar.br'],
        ['.tar.xz', 'tar.xz'], ['.txz', 'tar.xz'],
        ['.tar', 'tar'],
        ['.zip', 'zip'],
        ['.gz', 'gz'],
        ['.bz2', 'bz2'], ['.bz', 'bz2'],
        ['.zst', 'zst'],
        ['.br', 'br'],
        ['.xz', 'xz']
    ];
    const match = suffixToFormat.find(([suffix]) => lower.endsWith(suffix));
    return match?.[1];
}
|
|
717
|
+
/**
 * Reduces a possibly path-qualified name to a safe bare filename.
 *
 * Rejects missing names, NUL bytes, and names that trim down to empty,
 * "." or ".." — returning undefined in those cases.
 */
function sanitizeSingleFileName(name) {
    if (!name || name.includes('\u0000'))
        return undefined;
    // Keep only the last segment of either slash style.
    const segments = name.split(/[\\/]/);
    const candidate = (segments.pop() ?? name).trim();
    if (candidate === '' || candidate === '.' || candidate === '..')
        return undefined;
    return candidate;
}
|
|
728
|
+
/**
 * Chooses a display name for the single entry of a gzip archive.
 *
 * Prefers the name embedded in the gzip header; otherwise derives one from
 * the source filename by stripping the gzip suffix (turning tarball suffixes
 * into ".tar"). Falls back to the literal name "data".
 */
function inferGzipEntryName(header, filename) {
    const embedded = sanitizeSingleFileName(header.name);
    if (embedded)
        return embedded;
    const base = sanitizeSingleFileName(filename);
    if (!base)
        return 'data';
    const lower = base.toLowerCase();
    // Strip `suffixLength` chars; tarball suffixes become a ".tar" name.
    const strip = (suffixLength, isTarball) => {
        const stem = base.slice(0, -suffixLength);
        if (isTarball)
            return stem ? `${stem}.tar` : 'data';
        return stem || 'data';
    };
    if (lower.endsWith('.tar.gz'))
        return strip(7, true);
    if (lower.endsWith('.tgz'))
        return strip(4, true);
    if (lower.endsWith('.gz'))
        return strip(3, false);
    return 'data';
}
|
|
750
|
+
/**
 * Derives a display name for the single entry of a brotli archive from the
 * source filename, stripping the brotli suffix (tarball suffixes become
 * ".tar"). Falls back to "data".
 */
function inferBrotliEntryName(filename) {
    const base = sanitizeSingleFileName(filename);
    if (!base)
        return 'data';
    const lower = base.toLowerCase();
    const strip = (suffixLength, isTarball) => {
        const stem = base.slice(0, -suffixLength);
        if (isTarball)
            return stem ? `${stem}.tar` : 'data';
        return stem || 'data';
    };
    if (lower.endsWith('.tar.br'))
        return strip(7, true);
    if (lower.endsWith('.tbr'))
        return strip(4, true);
    if (lower.endsWith('.br'))
        return strip(3, false);
    return 'data';
}
|
|
769
|
+
/**
 * Derives a display name for the single entry of a zstd archive from the
 * source filename, stripping the zstd suffix (tarball suffixes become
 * ".tar"). Falls back to "data".
 */
function inferZstdEntryName(filename) {
    const base = sanitizeSingleFileName(filename);
    if (!base)
        return 'data';
    const lower = base.toLowerCase();
    const strip = (suffixLength, isTarball) => {
        const stem = base.slice(0, -suffixLength);
        if (isTarball)
            return stem ? `${stem}.tar` : 'data';
        return stem || 'data';
    };
    if (lower.endsWith('.tar.zst'))
        return strip(8, true);
    if (lower.endsWith('.tzst'))
        return strip(5, true);
    if (lower.endsWith('.zst'))
        return strip(4, false);
    return 'data';
}
|
|
788
|
+
/**
 * Derives a display name for the single entry of a bzip2 archive from the
 * source filename, stripping the bzip2 suffix (tarball suffixes become
 * ".tar"). Falls back to "data".
 */
function inferBzip2EntryName(filename) {
    const base = sanitizeSingleFileName(filename);
    if (!base)
        return 'data';
    const lower = base.toLowerCase();
    const strip = (suffixLength, isTarball) => {
        const stem = base.slice(0, -suffixLength);
        if (isTarball)
            return stem ? `${stem}.tar` : 'data';
        return stem || 'data';
    };
    if (lower.endsWith('.tar.bz2'))
        return strip(8, true);
    if (lower.endsWith('.tbz2'))
        return strip(5, true);
    if (lower.endsWith('.tbz'))
        return strip(4, true);
    if (lower.endsWith('.bz2'))
        return strip(4, false);
    if (lower.endsWith('.bz'))
        return strip(3, false);
    return 'data';
}
|
|
815
|
+
/**
 * Derives a display name for the single entry of an xz archive from the
 * source filename, stripping the xz suffix (tarball suffixes become
 * ".tar"). Falls back to "data".
 */
function inferXzEntryName(filename) {
    const base = sanitizeSingleFileName(filename);
    if (!base)
        return 'data';
    const lower = base.toLowerCase();
    const strip = (suffixLength, isTarball) => {
        const stem = base.slice(0, -suffixLength);
        if (isTarball)
            return stem ? `${stem}.tar` : 'data';
        return stem || 'data';
    };
    if (lower.endsWith('.tar.xz'))
        return strip(7, true);
    if (lower.endsWith('.txz'))
        return strip(4, true);
    if (lower.endsWith('.xz'))
        return strip(3, false);
    return 'data';
}
|
|
834
|
+
/**
 * Classifies the input for reporting purposes ('bytes' | 'blob' | 'stream').
 * An explicit hint always wins; otherwise the runtime type decides.
 */
function resolveInputKind(input, hint) {
    if (hint)
        return hint;
    if (input instanceof Uint8Array || input instanceof ArrayBuffer)
        return 'bytes';
    return isBlobInput(input) ? 'blob' : 'stream';
}
|
|
843
|
+
/**
 * True when the input is a Blob; guarded for runtimes without a global
 * Blob constructor.
 */
function isBlobInput(input) {
    if (typeof Blob === 'undefined')
        return false;
    return input instanceof Blob;
}
|
|
846
|
+
/**
 * Coerces a byte-count value to BigInt, passing existing bigints through
 * untouched.
 */
function toBigInt(value) {
    if (typeof value === 'bigint')
        return value;
    return BigInt(value);
}
|
|
849
|
+
/**
 * Builds the schema-versioned detection report for a recognized (or
 * unrecognized) format.
 *
 * Each known format maps to its container/compression pair and the ordered
 * list of layers from outermost to innermost; unknown formats yield an
 * empty layer list with no container/compression fields.
 */
function buildDetectionReport(inputKind, format, confidence, notes) {
    const specs = new Map([
        ['zip', { container: 'zip', compression: 'none', layers: ['zip'] }],
        ['tar', { container: 'tar', compression: 'none', layers: ['tar'] }],
        ['gz', { compression: 'gzip', layers: ['gzip'] }],
        ['tgz', { container: 'tar', compression: 'gzip', layers: ['gzip', 'tar'] }],
        ['tar.gz', { container: 'tar', compression: 'gzip', layers: ['gzip', 'tar'] }],
        ['zst', { compression: 'zstd', layers: ['zstd'] }],
        ['br', { compression: 'brotli', layers: ['brotli'] }],
        ['tar.zst', { container: 'tar', compression: 'zstd', layers: ['zstd', 'tar'] }],
        ['tar.br', { container: 'tar', compression: 'brotli', layers: ['brotli', 'tar'] }],
        ['bz2', { compression: 'bzip2', layers: ['bzip2'] }],
        ['tar.bz2', { container: 'tar', compression: 'bzip2', layers: ['bzip2', 'tar'] }],
        ['xz', { compression: 'xz', layers: ['xz'] }],
        ['tar.xz', { container: 'tar', compression: 'xz', layers: ['xz', 'tar'] }]
    ]);
    const detected = { layers: [] };
    const spec = specs.get(format);
    if (spec) {
        if (spec.container !== undefined) {
            detected.container = spec.container;
        }
        detected.compression = spec.compression;
        // Copy so callers can never mutate the lookup table.
        detected.layers = [...spec.layers];
    }
    return {
        schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
        inputKind,
        detected,
        confidence,
        notes
    };
}
|
|
919
|
+
// Magic-byte prefixes identifying zstd frames and XZ streams respectively.
const ZSTD_MAGIC = Uint8Array.of(0x28, 0xb5, 0x2f, 0xfd);
const XZ_MAGIC = Uint8Array.of(0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00);
|
|
921
|
+
/**
 * True when the buffer begins with the 4-byte zstd frame magic.
 */
function isZstdHeader(data) {
    if (data.length < ZSTD_MAGIC.length)
        return false;
    return ZSTD_MAGIC.every((expected, index) => data[index] === expected);
}
|
|
930
|
+
/**
 * True when the buffer begins with the 6-byte XZ stream magic.
 */
function isXzHeader(data) {
    if (data.length < XZ_MAGIC.length)
        return false;
    return XZ_MAGIC.every((expected, index) => data[index] === expected);
}
|
|
939
|
+
/**
 * Extracts the check-type id from the XZ stream-header flags (bytes 6-7).
 *
 * Returns undefined for non-XZ data, buffers too short to hold a full
 * stream header, or headers with reserved flag bits set.
 */
function readXzCheckType(data) {
    if (!isXzHeader(data) || data.length < 12)
        return undefined;
    const flagHigh = data[6];
    const flagLow = data[7];
    // The first flag byte and the high nibble of the second are reserved.
    const reservedBitsSet = flagHigh !== 0x00 || (flagLow & 0xf0) !== 0;
    return reservedBitsSet ? undefined : flagLow & 0x0f;
}
|
|
948
|
+
/**
 * True for the XZ check types this package verifies:
 * none (0x00), CRC32 (0x01) and CRC64 (0x04).
 */
function isSupportedXzCheck(checkType) {
    return [0x00, 0x01, 0x04].includes(checkType);
}
|
|
951
|
+
/**
 * Human-readable label for an XZ check-type id; unknown ids are rendered
 * as a hex literal.
 */
function formatXzCheck(checkType) {
    const names = { 0x00: 'none', 0x01: 'crc32', 0x04: 'crc64', 0x0a: 'sha256' };
    return names[checkType] ?? `0x${checkType.toString(16)}`;
}
|
|
962
|
+
/**
 * Heuristic check that a 512-byte block is a plausible tar header: the
 * stored checksum (when parseable) must match the computed one, and the
 * magic field at offset 257 must be "ustar" or empty (pre-POSIX tar).
 */
function isTarHeader(block) {
    const stored = parseOctal(block.subarray(148, 156));
    if (stored !== undefined && stored !== computeChecksum(block))
        return false;
    const magic = readString(block, 257, 6);
    return magic === '' || magic === 'ustar' || magic === 'ustar\0';
}
|
|
970
|
+
/**
 * Decodes a fixed-width, possibly NUL-terminated UTF-8 field from `buffer`
 * and trims surrounding whitespace.
 */
function readString(buffer, start, length) {
    const window = buffer.subarray(start, start + length);
    const nulIndex = window.indexOf(0);
    const field = nulIndex === -1 ? window : window.subarray(0, nulIndex);
    return new TextDecoder('utf-8').decode(field).trim();
}
|
|
978
|
+
/**
 * Parses a NUL-terminated octal tar field. Returns undefined for blank
 * fields or values that do not parse to a finite number.
 */
function parseOctal(buffer) {
    const text = decodeNullTerminatedUtf8(buffer).trim();
    if (text.length === 0)
        return undefined;
    const parsed = Number.parseInt(text, 8);
    return Number.isFinite(parsed) ? parsed : undefined;
}
|
|
985
|
+
/**
 * Computes the tar header checksum: the byte sum of the 512-byte header
 * with the checksum field itself (offsets 148-155) counted as ASCII spaces.
 */
function computeChecksum(header) {
    let total = 0;
    header.forEach((byte, index) => {
        total += index >= 148 && index < 156 ? 0x20 : byte;
    });
    return total;
}
|
|
995
|
+
// Convenience wrapper: gunzip is decompressToBytes fixed to the gzip algorithm.
async function gunzipToBytes(data, options) {
    return decompressToBytes(data, 'gzip', options);
}
|
|
998
|
+
/**
 * Fully decompresses `data` in memory with the given algorithm.
 *
 * The caller's resource limits are applied twice: mapped onto the
 * decompressor's own options, and again as a byte cap on the output reader.
 * Newer limit names (maxTotalDecompressedBytes, maxXzDictionaryBytes) take
 * precedence over their older spellings (maxTotalUncompressedBytes,
 * maxDictionaryBytes).
 *
 * Known error types (CompressionError, RangeError, AbortError) are rethrown
 * as-is; anything else is wrapped as COMPRESSION_BACKEND_UNAVAILABLE with
 * the original error attached as `cause`.
 */
async function decompressToBytes(data, algorithm, options) {
    const limits = options?.limits;
    // Translate the public limits vocabulary into decompressor options; each
    // spread contributes its key only when the corresponding limit is set.
    const transform = createDecompressor({
        algorithm,
        ...(options?.signal ? { signal: options.signal } : {}),
        ...(limits?.maxTotalDecompressedBytes !== undefined
            ? { maxOutputBytes: limits.maxTotalDecompressedBytes }
            : limits?.maxTotalUncompressedBytes !== undefined
                ? { maxOutputBytes: limits.maxTotalUncompressedBytes }
                : {}),
        ...(limits?.maxCompressionRatio !== undefined ? { maxCompressionRatio: limits.maxCompressionRatio } : {}),
        ...(limits?.maxXzDictionaryBytes !== undefined
            ? { maxDictionaryBytes: limits.maxXzDictionaryBytes }
            : limits?.maxDictionaryBytes !== undefined
                ? { maxDictionaryBytes: limits.maxDictionaryBytes }
                : {}),
        ...(limits?.maxXzBufferedBytes !== undefined ? { maxBufferedInputBytes: limits.maxXzBufferedBytes } : {}),
        ...(limits?.maxBzip2BlockSize !== undefined ? { maxBzip2BlockSize: limits.maxBzip2BlockSize } : {}),
        // The full limits object is also forwarded verbatim, in addition to
        // the individual translations above.
        ...(limits ? { limits } : {}),
        ...(options?.profile ? { profile: options.profile } : {})
    });
    const stream = readableFromBytes(data).pipeThrough(transform);
    const readOptions = {};
    if (options?.signal)
        readOptions.signal = options.signal;
    // Cap the reader with the same output-byte limit as the decompressor
    // (same precedence between the two limit spellings).
    if (limits?.maxTotalDecompressedBytes !== undefined) {
        readOptions.maxBytes = limits.maxTotalDecompressedBytes;
    }
    else if (limits?.maxTotalUncompressedBytes !== undefined) {
        readOptions.maxBytes = limits.maxTotalUncompressedBytes;
    }
    try {
        return await readAllBytes(stream, readOptions);
    }
    catch (err) {
        // Preserve errors callers are expected to catch by type/name.
        if (err instanceof CompressionError)
            throw err;
        if (err instanceof RangeError)
            throw err;
        if (err && typeof err === 'object' && err.name === 'AbortError')
            throw err;
        // Anything else is treated as a backend failure, preserving the
        // original error as the cause.
        throw new CompressionError('COMPRESSION_BACKEND_UNAVAILABLE', 'Compression backend failed', {
            algorithm,
            cause: err
        });
    }
}
|
|
1045
|
+
/**
 * Parses the gzip member header (RFC 1952 layout) and extracts the optional
 * original filename and modification time.
 *
 * Walks the optional fields in wire order — FEXTRA (0x04), FNAME (0x08),
 * FCOMMENT (0x10), FHCRC (0x02) — throwing COMPRESSION_GZIP_BAD_HEADER when
 * the buffer ends mid-field or the header CRC16 does not match. Buffers
 * shorter than the 10-byte fixed header yield an empty result.
 */
function parseGzipHeader(data) {
    if (data.length < 10)
        return {};
    const flags = data[3];
    const mtime = readUint32LE(data, 4);
    let offset = 10;
    // FEXTRA: 2-byte little-endian length followed by that many bytes.
    if (flags & 0x04) {
        if (offset + 2 > data.length) {
            throw new CompressionError('COMPRESSION_GZIP_BAD_HEADER', 'Gzip header truncated', {
                algorithm: 'gzip'
            });
        }
        const xlen = data[offset] | (data[offset + 1] << 8);
        if (offset + 2 + xlen > data.length) {
            throw new CompressionError('COMPRESSION_GZIP_BAD_HEADER', 'Gzip header truncated', {
                algorithm: 'gzip'
            });
        }
        offset += 2 + xlen;
    }
    let name;
    // FNAME: NUL-terminated original filename (Latin-1 per RFC 1952).
    if (flags & 0x08) {
        const start = offset;
        while (offset < data.length && data[offset] !== 0)
            offset += 1;
        if (offset >= data.length) {
            throw new CompressionError('COMPRESSION_GZIP_BAD_HEADER', 'Gzip header truncated', {
                algorithm: 'gzip'
            });
        }
        name = decodeLatin1(data.subarray(start, offset));
        offset += 1;
    }
    // FCOMMENT: NUL-terminated comment; skipped, not captured.
    if (flags & 0x10) {
        while (offset < data.length && data[offset] !== 0)
            offset += 1;
        if (offset >= data.length) {
            throw new CompressionError('COMPRESSION_GZIP_BAD_HEADER', 'Gzip header truncated', {
                algorithm: 'gzip'
            });
        }
        offset += 1;
    }
    // FHCRC: CRC16 of the header bytes seen so far (low 16 bits of CRC32).
    if (flags & 0x02) {
        if (offset + 2 > data.length) {
            throw new CompressionError('COMPRESSION_GZIP_BAD_HEADER', 'Gzip header truncated', {
                algorithm: 'gzip'
            });
        }
        const stored = data[offset] | (data[offset + 1] << 8);
        // NOTE(review): the ^ 0xffffffff suggests the crc32() helper returns
        // the pre-finalized (inverted) value — confirm against its
        // implementation; a finalized crc32 would make this check fail.
        const computed = (crc32(data.subarray(0, offset)) ^ 0xffffffff) & 0xffff;
        if (stored !== computed) {
            throw new CompressionError('COMPRESSION_GZIP_BAD_HEADER', 'Gzip header CRC mismatch', {
                algorithm: 'gzip',
                context: { stored: String(stored), expected: String(computed) }
            });
        }
        offset += 2;
    }
    const header = {};
    if (name !== undefined)
        header.name = name;
    // mtime === 0 means "not available" in gzip, hence the truthiness check.
    if (mtime)
        header.mtime = new Date(mtime * 1000);
    return header;
}
|
|
1111
|
+
/**
 * Decodes bytes as Latin-1 (the gzip FNAME encoding), falling back to UTF-8
 * on runtimes whose TextDecoder does not accept the 'latin1' label.
 */
function decodeLatin1(bytes) {
    let decoder;
    try {
        decoder = new TextDecoder('latin1');
    }
    catch {
        decoder = new TextDecoder('utf-8');
    }
    return decoder.decode(bytes);
}
|
|
1119
|
+
/**
 * Reads an unsigned little-endian 32-bit integer at `offset`; `>>> 0`
 * coerces the signed bitwise result back to unsigned.
 */
function readUint32LE(data, offset) {
    const b0 = data[offset];
    const b1 = data[offset + 1];
    const b2 = data[offset + 2];
    const b3 = data[offset + 3];
    return ((b3 << 24) | (b2 << 16) | (b1 << 8) | b0) >>> 0;
}
|
|
1122
|
+
class ZipArchiveReader {
|
|
1123
|
+
// Underlying zip reader implementation this adapter delegates to.
reader;
// Options forwarded to reader.open() for every entry opened via entries().
openOptions;
// Archive format tag exposed to consumers of the unified reader interface.
format = 'zip';
// Detection report slot — not set here; presumably populated by the factory
// after construction (TODO confirm against the call site).
detection;
constructor(reader, openOptions) {
    this.reader = reader;
    this.openOptions = openOptions;
}
|
|
1131
|
+
/**
 * Streams normalized archive entries from the underlying zip reader.
 * Each yielded object maps the zip-specific entry onto the unified entry
 * shape; `raw` retains the original zip entry.
 */
async *entries() {
    for await (const entry of this.reader.iterEntries()) {
        yield {
            format: 'zip',
            name: entry.name,
            size: entry.uncompressedSize,
            isDirectory: entry.isDirectory,
            isSymlink: entry.isSymlink,
            mtime: entry.mtime,
            // Lazy: decompression happens only when the caller invokes open().
            open: () => this.reader.open(entry, this.openOptions),
            raw: entry
        };
    }
}
|
|
1145
|
+
/**
 * Runs the underlying zip audit and converts its report into the unified
 * archive-report shape (schema-versioned, with string-valued offsets and
 * sanitized details). A `toJSON` method is attached so the report
 * serializes to just its data fields.
 */
async audit(options) {
    const profile = options?.profile;
    // Build the zip-level audit options, including each key only when set.
    const zipOptions = {
        ...(profile !== undefined ? { profile: profile } : {}),
        ...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
        ...(options?.limits !== undefined ? { limits: options.limits } : {}),
        ...(options?.signal ? { signal: options.signal } : {})
    };
    const report = await this.reader.audit(zipOptions);
    const archiveReport = {
        schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
        ok: report.ok,
        summary: {
            entries: report.summary.entries,
            warnings: report.summary.warnings,
            errors: report.summary.errors,
            // NOTE(review): this maps the zip report's trailingBytes onto
            // totalBytes — looks like a field-name mismatch; confirm intended.
            ...(report.summary.trailingBytes !== undefined ? { totalBytes: report.summary.trailingBytes } : {})
        },
        issues: report.issues.map((issue) => ({
            code: issue.code,
            severity: issue.severity,
            message: issue.message,
            ...(issue.entryName ? { entryName: issue.entryName } : {}),
            // Offsets are stringified (they may be bigint in the zip report).
            ...(issue.offset !== undefined ? { offset: issue.offset.toString() } : {}),
            ...(issue.details ? { details: sanitizeDetails(issue.details) } : {})
        }))
    };
    // Explicit toJSON keeps the serialized form free of the method itself.
    archiveReport.toJSON = () => ({
        schemaVersion: archiveReport.schemaVersion,
        ok: archiveReport.ok,
        summary: archiveReport.summary,
        issues: archiveReport.issues
    });
    return archiveReport;
}
|
|
1180
|
+
async close() {
|
|
1181
|
+
await this.reader.close();
|
|
1182
|
+
}
|
|
1183
|
+
async assertSafe(options) {
|
|
1184
|
+
const profile = options?.profile;
|
|
1185
|
+
const auditOptions = {
|
|
1186
|
+
...(profile !== undefined ? { profile: profile } : {}),
|
|
1187
|
+
...(options?.isStrict !== undefined ? { isStrict: options.isStrict } : {}),
|
|
1188
|
+
...(options?.limits !== undefined ? { limits: options.limits } : {}),
|
|
1189
|
+
...(options?.signal ? { signal: options.signal } : {})
|
|
1190
|
+
};
|
|
1191
|
+
await this.reader.assertSafe(auditOptions);
|
|
1192
|
+
}
|
|
1193
|
+
async normalizeToWritable(writable, options) {
|
|
1194
|
+
const normalizeOptions = {
|
|
1195
|
+
...(options?.isDeterministic !== undefined ? { isDeterministic: options.isDeterministic } : {}),
|
|
1196
|
+
...(options?.signal ? { signal: options.signal } : {})
|
|
1197
|
+
};
|
|
1198
|
+
const report = await this.reader.normalizeToWritable(writable, normalizeOptions);
|
|
1199
|
+
const archiveReport = {
|
|
1200
|
+
schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
|
|
1201
|
+
ok: report.ok,
|
|
1202
|
+
summary: {
|
|
1203
|
+
entries: report.summary.entries,
|
|
1204
|
+
outputEntries: report.summary.outputEntries,
|
|
1205
|
+
droppedEntries: report.summary.droppedEntries,
|
|
1206
|
+
renamedEntries: report.summary.renamedEntries,
|
|
1207
|
+
warnings: report.summary.warnings,
|
|
1208
|
+
errors: report.summary.errors
|
|
1209
|
+
},
|
|
1210
|
+
issues: report.issues.map((issue) => ({
|
|
1211
|
+
code: issue.code,
|
|
1212
|
+
severity: issue.severity,
|
|
1213
|
+
message: issue.message,
|
|
1214
|
+
...(issue.entryName ? { entryName: issue.entryName } : {}),
|
|
1215
|
+
...(issue.offset !== undefined ? { offset: issue.offset.toString() } : {}),
|
|
1216
|
+
...(issue.details ? { details: sanitizeDetails(issue.details) } : {})
|
|
1217
|
+
}))
|
|
1218
|
+
};
|
|
1219
|
+
archiveReport.toJSON = () => ({
|
|
1220
|
+
schemaVersion: archiveReport.schemaVersion,
|
|
1221
|
+
ok: archiveReport.ok,
|
|
1222
|
+
summary: archiveReport.summary,
|
|
1223
|
+
issues: archiveReport.issues
|
|
1224
|
+
});
|
|
1225
|
+
return archiveReport;
|
|
1226
|
+
}
|
|
1227
|
+
}
|
|
1228
|
+
// Adapter that exposes the low-level TAR reader (plain tar or a compressed
// variant) through the unified archive reader interface.
class TarArchiveReader {
    reader;        // underlying TAR reader
    auditDefaults; // default profile/isStrict/limits applied when audit options omit them
    format;        // 'tar' by default; compressed variants pass their own label
    preflight;     // decompression-preflight result, if any, merged into audit reports
    detection;     // format-detection result; assigned by the caller after construction
    constructor(reader, auditDefaults, format = 'tar', preflight) {
        this.reader = reader;
        this.auditDefaults = auditDefaults;
        this.format = format;
        this.preflight = preflight;
    }
    // Yields one normalized entry record per TAR entry. Optional fields
    // (mtime/mode/uid/gid/linkName) are attached only when present so the
    // record carries no explicit `undefined` properties.
    async *entries() {
        for await (const entry of this.reader.iterEntries()) {
            const archiveEntry = {
                format: this.format,
                name: entry.name,
                size: entry.size,
                isDirectory: entry.isDirectory,
                isSymlink: entry.isSymlink,
                open: () => this.reader.open(entry),
                raw: entry
            };
            if (entry.mtime)
                archiveEntry.mtime = entry.mtime;
            if (entry.mode !== undefined)
                archiveEntry.mode = entry.mode;
            if (entry.uid !== undefined)
                archiveEntry.uid = entry.uid;
            if (entry.gid !== undefined)
                archiveEntry.gid = entry.gid;
            if (entry.linkName !== undefined)
                archiveEntry.linkName = entry.linkName;
            yield archiveEntry;
        }
    }
    // Audits the TAR stream, then folds any decompression-preflight findings
    // into the report. Explicitly passed options win over auditDefaults.
    async audit(options) {
        const profile = options?.profile ?? this.auditDefaults?.profile;
        const isStrict = options?.isStrict ?? this.auditDefaults?.isStrict;
        const limits = options?.limits ?? this.auditDefaults?.limits;
        // Forward only defined options so the reader keeps its own defaults.
        const tarOptions = {
            ...(profile !== undefined ? { profile } : {}),
            ...(isStrict !== undefined ? { isStrict } : {}),
            ...(limits !== undefined ? { limits } : {}),
            ...(options?.signal ? { signal: options.signal } : {})
        };
        const report = await this.reader.audit(tarOptions);
        appendResourcePreflightIssue(report, this.preflight);
        return appendResourceLimitIssue(report, this.preflight, limits, profile);
    }
    // Throws ARCHIVE_AUDIT_FAILED when the audit reports any error.
    async assertSafe(options) {
        const report = await this.audit(options);
        if (!report.ok) {
            throw new ArchiveError('ARCHIVE_AUDIT_FAILED', 'TAR audit failed');
        }
    }
    // Rewrites the archive into `writable`, enforcing preflight resource
    // limits up front before streaming begins.
    async normalizeToWritable(writable, options) {
        const limits = options?.limits ?? this.auditDefaults?.limits;
        enforceResourceLimits(this.preflight, limits, this.auditDefaults?.profile);
        const tarOptions = {
            ...(options?.isDeterministic !== undefined ? { isDeterministic: options.isDeterministic } : {}),
            ...(options?.signal ? { signal: options.signal } : {}),
            // Only explicitly provided limits are forwarded; merged defaults
            // were already enforced above.
            ...(options?.limits !== undefined ? { limits: options.limits } : {})
        };
        return this.reader.normalizeToWritable(writable, tarOptions);
    }
}
|
|
1295
|
+
// Reader for a single gzip member: the decompressed payload is exposed as
// a one-entry "archive".
class GzipArchiveReader {
    data;          // decompressed payload bytes
    format = 'gz';
    detection;     // format-detection result; assigned by the caller after construction
    entry;         // the single synthetic entry describing the payload
    constructor(data, header, name) {
        this.data = data;
        const entry = {
            format: 'gz',
            name,
            size: BigInt(data.length),
            isDirectory: false,
            isSymlink: false,
            open: async () => readableFromBytes(this.data)
        };
        // The gzip header may carry an MTIME; attach it only when set.
        if (header.mtime)
            entry.mtime = header.mtime;
        this.entry = entry;
    }
    // Yields the single synthetic entry.
    async *entries() {
        yield this.entry;
    }
    // Audits the single entry: enforces the total-decompressed-bytes limit
    // and checks the stored name for unsafe path constructs.
    async audit(options) {
        const issues = [];
        const summary = {
            entries: 1,
            warnings: 0,
            errors: 0
        };
        // summary.totalBytes is a plain number; it is omitted when the size
        // cannot be represented exactly as a double.
        const totalBytes = this.entry.size > BigInt(Number.MAX_SAFE_INTEGER) ? undefined : Number(this.entry.size);
        if (totalBytes !== undefined)
            summary.totalBytes = totalBytes;
        // maxTotalUncompressedBytes is accepted as an alias for the limit.
        const maxTotal = options?.limits?.maxTotalDecompressedBytes ?? options?.limits?.maxTotalUncompressedBytes;
        if (maxTotal !== undefined && this.entry.size > BigInt(maxTotal)) {
            issues.push({
                code: 'GZIP_LIMIT_EXCEEDED',
                severity: 'error',
                message: 'Uncompressed size exceeds limit',
                entryName: this.entry.name
            });
            summary.errors += 1;
        }
        const pathIssues = entryPathIssues(this.entry.name);
        for (const issue of pathIssues) {
            issues.push(issue);
            if (issue.severity === 'warning')
                summary.warnings += 1;
            if (issue.severity === 'error')
                summary.errors += 1;
        }
        return {
            schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
            ok: summary.errors === 0,
            summary,
            issues,
            toJSON: () => ({ schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION, ok: summary.errors === 0, summary, issues })
        };
    }
    // Throws ARCHIVE_AUDIT_FAILED when the audit reports any error.
    async assertSafe(options) {
        const report = await this.audit(options);
        if (!report.ok) {
            throw new ArchiveError('ARCHIVE_AUDIT_FAILED', 'GZIP audit failed');
        }
    }
    // Normalization has no meaning for a single compressed payload.
    async normalizeToWritable() {
        throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Normalization is not supported for single-file compressed formats');
    }
}
|
|
1363
|
+
// Reader for a single-file compressed payload (zstd/brotli/xz/bzip2): the
// decompressed data is exposed as a one-entry "archive".
class CompressedArchiveReader {
    data;          // decompressed payload bytes
    format;        // short format tag derived from the algorithm
    detection;     // format-detection result; assigned by the caller after construction
    entry;         // the single synthetic entry describing the payload
    algorithm;     // 'zstd' | 'brotli' | 'xz' | anything else is treated as bzip2
    preflight;     // decompression-preflight result, if any, merged into audit reports
    constructor(data, algorithm, name = 'data', preflight) {
        this.data = data;
        this.algorithm = algorithm;
        // Map the algorithm to its conventional file-extension tag.
        this.format = algorithm === 'zstd' ? 'zst' : algorithm === 'brotli' ? 'br' : algorithm === 'xz' ? 'xz' : 'bz2';
        this.preflight = preflight;
        this.entry = {
            format: this.format,
            name,
            size: BigInt(data.length),
            isDirectory: false,
            isSymlink: false,
            open: async () => readableFromBytes(this.data)
        };
    }
    // Yields the single synthetic entry.
    async *entries() {
        yield this.entry;
    }
    // Audits the single entry: enforces the total-decompressed-bytes limit,
    // checks the name for unsafe path constructs, then folds preflight
    // findings into the report.
    async audit(options) {
        const issues = [];
        const summary = {
            entries: 1,
            warnings: 0,
            errors: 0
        };
        // summary.totalBytes is a plain number; it is omitted when the size
        // cannot be represented exactly as a double.
        const totalBytes = this.entry.size > BigInt(Number.MAX_SAFE_INTEGER) ? undefined : Number(this.entry.size);
        if (totalBytes !== undefined)
            summary.totalBytes = totalBytes;
        // maxTotalUncompressedBytes is accepted as an alias for the limit.
        const maxTotal = options?.limits?.maxTotalDecompressedBytes ?? options?.limits?.maxTotalUncompressedBytes;
        if (maxTotal !== undefined && this.entry.size > BigInt(maxTotal)) {
            // Pick the algorithm-specific issue code.
            const code = this.algorithm === 'zstd'
                ? 'ZSTD_LIMIT_EXCEEDED'
                : this.algorithm === 'brotli'
                    ? 'BROTLI_LIMIT_EXCEEDED'
                    : this.algorithm === 'xz'
                        ? 'XZ_LIMIT_EXCEEDED'
                        : 'BZIP2_LIMIT_EXCEEDED';
            issues.push({
                code,
                severity: 'error',
                message: 'Uncompressed size exceeds limit',
                entryName: this.entry.name
            });
            summary.errors += 1;
        }
        const pathIssues = entryPathIssues(this.entry.name);
        for (const issue of pathIssues) {
            issues.push(issue);
            if (issue.severity === 'warning')
                summary.warnings += 1;
            if (issue.severity === 'error')
                summary.errors += 1;
        }
        const report = {
            schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION,
            ok: summary.errors === 0,
            summary,
            issues,
            toJSON: () => ({ schemaVersion: BYTEFOLD_REPORT_SCHEMA_VERSION, ok: summary.errors === 0, summary, issues })
        };
        appendResourcePreflightIssue(report, this.preflight);
        return appendResourceLimitIssue(report, this.preflight, options?.limits, options?.profile);
    }
    // Throws ARCHIVE_AUDIT_FAILED when the audit reports any error.
    async assertSafe(options) {
        const report = await this.audit(options);
        if (!report.ok) {
            throw new ArchiveError('ARCHIVE_AUDIT_FAILED', 'Compressed audit failed');
        }
    }
    // Normalization has no meaning for a single compressed payload.
    async normalizeToWritable() {
        throw new ArchiveError('ARCHIVE_UNSUPPORTED_FEATURE', 'Normalization is not supported for single-file compressed formats');
    }
}
|
|
1442
|
+
/**
 * Recursively converts an issue-details value into a JSON-safe structure:
 * BigInt values become decimal strings, arrays and plain objects are
 * rebuilt with sanitized members, and every other value passes through.
 * @param {*} value - Arbitrary details payload.
 * @returns {*} A structurally equivalent, JSON-serializable value.
 */
function sanitizeDetails(value) {
    if (typeof value === 'bigint') {
        return value.toString();
    }
    if (Array.isArray(value)) {
        return value.map((item) => sanitizeDetails(item));
    }
    if (value !== null && typeof value === 'object') {
        const entries = Object.entries(value).map(([key, val]) => [key, sanitizeDetails(val)]);
        return Object.fromEntries(entries);
    }
    return value;
}
|
|
1456
|
+
/**
 * Checks an archive entry name for unsafe path constructs.
 * Every reported issue uses code 'ARCHIVE_PATH_TRAVERSAL' with severity
 * 'error'. A NUL byte short-circuits the remaining checks.
 * @param {string} entryName - Entry path as stored in the archive.
 * @returns {Array<object>} Issues found (empty when the name is safe).
 */
function entryPathIssues(entryName) {
    const makeIssue = (message) => ({
        code: 'ARCHIVE_PATH_TRAVERSAL',
        severity: 'error',
        message,
        entryName
    });
    // A NUL byte can truncate the path at the OS level; report and stop.
    if (entryName.includes('\u0000')) {
        return [makeIssue('Entry name contains NUL byte')];
    }
    const issues = [];
    // Treat backslashes as separators so Windows-style paths are checked too.
    const normalized = entryName.replace(/\\/g, '/');
    const isAbsolute = normalized.startsWith('/') || /^[a-zA-Z]:/.test(normalized);
    if (isAbsolute) {
        issues.push(makeIssue('Absolute paths are not allowed'));
    }
    const segments = normalized.split('/').filter((segment) => segment.length > 0);
    if (segments.includes('..')) {
        issues.push(makeIssue('Path traversal detected'));
    }
    return issues;
}
|
|
1487
|
+
/**
 * Duck-types a value as a WHATWG ReadableStream by checking for a
 * callable getReader method.
 * @param {*} value - Candidate value.
 * @returns {boolean} True when value looks like a ReadableStream.
 */
function isReadableStream(value) {
    if (!value) {
        return false;
    }
    return typeof value.getReader === 'function';
}
|
|
1490
|
+
//# sourceMappingURL=index.js.map
|