node-pkware 3.0.0 → 3.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Explode.js +1 -1
- package/dist/Explode.js.map +1 -1
- package/dist/Implode.d.ts +0 -1
- package/dist/Implode.js +1 -1
- package/dist/Implode.js.map +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
- package/src/ExpandingBuffer.ts +0 -148
- package/src/Explode.ts +0 -404
- package/src/Implode.ts +0 -368
- package/src/bin/explode.ts +0 -80
- package/src/bin/helpers.ts +0 -65
- package/src/bin/implode.ts +0 -116
- package/src/constants.ts +0 -120
- package/src/errors.ts +0 -47
- package/src/functions.ts +0 -73
- package/src/index.ts +0 -30
- package/src/stream.ts +0 -220
- package/src/types.ts +0 -26
package/src/Explode.ts
DELETED
|
@@ -1,404 +0,0 @@
|
|
|
1
|
-
import { Buffer } from 'node:buffer'
|
|
2
|
-
import { Transform, TransformCallback } from 'node:stream'
|
|
3
|
-
import {
|
|
4
|
-
ChBitsAsc,
|
|
5
|
-
ChCodeAsc,
|
|
6
|
-
Compression,
|
|
7
|
-
DictionarySize,
|
|
8
|
-
DistBits,
|
|
9
|
-
DistCode,
|
|
10
|
-
ExLenBits,
|
|
11
|
-
LenBase,
|
|
12
|
-
LenBits,
|
|
13
|
-
LenCode,
|
|
14
|
-
LITERAL_END_STREAM,
|
|
15
|
-
} from './constants'
|
|
16
|
-
import { AbortedError, InvalidCompressionTypeError, InvalidDictionarySizeError } from './errors'
|
|
17
|
-
import { ExpandingBuffer } from './ExpandingBuffer'
|
|
18
|
-
import { evenAndRemainder, getLowestNBits, mergeSparseArrays, nBitsOfOnes, repeat, toHex, unfold } from './functions'
|
|
19
|
-
import { Config, Stats } from './types'
|
|
20
|
-
|
|
21
|
-
/**
|
|
22
|
-
* This function assumes there are at least 2 bytes of data in the buffer
|
|
23
|
-
*/
|
|
24
|
-
const readHeader = (buffer: Buffer) => {
|
|
25
|
-
const compressionType = buffer.readUInt8(0)
|
|
26
|
-
const dictionarySize = buffer.readUInt8(1)
|
|
27
|
-
|
|
28
|
-
if (!(compressionType in Compression) || compressionType === Compression.Unknown) {
|
|
29
|
-
throw new InvalidCompressionTypeError()
|
|
30
|
-
}
|
|
31
|
-
|
|
32
|
-
if (!(dictionarySize in DictionarySize) || dictionarySize === DictionarySize.Unknown) {
|
|
33
|
-
throw new InvalidDictionarySizeError()
|
|
34
|
-
}
|
|
35
|
-
|
|
36
|
-
return {
|
|
37
|
-
compressionType: compressionType as Compression,
|
|
38
|
-
dictionarySize: dictionarySize as DictionarySize,
|
|
39
|
-
}
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
const generateDecodeTables = (startIndexes: number[], lengthBits: number[]) => {
|
|
43
|
-
const codes = repeat(0, 0x100)
|
|
44
|
-
|
|
45
|
-
lengthBits.forEach((lengthBit, i) => {
|
|
46
|
-
for (let index = startIndexes[i]; index < 0x100; index += 1 << lengthBit) {
|
|
47
|
-
codes[index] = i
|
|
48
|
-
}
|
|
49
|
-
})
|
|
50
|
-
|
|
51
|
-
return codes
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
/**
|
|
55
|
-
* PAT = populate ascii table
|
|
56
|
-
*/
|
|
57
|
-
const createPATIterator = (limit: number, stepper: number) => {
|
|
58
|
-
return (n: number) => {
|
|
59
|
-
return n >= limit ? false : ([n, n + (1 << stepper)] as [number, number])
|
|
60
|
-
}
|
|
61
|
-
}
|
|
62
|
-
|
|
63
|
-
const populateAsciiTable = (value: number, index: number, bits: number, limit: number) => {
|
|
64
|
-
const iterator = createPATIterator(limit, value - bits)
|
|
65
|
-
const seed = ChCodeAsc[index] >> bits
|
|
66
|
-
const idxs = unfold(iterator, seed)
|
|
67
|
-
|
|
68
|
-
const table: number[] = []
|
|
69
|
-
idxs.forEach((idx) => {
|
|
70
|
-
table[idx] = index
|
|
71
|
-
})
|
|
72
|
-
return table
|
|
73
|
-
}
|
|
74
|
-
|
|
75
|
-
/**
 * Streaming decompressor (PKWARE DCL "explode") exposed as a handler for a
 * Node.js Transform stream.
 *
 * Data flows: chunks are appended to #inputBuffer, decoded literal-by-literal
 * into #outputBuffer, and emitted in 0x1000-byte blocks (keeping one block of
 * lookback). Bit-level reading state lives in #bitBuffer/#extraBits and is
 * snapshotted via #backup()/#restore() so a decode attempt that runs out of
 * input can be rolled back and retried when more data arrives.
 */
export class Explode {
  #verbose: boolean
  // true while the decoder cannot make progress without more input
  #needMoreInput: boolean = true
  #isFirstChunk: boolean = true
  // number of valid bits currently held in #bitBuffer
  #extraBits: number = 0
  // bit accumulator; bits are consumed from the low end (see #wasteBits)
  #bitBuffer: number = 0
  // snapshot of the bit-reader state, restored when decoding has to be retried
  #backupData: { extraBits: number; bitBuffer: number } = {
    extraBits: -1,
    bitBuffer: -1,
  }
  // byte -> length-code / distance-position-code lookup tables
  #lengthCodes: number[] = generateDecodeTables(LenCode, LenBits)
  #distPosCodes: number[] = generateDecodeTables(DistCode, DistBits)
  #inputBuffer: ExpandingBuffer
  #outputBuffer: ExpandingBuffer
  #stats: Stats = { chunkCounter: 0 }
  // both start as Unknown until the stream header is parsed
  #compressionType: Compression = Compression.Unknown
  #dictionarySize: DictionarySize = DictionarySize.Unknown
  #dictionarySizeMask: number = 0
  // per-literal bit counts for ascii mode; rewritten by #generateAsciiTables
  #chBitsAsc: number[] = repeat(0, 0x100)
  // ascii decode tables (names refer to their offsets in the original
  // PKWARE implementation — NOTE(review): presumed, not verifiable from here)
  #asciiTable2C34: number[] = repeat(0, 0x100)
  #asciiTable2D34: number[] = repeat(0, 0x100)
  #asciiTable2E34: number[] = repeat(0, 0x80)
  #asciiTable2EB4: number[] = repeat(0, 0x100)

  constructor(config: Config = {}) {
    this.#verbose = config?.verbose ?? false
    this.#inputBuffer = new ExpandingBuffer(config?.inputBufferSize ?? 0)
    this.#outputBuffer = new ExpandingBuffer(config?.outputBufferSize ?? 0)
  }

  /**
   * Returns the `transform` function to be plugged into a Transform stream.
   * Uses a closure over `instance` because the function needs the stream's
   * `this` to install `_flush` on first chunk.
   */
  getHandler() {
    const instance = this

    return function (this: Transform, chunk: Buffer, encoding: BufferEncoding, callback: TransformCallback) {
      instance.#needMoreInput = true

      try {
        instance.#inputBuffer.append(chunk)

        if (instance.#isFirstChunk) {
          instance.#isFirstChunk = false
          // route end-of-stream handling through #onInputFinished
          this._flush = instance.#onInputFinished.bind(instance)
        }

        if (instance.#verbose) {
          instance.#stats.chunkCounter++
          console.log(`explode: reading ${toHex(chunk.length)} bytes from chunk #${instance.#stats.chunkCounter}`)
        }

        instance.#processChunkData()

        const blockSize = 0x1000

        // not enough decoded data yet to emit anything — push an empty buffer
        if (instance.#outputBuffer.size() <= blockSize) {
          callback(null, Buffer.from([]))
          return
        }

        let [numberOfBlocks] = evenAndRemainder(instance.#outputBuffer.size(), blockSize)

        // making sure to leave one block worth of data for lookback when processing chunk data
        numberOfBlocks--

        const numberOfBytes = numberOfBlocks * blockSize
        // make sure to create a copy of the output buffer slice as it will get flushed in the next line
        const output = Buffer.from(instance.#outputBuffer.read(0, numberOfBytes))
        instance.#outputBuffer.flushStart(numberOfBytes)

        callback(null, output)
      } catch (e: unknown) {
        callback(e as Error)
      }
    }
  }

  /**
   * Populates the ascii decode tables and per-literal bit counts from
   * ChBitsAsc/ChCodeAsc. Which table a literal lands in depends on its code
   * length and the low bits of its code.
   */
  #generateAsciiTables() {
    this.#chBitsAsc = ChBitsAsc.map((value, index) => {
      // short codes (<= 8 bits) decode directly from the primary table
      if (value <= 8) {
        this.#asciiTable2C34 = mergeSparseArrays(
          populateAsciiTable(value, index, 0, 0x100),
          this.#asciiTable2C34,
        ) as number[]
        return value - 0
      }

      const acc = getLowestNBits(8, ChCodeAsc[index])
      // low byte of the code is zero: decode via the 2EB4 table
      if (acc === 0) {
        this.#asciiTable2EB4 = mergeSparseArrays(
          populateAsciiTable(value, index, 8, 0x100),
          this.#asciiTable2EB4,
        ) as number[]
        return value - 8
      }

      // 0xff in the primary table marks "look further" (see #decodeNextLiteral)
      this.#asciiTable2C34[acc] = 0xff

      if (getLowestNBits(6, acc) === 0) {
        this.#asciiTable2E34 = mergeSparseArrays(
          populateAsciiTable(value, index, 6, 0x80),
          this.#asciiTable2E34,
        ) as number[]
        return value - 6
      }

      this.#asciiTable2D34 = mergeSparseArrays(
        populateAsciiTable(value, index, 4, 0x100),
        this.#asciiTable2D34,
      ) as number[]

      return value - 4
    })
  }

  /**
   * Installed as the stream's `_flush`. Emits all remaining decoded data, or
   * errors with AbortedError if the stream ended while the decoder still
   * needed input (truncated stream).
   */
  #onInputFinished(callback: TransformCallback) {
    if (this.#verbose) {
      console.log('---------------')
      console.log('explode: total number of chunks read:', this.#stats.chunkCounter)
      console.log('explode: inputBuffer heap size', toHex(this.#inputBuffer.heapSize()))
      console.log('explode: outputBuffer heap size', toHex(this.#outputBuffer.heapSize()))
    }

    if (this.#needMoreInput) {
      callback(new AbortedError())
      return
    }

    callback(null, this.#outputBuffer.read())
  }

  /**
   * Discards `numberOfBits` from the bit buffer, refilling one byte from the
   * input buffer when fewer than `numberOfBits` are available.
   *
   * @throws {@link AbortedError} when there isn't enough data to be wasted
   */
  #wasteBits(numberOfBits: number) {
    // need a refill but no input left — caller must wait for more data
    if (numberOfBits > this.#extraBits && this.#inputBuffer.isEmpty()) {
      throw new AbortedError()
    }

    // enough bits buffered: just shift them out
    if (numberOfBits <= this.#extraBits) {
      this.#bitBuffer = this.#bitBuffer >> numberOfBits
      this.#extraBits = this.#extraBits - numberOfBits
      return
    }

    // consume one byte from input and splice it above the remaining bits
    const nextByte = this.#inputBuffer.readByte(0)
    this.#inputBuffer.dropStart(1)

    this.#bitBuffer = ((this.#bitBuffer >> this.#extraBits) | (nextByte << 8)) >> (numberOfBits - this.#extraBits)
    this.#extraBits = this.#extraBits + 8 - numberOfBits
  }

  /**
   * Decodes the next literal from the bit stream.
   *
   * Returns either a plain byte value (< 0x100) or `lengthCode + 0x100` for a
   * repetition (so callers can distinguish the two by `>= 0x100`); 0x10e + 0x100
   * corresponds to LITERAL_END_STREAM.
   *
   * @throws {@link AbortedError}
   */
  #decodeNextLiteral() {
    // 1 = length/distance pair follows, 0 = literal byte follows
    const lastBit = getLowestNBits(1, this.#bitBuffer)

    this.#wasteBits(1)

    if (lastBit) {
      let lengthCode = this.#lengthCodes[getLowestNBits(8, this.#bitBuffer)]

      this.#wasteBits(LenBits[lengthCode])

      const extraLenghtBits = ExLenBits[lengthCode]
      if (extraLenghtBits !== 0) {
        const extraLength = getLowestNBits(extraLenghtBits, this.#bitBuffer)

        try {
          this.#wasteBits(extraLenghtBits)
        } catch (e) {
          // running dry is only acceptable on the end-of-stream literal (0x10e)
          if (lengthCode + extraLength !== 0x10e) {
            throw new AbortedError()
          }
        }

        lengthCode = LenBase[lengthCode] + extraLength
      }

      // offset by 0x100 so repetitions are distinguishable from plain bytes
      return lengthCode + 0x100
    }

    const lastByte = getLowestNBits(8, this.#bitBuffer)

    // binary mode: literals are stored as raw bytes
    if (this.#compressionType === Compression.Binary) {
      this.#wasteBits(8)
      return lastByte
    }

    // ascii mode: literals are Huffman-coded; route through the decode tables
    let value: number

    if (lastByte > 0) {
      value = this.#asciiTable2C34[lastByte]

      // 0xff is the "long code" marker written by #generateAsciiTables
      if (value === 0xff) {
        if (getLowestNBits(6, this.#bitBuffer)) {
          this.#wasteBits(4)

          value = this.#asciiTable2D34[getLowestNBits(8, this.#bitBuffer)]
        } else {
          this.#wasteBits(6)

          value = this.#asciiTable2E34[getLowestNBits(7, this.#bitBuffer)]
        }
      }
    } else {
      this.#wasteBits(8)

      value = this.#asciiTable2EB4[getLowestNBits(8, this.#bitBuffer)]
    }

    this.#wasteBits(this.#chBitsAsc[value])

    return value
  }

  /**
   * Decodes the backward distance for a repetition of `repeatLength` bytes.
   * Returns the distance + 1 (a distance of 1 = the last output byte).
   *
   * @throws {@link AbortedError}
   */
  #decodeDistance(repeatLength: number) {
    const distPosCode = this.#distPosCodes[getLowestNBits(8, this.#bitBuffer)]
    const distPosBits = DistBits[distPosCode]

    this.#wasteBits(distPosBits)

    let distance: number
    let bitsToWaste: number

    if (repeatLength === 2) {
      // 2-byte repetitions only carry 2 extra distance bits
      distance = (distPosCode << 2) | getLowestNBits(2, this.#bitBuffer)
      bitsToWaste = 2
    } else {
      // otherwise the dictionary size determines how many low bits follow
      distance = (distPosCode << this.#dictionarySize) | (this.#bitBuffer & this.#dictionarySizeMask)
      bitsToWaste = this.#dictionarySize
    }

    this.#wasteBits(bitsToWaste)

    return distance + 1
  }

  /**
   * Main decode loop: parses the header on first call, then decodes literals
   * into the output buffer until end-of-stream or input runs dry. All work is
   * wrapped in backup/restore so an interrupted literal is retried intact
   * when more input arrives.
   */
  #processChunkData() {
    if (this.#inputBuffer.isEmpty()) {
      return
    }

    // header not parsed yet (compression type still Unknown)
    if (this.#compressionType === Compression.Unknown) {
      const headerParsedSuccessfully = this.#parseInitialData()
      if (!headerParsedSuccessfully || this.#inputBuffer.isEmpty()) {
        return
      }
    }

    this.#needMoreInput = false

    this.#backup()

    try {
      let nextLiteral = this.#decodeNextLiteral()

      while (nextLiteral !== LITERAL_END_STREAM) {
        let addition: Buffer

        if (nextLiteral >= 0x100) {
          // repetition: copy `repeatLength` bytes from `minusDistance` back
          const repeatLength = nextLiteral - 0xfe

          const minusDistance = this.#decodeDistance(repeatLength)
          const availableData = this.#outputBuffer.read(this.#outputBuffer.size() - minusDistance, repeatLength)

          if (repeatLength > minusDistance) {
            // overlapping copy: tile the available bytes until long enough
            const multipliedData = repeat(availableData, Math.ceil(repeatLength / availableData.length))
            addition = Buffer.concat(multipliedData).subarray(0, repeatLength)
          } else {
            addition = availableData
          }
        } else {
          // plain literal byte
          addition = Buffer.from([nextLiteral])
        }

        this.#outputBuffer.append(addition)

        // commit after each successfully decoded literal
        this.#backup()

        nextLiteral = this.#decodeNextLiteral()
      }
    } catch (e) {
      // decoding ran out of input mid-literal; roll back below and retry later
      this.#needMoreInput = true
    }

    if (this.#needMoreInput) {
      this.#restore()
    }
  }

  /**
   * Parses the 2-byte header + first bit-buffer byte. Requires at least 4
   * bytes of input; returns false (and consumes nothing) until then.
   */
  #parseInitialData() {
    if (this.#inputBuffer.size() < 4) {
      return false
    }

    const { compressionType, dictionarySize } = readHeader(this.#inputBuffer.read(0, 2))

    this.#compressionType = compressionType
    this.#dictionarySize = dictionarySize
    // byte 2 seeds the bit buffer; header + seed byte are then dropped
    this.#bitBuffer = this.#inputBuffer.readByte(2)
    this.#inputBuffer.dropStart(3)
    this.#dictionarySizeMask = nBitsOfOnes(dictionarySize)

    if (this.#compressionType === Compression.Ascii) {
      this.#generateAsciiTables()
    }

    if (this.#verbose) {
      console.log(`explode: compression type: ${Compression[this.#compressionType]}`)
      console.log(`explode: compression level: ${DictionarySize[this.#dictionarySize]}`)
    }

    return true
  }

  // snapshot bit-reader state + input buffer indices (commit point)
  #backup() {
    this.#backupData.extraBits = this.#extraBits
    this.#backupData.bitBuffer = this.#bitBuffer
    this.#inputBuffer.saveIndices()
  }

  // roll back to the last #backup() snapshot
  #restore() {
    this.#extraBits = this.#backupData.extraBits
    this.#bitBuffer = this.#backupData.bitBuffer
    this.#inputBuffer.restoreIndices()
  }
}
|