node-pkware 2.0.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +42 -40
- package/dist/ExpandingBuffer.d.ts +53 -0
- package/dist/ExpandingBuffer.js +134 -0
- package/dist/ExpandingBuffer.js.map +1 -0
- package/dist/Explode.d.ts +8 -0
- package/dist/Explode.js +309 -0
- package/dist/Explode.js.map +1 -0
- package/dist/Implode.d.ts +11 -0
- package/dist/Implode.js +305 -0
- package/dist/Implode.js.map +1 -0
- package/dist/bin/explode.d.ts +2 -0
- package/dist/bin/explode.js +59 -0
- package/dist/bin/explode.js.map +1 -0
- package/dist/bin/helpers.d.ts +8 -0
- package/dist/bin/helpers.js +65 -0
- package/dist/bin/helpers.js.map +1 -0
- package/dist/bin/implode.d.ts +2 -0
- package/dist/bin/implode.js +79 -0
- package/dist/bin/implode.js.map +1 -0
- package/dist/constants.d.ts +32 -0
- package/dist/constants.js +114 -0
- package/dist/constants.js.map +1 -0
- package/{types → dist}/errors.d.ts +13 -11
- package/dist/errors.js +52 -0
- package/dist/errors.js.map +1 -0
- package/dist/functions.d.ts +11 -0
- package/dist/functions.js +73 -0
- package/dist/functions.js.map +1 -0
- package/dist/index.d.ts +20 -0
- package/dist/index.js +54 -0
- package/dist/index.js.map +1 -0
- package/{types/helpers → dist}/stream.d.ts +13 -34
- package/dist/stream.js +205 -0
- package/dist/stream.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/dist/types.d.ts +25 -0
- package/dist/types.js +3 -0
- package/dist/types.js.map +1 -0
- package/package.json +14 -45
- package/src/ExpandingBuffer.ts +148 -0
- package/src/Explode.ts +404 -0
- package/src/Implode.ts +368 -0
- package/{bin/explode.js → src/bin/explode.ts} +35 -33
- package/src/bin/helpers.ts +65 -0
- package/src/bin/implode.ts +116 -0
- package/src/{constants.js → constants.ts} +31 -50
- package/src/errors.ts +47 -0
- package/src/functions.ts +73 -0
- package/src/index.ts +30 -0
- package/src/stream.ts +220 -0
- package/src/types.ts +26 -0
- package/bin/implode.js +0 -116
- package/src/errors.js +0 -50
- package/src/explode.js +0 -411
- package/src/helpers/ExpandingBuffer.js +0 -123
- package/src/helpers/functions.js +0 -150
- package/src/helpers/stream.js +0 -190
- package/src/helpers/testing.js +0 -80
- package/src/implode.js +0 -364
- package/src/index.js +0 -18
- package/tsconfig.json +0 -20
- package/types/constants.d.ts +0 -41
- package/types/explode.d.ts +0 -56
- package/types/helpers/ExpandingBuffer.d.ts +0 -25
- package/types/helpers/Shared.d.ts +0 -46
- package/types/helpers/functions.d.ts +0 -15
- package/types/helpers/testing.d.ts +0 -6
- package/types/implode.d.ts +0 -63
- package/types/index.d.ts +0 -8
package/src/explode.js
DELETED
|
@@ -1,411 +0,0 @@
|
|
|
1
|
-
const { repeat, unfold, has } = require('ramda')
|
|
2
|
-
const {
|
|
3
|
-
InvalidDataError,
|
|
4
|
-
InvalidCompressionTypeError,
|
|
5
|
-
InvalidDictionarySizeError,
|
|
6
|
-
ExpectedBufferError,
|
|
7
|
-
ExpectedFunctionError,
|
|
8
|
-
AbortedError,
|
|
9
|
-
} = require('./errors.js')
|
|
10
|
-
const { mergeSparseArrays, getLowestNBits, nBitsOfOnes, toHex, isFunction } = require('./helpers/functions.js')
|
|
11
|
-
const {
|
|
12
|
-
ChBitsAsc,
|
|
13
|
-
ChCodeAsc,
|
|
14
|
-
COMPRESSION_BINARY,
|
|
15
|
-
COMPRESSION_ASCII,
|
|
16
|
-
DICTIONARY_SIZE_SMALL,
|
|
17
|
-
DICTIONARY_SIZE_MEDIUM,
|
|
18
|
-
DICTIONARY_SIZE_LARGE,
|
|
19
|
-
PKDCL_OK,
|
|
20
|
-
PKDCL_STREAM_END,
|
|
21
|
-
LITERAL_STREAM_ABORTED,
|
|
22
|
-
LITERAL_END_STREAM,
|
|
23
|
-
LenBits,
|
|
24
|
-
LenBase,
|
|
25
|
-
ExLenBits,
|
|
26
|
-
DistBits,
|
|
27
|
-
LenCode,
|
|
28
|
-
DistCode,
|
|
29
|
-
} = require('./constants.js')
|
|
30
|
-
const ExpandingBuffer = require('./helpers/ExpandingBuffer.js')
|
|
31
|
-
|
|
32
|
-
/**
 * Reads the PKWARE DCL header from the start of a buffer.
 * Byte 0 holds the compression type, byte 1 the dictionary size in bits.
 * @param {Buffer} buffer - raw compressed data (at least 4 bytes)
 * @returns {{ compressionType: number, dictionarySizeBits: number }}
 * @throws {ExpectedBufferError} when the argument is not a Buffer
 * @throws {InvalidDataError} when fewer than 4 bytes are available
 * @throws {InvalidCompressionTypeError} on an unknown compression type byte
 * @throws {InvalidDictionarySizeError} on an unknown dictionary size byte
 */
const readHeader = (buffer) => {
  if (!Buffer.isBuffer(buffer)) {
    throw new ExpectedBufferError()
  }

  if (buffer.length < 4) {
    throw new InvalidDataError()
  }

  const compressionType = buffer.readUInt8(0)
  const dictionarySizeBits = buffer.readUInt8(1)

  const knownCompressionTypes = [COMPRESSION_BINARY, COMPRESSION_ASCII]
  if (!knownCompressionTypes.includes(compressionType)) {
    throw new InvalidCompressionTypeError()
  }

  const knownDictionarySizes = [DICTIONARY_SIZE_SMALL, DICTIONARY_SIZE_MEDIUM, DICTIONARY_SIZE_LARGE]
  if (!knownDictionarySizes.includes(dictionarySizeBits)) {
    throw new InvalidDictionarySizeError()
  }

  return {
    compressionType,
    dictionarySizeBits,
  }
}
|
|
54
|
-
|
|
55
|
-
// PAT = populate ascii table
/**
 * Builds an unfold-style stepper function: given n it stops (returns false)
 * once n reaches `limit`, otherwise yields [n, nextSeed] where the seed
 * advances by 2^stepper.
 */
const createPATIterator = (limit, stepper) => {
  const step = 1 << stepper
  return (n) => {
    if (n >= limit) {
      return false
    }
    return [n, n + step]
  }
}
|
|
61
|
-
|
|
62
|
-
/**
 * Fills a sparse lookup array for one ascii character code.
 * Indices start at ChCodeAsc[index] >> bits and step by 2^(value - bits)
 * until `limit` is reached; every generated slot maps back to `index`.
 * @returns {Array} sparse array (holes where no slot was generated)
 */
const populateAsciiTable = (value, index, bits, limit) => {
  const step = 1 << (value - bits)
  const result = []

  let slot = ChCodeAsc[index] >> bits
  while (slot < limit) {
    result[slot] = index
    slot = slot + step
  }

  return result
}
|
|
72
|
-
|
|
73
|
-
/**
 * Generates the lookup tables used for decoding ascii mode data.
 * The tables are named after their memory offsets in the original PKWARE
 * implementation (presumably — verify against the reference C source).
 * @returns {object} { asciiTable2C34, asciiTable2D34, asciiTable2E34,
 *                     asciiTable2EB4, chBitsAsc }
 */
const generateAsciiTables = () => {
  const tables = {
    asciiTable2C34: repeat(0, 0x100),
    asciiTable2D34: repeat(0, 0x100),
    asciiTable2E34: repeat(0, 0x80),
    asciiTable2EB4: repeat(0, 0x100),
  }

  tables.chBitsAsc = ChBitsAsc.map((value, index) => {
    // short codes (<= 8 bits) go straight into the primary table
    if (value <= 8) {
      tables.asciiTable2C34 = mergeSparseArrays(populateAsciiTable(value, index, 0, 0x100), tables.asciiTable2C34)
      return value - 0
    }

    const lowest8Bits = getLowestNBits(8, ChCodeAsc[index])

    if (lowest8Bits === 0) {
      tables.asciiTable2EB4 = mergeSparseArrays(populateAsciiTable(value, index, 8, 0x100), tables.asciiTable2EB4)
      return value - 8
    }

    // 0xff in the primary table marks an escape into the secondary tables
    tables.asciiTable2C34[lowest8Bits] = 0xff

    if (getLowestNBits(6, ChCodeAsc[index]) === 0) {
      tables.asciiTable2E34 = mergeSparseArrays(populateAsciiTable(value, index, 6, 0x80), tables.asciiTable2E34)
      return value - 6
    }

    tables.asciiTable2D34 = mergeSparseArrays(populateAsciiTable(value, index, 4, 0x100), tables.asciiTable2D34)
    return value - 4
  })

  return tables
}
|
|
106
|
-
|
|
107
|
-
/**
 * Consumes the stream header from the input buffer and initializes the
 * decompression state (bit buffer, dictionary mask, ascii tables).
 * @param {object} state - mutable decompression state
 * @param {boolean} verbose - log header info to the console
 * @returns {boolean} false when fewer than 4 bytes are buffered so far
 */
const parseInitialData = (state, verbose = false) => {
  if (state.inputBuffer.size() < 4) {
    // not enough data yet to read the 4 byte header
    return false
  }

  const { compressionType, dictionarySizeBits } = readHeader(state.inputBuffer.read())

  state.compressionType = compressionType
  state.dictionarySizeBits = dictionarySizeBits
  // byte #2 seeds the bit buffer; the 3 header bytes are then discarded
  state.bitBuffer = state.inputBuffer.read(2, 1)
  state.inputBuffer.dropStart(3)
  state.dictionarySizeMask = nBitsOfOnes(dictionarySizeBits)

  if (compressionType === COMPRESSION_ASCII) {
    // ascii mode needs the extra huffman lookup tables, merged into state
    Object.assign(state, generateAsciiTables())
  }

  if (verbose) {
    console.log(`explode: compression type: ${state.compressionType === COMPRESSION_BINARY ? 'binary' : 'ascii'}`)
    console.log(
      `explode: compression level: ${
        state.dictionarySizeBits === 4 ? 'small' : state.dictionarySizeBits === 5 ? 'medium' : 'large'
      }`,
    )
  }

  return true
}
|
|
138
|
-
|
|
139
|
-
/**
 * Drops `numberOfBits` consumed bits from the bit buffer, topping it up
 * with the next input byte when there are not enough bits left.
 * @returns PKDCL_STREAM_END when the input ran dry, PKDCL_OK otherwise
 */
const wasteBits = (state, numberOfBits) => {
  const needsRefill = numberOfBits > state.extraBits

  if (needsRefill && state.inputBuffer.isEmpty()) {
    return PKDCL_STREAM_END
  }

  if (!needsRefill) {
    state.bitBuffer = state.bitBuffer >> numberOfBits
    state.extraBits = state.extraBits - numberOfBits
    return PKDCL_OK
  }

  const nextByte = state.inputBuffer.read(0, 1)
  state.inputBuffer.dropStart(1)

  state.bitBuffer = ((state.bitBuffer >> state.extraBits) | (nextByte << 8)) >> (numberOfBits - state.extraBits)
  state.extraBits = state.extraBits + 8 - numberOfBits

  return PKDCL_OK
}
|
|
157
|
-
|
|
158
|
-
/**
 * Decodes the next literal from the bit stream.
 * @returns
 *   < 0x100  - a plain byte value
 *   >= 0x100 - a repetition length code + 0x100
 *   LITERAL_STREAM_ABORTED - the input buffer ran out mid-literal
 */
const decodeNextLiteral = (state) => {
  const isRepetition = state.bitBuffer & 1

  if (wasteBits(state, 1) === PKDCL_STREAM_END) {
    return LITERAL_STREAM_ABORTED
  }

  if (isRepetition) {
    let lengthCode = state.lengthCodes[getLowestNBits(8, state.bitBuffer)]

    if (wasteBits(state, LenBits[lengthCode]) === PKDCL_STREAM_END) {
      return LITERAL_STREAM_ABORTED
    }

    const extraLengthBits = ExLenBits[lengthCode]
    if (extraLengthBits !== 0) {
      const extraLength = getLowestNBits(extraLengthBits, state.bitBuffer)

      // code 0x10e is exempt from the abort check — presumably the
      // end-of-stream marker, which may legitimately exhaust the input
      if (wasteBits(state, extraLengthBits) === PKDCL_STREAM_END && lengthCode + extraLength !== 0x10e) {
        return LITERAL_STREAM_ABORTED
      }

      lengthCode = LenBase[lengthCode] + extraLength
    }

    return lengthCode + 0x100
  }

  const lastByte = getLowestNBits(8, state.bitBuffer)

  if (state.compressionType === COMPRESSION_BINARY) {
    // binary mode: the literal is the raw lowest 8 bits
    return wasteBits(state, 8) === PKDCL_STREAM_END ? LITERAL_STREAM_ABORTED : lastByte
  }

  // ascii mode: decode via the generated lookup tables
  let value
  if (lastByte > 0) {
    value = state.asciiTable2C34[lastByte]

    if (value === 0xff) {
      // 0xff marks an escape into one of the secondary tables
      if (getLowestNBits(6, state.bitBuffer)) {
        if (wasteBits(state, 4) === PKDCL_STREAM_END) {
          return LITERAL_STREAM_ABORTED
        }

        value = state.asciiTable2D34[getLowestNBits(8, state.bitBuffer)]
      } else {
        if (wasteBits(state, 6) === PKDCL_STREAM_END) {
          return LITERAL_STREAM_ABORTED
        }

        value = state.asciiTable2E34[getLowestNBits(7, state.bitBuffer)]
      }
    }
  } else {
    if (wasteBits(state, 8) === PKDCL_STREAM_END) {
      return LITERAL_STREAM_ABORTED
    }

    value = state.asciiTable2EB4[getLowestNBits(8, state.bitBuffer)]
  }

  return wasteBits(state, state.chBitsAsc[value]) === PKDCL_STREAM_END ? LITERAL_STREAM_ABORTED : value
}
|
|
221
|
-
|
|
222
|
-
/**
 * Decodes the backward distance of a repetition of `repeatLength` bytes.
 * @returns {number} distance + 1, or 0 when the input ran out
 */
const decodeDistance = (state, repeatLength) => {
  const distPosCode = state.distPosCodes[getLowestNBits(8, state.bitBuffer)]

  if (wasteBits(state, DistBits[distPosCode]) === PKDCL_STREAM_END) {
    return 0
  }

  let distance
  let bitsToWaste

  if (repeatLength === 2) {
    // 2 byte repetitions only carry 2 extra distance bits
    distance = (distPosCode << 2) | getLowestNBits(2, state.bitBuffer)
    bitsToWaste = 2
  } else {
    // longer repetitions use the full dictionary-sized distance
    distance = (distPosCode << state.dictionarySizeBits) | (state.bitBuffer & state.dictionarySizeMask)
    bitsToWaste = state.dictionarySizeBits
  }

  if (wasteBits(state, bitsToWaste) === PKDCL_STREAM_END) {
    return 0
  }

  return distance + 1
}
|
|
246
|
-
|
|
247
|
-
/**
 * Decompresses all currently buffered input into state.outputBuffer.
 * When a literal can only be partially decoded, the bit reader and buffer
 * indices are restored so decoding can resume with the next chunk.
 */
const processChunkData = (state, verbose = false) => {
  if (state.inputBuffer.isEmpty()) {
    return
  }

  // the first chunk(s) must deliver the header before decoding can start
  if (!has('compressionType', state)) {
    const parsedHeader = parseInitialData(state, verbose)
    if (!parsedHeader || state.inputBuffer.isEmpty()) {
      return
    }
  }

  state.needMoreInput = false

  state.backup()
  let nextLiteral = decodeNextLiteral(state)

  while (nextLiteral !== LITERAL_END_STREAM && nextLiteral !== LITERAL_STREAM_ABORTED) {
    let addition

    if (nextLiteral >= 0x100) {
      // repetition: copy repeatLength bytes from minusDistance bytes back
      const repeatLength = nextLiteral - 0xfe
      const minusDistance = decodeDistance(state, repeatLength)
      if (minusDistance === 0) {
        state.needMoreInput = true
        break
      }

      const availableData = state.outputBuffer.read(state.outputBuffer.size() - minusDistance, repeatLength)

      if (repeatLength > minusDistance) {
        // overlapping copy: replicate the available bytes until long enough
        const multipliedData = repeat(availableData, Math.ceil(repeatLength / availableData.length))
        addition = Buffer.concat(multipliedData).slice(0, repeatLength)
      } else {
        addition = availableData
      }
    } else {
      // plain literal byte
      addition = Buffer.from([nextLiteral])
    }

    state.outputBuffer.append(addition)

    state.backup()
    nextLiteral = decodeNextLiteral(state)
  }

  if (nextLiteral === LITERAL_STREAM_ABORTED) {
    state.needMoreInput = true
  }

  if (state.needMoreInput) {
    state.restore()
  }
}
|
|
300
|
-
|
|
301
|
-
/**
 * Builds a 256 entry decode table: every byte index reachable from
 * startIndexes[i] in steps of 2^lengthBits[i] maps back to code i.
 */
const generateDecodeTables = (startIndexes, lengthBits) => {
  const table = repeat(0, 0x100)

  lengthBits.forEach((lengthBit, i) => {
    for (let index = startIndexes[i]; index < 0x100; index += 1 << lengthBit) {
      table[index] = i
    }
  })

  return table
}
|
|
310
|
-
|
|
311
|
-
/**
 * Creates a stream transform handler that decompresses ("explodes")
 * PKWARE DCL compressed data.
 *
 * The returned function has the (chunk, encoding, callback) signature of a
 * Transform stream's _transform and keeps all decompression state on
 * `handler._state`.
 *
 * @param {object} config
 * @param {boolean} [config.verbose=false] - log progress to the console
 * @param {number} [config.inputBufferSize=0] - initial input buffer allocation
 * @param {number} [config.outputBufferSize=0] - initial output buffer allocation
 * @returns {Function} the transform handler
 */
const explode = (config = {}) => {
  const { verbose = false, inputBufferSize = 0x0, outputBufferSize = 0x0 } = config

  const handler = function (chunk, encoding, callback) {
    if (!isFunction(callback)) {
      // can't call callback to pass in data or errors, so we throw up
      throw new ExpectedFunctionError()
    }

    const state = handler._state
    state.needMoreInput = true

    try {
      state.inputBuffer.append(chunk)

      if (state.isFirstChunk) {
        state.isFirstChunk = false
        // register the flush handler on the stream the first time we run
        this._flush = state.onInputFinished
      }

      if (verbose) {
        console.log(`explode: reading ${toHex(chunk.length)} bytes from chunk #${state.stats.chunkCounter++}`)
      }

      processChunkData(state, verbose)

      const blockSize = 0x1000

      if (state.outputBuffer.size() <= blockSize) {
        callback(null, Buffer.from([]))
      } else {
        // emit whole blocks, but keep the last partial block plus one full
        // block buffered (presumably as back-reference history — verify)
        const numberOfBytes = (Math.floor(state.outputBuffer.size() / blockSize) - 1) * blockSize
        const output = Buffer.from(state.outputBuffer.read(0, numberOfBytes))
        state.outputBuffer.flushStart(numberOfBytes)

        callback(null, output)
      }
    } catch (error) {
      callback(error)
    }
  }

  handler._state = {
    // snapshot of the bit reader, restored when a literal spans chunks
    _backup: {
      extraBits: null,
      bitBuffer: null,
    },
    needMoreInput: true,
    isFirstChunk: true,
    extraBits: 0,
    chBitsAsc: repeat(0, 0x100), // DecodeLit and GenAscTabs uses this
    lengthCodes: generateDecodeTables(LenCode, LenBits),
    distPosCodes: generateDecodeTables(DistCode, DistBits),
    inputBuffer: new ExpandingBuffer(inputBufferSize),
    outputBuffer: new ExpandingBuffer(outputBufferSize),

    // flush handler: emits everything still buffered, or errors when the
    // stream ended mid-literal
    onInputFinished: (callback) => {
      const state = handler._state

      if (verbose) {
        console.log('---------------')
        console.log('explode: total number of chunks read:', state.stats.chunkCounter)
        console.log('explode: inputBuffer heap size', toHex(state.inputBuffer.heapSize()))
        console.log('explode: outputBuffer heap size', toHex(state.outputBuffer.heapSize()))
      }

      if (state.needMoreInput) {
        callback(new AbortedError())
      } else {
        callback(null, state.outputBuffer.read())
      }
    },

    // save the bit reader position before attempting a decode
    backup: () => {
      const state = handler._state
      state._backup.extraBits = state.extraBits
      state._backup.bitBuffer = state.bitBuffer
      state.inputBuffer._saveIndices()
    },

    // roll back to the last saved position (decode needs more input)
    restore: () => {
      const state = handler._state
      state.extraBits = state._backup.extraBits
      state.bitBuffer = state._backup.bitBuffer
      state.inputBuffer._restoreIndices()
    },

    stats: {
      chunkCounter: 0,
    },
  }

  return handler
}
|
|
399
|
-
|
|
400
|
-
module.exports = {
|
|
401
|
-
readHeader,
|
|
402
|
-
explode,
|
|
403
|
-
createPATIterator,
|
|
404
|
-
populateAsciiTable,
|
|
405
|
-
generateAsciiTables,
|
|
406
|
-
processChunkData,
|
|
407
|
-
wasteBits,
|
|
408
|
-
decodeNextLiteral,
|
|
409
|
-
decodeDistance,
|
|
410
|
-
generateDecodeTables,
|
|
411
|
-
}
|
|
@@ -1,123 +0,0 @@
|
|
|
1
|
-
const { clamp } = require('ramda')
|
|
2
|
-
const { ExpectedBufferError } = require('../errors')
|
|
3
|
-
|
|
4
|
-
/**
 * A dynamically growing Buffer wrapper with cheap deletes from the front.
 * The live data sits in `_heap` between `_startIndex` (inclusive) and
 * `_endIndex` (exclusive); dropping from the start just moves the index.
 */
class ExpandingBuffer {
  constructor(numberOfBytes = 0) {
    this._heap = Buffer.allocUnsafe(numberOfBytes)
    this._startIndex = 0
    this._endIndex = 0

    // snapshot slot used by _saveIndices/_restoreIndices
    this._backup = {
      _startIndex: 0,
      _endIndex: 0,
    }
  }

  // a view of only the live bytes (shares memory with the heap)
  _getActualData(offset = 0) {
    return this._heap.slice(this._startIndex + offset, this._endIndex)
  }

  // number of stored bytes
  size() {
    return this._endIndex - this._startIndex
  }

  isEmpty() {
    return this.size() === 0
  }

  // total allocated bytes (always >= size())
  heapSize() {
    return this._heap.length
  }

  /**
   * Appends a Buffer; reallocates the heap when it no longer fits.
   * @throws {ExpectedBufferError} when the argument is not a Buffer
   */
  append(buffer) {
    if (!Buffer.isBuffer(buffer)) {
      throw new ExpectedBufferError()
    }

    const fitsInHeap = this._endIndex + buffer.length < this.heapSize()
    if (fitsInHeap) {
      buffer.copy(this._heap, this._endIndex)
      this._endIndex += buffer.length
      return
    }

    // out of room: reallocate with only the live bytes plus the new data
    this._heap = Buffer.concat([this._getActualData(), buffer])
    this._startIndex = 0
    this._endIndex = this.heapSize()
  }

  // watch out! the buffer returned by Buffer.slice() will point to the same memory!
  read(offset, limit) {
    if (offset < 0 || limit < 1) {
      return Buffer.from([])
    }

    // single byte reads return a number, not a Buffer
    if (limit === 1) {
      return this._heap[this._startIndex + offset]
    }

    if (offset + limit < this.size()) {
      return this._heap.slice(this._startIndex + offset, this._startIndex + limit + offset)
    }

    return this._getActualData(offset)
  }

  // hard delete: removes data by copying the remaining bytes down to index 0
  flushStart(numberOfBytes) {
    const amount = clamp(0, this.heapSize(), numberOfBytes)
    if (amount === 0) {
      return
    }

    if (amount < this.heapSize()) {
      this._heap.copy(this._heap, 0, this._startIndex + amount)
    }
    this._endIndex -= this._startIndex + amount
    this._startIndex = 0
  }

  // hard delete from the end
  flushEnd(numberOfBytes) {
    const amount = clamp(0, this.heapSize(), numberOfBytes)
    if (amount > 0) {
      this._endIndex -= amount
    }
  }

  // soft delete: removes data by moving the start index forward
  dropStart(numberOfBytes) {
    if (numberOfBytes > 0) {
      this._startIndex += numberOfBytes
      if (this._startIndex >= this._endIndex) {
        this.clear()
      }
    }
  }

  // soft delete from the end
  dropEnd(numberOfBytes) {
    if (numberOfBytes > 0) {
      this._endIndex -= numberOfBytes
      if (this._startIndex >= this._endIndex) {
        this.clear()
      }
    }
  }

  getHeap() {
    return this._heap
  }

  clear() {
    this._startIndex = 0
    this._endIndex = 0
  }

  _saveIndices() {
    this._backup._startIndex = this._startIndex
    this._backup._endIndex = this._endIndex
  }

  _restoreIndices() {
    this._startIndex = this._backup._startIndex
    this._endIndex = this._backup._endIndex
  }
}
|
|
122
|
-
|
|
123
|
-
// single CommonJS export: the ExpandingBuffer class itself
module.exports = ExpandingBuffer
|
package/src/helpers/functions.js
DELETED
|
@@ -1,150 +0,0 @@
|
|
|
1
|
-
const fs = require('fs')
|
|
2
|
-
const { repeat, test, type } = require('ramda')
|
|
3
|
-
|
|
4
|
-
/**
 * Checks whether the given value is a primitive number (NaN included —
 * typeof NaN is 'number').
 */
const isNumber = (x) => typeof x === 'number'
|
|
7
|
-
|
|
8
|
-
/**
 * Checks whether the given value is a primitive string.
 */
const isString = (x) => typeof x === 'string'
|
|
11
|
-
|
|
12
|
-
/**
 * Checks whether the given value is callable.
 *
 * Fixed: the previous `type(x) === 'Function'` (ramda) rejected async and
 * generator functions, because R.type reports those as 'AsyncFunction' /
 * 'GeneratorFunction'. Callers (e.g. explode's callback guard) would then
 * throw ExpectedFunctionError for perfectly valid async callbacks.
 * `typeof` reports 'function' for every callable.
 */
const isFunction = (x) => {
  return typeof x === 'function'
}
|
|
15
|
-
|
|
16
|
-
// No-operation placeholder callback; always returns undefined.
const noop = () => {
  // intentionally empty
}
|
|
17
|
-
|
|
18
|
-
// https://stackoverflow.com/a/68989785/1806628
/**
 * Checks whether a value is a plain object literal (not an array, a class
 * instance, null, or a primitive).
 *
 * Fixed: the previous implementation accessed `x.constructor` directly and
 * therefore threw a TypeError for null/undefined input; a predicate should
 * simply return false for those.
 */
const isPlainObject = (x) => {
  if (x === null || typeof x !== 'object') {
    return false
  }
  return x.constructor === Object
}
|
|
22
|
-
|
|
23
|
-
/**
 * Inclusive range check; the bounds may be given in either order.
 * @returns {boolean|null} null when any argument is not a number
 */
const isBetween = (min, max, num) => {
  if (!isNumber(min) || !isNumber(max) || !isNumber(num)) {
    return null
  }

  const lower = Math.min(min, max)
  const upper = Math.max(min, max)

  return num >= lower && num <= upper
}
|
|
33
|
-
|
|
34
|
-
/**
 * Returns a bitmask of `numberOfBits` ones (e.g. 4 -> 0b1111 = 15).
 * @returns {number|null} null for negative or non-integer input
 */
const nBitsOfOnes = (numberOfBits) => {
  const isValid = Number.isInteger(numberOfBits) && numberOfBits >= 0
  return isValid ? (1 << numberOfBits) - 1 : null
}
|
|
40
|
-
|
|
41
|
-
/**
 * Masks `number` down to its lowest `numberOfBits` bits, with input
 * validation on both arguments.
 *
 * Fixed: the previous version computed the mask via nBitsOfOnes and then
 * redundantly called nBitsOfOnes a second time in the return expression;
 * the already computed `bits` value is reused now (same result).
 *
 * @returns {number|null} null when either argument is invalid
 */
const maskBits = (numberOfBits, number) => {
  const bits = nBitsOfOnes(numberOfBits)
  if (bits === null) {
    return null
  }
  if (!Number.isInteger(number) || number < 0) {
    return null
  }
  return number & bits
}
|
|
51
|
-
|
|
52
|
-
/**
 * Returns the lowest `numberOfBits` bits of `number`.
 * No input validation — this is a hot-path helper.
 */
const getLowestNBits = (numberOfBits, number) => number & nBitsOfOnes(numberOfBits)
|
|
55
|
-
|
|
56
|
-
// matches strings that consist solely of decimal digits, e.g. "123"
const isDecimalString = test(/^\d+$/)
|
|
57
|
-
|
|
58
|
-
/**
 * Checks for a full hex string of the form "0x1af" (lower case digits,
 * optionally surrounded by whitespace). Non-strings yield false.
 */
const isFullHexString = (str) => {
  if (!isString(str)) {
    return false
  }
  return /^\s*0x[0-9a-f]+\s*$/.test(str)
}
|
|
65
|
-
|
|
66
|
-
/**
 * Formats a number (or a "0x…" hex string) as a zero padded hex string.
 * @param {number|string} num - an integer, or a full hex string like "0xbeef"
 * @param {number} digits - minimum number of hex digits (left padded with 0)
 * @param {boolean} withoutPrefix - omit the leading "0x" when true
 * @returns {string|null} null on invalid input
 */
const toHex = (num, digits = 0, withoutPrefix = false) => {
  const prefix = withoutPrefix ? '' : '0x'

  if (!Number.isInteger(digits) || digits < 0) {
    return null
  }

  if (isFullHexString(num)) {
    // strip the prefix and any leading zeros, then re-pad to `digits`
    const hexDigits = num.trim().replace(/^0x0*/, '')
    return `${prefix}${hexDigits.padStart(digits, '0')}`
  }

  if (!Number.isInteger(num)) {
    return null
  }

  return `${prefix}${num.toString(16).padStart(digits, '0')}`
}
|
|
80
|
-
|
|
81
|
-
/**
 * Overlays sparse array `a` on top of `b`: defined entries of `a` win,
 * undefined entries and holes fall through to `b`. The result is as long
 * as the longer input.
 * @returns {Array} [] when either argument is not an array
 */
const mergeSparseArrays = (a, b) => {
  if (!Array.isArray(a) || !Array.isArray(b)) {
    return []
  }

  const length = Math.max(a.length, b.length)
  const result = Array.from({ length }, (_, i) => b[i])

  for (let i = 0; i < a.length; i++) {
    if (a[i] !== undefined) {
      result[i] = a[i]
    }
  }

  return result
}
|
|
94
|
-
|
|
95
|
-
/*
|
|
96
|
-
export const dumpBytes = bytes => {
|
|
97
|
-
const formattedBytes = Array.from(bytes)
|
|
98
|
-
.map(byte => {
|
|
99
|
-
return toHex(byte, 2, true)
|
|
100
|
-
})
|
|
101
|
-
.join(' ')
|
|
102
|
-
return `<${formattedBytes}>`
|
|
103
|
-
}
|
|
104
|
-
*/
|
|
105
|
-
|
|
106
|
-
/**
 * Parses a decimal string ("123") or full hex string ("0x7b") into an
 * integer.
 * @returns {number} the parsed value, or `defaultValue` for anything else
 */
const parseNumberString = (n, defaultValue = 0) => {
  if (isDecimalString(n)) {
    return parseInt(n, 10)
  }

  if (isFullHexString(n)) {
    return parseInt(n.replace(/^0x/, ''), 16)
  }

  return defaultValue
}
|
|
115
|
-
|
|
116
|
-
/**
 * Reads this package's version from its package.json.
 * @returns {Promise<string>} the version, or 'unknown' when unreadable
 */
const getPackageVersion = async () => {
  try {
    const packageJson = require('../../package.json')
    return packageJson.version
  } catch (error) {
    return 'unknown'
  }
}
|
|
124
|
-
|
|
125
|
-
/**
 * Checks whether a file exists and is readable by the current process.
 * @param {string} filename
 * @returns {Promise<boolean>}
 */
const fileExists = (filename) => {
  return fs.promises
    .access(filename, fs.constants.R_OK)
    .then(() => true)
    .catch(() => false)
}
|
|
133
|
-
|
|
134
|
-
module.exports = {
|
|
135
|
-
isNumber,
|
|
136
|
-
isString,
|
|
137
|
-
isFunction,
|
|
138
|
-
noop,
|
|
139
|
-
isPlainObject,
|
|
140
|
-
isBetween,
|
|
141
|
-
nBitsOfOnes,
|
|
142
|
-
maskBits,
|
|
143
|
-
getLowestNBits,
|
|
144
|
-
isFullHexString,
|
|
145
|
-
toHex,
|
|
146
|
-
mergeSparseArrays,
|
|
147
|
-
parseNumberString,
|
|
148
|
-
getPackageVersion,
|
|
149
|
-
fileExists,
|
|
150
|
-
}
|