node-pkware 1.0.2 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +50 -48
  2. package/dist/ExpandingBuffer.d.ts +53 -0
  3. package/dist/ExpandingBuffer.js +134 -0
  4. package/dist/ExpandingBuffer.js.map +1 -0
  5. package/dist/Explode.d.ts +8 -0
  6. package/dist/Explode.js +309 -0
  7. package/dist/Explode.js.map +1 -0
  8. package/dist/Implode.d.ts +11 -0
  9. package/dist/Implode.js +305 -0
  10. package/dist/Implode.js.map +1 -0
  11. package/dist/bin/explode.d.ts +2 -0
  12. package/dist/bin/explode.js +59 -0
  13. package/dist/bin/explode.js.map +1 -0
  14. package/dist/bin/helpers.d.ts +8 -0
  15. package/dist/bin/helpers.js +65 -0
  16. package/dist/bin/helpers.js.map +1 -0
  17. package/dist/bin/implode.d.ts +2 -0
  18. package/dist/bin/implode.js +79 -0
  19. package/dist/bin/implode.js.map +1 -0
  20. package/dist/constants.d.ts +32 -0
  21. package/dist/constants.js +114 -0
  22. package/dist/constants.js.map +1 -0
  23. package/{types → dist}/errors.d.ts +13 -11
  24. package/dist/errors.js +52 -0
  25. package/dist/errors.js.map +1 -0
  26. package/dist/functions.d.ts +11 -0
  27. package/dist/functions.js +73 -0
  28. package/dist/functions.js.map +1 -0
  29. package/dist/index.d.ts +20 -0
  30. package/dist/index.js +54 -0
  31. package/dist/index.js.map +1 -0
  32. package/{types/helpers → dist}/stream.d.ts +13 -34
  33. package/dist/stream.js +205 -0
  34. package/dist/stream.js.map +1 -0
  35. package/dist/tsconfig.tsbuildinfo +1 -0
  36. package/dist/types.d.ts +25 -0
  37. package/dist/types.js +3 -0
  38. package/dist/types.js.map +1 -0
  39. package/package.json +23 -45
  40. package/src/ExpandingBuffer.ts +148 -0
  41. package/src/Explode.ts +404 -0
  42. package/src/Implode.ts +368 -0
  43. package/src/bin/explode.ts +80 -0
  44. package/src/bin/helpers.ts +65 -0
  45. package/src/bin/implode.ts +116 -0
  46. package/src/{constants.js → constants.ts} +31 -50
  47. package/src/errors.ts +47 -0
  48. package/src/functions.ts +73 -0
  49. package/src/index.ts +30 -0
  50. package/src/stream.ts +220 -0
  51. package/src/types.ts +26 -0
  52. package/bin/explode.js +0 -78
  53. package/bin/implode.js +0 -116
  54. package/src/errors.js +0 -50
  55. package/src/explode.js +0 -411
  56. package/src/helpers/ExpandingBuffer.js +0 -123
  57. package/src/helpers/functions.js +0 -150
  58. package/src/helpers/stream.js +0 -190
  59. package/src/helpers/testing.js +0 -80
  60. package/src/implode.js +0 -364
  61. package/src/index.js +0 -18
  62. package/tsconfig.json +0 -20
  63. package/types/constants.d.ts +0 -41
  64. package/types/explode.d.ts +0 -56
  65. package/types/helpers/ExpandingBuffer.d.ts +0 -25
  66. package/types/helpers/Shared.d.ts +0 -46
  67. package/types/helpers/functions.d.ts +0 -15
  68. package/types/helpers/testing.d.ts +0 -6
  69. package/types/implode.d.ts +0 -63
  70. package/types/index.d.ts +0 -8
package/bin/implode.js DELETED
@@ -1,116 +0,0 @@
1
#!/usr/bin/env node

const fs = require('fs')
const minimist = require('minimist-lite')
const {
  COMPRESSION_BINARY,
  COMPRESSION_ASCII,
  DICTIONARY_SIZE_SMALL,
  DICTIONARY_SIZE_MEDIUM,
  DICTIONARY_SIZE_LARGE,
} = require('../src/constants.js')
const { getPackageVersion, parseNumberString, fileExists } = require('../src/helpers/functions.js')
const { implode } = require('../src/implode.js')
const { transformEmpty, transformIdentity, transformSplitBy, splitAt, through } = require('../src/helpers/stream.js')

/**
 * Pipes `input` through pkware implode (compression) into `output`.
 *
 * The first `offset` bytes are passed through verbatim (or discarded when
 * `keepHeader` is false); everything after the offset gets compressed.
 * Resolves when the output stream finishes, rejects on any stream error.
 *
 * NOTE: this function used to be called `decompress` — a copy-paste
 * leftover from bin/explode.js; it wraps `implode`, which compresses.
 */
const compress = (input, output, offset, keepHeader, compressionType, dictionarySize, config) => {
  const leftHandler = keepHeader ? transformIdentity() : transformEmpty()
  const rightHandler = implode(compressionType, dictionarySize, config)

  const handler = transformSplitBy(splitAt(offset), leftHandler, rightHandler)

  return new Promise((resolve, reject) => {
    input.pipe(through(handler).on('error', reject)).pipe(output).on('finish', resolve).on('error', reject)
  })
}

// CLI flags; single-letter aliases mirror the long flag names
const args = minimist(process.argv.slice(2), {
  string: ['output', 'offset', 'input-buffer-size', 'output-buffer-size'],
  boolean: ['version', 'binary', 'ascii', 'drop-before-offset', 'debug', 'small', 'medium', 'large'],
  alias: {
    a: 'ascii',
    b: 'binary',
    s: 'small',
    m: 'medium',
    l: 'large',
    v: 'version',
  },
})

;(async () => {
  if (args.version) {
    const version = await getPackageVersion()
    console.log(`node-pkware - version ${version}`)
    process.exit(0)
  }

  let input = args._[0] || args.input
  let output = args.output

  let hasErrors = false

  if (input) {
    if (await fileExists(input)) {
      input = fs.createReadStream(input)
    } else {
      console.error('error: given file does not exist')
      hasErrors = true
    }
  } else {
    // no input file given: read from stdin
    // (process.openStdin() is deprecated; piping resumes the stream anyway)
    input = process.stdin
  }

  // exactly one of --ascii / --binary must be set
  if (args.ascii && args.binary) {
    console.error('error: multiple compression types specified, can only work with one of --ascii and --binary')
    hasErrors = true
  } else if (!args.ascii && !args.binary) {
    console.error('error: compression type missing, expected either --ascii or --binary')
    hasErrors = true
  }

  // exactly one of --small / --medium / --large must be set
  const sizes = [args.small, args.medium, args.large].filter((x) => {
    return x === true
  })
  if (sizes.length > 1) {
    console.error('error: multiple size types specified, can only work with one of --small, --medium and --large')
    hasErrors = true
  } else if (sizes.length === 0) {
    console.error('error: size type missing, expected either --small, --medium or --large')
    hasErrors = true
  }

  if (output) {
    output = fs.createWriteStream(output)
  } else {
    output = process.stdout
  }

  if (hasErrors) {
    process.exit(1)
  }

  const compressionType = args.ascii ? COMPRESSION_ASCII : COMPRESSION_BINARY
  const dictionarySize = args.small
    ? DICTIONARY_SIZE_SMALL
    : args.medium
      ? DICTIONARY_SIZE_MEDIUM
      : DICTIONARY_SIZE_LARGE

  const offset = parseNumberString(args.offset, 0)

  const keepHeader = !args['drop-before-offset']
  const config = {
    debug: args.debug,
    inputBufferSize: parseNumberString(args['input-buffer-size'], 0x10000),
    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x12000),
  }

  compress(input, output, offset, keepHeader, compressionType, dictionarySize, config)
    .then(() => {
      process.exit(0)
    })
    .catch((e) => {
      console.error(`error: ${e.message}`)
      process.exit(1)
    })
})()
package/src/errors.js DELETED
@@ -1,50 +0,0 @@
1
- class InvalidDictionarySizeError extends Error {
2
- constructor() {
3
- super('Invalid dictionary size')
4
- this.name = 'InvalidDictionarySizeError'
5
- }
6
- }
7
-
8
- class InvalidCompressionTypeError extends Error {
9
- constructor() {
10
- super('Invalid compression type')
11
- this.name = 'InvalidCompressionTypeError'
12
- }
13
- }
14
-
15
- class InvalidDataError extends Error {
16
- constructor() {
17
- super('Invalid data')
18
- this.name = 'InvalidDataError'
19
- }
20
- }
21
-
22
- class AbortedError extends Error {
23
- constructor() {
24
- super('Aborted')
25
- this.name = 'AbortedError'
26
- }
27
- }
28
-
29
- class ExpectedBufferError extends TypeError {
30
- constructor() {
31
- super('Expected variable to be of type Buffer')
32
- this.name = 'ExpectedBufferError'
33
- }
34
- }
35
-
36
- class ExpectedFunctionError extends TypeError {
37
- constructor() {
38
- super('Expected variable to be a Function')
39
- this.name = 'ExpectedFunctionError'
40
- }
41
- }
42
-
43
- module.exports = {
44
- InvalidDictionarySizeError,
45
- InvalidCompressionTypeError,
46
- InvalidDataError,
47
- AbortedError,
48
- ExpectedBufferError,
49
- ExpectedFunctionError,
50
- }
package/src/explode.js DELETED
@@ -1,411 +0,0 @@
1
- const { repeat, unfold, has } = require('ramda')
2
- const {
3
- InvalidDataError,
4
- InvalidCompressionTypeError,
5
- InvalidDictionarySizeError,
6
- ExpectedBufferError,
7
- ExpectedFunctionError,
8
- AbortedError,
9
- } = require('./errors.js')
10
- const { mergeSparseArrays, getLowestNBits, nBitsOfOnes, toHex, isFunction } = require('./helpers/functions.js')
11
- const {
12
- ChBitsAsc,
13
- ChCodeAsc,
14
- COMPRESSION_BINARY,
15
- COMPRESSION_ASCII,
16
- DICTIONARY_SIZE_SMALL,
17
- DICTIONARY_SIZE_MEDIUM,
18
- DICTIONARY_SIZE_LARGE,
19
- PKDCL_OK,
20
- PKDCL_STREAM_END,
21
- LITERAL_STREAM_ABORTED,
22
- LITERAL_END_STREAM,
23
- LenBits,
24
- LenBase,
25
- ExLenBits,
26
- DistBits,
27
- LenCode,
28
- DistCode,
29
- } = require('./constants.js')
30
- const ExpandingBuffer = require('./helpers/ExpandingBuffer.js')
31
-
32
/**
 * Parses the header of a pkware compressed stream.
 * Byte 0 is the compression type, byte 1 the number of dictionary size bits.
 * Throws ExpectedBufferError / InvalidDataError / InvalidCompressionTypeError /
 * InvalidDictionarySizeError on malformed input.
 */
const readHeader = (buffer) => {
  if (!Buffer.isBuffer(buffer)) {
    throw new ExpectedBufferError()
  }
  if (buffer.length < 4) {
    throw new InvalidDataError()
  }

  const compressionType = buffer.readUInt8(0)
  const dictionarySizeBits = buffer.readUInt8(1)

  const knownCompressionTypes = [COMPRESSION_BINARY, COMPRESSION_ASCII]
  if (!knownCompressionTypes.includes(compressionType)) {
    throw new InvalidCompressionTypeError()
  }

  const knownDictionarySizes = [DICTIONARY_SIZE_SMALL, DICTIONARY_SIZE_MEDIUM, DICTIONARY_SIZE_LARGE]
  if (!knownDictionarySizes.includes(dictionarySizeBits)) {
    throw new InvalidDictionarySizeError()
  }

  return {
    compressionType,
    dictionarySizeBits,
  }
}
54
-
55
// PAT = populate ascii table
// Builds an unfold-style step function: given n it yields [n, next n]
// while n is below `limit`, and false once the limit is reached.
const createPATIterator = (limit, stepper) => {
  const stepSize = 1 << stepper
  return (n) => {
    if (n >= limit) {
      return false
    }
    return [n, n + stepSize]
  }
}
61
-
62
/**
 * Builds one sparse lookup-table slice for a single ascii character code.
 * Every bit pattern that should decode to `index` gets marked; the stride
 * between marked entries is 1 << (value - bits), starting from the
 * character's code shifted down by `bits` (the former createPATIterator
 * unfold, written as a plain loop).
 */
const populateAsciiTable = (value, index, bits, limit) => {
  const stride = 1 << (value - bits)
  const table = []

  for (let code = ChCodeAsc[index] >> bits; code < limit; code += stride) {
    table[code] = index
  }

  return table
}
72
-
73
/**
 * Precomputes the lookup tables used for ascii-mode decompression.
 * Returns the four sparse ascii tables plus `chBitsAsc`, the adjusted
 * bit-count per character (how many bits remain to be consumed after a
 * table lookup).
 */
const generateAsciiTables = () => {
  const tables = {
    asciiTable2C34: repeat(0, 0x100),
    asciiTable2D34: repeat(0, 0x100),
    asciiTable2E34: repeat(0, 0x80),
    asciiTable2EB4: repeat(0, 0x100),
  }

  tables.chBitsAsc = ChBitsAsc.map((value, index) => {
    const code = ChCodeAsc[index]

    // short codes (<= 8 bits) resolve directly through asciiTable2C34
    if (value <= 8) {
      tables.asciiTable2C34 = mergeSparseArrays(populateAsciiTable(value, index, 0, 0x100), tables.asciiTable2C34)
      return value
    }

    const low8 = getLowestNBits(8, code)
    if (low8 === 0) {
      tables.asciiTable2EB4 = mergeSparseArrays(populateAsciiTable(value, index, 8, 0x100), tables.asciiTable2EB4)
      return value - 8
    }

    // 0xff marks an escape: the decoder must consult a secondary table
    tables.asciiTable2C34[low8] = 0xff

    if (getLowestNBits(6, code) === 0) {
      tables.asciiTable2E34 = mergeSparseArrays(populateAsciiTable(value, index, 6, 0x80), tables.asciiTable2E34)
      return value - 6
    }

    tables.asciiTable2D34 = mergeSparseArrays(populateAsciiTable(value, index, 4, 0x100), tables.asciiTable2D34)
    return value - 4
  })

  return tables
}
106
-
107
/**
 * Consumes the stream header from state.inputBuffer and initializes the
 * decompression state (bit buffer, dictionary mask, ascii tables).
 * Returns false when fewer than 4 bytes are buffered (call again later),
 * true once the header has been parsed.
 */
const parseInitialData = (state, debug = false) => {
  if (state.inputBuffer.size() < 4) {
    // not enough bytes buffered yet to read the header
    return false
  }

  const { compressionType, dictionarySizeBits } = readHeader(state.inputBuffer.read())

  state.compressionType = compressionType
  state.dictionarySizeBits = dictionarySizeBits
  // byte #2 seeds the bit reservoir; the 3 header bytes are then dropped
  state.bitBuffer = state.inputBuffer.read(2, 1)
  state.inputBuffer.dropStart(3)
  state.dictionarySizeMask = nBitsOfOnes(dictionarySizeBits)

  if (compressionType === COMPRESSION_ASCII) {
    Object.assign(state, generateAsciiTables())
  }

  if (debug) {
    console.log(`explode: compression type: ${state.compressionType === COMPRESSION_BINARY ? 'binary' : 'ascii'}`)
    const level = state.dictionarySizeBits === 4 ? 'small' : state.dictionarySizeBits === 5 ? 'medium' : 'large'
    console.log(`explode: compression level: ${level}`)
  }

  return true
}
138
-
139
/**
 * Discards `numberOfBits` from the bit reservoir, pulling one byte from
 * the input buffer when the reservoir runs short. Returns PKDCL_OK on
 * success, or PKDCL_STREAM_END when a refill is needed but the input
 * buffer is already empty.
 */
const wasteBits = (state, numberOfBits) => {
  // fast path: the reservoir alone covers the request
  if (numberOfBits <= state.extraBits) {
    state.bitBuffer = state.bitBuffer >> numberOfBits
    state.extraBits = state.extraBits - numberOfBits
    return PKDCL_OK
  }

  if (state.inputBuffer.isEmpty()) {
    return PKDCL_STREAM_END
  }

  // refill: splice the next input byte above the remaining reservoir bits
  const nextByte = state.inputBuffer.read(0, 1)
  state.inputBuffer.dropStart(1)

  state.bitBuffer = ((state.bitBuffer >> state.extraBits) | (nextByte << 8)) >> (numberOfBits - state.extraBits)
  state.extraBits = state.extraBits + 8 - numberOfBits

  return PKDCL_OK
}
157
-
158
/**
 * Decodes the next literal from the bit stream.
 * Returns a value < 0x100 for a plain byte, >= 0x100 for a repetition
 * length code (length = value - 0xfe), or LITERAL_STREAM_ABORTED when the
 * input ran out mid-literal.
 */
const decodeNextLiteral = (state) => {
  // bit 0 selects between a length code (1) and a literal byte (0)
  const isLengthCode = state.bitBuffer & 1

  if (wasteBits(state, 1) === PKDCL_STREAM_END) {
    return LITERAL_STREAM_ABORTED
  }

  if (isLengthCode) {
    let lengthCode = state.lengthCodes[getLowestNBits(8, state.bitBuffer)]

    if (wasteBits(state, LenBits[lengthCode]) === PKDCL_STREAM_END) {
      return LITERAL_STREAM_ABORTED
    }

    const extraLengthBits = ExLenBits[lengthCode]
    if (extraLengthBits !== 0) {
      const extraLength = getLowestNBits(extraLengthBits, state.bitBuffer)

      // 0x10e (= LITERAL_END_STREAM - 0x100) may legitimately end the stream
      if (wasteBits(state, extraLengthBits) === PKDCL_STREAM_END && lengthCode + extraLength !== 0x10e) {
        return LITERAL_STREAM_ABORTED
      }

      lengthCode = LenBase[lengthCode] + extraLength
    }

    return lengthCode + 0x100
  }

  const lastByte = getLowestNBits(8, state.bitBuffer)

  if (state.compressionType === COMPRESSION_BINARY) {
    // binary mode: the literal is the raw low byte
    if (wasteBits(state, 8) === PKDCL_STREAM_END) {
      return LITERAL_STREAM_ABORTED
    }
    return lastByte
  }

  // ascii mode: resolve through the precomputed lookup tables
  let value
  if (lastByte > 0) {
    value = state.asciiTable2C34[lastByte]

    if (value === 0xff) {
      // escape marker: consult a secondary table
      if (getLowestNBits(6, state.bitBuffer)) {
        if (wasteBits(state, 4) === PKDCL_STREAM_END) {
          return LITERAL_STREAM_ABORTED
        }

        value = state.asciiTable2D34[getLowestNBits(8, state.bitBuffer)]
      } else {
        if (wasteBits(state, 6) === PKDCL_STREAM_END) {
          return LITERAL_STREAM_ABORTED
        }

        value = state.asciiTable2E34[getLowestNBits(7, state.bitBuffer)]
      }
    }
  } else {
    if (wasteBits(state, 8) === PKDCL_STREAM_END) {
      return LITERAL_STREAM_ABORTED
    }

    value = state.asciiTable2EB4[getLowestNBits(8, state.bitBuffer)]
  }

  if (wasteBits(state, state.chBitsAsc[value]) === PKDCL_STREAM_END) {
    return LITERAL_STREAM_ABORTED
  }
  return value
}
221
-
222
/**
 * Decodes the backreference distance for a repetition of `repeatLength`
 * bytes. Returns the distance + 1, or 0 when the input ran out.
 */
const decodeDistance = (state, repeatLength) => {
  const distPosCode = state.distPosCodes[getLowestNBits(8, state.bitBuffer)]
  if (wasteBits(state, DistBits[distPosCode]) === PKDCL_STREAM_END) {
    return 0
  }

  let distance
  let bitsToWaste

  if (repeatLength === 2) {
    // 2 byte repetitions only carry 2 extra distance bits
    distance = (distPosCode << 2) | getLowestNBits(2, state.bitBuffer)
    bitsToWaste = 2
  } else {
    // otherwise the full dictionary-size-bits worth of low bits is used
    distance = (distPosCode << state.dictionarySizeBits) | (state.bitBuffer & state.dictionarySizeMask)
    bitsToWaste = state.dictionarySizeBits
  }

  if (wasteBits(state, bitsToWaste) === PKDCL_STREAM_END) {
    return 0
  }

  return distance + 1
}
246
-
247
/**
 * Decompresses as much of the buffered input as possible into
 * state.outputBuffer. Parses the header on first call. When input runs
 * out mid-literal, the state is rolled back to the last good position
 * (backup/restore) and needMoreInput is left set.
 */
const processChunkData = (state, debug = false) => {
  if (state.inputBuffer.isEmpty()) {
    return
  }

  if (!has('compressionType', state)) {
    // header not parsed yet; bail if it is incomplete or consumed everything
    if (!parseInitialData(state, debug) || state.inputBuffer.isEmpty()) {
      return
    }
  }

  state.needMoreInput = false

  state.backup()
  let literal = decodeNextLiteral(state)

  while (literal !== LITERAL_END_STREAM && literal !== LITERAL_STREAM_ABORTED) {
    let bytesToAppend

    if (literal >= 0x100) {
      // repetition: copy repeatLength bytes from earlier in the output
      const repeatLength = literal - 0xfe
      const backtrackDistance = decodeDistance(state, repeatLength)
      if (backtrackDistance === 0) {
        state.needMoreInput = true
        break
      }

      const availableData = state.outputBuffer.read(state.outputBuffer.size() - backtrackDistance, repeatLength)

      if (repeatLength > backtrackDistance) {
        // overlapping copy: tile the available bytes up to repeatLength
        const multipliedData = repeat(availableData, Math.ceil(repeatLength / availableData.length))
        bytesToAppend = Buffer.concat(multipliedData).slice(0, repeatLength)
      } else {
        bytesToAppend = availableData
      }
    } else {
      bytesToAppend = Buffer.from([literal])
    }

    state.outputBuffer.append(bytesToAppend)

    state.backup()
    literal = decodeNextLiteral(state)
  }

  if (literal === LITERAL_STREAM_ABORTED) {
    state.needMoreInput = true
  }

  if (state.needMoreInput) {
    state.restore()
  }
}
300
-
301
/**
 * Builds a 256-entry decode table: for every 8-bit pattern, the index of
 * the code it decodes to. Entry i of lengthBits gives the stride
 * (1 << bits) at which code i repeats, starting from startIndexes[i].
 */
const generateDecodeTables = (startIndexes, lengthBits) => {
  const codes = new Array(0x100).fill(0)

  lengthBits.forEach((lengthBit, i) => {
    for (let index = startIndexes[i]; index < 0x100; index += 1 << lengthBit) {
      codes[index] = i
    }
  })

  return codes
}
310
-
311
/**
 * Creates a transform handler function that decompresses (explodes)
 * pkware-compressed data fed to it chunk by chunk. The handler keeps its
 * working data on `handler._state`; output is emitted in 0x1000 byte
 * blocks while the tail stays buffered (earlier bytes must remain
 * readable for backreferences — see processChunkData).
 */
const explode = (config = {}) => {
  const { debug = false, inputBufferSize = 0x0, outputBufferSize = 0x0 } = config

  const handler = function (chunk, encoding, callback) {
    if (!isFunction(callback)) {
      // can't call callback to pass in data or errors, so we throw up
      throw new ExpectedFunctionError()
    }

    const state = handler._state
    state.needMoreInput = true

    try {
      state.inputBuffer.append(chunk)

      if (state.isFirstChunk) {
        state.isFirstChunk = false
        this._flush = state.onInputFinished
      }

      if (debug) {
        console.log(`explode: reading ${toHex(chunk.length)} bytes from chunk #${state.stats.chunkCounter++}`)
      }

      processChunkData(state, debug)

      const blockSize = 0x1000

      if (state.outputBuffer.size() <= blockSize) {
        callback(null, Buffer.from([]))
      } else {
        // flush all full blocks except the last one, which stays buffered
        const numberOfBytes = (Math.floor(state.outputBuffer.size() / blockSize) - 1) * blockSize
        const output = Buffer.from(state.outputBuffer.read(0, numberOfBytes))
        state.outputBuffer.flushStart(numberOfBytes)

        callback(null, output)
      }
    } catch (e) {
      callback(e)
    }
  }

  handler._state = {
    _backup: {
      extraBits: null,
      bitBuffer: null,
    },
    needMoreInput: true,
    isFirstChunk: true,
    extraBits: 0,
    chBitsAsc: repeat(0, 0x100), // DecodeLit and GenAscTabs uses this
    lengthCodes: generateDecodeTables(LenCode, LenBits),
    distPosCodes: generateDecodeTables(DistCode, DistBits),
    inputBuffer: new ExpandingBuffer(inputBufferSize),
    outputBuffer: new ExpandingBuffer(outputBufferSize),
    // called when the upstream ends; emits the remaining buffered output
    onInputFinished: (callback) => {
      const state = handler._state

      if (debug) {
        console.log('---------------')
        console.log('explode: total number of chunks read:', state.stats.chunkCounter)
        console.log('explode: inputBuffer heap size', toHex(state.inputBuffer.heapSize()))
        console.log('explode: outputBuffer heap size', toHex(state.outputBuffer.heapSize()))
      }

      if (state.needMoreInput) {
        // stream ended while a literal was still being decoded: truncated input
        callback(new AbortedError())
      } else {
        callback(null, state.outputBuffer.read())
      }
    },
    // snapshot the bit reservoir + input indices before decoding a literal
    backup: () => {
      const state = handler._state
      state._backup.extraBits = state.extraBits
      state._backup.bitBuffer = state.bitBuffer
      state.inputBuffer._saveIndices()
    },
    // roll back to the last snapshot when input ran out mid-literal
    restore: () => {
      const state = handler._state
      state.extraBits = state._backup.extraBits
      state.bitBuffer = state._backup.bitBuffer
      state.inputBuffer._restoreIndices()
    },
    stats: {
      chunkCounter: 0,
    },
  }

  return handler
}
399
-
400
- module.exports = {
401
- readHeader,
402
- explode,
403
- createPATIterator,
404
- populateAsciiTable,
405
- generateAsciiTables,
406
- processChunkData,
407
- wasteBits,
408
- decodeNextLiteral,
409
- decodeDistance,
410
- generateDecodeTables,
411
- }
@@ -1,123 +0,0 @@
1
- const { clamp } = require('ramda')
2
- const { ExpectedBufferError } = require('../errors')
3
-
4
/**
 * Buffer wrapper that grows on demand and supports cheap deletion from
 * both ends: only the [_startIndex, _endIndex) window of the backing heap
 * holds live data, so dropping bytes is just an index move.
 */
class ExpandingBuffer {
  constructor(numberOfBytes = 0) {
    this._heap = Buffer.allocUnsafe(numberOfBytes)
    this._startIndex = 0
    this._endIndex = 0

    // snapshot slot for _saveIndices / _restoreIndices
    this._backup = {
      _startIndex: 0,
      _endIndex: 0,
    }
  }

  // the live window of the heap (shares memory with the heap!)
  _getActualData(offset = 0) {
    return this._heap.slice(this._startIndex + offset, this._endIndex)
  }

  // number of live bytes
  size() {
    return this._endIndex - this._startIndex
  }

  isEmpty() {
    return this.size() === 0
  }

  // total allocated bytes, live or not
  heapSize() {
    return this._heap.length
  }

  /**
   * Appends a Buffer. Copies in place when the heap has room after
   * _endIndex, otherwise reallocates with the live data compacted to the
   * front. Throws ExpectedBufferError for non-Buffer input.
   */
  append(buffer) {
    if (!Buffer.isBuffer(buffer)) {
      throw new ExpectedBufferError()
    }

    const fitsInHeap = this._endIndex + buffer.length < this.heapSize()
    if (fitsInHeap) {
      buffer.copy(this._heap, this._endIndex)
      this._endIndex = this._endIndex + buffer.length
    } else {
      this._heap = Buffer.concat([this._getActualData(), buffer])
      this._startIndex = 0
      this._endIndex = this.heapSize()
    }
  }

  /**
   * Reads `limit` bytes starting at `offset` (relative to the live data).
   * limit === 1 returns the single byte as a number, not a Buffer.
   * Watch out! the buffer returned by Buffer.slice() will point to the
   * same memory!
   */
  read(offset, limit) {
    if (offset < 0 || limit < 1) {
      return Buffer.from([])
    }

    if (limit === 1) {
      return this._heap[this._startIndex + offset]
    }

    if (offset + limit < this.size()) {
      return this._heap.slice(this._startIndex + offset, this._startIndex + limit + offset)
    }

    // request reaches past the end: return whatever is available
    return this._getActualData(offset)
  }

  /**
   * Hard delete from the front: removes data by copying the remaining
   * bytes down to index 0.
   */
  flushStart(numberOfBytes) {
    const bytes = clamp(0, this.heapSize(), numberOfBytes)
    if (bytes > 0) {
      if (bytes < this.heapSize()) {
        this._heap.copy(this._heap, 0, this._startIndex + bytes)
      }
      this._endIndex = this._endIndex - (this._startIndex + bytes)
      this._startIndex = 0
    }
  }

  // hard delete from the back
  flushEnd(numberOfBytes) {
    const bytes = clamp(0, this.heapSize(), numberOfBytes)
    if (bytes > 0) {
      this._endIndex = this._endIndex - bytes
    }
  }

  /**
   * Soft delete from the front: just moves _startIndex forward; resets
   * the indices when the window collapses.
   */
  dropStart(numberOfBytes) {
    if (numberOfBytes > 0) {
      this._startIndex = this._startIndex + numberOfBytes
      if (this._startIndex >= this._endIndex) {
        this.clear()
      }
    }
  }

  // soft delete from the back
  dropEnd(numberOfBytes) {
    if (numberOfBytes > 0) {
      this._endIndex = this._endIndex - numberOfBytes
      if (this._startIndex >= this._endIndex) {
        this.clear()
      }
    }
  }

  getHeap() {
    return this._heap
  }

  // empties the window without touching the allocation
  clear() {
    this._startIndex = 0
    this._endIndex = 0
  }

  _saveIndices() {
    this._backup._startIndex = this._startIndex
    this._backup._endIndex = this._endIndex
  }

  _restoreIndices() {
    this._startIndex = this._backup._startIndex
    this._endIndex = this._backup._endIndex
  }
}
122
-
123
// single default export of this module
module.exports = ExpandingBuffer