node-pkware 2.0.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. package/README.md +42 -40
  2. package/dist/ExpandingBuffer.d.ts +53 -0
  3. package/dist/ExpandingBuffer.js +134 -0
  4. package/dist/ExpandingBuffer.js.map +1 -0
  5. package/dist/Explode.d.ts +8 -0
  6. package/dist/Explode.js +309 -0
  7. package/dist/Explode.js.map +1 -0
  8. package/dist/Implode.d.ts +11 -0
  9. package/dist/Implode.js +305 -0
  10. package/dist/Implode.js.map +1 -0
  11. package/dist/bin/explode.d.ts +2 -0
  12. package/dist/bin/explode.js +59 -0
  13. package/dist/bin/explode.js.map +1 -0
  14. package/dist/bin/helpers.d.ts +8 -0
  15. package/dist/bin/helpers.js +65 -0
  16. package/dist/bin/helpers.js.map +1 -0
  17. package/dist/bin/implode.d.ts +2 -0
  18. package/dist/bin/implode.js +79 -0
  19. package/dist/bin/implode.js.map +1 -0
  20. package/dist/constants.d.ts +32 -0
  21. package/dist/constants.js +114 -0
  22. package/dist/constants.js.map +1 -0
  23. package/{types → dist}/errors.d.ts +13 -11
  24. package/dist/errors.js +52 -0
  25. package/dist/errors.js.map +1 -0
  26. package/dist/functions.d.ts +11 -0
  27. package/dist/functions.js +73 -0
  28. package/dist/functions.js.map +1 -0
  29. package/dist/index.d.ts +20 -0
  30. package/dist/index.js +54 -0
  31. package/dist/index.js.map +1 -0
  32. package/{types/helpers → dist}/stream.d.ts +13 -34
  33. package/dist/stream.js +205 -0
  34. package/dist/stream.js.map +1 -0
  35. package/dist/tsconfig.tsbuildinfo +1 -0
  36. package/dist/types.d.ts +25 -0
  37. package/dist/types.js +3 -0
  38. package/dist/types.js.map +1 -0
  39. package/package.json +14 -45
  40. package/src/ExpandingBuffer.ts +148 -0
  41. package/src/Explode.ts +404 -0
  42. package/src/Implode.ts +368 -0
  43. package/{bin/explode.js → src/bin/explode.ts} +35 -33
  44. package/src/bin/helpers.ts +65 -0
  45. package/src/bin/implode.ts +116 -0
  46. package/src/{constants.js → constants.ts} +31 -50
  47. package/src/errors.ts +47 -0
  48. package/src/functions.ts +73 -0
  49. package/src/index.ts +30 -0
  50. package/src/stream.ts +220 -0
  51. package/src/types.ts +26 -0
  52. package/bin/implode.js +0 -116
  53. package/src/errors.js +0 -50
  54. package/src/explode.js +0 -411
  55. package/src/helpers/ExpandingBuffer.js +0 -123
  56. package/src/helpers/functions.js +0 -150
  57. package/src/helpers/stream.js +0 -190
  58. package/src/helpers/testing.js +0 -80
  59. package/src/implode.js +0 -364
  60. package/src/index.js +0 -18
  61. package/tsconfig.json +0 -20
  62. package/types/constants.d.ts +0 -41
  63. package/types/explode.d.ts +0 -56
  64. package/types/helpers/ExpandingBuffer.d.ts +0 -25
  65. package/types/helpers/Shared.d.ts +0 -46
  66. package/types/helpers/functions.d.ts +0 -15
  67. package/types/helpers/testing.d.ts +0 -6
  68. package/types/implode.d.ts +0 -63
  69. package/types/index.d.ts +0 -8
@@ -1,190 +0,0 @@
1
- const { Transform, Writable } = require('stream')
2
- const { promisify } = require('util')
3
- const ExpandingBuffer = require('./ExpandingBuffer.js')
4
- const { isFunction } = require('./functions.js')
5
-
6
- const emptyBuffer = Buffer.from([])
7
-
8
- class QuasiTransform {
9
- constructor(handler) {
10
- this.handler = handler
11
- }
12
-
13
- handle(chunk, encoding) {
14
- return promisify(this.handler).call(this, chunk, encoding)
15
- }
16
- }
17
-
18
- const splitAt = (index) => {
19
- let cntr = 0
20
-
21
- if (!Number.isInteger(index) || index < 0) {
22
- return () => {
23
- return null
24
- }
25
- }
26
-
27
- return (chunk) => {
28
- let left
29
- let right
30
- let isLeftDone = true
31
-
32
- if (!Buffer.isBuffer(chunk)) {
33
- return null
34
- }
35
-
36
- if (index <= cntr) {
37
- // index ..... cntr ..... chunk.length
38
- left = emptyBuffer
39
- right = chunk
40
- } else if (index >= cntr + chunk.length) {
41
- // cntr ..... chunk.length ..... index
42
- left = chunk
43
- right = emptyBuffer
44
- isLeftDone = index === cntr + chunk.length
45
- } else {
46
- // cntr ..... index ..... chunk.length
47
- left = chunk.slice(0, index - cntr)
48
- right = chunk.slice(index - cntr)
49
- }
50
-
51
- cntr += chunk.length
52
-
53
- return [left, right, isLeftDone]
54
- }
55
- }
56
-
57
- const transformIdentity = () => {
58
- return function (chunk, encoding, callback) {
59
- callback(null, chunk)
60
- }
61
- }
62
-
63
- const transformEmpty = () => {
64
- return function (chunk, encoding, callback) {
65
- callback(null, emptyBuffer)
66
- }
67
- }
68
-
69
- const through = (handler) => {
70
- return new Transform({
71
- transform: handler,
72
- })
73
- }
74
-
75
- const transformSplitBy = (predicate, leftHandler, rightHandler) => {
76
- let isFirstChunk = true
77
- let wasLeftFlushCalled = false
78
- const damChunkSize = 0x10000
79
- const dam = new ExpandingBuffer()
80
-
81
- const leftTransform = new QuasiTransform(leftHandler)
82
- const rightTransform = new QuasiTransform(rightHandler)
83
-
84
- return function (chunk, encoding, callback) {
85
- const [left, right, isLeftDone] = predicate(chunk)
86
-
87
- const _left = leftTransform.handle(left, encoding)
88
- const _right = rightTransform.handle(right, encoding)
89
-
90
- if (isFirstChunk) {
91
- isFirstChunk = false
92
- this._flush = (flushCallback) => {
93
- if (!dam.isEmpty()) {
94
- this.push(dam.read())
95
- }
96
-
97
- let leftFiller = Promise.resolve(emptyBuffer)
98
- let rightFiller = Promise.resolve(emptyBuffer)
99
-
100
- if (!wasLeftFlushCalled && isFunction(leftTransform._flush)) {
101
- leftFiller = new Promise((resolve, reject) => {
102
- leftTransform._flush((err, data) => {
103
- if (err) {
104
- reject(err)
105
- } else {
106
- resolve(data)
107
- }
108
- })
109
- })
110
- }
111
-
112
- if (isFunction(rightTransform._flush)) {
113
- rightFiller = new Promise((resolve, reject) => {
114
- rightTransform._flush((err, data) => {
115
- if (err) {
116
- reject(err)
117
- } else {
118
- resolve(data)
119
- }
120
- })
121
- })
122
- }
123
-
124
- Promise.all([leftFiller, rightFiller])
125
- .then((buffers) => {
126
- flushCallback(null, Buffer.concat(buffers))
127
- })
128
- .catch((err) => {
129
- flushCallback(err)
130
- })
131
- }
132
- }
133
-
134
- let filler = Promise.resolve(emptyBuffer)
135
- if (isLeftDone && !wasLeftFlushCalled && isFunction(leftTransform._flush)) {
136
- wasLeftFlushCalled = true
137
- filler = new Promise((resolve, reject) => {
138
- leftTransform._flush((err, data) => {
139
- if (err) {
140
- reject(err)
141
- } else {
142
- resolve(data)
143
- }
144
- })
145
- })
146
- }
147
-
148
- Promise.all([_left, filler, _right])
149
- .then((buffers) => {
150
- dam.append(Buffer.concat(buffers))
151
- if (dam.size() > damChunkSize) {
152
- const chunks = Math.floor(dam.size() / damChunkSize)
153
- const data = Buffer.from(dam.read(0, chunks * damChunkSize))
154
- dam.flushStart(chunks * damChunkSize)
155
- for (let i = 0; i < chunks - 1; i++) {
156
- this.push(data.slice(i * damChunkSize, i * damChunkSize + damChunkSize))
157
- }
158
- callback(null, data.slice((chunks - 1) * damChunkSize))
159
- } else {
160
- callback(null, emptyBuffer)
161
- }
162
- })
163
- .catch((err) => {
164
- callback(err)
165
- })
166
- }
167
- }
168
-
169
- const streamToBuffer = (done) => {
170
- const buffer = new ExpandingBuffer()
171
- return new Writable({
172
- write(chunk, encoding, callback) {
173
- buffer.append(chunk)
174
- callback()
175
- },
176
- final(callback) {
177
- done(buffer.getHeap())
178
- callback()
179
- },
180
- })
181
- }
182
-
183
- module.exports = {
184
- splitAt,
185
- transformIdentity,
186
- transformEmpty,
187
- through,
188
- transformSplitBy,
189
- streamToBuffer,
190
- }
@@ -1,80 +0,0 @@
1
- // const { EOL } = require('os')
2
- // const fs = require('fs')
3
- const assert = require('assert')
4
- const { compare, report } = require('binary-comparator')
5
-
6
- /*
7
- const isPromise = promise => {
8
- return typeof promise === 'object' && promise.constructor.name === 'Promise'
9
- }
10
-
11
- const toConsole = () => {
12
- return (chunk, encoding, callback) => {
13
- process.stdout.write(chunk)
14
- process.stdout.write(Buffer.from(EOL))
15
- callback(null, chunk)
16
- }
17
- }
18
-
19
- const readToBuffer = (fileName, chunkSizeInBytes = 1024) => {
20
- return new Promise((resolve, reject) => {
21
- const chunks = []
22
- fs.createReadStream(fileName, { highWaterMark: chunkSizeInBytes })
23
- .on('error', reject)
24
- .on('data', chunk => {
25
- chunks.push(chunk)
26
- })
27
- .on('end', function () {
28
- resolve(Buffer.concat(chunks))
29
- })
30
- })
31
- }
32
- */
33
-
34
- // source: https://stackoverflow.com/a/43197340/1806628
35
- const isClass = (obj) => {
36
- const isCtorClass = obj.constructor && obj.constructor.toString().substring(0, 5) === 'class'
37
- if (obj.prototype === undefined) {
38
- return isCtorClass
39
- }
40
- const isPrototypeCtorClass =
41
- obj.prototype.constructor &&
42
- obj.prototype.constructor.toString &&
43
- obj.prototype.constructor.toString().substring(0, 5) === 'class'
44
- return isCtorClass || isPrototypeCtorClass
45
- }
46
-
47
- // https://stackoverflow.com/a/48845122/1806628
48
- const bufferToString = (buffer, limit = 20) => {
49
- const ellipsisNecessary = buffer.length > limit
50
- let hexString = buffer.slice(0, limit).toString('hex')
51
- hexString = hexString.length > 2 ? hexString.match(/../g).join(' ') : hexString
52
- return `<Buffer ${hexString}${ellipsisNecessary ? '...' : ''}>`
53
- }
54
-
55
- const buffersShouldEqual = (expected, result, offset = 0, displayAsHex = false) => {
56
- if (!Buffer.isBuffer(expected)) {
57
- throw new Error('expected is not a Buffer')
58
- }
59
-
60
- if (!Buffer.isBuffer(result)) {
61
- throw new Error('result is not a Buffer')
62
- }
63
-
64
- const diff = report(expected, result, compare(expected, result, offset), displayAsHex)
65
- assert.ok(expected.equals(result), diff)
66
- }
67
-
68
- const transformToABC = () => {
69
- let cntr = 0
70
- return function (chunk, encoding, callback) {
71
- callback(null, Buffer.from([65 + (cntr++ % 26)]))
72
- }
73
- }
74
-
75
- module.exports = {
76
- isClass,
77
- buffersShouldEqual,
78
- bufferToString,
79
- transformToABC,
80
- }
package/src/implode.js DELETED
@@ -1,364 +0,0 @@
1
- const { has, repeat, clone, last, clamp } = require('ramda')
2
- const ExpandingBuffer = require('./helpers/ExpandingBuffer.js')
3
- const { toHex, getLowestNBits, nBitsOfOnes, isFunction } = require('./helpers/functions.js')
4
- const { ExpectedFunctionError, InvalidDictionarySizeError, InvalidCompressionTypeError } = require('./errors.js')
5
- const {
6
- ChBitsAsc,
7
- ChCodeAsc,
8
- LONGEST_ALLOWED_REPETITION,
9
- DICTIONARY_SIZE_LARGE,
10
- DICTIONARY_SIZE_MEDIUM,
11
- DICTIONARY_SIZE_SMALL,
12
- COMPRESSION_BINARY,
13
- COMPRESSION_ASCII,
14
- ExLenBits,
15
- LenBits,
16
- LenCode,
17
- DistCode,
18
- DistBits,
19
- } = require('./constants.js')
20
-
21
- const setup = (state) => {
22
- state.nChBits = repeat(0, 0x306)
23
- state.nChCodes = repeat(0, 0x306)
24
-
25
- switch (state.dictionarySizeBits) {
26
- case DICTIONARY_SIZE_LARGE:
27
- state.dictionarySizeMask = nBitsOfOnes(6)
28
- break
29
- case DICTIONARY_SIZE_MEDIUM:
30
- state.dictionarySizeMask = nBitsOfOnes(5)
31
- break
32
- case DICTIONARY_SIZE_SMALL:
33
- state.dictionarySizeMask = nBitsOfOnes(4)
34
- break
35
- default:
36
- throw new InvalidDictionarySizeError()
37
- }
38
-
39
- switch (state.compressionType) {
40
- case COMPRESSION_BINARY:
41
- for (let nChCode = 0, nCount = 0; nCount < 0x100; nCount++) {
42
- state.nChBits[nCount] = 9
43
- state.nChCodes[nCount] = nChCode
44
- nChCode = getLowestNBits(16, nChCode) + 2
45
- }
46
- break
47
- case COMPRESSION_ASCII:
48
- for (let nCount = 0; nCount < 0x100; nCount++) {
49
- state.nChBits[nCount] = ChBitsAsc[nCount] + 1
50
- state.nChCodes[nCount] = ChCodeAsc[nCount] * 2
51
- }
52
- break
53
- default:
54
- throw new InvalidCompressionTypeError()
55
- }
56
-
57
- let nCount = 0x100
58
- for (let i = 0; i < 0x10; i++) {
59
- for (let nCount2 = 0; nCount2 < 1 << ExLenBits[i]; nCount2++) {
60
- state.nChBits[nCount] = ExLenBits[i] + LenBits[i] + 1
61
- state.nChCodes[nCount] = (nCount2 << (LenBits[i] + 1)) | (LenCode[i] * 2) | 1
62
- nCount++
63
- }
64
- }
65
-
66
- state.outputBuffer.append(Buffer.from([state.compressionType, state.dictionarySizeBits, 0]))
67
- state.outBits = 0
68
- }
69
-
70
- const outputBits = (state, nBits, bitBuffer) => {
71
- if (nBits > 8) {
72
- outputBits(state, 8, bitBuffer)
73
- bitBuffer = bitBuffer >> 8
74
- nBits = nBits - 8
75
- }
76
-
77
- const outBits = state.outBits
78
-
79
- // in the original code bitBuffer is long, but is cast to char
80
- const lastBytes = state.outputBuffer.read(state.outputBuffer.size() - 1, 1)
81
- state.outputBuffer.dropEnd(1)
82
- state.outputBuffer.append(Buffer.from([lastBytes | getLowestNBits(8, bitBuffer << outBits)]))
83
-
84
- state.outBits = state.outBits + nBits
85
-
86
- if (state.outBits > 8) {
87
- bitBuffer = bitBuffer >> (8 - outBits)
88
- state.outputBuffer.append(Buffer.from([getLowestNBits(8, bitBuffer)]))
89
- state.outBits = getLowestNBits(3, state.outBits)
90
- } else {
91
- state.outBits = getLowestNBits(3, state.outBits)
92
- if (state.outBits === 0) {
93
- state.outputBuffer.append(Buffer.from([0]))
94
- }
95
- }
96
- }
97
-
98
- // ---------------------------------
99
-
100
- const getSizeOfMatching = (inputBytes, a, b) => {
101
- const limit = clamp(2, LONGEST_ALLOWED_REPETITION, b - a)
102
-
103
- for (let i = 2; i <= limit; i++) {
104
- if (inputBytes[a + i] !== inputBytes[b + i]) {
105
- return i
106
- }
107
- }
108
-
109
- return limit
110
- }
111
-
112
- // TODO: make sure that we find the most recent one, which in turn allows
113
- // us to store backward length in less amount of bits
114
- // currently the code goes from the furthest point
115
- const findRepetitions = (inputBytes, endOfLastMatch, cursor) => {
116
- const notEnoughBytes = inputBytes.length - cursor < 2
117
- const tooClose = cursor === endOfLastMatch || cursor - endOfLastMatch < 2
118
- if (notEnoughBytes || tooClose) {
119
- return { size: 0, distance: 0 }
120
- }
121
-
122
- const haystack = inputBytes.slice(endOfLastMatch, cursor)
123
- const needle = inputBytes.slice(cursor, cursor + 2)
124
-
125
- const matchIndex = haystack.indexOf(needle)
126
- if (matchIndex !== -1) {
127
- const distance = cursor - endOfLastMatch - matchIndex
128
- return {
129
- distance: distance - 1,
130
- size: distance > 2 ? getSizeOfMatching(inputBytes, endOfLastMatch + matchIndex, cursor) : 2,
131
- }
132
- }
133
-
134
- return { size: 0, distance: 0 }
135
- }
136
-
137
- // this function can return:
138
- // false - not flushable
139
- // true - flushable
140
- // null - flushable, but there might be a better repetition
141
- const isRepetitionFlushable = (size, distance, startIndex, inputBufferSize) => {
142
- if (size === 0) {
143
- return false
144
- }
145
-
146
- // If we found repetition of 2 bytes, that is 0x100 or further back,
147
- // don't bother. Storing the distance of 0x100 bytes would actually
148
- // take more space than storing the 2 bytes as-is.
149
- if (size === 2 && distance >= 0x100) {
150
- return false
151
- }
152
-
153
- if (size >= 8 || startIndex + 1 >= inputBufferSize) {
154
- return true
155
- }
156
-
157
- return null
158
- }
159
-
160
- // ---------------------------------
161
-
162
- // repetitions are at least 2 bytes long,
163
- // so the initial 2 bytes can be moved to the output as is
164
- const handleFirstTwoBytes = (state) => {
165
- if (state.handledFirstTwoBytes) {
166
- return
167
- }
168
-
169
- if (state.inputBuffer.size() < 3) {
170
- return
171
- }
172
-
173
- const [byte1, byte2] = state.inputBuffer.read(0, 2)
174
- outputBits(state, state.nChBits[byte1], state.nChCodes[byte1])
175
- outputBits(state, state.nChBits[byte2], state.nChCodes[byte2])
176
-
177
- state.handledFirstTwoBytes = true
178
- state.startIndex += 2
179
- }
180
-
181
- const processChunkData = (state, verbose = false) => {
182
- if (!has('dictionarySizeMask', state)) {
183
- setup(state)
184
- }
185
-
186
- if (!state.inputBuffer.isEmpty()) {
187
- state.startIndex = 0
188
-
189
- handleFirstTwoBytes(state)
190
-
191
- // -------------------------------
192
-
193
- /* eslint-disable prefer-const */
194
-
195
- let endOfLastMatch = 0 // used when searching for longer repetitions later
196
- while (state.startIndex < state.inputBuffer.size()) {
197
- let { size, distance } = findRepetitions(state.inputBuffer.read(endOfLastMatch), endOfLastMatch, state.startIndex)
198
-
199
- let isFlushable = isRepetitionFlushable(size, distance, state.startIndex, state.inputBuffer.size())
200
-
201
- if (isFlushable === false) {
202
- const byte = state.inputBuffer.read(state.startIndex, 1)
203
- outputBits(state, state.nChBits[byte], state.nChCodes[byte])
204
- state.startIndex += 1
205
- } else {
206
- if (isFlushable === null) {
207
- /*
208
- // Try to find better repetition 1 byte later.
209
- // stormlib/implode.c L517
210
- let cursor = state.startIndex
211
- let newSize = size
212
- let newDistance = distance
213
- let currentSize
214
- let currentDistance
215
- while (newSize <= currentSize && isRepetitionFlushable(newSize, newDistance, state.startIndex, state.inputBuffer.size())) {
216
- currentSize = newSize
217
- currentDistance = newDistance
218
- const reps = findRepetitions(state.inputBuffer.read(endOfLastMatch), endOfLastMatch, ++cursor)
219
- newSize = reps.size
220
- newDistance = reps.distance
221
- }
222
- size = newSize
223
- distance = currentDistance
224
- */
225
- }
226
-
227
- const byte = size + 0xfe
228
- outputBits(state, state.nChBits[byte], state.nChCodes[byte])
229
- if (size === 2) {
230
- const byte = distance >> 2
231
- outputBits(state, state.distBits[byte], state.distCodes[byte])
232
- outputBits(state, 2, distance & 3)
233
- } else {
234
- const byte = distance >> state.dictionarySizeBits
235
- outputBits(state, state.distBits[byte], state.distCodes[byte])
236
- outputBits(state, state.dictionarySizeBits, state.dictionarySizeMask & distance)
237
- }
238
-
239
- state.startIndex += size
240
- }
241
-
242
- /*
243
- state.inputBuffer.dropStart(endOfLastMatch)
244
- state.startIndex -= endOfLastMatch
245
- endOfLastMatch = 0
246
- */
247
-
248
- if (state.dictionarySizeBits === DICTIONARY_SIZE_SMALL && state.startIndex >= 0x400) {
249
- state.inputBuffer.dropStart(0x400)
250
- state.startIndex -= 0x400
251
- } else if (state.dictionarySizeBits === DICTIONARY_SIZE_MEDIUM && state.startIndex >= 0x800) {
252
- state.inputBuffer.dropStart(0x800)
253
- state.startIndex -= 0x800
254
- } else if (state.dictionarySizeBits === DICTIONARY_SIZE_LARGE && state.startIndex >= 0x1000) {
255
- state.inputBuffer.dropStart(0x1000)
256
- state.startIndex -= 0x1000
257
- }
258
- }
259
-
260
- /* eslint-enable prefer-const */
261
-
262
- // -------------------------------
263
-
264
- state.inputBuffer.dropStart(state.inputBuffer.size())
265
- }
266
-
267
- if (state.streamEnded) {
268
- // Write the termination literal
269
- outputBits(state, last(state.nChBits), last(state.nChCodes))
270
- }
271
- }
272
-
273
- const implode = (compressionType, dictionarySizeBits, config = {}) => {
274
- const { verbose = false, inputBufferSize = 0x0, outputBufferSize = 0x0 } = config
275
-
276
- const handler = function (chunk, encoding, callback) {
277
- if (!isFunction(callback)) {
278
- // can't call callback to pass in data or errors, so we throw up
279
- throw new ExpectedFunctionError()
280
- }
281
-
282
- const state = handler._state
283
-
284
- try {
285
- state.inputBuffer.append(chunk)
286
- if (state.isFirstChunk) {
287
- state.isFirstChunk = false
288
- this._flush = state.onInputFinished
289
- }
290
-
291
- if (verbose) {
292
- console.log(`implode: reading ${toHex(chunk.length)} bytes from chunk #${state.stats.chunkCounter++}`)
293
- }
294
-
295
- processChunkData(state, verbose)
296
-
297
- const blockSize = 0x800
298
- if (state.outputBuffer.size() > blockSize) {
299
- const numberOfBytes = (Math.floor(state.outputBuffer.size() / blockSize) - 1) * blockSize
300
- const output = Buffer.from(state.outputBuffer.read(0, numberOfBytes))
301
- state.outputBuffer.flushStart(numberOfBytes)
302
-
303
- if (state.outBits === 0) {
304
- // set last byte to 0
305
- state.outputBuffer.dropEnd(1)
306
- state.outputBuffer.append(Buffer.from([0]))
307
- }
308
-
309
- callback(null, output)
310
- } else {
311
- callback(null, Buffer.from([]))
312
- }
313
- } catch (e) {
314
- callback(e)
315
- }
316
- }
317
-
318
- handler._state = {
319
- isFirstChunk: true,
320
- streamEnded: false,
321
- compressionType,
322
- dictionarySizeBits,
323
- distCodes: clone(DistCode),
324
- distBits: clone(DistBits),
325
- startIndex: 0,
326
- inputBuffer: new ExpandingBuffer(inputBufferSize),
327
- outputBuffer: new ExpandingBuffer(outputBufferSize),
328
- handledFirstTwoBytes: false,
329
- onInputFinished: (callback) => {
330
- const state = handler._state
331
- state.streamEnded = true
332
- try {
333
- processChunkData(state, verbose)
334
-
335
- if (verbose) {
336
- console.log('---------------')
337
- console.log('implode: total number of chunks read:', state.stats.chunkCounter)
338
- console.log('implode: inputBuffer heap size', toHex(state.inputBuffer.heapSize()))
339
- console.log('implode: outputBuffer heap size', toHex(state.outputBuffer.heapSize()))
340
- }
341
-
342
- callback(null, state.outputBuffer.read())
343
- } catch (e) {
344
- callback(e)
345
- }
346
- },
347
- stats: {
348
- chunkCounter: 0,
349
- },
350
- }
351
-
352
- return handler
353
- }
354
-
355
- module.exports = {
356
- setup,
357
- outputBits,
358
- getSizeOfMatching,
359
- findRepetitions,
360
- isRepetitionFlushable,
361
- handleFirstTwoBytes,
362
- processChunkData,
363
- implode,
364
- }
package/src/index.js DELETED
@@ -1,18 +0,0 @@
1
- const { implode } = require('./implode.js')
2
- const { explode } = require('./explode.js')
3
- const constants = require('./constants.js')
4
- const errors = require('./errors.js')
5
- const stream = require('./helpers/stream.js')
6
-
7
- const compress = implode
8
- const decompress = explode
9
-
10
- module.exports = {
11
- implode,
12
- compress,
13
- explode,
14
- decompress,
15
- constants,
16
- errors,
17
- stream,
18
- }
package/tsconfig.json DELETED
@@ -1,20 +0,0 @@
1
- {
2
- "compilerOptions": {
3
- "noEmit": true,
4
- "skipLibCheck": true,
5
- "target": "esnext",
6
- "moduleResolution": "node",
7
- "allowJs": true,
8
- "noImplicitAny": false,
9
- "strict": true,
10
- "forceConsistentCasingInFileNames": true,
11
- "esModuleInterop": true,
12
- "module": "commonjs",
13
- "resolveJsonModule": true,
14
- "isolatedModules": true,
15
- "incremental": true,
16
- "allowSyntheticDefaultImports": true
17
- },
18
- "include": ["src/**/*"],
19
- "exclude": ["node_modules"]
20
- }
@@ -1,41 +0,0 @@
1
- export const COMPRESSION_BINARY: 0
2
- export const COMPRESSION_ASCII: 1
3
- export const DICTIONARY_SIZE_SMALL: 4
4
- export const DICTIONARY_SIZE_MEDIUM: 5
5
- export const DICTIONARY_SIZE_LARGE: 6
6
- export const LONGEST_ALLOWED_REPETITION: 0x204
7
-
8
- export const PKDCL_OK: 'OK'
9
- export const PKDCL_STREAM_END: 'All data from the input stream is read'
10
- export const PKDCL_NEED_DICT: 'Need more data (dictionary)'
11
- export const PKDCL_CONTINUE: 'Continue (internal flag)'
12
- export const PKDCL_GET_INPUT: 'Get input (internal flag)'
13
-
14
- export const LITERAL_END_STREAM: 0x305
15
- export const LITERAL_STREAM_ABORTED: 0x306
16
-
17
- export const DistCode: number[]
18
- export const DistBits: number[]
19
- export const LenBits: number[]
20
- export const LenCode: number[]
21
- export const ExLenBits: number[]
22
- export const LenBase: number[]
23
- export const ChBitsAsc: number[]
24
- export const ChCodeAsc: number[]
25
-
26
- // Additional types
27
-
28
- /**
29
- * Compression types for implode
30
- */
31
- export type CompressionType = typeof COMPRESSION_BINARY | typeof COMPRESSION_ASCII
32
- /**
33
- * Dictionary sizes for implode, determines how well the file get compressed.
34
- *
35
- * Small dictionary size means less memory to lookback in data for repetitions, meaning it will be less effective, the file stays larger, less compressed.
36
- * On the other hand, large compression allows more lookback allowing more effective compression, thus generating smaller, more compressed files.
37
- */
38
- export type DictionarySizeBits =
39
- | typeof DICTIONARY_SIZE_SMALL
40
- | typeof DICTIONARY_SIZE_MEDIUM
41
- | typeof DICTIONARY_SIZE_LARGE