node-pkware 1.0.1 → 2.0.0
- package/README.md +12 -12
- package/bin/explode.js +8 -7
- package/bin/implode.js +12 -9
- package/package.json +25 -14
- package/src/constants.js +1 -1
- package/src/errors.js +1 -1
- package/src/explode.js +32 -35
- package/src/helpers/ExpandingBuffer.js +1 -1
- package/src/helpers/functions.js +32 -7
- package/src/helpers/stream.js +13 -13
- package/src/helpers/testing.js +8 -6
- package/src/implode.js +17 -18
- package/src/index.js +1 -1
- package/tsconfig.json +20 -0
- package/types/explode.d.ts +1 -1
- package/types/helpers/Shared.d.ts +2 -2
- package/types/helpers/functions.d.ts +5 -0
- package/types/implode.d.ts +4 -4
package/README.md
CHANGED

@@ -34,13 +34,13 @@ Calling either explode or implode with the `-v` or `--version` flag will display
 
 `implode test/files/fast.fts.unpacked --output=C:/fast.fts --binary --large --offset=1816`
 
-`explode test/files/fast.fts --auto-detect --debug --output=E:/fast.fts.unpacked`
+`explode test/files/fast.fts --auto-detect --verbose --output=E:/fast.fts.unpacked`
 
-`explode test/files/fast.fts --auto-detect --debug --output=E:/fast.fts.unpacked --offset=2000`
+`explode test/files/fast.fts --auto-detect --verbose --output=E:/fast.fts.unpacked --offset=2000`
 
 ### piping also works
 
-**Don't use --debug when piping, because debug messages will be outputted to where the decompressed data is being outputted!**
+**Don't use --verbose when piping, because verbose messages will be outputted to where the decompressed data is being outputted!**
 
 `cat c:/arx/level8.llf | explode > c:/arx/level8.llf.unpacked`
 

@@ -66,7 +66,7 @@ Takes an optional config object, which has the following properties:
 
 ```js
 {
-  debug: boolean, // whether the code should display debug messages on the console or not (default = false)
+  verbose: boolean, // whether the code should display extra debug messages on the console or not (default = false)
   inputBufferSize: int, // the starting size of the input buffer, may expand later as needed. Not having to expand may have performance impact (default 0)
   outputBufferSize: int // same as inputBufferSize, but for the outputBuffer (default 0)
 }

@@ -80,7 +80,7 @@ Takes an optional config object, which has the following properties:
 
 ```js
 {
-  debug: boolean, // whether the code should display debug messages on the console or not (default = false)
+  verbose: boolean, // whether the code should display extra debug messages on the console or not (default = false)
   inputBufferSize: int, // the starting size of the input buffer, may expand later as needed. Not having to expand may have performance impact (default 0)
   outputBufferSize: int // same as inputBufferSize, but for the outputBuffer (default 0)
 }

@@ -148,9 +148,9 @@ const { through, streamToBuffer } = stream
 Readable.from(buffer) // buffer is of type Buffer with compressed data
   .pipe(through(explode()))
   .pipe(
-    streamToBuffer(decompressedData => {
+    streamToBuffer((decompressedData) => {
       // decompressedData holds the decompressed buffer
-    })
+    }),
   )
 ```
 

@@ -190,18 +190,18 @@ const { explode, stream } = require('node-pkware')
 const { through } = stream
 
 fs.createReadStream(`path-to-compressed-file`)
-  .on('error', err => {
+  .on('error', (err) => {
     console.error('readstream error')
   })
   .pipe(
-    through(explode()).on('error', err => {
+    through(explode()).on('error', (err) => {
       console.error('explode error')
-    })
+    }),
   )
   .pipe(
-    fs.createWriteStream(`path-to-write-decompressed-data`).on('error', err => {
+    fs.createWriteStream(`path-to-write-decompressed-data`).on('error', (err) => {
       console.error('writestream error')
-    })
+    }),
   )
 ```
 
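All of the README hunks above trace the same breaking change in 2.0.0: the `debug` config option and `--debug` CLI flag were renamed to `verbose`, and the code samples were reflowed to the new formatting (explicit arrow parentheses, trailing commas). A minimal migration sketch based on the README's own examples (file paths are placeholders):

```js
const fs = require('fs')
const { explode, stream } = require('node-pkware')
const { through } = stream

// 1.0.1 used `debug: true`; in 2.0.0 the same switch is called `verbose`
fs.createReadStream('path-to-compressed-file') // placeholder path
  .pipe(through(explode({ verbose: true })))
  .pipe(fs.createWriteStream('path-to-decompressed-file')) // placeholder path
```
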
package/bin/explode.js
CHANGED

@@ -8,10 +8,10 @@ const { explode } = require('../src/explode.js')
 
 const args = minimist(process.argv.slice(2), {
   string: ['output', 'offset', 'input-buffer-size', 'output-buffer-size'],
-  boolean: ['version', 'drop-before-offset', 'debug'],
+  boolean: ['version', 'drop-before-offset', 'verbose'],
   alias: {
-    v: 'version'
-  }
+    v: 'version',
+  },
 })
 
 const decompress = (input, output, offset, keepHeader, config) => {

@@ -27,7 +27,8 @@ const decompress = (input, output, offset, keepHeader, config) => {
 
 ;(async () => {
   if (args.version) {
-    console.log(await getPackageVersion())
+    const version = await getPackageVersion()
+    console.log(`node-pkware - version ${version}`)
     process.exit(0)
   }
 

@@ -61,16 +62,16 @@ const decompress = (input, output, offset, keepHeader, config) => {
 
   const keepHeader = !args['drop-before-offset']
   const config = {
-    debug: args.debug,
+    verbose: args.verbose,
     inputBufferSize: parseNumberString(args['input-buffer-size'], 0x10000),
-    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x40000)
+    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x40000),
   }
 
   decompress(input, output, offset, keepHeader, config)
     .then(() => {
       process.exit(0)
     })
-    .catch(e => {
+    .catch((e) => {
       console.error(`error: ${e.message}`)
       process.exit(1)
     })
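The CLI mirrors the library rename: `--debug` becomes `--verbose`, and `--version` now resolves the version via `getPackageVersion()` before printing a prefixed string. An illustrative invocation in the style of the README (file names are placeholders):

`explode test/files/fast.fts --verbose --output=fast.fts.unpacked`
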
package/bin/implode.js
CHANGED

@@ -7,7 +7,7 @@ const {
   COMPRESSION_ASCII,
   DICTIONARY_SIZE_SMALL,
   DICTIONARY_SIZE_MEDIUM,
-  DICTIONARY_SIZE_LARGE
+  DICTIONARY_SIZE_LARGE,
 } = require('../src/constants.js')
 const { getPackageVersion, parseNumberString, fileExists } = require('../src/helpers/functions.js')
 const { implode } = require('../src/implode.js')

@@ -26,20 +26,21 @@ const decompress = (input, output, offset, keepHeader, compressionType, dictionarySize, config) => {
 
 const args = minimist(process.argv.slice(2), {
   string: ['output', 'offset', 'input-buffer-size', 'output-buffer-size'],
-  boolean: ['version', 'binary', 'ascii', 'drop-before-offset', 'debug', 'small', 'medium', 'large'],
+  boolean: ['version', 'binary', 'ascii', 'drop-before-offset', 'verbose', 'small', 'medium', 'large'],
   alias: {
     a: 'ascii',
     b: 'binary',
     s: 'small',
     m: 'medium',
     l: 'large',
-    v: 'version'
-  }
+    v: 'version',
+  },
 })
 
 ;(async () => {
   if (args.version) {
-    console.log(await getPackageVersion())
+    const version = await getPackageVersion()
+    console.log(`node-pkware - version ${version}`)
     process.exit(0)
   }
 

@@ -67,7 +68,9 @@ const args = minimist(process.argv.slice(2), {
     hasErrors = true
   }
 
-  const sizes = [args.small, args.medium, args.large].filter(x => x === true)
+  const sizes = [args.small, args.medium, args.large].filter((x) => {
+    return x === true
+  })
   if (sizes.length > 1) {
     console.error('error: multiple size types specified, can only work with one of --small, --medium and --large')
     hasErrors = true

@@ -97,16 +100,16 @@ const args = minimist(process.argv.slice(2), {
 
   const keepHeader = !args['drop-before-offset']
   const config = {
-    debug: args.debug,
+    verbose: args.verbose,
     inputBufferSize: parseNumberString(args['input-buffer-size'], 0x10000),
-    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x12000)
+    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x12000),
  }
 
   decompress(input, output, offset, keepHeader, compressionType, dictionarySize, config)
     .then(() => {
       process.exit(0)
     })
-    .catch(e => {
+    .catch((e) => {
       console.error(`error: ${e.message}`)
       process.exit(1)
     })
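`implode` gets the same `--verbose` rename, plus a guard against passing more than one of `--small`, `--medium` and `--large` (the new `.filter((x) => { return x === true })` drops minimist's `false` defaults before counting the size flags). Illustrative invocation, echoing the README (file names are placeholders):

`implode test/files/fast.fts.unpacked --binary --large --verbose --output=fast.fts`
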
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "node-pkware",
-  "version": "1.0.1",
+  "version": "2.0.0",
   "description": "The nodejs implementation of StormLib's pkware compressor/de-compressor",
   "main": "src/index.js",
   "types": "types/index.d.ts",

@@ -13,7 +13,7 @@
     "implode": "bin/implode.js"
   },
   "scripts": {
-    "lint": "eslint \"src/**/*.js\"",
+    "lint": "eslint \"{bin,src,test}/**/*.{js,ts}\"",
     "lint:fix": "npm run lint -- --fix",
     "lint:staged": "lint-staged",
     "test:unit": "set FORCE_COLOR=true && mocha test/**/*.spec.js --timeout 5000",

@@ -33,28 +33,30 @@
   "license": "GPL-3.0-or-later",
   "dependencies": {
     "minimist-lite": "^2.2.1",
-    "ramda": "^0.28.0",
-    "ramda-adjunct": "^3.1.0"
+    "ramda": "^0.28.0"
   },
   "devDependencies": {
-    "
+    "@types/node": "^18.11.10",
+    "arx-header-size": "^2.0.0",
     "binary-comparator": "^0.5.0",
-    "
+    "esbuild": "^0.15.16",
+    "eslint": "^8.28.0",
     "eslint-config-prettier": "^8.5.0",
     "eslint-config-prettier-standard": "^4.0.1",
     "eslint-config-standard": "^17.0.0",
     "eslint-plugin-import": "^2.26.0",
     "eslint-plugin-node": "^11.1.0",
-    "eslint-plugin-prettier": "^4.
-    "eslint-plugin-promise": "^6.
+    "eslint-plugin-prettier": "^4.2.1",
+    "eslint-plugin-promise": "^6.1.1",
     "eslint-plugin-ramda": "^2.5.1",
     "eslint-plugin-standard": "^4.1.0",
-    "lint-staged": "^
-    "mocha": "^10.
-    "nodemon": "^2.0.
+    "lint-staged": "^13.0.4",
+    "mocha": "^10.1.0",
+    "nodemon": "^2.0.20",
     "pre-commit": "^1.2.2",
-    "prettier": "^2.
-    "prettier-config-standard": "^5.0.0"
+    "prettier": "^2.8.0",
+    "prettier-config-standard": "^5.0.0",
+    "typescript": "^4.9.3"
   },
   "pre-commit": [
     "lint:staged",

@@ -63,5 +65,14 @@
   ],
   "lint-staged": {
     "*.js": "eslint --fix"
-  }
+  },
+  "keywords": [
+    "arx-fatalis",
+    "pkware",
+    "stormlib",
+    "implode",
+    "explode",
+    "compression",
+    "blast"
+  ]
 }
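Beyond the version bump, the manifest shows the shape of the 2.0.0 toolchain: `ramda-adjunct` is dropped from the dependencies (its helpers move in-house, see `src/helpers/functions.js` below), while `typescript`, `@types/node` and `esbuild` arrive as devDependencies and the lint glob widens from `src` to `{bin,src,test}`. Presumably the day-to-day commands are unchanged: `npm run lint` and `npm run lint:fix`.
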
package/src/constants.js
CHANGED
package/src/errors.js
CHANGED
package/src/explode.js
CHANGED

@@ -1,14 +1,13 @@
-const { repeat, unfold, reduce, has } = require('ramda')
-const { isFunction } = require('ramda-adjunct')
+const { repeat, unfold, has } = require('ramda')
 const {
   InvalidDataError,
   InvalidCompressionTypeError,
   InvalidDictionarySizeError,
   ExpectedBufferError,
   ExpectedFunctionError,
-  AbortedError
+  AbortedError,
 } = require('./errors.js')
-const { mergeSparseArrays, getLowestNBits, nBitsOfOnes, toHex } = require('./helpers/functions.js')
+const { mergeSparseArrays, getLowestNBits, nBitsOfOnes, toHex, isFunction } = require('./helpers/functions.js')
 const {
   ChBitsAsc,
   ChCodeAsc,

@@ -26,11 +25,11 @@ const {
   ExLenBits,
   DistBits,
   LenCode,
-  DistCode
+  DistCode,
 } = require('./constants.js')
 const ExpandingBuffer = require('./helpers/ExpandingBuffer.js')
 
-const readHeader = buffer => {
+const readHeader = (buffer) => {
   if (!Buffer.isBuffer(buffer)) {
     throw new ExpectedBufferError()
   }

@@ -40,22 +39,24 @@ const readHeader = buffer => {
 
   const compressionType = buffer.readUInt8(0)
   const dictionarySizeBits = buffer.readUInt8(1)
-  if (
+  if (![COMPRESSION_BINARY, COMPRESSION_ASCII].includes(compressionType)) {
     throw new InvalidCompressionTypeError()
   }
-  if (!
+  if (![DICTIONARY_SIZE_SMALL, DICTIONARY_SIZE_MEDIUM, DICTIONARY_SIZE_LARGE].includes(dictionarySizeBits)) {
     throw new InvalidDictionarySizeError()
   }
 
   return {
     compressionType,
-    dictionarySizeBits
+    dictionarySizeBits,
   }
 }
 
 // PAT = populate ascii table
-const createPATIterator = (limit, stepper) => n => {
-  return n >= limit ? false : [n, n + (1 << stepper)]
+const createPATIterator = (limit, stepper) => {
+  return (n) => {
+    return n >= limit ? false : [n, n + (1 << stepper)]
+  }
 }
 
 const populateAsciiTable = (value, index, bits, limit) => {

@@ -63,14 +64,10 @@ const populateAsciiTable = (value, index, bits, limit) => {
   const seed = ChCodeAsc[index] >> bits
   const idxs = unfold(iterator, seed)
 
-  return reduce(
-    (acc, idx) => {
-      acc[idx] = index
-      return acc
-    },
-    [],
-    idxs
-  )
+  return idxs.reduce((acc, idx) => {
+    acc[idx] = index
+    return acc
+  }, [])
 }
 
 const generateAsciiTables = () => {

@@ -78,7 +75,7 @@ const generateAsciiTables = () => {
     asciiTable2C34: repeat(0, 0x100),
     asciiTable2D34: repeat(0, 0x100),
     asciiTable2E34: repeat(0, 0x80),
-    asciiTable2EB4: repeat(0, 0x100)
+    asciiTable2EB4: repeat(0, 0x100),
   }
 
   tables.chBitsAsc = ChBitsAsc.map((value, index) => {

@@ -107,7 +104,7 @@ const generateAsciiTables = () => {
   return tables
 }
 
-const parseInitialData = (state, debug = false) => {
+const parseInitialData = (state, verbose = false) => {
   if (state.inputBuffer.size() < 4) {
     return false
   }

@@ -127,12 +124,12 @@ const parseInitialData = (state, debug = false) => {
     })
   }
 
-  if (debug) {
+  if (verbose) {
     console.log(`explode: compression type: ${state.compressionType === COMPRESSION_BINARY ? 'binary' : 'ascii'}`)
     console.log(
       `explode: compression level: ${
         state.dictionarySizeBits === 4 ? 'small' : state.dictionarySizeBits === 5 ? 'medium' : 'large'
-      }`
+      }`,
     )
   }
 

@@ -158,7 +155,7 @@ const wasteBits = (state, numberOfBits) => {
   return PKDCL_OK
 }
 
-const decodeNextLiteral = state => {
+const decodeNextLiteral = (state) => {
   const lastBit = state.bitBuffer & 1
 
   if (wasteBits(state, 1) === PKDCL_STREAM_END) {

@@ -247,13 +244,13 @@ const decodeDistance = (state, repeatLength) => {
   return distance + 1
 }
 
-const processChunkData = (state, debug = false) => {
+const processChunkData = (state, verbose = false) => {
   if (state.inputBuffer.isEmpty()) {
     return
   }
 
   if (!has('compressionType', state)) {
-    const parsedHeader = parseInitialData(state, debug)
+    const parsedHeader = parseInitialData(state, verbose)
     if (!parsedHeader || state.inputBuffer.isEmpty()) {
       return
     }

@@ -312,7 +309,7 @@ const generateDecodeTables = (startIndexes, lengthBits) => {
 }
 
 const explode = (config = {}) => {
-  const { debug = false, inputBufferSize = 0x0, outputBufferSize = 0x0 } = config
+  const { verbose = false, inputBufferSize = 0x0, outputBufferSize = 0x0 } = config
 
   const handler = function (chunk, encoding, callback) {
     if (!isFunction(callback)) {

@@ -330,11 +327,11 @@ const explode = (config = {}) => {
       this._flush = state.onInputFinished
     }
 
-    if (debug) {
+    if (verbose) {
       console.log(`explode: reading ${toHex(chunk.length)} bytes from chunk #${state.stats.chunkCounter++}`)
     }
 
-    processChunkData(state, debug)
+    processChunkData(state, verbose)
 
     const blockSize = 0x1000
     if (state.outputBuffer.size() > blockSize) {

@@ -354,7 +351,7 @@ const explode = (config = {}) => {
   handler._state = {
     _backup: {
       extraBits: null,
-      bitBuffer: null
+      bitBuffer: null,
     },
     needMoreInput: true,
     isFirstChunk: true,

@@ -364,10 +361,10 @@ const explode = (config = {}) => {
     distPosCodes: generateDecodeTables(DistCode, DistBits),
     inputBuffer: new ExpandingBuffer(inputBufferSize),
     outputBuffer: new ExpandingBuffer(outputBufferSize),
-    onInputFinished: callback => {
+    onInputFinished: (callback) => {
       const state = handler._state
 
-      if (debug) {
+      if (verbose) {
         console.log('---------------')
         console.log('explode: total number of chunks read:', state.stats.chunkCounter)
         console.log('explode: inputBuffer heap size', toHex(state.inputBuffer.heapSize()))

@@ -393,8 +390,8 @@ const explode = (config = {}) => {
       state.inputBuffer._restoreIndices()
     },
     stats: {
-      chunkCounter: 0
-    }
+      chunkCounter: 0,
+    },
   }
 
   return handler

@@ -410,5 +407,5 @@ module.exports = {
   wasteBits,
   decodeNextLiteral,
   decodeDistance,
-  generateDecodeTables
+  generateDecodeTables,
 }
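The decompressor's API is unchanged apart from the `verbose` rename; when enabled it emits the compression type/level and per-chunk progress logs shown in the hunks above. A minimal sketch, assuming `compressed` is a Buffer of imploded data obtained elsewhere:

```js
const { Readable } = require('stream')
const { explode, stream } = require('node-pkware')
const { through, streamToBuffer } = stream

Readable.from(compressed) // assumption: a Buffer holding compressed data
  .pipe(through(explode({ verbose: true }))) // logs compression type and chunk reads to the console
  .pipe(
    streamToBuffer((decompressedData) => {
      // decompressedData holds the decompressed buffer
    }),
  )
```
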
package/src/helpers/functions.js
CHANGED

@@ -1,6 +1,24 @@
 const fs = require('fs')
-const { repeat, test } = require('ramda')
-const { isNumber, isString } = require('ramda-adjunct')
+const { repeat, test, type } = require('ramda')
+
+const isNumber = (x) => {
+  return typeof x === 'number'
+}
+
+const isString = (x) => {
+  return typeof x === 'string'
+}
+
+const isFunction = (x) => {
+  return type(x) === 'Function'
+}
+
+const noop = () => {}
+
+// https://stackoverflow.com/a/68989785/1806628
+const isPlainObject = (x) => {
+  return x.constructor === Object
+}
 
 const isBetween = (min, max, num) => {
   if (!isNumber(min) || !isNumber(max) || !isNumber(num)) {

@@ -13,7 +31,7 @@ const isBetween = (min, max, num) => {
   return num >= min && num <= max
 }
 
-const nBitsOfOnes = numberOfBits => {
+const nBitsOfOnes = (numberOfBits) => {
   if (!Number.isInteger(numberOfBits) || numberOfBits < 0) {
     return null
   }

@@ -37,7 +55,7 @@ const getLowestNBits = (numberOfBits, number) => {
 
 const isDecimalString = test(/^\d+$/)
 
-const isFullHexString = str => {
+const isFullHexString = (str) => {
   if (isString(str)) {
     return /^\s*0x[0-9a-f]+\s*$/.test(str)
   } else {

@@ -77,7 +95,9 @@ const mergeSparseArrays = (a, b) => {
 /*
 export const dumpBytes = bytes => {
   const formattedBytes = Array.from(bytes)
-    .map(byte => toHex(byte, 2, true))
+    .map(byte => {
+      return toHex(byte, 2, true)
+    })
     .join(' ')
   return `<${formattedBytes}>`
 }

@@ -102,7 +122,7 @@ const getPackageVersion = async () => {
   }
 }
 
-const fileExists = async filename => {
+const fileExists = async (filename) => {
   try {
     await fs.promises.access(filename, fs.constants.R_OK)
     return true

@@ -112,6 +132,11 @@ const fileExists = async filename => {
 }
 
 module.exports = {
+  isNumber,
+  isString,
+  isFunction,
+  noop,
+  isPlainObject,
   isBetween,
   nBitsOfOnes,
   maskBits,

@@ -121,5 +146,5 @@ module.exports = {
   mergeSparseArrays,
   parseNumberString,
   getPackageVersion,
-  fileExists
+  fileExists,
 }
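These additions replace the `ramda-adjunct` predicates with local ones (`isFunction` leans on ramda's `type`). A short sketch of how the new helpers behave, using a relative require the way the package's own modules do:

```js
const { isFunction, isPlainObject, noop } = require('./functions.js') // path relative to src/helpers

isFunction(() => {}) // true: ramda's type() reports 'Function'
isPlainObject({ key: 1 }) // true: the value's constructor is Object
isPlainObject(Buffer.from([])) // false: the constructor is Buffer
noop() // does nothing; a handy default callback
```
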
package/src/helpers/stream.js
CHANGED

@@ -1,7 +1,7 @@
 const { Transform, Writable } = require('stream')
 const { promisify } = require('util')
-const { isFunction } = require('ramda-adjunct')
 const ExpandingBuffer = require('./ExpandingBuffer.js')
+const { isFunction } = require('./functions.js')
 
 const emptyBuffer = Buffer.from([])
 

@@ -15,7 +15,7 @@ class QuasiTransform {
   }
 }
 
-const splitAt = index => {
+const splitAt = (index) => {
   let cntr = 0
 
   if (!Number.isInteger(index) || index < 0) {

@@ -24,7 +24,7 @@ const splitAt = index => {
     }
   }
 
-  return chunk => {
+  return (chunk) => {
     let left
     let right
     let isLeftDone = true

@@ -66,9 +66,9 @@ const transformEmpty = () => {
   }
 }
 
-const through = handler => {
+const through = (handler) => {
   return new Transform({
-    transform: handler
+    transform: handler,
   })
 }
 

@@ -89,7 +89,7 @@ const transformSplitBy = (predicate, leftHandler, rightHandler) => {
 
     if (isFirstChunk) {
       isFirstChunk = false
-      this._flush = flushCallback => {
+      this._flush = (flushCallback) => {
         if (!dam.isEmpty()) {
           this.push(dam.read())
         }

@@ -122,10 +122,10 @@ const transformSplitBy = (predicate, leftHandler, rightHandler) => {
       }
 
       Promise.all([leftFiller, rightFiller])
-        .then(buffers => {
+        .then((buffers) => {
           flushCallback(null, Buffer.concat(buffers))
         })
-        .catch(err => {
+        .catch((err) => {
           flushCallback(err)
         })
     }

@@ -146,7 +146,7 @@ const transformSplitBy = (predicate, leftHandler, rightHandler) => {
     }
 
     Promise.all([_left, filler, _right])
-      .then(buffers => {
+      .then((buffers) => {
         dam.append(Buffer.concat(buffers))
         if (dam.size() > damChunkSize) {
           const chunks = Math.floor(dam.size() / damChunkSize)

@@ -160,13 +160,13 @@ const transformSplitBy = (predicate, leftHandler, rightHandler) => {
           callback(null, emptyBuffer)
         }
       })
-      .catch(err => {
+      .catch((err) => {
        callback(err)
      })
   }
 }
 
-const streamToBuffer = done => {
+const streamToBuffer = (done) => {
   const buffer = new ExpandingBuffer()
   return new Writable({
     write(chunk, encoding, callback) {

@@ -176,7 +176,7 @@ const streamToBuffer = done => {
     final(callback) {
       done(buffer.getHeap())
       callback()
-    }
+    },
   })
 }
 

@@ -186,5 +186,5 @@ module.exports = {
   transformEmpty,
   through,
   transformSplitBy,
-  streamToBuffer
+  streamToBuffer,
 }
package/src/helpers/testing.js
CHANGED

@@ -8,10 +8,12 @@ const isPromise = promise => {
   return typeof promise === 'object' && promise.constructor.name === 'Promise'
 }
 
-const toConsole = () => (chunk, encoding, callback) => {
-  process.stdout.write(chunk)
-  process.stdout.write(Buffer.from(EOL))
-  callback(null, chunk)
+const toConsole = () => {
+  return (chunk, encoding, callback) => {
+    process.stdout.write(chunk)
+    process.stdout.write(Buffer.from(EOL))
+    callback(null, chunk)
+  }
 }
 
 const readToBuffer = (fileName, chunkSizeInBytes = 1024) => {

@@ -30,7 +32,7 @@ const readToBuffer = (fileName, chunkSizeInBytes = 1024) => {
 */
 
 // source: https://stackoverflow.com/a/43197340/1806628
-const isClass = obj => {
+const isClass = (obj) => {
   const isCtorClass = obj.constructor && obj.constructor.toString().substring(0, 5) === 'class'
   if (obj.prototype === undefined) {
     return isCtorClass

@@ -74,5 +76,5 @@ module.exports = {
   isClass,
   buffersShouldEqual,
   bufferToString,
-  transformToABC
+  transformToABC,
 }
package/src/implode.js
CHANGED

@@ -1,7 +1,6 @@
 const { has, repeat, clone, last, clamp } = require('ramda')
-const { isFunction } = require('ramda-adjunct')
 const ExpandingBuffer = require('./helpers/ExpandingBuffer.js')
-const { toHex, getLowestNBits, nBitsOfOnes } = require('./helpers/functions.js')
+const { toHex, getLowestNBits, nBitsOfOnes, isFunction } = require('./helpers/functions.js')
 const { ExpectedFunctionError, InvalidDictionarySizeError, InvalidCompressionTypeError } = require('./errors.js')
 const {
   ChBitsAsc,

@@ -16,10 +15,10 @@ const {
   LenBits,
   LenCode,
   DistCode,
-  DistBits
+  DistBits,
 } = require('./constants.js')
 
-const setup = state => {
+const setup = (state) => {
   state.nChBits = repeat(0, 0x306)
   state.nChCodes = repeat(0, 0x306)
 

@@ -128,7 +127,7 @@ const findRepetitions = (inputBytes, endOfLastMatch, cursor) => {
   const distance = cursor - endOfLastMatch - matchIndex
   return {
     distance: distance - 1,
-    size: distance > 2 ? getSizeOfMatching(inputBytes, endOfLastMatch + matchIndex, cursor) : 2
+    size: distance > 2 ? getSizeOfMatching(inputBytes, endOfLastMatch + matchIndex, cursor) : 2,
   }
 }
 

@@ -162,7 +161,7 @@ const isRepetitionFlushable = (size, distance, startIndex, inputBufferSize) => {
 
 // repetitions are at least 2 bytes long,
 // so the initial 2 bytes can be moved to the output as is
-const handleFirstTwoBytes = state => {
+const handleFirstTwoBytes = (state) => {
   if (state.handledFirstTwoBytes) {
     return
   }

@@ -179,7 +178,7 @@ const handleFirstTwoBytes = state => {
   state.startIndex += 2
 }
 
-const processChunkData = (state, debug = false) => {
+const processChunkData = (state, verbose = false) => {
   if (!has('dictionarySizeMask', state)) {
     setup(state)
   }

@@ -272,7 +271,7 @@ const processChunkData = (state, debug = false) => {
 }
 
 const implode = (compressionType, dictionarySizeBits, config = {}) => {
-  const { debug = false, inputBufferSize = 0x0, outputBufferSize = 0x0 } = config
+  const { verbose = false, inputBufferSize = 0x0, outputBufferSize = 0x0 } = config
 
   const handler = function (chunk, encoding, callback) {
     if (!isFunction(callback)) {

@@ -289,11 +288,11 @@ const implode = (compressionType, dictionarySizeBits, config = {}) => {
       this._flush = state.onInputFinished
     }
 
-    if (debug) {
+    if (verbose) {
       console.log(`implode: reading ${toHex(chunk.length)} bytes from chunk #${state.stats.chunkCounter++}`)
     }
 
-    processChunkData(state, debug)
+    processChunkData(state, verbose)
 
     const blockSize = 0x800
     if (state.outputBuffer.size() > blockSize) {

@@ -319,21 +318,21 @@ const implode = (compressionType, dictionarySizeBits, config = {}) => {
   handler._state = {
     isFirstChunk: true,
     streamEnded: false,
-    compressionType
-    dictionarySizeBits
+    compressionType,
+    dictionarySizeBits,
     distCodes: clone(DistCode),
     distBits: clone(DistBits),
     startIndex: 0,
     inputBuffer: new ExpandingBuffer(inputBufferSize),
     outputBuffer: new ExpandingBuffer(outputBufferSize),
     handledFirstTwoBytes: false,
-    onInputFinished: callback => {
+    onInputFinished: (callback) => {
       const state = handler._state
       state.streamEnded = true
       try {
-        processChunkData(state, debug)
+        processChunkData(state, verbose)
 
-        if (debug) {
+        if (verbose) {
           console.log('---------------')
           console.log('implode: total number of chunks read:', state.stats.chunkCounter)
           console.log('implode: inputBuffer heap size', toHex(state.inputBuffer.heapSize()))

@@ -346,8 +345,8 @@ const implode = (compressionType, dictionarySizeBits, config = {}) => {
       }
     },
     stats: {
-      chunkCounter: 0
-    }
+      chunkCounter: 0,
+    },
   }
 
   return handler

@@ -361,5 +360,5 @@ module.exports = {
   isRepetitionFlushable,
   handleFirstTwoBytes,
   processChunkData,
-  implode
+  implode,
 }
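The compressor mirrors the decompressor's change: `implode(compressionType, dictionarySizeBits, config)` now reads `config.verbose`. A sketch of compressing a file, with the constants deep-required the same way the bin scripts pull them from `src/constants.js` (paths are placeholders; the root export may also expose these constants):

```js
const fs = require('fs')
const { implode, stream } = require('node-pkware')
const { COMPRESSION_BINARY, DICTIONARY_SIZE_LARGE } = require('node-pkware/src/constants.js')
const { through } = stream

fs.createReadStream('path-to-raw-file') // placeholder path
  .pipe(through(implode(COMPRESSION_BINARY, DICTIONARY_SIZE_LARGE, { verbose: true })))
  .pipe(fs.createWriteStream('path-to-compressed-file')) // placeholder path
```
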
package/src/index.js
CHANGED
package/tsconfig.json
ADDED

@@ -0,0 +1,20 @@
+{
+  "compilerOptions": {
+    "noEmit": true,
+    "skipLibCheck": true,
+    "target": "esnext",
+    "moduleResolution": "node",
+    "allowJs": true,
+    "noImplicitAny": false,
+    "strict": true,
+    "forceConsistentCasingInFileNames": true,
+    "esModuleInterop": true,
+    "module": "commonjs",
+    "resolveJsonModule": true,
+    "isolatedModules": true,
+    "incremental": true,
+    "allowSyntheticDefaultImports": true
+  },
+  "include": ["src/**/*"],
+  "exclude": ["node_modules"]
+}
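The new config emits nothing (`"noEmit": true`) and pulls the JavaScript sources in via `"allowJs"`, pairing with the `typescript` devDependency added above; presumably it is run as a standalone check (e.g. `npx tsc`) rather than as a build step.
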
package/types/explode.d.ts
CHANGED

@@ -49,7 +49,7 @@ export function generateAsciiTables(): {
   asciiTable2E34: number[]
   asciiTable2EB4: number[]
 }
-export function processChunkData(state: PrivateExplodeState, debug?: boolean): void
+export function processChunkData(state: PrivateExplodeState, verbose?: boolean): void
 export function wasteBits(state: PrivateExplodeState, numberOfBits: number): typeof PKDCL_STREAM_END | typeof PKDCL_OK
 export function decodeNextLiteral(state: PrivateExplodeState): typeof LITERAL_STREAM_ABORTED | number
 export function decodeDistance(state: PrivateExplodeState, repeatLength: number): number

package/types/helpers/Shared.d.ts
CHANGED

@@ -27,10 +27,10 @@ export type PrivateState<T> = { _state: T }
  */
 export type Config = {
   /**
-   * Whether the code should display debug messages on the console or not
+   * Whether the code should display extra messages on the console or not
    * @default false
    */
-  debug?: boolean
+  verbose?: boolean
   /**
    * The starting size of the input buffer, may expand later as needed.
    * Not having to expand may have performance impact.

package/types/helpers/functions.d.ts
CHANGED

@@ -1,3 +1,8 @@
+export function isNumber(x: any): boolean
+export function isString(x: any): boolean
+export function isFunction(x: any): boolean
+export function noop(): void
+export function isPlainObject(x: any): boolean
 export function isBetween(min: number, max: number, num: number): boolean
 export function nBitsOfOnes(numberOfBits: number): number
 export function maskBits(numberOfBits: number, number: number): number
package/types/implode.d.ts
CHANGED

@@ -39,15 +39,15 @@ export function getSizeOfMatching(inputBytes: number[], a: number, b: number): number
 export function findRepetitions(
   inputBytes: number[],
   endOfLastMatch: number,
-  cursor: number
+  cursor: number,
 ): { size: number; distance: number }
 export function isRepetitionFlushable(
   size: number,
   distance: number,
   startIndex: number,
-  inputBufferSize: number
+  inputBufferSize: number,
 ): boolean | null
-export function processChunkData(state: PrivateExplodeState, debug?: boolean): void
+export function processChunkData(state: PrivateExplodeState, verbose?: boolean): void
 
 /**
  * Compresses stream

@@ -59,5 +59,5 @@ export function processChunkData(state: PrivateExplodeState, debug?: boolean): void
 export function implode(
   compressionType: CompressionType,
   dictionarySizeBits: DictionarySizeBits,
-  config?: Config
+  config?: Config,
 ): PrivateState<PrivateExplodeState> & Handler