node-pkware 1.0.1 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/explode.js CHANGED
@@ -10,8 +10,8 @@ const args = minimist(process.argv.slice(2), {
   string: ['output', 'offset', 'input-buffer-size', 'output-buffer-size'],
   boolean: ['version', 'drop-before-offset', 'debug'],
   alias: {
-    v: 'version'
-  }
+    v: 'version',
+  },
 })
 
 const decompress = (input, output, offset, keepHeader, config) => {
@@ -27,7 +27,8 @@ const decompress = (input, output, offset, keepHeader, config) => {
 
 ;(async () => {
   if (args.version) {
-    console.log(await getPackageVersion())
+    const version = await getPackageVersion()
+    console.log(`node-pkware - version ${version}`)
     process.exit(0)
   }
 
@@ -63,14 +64,14 @@ const decompress = (input, output, offset, keepHeader, config) => {
   const config = {
     debug: args.debug,
     inputBufferSize: parseNumberString(args['input-buffer-size'], 0x10000),
-    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x40000)
+    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x40000),
   }
 
   decompress(input, output, offset, keepHeader, config)
     .then(() => {
       process.exit(0)
     })
-    .catch(e => {
+    .catch((e) => {
       console.error(`error: ${e.message}`)
       process.exit(1)
     })
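With this change the --version flag prints a labelled banner rather than the bare version string. An illustrative invocation (a sketch only, assuming the explode bin shipped by this package is on the PATH and getPackageVersion reads the version field from package.json):

    $ explode --version
    node-pkware - version 1.0.2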
package/bin/implode.js CHANGED
@@ -7,7 +7,7 @@ const {
   COMPRESSION_ASCII,
   DICTIONARY_SIZE_SMALL,
   DICTIONARY_SIZE_MEDIUM,
-  DICTIONARY_SIZE_LARGE
+  DICTIONARY_SIZE_LARGE,
 } = require('../src/constants.js')
 const { getPackageVersion, parseNumberString, fileExists } = require('../src/helpers/functions.js')
 const { implode } = require('../src/implode.js')
@@ -33,13 +33,14 @@ const args = minimist(process.argv.slice(2), {
     s: 'small',
     m: 'medium',
     l: 'large',
-    v: 'version'
-  }
+    v: 'version',
+  },
 })
 
 ;(async () => {
   if (args.version) {
-    console.log(await getPackageVersion())
+    const version = await getPackageVersion()
+    console.log(`node-pkware - version ${version}`)
     process.exit(0)
   }
 
@@ -67,7 +68,9 @@ const args = minimist(process.argv.slice(2), {
     hasErrors = true
   }
 
-  const sizes = [args.small, args.medium, args.large].filter(x => x === true)
+  const sizes = [args.small, args.medium, args.large].filter((x) => {
+    return x === true
+  })
   if (sizes.length > 1) {
     console.error('error: multiple size types specified, can only work with one of --small, --medium and --large')
     hasErrors = true
@@ -99,14 +102,14 @@ const args = minimist(process.argv.slice(2), {
   const config = {
     debug: args.debug,
     inputBufferSize: parseNumberString(args['input-buffer-size'], 0x10000),
-    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x12000)
+    outputBufferSize: parseNumberString(args['output-buffer-size'], 0x12000),
   }
 
   decompress(input, output, offset, keepHeader, compressionType, dictionarySize, config)
     .then(() => {
       process.exit(0)
     })
-    .catch(e => {
+    .catch((e) => {
       console.error(`error: ${e.message}`)
       process.exit(1)
     })
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "node-pkware",
-  "version": "1.0.1",
+  "version": "1.0.2",
   "description": "The nodejs implementation of StormLib's pkware compressor/de-compressor",
   "main": "src/index.js",
   "types": "types/index.d.ts",
@@ -13,7 +13,7 @@
     "implode": "bin/implode.js"
   },
   "scripts": {
-    "lint": "eslint \"src/**/*.js\"",
+    "lint": "eslint \"{bin,src,test}/**/*.{js,ts}\"",
     "lint:fix": "npm run lint -- --fix",
     "lint:staged": "lint-staged",
     "test:unit": "set FORCE_COLOR=true && mocha test/**/*.spec.js --timeout 5000",
@@ -33,28 +33,30 @@
   "license": "GPL-3.0-or-later",
   "dependencies": {
     "minimist-lite": "^2.2.1",
-    "ramda": "^0.28.0",
-    "ramda-adjunct": "^3.1.0"
+    "ramda": "^0.28.0"
   },
   "devDependencies": {
+    "@types/node": "^18.7.14",
     "arx-header-size": "^0.7.0",
     "binary-comparator": "^0.5.0",
-    "eslint": "^8.15.0",
+    "esbuild": "^0.15.6",
+    "eslint": "^8.23.0",
     "eslint-config-prettier": "^8.5.0",
     "eslint-config-prettier-standard": "^4.0.1",
     "eslint-config-standard": "^17.0.0",
     "eslint-plugin-import": "^2.26.0",
     "eslint-plugin-node": "^11.1.0",
-    "eslint-plugin-prettier": "^4.0.0",
-    "eslint-plugin-promise": "^6.0.0",
+    "eslint-plugin-prettier": "^4.2.1",
+    "eslint-plugin-promise": "^6.0.1",
     "eslint-plugin-ramda": "^2.5.1",
     "eslint-plugin-standard": "^4.1.0",
-    "lint-staged": "^12.4.1",
+    "lint-staged": "^13.0.3",
     "mocha": "^10.0.0",
-    "nodemon": "^2.0.16",
+    "nodemon": "^2.0.19",
     "pre-commit": "^1.2.2",
-    "prettier": "^2.6.2",
-    "prettier-config-standard": "^5.0.0"
+    "prettier": "^2.7.1",
+    "prettier-config-standard": "^5.0.0",
+    "typescript": "^4.8.2"
   },
   "pre-commit": [
     "lint:staged",
package/src/constants.js CHANGED
@@ -135,5 +135,5 @@ module.exports = {
   ExLenBits,
   LenBase,
   ChBitsAsc,
-  ChCodeAsc
+  ChCodeAsc,
 }
package/src/errors.js CHANGED
@@ -46,5 +46,5 @@ module.exports = {
   InvalidDataError,
   AbortedError,
   ExpectedBufferError,
-  ExpectedFunctionError
+  ExpectedFunctionError,
 }
package/src/explode.js CHANGED
@@ -1,14 +1,13 @@
-const { repeat, unfold, reduce, has, includes } = require('ramda')
-const { isFunction } = require('ramda-adjunct')
+const { repeat, unfold, has } = require('ramda')
 const {
   InvalidDataError,
   InvalidCompressionTypeError,
   InvalidDictionarySizeError,
   ExpectedBufferError,
   ExpectedFunctionError,
-  AbortedError
+  AbortedError,
 } = require('./errors.js')
-const { mergeSparseArrays, getLowestNBits, nBitsOfOnes, toHex } = require('./helpers/functions.js')
+const { mergeSparseArrays, getLowestNBits, nBitsOfOnes, toHex, isFunction } = require('./helpers/functions.js')
 const {
   ChBitsAsc,
   ChCodeAsc,
@@ -26,11 +25,11 @@ const {
   ExLenBits,
   DistBits,
   LenCode,
-  DistCode
+  DistCode,
 } = require('./constants.js')
 const ExpandingBuffer = require('./helpers/ExpandingBuffer.js')
 
-const readHeader = buffer => {
+const readHeader = (buffer) => {
   if (!Buffer.isBuffer(buffer)) {
     throw new ExpectedBufferError()
   }
@@ -40,22 +39,24 @@ const readHeader = buffer => {
 
   const compressionType = buffer.readUInt8(0)
   const dictionarySizeBits = buffer.readUInt8(1)
-  if (compressionType !== COMPRESSION_BINARY && compressionType !== COMPRESSION_ASCII) {
+  if (![COMPRESSION_BINARY, COMPRESSION_ASCII].includes(compressionType)) {
     throw new InvalidCompressionTypeError()
   }
-  if (!includes(dictionarySizeBits, [DICTIONARY_SIZE_SMALL, DICTIONARY_SIZE_MEDIUM, DICTIONARY_SIZE_LARGE])) {
+  if (![DICTIONARY_SIZE_SMALL, DICTIONARY_SIZE_MEDIUM, DICTIONARY_SIZE_LARGE].includes(dictionarySizeBits)) {
     throw new InvalidDictionarySizeError()
   }
 
   return {
     compressionType,
-    dictionarySizeBits
+    dictionarySizeBits,
   }
 }
 
 // PAT = populate ascii table
-const createPATIterator = (limit, stepper) => n => {
-  return n >= limit ? false : [n, n + (1 << stepper)]
+const createPATIterator = (limit, stepper) => {
+  return (n) => {
+    return n >= limit ? false : [n, n + (1 << stepper)]
+  }
 }
 
 const populateAsciiTable = (value, index, bits, limit) => {
@@ -63,14 +64,10 @@ const populateAsciiTable = (value, index, bits, limit) => {
   const seed = ChCodeAsc[index] >> bits
   const idxs = unfold(iterator, seed)
 
-  return reduce(
-    (acc, idx) => {
-      acc[idx] = index
-      return acc
-    },
-    [],
-    idxs
-  )
+  return idxs.reduce((acc, idx) => {
+    acc[idx] = index
+    return acc
+  }, [])
 }
 
 const generateAsciiTables = () => {
@@ -78,7 +75,7 @@ const generateAsciiTables = () => {
     asciiTable2C34: repeat(0, 0x100),
     asciiTable2D34: repeat(0, 0x100),
     asciiTable2E34: repeat(0, 0x80),
-    asciiTable2EB4: repeat(0, 0x100)
+    asciiTable2EB4: repeat(0, 0x100),
   }
 
   tables.chBitsAsc = ChBitsAsc.map((value, index) => {
@@ -132,7 +129,7 @@ const parseInitialData = (state, debug = false) => {
     console.log(
       `explode: compression level: ${
         state.dictionarySizeBits === 4 ? 'small' : state.dictionarySizeBits === 5 ? 'medium' : 'large'
-      }`
+      }`,
     )
   }
 
@@ -158,7 +155,7 @@ const wasteBits = (state, numberOfBits) => {
   return PKDCL_OK
 }
 
-const decodeNextLiteral = state => {
+const decodeNextLiteral = (state) => {
   const lastBit = state.bitBuffer & 1
 
   if (wasteBits(state, 1) === PKDCL_STREAM_END) {
@@ -354,7 +351,7 @@ const explode = (config = {}) => {
   handler._state = {
     _backup: {
       extraBits: null,
-      bitBuffer: null
+      bitBuffer: null,
     },
     needMoreInput: true,
     isFirstChunk: true,
@@ -364,7 +361,7 @@ const explode = (config = {}) => {
     distPosCodes: generateDecodeTables(DistCode, DistBits),
     inputBuffer: new ExpandingBuffer(inputBufferSize),
     outputBuffer: new ExpandingBuffer(outputBufferSize),
-    onInputFinished: callback => {
+    onInputFinished: (callback) => {
       const state = handler._state
 
       if (debug) {
@@ -393,8 +390,8 @@ const explode = (config = {}) => {
       state.inputBuffer._restoreIndices()
     },
     stats: {
-      chunkCounter: 0
-    }
+      chunkCounter: 0,
+    },
   }
 
   return handler
@@ -410,5 +407,5 @@ module.exports = {
   wasteBits,
   decodeNextLiteral,
   decodeDistance,
-  generateDecodeTables
+  generateDecodeTables,
 }
package/src/helpers/ExpandingBuffer.js CHANGED
@@ -9,7 +9,7 @@ class ExpandingBuffer {
 
     this._backup = {
       _startIndex: 0,
-      _endIndex: 0
+      _endIndex: 0,
     }
   }
 
package/src/helpers/functions.js CHANGED
@@ -1,6 +1,24 @@
 const fs = require('fs')
-const { repeat, test } = require('ramda')
-const { isNumber, isString } = require('ramda-adjunct')
+const { repeat, test, type } = require('ramda')
+
+const isNumber = (x) => {
+  return typeof x === 'number'
+}
+
+const isString = (x) => {
+  return typeof x === 'string'
+}
+
+const isFunction = (x) => {
+  return type(x) === 'Function'
+}
+
+const noop = () => {}
+
+// https://stackoverflow.com/a/68989785/1806628
+const isPlainObject = (x) => {
+  return x.constructor === Object
+}
 
 const isBetween = (min, max, num) => {
   if (!isNumber(min) || !isNumber(max) || !isNumber(num)) {
@@ -13,7 +31,7 @@ const isBetween = (min, max, num) => {
   return num >= min && num <= max
 }
 
-const nBitsOfOnes = numberOfBits => {
+const nBitsOfOnes = (numberOfBits) => {
   if (!Number.isInteger(numberOfBits) || numberOfBits < 0) {
     return null
   }
@@ -37,7 +55,7 @@ const getLowestNBits = (numberOfBits, number) => {
 
 const isDecimalString = test(/^\d+$/)
 
-const isFullHexString = str => {
+const isFullHexString = (str) => {
   if (isString(str)) {
     return /^\s*0x[0-9a-f]+\s*$/.test(str)
   } else {
@@ -77,7 +95,9 @@ const mergeSparseArrays = (a, b) => {
 /*
 export const dumpBytes = bytes => {
   const formattedBytes = Array.from(bytes)
-    .map(byte => toHex(byte, 2, true))
+    .map(byte => {
+      return toHex(byte, 2, true)
+    })
     .join(' ')
   return `<${formattedBytes}>`
 }
@@ -102,7 +122,7 @@ const getPackageVersion = async () => {
   }
 }
 
-const fileExists = async filename => {
+const fileExists = async (filename) => {
   try {
     await fs.promises.access(filename, fs.constants.R_OK)
     return true
@@ -112,6 +132,11 @@ const fileExists = async filename => {
 }
 
 module.exports = {
+  isNumber,
+  isString,
+  isFunction,
+  noop,
+  isPlainObject,
   isBetween,
   nBitsOfOnes,
   maskBits,
@@ -121,5 +146,5 @@ module.exports = {
   mergeSparseArrays,
   parseNumberString,
   getPackageVersion,
-  fileExists
+  fileExists,
 }
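The net effect of these hunks is that the predicates previously imported from ramda-adjunct (isNumber, isString, isFunction) are now defined locally and exported from the helpers module, which is what allows the ramda-adjunct dependency to be dropped from package.json. A minimal sketch of how an internal call site changes (illustrative only; the require path is as seen from a module inside src/):

    // before (1.0.1)
    // const { isFunction } = require('ramda-adjunct')

    // after (1.0.2)
    const { isFunction, isNumber, isString } = require('./helpers/functions.js')

    isFunction(() => {})  // true
    isNumber('42')        // false: only primitive numbers pass
    isString('0x100')     // true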
@@ -1,7 +1,7 @@
 const { Transform, Writable } = require('stream')
 const { promisify } = require('util')
-const { isFunction } = require('ramda-adjunct')
 const ExpandingBuffer = require('./ExpandingBuffer.js')
+const { isFunction } = require('./functions.js')
 
 const emptyBuffer = Buffer.from([])
 
@@ -15,7 +15,7 @@ class QuasiTransform {
   }
 }
 
-const splitAt = index => {
+const splitAt = (index) => {
   let cntr = 0
 
   if (!Number.isInteger(index) || index < 0) {
@@ -24,7 +24,7 @@ const splitAt = index => {
     }
   }
 
-  return chunk => {
+  return (chunk) => {
     let left
     let right
     let isLeftDone = true
@@ -66,9 +66,9 @@ const transformEmpty = () => {
   }
 }
 
-const through = handler => {
+const through = (handler) => {
   return new Transform({
-    transform: handler
+    transform: handler,
   })
 }
 
@@ -89,7 +89,7 @@ const transformSplitBy = (predicate, leftHandler, rightHandler) => {
 
     if (isFirstChunk) {
       isFirstChunk = false
-      this._flush = flushCallback => {
+      this._flush = (flushCallback) => {
         if (!dam.isEmpty()) {
           this.push(dam.read())
         }
@@ -122,10 +122,10 @@ const transformSplitBy = (predicate, leftHandler, rightHandler) => {
         }
 
         Promise.all([leftFiller, rightFiller])
-          .then(buffers => {
+          .then((buffers) => {
             flushCallback(null, Buffer.concat(buffers))
           })
-          .catch(err => {
+          .catch((err) => {
             flushCallback(err)
           })
       }
@@ -146,7 +146,7 @@ const transformSplitBy = (predicate, leftHandler, rightHandler) => {
       }
 
       Promise.all([_left, filler, _right])
-        .then(buffers => {
+        .then((buffers) => {
           dam.append(Buffer.concat(buffers))
           if (dam.size() > damChunkSize) {
             const chunks = Math.floor(dam.size() / damChunkSize)
@@ -160,13 +160,13 @@ const transformSplitBy = (predicate, leftHandler, rightHandler) => {
             callback(null, emptyBuffer)
           }
         })
-        .catch(err => {
+        .catch((err) => {
           callback(err)
         })
     }
   }
 
-const streamToBuffer = done => {
+const streamToBuffer = (done) => {
   const buffer = new ExpandingBuffer()
   return new Writable({
     write(chunk, encoding, callback) {
@@ -176,7 +176,7 @@ const streamToBuffer = done => {
     final(callback) {
       done(buffer.getHeap())
       callback()
-    }
+    },
   })
 }
 
@@ -186,5 +186,5 @@ module.exports = {
   transformEmpty,
   through,
   transformSplitBy,
-  streamToBuffer
+  streamToBuffer,
 }
@@ -8,10 +8,12 @@ const isPromise = promise => {
   return typeof promise === 'object' && promise.constructor.name === 'Promise'
 }
 
-const toConsole = () => (chunk, encoding, callback) => {
-  process.stdout.write(chunk)
-  process.stdout.write(Buffer.from(EOL))
-  callback(null, chunk)
+const toConsole = () => {
+  return (chunk, encoding, callback) => {
+    process.stdout.write(chunk)
+    process.stdout.write(Buffer.from(EOL))
+    callback(null, chunk)
+  }
 }
 
 const readToBuffer = (fileName, chunkSizeInBytes = 1024) => {
@@ -30,7 +32,7 @@ const readToBuffer = (fileName, chunkSizeInBytes = 1024) => {
 */
 
 // source: https://stackoverflow.com/a/43197340/1806628
-const isClass = obj => {
+const isClass = (obj) => {
   const isCtorClass = obj.constructor && obj.constructor.toString().substring(0, 5) === 'class'
   if (obj.prototype === undefined) {
     return isCtorClass
@@ -74,5 +76,5 @@ module.exports = {
   isClass,
   buffersShouldEqual,
   bufferToString,
-  transformToABC
+  transformToABC,
 }
package/src/implode.js CHANGED
@@ -1,7 +1,6 @@
 const { has, repeat, clone, last, clamp } = require('ramda')
-const { isFunction } = require('ramda-adjunct')
 const ExpandingBuffer = require('./helpers/ExpandingBuffer.js')
-const { toHex, getLowestNBits, nBitsOfOnes } = require('./helpers/functions.js')
+const { toHex, getLowestNBits, nBitsOfOnes, isFunction } = require('./helpers/functions.js')
 const { ExpectedFunctionError, InvalidDictionarySizeError, InvalidCompressionTypeError } = require('./errors.js')
 const {
   ChBitsAsc,
@@ -16,10 +15,10 @@ const {
   LenBits,
   LenCode,
   DistCode,
-  DistBits
+  DistBits,
 } = require('./constants.js')
 
-const setup = state => {
+const setup = (state) => {
   state.nChBits = repeat(0, 0x306)
   state.nChCodes = repeat(0, 0x306)
 
@@ -128,7 +127,7 @@ const findRepetitions = (inputBytes, endOfLastMatch, cursor) => {
   const distance = cursor - endOfLastMatch - matchIndex
   return {
     distance: distance - 1,
-    size: distance > 2 ? getSizeOfMatching(inputBytes, endOfLastMatch + matchIndex, cursor) : 2
+    size: distance > 2 ? getSizeOfMatching(inputBytes, endOfLastMatch + matchIndex, cursor) : 2,
   }
 }
 
@@ -162,7 +161,7 @@ const isRepetitionFlushable = (size, distance, startIndex, inputBufferSize) => {
 
 // repetitions are at least 2 bytes long,
 // so the initial 2 bytes can be moved to the output as is
-const handleFirstTwoBytes = state => {
+const handleFirstTwoBytes = (state) => {
   if (state.handledFirstTwoBytes) {
     return
   }
@@ -319,15 +318,15 @@ const implode = (compressionType, dictionarySizeBits, config = {}) => {
   handler._state = {
     isFirstChunk: true,
     streamEnded: false,
-    compressionType: compressionType,
-    dictionarySizeBits: dictionarySizeBits,
+    compressionType,
+    dictionarySizeBits,
     distCodes: clone(DistCode),
     distBits: clone(DistBits),
     startIndex: 0,
     inputBuffer: new ExpandingBuffer(inputBufferSize),
     outputBuffer: new ExpandingBuffer(outputBufferSize),
    handledFirstTwoBytes: false,
-    onInputFinished: callback => {
+    onInputFinished: (callback) => {
      const state = handler._state
      state.streamEnded = true
      try {
@@ -346,8 +345,8 @@ const implode = (compressionType, dictionarySizeBits, config = {}) => {
       }
     },
     stats: {
-      chunkCounter: 0
-    }
+      chunkCounter: 0,
+    },
   }
 
   return handler
@@ -361,5 +360,5 @@ module.exports = {
   isRepetitionFlushable,
   handleFirstTwoBytes,
   processChunkData,
-  implode
+  implode,
 }
package/src/index.js CHANGED
@@ -14,5 +14,5 @@ module.exports = {
   decompress,
   constants,
   errors,
-  stream
+  stream,
 }
package/tsconfig.json ADDED
@@ -0,0 +1,20 @@
+{
+  "compilerOptions": {
+    "noEmit": true,
+    "skipLibCheck": true,
+    "target": "esnext",
+    "moduleResolution": "node",
+    "allowJs": true,
+    "noImplicitAny": false,
+    "strict": true,
+    "forceConsistentCasingInFileNames": true,
+    "esModuleInterop": true,
+    "module": "commonjs",
+    "resolveJsonModule": true,
+    "isolatedModules": true,
+    "incremental": true,
+    "allowSyntheticDefaultImports": true
+  },
+  "include": ["src/**/*"],
+  "exclude": ["node_modules"]
+}
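The new config only type-checks the existing JavaScript sources (allowJs together with noEmit over src/), which lines up with the typescript and @types/node devDependencies added above. No typecheck script appears in this diff; an illustrative way to run the check directly with the bundled compiler would be:

    $ npx tsc -p tsconfig.json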
@@ -1,3 +1,8 @@
+export function isNumber(x: any): boolean
+export function isString(x: any): boolean
+export function isFunction(x: any): boolean
+export function noop(): void
+export function isPlainObject(x: any): boolean
 export function isBetween(min: number, max: number, num: number): boolean
 export function nBitsOfOnes(numberOfBits: number): number
 export function maskBits(numberOfBits: number, number: number): number