node-pkware 2.0.0 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +42 -40
- package/dist/ExpandingBuffer.d.ts +53 -0
- package/dist/ExpandingBuffer.js +134 -0
- package/dist/ExpandingBuffer.js.map +1 -0
- package/dist/Explode.d.ts +8 -0
- package/dist/Explode.js +309 -0
- package/dist/Explode.js.map +1 -0
- package/dist/Implode.d.ts +11 -0
- package/dist/Implode.js +305 -0
- package/dist/Implode.js.map +1 -0
- package/dist/bin/explode.d.ts +2 -0
- package/dist/bin/explode.js +59 -0
- package/dist/bin/explode.js.map +1 -0
- package/dist/bin/helpers.d.ts +8 -0
- package/dist/bin/helpers.js +65 -0
- package/dist/bin/helpers.js.map +1 -0
- package/dist/bin/implode.d.ts +2 -0
- package/dist/bin/implode.js +79 -0
- package/dist/bin/implode.js.map +1 -0
- package/dist/constants.d.ts +32 -0
- package/dist/constants.js +114 -0
- package/dist/constants.js.map +1 -0
- package/{types → dist}/errors.d.ts +13 -11
- package/dist/errors.js +52 -0
- package/dist/errors.js.map +1 -0
- package/dist/functions.d.ts +11 -0
- package/dist/functions.js +73 -0
- package/dist/functions.js.map +1 -0
- package/dist/index.d.ts +20 -0
- package/dist/index.js +54 -0
- package/dist/index.js.map +1 -0
- package/{types/helpers → dist}/stream.d.ts +13 -34
- package/dist/stream.js +205 -0
- package/dist/stream.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/dist/types.d.ts +25 -0
- package/dist/types.js +3 -0
- package/dist/types.js.map +1 -0
- package/package.json +14 -45
- package/bin/explode.js +0 -78
- package/bin/implode.js +0 -116
- package/src/constants.js +0 -139
- package/src/errors.js +0 -50
- package/src/explode.js +0 -411
- package/src/helpers/ExpandingBuffer.js +0 -123
- package/src/helpers/functions.js +0 -150
- package/src/helpers/stream.js +0 -190
- package/src/helpers/testing.js +0 -80
- package/src/implode.js +0 -364
- package/src/index.js +0 -18
- package/tsconfig.json +0 -20
- package/types/constants.d.ts +0 -41
- package/types/explode.d.ts +0 -56
- package/types/helpers/ExpandingBuffer.d.ts +0 -25
- package/types/helpers/Shared.d.ts +0 -46
- package/types/helpers/functions.d.ts +0 -15
- package/types/helpers/testing.d.ts +0 -6
- package/types/implode.d.ts +0 -63
- package/types/index.d.ts +0 -8
package/README.md
CHANGED
@@ -1,30 +1,36 @@
 # node-pkware
 
-Node JS implementation of StormLib's Pkware compression/decompression algorithm
+Node JS implementation of StormLib's Pkware compression/decompression algorithm, which is not the same as the zip pkware
+format that is commonly used today
 
-It was the de-facto compression for games from around Y2K, like Arx Fatalis
+It was the de-facto compression for games from around Y2K, like [Arx Fatalis](https://en.wikipedia.org/wiki/Arx_Fatalis)
 
 ## installation / update existing version
 
 `npm i -g node-pkware`
 
-minimum required node version:
+minimum required node version: 18.0.0
 
-
+## command line interface (CLI)
 
-
+`explode [<filename>] [--offset=<offset>] [--drop-before-offset] [--output=<filename> [--verbose]]` - decompresses a file or a stream
 
-
+`implode [<filename>] <compression type> <dictionary size> [--offset=<offset>] [--drop-before-offset] [--output=<filename> [--verbose]]` - compresses a file or a stream
 
-
+`<filename>`, `--output` or both can be omitted when the input is being piped from stdin or when the output is being piped into stdout
 
-
+The `--offset` can have a numeric value in either decimal or hexadecimal format which tells explode or implode to start decompression at a later point.
+This is useful for partially compressed files where the initial header part is uncompressed while the remaining part is compressed.
 
 The `--drop-before-offset` flag tells node-pkware to drop the portion before `--offset`, otherwise it will keep it untouched and attach it to the output file.
 
-
+The `--verbose` flag will display additional information while running the commands
 
-
+For implode `<compression type>` can either be `--ascii` or `--binary`
+
+For implode `<dictionary size>` can either be `--small`, `--medium` or `--large`
+
+Calling either explode or implode with only the `-v` or `--version` flag will display the package's version
 
 ## examples
 
@@ -34,14 +40,8 @@ Calling either explode or implode with the `-v` or `--version` flag will display
 
 `implode test/files/fast.fts.unpacked --output=C:/fast.fts --binary --large --offset=1816`
 
-`explode test/files/fast.fts --auto-detect --verbose --output=E:/fast.fts.unpacked`
-
-`explode test/files/fast.fts --auto-detect --verbose --output=E:/fast.fts.unpacked --offset=2000`
-
 ### piping also works
 
-**Don't use --verbose when piping, because verbose messages will be outputted to where the decompressed data is being outputted!**
-
 `cat c:/arx/level8.llf | explode > c:/arx/level8.llf.unpacked`
 
 `explode c:/arx/level8.llf > c:/arx/level8.llf.unpacked`
@@ -60,35 +60,37 @@ Calling either explode or implode with the `-v` or `--version` flag will display
 
 `explode(config: object): transform._transform` - decompresses stream
 
-
+`decompress(config: object): transform._transform` - alias for explode
+
+Returns a function, that you can use as a [transform.\_transform](https://nodejs.org/api/stream.html#stream_transform_transform_chunk_encoding_callback) method.
+The returned function has the `(chunk: Buffer, encoding: string, callback: function)` parameter signature.
 
 Takes an optional config object, which has the following properties:
 
 ```js
 {
   verbose: boolean, // whether the code should display extra debug messages on the console or not (default = false)
-  inputBufferSize: int, // the starting size of the input buffer, may expand later as needed. Not having to expand may have performance impact (default 0)
+  inputBufferSize: int, // the starting size of the input buffer, may expand later as needed. Not having to expand may have positive performance impact (default 0)
   outputBufferSize: int // same as inputBufferSize, but for the outputBuffer (default 0)
 }
 ```
 
-`decompress(config: object): transform._transform` - alias for explode
-
 `implode(compressionType: int, dictionarySize: int, config: object): transform._transform` - compresses stream
 
+`compress(compressionType: int, dictionarySize: int, config: object): transform._transform` - alias for implode
+
 Takes an optional config object, which has the following properties:
 
 ```js
 {
   verbose: boolean, // whether the code should display extra debug messages on the console or not (default = false)
-  inputBufferSize: int, // the starting size of the input buffer, may expand later as needed. Not having to expand may have performance impact (default 0)
+  inputBufferSize: int, // the starting size of the input buffer, may expand later as needed. Not having to expand may have positive performance impact (default 0)
   outputBufferSize: int // same as inputBufferSize, but for the outputBuffer (default 0)
 }
 ```
 
-Returns a function, that you can use as a [transform.\_transform](https://nodejs.org/api/stream.html#stream_transform_transform_chunk_encoding_callback) method.
-
-`compress(compressionType: int, dictionarySize: int, config: object): transform._transform` - aliasa for implode
+Returns a function, that you can use as a [transform.\_transform](https://nodejs.org/api/stream.html#stream_transform_transform_chunk_encoding_callback) method.
+The returned function has the `(chunk: Buffer, encoding: string, callback: function)` parameter signature.
 
 `stream` - an object of helper functions for channeling streams to and from explode/implode
 
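For orientation, here is what the compression direction documented above looks like end to end. This is a minimal sketch, assuming `implode` and `constants` are exported from the package root the same way `explode` and `stream` are in the README's own examples; the constants used are the ones documented further down in this diff:

```js
const fs = require('node:fs')
// assumed exports: implode and constants alongside explode and stream
const { implode, constants, stream } = require('node-pkware')
const { through } = stream

// binary compression with a large dictionary, per the constants documented below
fs.createReadStream('path-to-uncompressed-file')
  .pipe(through(implode(constants.Compression.Binary, constants.DictionarySize.Large)))
  .pipe(fs.createWriteStream('path-to-compressed-file'))
```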
@@ -102,7 +104,7 @@ Returns a function, that you can use as a [transform.\_transform](https://nodejs
 
 ```
 1) [inputBuffer, emptyBuffer, false]
-2) [inputBuffer.
+2) [inputBuffer.subarray(0, 40), inputBuffer.subarray(40, 60), true]
 3) [emptyBuffer, inputBuffer, true]
 4) [emptyBuffer, inputBuffer, true]
 ... and so on
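The sequence above reads as repeated calls to a single predicate returned by `stream.splitAt`. A hypothetical walk-through that would reproduce it, assuming 60-byte chunks and a split index of 100, so that the split lands 40 bytes into the second chunk:

```js
const { stream } = require('node-pkware')
const { splitAt } = stream

const predicate = splitAt(100) // hypothetical split index chosen to match the sequence above
const chunk = Buffer.alloc(60)

predicate(chunk) // 1) bytes 0-59 are all before the split:  [chunk, emptyBuffer, false]
predicate(chunk) // 2) the split falls inside bytes 60-119:  [chunk.subarray(0, 40), chunk.subarray(40, 60), true]
predicate(chunk) // 3) everything from here on is after it:  [emptyBuffer, chunk, true]
```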
@@ -110,11 +112,11 @@ Returns a function, that you can use as a [transform.\_transform](https://nodejs
 
 `stream.transformSplitBy(predicate: predicate, left: transform._transform, right: transform._transform): transform._transform` - higher order function for introducing conditional logic to transform.\_transform functions. This is used internally to handle offsets for explode()
 
-`stream.
+`stream.toBuffer(callback: function): writable._write` - data can be piped to the returned function from a stream and it will concatenate all chunks into a single buffer. Takes a callback function, which will receive the concatenated buffer as a parameter
 
-`constants.
+`constants.Compression.Binary` and `constants.Compression.Ascii` - compression types for implode
 
-`constants.
+`constants.DictionarySize.Small`, `constants.DictionarySize.Medium` and `constants.DictionarySize.Large` - dictionary sizes for implode, determines how well the file get compressed. Small dictionary size means less memory to lookback in data for repetitions, meaning it will be less effective, the file stays larger, less compressed. On the other hand, large compression allows more lookback allowing more effective compression, thus generating smaller, more compressed files. The original C library used less memory when the dictionary size was smaller, plus there might be files out there which only support smaller dictionary sizes
 
 `errors.InvalidDictionarySizeError` - thrown by implode when invalid dictionary size was specified or by explode when it encounters invalid data in the header section (the first 2 bytes of a compressed files)
 
@@ -128,8 +130,8 @@ Returns a function, that you can use as a [transform.\_transform](https://nodejs
 
 #### decompressing file with no offset into a file
 
-```
-const fs = require('fs')
+```js
+const fs = require('node:fs')
 const { explode, stream } = require('node-pkware')
 const { through } = stream
 
@@ -140,15 +142,15 @@ fs.createReadStream(`path-to-compressed-file`)
 
 #### decompressing buffer with no offset into a buffer
 
-```
-const { Readable } = require('stream')
+```js
+const { Readable } = require('node:stream')
 const { explode, stream } = require('node-pkware')
-const { through,
+const { through, toBuffer } = stream
 
 Readable.from(buffer) // buffer is of type Buffer with compressed data
   .pipe(through(explode()))
   .pipe(
-
+    toBuffer((decompressedData) => {
       // decompressedData holds the decompressed buffer
     }),
   )
@@ -156,8 +158,8 @@ Readable.from(buffer) // buffer is of type Buffer with compressed data
 
 #### decompressing file with offset into a file, keeping initial part intact
 
-```
-const fs = require('fs')
+```js
+const fs = require('node:fs')
 const { explode, stream } = require('node-pkware')
 const { through, transformSplitBy, splitAt, transformIdentity } = stream
 
@@ -170,8 +172,8 @@ fs.createReadStream(`path-to-compressed-file`)
 
 #### decompressing file with offset into a file, discarding initial part
 
-```
-const fs = require('fs')
+```js
+const fs = require('node:fs')
 const { explode, stream } = require('node-pkware')
 const { through, transformSplitBy, splitAt, transformEmpty } = stream
 
@@ -184,8 +186,8 @@ fs.createReadStream(`path-to-compressed-file`)
 
 ### Catching errors
 
-```
-const fs = require('fs')
+```js
+const fs = require('node:fs')
 const { explode, stream } = require('node-pkware')
 const { through } = stream
 
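The body of the catching-errors example is trimmed in this diff, so as a sketch only: since `explode()` feeds a regular Transform stream, errors such as the ones listed above surface as stream 'error' events. The `errors` export name is assumed from the README's `errors.InvalidDictionarySizeError` reference:

```js
const fs = require('node:fs')
// the errors export is an assumption based on the README's errors.* references
const { explode, errors, stream } = require('node-pkware')
const { through } = stream

fs.createReadStream('path-to-compressed-file')
  .pipe(through(explode()))
  .on('error', (error) => {
    if (error instanceof errors.InvalidDictionarySizeError) {
      // the first 2 bytes of the input did not contain a valid dictionary size
    }
  })
  .pipe(fs.createWriteStream('path-to-decompressed-file'))
```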
package/dist/ExpandingBuffer.d.ts
ADDED
@@ -0,0 +1,53 @@
+/// <reference types="node" />
+import { Buffer } from 'node:buffer';
+export declare class ExpandingBuffer {
+    #private;
+    constructor(numberOfBytes?: number);
+    size(): number;
+    isEmpty(): boolean;
+    heapSize(): number;
+    append(buffer: Buffer): void;
+    /**
+     * Watch out! The returned slice of Buffer points to the same Buffer in memory!
+     */
+    read(offset?: number, limit?: number): Buffer;
+    readByte(offset?: number): number;
+    /**
+     * Does hard delete
+     *
+     * Removes data from the start of the internal buffer (heap)
+     * by copying bytes to lower indices making sure the
+     * startIndex goes back to 0 afterwards
+     */
+    flushStart(numberOfBytes: number): void;
+    /**
+     * Does hard delete
+     *
+     * Removes data from the end of the internal buffer (heap)
+     * by moving the endIndex back
+     */
+    flushEnd(numberOfBytes: number): void;
+    /**
+     * Does soft delete
+     *
+     * Removes data from the start of the internal buffer (heap)
+     * by moving the startIndex forward
+     * When the heap gets empty it also resets the indices as a cleanup
+     */
+    dropStart(numberOfBytes: number): void;
+    /**
+     * Does soft delete
+     *
+     * removes data from the end of the internal buffer (heap)
+     * by moving the endIndex back
+     * When the heap gets empty it also resets the indices as a cleanup
+     */
+    dropEnd(numberOfBytes: number): void;
+    /**
+     * returns the internal buffer
+     */
+    getHeap(): Buffer;
+    clear(): void;
+    saveIndices(): void;
+    restoreIndices(): void;
+}
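The implementation of these declarations follows below. As a small usage sketch of the semantics (`ExpandingBuffer` is an internal helper, so the direct `dist` require path here is an assumption for illustration only, not a documented entry point):

```js
const { Buffer } = require('node:buffer')
// internal helper; requiring it from dist directly is an assumption
const { ExpandingBuffer } = require('node-pkware/dist/ExpandingBuffer')

const input = new ExpandingBuffer(1024) // pre-allocating lets early appends skip re-allocation
input.append(Buffer.from([1, 2, 3, 4]))
input.readByte(0) // 1
input.dropStart(2) // soft delete: only the start index moves, no bytes are copied
input.read() // <Buffer 03 04> - a view into the same heap, not a copy
```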
package/dist/ExpandingBuffer.js
ADDED
@@ -0,0 +1,134 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ExpandingBuffer = void 0;
+const node_buffer_1 = require("node:buffer");
+const functions_1 = require("./functions");
+class ExpandingBuffer {
+    #heap;
+    #startIndex = 0;
+    #endIndex = 0;
+    #backup = {
+        startIndex: 0,
+        endIndex: 0,
+    };
+    constructor(numberOfBytes = 0) {
+        this.#heap = node_buffer_1.Buffer.allocUnsafe(numberOfBytes);
+    }
+    #getActualData(offset = 0) {
+        return this.#heap.subarray(this.#startIndex + offset, this.#endIndex);
+    }
+    size() {
+        return this.#endIndex - this.#startIndex;
+    }
+    isEmpty() {
+        return this.size() === 0;
+    }
+    heapSize() {
+        return this.#heap.length;
+    }
+    append(buffer) {
+        if (this.#endIndex + buffer.length < this.heapSize()) {
+            buffer.copy(this.#heap, this.#endIndex);
+            this.#endIndex += buffer.length;
+        }
+        else {
+            this.#heap = node_buffer_1.Buffer.concat([this.#getActualData(), buffer]);
+            this.#startIndex = 0;
+            this.#endIndex = this.heapSize();
+        }
+    }
+    /**
+     * Watch out! The returned slice of Buffer points to the same Buffer in memory!
+     */
+    read(offset = 0, limit = this.size()) {
+        if (offset < 0 || limit < 1) {
+            return node_buffer_1.Buffer.from([]);
+        }
+        if (offset + limit < this.size()) {
+            return this.#heap.subarray(this.#startIndex + offset, this.#startIndex + limit + offset);
+        }
+        return this.#getActualData(offset);
+    }
+    readByte(offset = 0) {
+        return this.#heap[this.#startIndex + offset];
+    }
+    /**
+     * Does hard delete
+     *
+     * Removes data from the start of the internal buffer (heap)
+     * by copying bytes to lower indices making sure the
+     * startIndex goes back to 0 afterwards
+     */
+    flushStart(numberOfBytes) {
+        numberOfBytes = (0, functions_1.clamp)(0, this.heapSize(), numberOfBytes);
+        if (numberOfBytes > 0) {
+            if (numberOfBytes < this.heapSize()) {
+                this.#heap.copy(this.#heap, 0, this.#startIndex + numberOfBytes);
+            }
+            this.#endIndex -= this.#startIndex + numberOfBytes;
+            this.#startIndex = 0;
+        }
+    }
+    /**
+     * Does hard delete
+     *
+     * Removes data from the end of the internal buffer (heap)
+     * by moving the endIndex back
+     */
+    flushEnd(numberOfBytes) {
+        const clampedNumberOfBytes = (0, functions_1.clamp)(0, this.heapSize(), numberOfBytes);
+        if (clampedNumberOfBytes > 0) {
+            this.#endIndex -= clampedNumberOfBytes;
+        }
+    }
+    /**
+     * Does soft delete
+     *
+     * Removes data from the start of the internal buffer (heap)
+     * by moving the startIndex forward
+     * When the heap gets empty it also resets the indices as a cleanup
+     */
+    dropStart(numberOfBytes) {
+        if (numberOfBytes > 0) {
+            this.#startIndex += numberOfBytes;
+            if (this.#startIndex >= this.#endIndex) {
+                this.clear();
+            }
+        }
+    }
+    /**
+     * Does soft delete
+     *
+     * removes data from the end of the internal buffer (heap)
+     * by moving the endIndex back
+     * When the heap gets empty it also resets the indices as a cleanup
+     */
+    dropEnd(numberOfBytes) {
+        if (numberOfBytes > 0) {
+            this.#endIndex -= numberOfBytes;
+            if (this.#startIndex >= this.#endIndex) {
+                this.clear();
+            }
+        }
+    }
+    /**
+     * returns the internal buffer
+     */
+    getHeap() {
+        return this.#heap;
+    }
+    clear() {
+        this.#startIndex = 0;
+        this.#endIndex = 0;
+    }
+    saveIndices() {
+        this.#backup.startIndex = this.#startIndex;
+        this.#backup.endIndex = this.#endIndex;
+    }
+    restoreIndices() {
+        this.#startIndex = this.#backup.startIndex;
+        this.#endIndex = this.#backup.endIndex;
+    }
+}
+exports.ExpandingBuffer = ExpandingBuffer;
+//# sourceMappingURL=ExpandingBuffer.js.map
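Worth noting about the design above: `dropStart`/`dropEnd` are soft deletes that only move an index, while `flushStart`/`flushEnd` compact or truncate the heap itself, and the `saveIndices`/`restoreIndices` pair makes a speculative read cheap to roll back, which suits a decompressor that may need to re-read input once more data arrives. A sketch of that pattern, with the same assumed `dist` require path as above:

```js
const { Buffer } = require('node:buffer')
const { ExpandingBuffer } = require('node-pkware/dist/ExpandingBuffer') // assumed path

const input = new ExpandingBuffer()
input.append(Buffer.from([0x00, 0x04, 0x86]))

input.saveIndices()
input.dropStart(2) // consume two bytes speculatively; size() is now 1
input.restoreIndices() // not enough data after all: both indices snap back
input.size() // 3 again - nothing was copied or lost
```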
package/dist/ExpandingBuffer.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"ExpandingBuffer.js","sourceRoot":"","sources":["../src/ExpandingBuffer.ts"],"names":[],"mappings":";;;AAAA,6CAAoC;AACpC,2CAAmC;AAEnC,MAAa,eAAe;IAC1B,KAAK,CAAQ;IACb,WAAW,GAAW,CAAC,CAAA;IACvB,SAAS,GAAW,CAAC,CAAA;IACrB,OAAO,GAA6C;QAClD,UAAU,EAAE,CAAC;QACb,QAAQ,EAAE,CAAC;KACZ,CAAA;IAED,YAAY,gBAAwB,CAAC;QACnC,IAAI,CAAC,KAAK,GAAG,oBAAM,CAAC,WAAW,CAAC,aAAa,CAAC,CAAA;IAChD,CAAC;IAED,cAAc,CAAC,SAAiB,CAAC;QAC/B,OAAO,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,GAAG,MAAM,EAAE,IAAI,CAAC,SAAS,CAAC,CAAA;IACvE,CAAC;IAED,IAAI;QACF,OAAO,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,WAAW,CAAA;IAC1C,CAAC;IAED,OAAO;QACL,OAAO,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;IAC1B,CAAC;IAED,QAAQ;QACN,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,CAAA;IAC1B,CAAC;IAED,MAAM,CAAC,MAAc;QACnB,IAAI,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,QAAQ,EAAE,EAAE;YACpD,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAA;YACvC,IAAI,CAAC,SAAS,IAAI,MAAM,CAAC,MAAM,CAAA;SAChC;aAAM;YACL,IAAI,CAAC,KAAK,GAAG,oBAAM,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,cAAc,EAAE,EAAE,MAAM,CAAC,CAAC,CAAA;YAC3D,IAAI,CAAC,WAAW,GAAG,CAAC,CAAA;YACpB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAA;SACjC;IACH,CAAC;IAED;;OAEG;IACH,IAAI,CAAC,SAAiB,CAAC,EAAE,QAAgB,IAAI,CAAC,IAAI,EAAE;QAClD,IAAI,MAAM,GAAG,CAAC,IAAI,KAAK,GAAG,CAAC,EAAE;YAC3B,OAAO,oBAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;SACvB;QAED,IAAI,MAAM,GAAG,KAAK,GAAG,IAAI,CAAC,IAAI,EAAE,EAAE;YAChC,OAAO,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,GAAG,MAAM,EAAE,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,MAAM,CAAC,CAAA;SACzF;QAED,OAAO,IAAI,CAAC,cAAc,CAAC,MAAM,CAAC,CAAA;IACpC,CAAC;IAED,QAAQ,CAAC,SAAiB,CAAC;QACzB,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,CAAA;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,UAAU,CAAC,aAAqB;QAC9B,aAAa,GAAG,IAAA,iBAAK,EAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,EAAE,EAAE,aAAa,CAAC,CAAA;QACxD,IAAI,aAAa,GAAG,CAAC,EAAE;YACrB,IAAI,aAAa,GAAG,IAAI,CAAC,QAAQ,EAAE,EAAE;gBACnC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE,IAAI,CAAC,WAAW,GAAG,aAAa,CAAC,CAAA;aACjE;YAED,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,WAAW,GAAG,aAAa,CAAA;YAClD,IAAI,CAAC,WAAW,GAAG,CAAC,CAAA;SACrB;IACH,CAAC;IAED;;;;;OAKG;IACH,QAAQ,CAAC,aAAqB;QAC5B,MAAM,oBAAoB,GAAG,IAAA,iBAAK,EAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,EAAE,EAAE,aAAa,CAAC,CAAA;QACrE,IAAI,oBAAoB,GAAG,CAAC,EAAE;YAC5B,IAAI,CAAC,SAAS,IAAI,oBAAoB,CAAA;SACvC;IACH,CAAC;IAED;;;;;;OAMG;IACH,SAAS,CAAC,aAAqB;QAC7B,IAAI,aAAa,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,WAAW,IAAI,aAAa,CAAA;YACjC,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,SAAS,EAAE;gBACtC,IAAI,CAAC,KAAK,EAAE,CAAA;aACb;SACF;IACH,CAAC;IAED;;;;;;OAMG;IACH,OAAO,CAAC,aAAqB;QAC3B,IAAI,aAAa,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,SAAS,IAAI,aAAa,CAAA;YAC/B,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,SAAS,EAAE;gBACtC,IAAI,CAAC,KAAK,EAAE,CAAA;aACb;SACF;IACH,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,KAAK,CAAA;IACnB,CAAC;IAED,KAAK;QACH,IAAI,CAAC,WAAW,GAAG,CAAC,CAAA;QACpB,IAAI,CAAC,SAAS,GAAG,CAAC,CAAA;IACpB,CAAC;IAED,WAAW;QACT,IAAI,CAAC,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC,WAAW,CAAA;QAC1C,IAAI,CAAC,OAAO,CAAC,QAAQ,GAAG,IAAI,CAAC,SAAS,CAAA;IACxC,CAAC;IAED,cAAc;QACZ,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,CAAA;QAC1C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAA;IACxC,CAAC;CACF;AAhJD,0CAgJC"}
package/dist/Explode.d.ts
ADDED
@@ -0,0 +1,8 @@
+import { Buffer } from 'node:buffer';
+import { Transform, TransformCallback } from 'node:stream';
+import { Config } from './types';
+export declare class Explode {
+    #private;
+    constructor(config?: Config);
+    getHandler(): (this: Transform, chunk: Buffer, encoding: BufferEncoding, callback: TransformCallback) => void;
+}