stegdoc 4.0.0 → 5.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -21
- package/README.md +200 -214
- package/package.json +59 -59
- package/src/commands/decode.js +485 -343
- package/src/commands/encode.js +567 -449
- package/src/commands/info.js +118 -114
- package/src/commands/verify.js +207 -204
- package/src/index.js +89 -87
- package/src/lib/compression.js +177 -115
- package/src/lib/crypto.js +172 -172
- package/src/lib/decoy-generator.js +306 -306
- package/src/lib/docx-handler.js +587 -161
- package/src/lib/docx-templates.js +355 -0
- package/src/lib/file-handler.js +113 -113
- package/src/lib/file-utils.js +160 -150
- package/src/lib/interactive.js +190 -190
- package/src/lib/log-generator.js +764 -0
- package/src/lib/metadata.js +151 -122
- package/src/lib/streams.js +197 -197
- package/src/lib/utils.js +227 -227
- package/src/lib/xlsx-handler.js +597 -416
- package/src/lib/xml-utils.js +115 -115
package/src/lib/metadata.js
CHANGED
|
@@ -1,122 +1,151 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Create metadata object for encoding
|
|
3
|
-
* @param {object} options - Metadata options
|
|
4
|
-
* @param {string} options.originalFilename - Original filename
|
|
5
|
-
* @param {string} options.originalExtension - Original file extension
|
|
6
|
-
* @param {string} options.hash - Hash identifier for this file
|
|
7
|
-
* @param {number} options.partNumber - Part number (for split files)
|
|
8
|
-
* @param {number} options.totalParts - Total number of parts
|
|
9
|
-
* @param {number} options.originalSize - Original file size in bytes
|
|
10
|
-
* @param {string} options.format - Output format ('xlsx' or 'docx')
|
|
11
|
-
* @param {boolean} options.encrypted - Whether the content is encrypted
|
|
12
|
-
* @param {boolean} options.compressed - Whether the content is compressed
|
|
13
|
-
* @param {string} options.contentHash - SHA-256 hash of original file for integrity verification
|
|
14
|
-
* @returns {object} Metadata object
|
|
15
|
-
*/
|
|
16
|
-
function createMetadata({
|
|
17
|
-
originalFilename,
|
|
18
|
-
originalExtension,
|
|
19
|
-
hash,
|
|
20
|
-
partNumber = null,
|
|
21
|
-
totalParts = null,
|
|
22
|
-
originalSize = 0,
|
|
23
|
-
format = 'xlsx',
|
|
24
|
-
encrypted = true,
|
|
25
|
-
compressed = false,
|
|
26
|
-
contentHash = null,
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
}
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
1
|
+
/**
|
|
2
|
+
* Create metadata object for encoding
|
|
3
|
+
* @param {object} options - Metadata options
|
|
4
|
+
* @param {string} options.originalFilename - Original filename
|
|
5
|
+
* @param {string} options.originalExtension - Original file extension
|
|
6
|
+
* @param {string} options.hash - Hash identifier for this file
|
|
7
|
+
* @param {number} options.partNumber - Part number (for split files)
|
|
8
|
+
* @param {number} options.totalParts - Total number of parts
|
|
9
|
+
* @param {number} options.originalSize - Original file size in bytes
|
|
10
|
+
* @param {string} options.format - Output format ('xlsx' or 'docx')
|
|
11
|
+
* @param {boolean} options.encrypted - Whether the content is encrypted
|
|
12
|
+
* @param {boolean} options.compressed - Whether the content is compressed
|
|
13
|
+
* @param {string} options.contentHash - SHA-256 hash of original file for integrity verification
|
|
14
|
+
* @returns {object} Metadata object
|
|
15
|
+
*/
|
|
16
|
+
function createMetadata({
|
|
17
|
+
originalFilename,
|
|
18
|
+
originalExtension,
|
|
19
|
+
hash,
|
|
20
|
+
partNumber = null,
|
|
21
|
+
totalParts = null,
|
|
22
|
+
originalSize = 0,
|
|
23
|
+
format = 'xlsx',
|
|
24
|
+
encrypted = true,
|
|
25
|
+
compressed = false,
|
|
26
|
+
contentHash = null,
|
|
27
|
+
// v5 fields
|
|
28
|
+
stegoMethod = null,
|
|
29
|
+
compressionAlgo = null,
|
|
30
|
+
payloadSize = null,
|
|
31
|
+
dataLineCount = null,
|
|
32
|
+
headerLineCount = null,
|
|
33
|
+
}) {
|
|
34
|
+
const isV5 = stegoMethod === 'log-embed';
|
|
35
|
+
|
|
36
|
+
const meta = {
|
|
37
|
+
originalFilename,
|
|
38
|
+
originalExtension,
|
|
39
|
+
hash,
|
|
40
|
+
partNumber,
|
|
41
|
+
totalParts,
|
|
42
|
+
originalSize,
|
|
43
|
+
format,
|
|
44
|
+
encrypted,
|
|
45
|
+
compressed,
|
|
46
|
+
contentHash,
|
|
47
|
+
pipelineOrder: isV5 ? 'brotli-encrypt-logEmbed' : 'compress-encrypt-base64',
|
|
48
|
+
encodingDate: new Date().toISOString(),
|
|
49
|
+
version: isV5 ? '5.0.0' : '4.0.0',
|
|
50
|
+
tool: 'stegdoc',
|
|
51
|
+
};
|
|
52
|
+
|
|
53
|
+
// v5-specific fields
|
|
54
|
+
if (isV5) {
|
|
55
|
+
meta.stegoMethod = 'log-embed';
|
|
56
|
+
meta.compressionAlgo = compressionAlgo || 'brotli';
|
|
57
|
+
if (payloadSize !== null) meta.payloadSize = payloadSize;
|
|
58
|
+
if (dataLineCount !== null) meta.dataLineCount = dataLineCount;
|
|
59
|
+
if (headerLineCount !== null) meta.headerLineCount = headerLineCount;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
return meta;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
 * Serialize a metadata object to its JSON string form for storage in DOCX.
 * @param {object} metadata - Metadata object
 * @returns {string} JSON string
 */
function serializeMetadata(metadata) {
  const json = JSON.stringify(metadata);
  return json;
}
|
|
73
|
+
|
|
74
|
+
/**
 * Parse a metadata object back out of its JSON string form.
 * @param {string} metadataStr - JSON string
 * @returns {object} Metadata object
 * @throws {Error} When the string is not valid JSON
 */
function parseMetadata(metadataStr) {
  let parsed;
  try {
    parsed = JSON.parse(metadataStr);
  } catch (error) {
    // Re-wrap so callers get a message that identifies the failing stage.
    throw new Error(`Failed to parse metadata: ${error.message}`);
  }
  return parsed;
}
|
|
86
|
+
|
|
87
|
+
/**
 * Validate a metadata object recovered from a document.
 *
 * Checks that required identity fields are present and truthy, that the
 * tool identifier matches one this package writes, and that multi-part
 * bookkeeping (partNumber vs totalParts) is internally consistent.
 *
 * @param {object} metadata - Metadata to validate
 * @returns {boolean} True if valid
 * @throws {Error} If metadata is invalid
 */
function validateMetadata(metadata) {
  const required = ['originalFilename', 'originalExtension', 'hash', 'tool'];

  for (const field of required) {
    if (!metadata[field]) {
      throw new Error(`Missing required metadata field: ${field}`);
    }
  }

  // 'whitener' is also accepted — presumably a legacy tool name; confirm
  // against older payloads before removing.
  if (metadata.tool !== 'stegdoc' && metadata.tool !== 'whitener') {
    throw new Error('Invalid tool identifier in metadata');
  }

  // If it's a multi-part file, validate part info. Use loose null checks
  // (== null) so a missing (undefined) partNumber is rejected the same as
  // an explicit null: the previous strict `=== null` check let metadata
  // with totalParts > 1 and no partNumber at all pass validation, because
  // every range comparison against undefined is false.
  if (metadata.totalParts != null && metadata.totalParts > 1) {
    if (metadata.partNumber == null || metadata.partNumber < 1 || metadata.partNumber > metadata.totalParts) {
      throw new Error('Invalid part number in metadata');
    }
  }

  return true;
}
|
|
115
|
+
|
|
116
|
+
/**
 * Check if metadata describes a file that was split into multiple parts.
 * @param {object} metadata - Metadata object
 * @returns {boolean} True if multi-part
 */
function isMultiPart(metadata) {
  const { totalParts } = metadata;
  return totalParts !== null && totalParts > 1;
}
|
|
124
|
+
|
|
125
|
+
/**
 * Check if metadata indicates the v4 streaming format.
 * @param {object} metadata - Metadata object
 * @returns {boolean} True if streaming format (v4+)
 */
function isStreamingFormat(metadata) {
  const V4_PIPELINE = 'compress-encrypt-base64';
  return metadata.pipelineOrder === V4_PIPELINE;
}
|
|
133
|
+
|
|
134
|
+
/**
 * Check if metadata indicates the v5 log-embed format.
 * Accepts either the explicit stegoMethod marker or the v5 pipeline name.
 * @param {object} metadata - Metadata object
 * @returns {boolean} True if log-embed format (v5+)
 */
function isLogEmbedFormat(metadata) {
  if (metadata.stegoMethod === 'log-embed') {
    return true;
  }
  return metadata.pipelineOrder === 'brotli-encrypt-logEmbed';
}
|
|
142
|
+
|
|
143
|
+
module.exports = {
|
|
144
|
+
createMetadata,
|
|
145
|
+
serializeMetadata,
|
|
146
|
+
parseMetadata,
|
|
147
|
+
validateMetadata,
|
|
148
|
+
isMultiPart,
|
|
149
|
+
isStreamingFormat,
|
|
150
|
+
isLogEmbedFormat,
|
|
151
|
+
};
|
package/src/lib/streams.js
CHANGED
|
@@ -1,197 +1,197 @@
|
|
|
1
|
-
const { Transform, Writable } = require('stream');
|
|
2
|
-
const crypto = require('crypto');
|
|
3
|
-
|
|
4
|
-
/**
|
|
5
|
-
* Transform stream that converts binary input to base64 text output.
|
|
6
|
-
* Buffers incomplete 3-byte groups across chunk boundaries.
|
|
7
|
-
*/
|
|
8
|
-
class Base64EncodeTransform extends Transform {
|
|
9
|
-
constructor() {
|
|
10
|
-
super();
|
|
11
|
-
this._remainder = Buffer.alloc(0);
|
|
12
|
-
}
|
|
13
|
-
|
|
14
|
-
_transform(chunk, encoding, callback) {
|
|
15
|
-
const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding);
|
|
16
|
-
const combined = this._remainder.length > 0 ? Buffer.concat([this._remainder, buf]) : buf;
|
|
17
|
-
const usable = combined.length - (combined.length % 3);
|
|
18
|
-
if (usable > 0) {
|
|
19
|
-
this.push(combined.slice(0, usable).toString('base64'));
|
|
20
|
-
}
|
|
21
|
-
this._remainder = usable < combined.length ? combined.slice(usable) : Buffer.alloc(0);
|
|
22
|
-
callback();
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
_flush(callback) {
|
|
26
|
-
if (this._remainder.length > 0) {
|
|
27
|
-
this.push(this._remainder.toString('base64'));
|
|
28
|
-
this._remainder = Buffer.alloc(0);
|
|
29
|
-
}
|
|
30
|
-
callback();
|
|
31
|
-
}
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
/**
|
|
35
|
-
* Transform stream that converts base64 text input to binary output.
|
|
36
|
-
* Buffers incomplete 4-char groups across chunk boundaries.
|
|
37
|
-
*/
|
|
38
|
-
class Base64DecodeTransform extends Transform {
|
|
39
|
-
constructor() {
|
|
40
|
-
super();
|
|
41
|
-
this._remainder = '';
|
|
42
|
-
}
|
|
43
|
-
|
|
44
|
-
_transform(chunk, encoding, callback) {
|
|
45
|
-
const str = this._remainder + (Buffer.isBuffer(chunk) ? chunk.toString() : chunk);
|
|
46
|
-
const usable = str.length - (str.length % 4);
|
|
47
|
-
if (usable > 0) {
|
|
48
|
-
this.push(Buffer.from(str.slice(0, usable), 'base64'));
|
|
49
|
-
}
|
|
50
|
-
this._remainder = usable < str.length ? str.slice(usable) : '';
|
|
51
|
-
callback();
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
_flush(callback) {
|
|
55
|
-
if (this._remainder.length > 0) {
|
|
56
|
-
this.push(Buffer.from(this._remainder, 'base64'));
|
|
57
|
-
this._remainder = '';
|
|
58
|
-
}
|
|
59
|
-
callback();
|
|
60
|
-
}
|
|
61
|
-
}
|
|
62
|
-
|
|
63
|
-
/**
|
|
64
|
-
* Transform stream that passes data through unchanged while computing SHA-256 hash.
|
|
65
|
-
* Access the hex hash via .digest after the stream has ended.
|
|
66
|
-
*/
|
|
67
|
-
class HashPassthrough extends Transform {
|
|
68
|
-
constructor() {
|
|
69
|
-
super();
|
|
70
|
-
this._hash = crypto.createHash('sha256');
|
|
71
|
-
this._finalized = false;
|
|
72
|
-
}
|
|
73
|
-
|
|
74
|
-
_transform(chunk, encoding, callback) {
|
|
75
|
-
this._hash.update(chunk);
|
|
76
|
-
this.push(chunk);
|
|
77
|
-
callback();
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
_flush(callback) {
|
|
81
|
-
this._finalized = true;
|
|
82
|
-
callback();
|
|
83
|
-
}
|
|
84
|
-
|
|
85
|
-
get digest() {
|
|
86
|
-
if (!this._finalized) {
|
|
87
|
-
throw new Error('Cannot read digest before stream has ended');
|
|
88
|
-
}
|
|
89
|
-
return this._hash.digest('hex');
|
|
90
|
-
}
|
|
91
|
-
}
|
|
92
|
-
|
|
93
|
-
/**
|
|
94
|
-
* Writable stream that collects string output up to maxBytes.
|
|
95
|
-
* Calls an async onChunkReady callback when a chunk is full, applying
|
|
96
|
-
* backpressure to pause upstream until the callback resolves.
|
|
97
|
-
*/
|
|
98
|
-
class ChunkCollector extends Writable {
|
|
99
|
-
constructor(maxBytes, onChunkReady) {
|
|
100
|
-
super({ decodeStrings: false });
|
|
101
|
-
this._maxBytes = maxBytes;
|
|
102
|
-
this._buffer = '';
|
|
103
|
-
this._chunkIndex = 0;
|
|
104
|
-
this._onChunkReady = onChunkReady;
|
|
105
|
-
}
|
|
106
|
-
|
|
107
|
-
async _write(chunk, encoding, callback) {
|
|
108
|
-
try {
|
|
109
|
-
this._buffer += typeof chunk === 'string' ? chunk : chunk.toString();
|
|
110
|
-
while (this._buffer.length >= this._maxBytes) {
|
|
111
|
-
const piece = this._buffer.slice(0, this._maxBytes);
|
|
112
|
-
this._buffer = this._buffer.slice(this._maxBytes);
|
|
113
|
-
await this._onChunkReady(piece, this._chunkIndex++);
|
|
114
|
-
}
|
|
115
|
-
callback();
|
|
116
|
-
} catch (err) {
|
|
117
|
-
callback(err);
|
|
118
|
-
}
|
|
119
|
-
}
|
|
120
|
-
|
|
121
|
-
async _final(callback) {
|
|
122
|
-
try {
|
|
123
|
-
if (this._buffer.length > 0) {
|
|
124
|
-
await this._onChunkReady(this._buffer, this._chunkIndex++);
|
|
125
|
-
this._buffer = '';
|
|
126
|
-
}
|
|
127
|
-
callback();
|
|
128
|
-
} catch (err) {
|
|
129
|
-
callback(err);
|
|
130
|
-
}
|
|
131
|
-
}
|
|
132
|
-
|
|
133
|
-
get totalChunks() {
|
|
134
|
-
return this._chunkIndex;
|
|
135
|
-
}
|
|
136
|
-
}
|
|
137
|
-
|
|
138
|
-
/**
|
|
139
|
-
* Writable stream that collects binary Buffer output up to maxBytes.
|
|
140
|
-
* Calls an async onChunkReady callback with a Buffer when full.
|
|
141
|
-
*/
|
|
142
|
-
class BinaryChunkCollector extends Writable {
|
|
143
|
-
constructor(maxBytes, onChunkReady) {
|
|
144
|
-
super();
|
|
145
|
-
this._maxBytes = maxBytes;
|
|
146
|
-
this._buffers = [];
|
|
147
|
-
this._currentSize = 0;
|
|
148
|
-
this._chunkIndex = 0;
|
|
149
|
-
this._onChunkReady = onChunkReady;
|
|
150
|
-
}
|
|
151
|
-
|
|
152
|
-
async _write(chunk, encoding, callback) {
|
|
153
|
-
try {
|
|
154
|
-
const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding);
|
|
155
|
-
this._buffers.push(buf);
|
|
156
|
-
this._currentSize += buf.length;
|
|
157
|
-
|
|
158
|
-
while (this._currentSize >= this._maxBytes) {
|
|
159
|
-
const combined = Buffer.concat(this._buffers);
|
|
160
|
-
const piece = combined.slice(0, this._maxBytes);
|
|
161
|
-
const leftover = combined.slice(this._maxBytes);
|
|
162
|
-
this._buffers = leftover.length > 0 ? [leftover] : [];
|
|
163
|
-
this._currentSize = leftover.length;
|
|
164
|
-
await this._onChunkReady(piece, this._chunkIndex++);
|
|
165
|
-
}
|
|
166
|
-
callback();
|
|
167
|
-
} catch (err) {
|
|
168
|
-
callback(err);
|
|
169
|
-
}
|
|
170
|
-
}
|
|
171
|
-
|
|
172
|
-
async _final(callback) {
|
|
173
|
-
try {
|
|
174
|
-
if (this._currentSize > 0) {
|
|
175
|
-
const combined = Buffer.concat(this._buffers);
|
|
176
|
-
await this._onChunkReady(combined, this._chunkIndex++);
|
|
177
|
-
this._buffers = [];
|
|
178
|
-
this._currentSize = 0;
|
|
179
|
-
}
|
|
180
|
-
callback();
|
|
181
|
-
} catch (err) {
|
|
182
|
-
callback(err);
|
|
183
|
-
}
|
|
184
|
-
}
|
|
185
|
-
|
|
186
|
-
get totalChunks() {
|
|
187
|
-
return this._chunkIndex;
|
|
188
|
-
}
|
|
189
|
-
}
|
|
190
|
-
|
|
191
|
-
module.exports = {
|
|
192
|
-
Base64EncodeTransform,
|
|
193
|
-
Base64DecodeTransform,
|
|
194
|
-
HashPassthrough,
|
|
195
|
-
ChunkCollector,
|
|
196
|
-
BinaryChunkCollector,
|
|
197
|
-
};
|
|
1
|
+
const { Transform, Writable } = require('stream');
|
|
2
|
+
const crypto = require('crypto');
|
|
3
|
+
|
|
4
|
+
/**
 * Transform stream: binary in, base64 text out.
 * Carries incomplete 3-byte groups across chunk boundaries so padding
 * characters only ever appear at the very end of the stream.
 */
class Base64EncodeTransform extends Transform {
  constructor() {
    super();
    this._carry = Buffer.alloc(0);
  }

  _transform(chunk, encoding, callback) {
    const incoming = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding);
    const pending = this._carry.length === 0 ? incoming : Buffer.concat([this._carry, incoming]);
    // Only encode whole 3-byte groups; hold the tail for the next chunk.
    const whole = pending.length - (pending.length % 3);
    if (whole > 0) {
      this.push(pending.slice(0, whole).toString('base64'));
    }
    this._carry = whole < pending.length ? pending.slice(whole) : Buffer.alloc(0);
    callback();
  }

  _flush(callback) {
    // Emit whatever is left (with padding) once the input ends.
    if (this._carry.length > 0) {
      this.push(this._carry.toString('base64'));
      this._carry = Buffer.alloc(0);
    }
    callback();
  }
}
|
|
33
|
+
|
|
34
|
+
/**
 * Transform stream: base64 text in, binary out.
 * Carries incomplete 4-character groups across chunk boundaries so each
 * decode operates on whole base64 quanta.
 */
class Base64DecodeTransform extends Transform {
  constructor() {
    super();
    this._pending = '';
  }

  _transform(chunk, encoding, callback) {
    const text = this._pending + (Buffer.isBuffer(chunk) ? chunk.toString() : chunk);
    // Only decode whole 4-char groups; hold the tail for the next chunk.
    const cut = text.length - (text.length % 4);
    if (cut > 0) {
      this.push(Buffer.from(text.slice(0, cut), 'base64'));
    }
    this._pending = cut < text.length ? text.slice(cut) : '';
    callback();
  }

  _flush(callback) {
    // Decode any trailing partial group once the input ends.
    if (this._pending.length > 0) {
      this.push(Buffer.from(this._pending, 'base64'));
      this._pending = '';
    }
    callback();
  }
}
|
|
62
|
+
|
|
63
|
+
/**
 * Transform stream that forwards data unchanged while accumulating a
 * SHA-256 over everything that passed through. Read the hex digest via
 * the .digest getter after the stream has ended.
 */
class HashPassthrough extends Transform {
  constructor() {
    super();
    this._sha = crypto.createHash('sha256');
    this._done = false;
  }

  _transform(chunk, encoding, callback) {
    this._sha.update(chunk);
    // Pass the chunk straight through via the callback's data argument.
    callback(null, chunk);
  }

  _flush(callback) {
    // Mark completion so .digest knows the hash covers the full stream.
    this._done = true;
    callback();
  }

  get digest() {
    if (!this._done) {
      throw new Error('Cannot read digest before stream has ended');
    }
    return this._sha.digest('hex');
  }
}
|
|
92
|
+
|
|
93
|
+
/**
 * Writable sink that accumulates string output and hands it to an async
 * onChunkReady(piece, index) callback in pieces of at most maxBytes.
 * Backpressure: _write does not complete until the callback resolves,
 * pausing the upstream producer.
 */
class ChunkCollector extends Writable {
  constructor(maxBytes, onChunkReady) {
    super({ decodeStrings: false });
    this._maxBytes = maxBytes;
    this._buffer = '';
    this._chunkIndex = 0;
    this._onChunkReady = onChunkReady;
  }

  async _write(chunk, encoding, callback) {
    try {
      this._buffer += typeof chunk === 'string' ? chunk : chunk.toString();
      // Drain every full-size piece; the index is claimed before the
      // callback runs so totalChunks is consistent during the await.
      while (this._buffer.length >= this._maxBytes) {
        const piece = this._buffer.slice(0, this._maxBytes);
        this._buffer = this._buffer.slice(this._maxBytes);
        const index = this._chunkIndex;
        this._chunkIndex = index + 1;
        await this._onChunkReady(piece, index);
      }
      callback();
    } catch (err) {
      callback(err);
    }
  }

  async _final(callback) {
    try {
      // Flush the undersized tail, if any, as the last chunk.
      if (this._buffer.length > 0) {
        const index = this._chunkIndex;
        this._chunkIndex = index + 1;
        await this._onChunkReady(this._buffer, index);
        this._buffer = '';
      }
      callback();
    } catch (err) {
      callback(err);
    }
  }

  get totalChunks() {
    return this._chunkIndex;
  }
}
|
|
137
|
+
|
|
138
|
+
/**
 * Writable sink that accumulates binary output and hands it to an async
 * onChunkReady(buffer, index) callback in Buffers of at most maxBytes.
 * Backpressure: _write does not complete until the callback resolves.
 */
class BinaryChunkCollector extends Writable {
  constructor(maxBytes, onChunkReady) {
    super();
    this._maxBytes = maxBytes;
    this._buffers = [];
    this._currentSize = 0;
    this._chunkIndex = 0;
    this._onChunkReady = onChunkReady;
  }

  async _write(chunk, encoding, callback) {
    try {
      const incoming = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding);
      this._buffers.push(incoming);
      this._currentSize += incoming.length;

      // Drain every full-size piece; pooled buffers are re-split so at
      // most one leftover buffer remains between iterations.
      while (this._currentSize >= this._maxBytes) {
        const pooled = Buffer.concat(this._buffers);
        const piece = pooled.slice(0, this._maxBytes);
        const leftover = pooled.slice(this._maxBytes);
        this._buffers = leftover.length > 0 ? [leftover] : [];
        this._currentSize = leftover.length;
        const index = this._chunkIndex;
        this._chunkIndex = index + 1;
        await this._onChunkReady(piece, index);
      }
      callback();
    } catch (err) {
      callback(err);
    }
  }

  async _final(callback) {
    try {
      // Flush the undersized tail, if any, as the last chunk.
      if (this._currentSize > 0) {
        const tail = Buffer.concat(this._buffers);
        const index = this._chunkIndex;
        this._chunkIndex = index + 1;
        await this._onChunkReady(tail, index);
        this._buffers = [];
        this._currentSize = 0;
      }
      callback();
    } catch (err) {
      callback(err);
    }
  }

  get totalChunks() {
    return this._chunkIndex;
  }
}
|
|
190
|
+
|
|
191
|
+
module.exports = {
|
|
192
|
+
Base64EncodeTransform,
|
|
193
|
+
Base64DecodeTransform,
|
|
194
|
+
HashPassthrough,
|
|
195
|
+
ChunkCollector,
|
|
196
|
+
BinaryChunkCollector,
|
|
197
|
+
};
|