@js-ak/excel-toolbox 1.3.0 → 1.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -56,20 +56,32 @@ const constants_js_1 = require("./constants.js");
56
56
  * @throws {Error} - If the writable stream emits an error.
57
57
  */
58
58
  async function createWithStream(fileKeys, destination, output) {
59
+ // Stores central directory records
59
60
  const centralDirectory = [];
61
+ // Tracks the current offset in the output stream
60
62
  let offset = 0;
61
63
  for (const filename of fileKeys.sort((a, b) => a.localeCompare(b))) {
64
+ // Prevent directory traversal
62
65
  if (filename.includes("..")) {
63
66
  throw new Error(`Invalid filename: ${filename}`);
64
67
  }
68
+ // Construct absolute path to the file
65
69
  const fullPath = path.join(destination, ...filename.split("/"));
70
+ // Convert filename to UTF-8 buffer
66
71
  const fileNameBuf = Buffer.from(filename, "utf8");
72
+ // Get modification time in DOS format
67
73
  const modTime = (0, index_js_1.dosTime)(new Date());
74
+ // Read file as stream
68
75
  const source = (0, node_fs_1.createReadStream)(fullPath);
76
+ // Create CRC32 transform stream
69
77
  const crc32 = (0, index_js_1.crc32Stream)();
78
+ // Create raw deflate stream (no zlib headers)
70
79
  const deflater = node_zlib_1.default.createDeflateRaw();
80
+ // Uncompressed size counter
71
81
  let uncompSize = 0;
82
+ // Compressed size counter
72
83
  let compSize = 0;
84
+ // Store compressed output data
73
85
  const compressedChunks = [];
74
86
  const sizeCounter = new node_stream_1.Transform({
75
87
  transform(chunk, _enc, cb) {
@@ -77,71 +89,83 @@ async function createWithStream(fileKeys, destination, output) {
77
89
  cb(null, chunk);
78
90
  },
79
91
  });
80
- const collectCompressed = new node_stream_1.Transform({
81
- transform(chunk, _enc, cb) {
82
- compressedChunks.push(chunk);
83
- compSize += chunk.length;
84
- cb(null, chunk);
85
- },
92
+ const collectCompressed = new node_stream_1.PassThrough();
93
+ collectCompressed.on("data", chunk => {
94
+ // Count compressed bytes
95
+ compSize += chunk.length;
96
+ // Save compressed chunk
97
+ compressedChunks.push(chunk);
86
98
  });
87
- await (0, promises_1.pipeline)(source, sizeCounter, crc32, deflater, collectCompressed, new node_stream_1.PassThrough());
99
+ // Run all transforms in pipeline: read -> count size -> CRC -> deflate -> collect compressed
100
+ await (0, promises_1.pipeline)(source, sizeCounter, crc32, deflater, collectCompressed);
101
+ // Get final CRC32 value
88
102
  const crc = crc32.digest();
103
+ // Concatenate all compressed chunks into a single buffer
89
104
  const compressed = Buffer.concat(compressedChunks);
105
+ // Create local file header followed by compressed content
90
106
  const localHeader = Buffer.concat([
91
- constants_js_1.LOCAL_FILE_HEADER_SIG,
92
- (0, index_js_1.toBytes)(20, 2),
93
- (0, index_js_1.toBytes)(0, 2),
94
- (0, index_js_1.toBytes)(8, 2),
95
- modTime,
96
- (0, index_js_1.toBytes)(crc, 4),
97
- (0, index_js_1.toBytes)(compSize, 4),
98
- (0, index_js_1.toBytes)(uncompSize, 4),
99
- (0, index_js_1.toBytes)(fileNameBuf.length, 2),
100
- (0, index_js_1.toBytes)(0, 2),
101
- fileNameBuf,
102
- compressed,
107
+ constants_js_1.LOCAL_FILE_HEADER_SIG, // Local file header signature
108
+ (0, index_js_1.toBytes)(20, 2), // Version needed to extract
109
+ (0, index_js_1.toBytes)(0, 2), // General purpose bit flag
110
+ (0, index_js_1.toBytes)(8, 2), // Compression method (deflate)
111
+ modTime, // File modification time and date
112
+ (0, index_js_1.toBytes)(crc, 4), // CRC-32 checksum
113
+ (0, index_js_1.toBytes)(compSize, 4), // Compressed size
114
+ (0, index_js_1.toBytes)(uncompSize, 4), // Uncompressed size
115
+ (0, index_js_1.toBytes)(fileNameBuf.length, 2), // Filename length
116
+ (0, index_js_1.toBytes)(0, 2), // Extra field length
117
+ fileNameBuf, // Filename
118
+ compressed, // Compressed file data
103
119
  ]);
120
+ // Write local file header and data to output
104
121
  await new Promise((resolve, reject) => {
105
122
  output.write(localHeader, err => err ? reject(err) : resolve());
106
123
  });
124
+ // Create central directory entry for this file
107
125
  const centralEntry = Buffer.concat([
108
- constants_js_1.CENTRAL_DIR_HEADER_SIG,
109
- (0, index_js_1.toBytes)(20, 2),
110
- (0, index_js_1.toBytes)(20, 2),
111
- (0, index_js_1.toBytes)(0, 2),
112
- (0, index_js_1.toBytes)(8, 2),
113
- modTime,
114
- (0, index_js_1.toBytes)(crc, 4),
115
- (0, index_js_1.toBytes)(compSize, 4),
116
- (0, index_js_1.toBytes)(uncompSize, 4),
117
- (0, index_js_1.toBytes)(fileNameBuf.length, 2),
118
- (0, index_js_1.toBytes)(0, 2),
119
- (0, index_js_1.toBytes)(0, 2),
120
- (0, index_js_1.toBytes)(0, 2),
121
- (0, index_js_1.toBytes)(0, 2),
122
- (0, index_js_1.toBytes)(0, 4),
123
- (0, index_js_1.toBytes)(offset, 4),
124
- fileNameBuf,
126
+ constants_js_1.CENTRAL_DIR_HEADER_SIG, // Central directory file header signature
127
+ (0, index_js_1.toBytes)(20, 2), // Version made by
128
+ (0, index_js_1.toBytes)(20, 2), // Version needed to extract
129
+ (0, index_js_1.toBytes)(0, 2), // General purpose bit flag
130
+ (0, index_js_1.toBytes)(8, 2), // Compression method
131
+ modTime, // File modification time and date
132
+ (0, index_js_1.toBytes)(crc, 4), // CRC-32 checksum
133
+ (0, index_js_1.toBytes)(compSize, 4), // Compressed size
134
+ (0, index_js_1.toBytes)(uncompSize, 4), // Uncompressed size
135
+ (0, index_js_1.toBytes)(fileNameBuf.length, 2), // Filename length
136
+ (0, index_js_1.toBytes)(0, 2), // Extra field length
137
+ (0, index_js_1.toBytes)(0, 2), // File comment length
138
+ (0, index_js_1.toBytes)(0, 2), // Disk number start
139
+ (0, index_js_1.toBytes)(0, 2), // Internal file attributes
140
+ (0, index_js_1.toBytes)(0, 4), // External file attributes
141
+ (0, index_js_1.toBytes)(offset, 4), // Offset of local header
142
+ fileNameBuf, // Filename
125
143
  ]);
144
+ // Store for later
126
145
  centralDirectory.push(centralEntry);
146
+ // Update offset after writing this entry
127
147
  offset += localHeader.length;
128
148
  }
149
+ // Total size of central directory
129
150
  const centralDirSize = centralDirectory.reduce((sum, entry) => sum + entry.length, 0);
151
+ // Start of central directory
130
152
  const centralDirOffset = offset;
153
+ // Write each central directory entry to output
131
154
  for (const entry of centralDirectory) {
132
155
  await new Promise((resolve, reject) => {
133
156
  output.write(entry, err => err ? reject(err) : resolve());
134
157
  });
135
158
  }
159
+ // Create and write end of central directory record
136
160
  const endRecord = Buffer.concat([
137
- constants_js_1.END_OF_CENTRAL_DIR_SIG,
138
- (0, index_js_1.toBytes)(0, 2),
139
- (0, index_js_1.toBytes)(0, 2),
140
- (0, index_js_1.toBytes)(centralDirectory.length, 2),
141
- (0, index_js_1.toBytes)(centralDirectory.length, 2),
142
- (0, index_js_1.toBytes)(centralDirSize, 4),
143
- (0, index_js_1.toBytes)(centralDirOffset, 4),
144
- (0, index_js_1.toBytes)(0, 2),
161
+ constants_js_1.END_OF_CENTRAL_DIR_SIG, // End of central directory signature
162
+ (0, index_js_1.toBytes)(0, 2), // Number of this disk
163
+ (0, index_js_1.toBytes)(0, 2), // Disk with start of central directory
164
+ (0, index_js_1.toBytes)(centralDirectory.length, 2), // Total entries on this disk
165
+ (0, index_js_1.toBytes)(centralDirectory.length, 2), // Total entries overall
166
+ (0, index_js_1.toBytes)(centralDirSize, 4), // Size of central directory
167
+ (0, index_js_1.toBytes)(centralDirOffset, 4), // Offset of start of central directory
168
+ (0, index_js_1.toBytes)(0, 2), // ZIP file comment length
145
169
  ]);
146
170
  await new Promise((resolve, reject) => {
147
171
  output.write(endRecord, err => err ? reject(err) : resolve());
@@ -24,6 +24,12 @@ function crc32(byte, crc = 0xffffffff) {
24
24
  function crc32Stream() {
25
25
  let crc = 0xffffffff;
26
26
  const transform = new node_stream_1.Transform({
27
+ final(callback) {
28
+ callback();
29
+ },
30
+ flush(callback) {
31
+ callback();
32
+ },
27
33
  transform(chunk, _encoding, callback) {
28
34
  for (let i = 0; i < chunk.length; i++) {
29
35
  crc = crc32(chunk[i], crc);
@@ -17,20 +17,32 @@ import { CENTRAL_DIR_HEADER_SIG, END_OF_CENTRAL_DIR_SIG, LOCAL_FILE_HEADER_SIG,
17
17
  * @throws {Error} - If the writable stream emits an error.
18
18
  */
19
19
  export async function createWithStream(fileKeys, destination, output) {
20
+ // Stores central directory records
20
21
  const centralDirectory = [];
22
+ // Tracks the current offset in the output stream
21
23
  let offset = 0;
22
24
  for (const filename of fileKeys.sort((a, b) => a.localeCompare(b))) {
25
+ // Prevent directory traversal
23
26
  if (filename.includes("..")) {
24
27
  throw new Error(`Invalid filename: ${filename}`);
25
28
  }
29
+ // Construct absolute path to the file
26
30
  const fullPath = path.join(destination, ...filename.split("/"));
31
+ // Convert filename to UTF-8 buffer
27
32
  const fileNameBuf = Buffer.from(filename, "utf8");
33
+ // Get modification time in DOS format
28
34
  const modTime = dosTime(new Date());
35
+ // Read file as stream
29
36
  const source = createReadStream(fullPath);
37
+ // Create CRC32 transform stream
30
38
  const crc32 = crc32Stream();
39
+ // Create raw deflate stream (no zlib headers)
31
40
  const deflater = zlib.createDeflateRaw();
41
+ // Uncompressed size counter
32
42
  let uncompSize = 0;
43
+ // Compressed size counter
33
44
  let compSize = 0;
45
+ // Store compressed output data
34
46
  const compressedChunks = [];
35
47
  const sizeCounter = new Transform({
36
48
  transform(chunk, _enc, cb) {
@@ -38,71 +50,83 @@ export async function createWithStream(fileKeys, destination, output) {
38
50
  cb(null, chunk);
39
51
  },
40
52
  });
41
- const collectCompressed = new Transform({
42
- transform(chunk, _enc, cb) {
43
- compressedChunks.push(chunk);
44
- compSize += chunk.length;
45
- cb(null, chunk);
46
- },
53
+ const collectCompressed = new PassThrough();
54
+ collectCompressed.on("data", chunk => {
55
+ // Count compressed bytes
56
+ compSize += chunk.length;
57
+ // Save compressed chunk
58
+ compressedChunks.push(chunk);
47
59
  });
48
- await pipeline(source, sizeCounter, crc32, deflater, collectCompressed, new PassThrough());
60
+ // Run all transforms in pipeline: read -> count size -> CRC -> deflate -> collect compressed
61
+ await pipeline(source, sizeCounter, crc32, deflater, collectCompressed);
62
+ // Get final CRC32 value
49
63
  const crc = crc32.digest();
64
+ // Concatenate all compressed chunks into a single buffer
50
65
  const compressed = Buffer.concat(compressedChunks);
66
+ // Create local file header followed by compressed content
51
67
  const localHeader = Buffer.concat([
52
- LOCAL_FILE_HEADER_SIG,
53
- toBytes(20, 2),
54
- toBytes(0, 2),
55
- toBytes(8, 2),
56
- modTime,
57
- toBytes(crc, 4),
58
- toBytes(compSize, 4),
59
- toBytes(uncompSize, 4),
60
- toBytes(fileNameBuf.length, 2),
61
- toBytes(0, 2),
62
- fileNameBuf,
63
- compressed,
68
+ LOCAL_FILE_HEADER_SIG, // Local file header signature
69
+ toBytes(20, 2), // Version needed to extract
70
+ toBytes(0, 2), // General purpose bit flag
71
+ toBytes(8, 2), // Compression method (deflate)
72
+ modTime, // File modification time and date
73
+ toBytes(crc, 4), // CRC-32 checksum
74
+ toBytes(compSize, 4), // Compressed size
75
+ toBytes(uncompSize, 4), // Uncompressed size
76
+ toBytes(fileNameBuf.length, 2), // Filename length
77
+ toBytes(0, 2), // Extra field length
78
+ fileNameBuf, // Filename
79
+ compressed, // Compressed file data
64
80
  ]);
81
+ // Write local file header and data to output
65
82
  await new Promise((resolve, reject) => {
66
83
  output.write(localHeader, err => err ? reject(err) : resolve());
67
84
  });
85
+ // Create central directory entry for this file
68
86
  const centralEntry = Buffer.concat([
69
- CENTRAL_DIR_HEADER_SIG,
70
- toBytes(20, 2),
71
- toBytes(20, 2),
72
- toBytes(0, 2),
73
- toBytes(8, 2),
74
- modTime,
75
- toBytes(crc, 4),
76
- toBytes(compSize, 4),
77
- toBytes(uncompSize, 4),
78
- toBytes(fileNameBuf.length, 2),
79
- toBytes(0, 2),
80
- toBytes(0, 2),
81
- toBytes(0, 2),
82
- toBytes(0, 2),
83
- toBytes(0, 4),
84
- toBytes(offset, 4),
85
- fileNameBuf,
87
+ CENTRAL_DIR_HEADER_SIG, // Central directory file header signature
88
+ toBytes(20, 2), // Version made by
89
+ toBytes(20, 2), // Version needed to extract
90
+ toBytes(0, 2), // General purpose bit flag
91
+ toBytes(8, 2), // Compression method
92
+ modTime, // File modification time and date
93
+ toBytes(crc, 4), // CRC-32 checksum
94
+ toBytes(compSize, 4), // Compressed size
95
+ toBytes(uncompSize, 4), // Uncompressed size
96
+ toBytes(fileNameBuf.length, 2), // Filename length
97
+ toBytes(0, 2), // Extra field length
98
+ toBytes(0, 2), // File comment length
99
+ toBytes(0, 2), // Disk number start
100
+ toBytes(0, 2), // Internal file attributes
101
+ toBytes(0, 4), // External file attributes
102
+ toBytes(offset, 4), // Offset of local header
103
+ fileNameBuf, // Filename
86
104
  ]);
105
+ // Store for later
87
106
  centralDirectory.push(centralEntry);
107
+ // Update offset after writing this entry
88
108
  offset += localHeader.length;
89
109
  }
110
+ // Total size of central directory
90
111
  const centralDirSize = centralDirectory.reduce((sum, entry) => sum + entry.length, 0);
112
+ // Start of central directory
91
113
  const centralDirOffset = offset;
114
+ // Write each central directory entry to output
92
115
  for (const entry of centralDirectory) {
93
116
  await new Promise((resolve, reject) => {
94
117
  output.write(entry, err => err ? reject(err) : resolve());
95
118
  });
96
119
  }
120
+ // Create and write end of central directory record
97
121
  const endRecord = Buffer.concat([
98
- END_OF_CENTRAL_DIR_SIG,
99
- toBytes(0, 2),
100
- toBytes(0, 2),
101
- toBytes(centralDirectory.length, 2),
102
- toBytes(centralDirectory.length, 2),
103
- toBytes(centralDirSize, 4),
104
- toBytes(centralDirOffset, 4),
105
- toBytes(0, 2),
122
+ END_OF_CENTRAL_DIR_SIG, // End of central directory signature
123
+ toBytes(0, 2), // Number of this disk
124
+ toBytes(0, 2), // Disk with start of central directory
125
+ toBytes(centralDirectory.length, 2), // Total entries on this disk
126
+ toBytes(centralDirectory.length, 2), // Total entries overall
127
+ toBytes(centralDirSize, 4), // Size of central directory
128
+ toBytes(centralDirOffset, 4), // Offset of start of central directory
129
+ toBytes(0, 2), // ZIP file comment length
106
130
  ]);
107
131
  await new Promise((resolve, reject) => {
108
132
  output.write(endRecord, err => err ? reject(err) : resolve());
@@ -21,6 +21,12 @@ function crc32(byte, crc = 0xffffffff) {
21
21
  export function crc32Stream() {
22
22
  let crc = 0xffffffff;
23
23
  const transform = new Transform({
24
+ final(callback) {
25
+ callback();
26
+ },
27
+ flush(callback) {
28
+ callback();
29
+ },
24
30
  transform(chunk, _encoding, callback) {
25
31
  for (let i = 0; i < chunk.length; i++) {
26
32
  crc = crc32(chunk[i], crc);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@js-ak/excel-toolbox",
3
- "version": "1.3.0",
3
+ "version": "1.3.2",
4
4
  "description": "excel-toolbox",
5
5
  "publishConfig": {
6
6
  "access": "public",