@js-ak/excel-toolbox 1.3.1 → 1.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
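
The substantive change in 1.3.2, visible in both build outputs below, is that the compressed-chunk collector switches from a Transform stream to a PassThrough stream with a "data" listener, the commented-out debug logging is removed, and the ZIP header fields gain inline comments. As a minimal standalone sketch of that collector pattern (illustrative only, not the package's code; "input.bin" is a hypothetical file):

// Node ESM sketch (top-level await): collect raw-deflate output
// through a PassThrough, mirroring the compSize/compressedChunks
// bookkeeping that 1.3.2 introduces in createWithStream.
import { createReadStream } from "node:fs";
import { PassThrough } from "node:stream";
import { pipeline } from "node:stream/promises";
import zlib from "node:zlib";

const chunks = [];
let compressedSize = 0;

const collect = new PassThrough();
collect.on("data", chunk => {
    compressedSize += chunk.length; // count compressed bytes
    chunks.push(chunk);             // save compressed chunk
});

await pipeline(
    createReadStream("input.bin"), // hypothetical source file
    zlib.createDeflateRaw(),       // raw deflate, no zlib headers
    collect,
);

const compressed = Buffer.concat(chunks);
console.log(compressed.length === compressedSize); // true
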
@@ -56,20 +56,32 @@ const constants_js_1 = require("./constants.js");
   * @throws {Error} - If the writable stream emits an error.
   */
  async function createWithStream(fileKeys, destination, output) {
+     // Stores central directory records
      const centralDirectory = [];
+     // Tracks the current offset in the output stream
      let offset = 0;
      for (const filename of fileKeys.sort((a, b) => a.localeCompare(b))) {
+         // Prevent directory traversal
          if (filename.includes("..")) {
              throw new Error(`Invalid filename: ${filename}`);
          }
+         // Construct absolute path to the file
          const fullPath = path.join(destination, ...filename.split("/"));
+         // Convert filename to UTF-8 buffer
          const fileNameBuf = Buffer.from(filename, "utf8");
+         // Get modification time in DOS format
          const modTime = (0, index_js_1.dosTime)(new Date());
+         // Read file as stream
          const source = (0, node_fs_1.createReadStream)(fullPath);
+         // Create CRC32 transform stream
          const crc32 = (0, index_js_1.crc32Stream)();
+         // Create raw deflate stream (no zlib headers)
          const deflater = node_zlib_1.default.createDeflateRaw();
+         // Uncompressed size counter
          let uncompSize = 0;
+         // Compressed size counter
          let compSize = 0;
+         // Store compressed output data
          const compressedChunks = [];
          const sizeCounter = new node_stream_1.Transform({
              transform(chunk, _enc, cb) {
@@ -77,135 +89,83 @@ async function createWithStream(fileKeys, destination, output) {
                  cb(null, chunk);
              },
          });
-         const collectCompressed = new node_stream_1.Transform({
-             transform(chunk, _enc, cb) {
-                 compressedChunks.push(chunk);
-                 compSize += chunk.length;
-                 cb(null, chunk);
-             },
+         const collectCompressed = new node_stream_1.PassThrough();
+         collectCompressed.on("data", chunk => {
+             // Count compressed bytes
+             compSize += chunk.length;
+             // Save compressed chunk
+             compressedChunks.push(chunk);
          });
-         // deflater.on("data", (chunk) => { console.log("deflater data path:", fullPath, "length:", chunk.length); });
-         // deflater.on("finish", () => { console.log("deflater finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // deflater.on("error", (err) => { console.log("deflater error path:", fullPath, "error:", err); });
-         // deflater.on("close", () => { console.log("deflater closed path:", fullPath); });
-         // deflater.on("pipe", (src) => { console.log("deflater pipe path:", fullPath); });
-         // deflater.on("unpipe", (src) => { console.log("deflater unpipe path:", fullPath); });
-         // deflater.on("drain", () => { console.log("deflater drain path:", fullPath); });
-         // deflater.on("pause", () => { console.log("deflater pause path:", fullPath); });
-         // deflater.on("resume", () => { console.log("deflater resume path:", fullPath); });
-         // deflater.on("end", () => console.log("deflater ended, path:", fullPath));
-         // source.on("data", (chunk) => { console.log("source data path:", fullPath, "length:", chunk.length); });
-         // source.on("finish", () => { console.log("source finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // source.on("error", (err) => { console.log("source error path:", fullPath, "error:", err); });
-         // source.on("close", () => { console.log("source closed path:", fullPath); });
-         // source.on("pipe", (src) => { console.log("source pipe path:", fullPath); });
-         // source.on("unpipe", (src) => { console.log("source unpipe path:", fullPath); });
-         // source.on("drain", () => { console.log("source drain path:", fullPath); });
-         // source.on("pause", () => { console.log("source pause path:", fullPath); });
-         // source.on("resume", () => { console.log("source resume path:", fullPath); });
-         // source.on("end", () => console.log("source ended, path:", fullPath));
-         // sizeCounter.on("data", (chunk) => { console.log("sizeCounter data path:", fullPath, "length:", chunk.length); });
-         // sizeCounter.on("finish", () => { console.log("sizeCounter finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // sizeCounter.on("error", (err) => { console.log("sizeCounter error path:", fullPath, "error:", err); });
-         // sizeCounter.on("close", () => { console.log("sizeCounter closed path:", fullPath); });
-         // sizeCounter.on("pipe", (src) => { console.log("sizeCounter pipe path:", fullPath); });
-         // sizeCounter.on("unpipe", (src) => { console.log("sizeCounter unpipe path:", fullPath); });
-         // sizeCounter.on("drain", () => { console.log("sizeCounter drain path:", fullPath); });
-         // sizeCounter.on("pause", () => { console.log("sizeCounter pause path:", fullPath); });
-         // sizeCounter.on("resume", () => { console.log("sizeCounter resume path:", fullPath); });
-         // sizeCounter.on("end", () => console.log("sizeCounter ended, path:", fullPath));
-         // crc32.on("data", (chunk) => { console.log("crc32 data path:", fullPath, "length:", chunk.length); });
-         // crc32.on("finish", () => { console.log("crc32 finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // crc32.on("error", (err) => { console.log("crc32 error path:", fullPath, "error:", err); });
-         // crc32.on("close", () => { console.log("crc32 closed path:", fullPath); });
-         // crc32.on("pipe", (src) => { console.log("crc32 pipe path:", fullPath); });
-         // crc32.on("unpipe", (src) => { console.log("crc32 unpipe path:", fullPath); });
-         // crc32.on("drain", () => { console.log("crc32 drain path:", fullPath); });
-         // crc32.on("pause", () => { console.log("crc32 pause path:", fullPath); });
-         // crc32.on("resume", () => { console.log("crc32 resume path:", fullPath); });
-         // crc32.on("end", () => console.log("crc32 ended, path:", fullPath));
-         collectCompressed.on("data", ( /* chunk */) => { });
-         // collectCompressed.on("finish", () => { console.log("collectCompressed finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // collectCompressed.on("error", (err) => { console.log("collectCompressed error path:", fullPath, "error:", err); });
-         // collectCompressed.on("close", () => { console.log("collectCompressed closed path:", fullPath); });
-         // collectCompressed.on("pipe", (src) => { console.log("collectCompressed pipe path:", fullPath); });
-         // collectCompressed.on("unpipe", (src) => { console.log("collectCompressed unpipe path:", fullPath); });
-         // collectCompressed.on("drain", () => { console.log("collectCompressed drain path:", fullPath); });
-         // collectCompressed.on("pause", () => { console.log("collectCompressed pause path:", fullPath); });
-         // collectCompressed.on("resume", () => { console.log("collectCompressed resume path:", fullPath); });
-         // collectCompressed.on("end", () => console.log("collectCompressed ended, path:", fullPath));
-         // deflater.on("readable", () => {
-         //     console.log("deflater readable path:", fullPath);
-         // });
+         // Run all transforms in pipeline: read -> count size -> CRC -> deflate -> collect compressed
          await (0, promises_1.pipeline)(source, sizeCounter, crc32, deflater, collectCompressed);
-         // await new Promise<void>((resolve, reject) => {
-         //     source
-         //         .pipe(sizeCounter)
-         //         .pipe(crc32)
-         //         .pipe(deflater)
-         //         .pipe(collectCompressed)
-         //         .on("finish", resolve)
-         //         .on("error", reject);
-         //     source.on("error", reject);
-         //     deflater.on("error", reject);
-         // });
+         // Get final CRC32 value
          const crc = crc32.digest();
+         // Concatenate all compressed chunks into a single buffer
          const compressed = Buffer.concat(compressedChunks);
+         // Create local file header followed by compressed content
          const localHeader = Buffer.concat([
-             constants_js_1.LOCAL_FILE_HEADER_SIG,
-             (0, index_js_1.toBytes)(20, 2),
-             (0, index_js_1.toBytes)(0, 2),
-             (0, index_js_1.toBytes)(8, 2),
-             modTime,
-             (0, index_js_1.toBytes)(crc, 4),
-             (0, index_js_1.toBytes)(compSize, 4),
-             (0, index_js_1.toBytes)(uncompSize, 4),
-             (0, index_js_1.toBytes)(fileNameBuf.length, 2),
-             (0, index_js_1.toBytes)(0, 2),
-             fileNameBuf,
-             compressed,
+             constants_js_1.LOCAL_FILE_HEADER_SIG, // Local file header signature
+             (0, index_js_1.toBytes)(20, 2), // Version needed to extract
+             (0, index_js_1.toBytes)(0, 2), // General purpose bit flag
+             (0, index_js_1.toBytes)(8, 2), // Compression method (deflate)
+             modTime, // File modification time and date
+             (0, index_js_1.toBytes)(crc, 4), // CRC-32 checksum
+             (0, index_js_1.toBytes)(compSize, 4), // Compressed size
+             (0, index_js_1.toBytes)(uncompSize, 4), // Uncompressed size
+             (0, index_js_1.toBytes)(fileNameBuf.length, 2), // Filename length
+             (0, index_js_1.toBytes)(0, 2), // Extra field length
+             fileNameBuf, // Filename
+             compressed, // Compressed file data
          ]);
+         // Write local file header and data to output
          await new Promise((resolve, reject) => {
              output.write(localHeader, err => err ? reject(err) : resolve());
          });
+         // Create central directory entry for this file
          const centralEntry = Buffer.concat([
-             constants_js_1.CENTRAL_DIR_HEADER_SIG,
-             (0, index_js_1.toBytes)(20, 2),
-             (0, index_js_1.toBytes)(20, 2),
-             (0, index_js_1.toBytes)(0, 2),
-             (0, index_js_1.toBytes)(8, 2),
-             modTime,
-             (0, index_js_1.toBytes)(crc, 4),
-             (0, index_js_1.toBytes)(compSize, 4),
-             (0, index_js_1.toBytes)(uncompSize, 4),
-             (0, index_js_1.toBytes)(fileNameBuf.length, 2),
-             (0, index_js_1.toBytes)(0, 2),
-             (0, index_js_1.toBytes)(0, 2),
-             (0, index_js_1.toBytes)(0, 2),
-             (0, index_js_1.toBytes)(0, 2),
-             (0, index_js_1.toBytes)(0, 4),
-             (0, index_js_1.toBytes)(offset, 4),
-             fileNameBuf,
+             constants_js_1.CENTRAL_DIR_HEADER_SIG, // Central directory file header signature
+             (0, index_js_1.toBytes)(20, 2), // Version made by
+             (0, index_js_1.toBytes)(20, 2), // Version needed to extract
+             (0, index_js_1.toBytes)(0, 2), // General purpose bit flag
+             (0, index_js_1.toBytes)(8, 2), // Compression method
+             modTime, // File modification time and date
+             (0, index_js_1.toBytes)(crc, 4), // CRC-32 checksum
+             (0, index_js_1.toBytes)(compSize, 4), // Compressed size
+             (0, index_js_1.toBytes)(uncompSize, 4), // Uncompressed size
+             (0, index_js_1.toBytes)(fileNameBuf.length, 2), // Filename length
+             (0, index_js_1.toBytes)(0, 2), // Extra field length
+             (0, index_js_1.toBytes)(0, 2), // File comment length
+             (0, index_js_1.toBytes)(0, 2), // Disk number start
+             (0, index_js_1.toBytes)(0, 2), // Internal file attributes
+             (0, index_js_1.toBytes)(0, 4), // External file attributes
+             (0, index_js_1.toBytes)(offset, 4), // Offset of local header
+             fileNameBuf, // Filename
          ]);
+         // Store for later
          centralDirectory.push(centralEntry);
+         // Update offset after writing this entry
          offset += localHeader.length;
      }
+     // Total size of central directory
      const centralDirSize = centralDirectory.reduce((sum, entry) => sum + entry.length, 0);
+     // Start of central directory
      const centralDirOffset = offset;
+     // Write each central directory entry to output
      for (const entry of centralDirectory) {
          await new Promise((resolve, reject) => {
              output.write(entry, err => err ? reject(err) : resolve());
          });
      }
+     // Create and write end of central directory record
      const endRecord = Buffer.concat([
-         constants_js_1.END_OF_CENTRAL_DIR_SIG,
-         (0, index_js_1.toBytes)(0, 2),
-         (0, index_js_1.toBytes)(0, 2),
-         (0, index_js_1.toBytes)(centralDirectory.length, 2),
-         (0, index_js_1.toBytes)(centralDirectory.length, 2),
-         (0, index_js_1.toBytes)(centralDirSize, 4),
-         (0, index_js_1.toBytes)(centralDirOffset, 4),
-         (0, index_js_1.toBytes)(0, 2),
+         constants_js_1.END_OF_CENTRAL_DIR_SIG, // End of central directory signature
+         (0, index_js_1.toBytes)(0, 2), // Number of this disk
+         (0, index_js_1.toBytes)(0, 2), // Disk with start of central directory
+         (0, index_js_1.toBytes)(centralDirectory.length, 2), // Total entries on this disk
+         (0, index_js_1.toBytes)(centralDirectory.length, 2), // Total entries overall
+         (0, index_js_1.toBytes)(centralDirSize, 4), // Size of central directory
+         (0, index_js_1.toBytes)(centralDirOffset, 4), // Offset of start of central directory
+         (0, index_js_1.toBytes)(0, 2), // ZIP file comment length
      ]);
      await new Promise((resolve, reject) => {
          output.write(endRecord, err => err ? reject(err) : resolve());
@@ -1,5 +1,5 @@
  import * as path from "node:path";
- import { Transform } from "node:stream";
+ import { PassThrough, Transform } from "node:stream";
  import { createReadStream } from "node:fs";
  import { pipeline } from "node:stream/promises";
  import zlib from "node:zlib";
@@ -17,20 +17,32 @@ import { CENTRAL_DIR_HEADER_SIG, END_OF_CENTRAL_DIR_SIG, LOCAL_FILE_HEADER_SIG,
   * @throws {Error} - If the writable stream emits an error.
   */
  export async function createWithStream(fileKeys, destination, output) {
+     // Stores central directory records
      const centralDirectory = [];
+     // Tracks the current offset in the output stream
      let offset = 0;
      for (const filename of fileKeys.sort((a, b) => a.localeCompare(b))) {
+         // Prevent directory traversal
          if (filename.includes("..")) {
              throw new Error(`Invalid filename: ${filename}`);
          }
+         // Construct absolute path to the file
          const fullPath = path.join(destination, ...filename.split("/"));
+         // Convert filename to UTF-8 buffer
          const fileNameBuf = Buffer.from(filename, "utf8");
+         // Get modification time in DOS format
          const modTime = dosTime(new Date());
+         // Read file as stream
          const source = createReadStream(fullPath);
+         // Create CRC32 transform stream
          const crc32 = crc32Stream();
+         // Create raw deflate stream (no zlib headers)
          const deflater = zlib.createDeflateRaw();
+         // Uncompressed size counter
          let uncompSize = 0;
+         // Compressed size counter
          let compSize = 0;
+         // Store compressed output data
          const compressedChunks = [];
          const sizeCounter = new Transform({
              transform(chunk, _enc, cb) {
@@ -38,135 +50,83 @@ export async function createWithStream(fileKeys, destination, output) {
                  cb(null, chunk);
              },
          });
-         const collectCompressed = new Transform({
-             transform(chunk, _enc, cb) {
-                 compressedChunks.push(chunk);
-                 compSize += chunk.length;
-                 cb(null, chunk);
-             },
+         const collectCompressed = new PassThrough();
+         collectCompressed.on("data", chunk => {
+             // Count compressed bytes
+             compSize += chunk.length;
+             // Save compressed chunk
+             compressedChunks.push(chunk);
          });
-         // deflater.on("data", (chunk) => { console.log("deflater data path:", fullPath, "length:", chunk.length); });
-         // deflater.on("finish", () => { console.log("deflater finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // deflater.on("error", (err) => { console.log("deflater error path:", fullPath, "error:", err); });
-         // deflater.on("close", () => { console.log("deflater closed path:", fullPath); });
-         // deflater.on("pipe", (src) => { console.log("deflater pipe path:", fullPath); });
-         // deflater.on("unpipe", (src) => { console.log("deflater unpipe path:", fullPath); });
-         // deflater.on("drain", () => { console.log("deflater drain path:", fullPath); });
-         // deflater.on("pause", () => { console.log("deflater pause path:", fullPath); });
-         // deflater.on("resume", () => { console.log("deflater resume path:", fullPath); });
-         // deflater.on("end", () => console.log("deflater ended, path:", fullPath));
-         // source.on("data", (chunk) => { console.log("source data path:", fullPath, "length:", chunk.length); });
-         // source.on("finish", () => { console.log("source finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // source.on("error", (err) => { console.log("source error path:", fullPath, "error:", err); });
-         // source.on("close", () => { console.log("source closed path:", fullPath); });
-         // source.on("pipe", (src) => { console.log("source pipe path:", fullPath); });
-         // source.on("unpipe", (src) => { console.log("source unpipe path:", fullPath); });
-         // source.on("drain", () => { console.log("source drain path:", fullPath); });
-         // source.on("pause", () => { console.log("source pause path:", fullPath); });
-         // source.on("resume", () => { console.log("source resume path:", fullPath); });
-         // source.on("end", () => console.log("source ended, path:", fullPath));
-         // sizeCounter.on("data", (chunk) => { console.log("sizeCounter data path:", fullPath, "length:", chunk.length); });
-         // sizeCounter.on("finish", () => { console.log("sizeCounter finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // sizeCounter.on("error", (err) => { console.log("sizeCounter error path:", fullPath, "error:", err); });
-         // sizeCounter.on("close", () => { console.log("sizeCounter closed path:", fullPath); });
-         // sizeCounter.on("pipe", (src) => { console.log("sizeCounter pipe path:", fullPath); });
-         // sizeCounter.on("unpipe", (src) => { console.log("sizeCounter unpipe path:", fullPath); });
-         // sizeCounter.on("drain", () => { console.log("sizeCounter drain path:", fullPath); });
-         // sizeCounter.on("pause", () => { console.log("sizeCounter pause path:", fullPath); });
-         // sizeCounter.on("resume", () => { console.log("sizeCounter resume path:", fullPath); });
-         // sizeCounter.on("end", () => console.log("sizeCounter ended, path:", fullPath));
-         // crc32.on("data", (chunk) => { console.log("crc32 data path:", fullPath, "length:", chunk.length); });
-         // crc32.on("finish", () => { console.log("crc32 finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // crc32.on("error", (err) => { console.log("crc32 error path:", fullPath, "error:", err); });
-         // crc32.on("close", () => { console.log("crc32 closed path:", fullPath); });
-         // crc32.on("pipe", (src) => { console.log("crc32 pipe path:", fullPath); });
-         // crc32.on("unpipe", (src) => { console.log("crc32 unpipe path:", fullPath); });
-         // crc32.on("drain", () => { console.log("crc32 drain path:", fullPath); });
-         // crc32.on("pause", () => { console.log("crc32 pause path:", fullPath); });
-         // crc32.on("resume", () => { console.log("crc32 resume path:", fullPath); });
-         // crc32.on("end", () => console.log("crc32 ended, path:", fullPath));
-         collectCompressed.on("data", ( /* chunk */) => { });
-         // collectCompressed.on("finish", () => { console.log("collectCompressed finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-         // collectCompressed.on("error", (err) => { console.log("collectCompressed error path:", fullPath, "error:", err); });
-         // collectCompressed.on("close", () => { console.log("collectCompressed closed path:", fullPath); });
-         // collectCompressed.on("pipe", (src) => { console.log("collectCompressed pipe path:", fullPath); });
-         // collectCompressed.on("unpipe", (src) => { console.log("collectCompressed unpipe path:", fullPath); });
-         // collectCompressed.on("drain", () => { console.log("collectCompressed drain path:", fullPath); });
-         // collectCompressed.on("pause", () => { console.log("collectCompressed pause path:", fullPath); });
-         // collectCompressed.on("resume", () => { console.log("collectCompressed resume path:", fullPath); });
-         // collectCompressed.on("end", () => console.log("collectCompressed ended, path:", fullPath));
-         // deflater.on("readable", () => {
-         //     console.log("deflater readable path:", fullPath);
-         // });
+         // Run all transforms in pipeline: read -> count size -> CRC -> deflate -> collect compressed
          await pipeline(source, sizeCounter, crc32, deflater, collectCompressed);
-         // await new Promise<void>((resolve, reject) => {
-         //     source
-         //         .pipe(sizeCounter)
-         //         .pipe(crc32)
-         //         .pipe(deflater)
-         //         .pipe(collectCompressed)
-         //         .on("finish", resolve)
-         //         .on("error", reject);
-         //     source.on("error", reject);
-         //     deflater.on("error", reject);
-         // });
+         // Get final CRC32 value
          const crc = crc32.digest();
+         // Concatenate all compressed chunks into a single buffer
          const compressed = Buffer.concat(compressedChunks);
+         // Create local file header followed by compressed content
          const localHeader = Buffer.concat([
-             LOCAL_FILE_HEADER_SIG,
-             toBytes(20, 2),
-             toBytes(0, 2),
-             toBytes(8, 2),
-             modTime,
-             toBytes(crc, 4),
-             toBytes(compSize, 4),
-             toBytes(uncompSize, 4),
-             toBytes(fileNameBuf.length, 2),
-             toBytes(0, 2),
-             fileNameBuf,
-             compressed,
+             LOCAL_FILE_HEADER_SIG, // Local file header signature
+             toBytes(20, 2), // Version needed to extract
+             toBytes(0, 2), // General purpose bit flag
+             toBytes(8, 2), // Compression method (deflate)
+             modTime, // File modification time and date
+             toBytes(crc, 4), // CRC-32 checksum
+             toBytes(compSize, 4), // Compressed size
+             toBytes(uncompSize, 4), // Uncompressed size
+             toBytes(fileNameBuf.length, 2), // Filename length
+             toBytes(0, 2), // Extra field length
+             fileNameBuf, // Filename
+             compressed, // Compressed file data
          ]);
+         // Write local file header and data to output
          await new Promise((resolve, reject) => {
              output.write(localHeader, err => err ? reject(err) : resolve());
          });
+         // Create central directory entry for this file
          const centralEntry = Buffer.concat([
-             CENTRAL_DIR_HEADER_SIG,
-             toBytes(20, 2),
-             toBytes(20, 2),
-             toBytes(0, 2),
-             toBytes(8, 2),
-             modTime,
-             toBytes(crc, 4),
-             toBytes(compSize, 4),
-             toBytes(uncompSize, 4),
-             toBytes(fileNameBuf.length, 2),
-             toBytes(0, 2),
-             toBytes(0, 2),
-             toBytes(0, 2),
-             toBytes(0, 2),
-             toBytes(0, 4),
-             toBytes(offset, 4),
-             fileNameBuf,
+             CENTRAL_DIR_HEADER_SIG, // Central directory file header signature
+             toBytes(20, 2), // Version made by
+             toBytes(20, 2), // Version needed to extract
+             toBytes(0, 2), // General purpose bit flag
+             toBytes(8, 2), // Compression method
+             modTime, // File modification time and date
+             toBytes(crc, 4), // CRC-32 checksum
+             toBytes(compSize, 4), // Compressed size
+             toBytes(uncompSize, 4), // Uncompressed size
+             toBytes(fileNameBuf.length, 2), // Filename length
+             toBytes(0, 2), // Extra field length
+             toBytes(0, 2), // File comment length
+             toBytes(0, 2), // Disk number start
+             toBytes(0, 2), // Internal file attributes
+             toBytes(0, 4), // External file attributes
+             toBytes(offset, 4), // Offset of local header
+             fileNameBuf, // Filename
          ]);
+         // Store for later
          centralDirectory.push(centralEntry);
+         // Update offset after writing this entry
          offset += localHeader.length;
      }
+     // Total size of central directory
      const centralDirSize = centralDirectory.reduce((sum, entry) => sum + entry.length, 0);
+     // Start of central directory
      const centralDirOffset = offset;
+     // Write each central directory entry to output
      for (const entry of centralDirectory) {
          await new Promise((resolve, reject) => {
              output.write(entry, err => err ? reject(err) : resolve());
          });
      }
+     // Create and write end of central directory record
      const endRecord = Buffer.concat([
-         END_OF_CENTRAL_DIR_SIG,
-         toBytes(0, 2),
-         toBytes(0, 2),
-         toBytes(centralDirectory.length, 2),
-         toBytes(centralDirectory.length, 2),
-         toBytes(centralDirSize, 4),
-         toBytes(centralDirOffset, 4),
-         toBytes(0, 2),
+         END_OF_CENTRAL_DIR_SIG, // End of central directory signature
+         toBytes(0, 2), // Number of this disk
+         toBytes(0, 2), // Disk with start of central directory
+         toBytes(centralDirectory.length, 2), // Total entries on this disk
+         toBytes(centralDirectory.length, 2), // Total entries overall
+         toBytes(centralDirSize, 4), // Size of central directory
+         toBytes(centralDirOffset, 4), // Offset of start of central directory
+         toBytes(0, 2), // ZIP file comment length
      ]);
      await new Promise((resolve, reject) => {
          output.write(endRecord, err => err ? reject(err) : resolve());
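
The toBytes(value, length) calls annotated above presumably emit fixed-width little-endian integers, since every multi-byte field in a ZIP header is little-endian; that contract is an assumption about the package's helper. A minimal sketch of such an encoder and of the single-disk end-of-central-directory record it would produce, mirroring the annotated field order in the diff:

// toBytesSketch and endOfCentralDirectory are hypothetical names
// written against the assumed toBytes(value, length) contract.
function toBytesSketch(value, length) {
    const buf = Buffer.alloc(length);
    buf.writeUIntLE(value, 0, length); // ZIP header fields are little-endian
    return buf;
}

const END_OF_CENTRAL_DIR_SIG = Buffer.from([0x50, 0x4b, 0x05, 0x06]); // "PK\x05\x06"

function endOfCentralDirectory(entryCount, dirSize, dirOffset) {
    return Buffer.concat([
        END_OF_CENTRAL_DIR_SIG,
        toBytesSketch(0, 2),          // number of this disk
        toBytesSketch(0, 2),          // disk with start of central directory
        toBytesSketch(entryCount, 2), // total entries on this disk
        toBytesSketch(entryCount, 2), // total entries overall
        toBytesSketch(dirSize, 4),    // size of central directory
        toBytesSketch(dirOffset, 4),  // offset of start of central directory
        toBytesSketch(0, 2),          // ZIP file comment length
    ]);
}
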
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@js-ak/excel-toolbox",
-   "version": "1.3.1",
+   "version": "1.3.2",
    "description": "excel-toolbox",
    "publishConfig": {
      "access": "public",