@js-ak/excel-toolbox 1.2.6 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. package/README.md +2 -2
  2. package/build/cjs/lib/index.js +1 -0
  3. package/build/cjs/lib/template/index.js +17 -0
  4. package/build/cjs/lib/template/template-fs.js +465 -0
  5. package/build/cjs/lib/template/utils/check-row.js +23 -0
  6. package/build/cjs/lib/template/utils/check-rows.js +19 -0
  7. package/build/cjs/lib/template/utils/check-start-row.js +18 -0
  8. package/build/cjs/lib/template/utils/column-index-to-letter.js +17 -0
  9. package/build/cjs/lib/template/utils/escape-xml.js +24 -0
  10. package/build/cjs/lib/template/utils/get-max-row-number.js +20 -0
  11. package/build/cjs/lib/template/utils/get-rows-above.js +23 -0
  12. package/build/cjs/lib/template/utils/get-rows-below.js +23 -0
  13. package/build/cjs/lib/template/utils/index.js +27 -0
  14. package/build/cjs/lib/template/utils/parse-rows.js +30 -0
  15. package/build/cjs/lib/template/utils/to-excel-column-object.js +29 -0
  16. package/build/cjs/lib/template/utils/write-rows-to-stream.js +41 -0
  17. package/build/cjs/lib/xml/build-merged-sheet.js +1 -1
  18. package/build/cjs/lib/zip/constants.js +16 -1
  19. package/build/cjs/lib/zip/create-sync.js +32 -32
  20. package/build/cjs/lib/zip/create-with-stream.js +150 -0
  21. package/build/cjs/lib/zip/create.js +32 -32
  22. package/build/cjs/lib/zip/index.js +1 -0
  23. package/build/cjs/lib/zip/read-sync.js +60 -12
  24. package/build/cjs/lib/zip/read.js +60 -12
  25. package/build/cjs/lib/zip/utils/crc-32-stream.js +36 -0
  26. package/build/cjs/lib/zip/utils/crc-32.js +43 -0
  27. package/build/cjs/lib/zip/utils/dos-time.js +50 -0
  28. package/build/cjs/lib/zip/utils/find-data-descriptor.js +29 -0
  29. package/build/cjs/lib/zip/utils/index.js +21 -0
  30. package/build/cjs/lib/zip/utils/to-bytes.js +37 -0
  31. package/build/esm/lib/index.js +1 -0
  32. package/build/esm/lib/template/index.js +1 -0
  33. package/build/esm/lib/template/template-fs.js +428 -0
  34. package/build/esm/lib/template/utils/check-row.js +20 -0
  35. package/build/esm/lib/template/utils/check-rows.js +16 -0
  36. package/build/esm/lib/template/utils/check-start-row.js +15 -0
  37. package/build/esm/lib/template/utils/column-index-to-letter.js +14 -0
  38. package/build/esm/lib/template/utils/escape-xml.js +21 -0
  39. package/build/esm/lib/template/utils/get-max-row-number.js +17 -0
  40. package/build/esm/lib/template/utils/get-rows-above.js +20 -0
  41. package/build/esm/lib/template/utils/get-rows-below.js +20 -0
  42. package/build/esm/lib/template/utils/index.js +11 -0
  43. package/build/esm/lib/template/utils/parse-rows.js +27 -0
  44. package/build/esm/lib/template/utils/to-excel-column-object.js +26 -0
  45. package/build/esm/lib/template/utils/write-rows-to-stream.js +38 -0
  46. package/build/esm/lib/xml/build-merged-sheet.js +1 -1
  47. package/build/esm/lib/zip/constants.js +15 -0
  48. package/build/esm/lib/zip/create-sync.js +1 -1
  49. package/build/esm/lib/zip/create-with-stream.js +111 -0
  50. package/build/esm/lib/zip/create.js +1 -1
  51. package/build/esm/lib/zip/index.js +1 -0
  52. package/build/esm/lib/zip/read-sync.js +27 -12
  53. package/build/esm/lib/zip/read.js +27 -12
  54. package/build/esm/lib/zip/utils/crc-32-stream.js +33 -0
  55. package/build/esm/lib/zip/utils/crc-32.js +40 -0
  56. package/build/esm/lib/zip/utils/dos-time.js +47 -0
  57. package/build/esm/lib/zip/utils/find-data-descriptor.js +26 -0
  58. package/build/esm/lib/zip/utils/index.js +5 -0
  59. package/build/esm/lib/zip/utils/to-bytes.js +34 -0
  60. package/build/types/lib/index.d.ts +1 -0
  61. package/build/types/lib/template/index.d.ts +1 -0
  62. package/build/types/lib/template/template-fs.d.ts +122 -0
  63. package/build/types/lib/template/utils/check-row.d.ts +14 -0
  64. package/build/types/lib/template/utils/check-rows.d.ts +11 -0
  65. package/build/types/lib/template/utils/check-start-row.d.ts +8 -0
  66. package/build/types/lib/template/utils/column-index-to-letter.d.ts +7 -0
  67. package/build/types/lib/template/utils/escape-xml.d.ts +14 -0
  68. package/build/types/lib/template/utils/get-max-row-number.d.ts +7 -0
  69. package/build/types/lib/template/utils/get-rows-above.d.ts +12 -0
  70. package/build/types/lib/template/utils/get-rows-below.d.ts +12 -0
  71. package/build/types/lib/template/utils/index.d.ts +11 -0
  72. package/build/types/lib/template/utils/parse-rows.d.ts +1 -0
  73. package/build/types/lib/template/utils/to-excel-column-object.d.ts +10 -0
  74. package/build/types/lib/template/utils/write-rows-to-stream.d.ts +25 -0
  75. package/build/types/lib/zip/constants.d.ts +9 -0
  76. package/build/types/lib/zip/create-with-stream.d.ts +13 -0
  77. package/build/types/lib/zip/index.d.ts +1 -0
  78. package/build/types/lib/zip/utils/crc-32-stream.d.ts +11 -0
  79. package/build/types/lib/zip/utils/crc-32.d.ts +15 -0
  80. package/build/types/lib/zip/utils/dos-time.d.ts +25 -0
  81. package/build/types/lib/zip/utils/find-data-descriptor.d.ts +15 -0
  82. package/build/types/lib/zip/utils/index.d.ts +5 -0
  83. package/build/types/lib/zip/utils/to-bytes.d.ts +20 -0
  84. package/package.json +1 -1
  85. package/build/cjs/lib/zip/utils.js +0 -157
  86. package/build/esm/lib/zip/utils.js +0 -152
  87. package/build/types/lib/zip/utils.d.ts +0 -58
@@ -0,0 +1,38 @@
1
import { columnIndexToLetter } from "./column-index-to-letter.js";
/**
 * Escapes the five XML special characters so a cell value cannot break
 * the generated markup (or inject markup of its own).
 *
 * @param {string} value - Raw cell text.
 * @returns {string} - The text with `&`, `<`, `>`, `"` and `'` replaced
 * by their predefined XML entities. `&` is replaced first so already
 * produced entities are not double-escaped.
 */
function escapeXml(value) {
    return value
        .replace(/&/g, "&amp;")
        .replace(/</g, "&lt;")
        .replace(/>/g, "&gt;")
        .replace(/"/g, "&quot;")
        .replace(/'/g, "&apos;");
}
/**
 * Writes an async iterable of rows to an Excel XML file.
 *
 * Each row is expected to be an array of values, where each value is
 * converted to a string using the `String()` function. Empty values are
 * replaced with an empty string. Values are XML-escaped before being
 * embedded, so text containing `<`, `&` or quotes produces valid XML.
 *
 * The `startRowNumber` parameter is used as the starting row number
 * for the first row written to the file. Subsequent rows are written
 * with incrementing row numbers.
 *
 * @param output - A file write stream to write the Excel XML to.
 * @param rows - An async iterable of rows, where each row is an array
 * of values.
 * @param startRowNumber - The starting row number to use for the first
 * row written to the file.
 *
 * @returns An object with a single property `rowNumber`, which is the
 * last row number written to the file (i.e., the `startRowNumber`
 * plus the number of rows written).
 */
export async function writeRowsToStream(output, rows, startRowNumber) {
    let rowNumber = startRowNumber;
    for await (const row of rows) {
        // Transform the row into XML
        const cells = row.map((value, colIndex) => {
            const colLetter = columnIndexToLetter(colIndex);
            const cellRef = `${colLetter}${rowNumber}`;
            // Escape the value: raw <, & or quotes would corrupt the sheet XML.
            const cellValue = escapeXml(String(value ?? ""));
            return `<c r="${cellRef}" t="inlineStr"><is><t>${cellValue}</t></is></c>`;
        });
        // Write the row to the file
        output.write(`<row r="${rowNumber}">${cells.join("")}</row>`);
        rowNumber++;
    }
    return { rowNumber };
}
@@ -22,7 +22,7 @@ export function buildMergedSheet(originalXml, mergedRows, mergeCells = []) {
22
22
  // Construct a new <mergeCells> section with the provided merge references
23
23
  const mergeCellsXml = `<mergeCells count="${mergeCells.length}">${mergeCells.map(mc => `<mergeCell ref="${mc.ref}"/>`).join("")}</mergeCells>`;
24
24
  // Insert <mergeCells> after </sheetData> and before the next XML tag
25
- xmlData = xmlData.replace(/(<\/sheetData>)(\s*<)/, `$1\n${mergeCellsXml}\n$2`);
25
+ xmlData = xmlData.replace(/(<\/sheetData>)(\s*<)/, `$1${mergeCellsXml}$2`);
26
26
  }
27
27
  return Buffer.from(xmlData);
28
28
  }
@@ -12,6 +12,21 @@ import { Buffer } from "node:buffer";
12
12
  * Found in the central directory that appears at the end of the ZIP file.
13
13
  */
14
14
  export const CENTRAL_DIR_HEADER_SIG = Buffer.from("504b0102", "hex");
15
/**
 * Precomputed CRC-32 lookup table for optimized checksum calculation.
 * The table is generated using the standard IEEE 802.3 (Ethernet) polynomial:
 * 0xEDB88320 (reversed representation of 0x04C11DB7).
 *
 * The table is built once at module load and cached as a constant for
 * performance, following the common implementation pattern for CRC algorithms.
 */
export const CRC32_TABLE = (() => {
    const table = new Uint32Array(256);
    for (let index = 0; index < 256; index++) {
        let value = index;
        // Apply eight rounds of the reflected polynomial division step.
        for (let round = 0; round < 8; round++) {
            value = value & 1 ? 0xEDB88320 ^ (value >>> 1) : value >>> 1;
        }
        table[index] = value >>> 0;
    }
    return table;
})();
15
30
  /**
16
31
  * End of Central Directory Record signature (0x504b0506).
17
32
  * Marks the end of the central directory and contains global information
@@ -1,6 +1,6 @@
1
1
  import { Buffer } from "node:buffer";
2
2
  import { deflateRawSync } from "node:zlib";
3
- import { crc32, dosTime, toBytes } from "./utils.js";
3
+ import { crc32, dosTime, toBytes } from "./utils/index.js";
4
4
  import { CENTRAL_DIR_HEADER_SIG, END_OF_CENTRAL_DIR_SIG, LOCAL_FILE_HEADER_SIG, } from "./constants.js";
5
5
  /**
6
6
  * Creates a ZIP archive from a collection of files.
@@ -0,0 +1,111 @@
1
import * as path from "node:path";
import { PassThrough, Transform } from "node:stream";
import { createReadStream } from "node:fs";
import { pipeline } from "node:stream/promises";
import zlib from "node:zlib";
import { crc32Stream, dosTime, toBytes } from "./utils/index.js";
import { CENTRAL_DIR_HEADER_SIG, END_OF_CENTRAL_DIR_SIG, LOCAL_FILE_HEADER_SIG, } from "./constants.js";
/**
 * Creates a ZIP archive from a collection of files, streaming the output to a provided writable stream.
 *
 * Note: although the output is streamed, each file's compressed bytes are
 * buffered in memory before the local header is written, because the header
 * needs the CRC-32 and the compressed size up front.
 *
 * @param fileKeys - An array of file paths (relative to the destination) that will be used to create a new workbook.
 * @param destination - The path where the template files are located.
 * @param output - A Writable stream that the ZIP archive will be written to.
 *
 * @throws {Error} - If a filename contains "..", to prevent path traversal.
 * @throws {Error} - If a file does not exist in the destination.
 * @throws {Error} - If a file is not readable.
 * @throws {Error} - If the writable stream emits an error.
 */
export async function createWithStream(fileKeys, destination, output) {
    const centralDirectory = [];
    let offset = 0;
    // Sort a copy so the caller's array is not mutated as a side effect.
    const sortedKeys = [...fileKeys].sort((a, b) => a.localeCompare(b));
    for (const filename of sortedKeys) {
        if (filename.includes("..")) {
            // Reject path traversal attempts before touching the file system.
            throw new Error(`Invalid filename: ${filename}`);
        }
        const fullPath = path.join(destination, ...filename.split("/"));
        const fileNameBuf = Buffer.from(filename, "utf8");
        const modTime = dosTime(new Date());
        const source = createReadStream(fullPath);
        const crc32 = crc32Stream();
        const deflater = zlib.createDeflateRaw();
        let uncompSize = 0;
        let compSize = 0;
        const compressedChunks = [];
        // Counts the raw (uncompressed) bytes flowing out of the file.
        const sizeCounter = new Transform({
            transform(chunk, _enc, cb) {
                uncompSize += chunk.length;
                cb(null, chunk);
            },
        });
        // Collects the deflated bytes so they can be emitted after the header.
        const collectCompressed = new Transform({
            transform(chunk, _enc, cb) {
                compressedChunks.push(chunk);
                compSize += chunk.length;
                cb(null, chunk);
            },
        });
        await pipeline(source, sizeCounter, crc32, deflater, collectCompressed, new PassThrough());
        const crc = crc32.digest();
        const compressed = Buffer.concat(compressedChunks);
        // Local file header (PKZIP APPNOTE 4.3.7) followed by the file data.
        const localHeader = Buffer.concat([
            LOCAL_FILE_HEADER_SIG,
            toBytes(20, 2), // version needed to extract
            toBytes(0, 2), // general purpose bit flag
            toBytes(8, 2), // compression method: deflate
            modTime,
            toBytes(crc, 4),
            toBytes(compSize, 4),
            toBytes(uncompSize, 4),
            toBytes(fileNameBuf.length, 2),
            toBytes(0, 2), // extra field length
            fileNameBuf,
            compressed,
        ]);
        await new Promise((resolve, reject) => {
            output.write(localHeader, err => err ? reject(err) : resolve());
        });
        // Matching central directory entry (PKZIP APPNOTE 4.3.12).
        const centralEntry = Buffer.concat([
            CENTRAL_DIR_HEADER_SIG,
            toBytes(20, 2), // version made by
            toBytes(20, 2), // version needed to extract
            toBytes(0, 2), // general purpose bit flag
            toBytes(8, 2), // compression method: deflate
            modTime,
            toBytes(crc, 4),
            toBytes(compSize, 4),
            toBytes(uncompSize, 4),
            toBytes(fileNameBuf.length, 2),
            toBytes(0, 2), // extra field length
            toBytes(0, 2), // file comment length
            toBytes(0, 2), // disk number start
            toBytes(0, 2), // internal file attributes
            toBytes(0, 4), // external file attributes
            toBytes(offset, 4), // offset of the local header
            fileNameBuf,
        ]);
        centralDirectory.push(centralEntry);
        offset += localHeader.length;
    }
    const centralDirSize = centralDirectory.reduce((sum, entry) => sum + entry.length, 0);
    const centralDirOffset = offset;
    for (const entry of centralDirectory) {
        await new Promise((resolve, reject) => {
            output.write(entry, err => err ? reject(err) : resolve());
        });
    }
    // End of central directory record (PKZIP APPNOTE 4.3.16).
    const endRecord = Buffer.concat([
        END_OF_CENTRAL_DIR_SIG,
        toBytes(0, 2), // number of this disk
        toBytes(0, 2), // disk where central directory starts
        toBytes(centralDirectory.length, 2),
        toBytes(centralDirectory.length, 2),
        toBytes(centralDirSize, 4),
        toBytes(centralDirOffset, 4),
        toBytes(0, 2), // comment length
    ]);
    await new Promise((resolve, reject) => {
        output.write(endRecord, err => err ? reject(err) : resolve());
    });
    output.end();
}
@@ -2,7 +2,7 @@ import { Buffer } from "node:buffer";
2
2
  import util from "node:util";
3
3
  import zlib from "node:zlib";
4
4
  const deflateRaw = util.promisify(zlib.deflateRaw);
5
- import { crc32, dosTime, toBytes } from "./utils.js";
5
+ import { crc32, dosTime, toBytes } from "./utils/index.js";
6
6
  import { CENTRAL_DIR_HEADER_SIG, END_OF_CENTRAL_DIR_SIG, LOCAL_FILE_HEADER_SIG, } from "./constants.js";
7
7
  /**
8
8
  * Creates a ZIP archive from a collection of files.
@@ -1,4 +1,5 @@
1
1
  export * from "./create-sync.js";
2
+ export * from "./create-with-stream.js";
2
3
  export * from "./create.js";
3
4
  export * from "./read-sync.js";
4
5
  export * from "./read.js";
@@ -1,4 +1,5 @@
1
1
  import zlib from "node:zlib";
2
+ import * as Utils from "./utils/index.js";
2
3
  /**
3
4
  * Parses a ZIP archive from a buffer and extracts the files within.
4
5
  *
@@ -21,22 +22,37 @@ export function readSync(buffer) {
21
22
  const fileNameEnd = fileNameStart + fileNameLength;
22
23
  const fileName = buffer.subarray(fileNameStart, fileNameEnd).toString();
23
24
  const dataStart = fileNameEnd + extraFieldLength;
24
- const compressedSize = buffer.readUInt32LE(offset + 18);
25
25
  const useDataDescriptor = (generalPurposeBitFlag & 0x08) !== 0;
26
- if (useDataDescriptor) {
27
- throw new Error(`File ${fileName} uses data descriptor. Not supported in this minimal parser.`);
28
- }
29
- const compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
26
+ let compressedData;
30
27
  let content;
31
28
  try {
32
- if (compressionMethod === 0) {
33
- content = compressedData;
34
- }
35
- else if (compressionMethod === 8) {
36
- content = zlib.inflateRawSync(compressedData);
29
+ if (useDataDescriptor) {
30
+ const { compressedSize, offset: ddOffset } = Utils.findDataDescriptor(buffer, dataStart);
31
+ compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
32
+ if (compressionMethod === 0) {
33
+ content = compressedData;
34
+ }
35
+ else if (compressionMethod === 8) {
36
+ content = zlib.inflateRawSync(compressedData);
37
+ }
38
+ else {
39
+ throw new Error(`Unsupported compression method ${compressionMethod}`);
40
+ }
41
+ offset = ddOffset + 16; // Skip over data descriptor
37
42
  }
38
43
  else {
39
- throw new Error(`Unsupported compression method ${compressionMethod}`);
44
+ const compressedSize = buffer.readUInt32LE(offset + 18);
45
+ compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
46
+ if (compressionMethod === 0) {
47
+ content = compressedData;
48
+ }
49
+ else if (compressionMethod === 8) {
50
+ content = zlib.inflateRawSync(compressedData);
51
+ }
52
+ else {
53
+ throw new Error(`Unsupported compression method ${compressionMethod}`);
54
+ }
55
+ offset = dataStart + compressedSize;
40
56
  }
41
57
  }
42
58
  catch (error) {
@@ -44,7 +60,6 @@ export function readSync(buffer) {
44
60
  throw new Error(`Error unpacking file ${fileName}: ${message}`);
45
61
  }
46
62
  files[fileName] = content;
47
- offset = dataStart + compressedSize;
48
63
  }
49
64
  return files;
50
65
  }
@@ -1,5 +1,6 @@
1
1
  import util from "node:util";
2
2
  import zlib from "node:zlib";
3
+ import * as Utils from "./utils/index.js";
3
4
  const inflateRaw = util.promisify(zlib.inflateRaw);
4
5
  /**
5
6
  * Parses a ZIP archive from a buffer and extracts the files within.
@@ -23,22 +24,37 @@ export async function read(buffer) {
23
24
  const fileNameEnd = fileNameStart + fileNameLength;
24
25
  const fileName = buffer.subarray(fileNameStart, fileNameEnd).toString();
25
26
  const dataStart = fileNameEnd + extraFieldLength;
26
- const compressedSize = buffer.readUInt32LE(offset + 18);
27
27
  const useDataDescriptor = (generalPurposeBitFlag & 0x08) !== 0;
28
- if (useDataDescriptor) {
29
- throw new Error(`File ${fileName} uses data descriptor. Not supported in this minimal parser.`);
30
- }
31
- const compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
28
+ let compressedData;
32
29
  let content;
33
30
  try {
34
- if (compressionMethod === 0) {
35
- content = compressedData;
36
- }
37
- else if (compressionMethod === 8) {
38
- content = await inflateRaw(compressedData);
31
+ if (useDataDescriptor) {
32
+ const { compressedSize, offset: ddOffset } = Utils.findDataDescriptor(buffer, dataStart);
33
+ compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
34
+ if (compressionMethod === 0) {
35
+ content = compressedData;
36
+ }
37
+ else if (compressionMethod === 8) {
38
+ content = await inflateRaw(compressedData);
39
+ }
40
+ else {
41
+ throw new Error(`Unsupported compression method ${compressionMethod}`);
42
+ }
43
+ offset = ddOffset + 16; // Skip over data descriptor
39
44
  }
40
45
  else {
41
- throw new Error(`Unsupported compression method ${compressionMethod}`);
46
+ const compressedSize = buffer.readUInt32LE(offset + 18);
47
+ compressedData = buffer.subarray(dataStart, dataStart + compressedSize);
48
+ if (compressionMethod === 0) {
49
+ content = compressedData;
50
+ }
51
+ else if (compressionMethod === 8) {
52
+ content = await inflateRaw(compressedData);
53
+ }
54
+ else {
55
+ throw new Error(`Unsupported compression method ${compressionMethod}`);
56
+ }
57
+ offset = dataStart + compressedSize;
42
58
  }
43
59
  }
44
60
  catch (error) {
@@ -46,7 +62,6 @@ export async function read(buffer) {
46
62
  throw new Error(`Error unpacking file ${fileName}: ${message}`);
47
63
  }
48
64
  files[fileName] = content;
49
- offset = dataStart + compressedSize;
50
65
  }
51
66
  return files;
52
67
  }
@@ -0,0 +1,33 @@
1
import { Transform } from "node:stream";
import { CRC32_TABLE } from "../constants.js";
/**
 * Folds a single byte into a running CRC-32 value using the precomputed
 * lookup table (standard IEEE 802.3 polynomial).
 * This is a low-level helper used by crc32Stream().
 *
 * @param {number} currentCrc - The checksum accumulated so far.
 * @param {number} byte - The byte (0-255) to fold into the checksum.
 * @returns {number} - The updated checksum value.
 */
function updateCrc(currentCrc, byte) {
    return CRC32_TABLE[(currentCrc ^ byte) & 0xff] ^ (currentCrc >>> 8);
}
/**
 * Creates a Transform stream that computes the CRC-32 checksum of the input data.
 *
 * Data passes through unchanged; call `digest()` on the returned stream
 * after the stream has finished to obtain the final checksum value.
 *
 * @returns {Transform & { digest: () => number }} - The Transform stream.
 */
export function crc32Stream() {
    // Start from all ones, per the standard reflected CRC-32 algorithm.
    let state = 0xffffffff;
    const stream = new Transform({
        transform(chunk, _encoding, done) {
            for (const byte of chunk) {
                state = updateCrc(state, byte);
            }
            done(null, chunk);
        },
    });
    // Final XOR and unsigned conversion produce the conventional CRC-32 value.
    stream.digest = () => (state ^ 0xffffffff) >>> 0;
    return stream;
}
@@ -0,0 +1,40 @@
1
import { CRC32_TABLE } from "../constants.js";
/**
 * Computes a CRC-32 checksum for the given Buffer using the standard IEEE 802.3 polynomial.
 * This implementation uses a precomputed lookup table for optimal performance.
 *
 * Algorithm characteristics:
 * - Polynomial: 0xEDB88320 (reversed representation of 0x04C11DB7)
 * - Initial value: 0xFFFFFFFF
 * - Final XOR value: 0xFFFFFFFF
 * - Input and output reflection: yes
 *
 * @param {Buffer} buf - The input buffer to calculate checksum for
 * @returns {number} - The 32-bit unsigned CRC-32 checksum (0x00000000 to 0xFFFFFFFF)
 */
export function crc32(buf) {
    // Start with all bits set (0xFFFFFFFF as a signed 32-bit value).
    let checksum = ~0;
    for (const byte of buf) {
        // Table-driven update: XOR the next byte into the low 8 bits,
        // look up the precomputed remainder, and shift in the rest.
        checksum = (checksum >>> 8) ^ CRC32_TABLE[(checksum ^ byte) & 0xff];
    }
    // Invert for the final XOR step and coerce to an unsigned 32-bit integer.
    return ~checksum >>> 0;
}
@@ -0,0 +1,47 @@
1
import { Buffer } from "node:buffer";
import { toBytes } from "./to-bytes.js";
/**
 * Converts a JavaScript Date object to a 4-byte Buffer in MS-DOS date/time format
 * as specified in the ZIP file format specification (PKZIP APPNOTE.TXT).
 *
 * The MS-DOS format packs date and time into 32 bits:
 *
 * Time half (16 bits):
 * - Bits 00-04: Seconds divided by 2 (0-29, representing 0-58 seconds)
 * - Bits 05-10: Minutes (0-59)
 * - Bits 11-15: Hours (0-23)
 *
 * Date half (16 bits):
 * - Bits 00-04: Day (1-31)
 * - Bits 05-08: Month (1-12)
 * - Bits 09-15: Year offset from 1980 (0-127, representing 1980-2107)
 *
 * NOTE(review): no range validation is performed — dates outside 1980-2107
 * silently wrap in the year bit-field rather than raising an error.
 *
 * @param {Date} date - The JavaScript Date object to convert
 * @returns {Buffer} - 4-byte Buffer containing:
 * - Bytes 0-1: DOS time (hours, minutes, seconds/2), little-endian
 * - Bytes 2-3: DOS date (year-1980, month, day), little-endian
 */
export function dosTime(date) {
    // Seconds are stored with 2-second granularity (5 bits only).
    const twoSecondUnits = Math.floor(date.getSeconds() / 2);
    // Hours in bits 11-15, minutes in bits 5-10, seconds/2 in bits 0-4.
    const timeField = (date.getHours() << 11)
        | (date.getMinutes() << 5)
        | twoSecondUnits;
    // Years-since-1980 in bits 9-15, month (1-12) in bits 5-8, day in bits 0-4.
    const dateField = ((date.getFullYear() - 1980) << 9)
        | ((date.getMonth() + 1) << 5)
        | date.getDate();
    // Each half is serialized independently as 2 little-endian bytes.
    return Buffer.concat([
        toBytes(timeField, 2),
        toBytes(dateField, 2),
    ]);
}
@@ -0,0 +1,26 @@
1
/**
 * Finds a Data Descriptor in a ZIP archive buffer.
 *
 * The Data Descriptor is an optional 16-byte structure that appears at the end of a file's compressed data.
 * It contains the compressed size of the file, and must be used when the Local File Header does not contain this information.
 *
 * @param buffer - The buffer containing the ZIP archive data.
 * @param start - The starting offset in the buffer to search for the Data Descriptor.
 * @returns - An object with `offset` and `compressedSize` properties.
 * @throws {Error} - If the Data Descriptor is not found.
 */
export function findDataDescriptor(buffer, start) {
    // "PK\x07\x08" read as a little-endian 32-bit integer.
    const SIGNATURE = 0x08074b50;
    // Signature (4) + CRC (4) + compressed size (4) + uncompressed size (4).
    const DESCRIPTOR_LENGTH = 16;
    // The compressed size field sits 8 bytes past the signature.
    const SIZE_FIELD_OFFSET = 8;
    // Only positions with a full descriptor's worth of bytes are candidates.
    const lastCandidate = buffer.length - DESCRIPTOR_LENGTH;
    let position = start;
    while (position <= lastCandidate) {
        if (buffer.readUInt32LE(position) === SIGNATURE) {
            return {
                compressedSize: buffer.readUInt32LE(position + SIZE_FIELD_OFFSET),
                offset: position,
            };
        }
        position += 1;
    }
    throw new Error("Data Descriptor not found");
}
@@ -0,0 +1,5 @@
1
+ export * from "./crc-32-stream.js";
2
+ export * from "./crc-32.js";
3
+ export * from "./dos-time.js";
4
+ export * from "./find-data-descriptor.js";
5
+ export * from "./to-bytes.js";
@@ -0,0 +1,34 @@
1
import { Buffer } from "node:buffer";
/**
 * Converts a numeric value into a fixed-length Buffer representation,
 * storing the value in little-endian format with right-padding of zeros.
 *
 * This is particularly useful for binary protocols or file formats that
 * require fixed-width numeric fields.
 *
 * Uses integer division rather than `>>`, because bitwise shifts truncate
 * to 32 bits in JavaScript: `value >>= 8` sign-extends values >= 2^31 and
 * produces garbage bytes for lengths above 4. Division keeps the result
 * exact for any non-negative integer up to Number.MAX_SAFE_INTEGER (2^53-1).
 *
 * @param {number} value - The non-negative integer value to convert to bytes.
 * Note: JavaScript numbers are IEEE 754 doubles, but only the
 * integer portion will be used (up to 53-bit precision).
 * @param {number} len - The desired length of the output Buffer in bytes.
 * Must be a positive integer.
 * @returns {Buffer} - A new Buffer of exactly `len` bytes containing:
 * 1. The value's bytes in little-endian order (least significant byte first)
 * 2. Zero padding in any remaining higher-order bytes
 * Values too large for `len` bytes are silently truncated to the low bytes
 * (this is currently not explicitly checked).
 */
export function toBytes(value, len) {
    // Allocate a new Buffer of the requested length, automatically zero-filled
    const buf = Buffer.alloc(len);
    // Emit bytes from least significant to most significant
    for (let i = 0; i < len; i++) {
        // Low 8 bits of the remaining value
        buf[i] = value & 0xff;
        // Drop the emitted byte; Math.floor instead of >> avoids 32-bit truncation
        value = Math.floor(value / 256);
    }
    return buf;
}
@@ -1,2 +1,3 @@
1
1
  export * from "./merge-sheets-to-base-file-sync.js";
2
2
  export * from "./merge-sheets-to-base-file.js";
3
+ export * from "./template/index.js";
@@ -0,0 +1 @@
1
+ export * from "./template-fs.js";
@@ -0,0 +1,122 @@
1
+ import { Writable } from "node:stream";
2
+ /**
3
+ * A class for manipulating Excel templates by extracting, modifying, and repacking Excel files.
4
+ *
5
+ * @experimental This API is experimental and might change in future versions.
6
+ */
7
+ export declare class TemplateFs {
8
+ #private;
9
+ /**
10
+ * Set of file paths (relative to the template) that will be used to create a new workbook.
11
+ * @type {Set<string>}
12
+ */
13
+ fileKeys: Set<string>;
14
+ /**
15
+ * The path where the template will be expanded.
16
+ * @type {string}
17
+ */
18
+ destination: string;
19
+ /**
20
+ * Flag indicating whether this template instance has been destroyed.
21
+ * @type {boolean}
22
+ */
23
+ destroyed: boolean;
24
+ /**
25
+ * Creates a Template instance.
26
+ *
27
+ * @param {Set<string>} fileKeys - Set of file paths (relative to the template) that will be used to create a new workbook.
28
+ * @param {string} destination - The path where the template will be expanded.
29
+ * @experimental This API is experimental and might change in future versions.
30
+ */
31
+ constructor(fileKeys: Set<string>, destination: string);
32
+ /**
33
+ * Inserts rows into a specific sheet in the template.
34
+ *
35
+ * @param {Object} data - The data for row insertion.
36
+ * @param {string} data.sheetName - The name of the sheet to insert rows into.
37
+ * @param {number} [data.startRowNumber] - The row number to start inserting from.
38
+ * @param {unknown[][]} data.rows - The rows to insert.
39
+ * @returns {Promise<void>}
40
+ * @throws {Error} If the template instance has been destroyed.
41
+ * @throws {Error} If the sheet does not exist.
42
+ * @throws {Error} If the row number is out of range.
43
+ * @throws {Error} If a column is out of range.
44
+ * @experimental This API is experimental and might change in future versions.
45
+ */
46
+ insertRows(data: {
47
+ sheetName: string;
48
+ startRowNumber?: number;
49
+ rows: unknown[][];
50
+ }): Promise<void>;
51
+ /**
52
+ * Inserts rows into a specific sheet in the template using an async stream.
53
+ *
54
+ * @param {Object} data - The data for row insertion.
55
+ * @param {string} data.sheetName - The name of the sheet to insert rows into.
56
+ * @param {number} [data.startRowNumber] - The row number to start inserting from.
57
+ * @param {AsyncIterable<unknown[]>} data.rows - Async iterable of rows to insert.
58
+ * @returns {Promise<void>}
59
+ * @throws {Error} If the template instance has been destroyed.
60
+ * @throws {Error} If the sheet does not exist.
61
+ * @throws {Error} If the row number is out of range.
62
+ * @throws {Error} If a column is out of range.
63
+ * @experimental This API is experimental and might change in future versions.
64
+ */
65
+ insertRowsStream(data: {
66
+ sheetName: string;
67
+ startRowNumber?: number;
68
+ rows: AsyncIterable<unknown[]>;
69
+ }): Promise<void>;
70
+ /**
71
+ * Saves the modified Excel template to a buffer.
72
+ *
73
+ * @returns {Promise<Buffer>} The modified Excel template as a buffer.
74
+ * @throws {Error} If the template instance has been destroyed.
75
+ * @experimental This API is experimental and might change in future versions.
76
+ */
77
+ save(): Promise<Buffer>;
78
+ /**
79
+ * Writes the modified Excel template to a writable stream.
80
+ *
81
+ * @param {Writable} output - The writable stream to write to.
82
+ * @returns {Promise<void>}
83
+ * @throws {Error} If the template instance has been destroyed.
84
+ * @experimental This API is experimental and might change in future versions.
85
+ */
86
+ saveStream(output: Writable): Promise<void>;
87
+ /**
88
+ * Replaces the contents of a file in the template.
89
+ *
90
+ * @param {string} key - The Excel path of the file to replace.
91
+ * @param {Buffer|string} content - The new content.
92
+ * @returns {Promise<void>}
93
+ * @throws {Error} If the template instance has been destroyed.
94
+ * @throws {Error} If the file does not exist in the template.
95
+ * @experimental This API is experimental and might change in future versions.
96
+ */
97
+ set(key: string, content: Buffer | string): Promise<void>;
98
+ /**
99
+ * Validates the template by checking all required files exist.
100
+ *
101
+ * @returns {Promise<void>}
102
+ * @throws {Error} If the template instance has been destroyed.
103
+ * @throws {Error} If any required files are missing.
104
+ * @experimental This API is experimental and might change in future versions.
105
+ */
106
+ validate(): Promise<void>;
107
+ /**
108
+ * Creates a Template instance from an Excel file source.
109
+ * Removes any existing files in the destination directory.
110
+ *
111
+ * @param {Object} data - The data to create the template from.
112
+ * @param {string} data.source - The path or buffer of the Excel file.
113
+ * @param {string} data.destination - The path to save the template to.
114
+ * @returns {Promise<TemplateFs>} A new TemplateFs instance.
115
+ * @throws {Error} If reading or writing files fails.
116
+ * @experimental This API is experimental and might change in future versions.
117
+ */
118
+ static from(data: {
119
+ destination: string;
120
+ source: string | Buffer;
121
+ }): Promise<TemplateFs>;
122
+ }