@js-ak/excel-toolbox 1.1.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +100 -0
- package/build/cjs/index.js +17 -0
- package/build/cjs/lib/index.js +18 -0
- package/build/cjs/lib/merge-sheets-to-base-file-process.js +96 -0
- package/build/cjs/lib/merge-sheets-to-base-file-sync.js +68 -0
- package/build/cjs/lib/merge-sheets-to-base-file.js +68 -0
- package/build/cjs/lib/utils/get-max-row-number.js +23 -0
- package/build/cjs/lib/utils/index.js +23 -0
- package/build/cjs/lib/utils/is-same-buffer.js +13 -0
- package/build/cjs/lib/utils/remove-sheet-by-name.js +45 -0
- package/build/cjs/lib/utils/remove-sheet-from-content-types.js +13 -0
- package/build/cjs/lib/utils/remove-sheet-from-rels.js +13 -0
- package/build/cjs/lib/utils/remove-sheet-from-workbook.js +13 -0
- package/build/cjs/lib/utils/shift-cell-ref.js +26 -0
- package/build/cjs/lib/xml/build-merged-sheet.js +32 -0
- package/build/cjs/lib/xml/extract-rows-from-sheet.js +65 -0
- package/build/cjs/lib/xml/extract-xml-from-sheet.js +49 -0
- package/build/cjs/lib/xml/extract-xml-from-system-content.js +53 -0
- package/build/cjs/lib/xml/index.js +21 -0
- package/build/cjs/lib/xml/shift-row-indices.js +36 -0
- package/build/cjs/lib/zip/constants.js +32 -0
- package/build/cjs/lib/zip/create-sync.js +84 -0
- package/build/cjs/lib/zip/create.js +89 -0
- package/build/cjs/lib/zip/index.js +20 -0
- package/build/cjs/lib/zip/read-sync.js +57 -0
- package/build/cjs/lib/zip/read.js +62 -0
- package/build/cjs/lib/zip/utils.js +158 -0
- package/build/cjs/test/index.js +10 -0
- package/build/esm/lib/index.js +2 -0
- package/build/esm/lib/merge-sheets-to-base-file-process.js +69 -0
- package/build/esm/lib/merge-sheets-to-base-file-sync.js +41 -0
- package/build/esm/lib/merge-sheets-to-base-file.js +41 -0
- package/build/esm/lib/utils/get-max-row-number.js +19 -0
- package/build/esm/lib/utils/index.js +7 -0
- package/build/esm/lib/utils/is-same-buffer.js +9 -0
- package/build/esm/lib/utils/remove-sheet-by-name.js +41 -0
- package/build/esm/lib/utils/remove-sheet-from-content-types.js +9 -0
- package/build/esm/lib/utils/remove-sheet-from-rels.js +9 -0
- package/build/esm/lib/utils/remove-sheet-from-workbook.js +9 -0
- package/build/esm/lib/utils/shift-cell-ref.js +22 -0
- package/build/esm/lib/xml/build-merged-sheet.js +28 -0
- package/build/{lib → esm/lib}/xml/extract-rows-from-sheet.js +1 -1
- package/build/esm/lib/xml/index.js +5 -0
- package/build/{lib/zip/create.js → esm/lib/zip/create-sync.js} +1 -1
- package/build/esm/lib/zip/create.js +82 -0
- package/build/esm/lib/zip/index.js +4 -0
- package/build/{lib/zip/read.js → esm/lib/zip/read-sync.js} +1 -1
- package/build/esm/lib/zip/read.js +55 -0
- package/build/{lib → esm/lib}/zip/utils.js +1 -1
- package/build/esm/test/index.js +5 -0
- package/build/types/index.d.ts +1 -0
- package/build/types/lib/index.d.ts +2 -0
- package/build/types/lib/merge-sheets-to-base-file-process.d.ts +27 -0
- package/build/{lib/merge-sheets-to-base-file.d.ts → types/lib/merge-sheets-to-base-file-sync.d.ts} +5 -5
- package/build/types/lib/merge-sheets-to-base-file.d.ts +29 -0
- package/build/types/lib/utils/get-max-row-number.d.ts +6 -0
- package/build/types/lib/utils/index.d.ts +7 -0
- package/build/types/lib/utils/is-same-buffer.d.ts +9 -0
- package/build/types/lib/utils/remove-sheet-by-name.d.ts +7 -0
- package/build/types/lib/utils/remove-sheet-from-content-types.d.ts +7 -0
- package/build/types/lib/utils/remove-sheet-from-rels.d.ts +7 -0
- package/build/types/lib/utils/remove-sheet-from-workbook.d.ts +7 -0
- package/build/types/lib/utils/shift-cell-ref.d.ts +13 -0
- package/build/{lib → types/lib}/xml/build-merged-sheet.d.ts +7 -8
- package/build/{lib → types/lib}/xml/extract-rows-from-sheet.d.ts +0 -1
- package/build/{lib → types/lib}/xml/extract-xml-from-sheet.d.ts +0 -1
- package/build/{lib → types/lib}/xml/extract-xml-from-system-content.d.ts +0 -1
- package/build/types/lib/xml/index.d.ts +5 -0
- package/build/{lib → types/lib}/xml/shift-row-indices.d.ts +0 -1
- package/build/{lib → types/lib}/zip/constants.d.ts +0 -1
- package/build/types/lib/zip/create-sync.d.ts +12 -0
- package/build/{lib → types/lib}/zip/create.d.ts +1 -2
- package/build/types/lib/zip/index.d.ts +4 -0
- package/build/types/lib/zip/read-sync.d.ts +10 -0
- package/build/{lib → types/lib}/zip/read.d.ts +1 -4
- package/build/{lib → types/lib}/zip/utils.d.ts +1 -2
- package/build/types/test/index.d.ts +1 -0
- package/package.json +15 -8
- package/build/index.d.ts +0 -2
- package/build/lib/index.d.ts +0 -2
- package/build/lib/index.d.ts.map +0 -1
- package/build/lib/index.js +0 -1
- package/build/lib/merge-sheets-to-base-file.d.ts.map +0 -1
- package/build/lib/merge-sheets-to-base-file.js +0 -190
- package/build/lib/xml/build-merged-sheet.d.ts.map +0 -1
- package/build/lib/xml/build-merged-sheet.js +0 -32
- package/build/lib/xml/extract-rows-from-sheet.d.ts.map +0 -1
- package/build/lib/xml/extract-xml-from-sheet.d.ts.map +0 -1
- package/build/lib/xml/extract-xml-from-system-content.d.ts.map +0 -1
- package/build/lib/xml/shift-row-indices.d.ts.map +0 -1
- package/build/lib/zip/constants.d.ts.map +0 -1
- package/build/lib/zip/create.d.ts.map +0 -1
- package/build/lib/zip/index.d.ts +0 -3
- package/build/lib/zip/index.d.ts.map +0 -1
- package/build/lib/zip/index.js +0 -2
- package/build/lib/zip/read.d.ts.map +0 -1
- package/build/lib/zip/utils.d.ts.map +0 -1
- /package/build/{index.js → esm/index.js} +0 -0
- /package/build/{lib → esm/lib}/xml/extract-xml-from-sheet.js +0 -0
- /package/build/{lib → esm/lib}/xml/extract-xml-from-system-content.js +0 -0
- /package/build/{lib → esm/lib}/xml/shift-row-indices.js +0 -0
- /package/build/{lib → esm/lib}/zip/constants.js +0 -0
@@ -0,0 +1,49 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.extractXmlFromSheet = void 0;
|
4
|
+
const pako_1 = require("pako");
|
5
|
+
/**
 * Extracts XML content from an Excel worksheet file (e.g., xl/worksheets/sheet1.xml).
 * Handles both compressed (raw deflate) and uncompressed (plain XML) formats.
 *
 * This function is designed to work with Excel Open XML (.xlsx) worksheet files,
 * which may be stored in either compressed or uncompressed format within the ZIP container.
 *
 * @param {Buffer} buffer - The file content to process, which may be:
 *                          - Raw XML text
 *                          - Deflate-compressed XML data (without zlib headers)
 * @returns {string} - The extracted XML content as a UTF-8 string
 * @throws {Error} - If the buffer is empty, decompression fails, or the
 *                   decompressed data contains no <sheetData> element
 */
function extractXmlFromSheet(buffer) {
    if (!buffer || buffer.length === 0) {
        throw new Error("Empty buffer provided");
    }
    let xml;
    // Check if the buffer starts with an XML declaration (<?xml)
    const startsWithXml = buffer.subarray(0, 5).toString("utf8").trim().startsWith("<?xml");
    if (startsWithXml) {
        // Case 1: Already uncompressed XML - convert directly to string
        xml = buffer.toString("utf8");
    }
    else {
        // Case 2: Attempt to decompress as raw deflate data.
        // pako may throw a bare string rather than an Error, so normalize it
        // here instead of letting it escape unwrapped (matches the behavior of
        // extractXmlFromSystemContent).
        let inflated;
        try {
            inflated = (0, pako_1.inflateRaw)(buffer, { to: "string" });
        }
        catch (error) {
            const message = error instanceof Error ? error.message : String(error);
            throw new Error(`Failed to decompress sheet: ${message}`);
        }
        // Validate the decompressed content contains worksheet data.
        // Note: kept outside the try/catch so it is not re-wrapped as a
        // decompression failure.
        if (inflated && inflated.includes("<sheetData")) {
            xml = inflated;
        }
        else {
            throw new Error("Decompressed data does not contain sheetData");
        }
    }
    // (The previous `if (!xml)` fallback was unreachable: every branch above
    // either assigns xml or throws, so it has been removed.)
    // Sanitize XML by removing control characters (except tab, newline, carriage return)
    // This handles potential corruption from binary data or encoding issues
    xml = xml.replace(/[\x00-\x08\x0B\x0C\x0E-\x1F]/g, "");
    return xml;
}
exports.extractXmlFromSheet = extractXmlFromSheet;
|
@@ -0,0 +1,53 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.extractXmlFromSystemContent = void 0;
|
4
|
+
const pako_1 = require("pako");
|
5
|
+
/**
 * Extracts and decompresses XML content from Excel system files (e.g., workbook.xml, [Content_Types].xml).
 * Handles both compressed (raw DEFLATE) and uncompressed (plain XML) formats with comprehensive error handling.
 *
 * @param {Buffer} buffer - The file content to process, which may be:
 *                          - Raw XML text
 *                          - DEFLATE-compressed XML data (without zlib headers)
 * @param {string} name - The filename being processed (for error reporting)
 * @returns {string} - The extracted XML content as a sanitized UTF-8 string
 * @throws {Error} - With descriptive messages for various failure scenarios:
 *                   - Empty buffer
 *                   - Decompression failures
 *                   - Invalid XML content
 */
const extractXmlFromSystemContent = (buffer, name) => {
    // Validate input buffer
    if (!buffer || buffer.length === 0) {
        throw new Error(`Empty data buffer provided for file ${name}`);
    }
    let xml;
    // Check for XML declaration in first 5 bytes (<?xml)
    const startsWithXml = buffer.subarray(0, 5).toString("utf8").trim().startsWith("<?xml");
    if (startsWithXml) {
        // Case 1: Already uncompressed XML - convert directly to string
        xml = buffer.toString("utf8");
    }
    else {
        // Case 2: Attempt DEFLATE decompression.
        // pako may throw a bare string rather than an Error; stringify either way.
        let inflated;
        try {
            inflated = (0, pako_1.inflateRaw)(buffer, { to: "string" });
        }
        catch (error) {
            const message = error instanceof Error ? error.message : String(error);
            throw new Error(`Failed to decompress ${name}: ${message}`);
        }
        // Validate decompressed content contains an XML declaration.
        // This check deliberately lives OUTSIDE the try/catch above: previously
        // the throw was inside the try, so its message was re-caught and
        // double-wrapped as a "Failed to decompress" error.
        if (inflated && inflated.includes("<?xml")) {
            xml = inflated;
        }
        else {
            throw new Error(`Decompressed data doesn't contain valid XML in ${name}`);
        }
    }
    // Sanitize XML by removing illegal control characters (per XML 1.0 spec)
    // Preserves tabs (0x09), newlines (0x0A), and carriage returns (0x0D)
    xml = xml.replace(/[\x00-\x08\x0B\x0C\x0E-\x1F]/g, "");
    return xml;
};
exports.extractXmlFromSystemContent = extractXmlFromSystemContent;
|
@@ -0,0 +1,21 @@
|
|
1
|
+
"use strict";
// TypeScript-emitted CommonJS interop helper: copies one named export from
// module `m` onto target `o`, preferring a live getter binding when the
// runtime supports property descriptors.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        // Fall back to a plain getter when no usable descriptor exists.
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Re-exports every named export (everything except "default") from `m`,
// skipping names the target already defines.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Barrel module: surface all public XML helpers from one entry point.
__exportStar(require("./build-merged-sheet.js"), exports);
__exportStar(require("./extract-rows-from-sheet.js"), exports);
__exportStar(require("./extract-xml-from-sheet.js"), exports);
__exportStar(require("./extract-xml-from-system-content.js"), exports);
__exportStar(require("./shift-row-indices.js"), exports);
|
@@ -0,0 +1,36 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.shiftRowIndices = void 0;
|
4
|
+
/**
 * Adjusts row indices in Excel XML row elements by a specified offset.
 * Handles both row element attributes and cell references within rows.
 *
 * This function is particularly useful when merging sheets or rearranging
 * worksheet content while maintaining proper Excel XML structure.
 *
 * @param {string[]} rows - Array of XML <row> elements as strings
 * @param {number} offset - Numeric value to adjust row indices by:
 *                          - Positive values shift rows down
 *                          - Negative values shift rows up
 * @returns {string[]} - New array with modified row elements containing updated indices
 *
 * @example
 * // Shifts rows down by 2 positions
 * shiftRowIndices([`<row r="1"><c r="A1"/></row>`], 2);
 * // Returns: [`<row r="3"><c r="A3"/></row>`]
 */
function shiftRowIndices(rows, offset) {
    return rows.map(row => {
        // Process each row element through two replacement phases:
        // 1. Update the row's own index (r="N" attribute).
        //    Radix 10 is passed explicitly so parsing never depends on
        //    environment-specific parseInt heuristics.
        let adjustedRow = row.replace(/(<row[^>]*\br=")(\d+)(")/, (_, prefix, rowIndex, suffix) => {
            return `${prefix}${Number.parseInt(rowIndex, 10) + offset}${suffix}`;
        });
        // 2. Update all cell references within the row (r="AN" attributes)
        adjustedRow = adjustedRow.replace(/(<c[^>]*\br=")([A-Z]+)(\d+)(")/g, (_, prefix, columnLetter, cellRowIndex, suffix) => {
            return `${prefix}${columnLetter}${Number.parseInt(cellRowIndex, 10) + offset}${suffix}`;
        });
        return adjustedRow;
    });
}
|
36
|
+
exports.shiftRowIndices = shiftRowIndices;
|
@@ -0,0 +1,32 @@
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LOCAL_FILE_HEADER_SIG = exports.END_OF_CENTRAL_DIR_SIG = exports.CENTRAL_DIR_HEADER_SIG = void 0;
const node_buffer_1 = require("node:buffer");
/**
 * ZIP file signature constants in Buffer format.
 * These magic numbers identify different sections of a ZIP file,
 * as specified in PKWARE's APPNOTE.TXT (ZIP File Format Specification).
 * Each buffer holds the signature bytes in on-disk order ('P', 'K', ...).
 */
/**
 * Central Directory Header signature (bytes 50 4B 01 02, i.e. 'PK\01\02').
 * Marks an entry in the central directory, which contains metadata
 * about all files in the archive.
 * Found in the central directory that appears at the end of the ZIP file.
 */
exports.CENTRAL_DIR_HEADER_SIG = node_buffer_1.Buffer.from("504b0102", "hex");
/**
 * End of Central Directory Record signature (bytes 50 4B 05 06, i.e. 'PK\05\06').
 * Marks the end of the central directory and contains global information
 * about the ZIP archive.
 * This is the last record in a valid ZIP file.
 */
exports.END_OF_CENTRAL_DIR_SIG = node_buffer_1.Buffer.from("504b0506", "hex");
/**
 * Local File Header signature (bytes 50 4B 03 04, i.e. 'PK\03\04').
 * Marks the beginning of a file entry within the ZIP archive.
 * Appears before each file's compressed data.
 */
exports.LOCAL_FILE_HEADER_SIG = node_buffer_1.Buffer.from("504b0304", "hex");
|
@@ -0,0 +1,84 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.createSync = void 0;
|
4
|
+
const node_buffer_1 = require("node:buffer");
|
5
|
+
const node_zlib_1 = require("node:zlib");
|
6
|
+
const utils_js_1 = require("./utils.js");
|
7
|
+
const constants_js_1 = require("./constants.js");
|
8
|
+
/**
 * Creates a ZIP archive from a collection of files (synchronous variant).
 *
 * Entries are emitted in lexicographic filename order so the archive layout
 * is deterministic for a given input map.
 *
 * @param {Object.<string, Buffer|string>} files - An object with file paths as keys and either Buffer or string content as values.
 * @returns {Buffer} - The ZIP archive as a Buffer.
 * @throws {Error} - If a filename contains ".." (path traversal guard).
 */
function createSync(files) {
    const fileEntries = [];
    const centralDirectory = [];
    let offset = 0;
    for (const [filename, rawContent] of Object.entries(files).sort(([a], [b]) => a.localeCompare(b))) {
        // Reject path traversal attempts in archive member names.
        if (filename.includes("..")) {
            // Fix: the message previously contained the literal placeholder
            // "$(unknown)" instead of interpolating the offending filename.
            throw new Error(`Invalid filename: ${filename}`);
        }
        const content = node_buffer_1.Buffer.isBuffer(rawContent) ? rawContent : node_buffer_1.Buffer.from(rawContent);
        const fileNameBuf = node_buffer_1.Buffer.from(filename, "utf8");
        const modTime = (0, utils_js_1.dosTime)(new Date());
        const crc = (0, utils_js_1.crc32)(content);
        const compressed = (0, node_zlib_1.deflateRawSync)(content);
        const compSize = compressed.length;
        const uncompSize = content.length;
        // Local file header
        const localHeader = node_buffer_1.Buffer.concat([
            constants_js_1.LOCAL_FILE_HEADER_SIG,
            (0, utils_js_1.toBytes)(20, 2), // Version needed to extract
            (0, utils_js_1.toBytes)(0, 2), // General purpose flags
            (0, utils_js_1.toBytes)(8, 2), // Compression method: deflate
            modTime,
            (0, utils_js_1.toBytes)(crc, 4),
            (0, utils_js_1.toBytes)(compSize, 4),
            (0, utils_js_1.toBytes)(uncompSize, 4),
            (0, utils_js_1.toBytes)(fileNameBuf.length, 2),
            (0, utils_js_1.toBytes)(0, 2), // Extra field length
        ]);
        const localEntry = node_buffer_1.Buffer.concat([
            localHeader,
            fileNameBuf,
            compressed,
        ]);
        fileEntries.push(localEntry);
        // Matching central directory record for this entry.
        const centralEntry = node_buffer_1.Buffer.concat([
            node_buffer_1.Buffer.from(constants_js_1.CENTRAL_DIR_HEADER_SIG),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(20, 2)), // Version made by
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(20, 2)), // Version needed
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Flags
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(8, 2)), // Compression
            node_buffer_1.Buffer.from(modTime),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(crc, 4)),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(compSize, 4)),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(uncompSize, 4)),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(fileNameBuf.length, 2)),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Extra field length
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Comment length
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Disk start
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Internal attrs
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 4)), // External attrs
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(offset, 4)), // Local header offset
            fileNameBuf,
        ]);
        centralDirectory.push(centralEntry);
        offset += localEntry.length;
    }
    const centralDirSize = centralDirectory.reduce((sum, entry) => sum + entry.length, 0);
    const centralDirOffset = offset;
    // End of central directory record closes the archive.
    const endRecord = node_buffer_1.Buffer.concat([
        node_buffer_1.Buffer.from(constants_js_1.END_OF_CENTRAL_DIR_SIG),
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Disk #
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Start disk #
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(centralDirectory.length, 2)), // Entries on this disk
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(centralDirectory.length, 2)), // Total entries
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(centralDirSize, 4)),
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(centralDirOffset, 4)),
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Comment length
    ]);
    return node_buffer_1.Buffer.concat(fileEntries.concat(centralDirectory).concat([endRecord]));
}
exports.createSync = createSync;
|
@@ -0,0 +1,89 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
4
|
+
};
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
6
|
+
exports.create = void 0;
|
7
|
+
const node_buffer_1 = require("node:buffer");
|
8
|
+
const node_util_1 = __importDefault(require("node:util"));
|
9
|
+
const node_zlib_1 = __importDefault(require("node:zlib"));
|
10
|
+
const deflateRaw = node_util_1.default.promisify(node_zlib_1.default.deflateRaw);
|
11
|
+
const utils_js_1 = require("./utils.js");
|
12
|
+
const constants_js_1 = require("./constants.js");
|
13
|
+
/**
 * Creates a ZIP archive from a collection of files (async variant; compression
 * is awaited per entry, entries are still written in lexicographic order so
 * output is deterministic).
 *
 * @param {Object.<string, Buffer|string>} files - An object with file paths as keys and either Buffer or string content as values.
 * @returns {Promise<Buffer>} - The ZIP archive as a Buffer.
 * @throws {Error} - If a filename contains ".." (path traversal guard).
 */
async function create(files) {
    const fileEntries = [];
    const centralDirectory = [];
    let offset = 0;
    for (const [filename, rawContent] of Object.entries(files).sort(([a], [b]) => a.localeCompare(b))) {
        // Reject path traversal attempts in archive member names.
        if (filename.includes("..")) {
            // Fix: the message previously contained the literal placeholder
            // "$(unknown)" instead of interpolating the offending filename.
            throw new Error(`Invalid filename: ${filename}`);
        }
        const content = node_buffer_1.Buffer.isBuffer(rawContent) ? rawContent : node_buffer_1.Buffer.from(rawContent);
        const fileNameBuf = node_buffer_1.Buffer.from(filename, "utf8");
        const modTime = (0, utils_js_1.dosTime)(new Date());
        const crc = (0, utils_js_1.crc32)(content);
        const compressed = await deflateRaw(content);
        const compSize = compressed.length;
        const uncompSize = content.length;
        // Local file header
        const localHeader = node_buffer_1.Buffer.concat([
            constants_js_1.LOCAL_FILE_HEADER_SIG,
            (0, utils_js_1.toBytes)(20, 2), // Version needed to extract
            (0, utils_js_1.toBytes)(0, 2), // General purpose flags
            (0, utils_js_1.toBytes)(8, 2), // Compression method: deflate
            modTime,
            (0, utils_js_1.toBytes)(crc, 4),
            (0, utils_js_1.toBytes)(compSize, 4),
            (0, utils_js_1.toBytes)(uncompSize, 4),
            (0, utils_js_1.toBytes)(fileNameBuf.length, 2),
            (0, utils_js_1.toBytes)(0, 2), // Extra field length
        ]);
        const localEntry = node_buffer_1.Buffer.concat([
            localHeader,
            fileNameBuf,
            compressed,
        ]);
        fileEntries.push(localEntry);
        // Matching central directory record for this entry.
        const centralEntry = node_buffer_1.Buffer.concat([
            node_buffer_1.Buffer.from(constants_js_1.CENTRAL_DIR_HEADER_SIG),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(20, 2)), // Version made by
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(20, 2)), // Version needed
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Flags
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(8, 2)), // Compression
            node_buffer_1.Buffer.from(modTime),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(crc, 4)),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(compSize, 4)),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(uncompSize, 4)),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(fileNameBuf.length, 2)),
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Extra field length
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Comment length
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Disk start
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Internal attrs
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 4)), // External attrs
            node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(offset, 4)), // Local header offset
            fileNameBuf,
        ]);
        centralDirectory.push(centralEntry);
        offset += localEntry.length;
    }
    const centralDirSize = centralDirectory.reduce((sum, entry) => sum + entry.length, 0);
    const centralDirOffset = offset;
    // End of central directory record closes the archive.
    const endRecord = node_buffer_1.Buffer.concat([
        node_buffer_1.Buffer.from(constants_js_1.END_OF_CENTRAL_DIR_SIG),
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Disk #
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Start disk #
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(centralDirectory.length, 2)), // Entries on this disk
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(centralDirectory.length, 2)), // Total entries
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(centralDirSize, 4)),
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(centralDirOffset, 4)),
        node_buffer_1.Buffer.from((0, utils_js_1.toBytes)(0, 2)), // Comment length
    ]);
    return node_buffer_1.Buffer.concat(fileEntries.concat(centralDirectory).concat([endRecord]));
}
exports.create = create;
|
@@ -0,0 +1,20 @@
|
|
1
|
+
"use strict";
// TypeScript-emitted CommonJS interop helper: copies one named export from
// module `m` onto target `o`, preferring a live getter binding when the
// runtime supports property descriptors.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        // Fall back to a plain getter when no usable descriptor exists.
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Re-exports every named export (everything except "default") from `m`,
// skipping names the target already defines.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Barrel module: surface both sync and async ZIP create/read helpers.
__exportStar(require("./create-sync.js"), exports);
__exportStar(require("./create.js"), exports);
__exportStar(require("./read-sync.js"), exports);
__exportStar(require("./read.js"), exports);
|
@@ -0,0 +1,57 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.readSync = void 0;
|
4
|
+
const node_zlib_1 = require("node:zlib");
|
5
|
+
/**
 * Parses a ZIP archive from a buffer and extracts the files within.
 *
 * Walks the archive entry by entry starting from the first local file header.
 * The end of each entry's data is located by scanning forward for the next
 * local header signature (or the end of the buffer).
 *
 * @param {Buffer} buffer - The buffer containing the ZIP archive data.
 * @returns {Object.<string, string>} - An object where keys are file names and values are file contents.
 * @throws {Error} - Throws an error if an unsupported compression method is encountered or if decompression fails.
 */
function readSync(buffer) {
    const LOCAL_SIG = 0x04034b50;
    const result = {};
    let cursor = 0;
    while (cursor + 4 <= buffer.length) {
        // Stop as soon as we are no longer positioned on a local file header.
        if (buffer.readUInt32LE(cursor) !== LOCAL_SIG) {
            break;
        }
        // Fixed-offset fields of the 30-byte local file header.
        const method = buffer.readUInt16LE(cursor + 8);
        const nameLen = buffer.readUInt16LE(cursor + 26);
        const extraLen = buffer.readUInt16LE(cursor + 28);
        const nameStart = cursor + 30;
        const name = buffer.subarray(nameStart, nameStart + nameLen).toString();
        const dataStart = nameStart + nameLen + extraLen;
        // Scan forward byte-by-byte until the next local header signature.
        let dataEnd = dataStart;
        while (dataEnd + 4 <= buffer.length && buffer.readUInt32LE(dataEnd) !== LOCAL_SIG) {
            dataEnd += 1;
        }
        // No further header found: the entry's data runs to the end of buffer.
        if (dataEnd + 4 > buffer.length) {
            dataEnd = buffer.length;
        }
        const payload = buffer.subarray(dataStart, dataEnd);
        let text = "";
        try {
            switch (method) {
                case 0: // Stored (no compression)
                    text = payload.toString();
                    break;
                case 8: // Deflate
                    text = (0, node_zlib_1.inflateRawSync)(new Uint8Array(payload)).toString();
                    break;
                default:
                    throw new Error(`Unsupported compression method ${method}`);
            }
        }
        catch (error) {
            const message = error instanceof Error ? error.message : "Unknown error";
            throw new Error(`Error unpacking file ${name}: ${message}`);
        }
        result[name] = text;
        cursor = dataEnd;
    }
    return result;
}
|
57
|
+
exports.readSync = readSync;
|
@@ -0,0 +1,62 @@
|
|
1
|
+
"use strict";
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
4
|
+
};
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
6
|
+
exports.read = void 0;
|
7
|
+
const node_util_1 = __importDefault(require("node:util"));
|
8
|
+
const node_zlib_1 = __importDefault(require("node:zlib"));
|
9
|
+
const inflateRaw = node_util_1.default.promisify(node_zlib_1.default.inflateRaw);
|
10
|
+
/**
 * Parses a ZIP archive from a buffer and extracts the files within (async).
 *
 * Walks the archive entry by entry starting from the first local file header.
 * The end of each entry's data is located by scanning forward for the next
 * local header signature (or the end of the buffer).
 *
 * @param {Buffer} buffer - The buffer containing the ZIP archive data.
 * @returns {Promise<Object.<string, string>>} - An object where keys are file names and values are file contents.
 * @throws {Error} - Throws an error if an unsupported compression method is encountered or if decompression fails.
 */
async function read(buffer) {
    const LOCAL_SIG = 0x04034b50;
    const result = {};
    let cursor = 0;
    while (cursor + 4 <= buffer.length) {
        // Stop as soon as we are no longer positioned on a local file header.
        if (buffer.readUInt32LE(cursor) !== LOCAL_SIG) {
            break;
        }
        // Fixed-offset fields of the 30-byte local file header.
        const method = buffer.readUInt16LE(cursor + 8);
        const nameLen = buffer.readUInt16LE(cursor + 26);
        const extraLen = buffer.readUInt16LE(cursor + 28);
        const nameStart = cursor + 30;
        const name = buffer.subarray(nameStart, nameStart + nameLen).toString();
        const dataStart = nameStart + nameLen + extraLen;
        // Scan forward byte-by-byte until the next local header signature.
        let dataEnd = dataStart;
        while (dataEnd + 4 <= buffer.length && buffer.readUInt32LE(dataEnd) !== LOCAL_SIG) {
            dataEnd += 1;
        }
        // No further header found: the entry's data runs to the end of buffer.
        if (dataEnd + 4 > buffer.length) {
            dataEnd = buffer.length;
        }
        const payload = buffer.subarray(dataStart, dataEnd);
        let text = "";
        try {
            switch (method) {
                case 0: // Stored (no compression)
                    text = payload.toString();
                    break;
                case 8: // Deflate
                    text = (await inflateRaw(new Uint8Array(payload))).toString();
                    break;
                default:
                    throw new Error(`Unsupported compression method ${method}`);
            }
        }
        catch (error) {
            const message = error instanceof Error ? error.message : "Unknown error";
            throw new Error(`Error unpacking file ${name}: ${message}`);
        }
        result[name] = text;
        cursor = dataEnd;
    }
    return result;
}
|
62
|
+
exports.read = read;
|
@@ -0,0 +1,158 @@
|
|
1
|
+
"use strict";
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
+
exports.toBytes = exports.dosTime = exports.crc32 = void 0;
|
4
|
+
const node_buffer_1 = require("node:buffer");
|
5
|
+
/**
 * Precomputed 256-entry CRC-32 lookup table (IEEE 802.3 polynomial,
 * reflected form 0xEDB88320). Built once at module load and cached,
 * following the standard table-driven CRC implementation pattern.
 */
const crcTable = (() => {
    // One 32-bit unsigned slot per possible byte value.
    const table = new Uint32Array(256);
    for (let byteValue = 0; byteValue < 256; byteValue++) {
        let entry = byteValue;
        // Eight rounds of the reflected CRC division step:
        // shift right, XOR-ing in the polynomial whenever the LSB is set.
        for (let round = 0; round < 8; round++) {
            entry = entry & 1
                ? 0xedb88320 ^ (entry >>> 1)
                : entry >>> 1;
        }
        // >>> 0 forces an unsigned 32-bit representation.
        table[byteValue] = entry >>> 0;
    }
    return table;
})();
/**
 * Computes the CRC-32 checksum of a Buffer (IEEE 802.3 / zlib variant):
 * polynomial 0xEDB88320, initial value 0xFFFFFFFF, final XOR 0xFFFFFFFF,
 * reflected input and output. Table-driven for speed.
 *
 * @param {Buffer} buf - Input bytes to checksum.
 * @returns {number} Unsigned 32-bit CRC-32 value (0x00000000–0xFFFFFFFF).
 */
function crc32(buf) {
    // Start with all bits set (same bit pattern as ~0 under 32-bit ops).
    let crc = 0xffffffff;
    for (const byte of buf) {
        // Classic update: fold the next byte into the low 8 bits, use the
        // result as a table index, and combine with the shifted remainder.
        crc = (crc >>> 8) ^ crcTable[(crc ^ byte) & 0xff];
    }
    // Invert (final XOR with 0xFFFFFFFF) and return as unsigned.
    return (crc ^ 0xffffffff) >>> 0;
}
|
78
|
+
exports.crc32 = crc32;
|
79
|
+
/**
 * Encodes a JavaScript Date as the 4-byte MS-DOS date/time value used by
 * the ZIP format (PKZIP APPNOTE.TXT), little-endian, time word first.
 *
 * Bit layout of the time word (bytes 0-1):
 * - bits 11-15: hours (0-23)
 * - bits 05-10: minutes (0-59)
 * - bits 00-04: seconds / 2 (0-29, i.e. 2-second resolution)
 *
 * Bit layout of the date word (bytes 2-3):
 * - bits 09-15: years since 1980 (0-127, i.e. 1980-2107)
 * - bits 05-08: month (1-12)
 * - bits 00-04: day of month (1-31)
 *
 * Dates outside 1980-2107 are not validated; out-of-range year bits are
 * silently truncated to 16 bits, matching the packed-field width.
 *
 * @param {Date} date - Local date/time to encode.
 * @returns {Buffer} 4-byte Buffer: DOS time (LE) followed by DOS date (LE).
 */
function dosTime(date) {
    // Pack hours/minutes/half-seconds into a 16-bit DOS time value.
    const timeField = (date.getHours() << 11)
        | (date.getMinutes() << 5)
        | Math.floor(date.getSeconds() / 2);
    // Pack year-offset/month/day into a 16-bit DOS date value.
    const dateField = ((date.getFullYear() - 1980) << 9)
        | ((date.getMonth() + 1) << 5)
        | date.getDate();
    // Emit both words little-endian; & 0xffff keeps only the low 16 bits,
    // preserving the historical truncation behavior for out-of-range years.
    const out = node_buffer_1.Buffer.alloc(4);
    out.writeUInt16LE(timeField & 0xffff, 0);
    out.writeUInt16LE(dateField & 0xffff, 2);
    return out;
}
|
124
|
+
exports.dosTime = dosTime;
|
125
|
+
/**
 * Converts an integer into a fixed-length Buffer in little-endian byte
 * order, zero-padding any unused high-order bytes. Useful for fixed-width
 * numeric fields in binary formats such as ZIP headers.
 *
 * Uses arithmetic (`% 256` / `Math.floor(value / 256)`) rather than 32-bit
 * bitwise operators so that integers up to Number.MAX_SAFE_INTEGER (2^53-1)
 * serialize correctly; the previous `value & 0xff` / `value >>= 8`
 * implementation silently wrapped at 2^32 while the documentation promised
 * 53-bit support. For all values below 2^32 (including negative int32
 * inputs, which two's-complement-wrap per byte) the output is byte-for-byte
 * identical to the old behavior.
 *
 * @param {number} value - Integer value to serialize (fractional parts are
 *   effectively floored by the division loop).
 * @param {number} len - Exact length of the output Buffer in bytes.
 *   Must be a non-negative integer.
 * @returns {Buffer} A Buffer of exactly `len` bytes: the value's bytes in
 *   little-endian order, zero-padded at the high end.
 *   Bits beyond `len` bytes are silently truncated (not checked/thrown).
 */
function toBytes(value, len) {
    // Buffer.alloc zero-fills, providing the high-order padding for free.
    const buf = node_buffer_1.Buffer.alloc(len);
    for (let i = 0; i < len; i++) {
        // Least significant byte first. Assignment into the Buffer coerces
        // modulo 256, which also handles negative remainders consistently
        // with the old bitwise implementation.
        buf[i] = value % 256;
        // Move to the next byte without 32-bit truncation.
        value = Math.floor(value / 256);
    }
    return buf;
}
|
158
|
+
exports.toBytes = toBytes;
|