@js-ak/excel-toolbox 1.3.1 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/cjs/lib/template/template-fs.js +678 -197
- package/build/cjs/lib/template/utils/apply-replacements.js +26 -0
- package/build/cjs/lib/template/utils/check-row.js +6 -11
- package/build/cjs/lib/template/utils/column-index-to-letter.js +14 -3
- package/build/cjs/lib/template/utils/extract-xml-declaration.js +22 -0
- package/build/cjs/lib/template/utils/get-by-path.js +18 -0
- package/build/cjs/lib/template/utils/index.js +5 -0
- package/build/cjs/lib/template/utils/update-dimension.js +40 -0
- package/build/cjs/lib/template/utils/validate-worksheet-xml.js +217 -0
- package/build/cjs/lib/zip/create-with-stream.js +67 -107
- package/build/esm/lib/template/template-fs.js +678 -197
- package/build/esm/lib/template/utils/apply-replacements.js +22 -0
- package/build/esm/lib/template/utils/check-row.js +6 -11
- package/build/esm/lib/template/utils/column-index-to-letter.js +14 -3
- package/build/esm/lib/template/utils/extract-xml-declaration.js +19 -0
- package/build/esm/lib/template/utils/get-by-path.js +15 -0
- package/build/esm/lib/template/utils/index.js +5 -0
- package/build/esm/lib/template/utils/update-dimension.js +37 -0
- package/build/esm/lib/template/utils/validate-worksheet-xml.js +214 -0
- package/build/esm/lib/zip/create-with-stream.js +68 -108
- package/build/types/lib/template/template-fs.d.ts +24 -0
- package/build/types/lib/template/utils/apply-replacements.d.ts +13 -0
- package/build/types/lib/template/utils/check-row.d.ts +5 -10
- package/build/types/lib/template/utils/column-index-to-letter.d.ts +11 -3
- package/build/types/lib/template/utils/extract-xml-declaration.d.ts +14 -0
- package/build/types/lib/template/utils/get-by-path.d.ts +8 -0
- package/build/types/lib/template/utils/index.d.ts +5 -0
- package/build/types/lib/template/utils/update-dimension.d.ts +1 -0
- package/build/types/lib/template/utils/validate-worksheet-xml.d.ts +9 -0
- package/package.json +6 -4
package/build/cjs/lib/template/utils/apply-replacements.js
@@ -0,0 +1,26 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.applyReplacements = void 0;
+const get_by_path_js_1 = require("./get-by-path.js");
+/**
+ * Replaces placeholders in the given content string with values from the replacements map.
+ *
+ * The function searches for placeholders in the format `${key}` within the content
+ * string, where `key` corresponds to a path in the replacements object.
+ * If a value is found for the key, it replaces the placeholder with the value.
+ * If no value is found, the original placeholder remains unchanged.
+ *
+ * @param content - The string containing placeholders to be replaced.
+ * @param replacements - An object where keys represent placeholder paths and values are the replacements.
+ * @returns A new string with placeholders replaced by corresponding values from the replacements object.
+ */
+const applyReplacements = (content, replacements) => {
+    if (!content) {
+        return "";
+    }
+    return content.replace(/\$\{([^}]+)\}/g, (match, path) => {
+        const value = (0, get_by_path_js_1.getByPath)(replacements, path);
+        return value !== undefined ? String(value) : match;
+    });
+};
+exports.applyReplacements = applyReplacements;
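For reference (not part of the published diff): a minimal usage sketch of the new applyReplacements helper, assuming deep requires into the CJS build output are permitted by the package exports.

    // Hypothetical deep require into the CJS build; adjust to the package's actual entry point.
    const { applyReplacements } = require("@js-ak/excel-toolbox/build/cjs/lib/template/utils/index.js");

    const out = applyReplacements(
        "Hello, ${user.name}! Total: ${order.total}",
        { user: { name: "Alice" }, order: { total: 42 } },
    );
    // out === "Hello, Alice! Total: 42"

    // Unresolved paths are left untouched:
    applyReplacements("Missing: ${user.age}", { user: {} }); // "Missing: ${user.age}"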
package/build/cjs/lib/template/utils/check-row.js
@@ -2,21 +2,16 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.checkRow = checkRow;
 /**
- * Validates
+ * Validates an object representing a single row of data to ensure that its keys
+ * are valid Excel column references. Throws an error if any of the keys are
+ * invalid.
  *
- *
- *
- * not match this pattern, an error is thrown with a message indicating the
- * invalid cell reference.
- *
- * @param row - An object representing a row of data, where keys are cell
- * references and values are strings.
- *
- * @throws {Error} If any key in the row is not a valid column letter.
+ * @param row An object with string keys that represent the cell references and
+ * string values that represent the values of those cells.
  */
 function checkRow(row) {
     for (const key of Object.keys(row)) {
-        if (!/^[A-Z]+$/i.test(key)) {
+        if (!/^[A-Z]+$/i.test(key) || !/^[A-Z]$|^[A-Z][A-Z]$|^[A-Z][A-Z][A-Z]$/i.test(key)) {
             throw new Error(`Invalid cell reference "${key}" in row. Only column letters (like "A", "B", "C") are allowed.`);
         }
     }
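Illustrative only (same assumed require path as above): how the tightened key check behaves.

    const { checkRow } = require("@js-ak/excel-toolbox/build/cjs/lib/template/utils/index.js");

    checkRow({ A: "1", BC: "2" });  // ok: keys are 1-3 column letters
    checkRow({ A1: "x" });          // throws: digits are not valid in a column key
    checkRow({ ABCD: "x" });        // throws: the added check also rejects more than three letters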
package/build/cjs/lib/template/utils/column-index-to-letter.js
@@ -2,12 +2,23 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.columnIndexToLetter = columnIndexToLetter;
 /**
- * Converts a
+ * Converts a zero-based column index to its corresponding Excel column letter.
  *
- * @
- * @
+ * @throws Will throw an error if the input is not a positive integer.
+ * @param {number} index - The zero-based index of the column to convert.
+ * @returns {string} The corresponding Excel column letter.
+ *
+ * @example
+ * columnIndexToLetter(0); // returns "A"
+ * columnIndexToLetter(25); // returns "Z"
+ * columnIndexToLetter(26); // returns "AA"
+ * columnIndexToLetter(51); // returns "AZ"
+ * columnIndexToLetter(52); // returns "BA"
  */
 function columnIndexToLetter(index) {
+    if (!Number.isInteger(index) || index < 0) {
+        throw new Error(`Invalid column index: ${index}. Must be a positive integer.`);
+    }
     let letters = "";
     while (index >= 0) {
         letters = String.fromCharCode((index % 26) + 65) + letters;
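Illustrative only: what the new input guard adds on top of the documented @example cases.

    columnIndexToLetter(26);   // "AA"
    columnIndexToLetter(-1);   // now throws: Invalid column index: -1. Must be a positive integer.
    columnIndexToLetter(2.5);  // now throws: non-integer input is rejected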
package/build/cjs/lib/template/utils/extract-xml-declaration.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractXmlDeclaration = extractXmlDeclaration;
+/**
+ * Extracts the XML declaration from a given XML string.
+ *
+ * The XML declaration is a string that looks like `<?xml ...?>` and is usually
+ * present at the beginning of an XML file. It contains information about the
+ * XML version, encoding, and standalone status.
+ *
+ * This function returns `null` if the input string does not have a valid XML
+ * declaration.
+ *
+ * @param xmlString - The XML string to extract the declaration from.
+ * @returns The extracted XML declaration string, or `null`.
+ */
+function extractXmlDeclaration(xmlString) {
+    // const declarationRegex = /^<\?xml\s+[^?]+\?>/;
+    const declarationRegex = /^<\?xml\s+version\s*=\s*["'][^"']+["'](\s+(encoding|standalone)\s*=\s*["'][^"']+["'])*\s*\?>/;
+    const match = xmlString.match(declarationRegex);
+    return match ? match[0] : null;
+}
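For reference: expected behaviour of extractXmlDeclaration, based on the regex above.

    extractXmlDeclaration('<?xml version="1.0" encoding="UTF-8" standalone="yes"?><worksheet/>');
    // -> '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'

    extractXmlDeclaration("<worksheet/>");
    // -> null (no declaration at the start of the string)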
package/build/cjs/lib/template/utils/get-by-path.js
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getByPath = getByPath;
+/**
+ * Gets a value from an object by a given path.
+ *
+ * @param obj - The object to search.
+ * @param path - The path to the value, separated by dots.
+ * @returns The value at the given path, or undefined if not found.
+ */
+function getByPath(obj, path) {
+    return path.split(".").reduce((acc, key) => {
+        if (acc && typeof acc === "object" && key in acc) {
+            return acc[key];
+        }
+        return undefined;
+    }, obj);
+}
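For reference: how getByPath resolves dot-separated paths.

    getByPath({ user: { name: "Alice" } }, "user.name");  // "Alice"
    getByPath({ user: { name: "Alice" } }, "user.age");   // undefined
    getByPath({ user: null }, "user.name");               // undefined (lookup stops at null)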
package/build/cjs/lib/template/utils/index.js
@@ -14,14 +14,19 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+__exportStar(require("./apply-replacements.js"), exports);
 __exportStar(require("./check-row.js"), exports);
 __exportStar(require("./check-rows.js"), exports);
 __exportStar(require("./check-start-row.js"), exports);
 __exportStar(require("./column-index-to-letter.js"), exports);
 __exportStar(require("./escape-xml.js"), exports);
+__exportStar(require("./extract-xml-declaration.js"), exports);
+__exportStar(require("./get-by-path.js"), exports);
 __exportStar(require("./get-max-row-number.js"), exports);
 __exportStar(require("./get-rows-above.js"), exports);
 __exportStar(require("./get-rows-below.js"), exports);
 __exportStar(require("./parse-rows.js"), exports);
 __exportStar(require("./to-excel-column-object.js"), exports);
+__exportStar(require("./update-dimension.js"), exports);
+__exportStar(require("./validate-worksheet-xml.js"), exports);
 __exportStar(require("./write-rows-to-stream.js"), exports);
package/build/cjs/lib/template/utils/update-dimension.js
@@ -0,0 +1,40 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.updateDimension = updateDimension;
+function updateDimension(xml) {
+    const cellRefs = [...xml.matchAll(/<c r="([A-Z]+)(\d+)"/g)];
+    if (cellRefs.length === 0)
+        return xml;
+    let minCol = Infinity, maxCol = -Infinity;
+    let minRow = Infinity, maxRow = -Infinity;
+    for (const [, colStr, rowStr] of cellRefs) {
+        const col = columnLetterToNumber(colStr);
+        const row = parseInt(rowStr, 10);
+        if (col < minCol)
+            minCol = col;
+        if (col > maxCol)
+            maxCol = col;
+        if (row < minRow)
+            minRow = row;
+        if (row > maxRow)
+            maxRow = row;
+    }
+    const newRef = `${columnNumberToLetter(minCol)}${minRow}:${columnNumberToLetter(maxCol)}${maxRow}`;
+    return xml.replace(/<dimension ref="[^"]*"/, `<dimension ref="${newRef}"`);
+}
+function columnLetterToNumber(letters) {
+    let num = 0;
+    for (let i = 0; i < letters.length; i++) {
+        num = num * 26 + (letters.charCodeAt(i) - 64);
+    }
+    return num;
+}
+function columnNumberToLetter(num) {
+    let letters = "";
+    while (num > 0) {
+        const rem = (num - 1) % 26;
+        letters = String.fromCharCode(65 + rem) + letters;
+        num = Math.floor((num - 1) / 26);
+    }
+    return letters;
+}
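For reference: how updateDimension recalculates the dimension element from the cells that are actually present (inputs below are illustrative).

    const xml = '<worksheet><dimension ref="A1:A1"/><sheetData>'
        + '<row r="1"><c r="A1"/><c r="C1"/></row>'
        + '<row r="5"><c r="B5"/></row>'
        + '</sheetData></worksheet>';

    updateDimension(xml);
    // The dimension is rewritten from the min/max cell references found:
    // <dimension ref="A1:C5"/>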
package/build/cjs/lib/template/utils/validate-worksheet-xml.js
@@ -0,0 +1,217 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.validateWorksheetXml = validateWorksheetXml;
+function validateWorksheetXml(xml) {
+    const createError = (message, details) => ({
+        error: {
+            details,
+            message,
+        },
+        isValid: false,
+    });
+    // 1. Check the basic XML structure
+    if (!xml.startsWith("<?xml")) {
+        return createError("XML must start with the <?xml> declaration");
+    }
+    if (!xml.includes("<worksheet") || !xml.includes("</worksheet>")) {
+        return createError("Root worksheet element not found");
+    }
+    // 2. Check that required elements are present
+    const requiredElements = [
+        { name: "sheetViews", tag: "<sheetViews>" },
+        { name: "sheetFormatPr", tag: "<sheetFormatPr" },
+        { name: "cols", tag: "<cols>" },
+        { name: "sheetData", tag: "<sheetData>" },
+        { name: "mergeCells", tag: "<mergeCells" },
+    ];
+    for (const { name, tag } of requiredElements) {
+        if (!xml.includes(tag)) {
+            return createError(`Missing required element ${name}`);
+        }
+    }
+    // 3. Extract and check sheetData
+    const sheetDataStart = xml.indexOf("<sheetData>");
+    const sheetDataEnd = xml.indexOf("</sheetData>");
+    if (sheetDataStart === -1 || sheetDataEnd === -1) {
+        return createError("Invalid sheetData structure");
+    }
+    const sheetDataContent = xml.substring(sheetDataStart + 10, sheetDataEnd);
+    const rows = sheetDataContent.split("</row>");
+    if (rows.length < 2) {
+        return createError("sheetData must contain at least one row");
+    }
+    // Collect information about all rows and cells
+    const allRows = [];
+    const allCells = [];
+    let prevRowNum = 0;
+    for (const row of rows.slice(0, -1)) {
+        if (!row.includes("<row ")) {
+            return createError("row tag not found", `Fragment: ${row.substring(0, 50)}...`);
+        }
+        if (!row.includes("<c ")) {
+            return createError("Row contains no cells", `Row: ${row.substring(0, 50)}...`);
+        }
+        // Extract the row number
+        const rowNumMatch = row.match(/<row\s+r="(\d+)"/);
+        if (!rowNumMatch) {
+            return createError("Row number (attribute r) is missing", `Row: ${row.substring(0, 50)}...`);
+        }
+        const rowNum = parseInt(rowNumMatch[1]);
+        // Check that row numbers are unique
+        if (allRows.includes(rowNum)) {
+            return createError("Duplicate row number found", `Row number: ${rowNum}`);
+        }
+        allRows.push(rowNum);
+        // Check the row order (must be ascending)
+        if (rowNum <= prevRowNum) {
+            return createError("Rows are out of order", `Current row: ${rowNum}, previous: ${prevRowNum}`);
+        }
+        prevRowNum = rowNum;
+        // Extract all cells in the row
+        const cells = row.match(/<c\s+r="([A-Z]+)(\d+)"/g) || [];
+        for (const cell of cells) {
+            const match = cell.match(/<c\s+r="([A-Z]+)(\d+)"/);
+            if (!match) {
+                return createError("Invalid cell format", `Cell: ${cell}`);
+            }
+            const col = match[1];
+            const cellRowNum = parseInt(match[2]);
+            // Check that the cell's row number matches its row
+            if (cellRowNum !== rowNum) {
+                return createError("Cell row number mismatch", `Expected: ${rowNum}, found: ${cellRowNum} in cell ${col}${cellRowNum}`);
+            }
+            allCells.push({
+                col,
+                row: rowNum,
+            });
+        }
+    }
+    // 4. Check mergeCells
+    const mergeCellsStart = xml.indexOf("<mergeCells");
+    const mergeCellsEnd = xml.indexOf("</mergeCells>");
+    if (mergeCellsStart === -1 || mergeCellsEnd === -1) {
+        return createError("Invalid mergeCells structure");
+    }
+    const mergeCellsContent = xml.substring(mergeCellsStart, mergeCellsEnd);
+    const countMatch = mergeCellsContent.match(/count="(\d+)"/);
+    if (!countMatch) {
+        return createError("Merged cell count (attribute count) is missing");
+    }
+    const mergeCellTags = mergeCellsContent.match(/<mergeCell\s+ref="([A-Z]+\d+:[A-Z]+\d+)"\s*\/>/g);
+    if (!mergeCellTags) {
+        return createError("No merged cells found");
+    }
+    // Check that the declared count matches the actual number of entries
+    if (mergeCellTags.length !== parseInt(countMatch[1])) {
+        return createError("Merged cell count mismatch", `Expected: ${countMatch[1]}, found: ${mergeCellTags.length}`);
+    }
+    // Check for duplicate mergeCell entries
+    const mergeRefs = new Set();
+    const duplicates = new Set();
+    for (const mergeTag of mergeCellTags) {
+        const refMatch = mergeTag.match(/ref="([A-Z]+\d+:[A-Z]+\d+)"/);
+        if (!refMatch) {
+            return createError("Invalid merged cell format", `Tag: ${mergeTag}`);
+        }
+        const ref = refMatch[1];
+        if (mergeRefs.has(ref)) {
+            duplicates.add(ref);
+        }
+        else {
+            mergeRefs.add(ref);
+        }
+    }
+    if (duplicates.size > 0) {
+        return createError("Duplicate merged cell ranges found", `Duplicates: ${Array.from(duplicates).join(", ")}`);
+    }
+    // Check for overlapping merge ranges
+    const mergedRanges = Array.from(mergeRefs).map(ref => {
+        const [start, end] = ref.split(":");
+        return {
+            endCol: end.match(/[A-Z]+/)?.[0] || "",
+            endRow: parseInt(end.match(/\d+/)?.[0] || "0"),
+            startCol: start.match(/[A-Z]+/)?.[0] || "",
+            startRow: parseInt(start.match(/\d+/)?.[0] || "0"),
+        };
+    });
+    for (let i = 0; i < mergedRanges.length; i++) {
+        for (let j = i + 1; j < mergedRanges.length; j++) {
+            const a = mergedRanges[i];
+            const b = mergedRanges[j];
+            if (rangesIntersect(a, b)) {
+                return createError("Overlapping merged cell ranges found", `Overlapping: ${getRangeString(a)} and ${getRangeString(b)}`);
+            }
+        }
+    }
+    // 5. Check dimension and that it matches the actual data
+    const dimensionMatch = xml.match(/<dimension\s+ref="([A-Z]+\d+:[A-Z]+\d+)"\s*\/>/);
+    if (!dimensionMatch) {
+        return createError("Data range (dimension) is missing");
+    }
+    const [startCell, endCell] = dimensionMatch[1].split(":");
+    const startCol = startCell.match(/[A-Z]+/)?.[0];
+    const startRow = parseInt(startCell.match(/\d+/)?.[0] || "0");
+    const endCol = endCell.match(/[A-Z]+/)?.[0];
+    const endRow = parseInt(endCell.match(/\d+/)?.[0] || "0");
+    if (!startCol || !endCol || isNaN(startRow) || isNaN(endRow)) {
+        return createError("Invalid dimension format", `Dimension: ${dimensionMatch[1]}`);
+    }
+    const startColNum = colToNumber(startCol);
+    const endColNum = colToNumber(endCol);
+    // Check that every cell falls within the dimension
+    for (const cell of allCells) {
+        const colNum = colToNumber(cell.col);
+        if (cell.row < startRow || cell.row > endRow) {
+            return createError("Cell is outside the declared range (by row)", `Cell: ${cell.col}${cell.row}, dimension: ${dimensionMatch[1]}`);
+        }
+        if (colNum < startColNum || colNum > endColNum) {
+            return createError("Cell is outside the declared range (by column)", `Cell: ${cell.col}${cell.row}, dimension: ${dimensionMatch[1]}`);
+        }
+    }
+    // 6. Additional check: every mergeCell references existing cells
+    for (const mergeTag of mergeCellTags) {
+        const refMatch = mergeTag.match(/ref="([A-Z]+\d+:[A-Z]+\d+)"/);
+        if (!refMatch) {
+            return createError("Invalid merged cell format", `Tag: ${mergeTag}`);
+        }
+        const [cell1, cell2] = refMatch[1].split(":");
+        const cell1Col = cell1.match(/[A-Z]+/)?.[0];
+        const cell1Row = parseInt(cell1.match(/\d+/)?.[0] || "0");
+        const cell2Col = cell2.match(/[A-Z]+/)?.[0];
+        const cell2Row = parseInt(cell2.match(/\d+/)?.[0] || "0");
+        if (!cell1Col || !cell2Col || isNaN(cell1Row) || isNaN(cell2Row)) {
+            return createError("Invalid merged cell coordinates", `Merge range: ${refMatch[1]}`);
+        }
+        // Check that the merged corner cells actually exist
+        const cell1Exists = allCells.some(c => c.row === cell1Row && c.col === cell1Col);
+        const cell2Exists = allCells.some(c => c.row === cell2Row && c.col === cell2Col);
+        if (!cell1Exists || !cell2Exists) {
+            return createError("Merge range references non-existent cells", `Merge range: ${refMatch[1]}, missing: ${!cell1Exists ? `${cell1Col}${cell1Row}` : `${cell2Col}${cell2Row}`}`);
+        }
+    }
+    return { isValid: true };
+}
+// Helper functions for the overlap check
+function rangesIntersect(a, b) {
+    const aStartColNum = colToNumber(a.startCol);
+    const aEndColNum = colToNumber(a.endCol);
+    const bStartColNum = colToNumber(b.startCol);
+    const bEndColNum = colToNumber(b.endCol);
+    // Check overlap by rows
+    const rowsIntersect = !(a.endRow < b.startRow || a.startRow > b.endRow);
+    // Check overlap by columns
+    const colsIntersect = !(aEndColNum < bStartColNum || aStartColNum > bEndColNum);
+    return rowsIntersect && colsIntersect;
+}
+function getRangeString(range) {
+    return `${range.startCol}${range.startRow}:${range.endCol}${range.endRow}`;
+}
+// Converts column letters to a column number
+function colToNumber(col) {
+    let num = 0;
+    for (let i = 0; i < col.length; i++) {
+        num = num * 26 + (col.charCodeAt(i) - 64);
+    }
+    return num;
+}
+;
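For reference: the result shape returned by validateWorksheetXml, sketched (sheetXml below is a placeholder for a worksheet XML string).

    const result = validateWorksheetXml(sheetXml);
    if (result.isValid) {
        // worksheet passed all structural checks
    } else {
        // result.error.message: short description of the first failed check
        // result.error.details: optional context (offending fragment, row number, range, ...)
        console.error(result.error.message, result.error.details);
    }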
package/build/cjs/lib/zip/create-with-stream.js
@@ -56,20 +56,32 @@ const constants_js_1 = require("./constants.js");
  * @throws {Error} - If the writable stream emits an error.
  */
 async function createWithStream(fileKeys, destination, output) {
+    // Stores central directory records
     const centralDirectory = [];
+    // Tracks the current offset in the output stream
     let offset = 0;
     for (const filename of fileKeys.sort((a, b) => a.localeCompare(b))) {
+        // Prevent directory traversal
         if (filename.includes("..")) {
             throw new Error(`Invalid filename: ${filename}`);
         }
+        // Construct absolute path to the file
         const fullPath = path.join(destination, ...filename.split("/"));
+        // Convert filename to UTF-8 buffer
         const fileNameBuf = Buffer.from(filename, "utf8");
+        // Get modification time in DOS format
         const modTime = (0, index_js_1.dosTime)(new Date());
+        // Read file as stream
         const source = (0, node_fs_1.createReadStream)(fullPath);
+        // Create CRC32 transform stream
         const crc32 = (0, index_js_1.crc32Stream)();
+        // Create raw deflate stream (no zlib headers)
         const deflater = node_zlib_1.default.createDeflateRaw();
+        // Uncompressed size counter
         let uncompSize = 0;
+        // Compressed size counter
         let compSize = 0;
+        // Store compressed output data
         const compressedChunks = [];
         const sizeCounter = new node_stream_1.Transform({
             transform(chunk, _enc, cb) {
@@ -77,135 +89,83 @@ async function createWithStream(fileKeys, destination, output) {
                 cb(null, chunk);
             },
         });
-        const collectCompressed = new node_stream_1.
-
-
-
-
-
+        const collectCompressed = new node_stream_1.PassThrough();
+        collectCompressed.on("data", chunk => {
+            // Count compressed bytes
+            compSize += chunk.length;
+            // Save compressed chunk
+            compressedChunks.push(chunk);
         });
-        //
-        // deflater.on("finish", () => { console.log("deflater finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-        // deflater.on("error", (err) => { console.log("deflater error path:", fullPath, "error:", err); });
-        // deflater.on("close", () => { console.log("deflater closed path:", fullPath); });
-        // deflater.on("pipe", (src) => { console.log("deflater pipe path:", fullPath); });
-        // deflater.on("unpipe", (src) => { console.log("deflater unpipe path:", fullPath); });
-        // deflater.on("drain", () => { console.log("deflater drain path:", fullPath); });
-        // deflater.on("pause", () => { console.log("deflater pause path:", fullPath); });
-        // deflater.on("resume", () => { console.log("deflater resume path:", fullPath); });
-        // deflater.on("end", () => console.log("deflater ended, path:", fullPath));
-        // source.on("data", (chunk) => { console.log("source data path:", fullPath, "length:", chunk.length); });
-        // source.on("finish", () => { console.log("source finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-        // source.on("error", (err) => { console.log("source error path:", fullPath, "error:", err); });
-        // source.on("close", () => { console.log("source closed path:", fullPath); });
-        // source.on("pipe", (src) => { console.log("source pipe path:", fullPath); });
-        // source.on("unpipe", (src) => { console.log("source unpipe path:", fullPath); });
-        // source.on("drain", () => { console.log("source drain path:", fullPath); });
-        // source.on("pause", () => { console.log("source pause path:", fullPath); });
-        // source.on("resume", () => { console.log("source resume path:", fullPath); });
-        // source.on("end", () => console.log("source ended, path:", fullPath));
-        // sizeCounter.on("data", (chunk) => { console.log("sizeCounter data path:", fullPath, "length:", chunk.length); });
-        // sizeCounter.on("finish", () => { console.log("sizeCounter finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-        // sizeCounter.on("error", (err) => { console.log("sizeCounter error path:", fullPath, "error:", err); });
-        // sizeCounter.on("close", () => { console.log("sizeCounter closed path:", fullPath); });
-        // sizeCounter.on("pipe", (src) => { console.log("sizeCounter pipe path:", fullPath); });
-        // sizeCounter.on("unpipe", (src) => { console.log("sizeCounter unpipe path:", fullPath); });
-        // sizeCounter.on("drain", () => { console.log("sizeCounter drain path:", fullPath); });
-        // sizeCounter.on("pause", () => { console.log("sizeCounter pause path:", fullPath); });
-        // sizeCounter.on("resume", () => { console.log("sizeCounter resume path:", fullPath); });
-        // sizeCounter.on("end", () => console.log("sizeCounter ended, path:", fullPath));
-        // crc32.on("data", (chunk) => { console.log("crc32 data path:", fullPath, "length:", chunk.length); });
-        // crc32.on("finish", () => { console.log("crc32 finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-        // crc32.on("error", (err) => { console.log("crc32 error path:", fullPath, "error:", err); });
-        // crc32.on("close", () => { console.log("crc32 closed path:", fullPath); });
-        // crc32.on("pipe", (src) => { console.log("crc32 pipe path:", fullPath); });
-        // crc32.on("unpipe", (src) => { console.log("crc32 unpipe path:", fullPath); });
-        // crc32.on("drain", () => { console.log("crc32 drain path:", fullPath); });
-        // crc32.on("pause", () => { console.log("crc32 pause path:", fullPath); });
-        // crc32.on("resume", () => { console.log("crc32 resume path:", fullPath); });
-        // crc32.on("end", () => console.log("crc32 ended, path:", fullPath));
-        collectCompressed.on("data", ( /* chunk */) => { });
-        // collectCompressed.on("finish", () => { console.log("collectCompressed finished path:", fullPath, "uncompSize:", uncompSize, "compSize:", compSize); });
-        // collectCompressed.on("error", (err) => { console.log("collectCompressed error path:", fullPath, "error:", err); });
-        // collectCompressed.on("close", () => { console.log("collectCompressed closed path:", fullPath); });
-        // collectCompressed.on("pipe", (src) => { console.log("collectCompressed pipe path:", fullPath); });
-        // collectCompressed.on("unpipe", (src) => { console.log("collectCompressed unpipe path:", fullPath); });
-        // collectCompressed.on("drain", () => { console.log("collectCompressed drain path:", fullPath); });
-        // collectCompressed.on("pause", () => { console.log("collectCompressed pause path:", fullPath); });
-        // collectCompressed.on("resume", () => { console.log("collectCompressed resume path:", fullPath); });
-        // collectCompressed.on("end", () => console.log("collectCompressed ended, path:", fullPath));
-        // deflater.on("readable", () => {
-        // console.log("deflater readable path:", fullPath);
-        // });
+        // Run all transforms in pipeline: read -> count size -> CRC -> deflate -> collect compressed
         await (0, promises_1.pipeline)(source, sizeCounter, crc32, deflater, collectCompressed);
-        //
-        // source
-        // .pipe(sizeCounter)
-        // .pipe(crc32)
-        // .pipe(deflater)
-        // .pipe(collectCompressed)
-        // .on("finish", resolve)
-        // .on("error", reject);
-        // source.on("error", reject);
-        // deflater.on("error", reject);
-        // });
+        // Get final CRC32 value
         const crc = crc32.digest();
+        // Concatenate all compressed chunks into a single buffer
         const compressed = Buffer.concat(compressedChunks);
+        // Create local file header followed by compressed content
         const localHeader = Buffer.concat([
-            constants_js_1.LOCAL_FILE_HEADER_SIG,
-            (0, index_js_1.toBytes)(20, 2),
-            (0, index_js_1.toBytes)(0, 2),
-            (0, index_js_1.toBytes)(8, 2),
-            modTime,
-            (0, index_js_1.toBytes)(crc, 4),
-            (0, index_js_1.toBytes)(compSize, 4),
-            (0, index_js_1.toBytes)(uncompSize, 4),
-            (0, index_js_1.toBytes)(fileNameBuf.length, 2),
-            (0, index_js_1.toBytes)(0, 2),
-            fileNameBuf,
-            compressed,
+            constants_js_1.LOCAL_FILE_HEADER_SIG, // Local file header signature
+            (0, index_js_1.toBytes)(20, 2), // Version needed to extract
+            (0, index_js_1.toBytes)(0, 2), // General purpose bit flag
+            (0, index_js_1.toBytes)(8, 2), // Compression method (deflate)
+            modTime, // File modification time and date
+            (0, index_js_1.toBytes)(crc, 4), // CRC-32 checksum
+            (0, index_js_1.toBytes)(compSize, 4), // Compressed size
+            (0, index_js_1.toBytes)(uncompSize, 4), // Uncompressed size
+            (0, index_js_1.toBytes)(fileNameBuf.length, 2), // Filename length
+            (0, index_js_1.toBytes)(0, 2), // Extra field length
+            fileNameBuf, // Filename
+            compressed, // Compressed file data
         ]);
+        // Write local file header and data to output
         await new Promise((resolve, reject) => {
             output.write(localHeader, err => err ? reject(err) : resolve());
         });
+        // Create central directory entry for this file
         const centralEntry = Buffer.concat([
-            constants_js_1.CENTRAL_DIR_HEADER_SIG,
-            (0, index_js_1.toBytes)(20, 2),
-            (0, index_js_1.toBytes)(20, 2),
-            (0, index_js_1.toBytes)(0, 2),
-            (0, index_js_1.toBytes)(8, 2),
-            modTime,
-            (0, index_js_1.toBytes)(crc, 4),
-            (0, index_js_1.toBytes)(compSize, 4),
-            (0, index_js_1.toBytes)(uncompSize, 4),
-            (0, index_js_1.toBytes)(fileNameBuf.length, 2),
-            (0, index_js_1.toBytes)(0, 2),
-            (0, index_js_1.toBytes)(0, 2),
-            (0, index_js_1.toBytes)(0, 2),
-            (0, index_js_1.toBytes)(0, 2),
-            (0, index_js_1.toBytes)(0, 4),
-            (0, index_js_1.toBytes)(offset, 4),
-            fileNameBuf,
+            constants_js_1.CENTRAL_DIR_HEADER_SIG, // Central directory file header signature
+            (0, index_js_1.toBytes)(20, 2), // Version made by
+            (0, index_js_1.toBytes)(20, 2), // Version needed to extract
+            (0, index_js_1.toBytes)(0, 2), // General purpose bit flag
+            (0, index_js_1.toBytes)(8, 2), // Compression method
+            modTime, // File modification time and date
+            (0, index_js_1.toBytes)(crc, 4), // CRC-32 checksum
+            (0, index_js_1.toBytes)(compSize, 4), // Compressed size
+            (0, index_js_1.toBytes)(uncompSize, 4), // Uncompressed size
+            (0, index_js_1.toBytes)(fileNameBuf.length, 2), // Filename length
+            (0, index_js_1.toBytes)(0, 2), // Extra field length
+            (0, index_js_1.toBytes)(0, 2), // File comment length
+            (0, index_js_1.toBytes)(0, 2), // Disk number start
+            (0, index_js_1.toBytes)(0, 2), // Internal file attributes
+            (0, index_js_1.toBytes)(0, 4), // External file attributes
+            (0, index_js_1.toBytes)(offset, 4), // Offset of local header
+            fileNameBuf, // Filename
         ]);
+        // Store for later
         centralDirectory.push(centralEntry);
+        // Update offset after writing this entry
         offset += localHeader.length;
     }
+    // Total size of central directory
     const centralDirSize = centralDirectory.reduce((sum, entry) => sum + entry.length, 0);
+    // Start of central directory
     const centralDirOffset = offset;
+    // Write each central directory entry to output
     for (const entry of centralDirectory) {
         await new Promise((resolve, reject) => {
             output.write(entry, err => err ? reject(err) : resolve());
         });
     }
+    // Create and write end of central directory record
     const endRecord = Buffer.concat([
-        constants_js_1.END_OF_CENTRAL_DIR_SIG,
-        (0, index_js_1.toBytes)(0, 2),
-        (0, index_js_1.toBytes)(0, 2),
-        (0, index_js_1.toBytes)(centralDirectory.length, 2),
-        (0, index_js_1.toBytes)(centralDirectory.length, 2),
-        (0, index_js_1.toBytes)(centralDirSize, 4),
-        (0, index_js_1.toBytes)(centralDirOffset, 4),
-        (0, index_js_1.toBytes)(0, 2),
+        constants_js_1.END_OF_CENTRAL_DIR_SIG, // End of central directory signature
+        (0, index_js_1.toBytes)(0, 2), // Number of this disk
+        (0, index_js_1.toBytes)(0, 2), // Disk with start of central directory
+        (0, index_js_1.toBytes)(centralDirectory.length, 2), // Total entries on this disk
+        (0, index_js_1.toBytes)(centralDirectory.length, 2), // Total entries overall
+        (0, index_js_1.toBytes)(centralDirSize, 4), // Size of central directory
+        (0, index_js_1.toBytes)(centralDirOffset, 4), // Offset of start of central directory
+        (0, index_js_1.toBytes)(0, 2), // ZIP file comment length
     ]);
     await new Promise((resolve, reject) => {
         output.write(endRecord, err => err ? reject(err) : resolve());