@gefyra/diffyr6-cli 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -21
- package/README.md +449 -447
- package/config/README.md +27 -27
- package/config/default-rules.json +135 -135
- package/config/resources-r4-not-in-r6.json +42 -42
- package/package.json +54 -54
- package/src/cli.js +94 -92
- package/src/compare-profiles.js +386 -386
- package/src/config.js +153 -147
- package/src/generate-fsh.js +457 -457
- package/src/index.js +462 -394
- package/src/rules-engine.js +642 -642
- package/src/upgrade-sushi.js +553 -553
- package/src/utils/fs.js +38 -38
- package/src/utils/html.js +28 -28
- package/src/utils/process.js +101 -101
- package/src/utils/removed-resources.js +135 -135
- package/src/utils/sushi-log.js +46 -46
- package/src/utils/validator.js +103 -103
- package/src/utils/zip.js +112 -0
package/src/utils/sushi-log.js
CHANGED
|
@@ -1,46 +1,46 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Parses SUSHI log files and returns a list of error entries with file/line information
|
|
3
|
-
*/
|
|
4
|
-
export function parseSushiLog(logContent) {
|
|
5
|
-
const entries = [];
|
|
6
|
-
const lines = logContent.split(/\r?\n/);
|
|
7
|
-
let index = 0;
|
|
8
|
-
|
|
9
|
-
while (index < lines.length) {
|
|
10
|
-
const line = lines[index];
|
|
11
|
-
const errorMatch = line.match(/^\s*error\s+(.*)$/i);
|
|
12
|
-
if (!errorMatch) {
|
|
13
|
-
index += 1;
|
|
14
|
-
continue;
|
|
15
|
-
}
|
|
16
|
-
|
|
17
|
-
const entry = { message: errorMatch[1], file: null, line: null, endLine: null };
|
|
18
|
-
index += 1;
|
|
19
|
-
|
|
20
|
-
while (index < lines.length) {
|
|
21
|
-
const detailMatch = lines[index].match(/^\s{2,}(.*)$/);
|
|
22
|
-
if (!detailMatch) {
|
|
23
|
-
break;
|
|
24
|
-
}
|
|
25
|
-
const detail = detailMatch[1].trim();
|
|
26
|
-
const fileMatch = detail.match(/^File:\s*(.+)$/i);
|
|
27
|
-
if (fileMatch) {
|
|
28
|
-
entry.file = fileMatch[1];
|
|
29
|
-
}
|
|
30
|
-
// Parse single line (e.g., "Line: 122") or line range (e.g., "Line: 122 - 124")
|
|
31
|
-
const lineRangeMatch = detail.match(/^Line:\s*(\d+)\s*-\s*(\d+)$/i);
|
|
32
|
-
const lineMatch = detail.match(/^Line:\s*(\d+)$/i);
|
|
33
|
-
if (lineRangeMatch) {
|
|
34
|
-
entry.line = Number.parseInt(lineRangeMatch[1], 10);
|
|
35
|
-
entry.endLine = Number.parseInt(lineRangeMatch[2], 10);
|
|
36
|
-
} else if (lineMatch) {
|
|
37
|
-
entry.line = Number.parseInt(lineMatch[1], 10);
|
|
38
|
-
}
|
|
39
|
-
index += 1;
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
entries.push(entry);
|
|
43
|
-
}
|
|
44
|
-
|
|
45
|
-
return entries;
|
|
46
|
-
}
|
|
1
|
+
/**
 * Parses SUSHI log files and returns a list of error entries with file/line information
 */
export function parseSushiLog(logContent) {
  const lines = logContent.split(/\r?\n/);
  const entries = [];
  let cursor = 0;

  while (cursor < lines.length) {
    const headerMatch = lines[cursor].match(/^\s*error\s+(.*)$/i);
    cursor += 1;
    if (!headerMatch) {
      continue;
    }

    const entry = { message: headerMatch[1], file: null, line: null, endLine: null };

    // Consume the indented detail lines that follow an error header.
    for (; cursor < lines.length; cursor += 1) {
      const detailMatch = lines[cursor].match(/^\s{2,}(.*)$/);
      if (!detailMatch) {
        break;
      }
      const detail = detailMatch[1].trim();

      const fileMatch = detail.match(/^File:\s*(.+)$/i);
      if (fileMatch) {
        entry.file = fileMatch[1];
      }

      // Either a single line ("Line: 122") or a range ("Line: 122 - 124").
      const rangeMatch = detail.match(/^Line:\s*(\d+)\s*-\s*(\d+)$/i);
      if (rangeMatch) {
        entry.line = Number.parseInt(rangeMatch[1], 10);
        entry.endLine = Number.parseInt(rangeMatch[2], 10);
      } else {
        const singleMatch = detail.match(/^Line:\s*(\d+)$/i);
        if (singleMatch) {
          entry.line = Number.parseInt(singleMatch[1], 10);
        }
      }
    }

    entries.push(entry);
  }

  return entries;
}
|
package/src/utils/validator.js
CHANGED
|
@@ -1,103 +1,103 @@
|
|
|
1
|
-
import fsp from 'fs/promises';
|
|
2
|
-
import fs from 'fs';
|
|
3
|
-
import https from 'https';
|
|
4
|
-
import path from 'path';
|
|
5
|
-
import { fileExists } from './fs.js';
|
|
6
|
-
import { createAnimator } from './process.js';
|
|
7
|
-
|
|
8
|
-
const VALIDATOR_DOWNLOAD_URL = 'https://github.com/hapifhir/org.hl7.fhir.core/releases/download/6.7.10/validator_cli.jar';
|
|
9
|
-
const DEFAULT_VALIDATOR_FILENAME = 'validator_cli.jar';
|
|
10
|
-
|
|
11
|
-
/**
|
|
12
|
-
* Ensures the validator JAR exists, downloading it if necessary
|
|
13
|
-
* @param {string|null} jarPath - Path to validator JAR or null for auto-download
|
|
14
|
-
* @param {string} workdir - Working directory where to download the JAR
|
|
15
|
-
* @returns {Promise<string>} Path to the validator JAR
|
|
16
|
-
*/
|
|
17
|
-
export async function ensureValidator(jarPath, workdir) {
|
|
18
|
-
// If jarPath is explicitly provided, verify it exists
|
|
19
|
-
if (jarPath) {
|
|
20
|
-
const resolvedPath = path.isAbsolute(jarPath) ? jarPath : path.resolve(workdir, jarPath);
|
|
21
|
-
if (await fileExists(resolvedPath)) {
|
|
22
|
-
return resolvedPath;
|
|
23
|
-
}
|
|
24
|
-
throw new Error(`Validator JAR not found at specified path: ${resolvedPath}`);
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
// Auto-download: check default location in workdir
|
|
28
|
-
const defaultPath = path.resolve(workdir, DEFAULT_VALIDATOR_FILENAME);
|
|
29
|
-
if (await fileExists(defaultPath)) {
|
|
30
|
-
console.log(` Using existing validator: ${defaultPath}`);
|
|
31
|
-
return defaultPath;
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
// Download validator
|
|
35
|
-
console.log(' Validator not found, downloading latest version...');
|
|
36
|
-
await downloadValidator(defaultPath);
|
|
37
|
-
console.log(` Downloaded validator to: ${defaultPath}`);
|
|
38
|
-
return defaultPath;
|
|
39
|
-
}
|
|
40
|
-
|
|
41
|
-
/**
|
|
42
|
-
* Downloads the validator JAR from GitHub releases
|
|
43
|
-
*/
|
|
44
|
-
async function downloadValidator(targetPath) {
|
|
45
|
-
await fsp.mkdir(path.dirname(targetPath), { recursive: true });
|
|
46
|
-
|
|
47
|
-
const animator = createAnimator('Downloading HL7 FHIR Validator...');
|
|
48
|
-
animator.start();
|
|
49
|
-
|
|
50
|
-
try {
|
|
51
|
-
await downloadFile(VALIDATOR_DOWNLOAD_URL, targetPath);
|
|
52
|
-
} finally {
|
|
53
|
-
animator.stop();
|
|
54
|
-
}
|
|
55
|
-
}
|
|
56
|
-
|
|
57
|
-
/**
|
|
58
|
-
* Downloads a file from a URL with redirect following
|
|
59
|
-
*/
|
|
60
|
-
async function downloadFile(url, targetPath, maxRedirects = 5) {
|
|
61
|
-
if (maxRedirects <= 0) {
|
|
62
|
-
throw new Error('Too many redirects while downloading validator');
|
|
63
|
-
}
|
|
64
|
-
|
|
65
|
-
return new Promise((resolve, reject) => {
|
|
66
|
-
https.get(url, (response) => {
|
|
67
|
-
// Handle redirects
|
|
68
|
-
if (response.statusCode === 301 || response.statusCode === 302 || response.statusCode === 307 || response.statusCode === 308) {
|
|
69
|
-
const redirectUrl = response.headers.location;
|
|
70
|
-
if (!redirectUrl) {
|
|
71
|
-
reject(new Error('Redirect without location header'));
|
|
72
|
-
return;
|
|
73
|
-
}
|
|
74
|
-
downloadFile(redirectUrl, targetPath, maxRedirects - 1)
|
|
75
|
-
.then(resolve)
|
|
76
|
-
.catch(reject);
|
|
77
|
-
return;
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
// Handle errors
|
|
81
|
-
if (response.statusCode !== 200) {
|
|
82
|
-
reject(new Error(`Failed to download validator: HTTP ${response.statusCode}`));
|
|
83
|
-
return;
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
// Write to file
|
|
87
|
-
const fileStream = fs.createWriteStream(targetPath);
|
|
88
|
-
response.pipe(fileStream);
|
|
89
|
-
|
|
90
|
-
fileStream.on('finish', () => {
|
|
91
|
-
fileStream.close();
|
|
92
|
-
resolve();
|
|
93
|
-
});
|
|
94
|
-
|
|
95
|
-
fileStream.on('error', (err) => {
|
|
96
|
-
fsp.unlink(targetPath).catch(() => {});
|
|
97
|
-
reject(err);
|
|
98
|
-
});
|
|
99
|
-
}).on('error', (err) => {
|
|
100
|
-
reject(new Error(`Network error while downloading validator: ${err.message}`));
|
|
101
|
-
});
|
|
102
|
-
});
|
|
103
|
-
}
|
|
1
|
+
import fsp from 'fs/promises';
|
|
2
|
+
import fs from 'fs';
|
|
3
|
+
import https from 'https';
|
|
4
|
+
import path from 'path';
|
|
5
|
+
import { fileExists } from './fs.js';
|
|
6
|
+
import { createAnimator } from './process.js';
|
|
7
|
+
|
|
8
|
+
const VALIDATOR_DOWNLOAD_URL = 'https://github.com/hapifhir/org.hl7.fhir.core/releases/download/6.7.10/validator_cli.jar';
|
|
9
|
+
const DEFAULT_VALIDATOR_FILENAME = 'validator_cli.jar';
|
|
10
|
+
|
|
11
|
+
/**
 * Ensures the validator JAR exists, downloading it if necessary
 * @param {string|null} jarPath - Path to validator JAR or null for auto-download
 * @param {string} workdir - Working directory where to download the JAR
 * @returns {Promise<string>} Path to the validator JAR
 */
export async function ensureValidator(jarPath, workdir) {
  // An explicitly supplied path must exist; never silently fall back to a download.
  if (jarPath) {
    const resolvedPath = path.isAbsolute(jarPath) ? jarPath : path.resolve(workdir, jarPath);
    const exists = await fileExists(resolvedPath);
    if (!exists) {
      throw new Error(`Validator JAR not found at specified path: ${resolvedPath}`);
    }
    return resolvedPath;
  }

  // No explicit path: reuse a previously downloaded JAR from the workdir if present.
  const defaultPath = path.resolve(workdir, DEFAULT_VALIDATOR_FILENAME);
  const alreadyDownloaded = await fileExists(defaultPath);
  if (alreadyDownloaded) {
    console.log(` Using existing validator: ${defaultPath}`);
    return defaultPath;
  }

  // Otherwise fetch a fresh copy into the default location.
  console.log(' Validator not found, downloading latest version...');
  await downloadValidator(defaultPath);
  console.log(` Downloaded validator to: ${defaultPath}`);
  return defaultPath;
}
|
|
40
|
+
|
|
41
|
+
/**
 * Downloads the validator JAR from GitHub releases
 */
async function downloadValidator(targetPath) {
  // Make sure the destination directory exists before streaming to it.
  await fsp.mkdir(path.dirname(targetPath), { recursive: true });

  // Show a spinner for the (potentially long) download.
  const animator = createAnimator('Downloading HL7 FHIR Validator...');
  animator.start();
  try {
    await downloadFile(VALIDATOR_DOWNLOAD_URL, targetPath);
  } finally {
    // Always clear the spinner, even when the download fails.
    animator.stop();
  }
}
|
|
56
|
+
|
|
57
|
+
/**
 * Downloads a file from a URL with redirect following.
 * @param {string} url - Source URL
 * @param {string} targetPath - Destination file path
 * @param {number} [maxRedirects=5] - Remaining redirect hops allowed
 * @returns {Promise<void>} Resolves once the file is fully written
 * @throws {Error} On too many redirects, non-200 responses, or network/file errors
 */
async function downloadFile(url, targetPath, maxRedirects = 5) {
  if (maxRedirects <= 0) {
    throw new Error('Too many redirects while downloading validator');
  }

  return new Promise((resolve, reject) => {
    https.get(url, (response) => {
      // Handle redirects
      if (response.statusCode === 301 || response.statusCode === 302 || response.statusCode === 307 || response.statusCode === 308) {
        // Drain the redirect body so the socket is released back to the agent.
        response.resume();
        const redirectUrl = response.headers.location;
        if (!redirectUrl) {
          reject(new Error('Redirect without location header'));
          return;
        }
        downloadFile(redirectUrl, targetPath, maxRedirects - 1)
          .then(resolve)
          .catch(reject);
        return;
      }

      // Handle errors
      if (response.statusCode !== 200) {
        // Consume the error body; otherwise the connection stays occupied.
        response.resume();
        reject(new Error(`Failed to download validator: HTTP ${response.statusCode}`));
        return;
      }

      // Write to file
      const fileStream = fs.createWriteStream(targetPath);
      response.pipe(fileStream);

      fileStream.on('finish', () => {
        fileStream.close();
        resolve();
      });

      fileStream.on('error', (err) => {
        // Abort the transfer and remove the partial file (best-effort).
        response.destroy();
        fsp.unlink(targetPath).catch(() => {});
        reject(err);
      });

      // A mid-transfer network failure would otherwise leave the promise
      // pending forever and a truncated file on disk.
      response.on('error', (err) => {
        fileStream.close();
        fsp.unlink(targetPath).catch(() => {});
        reject(new Error(`Network error while downloading validator: ${err.message}`));
      });
    }).on('error', (err) => {
      reject(new Error(`Network error while downloading validator: ${err.message}`));
    });
  });
}
|
package/src/utils/zip.js
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
import fsp from 'fs/promises';
|
|
2
|
+
|
|
3
|
+
// Precomputed lookup table for CRC-32 (reflected polynomial 0xEDB88320),
// the checksum required by the ZIP format.
const CRC32_TABLE = (() => {
  const table = new Uint32Array(256);
  for (let n = 0; n < 256; n += 1) {
    let value = n;
    for (let bit = 0; bit < 8; bit += 1) {
      value = value & 1 ? 0xEDB88320 ^ (value >>> 1) : value >>> 1;
    }
    table[n] = value >>> 0;
  }
  return table;
})();

/**
 * Computes the CRC-32 checksum of a buffer, byte by byte via the lookup table.
 * @param {Buffer|Uint8Array} buffer - Bytes to checksum
 * @returns {number} Unsigned 32-bit CRC value
 */
function crc32(buffer) {
  let state = 0xFFFFFFFF;
  for (const byte of buffer) {
    state = (state >>> 8) ^ CRC32_TABLE[(state ^ byte) & 0xFF];
  }
  // Final XOR, coerced back to an unsigned 32-bit integer.
  return (state ^ 0xFFFFFFFF) >>> 0;
}
|
|
23
|
+
|
|
24
|
+
/**
 * Converts a Date (or date-like value) to the MS-DOS date/time words stored
 * in ZIP headers.
 * DOS dates can only represent years 1980-2107 (7-bit year offset) with
 * 2-second time resolution, so out-of-range years are clamped to the nearest
 * representable bound.
 * @param {Date|number|string} date - Timestamp to convert
 * @returns {{dosTime: number, dosDate: number}} 16-bit DOS time and date fields
 */
function toDosDateTime(date) {
  const d = date instanceof Date ? date : new Date(date);
  // Clamp to the representable DOS range; a year > 2107 would overflow the
  // 7-bit year field and make the downstream 16-bit header write throw.
  const year = Math.min(Math.max(d.getFullYear(), 1980), 2107);
  const month = d.getMonth() + 1;
  const day = d.getDate();
  const hours = d.getHours();
  const minutes = d.getMinutes();
  const seconds = Math.floor(d.getSeconds() / 2); // 2-second resolution
  const dosTime = (hours << 11) | (minutes << 5) | seconds;
  const dosDate = ((year - 1980) << 9) | (month << 5) | day;
  return { dosTime, dosDate };
}
|
|
39
|
+
|
|
40
|
+
/**
 * Writes an uncompressed ("store" method) ZIP archive to outputPath.
 *
 * @param {string} outputPath - Destination .zip file path
 * @param {Array<{name: string, data: Buffer|string, mtime?: Date|number}>} entries -
 *   Files to include; string `data` is encoded as UTF-8, `mtime` defaults to now.
 * @returns {Promise<void>} Resolves once the archive has been written.
 *
 * NOTE(review): all sizes/offsets are written as 32-bit fields and entry
 * counts as 16-bit — there is no ZIP64 support, so archives over 4 GiB or
 * 65535 entries will fail; confirm inputs stay within those limits.
 */
export async function createZip(outputPath, entries) {
  const fileParts = [];    // local file header + data pairs, in archive order
  const centralParts = []; // central directory record per entry
  let offset = 0;          // running byte offset of the next local header

  for (const entry of entries) {
    // ZIP entry names always use forward slashes, even on Windows.
    const name = entry.name.replace(/\\/g, '/');
    const nameBuffer = Buffer.from(name, 'utf8');
    const dataBuffer = Buffer.isBuffer(entry.data)
      ? entry.data
      : Buffer.from(entry.data, 'utf8');
    const { dosTime, dosDate } = toDosDateTime(entry.mtime || new Date());
    const crc = crc32(dataBuffer);
    // Stored (uncompressed), so compressed and uncompressed sizes are equal.
    const size = dataBuffer.length;

    // Local file header: fixed 30 bytes + file name.
    const localHeader = Buffer.alloc(30 + nameBuffer.length);
    let p = 0;
    localHeader.writeUInt32LE(0x04034b50, p); p += 4; // Local file header signature
    localHeader.writeUInt16LE(20, p); p += 2; // Version needed
    localHeader.writeUInt16LE(0, p); p += 2; // Flags
    localHeader.writeUInt16LE(0, p); p += 2; // Compression (store)
    localHeader.writeUInt16LE(dosTime, p); p += 2;
    localHeader.writeUInt16LE(dosDate, p); p += 2;
    localHeader.writeUInt32LE(crc, p); p += 4;
    localHeader.writeUInt32LE(size, p); p += 4; // Compressed size
    localHeader.writeUInt32LE(size, p); p += 4; // Uncompressed size
    localHeader.writeUInt16LE(nameBuffer.length, p); p += 2;
    localHeader.writeUInt16LE(0, p); p += 2; // Extra length
    nameBuffer.copy(localHeader, p);

    fileParts.push(localHeader, dataBuffer);

    // Central directory record: fixed 46 bytes + file name.
    const centralHeader = Buffer.alloc(46 + nameBuffer.length);
    p = 0;
    centralHeader.writeUInt32LE(0x02014b50, p); p += 4; // Central dir signature
    centralHeader.writeUInt16LE(20, p); p += 2; // Version made by
    centralHeader.writeUInt16LE(20, p); p += 2; // Version needed
    centralHeader.writeUInt16LE(0, p); p += 2; // Flags
    centralHeader.writeUInt16LE(0, p); p += 2; // Compression
    centralHeader.writeUInt16LE(dosTime, p); p += 2;
    centralHeader.writeUInt16LE(dosDate, p); p += 2;
    centralHeader.writeUInt32LE(crc, p); p += 4;
    centralHeader.writeUInt32LE(size, p); p += 4; // Compressed size
    centralHeader.writeUInt32LE(size, p); p += 4; // Uncompressed size
    centralHeader.writeUInt16LE(nameBuffer.length, p); p += 2;
    centralHeader.writeUInt16LE(0, p); p += 2; // Extra length
    centralHeader.writeUInt16LE(0, p); p += 2; // Comment length
    centralHeader.writeUInt16LE(0, p); p += 2; // Disk number
    centralHeader.writeUInt16LE(0, p); p += 2; // Internal attributes
    centralHeader.writeUInt32LE(0, p); p += 4; // External attributes
    centralHeader.writeUInt32LE(offset, p); p += 4; // Local header offset
    nameBuffer.copy(centralHeader, p);

    centralParts.push(centralHeader);
    offset += localHeader.length + dataBuffer.length;
  }

  // End-of-central-directory record (fixed 22 bytes, no comment).
  const centralDirSize = centralParts.reduce((sum, buf) => sum + buf.length, 0);
  const centralDirOffset = offset;
  const endRecord = Buffer.alloc(22);
  let e = 0;
  endRecord.writeUInt32LE(0x06054b50, e); e += 4; // End of central dir signature
  endRecord.writeUInt16LE(0, e); e += 2; // Disk number
  endRecord.writeUInt16LE(0, e); e += 2; // Central dir start disk
  endRecord.writeUInt16LE(entries.length, e); e += 2; // Entries on disk
  endRecord.writeUInt16LE(entries.length, e); e += 2; // Total entries
  endRecord.writeUInt32LE(centralDirSize, e); e += 4;
  endRecord.writeUInt32LE(centralDirOffset, e); e += 4;
  endRecord.writeUInt16LE(0, e); e += 2; // Comment length

  // Archive layout: [local headers + data] [central directory] [end record].
  const buffer = Buffer.concat([...fileParts, ...centralParts, endRecord]);
  await fsp.writeFile(outputPath, buffer);
}
|