@gefyra/diffyr6-cli 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,553 +1,553 @@
1
- import fsp from 'fs/promises';
2
- import path from 'path';
3
- import os from 'os';
4
- import { createAnimator, spawnProcess } from './utils/process.js';
5
- import { fileExists, pathExists } from './utils/fs.js';
6
- import { parseSushiLog } from './utils/sushi-log.js';
7
-
8
- const SOURCE_VERSION = '4.0.1';
9
- const TARGET_VERSION = '6.0.0-ballot3';
10
- const MAX_ITERATIONS = 25;
11
- const SNOMED_CT_ERROR_TEXT = 'Resolved value "SNOMED_CT" is not a valid URI';
12
-
13
/**
 * Upgrades a SUSHI project to FHIR R6: copies the project to an "<name>R6"
 * sibling directory, bumps fhirVersion in every sushi-config file, suffixes
 * profile names with "R6", then runs SUSHI repeatedly with automated fixes.
 * @param {string} sourceDir - existing R4 project directory.
 * @param {string} [sushiExecutable='sushi -s'] - command (plus args) used to run SUSHI.
 * @returns {Promise<string>} path of the upgraded working directory.
 */
export async function upgradeSushiToR6(sourceDir, sushiExecutable = 'sushi -s') {
  await ensureDirectory(sourceDir);
  const workingDir = await createR6Workspace(sourceDir);

  const configs = await findSushiConfigs(workingDir);
  if (configs.length === 0) {
    throw new Error(`No sushi-config.yaml found in ${workingDir}`);
  }

  const bumpConfig = async (configPath) => {
    const didChange = await updateFhirVersion(configPath, SOURCE_VERSION, TARGET_VERSION);
    if (didChange) {
      console.log(` Updated ${path.basename(configPath)} to ${TARGET_VERSION}`);
    }
  };
  await Promise.all(configs.map(bumpConfig));

  await renameProfilesWithSuffix(workingDir);
  await runSushiUntilSuccess(workingDir, sushiExecutable);
  return workingDir;
}
39
-
40
/**
 * Throws unless `dir` exists and is a directory; stat failures (missing path,
 * permissions) are folded into the same error.
 */
async function ensureDirectory(dir) {
  const info = await fsp.stat(dir).catch(() => null);
  const isDirectory = info !== null && info.isDirectory();
  if (!isDirectory) {
    throw new Error(`Path is not a directory: ${dir}`);
  }
}
46
-
47
/**
 * Creates the R6 working copy by recursively copying `sourceDir` to the
 * derived "<name>R6" sibling directory.
 * @throws if the target directory already exists (never overwrites).
 * @returns {Promise<string>} path of the new working directory.
 */
async function createR6Workspace(sourceDir) {
  const targetDir = deriveR6Path(sourceDir);
  if (await pathExists(targetDir)) {
    throw new Error(`Target directory already exists: ${targetDir}`);
  }
  console.log(` Copying ${sourceDir} to ${targetDir}...`);
  await fsp.cp(sourceDir, targetDir, { recursive: true });
  return targetDir;
}
57
-
58
/**
 * Derives the R6 sibling path from a project directory:
 * "/x/projR4" -> "/x/projR6" (trailing "r4"/"R4" is dropped),
 * "/x/proj"   -> "/x/projR6".
 */
function deriveR6Path(sourceDir) {
  const base = path.basename(sourceDir);
  const stem = /r4$/i.test(base) ? base.slice(0, -2) : base;
  return path.join(path.dirname(sourceDir), `${stem}R6`);
}
65
-
66
/**
 * Lists sushi-config.yaml / sushi-config.yml files directly inside `rootDir`
 * (non-recursive, case-insensitive filename match).
 * @returns {Promise<string[]>} absolute config paths in directory order.
 */
async function findSushiConfigs(rootDir) {
  const entries = await fsp.readdir(rootDir, { withFileTypes: true });
  const configNames = ['sushi-config.yaml', 'sushi-config.yml'];
  return entries
    .filter((entry) => entry.isFile())
    .filter((entry) => configNames.includes(entry.name.toLowerCase()))
    .map((entry) => path.join(rootDir, entry.name));
}
80
-
81
/**
 * Rewrites the fhirVersion entry in one config file.
 * @returns {Promise<boolean>} true when the file content changed on disk.
 */
async function updateFhirVersion(filePath, fromVersion, toVersion) {
  const before = await fsp.readFile(filePath, 'utf8');
  const after = replaceFhirVersion(before, fromVersion, toVersion);
  const changed = after !== before;
  if (changed) {
    await fsp.writeFile(filePath, after, 'utf8');
  }
  return changed;
}
90
-
91
/**
 * Replaces the first (case-insensitive) `fhirVersion: <fromVersion>` entry in
 * YAML content with `toVersion`. The `\2` backreference makes the closing
 * quote match the opening one, so the original quoting style is preserved.
 */
function replaceFhirVersion(content, fromVersion, toVersion) {
  const versionEntry = new RegExp(
    `(fhirVersion\\s*:\\s*)(["']?)${escapeRegExp(fromVersion)}\\2`,
    'i'
  );
  return content.replace(versionEntry, (_, lead, quoteChar) => {
    const quote = quoteChar || '';
    return `${lead}${quote}${toVersion}${quote}`;
  });
}
101
-
102
/** Escapes regex metacharacters so `value` matches literally inside a RegExp. */
function escapeRegExp(value) {
  const metachars = /[.*+?^${}()|[\]\\]/g;
  return value.replace(metachars, (ch) => `\\${ch}`);
}
105
-
106
/**
 * Appends an "R6" suffix to every profile declared in the project's FSH files
 * and rewrites references (names, ^id, ^url) accordingly. No-ops with a log
 * message when there are no FSH files or no profiles left to rename.
 */
async function renameProfilesWithSuffix(rootDir) {
  const fshFiles = await collectFshFiles(rootDir);
  if (fshFiles.length === 0) {
    console.warn(' No FSH files found, skipping profile renaming');
    return;
  }

  const renameMap = await buildProfileRenameMap(fshFiles);
  if (renameMap.size === 0) {
    console.log(' No profiles without R6 suffix found');
    return;
  }

  const fileCount = await applyProfileRenames(fshFiles, renameMap);
  console.log(` Renamed ${renameMap.size} profile(s) in ${fileCount} file(s)`);
}
120
-
121
/**
 * Recursively collects every *.fsh file under `dir` (extension matched
 * case-insensitively). Unreadable directories are skipped silently.
 * @returns {Promise<string[]>} file paths in depth-first directory order.
 */
async function collectFshFiles(dir) {
  const collected = [];

  const visit = async (current) => {
    const entries = await fsp.readdir(current, { withFileTypes: true }).catch(() => []);
    for (const entry of entries) {
      const fullPath = path.join(current, entry.name);
      if (entry.isDirectory()) {
        await visit(fullPath);
        continue;
      }
      const isFshFile = entry.isFile() && entry.name.toLowerCase().endsWith('.fsh');
      if (isFshFile) {
        collected.push(fullPath);
      }
    }
  };

  await visit(dir);
  return collected;
}
137
-
138
/**
 * Scans FSH files for `Profile:` declarations and maps each profile name that
 * does not already end in "R6" to its suffixed replacement.
 * @returns {Promise<Map<string, string>>} oldName -> `${oldName}R6`.
 */
async function buildProfileRenameMap(files) {
  const renames = new Map();
  const declaration = /^Profile:\s*(\S+)/;

  for (const file of files) {
    const text = await fsp.readFile(file, 'utf8');
    for (const line of text.split(/\r?\n/)) {
      const hit = line.match(declaration);
      if (!hit) {
        continue;
      }
      const profileName = hit[1];
      const alreadyHandled = profileName.endsWith('R6') || renames.has(profileName);
      if (!alreadyHandled) {
        renames.set(profileName, `${profileName}R6`);
      }
    }
  }

  return renames;
}
158
-
159
/**
 * Applies the rename map to every file in two passes per file: first bare
 * profile-name references, then the kebab-case ids inside ^url and ^id
 * assignments derived from the profile names.
 * @returns {Promise<number>} number of files whose content changed.
 */
async function applyProfileRenames(files, renameMap) {
  if (renameMap.size === 0) {
    return 0;
  }

  const namePatterns = [...renameMap.entries()].map(([oldName, newName]) => ({
    newName,
    regex: createProfileNameRegex(oldName),
  }));

  let filesChanged = 0;
  for (const file of files) {
    const before = await fsp.readFile(file, 'utf8');
    let text = before;

    // Pass 1: standalone profile-name tokens (group 1 keeps the delimiter).
    for (const { newName, regex } of namePatterns) {
      text = text.replace(regex, (_, lead = '') => `${lead || ''}${newName}`);
    }

    // Pass 2: kebab-case ids in ^url (last path segment) and ^id values.
    for (const [oldName, newName] of renameMap.entries()) {
      const oldId = camelCaseToKebabCase(oldName);
      const newId = camelCaseToKebabCase(newName);

      const urlRegex = new RegExp(
        `(\\^url\\s*=\\s*["'])([^"']*\\/)${escapeRegExp(oldId)}(["'])`,
        'g'
      );
      text = text.replace(urlRegex, (_, open, base, close) => `${open}${base}${newId}${close}`);

      const idRegex = new RegExp(
        `(\\^id\\s*=\\s*["'])${escapeRegExp(oldId)}(["'])`,
        'g'
      );
      text = text.replace(idRegex, (_, open, close) => `${open}${newId}${close}`);
    }

    if (text !== before) {
      await fsp.writeFile(file, text, 'utf8');
      filesChanged += 1;
    }
  }
  return filesChanged;
}
205
-
206
/**
 * Converts CamelCase to kebab-case by hyphenating each lower->upper boundary,
 * then lowercasing: "MyPatientR6" -> "my-patient-r6". Consecutive capitals
 * are not split (mirrors the original behavior).
 */
function camelCaseToKebabCase(str) {
  const hyphenated = str.replace(/([a-z])([A-Z])/g, '$1-$2');
  return hyphenated.toLowerCase();
}
211
-
212
/**
 * Builds a global, multiline regex matching `name` as a standalone token:
 * not adjacent to identifier-ish characters ([A-Za-z0-9._-]) on either side.
 * Group 1 captures the leading delimiter ('' at line start) so replacement
 * callbacks can re-emit it; the trailing boundary is a lookahead and is
 * never consumed.
 */
function createProfileNameRegex(name) {
  return new RegExp(
    `(^|[^A-Za-z0-9._-])(${escapeRegExp(name)})(?=[^A-Za-z0-9._-]|$)`,
    'gm'
  );
}
218
-
219
/**
 * Runs SUSHI repeatedly, applying automated fixes between attempts, until it
 * exits cleanly or MAX_ITERATIONS is exhausted. Every attempt's combined
 * stdout/stderr is written to sushi-upgrade-<n>.log in `targetDir`.
 * Fix order per failed attempt: SNOMED_CT alias repair, then min-cardinality
 * instance disabling, then commenting out the reported error lines.
 * @throws when no fix could be applied or the attempt budget runs out.
 */
async function runSushiUntilSuccess(targetDir, sushiExecutable) {
  for (let attempt = 1; attempt <= MAX_ITERATIONS; attempt += 1) {
    console.log(` SUSHI iteration ${attempt}...`);
    const { stdout, stderr, exitCode } = await runSushi(sushiExecutable, targetDir);
    const combinedLog = [stdout, stderr].filter(Boolean).join('\n');
    await fsp.writeFile(path.join(targetDir, `sushi-upgrade-${attempt}.log`), combinedLog, 'utf8');

    if (exitCode === 0) {
      console.log(` SUSHI completed successfully (iteration ${attempt})`);
      return;
    }
    console.warn(` SUSHI exited with code ${exitCode}, analyzing errors...`);

    const errors = parseSushiLog(combinedLog);

    if (await fixSnomedCtIssues(errors, targetDir)) {
      console.log(' Applied SNOMED_CT fix, re-running...');
      continue;
    }

    const disabledCount = await fixMinCardinalityInstanceErrors(errors, targetDir);
    if (disabledCount) {
      console.log(` Disabled ${disabledCount} instance file(s), re-running...`);
      continue;
    }

    const touchedFiles = await commentErrorLines(errors, targetDir);
    if (touchedFiles === 0) {
      throw new Error('SUSHI failed but no lines could be commented out');
    }
    console.log(` Commented out lines in ${touchedFiles} file(s)`);
  }
  throw new Error(`SUSHI failed after ${MAX_ITERATIONS} attempts`);
}
253
-
254
/**
 * Spawns the configured SUSHI command against `targetDir` while a console
 * spinner runs. `executable` may include arguments (e.g. "sushi -s"); it is
 * split on whitespace, so quoted paths with spaces are not supported.
 * The spinner is always stopped, even when spawning throws.
 */
async function runSushi(executable, targetDir) {
  const animator = createAnimator('SUSHI working...');
  animator.start();
  try {
    const [command, ...baseArgs] = executable.trim().split(/\s+/);
    return await spawnProcess(command, [...baseArgs, targetDir], process.cwd());
  } finally {
    animator.stop();
  }
}
267
-
268
/**
 * Groups SUSHI log entries by normalized file path and comments out every
 * reported line (entry.line through entry.endLine, inclusive).
 * Entries without a usable path or numeric line are skipped.
 * @returns {Promise<number>} number of files actually modified.
 */
async function commentErrorLines(logEntries, workingDir) {
  const linesByFile = new Map();

  for (const entry of logEntries) {
    const filePath = normalizeLogPath(entry.file, workingDir);
    if (!filePath || typeof entry.line !== 'number') {
      continue;
    }
    const lineSet = linesByFile.get(filePath) ?? new Set();
    linesByFile.set(filePath, lineSet);
    const lastLine = entry.endLine || entry.line;
    for (let ln = entry.line; ln <= lastLine; ln += 1) {
      lineSet.add(ln);
    }
  }

  let modifiedFiles = 0;
  for (const [filePath, lineSet] of linesByFile.entries()) {
    if (await commentLinesInFile(filePath, [...lineSet])) {
      modifiedFiles += 1;
    }
  }
  return modifiedFiles;
}
295
-
296
/**
 * Normalizes a file path taken from a SUSHI log line into an absolute path
 * using the host platform's separators. A Windows drive path (C:\...) seen on
 * a non-Windows host is mapped to /mnt/<drive>/... (WSL convention). Relative
 * paths are resolved against `workingDir`.
 * @returns {string|null} null when the input is missing or blank.
 */
function normalizeLogPath(filePath, workingDir) {
  if (!filePath) {
    return null;
  }
  let candidate = filePath.trim();
  if (!candidate) {
    return null;
  }

  const looksLikeDrivePath = /^[A-Za-z]:[\\/]/.test(candidate);
  if (looksLikeDrivePath) {
    if (process.platform === 'win32') {
      candidate = candidate.replace(/\//g, '\\');
    } else {
      const driveLetter = candidate[0].toLowerCase();
      const tail = candidate.slice(2).replace(/\\/g, '/').replace(/^\/+/, '');
      candidate = `/mnt/${driveLetter}/${tail}`;
    }
  } else if (process.platform === 'win32') {
    candidate = candidate.replace(/\//g, '\\');
  } else {
    candidate = candidate.replace(/\\/g, '/');
  }

  return path.isAbsolute(candidate) ? candidate : path.join(workingDir, candidate);
}
325
-
326
/**
 * Prefixes the given 1-based lines in `filePath` with the
 * "// AUTO-DISABLED (SUSHI R6):" marker, preserving each line's indentation
 * and the file's dominant newline style. Dependent continuation lines are
 * pulled in via expandContainsBlockLines; already-disabled lines are skipped.
 * @returns {Promise<boolean>} true when the file was modified.
 */
async function commentLinesInFile(filePath, lineNumbers) {
  const stat = await fsp.stat(filePath).catch(() => null);
  if (!stat || !stat.isFile()) {
    console.warn(`Could not find file: ${filePath}`);
    return false;
  }

  const original = await fsp.readFile(filePath, 'utf8');
  const newline = original.includes('\r\n') ? '\r\n' : '\n';
  const lines = original.split(/\r?\n/);

  const targets = expandContainsBlockLines(lines, lineNumbers)
    .filter((n) => Number.isInteger(n) && n > 0 && n <= lines.length)
    .sort((a, b) => a - b);

  let changed = false;
  for (const lineNumber of targets) {
    const idx = lineNumber - 1;
    const line = lines[idx];
    if (line.trim().startsWith('// AUTO-DISABLED (SUSHI R6):')) {
      continue;
    }
    const indent = line.match(/^\s*/)[0] || '';
    lines[idx] = `${indent}// AUTO-DISABLED (SUSHI R6): ${line.trimStart()}`;
    changed = true;
  }

  if (changed) {
    await fsp.writeFile(filePath, lines.join(newline), 'utf8');
  }
  return changed;
}
362
-
363
/**
 * Expands a set of 1-based error line numbers so that commenting them out
 * leaves syntactically coherent FSH:
 *  - a line opening a `contains` rule (or ending with `and`) pulls in its
 *    slice-continuation lines;
 *  - a soft-indexed assignment (`path[0].` / `path[+].`) pulls in the
 *    dependent `path[=].` lines that immediately follow.
 * Blank and already-disabled lines inside a block are scanned past.
 * @returns {number[]} expanded 1-based line numbers (insertion order).
 */
function expandContainsBlockLines(lines, lineNumbers) {
  const expanded = new Set(lineNumbers);
  const isSkippable = (text) =>
    text === '' || text.startsWith('// AUTO-DISABLED (SUSHI R6):');

  for (const lineNumber of lineNumbers) {
    const idx = lineNumber - 1;
    if (idx < 0 || idx >= lines.length) {
      continue;
    }
    const trimmed = lines[idx].trim();

    // Case 1: `contains` / trailing-`and` opens a multi-line slice block.
    if (trimmed.includes(' contains') || trimmed.endsWith(' and')) {
      let cursor = idx + 1;
      while (cursor < lines.length) {
        const next = lines[cursor].trim();
        if (isSkippable(next)) {
          cursor += 1;
          continue;
        }
        const indentedContinuation = lines[cursor].match(/^\s+/) &&
          (next.match(/^\w+\s+\d/) || next.endsWith(' and') || next.endsWith('MS') || next.endsWith('MS and'));
        const sliceDefinition = next.match(/^\w+\s+\d+\.\.(\d+|\*)\s*(MS)?\s*(and)?\s*$/);
        if (!indentedContinuation && !sliceDefinition) {
          break;
        }
        expanded.add(cursor + 1);
        cursor += 1;
      }
    }

    // Case 2: soft-indexed assignment; later `[=]` references on the same
    // element path depend on it and must be disabled together.
    const softIndex = trimmed.match(/^\*\s*(\S+)\[(\d+|\+)\]\./);
    if (softIndex) {
      const elementPath = softIndex[1];
      const eqReference = new RegExp(`^\\*\\s*${escapeRegExp(elementPath)}\\[=\\]\\.`);
      let cursor = idx + 1;
      while (cursor < lines.length) {
        const next = lines[cursor].trim();
        if (isSkippable(next)) {
          cursor += 1;
          continue;
        }
        if (!eqReference.test(next)) {
          break;
        }
        expanded.add(cursor + 1);
        cursor += 1;
      }
    }
  }

  return [...expanded];
}
428
-
429
/**
 * Detects the SUSHI "SNOMED_CT is not a valid URI" error and repairs it:
 * bare SNOMED_CT tokens are rewritten to the $SNOMED_CT alias and the alias
 * definition is ensured.
 * @returns {Promise<boolean>} true when anything was fixed (caller re-runs SUSHI).
 */
async function fixSnomedCtIssues(logEntries, workingDir) {
  const snomedErrorSeen = logEntries.some(
    (entry) => typeof entry.message === 'string' && entry.message.includes(SNOMED_CT_ERROR_TEXT)
  );
  if (!snomedErrorSeen) {
    return false;
  }

  const fileCount = await replacePlainSnomedReferences(workingDir);
  const aliasAdded = await ensureSnomedAliasDefinition(workingDir);
  if (fileCount === 0 && !aliasAdded) {
    return false;
  }
  console.log(` Fixed SNOMED_CT in ${fileCount} file(s)${aliasAdded ? ' and added alias' : ''}`);
  return true;
}
444
-
445
/**
 * Prefixes bare SNOMED_CT tokens in all FSH files with '$' so they resolve
 * through the $SNOMED_CT alias. Already-aliased occurrences are untouched
 * (the leading character class excludes '$').
 * @returns {Promise<number>} number of files changed.
 */
async function replacePlainSnomedReferences(rootDir) {
  const files = await collectFshFiles(rootDir);
  const bareSnomed = /(^|[^$A-Za-z0-9_])SNOMED_CT\b/gm;

  let changedFiles = 0;
  for (const file of files) {
    const before = await fsp.readFile(file, 'utf8');
    const after = before.replace(bareSnomed, (_, lead) => `${lead}$SNOMED_CT`);
    if (after === before) {
      continue;
    }
    await fsp.writeFile(file, after, 'utf8');
    changedFiles += 1;
  }
  return changedFiles;
}
459
-
460
/**
 * Ensures `Alias: $SNOMED_CT = http://snomed.info/sct` is defined in the
 * project's alias file, creating the file (and parent directories) or
 * appending to it while preserving its newline style.
 * @returns {Promise<boolean>} true when the alias line was added.
 */
async function ensureSnomedAliasDefinition(rootDir) {
  const { filePath, exists } = await resolveAliasFilePath(rootDir);
  let content = '';
  if (exists) {
    content = await fsp.readFile(filePath, 'utf8');
    if (/^\s*Alias:\s*\$SNOMED_CT\b/m.test(content)) {
      return false; // alias already present
    }
  }

  await fsp.mkdir(path.dirname(filePath), { recursive: true });
  const aliasLine = 'Alias: $SNOMED_CT = http://snomed.info/sct';

  if (!exists || content.length === 0) {
    await fsp.writeFile(filePath, `${aliasLine}${os.EOL || '\n'}`, 'utf8');
    return true;
  }

  const newline = content.includes('\r\n') ? '\r\n' : '\n';
  const endsWithNewline = content.endsWith('\r') || content.endsWith('\n');
  const separator = endsWithNewline ? '' : newline;
  await fsp.writeFile(filePath, `${content}${separator}${aliasLine}${newline}`, 'utf8');
  return true;
}
483
-
484
/**
 * Disables instance files that SUSHI rejects for unmet minimum cardinality:
 * only *.fsh files under an "instances" directory are considered; each is
 * handed to disableInstanceFile with its ".fsh" extension stripped as the
 * target path.
 * @returns {Promise<number>} number of instance files disabled.
 */
async function fixMinCardinalityInstanceErrors(logEntries, workingDir) {
  const candidates = logEntries.filter(
    (entry) =>
      entry &&
      typeof entry.message === 'string' &&
      entry.message.includes('minimum cardinality 1 but occurs 0 time(s).') &&
      typeof entry.file === 'string'
  );
  if (candidates.length === 0) {
    return 0;
  }

  let disabled = 0;
  for (const entry of candidates) {
    const sourcePath = normalizeLogPath(entry.file, workingDir);
    const isInstanceFsh =
      sourcePath &&
      sourcePath.endsWith('.fsh') &&
      sourcePath.includes(`${path.sep}instances${path.sep}`);
    if (!isInstanceFsh) {
      continue;
    }
    const targetPath = sourcePath.slice(0, -4); // strip ".fsh"
    if (await disableInstanceFile(sourcePath, targetPath)) {
      disabled += 1;
    }
  }
  return disabled;
}
514
-
515
/**
 * Disables an instance file: appends "-disabled" to the first `Instance:` id
 * (so a stale copy cannot collide with live references) and renames the file
 * to `targetPath` (conventionally the same path minus its .fsh extension) so
 * SUSHI no longer picks it up.
 *
 * @param {string} sourcePath - existing .fsh instance file.
 * @param {string} targetPath - destination path for the disabled file.
 * @returns {Promise<boolean>} true only when the file was actually renamed.
 */
async function disableInstanceFile(sourcePath, targetPath) {
  const stat = await fsp.stat(sourcePath).catch(() => null);
  if (!stat || !stat.isFile()) {
    return false;
  }

  const original = await fsp.readFile(sourcePath, 'utf8');
  const newline = original.includes('\r\n') ? '\r\n' : '\n';
  const lines = original.split(/\r?\n/);

  // Tag the first Instance declaration; later declarations (if any) are left
  // alone, matching the original behavior.
  let renamedInstance = false;
  for (let i = 0; i < lines.length; i += 1) {
    const match = lines[i].match(/^(\s*Instance:\s*)(\S+)(.*)$/);
    if (match) {
      lines[i] = `${match[1]}${match[2]}-disabled${match[3] || ''}`;
      renamedInstance = true;
      break;
    }
  }
  if (renamedInstance) {
    await fsp.writeFile(sourcePath, lines.join(newline), 'utf8');
  }

  // BUG FIX: the previous version swallowed rename failures and returned true
  // unconditionally, so callers over-counted "disabled" files. Report the
  // actual rename outcome instead.
  const renamed = await fsp.rename(sourcePath, targetPath).then(
    () => true,
    () => false
  );
  return renamed;
}
539
-
540
/**
 * Locates the project's alias file, checking conventional locations in order
 * of preference. When none exists, falls back to input/fsh/aliases.fsh with
 * `exists: false` so the caller can create it.
 * @returns {Promise<{filePath: string, exists: boolean}>}
 */
async function resolveAliasFilePath(rootDir) {
  const candidates = [
    path.join(rootDir, 'input', 'fsh', 'aliases.fsh'),
    path.join(rootDir, 'input', 'fsh', 'alias.fsh'),
    path.join(rootDir, 'aliases.fsh'),
    path.join(rootDir, 'alias.fsh'),
  ];

  for (const candidate of candidates) {
    const present = await fileExists(candidate);
    if (present) {
      return { filePath: candidate, exists: true };
    }
  }
  return { filePath: candidates[0], exists: false };
}
1
+ import fsp from 'fs/promises';
2
+ import path from 'path';
3
+ import os from 'os';
4
+ import { createAnimator, spawnProcess } from './utils/process.js';
5
+ import { fileExists, pathExists } from './utils/fs.js';
6
+ import { parseSushiLog } from './utils/sushi-log.js';
7
+
8
+ const SOURCE_VERSION = '4.0.1';
9
+ const TARGET_VERSION = '6.0.0-ballot3';
10
+ const MAX_ITERATIONS = 25;
11
+ const SNOMED_CT_ERROR_TEXT = 'Resolved value "SNOMED_CT" is not a valid URI';
12
+
13
+ /**
14
+ * Upgrades a SUSHI project to FHIR R6
15
+ */
16
+ export async function upgradeSushiToR6(sourceDir, sushiExecutable = 'sushi -s') {
17
+ await ensureDirectory(sourceDir);
18
+ const workingDir = await createR6Workspace(sourceDir);
19
+
20
+ const configs = await findSushiConfigs(workingDir);
21
+ if (configs.length === 0) {
22
+ throw new Error(`No sushi-config.yaml found in ${workingDir}`);
23
+ }
24
+
25
+ await Promise.all(
26
+ configs.map(async (configPath) => {
27
+ const updated = await updateFhirVersion(configPath, SOURCE_VERSION, TARGET_VERSION);
28
+ if (updated) {
29
+ console.log(` Updated ${path.basename(configPath)} to ${TARGET_VERSION}`);
30
+ }
31
+ })
32
+ );
33
+
34
+ await renameProfilesWithSuffix(workingDir);
35
+ await runSushiUntilSuccess(workingDir, sushiExecutable);
36
+
37
+ return workingDir;
38
+ }
39
+
40
+ async function ensureDirectory(dir) {
41
+ const stat = await fsp.stat(dir).catch(() => null);
42
+ if (!stat || !stat.isDirectory()) {
43
+ throw new Error(`Path is not a directory: ${dir}`);
44
+ }
45
+ }
46
+
47
+ async function createR6Workspace(sourceDir) {
48
+ const targetDir = deriveR6Path(sourceDir);
49
+ const exists = await pathExists(targetDir);
50
+ if (exists) {
51
+ throw new Error(`Target directory already exists: ${targetDir}`);
52
+ }
53
+ console.log(` Copying ${sourceDir} to ${targetDir}...`);
54
+ await fsp.cp(sourceDir, targetDir, { recursive: true });
55
+ return targetDir;
56
+ }
57
+
58
+ function deriveR6Path(sourceDir) {
59
+ const parent = path.dirname(sourceDir);
60
+ const base = path.basename(sourceDir);
61
+ const hasR4Suffix = base.toLowerCase().endsWith('r4');
62
+ const trimmed = hasR4Suffix ? base.slice(0, -2) : base;
63
+ return path.join(parent, `${trimmed}R6`);
64
+ }
65
+
66
+ async function findSushiConfigs(rootDir) {
67
+ const entries = await fsp.readdir(rootDir, { withFileTypes: true });
68
+ const targets = [];
69
+ for (const entry of entries) {
70
+ if (!entry.isFile()) {
71
+ continue;
72
+ }
73
+ const lower = entry.name.toLowerCase();
74
+ if (lower === 'sushi-config.yaml' || lower === 'sushi-config.yml') {
75
+ targets.push(path.join(rootDir, entry.name));
76
+ }
77
+ }
78
+ return targets;
79
+ }
80
+
81
+ async function updateFhirVersion(filePath, fromVersion, toVersion) {
82
+ const original = await fsp.readFile(filePath, 'utf8');
83
+ const updated = replaceFhirVersion(original, fromVersion, toVersion);
84
+ if (updated === original) {
85
+ return false;
86
+ }
87
+ await fsp.writeFile(filePath, updated, 'utf8');
88
+ return true;
89
+ }
90
+
91
+ function replaceFhirVersion(content, fromVersion, toVersion) {
92
+ const regex = new RegExp(
93
+ `(fhirVersion\\s*:\\s*)(["']?)${escapeRegExp(fromVersion)}\\2`,
94
+ 'i'
95
+ );
96
+ return content.replace(regex, (_, prefix, quote) => {
97
+ const q = quote || '';
98
+ return `${prefix}${q}${toVersion}${q}`;
99
+ });
100
+ }
101
+
102
+ function escapeRegExp(value) {
103
+ return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
104
+ }
105
+
106
+ async function renameProfilesWithSuffix(rootDir) {
107
+ const fshFiles = await collectFshFiles(rootDir);
108
+ if (fshFiles.length === 0) {
109
+ console.warn(' No FSH files found, skipping profile renaming');
110
+ return;
111
+ }
112
+ const renameMap = await buildProfileRenameMap(fshFiles);
113
+ if (renameMap.size === 0) {
114
+ console.log(' No profiles without R6 suffix found');
115
+ return;
116
+ }
117
+ const changedFiles = await applyProfileRenames(fshFiles, renameMap);
118
+ console.log(` Renamed ${renameMap.size} profile(s) in ${changedFiles} file(s)`);
119
+ }
120
+
121
+ async function collectFshFiles(dir) {
122
+ const files = [];
123
+ async function walk(current) {
124
+ const entries = await fsp.readdir(current, { withFileTypes: true }).catch(() => []);
125
+ for (const entry of entries) {
126
+ const entryPath = path.join(current, entry.name);
127
+ if (entry.isDirectory()) {
128
+ await walk(entryPath);
129
+ } else if (entry.isFile() && entry.name.toLowerCase().endsWith('.fsh')) {
130
+ files.push(entryPath);
131
+ }
132
+ }
133
+ }
134
+ await walk(dir);
135
+ return files;
136
+ }
137
+
138
+ async function buildProfileRenameMap(files) {
139
+ const renameMap = new Map();
140
+ const profileRegex = /^Profile:\s*(\S+)/;
141
+ for (const file of files) {
142
+ const content = await fsp.readFile(file, 'utf8');
143
+ const lines = content.split(/\r?\n/);
144
+ for (const line of lines) {
145
+ const match = line.match(profileRegex);
146
+ if (!match) {
147
+ continue;
148
+ }
149
+ const name = match[1];
150
+ if (name.endsWith('R6') || renameMap.has(name)) {
151
+ continue;
152
+ }
153
+ renameMap.set(name, `${name}R6`);
154
+ }
155
+ }
156
+ return renameMap;
157
+ }
158
+
159
+ async function applyProfileRenames(files, renameMap) {
160
+ if (renameMap.size === 0) {
161
+ return 0;
162
+ }
163
+ const patterns = [...renameMap.entries()].map(([oldName, newName]) => ({
164
+ newName,
165
+ regex: createProfileNameRegex(oldName),
166
+ }));
167
+
168
+ let changedFiles = 0;
169
+ for (const file of files) {
170
+ const original = await fsp.readFile(file, 'utf8');
171
+ let updated = original;
172
+
173
+ for (const { newName, regex } of patterns) {
174
+ updated = updated.replace(regex, (_, prefix = '') => `${prefix || ''}${newName}`);
175
+ }
176
+
177
+ for (const [oldName, newName] of renameMap.entries()) {
178
+ const oldIdPart = camelCaseToKebabCase(oldName);
179
+ const newIdPart = camelCaseToKebabCase(newName);
180
+
181
+ const urlPattern = new RegExp(
182
+ `(\\^url\\s*=\\s*["\'])([^"']*\\/)${escapeRegExp(oldIdPart)}(["\'])`,
183
+ 'g'
184
+ );
185
+ updated = updated.replace(urlPattern, (_, prefix, urlPrefix, suffix) => {
186
+ return `${prefix}${urlPrefix}${newIdPart}${suffix}`;
187
+ });
188
+
189
+ const idPattern = new RegExp(
190
+ `(\\^id\\s*=\\s*["\'])${escapeRegExp(oldIdPart)}(["\'])`,
191
+ 'g'
192
+ );
193
+ updated = updated.replace(idPattern, (_, prefix, suffix) => {
194
+ return `${prefix}${newIdPart}${suffix}`;
195
+ });
196
+ }
197
+
198
+ if (updated !== original) {
199
+ await fsp.writeFile(file, updated, 'utf8');
200
+ changedFiles += 1;
201
+ }
202
+ }
203
+ return changedFiles;
204
+ }
205
+
206
+ function camelCaseToKebabCase(str) {
207
+ return str
208
+ .replace(/([a-z])([A-Z])/g, '$1-$2')
209
+ .toLowerCase();
210
+ }
211
+
212
+ function createProfileNameRegex(name) {
213
+ const prefix = '(^|[^A-Za-z0-9._-])';
214
+ const suffix = '(?=[^A-Za-z0-9._-]|$)';
215
+ const pattern = `${prefix}(${escapeRegExp(name)})${suffix}`;
216
+ return new RegExp(pattern, 'gm');
217
+ }
218
+
219
+ async function runSushiUntilSuccess(targetDir, sushiExecutable) {
220
+ let iteration = 0;
221
+ while (iteration < MAX_ITERATIONS) {
222
+ iteration += 1;
223
+ console.log(` SUSHI iteration ${iteration}...`);
224
+ const { stdout, stderr, exitCode } = await runSushi(sushiExecutable, targetDir);
225
+ const combinedLog = [stdout, stderr].filter(Boolean).join('\n');
226
+ const logPath = path.join(targetDir, `sushi-upgrade-${iteration}.log`);
227
+ await fsp.writeFile(logPath, combinedLog, 'utf8');
228
+ if (exitCode === 0) {
229
+ console.log(` SUSHI completed successfully (iteration ${iteration})`);
230
+ return;
231
+ }
232
+ console.warn(` SUSHI exited with code ${exitCode}, analyzing errors...`);
233
+
234
+ const errors = parseSushiLog(combinedLog);
235
+ const snomedFix = await fixSnomedCtIssues(errors, targetDir);
236
+ if (snomedFix) {
237
+ console.log(' Applied SNOMED_CT fix, re-running...');
238
+ continue;
239
+ }
240
+ const cardinalityFix = await fixMinCardinalityInstanceErrors(errors, targetDir);
241
+ if (cardinalityFix) {
242
+ console.log(` Disabled ${cardinalityFix} instance file(s), re-running...`);
243
+ continue;
244
+ }
245
+ const modifications = await commentErrorLines(errors, targetDir);
246
+ if (modifications === 0) {
247
+ throw new Error('SUSHI failed but no lines could be commented out');
248
+ }
249
+ console.log(` Commented out lines in ${modifications} file(s)`);
250
+ }
251
+ throw new Error(`SUSHI failed after ${MAX_ITERATIONS} attempts`);
252
+ }
253
+
254
+ async function runSushi(executable, targetDir) {
255
+ const animator = createAnimator('SUSHI working...');
256
+ animator.start();
257
+ try {
258
+ const parts = executable.trim().split(/\s+/);
259
+ const command = parts[0];
260
+ const args = [...parts.slice(1), targetDir];
261
+
262
+ return await spawnProcess(command, args, process.cwd());
263
+ } finally {
264
+ animator.stop();
265
+ }
266
+ }
267
+
268
+ async function commentErrorLines(logEntries, workingDir) {
269
+ const grouped = new Map();
270
+
271
+ for (const entry of logEntries) {
272
+ const normalizedPath = normalizeLogPath(entry.file, workingDir);
273
+ if (!normalizedPath || typeof entry.line !== 'number') {
274
+ continue;
275
+ }
276
+ if (!grouped.has(normalizedPath)) {
277
+ grouped.set(normalizedPath, new Set());
278
+ }
279
+ const startLine = entry.line;
280
+ const endLine = entry.endLine || entry.line;
281
+ for (let ln = startLine; ln <= endLine; ln++) {
282
+ grouped.get(normalizedPath).add(ln);
283
+ }
284
+ }
285
+
286
+ let modifiedFiles = 0;
287
+ for (const [filePath, lines] of grouped.entries()) {
288
+ const updated = await commentLinesInFile(filePath, [...lines]);
289
+ if (updated) {
290
+ modifiedFiles += 1;
291
+ }
292
+ }
293
+ return modifiedFiles;
294
+ }
295
+
296
+ function normalizeLogPath(filePath, workingDir) {
297
+ if (!filePath) {
298
+ return null;
299
+ }
300
+ let candidate = filePath.trim();
301
+ if (!candidate) {
302
+ return null;
303
+ }
304
+
305
+ if (/^[A-Za-z]:[\\/]/.test(candidate)) {
306
+ if (process.platform === 'win32') {
307
+ candidate = candidate.replace(/\//g, '\\');
308
+ } else {
309
+ const drive = candidate[0].toLowerCase();
310
+ const rest = candidate.slice(2).replace(/\\/g, '/').replace(/^\/+/, '');
311
+ candidate = `/mnt/${drive}/${rest}`;
312
+ }
313
+ } else if (process.platform === 'win32') {
314
+ candidate = candidate.replace(/\//g, '\\');
315
+ } else {
316
+ candidate = candidate.replace(/\\/g, '/');
317
+ }
318
+
319
+ if (!path.isAbsolute(candidate)) {
320
+ candidate = path.join(workingDir, candidate);
321
+ }
322
+
323
+ return candidate;
324
+ }
325
+
326
+ async function commentLinesInFile(filePath, lineNumbers) {
327
+ const stat = await fsp.stat(filePath).catch(() => null);
328
+ if (!stat || !stat.isFile()) {
329
+ console.warn(`Could not find file: ${filePath}`);
330
+ return false;
331
+ }
332
+
333
+ const original = await fsp.readFile(filePath, 'utf8');
334
+ const newline = original.includes('\r\n') ? '\r\n' : '\n';
335
+ const lines = original.split(/\r?\n/);
336
+ let changed = false;
337
+
338
+ const expandedLines = expandContainsBlockLines(lines, lineNumbers);
339
+
340
+ expandedLines
341
+ .filter((n) => Number.isInteger(n) && n > 0 && n <= lines.length)
342
+ .sort((a, b) => a - b)
343
+ .forEach((lineNumber) => {
344
+ const idx = lineNumber - 1;
345
+ const current = lines[idx];
346
+ if (current.trim().startsWith('// AUTO-DISABLED (SUSHI R6):')) {
347
+ return;
348
+ }
349
+ const indent = current.match(/^\s*/)[0] || '';
350
+ const content = current.trimStart();
351
+ lines[idx] = `${indent}// AUTO-DISABLED (SUSHI R6): ${content}`;
352
+ changed = true;
353
+ });
354
+
355
+ if (changed) {
356
+ const updated = lines.join(newline);
357
+ await fsp.writeFile(filePath, updated, 'utf8');
358
+ }
359
+
360
+ return changed;
361
+ }
362
+
363
/**
 * Expands a set of 1-based FSH line numbers so that multi-line statements are
 * disabled as a unit.
 *
 * Two expansions are applied to each requested line:
 *  1. A `contains` rule (or any line ending in ` and`) pulls in the indented
 *     slice-definition continuation lines that follow it.
 *  2. A numeric or `+` soft-index rule (`* path[0].` / `* path[+].`) pulls in
 *     the `* path[=].` rules that follow, since they target the same element
 *     instance.
 * Lines already disabled (`// AUTO-DISABLED (SUSHI R6):`) and blank lines are
 * skipped while scanning but do not terminate a run.
 *
 * @param {string[]} lines - the file's lines (0-based array)
 * @param {number[]} lineNumbers - 1-based line numbers to expand
 * @returns {number[]} deduplicated 1-based line numbers including expansions
 */
function expandContainsBlockLines(lines, lineNumbers) {
  const expanded = new Set(lineNumbers);
  const disabledMarker = '// AUTO-DISABLED (SUSHI R6):';

  for (const lineNumber of lineNumbers) {
    const idx = lineNumber - 1;
    if (idx < 0 || idx >= lines.length) {
      continue;
    }

    const currentLine = lines[idx].trim();

    // Case 1: `contains` rule or trailing `and` — absorb the indented
    // slice-definition continuations that complete the statement.
    if (currentLine.includes(' contains') || currentLine.endsWith(' and')) {
      let nextIdx = idx + 1;
      while (nextIdx < lines.length) {
        const nextLine = lines[nextIdx].trim();
        if (nextLine.startsWith(disabledMarker) || nextLine === '') {
          nextIdx++;
          continue;
        }
        const isIndentedContinuation = lines[nextIdx].match(/^\s+/) &&
          (nextLine.match(/^\w+\s+\d/) || nextLine.endsWith(' and') || nextLine.endsWith('MS') || nextLine.endsWith('MS and'));
        const isSliceDefinition = nextLine.match(/^\w+\s+\d+\.\.(\d+|\*)\s*(MS)?\s*(and)?\s*$/);
        if (!isIndentedContinuation && !isSliceDefinition) {
          break;
        }
        expanded.add(nextIdx + 1);
        nextIdx++;
      }
    }

    // Case 2: soft-index rule — absorb the `[=]` rules that reference the
    // same element instance.
    const softIndexMatch = currentLine.match(/^\*\s*(\S+)\[(\d+|\+)\]\./);
    if (softIndexMatch) {
      const elementPath = softIndexMatch[1];
      // Built once per matched line instead of once per scanned line.
      const eqPattern = new RegExp(`^\\*\\s*${escapeRegExp(elementPath)}\\[=\\]\\.`);
      let nextIdx = idx + 1;
      while (nextIdx < lines.length) {
        const nextLine = lines[nextIdx].trim();
        if (nextLine.startsWith(disabledMarker) || nextLine === '') {
          nextIdx++;
          continue;
        }
        if (!eqPattern.test(nextLine)) {
          // Any other rule — including a fresh `[n]`/`[+]` index, which
          // starts a new element instance — ends the run.
          break;
        }
        expanded.add(nextIdx + 1);
        nextIdx++;
      }
    }
  }

  return [...expanded];
}
428
+
429
/**
 * Detects the SUSHI "SNOMED_CT is not a valid URI" error and repairs it by
 * rewriting bare SNOMED_CT references to the `$SNOMED_CT` alias and making
 * sure the alias itself is defined.
 *
 * @param {Array<{message?: string}>} logEntries - parsed SUSHI log entries
 * @param {string} workingDir - root of the workspace being fixed
 * @returns {Promise<boolean>} true when at least one fix was applied
 */
async function fixSnomedCtIssues(logEntries, workingDir) {
  // entry?.message guards against null/undefined entries, matching the
  // defensive `entry &&` check used in fixMinCardinalityInstanceErrors.
  const hasSnomedError = logEntries.some(
    (entry) => typeof entry?.message === 'string' && entry.message.includes(SNOMED_CT_ERROR_TEXT)
  );
  if (!hasSnomedError) {
    return false;
  }
  const replacements = await replacePlainSnomedReferences(workingDir);
  const aliasAdded = await ensureSnomedAliasDefinition(workingDir);
  if (replacements > 0 || aliasAdded) {
    console.log(`  Fixed SNOMED_CT in ${replacements} file(s)${aliasAdded ? ' and added alias' : ''}`);
    return true;
  }
  return false;
}
444
+
445
/**
 * Rewrites bare SNOMED_CT references (not already alias-prefixed with `$`)
 * to `$SNOMED_CT` in every FSH file under rootDir.
 *
 * @param {string} rootDir - directory to scan for .fsh files
 * @returns {Promise<number>} number of files that were modified
 */
async function replacePlainSnomedReferences(rootDir) {
  const fshFiles = await collectFshFiles(rootDir);
  // Capture the non-identifier character (or line start) before the token so
  // `$SNOMED_CT` itself is never rewritten.
  const bareSnomed = /(^|[^$A-Za-z0-9_])SNOMED_CT\b/gm;
  let touched = 0;
  for (const fshFile of fshFiles) {
    const before = await fsp.readFile(fshFile, 'utf8');
    const after = before.replace(bareSnomed, (_, lead) => `${lead}$SNOMED_CT`);
    if (after === before) {
      continue;
    }
    await fsp.writeFile(fshFile, after, 'utf8');
    touched += 1;
  }
  return touched;
}
459
+
460
/**
 * Guarantees that an `Alias: $SNOMED_CT = ...` definition exists in the
 * project's alias file, creating the file or appending the line as needed.
 *
 * @param {string} rootDir - workspace root
 * @returns {Promise<boolean>} true when the alias line was added
 */
async function ensureSnomedAliasDefinition(rootDir) {
  const aliasLine = 'Alias: $SNOMED_CT = http://snomed.info/sct';
  const { filePath, exists } = await resolveAliasFilePath(rootDir);

  let existing = '';
  if (exists) {
    existing = await fsp.readFile(filePath, 'utf8');
    // Alias already defined — nothing to do.
    if (/^\s*Alias:\s*\$SNOMED_CT\b/m.test(existing)) {
      return false;
    }
  }

  await fsp.mkdir(path.dirname(filePath), { recursive: true });

  if (!exists || existing.length === 0) {
    // Fresh or empty file: the alias becomes its only line.
    await fsp.writeFile(filePath, `${aliasLine}${os.EOL || '\n'}`, 'utf8');
    return true;
  }

  // Append, preserving the file's dominant line-ending style and avoiding a
  // double blank line when the file already ends with a newline.
  const eol = existing.includes('\r\n') ? '\r\n' : '\n';
  const hasTrailingEol = existing.endsWith('\r') || existing.endsWith('\n');
  const appended = hasTrailingEol
    ? `${existing}${aliasLine}${eol}`
    : `${existing}${eol}${aliasLine}${eol}`;
  await fsp.writeFile(filePath, appended, 'utf8');
  return true;
}
483
+
484
/**
 * Disables FSH instance files that SUSHI reports as violating a
 * "minimum cardinality 1 but occurs 0 time(s)" constraint. Only `.fsh`
 * files under an `instances` directory are touched.
 *
 * @param {Array<{message?: string, file?: string}>} logEntries - parsed SUSHI log entries
 * @param {string} workingDir - workspace root used to resolve log paths
 * @returns {Promise<number>} number of instance files disabled
 */
async function fixMinCardinalityInstanceErrors(logEntries, workingDir) {
  const isCardinalityError = (entry) =>
    entry &&
    typeof entry.message === 'string' &&
    entry.message.includes('minimum cardinality 1 but occurs 0 time(s).') &&
    typeof entry.file === 'string';

  const offenders = logEntries.filter(isCardinalityError);
  if (offenders.length === 0) {
    return 0;
  }

  let disabled = 0;
  for (const offender of offenders) {
    const fshPath = normalizeLogPath(offender.file, workingDir);
    const isInstanceFile =
      fshPath &&
      fshPath.endsWith('.fsh') &&
      fshPath.includes(`${path.sep}instances${path.sep}`);
    if (!isInstanceFile) {
      continue;
    }
    // Strip the ".fsh" extension — presumably so SUSHI no longer compiles
    // the file on the next run.
    const disabledPath = fshPath.slice(0, -4);
    if (await disableInstanceFile(fshPath, disabledPath)) {
      disabled += 1;
    }
  }
  return disabled;
}
514
+
515
/**
 * Disables a FSH instance file by renaming it to targetPath and suffixing its
 * `Instance:` identifier with `-disabled`.
 *
 * Fix over the previous version: the rename is performed FIRST and its result
 * is checked. Previously a failed rename was swallowed (`.catch(() => null)`)
 * while the function still returned true, so callers over-counted disabled
 * files and the source file was left mutated but not renamed.
 *
 * @param {string} sourcePath - existing .fsh instance file
 * @param {string} targetPath - path the file is moved to
 * @returns {Promise<boolean>} true only when the file was actually renamed
 */
async function disableInstanceFile(sourcePath, targetPath) {
  const stat = await fsp.stat(sourcePath).catch(() => null);
  if (!stat || !stat.isFile()) {
    return false;
  }

  const original = await fsp.readFile(sourcePath, 'utf8');
  const newline = original.includes('\r\n') ? '\r\n' : '\n';
  const lines = original.split(/\r?\n/);

  // Suffix the first `Instance:` declaration with `-disabled` (idempotent:
  // an already-suffixed name is left alone).
  let renamedInstance = false;
  for (let i = 0; i < lines.length; i += 1) {
    const match = lines[i].match(/^(\s*Instance:\s*)(\S+)(.*)$/);
    if (match) {
      if (!match[2].endsWith('-disabled')) {
        lines[i] = `${match[1]}${match[2]}-disabled${match[3] || ''}`;
        renamedInstance = true;
      }
      break;
    }
  }

  // Rename first so a failure leaves the source file untouched.
  const renamed = await fsp.rename(sourcePath, targetPath).then(
    () => true,
    () => false
  );
  if (!renamed) {
    return false;
  }

  if (renamedInstance) {
    await fsp.writeFile(targetPath, lines.join(newline), 'utf8');
  }
  return true;
}
539
+
540
/**
 * Locates the project's FSH alias file, probing conventional locations in
 * priority order (input/fsh first, then the project root).
 *
 * @param {string} rootDir - workspace root
 * @returns {Promise<{filePath: string, exists: boolean}>} the first existing
 *   candidate, or the preferred default path with exists=false
 */
async function resolveAliasFilePath(rootDir) {
  const fshDir = path.join(rootDir, 'input', 'fsh');
  const candidates = [
    path.join(fshDir, 'aliases.fsh'),
    path.join(fshDir, 'alias.fsh'),
    path.join(rootDir, 'aliases.fsh'),
    path.join(rootDir, 'alias.fsh'),
  ];
  // Checked sequentially on purpose: the first hit wins.
  for (const filePath of candidates) {
    if (await fileExists(filePath)) {
      return { filePath, exists: true };
    }
  }
  // Nothing found — report the preferred location so callers can create it.
  return { filePath: candidates[0], exists: false };
}