@gefyra/diffyr6-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +447 -0
- package/config/README.md +27 -0
- package/config/default-rules.json +135 -0
- package/config/resources-r4-not-in-r6.json +42 -0
- package/package.json +54 -0
- package/src/cli.js +93 -0
- package/src/compare-profiles.js +386 -0
- package/src/config.js +147 -0
- package/src/generate-fsh.js +457 -0
- package/src/index.js +394 -0
- package/src/rules-engine.js +642 -0
- package/src/upgrade-sushi.js +553 -0
- package/src/utils/fs.js +38 -0
- package/src/utils/html.js +28 -0
- package/src/utils/process.js +101 -0
- package/src/utils/removed-resources.js +135 -0
- package/src/utils/sushi-log.js +46 -0
- package/src/utils/validator.js +103 -0
|
@@ -0,0 +1,457 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import fsp from 'fs/promises';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import os from 'os';
|
|
5
|
+
import https from 'https';
|
|
6
|
+
import { spawn } from 'child_process';
|
|
7
|
+
import { fileExists } from './utils/fs.js';
|
|
8
|
+
import { spawnProcess, createAnimator } from './utils/process.js';
|
|
9
|
+
import { parseSushiLog } from './utils/sushi-log.js';
|
|
10
|
+
|
|
11
|
+
// FHIR tooling packages that must not be carried over into the generated
// sushi-config.yaml as IG dependencies (filtered out in readProjectDependencies).
const IGNORED_PACKAGE_DEPENDENCIES = new Set(['gofsh', 'sushi']);
|
|
12
|
+
|
|
13
|
+
/**
 * Downloads a FHIR package from the FHIR package registry, generates FSH
 * sources from it with GoFSH, merges the package's dependencies into the
 * generated sushi-config.yaml, and validates the result with SUSHI.
 *
 * @param {string} packageSpec - Package specifier in `name#version` form;
 *   when the version part is omitted, `current` is used.
 * @param {string} outputDir - Directory that receives the generated FSH.
 * @returns {Promise<void>}
 * @throws {Error} When download, extraction, GoFSH, or post-processing fails.
 */
export async function generateFshFromPackage(packageSpec, outputDir) {
  const [packageName, versionFromArg] = packageSpec.split('#');
  const version = versionFromArg && versionFromArg.length > 0 ? versionFromArg : 'current';
  const packageSpecifier = `${packageName}/${version}`;
  const displaySpecifier = `${packageName}#${version}`;
  const downloadUrl = `https://packages.fhir.org/${packageSpecifier}`;
  const gofshBin = await resolveGofshExecutable(process.env.GOFSH_BIN);

  // All intermediate artifacts live under a temp dir removed in `finally`.
  const tempRoot = await fsp.mkdtemp(path.join(os.tmpdir(), 'gofsh-'));
  const archivePath = path.join(tempRoot, 'package.tgz');
  const extractDir = path.join(tempRoot, 'extracted');

  console.log(` Downloading ${displaySpecifier}...`);
  try {
    await downloadToFile(downloadUrl, archivePath);
    await fsp.mkdir(extractDir, { recursive: true });
    console.log(' Extracting package...');
    await extractArchive(archivePath, extractDir);

    const packageDir = await resolvePackageDir(extractDir);
    await ensurePackageContent(packageDir);
    await fsp.mkdir(outputDir, { recursive: true });

    const gofshArgs = ['--out', outputDir, packageDir];
    console.log(` Running GoFSH...`);
    await runCommand(gofshBin, gofshArgs);
    console.log(' GoFSH finished successfully');

    // Use the resolved packageDir (not a hard-coded `extracted/package`) so
    // dependency merging also works for archives without a `package/` folder,
    // consistent with the fallback in resolvePackageDir.
    await updateSushiConfigDependencies(packageDir, outputDir);
    await runSushiWithDuplicateSliceFix(outputDir);
  } catch (err) {
    // Keep the original error/stack reachable via `cause` while surfacing a
    // stable top-level message.
    throw new Error(`Failed to generate FSH from package: ${err.message}`, { cause: err });
  } finally {
    await fsp.rm(tempRoot, { recursive: true, force: true }).catch(() => {});
  }
}
|
|
52
|
+
|
|
53
|
+
/**
 * Determines which GoFSH executable to invoke: an explicit override wins,
 * then local candidates (cwd and node_modules/.bin), then plain `gofsh`
 * resolved via the PATH.
 *
 * @param {string | undefined} overridePath - Optional explicit binary path
 *   (typically from the GOFSH_BIN environment variable).
 * @returns {Promise<string>} Path or command name to run.
 */
async function resolveGofshExecutable(overridePath) {
  if (overridePath) {
    return overridePath;
  }
  // Windows launchers come in several flavors; elsewhere a bare binary suffices.
  const names = process.platform === 'win32'
    ? ['gofsh.cmd', 'gofsh.exe', 'gofsh.bat', 'gofsh']
    : ['gofsh'];
  const roots = [process.cwd(), path.resolve(process.cwd(), 'node_modules', '.bin')];
  const candidates = roots.flatMap(root => names.map(name => path.join(root, name)));

  for (const candidate of candidates) {
    if (await fileExists(candidate)) {
      return candidate;
    }
  }
  // Nothing found locally — rely on PATH resolution.
  return 'gofsh';
}
|
|
77
|
+
|
|
78
|
+
/**
 * Downloads `url` to `destination` over HTTPS, following redirects.
 *
 * @param {string} url - Source URL.
 * @param {string} destination - Local file path to write.
 * @param {number} [redirectsRemaining=5] - Redirect budget; guards against
 *   redirect loops (backward-compatible added parameter).
 * @returns {Promise<void>}
 */
function downloadToFile(url, destination, redirectsRemaining = 5) {
  return new Promise((resolve, reject) => {
    const file = fs.createWriteStream(destination);
    // A failing write (bad path, disk full) must reject the promise instead
    // of raising an unhandled 'error' event on the stream.
    file.on('error', reject);
    https
      .get(url, response => {
        const { statusCode } = response;
        if (statusCode && statusCode >= 300 && statusCode < 400 && response.headers.location) {
          response.destroy();
          file.close(() => {
            if (redirectsRemaining <= 0) {
              reject(new Error(`Too many redirects for ${url}`));
              return;
            }
            // Location may be relative; resolve it against the current URL.
            const nextUrl = new URL(response.headers.location, url).toString();
            downloadToFile(nextUrl, destination, redirectsRemaining - 1).then(resolve).catch(reject);
          });
          return;
        }

        if (statusCode !== 200) {
          response.destroy();
          file.close(() => reject(new Error(`Download failed (${statusCode}) ${url}`)));
          return;
        }

        response.pipe(file);
        file.on('finish', () => file.close(resolve));
      })
      .on('error', reject);
  });
}
|
|
103
|
+
|
|
104
|
+
/**
 * Returns `<extractedRoot>/package` when that entry exists and is a
 * directory; otherwise falls back to the extraction root itself.
 *
 * @param {string} extractedRoot - Directory the archive was extracted into.
 * @returns {Promise<string>}
 */
async function resolvePackageDir(extractedRoot) {
  const packageSubdir = path.join(extractedRoot, 'package');
  try {
    const info = await fsp.stat(packageSubdir);
    if (info.isDirectory()) {
      return packageSubdir;
    }
  } catch {
    // No `package` entry at all — fall through to the root.
  }
  return extractedRoot;
}
|
|
112
|
+
|
|
113
|
+
/**
 * Verifies that the extracted package contains a `package.json` manifest.
 *
 * @param {string} packageDir - Directory expected to hold the manifest.
 * @throws {Error} When the manifest is missing or inaccessible.
 */
async function ensurePackageContent(packageDir) {
  const manifestPath = path.join(packageDir, 'package.json');
  try {
    await fsp.access(manifestPath);
  } catch {
    throw new Error(`Extracted package incomplete - ${manifestPath} missing`);
  }
}
|
|
121
|
+
|
|
122
|
+
/**
 * Unpacks a gzipped tarball into `destination` using the system `tar` binary.
 *
 * @param {string} archivePath - Path of the downloaded .tgz file.
 * @param {string} destination - Directory to extract into (must exist).
 */
async function extractArchive(archivePath, destination) {
  const tarArgs = ['-xzf', archivePath, '-C', destination];
  await runCommand('tar', tarArgs);
}
|
|
125
|
+
|
|
126
|
+
/**
 * Spawns `command` with `args`, inheriting stdio, and resolves when the
 * child exits successfully.
 *
 * @param {string} command - Executable or launcher to run.
 * @param {string[]} args - Arguments for the child process.
 * @param {object} [options] - Extra child_process.spawn options (merged last).
 * @returns {Promise<void>}
 * @throws {Error} When the process cannot be spawned, exits non-zero, or is
 *   terminated by a signal.
 */
function runCommand(command, args, options = {}) {
  return new Promise((resolve, reject) => {
    // Windows .cmd/.bat launchers are not real executables and must run
    // through the shell.
    const needsShell =
      process.platform === 'win32' &&
      typeof command === 'string' &&
      (command.toLowerCase().endsWith('.cmd') || command.toLowerCase().endsWith('.bat'));
    const child = spawn(command, args, {
      stdio: 'inherit',
      shell: needsShell,
      ...options,
    });
    child.on('error', reject);
    child.on('close', (code, signal) => {
      if (code === 0) {
        resolve();
      } else if (signal) {
        // A signal-killed child reports code === null; surface the signal
        // instead of the misleading "exited with code null".
        reject(new Error(`${command} terminated by signal ${signal}`));
      } else {
        reject(new Error(`${command} exited with code ${code}`));
      }
    });
  });
}
|
|
147
|
+
|
|
148
|
+
/**
 * Copies the FHIR dependencies declared by the source package into the
 * generated sushi-config.yaml, replacing any existing `dependencies:` block.
 * No-op when the package declares nothing usable or no config is found.
 *
 * @param {string} packageDir - Directory holding the package's package.json.
 * @param {string} outputDir - Directory containing the GoFSH output.
 */
async function updateSushiConfigDependencies(packageDir, outputDir) {
  const dependencies = await readProjectDependencies(packageDir);
  const hasDependencies = dependencies && Object.keys(dependencies).length > 0;
  if (!hasDependencies) {
    return;
  }
  const configPath = await findSushiConfig(outputDir);
  if (!configPath) {
    console.warn(' Could not find sushi-config.yaml - dependencies not updated');
    return;
  }
  const changed = await mergeDependenciesIntoConfig(configPath, dependencies);
  if (changed) {
    console.log(` Updated dependencies in ${path.basename(configPath)}`);
  }
}
|
|
163
|
+
|
|
164
|
+
/**
 * Reads the dependency map from a package's package.json, preferring the
 * FHIR-specific `fhirDependencies` field over plain `dependencies`.
 * Tooling packages listed in IGNORED_PACKAGE_DEPENDENCIES are skipped and
 * version strings are trimmed.
 *
 * @param {string} targetDir - Directory expected to contain package.json.
 * @returns {Promise<Record<string, string> | null>} Name→version map, or
 *   null when the manifest is missing/unparseable or yields no entries.
 */
async function readProjectDependencies(targetDir) {
  const manifestPath = path.join(targetDir, 'package.json');
  const raw = await fsp.readFile(manifestPath, 'utf8').catch(() => null);
  if (!raw) {
    return null;
  }
  let manifest;
  try {
    manifest = JSON.parse(raw);
  } catch {
    return null;
  }
  const hasFhirDeps = manifest.fhirDependencies && Object.keys(manifest.fhirDependencies).length > 0;
  const source = hasFhirDeps ? manifest.fhirDependencies : manifest.dependencies;
  if (!source) {
    return null;
  }
  const result = {};
  for (const [name, version] of Object.entries(source)) {
    if (typeof version !== 'string' || version.trim().length === 0) {
      continue;
    }
    if (IGNORED_PACKAGE_DEPENDENCIES.has(name)) {
      continue;
    }
    result[name] = version.trim();
  }
  return Object.keys(result).length > 0 ? result : null;
}
|
|
188
|
+
|
|
189
|
+
/**
 * Recursively searches `dir` (depth-first) for a SUSHI configuration file
 * (sushi-config.yaml or sushi-config.yml).
 *
 * @param {string} dir - Root directory to search.
 * @returns {Promise<string | null>} Path of the first config found, or null.
 */
async function findSushiConfig(dir) {
  const candidates = ['sushi-config.yaml', 'sushi-config.yml'];
  for (const candidate of candidates) {
    const filePath = path.join(dir, candidate);
    if (await fileExists(filePath)) {
      return filePath;
    }
  }
  // fs.readdir never returns '.' or '..', so no self/parent guard is needed.
  const entries = await fsp.readdir(dir, { withFileTypes: true }).catch(() => []);
  for (const entry of entries) {
    if (!entry.isDirectory()) {
      continue;
    }
    const nested = await findSushiConfig(path.join(dir, entry.name));
    if (nested) {
      return nested;
    }
  }
  return null;
}
|
|
212
|
+
|
|
213
|
+
/**
 * Writes `dependencies` into the YAML config at `configPath`, either
 * replacing the existing top-level `dependencies:` block or appending a new
 * one. Preserves the file's dominant newline style.
 *
 * @param {string} configPath - Path to sushi-config.yaml/.yml.
 * @param {Record<string, string>} dependencies - Name→version map to write.
 * @returns {Promise<boolean>} true when the file content actually changed.
 */
async function mergeDependenciesIntoConfig(configPath, dependencies) {
  const original = await fsp.readFile(configPath, 'utf8');
  const newline = original.includes('\r\n') ? '\r\n' : '\n';
  const depLines = Object.entries(dependencies).map(([name, version]) => `  ${name}: ${version}`);
  const blockLines = ['dependencies:', ...depLines];
  const lines = original.split(/\r?\n/);
  const { start, end } = findDependencyBlockRange(lines);

  let updated;
  if (start === -1) {
    // No existing block: append one after the current content.
    const trimmed = original.trimEnd();
    const separator = trimmed.length > 0 ? `${newline}${newline}` : '';
    updated = `${trimmed}${separator}${blockLines.join(newline)}${newline}`;
  } else {
    lines.splice(start, end - start, ...blockLines);
    updated = lines.join(newline);
  }

  if (updated === original) {
    return false;
  }
  await fsp.writeFile(configPath, updated, 'utf8');
  return true;
}
|
|
234
|
+
|
|
235
|
+
/**
 * Locates the top-level `dependencies:` block in an array of YAML lines.
 *
 * @param {string[]} lines - File content split into lines.
 * @returns {{start: number, end: number}} Index of the `dependencies:` line
 *   and the exclusive end of its indented continuation lines; both -1 when
 *   no block exists.
 */
function findDependencyBlockRange(lines) {
  // A top-level key has no leading whitespace, so a plain prefix check is
  // exactly equivalent to the trim-then-compare form.
  const start = lines.findIndex(line => line.startsWith('dependencies:'));
  if (start === -1) {
    return { start: -1, end: -1 };
  }
  // Every line that begins with whitespace belongs to the block; the first
  // non-indented line (including an empty one) terminates it.
  let end = start + 1;
  while (end < lines.length && /^\s/.test(lines[end])) {
    end += 1;
  }
  return { start, end };
}
|
|
256
|
+
|
|
257
|
+
/**
 * Runs SUSHI over the generated FSH; when it fails, attempts to auto-fix
 * duplicate-slice errors reported in the log and runs SUSHI once more.
 * Remaining errors after the rerun are reported but not fatal.
 *
 * @param {string} outputDir - Directory containing the generated FSH project.
 */
async function runSushiWithDuplicateSliceFix(outputDir) {
  const sushiExec = process.env.SUSHI_BIN || 'sushi';
  const sushiArgs = ['-s', outputDir];
  console.log(` Validating FSH with SUSHI...`);
  const firstPass = await runSushiOnce(sushiExec, sushiArgs, outputDir, 'sushi-from-gofsh.log');
  if (firstPass.exitCode === 0) {
    console.log(' SUSHI completed without errors');
    return;
  }

  const fixCount = await fixDuplicateSlicesFromLog(firstPass.logEntries, outputDir);
  if (fixCount === 0) {
    console.warn(' SUSHI reported errors but no duplicate slice issues were detected');
    return;
  }
  console.log(` Applied ${fixCount} duplicate-slice fix(es), re-running SUSHI...`);
  const secondPass = await runSushiOnce(sushiExec, sushiArgs, outputDir, 'sushi-from-gofsh-rerun.log');
  if (secondPass.exitCode !== 0) {
    console.warn(' SUSHI still reports errors after auto-fix');
  }
}
|
|
278
|
+
|
|
279
|
+
/**
 * Executes SUSHI once with a progress animation, persists combined
 * stdout/stderr to a log file in `cwd` (when non-empty), and parses the
 * output into structured log entries.
 *
 * @param {string} executable - SUSHI binary/command to run.
 * @param {string[]} args - Command-line arguments.
 * @param {string} cwd - Working directory; also receives the log file.
 * @param {string} logFileName - Name of the log file to write.
 * @returns {Promise<{exitCode: number, logEntries: Array}>}
 */
async function runSushiOnce(executable, args, cwd, logFileName) {
  const animator = createAnimator('SUSHI working...');
  animator.start();
  let result;
  try {
    result = await spawnProcess(executable, args, cwd);
  } finally {
    // Stop the spinner whether SUSHI succeeded or threw.
    animator.stop();
  }
  const { stdout, stderr, exitCode } = result;
  const combined = [stdout, stderr].filter(Boolean).join('\n');
  if (combined.trim()) {
    await fsp.writeFile(path.join(cwd, logFileName), combined, 'utf8');
  }
  return { exitCode, logEntries: parseSushiLog(combined) };
}
|
|
293
|
+
|
|
294
|
+
/**
 * Scans SUSHI log entries for duplicate-slice errors and rewrites each
 * offending `contains` rule block into individual per-slice rules.
 *
 * @param {Array} logEntries - Parsed SUSHI log entries.
 * @param {string} outputDir - Base directory for relative file paths.
 * @returns {Promise<number>} Number of `contains` blocks rewritten.
 */
async function fixDuplicateSlicesFromLog(logEntries, outputDir) {
  const duplicates = logEntries
    .map(parseDuplicateSliceMessage)
    .filter(entry => entry && entry.file && entry.elementPath && entry.sliceName);
  if (duplicates.length === 0) {
    return 0;
  }

  // file path -> element path -> set of duplicated slice names
  const byFile = new Map();
  for (const { file, elementPath, sliceName } of duplicates) {
    const targetFile = path.isAbsolute(file) ? file : path.join(outputDir, file);
    const fshPath = toFshPath(elementPath);
    if (!fshPath) {
      continue;
    }
    let perElement = byFile.get(targetFile);
    if (!perElement) {
      perElement = new Map();
      byFile.set(targetFile, perElement);
    }
    let names = perElement.get(fshPath);
    if (!names) {
      names = new Set();
      perElement.set(fshPath, names);
    }
    names.add(sliceName);
  }

  let changedBlocks = 0;
  for (const [filePath, elementMap] of byFile.entries()) {
    changedBlocks += await rewriteContainsBlocks(filePath, elementMap);
  }
  return changedBlocks;
}
|
|
326
|
+
|
|
327
|
+
/**
 * Extracts the slice name and element path from a SUSHI duplicate-slice
 * error log entry.
 *
 * @param {{file?: string, message?: string} | null} entry - Parsed log entry.
 * @returns {{file: string, sliceName: string, elementPath: string} | null}
 *   null when the entry is not a duplicate-slice message.
 */
function parseDuplicateSliceMessage(entry) {
  if (!entry || typeof entry.message !== 'string') {
    return null;
  }
  const pattern = /Slice named (\S+) already exists on element (\S+) of/i;
  const match = pattern.exec(entry.message);
  if (match === null) {
    return null;
  }
  return { file: entry.file, sliceName: match[1], elementPath: match[2] };
}
|
|
342
|
+
|
|
343
|
+
/**
 * Converts an ElementDefinition path (e.g. `Patient.identifier:slice.value`)
 * into FSH rule-path syntax (`identifier[slice].value`): drops the leading
 * root segment and rewrites `:name` slice markers as `[name]`.
 *
 * @param {*} elementPath - Dotted element path.
 * @returns {string | null} FSH path, or null for empty/non-string input.
 */
function toFshPath(elementPath) {
  if (!elementPath || typeof elementPath !== 'string') {
    return null;
  }
  const firstDot = elementPath.indexOf('.');
  const withoutRoot = firstDot === -1 ? elementPath : elementPath.slice(firstDot + 1);
  const segments = withoutRoot.split('.');
  const converted = segments.map(segment => segment.replace(/:([^.:]+)/g, '[$1]'));
  return converted.join('.');
}
|
|
355
|
+
|
|
356
|
+
/**
 * For each element path in `elementMap`, replaces the matching `contains`
 * rule block in the FSH file with one explicit rule per slice, preserving
 * indentation, cardinality, and trailing flags.
 *
 * @param {string} filePath - FSH file to rewrite.
 * @param {Map<string, Set<string>>} elementMap - element path -> slice names.
 * @returns {Promise<number>} Number of `contains` blocks replaced.
 */
async function rewriteContainsBlocks(filePath, elementMap) {
  const stat = await fsp.stat(filePath).catch(() => null);
  if (!stat || !stat.isFile()) {
    console.warn(`Cannot apply duplicate-slice fix. File not found: ${filePath}`);
    return 0;
  }
  const original = await fsp.readFile(filePath, 'utf8');
  const newline = original.includes('\r\n') ? '\r\n' : '\n';
  const lines = original.split(/\r?\n/);
  let replacements = 0;

  for (const [elementPath] of elementMap.entries()) {
    // Re-parse on every iteration: earlier splices shift line indexes.
    const block = parseContainsBlock(lines, elementPath);
    if (!block || block.slices.length === 0) {
      continue;
    }
    const rules = block.slices.map(slice =>
      [`${block.indent}* ${elementPath}[${slice.sliceName}]`, slice.cardinality, slice.suffix]
        .filter(Boolean)
        .join(' ')
        .trimEnd()
    );
    if (rules.length === 0) {
      continue;
    }
    lines.splice(block.start, block.end - block.start + 1, ...rules);
    replacements += 1;
  }

  if (replacements > 0) {
    const updated = lines.join(newline);
    if (updated !== original) {
      await fsp.writeFile(filePath, updated, 'utf8');
    }
  }
  return replacements;
}
|
|
398
|
+
|
|
399
|
+
/**
 * Finds the `* <elementPath> contains ...` rule (including multi-line
 * continuations joined by a trailing `and`) in `lines`.
 *
 * @param {string[]} lines - FSH file content split into lines.
 * @param {string} elementPath - FSH element path the rule applies to.
 * @returns {{start: number, end: number, indent: string, slices: Array} | null}
 *   Start/end line indexes of the rule, its leading indentation, and the
 *   parsed slice entries; null when no matching rule exists.
 */
function parseContainsBlock(lines, elementPath) {
  const ruleRegex = new RegExp(`^\\s*\\*\\s+${escapeRegExp(elementPath)}\\s+contains\\b`, 'i');
  for (let i = 0; i < lines.length; i += 1) {
    const line = lines[i];
    if (!ruleRegex.test(line)) {
      continue;
    }
    const indentMatch = line.match(/^\s*/);
    const indent = indentMatch ? indentMatch[0] : '';
    // Split on the word-bounded `contains` keyword, so an element name that
    // merely embeds the substring "contains" is not cut in half.
    const afterContains = line.split(/\bcontains\b/i)[1]?.trim() || '';
    let sliceText = afterContains;
    let end = i;
    let cursor = i + 1;
    // The rule continues onto the next line while the accumulated text is
    // empty or ends with the standalone keyword `and` (word-boundary check,
    // so slice names like `command` do not trigger a bogus continuation).
    while (!sliceText || /(?:^|\s)and$/i.test(sliceText)) {
      const next = lines[cursor];
      if (!next) {
        break;
      }
      const trimmed = next.trim();
      if (!trimmed || trimmed.startsWith('*')) {
        break;
      }
      sliceText = `${sliceText} ${trimmed}`.trim();
      end = cursor;
      cursor += 1;
    }
    return { start: i, end, indent, slices: parseSliceEntries(sliceText) };
  }
  return null;
}
|
|
430
|
+
|
|
431
|
+
/**
 * Parses the text after `contains` into slice descriptors. Entries are
 * separated by the `and` keyword; each entry has the shape
 * `<type-or-name> [named <name>] [<min>..<max>] [<flags...>]`.
 *
 * @param {string} sliceText - Accumulated rule text after `contains`.
 * @returns {Array<{sliceName: string, cardinality: string, suffix: string}>}
 */
function parseSliceEntries(sliceText) {
  if (!sliceText) {
    return [];
  }
  const entryRegex = /^(\S+)(?:\s+named\s+(\S+))?\s*(\d+\.\.\d+|\d+\.\.\*)?\s*(.*)$/i;
  const results = [];
  for (const rawEntry of sliceText.split(/\s+and\s+/i)) {
    const entry = rawEntry.replace(/\s+and\s*$/i, '').trim();
    if (!entry) {
      continue;
    }
    const match = entryRegex.exec(entry);
    if (!match) {
      continue;
    }
    const [, token, named, cardinality, rest] = match;
    results.push({
      // An explicit `named X` wins; otherwise the token itself, with any
      // surrounding brackets stripped.
      sliceName: (named || token).replace(/^\[|\]$/g, ''),
      cardinality: cardinality || '',
      suffix: (rest || '').replace(/\s+and\s*$/i, '').trim(),
    });
  }
  return results;
}
|
|
454
|
+
|
|
455
|
+
/**
 * Escapes regex metacharacters in `value` so the string can be embedded
 * verbatim inside a RegExp pattern.
 *
 * @param {string} value - Raw text to escape.
 * @returns {string} Escaped pattern fragment.
 */
function escapeRegExp(value) {
  const specials = new Set(['.', '*', '+', '?', '^', '$', '{', '}', '(', ')', '|', '[', ']', '\\']);
  let escaped = '';
  for (const ch of value) {
    escaped += specials.has(ch) ? `\\${ch}` : ch;
  }
  return escaped;
}
|