@hey-api/json-schema-ref-parser 1.1.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -11
- package/dist/lib/bundle.js +16 -1
- package/dist/lib/index.d.ts +15 -0
- package/dist/lib/index.js +303 -0
- package/dist/lib/resolve-external.js +8 -4
- package/lib/bundle.ts +15 -1
- package/lib/index.ts +356 -0
- package/lib/resolve-external.ts +11 -10
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -15,9 +15,9 @@
|
|
|
15
15
|
Install using [npm](https://docs.npmjs.com/about-npm/):
|
|
16
16
|
|
|
17
17
|
```bash
|
|
18
|
-
npm install @
|
|
19
|
-
yarn add @
|
|
20
|
-
bun add @
|
|
18
|
+
npm install @hey-api/json-schema-ref-parser
|
|
19
|
+
yarn add @hey-api/json-schema-ref-parser
|
|
20
|
+
bun add @hey-api/json-schema-ref-parser
|
|
21
21
|
```
|
|
22
22
|
|
|
23
23
|
## The Problem:
|
|
@@ -69,21 +69,61 @@ JavaScript objects.
|
|
|
69
69
|
## Example
|
|
70
70
|
|
|
71
71
|
```javascript
|
|
72
|
-
import $RefParser from "@
|
|
72
|
+
import { $RefParser } from "@hey-api/json-schema-ref-parser";
|
|
73
73
|
|
|
74
74
|
try {
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
console.log(
|
|
78
|
-
|
|
79
|
-
// if you want to avoid modifying the original schema, you can disable the `mutateInputSchema` option
|
|
80
|
-
let clonedSchema = await $RefParser.dereference(mySchema, { mutateInputSchema: false });
|
|
81
|
-
console.log(clonedSchema.definitions.person.properties.firstName);
|
|
75
|
+
const parser = new $RefParser();
|
|
76
|
+
await parser.dereference({ pathOrUrlOrSchema: mySchema });
|
|
77
|
+
console.log(parser.schema.definitions.person.properties.firstName);
|
|
82
78
|
} catch (err) {
|
|
83
79
|
console.error(err);
|
|
84
80
|
}
|
|
85
81
|
```
|
|
86
82
|
|
|
83
|
+
### New in this fork (@hey-api)
|
|
84
|
+
|
|
85
|
+
- **Multiple inputs with `bundleMany`**: Merge and bundle several OpenAPI/JSON Schema inputs (files, URLs, or raw objects) into a single schema. Components are prefixed to avoid name collisions, paths are namespaced on conflict, and `$ref`s are rewritten accordingly.
|
|
86
|
+
|
|
87
|
+
```javascript
|
|
88
|
+
import { $RefParser } from "@hey-api/json-schema-ref-parser";
|
|
89
|
+
|
|
90
|
+
const parser = new $RefParser();
|
|
91
|
+
const merged = await parser.bundleMany({
|
|
92
|
+
pathOrUrlOrSchemas: [
|
|
93
|
+
"./specs/a.yaml",
|
|
94
|
+
"https://example.com/b.yaml",
|
|
95
|
+
{ openapi: "3.1.0", info: { title: "Inline" }, paths: {} },
|
|
96
|
+
],
|
|
97
|
+
});
|
|
98
|
+
|
|
99
|
+
// merged.components.* will contain prefixed names like a_<name>, b_<name>, etc.
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
- **Dereference hooks**: Fine-tune dereferencing with `excludedPathMatcher(path) => boolean` to skip subpaths and `onDereference(path, value, parent, parentPropName)` to observe replacements.
|
|
103
|
+
|
|
104
|
+
```javascript
|
|
105
|
+
const parser = new $RefParser();
|
|
106
|
+
parser.options.dereference.excludedPathMatcher = (p) => p.includes("/example/");
|
|
107
|
+
parser.options.dereference.onDereference = (p, v) => {
|
|
108
|
+
// inspect p / v as needed
|
|
109
|
+
};
|
|
110
|
+
await parser.dereference({ pathOrUrlOrSchema: "./openapi.yaml" });
|
|
111
|
+
```
|
|
112
|
+
|
|
113
|
+
- **Smart input resolution**: You can pass a file path, URL, or raw schema object. If a raw schema includes `$id`, it is used as the base URL for resolving relative `$ref`s.
|
|
114
|
+
|
|
115
|
+
```javascript
|
|
116
|
+
await new $RefParser().bundle({
|
|
117
|
+
pathOrUrlOrSchema: {
|
|
118
|
+
$id: "https://api.example.com/openapi.json",
|
|
119
|
+
openapi: "3.1.0",
|
|
120
|
+
paths: {
|
|
121
|
+
"/ping": { get: { responses: { 200: { description: "ok" } } } },
|
|
122
|
+
},
|
|
123
|
+
},
|
|
124
|
+
});
|
|
125
|
+
```
|
|
126
|
+
|
|
87
127
|
## Polyfills
|
|
88
128
|
|
|
89
129
|
If you are using Node.js < 18, you'll need a polyfill for `fetch`,
|
package/dist/lib/bundle.js
CHANGED
|
@@ -494,7 +494,22 @@ function remap(parser, inventory) {
|
|
|
494
494
|
const namesForPrefix = targetToNameByPrefix.get(prefix);
|
|
495
495
|
let defName = namesForPrefix.get(targetKey);
|
|
496
496
|
if (!defName) {
|
|
497
|
-
|
|
497
|
+
// If the external file is one of the original input sources, prefer its assigned prefix
|
|
498
|
+
let proposedBase = baseName(entry.file);
|
|
499
|
+
try {
|
|
500
|
+
const parserAny = parser;
|
|
501
|
+
if (parserAny && parserAny.sourcePathToPrefix && typeof parserAny.sourcePathToPrefix.get === "function") {
|
|
502
|
+
const withoutHash = (entry.file || "").split("#")[0];
|
|
503
|
+
const mapped = parserAny.sourcePathToPrefix.get(withoutHash);
|
|
504
|
+
if (mapped && typeof mapped === "string") {
|
|
505
|
+
proposedBase = mapped;
|
|
506
|
+
}
|
|
507
|
+
}
|
|
508
|
+
}
|
|
509
|
+
catch {
|
|
510
|
+
// Ignore errors
|
|
511
|
+
}
|
|
512
|
+
const proposed = `${proposedBase}_${lastToken(entry.hash)}`;
|
|
498
513
|
defName = uniqueName(container, proposed);
|
|
499
514
|
namesForPrefix.set(targetKey, defName);
|
|
500
515
|
// Store the resolved value under the container
|
package/dist/lib/index.d.ts
CHANGED
|
@@ -28,6 +28,9 @@ export declare class $RefParser {
|
|
|
28
28
|
* @readonly
|
|
29
29
|
*/
|
|
30
30
|
schema: JSONSchema | null;
|
|
31
|
+
schemaMany: JSONSchema[];
|
|
32
|
+
schemaManySources: string[];
|
|
33
|
+
sourcePathToPrefix: Map<string, string>;
|
|
31
34
|
/**
|
|
32
35
|
* Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
|
|
33
36
|
*
|
|
@@ -43,6 +46,16 @@ export declare class $RefParser {
|
|
|
43
46
|
pathOrUrlOrSchema: JSONSchema | string | unknown;
|
|
44
47
|
resolvedInput?: ResolvedInput;
|
|
45
48
|
}): Promise<JSONSchema>;
|
|
49
|
+
/**
|
|
50
|
+
* Bundles multiple roots (files/URLs/objects) into a single schema by creating a synthetic root
|
|
51
|
+
* that references each input, resolving all externals, and then hoisting via the existing bundler.
|
|
52
|
+
*/
|
|
53
|
+
bundleMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs, }: {
|
|
54
|
+
arrayBuffer?: ArrayBuffer[];
|
|
55
|
+
fetch?: RequestInit;
|
|
56
|
+
pathOrUrlOrSchemas: Array<JSONSchema | string | unknown>;
|
|
57
|
+
resolvedInputs?: ResolvedInput[];
|
|
58
|
+
}): Promise<JSONSchema>;
|
|
46
59
|
/**
|
|
47
60
|
* Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
|
|
48
61
|
*
|
|
@@ -72,6 +85,8 @@ export declare class $RefParser {
|
|
|
72
85
|
}): Promise<{
|
|
73
86
|
schema: JSONSchema;
|
|
74
87
|
}>;
|
|
88
|
+
private parseMany;
|
|
89
|
+
mergeMany(): JSONSchema;
|
|
75
90
|
}
|
|
76
91
|
export { sendRequest } from "./resolvers/url.js";
|
|
77
92
|
export type { JSONSchema } from "./types/index.js";
|
package/dist/lib/index.js
CHANGED
|
@@ -87,6 +87,13 @@ const getResolvedInput = ({ pathOrUrlOrSchema, }) => {
|
|
|
87
87
|
return resolvedInput;
|
|
88
88
|
};
|
|
89
89
|
exports.getResolvedInput = getResolvedInput;
|
|
90
|
+
const _ensureResolvedInputPath = (input, fallbackPath) => {
|
|
91
|
+
if (input.type === "json" && (!input.path || input.path.length === 0)) {
|
|
92
|
+
return { ...input, path: fallbackPath };
|
|
93
|
+
}
|
|
94
|
+
return input;
|
|
95
|
+
};
|
|
96
|
+
// NOTE: previously used helper removed as unused
|
|
90
97
|
/**
|
|
91
98
|
* This class parses a JSON schema, builds a map of its JSON references and their resolved values,
|
|
92
99
|
* and provides methods for traversing, manipulating, and dereferencing those references.
|
|
@@ -108,6 +115,9 @@ class $RefParser {
|
|
|
108
115
|
* @readonly
|
|
109
116
|
*/
|
|
110
117
|
this.schema = null;
|
|
118
|
+
this.schemaMany = [];
|
|
119
|
+
this.schemaManySources = [];
|
|
120
|
+
this.sourcePathToPrefix = new Map();
|
|
111
121
|
}
|
|
112
122
|
/**
|
|
113
123
|
* Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
|
|
@@ -137,6 +147,26 @@ class $RefParser {
|
|
|
137
147
|
}
|
|
138
148
|
return this.schema;
|
|
139
149
|
}
|
|
150
|
+
/**
|
|
151
|
+
* Bundles multiple roots (files/URLs/objects) into a single schema by creating a synthetic root
|
|
152
|
+
* that references each input, resolving all externals, and then hoisting via the existing bundler.
|
|
153
|
+
*/
|
|
154
|
+
async bundleMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs, }) {
|
|
155
|
+
await this.parseMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs });
|
|
156
|
+
this.mergeMany();
|
|
157
|
+
await (0, resolve_external_js_1.resolveExternal)(this, this.options);
|
|
158
|
+
const errors = errors_js_1.JSONParserErrorGroup.getParserErrors(this);
|
|
159
|
+
if (errors.length > 0) {
|
|
160
|
+
throw new errors_js_1.JSONParserErrorGroup(this);
|
|
161
|
+
}
|
|
162
|
+
(0, bundle_js_1.bundle)(this, this.options);
|
|
163
|
+
// Merged root is ready for bundling
|
|
164
|
+
const errors2 = errors_js_1.JSONParserErrorGroup.getParserErrors(this);
|
|
165
|
+
if (errors2.length > 0) {
|
|
166
|
+
throw new errors_js_1.JSONParserErrorGroup(this);
|
|
167
|
+
}
|
|
168
|
+
return this.schema;
|
|
169
|
+
}
|
|
140
170
|
/**
|
|
141
171
|
* Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
|
|
142
172
|
*
|
|
@@ -216,6 +246,279 @@ class $RefParser {
|
|
|
216
246
|
schema,
|
|
217
247
|
};
|
|
218
248
|
}
|
|
249
|
+
async parseMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs: _resolvedInputs, }) {
|
|
250
|
+
const resolvedInputs = [...(_resolvedInputs || [])];
|
|
251
|
+
resolvedInputs.push(...(pathOrUrlOrSchemas.map((schema) => (0, exports.getResolvedInput)({ pathOrUrlOrSchema: schema })) || []));
|
|
252
|
+
this.schemaMany = [];
|
|
253
|
+
this.schemaManySources = [];
|
|
254
|
+
this.sourcePathToPrefix = new Map();
|
|
255
|
+
for (let i = 0; i < resolvedInputs.length; i++) {
|
|
256
|
+
const resolvedInput = resolvedInputs[i];
|
|
257
|
+
const { path, type } = resolvedInput;
|
|
258
|
+
let { schema } = resolvedInput;
|
|
259
|
+
if (schema) {
|
|
260
|
+
// keep schema as-is
|
|
261
|
+
}
|
|
262
|
+
else if (type !== "json") {
|
|
263
|
+
const file = (0, parse_js_1.newFile)(path);
|
|
264
|
+
// Add a new $Ref for this file, even though we don't have the value yet.
|
|
265
|
+
// This ensures that we don't simultaneously read & parse the same file multiple times
|
|
266
|
+
const $refAdded = this.$refs._add(file.url);
|
|
267
|
+
$refAdded.pathType = type;
|
|
268
|
+
try {
|
|
269
|
+
const resolver = type === "file" ? file_js_1.fileResolver : url_js_1.urlResolver;
|
|
270
|
+
await resolver.handler({
|
|
271
|
+
arrayBuffer: arrayBuffer?.[i],
|
|
272
|
+
fetch,
|
|
273
|
+
file,
|
|
274
|
+
});
|
|
275
|
+
const parseResult = await (0, parse_js_1.parseFile)(file, this.options);
|
|
276
|
+
$refAdded.value = parseResult.result;
|
|
277
|
+
schema = parseResult.result;
|
|
278
|
+
}
|
|
279
|
+
catch (err) {
|
|
280
|
+
if ((0, errors_js_1.isHandledError)(err)) {
|
|
281
|
+
$refAdded.value = err;
|
|
282
|
+
}
|
|
283
|
+
throw err;
|
|
284
|
+
}
|
|
285
|
+
}
|
|
286
|
+
if (schema === null || typeof schema !== "object" || Buffer.isBuffer(schema)) {
|
|
287
|
+
throw ono_1.ono.syntax(`"${this.$refs._root$Ref.path || schema}" is not a valid JSON Schema`);
|
|
288
|
+
}
|
|
289
|
+
this.schemaMany.push(schema);
|
|
290
|
+
this.schemaManySources.push(path && path.length ? path : url.cwd());
|
|
291
|
+
}
|
|
292
|
+
return {
|
|
293
|
+
schemaMany: this.schemaMany,
|
|
294
|
+
};
|
|
295
|
+
}
|
|
296
|
+
mergeMany() {
|
|
297
|
+
const schemas = this.schemaMany || [];
|
|
298
|
+
if (schemas.length === 0) {
|
|
299
|
+
throw (0, ono_1.ono)("mergeMany called with no schemas. Did you run parseMany?");
|
|
300
|
+
}
|
|
301
|
+
const merged = {};
|
|
302
|
+
// Determine spec version: prefer first occurrence of openapi, else swagger
|
|
303
|
+
let chosenOpenapi;
|
|
304
|
+
let chosenSwagger;
|
|
305
|
+
for (const s of schemas) {
|
|
306
|
+
if (!chosenOpenapi && s && typeof s.openapi === "string") {
|
|
307
|
+
chosenOpenapi = s.openapi;
|
|
308
|
+
}
|
|
309
|
+
if (!chosenSwagger && s && typeof s.swagger === "string") {
|
|
310
|
+
chosenSwagger = s.swagger;
|
|
311
|
+
}
|
|
312
|
+
if (chosenOpenapi && chosenSwagger) {
|
|
313
|
+
break;
|
|
314
|
+
}
|
|
315
|
+
}
|
|
316
|
+
if (typeof chosenOpenapi === "string") {
|
|
317
|
+
merged.openapi = chosenOpenapi;
|
|
318
|
+
}
|
|
319
|
+
else if (typeof chosenSwagger === "string") {
|
|
320
|
+
merged.swagger = chosenSwagger;
|
|
321
|
+
}
|
|
322
|
+
// Merge info: take first non-empty per-field across inputs
|
|
323
|
+
const infoAccumulator = {};
|
|
324
|
+
for (const s of schemas) {
|
|
325
|
+
const info = s?.info;
|
|
326
|
+
if (info && typeof info === "object") {
|
|
327
|
+
for (const [k, v] of Object.entries(info)) {
|
|
328
|
+
if (infoAccumulator[k] === undefined && v !== undefined) {
|
|
329
|
+
infoAccumulator[k] = JSON.parse(JSON.stringify(v));
|
|
330
|
+
}
|
|
331
|
+
}
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
if (Object.keys(infoAccumulator).length > 0) {
|
|
335
|
+
merged.info = infoAccumulator;
|
|
336
|
+
}
|
|
337
|
+
// Merge servers: union by url+description
|
|
338
|
+
const servers = [];
|
|
339
|
+
const seenServers = new Set();
|
|
340
|
+
for (const s of schemas) {
|
|
341
|
+
const arr = s?.servers;
|
|
342
|
+
if (Array.isArray(arr)) {
|
|
343
|
+
for (const srv of arr) {
|
|
344
|
+
if (srv && typeof srv === "object") {
|
|
345
|
+
const key = `${srv.url || ""}|${srv.description || ""}`;
|
|
346
|
+
if (!seenServers.has(key)) {
|
|
347
|
+
seenServers.add(key);
|
|
348
|
+
servers.push(JSON.parse(JSON.stringify(srv)));
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
}
|
|
352
|
+
}
|
|
353
|
+
}
|
|
354
|
+
if (servers.length > 0) {
|
|
355
|
+
merged.servers = servers;
|
|
356
|
+
}
|
|
357
|
+
merged.paths = {};
|
|
358
|
+
merged.components = {};
|
|
359
|
+
const componentSections = [
|
|
360
|
+
"schemas",
|
|
361
|
+
"parameters",
|
|
362
|
+
"requestBodies",
|
|
363
|
+
"responses",
|
|
364
|
+
"headers",
|
|
365
|
+
"securitySchemes",
|
|
366
|
+
"examples",
|
|
367
|
+
"links",
|
|
368
|
+
"callbacks",
|
|
369
|
+
];
|
|
370
|
+
for (const sec of componentSections) {
|
|
371
|
+
merged.components[sec] = {};
|
|
372
|
+
}
|
|
373
|
+
const tagNameSet = new Set();
|
|
374
|
+
const tags = [];
|
|
375
|
+
const usedOpIds = new Set();
|
|
376
|
+
const baseName = (p) => {
|
|
377
|
+
try {
|
|
378
|
+
const withoutHash = p.split("#")[0];
|
|
379
|
+
const parts = withoutHash.split("/");
|
|
380
|
+
const filename = parts[parts.length - 1] || "schema";
|
|
381
|
+
const dot = filename.lastIndexOf(".");
|
|
382
|
+
const raw = dot > 0 ? filename.substring(0, dot) : filename;
|
|
383
|
+
return raw.replace(/[^A-Za-z0-9_-]/g, "_");
|
|
384
|
+
}
|
|
385
|
+
catch {
|
|
386
|
+
return "schema";
|
|
387
|
+
}
|
|
388
|
+
};
|
|
389
|
+
const unique = (set, proposed) => {
|
|
390
|
+
let name = proposed;
|
|
391
|
+
let i = 2;
|
|
392
|
+
while (set.has(name)) {
|
|
393
|
+
name = `${proposed}_${i++}`;
|
|
394
|
+
}
|
|
395
|
+
set.add(name);
|
|
396
|
+
return name;
|
|
397
|
+
};
|
|
398
|
+
const rewriteRef = (ref, refMap) => {
|
|
399
|
+
// OAS3: #/components/{section}/{name}...
|
|
400
|
+
let m = ref.match(/^#\/components\/([^/]+)\/([^/]+)(.*)$/);
|
|
401
|
+
if (m) {
|
|
402
|
+
const base = `#/components/${m[1]}/${m[2]}`;
|
|
403
|
+
const mapped = refMap.get(base);
|
|
404
|
+
if (mapped) {
|
|
405
|
+
return mapped + (m[3] || "");
|
|
406
|
+
}
|
|
407
|
+
}
|
|
408
|
+
// OAS2: #/definitions/{name}...
|
|
409
|
+
m = ref.match(/^#\/definitions\/([^/]+)(.*)$/);
|
|
410
|
+
if (m) {
|
|
411
|
+
const base = `#/components/schemas/${m[1]}`;
|
|
412
|
+
const mapped = refMap.get(base);
|
|
413
|
+
if (mapped) {
|
|
414
|
+
// map definitions -> components/schemas
|
|
415
|
+
return mapped + (m[2] || "");
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
return ref;
|
|
419
|
+
};
|
|
420
|
+
const cloneAndRewrite = (obj, refMap, tagMap, opIdPrefix, basePath) => {
|
|
421
|
+
if (obj === null || obj === undefined) {
|
|
422
|
+
return obj;
|
|
423
|
+
}
|
|
424
|
+
if (Array.isArray(obj)) {
|
|
425
|
+
return obj.map((v) => cloneAndRewrite(v, refMap, tagMap, opIdPrefix, basePath));
|
|
426
|
+
}
|
|
427
|
+
if (typeof obj !== "object") {
|
|
428
|
+
return obj;
|
|
429
|
+
}
|
|
430
|
+
const out = {};
|
|
431
|
+
for (const [k, v] of Object.entries(obj)) {
|
|
432
|
+
if (k === "$ref" && typeof v === "string") {
|
|
433
|
+
const s = v;
|
|
434
|
+
if (s.startsWith("#")) {
|
|
435
|
+
out[k] = rewriteRef(s, refMap);
|
|
436
|
+
}
|
|
437
|
+
else {
|
|
438
|
+
const proto = url.getProtocol(s);
|
|
439
|
+
if (proto === undefined) {
|
|
440
|
+
// relative external ref -> absolutize against source base path
|
|
441
|
+
out[k] = url.resolve(basePath + "#", s);
|
|
442
|
+
}
|
|
443
|
+
else {
|
|
444
|
+
out[k] = s;
|
|
445
|
+
}
|
|
446
|
+
}
|
|
447
|
+
}
|
|
448
|
+
else if (k === "tags" && Array.isArray(v) && v.every((x) => typeof x === "string")) {
|
|
449
|
+
out[k] = v.map((t) => tagMap.get(t) || t);
|
|
450
|
+
}
|
|
451
|
+
else if (k === "operationId" && typeof v === "string") {
|
|
452
|
+
out[k] = unique(usedOpIds, `${opIdPrefix}_${v}`);
|
|
453
|
+
}
|
|
454
|
+
else {
|
|
455
|
+
out[k] = cloneAndRewrite(v, refMap, tagMap, opIdPrefix, basePath);
|
|
456
|
+
}
|
|
457
|
+
}
|
|
458
|
+
return out;
|
|
459
|
+
};
|
|
460
|
+
for (let i = 0; i < schemas.length; i++) {
|
|
461
|
+
const schema = schemas[i] || {};
|
|
462
|
+
const sourcePath = this.schemaManySources[i] || `multi://input/${i + 1}`;
|
|
463
|
+
const prefix = baseName(sourcePath);
|
|
464
|
+
// Track prefix for this source path (strip hash). Only map real file/http paths
|
|
465
|
+
const withoutHash = url.stripHash(sourcePath);
|
|
466
|
+
const protocol = url.getProtocol(withoutHash);
|
|
467
|
+
if (protocol === undefined || protocol === "file" || protocol === "http" || protocol === "https") {
|
|
468
|
+
this.sourcePathToPrefix.set(withoutHash, prefix);
|
|
469
|
+
}
|
|
470
|
+
const refMap = new Map();
|
|
471
|
+
const tagMap = new Map();
|
|
472
|
+
const srcComponents = (schema.components || {});
|
|
473
|
+
for (const sec of componentSections) {
|
|
474
|
+
const group = srcComponents[sec] || {};
|
|
475
|
+
for (const [name] of Object.entries(group)) {
|
|
476
|
+
const newName = `${prefix}_${name}`;
|
|
477
|
+
refMap.set(`#/components/${sec}/${name}`, `#/components/${sec}/${newName}`);
|
|
478
|
+
}
|
|
479
|
+
}
|
|
480
|
+
const srcTags = Array.isArray(schema.tags) ? schema.tags : [];
|
|
481
|
+
for (const t of srcTags) {
|
|
482
|
+
if (!t || typeof t !== "object" || typeof t.name !== "string") {
|
|
483
|
+
continue;
|
|
484
|
+
}
|
|
485
|
+
const desired = t.name;
|
|
486
|
+
const finalName = tagNameSet.has(desired) ? `${prefix}_${desired}` : desired;
|
|
487
|
+
tagNameSet.add(finalName);
|
|
488
|
+
tagMap.set(desired, finalName);
|
|
489
|
+
if (!tags.find((x) => x && x.name === finalName)) {
|
|
490
|
+
tags.push({ ...t, name: finalName });
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
for (const sec of componentSections) {
|
|
494
|
+
const group = (schema.components && schema.components[sec]) || {};
|
|
495
|
+
for (const [name, val] of Object.entries(group)) {
|
|
496
|
+
const newName = `${prefix}_${name}`;
|
|
497
|
+
merged.components[sec][newName] = cloneAndRewrite(val, refMap, tagMap, prefix, url.stripHash(sourcePath));
|
|
498
|
+
}
|
|
499
|
+
}
|
|
500
|
+
const srcPaths = (schema.paths || {});
|
|
501
|
+
for (const [p, item] of Object.entries(srcPaths)) {
|
|
502
|
+
let targetPath = p;
|
|
503
|
+
if (merged.paths[p]) {
|
|
504
|
+
const trimmed = p.startsWith("/") ? p.substring(1) : p;
|
|
505
|
+
targetPath = `/${prefix}/${trimmed}`;
|
|
506
|
+
}
|
|
507
|
+
merged.paths[targetPath] = cloneAndRewrite(item, refMap, tagMap, prefix, url.stripHash(sourcePath));
|
|
508
|
+
}
|
|
509
|
+
}
|
|
510
|
+
if (tags.length > 0) {
|
|
511
|
+
merged.tags = tags;
|
|
512
|
+
}
|
|
513
|
+
// Rebuild $refs root using the first input's path to preserve external resolution semantics
|
|
514
|
+
const rootPath = this.schemaManySources[0] || url.cwd();
|
|
515
|
+
this.$refs = new refs_js_1.default();
|
|
516
|
+
const rootRef = this.$refs._add(rootPath);
|
|
517
|
+
rootRef.pathType = url.isFileSystemPath(rootPath) ? "file" : "http";
|
|
518
|
+
rootRef.value = merged;
|
|
519
|
+
this.schema = merged;
|
|
520
|
+
return merged;
|
|
521
|
+
}
|
|
219
522
|
}
|
|
220
523
|
exports.$RefParser = $RefParser;
|
|
221
524
|
var url_js_2 = require("./resolvers/url.js");
|
|
@@ -114,11 +114,15 @@ async function resolve$Ref($ref, path, $refs, options) {
|
|
|
114
114
|
const resolvedPath = url.resolve(path, $ref.$ref);
|
|
115
115
|
const withoutHash = url.stripHash(resolvedPath);
|
|
116
116
|
// $ref.$ref = url.relative($refs._root$Ref.path, resolvedPath);
|
|
117
|
+
// If this ref points back to an input source we've already merged, avoid re-importing
|
|
118
|
+
// by checking if the path (without hash) matches a known source in parser and we can serve it internally later.
|
|
119
|
+
// We keep normal flow but ensure cache hit if already added.
|
|
117
120
|
// Do we already have this $ref?
|
|
118
121
|
const ref = $refs._$refs[withoutHash];
|
|
119
122
|
if (ref) {
|
|
120
|
-
// We've already parsed this $ref, so
|
|
121
|
-
|
|
123
|
+
// We've already parsed this $ref, so crawl it to resolve its own externals
|
|
124
|
+
const promises = crawl(ref.value, `${withoutHash}#`, $refs, options, new Set(), true);
|
|
125
|
+
return Promise.all(promises);
|
|
122
126
|
}
|
|
123
127
|
// Parse the $referenced file/url
|
|
124
128
|
const file = (0, parse_js_1.newFile)(resolvedPath);
|
|
@@ -129,8 +133,8 @@ async function resolve$Ref($ref, path, $refs, options) {
|
|
|
129
133
|
const resolvedInput = (0, index_js_1.getResolvedInput)({ pathOrUrlOrSchema: resolvedPath });
|
|
130
134
|
$refAdded.pathType = resolvedInput.type;
|
|
131
135
|
let promises = [];
|
|
132
|
-
if (resolvedInput.type !==
|
|
133
|
-
const resolver = resolvedInput.type ===
|
|
136
|
+
if (resolvedInput.type !== "json") {
|
|
137
|
+
const resolver = resolvedInput.type === "file" ? file_js_1.fileResolver : url_js_1.urlResolver;
|
|
134
138
|
await resolver.handler({ file });
|
|
135
139
|
const parseResult = await (0, parse_js_1.parseFile)(file, options);
|
|
136
140
|
$refAdded.value = parseResult.result;
|
package/lib/bundle.ts
CHANGED
|
@@ -607,7 +607,21 @@ function remap(parser: $RefParser, inventory: InventoryEntry[]) {
|
|
|
607
607
|
|
|
608
608
|
let defName = namesForPrefix.get(targetKey);
|
|
609
609
|
if (!defName) {
|
|
610
|
-
|
|
610
|
+
// If the external file is one of the original input sources, prefer its assigned prefix
|
|
611
|
+
let proposedBase = baseName(entry.file);
|
|
612
|
+
try {
|
|
613
|
+
const parserAny: any = parser as any;
|
|
614
|
+
if (parserAny && parserAny.sourcePathToPrefix && typeof parserAny.sourcePathToPrefix.get === "function") {
|
|
615
|
+
const withoutHash = (entry.file || "").split("#")[0];
|
|
616
|
+
const mapped = parserAny.sourcePathToPrefix.get(withoutHash);
|
|
617
|
+
if (mapped && typeof mapped === "string") {
|
|
618
|
+
proposedBase = mapped;
|
|
619
|
+
}
|
|
620
|
+
}
|
|
621
|
+
} catch {
|
|
622
|
+
// Ignore errors
|
|
623
|
+
}
|
|
624
|
+
const proposed = `${proposedBase}_${lastToken(entry.hash)}`;
|
|
611
625
|
defName = uniqueName(container, proposed);
|
|
612
626
|
namesForPrefix.set(targetKey, defName);
|
|
613
627
|
// Store the resolved value under the container
|
package/lib/index.ts
CHANGED
|
@@ -62,6 +62,15 @@ export const getResolvedInput = ({
|
|
|
62
62
|
return resolvedInput;
|
|
63
63
|
};
|
|
64
64
|
|
|
65
|
+
const _ensureResolvedInputPath = (input: ResolvedInput, fallbackPath: string): ResolvedInput => {
|
|
66
|
+
if (input.type === "json" && (!input.path || input.path.length === 0)) {
|
|
67
|
+
return { ...input, path: fallbackPath };
|
|
68
|
+
}
|
|
69
|
+
return input;
|
|
70
|
+
};
|
|
71
|
+
|
|
72
|
+
// NOTE: previously used helper removed as unused
|
|
73
|
+
|
|
65
74
|
/**
|
|
66
75
|
* This class parses a JSON schema, builds a map of its JSON references and their resolved values,
|
|
67
76
|
* and provides methods for traversing, manipulating, and dereferencing those references.
|
|
@@ -82,6 +91,9 @@ export class $RefParser {
|
|
|
82
91
|
* @readonly
|
|
83
92
|
*/
|
|
84
93
|
public schema: JSONSchema | null = null;
|
|
94
|
+
public schemaMany: JSONSchema[] = [];
|
|
95
|
+
public schemaManySources: string[] = [];
|
|
96
|
+
public sourcePathToPrefix: Map<string, string> = new Map();
|
|
85
97
|
|
|
86
98
|
/**
|
|
87
99
|
* Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
|
|
@@ -109,6 +121,7 @@ export class $RefParser {
|
|
|
109
121
|
pathOrUrlOrSchema,
|
|
110
122
|
resolvedInput,
|
|
111
123
|
});
|
|
124
|
+
|
|
112
125
|
await resolveExternal(this, this.options);
|
|
113
126
|
const errors = JSONParserErrorGroup.getParserErrors(this);
|
|
114
127
|
if (errors.length > 0) {
|
|
@@ -122,6 +135,39 @@ export class $RefParser {
|
|
|
122
135
|
return this.schema!;
|
|
123
136
|
}
|
|
124
137
|
|
|
138
|
+
/**
|
|
139
|
+
* Bundles multiple roots (files/URLs/objects) into a single schema by creating a synthetic root
|
|
140
|
+
* that references each input, resolving all externals, and then hoisting via the existing bundler.
|
|
141
|
+
*/
|
|
142
|
+
public async bundleMany({
|
|
143
|
+
arrayBuffer,
|
|
144
|
+
fetch,
|
|
145
|
+
pathOrUrlOrSchemas,
|
|
146
|
+
resolvedInputs,
|
|
147
|
+
}: {
|
|
148
|
+
arrayBuffer?: ArrayBuffer[];
|
|
149
|
+
fetch?: RequestInit;
|
|
150
|
+
pathOrUrlOrSchemas: Array<JSONSchema | string | unknown>;
|
|
151
|
+
resolvedInputs?: ResolvedInput[];
|
|
152
|
+
}): Promise<JSONSchema> {
|
|
153
|
+
await this.parseMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs });
|
|
154
|
+
this.mergeMany();
|
|
155
|
+
|
|
156
|
+
await resolveExternal(this, this.options);
|
|
157
|
+
const errors = JSONParserErrorGroup.getParserErrors(this);
|
|
158
|
+
if (errors.length > 0) {
|
|
159
|
+
throw new JSONParserErrorGroup(this);
|
|
160
|
+
}
|
|
161
|
+
_bundle(this, this.options);
|
|
162
|
+
// Merged root is ready for bundling
|
|
163
|
+
|
|
164
|
+
const errors2 = JSONParserErrorGroup.getParserErrors(this);
|
|
165
|
+
if (errors2.length > 0) {
|
|
166
|
+
throw new JSONParserErrorGroup(this);
|
|
167
|
+
}
|
|
168
|
+
return this.schema!;
|
|
169
|
+
}
|
|
170
|
+
|
|
125
171
|
/**
|
|
126
172
|
* Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
|
|
127
173
|
*
|
|
@@ -223,6 +269,316 @@ export class $RefParser {
|
|
|
223
269
|
schema,
|
|
224
270
|
};
|
|
225
271
|
}
|
|
272
|
+
|
|
273
|
+
private async parseMany({
|
|
274
|
+
arrayBuffer,
|
|
275
|
+
fetch,
|
|
276
|
+
pathOrUrlOrSchemas,
|
|
277
|
+
resolvedInputs: _resolvedInputs,
|
|
278
|
+
}: {
|
|
279
|
+
arrayBuffer?: ArrayBuffer[];
|
|
280
|
+
fetch?: RequestInit;
|
|
281
|
+
pathOrUrlOrSchemas: Array<JSONSchema | string | unknown>;
|
|
282
|
+
resolvedInputs?: ResolvedInput[];
|
|
283
|
+
}): Promise<{ schemaMany: JSONSchema[] }> {
|
|
284
|
+
const resolvedInputs = [...(_resolvedInputs || [])];
|
|
285
|
+
resolvedInputs.push(...(pathOrUrlOrSchemas.map((schema) => getResolvedInput({ pathOrUrlOrSchema: schema })) || []));
|
|
286
|
+
|
|
287
|
+
this.schemaMany = [];
|
|
288
|
+
this.schemaManySources = [];
|
|
289
|
+
this.sourcePathToPrefix = new Map();
|
|
290
|
+
|
|
291
|
+
for (let i = 0; i < resolvedInputs.length; i++) {
|
|
292
|
+
const resolvedInput = resolvedInputs[i];
|
|
293
|
+
const { path, type } = resolvedInput;
|
|
294
|
+
let { schema } = resolvedInput;
|
|
295
|
+
|
|
296
|
+
if (schema) {
|
|
297
|
+
// keep schema as-is
|
|
298
|
+
} else if (type !== "json") {
|
|
299
|
+
const file = newFile(path);
|
|
300
|
+
|
|
301
|
+
// Add a new $Ref for this file, even though we don't have the value yet.
|
|
302
|
+
// This ensures that we don't simultaneously read & parse the same file multiple times
|
|
303
|
+
const $refAdded = this.$refs._add(file.url);
|
|
304
|
+
$refAdded.pathType = type;
|
|
305
|
+
try {
|
|
306
|
+
const resolver = type === "file" ? fileResolver : urlResolver;
|
|
307
|
+
await resolver.handler({
|
|
308
|
+
arrayBuffer: arrayBuffer?.[i],
|
|
309
|
+
fetch,
|
|
310
|
+
file,
|
|
311
|
+
});
|
|
312
|
+
const parseResult = await parseFile(file, this.options);
|
|
313
|
+
$refAdded.value = parseResult.result;
|
|
314
|
+
schema = parseResult.result;
|
|
315
|
+
} catch (err) {
|
|
316
|
+
if (isHandledError(err)) {
|
|
317
|
+
$refAdded.value = err;
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
throw err;
|
|
321
|
+
}
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
if (schema === null || typeof schema !== "object" || Buffer.isBuffer(schema)) {
|
|
325
|
+
throw ono.syntax(`"${this.$refs._root$Ref.path || schema}" is not a valid JSON Schema`);
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
this.schemaMany.push(schema);
|
|
329
|
+
this.schemaManySources.push(path && path.length ? path : url.cwd());
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
return {
|
|
333
|
+
schemaMany: this.schemaMany,
|
|
334
|
+
};
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
/**
 * Merges the schemas previously collected by `parseMany` (stored on
 * `this.schemaMany` / `this.schemaManySources`) into a single OpenAPI
 * document.
 *
 * Merge strategy, as implemented below:
 * - spec version: first `openapi` string wins, else first `swagger` string;
 * - `info`: first-defined value per field across inputs (deep-cloned);
 * - `servers`: union, de-duplicated by `url|description`;
 * - components: every entry is copied under a name prefixed with a sanitized
 *   base name of its source file, and local `$ref`s are rewritten to match;
 * - paths: copied as-is unless the path already exists, in which case it is
 *   re-rooted under `/{prefix}/...`;
 * - tags and `operationId`s are renamed/prefixed to avoid collisions.
 *
 * Side effects: resets `this.$refs` to a fresh `$Refs` rooted at the first
 * input's path, sets `this.schema` to the merged document, and records
 * source-path → prefix mappings in `this.sourcePathToPrefix`.
 *
 * @returns the merged schema (also assigned to `this.schema`).
 * @throws if `parseMany` was not run first (no schemas collected), via `ono`.
 */
public mergeMany(): JSONSchema {
  const schemas = this.schemaMany || [];
  if (schemas.length === 0) {
    throw ono("mergeMany called with no schemas. Did you run parseMany?");
  }

  const merged: any = {};

  // Determine spec version: prefer first occurrence of openapi, else swagger.
  // Both are scanned in one pass; the loop exits early once each has a value.
  let chosenOpenapi: string | undefined;
  let chosenSwagger: string | undefined;
  for (const s of schemas) {
    if (!chosenOpenapi && s && typeof (s as any).openapi === "string") {
      chosenOpenapi = (s as any).openapi;
    }
    if (!chosenSwagger && s && typeof (s as any).swagger === "string") {
      chosenSwagger = (s as any).swagger;
    }
    if (chosenOpenapi && chosenSwagger) {
      break;
    }
  }
  // openapi takes precedence over swagger even if a swagger input came first.
  if (typeof chosenOpenapi === "string") {
    merged.openapi = chosenOpenapi;
  } else if (typeof chosenSwagger === "string") {
    merged.swagger = chosenSwagger;
  }

  // Merge info: take first non-empty per-field across inputs.
  // JSON round-trip deep-clones so later mutation of inputs can't leak in.
  const infoAccumulator: any = {};
  for (const s of schemas) {
    const info = (s as any)?.info;
    if (info && typeof info === "object") {
      for (const [k, v] of Object.entries(info)) {
        if (infoAccumulator[k] === undefined && v !== undefined) {
          infoAccumulator[k] = JSON.parse(JSON.stringify(v));
        }
      }
    }
  }
  if (Object.keys(infoAccumulator).length > 0) {
    merged.info = infoAccumulator;
  }

  // Merge servers: union by url+description (both default to "" in the key,
  // so two servers missing both fields collapse into one entry).
  const servers: any[] = [];
  const seenServers = new Set<string>();
  for (const s of schemas) {
    const arr = (s as any)?.servers;
    if (Array.isArray(arr)) {
      for (const srv of arr) {
        if (srv && typeof srv === "object") {
          const key = `${srv.url || ""}|${srv.description || ""}`;
          if (!seenServers.has(key)) {
            seenServers.add(key);
            servers.push(JSON.parse(JSON.stringify(srv)));
          }
        }
      }
    }
  }
  if (servers.length > 0) {
    merged.servers = servers;
  }

  merged.paths = {};
  merged.components = {};

  // The OAS3 component sections that get merged. NOTE(review): OAS2
  // `definitions` are not copied into `components.schemas` here, so
  // `#/definitions/...` refs from a swagger input only get rewritten if a
  // same-named component was registered — confirm whether OAS2 inputs are
  // expected to be converted before merging.
  const componentSections = [
    "schemas",
    "parameters",
    "requestBodies",
    "responses",
    "headers",
    "securitySchemes",
    "examples",
    "links",
    "callbacks",
  ];
  for (const sec of componentSections) {
    merged.components[sec] = {};
  }

  // Accumulators shared across all inputs: final tag objects, the set of tag
  // names already claimed, and every operationId emitted so far.
  const tagNameSet = new Set<string>();
  const tags: any[] = [];
  const usedOpIds = new Set<string>();

  // Derives a component-name prefix from a source path/URL: last path
  // segment, extension stripped, non [A-Za-z0-9_-] chars replaced with "_".
  const baseName = (p: string) => {
    try {
      const withoutHash = p.split("#")[0];
      const parts = withoutHash.split("/");
      const filename = parts[parts.length - 1] || "schema";
      const dot = filename.lastIndexOf(".");
      // dot > 0 (not >= 0) keeps dotfile-style names like ".spec" intact.
      const raw = dot > 0 ? filename.substring(0, dot) : filename;
      return raw.replace(/[^A-Za-z0-9_-]/g, "_");
    } catch {
      return "schema";
    }
  };
  // Returns `proposed` if unused, else `proposed_2`, `proposed_3`, ... and
  // records the chosen name in `set` (mutates the caller's set).
  const unique = (set: Set<string>, proposed: string) => {
    let name = proposed;
    let i = 2;
    while (set.has(name)) {
      name = `${proposed}_${i++}`;
    }
    set.add(name);
    return name;
  };

  // Rewrites a LOCAL $ref through `refMap`, preserving any trailing pointer
  // segments (captured as m[3]/m[2]). Unmapped refs pass through unchanged.
  const rewriteRef = (ref: string, refMap: Map<string, string>): string => {
    // OAS3: #/components/{section}/{name}...
    let m = ref.match(/^#\/components\/([^/]+)\/([^/]+)(.*)$/);
    if (m) {
      const base = `#/components/${m[1]}/${m[2]}`;
      const mapped = refMap.get(base);
      if (mapped) {
        return mapped + (m[3] || "");
      }
    }
    // OAS2: #/definitions/{name}...
    m = ref.match(/^#\/definitions\/([^/]+)(.*)$/);
    if (m) {
      const base = `#/components/schemas/${m[1]}`;
      const mapped = refMap.get(base);
      if (mapped) {
        // map definitions -> components/schemas
        return mapped + (m[2] || "");
      }
    }
    return ref;
  };

  // Deep-clones `obj`, rewriting on the way:
  // - local `$ref`s via `rewriteRef`; protocol-less external `$ref`s are
  //   absolutized against `basePath` so they stay resolvable after merging;
  // - string-array `tags` properties via `tagMap`;
  // - `operationId` strings are prefixed and made globally unique (mutates
  //   the shared `usedOpIds` set, so call order across inputs matters).
  // NOTE(review): the `tags`/`operationId` rewrites are keyed on property
  // name alone, so same-named keys inside examples/schemas would also be
  // rewritten — presumably acceptable, but worth confirming.
  const cloneAndRewrite = (
    obj: any,
    refMap: Map<string, string>,
    tagMap: Map<string, string>,
    opIdPrefix: string,
    basePath: string,
  ): any => {
    if (obj === null || obj === undefined) {
      return obj;
    }
    if (Array.isArray(obj)) {
      return obj.map((v) => cloneAndRewrite(v, refMap, tagMap, opIdPrefix, basePath));
    }
    if (typeof obj !== "object") {
      return obj;
    }

    const out: any = {};
    for (const [k, v] of Object.entries(obj)) {
      if (k === "$ref" && typeof v === "string") {
        const s = v as string;
        if (s.startsWith("#")) {
          out[k] = rewriteRef(s, refMap);
        } else {
          const proto = url.getProtocol(s);
          if (proto === undefined) {
            // relative external ref -> absolutize against source base path
            out[k] = url.resolve(basePath + "#", s);
          } else {
            out[k] = s;
          }
        }
      } else if (k === "tags" && Array.isArray(v) && v.every((x) => typeof x === "string")) {
        out[k] = v.map((t) => tagMap.get(t) || t);
      } else if (k === "operationId" && typeof v === "string") {
        out[k] = unique(usedOpIds, `${opIdPrefix}_${v}`);
      } else {
        out[k] = cloneAndRewrite(v as any, refMap, tagMap, opIdPrefix, basePath);
      }
    }
    return out;
  };

  // Main merge loop: one pass per input schema, in parseMany order.
  for (let i = 0; i < schemas.length; i++) {
    const schema: any = schemas[i] || {};
    // Fall back to a synthetic path so prefix derivation always works.
    const sourcePath = this.schemaManySources[i] || `multi://input/${i + 1}`;
    const prefix = baseName(sourcePath);

    // Track prefix for this source path (strip hash). Only map real file/http paths
    const withoutHash = url.stripHash(sourcePath);
    const protocol = url.getProtocol(withoutHash);
    if (protocol === undefined || protocol === "file" || protocol === "http" || protocol === "https") {
      this.sourcePathToPrefix.set(withoutHash, prefix);
    }

    // Per-input rename tables, built before any cloning so that refs inside
    // this input's own components resolve to the renamed targets.
    const refMap = new Map<string, string>();
    const tagMap = new Map<string, string>();

    const srcComponents = (schema.components || {}) as any;
    for (const sec of componentSections) {
      const group = srcComponents[sec] || {};
      for (const [name] of Object.entries(group)) {
        const newName = `${prefix}_${name}`;
        refMap.set(`#/components/${sec}/${name}`, `#/components/${sec}/${newName}`);
      }
    }

    // Tags keep their original name unless already claimed by an earlier
    // input, in which case they get this input's prefix.
    const srcTags: any[] = Array.isArray(schema.tags) ? schema.tags : [];
    for (const t of srcTags) {
      if (!t || typeof t !== "object" || typeof t.name !== "string") {
        continue;
      }
      const desired = t.name;
      const finalName = tagNameSet.has(desired) ? `${prefix}_${desired}` : desired;
      tagNameSet.add(finalName);
      tagMap.set(desired, finalName);
      if (!tags.find((x) => x && x.name === finalName)) {
        tags.push({ ...t, name: finalName });
      }
    }

    // Copy components under their prefixed names, rewriting nested refs.
    for (const sec of componentSections) {
      const group = (schema.components && schema.components[sec]) || {};
      for (const [name, val] of Object.entries(group)) {
        const newName = `${prefix}_${name}`;
        merged.components[sec][newName] = cloneAndRewrite(val, refMap, tagMap, prefix, url.stripHash(sourcePath));
      }
    }

    // Copy paths; on collision, re-root the later input's path under
    // /{prefix}/... (so first-come keeps the original URL).
    const srcPaths = (schema.paths || {}) as Record<string, any>;
    for (const [p, item] of Object.entries(srcPaths)) {
      let targetPath = p;
      if (merged.paths[p]) {
        const trimmed = p.startsWith("/") ? p.substring(1) : p;
        targetPath = `/${prefix}/${trimmed}`;
      }
      merged.paths[targetPath] = cloneAndRewrite(item, refMap, tagMap, prefix, url.stripHash(sourcePath));
    }
  }

  if (tags.length > 0) {
    merged.tags = tags;
  }

  // Rebuild $refs root using the first input's path to preserve external resolution semantics
  const rootPath = this.schemaManySources[0] || url.cwd();
  this.$refs = new $Refs();
  const rootRef = this.$refs._add(rootPath);
  rootRef.pathType = url.isFileSystemPath(rootPath) ? "file" : "http";
  rootRef.value = merged;
  this.schema = merged;
  return merged as JSONSchema;
}
|
|
226
582
|
}
|
|
227
583
|
|
|
228
584
|
export { sendRequest } from "./resolvers/url.js";
|
package/lib/resolve-external.ts
CHANGED
|
@@ -21,10 +21,7 @@ import { urlResolver } from "./resolvers/url.js";
|
|
|
21
21
|
* The promise resolves once all JSON references in the schema have been resolved,
|
|
22
22
|
* including nested references that are contained in externally-referenced files.
|
|
23
23
|
*/
|
|
24
|
-
export function resolveExternal(
|
|
25
|
-
parser: $RefParser,
|
|
26
|
-
options: $RefParserOptions,
|
|
27
|
-
) {
|
|
24
|
+
export function resolveExternal(parser: $RefParser, options: $RefParserOptions) {
|
|
28
25
|
try {
|
|
29
26
|
// console.log('Resolving $ref pointers in %s', parser.$refs._root$Ref.path);
|
|
30
27
|
const promises = crawl(parser.schema, parser.$refs._root$Ref.path + "#", parser.$refs, options);
|
|
@@ -101,29 +98,33 @@ async function resolve$Ref<S extends object = JSONSchema>(
|
|
|
101
98
|
|
|
102
99
|
// $ref.$ref = url.relative($refs._root$Ref.path, resolvedPath);
|
|
103
100
|
|
|
101
|
+
// If this ref points back to an input source we've already merged, avoid re-importing
|
|
102
|
+
// by checking if the path (without hash) matches a known source in parser and we can serve it internally later.
|
|
103
|
+
// We keep normal flow but ensure cache hit if already added.
|
|
104
104
|
// Do we already have this $ref?
|
|
105
105
|
const ref = $refs._$refs[withoutHash];
|
|
106
106
|
if (ref) {
|
|
107
|
-
// We've already parsed this $ref, so
|
|
108
|
-
|
|
107
|
+
// We've already parsed this $ref, so crawl it to resolve its own externals
|
|
108
|
+
const promises = crawl(ref.value as S, `${withoutHash}#`, $refs, options, new Set(), true);
|
|
109
|
+
return Promise.all(promises);
|
|
109
110
|
}
|
|
110
111
|
|
|
111
112
|
// Parse the $referenced file/url
|
|
112
|
-
const file = newFile(resolvedPath)
|
|
113
|
+
const file = newFile(resolvedPath);
|
|
113
114
|
|
|
114
115
|
// Add a new $Ref for this file, even though we don't have the value yet.
|
|
115
116
|
// This ensures that we don't simultaneously read & parse the same file multiple times
|
|
116
117
|
const $refAdded = $refs._add(file.url);
|
|
117
118
|
|
|
118
119
|
try {
|
|
119
|
-
const resolvedInput = getResolvedInput({ pathOrUrlOrSchema: resolvedPath })
|
|
120
|
+
const resolvedInput = getResolvedInput({ pathOrUrlOrSchema: resolvedPath });
|
|
120
121
|
|
|
121
122
|
$refAdded.pathType = resolvedInput.type;
|
|
122
123
|
|
|
123
124
|
let promises: any = [];
|
|
124
125
|
|
|
125
|
-
if (resolvedInput.type !==
|
|
126
|
-
const resolver = resolvedInput.type ===
|
|
126
|
+
if (resolvedInput.type !== "json") {
|
|
127
|
+
const resolver = resolvedInput.type === "file" ? fileResolver : urlResolver;
|
|
127
128
|
await resolver.handler({ file });
|
|
128
129
|
const parseResult = await parseFile(file, options);
|
|
129
130
|
$refAdded.value = parseResult.result;
|