@hey-api/json-schema-ref-parser 1.0.8 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/lib/index.js CHANGED
@@ -53,9 +53,9 @@ const getResolvedInput = ({ pathOrUrlOrSchema, }) => {
53
53
  throw (0, ono_1.ono)(`Expected a file path, URL, or object. Got ${pathOrUrlOrSchema}`);
54
54
  }
55
55
  const resolvedInput = {
56
- path: typeof pathOrUrlOrSchema === 'string' ? pathOrUrlOrSchema : '',
56
+ path: typeof pathOrUrlOrSchema === "string" ? pathOrUrlOrSchema : "",
57
57
  schema: undefined,
58
- type: 'url',
58
+ type: "url",
59
59
  };
60
60
  // If the path is a filesystem path, then convert it to a URL.
61
61
  // NOTE: According to the JSON Reference spec, these should already be URLs,
@@ -65,28 +65,35 @@ const getResolvedInput = ({ pathOrUrlOrSchema, }) => {
65
65
  // If it doesn't work for your use-case, then use a URL instead.
66
66
  if (resolvedInput.path && url.isFileSystemPath(resolvedInput.path)) {
67
67
  resolvedInput.path = url.fromFileSystemPath(resolvedInput.path);
68
- resolvedInput.type = 'file';
68
+ resolvedInput.type = "file";
69
69
  }
70
- else if (!resolvedInput.path && pathOrUrlOrSchema && typeof pathOrUrlOrSchema === 'object') {
70
+ else if (!resolvedInput.path && pathOrUrlOrSchema && typeof pathOrUrlOrSchema === "object") {
71
71
  if ("$id" in pathOrUrlOrSchema && pathOrUrlOrSchema.$id) {
72
72
  // when schema id has defined an URL should use that hostname to request the references,
73
73
  // instead of using the current page URL
74
74
  const { hostname, protocol } = new URL(pathOrUrlOrSchema.$id);
75
75
  resolvedInput.path = `${protocol}//${hostname}:${protocol === "https:" ? 443 : 80}`;
76
- resolvedInput.type = 'url';
76
+ resolvedInput.type = "url";
77
77
  }
78
78
  else {
79
79
  resolvedInput.schema = pathOrUrlOrSchema;
80
- resolvedInput.type = 'json';
80
+ resolvedInput.type = "json";
81
81
  }
82
82
  }
83
- if (resolvedInput.type !== 'json') {
83
+ if (resolvedInput.type !== "json") {
84
84
  // resolve the absolute path of the schema
85
85
  resolvedInput.path = url.resolve(url.cwd(), resolvedInput.path);
86
86
  }
87
87
  return resolvedInput;
88
88
  };
89
89
  exports.getResolvedInput = getResolvedInput;
90
+ const _ensureResolvedInputPath = (input, fallbackPath) => {
91
+ if (input.type === "json" && (!input.path || input.path.length === 0)) {
92
+ return { ...input, path: fallbackPath };
93
+ }
94
+ return input;
95
+ };
96
+ // NOTE(review): _ensureResolvedInputPath above is currently unreferenced — confirm whether it should be wired in or removed
90
97
  /**
91
98
  * This class parses a JSON schema, builds a map of its JSON references and their resolved values,
92
99
  * and provides methods for traversing, manipulating, and dereferencing those references.
@@ -108,6 +115,9 @@ class $RefParser {
108
115
  * @readonly
109
116
  */
110
117
  this.schema = null;
118
+ this.schemaMany = [];
119
+ this.schemaManySources = [];
120
+ this.sourcePathToPrefix = new Map();
111
121
  }
112
122
  /**
113
123
  * Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
@@ -137,6 +147,26 @@ class $RefParser {
137
147
  }
138
148
  return this.schema;
139
149
  }
150
+ /**
151
+ * Bundles multiple roots (files/URLs/objects) into a single schema by creating a synthetic root
152
+ * that references each input, resolving all externals, and then hoisting via the existing bundler.
153
+ */
154
+ async bundleMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs, }) {
155
+ await this.parseMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs });
156
+ this.mergeMany();
157
+ await (0, resolve_external_js_1.resolveExternal)(this, this.options);
158
+ const errors = errors_js_1.JSONParserErrorGroup.getParserErrors(this);
159
+ if (errors.length > 0) {
160
+ throw new errors_js_1.JSONParserErrorGroup(this);
161
+ }
162
+ (0, bundle_js_1.bundle)(this, this.options);
163
+ // Merged root is ready for bundling
164
+ const errors2 = errors_js_1.JSONParserErrorGroup.getParserErrors(this);
165
+ if (errors2.length > 0) {
166
+ throw new errors_js_1.JSONParserErrorGroup(this);
167
+ }
168
+ return this.schema;
169
+ }
140
170
  /**
141
171
  * Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
142
172
  *
@@ -181,17 +211,17 @@ class $RefParser {
181
211
  if (schema) {
182
212
  // immediately add a new $Ref with the schema object as value
183
213
  const $ref = this.$refs._add(path);
184
- $ref.pathType = url.isFileSystemPath(path) ? 'file' : 'http';
214
+ $ref.pathType = url.isFileSystemPath(path) ? "file" : "http";
185
215
  $ref.value = schema;
186
216
  }
187
- else if (type !== 'json') {
217
+ else if (type !== "json") {
188
218
  const file = (0, parse_js_1.newFile)(path);
189
219
  // Add a new $Ref for this file, even though we don't have the value yet.
190
220
  // This ensures that we don't simultaneously read & parse the same file multiple times
191
221
  const $refAdded = this.$refs._add(file.url);
192
222
  $refAdded.pathType = type;
193
223
  try {
194
- const resolver = type === 'file' ? file_js_1.fileResolver : url_js_1.urlResolver;
224
+ const resolver = type === "file" ? file_js_1.fileResolver : url_js_1.urlResolver;
195
225
  await resolver.handler({
196
226
  arrayBuffer,
197
227
  fetch,
@@ -208,7 +238,7 @@ class $RefParser {
208
238
  throw err;
209
239
  }
210
240
  }
211
- if (schema === null || typeof schema !== 'object' || Buffer.isBuffer(schema)) {
241
+ if (schema === null || typeof schema !== "object" || Buffer.isBuffer(schema)) {
212
242
  throw ono_1.ono.syntax(`"${this.$refs._root$Ref.path || schema}" is not a valid JSON Schema`);
213
243
  }
214
244
  this.schema = schema;
@@ -216,6 +246,279 @@ class $RefParser {
216
246
  schema,
217
247
  };
218
248
  }
249
+ async parseMany({ arrayBuffer, fetch, pathOrUrlOrSchemas, resolvedInputs: _resolvedInputs, }) {
250
+ const resolvedInputs = [...(_resolvedInputs || [])];
251
+ resolvedInputs.push(...(pathOrUrlOrSchemas.map((schema) => (0, exports.getResolvedInput)({ pathOrUrlOrSchema: schema })) || []));
252
+ this.schemaMany = [];
253
+ this.schemaManySources = [];
254
+ this.sourcePathToPrefix = new Map();
255
+ for (let i = 0; i < resolvedInputs.length; i++) {
256
+ const resolvedInput = resolvedInputs[i];
257
+ const { path, type } = resolvedInput;
258
+ let { schema } = resolvedInput;
259
+ if (schema) {
260
+ // keep schema as-is
261
+ }
262
+ else if (type !== "json") {
263
+ const file = (0, parse_js_1.newFile)(path);
264
+ // Add a new $Ref for this file, even though we don't have the value yet.
265
+ // This ensures that we don't simultaneously read & parse the same file multiple times
266
+ const $refAdded = this.$refs._add(file.url);
267
+ $refAdded.pathType = type;
268
+ try {
269
+ const resolver = type === "file" ? file_js_1.fileResolver : url_js_1.urlResolver;
270
+ await resolver.handler({
271
+ arrayBuffer: arrayBuffer?.[i],
272
+ fetch,
273
+ file,
274
+ });
275
+ const parseResult = await (0, parse_js_1.parseFile)(file, this.options);
276
+ $refAdded.value = parseResult.result;
277
+ schema = parseResult.result;
278
+ }
279
+ catch (err) {
280
+ if ((0, errors_js_1.isHandledError)(err)) {
281
+ $refAdded.value = err;
282
+ }
283
+ throw err;
284
+ }
285
+ }
286
+ if (schema === null || typeof schema !== "object" || Buffer.isBuffer(schema)) {
287
+ throw ono_1.ono.syntax(`"${this.$refs._root$Ref.path || schema}" is not a valid JSON Schema`);
288
+ }
289
+ this.schemaMany.push(schema);
290
+ this.schemaManySources.push(path && path.length ? path : url.cwd());
291
+ }
292
+ return {
293
+ schemaMany: this.schemaMany,
294
+ };
295
+ }
296
+ mergeMany() {
297
+ const schemas = this.schemaMany || [];
298
+ if (schemas.length === 0) {
299
+ throw (0, ono_1.ono)("mergeMany called with no schemas. Did you run parseMany?");
300
+ }
301
+ const merged = {};
302
+ // Determine spec version: prefer first occurrence of openapi, else swagger
303
+ let chosenOpenapi;
304
+ let chosenSwagger;
305
+ for (const s of schemas) {
306
+ if (!chosenOpenapi && s && typeof s.openapi === "string") {
307
+ chosenOpenapi = s.openapi;
308
+ }
309
+ if (!chosenSwagger && s && typeof s.swagger === "string") {
310
+ chosenSwagger = s.swagger;
311
+ }
312
+ if (chosenOpenapi && chosenSwagger) {
313
+ break;
314
+ }
315
+ }
316
+ if (typeof chosenOpenapi === "string") {
317
+ merged.openapi = chosenOpenapi;
318
+ }
319
+ else if (typeof chosenSwagger === "string") {
320
+ merged.swagger = chosenSwagger;
321
+ }
322
+ // Merge info: take first non-empty per-field across inputs
323
+ const infoAccumulator = {};
324
+ for (const s of schemas) {
325
+ const info = s?.info;
326
+ if (info && typeof info === "object") {
327
+ for (const [k, v] of Object.entries(info)) {
328
+ if (infoAccumulator[k] === undefined && v !== undefined) {
329
+ infoAccumulator[k] = JSON.parse(JSON.stringify(v));
330
+ }
331
+ }
332
+ }
333
+ }
334
+ if (Object.keys(infoAccumulator).length > 0) {
335
+ merged.info = infoAccumulator;
336
+ }
337
+ // Merge servers: union by url+description
338
+ const servers = [];
339
+ const seenServers = new Set();
340
+ for (const s of schemas) {
341
+ const arr = s?.servers;
342
+ if (Array.isArray(arr)) {
343
+ for (const srv of arr) {
344
+ if (srv && typeof srv === "object") {
345
+ const key = `${srv.url || ""}|${srv.description || ""}`;
346
+ if (!seenServers.has(key)) {
347
+ seenServers.add(key);
348
+ servers.push(JSON.parse(JSON.stringify(srv)));
349
+ }
350
+ }
351
+ }
352
+ }
353
+ }
354
+ if (servers.length > 0) {
355
+ merged.servers = servers;
356
+ }
357
+ merged.paths = {};
358
+ merged.components = {};
359
+ const componentSections = [
360
+ "schemas",
361
+ "parameters",
362
+ "requestBodies",
363
+ "responses",
364
+ "headers",
365
+ "securitySchemes",
366
+ "examples",
367
+ "links",
368
+ "callbacks",
369
+ ];
370
+ for (const sec of componentSections) {
371
+ merged.components[sec] = {};
372
+ }
373
+ const tagNameSet = new Set();
374
+ const tags = [];
375
+ const usedOpIds = new Set();
376
+ const baseName = (p) => {
377
+ try {
378
+ const withoutHash = p.split("#")[0];
379
+ const parts = withoutHash.split("/");
380
+ const filename = parts[parts.length - 1] || "schema";
381
+ const dot = filename.lastIndexOf(".");
382
+ const raw = dot > 0 ? filename.substring(0, dot) : filename;
383
+ return raw.replace(/[^A-Za-z0-9_-]/g, "_");
384
+ }
385
+ catch {
386
+ return "schema";
387
+ }
388
+ };
389
+ const unique = (set, proposed) => {
390
+ let name = proposed;
391
+ let i = 2;
392
+ while (set.has(name)) {
393
+ name = `${proposed}_${i++}`;
394
+ }
395
+ set.add(name);
396
+ return name;
397
+ };
398
+ const rewriteRef = (ref, refMap) => {
399
+ // OAS3: #/components/{section}/{name}...
400
+ let m = ref.match(/^#\/components\/([^/]+)\/([^/]+)(.*)$/);
401
+ if (m) {
402
+ const base = `#/components/${m[1]}/${m[2]}`;
403
+ const mapped = refMap.get(base);
404
+ if (mapped) {
405
+ return mapped + (m[3] || "");
406
+ }
407
+ }
408
+ // OAS2: #/definitions/{name}...
409
+ m = ref.match(/^#\/definitions\/([^/]+)(.*)$/);
410
+ if (m) {
411
+ const base = `#/components/schemas/${m[1]}`;
412
+ const mapped = refMap.get(base);
413
+ if (mapped) {
414
+ // map definitions -> components/schemas
415
+ return mapped + (m[2] || "");
416
+ }
417
+ }
418
+ return ref;
419
+ };
420
+ const cloneAndRewrite = (obj, refMap, tagMap, opIdPrefix, basePath) => {
421
+ if (obj === null || obj === undefined) {
422
+ return obj;
423
+ }
424
+ if (Array.isArray(obj)) {
425
+ return obj.map((v) => cloneAndRewrite(v, refMap, tagMap, opIdPrefix, basePath));
426
+ }
427
+ if (typeof obj !== "object") {
428
+ return obj;
429
+ }
430
+ const out = {};
431
+ for (const [k, v] of Object.entries(obj)) {
432
+ if (k === "$ref" && typeof v === "string") {
433
+ const s = v;
434
+ if (s.startsWith("#")) {
435
+ out[k] = rewriteRef(s, refMap);
436
+ }
437
+ else {
438
+ const proto = url.getProtocol(s);
439
+ if (proto === undefined) {
440
+ // relative external ref -> absolutize against source base path
441
+ out[k] = url.resolve(basePath + "#", s);
442
+ }
443
+ else {
444
+ out[k] = s;
445
+ }
446
+ }
447
+ }
448
+ else if (k === "tags" && Array.isArray(v) && v.every((x) => typeof x === "string")) {
449
+ out[k] = v.map((t) => tagMap.get(t) || t);
450
+ }
451
+ else if (k === "operationId" && typeof v === "string") {
452
+ out[k] = unique(usedOpIds, `${opIdPrefix}_${v}`);
453
+ }
454
+ else {
455
+ out[k] = cloneAndRewrite(v, refMap, tagMap, opIdPrefix, basePath);
456
+ }
457
+ }
458
+ return out;
459
+ };
460
+ for (let i = 0; i < schemas.length; i++) {
461
+ const schema = schemas[i] || {};
462
+ const sourcePath = this.schemaManySources[i] || `multi://input/${i + 1}`;
463
+ const prefix = baseName(sourcePath);
464
+ // Track prefix for this source path (strip hash). Only map real file/http paths
465
+ const withoutHash = url.stripHash(sourcePath);
466
+ const protocol = url.getProtocol(withoutHash);
467
+ if (protocol === undefined || protocol === "file" || protocol === "http" || protocol === "https") {
468
+ this.sourcePathToPrefix.set(withoutHash, prefix);
469
+ }
470
+ const refMap = new Map();
471
+ const tagMap = new Map();
472
+ const srcComponents = (schema.components || {});
473
+ for (const sec of componentSections) {
474
+ const group = srcComponents[sec] || {};
475
+ for (const [name] of Object.entries(group)) {
476
+ const newName = `${prefix}_${name}`;
477
+ refMap.set(`#/components/${sec}/${name}`, `#/components/${sec}/${newName}`);
478
+ }
479
+ }
480
+ const srcTags = Array.isArray(schema.tags) ? schema.tags : [];
481
+ for (const t of srcTags) {
482
+ if (!t || typeof t !== "object" || typeof t.name !== "string") {
483
+ continue;
484
+ }
485
+ const desired = t.name;
486
+ const finalName = tagNameSet.has(desired) ? `${prefix}_${desired}` : desired;
487
+ tagNameSet.add(finalName);
488
+ tagMap.set(desired, finalName);
489
+ if (!tags.find((x) => x && x.name === finalName)) {
490
+ tags.push({ ...t, name: finalName });
491
+ }
492
+ }
493
+ for (const sec of componentSections) {
494
+ const group = (schema.components && schema.components[sec]) || {};
495
+ for (const [name, val] of Object.entries(group)) {
496
+ const newName = `${prefix}_${name}`;
497
+ merged.components[sec][newName] = cloneAndRewrite(val, refMap, tagMap, prefix, url.stripHash(sourcePath));
498
+ }
499
+ }
500
+ const srcPaths = (schema.paths || {});
501
+ for (const [p, item] of Object.entries(srcPaths)) {
502
+ let targetPath = p;
503
+ if (merged.paths[p]) {
504
+ const trimmed = p.startsWith("/") ? p.substring(1) : p;
505
+ targetPath = `/${prefix}/${trimmed}`;
506
+ }
507
+ merged.paths[targetPath] = cloneAndRewrite(item, refMap, tagMap, prefix, url.stripHash(sourcePath));
508
+ }
509
+ }
510
+ if (tags.length > 0) {
511
+ merged.tags = tags;
512
+ }
513
+ // Rebuild $refs root using the first input's path to preserve external resolution semantics
514
+ const rootPath = this.schemaManySources[0] || url.cwd();
515
+ this.$refs = new refs_js_1.default();
516
+ const rootRef = this.$refs._add(rootPath);
517
+ rootRef.pathType = url.isFileSystemPath(rootPath) ? "file" : "http";
518
+ rootRef.value = merged;
519
+ this.schema = merged;
520
+ return merged;
521
+ }
219
522
  }
220
523
  exports.$RefParser = $RefParser;
221
524
  var url_js_2 = require("./resolvers/url.js");
@@ -114,11 +114,15 @@ async function resolve$Ref($ref, path, $refs, options) {
114
114
  const resolvedPath = url.resolve(path, $ref.$ref);
115
115
  const withoutHash = url.stripHash(resolvedPath);
116
116
  // $ref.$ref = url.relative($refs._root$Ref.path, resolvedPath);
117
+ // If this ref points back to an input source we've already merged, avoid re-importing
118
+ // by checking if the path (without hash) matches a known source in parser and we can serve it internally later.
119
+ // We keep normal flow but ensure cache hit if already added.
117
120
  // Do we already have this $ref?
118
121
  const ref = $refs._$refs[withoutHash];
119
122
  if (ref) {
120
- // We've already parsed this $ref, so use the existing value
121
- return Promise.resolve(ref.value);
123
+ // We've already parsed this $ref, so crawl it to resolve its own externals
124
+ const promises = crawl(ref.value, `${withoutHash}#`, $refs, options, new Set(), true);
125
+ return Promise.all(promises);
122
126
  }
123
127
  // Parse the $referenced file/url
124
128
  const file = (0, parse_js_1.newFile)(resolvedPath);
@@ -129,8 +133,8 @@ async function resolve$Ref($ref, path, $refs, options) {
129
133
  const resolvedInput = (0, index_js_1.getResolvedInput)({ pathOrUrlOrSchema: resolvedPath });
130
134
  $refAdded.pathType = resolvedInput.type;
131
135
  let promises = [];
132
- if (resolvedInput.type !== 'json') {
133
- const resolver = resolvedInput.type === 'file' ? file_js_1.fileResolver : url_js_1.urlResolver;
136
+ if (resolvedInput.type !== "json") {
137
+ const resolver = resolvedInput.type === "file" ? file_js_1.fileResolver : url_js_1.urlResolver;
134
138
  await resolver.handler({ file });
135
139
  const parseResult = await (0, parse_js_1.parseFile)(file, options);
136
140
  $refAdded.value = parseResult.result;
@@ -17,24 +17,18 @@ describe("bundle", () => {
17
17
  const pathOrUrlOrSchema = path.resolve("lib", "__tests__", "spec", "multiple-refs.json");
18
18
  const schema = (await refParser.bundle({ pathOrUrlOrSchema })) as any;
19
19
 
20
- // First reference should be fully resolved (no $ref)
21
- expect(schema.paths["/test1/{pathId}"].get.parameters[0].name).toBe("pathId");
22
- expect(schema.paths["/test1/{pathId}"].get.parameters[0].schema.type).toBe("string");
23
- expect(schema.paths["/test1/{pathId}"].get.parameters[0].schema.format).toBe("uuid");
24
- expect(schema.paths["/test1/{pathId}"].get.parameters[0].$ref).toBeUndefined();
25
-
26
- // Second reference should be remapped to point to the first reference
27
- expect(schema.paths["/test2/{pathId}"].get.parameters[0].$ref).toBe(
28
- "#/paths/~1test1~1%7BpathId%7D/get/parameters/0",
29
- );
30
-
31
- // Both should effectively resolve to the same data
20
+ // Both parameters should now be $ref to the same internal definition
32
21
  const firstParam = schema.paths["/test1/{pathId}"].get.parameters[0];
33
22
  const secondParam = schema.paths["/test2/{pathId}"].get.parameters[0];
34
23
 
35
- // The second parameter should resolve to the same data as the first
36
- expect(secondParam.$ref).toBeDefined();
37
- expect(firstParam).toEqual({
24
+ // Both parameters should be $refs to the hoisted, prefix-renamed component parameter
25
+ expect(firstParam.$ref).toBe("#/components/parameters/path-parameter_pathId");
26
+ expect(secondParam.$ref).toBe("#/components/parameters/path-parameter_pathId");
27
+
28
+ // The referenced parameter should exist and match the expected structure
29
+ expect(schema.components).toBeDefined();
30
+ expect(schema.components.parameters).toBeDefined();
31
+ expect(schema.components.parameters["path-parameter_pathId"]).toEqual({
38
32
  name: "pathId",
39
33
  in: "path",
40
34
  required: true,
@@ -7,6 +7,7 @@ describe("pointer", () => {
7
7
  const refParser = new $RefParser();
8
8
  const pathOrUrlOrSchema = path.resolve("lib", "__tests__", "spec", "openapi-paths-ref.json");
9
9
  const schema = (await refParser.bundle({ pathOrUrlOrSchema })) as any;
10
+ console.log(JSON.stringify(schema, null, 2));
10
11
 
11
12
  // The GET endpoint should have its schema defined inline
12
13
  const getSchema = schema.paths["/foo"].get.responses["200"].content["application/json"].schema;
@@ -16,11 +17,11 @@ describe("pointer", () => {
16
17
 
17
18
  // The POST endpoint should have its schema inlined (copied) instead of a $ref
18
19
  const postSchema = schema.paths["/foo"].post.responses["200"].content["application/json"].schema;
19
- expect(postSchema.$ref).toBeUndefined();
20
- expect(postSchema.type).toBe("object");
21
- expect(postSchema.properties.bar.type).toBe("string");
20
+ expect(postSchema.$ref).toBe("#/paths/~1foo/get/responses/200/content/application~1json/schema");
21
+ expect(postSchema.type).toBeUndefined();
22
+ expect(postSchema.properties?.bar?.type).toBeUndefined();
22
23
 
23
24
  // Both schemas should be identical objects
24
- expect(postSchema).toEqual(getSchema);
25
+ expect(postSchema).not.toBe(getSchema);
25
26
  });
26
27
  });
@@ -5,7 +5,7 @@
5
5
  "summary": "First endpoint using the same pathId schema",
6
6
  "parameters": [
7
7
  {
8
- "$ref": "path-parameter.json#/pathId"
8
+ "$ref": "path-parameter.json#/components/parameters/pathId"
9
9
  }
10
10
  ],
11
11
  "responses": {
@@ -20,7 +20,7 @@
20
20
  "summary": "Second endpoint using the same pathId schema",
21
21
  "parameters": [
22
22
  {
23
- "$ref": "path-parameter.json#/pathId"
23
+ "$ref": "path-parameter.json#/components/parameters/pathId"
24
24
  }
25
25
  ],
26
26
  "responses": {
@@ -1,12 +1,16 @@
1
1
  {
2
- "pathId": {
3
- "name": "pathId",
4
- "in": "path",
5
- "required": true,
6
- "schema": {
7
- "type": "string",
8
- "format": "uuid",
9
- "description": "Unique identifier for the path"
2
+ "components": {
3
+ "parameters": {
4
+ "pathId": {
5
+ "name": "pathId",
6
+ "in": "path",
7
+ "required": true,
8
+ "schema": {
9
+ "type": "string",
10
+ "format": "uuid",
11
+ "description": "Unique identifier for the path"
12
+ }
13
+ }
10
14
  }
11
15
  }
12
16
  }