pwn 0.5.331 → 0.5.333

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,879 @@
+ # frozen_string_literal: true
+
+ require 'yaml'
+ require 'json'
+ require 'uri'
+ require 'fileutils'
+ require 'json_schemer'
+ require 'rest-client'
+ require 'set'
+
+ module PWN
+   module Plugins
+     # Module to interact with OpenAPI specifications, merging multiple specs
+     # while resolving schema dependencies and ensuring OpenAPI compliance.
+     module OpenAPI
+       # Supported Method Parameters:
+       # openapi_spec = PWN::Plugins::OpenAPI.generate_spec(
+       #   spec_paths: 'required - array of OpenAPI file paths to merge',
+       #   base_url: 'required - base URL for OpenAPI endpoints (e.g., http://fqdn.com)',
+       #   output_json_path: 'required - path to save the merged OpenAPI JSON file',
+       #   target_version: 'optional - target OpenAPI version (default: 3.0.3)',
+       #   debug: 'optional - boolean to enable debug logging (default: false)'
+       # )
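+       #
+       # Illustrative usage (a sketch; the spec files, URL, and output path
+       # below are hypothetical):
+       #   result = PWN::Plugins::OpenAPI.generate_spec(
+       #     spec_paths: ['specs/users.yaml', 'specs/billing.json'],
+       #     base_url: 'https://api.example.com',
+       #     output_json_path: 'merged/openapi.json'
+       #   )
+       #   result[:merged_spec]['openapi'] #=> '3.0.3' (the default target_version)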
+       def self.generate_spec(opts = {})
+         spec_paths = opts[:spec_paths] ||= []
+         raise ArgumentError, 'spec_paths must be a non-empty array' if spec_paths.empty?
+
+         base_url = opts[:base_url]
+         raise ArgumentError, 'base_url is required' if base_url.nil? || base_url.empty?
+
+         target_version = opts[:target_version] || '3.0.3'
+         raise ArgumentError, "Unsupported OpenAPI version: #{target_version}" unless %w[3.0.0 3.0.1 3.0.2 3.0.3 3.1.0].include?(target_version)
+
+         output_json_path = opts[:output_json_path]
+         raise ArgumentError, 'output_json_path is required' if output_json_path.nil? || output_json_path.empty?
+
+         debug = opts[:debug] || false
+         validation_fixes = []
+
+         begin
+           # Parse base_url to extract host and default base path
+           normalized_base_url, default_base_path = normalize_url(url: base_url)
+           default_base_path ||= '' # Fallback if base_url has no path
+           log("Using normalized base URL: #{normalized_base_url}, default base path: #{default_base_path}", debug: debug)
+
+           # Load and parse all OpenAPI files
+           specs = {}
+           spec_paths.each do |path|
+             raise Errno::ENOENT, "OpenAPI file not found: #{path}" unless File.exist?(path)
+
+             begin
+               case File.extname(path).downcase
+               when '.yaml', '.yml'
+                 specs[path] = YAML.load_file(path, permitted_classes: [Symbol, Date, Time])
+               when '.json'
+                 specs[path] = JSON.parse(File.read(path))
+               else
+                 raise "Unsupported file type: #{path} - only .yaml, .yml, and .json files are supported"
+               end
+             rescue YAML::SyntaxError, JSON::ParserError => e
+               raise "Error parsing OpenAPI file #{path}: #{e.message}"
+             end
+           end
+
+           specs.each do |path, spec|
+             # Pre-validate input specs
+             if spec['paths'].is_a?(Hash)
+               spec['paths'].each do |endpoint, path_item|
+                 next unless path_item.is_a?(Hash)
+
+                 path_item.each do |method, operation|
+                   next unless operation.is_a?(Hash) && operation['parameters'].is_a?(Array)
+
+                   param_names = operation['parameters'].map { |p| p['name'] }.compact
+                   duplicates = param_names.tally.select { |_, count| count > 1 }.keys
+                   raise "Duplicate parameters found in #{path} for path '#{endpoint}' (method: #{method}): #{duplicates.join(', ')}" unless duplicates.empty?
+
+                   operation['parameters'].each do |param|
+                     next unless param['in'] == 'path'
+
+                     raise "Path parameter #{param['name']} in #{path} (path: #{endpoint}, method: #{method}) must have a schema" unless param['schema'].is_a?(Hash)
+                   end
+                 end
+               end
+             end
+
+             # Clean up null schemas in each spec
+             clean_null_schemas(spec, path, '', validation_fixes, debug)
+
+             # Fix invalid header definitions
+             if spec['components']&.key?('headers')
+               spec['components']['headers'].each do |header_name, header|
+                 next unless header.is_a?(Hash)
+
+                 if header.key?('name') || header.key?('in')
+                   validation_fixes << {
+                     path: "/components/headers/#{header_name}",
+                     error: "Invalid properties 'name' or 'in' in header",
+                     fix: "Removed 'name' and 'in' from header definition"
+                   }
+                   log("Fixing header '#{header_name}' in #{path}: Removing invalid 'name' and 'in' properties", debug: debug)
+                   header.delete('name')
+                   header.delete('in')
+                 end
+                 next unless header['schema'].nil?
+
+                 validation_fixes << {
+                   path: "/components/headers/#{header_name}",
+                   error: 'Header schema is null',
+                   fix: 'Added default schema { type: string }'
+                 }
+                 log("Fixing header '#{header_name}' in #{path}: Replacing null schema with default { type: string }", debug: debug)
+                 header['schema'] = { 'type' => 'string' }
+               end
+             end
+
+             # Fix schema items for arrays (e.g., mediaServers)
+             next unless spec['components']&.key?('schemas')
+
+             spec['components']['schemas'].each do |schema_name, schema|
+               fix_array_items(schema, path, "/components/schemas/#{schema_name}", validation_fixes, debug)
+             end
+           end
+
+           # Determine dependencies based on $ref
+           dependencies = {}
+           specs.each do |path, spec|
+             dependencies[path] = [] # Initialize empty array for all paths
+             refs = extract_refs(spec: spec, spec_paths: spec_paths)
+             refs.each do |ref|
+               dep_path = resolve_ref_path(ref: ref, spec_paths: spec_paths, referencing_file: path)
+               dependencies[path] << dep_path if specs.key?(dep_path) && dep_path != path
+             end
+           end
+
+           # Sort files by dependencies
+           ordered_paths, cycle_info = topological_sort(dependencies: dependencies, spec_paths: spec_paths)
+           if cycle_info
+             log("Cyclic dependencies detected: #{cycle_info.join(' -> ')}. Processing files in provided order.", debug: debug)
+             ordered_paths = spec_paths
+           end
+
+           # Initialize merged specification with a single server
+           merged_spec = {
+             'openapi' => target_version,
+             'info' => {
+               'title' => 'Merged OpenAPI Specification',
+               'version' => '1.0.0'
+             },
+             'servers' => [{ 'url' => normalized_base_url, 'description' => 'Default server' }],
+             'paths' => {},
+             'components' => { 'schemas' => {}, 'headers' => {} },
+             'tags' => [],
+             'security' => []
+           }
+
+           # Collect base paths from server URLs
+           server_base_paths = {}
+
+           ordered_paths.each do |path|
+             spec = specs[path]
+             unless spec.is_a?(Hash)
+               log("Skipping #{path}: Invalid OpenAPI specification", debug: debug)
+               next
+             end
+
+             log("Warning: #{path} uses OpenAPI version #{spec['openapi']}, which may not be compatible with target version #{target_version}", debug: debug) if spec['openapi'] && !spec['openapi'].start_with?(target_version.split('.')[0..1].join('.'))
+
+             if spec['definitions'] && target_version.start_with?('3.')
+               log("Migrating OpenAPI 2.0 'definitions' to 'components/schemas' for #{path}", debug: debug)
+               spec['components'] ||= {}
+               spec['components']['schemas'] = spec.delete('definitions')
+             end
+
+             resolved_spec = resolve_refs(spec: spec, specs: specs, spec_paths: spec_paths, referencing_file: path, debug: debug)
+
+             # Process server URLs
+             selected_server = nil
+             server_base_path = nil
+             absolute_url = nil
+
+             if resolved_spec['servers']
+               servers = resolved_spec['servers'].is_a?(Array) ? resolved_spec['servers'] : [resolved_spec['servers']]
+               # Prioritize server with non-empty path
+               selected_server = servers.find { |s| s.is_a?(Hash) && s['url'] && !URI.parse(s['url']).path.empty? } ||
+                                 servers.find { |s| s.is_a?(Hash) && s['description'] } ||
+                                 servers.first
+
+               server_url = selected_server.is_a?(Hash) ? selected_server['url'] : selected_server
+               if server_url.is_a?(String)
+                 absolute_url, server_base_path = normalize_url(url: server_url, base_url: normalized_base_url)
+                 server_base_path ||= default_base_path
+                 log("Selected server URL: #{server_url}, normalized: #{absolute_url}, base path: #{server_base_path} for #{path}", debug: debug)
+                 server_obj = selected_server.is_a?(Hash) ? selected_server.merge('url' => absolute_url) : { 'url' => absolute_url }
+                 unless merged_spec['servers'].any? { |s| s['url'] == absolute_url }
+                   merged_spec['servers'] << server_obj
+                   # Update default_base_path if servers length > 1
+                   if merged_spec['servers'].length > 1
+                     last_server_url = merged_spec['servers'].last['url']
+                     new_base_path = URI.parse(last_server_url).path&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+                     default_base_path = new_base_path || default_base_path
+                     log("Updated default_base_path to '#{default_base_path}' based on last server: #{last_server_url}", debug: debug)
+                   end
+                 end
+               else
+                 log("No valid server URL in #{path}, using default base path: #{default_base_path}", debug: debug)
+                 absolute_url = normalized_base_url
+                 server_base_path = default_base_path
+               end
+             else
+               # Check dependencies for server URLs
+               (dependencies[path] || []).each do |dep_path|
+                 dep_spec = specs[dep_path]
+                 next unless dep_spec['servers']
+
+                 dep_servers = dep_spec['servers'].is_a?(Array) ? dep_spec['servers'] : [dep_spec['servers']]
+                 dep_server = dep_servers.find { |s| s.is_a?(Hash) && s['url'] && !URI.parse(s['url']).path.empty? }
+                 next unless dep_server
+
+                 dep_server_url = dep_server['url']
+                 absolute_url, server_base_path = normalize_url(url: dep_server_url, base_url: normalized_base_url)
+                 server_base_path ||= default_base_path
+                 log("Using dependency server URL: #{dep_server_url}, normalized: #{absolute_url}, base path: #{server_base_path} for #{path}", debug: debug)
+                 server_obj = dep_server.merge('url' => absolute_url)
+                 unless merged_spec['servers'].any? { |s| s['url'] == absolute_url }
+                   merged_spec['servers'] << server_obj
+                   # Update default_base_path if servers length > 1
+                   if merged_spec['servers'].length > 1
+                     last_server_url = merged_spec['servers'].last['url']
+                     new_base_path = URI.parse(last_server_url).path&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+                     default_base_path = new_base_path || default_base_path
+                     log("Updated default_base_path to '#{default_base_path}' based on last server: #{last_server_url}", debug: debug)
+                   end
+                 end
+                 break
+               end
+               unless absolute_url
+                 log("No servers defined in #{path} or dependencies, using default base path: #{default_base_path}", debug: debug)
+                 absolute_url = normalized_base_url
+                 server_base_path = default_base_path
+               end
+             end
+             server_base_paths[path] = server_base_path
+
+             # Normalize paths
+             if resolved_spec['paths'].is_a?(Hash)
+               resolved_spec['paths'] = validate_path_parameters(
+                 resolved_spec['paths'],
+                 path,
+                 server_base_path: server_base_path,
+                 validation_fixes: validation_fixes,
+                 debug: debug
+               )
+             end
+
+             # Lexicographic max is sufficient for the supported 3.x.y version strings
+             merged_spec['openapi'] = [resolved_spec['openapi'], target_version].max if resolved_spec['openapi']
+
+             if resolved_spec['info'].is_a?(Hash)
+               merged_spec['info'] = deep_merge(hash1: merged_spec['info'], hash2: resolved_spec['info'])
+               raise "Missing required info.title in #{path}" unless merged_spec['info']['title']
+               raise "Missing required info.version in #{path}" unless merged_spec['info']['version']
+             end
+
+             if resolved_spec['paths'].is_a?(Hash)
+               resolved_paths = resolved_spec['paths'].transform_keys do |endpoint|
+                 effective_base_path = server_base_paths[path]
+                 # Strip redundant base path before combining
+                 normalized_endpoint = endpoint.to_s.sub(%r{^/+}, '').sub(%r{/+$}, '')
+                 if effective_base_path && !effective_base_path.empty?
+                   prefix_pattern = Regexp.new("^#{Regexp.escape(effective_base_path)}/")
+                   while normalized_endpoint.match?(prefix_pattern)
+                     normalized_endpoint = normalized_endpoint.sub(prefix_pattern, '')
+                     log("Stripped '#{effective_base_path}' from endpoint '#{endpoint}' to '#{normalized_endpoint}' during merge in #{path}", debug: debug)
+                   end
+                 end
+                 normalized_endpoint = '/' if normalized_endpoint.empty?
+                 combined_path = combine_paths(effective_base_path, normalized_endpoint)
+                 log("Merging path '#{endpoint}' as '#{combined_path}' from #{path}", debug: debug)
+                 combined_path
+               end
+               merged_spec['paths'].merge!(resolved_paths) do |api_endpoint, _existing, new|
+                 log("Path '#{api_endpoint}' in #{path} conflicts with existing path. Overwriting.", debug: debug)
+                 new
+               end
+             end
+
+             merged_spec['components'] = deep_merge(hash1: merged_spec['components'], hash2: resolved_spec['components']) if resolved_spec['components'].is_a?(Hash)
+
+             if resolved_spec['tags'].is_a?(Array)
+               resolved_spec['tags'].each do |tag|
+                 merged_spec['tags'] << tag unless merged_spec['tags'].include?(tag)
+               end
+             end
+
+             next unless resolved_spec['security'].is_a?(Array)
+
+             resolved_spec['security'].each do |security|
+               merged_spec['security'] << security unless merged_spec['security'].include?(security)
+             end
+           end
+
+           # Filter servers to keep only those with paths matching the first folder in paths
+           if merged_spec['paths'].any?
+             path_first_folders = merged_spec['paths'].keys.map do |path|
+               path_segments = path.sub(%r{^/+}, '').split('/')
+               path_segments.first if path_segments.any?
+             end.compact.uniq
+             log("First folders in paths: #{path_first_folders}", debug: debug)
+
+             if path_first_folders.any?
+               merged_spec['servers'] = merged_spec['servers'].select do |server|
+                 server_url = server['url']
+                 server_path = URI.parse(server_url).path&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+                 server_path && path_first_folders.include?(server_path)
+               end
+               log("Filtered servers to: #{merged_spec['servers'].map { |s| s['url'] }}", debug: debug)
+             end
+           end
+
+           # Ensure at least one server remains
+           if merged_spec['servers'].empty?
+             merged_spec['servers'] = [{ 'url' => normalized_base_url, 'description' => 'Default server' }]
+             log("No servers matched path prefixes. Reverted to default: #{normalized_base_url}", debug: debug)
+           end
+
+           # Remove server path prefixes from path keys
+           merged_spec = remove_server_path_prefixes(merged_spec, debug: debug)
+
+           # Clean up null schemas in the merged spec
+           clean_null_schemas(merged_spec, 'merged_spec', '', validation_fixes, debug)
+
+           merged_spec, schema_validation_errors = validate_openapi_spec(
+             merged_spec: merged_spec,
+             target_version: target_version,
+             debug: debug
+           )
+
+           unless validation_fixes.empty? && schema_validation_errors.empty?
+             merged_spec['x-validation-fixes'] = validation_fixes + schema_validation_errors
+             log("Added validation fixes to spec: #{merged_spec['x-validation-fixes'].map { |f| f[:error] }.join(', ')}", debug: debug)
+           end
+
+           FileUtils.mkdir_p(File.dirname(output_json_path))
+           File.write(output_json_path, JSON.pretty_generate(merged_spec))
+           log("Merged OpenAPI specification written to: #{output_json_path}", debug: debug)
+
+           { individual_specs: specs, merged_spec: merged_spec }
+         rescue Errno::ENOENT => e
+           raise "Error accessing file: #{e.message}"
+         rescue StandardError => e
+           raise "Unexpected error: #{e.message}"
+         end
+       end
+
+       # Recursively clean null schemas
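+       #
+       # Behavior sketch (hypothetical fragment): walking
+       #   { 'schema' => nil }
+       # rewrites it in place to
+       #   { 'schema' => { 'type' => 'string' } }
+       # and appends the fix to validation_fixes.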
+       private_class_method def self.clean_null_schemas(spec, file_path, current_path, validation_fixes, debug)
+         case spec
+         when Hash
+           spec.each do |key, value|
+             new_path = current_path.empty? ? key : "#{current_path}/#{key}"
+             if key == 'schema' && value.nil?
+               validation_fixes << {
+                 path: new_path,
+                 error: 'Schema is null',
+                 fix: 'Replaced with default schema { type: string }'
+               }
+               log("Fixing null schema at #{new_path} in #{file_path}: Replacing with default { type: string }", debug: debug)
+               spec[key] = { 'type' => 'string' }
+             else
+               clean_null_schemas(value, file_path, new_path, validation_fixes, debug)
+             end
+           end
+         when Array
+           spec.each_with_index do |item, i|
+             clean_null_schemas(item, file_path, "#{current_path}/#{i}", validation_fixes, debug)
+           end
+         end
+       end
+
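+       # Behavior sketch (hypothetical schema): an array schema without items,
+       #   { 'type' => 'array' }
+       # becomes
+       #   { 'type' => 'array', 'items' => { 'type' => 'string' } }
+       # and nested properties/allOf/anyOf/oneOf schemas are visited recursively.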
+       private_class_method def self.fix_array_items(schema, file_path, schema_path, validation_fixes, debug)
+         return unless schema.is_a?(Hash)
+
+         if schema['type'] == 'array'
+           if schema['items'].nil?
+             validation_fixes << {
+               path: "#{schema_path}/items",
+               error: 'Array schema missing items',
+               fix: 'Added default items { type: string }'
+             }
+             log("Fixing missing items at #{schema_path}/items in #{file_path}: Adding default { type: string }", debug: debug)
+             schema['items'] = { 'type' => 'string' }
+           elsif schema['items'].is_a?(Array)
+             validation_fixes << {
+               path: "#{schema_path}/items",
+               error: 'Array items must be an object, not an array',
+               fix: 'Converted items to object with type: string'
+             }
+             log("Fixing invalid array items at #{schema_path}/items in #{file_path}: Converting array to object", debug: debug)
+             schema['items'] = { 'type' => 'string' }
+           end
+         end
+
+         if schema['properties'].is_a?(Hash)
+           schema['properties'].each do |prop_name, prop_schema|
+             fix_array_items(prop_schema, file_path, "#{schema_path}/properties/#{prop_name}", validation_fixes, debug)
+           end
+         end
+
+         %w[allOf anyOf oneOf].each do |keyword|
+           next unless schema[keyword].is_a?(Array)
+
+           schema[keyword].each_with_index do |sub_schema, i|
+             fix_array_items(sub_schema, file_path, "#{schema_path}/#{keyword}/#{i}", validation_fixes, debug)
+           end
+         end
+
+         fix_array_items(schema['items'], file_path, "#{schema_path}/items", validation_fixes, debug) if schema['items'].is_a?(Hash)
+       end
+
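+       # Behavior sketch (hypothetical inputs):
+       #   combine_paths('api/v1', 'users/{id}') #=> '/api/v1/users/{id}'
+       #   combine_paths('', '')                 #=> '/'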
+       private_class_method def self.combine_paths(base_path, endpoint)
+         base_path = base_path.to_s.sub(%r{^/+}, '').sub(%r{/+$}, '')
+         endpoint = endpoint.to_s.sub(%r{^/+}, '').sub(%r{/+$}, '')
+         combined_path = if base_path.empty?
+                           endpoint.empty? ? '/' : "/#{endpoint}"
+                         elsif endpoint.empty?
+                           "/#{base_path}"
+                         else
+                           "/#{base_path}/#{endpoint}"
+                         end
+         combined_path.gsub(%r{/+}, '/')
+       end
+
+       private_class_method def self.validate_openapi_spec(opts = {})
+         merged_spec = opts[:merged_spec]
+         target_version = opts[:target_version] || '3.0.3'
+         debug = opts[:debug] || false
+         validation_errors = []
+
+         schema_urls = {
+           '3.0.0' => 'https://spec.openapis.org/oas/3.0/schema/2021-09-28',
+           '3.0.1' => 'https://spec.openapis.org/oas/3.0/schema/2021-09-28',
+           '3.0.2' => 'https://spec.openapis.org/oas/3.0/schema/2021-09-28',
+           '3.0.3' => 'https://spec.openapis.org/oas/3.0/schema/2021-09-28',
+           '3.1.0' => 'https://spec.openapis.org/oas/3.1/schema/2021-09-28'
+         }
+
+         schema_url = schema_urls[target_version]
+         raise "No schema available for OpenAPI version #{target_version}" unless schema_url
+
+         begin
+           schema = JSON.parse(RestClient.get(schema_url))
+           schemer = JSONSchemer.schema(schema)
+
+           unless schemer.valid?(merged_spec)
+             schemer.validate(merged_spec).each do |error|
+               validation_errors << {
+                 path: error['data_pointer'],
+                 error: error['error'],
+                 fix: 'Validation failed; manual correction required'
+               }
+               log("Validation error: #{error['error']} at #{error['data_pointer']}", debug: debug)
+             end
+           end
+           [merged_spec, validation_errors]
+         rescue RestClient::ExceptionWithResponse => e
+           log("Failed to fetch OpenAPI schema from #{schema_url}: #{e.message}", debug: debug)
+           raise "Failed to validate OpenAPI specification: #{e.message}"
+         rescue StandardError => e
+           log("Error validating OpenAPI specification: #{e.message}", debug: debug)
+           raise "Failed to validate OpenAPI specification: #{e.message}"
+         end
+       end
+
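+       # Behavior sketch (hypothetical path item): for the endpoint
+       #   '/users/{id}'
+       # with no declared 'id' parameter, a default definition
+       #   { 'name' => 'id', 'in' => 'path', 'required' => true, 'schema' => { 'type' => 'string' } }
+       # is appended to the operation's parameters.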
+       private_class_method def self.validate_path_parameters(paths, file_path, opts = {})
+         debug = opts[:debug] || false
+         validation_fixes = opts[:validation_fixes] || []
+         server_base_path = opts[:server_base_path]&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+
+         transformed_paths = {}
+         paths.each do |endpoint, path_item|
+           next unless path_item.is_a?(Hash)
+
+           # Normalize endpoint by stripping redundant server_base_path
+           normalized_endpoint = endpoint.to_s.sub(%r{^/+}, '').sub(%r{/+$}, '')
+           if server_base_path && !server_base_path.empty?
+             prefix_pattern = Regexp.new("^#{Regexp.escape(server_base_path)}/")
+             while normalized_endpoint.match?(prefix_pattern)
+               normalized_endpoint = normalized_endpoint.sub(prefix_pattern, '')
+               log("Stripped '#{server_base_path}' from endpoint '#{endpoint}' to '#{normalized_endpoint}' in #{file_path}", debug: debug)
+             end
+           end
+           normalized_endpoint = '/' if normalized_endpoint.empty?
+
+           log("Validating path '#{endpoint}' as '#{normalized_endpoint}' in #{file_path}", debug: debug)
+
+           path_params = path_item['parameters']&.select { |p| p['in'] == 'path' }&.map { |p| p['name'] }&.compact || []
+
+           path_item.each do |method, operation|
+             next unless operation.is_a?(Hash)
+
+             operation_params = operation['parameters']&.select { |p| p['in'] == 'path' }&.map { |p| p['name'] }&.compact || []
+             all_params = (path_params + operation_params).uniq
+             required_params = normalized_endpoint.scan(/\{([^}]+)\}/).flatten
+
+             missing_params = required_params - all_params
+             unless missing_params.empty?
+               log("In #{file_path}, path '#{normalized_endpoint}' (method: #{method}) has undeclared path parameters: #{missing_params.join(', ')}. Adding default definitions.", debug: debug)
+               operation['parameters'] ||= []
+               missing_params.each do |param|
+                 operation['parameters'] << {
+                   'name' => param,
+                   'in' => 'path',
+                   'required' => true,
+                   'schema' => { 'type' => 'string' }
+                 }
+               end
+             end
+
+             operation['parameters']&.each do |param|
+               next unless param['in'] == 'path'
+               raise "Path parameter #{param['name']} in #{file_path} (path: #{normalized_endpoint}, method: #{method}) must be required" unless param['required']
+               next unless param['schema'].nil?
+
+               log("Path parameter #{param['name']} in #{file_path} (path: #{normalized_endpoint}, method: #{method}) has null schema. Adding default schema (type: string).", debug: debug)
+               validation_fixes << {
+                 path: "#{normalized_endpoint}/parameters/#{param['name']}",
+                 error: 'Path parameter schema is null',
+                 fix: 'Added default schema { type: string }'
+               }
+               param['schema'] = { 'type' => 'string' }
+             end
+
+             param_names = operation['parameters']&.map { |p| p['name'] }&.compact || []
+             duplicates = param_names.tally.select { |_, count| count > 1 }.keys
+             raise "Duplicate parameters found in #{file_path} for path '#{normalized_endpoint}' (method: #{method}): #{duplicates.join(', ')}" unless duplicates.empty?
+           end
+           transformed_paths[normalized_endpoint] = path_item
+         end
+         transformed_paths
+       end
+
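+       # Behavior sketch (hypothetical spec): with a server of
+       #   { 'url' => 'https://api.example.com/api' }
+       # the path key '/api/users' is rewritten to '/users'.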
+       private_class_method def self.remove_server_path_prefixes(merged_spec, debug: false)
+         return merged_spec unless merged_spec['paths'].is_a?(Hash) && merged_spec['servers'].is_a?(Array)
+
+         transformed_paths = {}
+         servers = merged_spec['servers']
+         paths = merged_spec['paths']
+
+         paths.each do |path, path_item|
+           normalized_path = path.sub(%r{^/+}, '').sub(%r{/+$}, '')
+           path_segments = normalized_path.split('/').reject(&:empty?)
+           unless path_segments.any?
+             # Preserve segmentless keys (e.g., '/') instead of silently dropping them
+             transformed_paths[path] = path_item
+             next
+           end
+
+           first_segment = path_segments.first
+           matching_server = servers.find do |server|
+             server_url = server['url']
+             begin
+               server_path = URI.parse(server_url).path&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+               server_path == first_segment
+             rescue URI::InvalidURIError
+               false
+             end
+           end
+
+           if matching_server
+             new_path = path_segments[1..-1].join('/')
+             new_path = '/' if new_path.empty?
+             new_path = "/#{new_path}" unless new_path.start_with?('/')
+             log("Removing server path prefix '#{first_segment}' from path '#{path}' to '#{new_path}'", debug: debug)
+             transformed_paths[new_path] = path_item
+           else
+             transformed_paths[path] = path_item
+           end
+         end
+
+         merged_spec['paths'] = transformed_paths
+         merged_spec
+       end
+
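+       # Behavior sketch (hypothetical URLs):
+       #   normalize_url(url: 'https://api.example.com/api/v1/')
+       #   #=> ['https://api.example.com/api/v1', 'api/v1']
+       #   normalize_url(url: '/api', base_url: 'https://api.example.com')
+       #   #=> ['https://api.example.com/api', 'api']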
+       private_class_method def self.normalize_url(opts = {})
+         url = opts[:url]
+         base_url = opts[:base_url]
+         return [url, nil] if url.nil? || url.empty?
+
+         begin
+           uri = URI.parse(url)
+           if uri.absolute? && uri.scheme && uri.host
+             base_path = uri.path.empty? ? nil : uri.path.sub(%r{^/+}, '').sub(%r{/+$}, '')
+             [uri.to_s.sub(%r{/+$}, ''), base_path]
+           elsif base_url && !base_url.empty?
+             base_uri = URI.parse(base_url)
+             uri = base_uri.merge(url)
+             base_path = uri.path.empty? ? nil : uri.path.sub(%r{^/+}, '').sub(%r{/+$}, '')
+             [uri.to_s.sub(%r{/+$}, ''), base_path]
+           else
+             raise URI::InvalidURIError, "Relative URL '#{url}' provided without a valid base_url"
+           end
+         rescue URI::InvalidURIError => e
+           raise "Invalid server URL '#{url}': #{e.message}"
+         end
+       end
+
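+       # Behavior sketch (hypothetical document): a node such as
+       #   { '$ref' => 'common.yaml#/components/schemas/User' }
+       # is replaced by the resolved (and recursively dereferenced) target from
+       # common.yaml; unresolvable $refs are left in place.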
+       private_class_method def self.resolve_refs(opts = {})
+         spec = opts[:spec]
+         specs = opts[:specs]
+         spec_paths = opts[:spec_paths] ||= []
+         referencing_file = opts[:referencing_file] || 'unknown'
+         depth = opts[:depth] ||= 0
+         debug = opts[:debug] || false
+         max_depth = 50
+
+         raise "Maximum $ref resolution depth exceeded in #{referencing_file}" if depth > max_depth
+
+         case spec
+         when Hash
+           resolved = {}
+           spec.each do |key, value|
+             if key == '$ref' && value.is_a?(String)
+               ref_path, json_pointer = value.split('#', 2)
+               json_pointer ||= ''
+               if ref_path.empty? || ref_path == '#'
+                 log("Resolving internal $ref: #{value} in #{referencing_file}", debug: debug)
+                 target = resolve_json_pointer(spec, json_pointer, referencing_file, referencing_file)
+                 if target.nil?
+                   resolved[key] = value
+                 else
+                   resolved = resolve_refs(spec: target, specs: specs, spec_paths: spec_paths, referencing_file: referencing_file, depth: depth + 1, debug: debug)
+                 end
+               else
+                 matched_path = resolve_ref_path(ref: ref_path, spec_paths: spec_paths, referencing_file: referencing_file)
+                 unless specs.key?(matched_path)
+                   log("Unable to resolve external $ref: #{value} from #{referencing_file}", debug: debug)
+                   begin
+                     unless File.exist?(ref_path)
+                       # Keep the unresolved $ref in place rather than aborting the walk
+                       resolved[key] = value
+                       next
+                     end
+
+                     case File.extname(ref_path).downcase
+                     when '.yaml', '.yml'
+                       specs[ref_path] = YAML.load_file(ref_path, permitted_classes: [Symbol, Date, Time])
+                       spec_paths << ref_path unless spec_paths.include?(ref_path)
+                     when '.json'
+                       specs[ref_path] = JSON.parse(File.read(ref_path))
+                       spec_paths << ref_path unless spec_paths.include?(ref_path)
+                     else
+                       log("Unsupported file type for $ref: #{ref_path} from #{referencing_file}", debug: debug)
+                       resolved[key] = value
+                       next
+                     end
+                     # Resolve against the freshly loaded file
+                     matched_path = ref_path
+                   rescue StandardError => e
+                     log("Failed to load external $ref #{ref_path}: #{e.message} from #{referencing_file}", debug: debug)
+                     resolved[key] = value
+                     next
+                   end
+                 end
+                 ref_spec = specs[matched_path]
+                 target = json_pointer.empty? ? ref_spec : resolve_json_pointer(ref_spec, json_pointer, matched_path, referencing_file)
+                 if target.nil?
+                   log("Invalid JSON pointer #{json_pointer} in #{matched_path} from #{referencing_file}", debug: debug)
+                   resolved[key] = value
+                 else
+                   resolved = resolve_refs(spec: target, specs: specs, spec_paths: spec_paths, referencing_file: matched_path, depth: depth + 1, debug: debug)
+                 end
+               end
+             else
+               resolved[key] = resolve_refs(spec: value, specs: specs, spec_paths: spec_paths, referencing_file: referencing_file, depth: depth, debug: debug)
+             end
+           end
+           resolved
+         when Array
+           spec.map { |item| resolve_refs(spec: item, specs: specs, spec_paths: spec_paths, referencing_file: referencing_file, depth: depth, debug: debug) }
+         else
+           spec
+         end
+       end
+
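+       # Behavior sketch (hypothetical pointer): with RFC 6901 escaping,
+       #   resolve_json_pointer({ 'paths' => { '/users' => 'ok' } }, '/paths/~1users', nil, nil)
+       #   #=> 'ok'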
+       private_class_method def self.resolve_json_pointer(spec, json_pointer, _matched_path, _referencing_file)
+         pointer_parts = json_pointer.split('/').reject(&:empty?)
+         target = spec
+         pointer_parts.each do |part|
+           part = part.gsub('~1', '/').gsub('~0', '~')
+           target = target[part] if target.is_a?(Hash)
+           target = target[part.to_i] if target.is_a?(Array) && part.match?(/^\d+$/)
+           return nil unless target
+         end
+         target
+       end
+
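+       # Behavior sketch (hypothetical paths): a ref matches a spec_paths entry
+       # either verbatim or by basename:
+       #   resolve_ref_path(ref: './user.yaml', spec_paths: ['specs/user.yaml'])
+       #   #=> 'specs/user.yaml'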
+       private_class_method def self.resolve_ref_path(opts = {})
+         ref = opts[:ref]
+         spec_paths = opts[:spec_paths] ||= []
+         referencing_file = opts[:referencing_file] || 'unknown'
+
+         ref = ref.sub('file://', '') if ref.start_with?('file://')
+         return ref if ref.start_with?('http://', 'https://')
+
+         normalized_ref = ref.sub(%r{^\./}, '').sub(%r{^/}, '')
+         spec_paths.each do |path|
+           normalized_path = path.sub(%r{^\./}, '').sub(%r{^/}, '')
+           return path if normalized_path == normalized_ref || File.basename(normalized_path) == File.basename(normalized_ref)
+         end
+
+         ref
+       end
+
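+       # Behavior sketch (hypothetical hashes):
+       #   deep_merge(hash1: { 'a' => { 'x' => 1 } }, hash2: { 'a' => { 'y' => 2 } })
+       #   #=> { 'a' => { 'x' => 1, 'y' => 2 } }
+       # Arrays are concatenated and de-duplicated; scalars prefer the new value.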
+       private_class_method def self.deep_merge(opts = {})
+         hash1 = opts[:hash1] || {}
+         hash2 = opts[:hash2] || {}
+
+         hash1.merge(hash2) do |_key, old_val, new_val|
+           if old_val.is_a?(Hash) && new_val.is_a?(Hash)
+             deep_merge(hash1: old_val, hash2: new_val)
+           elsif old_val.is_a?(Array) && new_val.is_a?(Array)
+             (old_val + new_val).uniq
+           else
+             new_val || old_val
+           end
+         end
+       end
+
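+       # Behavior sketch (hypothetical spec): collects file-based $ref targets,
+       # skipping internal ('#/...') and http(s) references:
+       #   extract_refs(spec: { '$ref' => 'user.yaml#/components/schemas/User' }, spec_paths: [])
+       #   #=> #<Set: {"user.yaml"}>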
+       private_class_method def self.extract_refs(opts = {})
+         spec = opts[:spec]
+         spec_paths = opts[:spec_paths]
+         refs = opts[:refs] ||= Set.new
+         case spec
+         when Hash
+           spec.each do |key, value|
+             if key == '$ref' && value.is_a?(String)
+               ref_path = value.split('#', 2).first
+               resolved_path = resolve_ref_path(ref: ref_path, spec_paths: spec_paths, referencing_file: nil)
+               refs << resolved_path unless ref_path.empty? || ref_path.start_with?('http://', 'https://')
+             end
+             extract_refs(spec: value, spec_paths: spec_paths, refs: refs)
+           end
+         when Array
+           spec.each { |item| extract_refs(spec: item, spec_paths: spec_paths, refs: refs) }
+         end
+         refs
+       end
+
+       private_class_method def self.dfs(opts = {})
+         node = opts[:node]
+         dependencies = opts[:dependencies]
+         visited = opts[:visited] ||= Set.new
+         temp = opts[:temp] ||= Set.new
+         result = opts[:result] ||= []
+         path = opts[:path] ||= []
+
+         if temp.include?(node)
+           path << node
+           cycle_start = path.index(node)
+           cycle = path[cycle_start..-1]
+           return cycle
+         end
+
+         unless visited.include?(node)
+           temp.add(node)
+           path << node
+           dependencies[node]&.each do |dep|
+             cycle = dfs(node: dep, dependencies: dependencies, visited: visited, temp: temp, result: result, path: path)
+             return cycle if cycle
+           end
+           visited.add(node)
+           temp.delete(node)
+           result << node
+           path.pop
+         end
+         nil
+       end
+
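+       # Behavior sketch (hypothetical dependency graph):
+       #   topological_sort(dependencies: { 'a.yaml' => ['b.yaml'], 'b.yaml' => [] })
+       #   #=> [['a.yaml', 'b.yaml'], nil]
+       # On a cycle, the original spec_paths order is returned along with the
+       # cycle's node list.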
+       private_class_method def self.topological_sort(opts = {})
+         dependencies = opts[:dependencies]
+         spec_paths = opts[:spec_paths] || []
+
+         result = []
+         visited = Set.new
+         temp = Set.new
+         path = []
+
+         cycle = nil
+         dependencies.each_key do |node|
+           next if visited.include?(node)
+
+           cycle = dfs(node: node, dependencies: dependencies, visited: visited, temp: temp, result: result, path: path)
+           break if cycle
+         end
+
+         [cycle ? spec_paths : result.reverse, cycle]
+       end
+
+       private_class_method def self.log(message, opts = {})
+         debug = opts[:debug] || false
+         warn("[DEBUG] #{message}") if debug
+       end
+
+       public_class_method def self.authors
+         "AUTHOR(S):
+           0day Inc. <support@0dayinc.com>
+         "
+       end
+
+       public_class_method def self.help
+         puts "USAGE:
+           openapi_spec = #{self}.generate_spec(
+             spec_paths: 'required - array of OpenAPI file paths to merge',
+             base_url: 'required - base URL for OpenAPI endpoints (e.g., http://fqdn.com)',
+             output_json_path: 'required - path to save the merged OpenAPI JSON file',
+             target_version: 'optional - target OpenAPI version (default: 3.0.3)',
+             debug: 'optional - boolean to enable debug logging (default: false)'
+           )
+
+           #{self}.authors
+         "
+       end
+     end
+   end
+ end