pwn 0.5.332 → 0.5.334
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +5 -5
- data/Gemfile +3 -2
- data/bin/pwn_burp_suite_pro_active_scan +1 -1
- data/lib/pwn/plugins/burp_suite.rb +429 -143
- data/lib/pwn/plugins/open_api.rb +653 -184
- data/lib/pwn/plugins/transparent_browser.rb +48 -16
- data/lib/pwn/version.rb +1 -1
- data/third_party/pwn_rdoc.jsonl +13 -1
- metadata +19 -5
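The headline change is a rewrite of PWN::Plugins::OpenAPI (diff below): the module gains a generate_spec entry point that merges multiple OpenAPI documents, normalizes server URLs and path prefixes, repairs common schema defects, and validates the merged result against the official OAS meta-schema via the new json_schemer and rest-client dependencies. A minimal sketch of that validation pattern follows — the schema URL and the 'data_pointer'/'error' keys come straight from the diff; everything else is illustrative, not the gem's exact code:

    require 'json'
    require 'json_schemer'
    require 'rest-client'

    # Fetch the published OpenAPI 3.0.x meta-schema (the URL pinned in the diff).
    schema = JSON.parse(RestClient.get('https://spec.openapis.org/oas/3.0/schema/2021-09-28'))
    schemer = JSONSchemer.schema(schema)

    candidate = {
      'openapi' => '3.0.3',
      'info' => { 'title' => 'Example', 'version' => '1.0.0' },
      'paths' => {}
    }

    # json_schemer yields one Hash per violation; 'data_pointer' and 'error'
    # are the keys the new validate_openapi_spec copies into x-validation-fixes.
    schemer.validate(candidate).each { |err| puts "#{err['data_pointer']}: #{err['error']}" }
    puts schemer.valid?(candidate) # => true for this minimal document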
data/lib/pwn/plugins/open_api.rb
CHANGED
@@ -4,30 +4,44 @@ require 'yaml'
 require 'json'
 require 'uri'
 require 'fileutils'
+require 'json_schemer'
+require 'rest-client'

 module PWN
   module Plugins
-    #
-    # while
+    # Module to interact with OpenAPI specifications, merging multiple specs
+    # while resolving schema dependencies and ensuring OpenAPI compliance.
     module OpenAPI
       # Supported Method Parameters:
-      # openapi_spec = PWN::Plugins::OpenAPI.
-      #   spec_paths: 'required - array of OpenAPI file paths to merge
+      # openapi_spec = PWN::Plugins::OpenAPI.generate_spec(
+      #   spec_paths: 'required - array of OpenAPI file paths to merge',
       #   base_url: 'required - base URL for OpenAPI endpoints (e.g., http://fqdn.com)',
-      #   output_json_path: 'optional - path to save the merged OpenAPI JSON file
+      #   output_json_path: 'optional - path to save the merged OpenAPI JSON file',
+      #   target_version: 'optional - target OpenAPI version (default: 3.0.3)',
+      #   debug: 'optional - boolean to enable debug logging (default: false)'
       # )
-      def self.
+      def self.generate_spec(opts = {})
         spec_paths = opts[:spec_paths] ||= []
         raise ArgumentError, 'spec_paths must be a non-empty array' if spec_paths.empty?

         base_url = opts[:base_url]
         raise ArgumentError, 'base_url is required' if base_url.nil? || base_url.empty?

-
-
+        target_version = opts[:target_version] || '3.0.3'
+        raise ArgumentError, "Unsupported OpenAPI version: #{target_version}" unless %w[3.0.0 3.0.1 3.0.2 3.0.3 3.1.0].include?(target_version)
+
         output_json_path = opts[:output_json_path]
+        raise ArgumentError, 'output_json_path is required' if output_json_path.nil? || output_json_path.empty?
+
+        debug = opts[:debug] || false
+        validation_fixes = []

         begin
+          # Parse base_url to extract host and default base path
+          normalized_base_url, default_base_path = normalize_url(url: base_url)
+          default_base_path ||= '' # Fallback if base_url has no path
+          log("Using normalized base URL: #{normalized_base_url}, default base path: #{default_base_path}", debug: debug)
+
          # Load and parse all OpenAPI files
          specs = {}
          spec_paths.each do |path|
@@ -47,86 +61,299 @@ module PWN
            end
          end

+          specs.each do |path, spec|
+            # Pre-validate input specs
+            if spec['paths'].is_a?(Hash)
+              spec['paths'].each do |endpoint, path_item|
+                next unless path_item.is_a?(Hash)
+
+                path_item.each do |method, operation|
+                  next unless operation.is_a?(Hash) && operation['parameters'].is_a?(Array)
+
+                  param_names = operation['parameters'].map { |p| p['name'] }.compact
+                  duplicates = param_names.tally.select { |_, count| count > 1 }.keys
+                  raise "Duplicate parameters found in #{path} for path '#{endpoint}' (method: #{method}): #{duplicates.join(', ')}" unless duplicates.empty?
+
+                  operation['parameters'].each do |param|
+                    next unless param['in'] == 'path'
+
+                    raise "Path parameter #{param['name']} in #{path} (path: #{endpoint}, method: #{method}) must have a schema" unless param['schema'].is_a?(Hash)
+                  end
+                end
+              end
+            end
+
+            # Clean up null schemas in each spec
+            clean_null_schemas(spec, path, '', validation_fixes, debug)
+
+            # Fix invalid header definitions
+            if spec['components']&.key?('headers')
+              spec['components']['headers'].each do |header_name, header|
+                next unless header.is_a?(Hash)
+
+                if header.key?('name') || header.key?('in')
+                  validation_fixes << {
+                    path: "/components/headers/#{header_name}",
+                    error: "Invalid properties 'name' or 'in' in header",
+                    fix: "Removed 'name' and 'in' from header definition"
+                  }
+                  log("Fixing header '#{header_name}' in #{path}: Removing invalid 'name' and 'in' properties", debug: debug)
+                  header.delete('name')
+                  header.delete('in')
+                end
+                next unless header['schema'].nil?
+
+                validation_fixes << {
+                  path: "/components/headers/#{header_name}",
+                  error: 'Header schema is null',
+                  fix: 'Added default schema { type: string }'
+                }
+                log("Fixing header '#{header_name}' in #{path}: Replacing null schema with default { type: string }", debug: debug)
+                header['schema'] = { 'type' => 'string' }
+              end
+            end
+
+            # Fix schema items for arrays (e.g., mediaServers)
+            next unless spec['components']&.key?('schemas')
+
+            spec['components']['schemas'].each do |schema_name, schema|
+              fix_array_items(schema, path, "/components/schemas/#{schema_name}", validation_fixes, debug)
+            end
+          end
+
+          # # Pre-validate input specs
+          # specs.each do |path, spec|
+          # next unless spec['paths'].is_a?(Hash)
+
+          # spec['paths'].each do |endpoint, path_item|
+          # next unless path_item.is_a?(Hash)
+
+          # path_item.each do |method, operation|
+          # next unless operation.is_a?(Hash) && operation['parameters'].is_a?(Array)
+
+          # param_names = operation['parameters'].map { |p| p['name'] }.compact
+          # duplicates = param_names.tally.select { |_, count| count > 1 }.keys
+          # raise "Duplicate parameters found in #{path} for path '#{endpoint}' (method: #{method}): #{duplicates.join(', ')}" unless duplicates.empty?
+
+          # operation['parameters'].each do |param|
+          # next unless param['in'] == 'path'
+
+          # raise "Path parameter #{param['name']} in #{path} (path: #{endpoint}, method: #{method}) must have a schema" unless param['schema'].is_a?(Hash)
+          # end
+          # end
+          # end
+          # end
+
+          # # Fix invalid header definitions
+          # specs.each do |path, spec|
+          # # Clean up null schemas in each spec
+          # clean_null_schemas(spec, path, '', validation_fixes, debug)
+
+          # next unless spec['components']&.key?('headers')
+
+          # spec['components']['headers'].each do |header_name, header|
+          # next unless header.is_a?(Hash)
+
+          # if header.key?('name') || header.key?('in')
+          # validation_fixes << {
+          # path: "/components/headers/#{header_name}",
+          # error: "Invalid properties 'name' or 'in' in header",
+          # fix: "Removed 'name' and 'in' from header definition"
+          # }
+          # log("Fixing header '#{header_name}' in #{path}: Removing invalid 'name' and ''in' properties", debug: debug)
+          # header.delete('name')
+          # header.delete('in')
+          # end
+          # next unless header['schema'].nil?
+
+          # validation_fixes << {
+          # path: "/components/headers/#{header_name}",
+          # error: 'Header schema is null',
+          # fix: 'Added default schema { type: string }'
+          # }
+          # log("Fixing header '#{header_name}' in #{path}: Replacing null schema with default { type: string }", debug: debug)
+          # header['schema'] = { 'type' => 'string' }
+          # end
+          # end
+
+          # Fix schema items for arrays (e.g., mediaServers)
+          # specs.each do |path, spec|
+          # next unless spec['components']&.key?('schemas')
+
+          # spec['components']['schemas'].each do |schema_name, schema|
+          # fix_array_items(schema, path, "/components/schemas/#{schema_name}", validation_fixes, debug)
+          # end
+          # end
+
          # Determine dependencies based on $ref
          dependencies = {}
          specs.each do |path, spec|
-            dependencies[path] = []
+            dependencies[path] = [] # Initialize empty array for all paths
            refs = extract_refs(spec: spec, spec_paths: spec_paths)
            refs.each do |ref|
-              dep_path = resolve_ref_path(ref, spec_paths, referencing_file: path)
+              dep_path = resolve_ref_path(ref: ref, spec_paths: spec_paths, referencing_file: path)
              dependencies[path] << dep_path if specs.key?(dep_path) && dep_path != path
            end
          end

-          # Sort files by dependencies
-          ordered_paths, cycle_info = topological_sort(dependencies: dependencies)
+          # Sort files by dependencies
+          ordered_paths, cycle_info = topological_sort(dependencies: dependencies, spec_paths: spec_paths)
          if cycle_info
-
+            log("Cyclic dependencies detected: #{cycle_info.join(' -> ')}. Processing files in provided order.", debug: debug)
            ordered_paths = spec_paths
          end

-          #
+          # Initialize merged specification with a single server
          merged_spec = {
-            'openapi' =>
-            'info' => {
-
+            'openapi' => target_version,
+            'info' => {
+              'title' => 'Merged OpenAPI Specification',
+              'version' => '1.0.0'
+            },
+            'servers' => [{ 'url' => normalized_base_url, 'description' => 'Default server' }],
            'paths' => {},
-            'components' => {},
+            'components' => { 'schemas' => {}, 'headers' => {} },
            'tags' => [],
            'security' => []
          }

+          # Collect base paths from server URLs
+          server_base_paths = {}
+
          ordered_paths.each do |path|
            spec = specs[path]
            unless spec.is_a?(Hash)
-
+              log("Skipping #{path}: Invalid OpenAPI specification", debug: debug)
              next
            end

-            #
-            resolved_spec = resolve_refs(spec: spec, specs: specs, spec_paths: spec_paths, referencing_file: path)
+            log("Warning: #{path} uses OpenAPI version #{spec['openapi']}, which may not be compatible with target version #{target_version}", debug: debug) if spec['openapi'] && !spec['openapi'].start_with?(target_version.split('.')[0..1].join('.'))

-
-
+            if spec['definitions'] && target_version.start_with?('3.')
+              log("Migrating OpenAPI 2.0 'definitions' to 'components/schemas' for #{path}", debug: debug)
+              spec['components'] ||= {}
+              spec['components']['schemas'] = spec.delete('definitions')
+            end

-
-            merged_spec['openapi'] = resolved_spec['openapi'] if resolved_spec['openapi'] && (resolved_spec['openapi'] > merged_spec['openapi'])
+            resolved_spec = resolve_refs(spec: spec, specs: specs, spec_paths: spec_paths, referencing_file: path, debug: debug)

-            #
-
+            # Process server URLs
+            selected_server = nil
+            server_base_path = nil
+            absolute_url = nil

-            # Merge 'servers'
            if resolved_spec['servers']
              servers = resolved_spec['servers'].is_a?(Array) ? resolved_spec['servers'] : [resolved_spec['servers']]
-
-
-
-
-
-
-
+              # Prioritize server with non-empty path
+              selected_server = servers.find { |s| s.is_a?(Hash) && s['url'] && !URI.parse(s['url']).path.empty? } ||
+                                servers.find { |s| s.is_a?(Hash) && s['description'] } ||
+                                servers.first
+
+              server_url = selected_server.is_a?(Hash) ? selected_server['url'] : selected_server
+              if server_url.is_a?(String)
+                absolute_url, server_base_path = normalize_url(url: server_url, base_url: normalized_base_url)
+                server_base_path ||= default_base_path
+                log("Selected server URL: #{server_url}, normalized: #{absolute_url}, base path: #{server_base_path} for #{path}", debug: debug)
+                server_obj = selected_server.is_a?(Hash) ? selected_server.merge('url' => absolute_url) : { 'url' => absolute_url }
+                unless merged_spec['servers'].any? { |s| s['url'] == absolute_url }
+                  merged_spec['servers'] << server_obj
+                  # Update default_base_path if servers length > 1
+                  if merged_spec['servers'].length > 1
+                    last_server_url = merged_spec['servers'].last['url']
+                    new_base_path = URI.parse(last_server_url).path&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+                    default_base_path = new_base_path || default_base_path
+                    log("Updated default_base_path to '#{default_base_path}' based on last server: #{last_server_url}", debug: debug)
+                  end
+                end
+              else
+                log("No valid server URL in #{path}, using default base path: #{default_base_path}", debug: debug)
+                absolute_url = normalized_base_url
+                server_base_path = default_base_path
              end
+            else
+              # Check dependencies for server URLs
+              (dependencies[path] || []).each do |dep_path|
+                dep_spec = specs[dep_path]
+                next unless dep_spec['servers']
+
+                dep_servers = dep_spec['servers'].is_a?(Array) ? dep_spec['servers'] : [dep_spec['servers']]
+                dep_server = dep_servers.find { |s| s.is_a?(Hash) && s['url'] && !URI.parse(s['url']).path.empty? }
+                next unless dep_server
+
+                dep_server_url = dep_server['url']
+                absolute_url, server_base_path = normalize_url(url: dep_server_url, base_url: normalized_base_url)
+                server_base_path ||= default_base_path
+                log("Using dependency server URL: #{dep_server_url}, normalized: #{absolute_url}, base path: #{server_base_path} for #{path}", debug: debug)
+                server_obj = dep_server.merge('url' => absolute_url)
+                unless merged_spec['servers'].any? { |s| s['url'] == absolute_url }
+                  merged_spec['servers'] << server_obj
+                  # Update default_base_path if servers length > 1
+                  if merged_spec['servers'].length > 1
+                    last_server_url = merged_spec['servers'].last['url']
+                    new_base_path = URI.parse(last_server_url).path&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+                    default_base_path = new_base_path || default_base_path
+                    log("Updated default_base_path to '#{default_base_path}' based on last server: #{last_server_url}", debug: debug)
+                  end
+                end
+                break
+              end
+              unless absolute_url
+                log("No servers defined in #{path} or dependencies, using default base path: #{default_base_path}", debug: debug)
+                absolute_url = normalized_base_url
+                server_base_path = default_base_path
+              end
+            end
+            server_base_paths[path] = server_base_path
+
+            # Normalize paths
+            if resolved_spec['paths'].is_a?(Hash)
+              resolved_spec['paths'] = validate_path_parameters(
+                resolved_spec['paths'],
+                path,
+                server_base_path: server_base_path,
+                debug: debug
+              )
+            end
+
+            merged_spec['openapi'] = [resolved_spec['openapi'], target_version].max if resolved_spec['openapi']
+
+            if resolved_spec['info'].is_a?(Hash)
+              merged_spec['info'] = deep_merge(hash1: merged_spec['info'], hash2: resolved_spec['info'])
+              raise "Missing required info.title in #{path}" unless merged_spec['info']['title']
+              raise "Missing required info.version in #{path}" unless merged_spec['info']['version']
            end

-            # Merge 'paths'
            if resolved_spec['paths'].is_a?(Hash)
-
-
+              resolved_paths = resolved_spec['paths'].transform_keys do |endpoint|
+                effective_base_path = server_base_paths[path]
+                # Strip redundant base path before combining
+                normalized_endpoint = endpoint.to_s.sub(%r{^/+}, '').sub(%r{/+$}, '')
+                if effective_base_path && !effective_base_path.empty?
+                  prefix_pattern = Regexp.new("^#{Regexp.escape(effective_base_path)}/")
+                  while normalized_endpoint.match?(prefix_pattern)
+                    normalized_endpoint = normalized_endpoint.sub(prefix_pattern, '')
+                    log("Stripped '#{effective_base_path}' from endpoint '#{endpoint}' to '#{normalized_endpoint}' during merge in #{path}", debug: debug)
+                  end
+                end
+                normalized_endpoint = '/' if normalized_endpoint.empty?
+                combined_path = combine_paths(effective_base_path, normalized_endpoint)
+                log("Merging path '#{endpoint}' as '#{combined_path}' from #{path}", debug: debug)
+                combined_path
+              end
+              merged_spec['paths'].merge!(resolved_paths) do |api_endpoint, _existing, new|
+                log("Path '#{api_endpoint}' in #{path} conflicts with existing path. Overwriting.", debug: debug)
                new
              end
            end

-
-            merged_spec['components'] = deep_merge(merged_spec['components'], resolved_spec['components']) if resolved_spec['components'].is_a?(Hash)
+            merged_spec['components'] = deep_merge(hash1: merged_spec['components'], hash2: resolved_spec['components']) if resolved_spec['components'].is_a?(Hash)

-
-
-
-
-            merged_spec['tags'] << tag unless merged_spec['tags'].include?(tag)
+            if resolved_spec['tags'].is_a?(Array)
+              resolved_spec['tags'].each do |tag|
+                merged_spec['tags'] << tag unless merged_spec['tags'].include?(tag)
+              end
            end

-            # Merge 'security'
            next unless resolved_spec['security'].is_a?(Array)

            resolved_spec['security'].each do |security|
@@ -134,19 +361,51 @@ module PWN
            end
          end

-          #
+          # Filter servers to keep only those with paths matching the first folder in paths
+          if merged_spec['paths'].any?
+            path_first_folders = merged_spec['paths'].keys.map do |path|
+              path_segments = path.sub(%r{^/+}, '').split('/')
+              path_segments.first if path_segments.any?
+            end.compact.uniq
+            log("First folders in paths: #{path_first_folders}", debug: debug)
+
+            if path_first_folders.any?
+              merged_spec['servers'] = merged_spec['servers'].select do |server|
+                server_url = server['url']
+                server_path = URI.parse(server_url).path&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+                server_path && path_first_folders.include?(server_path)
+              end
+              log("Filtered servers to: #{merged_spec['servers'].map { |s| s['url'] }}", debug: debug)
+            end
+          end
+
+          # Ensure at least one server remains
          if merged_spec['servers'].empty?
-            merged_spec['servers'] = [{ 'url' => normalized_base_url }]
-
+            merged_spec['servers'] = [{ 'url' => normalized_base_url, 'description' => 'Default server' }]
+            log("No servers matched path prefixes. Reverted to default: #{normalized_base_url}", debug: debug)
          end

-          #
-
-
-
-
+          # Remove server path prefixes from path keys
+          merged_spec = remove_server_path_prefixes(merged_spec, debug: debug)
+
+          # Clean up null schemas in the merged spec
+          clean_null_schemas(merged_spec, 'merged_spec', '', validation_fixes, debug)
+
+          merged_spec, schema_validation_errors = validate_openapi_spec(
+            merged_spec: merged_spec,
+            target_version: target_version,
+            debug: debug
+          )
+
+          unless validation_fixes.empty? && schema_validation_errors.empty?
+            merged_spec['x-validation-fixes'] = validation_fixes + schema_validation_errors
+            log("Added validation fixes to spec: #{merged_spec['x-validation-fixes'].map { |f| f[:error] }.join(', ')}", debug: debug)
          end

+          FileUtils.mkdir_p(File.dirname(output_json_path))
+          File.write(output_json_path, JSON.pretty_generate(merged_spec))
+          log("Merged OpenAPI specification written to: #{output_json_path}", debug: debug)
+
          { individual_specs: specs, merged_spec: merged_spec }
        rescue Errno::ENOENT => e
          raise "Error accessing file: #{e.message}"
@@ -155,162 +414,364 @@ module PWN
        end
      end

-      #
-      private_class_method def self.
-
-
+      # Recursively clean null schemas
+      private_class_method def self.clean_null_schemas(spec, file_path, current_path, validation_fixes, debug)
+        case spec
+        when Hash
+          spec.each do |key, value|
+            new_path = current_path.empty? ? key : "#{current_path}/#{key}"
+            if key == 'schema' && value.nil?
+              validation_fixes << {
+                path: new_path,
+                error: 'Schema is null',
+                fix: 'Replaced with default schema { type: string }'
+              }
+              log("Fixing null schema at #{new_path} in #{file_path}: Replacing with default { type: string }", debug: debug)
+              spec[key] = { 'type' => 'string' }
+            else
+              clean_null_schemas(value, file_path, new_path, validation_fixes, debug)
+            end
+          end
+        when Array
+          spec.each_with_index do |item, i|
+            clean_null_schemas(item, file_path, "#{current_path}/#{i}", validation_fixes, debug)
+          end
+        end
+      end
+
+      private_class_method def self.fix_array_items(schema, file_path, schema_path, validation_fixes, debug)
+        return unless schema.is_a?(Hash)
+
+        if schema['type'] == 'array'
+          if schema['items'].nil?
+            validation_fixes << {
+              path: "#{schema_path}/items",
+              error: 'Array schema missing items',
+              fix: 'Added default items { type: string }'
+            }
+            log("Fixing missing items at #{schema_path}/items in #{file_path}: Adding default { type: string }", debug: debug)
+            schema['items'] = { 'type' => 'string' }
+          elsif schema['items'].is_a?(Array)
+            validation_fixes << {
+              path: "#{schema_path}/items",
+              error: 'Array items must be an object, not an array',
+              fix: 'Converted items to object with type: string'
+            }
+            log("Fixing invalid array items at #{schema_path}/items in #{file_path}: Converting array to object", debug: debug)
+            schema['items'] = { 'type' => 'string' }
+          end
+        end
+
+        if schema['properties'].is_a?(Hash)
+          schema['properties'].each do |prop_name, prop_schema|
+            fix_array_items(prop_schema, file_path, "#{schema_path}/properties/#{prop_name}", validation_fixes, debug)
+          end
+        end
+
+        %w[allOf anyOf oneOf].each do |keyword|
+          next unless schema[keyword].is_a?(Array)
+
+          schema[keyword].each_with_index do |sub_schema, i|
+            fix_array_items(sub_schema, file_path, "#{schema_path}/#{keyword}/#{i}", validation_fixes, debug)
+          end
+        end

-
-
-
-
+        fix_array_items(schema['items'], file_path, "#{schema_path}/items", validation_fixes, debug) if schema['items'].is_a?(Hash)
+      end
+
+      private_class_method def self.combine_paths(base_path, endpoint)
+        base_path = base_path.to_s.sub(%r{^/+}, '').sub(%r{/+$}, '')
+        endpoint = endpoint.to_s.sub(%r{^/+}, '').sub(%r{/+$}, '')
+        combined_path = if base_path.empty?
+                          endpoint.empty? ? '/' : "/#{endpoint}"
+                        elsif endpoint.empty?
+                          "/#{base_path}"
+                        else
+                          "/#{base_path}/#{endpoint}"
+                        end
+        combined_path.gsub(%r{/+}, '/')
+      end
+
+      private_class_method def self.validate_openapi_spec(opts = {})
+        merged_spec = opts[:merged_spec]
+        target_version = opts[:target_version] || '3.0.3'
+        debug = opts[:debug] || false
+        validation_errors = []
+
+        schema_urls = {
+          '3.0.0' => 'https://spec.openapis.org/oas/3.0/schema/2021-09-28',
+          '3.0.1' => 'https://spec.openapis.org/oas/3.0/schema/2021-09-28',
+          '3.0.2' => 'https://spec.openapis.org/oas/3.0/schema/2021-09-28',
+          '3.0.3' => 'https://spec.openapis.org/oas/3.0/schema/2021-09-28',
+          '3.1.0' => 'https://spec.openapis.org/oas/3.1/schema/2021-09-28'
+        }
+
+        schema_url = schema_urls[target_version]
+        raise "No schema available for OpenAPI version #{target_version}" unless schema_url
+
+        begin
+          schema = JSON.parse(RestClient.get(schema_url))
+          schemer = JSONSchemer.schema(schema)
+
+          unless schemer.valid?(merged_spec)
+            schemer.validate(merged_spec).each do |error|
+              validation_errors << {
+                path: error['data_pointer'],
+                error: error['error'],
+                fix: 'Validation failed; manual correction required'
+              }
+              log("Validation error: #{error['error']} at #{error['data_pointer']}", debug: debug)
+            end
+          end
+          [merged_spec, validation_errors]
+        rescue OpenURI::HTTPError => e
+          log("Failed to fetch OpenAPI schema from #{schema_url}: #{e.message}", debug: debug)
+          raise "Failed to validate OpenAPI specification: #{e.message}"
+        rescue StandardError => e
+          log("Error validating OpenAPI specification: #{e.message}", debug: debug)
+          raise "Failed to validate OpenAPI specification: #{e.message}"
+        end
+      end
+
+      private_class_method def self.validate_path_parameters(paths, file_path, opts = {})
+        debug = opts[:debug] || false
+        server_base_path = opts[:server_base_path]&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+
+        transformed_paths = {}
+        paths.each do |endpoint, path_item|
+          next unless path_item.is_a?(Hash)
+
+          # Normalize endpoint by stripping redundant server_base_path
+
+          normalized_endpoint = endpoint.to_s.sub(%r{^/+}, '').sub(%r{/+$}, '')
+          if server_base_path && !server_base_path.empty?
+            prefix_pattern = Regexp.new("^#{Regexp.escape(server_base_path)}/")
+            while normalized_endpoint.match?(prefix_pattern)
+              normalized_endpoint = normalized_endpoint.sub(prefix_pattern, '')
+              log("Stripped '#{server_base_path}' from endpoint '#{endpoint}' to '#{normalized_endpoint}' in #{file_path}", debug: debug)
+            end
+          end
+          normalized_endpoint = '/' if normalized_endpoint.empty?
+
+          log("Validating path '#{endpoint}' as '#{normalized_endpoint}' in #{file_path}", debug: debug)
+
+          path_params = path_item['parameters']&.select { |p| p['in'] == 'path' }&.map { |p| p['name'] }&.compact || []
+
+          path_item.each do |method, operation|
            next unless operation.is_a?(Hash)

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            operation_params = operation['parameters']&.select { |p| p['in'] == 'path' }&.map { |p| p['name'] }&.compact || []
+            all_params = (path_params + operation_params).uniq
+            required_params = normalized_endpoint.scan(/\{([^}]+)\}/).flatten
+
+            missing_params = required_params - all_params
+            unless missing_params.empty?
+              log("In #{file_path}, path '#{normalized_endpoint}' (method: #{method}) has undeclared path parameters: #{missing_params.join(', ')}. Adding default definitions.", debug: debug)
+              operation['parameters'] ||= []
+              missing_params.each do |param|
+                operation['parameters'] << {
+                  'name' => param,
+                  'in' => 'path',
+                  'required' => true,
+                  'schema' => { 'type' => 'string' }
+                }
+              end
+            end
+
+            operation['parameters']&.each do |param|
+              next unless param['in'] == 'path'
+              raise "Path parameter #{param['name']} in #{file_path} (path: #{normalized_endpoint}, method: #{method}) must be required" unless param['required']
+              next unless param['schema'].nil?
+
+              log("Path parameter #{param['name']} in #{file_path} (path: #{normalized_endpoint}, method: #{method}) has null schema. Adding default schema (type: string).", debug: debug)
+              validation_fixes << {
+                path: "#{normalized_endpoint}/parameters/#{param['name']}",
+                error: 'Path parameter schema is null',
+                fix: 'Added default schema { type: string }'
              }
+              param['schema'] = { 'type' => 'string' }
            end
+
+            param_names = operation['parameters']&.map { |p| p['name'] }&.compact || []
+            duplicates = param_names.tally.select { |_, count| count > 1 }.keys
+            raise "Duplicate parameters found in #{file_path} for path '#{normalized_endpoint}' (method: #{method}): #{duplicates.join(', ') || 'unknown'}" unless duplicates.empty?
          end
-          path_item
+          transformed_paths[normalized_endpoint] = path_item
        end
+        transformed_paths
      end

-
-
-
+      private_class_method def self.remove_server_path_prefixes(merged_spec, debug: false)
+        return merged_spec unless merged_spec['paths'].is_a?(Hash) && merged_spec['servers'].is_a?(Array)
+
+        transformed_paths = {}
+        servers = merged_spec['servers']
+        paths = merged_spec['paths']
+
+        paths.each do |path, path_item|
+          normalized_path = path.sub(%r{^/+}, '').sub(%r{/+$}, '')
+          path_segments = normalized_path.split('/').reject(&:empty?)
+          next unless path_segments.any?
+
+          first_segment = path_segments.first
+          matching_server = servers.find do |server|
+            server_url = server['url']
+            begin
+              server_path = URI.parse(server_url).path&.sub(%r{^/+}, '')&.sub(%r{/+$}, '')
+              server_path == first_segment
+            rescue URI::InvalidURIError
+              false
+            end
+          end
+
+          if matching_server
+            new_path = path_segments[1..-1].join('/')
+            new_path = '/' if new_path.empty?
+            new_path = "/#{new_path}" unless new_path.start_with?('/')
+            log("Removing server path prefix '#{first_segment}' from path '#{path}' to '#{new_path}'", debug: debug)
+            transformed_paths[new_path] = path_item
+          else
+            transformed_paths[path] = path_item
+          end
+        end
+
+        merged_spec['paths'] = transformed_paths
+        merged_spec
+      end
+
+      private_class_method def self.normalize_url(opts = {})
+        url = opts[:url]
+        base_url = opts[:base_url]
+        return [url, nil] if url.nil? || url.empty?

        begin
          uri = URI.parse(url)
-
-
-
-
-
-          else
-            URI.parse('http://localhost')
-          end
-
-          # Handle relative URLs
-          if url.start_with?('/')
-            # Absolute path relative to base_url
-            uri = base_uri.dup
-            uri.path = url
-            uri.query = nil
-            uri.fragment = nil
-          else
-            # Relative path
+          if uri.absolute? && uri.scheme && uri.host
+            base_path = uri.path.empty? ? nil : uri.path.sub(%r{^/+}, '').sub(%r{/+$}, '')
+            [uri.to_s.sub(%r{/+$}, ''), base_path]
+          elsif base_url && !base_url.empty?
+            base_uri = URI.parse(base_url)
            uri = base_uri.merge(url)
+            base_path = uri.path.empty? ? nil : uri.path.sub(%r{^/+}, '').sub(%r{/+$}, '')
+            [uri.to_s.sub(%r{/+$}, ''), base_path]
+          else
+            raise URI::InvalidURIError, "Relative URL '#{url}' provided without a valid base_url"
          end
-          uri.to_s
        rescue URI::InvalidURIError => e
-
-          base_url || 'http://localhost'
+          raise "Invalid server URL '#{url}': #{e.message}"
        end
      end

-
-
+      private_class_method def self.resolve_refs(opts = {})
+        spec = opts[:spec]
+        specs = opts[:specs]
+        spec_paths = opts[:spec_paths] ||= []
+        referencing_file = opts[:referencing_file] || 'unknown'
+        depth = opts[:depth] ||= 0
+        debug = opts[:debug] || false
+        max_depth = 50
+
+        raise "Maximum $ref resolution depth exceeded in #{referencing_file}" if depth > max_depth
+
        case spec
        when Hash
          resolved = {}
          spec.each do |key, value|
-
-
-
-
-
-
-
-
-
-
-            unless File.exist?(ref_path)
-              puts "Warning: File #{ref_path} does not exist on filesystem"
-              return value
+            if key == '$ref' && value.is_a?(String)
+              ref_path, json_pointer = value.split('#', 2)
+              json_pointer ||= ''
+              if ref_path.empty? || ref_path == '#'
+                log("Resolving internal $ref: #{value} in #{referencing_file}", debug: debug)
+                target = resolve_json_pointer(spec, json_pointer, referencing_file, referencing_file)
+                if target.nil?
+                  resolved[key] = value
+                else
+                  resolved = resolve_refs(spec: target, specs: specs, spec_paths: spec_paths, referencing_file: referencing_file, depth: depth + 1, debug: debug)
                end
-
-
-
-
-
-
-
+              else
+                matched_path = resolve_ref_path(ref: ref_path, spec_paths: spec_paths, referencing_file: referencing_file)
+                unless specs.key?(matched_path)
+                  log("Unable to resolve external $ref: #{value} from #{referencing_file}", debug: debug)
+                  begin
+                    return value unless File.exist?(ref_path)
+
+                    case File.extname(ref_path).downcase
+                    when '.yaml', '.yml'
+                      specs[ref_path] = YAML.load_file(ref_path, permitted_classes: [Symbol, Date, Time])
+                      spec_paths << ref_path unless spec_paths.include?(ref_path)
+                    when '.json'
+                      specs[ref_path] = JSON.parse(File.read(ref_path))
+                      spec_paths << ref_path unless spec_paths.include?(ref_path)
+                    else
+                      log("Unsupported file type for $ref: #{ref_path} from #{referencing_file}", debug: debug)
+                      return value
+                    end
+                  rescue StandardError => e
+                    log("Failed to load external $ref #{ref_path}: #{e.message} from #{referencing_file}", debug: debug)
+                    return value
+                  end
+                end
+                ref_spec = specs[matched_path]
+                target = json_pointer.empty? ? ref_spec : resolve_json_pointer(ref_spec, json_pointer, matched_path, referencing_file)
+                if target.nil?
+                  log("Invalid JSON pointer #{json_pointer} in #{matched_path} from #{referencing_file}", debug: debug)
+                  resolved[key] = value
                else
-
-                  return value
+                  resolved = resolve_refs(spec: target, specs: specs, spec_paths: spec_paths, referencing_file: matched_path, depth: depth + 1, debug: debug)
                end
-            rescue StandardError => e
-              puts "Warning: Failed to load #{ref_path}: #{e.message}"
-              return value
              end
+            else
+              resolved[key] = resolve_refs(spec: value, specs: specs, spec_paths: spec_paths, referencing_file: referencing_file, depth: depth, debug: debug)
            end
-
-            ref_spec = specs[matched_path]
-            resolved[key] = if json_pointer.empty?
-                              resolve_refs(spec: ref_spec, specs: specs, spec_paths: spec_paths, referencing_file: matched_path)
-                            else
-                              pointer_parts = json_pointer.split('/').reject(&:empty?)
-                              target = ref_spec
-                              pointer_parts.each do |part|
-                                target = target[part] if target.is_a?(Hash) || target.is_a?(Array)
-                                break unless target
-                              end
-                              if target
-                                resolve_refs(spec: target, specs: specs, spec_paths: spec_paths, referencing_file: matched_path)
-                              else
-                                puts "Warning: Invalid JSON pointer #{json_pointer} in #{matched_path} from #{referencing_file}"
-                                value
-                              end
-                            end
          end
          resolved
        when Array
-          spec.map { |item| resolve_refs(spec: item, specs: specs, spec_paths: spec_paths, referencing_file: referencing_file) }
+          spec.map { |item| resolve_refs(spec: item, specs: specs, spec_paths: spec_paths, referencing_file: referencing_file, depth: depth, debug: debug) }
        else
          spec
        end
      end

-
-
-
-
+      private_class_method def self.resolve_json_pointer(spec, json_pointer, _matched_path, _referencing_file)
+        pointer_parts = json_pointer.split('/').reject(&:empty?)
+        target = spec
+        pointer_parts.each do |part|
+          part = part.gsub('~1', '/').gsub('~0', '~')
+          target = target[part] if target.is_a?(Hash)
+          target = target[part.to_i] if target.is_a?(Array) && part.match?(/^\d+$/)
+          return nil unless target
+        end
+        target
+      end
+
+      private_class_method def self.resolve_ref_path(opts = {})
+        ref = opts[:ref]
+        spec_paths = opts[:spec_paths] ||= []
+        referencing_file = opts[:referencing_file] || 'unknown'

-
+        ref = ref.sub('file://', '') if ref.start_with?('file://')
        return ref if ref.start_with?('http://', 'https://')

-        # Normalize ref by removing leading './' or '/' for matching
        normalized_ref = ref.sub(%r{^\./}, '').sub(%r{^/}, '')
-
-        # Check if ref matches any path in spec_paths
        spec_paths.each do |path|
          normalized_path = path.sub(%r{^\./}, '').sub(%r{^/}, '')
          return path if normalized_path == normalized_ref || File.basename(normalized_path) == File.basename(normalized_ref)
        end

-        # If no match, return the original ref to allow fallback loading
-        puts "Warning: Could not resolve $ref '#{ref}' from #{referencing_file} to any spec_paths entry"
        ref
      end

-
-
+      private_class_method def self.deep_merge(opts = {})
+        hash1 = opts[:hash1] || {}
+        hash2 = opts[:hash2] || {}
+
+        # hash1.merge(hash2) do |key, old_val, new_val|
        hash1.merge(hash2) do |_key, old_val, new_val|
+          # if key.start_with?('x-')
+          # new_val || old_val
+          # elsif old_val.is_a?(Hash) && new_val.is_a?(Hash)
          if old_val.is_a?(Hash) && new_val.is_a?(Hash)
-            deep_merge(old_val, new_val)
+            deep_merge(hash1: old_val, hash2: new_val)
          elsif old_val.is_a?(Array) && new_val.is_a?(Array)
            (old_val + new_val).uniq
          else
@@ -319,7 +780,6 @@ module PWN
        end
      end

-      # Extracts $ref references and matches against spec_paths
      private_class_method def self.extract_refs(opts = {})
        spec = opts[:spec]
        spec_paths = opts[:spec_paths]
@@ -329,8 +789,8 @@ module PWN
        spec.each do |key, value|
          if key == '$ref' && value.is_a?(String)
            ref_path = value.split('#', 2).first
-            resolved_path = resolve_ref_path(ref_path, spec_paths, referencing_file: nil)
-            refs << resolved_path unless ref_path.start_with?('http://', 'https://')
+            resolved_path = resolve_ref_path(ref: ref_path, spec_paths: spec_paths, referencing_file: nil)
+            refs << resolved_path unless ref_path.empty? || ref_path.start_with?('http://', 'https://')
          end
          extract_refs(spec: value, spec_paths: spec_paths, refs: refs)
        end
@@ -340,34 +800,40 @@ module PWN
        refs
      end

-
-
-
+      private_class_method def self.dfs(opts = {})
+        node = opts[:node]
+        dependencies = opts[:dependencies]
+        visited = opts[:visited] ||= Set.new
+        temp = opts[:temp] ||= Set.new
+        result = opts[:result] ||= []
+        path = opts[:path] ||= []
+
        if temp.include?(node)
          path << node
          cycle_start = path.index(node)
          cycle = path[cycle_start..-1]
-          return cycle
+          return cycle
        end

        unless visited.include?(node)
          temp.add(node)
          path << node
          dependencies[node]&.each do |dep|
-            cycle = dfs(dep, dependencies, visited, temp, result, path)
-            return cycle if cycle
+            cycle = dfs(node: dep, dependencies: dependencies, visited: visited, temp: temp, result: result, path: path)
+            return cycle if cycle
          end
          visited.add(node)
          temp.delete(node)
          result << node
          path.pop
        end
-        nil
+        nil
      end
-      # rubocop:enable Metrics/ParameterLists

-
-
+      private_class_method def self.topological_sort(opts = {})
+        dependencies = opts[:dependencies]
+        spec_paths = opts[:spec_paths] || []
+
        result = []
        visited = Set.new
        temp = Set.new
@@ -377,15 +843,16 @@ module PWN
        dependencies.each_key do |node|
          next if visited.include?(node)

-          cycle = dfs(node, dependencies, visited, temp, result, path)
+          cycle = dfs(node: node, dependencies: dependencies, visited: visited, temp: temp, result: result, path: path)
          break if cycle
        end

-
-
-
-
-
+        [cycle ? spec_paths : result.reverse, cycle]
+      end
+
+      private_class_method def self.log(message, opts = {})
+        debug = opts[:debug] || false
+        warn("[DEBUG] #{message}") if debug
      end

      public_class_method def self.authors
@@ -396,10 +863,12 @@ module PWN

      public_class_method def self.help
        puts "USAGE:
-          openapi_spec = #{self}.
-          spec_paths: 'required - array of OpenAPI file paths to merge
-          base_url: 'required - base URL
-          output_json_path: 'optional - path to save the merged OpenAPI JSON file
+          openapi_spec = #{self}.generate_spec(
+            spec_paths: 'required - array of OpenAPI file paths to merge',
+            base_url: 'required - base URL for OpenAPI endpoints (e.g., http://fqdn.com)',
+            output_json_path: 'optional - path to save the merged OpenAPI JSON file',
+            target_version: 'optional - target OpenAPI version (default: 3.0.3)',
+            debug: 'optional - boolean to enable debug logging (default: false)'
          )

          #{self}.authors
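Taken together, the reworked public entry point is used roughly as follows; a hypothetical sketch assembled only from the documented parameters above (file names and URLs are placeholders). One wrinkle visible in the diff: the doc string still labels output_json_path optional, but generate_spec now raises ArgumentError when it is omitted.

    require 'pwn'

    result = PWN::Plugins::OpenAPI.generate_spec(
      spec_paths: ['specs/users.yaml', 'specs/orders.yaml'], # placeholder spec files
      base_url: 'http://fqdn.com',
      output_json_path: 'merged/openapi.json',               # written via FileUtils + JSON.pretty_generate
      target_version: '3.0.3',
      debug: true                                            # debug logs go to stderr via warn
    )

    # Per the diff, the return value carries both the parsed inputs and the merge:
    result[:merged_spec]['servers']  # merged, prefix-filtered server list
    result[:individual_specs].keys   # one entry per input spec path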