open_api_import 0.10.11 → 0.11.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/open_api_import/filter.rb +28 -0
- data/lib/open_api_import/get_data_all_of_bodies.rb +22 -0
- data/lib/open_api_import/get_examples.rb +88 -0
- data/lib/open_api_import/get_patterns.rb +67 -0
- data/lib/open_api_import/get_required_data.rb +30 -0
- data/lib/open_api_import/get_response_examples.rb +104 -0
- data/lib/open_api_import/open_api_import.rb +693 -0
- data/lib/open_api_import/pretty_hash_symbolized.rb +18 -0
- data/lib/open_api_import.rb +10 -1027
- metadata +13 -5
data/lib/open_api_import.rb
CHANGED
@@ -1,1034 +1,17 @@
|
|
1
1
|
require_relative "open_api_import/utils"
|
2
|
+
require_relative "open_api_import/filter"
|
3
|
+
require_relative "open_api_import/pretty_hash_symbolized"
|
4
|
+
require_relative "open_api_import/get_patterns"
|
5
|
+
require_relative "open_api_import/get_required_data"
|
6
|
+
require_relative "open_api_import/get_data_all_of_bodies"
|
7
|
+
require_relative "open_api_import/get_response_examples"
|
8
|
+
require_relative "open_api_import/get_examples"
|
9
|
+
require_relative "open_api_import/open_api_import"
|
10
|
+
|
11
|
+
include LibOpenApiImport
|
2
12
|
|
3
13
|
require "oas_parser"
|
4
14
|
require "rufo"
|
5
15
|
require "nice_hash"
|
6
16
|
require "logger"
|
7
17
|
|
8
|
-
class OpenApiImport
|
9
|
-
##############################################################################################
|
10
|
-
# Import a Swagger or Open API file and create a Ruby Request Hash file including all requests and responses.
|
11
|
-
# The http methods that will be treated are: 'get','post','put','delete', 'patch'.
|
12
|
-
# @param swagger_file [String]. Path and file name. Could be absolute or relative to project root folder.
|
13
|
-
# @param include_responses [Boolean]. (default: true) if you want to add the examples of responses in the resultant file.
|
14
|
-
# @param mock_response [Boolean]. (default:false) Add the first response on the request as mock_response to be used.
|
15
|
-
# In case using nice_http gem: if NiceHttp.use_mocks = true will use it instead of getting the real response from the WS.
|
16
|
-
# @param create_method_name [Symbol]. (:path, :operation_id, :operationId) (default: operation_id). How the name of the methods will be generated.
|
17
|
-
# path: it will be used the path and http method, for example for a GET on path: /users/list, the method name will be get_users_list
|
18
|
-
# operation_id: it will be used the operationId field but using the snake_case version, for example for listUsers: list_users
|
19
|
-
# operationId: it will be used the operationId field like it is, for example: listUsers
|
20
|
-
# @param name_for_module [Symbol]. (:path, :path_file, :fixed, :tags, :tags_file) (default: :path). How the module names will be created.
|
21
|
-
# @param create_constants [Boolean]. (default: false) For required arguments, it will create keyword arguments assigning by default a constant.
|
22
|
-
# @param silent [Boolean]. (default: false) It will display only errors.
|
23
|
-
# path: It will be used the first folder of the path to create the module name, for example the path /users/list will be in the module Users and all the requests from all modules in the same file.
|
24
|
-
# path_file: It will be used the first folder of the path to create the module name, for example the path /users/list will be in the module Users and each module will be in a new requests file.
|
25
|
-
# tags: It will be used the tags key to create the module name, for example the tags: [users,list] will create the module UsersList and all the requests from all modules in the same file.
|
26
|
-
# tags_file: It will be used the tags key to create the module name, for example the tags: [users,list] will create the module UsersList and and each module will be in a new requests file.
|
27
|
-
# fixed: all the requests will be under the module Requests
|
28
|
-
##############################################################################################
|
29
|
-
def self.from(swagger_file, create_method_name: :operation_id, include_responses: true, mock_response: false, name_for_module: :path, silent: false, create_constants: false)
|
30
|
-
begin
|
31
|
-
f = File.new("#{swagger_file}_open_api_import.log", "w")
|
32
|
-
f.sync = true
|
33
|
-
@logger = Logger.new f
|
34
|
-
puts "Logs file: #{swagger_file}_open_api_import.log" unless silent
|
35
|
-
rescue StandardError => e
|
36
|
-
warn "Not possible to create the Logger file"
|
37
|
-
warn e
|
38
|
-
@logger = Logger.new nil
|
39
|
-
end
|
40
|
-
|
41
|
-
begin
|
42
|
-
@logger.info "swagger_file: #{swagger_file}, include_responses: #{include_responses}, mock_response: #{mock_response}\n"
|
43
|
-
@logger.info "create_method_name: #{create_method_name}, name_for_module: #{name_for_module}\n"
|
44
|
-
|
45
|
-
file_to_convert = if swagger_file["./"].nil?
|
46
|
-
swagger_file
|
47
|
-
else
|
48
|
-
Dir.pwd.to_s + "/" + swagger_file.gsub("./", "")
|
49
|
-
end
|
50
|
-
unless File.exist?(file_to_convert)
|
51
|
-
raise "The file #{file_to_convert} doesn't exist"
|
52
|
-
end
|
53
|
-
|
54
|
-
file_errors = file_to_convert + ".errors.log"
|
55
|
-
File.delete(file_errors) if File.exist?(file_errors)
|
56
|
-
import_errors = ""
|
57
|
-
required_constants = []
|
58
|
-
|
59
|
-
begin
|
60
|
-
definition = OasParser::Definition.resolve(swagger_file)
|
61
|
-
rescue Exception => stack
|
62
|
-
message = "There was a problem parsing the Open Api document using the oas_parser gem. The execution was aborted.\n"
|
63
|
-
message += "Visit the github for oas_parser gem for bugs and more info: https://github.com/Nexmo/oas_parser\n"
|
64
|
-
message += "Error: #{stack.message}"
|
65
|
-
puts message
|
66
|
-
@logger.fatal message
|
67
|
-
@logger.fatal stack.backtrace
|
68
|
-
exit!
|
69
|
-
end
|
70
|
-
|
71
|
-
raw = definition.raw.deep_symbolize_keys
|
72
|
-
|
73
|
-
if raw.key?(:openapi) && (raw[:openapi].to_f > 0)
|
74
|
-
raw[:swagger] = raw[:openapi]
|
75
|
-
end
|
76
|
-
if raw[:swagger].to_f < 2.0
|
77
|
-
raise "Unsupported Swagger version. Only versions >= 2.0 are valid."
|
78
|
-
end
|
79
|
-
|
80
|
-
base_host = ""
|
81
|
-
base_path = ""
|
82
|
-
|
83
|
-
base_host = raw[:host] if raw.key?(:host)
|
84
|
-
base_path = raw[:basePath] if raw.key?(:basePath)
|
85
|
-
module_name = raw[:info][:title].camel_case
|
86
|
-
module_version = "V#{raw[:info][:version].to_s.snake_case}"
|
87
|
-
|
88
|
-
output = []
|
89
|
-
output_header = []
|
90
|
-
output_header << "#" * 50
|
91
|
-
output_header << "# #{raw[:info][:title]}"
|
92
|
-
output_header << "# version: #{raw[:info][:version]}"
|
93
|
-
output_header << "# description: "
|
94
|
-
raw[:info][:description].to_s.split("\n").each do |d|
|
95
|
-
output_header << "# #{d}" unless d == ""
|
96
|
-
end
|
97
|
-
output_header << "#" * 50
|
98
|
-
|
99
|
-
output_header << "module Swagger"
|
100
|
-
output_header << "module #{module_name}"
|
101
|
-
output_header << "module #{module_version}"
|
102
|
-
output_header << "module Requests" if name_for_module == :fixed
|
103
|
-
|
104
|
-
files = {}
|
105
|
-
|
106
|
-
module_requests = ""
|
107
|
-
|
108
|
-
definition.paths.each do |path|
|
109
|
-
|
110
|
-
raw = path.raw.deep_symbolize_keys
|
111
|
-
|
112
|
-
if raw.key?(:parameters)
|
113
|
-
raw.each do |met, cont|
|
114
|
-
if met != :parameters
|
115
|
-
if raw[met].key?(:parameters)
|
116
|
-
#in case parameters for all methods in path is present
|
117
|
-
raw[met][:parameters] = raw[met][:parameters] + raw[:parameters]
|
118
|
-
else
|
119
|
-
raw[met][:parameters] = raw[:parameters]
|
120
|
-
end
|
121
|
-
end
|
122
|
-
end
|
123
|
-
raw.delete(:parameters)
|
124
|
-
end
|
125
|
-
|
126
|
-
raw.each do |met, cont|
|
127
|
-
|
128
|
-
if %w[get post put delete patch].include?(met.to_s.downcase)
|
129
|
-
params = []
|
130
|
-
params_path = []
|
131
|
-
params_query = []
|
132
|
-
params_required = []
|
133
|
-
params_data = []
|
134
|
-
description_parameters = []
|
135
|
-
data_form = []
|
136
|
-
data_required = []
|
137
|
-
#todo: add nested one.true.three to data_read_only
|
138
|
-
data_read_only = []
|
139
|
-
data_default = []
|
140
|
-
data_examples = []
|
141
|
-
data_pattern = []
|
142
|
-
responses = []
|
143
|
-
|
144
|
-
# for the case operationId is missing
|
145
|
-
cont[:operationId] = "undefined" unless cont.key?(:operationId)
|
146
|
-
|
147
|
-
if create_method_name == :path
|
148
|
-
method_name = (met.to_s + "_" + path.path.to_s).snake_case
|
149
|
-
method_name.chop! if method_name[-1] == "_"
|
150
|
-
elsif create_method_name == :operation_id
|
151
|
-
if (name_for_module == :tags or name_for_module == :tags_file) and cont.key?(:tags) and cont[:tags].is_a?(Array) and cont[:tags].size>0
|
152
|
-
metnametmp = cont[:operationId].gsub(/^#{cont[:tags].join}[\s_]*/, '')
|
153
|
-
cont[:tags].join.split(' ').each do |tag|
|
154
|
-
metnametmp.gsub!(/^#{tag}[\s_]*/i, '')
|
155
|
-
end
|
156
|
-
metnametmp = met if metnametmp == ''
|
157
|
-
else
|
158
|
-
metnametmp = cont[:operationId]
|
159
|
-
end
|
160
|
-
method_name = metnametmp.to_s.snake_case
|
161
|
-
else
|
162
|
-
if (name_for_module == :tags or name_for_module == :tags_file) and cont.key?(:tags) and cont[:tags].is_a?(Array) and cont[:tags].size>0
|
163
|
-
method_name = cont[:operationId].gsub(/^#{cont[:tags].join}[\s_]*/, '')
|
164
|
-
cont[:tags].join.split(' ').each do |tag|
|
165
|
-
method_name.gsub!(/^#{tag}[\s_]*/i, '')
|
166
|
-
end
|
167
|
-
method_name = met if method_name == ''
|
168
|
-
else
|
169
|
-
method_name = cont[:operationId]
|
170
|
-
end
|
171
|
-
end
|
172
|
-
path_txt = path.path.dup.to_s
|
173
|
-
if [:path, :path_file, :tags, :tags_file].include?(name_for_module)
|
174
|
-
old_module_requests = module_requests
|
175
|
-
if [:path, :path_file].include?(name_for_module)
|
176
|
-
# to remove version from path fex: /v1/Customer
|
177
|
-
path_requests = path_txt.gsub(/^\/v[\d\.]*\//i, "")
|
178
|
-
# to remove version from path fex: /1.0/Customer
|
179
|
-
path_requests = path_requests.gsub(/^\/[\d\.]*\//i, "")
|
180
|
-
if (path_requests == path_txt) && (path_txt.scan("/").size == 1)
|
181
|
-
# no folder in path
|
182
|
-
module_requests = "Root"
|
183
|
-
else
|
184
|
-
res_path = path_requests.scan(/(\w+)/)
|
185
|
-
module_requests = res_path[0][0].camel_case
|
186
|
-
end
|
187
|
-
else
|
188
|
-
if cont.key?(:tags) and cont[:tags].is_a?(Array) and cont[:tags].size>0
|
189
|
-
module_requests = cont[:tags].join(" ").camel_case
|
190
|
-
else
|
191
|
-
module_requests = "Undefined"
|
192
|
-
end
|
193
|
-
end
|
194
|
-
|
195
|
-
# to remove from method_name: v1_list_regions and add it to module
|
196
|
-
if /^(?<vers>v\d+)/i =~ method_name
|
197
|
-
method_name.gsub!(/^#{vers}_?/,'')
|
198
|
-
module_requests = (vers.capitalize + module_requests).camel_case unless module_requests.start_with?(vers)
|
199
|
-
end
|
200
|
-
|
201
|
-
if old_module_requests != module_requests
|
202
|
-
output << "end" unless old_module_requests == "" or name_for_module == :path_file or name_for_module == :tags_file
|
203
|
-
if name_for_module == :path or name_for_module == :tags
|
204
|
-
# to add the end for the previous module unless is the first one
|
205
|
-
output << "module #{module_requests}"
|
206
|
-
else #:path_file, :tags_file
|
207
|
-
if old_module_requests != ""
|
208
|
-
unless files.key?(old_module_requests)
|
209
|
-
files[old_module_requests] = Array.new
|
210
|
-
end
|
211
|
-
files[old_module_requests].concat(output)
|
212
|
-
output = Array.new
|
213
|
-
end
|
214
|
-
output << "module #{module_requests}" unless files.key?(module_requests) # don't add in case already existed
|
215
|
-
end
|
216
|
-
end
|
217
|
-
end
|
218
|
-
|
219
|
-
output << ""
|
220
|
-
output << "# operationId: #{cont[:operationId]}, method: #{met}"
|
221
|
-
output << "# summary: #{cont[:summary]}"
|
222
|
-
if !cont[:description].to_s.split("\n").empty?
|
223
|
-
output << "# description: "
|
224
|
-
cont[:description].to_s.split("\n").each do |d|
|
225
|
-
output << "# #{d}" unless d == ""
|
226
|
-
end
|
227
|
-
else
|
228
|
-
output << "# description: #{cont[:description]}"
|
229
|
-
end
|
230
|
-
|
231
|
-
mock_example = []
|
232
|
-
|
233
|
-
if include_responses && cont.key?(:responses) && cont[:responses].is_a?(Hash)
|
234
|
-
cont[:responses].each do |k, v|
|
235
|
-
response_example = []
|
236
|
-
response_example = get_response_examples(v)
|
237
|
-
|
238
|
-
data_pattern += get_patterns('', v[:schema]) if v.key?(:schema)
|
239
|
-
data_pattern.uniq!
|
240
|
-
v[:description] = v[:description].to_s.gsub("'", %q(\\\'))
|
241
|
-
if !response_example.empty?
|
242
|
-
responses << "'#{k}': { "
|
243
|
-
responses << "message: '#{v[:description]}', "
|
244
|
-
responses << "data: "
|
245
|
-
responses << response_example
|
246
|
-
responses << "},"
|
247
|
-
if mock_response and mock_example.size==0
|
248
|
-
mock_example << "code: '#{k}',"
|
249
|
-
mock_example << "message: '#{v[:description]}',"
|
250
|
-
mock_example << "data: "
|
251
|
-
mock_example << response_example
|
252
|
-
end
|
253
|
-
|
254
|
-
else
|
255
|
-
responses << "'#{k}': { message: '#{v[:description]}'}, "
|
256
|
-
end
|
257
|
-
end
|
258
|
-
end
|
259
|
-
# todo: for open api 3.0 add the new Link feature: https://swagger.io/docs/specification/links/
|
260
|
-
# todo: for open api 3.0 is not getting the required params in all cases
|
261
|
-
|
262
|
-
# for the case open api 3 with cont.requestBody.content.'applicatin/json'.schema
|
263
|
-
# example: petstore-expanded.yaml operationId=addPet
|
264
|
-
if cont.key?(:requestBody) and cont[:requestBody].key?(:content) and
|
265
|
-
cont[:requestBody][:content].key?(:'application/json') and cont[:requestBody][:content][:'application/json'].key?(:schema)
|
266
|
-
cont[:parameters] = [] unless cont.key?(:parameters)
|
267
|
-
cont[:parameters] << {in: 'body', schema: cont[:requestBody][:content][:'application/json'][:schema] }
|
268
|
-
end
|
269
|
-
data_examples_all_of = false
|
270
|
-
if cont.key?(:parameters) && cont[:parameters].is_a?(Array)
|
271
|
-
cont[:parameters].each do |p|
|
272
|
-
if p.keys.include?(:schema) and p[:schema].include?(:type)
|
273
|
-
type = p[:schema][:type]
|
274
|
-
elsif p.keys.include?(:type)
|
275
|
-
type = p[:type]
|
276
|
-
else
|
277
|
-
type = ""
|
278
|
-
end
|
279
|
-
if p[:in] == "path"
|
280
|
-
if create_method_name == :operationId
|
281
|
-
param_name = p[:name]
|
282
|
-
path_txt.gsub!("{#{param_name}}", "\#{#{param_name}}")
|
283
|
-
else
|
284
|
-
param_name = p[:name].to_s.snake_case
|
285
|
-
path_txt.gsub!("{#{p[:name]}}", "\#{#{param_name}}")
|
286
|
-
end
|
287
|
-
unless params_path.include?(param_name)
|
288
|
-
if create_constants
|
289
|
-
params_path << "#{param_name}: #{param_name.upcase}"
|
290
|
-
required_constants << param_name.upcase
|
291
|
-
else
|
292
|
-
params_path << param_name
|
293
|
-
end
|
294
|
-
#params_required << param_name if p[:required].to_s=="true"
|
295
|
-
description_parameters << "# #{p[:name]}: (#{type}) #{"(required)" if p[:required].to_s=="true"} #{p[:description].split("\n").join("\n#\t\t\t")}"
|
296
|
-
end
|
297
|
-
elsif p[:in] == "query"
|
298
|
-
params_query << p[:name]
|
299
|
-
params_required << p[:name] if p[:required].to_s=="true"
|
300
|
-
description_parameters << "# #{p[:name]}: (#{type}) #{"(required)" if p[:required].to_s=="true"} #{p[:description].split("\n").join("\n#\t\t\t")}"
|
301
|
-
elsif p[:in] == "formData" or p[:in] == "formdata"
|
302
|
-
#todo: take in consideration: default, required
|
303
|
-
#todo: see if we should add the required as params to the method and not required as options
|
304
|
-
#todo: set on data the required fields with the values from args
|
305
|
-
|
306
|
-
description_parameters << "# #{p[:name]}: (#{p[:type]}) #{p[:description].split("\n").join("\n#\t\t\t")}"
|
307
|
-
|
308
|
-
case p[:type]
|
309
|
-
when /^string$/i
|
310
|
-
data_form << "#{p[:name]}: ''"
|
311
|
-
when /^boolean$/i
|
312
|
-
data_form << "#{p[:name]}: true"
|
313
|
-
when /^number$/i
|
314
|
-
data_form << "#{p[:name]}: 0"
|
315
|
-
when /^integer$/i
|
316
|
-
data_form << "#{p[:name]}: 0"
|
317
|
-
else
|
318
|
-
puts "! on formData not supported type #{p[:type]}"
|
319
|
-
end
|
320
|
-
|
321
|
-
elsif p[:in] == "body"
|
322
|
-
if p.keys.include?(:schema)
|
323
|
-
if p[:schema].key?(:oneOf)
|
324
|
-
bodies = p[:schema][:oneOf]
|
325
|
-
elsif p[:schema].key?(:anyOf)
|
326
|
-
bodies = p[:schema][:anyOf]
|
327
|
-
elsif p[:schema].key?(:allOf)
|
328
|
-
data_examples_all_of, bodies = get_data_all_of_bodies(p)
|
329
|
-
data_examples_all_of = true # because we are on data and allOf already
|
330
|
-
else
|
331
|
-
bodies = [p[:schema]]
|
332
|
-
end
|
333
|
-
|
334
|
-
params_data = []
|
335
|
-
|
336
|
-
bodies.each do |body|
|
337
|
-
if body.keys.include?(:required) and body[:required].size > 0
|
338
|
-
data_required += get_required_data(body)
|
339
|
-
output << "# required data: #{data_required.inspect}"
|
340
|
-
end
|
341
|
-
|
342
|
-
if body.keys.include?(:properties) and body[:properties].size > 0
|
343
|
-
|
344
|
-
body[:properties].each { |dpk, dpv|
|
345
|
-
if dpv.keys.include?(:example)
|
346
|
-
if dpv[:example].is_a?(Array) and dpv.type != 'array'
|
347
|
-
valv = dpv[:example][0]
|
348
|
-
else
|
349
|
-
valv = dpv[:example].to_s
|
350
|
-
end
|
351
|
-
else
|
352
|
-
if dpv.type == "object"
|
353
|
-
if dpv.key?(:properties)
|
354
|
-
valv = get_examples(dpv[:properties], :key_value, true).join("\n")
|
355
|
-
else
|
356
|
-
valv = "{}"
|
357
|
-
end
|
358
|
-
elsif dpv.type == 'array'
|
359
|
-
if dpv.key?(:items)
|
360
|
-
valv = get_examples({dpk => dpv}, :only_value)
|
361
|
-
valv = valv.join("\n")
|
362
|
-
else
|
363
|
-
valv = "[]"
|
364
|
-
end
|
365
|
-
else
|
366
|
-
valv = ""
|
367
|
-
end
|
368
|
-
end
|
369
|
-
|
370
|
-
if dpv.keys.include?(:description)
|
371
|
-
description_parameters << "# #{dpk}: (#{dpv[:type]}) #{dpv[:description].split("\n").join("\n#\t\t\t")}"
|
372
|
-
end
|
373
|
-
|
374
|
-
data_pattern += get_patterns(dpk,dpv)
|
375
|
-
data_pattern.uniq!
|
376
|
-
dpkeys = []
|
377
|
-
data_pattern.reject! do |dp|
|
378
|
-
dpkey = dp.scan(/^'[\w\.]+'/)
|
379
|
-
|
380
|
-
if dpkeys.include?(dpkey)
|
381
|
-
true
|
382
|
-
else
|
383
|
-
dpkeys << dpkey
|
384
|
-
false
|
385
|
-
end
|
386
|
-
end
|
387
|
-
|
388
|
-
if dpv.keys.include?(:readOnly) and dpv[:readOnly] == true
|
389
|
-
data_read_only << dpk
|
390
|
-
end
|
391
|
-
if dpv.keys.include?(:default)
|
392
|
-
if dpv[:default].nil?
|
393
|
-
data_default << "#{dpk}: nil"
|
394
|
-
elsif dpv.type != "string"
|
395
|
-
data_default << "#{dpk}: #{dpv[:default]}"
|
396
|
-
else
|
397
|
-
data_default << "#{dpk}: '#{dpv[:default]}'"
|
398
|
-
end
|
399
|
-
end
|
400
|
-
|
401
|
-
#todo: consider check default and insert it
|
402
|
-
#todo: remove array from here and add the option to get_examples for the case thisisthekey: ['xxxx']
|
403
|
-
if dpv.key?(:type) and dpv[:type]!='array'
|
404
|
-
params_data << get_examples({dpk => dpv}, :only_value, true).join
|
405
|
-
params_data[-1].chop!.chop! if params_data[-1].to_s[-2..-1]==', '
|
406
|
-
params_data.pop if params_data[-1].match?(/^\s*$/im)
|
407
|
-
else
|
408
|
-
if valv.to_s == ""
|
409
|
-
valv = '""'
|
410
|
-
elsif valv.include?('"')
|
411
|
-
valv.gsub!('"',"'")
|
412
|
-
end
|
413
|
-
params_data << "#{dpk}: #{valv}"
|
414
|
-
end
|
415
|
-
}
|
416
|
-
if params_data.size > 0
|
417
|
-
if data_examples_all_of == true and data_examples.size > 0
|
418
|
-
data_examples[0]+=params_data
|
419
|
-
else
|
420
|
-
data_examples << params_data
|
421
|
-
end
|
422
|
-
params_data = []
|
423
|
-
end
|
424
|
-
end
|
425
|
-
end
|
426
|
-
end
|
427
|
-
elsif p[:in]=="header"
|
428
|
-
#todo: see how we can treat those cases
|
429
|
-
else
|
430
|
-
puts "! not imported data with :in:#{p[:in]} => #{p.inspect}"
|
431
|
-
end
|
432
|
-
end
|
433
|
-
|
434
|
-
params = params_path
|
435
|
-
|
436
|
-
unless params_query.empty?
|
437
|
-
path_txt += "?"
|
438
|
-
params_required.each do |pr|
|
439
|
-
if create_constants
|
440
|
-
if params_query.include?(pr)
|
441
|
-
if create_method_name == :operationId
|
442
|
-
path_txt += "#{pr}=\#{#{pr}}&"
|
443
|
-
params << "#{pr}: #{pr.upcase}"
|
444
|
-
required_constants << pr.upcase
|
445
|
-
else
|
446
|
-
path_txt += "#{pr}=\#{#{pr.to_s.snake_case}}&"
|
447
|
-
params << "#{pr.to_s.snake_case}: #{pr.to_s.snake_case.upcase}"
|
448
|
-
required_constants << pr.to_s.snake_case.upcase
|
449
|
-
end
|
450
|
-
end
|
451
|
-
else
|
452
|
-
if params_query.include?(pr)
|
453
|
-
if create_method_name == :operationId
|
454
|
-
path_txt += "#{pr}=\#{#{pr}}&"
|
455
|
-
params << "#{pr}"
|
456
|
-
else
|
457
|
-
path_txt += "#{pr}=\#{#{pr.to_s.snake_case}}&"
|
458
|
-
params << "#{pr.to_s.snake_case}"
|
459
|
-
end
|
460
|
-
end
|
461
|
-
end
|
462
|
-
end
|
463
|
-
params_query.each do |pq|
|
464
|
-
unless params_required.include?(pq)
|
465
|
-
if create_method_name == :operationId
|
466
|
-
path_txt += "#{pq}=\#{#{pq}}&"
|
467
|
-
params << "#{pq}: ''"
|
468
|
-
else
|
469
|
-
path_txt += "#{pq}=\#{#{pq.to_s.snake_case}}&"
|
470
|
-
params << "#{pq.to_s.snake_case}: ''"
|
471
|
-
end
|
472
|
-
end
|
473
|
-
end
|
474
|
-
end
|
475
|
-
|
476
|
-
end
|
477
|
-
|
478
|
-
if description_parameters.size > 0
|
479
|
-
output << "# parameters description: "
|
480
|
-
output << description_parameters
|
481
|
-
end
|
482
|
-
|
483
|
-
#for the case we still have some parameters on path that were not in 'parameters'
|
484
|
-
if path_txt.scan(/[^#]{\w+}/).size > 0
|
485
|
-
paramst = []
|
486
|
-
prms = path_txt.scan(/[^#]{(\w+)}/)
|
487
|
-
prms.each do |p|
|
488
|
-
#if create_constants
|
489
|
-
# paramst<<"#{p[0].to_s.snake_case}: #{p[0].to_s.snake_case.upcase}"
|
490
|
-
# required_constants << p[0].to_s.snake_case.upcase
|
491
|
-
#else
|
492
|
-
paramst<<p[0].to_s.snake_case
|
493
|
-
#end
|
494
|
-
path_txt.gsub!("{#{p[0]}}", "\#{#{p[0].to_s.snake_case}}")
|
495
|
-
end
|
496
|
-
paramst.concat params
|
497
|
-
params = paramst
|
498
|
-
end
|
499
|
-
params.uniq!
|
500
|
-
output << "def self.#{method_name} (#{params.join(", ")})"
|
501
|
-
|
502
|
-
output << "{"
|
503
|
-
|
504
|
-
output << "name: \"#{module_requests}.#{method_name}\","
|
505
|
-
|
506
|
-
output << "path: \"#{base_path}#{path_txt}\","
|
507
|
-
|
508
|
-
output << "method: :#{met}," if met.to_s != ""
|
509
|
-
|
510
|
-
unless data_required.empty?
|
511
|
-
output << "data_required: ["
|
512
|
-
output << ":'#{data_required.uniq.join("', :'")}'"
|
513
|
-
output << "],"
|
514
|
-
end
|
515
|
-
unless data_read_only.empty?
|
516
|
-
output << "data_read_only: ["
|
517
|
-
output << ":'#{data_read_only.uniq.join("', :'")}'"
|
518
|
-
output << "],"
|
519
|
-
end
|
520
|
-
unless data_default.empty?
|
521
|
-
output << "data_default: {"
|
522
|
-
output << data_default.join(", \n")
|
523
|
-
output << "},"
|
524
|
-
end
|
525
|
-
|
526
|
-
unless data_pattern.empty?
|
527
|
-
output << "data_pattern: {"
|
528
|
-
output << data_pattern.uniq.join(", \n")
|
529
|
-
output << "},"
|
530
|
-
end
|
531
|
-
|
532
|
-
unless data_form.empty?
|
533
|
-
data_examples << data_form
|
534
|
-
end
|
535
|
-
|
536
|
-
unless data_examples.empty?
|
537
|
-
unless data_required.empty?
|
538
|
-
reqdata = []
|
539
|
-
begin
|
540
|
-
data_ex = eval("{#{data_examples[0].join(", ")}}")
|
541
|
-
rescue
|
542
|
-
data_ex = {}
|
543
|
-
end
|
544
|
-
if (data_required.grep(/\./)).empty?
|
545
|
-
reqdata = filter(data_ex, data_required) #not nested
|
546
|
-
else
|
547
|
-
reqdata = filter(data_ex, data_required, true) #nested
|
548
|
-
end
|
549
|
-
unless reqdata.empty?
|
550
|
-
phsd = pretty_hash_symbolized(reqdata)
|
551
|
-
phsd[0]="data: {"
|
552
|
-
output += phsd
|
553
|
-
end
|
554
|
-
end
|
555
|
-
unless data_read_only.empty? or !data_required.empty?
|
556
|
-
reqdata = []
|
557
|
-
#remove read only fields from :data
|
558
|
-
data_examples[0].each do |edata|
|
559
|
-
read_only = false
|
560
|
-
data_read_only.each do |rdata|
|
561
|
-
if edata.scan(/^#{rdata}:/).size>0
|
562
|
-
read_only = true
|
563
|
-
break
|
564
|
-
elsif edata.scan(/:/).size==0
|
565
|
-
break
|
566
|
-
end
|
567
|
-
end
|
568
|
-
reqdata << edata unless read_only
|
569
|
-
end
|
570
|
-
unless reqdata.empty?
|
571
|
-
output << "data: {"
|
572
|
-
output << reqdata.join(", \n")
|
573
|
-
output << "},"
|
574
|
-
end
|
575
|
-
end
|
576
|
-
|
577
|
-
output << "data_examples: ["
|
578
|
-
data_examples.each do |data|
|
579
|
-
output << "{"
|
580
|
-
output << data.join(", \n")
|
581
|
-
output << "}, "
|
582
|
-
end
|
583
|
-
output << "],"
|
584
|
-
end
|
585
|
-
|
586
|
-
unless mock_example.empty?
|
587
|
-
output << "mock_response: {"
|
588
|
-
output << mock_example
|
589
|
-
output << "},"
|
590
|
-
end
|
591
|
-
|
592
|
-
unless responses.empty?
|
593
|
-
output << "responses: {"
|
594
|
-
output << responses
|
595
|
-
output << "},"
|
596
|
-
end
|
597
|
-
|
598
|
-
output << "}"
|
599
|
-
output << "end"
|
600
|
-
else
|
601
|
-
@logger.warn "Not imported method: #{met} for path: #{path.path} since it is not supported by OpenApiImport"
|
602
|
-
end
|
603
|
-
end
|
604
|
-
end
|
605
|
-
output_footer = []
|
606
|
-
|
607
|
-
output_footer << "end" unless (module_requests == "") && ([:path, :path_file, :tags, :tags_file].include?(name_for_module))
|
608
|
-
output_footer << "end" << "end" << "end"
|
609
|
-
|
610
|
-
if files.size == 0
|
611
|
-
output = output_header + output + output_footer
|
612
|
-
output_txt = output.join("\n")
|
613
|
-
requests_file_path = file_to_convert + ".rb"
|
614
|
-
File.open(requests_file_path, "w") { |file| file.write(output_txt) }
|
615
|
-
res_rufo = `rufo #{requests_file_path}`
|
616
|
-
message = "** Requests file: #{swagger_file}.rb that contains the code of the requests after importing the Swagger file"
|
617
|
-
puts message unless silent
|
618
|
-
@logger.info message
|
619
|
-
@logger.error " Error formating with rufo" unless res_rufo.to_s.match?(/\AFormat:.+$\s*\z/)
|
620
|
-
@logger.error " Syntax Error: #{`ruby -c #{requests_file_path}`}" unless `ruby -c #{requests_file_path}`.include?("Syntax OK")
|
621
|
-
else
|
622
|
-
unless files.key?(module_requests)
|
623
|
-
files[module_requests] = Array.new
|
624
|
-
end
|
625
|
-
files[module_requests].concat(output) #for the last one
|
626
|
-
|
627
|
-
requires_txt = ""
|
628
|
-
message = "** Generated files that contain the code of the requests after importing the Swagger file: "
|
629
|
-
puts message unless silent
|
630
|
-
@logger.info message
|
631
|
-
files.each do |mod, out_mod|
|
632
|
-
output = output_header + out_mod + output_footer
|
633
|
-
output_txt = output.join("\n")
|
634
|
-
requests_file_path = file_to_convert + "_" + mod + ".rb"
|
635
|
-
requires_txt += "require_relative '#{File.basename(swagger_file)}_#{mod}'\n"
|
636
|
-
File.open(requests_file_path, "w") { |file| file.write(output_txt) }
|
637
|
-
res_rufo = `rufo #{requests_file_path}`
|
638
|
-
message = " - #{requests_file_path}"
|
639
|
-
puts message unless silent
|
640
|
-
@logger.info message
|
641
|
-
@logger.error " Error formating with rufo" unless res_rufo.to_s.match?(/\AFormat:.+$\s*\z/)
|
642
|
-
@logger.error " Syntax Error: #{`ruby -c #{requests_file_path}`}" unless `ruby -c #{requests_file_path}`.include?("Syntax OK")
|
643
|
-
end
|
644
|
-
|
645
|
-
requests_file_path = file_to_convert + ".rb"
|
646
|
-
if required_constants.size > 0
|
647
|
-
rconsts = "# Required constants\n"
|
648
|
-
required_constants.uniq!
|
649
|
-
required_constants.each do |rq|
|
650
|
-
rconsts += "#{rq} ||= ENV['#{rq}'] ||=''\n"
|
651
|
-
end
|
652
|
-
rconsts += "\n\n"
|
653
|
-
else
|
654
|
-
rconsts = ''
|
655
|
-
end
|
656
|
-
|
657
|
-
File.open(requests_file_path, "w") { |file| file.write(rconsts + requires_txt) }
|
658
|
-
res_rufo = `rufo #{requests_file_path}`
|
659
|
-
message = "** File that contains all the requires for all Request files: \n"
|
660
|
-
message += " - #{requests_file_path} "
|
661
|
-
puts message unless silent
|
662
|
-
@logger.info message
|
663
|
-
@logger.error " Error formating with rufo" unless res_rufo.to_s.match?(/\AFormat:.+$\s*\z/)
|
664
|
-
@logger.error " Syntax Error: #{`ruby -c #{requests_file_path}`}" unless `ruby -c #{requests_file_path}`.include?("Syntax OK")
|
665
|
-
end
|
666
|
-
|
667
|
-
begin
|
668
|
-
res = eval(output_txt)
|
669
|
-
rescue Exception => stack
|
670
|
-
import_errors += "\n\nResult evaluating the ruby file generated: \n" + stack.to_s
|
671
|
-
end
|
672
|
-
|
673
|
-
if import_errors.to_s != ""
|
674
|
-
File.open(file_errors, "w") { |file| file.write(import_errors) }
|
675
|
-
message = "* It seems there was a problem importing the Swagger file #{file_to_convert}\n"
|
676
|
-
message += "* Take a look at the detected errors at #{file_errors}\n"
|
677
|
-
warn message
|
678
|
-
@logger.fatal message
|
679
|
-
return false
|
680
|
-
else
|
681
|
-
return true
|
682
|
-
end
|
683
|
-
rescue StandardError => stack
|
684
|
-
puts stack.message
|
685
|
-
@logger.fatal stack.message
|
686
|
-
@logger.fatal stack.backtrace
|
687
|
-
puts stack.backtrace
|
688
|
-
end
|
689
|
-
end
|
690
|
-
|
691
|
-
class << self
|
692
|
-
# Retrieve the examples from the properties hash
|
693
|
-
private def get_examples(properties, type=:key_value, remove_readonly=false)
|
694
|
-
#todo: consider using this method also to get data examples
|
695
|
-
example = []
|
696
|
-
example << "{" unless properties.empty? or type==:only_value
|
697
|
-
properties.each do |prop, val|
|
698
|
-
unless remove_readonly and val.key?(:readOnly) and val[:readOnly]==true
|
699
|
-
if val.key?(:properties) and !val.key?(:example) and !val.key?(:type)
|
700
|
-
val[:type]='object'
|
701
|
-
end
|
702
|
-
if val.key?(:items) and !val.key?(:example) and !val.key?(:type)
|
703
|
-
val[:type]='array'
|
704
|
-
end
|
705
|
-
if val.key?(:example)
|
706
|
-
if val[:example].is_a?(Array) and val.key?(:type) and val[:type]=='string'
|
707
|
-
example << " #{prop.to_sym}: \"#{val[:example][0]}\", " # only the first example
|
708
|
-
else
|
709
|
-
if val[:example].is_a?(String)
|
710
|
-
val[:example].gsub!('"', "'")
|
711
|
-
example << " #{prop.to_sym}: \"#{val[:example]}\", "
|
712
|
-
elsif val[:example].is_a?(Time)
|
713
|
-
example << " #{prop.to_sym}: \"#{val[:example]}\", "
|
714
|
-
else
|
715
|
-
example << " #{prop.to_sym}: #{val[:example]}, "
|
716
|
-
end
|
717
|
-
end
|
718
|
-
elsif val.key?(:type)
|
719
|
-
format = val[:format]
|
720
|
-
format = val[:type] if format.to_s == ""
|
721
|
-
case val[:type].downcase
|
722
|
-
when "string"
|
723
|
-
example << " #{prop.to_sym}: \"#{format}\", "
|
724
|
-
when "integer", "number"
|
725
|
-
example << " #{prop.to_sym}: 0, "
|
726
|
-
when "boolean"
|
727
|
-
example << " #{prop.to_sym}: true, "
|
728
|
-
when "array"
|
729
|
-
if val.key?(:items) and val[:items].size==1 and val[:items].is_a?(Hash) and val[:items].key?(:type)
|
730
|
-
val[:items][:enum]=[val[:items][:type]]
|
731
|
-
end
|
732
|
-
|
733
|
-
if val.key?(:items) and val[:items].key?(:enum)
|
734
|
-
#before we were getting in all these cases a random value from the enum, now we are getting the first position by default
|
735
|
-
#the reason is to avoid confusion later in case we want to compare two swaggers and verify the changes
|
736
|
-
if type==:only_value
|
737
|
-
if val[:items][:enum][0].is_a?(String)
|
738
|
-
example << " [\"" + val[:items][:enum][0] + "\"] "
|
739
|
-
else
|
740
|
-
example << " [" + val[:items][:enum][0] + "] "
|
741
|
-
end
|
742
|
-
else
|
743
|
-
if val[:items][:enum][0].is_a?(String)
|
744
|
-
example << " #{prop.to_sym}: [\"" + val[:items][:enum][0] + "\"], "
|
745
|
-
else
|
746
|
-
example << " #{prop.to_sym}: [" + val[:items][:enum][0] + "], "
|
747
|
-
end
|
748
|
-
end
|
749
|
-
else
|
750
|
-
#todo: differ between response examples and data examples
|
751
|
-
if type == :only_value
|
752
|
-
example << get_response_examples({schema: val}, remove_readonly).join("\n")
|
753
|
-
else
|
754
|
-
example << " #{prop.to_sym}: " + get_response_examples({schema: val}, remove_readonly).join("\n") + ", "
|
755
|
-
end
|
756
|
-
end
|
757
|
-
when "object"
|
758
|
-
#todo: differ between response examples and data examples
|
759
|
-
res_ex = get_response_examples({schema: val}, remove_readonly)
|
760
|
-
if res_ex.size == 0
|
761
|
-
res_ex = "{ }"
|
762
|
-
else
|
763
|
-
res_ex = res_ex.join("\n")
|
764
|
-
end
|
765
|
-
example << " #{prop.to_sym}: " + res_ex + ", "
|
766
|
-
else
|
767
|
-
example << " #{prop.to_sym}: \"#{format}\", "
|
768
|
-
end
|
769
|
-
end
|
770
|
-
end
|
771
|
-
end
|
772
|
-
example << "}" unless properties.empty? or type==:only_value
|
773
|
-
example
|
774
|
-
end
|
775
|
-
|
776
|
-
# Retrieve the response examples from the response hash.
# Handles Swagger 2.0 (:examples => :'application/json') as well as
# OpenAPI 3.0 (:content => :'application/json' => :schema / :examples).
# remove_readonly is forwarded to get_examples to drop readOnly properties.
# Returns an Array of strings that, concatenated, form the example literal.
private def get_response_examples(v, remove_readonly = false)
  # TODO: take in consideration the case allOf, oneOf... schema.items.allOf[0].properties schema.items.allOf[1].properties
  # example on https://github.com/OAI/OpenAPI-Specification/blob/master/examples/v2.0/yaml/petstore-expanded.yaml
  v = v.dup
  result = []

  # Converts an example object to its text form; Hash inspect output
  # (":key=>val") is rewritten in place to "key: val" lines.
  to_text = lambda do |obj|
    txt = obj.to_s
    txt.gsub!(/:(\w+)=>/, "\n\\1: ") if obj.is_a?(Hash)
    txt
  end

  # for open api 3.0 with responses schema inside content
  if v.key?(:content) && v[:content].is_a?(Hash) && v[:content].key?(:'application/json') &&
     v[:content][:'application/json'].key?(:schema)
    v = v[:content][:'application/json'].dup
  end

  if v.key?(:examples) && v[:examples].is_a?(Hash) && v[:examples].key?(:'application/json')
    sample = v[:examples][:'application/json']
    case sample
    when String, Hash
      result << to_text.call(sample)
    when Array
      result << "["
      sample.each { |ex| result << (to_text.call(ex) + ", ") }
      result << "]"
    end
  # for open api 3.0. examples on responses, for example: api-with-examples.yaml
  elsif v.key?(:content) && v[:content].is_a?(Hash) && v[:content].key?(:'application/json') &&
        v[:content][:'application/json'].key?(:examples)
    v[:content][:'application/json'][:examples].each do |_tk, tv|
      # todo: for the moment we only take in consideration the first example of response.
      # we need to decide how to manage to do it correctly
      tresp = tv.key?(:value) ? tv[:value] : ""
      case tresp
      when String, Hash
        result << to_text.call(tresp)
      when Array
        result << "["
        tresp.each { |ex| result << (to_text.call(ex) + ", ") }
        result << "]"
      end
      break # only the first one it is considered
    end
  elsif v.key?(:schema) && v[:schema].is_a?(Hash) &&
        (v[:schema].key?(:properties) ||
         (v[:schema].key?(:items) && v[:schema][:items].key?(:properties)) ||
         (v[:schema].key?(:items) && v[:schema][:items].key?(:allOf)) ||
         v[:schema].key?(:allOf))
    properties = {}
    if v[:schema].key?(:properties)
      properties = v[:schema][:properties]
    elsif v[:schema].key?(:allOf)
      v[:schema][:allOf].each do |pr|
        properties.merge!(pr[:properties]) if pr.key?(:properties)
      end
    elsif v[:schema][:items].key?(:properties)
      properties = v[:schema][:items][:properties]
      result << "["
    elsif v[:schema][:items].key?(:allOf)
      v[:schema][:items][:allOf].each do |pr|
        properties.merge!(pr[:properties]) if pr.key?(:properties)
      end
      result << "["
    end

    result += get_examples(properties, :key_value, remove_readonly) unless properties.empty?

    unless result.empty?
      # only the array/items variants opened a "[" above that needs closing
      result << "]" unless v[:schema].key?(:properties) || v[:schema].key?(:allOf)
    end
  elsif v.key?(:schema) && v[:schema].key?(:items) && v[:schema][:items].key?(:type)
    # for the case only type supplied but nothing else for the array
    result << "[\"#{v[:schema][:items][:type]}\"]"
  end

  # (@type Google) for the case in example the key is something like: @type:
  result.each do |rs|
    rs.gsub!(/@(\w+):/, '\'@\1\':') if rs.match?(/^\s*@\w+:/)
  end
  result
end
|
879
|
-
|
880
|
-
|
881
|
-
# Flattens the allOf composition of a body definition.
# p may be an Array of schemas, a Hash carrying :schema => :allOf, or a
# single schema Hash. Returns a two-element Array [uses_all_of, bodies]:
# a flag telling whether any allOf was expanded plus the flat schema list.
private def get_data_all_of_bodies(p)
  candidates =
    if p.is_a?(Array)
      p
    elsif p.key?(:schema) && p[:schema].key?(:allOf)
      p[:schema][:allOf]
    else
      [p]
    end
  uses_all_of = false
  flat = candidates.each_with_object([]) do |item, acc|
    if item.is_a?(Hash) && item.key?(:allOf)
      uses_all_of = true
      acc.concat(get_data_all_of_bodies(item[:allOf])[1]) # recurse into nested allOf
    else
      acc << item
    end
  end
  [uses_all_of, flat]
end
|
902
|
-
|
903
|
-
# Get required data.
# Collects the required property names of a body schema as symbols; required
# properties nested inside another property are returned in dotted form
# (:"parent.child").
# @param body [Hash] schema hash possibly holding :required and :properties
# @return [Array<Symbol>]
private def get_required_data(body)
  data_required = []
  if body.key?(:required) && body[:required].size > 0
    data_required = body[:required].map(&:to_sym)
  end
  # Iterate over a snapshot: dotted keys are appended to data_required below
  # and must not be re-visited while looping (mutating while iterating).
  data_required.dup.each do |key|
    next unless body.key?(:properties) && body[:properties][key].is_a?(Hash) &&
                body[:properties][key].key?(:required) && body[:properties][key][:required].size > 0
    get_required_data(body[:properties][key]).each do |nested|
      data_required.push(:"#{key}.#{nested}")
    end
  end
  data_required
end
|
922
|
-
|
923
|
-
# Get patterns.
# Translates a swagger/openapi property definition into nice_hash-style
# pattern strings, e.g. "'name': /regexp/", "'len': :'1-5:LN$'", "'n': 0..10".
# @param dpk [String] dotted key name ('' for anonymous array items)
# @param dpv [Hash] property definition (:pattern, :minLength, :enum, ...)
# @return [Array<String>] unique pattern entries
private def get_patterns(dpk, dpv)
  data_pattern = []
  if dpv.key?(:pattern)
    # todo: control better the cases with back slashes
    if dpv[:pattern].include?('\\\\/')
      # for cases like this: ^[^\.\\/:*?"<>|][^\\/:*?"<>|]{0,13}[^\.\\/:*?"<>|]?$
      data_pattern << "'#{dpk}': /#{dpv[:pattern].to_s.gsub('\/','/')}/"
    elsif dpv[:pattern].include?('\\x')
      data_pattern << "'#{dpk}': /#{dpv[:pattern].to_s.gsub('\\x','\\u')}/"
    else
      data_pattern << "'#{dpk}': /#{dpv[:pattern].to_s}/"
    end
  elsif dpv.key?(:minLength) && dpv.key?(:maxLength)
    data_pattern << "'#{dpk}': :'#{dpv[:minLength]}-#{dpv[:maxLength]}:LN$'"
  elsif dpv.key?(:minLength) && !dpv.key?(:maxLength)
    data_pattern << "'#{dpk}': :'#{dpv[:minLength]}:LN$'"
  elsif !dpv.key?(:minLength) && dpv.key?(:maxLength)
    data_pattern << "'#{dpk}': :'0-#{dpv[:maxLength]}:LN$'"
  elsif dpv.key?(:minimum) && dpv.key?(:maximum) && dpv[:type] == 'string'
    data_pattern << "'#{dpk}': :'#{dpv[:minimum]}-#{dpv[:maximum]}:LN$'"
  elsif dpv.key?(:minimum) && dpv.key?(:maximum)
    data_pattern << "'#{dpk}': #{dpv[:minimum]}..#{dpv[:maximum]}"
  elsif dpv.key?(:minimum) && !dpv.key?(:maximum)
    # Endless ranges need Ruby >= 2.6. Compare versions numerically:
    # a plain string comparison misorders versions like '2.10.0' vs '2.6.0'.
    if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.6.0')
      data_pattern << "'#{dpk}': #{dpv[:minimum]}.. "
    else
      data_pattern << "#'#{dpk}': #{dpv[:minimum]}.. # INFINITE only working on ruby>=2.6.0"
    end
  elsif !dpv.key?(:minimum) && dpv.key?(:maximum)
    data_pattern << "'#{dpk}': 0..#{dpv[:maximum]}"
  elsif dpv[:format] == 'date-time'
    data_pattern << "'#{dpk}': DateTime"
  elsif dpv[:type] == 'boolean'
    data_pattern << "'#{dpk}': Boolean"
  elsif dpv.key?(:enum)
    data_pattern << "'#{dpk}': :'#{dpv[:enum].join('|')}'"
  elsif dpv[:type] == 'array' && dpv.key?(:items) && dpv[:items].is_a?(Hash) && dpv[:items].key?(:enum) && dpv[:items][:enum].is_a?(Array)
    # {:title=>"Balala", :type=>"array", :items=>{:type=>"string", :enum=>["uno","dos"], :example=>"uno"}}
    data_pattern << "'#{dpk}': [:'#{dpv[:items][:enum].join('|')}']"
  elsif dpv[:type] == 'array' && dpv.key?(:items) && dpv[:items].is_a?(Hash) && !dpv[:items].key?(:enum) && dpv[:items].key?(:properties)
    # array of objects: recurse into each item property, prefixing with dpk
    dpv[:items][:properties].each do |dpkk, dpvv|
      data_pattern += get_patterns(dpk == '' ? "#{dpkk}" : "#{dpk}.#{dpkk}", dpvv)
    end
  elsif dpv[:type] == 'array' && dpv.key?(:items) && dpv[:items].is_a?(Hash) &&
        !dpv[:items].key?(:enum) && !dpv[:items].key?(:properties) && dpv[:items].key?(:type)
    # {:title=>"labels", :description=>"...", :type=>"array", :items=>{:type=>"string"}}
    # join[4..-1] strips the leading "'': " produced by the anonymous key
    data_pattern << "'#{dpk}': [ #{get_patterns('', dpv[:items]).join[4..-1]} ]"
  elsif dpv[:type] == 'object' && dpv.key?(:properties)
    # plain nested object: recurse into each property, prefixing with dpk
    dpv[:properties].each do |dpkk, dpvv|
      data_pattern += get_patterns(dpk == '' ? "#{dpkk}" : "#{dpk}.#{dpkk}", dpvv)
    end
  end
  data_pattern.uniq!
  data_pattern
end
|
989
|
-
|
990
|
-
# Filter hash: keeps only the supplied keys of the given hash.
# keys can be a single key or an Array; nested keys are accepted either in
# dotted form (:'uno.dos') or as hashes ({uno: :dos}).
# When nested is true the filtering is delegated to Hash#nice_filter
# (provided by the nice_hash gem).
# @return [Hash] the filtered hash
def filter(hash, keys, nested = false)
  result = {}
  keys = [keys] unless keys.is_a?(Array)
  if nested
    result = hash.nice_filter(keys)
  else
    # to be backwards compatible
    keys.each do |k|
      if k.is_a?(Symbol) && hash.key?(k)
        # a Hash value is emptied on purpose: only the key itself is kept
        result[k] = hash[k].is_a?(Hash) ? {} : hash[k]
      elsif k.is_a?(Symbol) && k.to_s.include?('.') && hash.key?((k.to_s.scan(/(\w+)\./).join).to_sym)
        # dotted form, e.g. :'uno.dos'.
        # NOTE(review): the scan/join above only resolves one nesting level
        # ('uno.dos.tres' collapses to :unodos) — confirm whether deeper
        # nesting should be handled here or via nested: true.
        kn = k.to_s.split('.')
        parent = kn.first.to_sym
        vn = kn[1].to_sym
        # ||= guards against NoMethodError when the parent key was not
        # already seeded by a previous plain-symbol entry in keys
        (result[parent] ||= {})[vn] = filter(hash[parent], vn).values[0]
      elsif k.is_a?(Hash) && hash.key?(k.keys[0])
        # nested hash form, e.g. {uno: {dos: :tres}}
        (result[k.keys[0]] ||= {})[k.values[0]] = filter(hash[k.keys[0]], k.values[0]).values[0]
      end
    end
  end
  result
end
|
1016
|
-
|
1017
|
-
# Gen pretty hash symbolized: renders a hash as an array of pretty-printed
# lines, e.g. {a: 1, b: {c: 2}} becomes
# ["{", "a: 1, ", "b: {", "c: 2, ", "},", "},"].
# Nested hashes are rendered recursively; the caller joins the lines.
private def pretty_hash_symbolized(hash)
  lines = ["{"]
  hash.each do |key, value|
    if value.is_a?(Hash)
      nested = pretty_hash_symbolized(value)
      nested[0] = "#{key}: {" # swap the bare opening brace for the keyed one
      lines.concat(nested)
    else
      lines << "#{key}: #{value.inspect}, "
    end
  end
  lines << "},"
end
|
1033
|
-
end
|
1034
|
-
end
|