yml_merger 0.9.1 → 1.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/yml_merger.rb +342 -318
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: ee8fc229ad3ece250642e1723e3897d1494bf70c
|
4
|
+
data.tar.gz: 4e7d8ee189c605791c86130f7d2562dc7ef3b03b
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 943f1609f38ae057f30539d1be50a01fac1b5c7a0608553ba2cde890760fd21daab4a6e7da035b96461a967210644093ae7ec30e7e5bf5fd53b717cdc916f547
|
7
|
+
data.tar.gz: e4b408e75ca07309ddb8a6c968f72cf65b6ca021e51a3ab38eb6960d79b90a4baaaacf25c413c9027948cf9ae8fe6313dc0ed41c026925316306cdb06559d625
|
data/lib/yml_merger.rb
CHANGED
@@ -1,318 +1,342 @@
|
|
1
|
-
require 'rubygems'
|
2
|
-
require "yaml"
|
3
|
-
require "deep_merge"
|
4
|
-
require 'fileutils'
|
5
|
-
require 'open-uri'
|
6
|
-
require 'uri'
|
7
|
-
require 'logger'
|
8
|
-
|
9
|
-
|
10
|
-
# implement of deep merge for nested hash and array of hash
|
11
|
-
class YML_Merger
|
12
|
-
attr_accessor :filestructure, :filestack, :ENTRY_YML, :search_paths
|
13
|
-
|
14
|
-
# initialize YML merge
|
15
|
-
# Params:
|
16
|
-
# - filepath: the entry file name
|
17
|
-
# - seatch_paths: rootpath to search all the needed YML files
|
18
|
-
def initialize(filename, search_paths, logger: nil)
|
19
|
-
@logger = logger
|
20
|
-
unless (logger)
|
21
|
-
@logger = Logger.new(STDOUT)
|
22
|
-
@logger.level = Logger::INFO
|
23
|
-
end
|
24
|
-
@ENTRY_YML = search_paths + '/' + filename
|
25
|
-
@search_paths = search_paths
|
26
|
-
@filestructure = Hash.new()
|
27
|
-
@filestack = Array.new()
|
28
|
-
@KEY_LIST = ['__remove__','__load__', '__common__', '__hierarchy__', '__replace__', '__add__']
|
29
|
-
end
|
30
|
-
|
31
|
-
# process the YMLs
|
32
|
-
def process
|
33
|
-
@filestructure = process_file(@ENTRY_YML)
|
34
|
-
merge_by_add(@filestructure)
|
35
|
-
merge_by_common(@filestructure)
|
36
|
-
delete_node(@filestructure,'__common__')
|
37
|
-
delete_node(@filestructure,'__load__')
|
38
|
-
#delete_node(@filestructure,'__add__')
|
39
|
-
post_process(@filestructure)
|
40
|
-
end
|
41
|
-
|
42
|
-
private
|
43
|
-
|
44
|
-
# post process nodes with strong condition
|
45
|
-
# execute post-process-lib and post-process-app
|
46
|
-
# clean the pre-process-merge node
|
47
|
-
# Params:
|
48
|
-
# - struct: the hash to be processed
|
49
|
-
def post_process(struct)
|
50
|
-
return if struct.class != Hash
|
51
|
-
merge_by_replace!(struct)
|
52
|
-
merge_by_remove!(struct)
|
53
|
-
struct.each_key do |key|
|
54
|
-
if Hash == struct[key].class
|
55
|
-
if struct.key?("mode") and struct["mode"] == "post-process-lib"
|
56
|
-
if struct[key].has_key?("attribute")
|
57
|
-
if struct[key]["attribute"] == "required"
|
58
|
-
@logger.debug "keep #{key}"
|
59
|
-
else
|
60
|
-
struct.delete(key)
|
61
|
-
@logger.debug "deletes #{key}"
|
62
|
-
next
|
63
|
-
end
|
64
|
-
else
|
65
|
-
@logger.debug "delete #{key}"
|
66
|
-
struct.delete(key)
|
67
|
-
next
|
68
|
-
end
|
69
|
-
end
|
70
|
-
if struct[key].key?("mode") and struct[key]["mode"] == "post-process-app"
|
71
|
-
if struct[key].has_key?("attribute")
|
72
|
-
if struct[key]["attribute"] == "required"
|
73
|
-
@logger.debug "keep #{key}"
|
74
|
-
else
|
75
|
-
struct.delete(key)
|
76
|
-
@logger.debug "deletes #{key}"
|
77
|
-
next
|
78
|
-
end
|
79
|
-
else
|
80
|
-
@logger.debug "delete #{key}"
|
81
|
-
struct.delete(key)
|
82
|
-
next
|
83
|
-
end
|
84
|
-
end
|
85
|
-
if struct[key].key?("mode") and struct[key]["mode"] == "pre-process-merge"
|
86
|
-
struct.delete(key)
|
87
|
-
@logger.debug "deletes #{key}"
|
88
|
-
next
|
89
|
-
end
|
90
|
-
post_process(struct[key])
|
91
|
-
end
|
92
|
-
end
|
93
|
-
end
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
|
111
|
-
if
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
|
125
|
-
|
126
|
-
|
127
|
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
134
|
-
|
135
|
-
|
136
|
-
|
137
|
-
|
138
|
-
|
139
|
-
|
140
|
-
|
141
|
-
|
142
|
-
|
143
|
-
|
144
|
-
|
145
|
-
|
146
|
-
|
147
|
-
|
148
|
-
|
149
|
-
|
150
|
-
|
151
|
-
|
152
|
-
|
153
|
-
|
154
|
-
|
155
|
-
|
156
|
-
#
|
157
|
-
#
|
158
|
-
|
159
|
-
|
160
|
-
|
161
|
-
|
162
|
-
|
163
|
-
|
164
|
-
|
165
|
-
|
166
|
-
|
167
|
-
|
168
|
-
|
169
|
-
|
170
|
-
|
171
|
-
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
|
180
|
-
|
181
|
-
|
182
|
-
|
183
|
-
|
184
|
-
|
185
|
-
|
186
|
-
|
187
|
-
|
188
|
-
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
|
194
|
-
|
195
|
-
|
196
|
-
|
197
|
-
|
198
|
-
|
199
|
-
|
200
|
-
|
201
|
-
|
202
|
-
|
203
|
-
|
204
|
-
|
205
|
-
|
206
|
-
|
207
|
-
|
208
|
-
|
209
|
-
|
210
|
-
#
|
211
|
-
|
212
|
-
|
213
|
-
|
214
|
-
|
215
|
-
|
216
|
-
|
217
|
-
|
218
|
-
|
219
|
-
|
220
|
-
|
221
|
-
|
222
|
-
|
223
|
-
|
224
|
-
|
225
|
-
|
226
|
-
|
227
|
-
|
228
|
-
|
229
|
-
|
230
|
-
|
231
|
-
|
232
|
-
|
233
|
-
|
234
|
-
|
235
|
-
|
236
|
-
|
237
|
-
|
238
|
-
#
|
239
|
-
|
240
|
-
struct
|
241
|
-
|
242
|
-
|
243
|
-
|
244
|
-
|
245
|
-
|
246
|
-
|
247
|
-
|
248
|
-
|
249
|
-
|
250
|
-
|
251
|
-
|
252
|
-
|
253
|
-
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
258
|
-
|
259
|
-
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
|
266
|
-
|
267
|
-
|
268
|
-
|
269
|
-
|
270
|
-
|
271
|
-
|
272
|
-
|
273
|
-
|
274
|
-
|
275
|
-
|
276
|
-
|
277
|
-
|
278
|
-
|
279
|
-
|
280
|
-
|
281
|
-
|
282
|
-
|
283
|
-
|
284
|
-
|
285
|
-
|
286
|
-
|
287
|
-
|
288
|
-
|
289
|
-
|
290
|
-
|
291
|
-
|
292
|
-
|
293
|
-
|
294
|
-
|
295
|
-
|
296
|
-
|
297
|
-
|
298
|
-
|
299
|
-
|
300
|
-
|
301
|
-
|
302
|
-
|
303
|
-
|
304
|
-
|
305
|
-
|
306
|
-
|
307
|
-
|
308
|
-
|
309
|
-
|
310
|
-
|
311
|
-
|
312
|
-
|
313
|
-
|
314
|
-
|
315
|
-
|
316
|
-
|
317
|
-
|
318
|
-
|
1
|
+
require 'rubygems'
|
2
|
+
require "yaml"
|
3
|
+
require "deep_merge"
|
4
|
+
require 'fileutils'
|
5
|
+
require 'open-uri'
|
6
|
+
require 'uri'
|
7
|
+
require 'logger'
|
8
|
+
|
9
|
+
|
10
|
+
# implement of deep merge for nested hash and array of hash
|
11
|
+
# Implements deep merge for nested hashes (and arrays of hashes) loaded
# from a tree of YAML files linked together by special directive keys:
#   __load__      - list of further YAML files (or URLs) to pull in
#   __common__    - subtree merged into every sibling node
#   __hierarchy__ - subtree merged into every non-directive sibling node
#   __replace__   - subtree whose keys replace (not merge into) siblings
#   __remove__    - keys/items to delete from siblings
#   __add__       - addon module names merged into "application" nodes
class YML_Merger
  attr_accessor :filestructure, :filestack, :ENTRY_YML, :search_paths

  # Initialize the YML merger.
  # Params:
  # - filename: the entry file name (joined onto search_paths)
  # - search_paths: root path used to resolve all referenced YML files
  # - logger: optional Logger; defaults to an INFO-level STDOUT logger
  def initialize(filename, search_paths, logger: nil)
    @logger = logger
    unless (logger)
      @logger = Logger.new(STDOUT)
      @logger.level = Logger::INFO
    end
    @ENTRY_YML = search_paths + '/' + filename
    @search_paths = search_paths
    @filestructure = Hash.new()
    @filestack = Array.new()
    # Directive keys that must never be treated as ordinary data nodes.
    @KEY_LIST = ['__remove__','__load__', '__common__', '__hierarchy__', '__replace__', '__add__']
  end

  # Process the YMLs: load the entry file (recursively following
  # __load__), then apply the merge directives in order and strip the
  # directive keys. Result is left in @filestructure (also returned).
  def process
    @filestructure = process_file(@ENTRY_YML)
    merge_by_add(@filestructure)
    merge_by_common(@filestructure)
    delete_node(@filestructure,'__common__')
    delete_node(@filestructure,'__load__')
    #delete_node(@filestructure,'__add__')
    post_process(@filestructure)
  end

  private

  # Post process nodes with strong condition:
  # execute post-process-lib and post-process-app modes and
  # clean the pre-process-merge node.
  # Params:
  # - struct: the hash to be processed
  def post_process(struct)
    return if struct.class != Hash
    merge_by_replace!(struct)
    merge_by_remove!(struct)
    struct.each_key do |key|
      if Hash == struct[key].class
        # NOTE(review): this branch tests struct's own "mode", while the
        # post-process-app branch below tests struct[key]["mode"] — the
        # asymmetry is preserved from the original; confirm it is intended.
        if struct.key?("mode") and struct["mode"] == "post-process-lib"
          if struct[key].has_key?("attribute")
            if struct[key]["attribute"] == "required"
              @logger.debug "keep #{key}"
            else
              # only "required" children survive post-process-lib
              struct.delete(key)
              @logger.debug "deletes #{key}"
              next
            end
          else
            @logger.debug "delete #{key}"
            struct.delete(key)
            next
          end
        end
        if struct[key].key?("mode") and struct[key]["mode"] == "post-process-app"
          if struct[key].has_key?("attribute")
            if struct[key]["attribute"] == "required"
              @logger.debug "keep #{key}"
            else
              struct.delete(key)
              @logger.debug "deletes #{key}"
              next
            end
          else
            @logger.debug "delete #{key}"
            struct.delete(key)
            next
          end
        end
        # pre-process-merge nodes have already been consumed by
        # pre_process; drop them from the final structure.
        if struct[key].key?("mode") and struct[key]["mode"] == "pre-process-merge"
          struct.delete(key)
          @logger.debug "deletes #{key}"
          next
        end
        post_process(struct[key])
      end
    end
  end

  # Resolve a __load__ path entry and process the referenced file.
  # URLs are used verbatim; relative paths are resolved under
  # @search_paths (backslashes normalized to forward slashes).
  def load_file(path)
    if path =~ URI::regexp
      structure = process_file(path)
    else
      structure = process_file(@search_paths + path.gsub("\\","/"))
    end
    return structure
  end

  # Load and process a yml file, recursively loading and processing all
  # files listed in its '__load__' section. Files already seen on
  # @filestack are skipped (returns nil), which also breaks load cycles.
  # Params:
  # - file: the path (or URL) to the yml file
  def process_file(file)
    # check if file is loaded yet
    return if @filestack.include?(file)
    # if not loaded, push to stack
    @filestack.push(file)
    @logger.info file
    # NOTE(review): Kernel#open fetches URLs only via open-uri, and URL
    # handling was removed from Kernel#open in Ruby 3.0 — URI.open is the
    # forward-compatible spelling. Confirm target Ruby version.
    content = open(file.gsub("\\","/")){|f| f.read}
    # NOTE(review): YAML::load on remotely fetched content can
    # instantiate arbitrary objects; YAML.safe_load is preferable for
    # untrusted sources.
    content = YAML::load(content)
    # YAML.load returns false for an empty document
    return if content.class == FalseClass
    # if it has file dependencies, load them
    if content['__load__'] != nil
      # load files in reversed sequence so earlier entries win the merge
      content['__load__'].reverse_each do |loadfile|
        if loadfile.class == Hash
          # hash form: { "file.yml" => [subtree names to import] }
          structure = Hash.new
          temp_structure = load_file(loadfile.keys[0])
          loadfile.each do |key, value|
            if value.class == Array
              value.each do |vt|
                t = Hash.new
                t[vt] = Hash.new
                t[vt].deep_merge(temp_structure[vt])
                structure.deep_merge(t)
              end
            end
          end
        else
          # plain form: a bare path or URL string
          if loadfile =~ URI::regexp
            structure = process_file(loadfile)
          else
            structure = process_file(@search_paths + loadfile.gsub("\\","/"))
          end
        end
        # merge a deep copy so the loaded structure is not aliased
        content = content.deep_merge(deep_copy(structure))
        pre_process(content)
        merge_by_hierarchy(content)
        #content = content.deep_merge(structure)
      end
    end
    pre_process(content)
    merge_by_hierarchy(content)
    delete_node(content, '__hierarchy__')
    return content
  end

  # Pre-process: for every node in "pre-process-merge" mode with a
  # "required" attribute, merge each of its sub-keys into the sibling
  # node of the same name. Recurses through the whole structure.
  # Params:
  # - struct: the hash to be processed
  def pre_process(struct)
    return if struct.class != Hash
    struct.each_key do |key|
      next if struct[key].class != Hash
      if struct[key].has_key?('mode') and struct[key]['mode'] == "pre-process-merge"
        if struct[key]['attribute'] == "required"
          struct[key].each_key do |subkey|
            if struct.has_key?(subkey) and struct[subkey].class == Hash
              @logger.debug "pre process #{key} -> #{subkey}"
              # deep copy so repeated merges do not alias the source
              struct[subkey] = struct[subkey].deep_merge(deep_copy(struct[key][subkey]))
              #struct[subkey] = struct[subkey].deep_merge(struct[key][subkey])
              #puts struct[subkey].to_yaml
            end
          end
        end
      end
      pre_process(struct[key])
    end
  end

  # Delete all nodes/subnodes whose key name is 'key' in 'struct',
  # recursing into every Hash-valued subnode.
  # Params:
  # - struct: the hash
  # - key: the key to delete
  def delete_node(struct, key)
    struct.each_key do |subnode|
      next if struct[subnode] == nil
      # NOTE(review): the delete is re-issued for every subnode visited
      # (harmless but redundant after the first pass) — preserved as-is.
      struct.delete(key)
      delete_node(struct[subnode],key) if Hash == struct[subnode].class
    end
  end

  # Perform merge by the "__hierarchy__" struct: merge a deep copy of
  # the __hierarchy__ subtree into every non-directive Hash sibling,
  # then recurse. The __hierarchy__ key itself is removed later by
  # delete_node in process_file.
  # Params:
  # - struct: the hash
  def merge_by_hierarchy(struct)
    return if Hash != struct.class
    if struct['__hierarchy__'] != nil
      struct.each_key do |subnode|
        # skip directive keys and any "__"-prefixed key
        next if subnode =~ /^__/ or @KEY_LIST.include?(subnode)
        next if struct[subnode].class != Hash or struct['__hierarchy__'].class != Hash
        struct[subnode] = struct[subnode].deep_merge(deep_copy(struct['__hierarchy__']))
        #struct[subnode] = struct[subnode].deep_merge(struct["__hierarchy__"])
      end
      #struct.delete('__hierarchy__')
    end
    struct.each_key do |subnode|
      merge_by_hierarchy(struct[subnode])
    end
  end

  # Perform merge by the "__common__" node: merge a deep copy of the
  # __common__ subtree into every non-directive Hash sibling, delete
  # the __common__ key, then recurse.
  # Params:
  # - struct: the hash
  def merge_by_common(struct)
    return if Hash != struct.class
    if struct['__common__'] != nil
      struct.each_key do |subnode|
        next if @KEY_LIST.include?(subnode)
        next if struct[subnode].class != Hash or struct['__common__'].class != Hash
        struct[subnode] = struct[subnode].deep_merge(deep_copy(struct['__common__']))
        #struct[subnode] = struct[subnode].deep_merge(struct['__common__'])
      end
      struct.delete('__common__')
    end
    struct.each_key do |subnode|
      merge_by_common(struct[subnode])
    end
  end

  # Hash deep merge with __add__, recursively.
  # Params:
  # - struct: the hash
  # - subnode: the subnode key the addon is merged for
  # - addon: the key that identifies the addon module
  def deep_add_merge(struct, subnode, addon)
    return if Hash != struct.class
    return if struct[addon].nil?
    if struct[addon]['__add__'].nil?
      # we do not want the addon module itself to change the status
      # NOTE(review): the actual merge line is commented out in the
      # original; the net effect is only setting the addon's attribute
      # to 'required'. Preserved as-is.
      struct[addon]['attribute'] = ""
      #struct[subnode] = struct[subnode].deep_merge(deep_copy(struct[addon]))
      struct[addon]['attribute'] = 'required'
      return
    end
    # if the addon has further addons, recurse into them first
    if struct[addon]['__add__'].count != 0
      struct[addon]['__add__'].each do |submodule|
        deep_add_merge(struct, addon, submodule)
      end
    else
      #puts "add #{addon}"
      struct[addon]['attribute'] = ""
      #struct[subnode] = struct[subnode].deep_merge(deep_copy(struct[addon]))
      struct[addon]['attribute'] = 'required'
    end
  end

  # Perform merge by the "__add__" node; only applies to nodes of
  # "application" section-type. Scans only the top level of struct.
  # Params:
  # - struct: the hash to be processed
  def merge_by_add(struct)
    # only scan the top level
    return if Hash != struct.class
    struct.each_key do |subnode|
      next if @KEY_LIST.include?(subnode)
      next if struct[subnode].nil?
      next if struct[subnode].class != Hash
      if struct[subnode].has_key?('__add__')
        struct[subnode]['__add__'].each do |addon|
          next if struct[addon].class != Hash
          begin
            # only applications consume addons; warn when the addon is
            # not declared as a component
            next if struct[subnode]['configuration']['section-type'] != "application"
            if struct[addon]['configuration']['section-type'] != "component"
              @logger.warn "WARNING #{addon} is required as component but has not a component attribute"
            end
          rescue
            # configuration/section-type missing somewhere along the chain
            @logger.warn "no full configuration/section-type with the merge_by_add with #{subnode} add #{addon}"
          end
          deep_add_merge(struct, subnode, addon)
        end
        #struct[subnode].delete('__add__')
      end
    end
  end

  # Perform merge by the "__replace__" node: for every key present in
  # both struct and __replace__, delete the existing subtree and put the
  # replacement in its place, then drop __replace__ itself.
  # Params:
  # - struct: the hash to be processed
  def merge_by_replace!(struct)
    return if Hash != struct.class
    # get the replace hash (deep-copied so struct mutation is safe)
    return if ! struct.has_key?("__replace__")
    temp = Hash.new
    temp = temp.deep_merge(deep_copy(struct["__replace__"]))
    temp.each_key do |key|
      next if ! struct.has_key?(key)
      delete_node(struct, key)
      struct[key] = temp[key]
    end
    struct.delete('__replace__')
  end

  # Perform merge by the "__remove__" node. For each key listed:
  # - nil value: delete the whole subtree of that name
  # - Array value: delete the listed items from the sibling array
  # - Hash value: delete the listed sub-keys from the sibling hash
  # Finally drops __remove__ itself.
  # Params:
  # - struct: the hash to be processed
  def merge_by_remove!(struct)
    return if Hash != struct.class
    # get the remove hash (deep-copied so struct mutation is safe)
    return if ! struct.has_key?("__remove__")
    temp = Hash.new
    temp = temp.deep_merge(deep_copy(struct["__remove__"]))
    temp.each_key do |key|
      next if ! struct.has_key?(key)
      if struct["__remove__"][key] == nil
        delete_node(struct, key)
      else
        if struct["__remove__"][key].class == Array
          arr = Array.new
          arr = deep_copy(struct["__remove__"][key])
          arr.each do |item|
            next if ! struct[key].include?(item)
            struct[key].delete(item)
          end
        elsif struct["__remove__"][key].class == Hash
          hash = Hash.new
          hash = hash.deep_merge(deep_copy(struct["__remove__"][key]))
          hash.each_key do |subkey|
            next if ! struct[key].has_key?(subkey)
            delete_node(struct[key], subkey)
          end
        end
      end
    end
    struct.delete('__remove__')
  end

  # Deep copy the object (in contrast to a shallow copy) via a
  # Marshal round-trip.
  # Params:
  # - o: the object to be copied
  def deep_copy(o)
    Marshal.load(Marshal.dump(o))
  end
end
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: yml_merger
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.
|
4
|
+
version: 1.0.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Hake Huang
|
@@ -9,7 +9,7 @@ authors:
|
|
9
9
|
autorequire:
|
10
10
|
bindir: bin
|
11
11
|
cert_chain: []
|
12
|
-
date: 2016-
|
12
|
+
date: 2016-12-20 00:00:00.000000000 Z
|
13
13
|
dependencies: []
|
14
14
|
description: yaml extension for heiarchy merge
|
15
15
|
email: hakehuang@gmail.com
|
@@ -38,7 +38,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
38
38
|
version: '0'
|
39
39
|
requirements: []
|
40
40
|
rubyforge_project:
|
41
|
-
rubygems_version: 2.6.
|
41
|
+
rubygems_version: 2.6.7
|
42
42
|
signing_key:
|
43
43
|
specification_version: 4
|
44
44
|
summary: yaml_merger
|