yml_merger 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/yml_merger.rb +310 -0
- metadata +45 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA1:
|
|
3
|
+
metadata.gz: 53bab445865ad2f7414909d6d5d06c94b077de30
|
|
4
|
+
data.tar.gz: bbde43c5fe4e037f2711e18c652a2b27823fa4dc
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: b9177063653202da7147bf8068f19ff1b3d6f063de2e0392bba467b1e3abe9796047d643abff33c1f9cea7b9d56cb2c34494d9013e7d602fa1bc88dbb3238d9a
|
|
7
|
+
data.tar.gz: 7e8936d1301086d2cb9b0396a79a4a60e8036f2ba66e6ee8b056f9f2a11b9c3decdf7b75cb0e024996d9d05cd96b1a1bf1236eafb940c6aa8431fad7c18837da
|
data/lib/yml_merger.rb
ADDED
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
require 'rubygems'
|
|
2
|
+
require "yaml"
|
|
3
|
+
require "deep_merge"
|
|
4
|
+
require 'fileutils'
|
|
5
|
+
require 'open-uri'
|
|
6
|
+
require 'uri'
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
# implement of deep merge for nested hash and array of hash
|
|
10
|
+
class YML_Merger
|
|
11
|
+
attr_accessor :filestructure, :filestack, :ENTRY_YML, :search_paths
|
|
12
|
+
|
|
13
|
+
# Set up the merger state.
# Params:
# - filename: the entry file name (relative to search_paths)
# - search_paths: root path used to locate all required YML files
def initialize(filename, search_paths)
  @search_paths = search_paths
  @ENTRY_YML = "#{search_paths}/#{filename}"
  @filestructure = {}
  @filestack = []
  # Reserved directive keys that drive the merge process.
  @KEY_LIST = %w[__remove__ __load__ __common__ __hierarchy__ __replace__ __add__]
end
|
|
24
|
+
|
|
25
|
+
# Drive the full merge pipeline for the entry YML:
# load + pre-process, apply __add__, apply __common__, strip the processed
# directive nodes, then run the post-process pruning pass.
def process
  @filestructure = process_file(@ENTRY_YML)
  merge_by_add(@filestructure)
  merge_by_common(@filestructure)
  # remove the consumed directive nodes everywhere in the tree
  %w[__common__ __load__].each { |directive| delete_node(@filestructure, directive) }
  # NOTE: '__add__' nodes are intentionally left in place (historical behavior).
  post_process(@filestructure)
end
|
|
35
|
+
|
|
36
|
+
private
|
|
37
|
+
|
|
38
|
+
# Post-process nodes with strong conditions:
# - prune children according to the "post-process-lib" / "post-process-app"
#   modes: only entries whose 'attribute' is "required" survive
# - drop any leftover "pre-process-merge" directive nodes
# Recurses into every remaining Hash child.
# Params:
# - struct: the hash to be processed
def post_process(struct)
  return if struct.class != Hash
  merge_by_replace!(struct)
  merge_by_remove!(struct)
  # Iterate over a snapshot of the keys: this loop deletes entries, and
  # mutating a Hash while iterating it directly is unspecified in Ruby.
  struct.keys.each do |key|
    next unless struct[key].class == Hash
    # NOTE(review): this branch tests the PARENT's mode, while the
    # "post-process-app" branch below tests the child's mode. The asymmetry
    # is preserved from the original code — confirm it is intentional.
    if struct.key?("mode") and struct["mode"] == "post-process-lib"
      # keep only children explicitly marked attribute: required
      unless struct[key]["attribute"] == "required"
        struct.delete(key)
        next
      end
    end
    if struct[key].key?("mode") and struct[key]["mode"] == "post-process-app"
      # keep only children explicitly marked attribute: required
      unless struct[key]["attribute"] == "required"
        struct.delete(key)
        next
      end
    end
    if struct[key].key?("mode") and struct[key]["mode"] == "pre-process-merge"
      # pre-process directives have already been applied; discard them
      struct.delete(key)
      next
    end
    post_process(struct[key])
  end
end
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
# Load one YML file and resolve its '__load__' dependencies depth-first,
# deep-merging each dependency into the current document. Returns the merged
# Hash, or nil when the file was already loaded (cycle/duplicate guard) or
# parsed to an empty/false document.
# Params:
# - file: path or URL of the yml file
def process_file(file)
  # Re-entry guard: a file already on the stack has been (or is being)
  # loaded; returning nil here is also what stops circular '__load__' chains.
  return if @filestack.include?(file)
  @filestack.push(file)
  #puts file
  # Kernel#open comes from open-uri here, so `file` may be a local path or a
  # URL. NOTE(review): open-uri on arbitrary strings is an injection risk if
  # `file` can be attacker-controlled — confirm inputs are trusted.
  content = open(file.gsub("\\","/")){|f| f.read}
  # NOTE(review): YAML::load permits arbitrary object deserialization;
  # YAML.safe_load is preferable for untrusted documents.
  content = YAML::load(content)
  # An empty document parses to false; bail out (returns nil).
  return if content.class == FalseClass
  # Resolve dependencies listed under '__load__', in reverse order.
  if content['__load__'] != nil
    content['__load__'].reverse_each do |loadfile|
      if loadfile =~ URI::regexp
        # absolute URL: load as-is
        structure = process_file(loadfile)
      else
        # relative path: resolve against the configured search root
        structure = process_file(@search_paths + loadfile.gsub("\\","/"))
      end
      # NOTE(review): `structure` is nil when the dependency was already on
      # @filestack; deep_merge(deep_copy(nil)) is then applied — confirm the
      # deep_merge gem tolerates a nil argument here.
      content = content.deep_merge(deep_copy(structure))
      pre_process(content)
      merge_by_hierarchy(content)
      #content = content.deep_merge(structure)
    end
  end
  # Final pre-process / hierarchy pass on the fully merged document, then
  # drop the consumed '__hierarchy__' nodes.
  pre_process(content)
  merge_by_hierarchy(content)
  delete_node(content, '__hierarchy__')
  return content
end
|
|
124
|
+
|
|
125
|
+
# Apply every "pre-process-merge" directive found in the tree.
# A child marked mode: "pre-process-merge" with attribute: "required" has
# each of its sub-hashes deep-merged into the same-named sibling of `struct`;
# the walk then continues into every Hash child.
# Params:
# - struct: the hash to be processed
def pre_process(struct)
  return if struct.class != Hash
  struct.each_key do |name|
    child = struct[name]
    next if child.class != Hash
    if child['mode'] == "pre-process-merge" && child['attribute'] == "required"
      child.each_key do |target|
        # only merge into siblings that exist and are hashes themselves
        next unless struct.has_key?(target) && struct[target].class == Hash
        struct[target] = struct[target].deep_merge(deep_copy(struct[name][target]))
      end
    end
    pre_process(struct[name])
  end
end
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
# Recursively delete every entry named `key` from `struct` and from all of
# its nested Hash values.
#
# Fix: the previous implementation called struct.delete(key) from inside
# struct.each_key, which relied on unspecified mutate-while-iterating
# behavior and never deleted the key when the hash was empty or when the
# only matching entry held a nil value. Deleting once, up front, fixes both;
# the non-Hash guard makes stray scalar arguments a harmless no-op.
# Params:
# - struct: the hash to prune
# - key: the key to delete at every level
def delete_node(struct, key)
  return unless struct.is_a?(Hash)
  struct.delete(key)
  struct.each_value do |child|
    delete_node(child, key) if child.is_a?(Hash)
  end
end
|
|
161
|
+
|
|
162
|
+
# perform merge by "__hierarchy__" struct
|
|
163
|
+
# Params:
|
|
164
|
+
# - struct: the hash
|
|
165
|
+
def merge_by_hierarchy(struct)
|
|
166
|
+
return if Hash != struct.class
|
|
167
|
+
if struct['__hierarchy__'] != nil
|
|
168
|
+
struct.each_key do |subnode|
|
|
169
|
+
next if subnode =~ /^__/ or @KEY_LIST.include?(subnode)
|
|
170
|
+
next if struct[subnode].class != Hash or struct['__hierarchy__'].class != Hash
|
|
171
|
+
struct[subnode] = struct[subnode].deep_merge(deep_copy(struct['__hierarchy__']))
|
|
172
|
+
#struct[subnode] = struct[subnode].deep_merge(struct["__hierarchy__"])
|
|
173
|
+
end
|
|
174
|
+
#struct.delete('__hierarchy__')
|
|
175
|
+
end
|
|
176
|
+
struct.each_key do |subnode|
|
|
177
|
+
merge_by_hierarchy(struct[subnode])
|
|
178
|
+
end
|
|
179
|
+
end
|
|
180
|
+
|
|
181
|
+
# perform merge by "__common__" node
|
|
182
|
+
# Params:
|
|
183
|
+
# - struct: the hash
|
|
184
|
+
def merge_by_common(struct)
|
|
185
|
+
return if Hash != struct.class
|
|
186
|
+
if struct['__common__'] != nil
|
|
187
|
+
struct.each_key do |subnode|
|
|
188
|
+
next if @KEY_LIST.include?(subnode)
|
|
189
|
+
next if struct[subnode].class != Hash or struct['__common__'].class != Hash
|
|
190
|
+
struct[subnode] = struct[subnode].deep_merge(deep_copy(struct['__common__']))
|
|
191
|
+
#struct[subnode] = struct[subnode].deep_merge(struct['__common__'])
|
|
192
|
+
end
|
|
193
|
+
struct.delete('__common__')
|
|
194
|
+
end
|
|
195
|
+
struct.each_key do |subnode|
|
|
196
|
+
merge_by_common(struct[subnode])
|
|
197
|
+
end
|
|
198
|
+
end
|
|
199
|
+
|
|
200
|
+
# Recursively walk an addon module and its own '__add__' dependencies.
# NOTE(review): despite the name, the actual merge into `subnode` is
# commented out below; the current net effect of each leaf is to force
# struct[addon]['attribute'] to 'required'. Confirm whether the merges are
# meant to be restored.
# Params:
# - struct: the top-level hash containing both nodes
# - subnode: the node the addon is being added to (unused while the
#   deep_merge lines stay commented out)
# - addon: the key that identifies the addon module
def deep_add_merge(struct, subnode, addon)
  return if Hash != struct.class
  if struct[addon]['__add__'].nil?
    # Leaf addon (no '__add__' section at all).
    # NOTE(review): this "" assignment is immediately overwritten two lines
    # down; it only matters if the commented-out deep_merge is restored
    # (the original intent was to merge without copying the status flag).
    struct[addon]['attribute'] = ""
    #struct[subnode] = struct[subnode].deep_merge(deep_copy(struct[addon]))
    struct[addon]['attribute'] = 'required'
    return
  end
  # Addon lists further sub-modules: recurse into each one first.
  if struct[addon]['__add__'].count != 0
    struct[addon]['__add__'].each do |submodule|
      deep_add_merge(struct, addon, submodule)
    end
  else
    # Empty '__add__' list behaves like a leaf addon.
    #puts "add #{addon}"
    struct[addon]['attribute'] = ""
    #struct[subnode] = struct[subnode].deep_merge(deep_copy(struct[addon]))
    struct[addon]['attribute'] = 'required'
  end
end
|
|
226
|
+
|
|
227
|
+
# perform merge by "__add__" node only applys to application type
|
|
228
|
+
# Params:
|
|
229
|
+
# - struct: the hash to be processed
|
|
230
|
+
def merge_by_add(struct)
|
|
231
|
+
#only scan the top level
|
|
232
|
+
return if Hash != struct.class
|
|
233
|
+
struct.each_key do |subnode|
|
|
234
|
+
next if @KEY_LIST.include?(subnode)
|
|
235
|
+
if struct[subnode]['__add__'] != nil
|
|
236
|
+
struct[subnode]['__add__'].each do |addon|
|
|
237
|
+
next if struct[addon].class != Hash
|
|
238
|
+
begin
|
|
239
|
+
next if struct[subnode]['configuration']['section-type'] != "application"
|
|
240
|
+
if struct[addon]['configuration']['section-type'] != "component"
|
|
241
|
+
puts "WARNING #{addon} is required as component but has not a component attribute"
|
|
242
|
+
end
|
|
243
|
+
rescue
|
|
244
|
+
puts "error with the merge_by_add with #{subnode} add #{addon}"
|
|
245
|
+
end
|
|
246
|
+
deep_add_merge(struct, subnode, addon)
|
|
247
|
+
end
|
|
248
|
+
#struct[subnode].delete('__add__')
|
|
249
|
+
end
|
|
250
|
+
end
|
|
251
|
+
end
|
|
252
|
+
|
|
253
|
+
# prepare merge by "__replace__" node
|
|
254
|
+
# Params:
|
|
255
|
+
# - struct: the hash to be processed
|
|
256
|
+
def merge_by_replace!(struct)
|
|
257
|
+
return if Hash != struct.class
|
|
258
|
+
#get the replace hash
|
|
259
|
+
return if ! struct.has_key?("__replace__")
|
|
260
|
+
temp = Hash.new
|
|
261
|
+
temp = temp.deep_merge(deep_copy(struct["__replace__"]))
|
|
262
|
+
temp.each_key do |key|
|
|
263
|
+
next if ! struct.has_key?(key)
|
|
264
|
+
delete_node(struct, key)
|
|
265
|
+
struct[key] = temp[key]
|
|
266
|
+
end
|
|
267
|
+
struct.delete('__replace__')
|
|
268
|
+
end
|
|
269
|
+
|
|
270
|
+
# perform merge by "__remove__" node
|
|
271
|
+
# Params:
|
|
272
|
+
# - struct: the hash to be processed
|
|
273
|
+
def merge_by_remove!(struct)
|
|
274
|
+
return if Hash != struct.class
|
|
275
|
+
#get the replace hash
|
|
276
|
+
return if ! struct.has_key?("__remove__")
|
|
277
|
+
temp = Hash.new
|
|
278
|
+
temp = temp.deep_merge(deep_copy(struct["__remove__"]))
|
|
279
|
+
temp.each_key do |key|
|
|
280
|
+
next if ! struct.has_key?(key)
|
|
281
|
+
if struct["__remove__"][key] == nil
|
|
282
|
+
delete_node(struct, key)
|
|
283
|
+
else
|
|
284
|
+
if struct["__remove__"][key].class == Array
|
|
285
|
+
arr = Array.new
|
|
286
|
+
arr = deep_copy(struct["__remove__"][key])
|
|
287
|
+
arr.each do |item|
|
|
288
|
+
next if ! struct[key].include?(item)
|
|
289
|
+
struct[key].delete(item)
|
|
290
|
+
end
|
|
291
|
+
elsif struct["__remove__"][key].class == Hash
|
|
292
|
+
hash = Hash.new
|
|
293
|
+
hash = hash.deep_merge(deep_copy(struct["__remove__"][key]))
|
|
294
|
+
hash.each_key do |subkey|
|
|
295
|
+
next if ! struct[key].has_key?(subkey)
|
|
296
|
+
delete_node(struct[key], subkey)
|
|
297
|
+
end
|
|
298
|
+
end
|
|
299
|
+
end
|
|
300
|
+
end
|
|
301
|
+
struct.delete('__remove__')
|
|
302
|
+
end
|
|
303
|
+
|
|
304
|
+
# Produce a fully independent copy of `o` (Object#dup/clone are shallow).
# Round-trips the object through Marshal, so `o` must be marshalable —
# plain hashes/arrays/strings coming out of YAML are.
# Params:
# - o: the object to be copied
def deep_copy(o)
  serialized = Marshal.dump(o)
  Marshal.load(serialized)
end
|
|
310
|
+
end
|
metadata
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: yml_merger
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.9.0
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- Hake Huang
|
|
8
|
+
- Marian Cingel
|
|
9
|
+
autorequire:
|
|
10
|
+
bindir: bin
|
|
11
|
+
cert_chain: []
|
|
12
|
+
date: 2016-11-18 00:00:00.000000000 Z
|
|
13
|
+
dependencies: []
|
|
14
|
+
description: yaml extension for hierarchy merge
|
|
15
|
+
email: hakehuang@gmail.com
|
|
16
|
+
executables: []
|
|
17
|
+
extensions: []
|
|
18
|
+
extra_rdoc_files: []
|
|
19
|
+
files:
|
|
20
|
+
- lib/yml_merger.rb
|
|
21
|
+
homepage: http://rubygems.org/gems/yml_merger
|
|
22
|
+
licenses:
|
|
23
|
+
- Apache-2.0
|
|
24
|
+
metadata: {}
|
|
25
|
+
post_install_message:
|
|
26
|
+
rdoc_options: []
|
|
27
|
+
require_paths:
|
|
28
|
+
- lib
|
|
29
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
30
|
+
requirements:
|
|
31
|
+
- - ">="
|
|
32
|
+
- !ruby/object:Gem::Version
|
|
33
|
+
version: '0'
|
|
34
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
35
|
+
requirements:
|
|
36
|
+
- - ">="
|
|
37
|
+
- !ruby/object:Gem::Version
|
|
38
|
+
version: '0'
|
|
39
|
+
requirements: []
|
|
40
|
+
rubyforge_project:
|
|
41
|
+
rubygems_version: 2.6.6
|
|
42
|
+
signing_key:
|
|
43
|
+
specification_version: 4
|
|
44
|
+
summary: yaml_merger
|
|
45
|
+
test_files: []
|