bonethug 0.0.70 → 0.0.71
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/config/deploy.rb +14 -4
- data/config/syncer.rb +38 -15
- data/lib/bonethug/cli.rb +15 -6
- data/lib/bonethug/conf.rb +279 -279
- data/lib/bonethug/installer.rb +19 -19
- data/lib/bonethug/version.rb +2 -2
- metadata +3 -3
data/config/deploy.rb
CHANGED

@@ -176,8 +176,18 @@ task :backup => :environment do
   queue! %[cd #{deploy_to}/current && export to=#{env} && bundle exec astrails-safe .bonethug/backup.rb]
 end

-desc "Syncs
-task :
+desc "Syncs application state between two remote environments"
+task :sync_remote_from => :environment do
+  queue! %[cd #{deploy_to}/current && bundle exec thug sync-local-from #{env} #{remote_env}]
+end
+
+desc "Syncs application state between two remote environments"
+task :sync_remote_to => :environment do
+  queue! %[cd #{deploy_to}/current && bundle exec thug sync-local-to #{env} #{remote_env}]
+end
+
+desc "Syncs backup with a location"
+task :sync_backup_from => :environment do
   if rsync = conf.get('backup.rsync')
     path = deploy.get('project_slug') + "_" + env + "_sync"
     ssh_pass = rsync.get('pass') ? "sshpass -p #{rsync.get('pass')}" : ""

@@ -192,8 +202,8 @@ task :sync_from => :environment do
   end
 end

-desc "Restores files from a location"
-task :
+desc "Restores files from a backup to a location"
+task :sync_backup_to => :environment do
   if rsync = conf.get('backup.rsync')
     path = deploy.get('project_slug') + "_" + env + "_sync"
     ssh_pass = rsync.get('pass') ? "sshpass -p #{rsync.get('pass')}" : ""
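The new sync_remote_* tasks depend on `env` and `remote_env` being available when mina evaluates .bonethug/deploy.rb; the CLI change further down supplies both through environment variables before invoking mina. A minimal sketch of that handoff, with placeholder environment names (presumably `to` is read back as `env` inside deploy.rb, as it is in syncer.rb):

# Illustrative only: environment names are placeholders, not values from the package.
ENV['to']         = 'staging'
ENV['remote_env'] = 'production'
exec 'bundle exec mina -f .bonethug/deploy.rb sync_remote_from --verbose'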
data/config/syncer.rb
CHANGED

@@ -8,10 +8,17 @@ require 'bonethug/conf'
 # Config
 # ---------------------------------------------------------------

-#
-
+# exec env
+exec_path = File.expand_path(File.dirname(__FILE__) + '/..')
+env = ENV['to']
+
+# load config
+conf = Bonethug::Conf.new.add(exec_path + '/config/cnf.yml')
+conf.add(exec_path + '/config/database.yml' => { root: 'dbs.default' }) if File.exist? exec_path + '/config/database.yml'
+
+# extract some data
 cnf = conf.to_hash
-envs = conf.get
+envs = conf.get('deploy.environments').to_hash

 # args
 env_local = ARGV[1]

@@ -19,21 +26,29 @@ env_remote = ARGV[2]
 type = ARGV[0]

 # validate
+
 unless env_local and env_remote
   puts 'Usage: syncer.rb ' + type + ' [local_environment] [remote_environment]'
   return
 end

-
-
+unless envs.has_key? env_local
+  puts 'could not find local environment'
+  return
+end
+
+unless envs.has_key? env_remote
+  puts 'could not find remote environment'
+  return
+end

 # build config
-
+dbs = conf.get 'dbs'
+remote_deploy = conf.node_merge 'deploy.common', 'deploy.environments.' + env_remote
 local_deploy = conf.node_merge 'deploy.common', 'deploy.environments.' + env_local
 resources = conf.get('resources','Array') || []
 log_dirs = conf.get('log_dirs','Array') || []
-remote_vhost =
-dbs = conf.get 'dbs'
+remote_vhost = remote_deploy.get('project_slug') + '_' + env_remote

 # directories we need to track
 resources += ['backups']

@@ -49,17 +64,20 @@ remote_ssh = "ssh -p #{remote_deploy.get 'port'} #{remote_deploy.get 'user'}@#{
 puts "Cloning Databases... "

 # output
-dbs.each do |index,db|
+dbs.each do |index, db|

   db_remote = db.get env_remote
   db_local = db.get env_local

-  if type == "sync-local-
-
-  elsif type == "sync-local-
-
+  if type == "sync-local-from"
+    cmd = "#{remote_ssh} \"mysqldump -u #{db_remote.get 'user'} -p#{db_remote.get 'pass'} #{db_remote.get 'name'} --verbose | bzip2 -c\" | bunzip2 -c | mysql -u #{db_local.get 'user'} -p#{db_local.get 'pass'} #{db_local.get 'name'}"
+  elsif type == "sync-local-to"
+    cmd = "mysqldump -u #{db_local.get 'user'} -p#{db_local.get 'pass'} #{db_local.get 'name'} --verbose | bzip2 -c | #{remote_ssh} \"bunzip2 -c | mysql -u #{db_remote.get 'user'} -p#{db_remote.get 'pass'} #{db_remote.get 'name'}\""
   end

+  puts cmd
+  system cmd
+
 end

 puts "Done."

@@ -67,12 +85,17 @@ puts "Syncing Files... "

 # sync the files
 (resources + log_dirs).each do |item|
+
   case type
   when "sync-local-from"
-
+    cmd = "rsync -zrav -e \"ssh -p #{remote_deploy.get('port')} -l #{remote_deploy.get('user')}\" --delete --copy-dirlinks #{remote_deploy.get('domain')}:#{remote_path}/current/#{item} #{exec_path}/#{item}"
   when "sync-local-to"
-
+    cmd = "rsync -zrav -e \"ssh -p #{remote_deploy.get('port')} -l #{remote_deploy.get('user')}\" --delete --copy-dirlinks #{exec_path}/#{item} #{remote_deploy.get('domain')}:#{remote_path}/current/#{item}"
   end
+
+  puts cmd
+  system cmd
+
 end

 puts "Done."
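With a deploy configuration filled in, the commands the syncer assembles take roughly the following shape. All values below are invented for illustration and the commands are only printed, not executed:

# sync-local-from, database step: pull the remote database into the local one over ssh
remote_ssh = "ssh -p 22 deploy@example.com"
cmd = "#{remote_ssh} \"mysqldump -u rem_user -prem_pass rem_db --verbose | bzip2 -c\" | bunzip2 -c | mysql -u loc_user -ploc_pass loc_db"
puts cmd

# sync-local-from, file step: pull a tracked directory from the remote release path
item = 'public/uploads'
cmd = "rsync -zrav -e \"ssh -p 22 -l deploy\" --delete --copy-dirlinks example.com:/srv/app/current/#{item} /home/me/app/#{item}"
puts cmd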
data/lib/bonethug/cli.rb
CHANGED

@@ -159,11 +159,13 @@ module Bonethug
       end

       when 'sync-local-to',
-           'sync-local-from'
+           'sync-local-from',
+           'sync-remote-to',
+           'sync-remote-from'

         # args
         env_local = ARGV[1]
-        env_remote = ARGV[
+        env_remote = ARGV[2]

         # validate
         unless env_local and env_remote

@@ -171,8 +173,15 @@ module Bonethug
           return
         end

-        #
-
+        # Do Sync
+        case task
+        when 'sync-local-to', 'sync-local-from'
+          exec "ruby .bonethug/syncer.rb #{task} #{env_local} #{env_remote}"
+        when 'sync-remote-to'
+          exec "export to=#{env_local} && export remote_env=#{env_remote} && bundle exec mina -f .bonethug/deploy.rb sync_remote_to --verbose"
+        when 'sync-remote-from'
+          exec "export to=#{env_local} && export remote_env=#{env_remote} && bundle exec mina -f .bonethug/deploy.rb sync_remote_from --verbose"
+        end

       when 'deploy',
            'setup',

@@ -214,9 +223,9 @@ module Bonethug

       # Synchronised backup
       when 'sync-backup-to'
-        exec "export to=#{environment} && bundle exec mina -f .bonethug/deploy.rb
+        exec "export to=#{environment} && bundle exec mina -f .bonethug/deploy.rb sync_backup_to --verbose"
       when 'sync-backup-from'
-        exec "export to=#{environment} && bundle exec mina -f .bonethug/deploy.rb
+        exec "export to=#{environment} && bundle exec mina -f .bonethug/deploy.rb sync_backup_from --verbose"

       end
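Combined with the deploy.rb tasks above, the four sync subcommands now dispatch as follows. This is a sketch assembled from the case statement above; <local> and <remote> stand for the two environment arguments:

# Illustrative summary of the dispatch added in this release; not part of the gem source.
{
  'sync-local-from'  => 'ruby .bonethug/syncer.rb sync-local-from <local> <remote>',
  'sync-local-to'    => 'ruby .bonethug/syncer.rb sync-local-to <local> <remote>',
  'sync-remote-from' => 'export to=<local> && export remote_env=<remote> && bundle exec mina -f .bonethug/deploy.rb sync_remote_from --verbose',
  'sync-remote-to'   => 'export to=<local> && export remote_env=<remote> && bundle exec mina -f .bonethug/deploy.rb sync_remote_to --verbose'
}.each { |cmd, runs| puts "thug #{cmd} <local> <remote>  ->  #{runs}" }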
data/lib/bonethug/conf.rb
CHANGED

@@ -1,280 +1,280 @@
 require 'yaml'

 module Bonethug

   class Conf

     @@default_paths = {
       File.expand_path('./config/cnf.yml') => nil,
       File.expand_path('./config/database.yml') => {root: 'dbs.default'}
     }
     @@fallbacks = {
       'name' => 'database',
       'user' => 'username',
       'pass' => 'password'
     }

     def initialize(new_hash = nil, options = {})
       raise "New hash must be of type Hash" if new_hash && new_hash.class.name != 'Hash'
       @options = {use_fallbacks: true}.merge options
       @loaded_paths = []
       @paths = {}
       @config_hashes = {}
       @compiled_hash = new_hash ? new_hash : {}
     end

     def add_path(new_path)
       if new_path.class.name == 'Hash'
         path_hash = new_path
       elsif new_path.class.name == 'String'
         path_hash = {new_path => nil}
       else
         raise "add_path only accepts stings or hashes"
       end
       @paths = @paths.merge path_hash
       self
     end

     def remove_path(path_to_remove)
       # deletes an element from a hash if its key can be found
       @paths.delete path_to_remove
       self
     end

     def compile_configuration

       # load the defaults if we haven't loaded anything
       use_defaults if @paths.empty?

       # generate output
       out = {}
       @paths.each do |path,options|

         # load the file if we haven't already
         load_path path unless @loaded_paths.include? path

         # create a base fragment
         fragment_base = {}

         # create the other nodes
         if options and options.has_key? :root
           fragment = fragment_base
           nodes = options[:root].split '.'
           nodes.each_with_index do |node,i|
             fragment[node] = i == nodes.length-1 ? @config_hashes[path] : {}
             fragment = fragment[node]
           end
         else
           fragment_base = @config_hashes[path]
         end

         # output
         out = out.merge fragment_base

       end
       @compiled_hash = out
       self

     end

     def all_paths_loaded?
       @paths.each do |path,options|
         return false unless @loaded_paths.include? path
       end
       true
     end

     def path_ok?(path)
       path && path.class.name == 'String' and File.exist?(path) and File.file?(path)
     end

     def check_path!(path)
       raise 'config file "' + path.to_s + '" does not exist' unless path_ok? path
     end

     def check_paths!
       raise 'No config files have not been set' if @paths.empty?
       @paths.each do |path,options|
         check_path! path
       end
       self
     end

     def check_paths
       @paths.each do |path,options|
         @paths.delete path unless path_ok? path
       end
       self
     end

     def load_paths
       @paths.each do |path,options|
         load_path path
       end
       self
     end

     def load_path(path)
       load_path? path
       self
     end

     def load_path?(path)
       return false unless path_ok? path
       @loaded_paths.push path
       @config_hashes[path] = YAML.load_file path
       self
     end

     def use_defaults
       @paths = @@default_paths if @paths.empty?
       load_paths
       self
     end

     def get(node = nil, force_type = nil)
       node_val = node ? get_compiled_hash_node_handle(node) : self
       case force_type
       when 'Array'
         return [] unless node_val
         return [node_val] if node_val.class.name == 'String'
         return node_val.class.name == 'Array' ? node_val.clone : node_val.to_a
       when 'Hash'
         return {} unless node_val
         if node_val.class.name == 'Array'
           return array2hash node_val
         elsif node_val.class.name == 'Hash'
           return node_val.clone
         else
           return node_val.to_hash
         end
       else
         return handle_node_value node_val
       end
     end

     def has_key?(key)
       compiled_hash.has_key? key
     end

     def get_compiled_hash_node_handle(node = nil)
       if node
         nodes = node.split('.')
         current = compiled_hash
         nodes.each do |node|
           node = @@fallbacks[node] if @options[:use_fallbacks] and !current[node] and @@fallbacks[node]
           current = (current.class.name == 'Hash' or current.class.name == 'Array') ? current[node] : nil
         end
         return current
       else
         return self.compiled_hash
       end
     end

     def handle_node_value(node)
       return node if node.class.name == 'Conf'
       node = array2hash node if node.class.name == 'Array'
       return node.class.name == 'Hash' ? self.clone.set_compiled_hash(node) : node
     end

     def array2hash(arr)
       return arr if arr.class.name == 'Hash'
       hsh = {}
       arr.each_with_index do |item,i|
         hsh[i] = item
       end
       hsh
     end

     def get_hash(node = nil)
       get(node).compiled_hash
     end

     def to_hash
       compiled_hash.clone
     end

     def to_a
       to_hash.to_a
     end

     def node_merge!(node1,node2)
       cnf1 = get_compiled_hash_node_handle node1
       cnf2 = get_compiled_hash_node_handle node2
       cnf1.merge!(cnf2) if cnf1 && cnf2
       return self
     end

     def node_merge(node1,node2)
       cnf1 = get_compiled_hash_node_handle node1
       cnf2 = get_compiled_hash_node_handle node2
       return handle_node_value cnf1 if cnf1 && !cnf2
       return handle_node_value cnf1 if cnf2 && !cnf1
       return handle_node_value cnf1.merge(cnf2) if cnf1 && cnf2
     end

     def merge(node)
-      return
-      return
+      return handle_node_value compiled_hash.merge(node.to_hash) if node
+      return self
     end

     def each
       compiled_hash.each do |k,v|
         yield k,handle_node_value(v)
       end
     end

     # Getters and Setters
     # -------------------

     def paths=(new_paths)
       raise "paths must be a hash" unless new_hash.class.name == 'Hash'
       @paths = new_paths
     end

     def paths
       @paths
     end

     def config_hashes
       @config_hashes
     end

     def compiled_hash
       compile_configuration if @compiled_hash.empty?
       @compiled_hash
     end

     def compiled_hash=(new_hash)
       raise "compiled hash must be a hash" unless new_hash.class.name == 'Hash'
       @compiled_hash = new_hash
     end

     def set_compiled_hash(new_hash)
       raise "compiled hash must be a hash" unless new_hash.class.name == 'Hash'
       @compiled_hash = new_hash
       self
     end

     # Method Aliases
     # --------------

     def a2h(arr)
       array2hash arr
     end

     def to_hash
       compiled_hash
     end

     def add(new_path)
       add_path new_path
     end

     def remove(path_to_remove)
       remove_path path_to_remove
     end

   end

 end
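The only visible functional change in this file is the newly implemented Conf#merge; the remainder of the -279/+279 rewrite shows no text differences in the registry listing. A small usage sketch with in-memory hashes (values are illustrative; the gem normally builds a Conf from YAML files):

require 'bonethug/conf'

base     = Bonethug::Conf.new({'adapter' => 'mysql', 'host' => 'localhost'})
override = Bonethug::Conf.new({'host' => 'db.example.com'})

merged = base.merge(override)        # hash result is re-wrapped in a Conf by handle_node_value
puts merged.get('host')              # => db.example.com
puts merged.get('adapter')           # => mysql
puts base.merge(nil).get('adapter')  # => mysql, a nil argument falls through to `return self`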
data/lib/bonethug/installer.rb
CHANGED

@@ -24,7 +24,7 @@ module Bonethug
     @@bonthug_gem_dir = File.expand_path(File.dirname(__FILE__) + '/../..')
     @@skel_dir = @@bonthug_gem_dir + '/skel'
     @@conf = Conf.new.add(@@skel_dir + '/skel.yml')
-    @@project_config_files = {editable: ['cnf.yml','schedule.rb'], generated: ['backup.rb','deploy.rb']}
+    @@project_config_files = {editable: ['cnf.yml','schedule.rb'], generated: ['syncer.rb','backup.rb','deploy.rb']}

     def self.install(type, target = '.')

@@ -38,7 +38,7 @@ module Bonethug

       # load the configuration
       unless @@conf.get('project_types').has_key? type.to_s
-        puts "Unsupported type: " + type.to_s
+        puts "Unsupported type: " + type.to_s
         exit
       end
       conf = @@conf.node_merge 'base', 'project_types.' + type

@@ -59,7 +59,7 @@ module Bonethug

       # build the file set
       puts 'Building ' + type + ' skeleton...'
-      FileUtils.cp_r @@skel_dir + '/base/.', tmp_dir
+      FileUtils.cp_r @@skel_dir + '/base/.', tmp_dir
       FileUtils.cp_r @@skel_dir + '/project_types/' + type + '/.', tmp_dir

       # build the manifest

@@ -68,8 +68,8 @@ module Bonethug

       # modify the manifest root
       manifest_path = tmp_dir + '/.bonethug/manifest'
-      File.open(manifest_path,'w') do |file|
-        file.puts File.read(manifest_path).gsub(/\.bonethug-tmp/,'')
+      File.open(manifest_path,'w') do |file|
+        file.puts File.read(manifest_path).gsub(/\.bonethug-tmp/,'')
       end

       # clean up the target dir

@@ -86,7 +86,7 @@ module Bonethug

       # clean up any exisitng install tmp files
       puts 'Cleaning up temporary files...'
-      FileUtils.rm_rf tmp_dir
+      FileUtils.rm_rf tmp_dir

       puts "Installation Complete"

@@ -122,7 +122,7 @@ module Bonethug
         puts 'Removal of the following' + failed.count.to_s + ' files failed'
         puts failed.join("\n")

-      end
+      end

       else
         puts 'Nothing to do'

@@ -158,25 +158,25 @@ module Bonethug
         puts "creating: " + db.get('name')
         system Bonethug::Installer.init_mysql_db_script(db, path, admin_user)

-      end
+      end

-    end
+    end


     # Reads system setup scripts
     # --------------------------
-
+
     def self.get_setup_script
       @@bonthug_gem_dir + '/scripts/ubuntu_setup.sh'
     end

     def self.get_setup_script_content
       File.read self.get_setup_script
-    end
+    end

     def self.get_setup_env_cmds
       self.parse_sh self.get_setup_script_content
-    end
+    end

     def self.parse_sh(content)
       content.split("\n").select { |line| !(line =~ /^[\s\t]+$/ || line =~ /^[\s\t]*#/ || line.strip.length == 0) }

@@ -213,7 +213,7 @@ module Bonethug
     def self.save_project_meta_data(base_dir)

       meta_data = {'config_digests' => {}}
-      @@project_config_files[:editable].each do |file|
+      @@project_config_files[:editable].each do |file|
         meta_data['config_digests']['example/' + file] = self.contents_md5(base_dir + '/config/example/' + file)
       end
       File.open(base_dir + '/.bonethug/data','w') { |file| file.puts meta_data.to_yaml }

@@ -236,20 +236,20 @@ module Bonethug
     # copy cnf.yml + schedule.rb to config/example if possible
     # copy backup.rb and deploy.rb to .bonethug if possible
     # add bonethug to gemfile if required
-    # run bundle install
+    # run bundle install

     # mode == :update
     # copy cnf.yml + schedule.rb to config if possible
-    # force copy cnf.yml + schedule.rb to config/example
+    # force copy cnf.yml + schedule.rb to config/example
     # force copy backup.rb and deploy.rb to .bonethug
     # add bonethug to gemfile if required
-    # run bundle install
+    # run bundle install

     def self.bonethugise(dir='.', mode=:init)

       target = File.expand_path(dir)
-
-      # run bundle update first
+
+      # run bundle update first
       system('bundle update bonethug') if mode == :update

       # check for the existence of required dirs and create if required

@@ -275,7 +275,7 @@ module Bonethug
           FileUtils.cp src_file, target_file unless File.exist?(target_file)
         elsif mode == :update
           FileUtils.cp src_file, example_file if type == :editable
-          FileUtils.cp src_file, target_file if type == :generated or !File.exist?(target_file)
+          FileUtils.cp src_file, target_file if type == :generated or !File.exist?(target_file)
         else
           puts "Invalid bonethugise mode"
           exit
data/lib/bonethug/version.rb
CHANGED
metadata
CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: bonethug
 version: !ruby/object:Gem::Version
-  version: 0.0.70
+  version: 0.0.71
 prerelease:
 platform: ruby
 authors:

@@ -505,7 +505,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
       segments:
       - 0
-      hash:
+      hash: -3367511845365029782
 required_rubygems_version: !ruby/object:Gem::Requirement
   none: false
   requirements:

@@ -514,7 +514,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
       segments:
       - 0
-      hash:
+      hash: -3367511845365029782
 requirements: []
 rubyforge_project:
 rubygems_version: 1.8.23