sp-job 0.2.3 → 0.3.22
- checksums.yaml +4 -4
- data/VERSION +1 -1
- data/bin/configure +40 -0
- data/lib/sp-job.rb +21 -2
- data/lib/sp/job/back_burner.rb +350 -68
- data/lib/sp/job/broker.rb +18 -16
- data/lib/sp/job/broker_http_client.rb +80 -20
- data/lib/sp/job/broker_oauth2_client.rb +12 -4
- data/lib/sp/job/common.rb +876 -62
- data/lib/sp/job/configure/configure.rb +640 -0
- data/lib/sp/job/curl_http_client.rb +100 -0
- data/lib/sp/job/easy_http_client.rb +94 -0
- data/lib/sp/job/http_client.rb +51 -0
- data/lib/sp/job/job_db_adapter.rb +38 -36
- data/lib/sp/job/jsonapi_error.rb +31 -74
- data/lib/sp/job/jwt.rb +55 -5
- data/lib/sp/job/mail_queue.rb +9 -2
- data/lib/sp/job/manticore_http_client.rb +94 -0
- data/lib/sp/job/pg_connection.rb +90 -10
- data/lib/sp/job/query_params.rb +45 -0
- data/lib/sp/job/rfc822.rb +13 -0
- data/lib/sp/job/session.rb +239 -0
- data/lib/sp/job/unique_file.rb +37 -1
- data/lib/sp/job/uploaded_image_converter.rb +27 -19
- data/lib/sp/job/worker.rb +51 -1
- data/lib/sp/job/worker_thread.rb +22 -7
- data/lib/sp/jsonapi.rb +24 -0
- data/lib/sp/jsonapi/adapters/base.rb +177 -0
- data/lib/sp/jsonapi/adapters/db.rb +26 -0
- data/lib/sp/jsonapi/adapters/raw_db.rb +96 -0
- data/lib/sp/jsonapi/exceptions.rb +54 -0
- data/lib/sp/jsonapi/model/base.rb +31 -0
- data/lib/sp/jsonapi/model/concerns/attributes.rb +91 -0
- data/lib/sp/jsonapi/model/concerns/model.rb +39 -0
- data/lib/sp/jsonapi/model/concerns/persistence.rb +212 -0
- data/lib/sp/jsonapi/model/concerns/serialization.rb +57 -0
- data/lib/sp/jsonapi/parameters.rb +54 -0
- data/lib/sp/jsonapi/service.rb +96 -0
- data/lib/tasks/configure.rake +2 -496
- data/sp-job.gemspec +3 -2
- metadata +24 -2
data/lib/sp/job/configure/configure.rb (new file):

@@ -0,0 +1,640 @@
+#
+# Copyright (c) 2011-2017 Cloudware S.A. All rights reserved.
+#
+# This file is part of sp-job.
+#
+# sp-job is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# sp-job is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with sp-job. If not, see <http://www.gnu.org/licenses/>.
+#
+# encoding: utf-8
+#
+require 'json'
+require 'erb'
+require 'ostruct'
+require 'awesome_print'
+require 'os'
+require 'fileutils'
+require 'tempfile'
+require 'etc'
+
+puts "Running configure poison".red
+
+# Monkey patch for configuration deep merge
+class ::Hash
+
+  def config_merge (second)
+
+    second.each do |skey, sval|
+      if self.has_key?(skey+'!')
+        self[skey] = self[skey+'!']
+        self.delete(skey+'!')
+        next
+      elsif skey[-1] == '!'
+        tkey = skey[0..-2]
+        if self.has_key?(tkey)
+          if Array === self[tkey] && Array === sval
+            self[tkey] = self[tkey] | sval
+          elsif Hash === self[tkey] && Hash === sval
+            self[tkey].config_merge(sval)
+          else
+            raise "Error can't merge #{skey} with different types"
+          end
+        end
+      end
+
+      if ! self.has_key?(skey)
+        self[skey] = sval
+      else
+        if Array === self[skey] && Array === sval
+          self[skey] = self[skey] | sval
+        elsif Hash === self[skey] && Hash === sval
+          self[skey].config_merge(sval)
+        end
+      end
+    end
+  end
+
+  def clean_keys!
+    tmp = Hash.new
+
+    self.each do |key, val|
+      if Hash === val
+        val.clean_keys!
+      end
+
+      if key[-1] == '!'
+        tmp[key[0..-2]] = val
+        self.delete(key)
+      end
+    end
+
+    self.merge! tmp
+  end
+
+end # Hash monkey patch
+
+class SpDataStruct < OpenStruct
+
+  def self.to_hash_sp (object)
+    hash = {}
+    object.each_pair do |key, value|
+      if value.is_a?(SpDataStruct)
+        hash[key] = SpDataStruct::to_hash_sp(value)
+      elsif value.is_a?(Array)
+        hash[key] = []
+        value.each do |member|
+          if member.is_a?(SpDataStruct)
+            hash[key] << SpDataStruct::to_hash_sp(member)
+          else
+            hash[key] << member
+          end
+        end
+      else
+        hash[key] = value
+      end
+    end
+    hash
+  end
+
+  def to_json
+    SpDataStruct::to_hash_sp(self).to_json
+  end
+
+end
+
+def self.safesudo(cmd)
+  unless true == system(cmd)
+    system("sudo #{cmd}")
+  end
+end
+
+def self.create_directory (path)
+
+  if ! Dir.exist?(path)
+    if OS.mac?
+      if path.match("^/usr/local/")
+        info = Etc.getpwnam(Etc.getlogin)
+        puts "\t* Creating '#{path}'...".yellow
+        safesudo("mkdir -p #{path}")
+        if 0 != $?.exitstatus
+          puts "\t* Failed to create #{path}".red
+        end
+        next_parent_path = File.join("/usr/local", path.split(File::SEPARATOR).map {|x| x=="" ? File::SEPARATOR : x}[1..-1][2])
+        if ! next_parent_path
+          throw "Unable to create path #{path} - parent not found!"
+        end
+        safesudo("chown -R #{info.name}:#{Etc.getgrgid(info.gid).name} #{next_parent_path}")
+        if 0 != $?.exitstatus
+          puts "\t* Failed to change ownership to #{path}".red
+        end
+      else
+        safesudo("mkdir -p #{path}")
+        if 0 != $?.exitstatus
+          puts "\t* Failed to create #{path}".red
+        end
+      end
+    else
+      if path.match("^/home/")
+        safesudo("mkdir -p #{path}")
+      else
+        safesudo("mkdir -p #{path}")
+      end
+      if 0 != $?.exitstatus
+        puts "\t* Failed to create #{path}".red
+      end
+    end
+    if ! OS.mac? && !path.match("^/home/")
+      safesudo("chown #{$user}:#{$group} #{path}")
+    else
+      safesudo("chown #{$user}:#{$group} #{path}")
+    end
+    if 0 != $?.exitstatus
+      puts "\t* Failed to change ownership to #{path}".red
+    end
+    if ! OS.mac? && !path.match("^/home/")
+      safesudo("chmod 755 #{path}")
+    else
+      safesudo("chmod 755 #{path}")
+    end
+    if 0 != $?.exitstatus
+      puts "\t* Failed to change permissions to #{path}".red
+    end
+  end
+
+end
+
+def self.diff_and_write (contents:, path:, diff: true, dry_run: false)
+
+  if contents.length == 0
+    puts "\t* contents for #{path} is empty, ignored, we don't write empty files".green
+    return
+  end
+
+  if OS.mac?
+    create_directory File.dirname path
+  end
+
+  if ! File.exists?(path)
+    if contents.length == 0
+      puts "\t* #{path} does not exist and it's empty, ignored".green
+      return
+    else
+      safesudo("touch #{path}")
+    end
+  end
+
+  if true == diff
+    tmp_file = Tempfile.new File.basename path
+    FileUtils::mkdir_p File.dirname tmp_file
+    File.write(tmp_file,contents)
+    diff_contents = %x[diff -u #{path} #{tmp_file.path} 2>/dev/null]
+    if 0 == $?.exitstatus
+      puts "\t* #{path} not changed".green
+      return
+    end
+    if File.exists?(path)
+      puts "\t* #{path} changed:".red
+      puts diff_contents
+    else
+      puts "\t* #{path} does not exist. Will be created".blue
+    end
+
+  end
+  puts "\t* Writing #{path}".green
+  unless dry_run
+    if OS.mac? || File.writable?(path) || path.match("^/home/")
+      File.write(path, contents)
+    else
+      safesudo("chown #{$user}:#{$group} #{path}")
+      File.write(path, contents)
+      safesudo("chown root:root #{path}")
+    end
+  end
+  FileUtils.rm(tmp_file)
+end
+
+def self.pg_conn_string (db)
+  "host=#{db.host} port=#{db.port} dbname=#{db.dbname} user=#{db.user}#{db.password != nil && db.password.size != 0 ? ' password='+ db.password : '' }"
+end
+
+def self.expand_template (template, pretty_json: false)
+  begin
+    contents = ERB.new(File.read(template), nil, '-').result()
+    if pretty_json
+      JSON.pretty_generate(JSON.parse(contents))
+    else
+      contents
+    end
+  rescue Exception => e
+    puts "Expansion of #{template} failed".yellow
+    puts e.message.red
+    exit
+  end
+end
+
+def self.get_config (args)
+  hostname = %x[hostname -s].strip
+  @project = Dir.pwd
+  @user_home = File.expand_path('~')
+
+  #
+  # Pick file named 'hostname', or use 'developer' as basefile
+  #
+  if File.exists?("#{@project}/configure/#{hostname}.yml")
+    conf = YAML.load_file("#{@project}/configure/#{hostname}.yml")
+    conf['file_name'] = hostname
+  else
+    conf = YAML.load_file("#{@project}/configure/developer.yml")
+    conf['file_name'] = 'developer'
+  end
+
+  #
+  # Follow configuration dependencies and merge the configurations
+  #
+  configs = [ conf ]
+  loop do
+    break if conf['extends'].nil?
+    ancestor = conf['extends']
+    conf = YAML.load_file("#{@project}/configure/#{ancestor}.yml")
+    conf['file_name'] = ancestor || 'developer'
+    configs << conf
+  end
+
+  (configs.size - 2).downto(0).each do |i|
+    puts "Step #{i}: merging '#{configs[i]['file_name']}' with '#{configs[i+1]['file_name']}'"
+    configs[i].config_merge(configs[i+1])
+  end
+
+  conf = configs[0]
+
+  #
+  # Allow overide of project directory
+  #
+  conf['paths']['project'] ||= @project
+
+  #
+  # Resolve user and group if needed
+  #
+  if conf['user'].nil?
+    conf['user'] = %x[id -u -nr].strip
+  end
+  if conf['group'].nil?
+    conf['group'] = %x[id -g -nr].strip
+  end
+
+  #
+  # Pre-cook the connection string # TODO remove this after Hydra goes live
+  #
+  if conf['db']
+    dbname = conf['db']['dbname']
+    dbuser = conf['db']['user']
+    dbhost = conf['db']['host']
+    dbport = conf['db']['port'] || 5432
+    dbpass = conf['db']['password'] || ''
+    conf['db']['connection_string'] = "host=#{dbhost} port=#{dbport} dbname=#{dbname} user=#{dbuser}#{dbpass.size != 0 ? ' password='+ dbpass : '' }"
+  end
+
+  #
+  # Resolve project and user relative paths
+  #
+  conf['paths'].each do |name, path|
+    if path.start_with? '$project'
+      conf['paths'][name] = path.sub('$project', conf['paths']['project'] || @project)
+    elsif path.start_with? '$user_home'
+      conf['paths'][name] = path.sub('$user_home', @user_home)
+    end
+  end
+
+  #
+  # Read optional brand information
+  #
+  if conf['product']
+    brand_file = "#{@project}/configure/products/#{conf['product']}/brands.yml"
+    if File.exists?(brand_file)
+      brands = YAML.load_file(brand_file)
+    end
+    conf['brands'] = brands['brands']
+  end
+
+  conf.clean_keys!
+
+  if args[:print_config]
+    puts conf.to_yaml(:Indent => 4).white
+  end
+  return JSON.parse(conf.to_json, object_class: SpDataStruct), conf
+end
+
+def self.run_configure (args)
+
+  if args[:action] == 'overwrite'
+    dry_run = false
+    action = 'overwrite'
+  elsif args[:action] == 'hotfix'
+    dry_run = false
+    action = 'hotfix'
+  else
+    dry_run = true
+    action = 'dry-run'
+  end
+
+  #
+  # Read the configuration into ostruct @config will be accessible to the ERB templates
+  #
+  @config, conf = get_config(args)
+
+  #
+  # Resolve project and user again to create the relative paths
+  #
+  conf['paths'].each do |name, path|
+    if path.start_with? '$project'
+      conf['paths'][name] = path.sub('$project', conf['paths']['project'] || @project)
+      FileUtils.mkdir_p conf['paths'][name]
+    elsif path.start_with? '$user_home'
+      conf['paths'][name] = path.sub('$user_home', @user_home)
+      FileUtils.mkdir_p conf['paths'][name]
+    end
+  end
+
+
+  # Set helper variables on the task context
+  $user = @config.user
+  $group = @config.group
+  @project = Dir.pwd
+  @user_home = File.expand_path('~')
+  diff_before_copy = true
+
+  #
+  # Create required paths
+  #
+  if @config.nginx_broker && @config.nginx_broker.nginx && @config.nginx_broker.nginx.paths
+    @config.nginx_broker.nginx.paths.each do |path|
+      if @config.nginx_broker.nginx.suffix
+        path = path.sub('nginx-broker', "nginx-broker#{@config.nginx_broker.nginx.suffix}")
+      end
+      create_directory "#{@config.prefix}#{path}"
+    end
+  end
+  if @config.nginx_epaper && @config.nginx_epaper.nginx && @config.nginx_epaper.nginx.paths
+    @config.nginx_epaper.nginx.paths.each do |path|
+      if @config.nginx_epaper.nginx.suffix
+        path = path.sub('nginx-epaper', "nginx-epaper#{@config.nginx_epaper.nginx.suffix}")
+      end
+      create_directory "#{@config.prefix}#{path}"
+    end
+  end
+  if OS.mac? && @config.jobs
+    @config.jobs.each do |job|
+      if job.paths
+        job.paths.each do |path|
+          puts "Creating directory #{@config.prefix}#{path}"
+          create_directory "#{@config.prefix}#{path}"
+        end
+      end
+    end
+  end
+
+  #
+  # Copy /usr/share/ files to suffix directory
+  #
+  OS.mac? ? local_dir = '/local' : local_dir = ''
+  if @config.nginx_broker && @config.nginx_broker.nginx.suffix
+    create_directory("/usr#{local_dir}/share/nginx-broker#{@config.nginx_broker.nginx.suffix}")
+    safesudo("cp /usr#{local_dir}/share/nginx-broker/i18.json /usr#{local_dir}/share/nginx-broker#{@config.nginx_broker.nginx.suffix}/")
+  end
+
+  if @config.nginx_epaper && @config.nginx_epaper.nginx.suffix
+    create_directory("/usr#{local_dir}/share/nginx-epaper#{@config.nginx_epaper.nginx.suffix}/fonts/ttf/dejavu")
+    safesudo("cp -v -f /usr#{local_dir}/share/nginx-epaper/fonts/ttf/dejavu/* /usr#{local_dir}/share/nginx-epaper#{@config.nginx_epaper.nginx.suffix}/fonts/ttf/dejavu")
+  end
+
+  #
+  # Configure system, projects and user files
+  #
+  hostname = %x[hostname -s].strip
+  locations = {}
+  used_locations = []
+  if action == 'dry-run' || action == 'overwrite'
+    paths = { 'system' => @config.prefix, 'project' => @project, 'user' => @user_home}
+  else
+    paths = { 'project' => @project }
+  end
+  paths.each do |src, dest|
+    puts "Configuring #{src.upcase}"
+
+    # List all .erb files in hidden and visible folders
+    erblist = Dir.glob("#{@project}/configure/#{src}/.**/*.erb") +
+              Dir.glob("#{@project}/configure/#{src}/**/*.erb")
+
+    erblist.each do |template|
+      dst_file = template.sub("#{@project}/configure/#{src}", "#{dest}").sub(/\.erb$/, '')
+
+      # do not configure motd
+      if dst_file == '/etc/motd'
+        if OS.mac? || ! @config.motd || ! @config.motd[hostname.to_sym]
+          next
+        end
+      end
+
+      # developer exception
+      if dst_file.include?('nb-xattr') && @config.nginx_broker && @config.nginx_broker.nginx.suffix
+        dst_file = dst_file.sub('nb-xattr', "nb-xattr#{@config.nginx_broker.nginx.suffix}")
+      end
+      if dst_file.include?('nginx-broker') && @config.nginx_broker && @config.nginx_broker.nginx.suffix
+        dst_file = dst_file.sub('nginx-broker', "nginx-broker#{@config.nginx_broker.nginx.suffix}")
+      end
+      if dst_file.include?('nginx-epaper') && @config.nginx_epaper && @config.nginx_epaper.nginx.suffix
+        dst_file = dst_file.sub('nginx-epaper', "nginx-epaper#{@config.nginx_epaper.nginx.suffix}")
+      end
+
+      # Nginx Locations must be filtered, only handle locations that are used
+      m = /.*\.location$/.match(dst_file)
+      if m
+        locations[dst_file] = template
+        next
+      end
+
+      # Filter nginx vhosts that do not have and entry, only install the vhosts that have an entry in nginx-xxxxx
+      m = /.*(nginx-broker|nginx-epaper)[^\/]*?\/conf.d\/(.*)\.conf$/.match(dst_file)
+      if m && m.size == 3
+        key_l1 = m[1].gsub('-', '_')
+        if conf[key_l1].nil? or !conf[key_l1].key?(m[2])
+          puts "Filtered #{m[1]} - #{m[2]} - #{dst_file}"
+          next
+        end
+      end
+      # do not touch config files on top folder if that nginx is not requested
+      m = /.*(nginx-broker|nginx-epaper)[^\/]*?\/(.*)$/.match(dst_file)
+      if m && m.size == 3
+        key_l1 = m[1].gsub('-', '_')
+        if conf[key_l1].nil?
+          puts "Filtered #{m[1]} - #{m[2]} - #{dst_file}"
+          next
+        end
+      end
+
+      # Keep redis conf files always readable
+      if !OS.mac?
+        m = /.*(redis)\/(.*)$/.match(dst_file)
+        if m
+          safesudo("chmod +r #{dst_file}")
+        end
+      end
+
+      # 2nd filtered
+      if @config.erb_exclusion_list
+        base_filename = File.basename(dst_file)
+        if @config.erb_exclusion_list.include?(base_filename)
+          puts "Filtered #{base_filename}".yellow
+          next
+        end
+      end
+
+      # Now expand the template
+      file_contents = expand_template(template)
+
+      m = /.*(nginx-broker|nginx-epaper)[^\/]*?\/conf.d\/(.*)\.conf$/.match(dst_file)
+      if m && m.size == 3
+        # override destination path
+        nginx_name = m[1].gsub('-', '_')
+        module_name = m[2].gsub('-', '_')
+        if conf[nginx_name] && conf[nginx_name]['nginx'] && conf[nginx_name]['nginx']['alt_conf_dir_per_module']
+          alt_conf_dir_per_module = conf[nginx_name]['nginx']['alt_conf_dir_per_module'][module_name]
+          if alt_conf_dir_per_module
+            dst_file = "#{@config.prefix}#{alt_conf_dir_per_module}/#{File.basename(dst_file)}"
+          end
+        end
+
+
+        # included locations
+        includes = file_contents.scan(/^\s*include\s+conf\.d\/(.*)\.location\;/)
+        includes.each do |loc|
+          used_locations << loc[0]
+        end
+      end
+
+      # Write text expanded configuration file
+      create_directory(File.dirname dst_file)
+      diff_and_write(contents: file_contents,
+                     path: dst_file,
+                     diff: diff_before_copy,
+                     dry_run: dry_run
+      )
+    end
+  end
+
+  #
+  # configure the nginx locations that are used
+  #
+  if action == 'dry-run' || action == 'overwrite'
+    if used_locations.size
+      puts "Configuring NGINX LOCATIONS"
+      locations.each do |dst_file, template|
+        m = /.*\/(.*).location$/.match dst_file
+        if used_locations.include? m[1]
+          # Write text expanded configuration file
+          create_directory(File.dirname dst_file)
+          diff_and_write(contents: expand_template(template),
+                         path: dst_file,
+                         diff: diff_before_copy,
+                         dry_run: dry_run
+          )
+        end
+      end
+    end
+  end
+
+  #
+  # Configure JOBS
+  #
+  if action == 'dry-run' || action == 'overwrite'
+    puts "Configuring JOBS"
+    @config.jobs.to_h.each do |name, job|
+      @job_name = name
+      @job_description = @job_name
+      @job_dir = "#{@config.paths.working_directory}/jobs/#{@job_name}"
+      @job_args = ''
+      @job_exec = @config.bundle_exec
+      @job_working_dir = @config.paths.working_directory
+      @job_environment = nil
+      @job_threads = nil
+      if job
+        if job.args
+          job.args.to_h.each do | k, v |
+            @job_args += "-#{k} #{v}"
+          end
+        end
+        if job.exec_prefix
+          @job_exec = job.exec_prefix
+        end
+        if job.working_directory_suffix
+          @job_working_dir += "/#{job.working_directory_suffix}"
+        end
+        if job.environment
+          @job_environment = "#{job.environment}"
+        end
+        @job_threads = job.threads
+      end
+      puts " #{name}:"
+      if File.exists? "#{@job_dir}/conf.json.erb"
+        template = "#{@job_dir}/conf.json.erb"
+      else
+        template = "#{@config.paths.working_directory}/jobs/default_conf.json.erb"
+      end
+      unless File.exists? template
+        throw "Missing #{template} => configuration file for #{@job_name}"
+      end
+      if OS.mac?
+        create_directory("/usr/local/var/lock/#{@job_name}/")
+      end
+      create_directory "#{@config.prefix}/etc/#{@job_name}"
+      create_directory "#{@config.prefix}/var/log/#{@job_name}"
+      diff_and_write(contents: expand_template(template, pretty_json: true),
+                     path: "#{@config.prefix}/etc/#{@job_name}/conf.json",
+                     diff: diff_before_copy,
+                     dry_run: dry_run
+      )
+
+      if File.exists? "#{@job_dir}/service.erb"
+        template = "#{@job_dir}/service.erb"
+      else
+        template = "#{@config.paths.working_directory}/jobs/default.service.erb"
+      end
+      unless File.exists? template
+        throw "Missing service file for #{@job_name}"
+      end
+
+      diff_and_write(contents: expand_template(template),
+                     path: "#{@config.prefix}/lib/systemd/system/#{@job_name}@.service",
+                     diff: diff_before_copy,
+                     dry_run: dry_run
+      )
+
+      # logrotate.erb?
+      if File.exists? "#{@job_dir}/logrorate.erb"
+        template = "#{@job_dir}/logrotate.erb"
+      else
+        template = "#{@config.paths.working_directory}/jobs/default.logrotate.erb"
+      end
+      if File.exists? template
+        diff_and_write(contents: expand_template(template),
+                       path: "#{@config.prefix}/etc/logrotate.d/#{@job_name}",
+                       diff: diff_before_copy,
+                       dry_run: dry_run
+        )
+      end
+
+    end
+  end
+
+
+end
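The `::Hash#config_merge` / `clean_keys!` monkey patch added above drives the layered YAML merge in `get_config`: a more specific file wins over the file it `extends`, arrays are unioned, hashes merge recursively, and a key suffixed with `!` forces outright replacement before the suffix is stripped. A minimal sketch of that behaviour, using hypothetical configuration keys (`host`, `tubes`, `jobs`, `port` are illustrative, not taken from the gem's real config files) and assuming the `::Hash` patch from the diff above has been loaded:

```ruby
# Hypothetical child (hostname) and ancestor (developer) configurations.
child = {
  'host'   => 'db.example.com',                  # scalar present in child: child wins
  'tubes!' => ['child-tube'],                    # '!' suffix: replace, do not union
  'jobs'   => { 'mailer' => { 'threads' => 4 } }
}
ancestor = {
  'host'  => 'localhost',
  'tubes' => ['default-tube'],
  'port'  => 5432,                               # only in ancestor: inherited
  'jobs'  => { 'mailer' => { 'threads' => 1, 'timeout' => 30 } }
}

child.config_merge(ancestor)   # mirrors configs[i].config_merge(configs[i+1]) in get_config
child.clean_keys!              # strips any remaining '!' suffixes, recursively

# child now holds:
#   'host'  => 'db.example.com'                                   (child scalar kept)
#   'jobs'  => { 'mailer' => { 'threads' => 4, 'timeout' => 30 } } (recursive hash merge)
#   'tubes' => ['child-tube']                                     (forced by 'tubes!')
#   'port'  => 5432                                               (inherited from ancestor)
```

The merged hash is then round-tripped through JSON into `SpDataStruct` (an `OpenStruct` subclass) so the ERB templates expanded by `expand_template` can read it with dot notation such as `@config.paths.working_directory`.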