hdeploy 0.1.0
- checksums.yaml +7 -0
- data/.gitignore +51 -0
- data/Gemfile +5 -0
- data/README.md +2 -0
- data/bin/hdeploy +10 -0
- data/bin/hdeploy_client +39 -0
- data/hdeploy.gemspec +25 -0
- data/lib/hdeploy/apiclient.rb +38 -0
- data/lib/hdeploy/cli.rb +523 -0
- data/lib/hdeploy/client.rb +326 -0
- data/lib/hdeploy/conf.rb +56 -0
- data/lib/hdeploy/version.rb +4 -0
- data/lib/hdeploy.rb +11 -0
- metadata +141 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: eec00ee2ca9aaa249c5cc4119718de01ffad5609
  data.tar.gz: 14adbd225cb0154c7113561afcfeebe468fdd101
SHA512:
  metadata.gz: f64ebdb9712e3c1e02edd45c69d6d94dad0ff6c581ebd49fc4caea3e74ab887839161666eb17dd7f8ffd4060b0e232a57deea0dcc97ce68b1ae6809ee2f71cb3
  data.tar.gz: 56cee39bec219096382fba67d79dbe4ef41bfa9837d8e3bf0bea61af684983a2d7cadb737f9a093809ce6fc1d584d833970178c042e067f901f24130a4986ddf
data/.gitignore
ADDED
@@ -0,0 +1,51 @@
*.gem
*.rbc
/.config
/coverage/
/InstalledFiles
/pkg/
/spec/reports/
/spec/examples.txt
/test/tmp/
/test/version_tmp/
/tmp/

# Used by dotenv library to load environment variables.
# .env

## Specific to RubyMotion:
.dat*
.repl_history
build/
*.bridgesupport
build-iPhoneOS/
build-iPhoneSimulator/

## Specific to RubyMotion (use of CocoaPods):
#
# We recommend against adding the Pods directory to your .gitignore. However
# you should judge for yourself, the pros and cons are mentioned at:
# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
#
# vendor/Pods/

## Documentation cache and generated files:
/.yardoc/
/_yardoc/
/doc/
/rdoc/

## Environment normalization:
/.bundle/
/vendor/bundle
/lib/bundler/man/

# for a library or gem, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# Gemfile.lock
# .ruby-version
# .ruby-gemset

# unless supporting rvm < 1.11.0 or doing something fancy, ignore this:
.rvmrc

data/Gemfile
ADDED
data/README.md
ADDED
data/bin/hdeploy
ADDED
data/bin/hdeploy_client
ADDED
@@ -0,0 +1,39 @@
#!/usr/bin/env ruby

# FIXME: this allows loading without bundler or installing the gem
$LOAD_PATH.unshift File.expand_path(File.join(__FILE__,'../../lib'))

require 'hdeploy'

# Note: this means this can't really be done by executing
client = HDeploy::Client.new

# Fixme: slightly better error msgs etc

if ARGV.length == 0
  raise "please specify a command such as symlink post_distribute_run_once post_symlink_run_once"
else
  if ARGV.first == 'symlink'
    # we need special syntax: symlink app env
    params = {}
    STDIN.read.split(' ').each do |param|
      k,v = param.split(':')
      params[k] = v
    end

    client.symlink(params)

  elsif %w[symlink post_distribute_run_once post_symlink_run_once].include? ARGV.first

    # Decode the rest from STDIN. This k:v encoding is the simplest I found.
    params = {}
    STDIN.read.split(' ').each do |param|
      k,v = param.split(':')
      params[k] = v
    end

    client.run_hook(ARGV[0],params)
  else
    client.send(ARGV.first)
  end
end
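
The script above reads its parameters from STDIN as space-separated k:v pairs; the CLI side (lib/hdeploy/cli.rb) builds the same encoding before piping it to hdeploy_node over fabric. A minimal sketch of that round trip, not part of the gem and using hypothetical values:

# illustrative only - hypothetical app/env values, not gem source
params  = { app: 'myapp', env: 'production', user: 'deployer' }
encoded = params.collect { |k,v| "#{k}:#{v}" }.join(" ")   # "app:myapp env:production user:deployer"
decoded = {}
encoded.split(' ').each do |pair|
  k,v = pair.split(':')
  decoded[k] = v
end
# decoded => {"app"=>"myapp", "env"=>"production", "user"=>"deployer"}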
data/hdeploy.gemspec
ADDED
@@ -0,0 +1,25 @@
$:.push File.expand_path("../lib", __FILE__)
require 'hdeploy/version'

Gem::Specification.new do |s|
  s.name        = "hdeploy"
  s.version     = HDeploy::VERSION
  s.authors     = ["Patrick Viet"]
  s.email       = ["patrick.viet@gmail.com"]
  s.description = %q{HDeploy tool}
  s.summary     = %q{no summary}
  s.homepage    = "https://github.com/hdeploy/hdeploy"

  s.files         = `git ls-files`.split($/).select{|i| not i.start_with? 'omnibus/'}
  s.executables   = s.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  s.test_files    = s.files.grep(%r{^(test|spec|features)/})
  s.require_paths = ["lib"]

  s.add_runtime_dependency 'json', '~> 1.7'
  s.add_runtime_dependency 'curb', '~> 0.8'
  s.add_runtime_dependency 'inifile', '~> 2.0'
  s.add_runtime_dependency 'deep_clone', '~> 0.0' # For configuration
  s.add_runtime_dependency 'deep_merge', '~> 1.1'

  s.add_development_dependency 'pry', '~> 0'
end
data/lib/hdeploy/apiclient.rb
ADDED
@@ -0,0 +1,38 @@
require 'curb'
require 'singleton'

module HDeploy
  class APIClient
    include Singleton

    def initialize
      @conf = Conf.instance

      @c = Curl::Easy.new()
      @c.http_auth_types = :basic
      @c.username = @conf['api']['http_user']
      @c.password = @conf['api']['http_password']
    end

    def get(url)
      @c.url = @conf['api']['endpoint'] + url
      @c.perform
      raise "response code for #{url} was not 200 : #{@c.response_code}" unless @c.response_code == 200
      return @c.body_str
    end

    def put(uri,data)
      @c.url = @conf['api']['endpoint'] + uri
      @c.http_put(data)
      raise "response code for #{uri} was not 200 : #{@c.response_code} - #{@c.body_str}" unless @c.response_code == 200
      return @c.body_str
    end

    def delete(uri)
      @c.url = @conf['api']['endpoint'] + uri
      @c.http_delete
      raise "response code for #{uri} was not 200 : #{@c.response_code} - #{@c.body_str}" unless @c.response_code == 200
      return @c.body_str
    end
  end
end
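
APIClient is a Singleton wrapper around Curl::Easy with basic auth and the endpoint taken from the 'api' config section; the CLI goes through it for every API call. A minimal usage sketch (not part of the gem; the app, env and artifact names are hypothetical), mirroring calls made in lib/hdeploy/cli.rb:

# illustrative only - assumes the gem is loaded and configured
require 'json'
api  = HDeploy::APIClient.instance
dist = JSON.parse(api.get("/distribute/myapp"))                    # artifacts per env for a hypothetical app
api.put("/target/myapp/production", dist['production'].sort.last) # point the env at an artifact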
data/lib/hdeploy/cli.rb
ADDED
@@ -0,0 +1,523 @@
require 'hdeploy/apiclient'
require 'json'
require 'fileutils'
require 'inifile'
require 'digest'

module HDeploy
  class CLI

    def initialize
      @conf = HDeploy::Conf.instance
      @client = HDeploy::APIClient.instance
      @domain_name = @conf['cli']['domain_name']
      @app = @conf['cli']['default_app']
      @env = @conf['cli']['default_env']
      @force = false
      @fakebuild = false


      @conf.each do |k|
        next unless k[0..3] == 'app:'
        @conf[k].each do |k2,v|
          @conf[k][k2] = File.expand_path(v) if k2 =~ /\_path$/
        end
      end
    end

    def run!
      #begin
        cmds = []
        ARGV.each do |arg|
          cmd = arg.split(':',2)
          if cmd[0][0] == '_'
            raise "you can't use a cmd starting with a _"
          end

          unless respond_to?(cmd[0])
            raise "no such command '#{cmd[0]}' in #{self.class} (#{__FILE__})"
          end
          cmds << cmd
        end

        cmds.each do |cmd|
          m = method(cmd[0]).parameters

          # only zero or one param
          raise "method #{cmd[0]} takes several parameters. This is a programming mistake. Ask Patrick to edit #{__FILE__}" if m.length > 1

          if m.length == 1
            if cmd.length > 1
              # in this case it always works
              puts send(cmd[0],cmd[1])
            elsif m[0][0] == :opt
              puts send(cmd[0])
            else
              # This means you didn't give a parameter to a command that wants an option
              raise "method #{cmd[0]} requires an option. please specify with #{cmd[0]}:parameter"
            end
          else
            if cmd.length > 1
              raise "method #{cmd[0]} does not take parameters and you gave parameter #{cmd[1]}"
            else
              puts send(cmd[0])
            end
          end
        end
      #rescue Exception => e
      #  puts "ERROR: #{e}"
      #  exit 1
      #end
    end

    def mysystem(cmd)
      system cmd
      raise "error running #{cmd} #{$?}" unless $?.success?
    end

    def _fab # looking for python 'fabric'
      return @conf['cli']['fab'] if @conf['cli']['fab']

      try_files = %w[
        /usr/local/bin/fab
        /usr/bin/fab
        /opt/hdeploy/embedded/bin/fab
        /opt/hdeploy/bin/fab
      ]

      try_files.each do |f|
        return f if File.executable?(f)
      end

      raise "could not find fabric. tried #{try_files.join(' ')}"
    end

    # -------------------------------------------------------------------------
    def app(newapp)
      @app = newapp
      puts "set app to #{newapp}"
    end

    def list_servers(recipe = 'common')
      return @client.get("/srv_by_recipe/#{recipe}")
    end

    def prune_artifacts
      c = @conf['build'][@app]
      keepnum = c['prune'] || 5
      keepnum = keepnum.to_i

      artdir = c['artifacts']

      artlist = []
      Dir.entries(artdir).sort.each do |f|
        if f =~ /(#{@app}\..*)\.tar\.gz$/
          artlist << $1
        end
      end

      distributed_by_env = JSON.parse(@client.get("/distribute/#{@app}"))
      distributed = {}
      distributed_by_env.each do |env,list|
        list.each do |artname|
          distributed[artname] = true
        end
      end

      artlist = artlist.delete_if {|a| distributed.has_key? a }

      while artlist.length > keepnum
        art = artlist.shift
        artfile = art + ".tar.gz"
        puts "File.unlink #{File.join(artdir,artfile)}"
        File.unlink File.join(artdir,artfile)
      end
    end

    def prune_build_env
      c = @conf[@app]
      keepnum = c['prune_build_env'] || 2
      keepnum = keepnum.to_i

      raise "incorrect dir config" unless c['build_dir']
      builddir = File.expand_path(c['build_dir'])
      return unless Dir.exists?(builddir)
      dirs = Dir.entries(builddir).delete_if{|d| d == '.' or d == '..' }.sort
      puts "build env pruning: keeping maximum #{keepnum} builds"

      while dirs.length > keepnum
        dirtodel = dirs.shift
        puts "FileUtils.rm_rf #{File.join(builddir,dirtodel)}"
        FileUtils.rm_rf File.join(builddir,dirtodel)
      end
    end

    def prune(prune_env='nowhere')

      c = @conf['build'][@app]
      prune_count = c['prune'].to_i #FIXME: integrity check.
      raise "no proper prune count" unless prune_count >= 3 and prune_count < 20

      dist = JSON.parse(@client.get("/distribute/#{@app}"))
      if dist.has_key? prune_env

        # Now we want to be careful not to eliminate any current artifact (ie. symlinked)
        # or any target either. Usually they would both be the same, obviously.

        artifacts_to_keep = {}

        dist_states = JSON.parse(@client.get("/distribute_state/#{@app}"))
        dist_states.each do |dist_state|
          if prune_env == 'nowhere'
            # We take EVERYTHING into account
            artifacts_to_keep[dist_state['current']] = true
            dist_state['artifacts'].each do |art|
              artifacts_to_keep[art] = true
            end

          elsif dist_state['env'] == prune_env
            # Otherwise, we only take into account the current env
            artifacts_to_keep[dist_state['current']] = true
          end
        end

        # If the prune_env is not 'nowhere', we also want to keep the target
        # fixme: check integrity of reply
        artifacts_to_keep[@client.get("/target/#{@app}/#{prune_env}")] = true

        if dist[prune_env].length <= prune_count
          return "nothing to prune in env. #{prune_env}"
        end

        delete_max_count = dist[prune_env].length - prune_count
        delete_count = 0

        dist[prune_env].sort.each do |artifact|

          next if artifacts_to_keep.has_key? artifact

          delete_count += 1
          if prune_env == 'nowhere'
            # we must also delete the file
            puts @client.delete("/artifact/#{@app}/#{artifact}")
          else
            puts @client.delete("/distribute/#{@app}/#{prune_env}/#{artifact}")
          end
          break if delete_count >= delete_max_count
        end

        return ""
      else
        return "Nothing to prune"
      end

      prune_artifacts
    end

    def state
      dist = JSON.parse(@client.get("/distribute/#{@app}"))
      dist_state = JSON.parse(@client.get("/distribute_state/#{@app}"))
      targets = JSON.parse(@client.get("/target/#{@app}"))

      # What I'm trying to do here is, for each artifact from 'dist', figure out where it actually is.
      # For this, I need to know how many servers are active per env, then I can cross-reference the artifacts
      todisplay = {}
      dist.each do |env,artlist|
        next if env == 'nowhere'
        todisplay[env] = {}
        artlist.each do |art|
          todisplay[env][art] = []
        end
      end

      servers_by_env = {}
      current_links = {}

      dist_state.each do |stdata|
        env,hostname,artifacts,current = stdata.values_at('env','hostname','artifacts','current')

        servers_by_env[env] = {} unless servers_by_env.has_key? env
        servers_by_env[env][hostname] = true

        current_links[env] = {} unless current_links.has_key? env
        current_links[env][hostname] = current

        artifacts.each do |art|
          if todisplay.has_key? env
            if todisplay[env].has_key? art
              todisplay[env][art] << hostname
            end
          end
        end
      end

      # now that we have the servers by env, we can tell for each artifact where it is distributed, and where it's missing.

      ret = "---------------------------------------------------\n" +
            "Artifact distribution state for app #{@app}\n" +
            "---------------------------------------------------\n\n"

      ret += "Inactive: "
      if dist['nowhere'].length == 0
        ret += "none\n\n"
      else
        ret += "\n" + dist['nowhere'].collect{|art| "- #{art}"}.sort.join("\n") + "\n\n"
      end

      todisplay.each do |env,artifacts|
        srvnum = servers_by_env[env].length
        txt = "ENV \"#{env}\" (#{srvnum} servers)\n"
        ret += ("-" * txt.length) + "\n" + txt + ("-" * txt.length) + "\n"
        ret += "TARGET: " + targets[env].to_s

        # Consistent targets?
        current_by_art = {}
        inconsistent = []
        current_links[env].each do |srv,link|
          inconsistent << srv if link != targets[env]
          current_by_art[link] = [] unless current_by_art.has_key? link
          current_by_art[link] << srv
        end
        if inconsistent.length > 0
          ret += " (#{inconsistent.length}/#{servers_by_env[env].length} inconsistent servers: #{inconsistent.join(', ')})\n\n"
        else
          ret += " (All OK)\n\n"
        end

        # distributed artifacts. Sort by key.
        artifacts.keys.sort.each do |art|
          hosts = artifacts[art]
          ret += "- #{art}"
          ret += " (target)" if art == targets[env]
          ret += " (current #{current_by_art[art].length}/#{servers_by_env[env].length})" if current_by_art.has_key? art

          # and if it's not distributed somewhere
          if hosts.length < servers_by_env[env].length
            ret += " (missing on: #{(servers_by_env[env].keys - hosts).join(', ')})"
          end

          ret += "\n"
        end
        ret += "\n"
      end

      ret
    end

    def force
      @force = true
    end

    def env(newenv)
      @env = newenv
      puts "set env to #{@env}"
    end

    def undistribute(build_tag)
      @client.delete("/distribute/#{@app}/#{@env}/#{build_tag}")
    end

    def help
      puts "Possible commands:"
      puts " env:branch"
      puts " build (or build:branch)"
      puts " app:appname"
      puts " distribute:nameofartifact"
      puts " symlink:nameofartifact"
      puts " list"
      puts ""
      puts "Example: hdeploy env:production build"
    end

    def fakebuild
      @fakebuild = true
    end

    def initrepo
      init()
    end

    def init
      c = @conf['build'][@app]
      repo = File.expand_path(c['repo'])

      if !(Dir.exists?(File.join(repo,'.git')))
        FileUtils.rm_rf repo
        FileUtils.mkdir_p File.join(repo,'..')
        mysystem("git clone #{c['git']} #{repo}")
      end
    end

    def notify(msg)
      if File.executable?('/usr/local/bin/hdeploy_hipchat')
        mysystem("/usr/local/bin/hdeploy_hipchat #{msg}")
      end
    end

    def build(branch = 'master')

      prune_build_env

      # Starting now..
      start_time = Time.new

      # Copy GIT directory
      c = @conf['build'][@app]
      repo = File.expand_path(c['repo'])

      raise "Error in source dir #{repo}. Please run hdeploy initrepo" unless Dir.exists? (File.join(repo, '.git'))
      directory = File.expand_path(File.join(c['build_dir'], (@app + start_time.strftime('.%Y%m%d_%H_%M_%S.'))) + ENV['USER'] + (@fakebuild ? '.fakebuild' : ''))
      FileUtils.mkdir_p directory

      # Update GIT directory
      Dir.chdir(repo)

      subgit = `find . -mindepth 2 -name .git -type d`
      if subgit.length > 0
        subgit.split("\n").each do |d|
          if Dir.exists? d
            FileUtils.rm_rf d
          end
        end
      end

      [
        'git clean -xdf',
        'git reset --hard HEAD',
        'git clean -xdf',
        'git checkout master',
        'git pull',
        'git remote show origin',
        'git remote prune origin',
      ].each do |cmd|
        mysystem(cmd)
      end

      # Choose branch
      mysystem("git checkout #{branch}")

      if branch != 'master'
        [
          'git reset --hard HEAD',
          'git clean -xdf',
          'git pull'
        ].each do |cmd|
          mysystem(cmd)
        end
      end


      # Copy GIT
      if c['subdir'].empty?
        mysystem "rsync -av --exclude=.git #{c['repo']}/ #{directory}/"
      else
        mysystem "rsync -av --exclude=.git #{c['repo']}/#{c['subdir']}/ #{directory}/"
      end

      # Get a tag
      gitrev = (`git log -1 --pretty=oneline`)[0..11] # not 39.
      build_tag = @app + start_time.strftime('.%Y%m%d_%H_%M_%S.') + branch + '.' + gitrev + '.' + ENV['USER'] + (@fakebuild ? '.fakebuild' : '')

      notify "build start - #{ENV['USER']} - #{build_tag}"

      Dir.chdir(directory)

      # Write the tag in the dest directory
      File.write 'REVISION', (gitrev + "\n")

      # Run the build process # FIXME: add sanity check
      try_files = %w[build.sh build/build.sh hdeploy/build.sh]
      if File.exists? 'hdeploy.ini'
        repoconf = IniFile.load('hdeploy.ini')['global']
        try_files.unshift(repoconf['build_script']) if repoconf['build_script']
      end

      unless @fakebuild
        build_script = false
        try_files.each do |f|
          if File.exists?(f) and File.executable?(f)
            build_script = f
            break
          end
        end

        raise "no executable build script file. Tried files: #{try_files.join(' ')}" unless build_script
        mysystem(build_script)
      end

      # Make tarball
      FileUtils.mkdir_p c['artifacts']
      mysystem("tar czf #{File.join(c['artifacts'],build_tag)}.tar.gz .")

      # FIXME: upload to S3
      register_tarball(build_tag)

      notify "build success - #{ENV['USER']} - #{build_tag}"

      prune_build_env
    end

    def register_tarball(build_tag)
      # Register tarball
      filename = build_tag + '.tar.gz'
      checksum = Digest::MD5.file(File.join(@conf['build'][@app]['artifacts'], filename))

      @client.put("/artifact/#{@app}/#{build_tag}", JSON.pretty_generate({
        source: "http://build.gyg.io:8502/#{filename}",
        altsource: "",
        checksum: checksum,
      }))
    end

    def fulldeploy(build_tag)
      distribute(build_tag)
      symlink(build_tag)
    end

    def distribute(build_tag)
      r = @client.put("/distribute/#{@app}/#{@env}",build_tag)
      if r =~ /^OK /
        h = JSON.parse(@client.get("/srv/by_app/#{@app}/#{@env}"))

        # On all servers, do a standard check deploy.
        system("#{_fab} -f $(hdeploy_filepath fabfile.py) -H #{h.keys.join(',')} -P host_monkeypatch:#{@domain_name} -- sudo hdeploy_node check_deploy")

        # And on a single server, run the single hook.
        hookparams = { app: @app, env: @env, artifact: build_tag, servers:h.keys.join(','), user: ENV['USER'] }.collect {|k,v| "#{k}:#{v}" }.join(" ")
        system("#{_fab} -f $(hdeploy_filepath fabfile.py) -H #{h.keys.sample} -P host_monkeypatch:#{@domain_name} -- 'echo #{hookparams} | sudo hdeploy_node post_distribute_run_once'")
      end
    end

    # Does this really have to exist? Or should I just put it in the symlink method?
    def target(artid = 'someid')

      # We just check if the artifact is set to be distributed to the server;
      # for the actual presence we will only check in the symlink part.

      todist = JSON.parse(@client.get("/distribute/#{@app}/#{@env}"))
      raise "artifact #{artid} is not set to be distributed for #{@app}/#{@env}" unless todist.has_key? artid
      return @client.put("/target/#{@app}/#{@env}", artid)
    end

    def symlink(target)
      target(target)

      h = JSON.parse(@client.get("/srv/by_app/#{@app}/#{@env}"))

      raise "no server with #{@app}/#{@env}" unless h.keys.length > 0
      h.each do |host,conf|
        if !(conf['artifacts'].include? target)
          raise "artifact #{target} is not present on server #{host}. Please run hdeploy env:#{@env} distribute:#{target}"
        end
      end

      # On all servers, do a standard symlink
      system("#{_fab} -f $(hdeploy_filepath fabfile.py) -H #{h.keys.join(',')} -P host_monkeypatch:#{@domain_name} -- 'echo app:#{@app} env:#{@env} | sudo hdeploy_node symlink'")

      # And on a single server, run the single hook.
      hookparams = { app: @app, env: @env, artifact: target, servers:h.keys.join(','), user: ENV['USER'] }.collect {|k,v| "#{k}:#{v}" }.join(" ")
      system("#{_fab} -f $(hdeploy_filepath fabfile.py) -H #{h.keys.sample} -P host_monkeypatch:#{@domain_name} -- 'echo #{hookparams} | sudo hdeploy_node post_symlink_run_once'")
    end
  end
end

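For reference, run! above treats each command-line token as either command or command:argument, checks it against the public methods of CLI, and dispatches via send, so a shell invocation like hdeploy app:myapp env:production build runs app('myapp'), env('production') and then build. A simplified sketch of that parsing (illustrative only, omitting the arity checks; values are hypothetical):

# illustrative only - simplified version of the dispatch in run!
argv = %w[app:myapp env:production build]
cmds = argv.map { |arg| arg.split(':', 2) }    # [["app","myapp"], ["env","production"], ["build"]]
cli  = HDeploy::CLI.new
cmds.each { |name, arg| arg ? cli.send(name, arg) : cli.send(name) }
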
data/lib/hdeploy/client.rb
ADDED
@@ -0,0 +1,326 @@
require 'curb'
require 'json'
require 'fileutils'
require 'pathname'
require 'inifile'
require 'pry'

module HDeploy
  class Client

    def initialize
      @conf = HDeploy::Conf.instance('./hdeploy.conf.json')
      @conf.add_defaults({
        'client' => {
          'keepalive_delay' => 60,
          'check_deploy_delay' => 60,
          'max_run_duration' => 3600,
          'hostname' => `/bin/hostname`.chomp,
        }
      })

      # Check for needed configuration parameters
      # API
      api_params = %w[http_user http_password endpoint]
      raise "#{@conf.file}: you need an 'api' section for the hdeploy client (#{api_params.join(', ')})" unless @conf['api']
      api_params.each do |p|
        raise "#{@conf.file}: you need a param for the hdeploy client: api/#{p}" unless @conf['api'][p]
      end

      # Deploy
      raise "#{@conf.file}: you need a 'deploy' section for the hdeploy client" unless @conf['deploy']
      @conf['deploy'].keys.each do |k|
        raise "#{@conf.file}: deploy key must be in the format app:env - found #{k}" unless k =~ /^[a-z0-9\-\_]+:[a-z0-9\-\_]+$/
      end

      default_user = Process.uid == 0 ? 'www-data' : Process.uid
      default_group = Process.gid == 0 ? 'www-data' : Process.gid

      @conf['deploy'].each do |k,c|
        raise "#{@conf.file}: deploy section '#{k}': missing symlink param" unless c['symlink']
        c['symlink'] = File.expand_path(c['symlink'])

        # FIXME: throw exception if user/group are root and/or don't exist
        {
          'relpath' => File.expand_path('../releases', c['symlink']),
          'tgzpath' => File.expand_path('../tarballs', c['symlink']),
          'user' => default_user,
          'group' => default_group,
        }.each do |k2,v|
          c[k2] ||= v
        end

        # It's not a mistake to check for uid in the gid section: only root can change gid.
        raise "You must run the client as uid root if you want a different user for deploy #{k}" if Process.uid != 0 and c['user'] != Process.uid
        raise "You must run the client as gid root if you want a different group for deploy #{k}" if Process.uid != 0 and c['group'] != Process.gid
      end
    end

    # -------------------------------------------------------------------------
    def keepalive
      hostname = @conf['client']['hostname']
      c = Curl::Easy.new(@conf['api']['endpoint'] + '/srv/keepalive/' + hostname)
      c.http_auth_types = :basic
      c.username = @conf['api']['http_user']
      c.password = @conf['api']['http_password']
      c.put((@conf['client']['keepalive_delay'].to_i * 2).to_s)
    end

    def put_state
      hostname = @conf['client']['hostname']

      c = Curl::Easy.new(@conf['api']['endpoint'] + '/distribute_state/' + hostname)
      c.http_auth_types = :basic
      c.username = @conf['api']['http_user']
      c.password = @conf['api']['http_password']

      r = []

      # Will look at directories and figure out the current state
      @conf['deploy'].each do |section,conf|
        app,env = section.split(':')

        relpath,tgzpath,symlink = conf.values_at('relpath','tgzpath','symlink')

        # could be done with a ternary operator but I find it more readable like that.
        current = "unknown"
        if File.symlink? symlink and Dir.exists? symlink
          current = File.basename(File.readlink(symlink))
        end

        # For artifacts, what we want is a directory that contains the file "READY"
        artifacts = Dir.glob(File.join(relpath, '*', 'READY')).map{|x| File.basename(File.expand_path(File.join(x,'..'))) }

        r << {
          app: app,
          env: env,
          current: current,
          artifacts: artifacts.sort,
        }

      end

      puts JSON.pretty_generate(r) if ENV.has_key?'DEBUG'
      c.put(JSON.generate(r))
    end

    def find_executable(name) #FIXME should be in some other path
      %w[
        /opt/hdeploy/embedded/bin
        /opt/hdeploy/bin
        /usr/local/bin
        /usr/bin
      ].each do |p|
        e = File.join p,name
        next unless File.exists? e
        st = File.stat(e)
        next unless st.uid == 0
        next unless st.gid == 0
        if sprintf("%o", st.mode) == '100755'
          return e
        else
          warn "file #{e} does not have permissions 100755"
        end
      end
      return nil
    end

    def check_deploy
      put_state

      c = Curl::Easy.new()
      c.http_auth_types = :basic
      c.username = @conf['api']['http_user']
      c.password = @conf['api']['http_password']

      # Now this is the big stuff
      @conf['deploy'].each do |section,conf|
        app,env = section.split(':') # it's already checked for syntax higher in the code

        # Here we get the info.
        # FIXME: double check that config is ok
        relpath,tgzpath,symlink,user,group = conf.values_at('relpath','tgzpath','symlink','user','group')

        # Now the release info from the server
        c.url = @conf['api']['endpoint'] + '/distribute/' + app + '/' + env
        c.perform

        # prepare directories
        FileUtils.mkdir_p(relpath)
        FileUtils.mkdir_p(tgzpath)

        artifacts = JSON.parse(c.body_str)
        puts "found #{artifacts.keys.length} artifacts for #{app} / #{env}"

        dir_to_keep = []
        tgz_to_keep = []

        artifacts.each do |artifact,artdata|
          puts "checking artifact #{artifact}"
          destdir = File.join relpath,artifact
          tgzfile = File.join tgzpath,(artifact+'.tar.gz')
          readyfile = File.join destdir,'READY'

          if !(File.exists?readyfile)
            # we have to release. let's cleanup.
            FileUtils.rm_rf(destdir) if File.exists?(destdir)
            count = 0
            while count < 5 and !(File.exists?tgzfile and Digest::MD5.file(tgzfile) == artdata['checksum'])
              count += 1
              File.unlink tgzfile if File.exists?tgzfile
              # FIXME: add altsource and BREAK
              # FIXME: don't run download as root!!
              #####
              if f = find_executable('aria2')
                system("#{f} -x 5 -d #{tgzpath} -o #{artifact}.tar.gz #{artdata['source']}")

              elsif f = find_executable('wget')
                system("#{f} -o #{tgzfile} #{artdata['source']}")

              elsif f = find_executable('curl')
                system("#{f} -o #{tgzfile} #{artdata['source']}")

              else
                raise "no aria2c, wget or curl available. please install one of them."
              end
            end

            raise "unable to download artifact" unless File.exists?tgzfile
            raise "incorrect checksum for #{tgzfile}" unless Digest::MD5.file(tgzfile) == artdata['checksum']


            FileUtils.mkdir_p destdir
            FileUtils.chown user, group, destdir
            Dir.chdir destdir

            chpst = ''
            if Process.uid == 0
              chpst = find_executable('chpst') or raise "unable to find chpst binary"
              chpst += " -u #{user}:#{group} "
            end

            tar = find_executable('tar')
            system("#{chpst}#{tar} xzf #{tgzfile}") or raise "unable to extract #{tgzfile} as #{user}:#{group}"
            File.chmod 0755, destdir

            # Post distribute hook
            run_hook('post_distribute', {'app' => app, 'env' => env, 'artifact' => artifact})
            FileUtils.touch(File.join(destdir,'READY')) #FIXME: root?
          end

          # we only get here if the previous step worked.
          tgz_to_keep << File.expand_path(tgzfile)
          dir_to_keep << File.expand_path(destdir)
        end

        # check for symlink
        symlink({'app' => app,'env' => env, 'force' => false})

        # cleanup
        if Dir.exists? conf['symlink']
          dir_to_keep << File.expand_path(File.join(File.join(conf['symlink'],'..'),File.readlink(conf['symlink'])))
        end

        (Dir.glob(File.join conf['relpath'], '*') - dir_to_keep).each do |d|
          puts "cleanup dir #{d}"
          FileUtils.rm_rf d
        end

        (Dir.glob(File.join conf['tgzpath'],'*') - tgz_to_keep).each do |f|
          puts "cleanup file #{f}"
          File.unlink f
        end

      end
      put_state
    end

    def run_hook(hook,params)
      # This is a generic function to run the hooks defined in hdeploy.ini.
      # Standard hooks are

      app,env,artifact = params.values_at('app','env','artifact')

      oldpwd = Dir.pwd

      raise "no such app/env #{app} / #{env}" unless @conf['deploy'].has_key? "#{app}:#{env}"

      relpath,user,group = @conf['deploy']["#{app}:#{env}"].values_at('relpath','user','group')
      destdir = File.join relpath,artifact

      # It's OK if the file doesn't exist
      hdeployini = File.join destdir, 'hdeploy.ini'
      return unless File.exists? hdeployini

      # It's also OK if that hook doesn't exist
      hdc = IniFile.load(hdeployini)['hooks']
      return unless hdc.has_key? hook

      hfile = hdc[hook]

      # But if it is defined, we're gonna scream if it's defined incorrectly.
      raise "no such file #{hfile} for hook #{hook}" unless File.exists? (File.join destdir,hfile)
      raise "non-executable file #{hfile} for hook #{hook}" unless File.executable? (File.join destdir,hfile)

      # OK let's run the hook
      Dir.chdir destdir

      chpst = ''
      if Process.uid == 0
        chpst = find_executable('chpst') or raise "unable to find chpst binary"
        chpst += " -u #{user}:#{group} "
      end

      system("#{chpst}#{hfile} '#{JSON.generate(params)}'")
      if $?.success?
        puts "Successfully ran #{hook} hook / #{hfile}"
        Dir.chdir oldpwd
      else
        Dir.chdir oldpwd
        raise "Error while running file #{hfile} hook #{hook} : #{$?} - (DEBUG: (pwd: #{destdir}): #{chpst}#{hfile} '#{JSON.generate(params)}'"
      end
    end

    def symlink(params)

      app,env = params.values_at('app','env')
      force = true
      if params.has_key? 'force'
        force = params['force']
      end

      raise "no such app/env #{app} / #{env}" unless @conf['deploy'].has_key? "#{app}:#{env}"

      conf = @conf['deploy']["#{app}:#{env}"]
      link,relpath = conf.values_at('symlink','relpath')

      if force or !(File.exists?link)
        FileUtils.rm_rf(link) unless File.symlink?link

        c = Curl::Easy.new(@conf['api']['endpoint'] + '/target/' + app + '/' + env)
        c.http_auth_types = :basic
        c.username = @conf['api']['http_user']
        c.password = @conf['api']['http_password']
        c.perform

        target = c.body_str
        target_relative_path = Pathname.new(File.join relpath,target).relative_path_from(Pathname.new(File.join(link,'..')))

        if File.symlink?(link) and (File.readlink(link) == target_relative_path)
          puts "symlink for app #{app} is already OK (#{target_relative_path})"
        else
          # atomic symlink override
          puts "setting symlink for app #{app} to #{target_relative_path}"
          File.symlink(target_relative_path,link + '.tmp') #FIXME: should this belong to root?
          File.rename(link + '.tmp', link)
          put_state
        end

        run_hook('post_symlink', {'app' => app, 'env' => env, 'artifact' => target})
      else
        puts "not changing symlink for app #{app}"
      end
    end

  end
end
data/lib/hdeploy/conf.rb
ADDED
@@ -0,0 +1,56 @@
require 'json'
require 'deep_merge'
require 'deep_clone'

module HDeploy
  class Conf

    @@instance = nil
    @@default_values = []

    attr_reader :file

    def initialize(file)
      @file = file
      reload
    end

    # FIXME: find a good way to set default path
    def self.instance(path = '/opt/hdeploy/etc/hdeploy.conf.json')
      @@instance ||= new(path)
    end

    # -------------------------------------------------------------------------

    def reload
      raise "unable to find conf file #{@file}" unless File.exists? @file

      st = File.stat(@file)
      raise "config file #{@file} must not be a symlink" if File.symlink?(@file)
      raise "config file #{@file} must be a regular file" unless st.file?
      raise "config file #{@file} must have uid 0" unless st.uid == 0 or Process.uid != 0
      raise "config file #{@file} must not allow group/others to write" unless sprintf("%o", st.mode) =~ /^100[46][04][04]/

      # Seems we have checked everything. Woohoo!
      @conf = JSON.parse(File.read(@file))
    end

    # -------------------------------------------------------------------------
    def [](k)
      @conf[k]
    end

    # -------------------------------------------------------------------------
    def add_defaults(h)
      # This is pretty crappy code in that it loads stuff twice etc. But that way there is no re-implementing a variation of deep_merge for default stuff...
      @@default_values << h.__deep_clone__

      rebuild_conf = {}
      @@default_values.each do |defval|
        rebuild_conf.deep_merge!(defval)
      end

      @conf = rebuild_conf.deep_merge!(@conf)
    end
  end
end
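
Conf loads a single JSON file (default path /opt/hdeploy/etc/hdeploy.conf.json) and exposes it section by section via []. A minimal, hypothetical configuration covering the sections read by the code above (api for APIClient and Client, client for the node-side defaults, cli and build for the CLI, deploy for the node client) could be generated like this; every value is a placeholder:

# illustrative only - all values are placeholders, not gem source
require 'json'
example_conf = {
  'api'    => { 'endpoint' => 'http://hdeploy.example.com:8502', 'http_user' => 'hdeploy', 'http_password' => 'secret' },
  'client' => { 'keepalive_delay' => 60 },
  'cli'    => { 'domain_name' => 'example.com', 'default_app' => 'myapp', 'default_env' => 'production' },
  'build'  => { 'myapp' => { 'git' => 'git@github.com:example/myapp.git', 'repo' => '~/build/myapp/repo',
                             'build_dir' => '~/build/myapp/tmp', 'artifacts' => '~/build/myapp/artifacts',
                             'prune' => 5, 'subdir' => '' } },
  'deploy' => { 'myapp:production' => { 'symlink' => '/var/www/myapp/current' } },
}
File.write('hdeploy.conf.json', JSON.pretty_generate(example_conf))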
data/lib/hdeploy.rb
ADDED
metadata
ADDED
@@ -0,0 +1,141 @@
--- !ruby/object:Gem::Specification
name: hdeploy
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Patrick Viet
autorequire:
bindir: bin
cert_chain: []
date: 2016-09-15 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: json
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.7'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.7'
- !ruby/object:Gem::Dependency
  name: curb
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.8'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.8'
- !ruby/object:Gem::Dependency
  name: inifile
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2.0'
- !ruby/object:Gem::Dependency
  name: deep_clone
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.0'
- !ruby/object:Gem::Dependency
  name: deep_merge
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.1'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.1'
- !ruby/object:Gem::Dependency
  name: pry
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0'
description: HDeploy tool
email:
- patrick.viet@gmail.com
executables:
- hdeploy
- hdeploy_client
extensions: []
extra_rdoc_files: []
files:
- ".gitignore"
- Gemfile
- README.md
- bin/hdeploy
- bin/hdeploy_client
- hdeploy.gemspec
- lib/hdeploy.rb
- lib/hdeploy/apiclient.rb
- lib/hdeploy/cli.rb
- lib/hdeploy/client.rb
- lib/hdeploy/conf.rb
- lib/hdeploy/version.rb
homepage: https://github.com/hdeploy/hdeploy
licenses: []
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.4.8
signing_key:
specification_version: 4
summary: no summary
test_files: []