ey-flex-test 0.3.3

@@ -0,0 +1,22 @@
+ module EyApi
+   # POST to the Engine Yard API and parse the JSON response.
+   def call_api(path, opts={})
+     JSON.parse(@rest["/api/#{path}"].post(@keys.merge(opts), {"Accept" => "application/json"}))
+   rescue RestClient::RequestFailed => e
+     case e.http_code
+     when 503 # API is busy; wait and try again
+       sleep 10 # Nanite, save us...
+       retry
+     else
+       raise "API call to Engine Yard failed. Are there any running instances for #{@env}?"
+     end
+   end
+
+   def get_envs
+     @_envs ||= call_api("environments")
+   end
+
+   def get_json(instance_id = nil)
+     env = get_envs[@env]
+     call_api("json_for_instance", :id => env['id'], :instance_id => instance_id)
+   end
+ end
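
Usage note (not part of the diff): EyApi is a mixin that expects its host to set @rest (a RestClient::Resource pointed at the API endpoint), @keys (the account credentials merged into every POST), and @env. A minimal hypothetical host, with a placeholder endpoint and placeholder keys:

  require 'rubygems'
  require 'rest_client'
  require 'json'

  # Hypothetical consumer of the EyApi mixin; endpoint and keys are placeholders.
  class ApiProbe
    include EyApi

    def initialize(env_name)
      @env  = env_name # e.g. "my_app_production"
      @rest = RestClient::Resource.new("https://cloud.engineyard.com")
      @keys = {:aws_secret_id => "AKIA...", :aws_secret_key => "..."}
    end
  end

  # ApiProbe.new("my_app_production").get_envs.keys
  # => names of the environments visible to these credentials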
@@ -0,0 +1,305 @@
+ require 'rubygems'
+ require 'aws/s3'
+ require 'date'
+ require 'digest'
+ require 'fileutils'
+ require 'thread' # Mutex on Ruby 1.8
+ require File.join(File.dirname(__FILE__), 'bucket_minder')
+ require 'rest_client'
+ require 'json'
+ require 'open-uri'
+ require 'zlib'
+ require 'stringio'
+ require 'ey-api'
+
+ $stdout.sync = true
+
+ module EY
+
+   class ChefRecipes
+
+     include EyApi
+
+     def initialize(opts={})
+       raise ArgumentError.new("must provide environment name") unless opts[:env] or opts[:command] == :get_envs
+       @opts = opts
+       @eyenv = opts[:eyenv] || 'production'
+       @env = opts[:env]
+       @recipeloc = opts[:recipeloc] || "/etc/chef-custom/recipes"
+       @rest = RestClient::Resource.new(opts[:api])
+       @keys = {:aws_secret_id => @opts[:aws_secret_id], :aws_secret_key => @opts[:aws_secret_key]}
+       if @opts[:identifier]
+         @opts[:instance_id] = @opts[:identifier]
+       end
+       @lock = Mutex.new
+       @bucket = BucketMinder.new(@opts)
+       unless get_envs[@env] or opts[:command] == :get_envs
+         puts %Q{#{@env} is not a valid environment name; your available environments are:\n#{get_envs.keys.join("\n")}}
+         exit 1
+       end
+     end
+
+     def converge
+       return if @bucket.empty? and !@opts[:main]
+       require 'chef'
+       require 'chef/client'
+       FileUtils.mkdir_p @recipeloc
+       logtype = nil
+       build_problem = false
+       with_logging do
+         begin
+           instance_id = open("http://169.254.169.254/latest/meta-data/instance-id").gets
+           defaults = {
+             :log_level => :info,
+             :solo => true,
+             :cookbook_path => "#{@recipeloc}/cookbooks",
+             :file_store_path => "#{@recipeloc}/",
+             :file_cache_path => "#{@recipeloc}/",
+             :node_name => instance_id
+           }
+           Chef::Config.configure { |c| c.merge!(defaults) }
+           Chef::Log::Formatter.show_time = false
+           Chef::Log.level(Chef::Config[:log_level])
+           Chef::Log.info "[Begin] Chef converge of Amazon instance: #{instance_id}"
+
+           Chef::Log.info "Removing cookbooks"
+           if File.exist?("#{@recipeloc}/cookbooks")
+             FileUtils.rm_rf("#{@recipeloc}/cookbooks")
+           end
+
+           if @opts[:main]
+             logtype = "main.logs"
+             Chef::Log.info "Installing main recipes"
+             install_main_recipes
+           else
+             logtype = "logs"
+             Chef::Log.info "Installing custom recipes"
+             install_recipes
+           end
+           Chef::Log.info "Getting instance's json dna"
+           json = get_json(instance_id)
+           json["quick"] = true if @opts[:quick]
+           json["removed_applications"] = self.class.find_removed_applications(json)
+           Chef::Log.info "Writing json dna to file system"
+           File.open("/etc/chef/dna.json", 'w') {|f| f.puts JSON.pretty_generate(json)}
+           Chef::Log.info "Running Chef solo"
+           c = Chef::Client.new
+           c.json_attribs = json
+           c.run_solo
+           Chef::Log.info "Running telinit"
+           `telinit q`
+           Chef::Log.info "[End] Chef converge of Amazon instance: #{instance_id}"
+         rescue Object => e # catch everything so the log upload below still runs
+           build_problem = true
+           Chef::Log.error(describe_error(e))
+         end
+       end
+
+       begin
+         @bucket = BucketMinder.new(@opts.merge(:type => logtype, :extension => 'gz', :instance_id => nil))
+         upload_logs("/var/log/chef.log") # the file written by with_logging
+         @bucket.cleanup
+         exit(1) if build_problem
+       rescue AWS::S3::InternalError
+         sleep 2
+         retry
+       end
+     end
+
+     def install_main_recipes
+       unless @opts[:main_recipes]
+         puts "you must specify :main_recipes: in your ey-cloud.yml"
+         exit 1
+       end
+       recipes_path = Chef::Config[:cookbook_path].gsub(/cookbooks/, '')
+       FileUtils.mkdir_p recipes_path
+       path = File.join(recipes_path, 'recipes.tgz')
+       File.open(path, 'wb') do |f|
+         f.write open(@opts[:main_recipes]).read
+       end
+       system("cd #{recipes_path} && tar xzf #{path}")
+       FileUtils.rm path
+     end
+
+     def deploy
+       unless File.exist?("cookbooks")
+         puts "you must run this command from the root of your chef recipe git repo"
+         exit 1
+       end
+       env = get_envs[@env]
+       unless env
+         puts "No matching environments"
+         exit 1
+       end
+       unless env['instances'] > 0
+         puts "There are no running instances for ENV: #{@env}"
+         exit 1
+       end
+       if upload_recipes
+         puts "deploying recipes..."
+         if call_api("deploy_recipes", :id => env['id'])[0] == 'working'
+           # tail each instance's converge log in its own thread
+           threads = []
+           env['instance_ids'].each do |instance_id|
+             threads << Thread.new { wait_for_logs('logs', instance_id) }
+           end
+           threads.each {|t| t.join}
+         else
+           puts "deploying recipes failed..."
+         end
+       else
+         puts "Failed to deploy: #{@env}"
+       end
+     end
+
+     def rollback
+       @bucket.rollback
+       env = get_envs[@env]
+       if env
+         puts "rolling back recipes..."
+         call_api("deploy_recipes", :id => env['id'])
+         wait_for_logs('logs')
+       else
+         puts "No matching environments for #{@env}"
+       end
+     end
+
+     def wait_for_logs(logtype, instance_id=nil)
+       logbucket = BucketMinder.new(@opts.merge(:type => logtype, :extension => 'gz', :instance_id => instance_id))
+       newest = @lock.synchronize { logbucket.list.last }
+       count = 0
+       # poll until a log newer than the pre-deploy snapshot appears
+       until @lock.synchronize { newest != logbucket.list.last }
+         print "."
+         sleep 3
+         count += 1
+         if count > 600
+           puts "timed out waiting for deployed logs"
+           exit 1
+         end
+       end
+       puts
+       puts "retrieving logs..."
+       puts @lock.synchronize { display_logs(logbucket.list.last) }
+     end
+
+     def deploy_main
+       env = get_envs[@env]
+       if env
+         unless env['instances'] > 0
+           puts "There are no running instances for ENV: #{@env}"
+           exit 1
+         end
+         puts "deploying main EY recipes..."
+         if call_api("deploy_main_recipes", :id => env['id'])[0] == 'working'
+           threads = []
+           env['instance_ids'].each do |instance_id|
+             threads << Thread.new { wait_for_logs('main.logs', instance_id) }
+           end
+           threads.each {|t| t.join}
+         else
+           puts "deploying main recipes failed..."
+         end
+       else
+         puts "No matching environments"
+       end
+     end
+
+     def display_logs(obj)
+       if obj
+         Zlib::GzipReader.new(StringIO.new(obj.value, 'rb')).read
+       else
+         "no logs..."
+       end
+     end
+
+     def view_logs
+       env = get_envs[@env]
+       env['instance_ids'].each do |instance_id|
+         logtype = "#{@opts[:main] ? 'main.' : ''}logs"
+         logbucket = BucketMinder.new(@opts.merge(:type => logtype, :extension => 'gz', :instance_id => instance_id))
+         puts "Logs for: #{instance_id}"
+         puts display_logs(logbucket.list.last)
+       end
+     end
+
+     def upload_recipes
+       file = "recipes.#{rand(1000)}.tmp.tgz"
+       tarcmd = "git archive --format=tar HEAD | gzip > #{file}"
+       if system(tarcmd)
+         @bucket.upload_object(file)
+         @bucket.cleanup
+         true
+       else
+         puts "Unable to tar up recipes for #{@opts[:env]}"
+         false
+       end
+     end
+
+     def upload_logs(file)
+       name = "#{file}.#{rand(1000)}.tgz"
+       tarcmd = "cat #{file} | gzip > #{name}"
+       if system(tarcmd)
+         @bucket.upload_object(name)
+         @bucket.cleanup
+         true
+       else
+         puts "Unable to gzip log files for #{@opts[:env]}"
+         false
+       end
+     end
+
+     def cleanup
+       @bucket.cleanup
+     end
+
+     def list(*args)
+       @bucket.list(*args)
+     end
+
+     def download(*args)
+       @bucket.download(*args)
+     end
+
+     def install_recipes
+       file = get_current
+       Dir.chdir(@recipeloc) {
+         system("tar xzf #{file}")
+       }
+       FileUtils.rm file
+     end
+
+     def get_current
+       @bucket.get_current
+     end
+
+     def clear_bucket
+       @bucket.clear_bucket
+     end
+
+     def describe_error(e)
+       "#{e.class.name}: #{e.message}\n #{e.backtrace.join("\n ")}"
+     end
+
+     def with_logging(&block)
+       File.open("/var/log/chef.log", 'w') do |file|
+         Chef::Log.init(file)
+         block.call
+       end
+     end
+
+     def self.find_removed_applications(new_dna, file="/etc/chef/dna.json")
+       json = if file.respond_to?(:string)
+         file.string
+       elsif File.exists?(file)
+         IO.read(file)
+       else
+         raise "File Not Found"
+       end
+
+       old_dna = JSON.parse(json)
+       old_dna['applications'].keys - new_dna['applications'].keys
+     rescue
+       []
+     end
+
+   end
+
+ end
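
Usage note (not part of the diff): the deploy flow runs from a developer checkout: upload_recipes tars the git HEAD into S3, deploy_recipes is triggered through the API, and wait_for_logs polls the log bucket until each instance's converge log appears. A hypothetical driver with option names taken from ChefRecipes#initialize but placeholder values, and assuming the gem's entry point loads EY::ChefRecipes:

  require 'rubygems'
  require 'ey-flex' # assumed entry point that loads EY::ChefRecipes

  recipes = EY::ChefRecipes.new(
    :env            => "my_app_production",
    :api            => "https://cloud.engineyard.com",
    :aws_secret_id  => ENV["AWS_SECRET_ID"],
    :aws_secret_key => ENV["AWS_SECRET_KEY"]
  )
  recipes.deploy    # tar cookbooks from git HEAD, upload, converge, tail logs
  recipes.view_logs # re-print the latest converge log for each instance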
@@ -0,0 +1,127 @@
+ require 'aws/s3'
+ require 'date'
+ require 'digest'
+ require 'net/http'
+ require 'fileutils'
+
+ module AWS::S3
+   class S3Object
+     # order S3 objects by their last-modified timestamp
+     def <=>(other)
+       DateTime.parse(self.about['last-modified']) <=> DateTime.parse(other.about['last-modified'])
+     end
+   end
+ end
+
+ module EyBackup
+
+   class MysqlBackup
+     def initialize(opts={})
+       AWS::S3::Base.establish_connection!(
+         :access_key_id => opts[:aws_secret_id],
+         :secret_access_key => opts[:aws_secret_key]
+       )
+       @dbuser = opts[:dbuser]
+       @dbpass = opts[:dbpass]
+       @databases = opts[:databases]
+       @keep = opts[:keep]
+       @bucket = "ey-backup-#{Digest::SHA1.hexdigest(opts[:aws_secret_id])[0..11]}"
+       @tmpname = "#{Time.now.strftime("%Y-%m-%dT%H:%M:%S").gsub(/:/, '-')}.sql.gz"
+       @env = opts[:env]
+       FileUtils.mkdir_p '/mnt/backups'
+       FileUtils.mkdir_p '/mnt/tmp'
+       begin
+         AWS::S3::Bucket.create @bucket
+       rescue AWS::S3::BucketAlreadyExists
+         # the backup bucket is already there; nothing to do
+       end
+
+       FileUtils.mkdir_p backup_dir
+     end
+
+     def new_backup
+       @databases.each do |db|
+         backup_database(db)
+       end
+     end
+
+     def backup_database(database)
+       full_path_to_backup = "#{backup_dir}/#{database}.#{@tmpname}"
+       mysqlcmd = "mysqldump -u #{@dbuser} -p'#{@dbpass}' #{database} | gzip - > #{full_path_to_backup}"
+       if system(mysqlcmd)
+         AWS::S3::S3Object.store(
+           "/#{@env}.#{database}/#{database}.#{@tmpname}",
+           open(full_path_to_backup),
+           @bucket,
+           :access => :private
+         )
+         FileUtils.rm full_path_to_backup
+         puts "successful backup: #{database}.#{@tmpname}"
+       else
+         raise "Unable to dump database: #{database}"
+       end
+     end
+
+     def download(index)
+       # index is "<position>:<database>", as printed by list
+       idx, db = index.split(":")
+       obj = list(db)[idx.to_i]
+       puts "downloading: #{normalize_name(obj)}"
+       File.open(normalize_name(obj), 'wb') do |f|
+         print "."
+         obj.value {|chunk| f.write chunk }
+       end
+       puts
+       puts "finished"
+       normalize_name(obj)
+     end
+
+     def restore(index)
+       name = download(index)
+       db = name.split('.').first
+       cmd = "gunzip -c #{name} | mysql -u #{@dbuser} -p'#{@dbpass}' #{db}"
+       if system(cmd)
+         puts "successfully restored backup: #{name}"
+       else
+         puts "Unable to restore backup: #{name}"
+       end
+     end
+
+     def cleanup
+       # keep the @keep most recent backups per database; guard against a nil
+       # slice when fewer backups than that exist
+       (list('all', false)[0...-(@keep * @databases.size)] || []).each do |o|
+         puts "deleting: #{o.key}"
+         o.delete
+       end
+     end
+
+     def normalize_name(obj)
+       obj.key.gsub(/^.*?\//, '')
+     end
+
+     def find_obj(name)
+       AWS::S3::S3Object.find name, @bucket
+     end
+
+     def list(database='all', printer = false)
+       puts "listing #{database} database" if printer
+       backups = []
+       if database == 'all'
+         @databases.each do |db|
+           backups << AWS::S3::Bucket.objects(@bucket, :prefix => "#{@env}.#{db}")
+         end
+         backups = backups.flatten.sort
+       else
+         backups = AWS::S3::Bucket.objects(@bucket, :prefix => "#{@env}.#{database}").sort
+       end
+       if printer
+         backups.each_with_index do |b, i|
+           puts "#{i}:#{database} #{normalize_name(b)}"
+         end
+       end
+       backups
+     end
+
+     protected
+
+     def backup_dir
+       "/mnt/tmp"
+     end
+
+   end
+ end
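
Usage note (not part of the diff): MysqlBackup stores each dump under <env>.<database>/<database>.<timestamp>.sql.gz, and download/restore address a backup as "<index>:<database>" using the index printed by list. A hypothetical session with placeholder credentials and database names, again assuming the gem's entry point loads EyBackup::MysqlBackup:

  require 'rubygems'
  require 'ey-flex' # assumed entry point that loads EyBackup::MysqlBackup

  backup = EyBackup::MysqlBackup.new(
    :aws_secret_id  => ENV["AWS_SECRET_ID"],
    :aws_secret_key => ENV["AWS_SECRET_KEY"],
    :dbuser         => "deploy",
    :dbpass         => "secret",
    :databases      => ["myapp_production"],
    :keep           => 10, # backups retained per database by cleanup
    :env            => "my_app_production"
  )
  backup.new_backup                     # mysqldump | gzip each database, push to S3
  backup.list("myapp_production", true) # print "<index>:myapp_production <file>" lines
  backup.restore("0:myapp_production")  # fetch backup 0 and pipe it into mysql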