cknife 0.1.8 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/VERSION CHANGED
@@ -1 +1 @@
- 0.1.8
+ 1.1.0
@@ -1,505 +1,5 @@
  #!/usr/bin/env ruby

- require 'fog'
- require 'thor'
- require 'active_support/all'
- require 'zlib'
- require 'digest/md5'
- require 'pathname'
+ require 'cknife/cknife_aws'

- class Aws < Thor
-
- FILE_BUFFER_SIZE = 10.megabytes
- LOCAL_MOD_KEY = "x-amz-meta-mtime"
- EPSILON = 1.second
-
- no_tasks do
-
- def config
- return @config if @config
-
- @config = {
- :key => ENV["KEY"] || ENV['AMAZON_ACCESS_KEY_ID'],
- :secret => ENV["SECRET"] || ENV['AMAZON_SECRET_ACCESS_KEY']
- }
-
- config_file = nil
- Pathname.new(Dir.getwd).tap do |here|
- config_file = [["cknife.yml"], ["tmp", "cknife.yml"]].map { |args|
- here.join(*args)
- }.select { |path|
- File.exists?(path)
- }.first
- end
-
- if config_file
- begin
- @config.merge!(YAML.load(config_file.read).symbolize_keys!)
- rescue
- say ("Found, but could not parse config: #{config_file}")
- end
- end
-
- @config
- end
-
- def fog_opts
- opts = {
- :provider => 'AWS',
- :aws_access_key_id => config[:key],
- :aws_secret_access_key => config[:secret]
- }
- opts.merge!({ :region => options[:region] }) if !options[:region].blank?
- opts
- end
-
- def fog_storage
- return @storage if @storage
- @storage = Fog::Storage.new(fog_opts)
- begin
- @storage.directories.count # test login
- rescue Excon::Errors::Forbidden => e
- say("Received Forbidden error while accessing account info. Is your key/secret correct?")
- raise SystemExit
- end
- @storage
- end
-
- def fog_compute
- @compute ||= Fog::Compute.new(fog_opts)
- end
-
- def fog_cdn
- @cdn ||= Fog::CDN.new(fog_opts)
- end
-
- def show_buckets
- fog_storage.directories.sort { |a,b| a.key <=> b.key }.each { |b| puts "#{b.key}" }
- end
-
- def show_servers
- fog_compute.servers.sort { |a,b| a.key_name <=> b.key_name }.each do |s|
- puts "#{s.tags['Name']} (state: #{s.state}): id=#{s.id} keyname=#{s.key_name} dns=#{s.dns_name} flavor=#{s.flavor_id}"
- end
- end
-
- def show_cdns
- puts fog_cdn.get_distribution_list.body['DistributionSummary'].to_yaml
- end
-
- def with_bucket(bucket_name)
- d = fog_storage.directories.select { |d| d.key == bucket_name }.first
- if d.nil?
- say ("Could not find bucket with name #{bucket_name}")
- return
- end
-
- say ("Found bucket named #{bucket_name}")
- yield d
- end
-
- def fog_key_for(target_root, file_path)
- target_root_path_length ||= target_root.to_s.length + "/".length
- relative = file_path[ target_root_path_length, file_path.length]
- relative
- end
-
- def s3_download(s3_file)
- dir_path = Pathname.new(s3_file.key).dirname
- dir_path.mkpath
- File.open(s3_file.key, "w") do |f|
- f.write s3_file.body
- end
- end
-
-
- def content_hash(file)
- md5 = Digest::MD5.new
-
- while !file.eof?
- md5.update(file.read(FILE_BUFFER_SIZE))
- end
-
- md5.hexdigest
- end
-
- end
-
- desc "list_servers", "Show all servers"
129
- def list_servers
130
- show_servers
131
- end
132
-
133
- desc "start_server [SERVER_ID]", "Start a given EC2 server"
134
- def start_server(server_id)
135
- s = fog_compute.servers.select { |s| s.id == server_id}.first
136
- if s
137
- say("found server. starting/resuming. #{s.id}")
138
- s.start
139
- show_servers
140
- else
141
- say("no server with that id found. nothing done.")
142
- end
143
- end
144
-
145
- desc "stop_server [SERVER_ID]", "Stop a given EC2 server (does not terminate it)"
146
- def stop_server(server_id)
147
- s = fog_compute.servers.select { |s| s.id == server_id}.first
148
- if s
149
- say("found server. stopping. #{s.id}")
150
- s.stop
151
- else
152
- say("no server with that id found. nothing done.")
153
- end
154
- end
155
-
156
- desc "list_cloudfront", "List cloudfront distributions (CDNs)"
157
- def list_cloudfront
158
- show_cdns
159
- end
160
-
161
- desc "create_cloudfront [BUCKET_NAME]", "Create a cloudfront distribution (a CDN)"
162
- def create_cloudfront(bucket_id)
163
- fog_cdn.post_distribution({
164
- 'S3Origin' => {
165
- 'DNSName' => "#{bucket_id}.s3.amazonaws.com"
166
- },
167
- 'Enabled' => true
168
- })
169
-
170
- show_cdns
171
- end
172
-
173
- desc "list", "Show all buckets"
174
- method_options :region => "us-east-1"
175
- def list
176
- show_buckets
177
- end
178
-
179
- desc "afew [BUCKET_NAME]", "Show first 5 files in bucket"
180
- method_options :count => "5"
181
- method_options :glob => "*"
182
- def afew(bucket_name)
183
- d = fog_storage.directories.select { |d| d.key == bucket_name }.first
184
- if d.nil?
185
- say ("Found no bucket by name #{bucket_name}")
186
- return
187
- end
188
-
189
- found = []
190
-
191
- i = 0
192
- d.files.each do |f|
193
- if File.fnmatch(options[:glob], f.key)
194
- found.push(d.files.head(f.key))
195
- break if i >= options[:count].to_i
196
- i += 1
197
- end
198
- end
199
-
200
- unit_to_mult = {
201
- 'B' => 1,
202
- 'K' => 2**10,
203
- 'M' => 2**20,
204
- 'G' => 2**30
205
- }
206
-
207
- found.map { |f|
208
- matching = unit_to_mult.keys.select { |k|
209
- f.content_length >= unit_to_mult[k]
210
- }.last
211
-
212
- [f.key,
213
- "#{f.content_length == 0 ? 0 : (f.content_length.to_f / unit_to_mult[matching]).round(2)}#{matching}",
214
- f.content_type,
215
- f.last_modified
216
- ]
217
- }.tap do |tabular|
218
- print_table(tabular, :ident => 2)
219
- end
220
-
221
- end
222
-
223
- desc "download [BUCKET_NAME]", "Download all files in a bucket to CWD. Or one file."
224
- method_options :region => "us-east-1"
225
- method_options :one => nil
226
- def download(bucket_name)
227
- with_bucket bucket_name do |d|
228
- if options[:one].nil?
229
- if yes?("Are you sure you want to download all files into the CWD?", :red)
230
- d.files.each do |s3_file|
231
- say("Creating path for and downloading #{s3_file.key}")
232
- s3_download(s3_file)
233
- end
234
- else
235
- say("No action taken.")
236
- end
237
- else
238
- s3_file = d.files.get(options[:one])
239
- if !s3_file.nil?
240
- s3_download(s3_file)
241
- else
242
- say("Could not find #{options[:one]}. No action taken.")
243
- end
244
- end
245
- end
246
- end
247
-
248
- desc "upsync [BUCKET_NAME] [DIRECTORY]", "Push local files matching glob PATTERN into bucket. Ignore unchanged files."
249
- method_options :public => false
250
- method_options :region => "us-east-1"
251
- method_options :noprompt => nil
252
- method_options :glob => "**/*"
253
- method_options :backups_retain => false
254
- method_options :days_retain => 30
255
- method_options :months_retain => 3
256
- method_options :weeks_retain => 5
257
- method_options :dry_run => false
258
- def upsync(bucket_name, directory)
259
-
260
- say("This is a dry run.") if options[:dry_run]
261
-
262
- if !File.exists?(directory) || !File.directory?(directory)
263
- say("'#{directory} does not exist or is not a directory.")
264
- return
265
- end
266
-
267
- target_root = Pathname.new(directory)
268
-
269
- files = Dir.glob(target_root.join(options[:glob])).select { |f| !File.directory?(f) }.map(&:to_s)
270
- if !options[:backups_retain] && files.count == 0
271
- say("No files to upload and no backups retain requested.")
272
- return
273
- end
274
-
275
- say("Found #{files.count} candidate file upload(s).")
276
-
277
- spn = dn = sn = un = cn = 0
278
- with_bucket bucket_name do |d|
279
-
280
- # having a brain fart and cant get this to simplify
281
- go = false
282
- if options[:noprompt] != nil
283
- go = true
284
- else
285
- go = yes?("Proceed?", :red)
286
- end
287
-
288
- if go
289
- time_marks = []
290
- immediate_successors = {}
291
- if options[:backups_retain]
292
- # inclusive lower bound, exclusive upper bound
293
- time_marks = []
294
- Time.now.beginning_of_day.tap do |start|
295
- options[:days_retain].times do |i|
296
- time_marks.push(start - i.days)
297
- end
298
- end
299
-
300
- Time.now.beginning_of_week.tap do |start|
301
- options[:weeks_retain].times do |i|
302
- time_marks.push(start - i.weeks)
303
- end
304
- end
305
-
306
- Time.now.beginning_of_month.tap do |start|
307
- options[:months_retain].times do |i|
308
- time_marks.push(start - i.months)
309
- end
310
- end
311
-
312
- time_marks.each do |tm|
313
- files.each do |to_upload|
314
- File.open(to_upload) do |localfile|
315
- if localfile.mtime >= tm && (immediate_successors[tm].nil? || localfile.mtime < immediate_successors[tm][:last_modified])
316
- immediate_successors[tm] = { :local_path => to_upload, :last_modified => localfile.mtime }
317
- end
318
- end
319
- end
320
- end
321
- end
322
-
323
- # don't pointlessly upload large files if we already know we're going to delete them!
- if options[:backups_retain]
- immediate_successors.values.map { |h| h[:local_path] }.tap do |kept_files|
- before_reject = files.count # blah...lame
- files.reject! { |to_upload| !kept_files.include?(to_upload) }
- sn += before_reject - files.count
-
- say("Found #{files.count} file(s) that meet backups retention criteria for upload. Comparing against bucket...")
-
- end
- end
-
- files.each do |to_upload|
- say("#{to_upload} (no output if skipped)...")
- k = fog_key_for(target_root, to_upload)
-
- existing_head = d.files.head(k)
-
- time_mismatch = false
- content_hash_mistmatched = false
- File.open(to_upload) do |localfile|
- time_mismatch = !existing_head.nil? && (existing_head.metadata[LOCAL_MOD_KEY].nil? || (Time.parse(existing_head.metadata[LOCAL_MOD_KEY]) - localfile.mtime).abs > EPSILON)
- if time_mismatch
- content_hash_mistmatched = existing_head.etag != content_hash(localfile)
- end
- end
-
- if existing_head && time_mismatch && content_hash_mistmatched
- if !options[:dry_run]
- File.open(to_upload) do |localfile|
- existing_head.metadata = { LOCAL_MOD_KEY => localfile.mtime.to_s }
- existing_head.body = localfile
- existing_head.multipart_chunk_size = FILE_BUFFER_SIZE # creates multipart_save
- existing_head.save
- end
- end
- say("updated.")
- un += 1
- elsif existing_head && time_mismatch
- if !options[:dry_run]
- File.open(to_upload) do |localfile|
- existing_head.metadata = { LOCAL_MOD_KEY => localfile.mtime.to_s }
- existing_head.save
- end
- end
- say("updated.")
- un += 1
- elsif existing_head.nil?
- if !options[:dry_run]
- File.open(to_upload) do |localfile|
- file = d.files.create(
- :key => k,
- :public => options[:public],
- :body => ""
- )
- file.metadata = { LOCAL_MOD_KEY => localfile.mtime.to_s }
- file.multipart_chunk_size = FILE_BUFFER_SIZE # creates multipart_save
- file.body = localfile
- file.save
- end
- end
- say("created.")
- cn += 1
- else
- sn += 1
- # skipped
- end
- end
-
-
- if options[:backups_retain]
-
- # This array of hashes is computed because we need to do
- # nested for loops of M*N complexity, where M=time_marks
- # and N=files. We also need to do an remote get call to
- # fetch the metadata of all N remote files (d.files.each
- # will not do this). so, for performance sanity, we cache
- # all the meta data for all the N files.
-
- file_keys_modtimes = []
- d.files.each { |f|
- if File.fnmatch(options[:glob], f.key)
- existing_head = d.files.head(f.key)
- md = existing_head.metadata
- file_keys_modtimes.push({
- :key => f.key,
- :last_modified => md[LOCAL_MOD_KEY] ? Time.parse(md[LOCAL_MOD_KEY]) : f.last_modified,
- :existing_head => existing_head
- })
- end
- }
-
- say("#{file_keys_modtimes.length} file(s) found to consider for remote retention or remote deletion.")
-
- # this generates as many 'kept files' as there are time marks...which seems wrong.
- immediate_successors = {}
- time_marks.each do |tm|
- file_keys_modtimes.each do |fkm|
- if fkm[:last_modified] >= tm && (immediate_successors[tm].nil? || fkm[:last_modified] < immediate_successors[tm][:last_modified])
- immediate_successors[tm] = fkm
- end
- end
- end
-
- immediate_successors.values.map { |v| v[:key] }.tap do |kept_keys|
- file_keys_modtimes.each do |fkm|
- if kept_keys.include?(fkm[:key])
- say("Remote retained #{fkm[:key]}.")
- spn += 1
- else
- fkm[:existing_head].destroy if !options[:dry_run]
- say("Remote deleted #{fkm[:key]}.")
- dn += 1
- end
- end
- end
- end
- else
- say ("No action taken.")
- end
- end
- say("Done. #{cn} created. #{un} updated. #{sn} local skipped. #{dn} deleted remotely. #{spn} retained remotely.")
- end
-
- desc "delete [BUCKET_NAME]", "Destroy a bucket"
448
- method_options :region => "us-east-1"
449
- def delete(bucket_name)
450
- d = fog_storage.directories.select { |d| d.key == bucket_name }.first
451
-
452
- if d.nil?
453
- say ("Found no bucket by name #{bucket_name}")
454
- return
455
- end
456
-
457
- if d.files.length > 0
458
- say "Bucket has #{d.files.length} files. Please empty before destroying."
459
- return
460
- end
461
-
462
- if yes?("Are you sure you want to delete this bucket #{d.key}?", :red)
463
- d.destroy
464
- say "Destroyed bucket named #{bucket_name}."
465
- show_buckets
466
- else
467
- say "No action taken."
468
- end
469
-
470
- end
471
-
472
- desc "create [BUCKET_NAME]", "Create a bucket"
473
- method_options :region => "us-east-1"
474
- def create(bucket_name = nil)
475
- if !bucket_name
476
- puts "No bucket name given."
477
- return
478
- end
479
-
480
- fog_storage.directories.create(
481
- :key => bucket_name,
482
- :location => options[:region]
483
- )
484
-
485
- puts "Created bucket #{bucket_name}."
486
- show_buckets
487
- end
488
-
489
- desc "show [BUCKET_NAME]", "Show info about bucket"
490
- method_options :region => "us-east-1"
491
- def show(bucket_name = nil)
492
- if !bucket_name
493
- puts "No bucket name given."
494
- return
495
- end
496
-
497
- with_bucket(bucket_name) do |d|
498
- say "#{d}: "
499
- say d.location
500
- end
501
- end
502
-
503
- end
504
-
505
- Aws.start
5
+ CKnifeAws.start
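
In short, 1.1.0 moves the Thor CLI out of the executable and into the gem's library code: the script now only requires cknife/cknife_aws and calls CKnifeAws.start. A minimal sketch of the layout implied by those two added lines, assuming the removed Aws class body was relocated into lib/cknife/cknife_aws.rb under the new constant name (that path and its contents are inferred from the require line, not shown in this diff):

# lib/cknife/cknife_aws.rb -- hypothetical sketch, not the published source
require 'fog'
require 'thor'

class CKnifeAws < Thor
  # The task definitions removed above (list, afew, download, upsync, delete,
  # create, show, ...) would live here, unchanged apart from the class name.
  desc "list", "Show all buckets"
  method_options :region => "us-east-1"
  def list
    # fog-backed implementation as in the removed Aws#list
  end
end

With that file on the load path, the slimmed-down executable shown in the "+" lines is all that remains:

#!/usr/bin/env ruby
require 'cknife/cknife_aws'
CKnifeAws.start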