cheftacular 2.0.0
- checksums.yaml +7 -0
- data/bin/cft +4 -0
- data/bin/cftclr +4 -0
- data/bin/cheftacular +4 -0
- data/bin/client-list +4 -0
- data/lib/cheftacular/README.md +416 -0
- data/lib/cheftacular/actions/check.rb +32 -0
- data/lib/cheftacular/actions/console.rb +62 -0
- data/lib/cheftacular/actions/database.rb +13 -0
- data/lib/cheftacular/actions/db_console.rb +67 -0
- data/lib/cheftacular/actions/deploy.rb +40 -0
- data/lib/cheftacular/actions/log.rb +47 -0
- data/lib/cheftacular/actions/migrate.rb +57 -0
- data/lib/cheftacular/actions/run.rb +64 -0
- data/lib/cheftacular/actions/scale.rb +94 -0
- data/lib/cheftacular/actions/tail.rb +55 -0
- data/lib/cheftacular/actions.rb +14 -0
- data/lib/cheftacular/auditor.rb +46 -0
- data/lib/cheftacular/chef/data_bag.rb +104 -0
- data/lib/cheftacular/cheftacular.rb +55 -0
- data/lib/cheftacular/decryptors.rb +45 -0
- data/lib/cheftacular/encryptors.rb +48 -0
- data/lib/cheftacular/getters.rb +153 -0
- data/lib/cheftacular/helpers.rb +296 -0
- data/lib/cheftacular/initializers.rb +451 -0
- data/lib/cheftacular/parsers.rb +199 -0
- data/lib/cheftacular/remote_helpers.rb +30 -0
- data/lib/cheftacular/stateless_action.rb +16 -0
- data/lib/cheftacular/stateless_actions/add_ssh_key_to_bag.rb +44 -0
- data/lib/cheftacular/stateless_actions/arguments.rb +68 -0
- data/lib/cheftacular/stateless_actions/backups.rb +116 -0
- data/lib/cheftacular/stateless_actions/bootstrappers/centos_bootstrap.rb +7 -0
- data/lib/cheftacular/stateless_actions/bootstrappers/coreos_bootstrap.rb +7 -0
- data/lib/cheftacular/stateless_actions/bootstrappers/fedora_bootstrap.rb +7 -0
- data/lib/cheftacular/stateless_actions/bootstrappers/redhat_bootstrap.rb +7 -0
- data/lib/cheftacular/stateless_actions/bootstrappers/ubuntu_bootstrap.rb +102 -0
- data/lib/cheftacular/stateless_actions/bootstrappers/vyatta_bootstrap.rb +7 -0
- data/lib/cheftacular/stateless_actions/chef_bootstrap.rb +40 -0
- data/lib/cheftacular/stateless_actions/chef_environment.rb +21 -0
- data/lib/cheftacular/stateless_actions/clean_cookbooks.rb +104 -0
- data/lib/cheftacular/stateless_actions/clean_sensu_plugins.rb +19 -0
- data/lib/cheftacular/stateless_actions/clean_server_passwords.rb +14 -0
- data/lib/cheftacular/stateless_actions/cleanup_log_files.rb +14 -0
- data/lib/cheftacular/stateless_actions/client_list.rb +89 -0
- data/lib/cheftacular/stateless_actions/cloud.rb +107 -0
- data/lib/cheftacular/stateless_actions/cloud_bootstrap.rb +109 -0
- data/lib/cheftacular/stateless_actions/compile_audit_log.rb +60 -0
- data/lib/cheftacular/stateless_actions/compile_readme.rb +41 -0
- data/lib/cheftacular/stateless_actions/create_git_key.rb +67 -0
- data/lib/cheftacular/stateless_actions/disk_report.rb +75 -0
- data/lib/cheftacular/stateless_actions/environment.rb +100 -0
- data/lib/cheftacular/stateless_actions/fetch_file.rb +24 -0
- data/lib/cheftacular/stateless_actions/fix_known_hosts.rb +70 -0
- data/lib/cheftacular/stateless_actions/full_bootstrap.rb +30 -0
- data/lib/cheftacular/stateless_actions/get_active_ssh_connections.rb +18 -0
- data/lib/cheftacular/stateless_actions/get_haproxy_log.rb +55 -0
- data/lib/cheftacular/stateless_actions/get_log_from_bag.rb +38 -0
- data/lib/cheftacular/stateless_actions/get_pg_pass.rb +61 -0
- data/lib/cheftacular/stateless_actions/help.rb +71 -0
- data/lib/cheftacular/stateless_actions/initialize_data_bag_contents.rb +220 -0
- data/lib/cheftacular/stateless_actions/knife_upload.rb +23 -0
- data/lib/cheftacular/stateless_actions/pass.rb +49 -0
- data/lib/cheftacular/stateless_actions/reinitialize.rb +46 -0
- data/lib/cheftacular/stateless_actions/remove_client.rb +81 -0
- data/lib/cheftacular/stateless_actions/replication_status.rb +103 -0
- data/lib/cheftacular/stateless_actions/restart_swap.rb +55 -0
- data/lib/cheftacular/stateless_actions/rvm.rb +14 -0
- data/lib/cheftacular/stateless_actions/server_update.rb +99 -0
- data/lib/cheftacular/stateless_actions/service.rb +14 -0
- data/lib/cheftacular/stateless_actions/test_env.rb +82 -0
- data/lib/cheftacular/stateless_actions/update_split_branches.rb +64 -0
- data/lib/cheftacular/stateless_actions/update_tld.rb +62 -0
- data/lib/cheftacular/stateless_actions/upload_nodes.rb +120 -0
- data/lib/cheftacular/stateless_actions/upload_roles.rb +24 -0
- data/lib/cheftacular/version.rb +5 -0
- data/lib/cheftacular.rb +4 -0
- data/lib/cloud_interactor/authentication.rb +56 -0
- data/lib/cloud_interactor/cloud_interactor.rb +23 -0
- data/lib/cloud_interactor/domain/create.rb +17 -0
- data/lib/cloud_interactor/domain/create_record.rb +27 -0
- data/lib/cloud_interactor/domain/destroy.rb +17 -0
- data/lib/cloud_interactor/domain/destroy_record.rb +23 -0
- data/lib/cloud_interactor/domain/list.rb +9 -0
- data/lib/cloud_interactor/domain/list_records.rb +22 -0
- data/lib/cloud_interactor/domain/read.rb +23 -0
- data/lib/cloud_interactor/domain/read_record.rb +27 -0
- data/lib/cloud_interactor/domain/update.rb +18 -0
- data/lib/cloud_interactor/domain/update_record.rb +42 -0
- data/lib/cloud_interactor/domain.rb +18 -0
- data/lib/cloud_interactor/flavor.rb +27 -0
- data/lib/cloud_interactor/helpers.rb +70 -0
- data/lib/cloud_interactor/image.rb +27 -0
- data/lib/cloud_interactor/parser.rb +37 -0
- data/lib/cloud_interactor/server/attach_volume.rb +33 -0
- data/lib/cloud_interactor/server/create.rb +39 -0
- data/lib/cloud_interactor/server/destroy.rb +11 -0
- data/lib/cloud_interactor/server/detach_volume.rb +21 -0
- data/lib/cloud_interactor/server/list.rb +7 -0
- data/lib/cloud_interactor/server/list_volumes.rb +25 -0
- data/lib/cloud_interactor/server/poll.rb +22 -0
- data/lib/cloud_interactor/server/read.rb +9 -0
- data/lib/cloud_interactor/server/read_volume.rb +24 -0
- data/lib/cloud_interactor/server.rb +17 -0
- data/lib/cloud_interactor/version.rb +4 -0
- data/lib/cloud_interactor/volume/create.rb +13 -0
- data/lib/cloud_interactor/volume/destroy.rb +11 -0
- data/lib/cloud_interactor/volume/list.rb +7 -0
- data/lib/cloud_interactor/volume/read.rb +9 -0
- data/lib/cloud_interactor/volume.rb +20 -0
- data/lib/ridley/monkeypatches.rb +11 -0
- data/lib/sshkit/actions/start_commit_check.rb +19 -0
- data/lib/sshkit/actions/start_deploy.rb +25 -0
- data/lib/sshkit/actions/start_log_fetch.rb +91 -0
- data/lib/sshkit/actions/start_task.rb +29 -0
- data/lib/sshkit/getters.rb +67 -0
- data/lib/sshkit/helpers.rb +13 -0
- data/lib/sshkit/monkeypatches.rb +19 -0
- metadata +375 -0
data/lib/cloud_interactor/server/attach_volume.rb
@@ -0,0 +1,33 @@
+class CloudInteractor
+  class Server
+    #handles self and create_and_attach case
+    def attach_volume args
+      @classes['volume'].read args['volume_name'], false
+
+      if @main_obj['specific_volumes'].nil? || @main_obj['specific_volumes'].nil?
+
+        create_hash = { "display_name" => args['volume_name'] }
+        create_hash['size'] = args['size'] if args['size']
+        create_hash['volume_type'] = args['volume_type'] ? args['volume_type'] : 'SATA'
+
+        @classes['volume'].create create_hash
+
+        sleep 5
+
+        @classes['volume'].read args
+      end
+
+      puts "Attaching #{ args['volume_name'] } to #{ args['server_name'] } in #{ IDENTITY }..."
+
+      read args, false, 'name', 'server_name'
+
+      specific_fog_object = @classes['auth'].auth_service(RESOURCE).instance_eval(IDENTITY).get @main_obj["specific_#{ IDENTITY }"].last['id']
+
+      if args['device_location']
+        specific_fog_object.attach_volume @main_obj['specific_volumes'].first['id'], args['device_location']
+      else
+        specific_fog_object.attach_volume @main_obj['specific_volumes'].first['id']
+      end
+    end
+  end
+end
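For orientation, here is a sketch of the string-keyed args hash attach_volume reads above; the keys come from the method itself, while every value is hypothetical.

    # Illustrative only: the keys attach_volume consults; all values below are hypothetical.
    args = {
      'server_name'     => 'db01',
      'volume_name'     => 'db01-data',
      'size'            => 100,          # only used when the volume has to be created first
      'volume_type'     => 'SATA',       # the auto-create default shown above
      'device_location' => '/dev/xvdb'   # optional; omit to let the provider choose the device
    }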
data/lib/cloud_interactor/server/create.rb
@@ -0,0 +1,39 @@
+
+class CloudInteractor
+  class Server #http://docs.rackspace.com/servers/api/v2/cs-devguide/content/Servers-d1e2073.html
+    def create args
+      @classes['image'].read @options['preferred_cloud_image'], false
+
+      #Note, if no flavor is passed it defaults to a 512MB standard!
+      @classes['flavor'].read args['flavor']
+
+      read args, false
+
+      unless @main_obj["specific_#{ IDENTITY }"].empty?
+        puts "#{ IDENTITY } #{ args['name'] } already exists... returning."
+
+        return false
+      end
+
+      puts "Creating #{ args['name'] } in #{ IDENTITY }..."
+
+      final_create_args = {
+        name: args['name'],
+        flavor_id: @main_obj['specific_flavors'].first['id'],
+        image_id: @main_obj['specific_images'].first['id']
+      }
+
+      @main_obj["#{ IDENTITY }_create_request"] = JSON.parse(@classes['auth'].auth_service(RESOURCE).instance_eval(IDENTITY).create(final_create_args).to_json)
+
+      @main_obj["#{ IDENTITY }_created_passwords"] ||= {}
+      @main_obj["#{ IDENTITY }_created_passwords"][args['name']] = @main_obj["#{ IDENTITY }_create_request"]['password']
+
+      @main_obj["#{ IDENTITY }_created_details"] ||= {}
+      @main_obj["#{ IDENTITY }_created_details"][args['name']] = @main_obj["#{ IDENTITY }_create_request"]
+
+      puts "Successfully created #{ args['name'] } with pass #{ @main_obj["#{ IDENTITY }_created_passwords"][args['name']] }"
+
+      @main_obj['output']['admin_passwords'] = { args['name'] => @main_obj["#{ IDENTITY }_created_passwords"][args['name']] }
+    end
+  end
+end
data/lib/cloud_interactor/server/destroy.rb
@@ -0,0 +1,11 @@
+class CloudInteractor
+  class Server
+    def destroy args
+      read args, false
+
+      #TODO strict checking on servers to ensure a server can't be destroyed while it still has volumes attached (which can corrupt the volume)
+
+      @classes['helper'].generic_destroy_parse args, IDENTITY, RESOURCE
+    end
+  end
+end
data/lib/cloud_interactor/server/detach_volume.rb
@@ -0,0 +1,21 @@
+class CloudInteractor
+  class Server
+    def detach_volume args, out=""
+      read args, false, 'name', 'server_name'
+
+      read_volume args, false, true
+
+      puts "Detaching #{ args['volume_name'] } from #{ args['server_name'] } in #{ IDENTITY }..."
+
+      specific_fog_object = @classes['auth'].auth_service(RESOURCE).instance_eval(IDENTITY).get @main_obj["specific_#{ IDENTITY }"].last['id']
+
+      specific_fog_object.attachments.each do |attachment|
+        next unless attachment.volume_id == @main_obj["specific_attached_volumes"].first['id']
+
+        out << attachment.detach.to_s
+      end
+
+      puts "The state of the volume detachment is #{ out } for #{ args['server_name'] } in #{ IDENTITY }"
+    end
+  end
+end
data/lib/cloud_interactor/server/list_volumes.rb
@@ -0,0 +1,25 @@
+class CloudInteractor
+  class Server
+    def list_volumes args, output=true
+      puts "Returning list of volumes for #{ args['server_name'] } in #{ IDENTITY }..."
+
+      read(args, false, 'name', 'server_name') if @main_obj["specific_#{ IDENTITY }"].nil?
+
+      specific_fog_object = @classes['auth'].auth_service(RESOURCE).instance_eval(IDENTITY).get @main_obj["specific_#{ IDENTITY }"].last['id']
+
+      @main_obj["#{ IDENTITY }_volume_list_request"] = JSON.parse(specific_fog_object.attachments.all.to_json)
+
+      @main_obj['server_attached_volumes'] ||= {}
+
+      @main_obj['server_attached_volumes'][args['server_name']] ||= []
+
+      @main_obj["#{ IDENTITY }_volume_list_request"].each do |volume_hash|
+        @classes['volume'].read volume_hash, false, 'id'
+
+        @main_obj['server_attached_volumes'][args['server_name']] << @main_obj['specific_volumes'].last
+      end
+
+      ap( @main_obj['server_attached_volumes'][args['server_name']] ) if output
+    end
+  end
+end
data/lib/cloud_interactor/server/poll.rb
@@ -0,0 +1,22 @@
+class CloudInteractor
+  class Server
+    def poll args
+      read args, false
+
+      raise "Server #{ args['name'] } does not exist!" if @main_obj["specific_#{ IDENTITY }"].empty?
+
+      puts "Polling #{ args['name'] } for status...(execution will continue when the server is finished building)"
+
+      specific_fog_object = @classes['auth'].auth_service(RESOURCE).instance_eval(IDENTITY).get @main_obj["specific_#{ IDENTITY }"].last['id']
+
+      #specific_servers is an ARRAY, the latest status of the server is the LAST ENTRY
+      duration_hash = specific_fog_object.wait_for { ready? }
+
+      @main_obj['output']["created_servers"] ||= []
+
+      @main_obj['output']["created_servers"] << JSON.parse(specific_fog_object.reload.to_json)
+
+      puts "#{ args['name'] } became active in #{ duration_hash[:duration] } seconds!"
+    end
+  end
+end
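The polling above leans on fog's wait_for, which blocks until the model's ready? check passes and returns the elapsed time. A minimal sketch of that call outside the gem, assuming a Rackspace fog compute service; the credentials and server id are placeholders.

    require 'fog'

    # Minimal sketch of the fog polling pattern used by #poll; credentials and the
    # server id are placeholders, not values from the gem.
    compute = Fog::Compute.new(
      provider:           'Rackspace',
      rackspace_username: 'USERNAME',
      rackspace_api_key:  'API_KEY',
      rackspace_region:   :ord
    )

    server        = compute.servers.get('SERVER_ID')
    duration_hash = server.wait_for { ready? }  # returns { :duration => seconds_waited }

    puts "became active in #{ duration_hash[:duration] } seconds"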
data/lib/cloud_interactor/server/read_volume.rb
@@ -0,0 +1,24 @@
+class CloudInteractor
+  class Server
+    def read_volume args, output=true, strict_match=false
+      specific_volume = args['volume_name']
+
+      raise "Volume not passed! Value for volume name is: #{ specific_volume }" if specific_volume.nil?
+
+      list_volumes args, false
+
+      @main_obj['server_attached_volumes'][args['server_name']].each do |volume_hash|
+        next if strict_match && volume_hash['display_name'] != (specific_volume)
+        next if !strict_match && !volume_hash['display_name'].include?(specific_volume)
+
+        @main_obj["specific_attached_volumes"] ||= []
+
+        @main_obj["specific_attached_volumes"] << volume_hash
+
+        ap(volume_hash) if output
+      end
+
+      puts("#{ specific_volume } not attached to #{ args['server_name'] }!") if @main_obj["specific_attached_volumes"].nil?
+    end
+  end
+end
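read_volume filters the attached volumes either by exact display_name match (strict_match) or by substring match. A standalone restatement of that filter with hypothetical volume data:

    # Standalone restatement of the strict vs. substring matching in read_volume above;
    # the volume data is hypothetical.
    volumes = [
      { 'display_name' => 'db01-data' },
      { 'display_name' => 'db01-data-backup' }
    ]

    def matching_volumes volumes, name, strict_match
      volumes.select do |volume_hash|
        strict_match ? volume_hash['display_name'] == name : volume_hash['display_name'].include?(name)
      end
    end

    p matching_volumes(volumes, 'db01-data', true).map  { |v| v['display_name'] }  # => ["db01-data"]
    p matching_volumes(volumes, 'db01-data', false).map { |v| v['display_name'] }  # => ["db01-data", "db01-data-backup"]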
data/lib/cloud_interactor/server.rb
@@ -0,0 +1,17 @@
+
+class CloudInteractor
+  class Server #http://docs.rackspace.com/servers/api/v2/cs-devguide/content/Servers-d1e2073.html
+    IDENTITY = 'servers'
+    RESOURCE = 'compute'
+
+    def initialize main_obj, classes, options={}
+      @main_obj = main_obj
+      @options = options
+      @classes = classes
+    end
+
+    def run method, args
+      self.send(method, args)
+    end
+  end
+end
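Server#run (and Volume#run further down) simply forward a method name and an args hash via send, so each action file only has to define a method of that name. A minimal standalone sketch of the same dispatch pattern; the class and method names here are illustrative, not part of the gem.

    # Minimal sketch of the send-based dispatch used by Server#run; names are illustrative.
    class ActionDispatcher
      def run method, args
        self.send(method, args)  # each public action method takes a single args hash
      end

      def create args
        puts "would create #{ args['name'] }"
      end
    end

    ActionDispatcher.new.run 'create', 'name' => 'api01'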
data/lib/cloud_interactor/volume/create.rb
@@ -0,0 +1,13 @@
+class CloudInteractor
+  class Volume
+    def create args
+      puts "Creating #{ args['display_name'] } in #{ IDENTITY }..."
+
+      puts("Creating #{ IDENTITY.singularize } with args #{ ap(args) }") if @options['verbose']
+
+      args['volume_type'] = 'SSD' unless args['volume_type']
+
+      @main_obj["#{ IDENTITY }_create_request"] = JSON.parse(@classes['auth'].auth_service(RESOURCE).instance_eval(IDENTITY).create(args).to_json)
+    end
+  end
+end
data/lib/cloud_interactor/volume/destroy.rb
@@ -0,0 +1,11 @@
+class CloudInteractor
+  class Volume
+    def destroy args
+      read args, false
+
+      #TODO strict checking on volumes to ensure a volume can't be destroyed when it is still attached
+
+      @classes['helper'].generic_destroy_parse args, IDENTITY, RESOURCE, 'display_name'
+    end
+  end
+end
data/lib/cloud_interactor/volume.rb
@@ -0,0 +1,20 @@
+
+class CloudInteractor
+  class Volume #http://docs.rackspace.com/cbs/api/v1.0/cbs-devguide/content/volumes.html
+    IDENTITY = 'volumes'
+    RESOURCE = 'volume'
+
+    def initialize main_obj, classes, options={}
+      @main_obj = main_obj
+      @options = options
+      @classes = classes
+    end
+
+    def run method, args, mode="name"
+      case method
+      when "read" then self.send(method, args, mode)
+      else self.send(method, args)
+      end
+    end
+  end
+end
data/lib/ridley/monkeypatches.rb
@@ -0,0 +1,11 @@
+#https://github.com/reset/ridley/blob/master/lib/ridley/chef_objects/data_bag_item_obect.rb
+#This monkeypatch fixes decrypt being unable to return unencrypted data for encrypted bags with nested hashes in hashes
+module Ridley
+  class DataBagItemObject < ChefObject
+    def decrypt
+      decrypted_hash = Hash[_attributes_.map { |key, value| [key, key == "id" ? value : decrypt_value(value)] }]
+
+      Hashie::Mash.new(decrypted_hash) #old:mass_assign(decrypted_hash)
+    end
+  end
+end
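The patched decrypt rebuilds the item attributes: the 'id' key is left untouched, every other value is decrypted, and the result is wrapped in a Hashie::Mash instead of being mass-assigned. A standalone sketch of that transformation, using a stand-in decrypt_value (Ridley's real one handles the encrypted payloads):

    require 'hashie'

    # Stand-in for Ridley's decrypt_value, used only to illustrate the key handling:
    # nested hashes are walked, everything except 'id' is transformed.
    def decrypt_value value
      value.is_a?(Hash) ? value.map { |k, v| [k, decrypt_value(v)] }.to_h : value.to_s.reverse
    end

    attributes = { 'id' => 'server_passwords', 'secrets' => { 'root' => 'drowssap' } }

    decrypted = Hash[attributes.map { |key, value| [key, key == 'id' ? value : decrypt_value(value)] }]
    mash      = Hashie::Mash.new(decrypted)

    puts mash.id            # => "server_passwords" (left as-is)
    puts mash.secrets.root  # => "password" (nested value still reachable after decryption)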
data/lib/sshkit/actions/start_commit_check.rb
@@ -0,0 +1,19 @@
+module SSHKit
+  module Backend
+    class Netssh
+      def start_commit_check name, ip_address, options, locs, cheftacular, out={'name'=>'', 'time'=> ''}
+        app_loc = "#{ cheftacular['base_file_path'] }/#{ options['repository'] }/releases"
+
+        if test("[ -d #{ app_loc } ]") #true if file exists
+          within app_loc do
+            out['name'] = capture( :ls, '-rt', :|, :tail, '-1' )
+
+            out['time'] = Time.parse(capture( :stat, out['name'], '--printf=%y' )).strftime('%Y-%m-%d %I:%M:%S %p')
+          end
+        end
+
+        out
+      end
+    end
+  end
+end
data/lib/sshkit/actions/start_deploy.rb
@@ -0,0 +1,25 @@
+module SSHKit
+  module Backend
+    class Netssh
+      def start_deploy name, ip_address, options, locs, passwords, out=""
+        log_loc, timestamp = set_log_loc_and_timestamp(locs)
+
+        puts "Generating log file for #{ name } (#{ ip_address }) at #{ log_loc }/#{ name }-deploy-#{ timestamp }.txt"
+
+        capture_args = [ "chef-client" ]
+        capture_args << [ '-l', 'debug' ] if options['debug']
+        #capture_args << [ '>', '/dev/tty']
+
+        out << sudo_capture( passwords[ip_address], *capture_args.flatten )
+
+        ::File.open("#{ log_loc }/#{ name }-deploy-#{ timestamp }.txt", "w") { |f| f.write(out.scrub_pretty_text) } unless options['no_logs']
+
+        puts(out) if options['output'] || options['verbose']
+
+        puts "Succeeded deploy of #{ name } (#{ ip_address }) on role #{ options['role'] }"
+
+        [out, timestamp] #return out to send to logs_bag
+      end
+    end
+  end
+end
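Because start_deploy is defined on the Netssh backend, it becomes callable inside SSHKit's on block once cheftacular is loaded. A hedged sketch of that invocation; the host, password, options, and the shape of locs (consumed by the gem's set_log_loc_and_timestamp helper) are all hypothetical here.

    require 'sshkit'
    require 'sshkit/dsl'
    include SSHKit::DSL

    # Hedged sketch: driving the backend method above through SSHKit's `on` block.
    # Assumes cheftacular (sudo_capture, set_log_loc_and_timestamp, String#scrub_pretty_text)
    # is loaded; every value below, including the shape of locs, is hypothetical.
    passwords = { '203.0.113.10' => 'sudo-password' }
    options   = { 'repository' => 'my_app', 'role' => 'web', 'debug' => false }
    locs      = { 'log' => '/tmp/cheftacular-logs' }

    on 'deploy@203.0.113.10' do
      start_deploy 'web01', '203.0.113.10', options, locs, passwords
    end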
data/lib/sshkit/actions/start_log_fetch.rb
@@ -0,0 +1,91 @@
+module SSHKit
+  module Backend
+    class Netssh
+      def start_log_role_map name, ip_address, target_log_loc, options, locs, cheftacular, passwords, out=""
+        log_loc, timestamp = set_log_loc_and_timestamp(locs)
+        log_cmnd, log_lines = get_log_command_and_lines(options)
+
+        if !test("[ -e #{ target_log_loc }]") #true if file exists
+          puts "#{ name } (#{ ip_address }) does not have a log file for #{ options['env'] } at the moment..."
+
+        else
+          puts "Fetching log file(s) for #{ name } (#{ ip_address }). Outputting to #{ log_loc }/#{ name }-#{ options['role'] }-log-#{ timestamp }.txt"
+
+          target_log_loc.split(',').each do |parsed_log_loc|
+            parsed_log_loc = parsed_log_loc.gsub('|current_repo_location|', "#{ cheftacular['base_file_path'] }/#{ options['repository'] }/current")
+            if log_lines.nil?
+              out << sudo_capture(passwords[ip_address], log_cmnd.to_sym, parsed_log_loc)
+
+            else
+              out << sudo_capture(passwords[ip_address], log_cmnd.to_sym, log_lines, parsed_log_loc)
+            end
+          end
+
+          ::File.open("#{ log_loc }/#{ name }-#{ options['role'] }-log-#{ timestamp }.txt", "w") { |f| f.write(out.scrub_pretty_text) } unless options['no_logs']
+        end
+      end
+
+      def start_log_fetch_ruby_on_rails name, ip_address, run_list, options, locs, cheftacular, passwords, out=""
+        log_loc, timestamp = set_log_loc_and_timestamp(locs)
+        true_env = get_true_environment run_list, cheftacular['run_list_environments'], options['env']
+        app_log_loc = "#{ cheftacular['base_file_path'] }/#{ options['repository'] }/current/log"
+        log_cmnd, log_lines = get_log_command_and_lines(options)
+
+        if !test("[ -e /#{ app_log_loc }/#{ true_env }.log ]") #true if file exists
+          puts "#{ name } (#{ ip_address }) does not have a log file for #{ true_env } at the moment..."
+
+        else
+
+          puts "Fetching log file(s) for #{ name } (#{ ip_address }). Outputting to #{ log_loc }/#{ name }-applog-#{ timestamp }.txt"
+
+          within app_log_loc do
+            if log_lines.nil?
+              out << capture(log_cmnd.to_sym, "#{ true_env }.log")
+
+            else
+              out << capture(log_cmnd.to_sym, log_lines, "#{ true_env }.log")
+            end
+          end
+
+          #To create the file locally you must namespace like this
+          ::File.open("#{ log_loc }/#{ name }-applog-#{ timestamp }.txt", "w") { |f| f.write(out.scrub_pretty_text) } unless options['no_logs']
+        end
+
+        out << start_log_fetch_nginx(name, log_loc, log_cmnd, timestamp, options, out) if run_list.include?('role[web]') && options['get_nginx_logs']
+
+        out << start_log_role_map(name, ip_address, get_role_map(cheftacular, get_worker_role(cheftacular))['log_location'], log_cmnd, app_log_loc, timestamp, options) if run_list.include?("role[#{ get_worker_role(cheftacular) }]")
+
+        puts(out) if options['verbose']
+      end
+
+      def start_log_fetch_nginx name, log_loc, log_cmnd, timestamp, options, out=""
+        out = "" unless options['no_logs']
+
+        nginx_log_loc = "/var/log/nginx/#{ options['repository'] }_access.log"
+
+        puts "Fetching nginx log file... Outputting to #{ log_loc }/#{ name }-nginxlog-#{ timestamp }.txt "
+
+        if log_lines.nil?
+          out << capture(log_cmnd.to_sym, nginx_log_loc)
+
+        else
+          out << capture(log_cmnd.to_sym, log_lines, nginx_log_loc)
+        end
+
+        ::File.open("#{ log_loc }/#{ name }-nginxlog-#{ timestamp }.txt", "w") { |f| f.write(out.scrub_pretty_text) } unless options['no_logs']
+
+        out
+      end
+
+      private
+
+      def get_log_command_and_lines options
+        log_cmnd = options['get_full_logs'] ? 'cat' : 'tail'
+
+        log_lines = options['get_full_logs'] ? nil : "-" + ( options['get_log_lines'] ? options['get_log_lines'] : "500" )
+
+        [log_cmnd, log_lines]
+      end
+    end
+  end
+end
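get_log_command_and_lines is the pivot for all three fetchers: full logs use cat, otherwise tail with a negative line count that defaults to 500. A standalone restatement with hypothetical option hashes:

    # Standalone restatement of get_log_command_and_lines above.
    def log_command_and_lines options
      log_cmnd  = options['get_full_logs'] ? 'cat' : 'tail'
      log_lines = options['get_full_logs'] ? nil : "-" + (options['get_log_lines'] ? options['get_log_lines'] : "500")
      [log_cmnd, log_lines]
    end

    p log_command_and_lines('get_full_logs' => true)   # => ["cat", nil]
    p log_command_and_lines('get_log_lines' => '200')  # => ["tail", "-200"]
    p log_command_and_lines({})                        # => ["tail", "-500"]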
data/lib/sshkit/actions/start_task.rb
@@ -0,0 +1,29 @@
+module SSHKit
+  module Backend
+    class Netssh
+      def start_task name, ip_address, run_list, command, options, locs, cheftacular, out=""
+        log_loc, timestamp = set_log_loc_and_timestamp(locs)
+        true_env = get_true_environment cheftacular['run_list_environments'], run_list, options['env']
+
+        puts "Running #{ command } for #{ name } (#{ ip_address }) (Run with with --debug to generate a log as well)"
+
+        mig_loc = "/var/www/vhosts/#{ options['repository'] }/current"
+
+        capture_args = ["RAILS_ENV=#{ true_env }"]
+        capture_args << command.split(' ')
+
+        within mig_loc do
+          out << capture( *capture_args.flatten )
+        end
+
+        ::File.open("#{ log_loc }/#{ name }-task-#{ timestamp }.txt", "w") {|f| f.write(out.scrub_pretty_text) } if options['debug']
+
+        puts out
+
+        puts("Nothing to migrate for #{ options['role'] }...") if out.empty? || out == 'config/local.yml file detected. Its environment variables will be merged on top of those from config/application.yml.'
+
+        [out, timestamp] #return out to send to logs_bag
+      end
+    end
+  end
+end
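start_task prefixes the task with RAILS_ENV and splits the command so SSHKit's capture receives individual arguments. A standalone illustration with a hypothetical command:

    # Standalone illustration of the argument assembly in start_task above;
    # the command string is hypothetical.
    true_env     = 'staging'
    command      = 'rake db:migrate'

    capture_args = ["RAILS_ENV=#{ true_env }"]
    capture_args << command.split(' ')

    p capture_args.flatten  # => ["RAILS_ENV=staging", "rake", "db:migrate"]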
data/lib/sshkit/getters.rb
@@ -0,0 +1,67 @@
+module SSHKit
+  module Backend
+    class Netssh
+      def get_repository_from_role_name name, repositories, *args
+        args = args.flatten
+
+        repo_role_name = ""
+
+        repositories.each_pair { |key, repo_hash| repo_role_name = key if repo_hash['repo_name'] == name }
+
+        if repositories.has_key?(name) && args.empty?
+          return repositories[name]['repo_name']
+        elsif !repo_role_name.empty? && args.empty?
+          return repo_role_name
+        end
+
+        if args.include?('has_key?')
+          return repositories.has_key?(name)
+        elsif args.include?('has_value?')
+          return !repo_role_name.empty?
+        end
+
+        raise "Unknown repository or rolename for #{ name }"
+      end
+
+      def get_node_from_address nodes, address, ret_node = nil
+        nodes.each do |n|
+          if n.public_ipaddress == address
+            ret_node = n
+
+            break
+          end
+        end
+
+        ret_node
+      end
+
+      def get_true_environment run_list, chef_env_roles, default_env
+        chef_env_roles.each_pair do |role, env|
+          if run_list.include?("role[#{ role }]")
+            default_env = env
+
+            break
+          end
+        end
+
+        default_env
+      end
+
+      def get_worker_role cheftacular, ret=""
+        cheftacular['role_maps'].each_pair do |main_role, role_hash|
+          ret = role_hash['role_name'] if main_role.include?('worker')
+        end
+
+        ret
+      end
+
+      def get_role_map cheftacular, target_role, ret=""
+        cheftacular['role_maps'].each_pair do |main_role, role_hash|
+          ret = role_hash if role_hash['role_name'] == target_role
+        end
+
+        ret
+      end
+    end
+  end
+end
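get_true_environment overrides the default environment with the first mapped role found in the node's run list. A standalone restatement of that lookup with hypothetical role and environment names:

    # Standalone restatement of the get_true_environment lookup above;
    # run list, role map, and default are hypothetical.
    run_list       = ['role[base]', 'role[staging_worker]']
    chef_env_roles = { 'staging_worker' => 'staging', 'demo_web' => 'demo' }
    default_env    = 'production'

    chef_env_roles.each_pair do |role, env|
      if run_list.include?("role[#{ role }]")
        default_env = env
        break
      end
    end

    puts default_env  # => "staging"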
data/lib/sshkit/monkeypatches.rb
@@ -0,0 +1,19 @@
+module SSHKit
+  class Command
+    #returns the full contents of stdout when an error occurs rather than just the first line (needed for chef debugging)
+    def exit_status=(new_exit_status)
+      @finished_at = Time.now
+      @exit_status = new_exit_status
+
+      if options[:raise_on_non_zero_exit] && exit_status > 0
+        message = ""
+        message += "#{command} exit status: " + exit_status.to_s + "\n"
+        message += "#{command} stdout: " + (full_stdout.strip || "Nothing written") + "\n"
+
+        stderr_message = [stderr.strip, full_stderr.strip].delete_if(&:empty?).first
+        message += "#{command} stderr: " + (full_stderr.strip || 'Nothing written') + "\n\n"
+        raise Failed, message
+      end
+    end
+  end
+end