cloud66 0.0.26

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of cloud66 might be problematic. Click here for more details.

@@ -0,0 +1,95 @@
1
+ require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
+ require 'fileutils'
3
+
4
class RedisBackup < QuartzPlugin

  @@version_major = 0
  @@version_minor = 0
  @@version_revision = 1

  # Plugin metadata consumed by the quartz plugin framework.
  def info
    { :uid => "6342c1ef0d8bb2a47ab1362a6b02c058", :name => "Redis Backup", :version => get_version }
  end

  # Runs a Redis backup: asks the server to SAVE its dataset, then gzips the
  # resulting dump file into the configured backup folder.
  # Returns a run_result hash describing success or failure.
  def run(message)
    pl = payload(message)

    # Sanitise the job name into a filesystem-friendly token.
    @job_name = pl['job_name'].gsub(/[^\w\s_-]+/, '').gsub(/(^|\b\s)\s+($|\s?\b)/, '\\1\\2').gsub(/\s/, '_')
    @redisdump_utility = pl['redis_client'] || '/usr/bin/redis-cli'
    @name = pl['db_name'] || 'dump'
    @password = pl['password']
    @socket = pl['socket']
    @host = pl['host'] || '127.0.0.1'
    @port = pl['port'] || 6379
    @additional_options = pl['additional_options'] || []
    @path = pl['db_path']
    @dump_path = pl['backup_folder']

    # Payload values may be present but blank; fall back to defaults.
    # BUGFIX: use to_s before empty? on the port — when 'port' is absent the
    # || 6379 default is an Integer, which does not respond to #empty?.
    @name = 'dump' if @name.empty?
    @host = '127.0.0.1' if @host.empty?
    @port = 6379 if @port.to_s.empty?
    @additional_options = [] if @additional_options.empty?
    @redisdump_utility = '/usr/bin/redis-cli' if @redisdump_utility.empty?

    save_result = invoke_save
    return save_result unless save_result.nil?

    result = copy
    if result[:ok]
      run_result(true, "Redis Backup finished successfully")
    else
      run_result(false, result[:message])
    end
  end

  private

  ## based on Backup Gem with minor modifications

  # Filename of the Redis dump file.
  def database
    "#{@name}.rdb"
  end

  # redis-cli authentication flag; empty string when no password is configured.
  def credential_options
    @password.to_s.empty? ? '' : "-a '#{@password}'"
  end

  # Builds the -h/-p/-s flags from whichever connection settings are present.
  def connectivity_options
    %w[host port socket].map do |option|
      value = instance_variable_get("@#{option}")
      next if value.to_s.empty?
      "-#{option[0,1]} '#{value}'"
    end.compact.join(' ')
  end

  # Extra user-supplied redis-cli arguments joined into one string.
  def user_options
    @additional_options.join(' ')
  end

  # Issues a SAVE command to the Redis server.
  # Returns nil on success, or a failure run_result on error.
  def invoke_save
    command = "#{@redisdump_utility} #{credential_options} #{connectivity_options} #{user_options} SAVE"
    @log.debug "Running #{command}"
    response = run_shell(command)
    @log.debug "redis-cli run result: #{response}"
    unless response[:ok]
      run_result(false, "Failed to save from server '#{response[:message]}'")
    else
      nil
    end
  end

  # Gzips the freshly saved dump file into the backup folder.
  # Raises when the dump file is missing from db_path.
  def copy
    src_path = File.join(@path, database)
    unless File.exist?(src_path)
      raise "Redis database dump not found at #{src_path}"
    end

    FileUtils.mkdir_p(@dump_path)

    dst_path = File.join(@dump_path, database)
    # BUGFIX: quote both paths so spaces in db_path/backup_folder do not
    # break the shell command.
    dump_cmd = "gzip -c '#{src_path}' > '#{dst_path}.gz'"
    @log.debug "Running #{dump_cmd}"
    run_shell dump_cmd
  end

end
@@ -0,0 +1,200 @@
1
+ require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
+ require 'fileutils'
3
+ require 'fog'
4
+
5
class S3Backup < QuartzPlugin

  @@version_major = 0
  @@version_minor = 0
  @@version_revision = 1

  # Plugin metadata consumed by the quartz plugin framework.
  def info
    { :uid => "d3533989f9d542f393566511e8eb2090", :name => "S3 Backup", :version => get_version }
  end

  # Pushes files matching the payload's local_pattern into an S3 bucket.
  # When 'keep' is positive, previous uploads are archived into timestamped
  # folders and only the newest 'keep' archives are retained.
  def run(message)
    pl = payload(message)

    @log.debug "Pruned payload #{pl}"

    @access_key = pl['access_key']
    @secret_key = pl['secret_key']
    @bucket = pl['bucket']
    @region = pl['region']
    @remote_path = pl['remote_path']
    @local_pattern = pl['local_pattern']
    # BUGFIX: to_s guards against a missing 'keep' key (nil has no #empty?).
    @keep = pl['keep'].to_s.empty? ? 0 : pl['keep'].to_i

    @testing = pl['testing']

    return transfer
  end

  private

  # Builds a Fog AWS storage connection from the payload credentials.
  def get_connection
    #Fog.mock! unless @testing.nil? || @testing == false
    connection = Fog::Storage.new(
      :provider => 'AWS',
      :aws_access_key_id => @access_key,
      :aws_secret_access_key => @secret_key,
      :region => @region
    )
    connection
  end

  # Performs the sync and maps any Excon/S3 failure into a run_result.
  def transfer
    begin
      #set up the s3 connection
      @connection = get_connection
      #synchronize the file times
      @connection.sync_clock
      if @keep <= 0
        sync_files_without_version_history
      else
        sync_files_with_version_history
      end

    rescue Excon::Errors::SocketError => exc
      @log.error exc.message
      return run_result(false, exc.message)
    rescue Excon::Errors::Error => exc
      @log.error exc.message
      result = exc.response.body
      # Prefer the human-readable <Message> element from the S3 error body.
      message = result.match(/\<Message\>(.*)\<\/Message\>/)
      if !message.nil?
        message = message[1]
        return run_result(false, message)
      elsif exc.response.status == 404
        return run_result(false, "Remote s3 service or bucket not found (404)")
      elsif exc.response.status != 0
        return run_result(false, "Remote s3 service returned error #{exc.response.status} without any more details")
      else
        return run_result(false, exc.message)
      end
    end
  end

  # Strips a single leading slash so keys are bucket-relative.
  def remove_initial_slash(filename)
    filename.sub(/^\//, '')
  end

  # Normalises a non-empty path to end with exactly one slash.
  def ensure_trailing_slash(filename)
    return "" if filename.empty?
    filename.sub(/\/$/, '') + '/'
  end

  # Uploads every file matching local_pattern straight into the bucket.
  def sync_files_without_version_history

    #prepare the remote directory variable
    @remote_path = remove_initial_slash(ensure_trailing_slash(@remote_path))
    count = 0

    #for each local file match
    Dir.glob(File.expand_path(@local_pattern)).each do |f|

      #skip to next match if current is a directory
      next if File.directory?(f)

      #assign the remote filename
      new_remote_filename = remove_initial_slash(File.join(@remote_path, f))
      @log.debug "Copying #{f} to #{new_remote_filename}"
      count += 1

      #push file to s3
      File.open(f, 'r') do |file|
        @connection.put_object(@bucket, new_remote_filename, file)
      end
    end

    return run_result(true, "Successfully pushed #{count} file(s) to Amazon s3 bucket (without version history)")

  end

  # Archives existing remote files into a timestamped folder, uploads the
  # current local matches, then prunes archive folders beyond @keep.
  def sync_files_with_version_history

    #prepare the remote directory variable
    @remote_path = remove_initial_slash(ensure_trailing_slash(@remote_path))
    count = 0

    #get remote directory
    directory = @connection.directories.get(@bucket)

    #cache the s3 remote directory identifying all appropriate files
    # BUGFIX: Regexp.escape — the remote path may contain regex
    # metacharacters (e.g. '.'); also use the IGNORECASE constant instead of
    # the accidental truthy-string options argument.
    remote_path_match = Regexp.new("^#{Regexp.escape(@remote_path)}", Regexp::IGNORECASE)
    s3_directory = directory.files.map {|f| f.key }

    all_remote_files = s3_directory.select {|m| m =~ remote_path_match}.map {|m| remove_initial_slash(m.gsub(remote_path_match, ''))}
    archive_regex = /(?<folder>^Archive_Cloud66 \(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\)\/)/
    non_archive_files = all_remote_files.reject { |m| m =~ archive_regex }
    archived_files = all_remote_files.select { |m| m =~ archive_regex }

    new_archive_folder = "Archive_Cloud66 (#{Time.now.strftime("%Y-%m-%d %H:%M:%S")})/"

    #copy all non-archive files to new backup folder
    non_archive_files.each do |relative_file|

      # BUGFIX: log through the logger instead of a stray puts to stdout
      @log.debug "name is #{relative_file}"

      #move file to archive
      existing_remote_filename = remove_initial_slash(File.join(@remote_path, relative_file))
      new_remote_relative_filename = File.join(new_archive_folder, "#{relative_file}")
      new_remote_filename = remove_initial_slash(File.join(@remote_path, new_remote_relative_filename))

      @log.debug "Copying #{existing_remote_filename} to #{new_remote_filename}"
      @connection.copy_object @bucket, existing_remote_filename, @bucket, new_remote_filename

      @log.debug "Removing #{existing_remote_filename}"
      @connection.delete_object @bucket, existing_remote_filename

      #add newly archived file to list of archived files
      archived_files << new_remote_relative_filename
    end

    #copy up all new files from source
    all_local_files = Dir.glob(File.expand_path(@local_pattern))
    return run_result(true, "No file(s) identified to push to Amazon s3 bucket (with version history)") if all_local_files.size == 0

    #copy all local matches up to s3
    all_local_files.each do |f|

      #skip to next match if current is a directory
      next if File.directory?(f)

      #assign the remote filename
      new_remote_filename = remove_initial_slash(File.join(@remote_path, f))
      @log.debug "Copying #{f} to #{new_remote_filename}"
      count += 1

      #push file to s3
      File.open(f, 'r') do |file|
        @connection.put_object @bucket, new_remote_filename, file
      end

    end

    #get list of archive folders, newest first
    archive_folders = archived_files.map {|m| archive_regex.match(m)['folder']}.uniq.sort.reverse

    #if we have too many archive folders, drop the oldest until we are at @keep
    while archive_folders.size > @keep do
      archive_folder = archive_folders.delete_at(archive_folders.size-1)
      # separate name avoids shadowing archive_regex above; folder names
      # contain '(' and ')' so they must be escaped
      purge_regex = Regexp.new("^#{Regexp.escape(archive_folder)}", Regexp::IGNORECASE)

      #remove old archived files
      archived_files.select { |m| m =~ purge_regex }.each do |file|
        remote_file_to_remove = remove_initial_slash(File.join(@remote_path, file))
        @log.debug "Removing old archive file #{remote_file_to_remove}"
        @connection.delete_object @bucket, remote_file_to_remove
      end
    end

    return run_result(true, "Successfully pushed #{count} file(s) to Amazon s3 bucket (with version history)")

  end

end
@@ -0,0 +1,26 @@
1
+ require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
+
3
class Shell < QuartzPlugin

  @@version_major = 0
  @@version_minor = 0
  @@version_revision = 1

  # Plugin metadata consumed by the quartz plugin framework.
  def info
    { :uid => "20e07c656e2f477d969e9561e13229fb", :name => "Shell", :version => get_version }
  end

  # Executes the shell command carried in the message payload and reports
  # the shell outcome (or any raised error) as a run_result.
  def run(message)
    @log.debug "Running with #{message}"
    pl = payload(message)
    command = pl['command']
    @log.info "Shell command '#{command}'"

    begin
      outcome = run_shell("#{command}")
      run_result(outcome[:ok], outcome[:message])
    rescue => ex
      run_result(false, "Failed to run shell command due to #{ex}")
    end
  end
end
@@ -0,0 +1,22 @@
1
+ require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
+
3
class Tester < QuartzPlugin

  @@version_major = 1
  @@version_minor = 0
  @@version_revision = 0

  # Plugin metadata consumed by the quartz plugin framework.
  def info
    { :uid => "c0bb6ed7950b489f9abba8071ff0e0ab", :name => "Tester", :version => get_version }
  end

  # Diagnostic plugin: sleeps for a random 0-9 seconds and reports success.
  def run(message)
    @log.info "Running with #{message}"
    pause = Random.rand(10)
    @log.info "Waiting for #{pause} seconds"
    sleep pause
    @log.info "Done"

    run_result(true, "Super! Done in #{pause} seconds")
  end
end
@@ -0,0 +1,39 @@
1
+ require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
+ require 'httparty'
3
+
4
class Webget < QuartzPlugin

  @@version_major = 0
  @@version_minor = 0
  @@version_revision = 1

  # Plugin metadata consumed by the quartz plugin framework.
  def info
    { :uid => "6b5f722d214f4d71a5be237d44094721", :name => "WebGet", :version => get_version }
  end

  # Downloads the payload's 'url' and writes the response body to the path
  # given in 'local file'. Returns a run_result describing the outcome.
  def run(message)
    @log.debug "Running with #{message}"
    pl = payload(message)
    url = pl['url']
    local = pl['local file']
    @log.info "Webget from #{url} into #{local}"

    begin
      response = HTTParty.get(url)
      if response.code == 200
        # Block form of File.open closes the file even if write raises,
        # replacing the manual begin/ensure/close bookkeeping.
        File.open(local, "w") { |file| file.write(response.body) }
        run_result(true, "Saved WebGet to local file")
      else
        run_result(false, response.message)
      end
    rescue => ex
      run_result(false, "Failed to webget due to #{ex}")
    end
  end
end
data/lib/version.rb ADDED
@@ -0,0 +1,52 @@
1
+ # encoding: utf-8
2
+
3
module Agent
  class Version

    ##
    # Change the MAJOR, MINOR and PATCH constants below
    # to adjust the version of the Cloud66 Agent gem.
    MAJOR = 0
    MINOR = 0
    PATCH = 26

    # e.g. PRERELEASE_MODIFIER = 'beta1'
    PRERELEASE_MODIFIER = nil

    class << self
      ##
      # Returns the major version ( big release based off of multiple minor releases )
      def major
        MAJOR
      end

      ##
      # Returns the minor version ( small release based off of multiple patches )
      def minor
        MINOR
      end

      ##
      # Returns the patch version ( updates, features and (crucial) bug fixes )
      def patch
        PATCH
      end

      ##
      # Returns the prerelease modifier ( not quite ready for public consumption )
      def prerelease_modifier
        PRERELEASE_MODIFIER
      end

      ##
      # Returns the current version of the Backup gem ( qualified for the gemspec )
      def current
        parts = [major, minor, patch]
        parts << prerelease_modifier unless prerelease_modifier.nil?
        parts.join('.')
      end
    end

  end
end
@@ -0,0 +1,161 @@
1
+ require 'sys/filesystem'
2
+ require 'socket'
3
+
4
class VitalSignsUtils

  # Collects per-mount-point disk usage (MB free/used/total plus percent used).
  # Returns { mount_point => stats } or { invalid: message } on failure.
  def self.get_disk_usage_info
    space_info = {}
    Sys::Filesystem.mounts do |mount|
      stat = Sys::Filesystem.stat(mount.mount_point)

      #skip if this mount is not active
      #next if stat.blocks_available == 0 && stat.blocks == 0

      mb_free = Float(stat.block_size) * Float(stat.blocks_available) / 1000 / 1000
      mb_total = Float(stat.block_size) * Float(stat.blocks) / 1000 / 1000
      mb_used = mb_total - mb_free
      percent_used = mb_total > 0.0 ? mb_used / mb_total * 100 : 0.0

      space_info[mount.mount_point] = { mb_free: mb_free, mb_used: mb_used, mb_total: mb_total, percent_used: percent_used }
    end
    return space_info
  rescue => exc
    # BUGFIX: @log is a class-level ivar that is never assigned here; guard it
    # so the rescue itself cannot raise NoMethodError on nil and mask exc.
    @log.error "Failure during disk usage gathering due to #{exc}" if @log
    return { invalid: exc.message }
  end

  # Parses `mpstat 1 5` output to compute average CPU utilisation.
  # Returns { percent_used:, percent_free: } or { invalid: message } on failure.
  def self.get_cpu_usage_info

    #NOTE: we can get core-level info with mpstat -P ALL 1 1
    mpstat_result = `mpstat 1 5`

    #split output into lines
    lines = mpstat_result.split(/\r?\n/)

    #get rid of time (first 13 chars)
    lines = lines.map { |line| line[13..-1] }

    #get the header line and find the position of the %idle column
    header_line = lines.detect { |line| line =~ /%idle/ }
    columns = header_line.split(/\s+/)
    idle_index = columns.index('%idle')

    #the last line holds the averages
    average_line = lines[-1]
    columns = average_line.split(/\s+/)

    #get idle value; used = 100 - idle
    idle_value = columns[idle_index].to_f
    percent_used = 100.0 - idle_value

    return { percent_used: percent_used, percent_free: idle_value }
  rescue => exc
    @log.error "Failure during CPU usage gathering due to #{exc}" if @log
    return { invalid: exc.message }
  end

  # Parses `free -m` to compute memory usage, counting buffers/cache as
  # available. Returns { mb_free:, mb_used:, mb_total:, percent_used: },
  # {} when no "Mem:" row is present, or { invalid: message } on failure.
  def self.get_memory_usage_info
    free_m_result = `free -m`

    free_m_result.each_line do |line|
      if line =~ /^Mem:/
        parts = line.scan(/.*?(\d+)/)
        parts.flatten!

        mb_total = parts[0].to_f
        # Mem:used is not a true representation due to the OS gobbling up
        # memory for buffers/cache, so it is recomputed below.
        mb_free = parts[2].to_f
        mb_buffers = parts[4].to_f
        mb_cached = parts[5].to_f

        #memory actually available to processes = free + buffers + cached
        mb_available = mb_cached + mb_buffers + mb_free
        mb_used = mb_total - mb_available
        mb_free = mb_total - mb_used
        percent_used = mb_used / mb_total * 100
        return { mb_free: mb_free, mb_used: mb_used, mb_total: mb_total, percent_used: percent_used }
      end
    end
    # BUGFIX: previously fell through and returned the raw `free -m` output
    # string when no "Mem:" row matched; return an explicit empty hash.
    return {}
  rescue => exc
    @log.error "Failure during memory usage gathering due to #{exc}" if @log
    return { invalid: exc.message }
  end

  # Runs facter (via sudo) and returns every parsed fact as a hash.
  def self.get_facter_info
    facter_text = `sudo facter`
    return parse_facter_text(facter_text)
  end

  # Collects external/internal IPv4/IPv6 addresses via facter.
  # Returns { :ip_addresses => {...} } or {} when nothing was found.
  def self.get_ip_address_info
    # facter command for ip information (collect up to 5 local ip addresses)
    facter_text = `facter ipaddress ec2_public_ipv4 ipaddress_eth0 ipaddress6 ipaddress6_eth0`
    ip_hash = parse_facter_text(facter_text)

    result = {}

    if ip_hash.has_key?('ec2_public_ipv4')
      # prefer ec2 info first (most specific)
      result[:ext_ipv4] = ip_hash['ec2_public_ipv4']
    elsif ip_hash.has_key?('ipaddress')
      # fall back to the general ipaddress fact
      result[:ext_ipv4] = ip_hash['ipaddress']
    end
    result[:int_ipv4] = ip_hash['ipaddress_eth0'] if ip_hash.has_key?('ipaddress_eth0')
    result[:ext_ipv6] = ip_hash['ipaddress6'] if ip_hash.has_key?('ipaddress6')
    result[:int_ipv6] = ip_hash['ipaddress6_eth0'] if ip_hash.has_key?('ipaddress6_eth0')

    # don't have any ip address info
    return {} if result.empty?
    return { :ip_addresses => result }
  end

  # Parses "key => value" lines of facter output into a hash.
  # Not using YAML due to YAML parsing issues, and facter JSON output not working.
  # Blank values and values of 100+ chars (like ssh keys) are excluded.
  def self.parse_facter_text(facter_text)
    facter_hash = {}
    facter_text.lines.each do |line|
      split = line.split('=>')
      next unless split.size == 2
      value = split[1]
      next if value.nil?
      value = value.strip
      # exclude empty or long results (like ssh keys)
      if !value.empty? && value.size < 100
        facter_hash[split[0].strip] = value
      end
    end
    return facter_hash
  end
end
160
+
161
+