cloudblocks 0.0.14 → 0.0.15

Sign up to get free protection for your applications and to get access to all the features.
@@ -50,13 +50,26 @@ def start
50
50
 
51
51
  check_version
52
52
  load_plugins
53
-
54
- @log.info @quartz.status(1, @version, plugin_meta_data)
55
53
  rescue => exc
56
54
  @log.error exc.message
57
55
  exit -1
58
56
  end
59
57
 
58
+ begin
59
+ @log.info @quartz.status(1, @version, plugin_meta_data)
60
+ rescue Exception => e
61
+ message = e.message
62
+ if message =~ /Couldn't find Agent with uid =/
63
+ @log.warn "This agent is no longer registered at the server. The old registration details have been removed from this agent. Please re-run the agent to re-register it."
64
+ puts "This agent is no longer registered at the server. The old registration details have been removed from this agent. Please re-run the agent to re-register it."
65
+ puts @config_full
66
+ File.delete(@config_full)
67
+ else
68
+ @log.error e.message
69
+ end
70
+ exit -1
71
+ end
72
+
60
73
  if @daemon_mode
61
74
  pid = fork {
62
75
  run
@@ -137,10 +150,11 @@ def load_plugins
137
150
 
138
151
  files = Dir.glob("#{@load_path}/*.rb")
139
152
  files.each do |file|
140
- if file != 'quartz_plugin'
153
+ unless file =~ /quartz_plugin/
154
+
141
155
  # is it a valid plugin?
142
156
  require "#{file}"
143
- classname = File.basename(file, '.rb').capitalize
157
+ classname = File.basename(file, '.rb').split('_').collect{ |part| part.capitalize }.join
144
158
  begin
145
159
  clazz = Kernel.const_get(classname)
146
160
  if clazz.ancestors[1].name == 'QuartzPlugin'
@@ -1,7 +1,7 @@
1
1
  require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
2
  require 'fileutils'
3
3
 
4
- class Rotater < QuartzPlugin
4
+ class FileRotate < QuartzPlugin
5
5
 
6
6
  @@version_major = 0
7
7
  @@version_minor = 0
@@ -1,7 +1,7 @@
1
1
  require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
2
  require 'fileutils'
3
3
 
4
- class Logrotate < QuartzPlugin
4
+ class LogRotate < QuartzPlugin
5
5
 
6
6
  @@version_major = 0
7
7
  @@version_minor = 0
@@ -1,7 +1,7 @@
1
1
  require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
2
  require 'fileutils'
3
3
 
4
- class Mysql < QuartzPlugin
4
+ class MysqlBackup < QuartzPlugin
5
5
 
6
6
  @@version_major = 0
7
7
  @@version_minor = 0
@@ -16,7 +16,6 @@ class QuartzPlugin
16
16
  end
17
17
 
18
18
  def payload(message)
19
- @log.debug "Message #{message}"
20
19
  raw_payload = message['payload']
21
20
  @log.debug "Payload #{raw_payload}"
22
21
  parsed_payload = JSON.parse(raw_payload) unless raw_payload.nil?
@@ -0,0 +1,207 @@
1
+ require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
+ require 'fileutils'
3
+ require 'fog'
4
+ require 'set'
5
+
6
+ class RackspaceBackup < QuartzPlugin
7
+
8
+ @@version_major = 0
9
+ @@version_minor = 0
10
+ @@version_revision = 1
11
+
12
+ def info
13
+ { :uid => "86a34908c51311e1a0a923db6188709b", :name => "Rackspace Backup", :version => get_version }
14
+ end
15
+
16
+ def run(message)
17
+
18
+ pl = payload(message)
19
+ @log.debug "Pruned payload #{pl}"
20
+
21
+ @username = pl['username']
22
+ @api_key = pl['api_key']
23
+ @container = pl['container']
24
+ @remote_path = pl['remote_path']
25
+ @region = pl['region']
26
+ @keep = pl['keep'].empty? ? 0 : pl['keep'].to_i
27
+ @local_pattern = pl['local_pattern']
28
+ @testing = pl['testing']
29
+
30
+ return transfer
31
+ end
32
+
33
+ private
34
+
35
+ def get_connection
36
+ #Fog.mock! unless @testing.nil? || @testing == false
37
+ if @region == 'europe'
38
+ connection = Fog::Storage.new(
39
+ :provider => 'Rackspace',
40
+ :rackspace_username => @username,
41
+ :rackspace_api_key => @api_key,
42
+ :rackspace_auth_url => "lon.auth.api.rackspacecloud.com"
43
+ )
44
+ else
45
+ connection = Fog::Storage.new(
46
+ :provider => 'Rackspace',
47
+ :rackspace_username => @username,
48
+ :rackspace_api_key => @api_key,
49
+ )
50
+ end
51
+ connection
52
+
53
+ end
54
+
55
+ def transfer
56
+ begin
57
+ #set up the rackspace connection
58
+ @connection = get_connection
59
+ if @keep <= 0
60
+ sync_files_without_version_history
61
+ else
62
+ sync_files_with_version_history
63
+ end
64
+
65
+ rescue Excon::Errors::SocketError => exc
66
+ @log.error exc.message
67
+ return run_result(false, exc.message)
68
+ rescue Excon::Errors::Error => exc
69
+ @log.error exc.message
70
+ result = exc.response.body
71
+ message = result.match(/\<Message\>(.*)\<\/Message\>/)
72
+ if !message.nil?
73
+ message = message[1]
74
+ return run_result(false, message)
75
+ elsif exc.response.status == 404
76
+ return run_result(false, "Remote rackspace serivce or container not found (404)")
77
+ elsif exc.response.status != 0
78
+ return run_result(false, "Remote rackspace serivce returned error #{exc.response.status} without any more details")
79
+ else
80
+ return run_result(false, exc.message)
81
+ end
82
+ end
83
+ end
84
+
85
+ def remove_initial_slash(filename)
86
+ filename.sub(/^\//, '')
87
+ end
88
+
89
+ def ensure_trailing_slash(filename)
90
+ return "" if filename.empty?
91
+ filename.sub(/\/$/, '') + '/'
92
+ end
93
+
94
+ def sync_files_without_version_history
95
+
96
+ #prepare the remote directory variable
97
+ @remote_path = remove_initial_slash(ensure_trailing_slash(@remote_path))
98
+ count = 0
99
+
100
+ #for each local file match
101
+ Dir.glob(File.expand_path(@local_pattern)).each do |f|
102
+
103
+ #skip to next match if current is a directory
104
+ next if File.directory?(f)
105
+
106
+ #assign the remote filename
107
+ new_remote_filename = remove_initial_slash(File.join(@remote_path, f))
108
+ @log.debug "Copying #{f} to #{new_remote_filename}"
109
+ count += 1
110
+
111
+ #push file to rackspace
112
+ File.open(f, 'r') do |file|
113
+ @connection.put_object(@container, new_remote_filename, file)
114
+ end
115
+ end
116
+
117
+ return run_result(true, "Successully pushed #{count} file(s) to Rackspace Cloud Files container (without version history)")
118
+
119
+ end
120
+
121
+ def sync_files_with_version_history
122
+
123
+ #prepare the remote directory variable
124
+ @remote_path = remove_initial_slash(ensure_trailing_slash(@remote_path))
125
+ count = 0
126
+
127
+ #get remote directory
128
+ directory = @connection.directories.get(@container)
129
+
130
+ #cache the rackspace remote directory identifying all appropriate files
131
+ remote_path_match = Regexp.new("^#{@remote_path}", "i")
132
+ rackspace_directory = directory.files.map {|f| f.key }
133
+
134
+ all_remote_files = rackspace_directory.select {|m| m =~ remote_path_match}.map {|m| remove_initial_slash(m.gsub(remote_path_match, ''))}
135
+ archive_regex = /(?<folder>^Archive_CloudBlocks \(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\)\/)/
136
+ non_archive_files = all_remote_files.reject { |m| m =~ archive_regex }
137
+ archived_files = all_remote_files.select { |m| m =~ archive_regex }
138
+
139
+ new_archive_folder = "Archive_CloudBlocks (#{Time.now.strftime("%Y-%m-%d %H:%M:%S")})/"
140
+
141
+ #copy all non-archive files to new backup folder
142
+ non_archive_files.each do |relative_file|
143
+
144
+ puts "name is #{relative_file}"
145
+
146
+ #move file to archive
147
+ existing_remote_filename = remove_initial_slash(File.join(@remote_path, relative_file))
148
+ new_remote_relative_filename = File.join(new_archive_folder, "#{relative_file}")
149
+ new_remote_filename = remove_initial_slash(File.join(@remote_path, new_remote_relative_filename))
150
+
151
+ @log.debug "Copying #{existing_remote_filename} to #{new_remote_filename}"
152
+ @connection.copy_object @container, existing_remote_filename, @container, new_remote_filename
153
+
154
+ @log.debug "Removing #{existing_remote_filename}"
155
+ @connection.delete_object @container, existing_remote_filename
156
+
157
+ #add newly archived file to list of archived files
158
+ archived_files << new_remote_relative_filename
159
+ end
160
+
161
+ #copy up all new files from source
162
+ all_local_files = Dir.glob(File.expand_path(@local_pattern))
163
+ return run_result(true, "No file(s) identified to push to Rackspace Cloud Files container (with version history)") if all_local_files.size == 0
164
+
165
+ #determine a local root to create relative files (TODO?)
166
+ #local_root = ""
167
+ #local_root_regex = Regexp.new local_root
168
+
169
+ #copy all local matches up to rackspace
170
+ all_local_files.each do |f|
171
+
172
+ #skip to next match if current is a directory
173
+ next if File.directory?(f)
174
+
175
+ #assign the remote filename
176
+ new_remote_filename = remove_initial_slash(File.join(@remote_path, f))
177
+ @log.debug "Copying #{f} to #{new_remote_filename}"
178
+ count += 1
179
+
180
+ #push file to rackspace
181
+ File.open(f, 'r') do |file|
182
+ @connection.put_object @container, new_remote_filename, file
183
+ end
184
+
185
+ end
186
+
187
+ #get list of archive folders
188
+ archive_folders = archived_files.map {|m| archive_regex.match(m)['folder']}.uniq.sort.reverse
189
+
190
+ #if we have too many archive folders
191
+ while archive_folders.size > @keep do
192
+ archive_folder = archive_folders.delete_at(archive_folders.size-1)
193
+ archive_regex = Regexp.new "^#{Regexp.escape(archive_folder)}", "i"
194
+
195
+ #remove old archived files
196
+ archived_files.select { |m| m =~ archive_regex }.each do |file|
197
+ remote_file_to_remove = remove_initial_slash(File.join(@remote_path, file))
198
+ @log.debug "Removing old archive file #{remote_file_to_remove}"
199
+ @connection.delete_object @container, remote_file_to_remove
200
+ end
201
+ end
202
+
203
+ return run_result(true, "Successully pushed #{count} file(s) to Rackspace Cloud Files container (with version history)")
204
+
205
+ end
206
+
207
+ end
@@ -1,7 +1,7 @@
1
1
  require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
2
  require 'fileutils'
3
3
 
4
- class Redis < QuartzPlugin
4
+ class RedisBackup < QuartzPlugin
5
5
 
6
6
  @@version_major = 0
7
7
  @@version_minor = 0
@@ -0,0 +1,200 @@
1
+ require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
+ require 'fileutils'
3
+ require 'fog'
4
+
5
+ class S3Backup < QuartzPlugin
6
+
7
+ @@version_major = 0
8
+ @@version_minor = 0
9
+ @@version_revision = 1
10
+
11
+ def info
12
+ { :uid => "d3533989f9d542f393566511e8eb2090", :name => "S3 Backup", :version => get_version }
13
+ end
14
+
15
+ def run(message)
16
+ pl = payload(message)
17
+
18
+ @log.debug "Pruned payload #{pl}"
19
+
20
+ @access_key = pl['access_key']
21
+ @secret_key = pl['secret_key']
22
+ @bucket = pl['bucket']
23
+ @region = pl['region']
24
+ @remote_path = pl['remote_path']
25
+ @local_pattern = pl['local_pattern']
26
+ @keep = pl['keep'].empty? ? 0 : pl['keep'].to_i
27
+
28
+ @testing = pl['testing']
29
+
30
+ return transfer
31
+ end
32
+
33
+ private
34
+
35
+ def get_connection
36
+ #Fog.mock! unless @testing.nil? || @testing == false
37
+ connection = Fog::Storage.new(
38
+ :provider => 'AWS',
39
+ :aws_access_key_id => @access_key,
40
+ :aws_secret_access_key => @secret_key,
41
+ :region => @region
42
+ )
43
+ connection
44
+ end
45
+
46
+ def transfer
47
+ begin
48
+ #set up the s3 connection
49
+ @connection = get_connection
50
+ #synchronize the file times
51
+ @connection.sync_clock
52
+ if @keep <= 0
53
+ sync_files_without_version_history
54
+ else
55
+ sync_files_with_version_history
56
+ end
57
+
58
+ rescue Excon::Errors::SocketError => exc
59
+ @log.error exc.message
60
+ return run_result(false, exc.message)
61
+ rescue Excon::Errors::Error => exc
62
+ @log.error exc.message
63
+ result = exc.response.body
64
+ message = result.match(/\<Message\>(.*)\<\/Message\>/)
65
+ if !message.nil?
66
+ message = message[1]
67
+ return run_result(false, message)
68
+ elsif exc.response.status == 404
69
+ return run_result(false, "Remote s3 service or bucket not found (404)")
70
+ elsif exc.response.status != 0
71
+ return run_result(false, "Remote s3 service returned error #{exc.response.status} without any more details")
72
+ else
73
+ return run_result(false, exc.message)
74
+ end
75
+ end
76
+ end
77
+
78
+ def remove_initial_slash(filename)
79
+ filename.sub(/^\//, '')
80
+ end
81
+
82
+ def ensure_trailing_slash(filename)
83
+ return "" if filename.empty?
84
+ filename.sub(/\/$/, '') + '/'
85
+ end
86
+
87
+ def sync_files_without_version_history
88
+
89
+ #prepare the remote directory variable
90
+ @remote_path = remove_initial_slash(ensure_trailing_slash(@remote_path))
91
+ count = 0
92
+
93
+ #for each local file match
94
+ Dir.glob(File.expand_path(@local_pattern)).each do |f|
95
+
96
+ #skip to next match if current is a directory
97
+ next if File.directory?(f)
98
+
99
+ #assign the remote filename
100
+ new_remote_filename = remove_initial_slash(File.join(@remote_path, f))
101
+ @log.debug "Copying #{f} to #{new_remote_filename}"
102
+ count += 1
103
+
104
+ #push file to s3
105
+ File.open(f, 'r') do |file|
106
+ @connection.put_object(@bucket, new_remote_filename, file)
107
+ end
108
+ end
109
+
110
+ return run_result(true, "Successully pushed #{count} file(s) to Amazon s3 bucket (without version history)")
111
+
112
+ end
113
+
114
+ def sync_files_with_version_history
115
+
116
+ #prepare the remote directory variable
117
+ @remote_path = remove_initial_slash(ensure_trailing_slash(@remote_path))
118
+ count = 0
119
+
120
+ #get remote directory
121
+ directory = @connection.directories.get(@bucket)
122
+
123
+ #cache the s3 remote directory identifying all appropriate files
124
+ remote_path_match = Regexp.new("^#{@remote_path}", "i")
125
+ s3_directory = directory.files.map {|f| f.key }
126
+
127
+ all_remote_files = s3_directory.select {|m| m =~ remote_path_match}.map {|m| remove_initial_slash(m.gsub(remote_path_match, ''))}
128
+ archive_regex = /(?<folder>^Archive_CloudBlocks \(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\)\/)/
129
+ non_archive_files = all_remote_files.reject { |m| m =~ archive_regex }
130
+ archived_files = all_remote_files.select { |m| m =~ archive_regex }
131
+
132
+ new_archive_folder = "Archive_CloudBlocks (#{Time.now.strftime("%Y-%m-%d %H:%M:%S")})/"
133
+
134
+ #copy all non-archive files to new backup folder
135
+ non_archive_files.each do |relative_file|
136
+
137
+ puts "name is #{relative_file}"
138
+
139
+ #move file to archive
140
+ existing_remote_filename = remove_initial_slash(File.join(@remote_path, relative_file))
141
+ new_remote_relative_filename = File.join(new_archive_folder, "#{relative_file}")
142
+ new_remote_filename = remove_initial_slash(File.join(@remote_path, new_remote_relative_filename))
143
+
144
+ @log.debug "Copying #{existing_remote_filename} to #{new_remote_filename}"
145
+ @connection.copy_object @bucket, existing_remote_filename, @bucket, new_remote_filename
146
+
147
+ @log.debug "Removing #{existing_remote_filename}"
148
+ @connection.delete_object @bucket, existing_remote_filename
149
+
150
+ #add newly archived file to list of archived files
151
+ archived_files << new_remote_relative_filename
152
+ end
153
+
154
+ #copy up all new files from source
155
+ all_local_files = Dir.glob(File.expand_path(@local_pattern))
156
+ return run_result(true, "No file(s) identified to push to Amazon s3 bucket (with version history)") if all_local_files.size == 0
157
+
158
+ #determine a local root to create relative files (TODO?)
159
+ #local_root = ""
160
+ #local_root_regex = Regexp.new local_root
161
+
162
+ #copy all local matches up to s3
163
+ all_local_files.each do |f|
164
+
165
+ #skip to next match if current is a directory
166
+ next if File.directory?(f)
167
+
168
+ #assign the remote filename
169
+ new_remote_filename = remove_initial_slash(File.join(@remote_path, f))
170
+ @log.debug "Copying #{f} to #{new_remote_filename}"
171
+ count += 1
172
+
173
+ #push file to s3
174
+ File.open(f, 'r') do |file|
175
+ @connection.put_object @bucket, new_remote_filename, file
176
+ end
177
+
178
+ end
179
+
180
+ #get list of archive folders
181
+ archive_folders = archived_files.map {|m| archive_regex.match(m)['folder']}.uniq.sort.reverse
182
+
183
+ #if we have too many archive folders
184
+ while archive_folders.size > @keep do
185
+ archive_folder = archive_folders.delete_at(archive_folders.size-1)
186
+ archive_regex = Regexp.new "^#{Regexp.escape(archive_folder)}", "i"
187
+
188
+ #remove old archived files
189
+ archived_files.select { |m| m =~ archive_regex }.each do |file|
190
+ remote_file_to_remove = remove_initial_slash(File.join(@remote_path, file))
191
+ @log.debug "Removing old archive file #{remote_file_to_remove}"
192
+ @connection.delete_object @bucket, remote_file_to_remove
193
+ end
194
+ end
195
+
196
+ return run_result(true, "Successully pushed #{count} file(s) to Amazon s3 bucket (with version history)")
197
+
198
+ end
199
+
200
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: cloudblocks
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.14
4
+ version: 0.0.15
5
5
  prerelease:
6
6
  platform: ruby
7
7
  authors:
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2012-06-29 00:00:00.000000000 Z
12
+ date: 2012-07-10 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: httparty
@@ -149,13 +149,14 @@ files:
149
149
  - lib/config-chief.rb
150
150
  - lib/cloud-quartz.rb
151
151
  - lib/plugins/broken.rb
152
- - lib/plugins/logrotate.rb
153
- - lib/plugins/mysql.rb
152
+ - lib/plugins/file_rotate.rb
153
+ - lib/plugins/log_rotate.rb
154
+ - lib/plugins/mysql_backup.rb
154
155
  - lib/plugins/quartz_plugin.rb
156
+ - lib/plugins/rackspace_backup.rb
155
157
  - lib/plugins/rake.rb
156
- - lib/plugins/redis.rb
157
- - lib/plugins/rotater.rb
158
- - lib/plugins/s3backup.rb
158
+ - lib/plugins/redis_backup.rb
159
+ - lib/plugins/s3_backup.rb
159
160
  - lib/plugins/shell.rb
160
161
  - lib/plugins/tester.rb
161
162
  - lib/plugins/webget.rb
@@ -1,110 +0,0 @@
1
- require File.join(File.dirname(__FILE__), 'quartz_plugin')
2
- require 'fileutils'
3
- require 'fog'
4
-
5
- class S3backup < QuartzPlugin
6
-
7
- @@version_major = 0
8
- @@version_minor = 0
9
- @@version_revision = 1
10
-
11
- def info
12
- { :uid => "d3533989f9d542f393566511e8eb2090", :name => "S3 Backup", :version => get_version }
13
- end
14
-
15
- def run(message)
16
- pl = payload(message)
17
-
18
- @log.debug "Pruned payload #{pl}"
19
-
20
- @access_key_id = pl['access_key']
21
- @secret_access_key = pl['secret_key']
22
- @bucket = pl['bucket']
23
- @remote_path = pl['remote_path']
24
- @region = pl['region']
25
- @local_pattern = pl['local_pattern']
26
- @keep = pl['keep'].empty? ? 0 : pl['keep'].to_i
27
-
28
- @testing = pl['testing']
29
-
30
- return transfer
31
- end
32
-
33
- private
34
-
35
- def connection
36
- Fog.mock! unless @testing.nil? || @testing == false
37
- @connection ||= Fog::Storage.new(
38
- :provider => 'AWS',
39
- :aws_access_key_id => @access_key_id,
40
- :aws_secret_access_key => @secret_access_key,
41
- :region => @region
42
- )
43
- end
44
-
45
- def remote_path_for(filename)
46
- filename.sub(/^\//, '')
47
- end
48
-
49
- def transfer
50
- begin
51
- remote_path = remote_path_for(@remote_path)
52
- @log.debug "Remote path is #{remote_path}"
53
-
54
- @log.debug "Syncronizing local and remote clocks"
55
- connection.sync_clock
56
-
57
- count = 0
58
- # get local files
59
- directory = connection.directories.get(@bucket)
60
- all_rotated = directory.files.reject { |m| File.dirname(m.key) != @remote_path }
61
-
62
- Dir.glob(@local_pattern).each do |f|
63
- base_file = File.basename(f)
64
- remote_files = all_rotated.map {|m| File.basename(m.key)}
65
- unless remote_files.include? base_file
66
- remote_file = File.join(@remote_path, base_file)
67
- next if File.directory?(f)
68
- @log.debug "Copying #{f} to #{remote_file}"
69
- count += 1
70
- File.open(f, 'r') do |file|
71
- connection.put_object(@bucket, File.join(remote_path, base_file), file)
72
- end
73
- end
74
- end
75
-
76
- return run_result(true, "Files copied to S3 bucket successfully with no rotation") if @keep == 0
77
-
78
- @log.debug "Found #{all_rotated.count} in the remote bucket"
79
- if all_rotated.count > @keep
80
- remove_count = all_rotated.count - @keep
81
- @log.debug "Removing #{remove_count} and keeping the most recent #{@keep}"
82
- to_remove = all_rotated.sort { |a,b| a.last_modified <=> b.last_modified }.map{|m| m.key }[0...remove_count]
83
- @log.debug "Removing extra files"
84
- to_remove.each do |tr|
85
- @log.debug "Removing #{tr}"
86
- connection.delete_object(@bucket, tr)
87
- end
88
- end
89
- rescue Excon::Errors::SocketError => exc
90
- @log.error exc.message
91
- return run_result(false, exc.message)
92
- rescue Excon::Errors::Error => exc
93
- @log.error exc.message
94
- result = exc.response.body
95
- message = result.match(/\<Message\>(.*)\<\/Message\>/)
96
- if !message.nil?
97
- message = message[1]
98
- return run_result(false, message)
99
- elsif exc.response.status == 404
100
- return run_result(false, "Remote S3 serivce or bucket not found (404)")
101
- elsif exc.response.status != 0
102
- return run_result(false, "Remote S3 serivce returned error #{exc.response.status} without any more details")
103
- else
104
- return run_result(false, exc.message)
105
- end
106
- end
107
-
108
- run_result(true, "Successfully copied #{count} files to S3")
109
- end
110
- end