capistrano-s3_archive 0.9.9 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,76 @@
+ require "aws-sdk-s3"
+ require "uri"
+
+ module Capistrano
+   class SCM
+     class S3Archive
+       class ArchiveObject
+         attr_reader :bucket, :prefix, :version_id, :sort_proc, :branch, :client
+
+         def initialize(repo_url: nil, version_id: nil, sort_proc: nil, branch: :latest, client_options: {})
+           uri = URI.parse(repo_url)
+           @bucket = uri.host
+           @prefix = uri.path.sub(%r{/?\Z}, '/').slice(1..-1) # normalize path
+           @version_id = version_id
+           @sort_proc = sort_proc
+           @branch = branch
+           @client = Aws::S3::Client.new(client_options)
+         end
+
+         def check_access!
+           client.list_objects(bucket: bucket, prefix: prefix)
+         end
+
+         def key
+           @key ||= case branch.to_sym
+                    when :master, :latest
+                      latest_key
+                    else
+                      prefix + branch.to_s
+                    end
+         end
+
+         def key_basename
+           File.basename(key)
+         end
+
+         def latest_key
+           list_all_objects.min(&sort_proc).key
+         end
+
+         def list_all_objects
+           response = client.list_objects(bucket: bucket, prefix: prefix)
+           response.inject([]) do |objects, page|
+             objects + page.contents
+           end
+         end
+
+         def etag
+           metadata.tap { |it| raise "No such object: #{current_revision}" if it.nil? }.etag
+         end
+
+         def current_revision
+           if version_id
+             "#{key}?versionid=#{version_id}"
+           else
+             key
+           end
+         end
+
+         def metadata
+           client.list_object_versions(bucket: bucket, prefix: key).versions.find do |v|
+             if version_id then v.version_id == version_id
+             else v.is_latest
+             end
+           end
+         end
+
+         def get_object(io)
+           options = { bucket: bucket, key: key }
+           options[:version_id] = version_id if version_id
+           client.get_object(options, target: io)
+         end
+       end
+     end
+   end
+ end
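
For orientation, a sketch of how this new class is driven (the bucket, prefix, and region below are hypothetical, AWS credentials are assumed to be configured, and key/latest_key issue a real ListObjects request):

    require "capistrano/scm/s3_archive/archive_object"

    archive = Capistrano::SCM::S3Archive::ArchiveObject.new(
      repo_url: "s3://my-bucket/myapp",                 # hypothetical bucket and prefix
      branch: :latest,                                  # resolve the newest object under the prefix
      sort_proc: ->(new, old) { old.key <=> new.key },  # the comparator the plugin sets by default
      client_options: { region: "ap-northeast-1" }      # hypothetical region
    )
    archive.key               # newest key under "myapp/" (calls S3 ListObjects)
    archive.current_revision  # the key, plus "?versionid=..." when a version_id is given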
@@ -0,0 +1,82 @@
+ module Capistrano
+   class SCM
+     class S3Archive
+       class LocalCache
+         attr_reader :backend, :download_dir, :cache_dir, :archive_object
+         include FileUtils
+         class ResourceBusyError < StandardError; end
+
+         def initialize(backend, download_dir, cache_dir, archive_object)
+           @backend = backend
+           @download_dir = download_dir
+           @cache_dir = cache_dir
+           @archive_object = archive_object
+         end
+
+         def download
+           download_lock do
+             tmp_file = "#{target_file}.part"
+             etag_file = File.join(download_dir, ".#{archive_object.key_basename}.etag")
+             raise "#{tmp_file} is found. Another process is running?" if File.exist?(tmp_file)
+
+             if all_file_exist?([target_file, etag_file]) && File.read(etag_file) == archive_object.etag
+               backend.info "#{target_file} (etag:#{archive_object.etag}) is found. download skipped."
+             else
+               backend.info "Download s3://#{archive_object.bucket}/#{archive_object.key} to #{target_file}"
+               mkdir_p(File.dirname(target_file))
+               File.open(tmp_file, 'w') do |file|
+                 archive_object.get_object(file)
+               end
+               move(tmp_file, target_file)
+               File.write(etag_file, archive_object.etag)
+             end
+           end
+         end
+
+         def extract
+           remove_entry_secure(cache_dir) if File.exist?(cache_dir)
+           mkdir_p(cache_dir)
+           case target_file
+           when /\.zip\?.*\Z/
+             cmd = "unzip -q -d #{cache_dir} #{target_file}"
+           when /\.tar\.gz\?.*\Z|\.tar\.bz2\?.*\Z|\.tgz\?.*\Z/
+             cmd = "tar xf #{target_file} -C #{cache_dir}"
+           end
+
+           backend.execute cmd # should I use `execute`?
+         end
+
+         def cleanup(keep: 0)
+           downloaded_files = Dir.glob(download_dir).sort_by(&File.method(:mtime))
+           return if downloaded_files.count <= keep
+
+           to_be_removes = (downloaded_files - downloaded_files.last(keep)).flat_map { |f| [f, ".#{f}.etag"] }
+           remove(to_be_removes, force: true)
+         end
+
+         def target_file
+           basename = [archive_object.key_basename, archive_object.version_id].join('?')
+           File.join(download_dir, basename)
+         end
+
+         def all_file_exist?(arr)
+           arr.all?(&File.method(:exist?))
+         end
+
+         def download_lock(&block)
+           mkdir_p(File.dirname(download_dir))
+           lockfile = "#{download_dir}.lock"
+           begin
+             File.open(lockfile, "w") do |file|
+               raise ResourceBusyError, "Could not get #{lockfile}" unless file.flock(File::LOCK_EX | File::LOCK_NB)
+
+               block.call
+             end
+           ensure
+             rm lockfile if File.exist? lockfile
+           end
+         end
+       end
+     end
+   end
+ end
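
download_lock serializes concurrent cap runs on the same machine with a non-blocking exclusive flock, raising ResourceBusyError instead of waiting. The same pattern in isolation (the lock path is hypothetical):

    require "fileutils"

    lockfile = "tmp/archives/production.lock" # hypothetical lock path
    FileUtils.mkdir_p(File.dirname(lockfile))

    begin
      File.open(lockfile, "w") do |file|
        # With LOCK_NB, flock returns false immediately when another process
        # holds the exclusive lock, instead of blocking.
        raise "Could not get #{lockfile}" unless file.flock(File::LOCK_EX | File::LOCK_NB)
        # critical section: download the archive and write its .etag file
      end
    ensure
      FileUtils.rm_f(lockfile)
    end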
@@ -0,0 +1,45 @@
+ module Capistrano
+   class SCM
+     class S3Archive
+       class RemoteCache
+         attr_reader :backend, :download_dir, :archive_object
+
+         def initialize(backend, download_dir, archive_object)
+           @backend = backend
+           @download_dir = download_dir
+           @archive_object = archive_object
+         end
+
+         def download
+           tmp_file = "#{target_file}.part"
+           etag_file = File.join(download_dir, ".#{archive_object.key_basename}.etag")
+           if backend.test("[ -f #{target_file} -a -f #{etag_file} ]") &&
+              backend.capture(:cat, etag_file) == archive_object.etag
+             backend.info "#{target_file} (etag:#{archive_object.etag}) is found. download skipped."
+           else
+             backend.info "Download s3://#{archive_object.bucket}/#{archive_object.key} to #{target_file}"
+             backend.execute(:mkdir, "-p", download_dir)
+             backend.execute(:aws, *['s3api', 'get-object', "--bucket #{archive_object.bucket}", "--key #{archive_object.key}", archive_object.version_id ? "--version-id #{archive_object.version_id}" : nil, tmp_file].compact)
+             backend.execute(:mv, tmp_file, target_file)
+             backend.execute(:echo, "-n", "'#{archive_object.etag}'", "|tee", etag_file)
+           end
+         end
+
+         def cleanup(keep: 0)
+           downloaded_files = backend.capture(:ls, "-xtr", download_dir).split
+           return if downloaded_files.count <= keep
+
+           to_be_removes = (downloaded_files - downloaded_files.last(keep)).flat_map do |file|
+             [File.join(download_dir, file), File.join(download_dir, ".#{file}.etag")]
+           end
+           backend.execute(:rm, '-f', *to_be_removes)
+         end
+
+         def target_file
+           basename = [archive_object.key_basename, archive_object.version_id].join('?')
+           File.join(download_dir, basename)
+         end
+       end
+     end
+   end
+ end
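
A note on the question marks: target_file joins the archive basename and version_id with "?" even when version_id is nil, so every cached file name ends in "?" or "?<version-id>". That is why LocalCache#extract above and deploy_to_release_path (below) match /\.tgz\?.*\Z/ and friends rather than bare extensions. Illustration (directory hypothetical):

    ["app.tar.gz", nil].join("?")     # => "app.tar.gz?"
    ["app.tar.gz", "3sL4"].join("?")  # => "app.tar.gz?3sL4"
    File.join("/var/app/shared/archives/production", ["app.tar.gz", nil].join("?"))
    # => "/var/app/shared/archives/production/app.tar.gz?"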
@@ -1,20 +1,40 @@
  require "capistrano/scm/plugin"
- require "aws-sdk"
- require "uri"
 
  module Capistrano
    class SCM
      class S3Archive < Capistrano::SCM::Plugin
-       attr_reader :extractor
-       include FileUtils
+     end
+   end
+ end
 
-       class ResourceBusyError < StandardError; end
+ require "capistrano/scm/s3_archive/archive_object"
+ require "capistrano/scm/s3_archive/local_cache"
+ require "capistrano/scm/s3_archive/remote_cache"
+
+ module Capistrano
+   class SCM
+     class S3Archive
+       def define_tasks
+         eval_rakefile File.expand_path("tasks/s3_archive.rake", __dir__)
+       end
+
+       def register_hooks
+         after "deploy:new_release_path", "s3_archive:create_release"
+         before "deploy:check", "s3_archive:check"
+         before "deploy:set_current_revision", "s3_archive:set_current_revision"
+       end
 
        def set_defaults
          set_if_empty :s3_archive_client_options, {}
-         set_if_empty :s3_archive_extract_to, :local # :local or :remote
          set_if_empty(:s3_archive_sort_proc, ->(new, old) { old.key <=> new.key })
+         set_if_empty :s3_archive_strategy, :rsync
          set_if_empty :s3_archive_object_version_id, nil
+
+         # strategy direct (alpha)
+         set_if_empty :s3_archive_remote_cache_dir, -> { File.join(shared_path, "archives") }
+
+         # strategy rsync
+         set_if_empty :s3_archive_skip_download, nil
          set_if_empty :s3_archive_local_download_dir, "tmp/archives"
          set_if_empty :s3_archive_local_cache_dir, "tmp/deploy"
          set_if_empty :s3_archive_remote_rsync_options, ['-az', '--delete']
@@ -22,268 +42,115 @@ module Capistrano
          set_if_empty :s3_archive_remote_rsync_runner_options, {}
          set_if_empty :s3_archive_rsync_cache_dir, "shared/deploy"
          set_if_empty :s3_archive_hardlink_release, false
-         # internal use
-         set_if_empty :s3_archive_rsync_copy, "rsync --archive --acls --xattrs"
-       end
-
-       def define_tasks
-         eval_rakefile File.expand_path("../tasks/s3_archive.rake", __FILE__)
-       end
-
-       def register_hooks
-         after "deploy:new_release_path", "s3_archive:create_release"
-         before "deploy:check", "s3_archive:check"
-         before "deploy:set_current_revision", "s3_archive:set_current_revision"
+         set_if_empty :s3_archive_remote_rsync_copy_option, "--archive --acls --xattrs"
        end
 
+       ######
        def local_check
-         s3_client.list_objects(bucket: s3params.bucket, prefix: s3params.object_prefix)
-       end
-
-       def get_object(target)
-         opts = { bucket: s3params.bucket, key: archive_object_key }
-         opts[:version_id] = fetch(:s3_archive_object_version_id) if fetch(:s3_archive_object_version_id)
-         s3_client.get_object(opts, target: target)
+         archive_object.check_access!
        end
 
        def remote_check
-         backend.execute :echo, 'check ssh'
+         case strategy
+         when :direct
+           backend.execute :aws, "s3", "ls", ["s3:/", archive_object.bucket, archive_object.key].join("/")
+         when :rsync
+           backend.execute :echo, "ssh connected"
+         end
        end
 
-       def stage
-         stage_lock do
-           archive_dir = File.join(fetch(:s3_archive_local_download_dir), fetch(:stage).to_s)
-           archive_file = File.join(archive_dir, File.basename(archive_object_key))
-           tmp_file = "#{archive_file}.part"
-           etag_file = File.join(archive_dir, ".#{File.basename(archive_object_key)}.etag")
-           fail "#{tmp_file} is found. Another process is running?" if File.exist?(tmp_file)
-           etag = get_object_metadata.tap { |it| fail "No such object: #{current_revision}" if it.nil? }.etag
-
+       def strategy
+         @strategy ||= fetch(:s3_archive_strategy)
+       end
 
-           if [archive_file, etag_file].all? { |f| File.exist?(f) } && File.read(etag_file) == etag
-             backend.info "#{archive_file} (etag:#{etag}) is found. download skipped."
-           else
-             backend.info "Download #{current_revision} to #{archive_file}"
-             mkdir_p(File.dirname(archive_file))
-             File.open(tmp_file, 'w') do |f|
-               get_object(f)
-             end
-             move(tmp_file, archive_file)
-             File.write(etag_file, etag)
-           end
+       def current_revision
+         archive_object.current_revision
+       end
 
-           remove_entry_secure(fetch(:s3_archive_local_cache_dir)) if File.exist? fetch(:s3_archive_local_cache_dir)
-           mkdir_p(fetch(:s3_archive_local_cache_dir))
+       def deploy_to_release_path
+         case strategy
+         when :direct
+           archive_file = remote_cache.target_file
            case archive_file
-           when /\.zip\Z/
-             cmd = "unzip -q -d #{fetch(:s3_archive_local_cache_dir)} #{archive_file}"
-           when /\.tar\.gz\Z|\.tar\.bz2\Z|\.tgz\Z/
-             cmd = "tar xf #{archive_file} -C #{fetch(:s3_archive_local_cache_dir)}"
-           end
-
-           release_lock_on_stage do
-             run_locally do
-               execute cmd
-             end
+           when /\.zip\?.*\Z/
+             backend.execute :unzip, "-q -d", release_path, archive_file
+           when /\.tar\.gz\?.*\Z|\.tar\.bz2\?.*\Z|\.tgz\?.*\Z/
+             backend.execute :tar, "xf", archive_file, "-C", release_path
            end
+         when :rsync
+           link_option = if fetch(:s3_archive_hardlink_release) && backend.test("[ `readlink #{current_path}` != #{release_path} ]")
+                           "--link-dest `readlink #{current_path}`"
+                         end
+           create_release = %[rsync #{fetch(:s3_archive_remote_rsync_copy_option)} #{link_option} "#{rsync_cache_dir}/" "#{release_path}/"]
+           backend.execute create_release
          end
        end
 
-       def cleanup_stage_dir
-         run_locally do
-           archives_dir = File.join(fetch(:s3_archive_local_download_dir), fetch(:stage).to_s)
-           archives = capture(:ls, '-xtr', archives_dir).split
-           if archives.count >= fetch(:keep_releases)
-             to_be_removes = (archives - archives.last(fetch(:keep_releases)))
-             if to_be_removes.any?
-               to_be_removes_str = to_be_removes.map do |file|
-                 File.join(archives_dir, file)
-               end.join(' ')
-               execute :rm, to_be_removes_str
-             end
-           end
-         end
+       # for rsync
+       def download_to_local_cache
+         local_cache.download
+         local_cache.extract
        end
 
-       def transfer_sources(dest)
-         fail "#{__method__} must be called in run_locally" unless backend.is_a?(SSHKit::Backend::Local)
-
-         rsync = ['rsync']
-         rsync.concat fetch(:s3_archive_remote_rsync_options, [])
-         rsync << (fetch(:s3_archive_local_cache_dir) + '/')
-
-         if dest.local?
-           rsync << ('--no-compress')
-           rsync << rsync_cache_dir
-         else
-           rsync << "-e 'ssh #{dest.ssh_key_option} #{fetch(:s3_archive_remote_rsync_ssh_options).join(' ')}'"
-           rsync << "#{dest.login_user_at}#{dest.hostname}:#{rsync_cache_dir}"
-         end
-
-         release_lock_on_create do
-           backend.execute(*rsync)
-         end
+       def cleanup_local_cache
+         local_cache.cleanup(keep: fetch(:keep_releases))
        end
 
-       def release
-         link_option = if fetch(:s3_archive_hardlink_release) && backend.test("[ `readlink #{current_path}` != #{release_path} ]")
-                         "--link-dest `readlink #{current_path}`"
-                       end
-         create_release = %[#{fetch(:s3_archive_rsync_copy)} #{link_option} "#{rsync_cache_dir}/" "#{release_path}/"]
-         backend.execute create_release
-       end
+       def transfer_sources(dest)
+         rsync_options = []
+         rsync_options.concat fetch(:s3_archive_remote_rsync_options, [])
+         rsync_options << local_cache.cache_dir + "/"
 
-       def current_revision
-         if fetch(:s3_archive_object_version_id)
-           "#{archive_object_key}?versionid=#{fetch(:s3_archive_object_version_id)}"
+         if dest.local?
+           rsync_options << '--no-compress'
+           rsync_options << rsync_cache_dir
          else
-           archive_object_key
+           rsync_ssh_options = []
+           rsync_ssh_options << dest.ssh_key_option unless dest.ssh_key_option.empty?
+           rsync_ssh_options.concat fetch(:s3_archive_remote_rsync_ssh_options)
+           rsync_options << "-e 'ssh #{rsync_ssh_options.join(' ')}'" unless rsync_ssh_options.empty?
+           rsync_options << "#{dest.login_user_at}#{dest.hostname}:#{rsync_cache_dir}"
          end
-       end
 
-       def archive_object_key
-         @archive_object_key ||=
-           case fetch(:branch, :latest).to_sym
-           when :master, :latest
-             latest_object_key
-           else
-             s3params.object_prefix + fetch(:branch).to_s
-           end
+         backend.execute :rsync, *rsync_options
        end
 
        def rsync_cache_dir
          File.join(deploy_to, fetch(:s3_archive_rsync_cache_dir))
        end
 
-       def s3params
-         @s3params ||= S3Params.new(fetch(:repo_url))
-       end
-
-       def get_object_metadata
-         s3_client.list_object_versions(bucket: s3params.bucket, prefix: archive_object_key).versions.find do |v|
-           if fetch(:s3_archive_object_version_id) then v.version_id == fetch(:s3_archive_object_version_id)
-           else v.is_latest
-           end
-         end
-       end
-
-       def list_all_objects
-         response = s3_client.list_objects(bucket: s3params.bucket, prefix: s3params.object_prefix)
-         response.inject([]) do |objects, page|
-           objects + page.contents
-         end
-       end
-
-       def latest_object_key
-         list_all_objects.sort(&fetch(:s3_archive_sort_proc)).first.key
-       end
-
-       private
-
-       def release_lock_on_stage(&block)
-         release_lock((File::LOCK_EX | File::LOCK_NB), &block) # exclusive lock
-       end
-
-       def release_lock_on_create(&block)
-         release_lock(File::LOCK_SH, &block)
+       # for direct
+       def download_to_shared_path
+         remote_cache.download
        end
 
-       def release_lock(lock_mode, &block)
-         mkdir_p(File.dirname(fetch(:s3_archive_local_cache_dir)))
-         lockfile = "#{fetch(:s3_archive_local_cache_dir)}.#{fetch(:stage)}.release.lock"
-         File.open(lockfile, File::RDONLY | File::CREAT) do |file|
-           if file.flock(lock_mode)
-             block.call
-           else
-             fail ResourceBusyError, "Could not get #{lockfile}"
-           end
-         end
+       def cleanup_shared_path
+         remote_cache.cleanup(keep: fetch(:keep_releases))
        end
 
-       def stage_lock(&block)
-         mkdir_p(File.dirname(fetch(:s3_archive_local_cache_dir)))
-         lockfile = "#{fetch(:s3_archive_local_cache_dir)}.#{fetch(:stage)}.lock"
-         File.open(lockfile, "w") do |file|
-           fail ResourceBusyError, "Could not get #{lockfile}" unless file.flock(File::LOCK_EX | File::LOCK_NB)
-           block.call
-         end
-       ensure
-         rm lockfile if File.exist? lockfile
+       def archive_object
+         @archive_object ||= ArchiveObject.new(repo_url: fetch(:repo_url),
+                                               version_id: fetch(:s3_archive_object_version_id),
+                                               sort_proc: fetch(:s3_archive_sort_proc),
+                                               branch: fetch(:branch),
+                                               client_options: fetch(:s3_archive_client_options))
        end
 
-       def s3_client
-         @s3_client ||= Aws::S3::Client.new(fetch(:s3_archive_client_options))
+       def remote_cache
+         @remote_cache ||= RemoteCache.new(
+           backend,
+           File.join(fetch(:s3_archive_remote_cache_dir), fetch(:stage).to_s),
+           archive_object
+         )
        end
 
-       class LocalExtractor
-         # class ResourceBusyError < StandardError; end
-
-         # include FileUtils
-
-         def stage
-           stage_lock do
-             archive_dir = File.join(fetch(:s3_archive_local_download_dir), fetch(:stage).to_s)
-             archive_file = File.join(archive_dir, File.basename(archive_object_key))
-             tmp_file = "#{archive_file}.part"
-             etag_file = File.join(archive_dir, ".#{File.basename(archive_object_key)}.etag")
-             fail "#{tmp_file} is found. Another process is running?" if File.exist?(tmp_file)
-             etag = get_object_metadata.tap { |it| fail "No such object: #{current_revision}" if it.nil? }.etag
-
-
-             if [archive_file, etag_file].all? { |f| File.exist?(f) } && File.read(etag_file) == etag
-               context.info "#{archive_file} (etag:#{etag}) is found. download skipped."
-             else
-               context.info "Download #{current_revision} to #{archive_file}"
-               mkdir_p(File.dirname(archive_file))
-               File.open(tmp_file, 'w') do |f|
-                 get_object(f)
-               end
-               move(tmp_file, archive_file)
-               File.write(etag_file, etag)
-             end
-
-             remove_entry_secure(fetch(:s3_archive_local_cache_dir)) if File.exist? fetch(:s3_archive_local_cache_dir)
-             mkdir_p(fetch(:s3_archive_local_cache_dir))
-             case archive_file
-             when /\.zip\Z/
-               cmd = "unzip -q -d #{fetch(:s3_archive_local_cache_dir)} #{archive_file}"
-             when /\.tar\.gz\Z|\.tar\.bz2\Z|\.tgz\Z/
-               cmd = "tar xf #{archive_file} -C #{fetch(:s3_archive_local_cache_dir)}"
-             end
-
-             release_lock_on_stage do
-               run_locally do
-                 execute cmd
-               end
-             end
-           end
-         end
-
-         def stage_lock(&block)
-           mkdir_p(File.dirname(fetch(:s3_archive_local_cache_dir)))
-           lockfile = "#{fetch(:s3_archive_local_cache_dir)}.#{fetch(:stage)}.lock"
-           begin
-             File.open(lockfile, "w") do |file|
-               fail ResourceBusyError, "Could not get #{lockfile}" unless file.flock(File::LOCK_EX | File::LOCK_NB)
-               block.call
-             end
-           ensure
-             rm lockfile if File.exist? lockfile
-           end
-         end
-       end
-
-       class RemoteExtractor
-       end
-
-       class S3Params
-         attr_reader :bucket, :object_prefix
-
-         def initialize(repo_url)
-           uri = URI.parse(repo_url)
-           @bucket = uri.host
-           @object_prefix = uri.path.sub(/\/?\Z/, '/').slice(1..-1) # normalize path
-         end
+       def local_cache
+         @local_cache ||= LocalCache.new(
+           backend,
+           File.join(fetch(:s3_archive_local_download_dir), fetch(:stage).to_s),
+           File.join(fetch(:s3_archive_local_cache_dir), fetch(:stage).to_s),
+           archive_object
+         )
        end
      end
    end
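
Taken together, the plugin now dispatches on :s3_archive_strategy (:rsync, the default, or the alpha :direct). A minimal 1.0.0 configuration sketch; the bucket, region, and values below are hypothetical, not taken from this diff:

    # Capfile
    require "capistrano/scm/s3_archive"
    install_plugin Capistrano::SCM::S3Archive

    # config/deploy.rb
    set :repo_url, "s3://my-bucket/myapp"        # archives are looked up under this prefix
    set :branch, :latest                         # or the name of one object relative to the prefix
    set :s3_archive_strategy, :rsync             # :direct downloads straight onto the servers (alpha)
    set :s3_archive_client_options, { region: "ap-northeast-1" }
    set :s3_archive_object_version_id, nil       # optionally pin an S3 object version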
@@ -7,51 +7,55 @@ namespace :s3_archive do
        plugin.local_check
      end
 
-     on release_roles :all do
+     on release_roles(:all) do
        plugin.remote_check
      end
    end
 
-   desc 'Extruct and stage the S3 archive in a stage directory'
-   task :stage do
-     if fetch(:skip_staging, false)
-       info "Skip extracting and staging."
-       next
-     end
-
-     run_locally do
-       plugin.stage
+   desc 'Deploy to release_path'
+   task create_release: :stage do
+     on release_roles(:all) do
+       execute :mkdir, '-p', release_path
+       plugin.deploy_to_release_path
      end
    end
 
-   after :stage, :cleanup_stage_dir do
-     run_locally do
-       plugin.cleanup_stage_dir
-     end
+   desc 'Determine the revision that will be deployed'
+   task :set_current_revision do
+     set :current_revision, plugin.current_revision
    end
 
-   desc 'Copy repo to releases'
-   task create_release: :stage do
-     on release_roles(:all), fetch(:s3_archive_remote_rsync_runner_options) do |server|
-       test "[ -e #{plugin.rsync_cache_dir} ]" # implicit initialize for 'server'
+   desc 'Stage the S3 archive to cache directory'
+   task :stage do
+     case plugin.strategy
+     when :direct
+       on release_roles(:all) do
+         plugin.download_to_shared_path
+       end
+     when :rsync
        run_locally do
-         plugin.transfer_sources(server)
+         plugin.download_to_local_cache unless fetch(:s3_archive_skip_download)
        end
-     end
-
-     on release_roles(:all) do
-       execute :mkdir, '-p', release_path
-       plugin.release
+       on release_roles(:all), fetch(:s3_archive_remote_rsync_runner_options) do |server|
+         test "[ -e #{plugin.rsync_cache_dir} ]" # implicit initialize for 'server'
+         run_locally { plugin.transfer_sources(server) }
+       end
+     else
+       error "Invalid strategy #{plugin.strategy} of SCM::S3Archive"
+       exit 1
      end
    end
 
-   desc 'Determine the revision that will be deployed'
-   task :set_current_revision do
-     set :current_revision, plugin.current_revision
+   after :stage, :cleanup_stage_dir do
+     case plugin.strategy
+     when :direct
+       on release_roles(:all) do
+         plugin.cleanup_shared_path
+       end
+     when :rsync
+       run_locally do
+         plugin.cleanup_local_cache
+       end
+     end
    end
  end unless Rake::Task.task_defined?("s3_archive:check")
-
- task :deploy_only do
-   set :skip_staging, true
-   invoke :deploy
- end
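
The removed deploy_only task has no 1.0.0 counterpart. Under the :rsync strategy, a rough stand-in can be defined in user code via the new :s3_archive_skip_download setting; the task below is an assumption for illustration, not part of the release:

    # Skips download_to_local_cache so the existing local cache is rsynced
    # to the servers as-is, then deploys normally.
    task :deploy_only do
      set :s3_archive_skip_download, true
      invoke :deploy
    end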