dockly 1.13.0 → 2.0.0

data/.travis.yml CHANGED
@@ -1,12 +1,8 @@
  language: ruby
  rvm:
- - 1.9
- - 2.0
- - 2.1
- - 2.2
- script:
- - git fetch --tags
- - CI=true bundle exec rake
+ - 1.9.3
+ - 2.0.0
+ script: CI=true bundle exec rake
  before_install:
  - sudo apt-get update -qq
  - sudo apt-get install -qq rpm
data/README.md CHANGED
@@ -9,6 +9,11 @@

  Although only a specific type of repository may be used, these assumptions allow us to define a simple DSL to describe your repository.

+ Tool Requirements
+ -----------------
+
+ To use the generated startup scripts, you'll need AWS CLI v1.5.0+.
+
  Usage
  -----

@@ -106,9 +111,6 @@ The `docker` DSL is used to define Docker containers. It has the following attributes:
  - required: `false` -- only required when `import` is not supplied
  - description: the location of the base image to start building from
  - examples: `paintedfox/ruby`, `registry.example.com/my-custom-image`
- - `build_env`
- - required: `false`
- - description: Hash whose keys are environment variable names and whose values are their values. These variables are only used during build commands; exported images will not contain them.
  - `import`
  - required: `false` -- only required when `registry_import` is not supplied
  - description: the location (url or S3 path) of the base image to start building from
@@ -281,7 +283,7 @@ In addition to the above attributes, `deb` has the following references:
  - required: `false`
  - default: `nil`
  - class: `Dockly::Foreman`
- - description: any Foreman scripts used in the deb.
+ - description: any Foreman scripts used in the deb

  `rpm`
  -----
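
For context, here is a minimal `dockly.rb` sketch using the DSL documented above. All names and values are illustrative, and any attribute not shown in this diff (such as `procfile`) is an assumption; note that as of 2.0.0, `deb` takes a single `foreman` block rather than an array of them.

```ruby
# dockly.rb -- illustrative sketch, not taken from this diff
deb :my_app do
  package_name 'my-app'
  version '1.0'
  release '1'

  docker do
    name :my_app_image
    registry_import 'paintedfox/ruby'
    git_archive '/opt/my-app'
    build 'run bundle install'
  end

  foreman do
    name 'my-app'
    procfile 'Procfile'    # assumed attribute; foreman exports read a Procfile
    init_dir '/etc/init'
  end
end
```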
data/Rakefile CHANGED
@@ -6,11 +6,6 @@ require 'rake'
  require 'dockly'
  require 'rspec/core/rake_task'
  require 'cane/rake_task'
- require 'pry'
-
- task :shell do
-   Pry.start(Dockly)
- end

  task :default => [:spec, :quality]

data/dockly.gemspec CHANGED
@@ -22,7 +22,6 @@ Gem::Specification.new do |gem|
  gem.add_dependency 'foreman'
  gem.add_dependency 'fpm', '~> 1.2.0'
  gem.add_dependency 'grit'
- gem.add_dependency 'rugged'
  gem.add_development_dependency 'cane'
  gem.add_development_dependency 'pry'
  gem.add_development_dependency 'rake'
data/lib/dockly/build_cache/base.rb CHANGED
@@ -9,7 +9,7 @@ class Dockly::BuildCache::Base
  dsl_attribute :s3_bucket, :s3_object_prefix, :use_latest,
                :hash_command, :build_command, :parameter_commands,
                :base_dir, :command_dir, :output_dir, :tmp_dir,
-               :keep_old_files, :safe_push_cache
+               :keep_old_files

  default_value :use_latest, false
  default_value :parameter_commands, {}
@@ -17,7 +17,6 @@ class Dockly::BuildCache::Base
  default_value :output_dir, '.'
  default_value :tmp_dir, Dir.tmpdir
  default_value :keep_old_files, false
- default_value :safe_push_cache, false

  def execute!
    debug "Looking for cache for hash: #{hash_output}"
data/lib/dockly/build_cache/docker.rb CHANGED
@@ -1,6 +1,10 @@
  class Dockly::BuildCache::Docker < Dockly::BuildCache::Base
    attr_accessor :image

+   def wait_time
+     300 # max 5 minutes
+   end
+
    def execute!
      ensure_present! :image
      super
@@ -25,11 +29,25 @@ class Dockly::BuildCache::Docker < Dockly::BuildCache::Base
      ensure_present! :output_dir
      if cache = pull_from_s3(version)
        debug "inserting to #{output_directory}"
-       if safe_push_cache
-         push_cache_safe(cache)
-       else
-         push_cache_with_volumes(cache)
-       end
+       path = File.expand_path(cache.path)
+       path_parent = File.dirname(path)
+       tar_flags = keep_old_files ? '-xkf' : 'xf'
+       container = ::Docker::Container.create(
+         'Image' => image.id,
+         'Cmd' => ['/bin/bash', '-c', [
+             "mkdir -p #{File.dirname(output_directory)}",
+             '&&',
+             "tar #{tar_flags} #{File.join('/', 'host', path)} -C #{File.dirname(output_directory)}"
+           ].join(' ')
+         ],
+         'Volumes' => {
+           File.join('/', 'host', path_parent) => { path_parent => 'rw' }
+         }
+       )
+       container.start('Binds' => ["#{path_parent}:#{File.join('/', 'host', path_parent)}"])
+       result = container.wait['StatusCode']
+       raise "Got bad status code when copying build cache: #{result}" unless result.zero?
+       self.image = container.commit
        debug "inserted cache into #{output_directory}"
        cache.close
      else
@@ -37,37 +55,6 @@ class Dockly::BuildCache::Docker < Dockly::BuildCache::Base
      end
    end

-   def push_cache_safe(cache)
-     container = image.run("mkdir -p #{File.dirname(output_directory)}")
-     image_with_dir = container.tap(&:wait).commit
-     self.image = image_with_dir.insert_local(
-       'localPath' => cache.path,
-       'outputPath' => File.dirname(output_directory)
-     )
-   end
-
-   def push_cache_with_volumes(cache)
-     path = File.expand_path(cache.path)
-     path_parent = File.dirname(path)
-     tar_flags = keep_old_files ? '-xkf' : 'xf'
-     container = ::Docker::Container.create(
-       'Image' => image.id,
-       'Cmd' => ['/bin/bash', '-c', [
-           "mkdir -p #{File.dirname(output_directory)}",
-           '&&',
-           "tar #{tar_flags} #{File.join('/', 'host', path)} -C #{File.dirname(output_directory)}"
-         ].join(' ')
-       ],
-       'Volumes' => {
-         File.join('/', 'host', path_parent) => { path_parent => 'rw' }
-       }
-     )
-     container.start('Binds' => ["#{path_parent}:#{File.join('/', 'host', path_parent)}"])
-     result = container.wait['StatusCode']
-     raise "Got bad status code when copying build cache: #{result}" unless result.zero?
-     self.image = container.commit
-   end
-
    def copy_output_dir(container)
      ensure_present! :output_dir
      file_path = File.join(tmp_dir,s3_object(hash_output))
@@ -105,7 +92,7 @@ class Dockly::BuildCache::Docker < Dockly::BuildCache::Base
      debug "running command `#{command}` on image #{image.id}"
      container = image.run(["/bin/bash", "-c", "cd #{command_directory} && #{command}"])
      debug "command running in container #{container.id}"
-     status = container.wait(docker.timeout)['StatusCode']
+     status = container.wait(wait_time)['StatusCode']
      resp = container.streaming_logs(stdout: true, stderr: true)
      debug "`#{command}` returned the following output:"
      debug resp.strip
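
The consolidated `insert_cache` path above mounts the host directory holding the downloaded cache tarball into a throwaway container, untars it, and commits the result as the new image. A standalone sketch of that volume-bind pattern with the docker-api gem follows; the image name and paths are illustrative, not dockly's configuration:

```ruby
require 'docker'

# Illustrative values; in dockly these come from the build cache config.
image     = Docker::Image.get('ubuntu:14.04')
host_dir  = '/tmp/build_cache'                # holds cache.tar on the host
guest_dir = File.join('/', 'host', host_dir)  # where the container sees it

container = Docker::Container.create(
  'Image'   => image.id,
  'Cmd'     => ['/bin/bash', '-c',
                "mkdir -p /opt/app && tar xf #{File.join(guest_dir, 'cache.tar')} -C /opt/app"],
  'Volumes' => { guest_dir => {} }
)
container.start('Binds' => ["#{host_dir}:#{guest_dir}"])
status = container.wait['StatusCode']
raise "cache insert failed: #{status}" unless status.zero?
new_image = container.commit  # this image now contains the unpacked cache
```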
data/lib/dockly/deb.rb CHANGED
@@ -7,11 +7,10 @@ class Dockly::Deb
  logger_prefix '[dockly deb]'
  dsl_attribute :package_name, :version, :release, :arch, :build_dir,
                :deb_build_dir, :pre_install, :post_install, :pre_uninstall,
-               :post_uninstall, :s3_bucket, :files, :app_user, :vendor,
-               :package_startup_script
+               :post_uninstall, :s3_bucket, :files, :app_user, :vendor

  dsl_class_attribute :docker, Dockly::Docker
- dsl_class_attribute :foreman, Dockly::Foreman, type: Array
+ dsl_class_attribute :foreman, Dockly::Foreman

  default_value :version, '0.0'
  default_value :release, '0'
@@ -21,14 +20,12 @@ class Dockly::Deb
  default_value :files, []
  default_value :app_user, 'nobody'
  default_value :vendor, 'Dockly'
- default_value :package_startup_script, true

  def file(source, destination)
    @files << { :source => source, :destination => destination }
  end

  def create_package!
-   info "creating package"
    ensure_present! :build_dir, :deb_build_dir
    FileUtils.mkdir_p(File.join(build_dir, deb_build_dir))
    FileUtils.rm(build_path) if File.exist?(build_path)
@@ -44,18 +41,12 @@ class Dockly::Deb
  end

  def build
+   info "creating package"
    create_package!
+   info "uploading to s3"
    upload_to_s3
  end

- def copy_from_s3(sha)
-   ensure_present! :s3_bucket
-   object = s3_object_name_for(sha)
-   info "Copying s3://#{s3_bucket}/#{object} to s3://#{s3_bucket}/#{s3_object_name}"
-   Dockly::AWS.s3.copy_object(s3_bucket, object, s3_bucket, s3_object_name)
-   info "Successfully copied s3://#{s3_bucket}/#{object} to s3://#{s3_bucket}/#{s3_object_name}"
- end
-
  def build_path
    ensure_present! :build_dir, :deb_build_dir
    File.join(build_dir, deb_build_dir, output_filename)
@@ -67,12 +58,12 @@ class Dockly::Deb
    info "#{name}: found package: #{s3_url}"
    true
  rescue
-   info "#{name}: could not find package: #{s3_url}"
+   info "#{name}: could not find package: " +
+     "#{s3_url}"
    false
  end

  def upload_to_s3
-   info "uploading to s3"
    return if s3_bucket.nil?
    raise "Package wasn't created!" unless File.exist?(build_path)
    info "uploading package to s3"
@@ -85,11 +76,7 @@ class Dockly::Deb
  end

  def s3_object_name
-   s3_object_name_for(Dockly::Util::Git.git_sha)
- end
-
- def s3_object_name_for(sha)
-   "#{package_name}/#{sha}/#{output_filename}"
+   "#{package_name}/#{Dockly::Util::Git.git_sha}/#{output_filename}"
  end

  def output_filename
@@ -115,7 +102,7 @@ private
    add_files(@dir_package)
    add_docker_auth_config(@dir_package)
    add_docker(@dir_package)
-   add_startup_script(@dir_package) if package_startup_script
+   add_startup_script(@dir_package)

    convert_package

@@ -140,14 +127,14 @@ private
  end

  def add_foreman(package)
-   return if (foreman || []).empty?
-   foreman.each do |fore|
-     info "adding foreman export '#{fore.name}'"
-     fore.create!
-     package.attributes[:prefix] = fore.init_dir
-     Dir.chdir(fore.build_dir) { package.input('.') }
-     package.attributes[:prefix] = nil
+   return if foreman.nil?
+   info "adding foreman export"
+   foreman.create!
+   package.attributes[:prefix] = foreman.init_dir
+   Dir.chdir(foreman.build_dir) do
+     package.input('.')
    end
+   package.attributes[:prefix] = nil
  end

  def add_files(package)
data/lib/dockly/docker.rb CHANGED
@@ -28,18 +28,6 @@ class Dockly::Docker
  default_value :s3_bucket, nil
  default_value :s3_object_prefix, ""

- def build_env(hash = nil)
-   (@build_env ||= {}).tap { |env| env.merge!(hash) if hash.is_a?(Hash) }
- end
-
- def copy_from_s3(sha)
-   return if s3_bucket.nil?
-   object = s3_object_for(sha)
-   info "Copying s3://#{s3_bucket}/#{object} to #{s3_bucket}/#{s3_object}"
-   Dockly::AWS.s3.copy_object(s3_bucket, object, s3_bucket, s3_object)
-   info "Successfully copied s3://#{s3_bucket}/#{object} to s3://#{s3_bucket}/#{s3_object}"
- end
-
  def generate!
    image = generate_build
    export_image(image)
@@ -47,21 +35,6 @@ class Dockly::Docker
    cleanup([image]) if cleanup_images
  end

- def export_only
-   if image = find_image_by_repotag
-     info "Found image by repo:tag: #{repo}:#{tag} - #{image.inspect}"
-     export_image(image)
-   else
-     raise "Could not find image"
-   end
- end
-
- def find_image_by_repotag
-   Docker::Image.all.find do |image|
-     image.info["RepoTags"].include?("#{repo}:#{tag}")
-   end
- end
-
  def generate_build
    Docker.options = { :read_timeout => timeout, :write_timeout => timeout }
    images = {}
@@ -77,10 +50,9 @@ class Dockly::Docker
      info "Successfully pulled #{full_name}"
    end

-   images[:two] = add_build_env(images[:one])
-   images[:three] = add_git_archive(images[:two])
-   images[:four] = run_build_caches(images[:three])
-   build_image(images[:four])
+   images[:two] = add_git_archive(images[:one])
+   images[:three] = run_build_caches(images[:two])
+   build_image(images[:three])
  ensure
    cleanup(images.values.compact) if cleanup_images
  end
@@ -97,8 +69,6 @@ class Dockly::Docker

  def cleanup(images)
    info 'Cleaning up intermediate images'
-   images ||= []
-   images = images.compact
    ::Docker::Container.all(:all => true).each do |container|
      image_id = container.json['Image']
      if images.any? { |image| image.id.start_with?(image_id) || image_id.start_with?(image.id) }
@@ -174,18 +144,6 @@ class Dockly::Docker
    image
  end

- def add_build_env(image)
-   return image if build_env.empty?
-   info "Setting the following environment variables in the docker image: #{build_env.keys}"
-   dockerfile = [
-     "FROM #{image.id}",
-     *build_env.map { |key, val| "ENV #{key.to_s.shellescape}=#{val.to_s.shellescape}" }
-   ].join("\n")
-   out_image = ::Docker::Image.build(dockerfile)
-   info "Successfully set the environment variables in the dockerfile"
-   out_image
- end
-
  def add_git_archive(image)
    return image if git_archive.nil?
    info "adding the git archive"
@@ -202,7 +160,7 @@ class Dockly::Docker
    info "running custom build steps, starting with id: #{image.id}"
    out_image = ::Docker::Image.build("from #{image.id}\n#{build}")
    info "finished running custom build steps, result id: #{out_image.id}"
-   out_image.tap { |img| img.tag(repo: repo, tag: tag, force: true) }
+   out_image.tap { |img| img.tag(:repo => repo, :tag => tag) }
  end

  def repo
@@ -228,10 +186,10 @@ class Dockly::Docker
    container = image.run('true')
    info "created the container: #{container.id}"

-   if s3_bucket.nil?
-     output = File.open(tar_path, 'wb')
-   else
+   unless s3_bucket.nil?
      output = Dockly::AWS::S3Writer.new(connection, s3_bucket, s3_object)
+   else
+     output = File.open(tar_path, 'wb')
    end

    gzip_output = Zlib::GzipWriter.new(output)
@@ -250,7 +208,6 @@ class Dockly::Docker
    end
    raise
  ensure
-   container.tap(&:wait).remove if container
    gzip_output.close if gzip_output
  end

@@ -293,11 +250,9 @@ class Dockly::Docker
  end

  def s3_object
-   s3_object_for(Dockly::Util::Git.git_sha)
- end
-
- def s3_object_for(sha)
-   [s3_object_prefix, sha, '/', export_filename].join
+   output = "#{s3_object_prefix}"
+   output << "#{Dockly::Util::Git.git_sha}/"
+   output << "#{export_filename}"
  end

  def push_to_registry(image)
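
In `export_image` above, the container's tarball is streamed through `Zlib::GzipWriter` into either an S3 writer or a local file. A minimal standard-library sketch of that wrapping, with an illustrative filename and payload:

```ruby
require 'zlib'

# Any IO-like object that responds to #write can sit underneath the gzip
# stream, which is what lets export_image swap a file for an S3 writer.
File.open('image.tar.gz', 'wb') do |output|
  gzip = Zlib::GzipWriter.new(output)
  gzip.write('...tar bytes...')  # dockly streams the docker export here
  gzip.close                     # flushes the gzip trailer
end
```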
data/lib/dockly/rake_task.rb CHANGED
@@ -53,131 +53,48 @@ namespace :dockly do
      raise "No dockly.rb found!" unless File.exist?('dockly.rb')
    end

-   prepare_targets = []
-   upload_targets = []
    build_targets = []
-   copy_targets = []

    namespace :deb do
      Dockly.debs.values.each do |inst|
-       namespace :prepare do
-         task inst.name => 'dockly:load' do |name|
-           inst.create_package!
-         end
-       end
-
-       namespace :upload do
-         deb inst.name => 'dockly:load' do |name|
-           inst.upload_to_s3
-         end
+       deb inst.name => 'dockly:load' do |name|
+         Thread.current[:rake_task] = name
+         inst.build
        end
-
-       namespace :copy do
-         task inst.name => 'dockly:load' do |name|
-           inst.copy_from_s3(Dockly::History.duplicate_build_sha[0..6])
-         end
-       end
-
-       deb inst.name => [
-         'dockly:load',
-         "dockly:deb:prepare:#{inst.name}",
-         "dockly:deb:upload:#{inst.name}"
-       ]
-       prepare_targets << "dockly:deb:prepare:#{inst.name}"
-       upload_targets << "dockly:deb:upload:#{inst.name}"
-       copy_targets << "dockly:deb:copy:#{inst.name}"
        build_targets << "dockly:deb:#{inst.name}"
      end
    end

    namespace :rpm do
      Dockly.rpms.values.each do |inst|
-       namespace :prepare do
-         task inst.name => 'dockly:load' do |name|
-           inst.create_package!
-         end
-       end
-
-       namespace :upload do
-         rpm inst.name => 'dockly:load' do |name|
-           inst.upload_to_s3
-         end
+       rpm inst.name => 'dockly:load' do |name|
+         Thread.current[:rake_task] = name
+         inst.build
        end
-
-       namespace :copy do
-         task inst.name => 'dockly:load' do |name|
-           inst.copy_from_s3(Dockly::History.duplicate_build_sha[0..6])
-         end
-       end
-
-       rpm inst.name => [
-         'dockly:load',
-         "dockly:rpm:prepare:#{inst.name}",
-         "dockly:rpm:upload:#{inst.name}"
-       ]
-       prepare_targets << "dockly:rpm:prepare:#{inst.name}"
-       upload_targets << "dockly:rpm:upload:#{inst.name}"
-       copy_targets << "dockly:rpm:copy:#{inst.name}"
        build_targets << "dockly:rpm:#{inst.name}"
      end
    end

    namespace :docker do
      Dockly.dockers.values.each do |inst|
-       # For backwards compatibility
-       namespace :noexport do
-         task inst.name => "dockly:docker:prepare:#{inst.name}"
+       docker inst.name => 'dockly:load' do
+         Thread.current[:rake_task] = inst.name
+         inst.generate!
        end

-       namespace :prepare do
+       namespace :noexport do
          task inst.name => 'dockly:load' do
            Thread.current[:rake_task] = inst.name
            inst.generate_build
          end
        end

-       namespace :upload do
-         task inst.name => 'dockly:load' do
-           Thread.current[:rake_task] = inst.name
-           inst.export_only
-         end
-       end
-
-       namespace :copy do
-         task inst.name => 'dockly:load' do
-           Thread.current[:rake_task] = inst.name
-           inst.copy_from_s3(Dockly::History.duplicate_build_sha[0..6])
-         end
-       end
-
-       docker inst.name => [
-         'dockly:load',
-         "dockly:docker:prepare:#{inst.name}",
-         "dockly:docker:upload:#{inst.name}"
-       ]
-
        # Docker image will be generated by 'dockly:deb:package'
        unless inst.s3_bucket.nil?
-         prepare_targets << "dockly:docker:prepare:#{inst.name}"
-         upload_targets << "dockly:docker:upload:#{inst.name}"
-         copy_targets << "dockly:docker:copy:#{inst.name}"
          build_targets << "dockly:docker:#{inst.name}"
        end
      end
    end

-   multitask :prepare_all => prepare_targets
-   multitask :upload_all => upload_targets
    multitask :build_all => build_targets
-   multitask :copy_all => copy_targets
-
-   task :build_or_copy_all do
-     if Dockly::History.duplicate_build?
-       Rake::Task['dockly:copy_all'].invoke
-     else
-       Rake::Task['dockly:build_all'].invoke
-       Dockly::History.write_content_tag!
-       Dockly::History.push_content_tag!
-     end
-   end
  end
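
After this change, each `dockly:deb:<name>`, `dockly:rpm:<name>`, and `dockly:docker:<name>` task builds and uploads in one step, and `dockly:build_all` fans out to every declared target. A usage sketch, assuming dockly's tasks are already loaded into your Rakefile (the exact require is not shown in this diff):

```ruby
# Rakefile -- usage sketch; assumes the dockly:* tasks above are loaded
require 'dockly'

# Build and upload every deb, rpm, and docker image declared in dockly.rb:
task :ship => 'dockly:build_all'

# A single package can also be built directly, e.g. for a deb named :my_app:
#   rake dockly:deb:my_app
```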
data/lib/dockly/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Dockly
-   VERSION = '1.13.0'
+   VERSION = '2.0.0'
  end
data/lib/dockly.rb CHANGED
@@ -4,7 +4,6 @@ require 'dockly/util/git'
  require 'fog'
  require 'foreman/cli_fix'
  require 'foreman/export/base_fix'
- require 'rugged'

  module Dockly
    attr_reader :instance, :git_sha
@@ -16,7 +15,6 @@ module Dockly
    autoload :BuildCache, 'dockly/build_cache'
    autoload :Docker, 'dockly/docker'
    autoload :Deb, 'dockly/deb'
-   autoload :History, 'dockly/history'
    autoload :Rpm, 'dockly/rpm'
    autoload :TarDiff, 'dockly/tar_diff'

@@ -1,7 +1,7 @@
  file_diff_docker_import_fn() {
    s3_path="<%= data[:base_image] %>"
    log "fetch: starting to fetch $s3_path"
-   s3cmd -f get $s3_path - 2> >(log)
+   aws s3 cp --quiet $s3_path - 2> >(log)
    log "fetch: successfully fetched $s3_path"
  }

@@ -5,7 +5,7 @@ log "fetch: starting to fetch $s3_path"
  for attempt in {1..200}; do
    [[ $worked != 0 ]] || break
    log "fetch: attempt ${attempt} to get $s3_path ..."
-   s3cmd -f get $s3_path $output_path 2> >(log) && worked=0 || (log "fetch: attempt failed, sleeping 30"; sleep 30)
+   aws s3 cp --quiet $s3_path $output_path 2> >(log) && worked=0 || (log "fetch: attempt failed, sleeping 30"; sleep 30)
  done
  [[ $worked != 0 ]] && fatal "fetch: failed to pull deb from S3"
  log "fetch: successfully fetched $s3_path"
@@ -5,7 +5,7 @@ log "fetch: starting to fetch $s3_path"
  for attempt in {1..200}; do
    [[ $worked != 0 ]] || break
    log "fetch: attempt ${attempt} to get $s3_path ..."
-   s3cmd -f get $s3_path $output_path 2> >(log) && worked=0 || (log "fetch: attempt failed, sleeping 30"; sleep 30)
+   aws s3 cp --quiet $s3_path $output_path 2> >(log) && worked=0 || (log "fetch: attempt failed, sleeping 30"; sleep 30)
  done
  [[ $worked != 0 ]] && fatal "fetch: failed to pull deb from S3"
  log "fetch: successfully fetched $s3_path"
@@ -5,14 +5,14 @@ base_image = "/opt/dockly/base_image.tar"
  s3_diff_docker_import_base_fn() {
    s3_path="<%= data[:base_image] %>"
    log "fetch: starting to fetch $s3_path"
-   s3cmd -f get $s3_path - 2> >(log)
+   aws s3 cp --quiet $s3_path - 2> >(log)
    log "fetch: successfully fetched $s3_path"
  }

  s3_diff_docker_import_diff_fn() {
    s3_path="<%= data[:diff_image] %>"
    log "fetch: starting to fetch $s3_path"
-   s3cmd -f get $s3_path - 2> >(log)
+   aws s3 cp --quiet $s3_path - 2> >(log)
    log "fetch: successfully fetched $s3_path"
  }

@@ -33,18 +33,6 @@ docker_import() {
    docker import - $repo:$tag > >(log) 2>&1 || fatal "docker failed to import"
  }

- remove_bad_imports() {
-   log "removing bad imports"
-   images=$(docker images | grep \<none | awk '{ print $3 }' || echo "")
-   log "images: $images"
-   if [[ "x$images" != "x" ]]; then
-     docker rmi $images > >(log)
-     log "bad import removed"
-   else
-     log "no bad images"
-   fi
- }
-
  worked=1
  for attempt in {1..200}; do
    [[ $worked != 0 ]] || break
@@ -56,7 +44,7 @@ log "fetch: successfully pulled base image"
  worked=1
  for attempt in {1..200}; do
    [[ $worked != 0 ]] || break
-   stream_image | docker_import && worked=0 || (remove_bad_imports; log "fetch: attempt $attempt failed, sleeping 30"; sleep 30)
+   stream_image | docker_import && worked=0 || (log "fetch: attempt $attempt failed, sleeping 30"; sleep 30)
  done
  [[ $worked != 0 ]] && fatal "fetch: failed to import diff image"
  log "fetch: successfully imported diff image"
@@ -1,7 +1,7 @@
  s3_docker_import_fn() {
    s3_path="<%= data[:s3_url] %>"
    log "fetch: starting to fetch $s3_path"
-   s3cmd -f get $s3_path - 2> >(log)
+   aws s3 cp --quiet $s3_path - 2> >(log)
    log "fetch: successfully fetched $s3_path"
  }
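
The startup-script snippets above are ERB templates rendered against a `data` hash (e.g. `data[:s3_url]`). A minimal sketch of rendering one with the standard library; the filename and hash contents are illustrative assumptions:

```ruby
require 'erb'

# Illustrative only: expose a `data` hash to the template via the local binding.
data = { :s3_url => 's3://my-bucket/my-app/abc1234/my-app.tar.gz' }
template = ERB.new(File.read('s3_docker_import.erb'))  # assumed filename
puts template.result(binding)
```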