dockly 1.2.1 → 1.3.1
- data/.gitignore +1 -0
- data/README.md +9 -4
- data/dockly.gemspec +2 -2
- data/img/dockly.png +0 -0
- data/lib/dockly/build_cache.rb +10 -133
- data/lib/dockly/build_cache/base.rb +117 -0
- data/lib/dockly/build_cache/docker.rb +72 -0
- data/lib/dockly/build_cache/local.rb +56 -0
- data/lib/dockly/cli.rb +39 -0
- data/lib/dockly/docker.rb +5 -7
- data/lib/dockly/util/tar.rb +31 -0
- data/lib/dockly/version.rb +1 -1
- data/spec/dockly/build_cache/base_spec.rb +74 -0
- data/spec/dockly/build_cache/docker_spec.rb +181 -0
- data/spec/dockly/build_cache/local_spec.rb +141 -0
- metadata +16 -8
- data/spec/dockly/build_cache_spec.rb +0 -175
data/.gitignore
CHANGED
data/README.md
CHANGED
@@ -2,8 +2,8 @@
 [![Build Status](https://travis-ci.org/swipely/dockly.png?branch=refactor_setup)](https://travis-ci.org/swipely/dockly)
 [![Dependency Status](https://gemnasium.com/swipely/dockly.png)](https://gemnasium.com/swipely/dockly)
 
-Dockly
-
+![Dockly](https://raw.github.com/swipely/dockly/master/img/dockly.png)
+======================================================================
 
 `dockly` is a gem made to ease the pain of packaging an application. For this gem to be useful, quite a few assumptions can be made about your stack:
 
@@ -74,10 +74,14 @@ The `build_cache` DSL is used to prevent rebuilding assets every build and used
     - description: the name prepended to the package; allows for namespacing your caches
 - `hash_command`
     - required: `true`
-    - description: command run
+    - description: command run to determine if the build cache is up to date (eg. `md5sum ... | awk '{ print $1 }'`)
+- `parameter_command`
+    - required: `false`
+    - allows multiple
+    - description: command run to build specific versions of build caches -- useful for multiple operating systems (not required)
 - `build_command`
     - required: `true`
-    - description: command run
+    - description: command run when the build cache is out of date
 - `output_dir`
     - required: `true`
     - description: where the cache is located in the Docker image filesystem
@@ -135,6 +139,7 @@ In addition to the above attributes, `docker` has the following references:
 
 - `build_cache`
     - required: `false`
+    - allows multiple
     - class: `Dockly::BuildCache`
     - description: a caching system to stop rebuilding/compiling the same files every time
 
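
For reference, a minimal `dockly.rb` sketch that combines the attributes documented above; the bucket, prefix, and commands are illustrative, and the nested block form assumes the `docker`-level `build_cache` reference described further down in the README:

    docker :app do
      build_cache do
        s3_bucket         'my-build-caches'
        s3_object_prefix  'bundle_cache_'
        hash_command      "md5sum Gemfile.lock | awk '{ print $1 }'"
        parameter_command 'uname -r'   # optional: namespaces the cache per OS/kernel
        build_command     'bundle install --path vendor/bundle'
        output_dir        'vendor/bundle'
      end
    end
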
data/dockly.gemspec
CHANGED
@@ -15,8 +15,8 @@ Gem::Specification.new do |gem|
   gem.require_paths = %w{lib}
   gem.version = Dockly::VERSION
   gem.add_dependency 'clamp', '~> 0.6'
-  gem.add_dependency 'docker-api', '~> 1.7.
-  gem.add_dependency 'dockly-util', '~> 0.0.
+  gem.add_dependency 'docker-api', '~> 1.7.3'
+  gem.add_dependency 'dockly-util', '~> 0.0.6'
   gem.add_dependency 'excon'
   gem.add_dependency 'fog', '~> 1.18.0'
   gem.add_dependency 'foreman'
data/img/dockly.png
ADDED
Binary file
data/lib/dockly/build_cache.rb
CHANGED
@@ -1,139 +1,16 @@
-class Dockly::BuildCache
-  include Dockly::Util::DSL
-  include Dockly::Util::Logger::Mixin
-
-  logger_prefix '[dockly build_cache]'
-
-  attr_accessor :image
-  dsl_attribute :s3_bucket, :s3_object_prefix, :hash_command, :output_dir, :build_command,
-    :use_latest, :tmp_dir
-
-  default_value :use_latest, false
-  default_value :tmp_dir, '/tmp'
-
-  def execute!
-    ensure_present! :image
-    debug "Looking for cache for hash: #{hash_output}"
-    if up_to_date?
-      debug "build cache up to date, pulling from s3"
-      insert_cache
-    else
-      insert_latest
-      debug "build cache out of date, running build"
-      run_build
-    end
-    debug "finished build cache"
-    image
-  end
-
-  def insert_cache
-    push_cache(hash_output)
-  end
-
-  def insert_latest
-    if use_latest
-      debug "attempting to push latest"
-      if cache = push_cache("latest")
-        debug "pushed latest, removing local file"
-        File.delete(cache.path)
-      end
-    end
-  end
-
-  def run_build
-    container = image.run(build_command)
-    status = container.wait(3600)['StatusCode'] # 1 hour max timeout
-    raise "Build Cache `#{build_command}` failed to run." unless status.zero?
-    cache = copy_output_dir(container)
-    debug "pushing #{output_dir} to s3"
-    push_to_s3(cache)
-    cache.close
-    self.image = container.commit
-  end
-
-  def push_cache(version)
-    ensure_present! :output_dir
-    if cache = pull_from_s3(version)
-      debug "inserting to #{output_dir}"
-      container = image.run("mkdir -p #{File.dirname(output_dir)}")
-      image_with_dir = container.tap { |c| c.wait }.commit
-      self.image = image_with_dir.insert_local(
-        'localPath' => cache.path,
-        'outputPath' => File.dirname(output_dir)
-      )
-      cache.close
-    else
-      info "could not find #{s3_object(version)}"
-    end
-  end
-
-  def up_to_date?
-    ensure_present! :s3_bucket, :s3_object_prefix
-    connection.head_object(s3_bucket, s3_object(hash_output))
-    true
-  rescue Excon::Errors::NotFound
-    false
-  end
-
-  def pull_from_s3(version)
-    ensure_present! :s3_bucket, :s3_object_prefix
-
-    file_name = s3_object(version)
-    file_path = File.join(tmp_dir,file_name)
-
-    FileUtils.mkdir_p(File.dirname(file_path))
-    unless File.exist?(file_path)
-      object = connection.get_object(s3_bucket, file_name)
-
-      file = File.open(file_path, 'w+b')
-      file.write(object.body)
-      file.tap(&:rewind)
-    else
-      File.open(file_path, 'rb')
-    end
-  rescue Excon::Errors::NotFound
-    nil
-  end
-
-  def push_to_s3(file)
-    ensure_present! :s3_bucket, :s3_object_prefix
-    connection.put_object(s3_bucket, s3_object(hash_output), file.read)
-    connection.copy_object(s3_bucket, s3_object(hash_output), s3_bucket, s3_object("latest"))
-  end
-
-  def copy_output_dir(container)
-    ensure_present! :output_dir
-    file_path = File.join(tmp_dir,s3_object(hash_output))
-    FileUtils.mkdir_p(File.dirname(file_path))
-    file = File.open(file_path, 'w+b')
-    container.wait(3600) # 1 hour max timeout
-    container.copy(output_dir) { |chunk| file.write(chunk) }
-    file.tap(&:rewind)
-  end
-
-  def hash_output
-    ensure_present! :image, :hash_command
-    @hash_output ||= begin
-      resp = ""
-      container = image.run(hash_command)
-      container.attach { |source,chunk| resp += chunk }
-      status = container.wait['StatusCode']
-      raise "Hash Command `#{hash_command} failed to run" unless status.zero?
-      resp.strip
-    end
-  end
-
-  def file_output(file)
-    File.join(File.dirname(output_dir), File.basename(file.path))
-  end
-
-  def s3_object(file)
-    "#{s3_object_prefix}#{file}"
-  end
-
-  def connection
-    Dockly::AWS.s3
-  end
-end
+module Dockly::BuildCache
+end
+
+require 'dockly/build_cache/base'
+require 'dockly/build_cache/docker'
+require 'dockly/build_cache/local'
+
+module Dockly::BuildCache
+  class << self
+    attr_writer :model
+
+    def model
+      @model ||= Dockly::BuildCache::Docker
+    end
+  end
+end
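
The replacement module above is what lets the CLI swap build cache backends at runtime. A rough sketch of the intended use, relying only on the accessors defined here:

    Dockly::BuildCache.model                               #=> Dockly::BuildCache::Docker (default)
    Dockly::BuildCache.model = Dockly::BuildCache::Local   # what Dockly::BuildCacheCommand does below
    Dockly::BuildCache.model                               #=> Dockly::BuildCache::Local
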
data/lib/dockly/build_cache/base.rb
ADDED
@@ -0,0 +1,117 @@
require 'tempfile'

class Dockly::BuildCache::Base
  include Dockly::Util::DSL
  include Dockly::Util::Logger::Mixin

  logger_prefix '[dockly build_cache]'

  dsl_attribute :s3_bucket, :s3_object_prefix, :use_latest,
    :hash_command, :build_command, :parameter_commands,
    :base_dir, :command_dir, :output_dir, :tmp_dir

  default_value :use_latest, false
  default_value :parameter_commands, {}
  default_value :command_dir, '.'
  default_value :output_dir, '.'
  default_value :tmp_dir, '/tmp'

  def execute!
    debug "Looking for cache for hash: #{hash_output}"
    if up_to_date?
      debug "build cache up to date, pulling from s3"
      insert_cache
    else
      insert_latest
      debug "build cache out of date, running build"
      run_build
    end
    debug "finished build cache"
  end

  def insert_cache
    push_cache(hash_output)
  end

  def insert_latest
    if use_latest
      debug "attempting to push latest"
      if cache = push_cache("latest")
        debug "pushed latest, removing local file"
        File.delete(cache.path)
      end
    end
  end

  def up_to_date?
    ensure_present! :s3_bucket, :s3_object_prefix
    connection.head_object(s3_bucket, s3_object(hash_output))
    true
  rescue Excon::Errors::NotFound
    false
  end

  def pull_from_s3(version)
    ensure_present! :s3_bucket, :s3_object_prefix

    file_name = s3_object(version)
    file_path = File.join(tmp_dir,file_name)

    FileUtils.mkdir_p(File.dirname(file_path))
    unless File.exist?(file_path)
      object = connection.get_object(s3_bucket, file_name)

      file = File.open(file_path, 'w+b')
      file.write(object.body)
      file.tap(&:rewind)
    else
      File.open(file_path, 'rb')
    end
  rescue Excon::Errors::NotFound
    nil
  end

  def hash_output
  end

  def parameter_output(command)
  end

  def parameter_command(command)
    parameter_commands[command] = nil
  end

  def push_to_s3(file)
    ensure_present! :s3_bucket, :s3_object_prefix
    connection.put_object(s3_bucket, s3_object(hash_output), file.read)
    connection.copy_object(s3_bucket, s3_object(hash_output), s3_bucket, s3_object("latest"))
  end

  def file_output(file)
    File.join(File.dirname(output_dir), File.basename(file.path))
  end

  def s3_object(file)
    output = "#{s3_object_prefix}"
    parameter_commands.each do |parameter_command, _|
      output << "#{parameter_output(parameter_command)}_" unless parameter_output(parameter_command).nil?
    end
    output << "#{file}"
  end

  def command_directory
    File.join(base_directory, command_dir)
  end

  def output_directory
    File.join(base_directory, output_dir)
  end

  def base_directory
    base_dir || docker.git_archive
  end

  def connection
    Dockly::AWS.s3
  end
end
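
A short sketch of how `s3_object` composes cache keys in this base class, mirroring the expectations in the new base_spec.rb further down (the prefix and values are illustrative):

    cache = Dockly::BuildCache::Base.new(:name => :example_cache)
    cache.s3_object_prefix 'swag'
    cache.s3_object('lel')               #=> "swaglel" while no parameter_commands are registered
    cache.parameter_command 'uname -r'   # once a subclass's parameter_output returns e.g. "linux",
    cache.s3_object('lel')               #   the key becomes "swaglinux_lel"
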
data/lib/dockly/build_cache/docker.rb
ADDED
@@ -0,0 +1,72 @@
class Dockly::BuildCache::Docker < Dockly::BuildCache::Base
  attr_accessor :image

  def execute!
    ensure_present! :image
    super
    image
  end

  def run_build
    status, body, container = run_command(build_command)
    raise "Build Cache `#{build_command}` failed to run." unless status.zero?
    cache = copy_output_dir(container)
    debug "pushing #{output_directory} to s3"
    push_to_s3(cache)
    cache.close
    self.image = container.commit
  end

  def push_cache(version)
    ensure_present! :output_dir
    if cache = pull_from_s3(version)
      debug "inserting to #{output_directory}"
      container = image.run("mkdir -p #{File.dirname(output_directory)}")
      image_with_dir = container.tap { |c| c.wait }.commit
      self.image = image_with_dir.insert_local(
        'localPath' => cache.path,
        'outputPath' => File.dirname(output_directory)
      )
      cache.close
    else
      info "could not find #{s3_object(version)}"
    end
  end

  def copy_output_dir(container)
    ensure_present! :output_dir
    file_path = File.join(tmp_dir,s3_object(hash_output))
    FileUtils.mkdir_p(File.dirname(file_path))
    file = File.open(file_path, 'w+b')
    container.wait(3600) # 1 hour max timeout
    container.copy(output_directory) { |chunk| file.write(chunk) }
    file.tap(&:rewind)
  end

  def hash_output
    ensure_present! :image, :hash_command
    @hash_output ||= begin
      status, body, container = run_command(hash_command)
      raise "Hash Command `#{hash_command}` failed to run" unless status.zero?
      body
    end
  end

  def parameter_output(command)
    ensure_present! :image
    raise "Parameter Command tried to run but not found" unless parameter_commands.keys.include?(command)
    @parameter_commands[command] ||= begin
      status, body, container = run_command(command)
      raise "Parameter Command `#{command}` failed to run" unless status.zero?
      body
    end
  end

  def run_command(command)
    resp = ""
    container = image.run(["/bin/bash", "-lc", "cd #{command_directory} && #{command}"])
    container.attach { |source,chunk| resp += chunk }
    status = container.wait['StatusCode']
    [status, resp.strip, container]
  end
end
data/lib/dockly/build_cache/local.rb
ADDED
@@ -0,0 +1,56 @@
class Dockly::BuildCache::Local < Dockly::BuildCache::Base
  def run_build
    puts "Build command: #{build_command}"
    status, body = run_command(build_command)
    raise "Build Cache `#{build_command}` failed to run." unless status.success?
    FileUtils.mkdir_p(File.dirname(save_file))
    tar_file = Dockly::Util::Tar.tar(output_directory, save_file)
    push_to_s3(tar_file)
  end

  def output_directory
    File.expand_path(File.join(Dir.pwd, output_dir))
  end

  def save_file
    File.expand_path("build/build_cache/#{s3_object_prefix}#{hash_output}")
  end

  def push_cache(version)
    ensure_present! :output_dir
    if cache = pull_from_s3(version)
      dest = File.dirname(File.expand_path(output_dir))
      Dockly::Util::Tar.untar(cache, dest)
    else
      info "could not find #{s3_object(output_dir)}"
    end
  end

  def hash_output
    ensure_present! :hash_command
    @hash_output ||= begin
      status, body = run_command(hash_command)
      raise "Hash Command `#{hash_command} failed to run" unless status.success?
      body
    end
  end

  def parameter_output(command)
    raise "Parameter Command tried to run but not found" unless parameter_commands.keys.include?(command)
    @parameter_commands[command] ||= begin
      status, body = run_command(command)
      raise "Parameter Command `#{command} failed to run" unless status.success?
      body
    end
  end

  def run_command(command)
    resp = ""
    Bundler.with_clean_env do
      IO.popen(command) do |io|
        resp << io.read
      end
    end
    [$?, resp.strip]
  end
end
data/lib/dockly/cli.rb
CHANGED
@@ -39,8 +39,47 @@ class Dockly::ListCommand < Dockly::AbstractCommand
   end
 end
 
+class Dockly::BuildCacheCommand < Dockly::AbstractCommand
+  parameter 'DOCKER', 'the name of the docker image to build for', :attribute_name => :docker_name
+  option ['-l', '--list'], :flag, 'list the build caches', :default => false, :attribute_name => :list
+  option ['-L', '--local'], :flag, 'use local build caches', :default => false, :attribute_name => :local
+
+  def execute
+    Dockly::BuildCache.model = Dockly::BuildCache::Local
+    super
+    build_caches = Dockly.docker(docker_name.to_sym).build_cache || []
+    raise "No build cache for #{docker_name}" if build_caches.empty?
+    if list?
+      build_caches.each_with_index do |build_cache, index|
+        puts "#{index + 1}. Hash: #{build_cache.hash_command} Build: #{build_cache.build_command}"
+      end
+    else
+      bcs = if local?
+        convert_bc_to_local_bc(docker_name)
+      else
+        build_caches
+      end
+      bcs.each do |bc|
+        bc.execute!
+      end
+    end
+  end
+end
+
 class Dockly::Cli < Dockly::AbstractCommand
   subcommand ['build', 'b'], 'Create package', Dockly::BuildCommand
   subcommand ['list', 'l'], 'List packages', Dockly::ListCommand
+  subcommand ['build_cache', 'bc'], 'Build Cache commands', Dockly::BuildCacheCommand
 end
 
+def convert_bc_to_local_bc(docker_name)
+  lbcs = []
+  Dockly.docker(docker_name.to_sym).build_cache.each do |bc|
+    lbc = Dockly::BuildCache::Local.new! { name bc.name }
+    bc.instance_variables.each do |variable|
+      lbc.instance_variable_set(variable, bc.instance_variable_get(variable))
+    end
+    lbcs << lbc
+  end
+  lbcs
+end
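
The new subcommand plugs into the existing clamp CLI, so the shell invocations `dockly build_cache DOCKER --list` and `dockly build_cache DOCKER --local` map roughly to the following (the docker block name `app` is illustrative and assumes a dockly.rb that defines it):

    Dockly::Cli.run('dockly', ['build_cache', 'app', '--list'])   # print each cache's hash/build commands
    Dockly::Cli.run('dockly', ['build_cache', 'app', '--local'])  # convert the caches to Local and execute them
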
data/lib/dockly/docker.rb
CHANGED
@@ -10,13 +10,15 @@ class Dockly::Docker
   include Dockly::Util::Logger::Mixin
 
   logger_prefix '[dockly docker]'
+
+  dsl_class_attribute :build_cache, Dockly::BuildCache.model, type: Array
+
   dsl_attribute :name, :import, :git_archive, :build, :tag, :build_dir, :package_dir,
-                :timeout, :cleanup_images
+    :timeout, :cleanup_images
 
   default_value :tag, nil
   default_value :build_dir, 'build/docker'
   default_value :package_dir, '/opt/docker'
-  default_value :build_caches, []
   default_value :cleanup_images, false
   default_value :timeout, 60
 
@@ -43,7 +45,7 @@ class Dockly::Docker
 
   def run_build_caches(image)
     info "starting build caches"
-    build_caches.each do |cache|
+    (build_cache || []).each do |cache|
       cache.image = image
       image = cache.execute!
     end
@@ -158,10 +160,6 @@ class Dockly::Docker
     path
   end
 
-  def build_cache(&block)
-    build_caches << Dockly::BuildCache.new(&block)
-  end
-
   def repository(value = nil)
     name(value)
   end
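
With the Array-typed `build_cache` reference, `run_build_caches` threads the image through each cache in order; a condensed sketch of that flow, where `docker` is a configured `Dockly::Docker` instance and the starting image is illustrative:

    image = ::Docker::Image.build('from base')
    (docker.build_cache || []).each do |cache|
      cache.image = image      # each cache builds on the image the previous one committed
      image = cache.execute!
    end
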
data/lib/dockly/util/tar.rb
CHANGED
@@ -1,3 +1,5 @@
+require 'fileutils'
+
 module Dockly::Util::Tar
   extend self
 
@@ -24,4 +26,33 @@ module Dockly::Util::Tar
     magic = magic.unpack('H*')[0]
     magic == "1f8b"
   end
+
+  # Creates a tar file in memory recursively
+  # from the given path.
+  #
+  # Returns a StringIO whose underlying String
+  # is the contents of the tar file.
+  def tar(path, output)
+    FileUtils.mkdir_p(File.dirname(output))
+    puts "tarring #{path} to #{output}"
+    tar_command = "tar -cf #{output} -C #{File.dirname(path)} #{File.basename(path)}"
+    puts "Tar Command: #{tar_command}"
+    IO.popen(tar_command) do |io|
+      puts io.read
+    end
+    File.open(output, 'rb+')
+  end
+
+  # untars the given IO into the specified
+  # directory
+  def untar(input_io, destination)
+    puts "untarring #{input_io.path} to #{destination}"
+    FileUtils.mkdir_p(destination)
+    untar_command = "tar -xf #{input_io.path} -C #{destination}"
+    puts "Untar command: #{untar_command}"
+    IO.popen(untar_command) do |io|
+      puts io.read
+    end
+    input_io
+  end
 end
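
Both helpers shell out to the system `tar`; this is roughly how the new local build cache uses them (paths are illustrative):

    tar_file = Dockly::Util::Tar.tar('/app/vendor/bundle', 'build/build_cache/bundle_cache_abc123')
    Dockly::Util::Tar.untar(tar_file, '/app')   # unpacks the archive under the destination directory
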
data/lib/dockly/version.rb
CHANGED
data/spec/dockly/build_cache/base_spec.rb
ADDED
@@ -0,0 +1,74 @@
require 'spec_helper'

describe Dockly::BuildCache::Base do
  subject { described_class.new(:name => :test_build_cache) }

  before do
    subject.s3_bucket 'lol'
    subject.s3_object_prefix 'swag'
    subject.hash_command 'md5sum /etc/vim/vimrc'
    subject.build_command 'touch lol'
    subject.output_dir '/'
  end

  describe '#up_to_date?' do
    context 'when the object exists in s3' do
      before { subject.connection.stub(:head_object) }

      its(:up_to_date?) { should be_true }
    end

    context 'when the object does not exist in s3' do
      before do
        subject.connection.stub(:head_object)
          .and_raise(Excon::Errors::NotFound.new('help'))
      end

      its(:up_to_date?) { should be_false }
    end
  end

  describe '#pull_from_s3' do
    let(:file) { subject.pull_from_s3('hey') }
    let(:object) { double(:object) }

    before do
      subject.connection.stub(:get_object).and_return object
      object.stub(:body).and_return 'hey dad'
    end

    after do
      path = file.path
      file.close
      File.delete(path)
    end

    it 'returns a File with the data pulled' do
      file.read.should == 'hey dad'
    end
  end

  describe '#s3_object' do
    before do
      subject.stub(:s3_object_prefix) { 'lol' }
      subject.stub(:hash_output) { 'lel' }
    end

    context "without an arch_output" do
      it 'returns the s3_prefix merged with the hash_output' do
        subject.s3_object(subject.hash_output).should == 'lollel'
      end
    end

    context "with an arch_output" do
      before do
        subject.parameter_command "linux"
        subject.stub(:parameter_output) { "linux" }
      end

      it 'returns the s3_prefix merged with the hash_output' do
        subject.s3_object(subject.hash_output).should == 'lollinux_lel'
      end
    end
  end
end
data/spec/dockly/build_cache/docker_spec.rb
ADDED
@@ -0,0 +1,181 @@
require 'spec_helper'

describe Dockly::BuildCache::Docker, :docker do
  let!(:build_cache) { described_class.new!(:name => :test_build_cache) }
  let!(:docker) do
    Dockly::Docker.new!(:name => :test_docker) do
      git_archive '/app'
    end
  end
  let(:image) { ::Docker::Image.build('from base') }

  before do
    build_cache.s3_bucket 'lol'
    build_cache.s3_object_prefix 'swag'
    build_cache.image = image
    build_cache.hash_command 'md5sum /etc/vim/vimrc'
    build_cache.build_command 'touch lol'
    build_cache.output_dir '/etc/vim'
    build_cache.base_dir '/'
    docker.build_cache :test_build_cache
  end

  describe "#initialize" do
    context "base_dir is the docker git_archive" do
      before do
        build_cache.instance_variable_set(:@base_dir, nil)
      end

      it "should return the base_directory as the git_archive" do
        expect(build_cache.base_directory).to eq(docker.git_archive)
      end
    end
  end

  describe '#execute!' do
    before do
      build_cache.stub(:up_to_date?).and_return(up_to_date)
      build_cache.stub(:push_cache)
      build_cache.stub(:push_to_s3)
    end

    context 'when the object is up to date' do
      let(:up_to_date) { true }

      it "does not have the file lol" do
        i = build_cache.execute!
        output = ""
        i.run('ls').attach { |source,chunk| output += chunk }
        output.should_not include('lol')
      end
    end

    context 'when the object is not up to date' do
      let(:up_to_date) { false }

      before do
        build_cache.stub(:copy_output_dir) { StringIO.new }
      end

      it "does have the file lol" do
        i = build_cache.execute!
        output = ""
        i.run('ls /').attach { |source,chunk| output += chunk }
        output.should include('lol')
      end
    end
  end

  describe "#run_build" do
    before do
      build_cache.stub(:push_to_s3)
    end

    context "when the build succeeds" do
      it "does have the file lol" do
        i = build_cache.run_build
        output = ""
        i.run('ls').attach { |source,chunk| output += chunk }
        output.should include('lol')
      end
    end

    context "when the build fails" do
      let!(:image) { build_cache.image }
      before do
        build_cache.image = double(:image).stub(:run) {
          stub(:container, { :wait => { 'StatusCode' => 1 } })
        }
      end

      after do
        build_cache.image = image
      end

      it "raises an error" do
        expect { build_cache.run_build }.to raise_error
      end
    end
  end

  describe '#hash_output' do
    let(:output) {
      "682aa2a07693cc27756eee9751db3903 /etc/vim/vimrc"
    }

    context "when hash command returns successfully" do
      before do
        build_cache.image = image
      end

      it 'returns the output of the hash_command in the container' do
        build_cache.hash_output.should == output
      end
    end

    context "when hash command returns failure" do
      before do
        build_cache.image = double(:image).stub(:run, {
          :wait => { 'StatusCode' => 1 }
        })
      end

      it 'raises an error' do
        expect { build_cache.hash_output }.to raise_error
      end
    end
  end

  describe '#copy_output_dir' do
    let(:container) { Docker::Container.create('Image' => 'base', 'Cmd' => %w[true]) }
    let(:file) { build_cache.copy_output_dir(container) }
    let(:hash) { 'this_really_unique_hash' }
    let(:path) { file.path }

    before do
      build_cache.stub(:hash_output).and_return(hash)
      build_cache.output_dir '/root/'; container.wait
    end
    after do
      file.close
      File.delete(path)
    end

    it 'returns a File of the specified directory from the Container' do
      expect(file.path).to include("#{hash}")
      file.should be_a File
      file.read.should include('root/.bashrc')
    end
  end

  describe '#parameter_output' do
    before do
      build_cache.parameter_command command
    end
    let(:output) { "3.8.0-27-generic" }

    context "when parameter command returns successfully" do
      let(:command) { "uname -r" }
      it 'returns the output of the parameter_command' do
        expect(build_cache.parameter_output(command)).to eq(output)
      end
    end

    context "when parameter command returns failure" do
      let(:command) { 'md6sum' }

      it 'raises an error' do
        expect { build_cache.parameter_output(command) }.to raise_error
      end
    end

    context "when a parameter command isn't previously added" do
      let(:command) { "md5sum /etc/vim/vimrc" }

      it 'raises an error' do
        expect { build_cache.parameter_output("#{command}1") }.to raise_error
      end
    end
  end
end
data/spec/dockly/build_cache/local_spec.rb
ADDED
@@ -0,0 +1,141 @@
require 'spec_helper'

describe Dockly::BuildCache::Local do
  let(:build_cache) { described_class.new!(:name => :test_local_build_cache) }

  before do
    build_cache.s3_bucket 'fake'
    build_cache.s3_object_prefix 'object'
    build_cache.hash_command "md5sum #{File.join(Dir.pwd, 'Gemfile')} | awk '{ print $1 }'"
    build_cache.build_command 'mkdir -p tmp && touch tmp/lol'
    build_cache.output_dir 'lib'
  end

  describe '#execute!' do
    before do
      build_cache.stub(:hash_output).and_return('abcdef')
      build_cache.stub(:up_to_date?).and_return(up_to_date)
      build_cache.stub(:push_cache)
      build_cache.stub(:push_to_s3)

      if File.exist?('tmp/lol')
        File.delete('tmp/lol')
      end
    end

    context 'when the object is up to date' do
      let(:up_to_date) { true }

      it "does not have the file lol" do
        i = build_cache.execute!
        output = ""
        IO.popen('ls tmp') { |io| output += io.read }
        output.should_not include('lol')
      end
    end

    context 'when the object is not up to date' do
      let(:up_to_date) { false }

      before do
        build_cache.stub(:copy_output_dir) { StringIO.new }
      end

      after do
        if File.exist?('tmp/lol')
          File.delete('tmp/lol')
        end
      end

      it "does have the file lol" do
        i = build_cache.execute!
        output = ""
        IO.popen('ls tmp') { |io| output << io.read }
        output.should include('lol')
      end
    end
  end

  describe "#run_build" do
    before do
      build_cache.stub(:push_to_s3)
    end

    context "when the build succeeds" do
      it "does have the file lol" do
        i = build_cache.run_build
        output = ""
        IO.popen('ls tmp') { |io| output << io.read }
        output.should include('lol')
      end
    end

    context "when the build fails" do
      before do
        build_cache.build_command 'md6sum'
      end

      it "raises an error" do
        expect { build_cache.run_build }.to raise_error
      end
    end
  end

  describe '#hash_output' do
    let(:output) {
      "f683463a09482287c33959ab71a87189"
    }

    context "when hash command returns successfully" do
      it 'returns the output of the hash_command' do
        build_cache.hash_output.should == output
      end
    end

    context "when hash command returns failure" do
      before do
        build_cache.hash_command 'md6sum'
      end

      it 'raises an error' do
        expect { build_cache.hash_output }.to raise_error
      end
    end
  end

  describe '#parameter_output' do
    before do
      build_cache.parameter_command command
    end

    let(:output) { "3.8.0-23-generic" }
    context "when parameter command returns successfully" do
      let(:command) { "uname -r" }
      let(:status) { double(:status) }
      before do
        status.stub(:"success?") { true }
        build_cache.stub(:run_command) { [status, output] }
      end

      it 'returns the output of the parameter_command' do
        expect(build_cache.parameter_output(command)).to eq(output)
      end
    end

    context "when parameter command returns failure" do
      let(:command) { "md6sum" }

      it 'raises an error' do
        expect { build_cache.parameter_output(command) }.to raise_error
      end
    end

    context "when a parameter command isn't previously added" do
      let(:command) { "md5sum /etc/vim/vimrc" }

      it 'raises an error' do
        expect { build_cache.parameter_output("#{command}1") }.to raise_error
      end
    end
  end
end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: dockly
 version: !ruby/object:Gem::Version
-  version: 1.2.1
+  version: 1.3.1
 prerelease:
 platform: ruby
 authors:
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-11-
+date: 2013-11-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: clamp
@@ -34,7 +34,7 @@ dependencies:
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
-        version: 1.7.
+        version: 1.7.3
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -42,7 +42,7 @@ dependencies:
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
-        version: 1.7.
+        version: 1.7.3
 - !ruby/object:Gem::Dependency
   name: dockly-util
   requirement: !ruby/object:Gem::Requirement
@@ -50,7 +50,7 @@ dependencies:
     requirements:
    - - ~>
      - !ruby/object:Gem::Version
-        version: 0.0.
+        version: 0.0.6
   type: :runtime
   prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
@@ -58,7 +58,7 @@ dependencies:
     requirements:
     - - ~>
      - !ruby/object:Gem::Version
-        version: 0.0.
+        version: 0.0.6
 - !ruby/object:Gem::Dependency
   name: excon
   requirement: !ruby/object:Gem::Requirement
@@ -256,9 +256,13 @@ files:
 - Rakefile
 - bin/dockly
 - dockly.gemspec
+- img/dockly.png
 - lib/dockly.rb
 - lib/dockly/aws.rb
 - lib/dockly/build_cache.rb
+- lib/dockly/build_cache/base.rb
+- lib/dockly/build_cache/docker.rb
+- lib/dockly/build_cache/local.rb
 - lib/dockly/cli.rb
 - lib/dockly/deb.rb
 - lib/dockly/docker.rb
@@ -270,7 +274,9 @@ files:
 - lib/foreman/cli_fix.rb
 - lib/foreman/export/base_fix.rb
 - spec/dockly/aws_spec.rb
-- spec/dockly/build_cache_spec.rb
+- spec/dockly/build_cache/base_spec.rb
+- spec/dockly/build_cache/docker_spec.rb
+- spec/dockly/build_cache/local_spec.rb
 - spec/dockly/deb_spec.rb
 - spec/dockly/docker_spec.rb
 - spec/dockly/foreman_spec.rb
@@ -311,7 +317,9 @@ specification_version: 3
 summary: Packaging made easy
 test_files:
 - spec/dockly/aws_spec.rb
-- spec/dockly/build_cache_spec.rb
+- spec/dockly/build_cache/base_spec.rb
+- spec/dockly/build_cache/docker_spec.rb
+- spec/dockly/build_cache/local_spec.rb
 - spec/dockly/deb_spec.rb
 - spec/dockly/docker_spec.rb
 - spec/dockly/foreman_spec.rb
data/spec/dockly/build_cache_spec.rb
DELETED
@@ -1,175 +0,0 @@
require 'spec_helper'

describe Dockly::BuildCache, :docker do
  subject { described_class.new(:name => :test_build_cache) }
  let(:image) { ::Docker::Image.build('from base') }

  before do
    subject.s3_bucket 'lol'
    subject.s3_object_prefix 'swag'
    subject.image = image
    subject.hash_command 'md5sum /etc/vim/vimrc'
    subject.build_command 'touch lol'
    subject.output_dir '/'
  end

  describe '#execute!' do
    before do
      subject.stub(:up_to_date?).and_return(up_to_date)
      subject.stub(:push_cache)
      subject.stub(:push_to_s3)
    end

    context 'when the object is up to date' do
      let(:up_to_date) { true }

      it "does not have the file lol" do
        i = subject.execute!
        output = ""
        i.run('ls').attach { |source,chunk| output += chunk }
        output.should_not include('lol')
      end
    end

    context 'when the object is not up to date' do
      let(:up_to_date) { false }

      it "does have the file lol" do
        i = subject.execute!
        output = ""
        i.run('ls').attach { |source,chunk| output += chunk }
        output.should include('lol')
      end
    end
  end

  describe "#run_build" do
    before do
      subject.stub(:push_to_s3)
    end

    context "when the build succeeds" do
      it "does have the file lol" do
        i = subject.run_build
        output = ""
        i.run('ls').attach { |source,chunk| output += chunk }
        output.should include('lol')
      end
    end

    context "when the build fails" do
      let!(:image) { subject.image }
      before do
        subject.image = double(:image).stub(:run) {
          stub(:container, { :wait => { 'StatusCode' => 1 } })
        }
      end

      after do
        subject.image = image
      end

      it "raises an error" do
        expect { subject.run_build }.to raise_error
      end
    end
  end

  describe '#pull_from_s3' do
    let(:file) { subject.pull_from_s3('hey') }
    let(:object) { double(:object) }

    before do
      subject.connection.stub(:get_object).and_return object
      object.stub(:body).and_return 'hey dad'
    end

    after do
      path = file.path
      file.close
      File.delete(path)
    end

    it 'returns a File with the data pulled' do
      file.read.should == 'hey dad'
    end
  end

  describe '#up_to_date?' do
    context 'when the object exists in s3' do
      before { subject.connection.stub(:head_object) }

      its(:up_to_date?) { should be_true }
    end

    context 'when the object does not exist in s3' do
      before do
        subject.connection.stub(:head_object)
          .and_raise(Excon::Errors::NotFound.new('help'))
      end

      its(:up_to_date?) { should be_false }
    end
  end

  describe '#hash_output' do
    let(:output) {
      "682aa2a07693cc27756eee9751db3903 /etc/vim/vimrc"
    }

    context "when hash command returns successfully" do
      before do
        subject.image = image
      end

      it 'returns the output of the hash_command in the container' do
        subject.hash_output.should == output
      end
    end

    context "when hash command returns failure" do
      before do
        subject.image = double(:image).stub(:run, {
          :wait => { 'StatusCode' => 1 }
        })
      end

      it 'raises an error' do
        expect { subject.hash_output }.to raise_error
      end
    end
  end

  describe '#copy_output_dir' do
    let(:container) { Docker::Container.create('Image' => 'base', 'Cmd' => %w[true]) }
    let(:file) { subject.copy_output_dir(container) }
    let(:hash) { 'this_really_unique_hash' }
    let(:path) { file.path }

    before do
      subject.stub(:hash_output).and_return(hash)
      subject.output_dir '/root/'; container.wait
    end
    after do
      file.close
      File.delete(path)
    end

    it 'returns a File of the specified directory from the Container' do
      expect(file.path).to include("#{hash}")
      file.should be_a File
      file.read.should include('root/.bashrc')
    end
  end

  describe '#s3_object' do
    before do
      subject.stub(:s3_object_prefix) { 'lol' }
      subject.stub(:hash_output) { 'lel' }
    end

    it 'returns the s3_prefix merged with the hash_output' do
      subject.s3_object(subject.hash_output).should == 'lollel'
    end
  end
end