dockly 0.0.2

data/lib/dockly/build_cache.rb ADDED
@@ -0,0 +1,131 @@
+ require 'tempfile'
+
+ class Dockly::BuildCache
+   include Dockly::Util::DSL
+   include Dockly::Util::Logger::Mixin
+
+   logger_prefix '[dockly build_cache]'
+
+   attr_accessor :image
+   dsl_attribute :s3_bucket, :s3_object_prefix, :hash_command, :output_dir, :build_command,
+                 :use_latest, :tmp_dir
+
+   default_value :use_latest, false
+   default_value :tmp_dir, '/tmp'
+
+   def execute!
+     ensure_present! :image
+     if up_to_date?
+       debug "build cache up to date, pulling from s3"
+       insert_cache
+     else
+       insert_latest
+       debug "build cache out of date, running build"
+       run_build
+     end
+     debug "finished build cache"
+     image
+   end
+
+   def insert_cache
+     push_cache(hash_output)
+   end
+
+   def insert_latest
+     if use_latest
+       debug "attempting to push latest"
+       if cache = push_cache("latest")
+         debug "pushed latest, removing local file"
+         File.delete(cache.path)
+       end
+     end
+   end
+
+   def run_build
+     container = image.run(build_command).tap(&:start)
+     cache = copy_output_dir(container)
+     debug "pushing #{output_dir} to s3"
+     push_to_s3(cache)
+     cache.close
+     self.image = container.commit
+   end
+
+   def push_cache(version)
+     ensure_present! :output_dir
+     if cache = pull_from_s3(version)
+       debug "inserting to #{output_dir}"
+       self.image = image.insert_local(
+         'localPath' => cache.path,
+         'outputPath' => File.dirname(output_dir)
+       )
+       cache.close
+     else
+       info "could not find #{s3_object(version)}"
+     end
+   end
+
+   def up_to_date?
+     ensure_present! :s3_bucket, :s3_object_prefix
+     connection.head_object(s3_bucket, s3_object(hash_output))
+     true
+   rescue Excon::Errors::NotFound
+     false
+   end
+
+   def pull_from_s3(version)
+     ensure_present! :s3_bucket, :s3_object_prefix
+
+     file_name = s3_object(version)
+     file_path = File.join(tmp_dir, file_name)
+
+     FileUtils.mkdir_p(File.dirname(file_path))
+     if File.exist?(file_path)
+       File.open(file_path, 'rb')
+     else
+       object = connection.get_object(s3_bucket, file_name)
+
+       file = File.open(file_path, 'w+b')
+       file.write(object.body)
+       file.tap(&:rewind)
+     end
+   rescue Excon::Errors::NotFound
+     nil
+   end
+
+   def push_to_s3(file)
+     ensure_present! :s3_bucket, :s3_object_prefix
+     connection.put_object(s3_bucket, s3_object(hash_output), file.read)
+     connection.copy_object(s3_bucket, s3_object(hash_output), s3_bucket, s3_object("latest"))
+   end
+
+   def copy_output_dir(container)
+     ensure_present! :output_dir
+     file_path = File.join(tmp_dir, s3_object(hash_output))
+     FileUtils.mkdir_p(File.dirname(file_path))
+     file = File.open(file_path, 'w+b')
+     container.wait(3600) # 1 hour max timeout
+     container.copy(output_dir) { |chunk| file.write(chunk) }
+     file.tap(&:rewind)
+   end
+
+   def hash_output
+     ensure_present! :image, :hash_command
+     @hash_output ||= begin
+       resp = ""
+       image.run(hash_command).start.attach { |chunk| resp += chunk }
+       resp.strip
+     end
+   end
+
+   def file_output(file)
+     File.join(File.dirname(output_dir), File.basename(file.path))
+   end
+
+   def s3_object(file)
+     "#{s3_object_prefix}#{file}"
+   end
+
+   def connection
+     Dockly::AWS.s3
+   end
+ end
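
Dockly::BuildCache caches the output directory of an expensive build step in S3, keyed by the output of hash_command run inside the image, and re-inserts the cached archive instead of rebuilding when the key already exists. A minimal sketch of how a cache might be configured, assuming Dockly::Util::DSL instance-evaluates the block so each dsl_attribute above can be set by calling it with a value (this block form is what Dockly::Docker#build_cache, later in this diff, passes to BuildCache.new); the bucket, prefix, and commands are made-up examples:

# Hypothetical configuration inside a docker block; every value is illustrative.
build_cache do
  s3_bucket        'my-build-cache-bucket'
  s3_object_prefix 'my_app/bundle/'
  hash_command     'md5sum /app/Gemfile.lock'   # cache key: hash of the lockfile
  build_command    'cd /app && bundle install'  # run only when the key is missing from S3
  output_dir       '/app/vendor/bundle'         # directory that is tarred and cached
  use_latest       true                         # seed out-of-date builds from the "latest" copy
end
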
data/lib/dockly/cli.rb ADDED
@@ -0,0 +1,46 @@
+ require 'rubygems'
+ require 'dockly'
+ require 'clamp'
+
+ class Dockly::AbstractCommand < Clamp::Command
+   option ['-F', '--file'], 'FILE', 'dockly file to read', :default => 'dockly.rb', :attribute_name => :file
+
+   def execute
+     if File.exist?(file)
+       Dockly.setup(file)
+     else
+       raise 'Could not find a dockly file!'
+     end
+   end
+ end
+
+ class Dockly::BuildCommand < Dockly::AbstractCommand
+   parameter 'PACKAGE', 'the name of the package to build', :attribute_name => :package_name
+   option ['-f', '--force'], :flag, 'force the package build', :default => false, :attribute_name => :force
+
+   def execute
+     super
+     if package = Dockly::Deb.instances[package_name.to_sym]
+       if force? || !package.exists?
+         package.build
+       else
+         puts "Package already exists!"
+       end
+     end
+   end
+ end
+
+ class Dockly::ListCommand < Dockly::AbstractCommand
+   def execute
+     super
+     Dockly::Deb.instances.each_with_index do |(name, package), index|
+       puts "#{index + 1}. #{name}"
+     end
+   end
+ end
+
+ class Dockly::Cli < Dockly::AbstractCommand
+   subcommand ['build', 'b'], 'Create package', Dockly::BuildCommand
+   subcommand ['list', 'l'], 'List packages', Dockly::ListCommand
+ end
+
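
The CLI is a thin layer over Clamp: the abstract command loads the dockly.rb named by --file through Dockly.setup, and the build and list subcommands work on the Dockly::Deb instances that file registers. A sketch of the executable stub a gem like this would typically ship (no bin/ file appears in this diff, so the path and require are assumptions); Clamp commands are started with .run, which parses ARGV and dispatches to the subcommands above:

#!/usr/bin/env ruby
# Hypothetical bin/dockly; not part of this diff.
require 'dockly/cli'

# Parses ARGV and runs `dockly build PACKAGE` or `dockly list`; each
# subcommand loads the dockly file named by --file (default dockly.rb).
Dockly::Cli.run
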
data/lib/dockly/deb.rb ADDED
@@ -0,0 +1,141 @@
+ require 'fpm'
+
+ class Dockly::Deb
+   include Dockly::Util::DSL
+   include Dockly::Util::Logger::Mixin
+
+   logger_prefix '[dockly deb]'
+   dsl_attribute :package_name, :version, :release, :arch, :build_dir,
+                 :pre_install, :post_install, :pre_uninstall, :post_uninstall,
+                 :s3_bucket, :files
+   dsl_class_attribute :docker, Dockly::Docker
+   dsl_class_attribute :foreman, Dockly::Foreman
+
+   default_value :version, '0.0'
+   default_value :release, '0'
+   default_value :arch, 'x86_64'
+   default_value :build_dir, 'build/deb'
+   default_value :files, []
+
+   def file(source, destination)
+     @files << { :source => source, :destination => destination }
+   end
+
+   def create_package!
+     ensure_present! :build_dir
+     FileUtils.mkdir_p(build_dir)
+     FileUtils.rm(build_path) if File.exist?(build_path)
+     debug "exporting #{package_name} to #{build_path}"
+     build_package
+     if @deb_package
+       @deb_package.output(build_path)
+       info "exported #{package_name} to #{build_path}"
+     end
+   ensure
+     @dir_package.cleanup if @dir_package
+     @deb_package.cleanup if @deb_package
+   end
+
+   def build
+     info "creating package"
+     create_package!
+     info "uploading to s3"
+     upload_to_s3
+   end
+
+   def build_path
+     ensure_present! :build_dir
+     "#{build_dir}/#{output_filename}"
+   end
+
+   def exists?
+     debug "#{name}: checking for package: #{s3_url}"
+     Dockly::AWS.s3.head_object(s3_bucket, s3_object_name)
+     info "#{name}: found package: #{s3_url}"
+     true
+   rescue
+     info "#{name}: could not find package: " +
+       "#{s3_url}"
+     false
+   end
+
+   def upload_to_s3
+     return if s3_bucket.nil?
+     create_package! unless File.exist?(build_path)
+     info "uploading package to s3"
+     Dockly::AWS.s3.put_bucket(s3_bucket) rescue nil
+     Dockly::AWS.s3.put_object(s3_bucket, s3_object_name, File.new(build_path))
+   end
+
+   def s3_url
+     "s3://#{s3_bucket}/#{s3_object_name}"
+   end
+
+   def s3_object_name
+     "#{package_name}/#{Dockly::Util::Git.git_sha}/#{output_filename}"
+   end
+
+   def output_filename
+     "#{package_name}_#{version}.#{release}_#{arch}.deb"
+   end
+
+   private
+   def build_package
+     ensure_present! :package_name, :version, :release, :arch
+
+     info "building #{package_name}"
+     @dir_package = FPM::Package::Dir.new
+     add_docker(@dir_package)
+     add_foreman(@dir_package)
+     add_files(@dir_package)
+
+     debug "converting to deb"
+     @deb_package = @dir_package.convert(FPM::Package::Deb)
+
+     @deb_package.scripts[:prein] = pre_install
+     @deb_package.scripts[:postin] = post_install
+     @deb_package.scripts[:preun] = pre_uninstall
+     @deb_package.scripts[:postun] = post_uninstall
+
+     @deb_package.name = package_name
+     @deb_package.version = version
+     @deb_package.iteration = release
+     @deb_package.architecture = arch
+
+     info "done building #{package_name}"
+   end
+
+   def add_foreman(package)
+     return if foreman.nil?
+     info "adding foreman export"
+     foreman.create!
+     package.attributes[:prefix] = foreman.init_dir
+     Dir.chdir(foreman.build_dir) do
+       package.input('.')
+     end
+     package.attributes[:prefix] = nil
+   end
+
+   def add_docker(package)
+     return if docker.nil?
+     info "adding docker image"
+     docker.generate!
+     package.attributes[:prefix] = docker.package_dir
+     Dir.chdir(File.dirname(docker.tar_path)) do
+       package.input(File.basename(docker.tar_path))
+     end
+     package.attributes[:prefix] = nil
+   end
+
+   def add_files(package)
+     return if files.empty?
+     info "adding files to package"
+     files.each do |file|
+       package.attributes[:prefix] = file[:destination]
+       Dir.chdir(File.dirname(file[:source])) do
+         package.input(File.basename(file[:source]))
+       end
+       package.attributes[:prefix] = nil
+     end
+   end
+ end
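
Dockly::Deb drives FPM: build_package assembles a directory package from the Docker export, the Foreman scripts, and any extra files, converts it to a .deb, and build uploads the result to S3 under package_name/<git sha>/<filename>. A sketch of how a package might be declared in a dockly.rb, assuming the gem exposes a top-level deb helper (defined outside this diff) that creates a named Dockly::Deb instance and registers it in Dockly::Deb.instances, which is how the CLI above finds it; every name and path is illustrative:

# Hypothetical dockly.rb entry; the top-level `deb` helper is an assumption.
deb :my_app do
  package_name 'my-app'
  version      '1.2'
  release      '1'
  s3_bucket    'my-deb-bucket'            # omit to skip the S3 upload

  file 'config/my_app.yml', '/etc/my_app' # extra file, installed under the given prefix

  docker do
    # see the Dockly::Docker sketch at the end of this diff
  end
end
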
data/lib/dockly/docker.rb ADDED
@@ -0,0 +1,169 @@
+ require 'docker'
+ require 'excon'
+ require 'tempfile'
+ require 'zlib'
+ require 'rubygems/package'
+ require 'fileutils'
+
+ class Dockly::Docker
+   include Dockly::Util::DSL
+   include Dockly::Util::Logger::Mixin
+
+   logger_prefix '[dockly docker]'
+   dsl_attribute :import, :git_archive, :build, :repo, :tag, :build_dir, :package_dir,
+                 :timeout, :cleanup_images, :build_caches
+
+   default_value :repo, 'dockly'
+   default_value :build_dir, 'build/docker'
+   default_value :package_dir, '/opt/docker'
+   default_value :build_caches, []
+   default_value :cleanup_images, false
+   default_value :timeout, 60
+
+   def generate!
+     Docker.options = { :read_timeout => timeout, :write_timeout => timeout }
+     docker_tar = File.absolute_path(ensure_tar(fetch_import))
+
+     import = import_base(docker_tar)
+
+     cleanup = add_git_archive(import)
+     cleanup = run_build_caches(cleanup)
+     cleanup = build_image(cleanup)
+
+     export_image(cleanup)
+
+     true
+   ensure
+     cleanup.remove if cleanup_images && !cleanup.nil?
+   end
+
+   def export_filename
+     "#{repo}-#{tag}-image.tgz"
+   end
+
+   def run_build_caches(image)
+     info "starting build caches"
+     build_caches.each do |cache|
+       cache.image = image
+       image = cache.execute!
+     end
+     info "finished build caches"
+     image
+   end
+
+   def tar_path
+     File.join(build_dir, export_filename)
+   end
+
+   def ensure_tar(file_name)
+     if Dockly::Util::Tar.is_tar?(file_name)
+       file_name
+     elsif Dockly::Util::Tar.is_gzip?(file_name)
+       file_name
+     else
+       raise "Expected a (possibly gzipped) tar: #{file_name}"
+     end
+   end
+
+   def make_git_archive
+     ensure_present! :tag, :git_archive
+     info "initializing"
+
+     prefix = git_archive
+     prefix += '/' unless prefix.end_with?('/')
+
+     FileUtils.rm_rf(git_archive_dir)
+     FileUtils.mkdir_p(git_archive_dir)
+     info "archiving #{Dockly::Util::Git.git_sha}"
+     Grit::Git.with_timeout(120) do
+       Dockly::Util::Git.git_repo.archive_to_file(Dockly::Util::Git.git_sha, prefix, git_archive_path, 'tar', 'cat')
+     end
+     git_archive_path
+   end
+
+   def git_archive_dir
+     @git_archive_dir ||= File.join(build_dir, "gitarc")
+   end
+
+   def git_archive_path
+     "#{git_archive_dir}/#{name}.tar"
+   end
+
+   def git_archive_tar
+     git_archive && File.absolute_path(make_git_archive)
+   end
+
+   def import_base(docker_tar)
+     info "importing the docker image from #{docker_tar}"
+     image = ::Docker::Image.import(docker_tar)
+     info "imported docker image: #{image.id}"
+     image
+   end
+
+   def add_git_archive(image)
+     return image if git_archive.nil?
+
+     image.insert_local(
+       'localPath' => git_archive_tar,
+       'outputPath' => '/'
+     )
+   end
+
+   def build_image(image)
+     ensure_present! :repo, :tag, :build
+     info "starting build from #{image.id}"
+     out_image = ::Docker::Image.build("from #{image.id}\n#{build}")
+     info "built the image: #{out_image.id}"
+     out_image.tag(:repo => repo, :tag => tag)
+     out_image
+   end
+
+   def export_image(image)
+     ensure_present! :repo, :tag, :build_dir
+     container = ::Docker::Container.create('Image' => image.id, 'Cmd' => %w[true])
+     info "created the container: #{container.id}"
+     Zlib::GzipWriter.open(tar_path) do |file|
+       container.export do |chunk, remaining, total|
+         file.write(chunk)
+       end
+     end
+     info "done writing the docker tar: #{export_filename}"
+   end
+
+   def fetch_import
+     ensure_present! :tag, :import
+     path = "/tmp/dockly-docker-import.#{name}.#{File.basename(import)}"
+
+     if File.exist?(path)
+       debug "already fetched #{import}"
+     else
+       debug "fetching #{import}"
+       File.open("#{path}.tmp", 'wb') do |file|
+         case import
+         when /^s3:\/\/(?<bucket_name>.+?)\/(?<object_path>.+)$/
+           connection.get_object(Regexp.last_match[:bucket_name],
+                                 Regexp.last_match[:object_path]) do |chunk, remaining, total|
+             file.write(chunk)
+           end
+         when /^https?:\/\//
+           Excon.get(import, :response_block => lambda { |chunk, remaining, total|
+             file.write(chunk)
+           })
+         else
+           raise "You can only import from S3 or a public url"
+         end
+       end
+       FileUtils.mv("#{path}.tmp", path, :force => true)
+     end
+     path
+   end
+
+   def build_cache(&block)
+     build_caches << Dockly::BuildCache.new(&block)
+   end
+
+   private
+   def connection
+     Dockly::AWS.s3
+   end
+ end
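
Dockly::Docker#generate! runs the whole image pipeline: fetch the base image (from s3:// or http(s)), docker-import it, insert the git archive, run the build caches, apply the Dockerfile fragment in build, then export the tagged result as a gzipped tarball under build_dir. A sketch of a docker block as it might be nested inside a deb declaration (the dsl_class_attribute :docker in Dockly::Deb suggests that nesting); the URL, tag, and commands are illustrative, and the surrounding DSL behaviour is assumed, as in the earlier sketches:

# Hypothetical docker block; all values are examples.
docker do
  import      's3://my-base-images/ubuntu-12.04.tar.gz' # fetched through Dockly::AWS.s3
  git_archive '/app'                                    # repository contents land under /app
  repo        'my_app'
  tag         'v1.2'
  build       "run cd /app && bundle exec rake build"   # lines appended after the generated FROM
  timeout     300                                       # Docker read/write timeout in seconds

  build_cache do
    # configured as in the Dockly::BuildCache sketch near the top of this diff
  end
end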