dpkg-s3 0.3.1 → 0.4.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -1,161 +1,159 @@
1
- # -*- encoding : utf-8 -*-
2
- require "tempfile"
3
-
4
- class Dpkg::S3::Release
5
- include Dpkg::S3::Utils
6
-
7
- attr_accessor :codename
8
- attr_accessor :origin
9
- attr_accessor :suite
10
- attr_accessor :architectures
11
- attr_accessor :components
12
- attr_accessor :cache_control
13
-
14
- attr_accessor :files
15
- attr_accessor :policy
16
-
17
- def initialize
18
- @origin = nil
19
- @suite = nil
20
- @codename = nil
21
- @architectures = []
22
- @components = []
23
- @cache_control = ""
24
- @files = {}
25
- @policy = :public_read
26
- end
27
-
28
- class << self
29
- def retrieve(codename, origin=nil, suite=nil, cache_control=nil)
30
- if s = Dpkg::S3::Utils.s3_read("dists/#{codename}/Release")
31
- rel = self.parse_release(s)
32
- else
33
- rel = self.new
1
+ # frozen_string_literal: true
2
+
3
+ require 'tempfile'
4
+
5
+ module Dpkg
6
+ module S3
7
+ # Release is responsible for creating/retrieving and rebuilding the debian Release manifest with
8
+ # standard information required when publishing the package to a debian repository
9
+ class Release
10
+ include Dpkg::S3::Utils
11
+
12
+ attr_accessor :codename, :origin, :suite, :architectures, :components, :cache_control, :files, :policy
13
+
14
+ def initialize
15
+ @origin = nil
16
+ @suite = nil
17
+ @codename = nil
18
+ @architectures = []
19
+ @components = []
20
+ @cache_control = ''
21
+ @files = {}
22
+ @policy = :public_read
34
23
  end
35
- rel.codename = codename
36
- rel.origin = origin unless origin.nil?
37
- rel.suite = suite unless suite.nil?
38
- rel.cache_control = cache_control
39
- rel
40
- end
41
-
42
- def parse_release(str)
43
- rel = self.new
44
- rel.parse(str)
45
- rel
46
- end
47
- end
48
24
 
49
- def filename
50
- "dists/#{@codename}/Release"
51
- end
25
+ class << self
26
+ def retrieve(codename, origin = nil, suite = nil, cache_control = nil)
27
+ rel = if (s = Dpkg::S3::Utils.s3_read("dists/#{codename}/Release"))
28
+ parse_release(s)
29
+ else
30
+ new
31
+ end
32
+ rel.codename = codename
33
+ rel.origin = origin unless origin.nil?
34
+ rel.suite = suite unless suite.nil?
35
+ rel.cache_control = cache_control
36
+ rel
37
+ end
52
38
 
53
- def parse(str)
54
- parse = lambda do |field|
55
- value = str[/^#{field}: .*/]
56
- if value.nil?
57
- return nil
58
- else
59
- return value.split(": ",2).last
39
+ def parse_release(str)
40
+ rel = new
41
+ rel.parse(str)
42
+ rel
43
+ end
60
44
  end
61
- end
62
45
 
63
- # grab basic fields
64
- self.codename = parse.call("Codename")
65
- self.origin = parse.call("Origin") || nil
66
- self.suite = parse.call("Suite") || nil
67
- self.architectures = (parse.call("Architectures") || "").split(/\s+/)
68
- self.components = (parse.call("Components") || "").split(/\s+/)
69
-
70
- # find all the hashes
71
- str.scan(/^\s+([^\s]+)\s+(\d+)\s+(.+)$/).each do |(hash,size,name)|
72
- self.files[name] ||= { :size => size.to_i }
73
- case hash.length
74
- when 32
75
- self.files[name][:md5] = hash
76
- when 40
77
- self.files[name][:sha1] = hash
78
- when 64
79
- self.files[name][:sha256] = hash
46
+ def filename
47
+ "dists/#{@codename}/Release"
80
48
  end
81
- end
82
- end
83
49
 
84
- def generate
85
- template("release.erb").result(binding)
86
- end
50
+ def parse(str)
51
+ parse = lambda do |field|
52
+ value = str[/^#{field}: .*/]
53
+ return nil if value.nil?
87
54
 
88
- def write_to_s3
89
- # validate some other files are present
90
- if block_given?
91
- self.validate_others { |f| yield f }
92
- else
93
- self.validate_others
94
- end
55
+ return value.split(': ', 2).last
56
+ end
95
57
 
96
- # generate the Release file
97
- release_tmp = Tempfile.new("Release")
98
- release_tmp.puts self.generate
99
- release_tmp.close
100
- yield self.filename if block_given?
101
- s3_store(release_tmp.path, self.filename, 'text/plain; charset=utf-8', self.cache_control)
102
-
103
- # sign the file, if necessary
104
- if Dpkg::S3::Utils.signing_key
105
- key_param = Dpkg::S3::Utils.signing_key != "" ? "--default-key=#{Dpkg::S3::Utils.signing_key}" : ""
106
- if system("gpg -a #{key_param} --digest-algo SHA256 #{Dpkg::S3::Utils.gpg_options} -s --clearsign #{release_tmp.path}")
107
- local_file = release_tmp.path+".asc"
108
- remote_file = "dists/#{@codename}/InRelease"
109
- yield remote_file if block_given?
110
- raise "Unable to locate InRelease file" unless File.exists?(local_file)
111
- s3_store(local_file, remote_file, 'application/pgp-signature; charset=us-ascii', self.cache_control)
112
- File.unlink(local_file)
113
- else
114
- raise "Signing the InRelease file failed."
58
+ # grab basic fields
59
+ self.codename = parse.call('Codename')
60
+ self.origin = parse.call('Origin') || nil
61
+ self.suite = parse.call('Suite') || nil
62
+ self.architectures = (parse.call('Architectures') || '').split(/\s+/)
63
+ self.components = (parse.call('Components') || '').split(/\s+/)
64
+
65
+ # find all the hashes
66
+ str.scan(/^\s+([^\s]+)\s+(\d+)\s+(.+)$/).each do |(hash, size, name)|
67
+ files[name] ||= { size: size.to_i }
68
+ case hash.length
69
+ when 32
70
+ files[name][:md5] = hash
71
+ when 40
72
+ files[name][:sha1] = hash
73
+ when 64
74
+ files[name][:sha256] = hash
75
+ end
76
+ end
115
77
  end
116
- if system("gpg -a #{key_param} --digest-algo SHA256 #{Dpkg::S3::Utils.gpg_options} -b #{release_tmp.path}")
117
- local_file = release_tmp.path+".asc"
118
- remote_file = self.filename+".gpg"
119
- yield remote_file if block_given?
120
- raise "Unable to locate Release signature file" unless File.exists?(local_file)
121
- s3_store(local_file, remote_file, 'application/pgp-signature; charset=us-ascii', self.cache_control)
122
- File.unlink(local_file)
123
- else
124
- raise "Signing the Release file failed."
78
+
79
+ def generate
80
+ template('release.erb').result(binding)
125
81
  end
126
- else
127
- # remove an existing Release.gpg, if it was there
128
- s3_remove(self.filename+".gpg")
129
- end
130
82
 
131
- release_tmp.unlink
132
- end
83
+ def write_to_s3(&block)
84
+ # validate some other files are present
85
+ if block_given?
86
+ validate_others(&block)
87
+ else
88
+ validate_others
89
+ end
133
90
 
134
- def update_manifest(manifest)
135
- self.components << manifest.component unless self.components.include?(manifest.component)
136
- self.architectures << manifest.architecture unless self.architectures.include?(manifest.architecture)
137
- self.files.merge!(manifest.files)
138
- end
91
+ # generate the Release file
92
+ release_tmp = Tempfile.new('Release')
93
+ release_tmp.puts generate
94
+ release_tmp.close
95
+ yield filename if block_given?
96
+ s3_store(release_tmp.path, filename, 'text/plain; charset=utf-8', cache_control)
97
+
98
+ # sign the file, if necessary
99
+ if Dpkg::S3::Utils.signing_key
100
+ key_param = Dpkg::S3::Utils.signing_key == '' ? '' : "--default-key=#{Dpkg::S3::Utils.signing_key}"
101
+ gpg_clear = "gpg -a #{key_param} --digest-algo SHA256 #{Dpkg::S3::Utils.gpg_options} -s --clearsign #{release_tmp.path}" # rubocop:disable Layout/LineLength
102
+ gpg_sign = "gpg -a #{key_param} --digest-algo SHA256 #{Dpkg::S3::Utils.gpg_options} -b #{release_tmp.path}"
103
+ raise 'Signing the InRelease file failed.' unless system(gpg_clear)
104
+
105
+ local_file = "#{release_tmp.path}.asc"
106
+ remote_file = "dists/#{@codename}/InRelease"
107
+ yield remote_file if block_given?
108
+ raise 'Unable to locate InRelease file' unless File.exist?(local_file)
109
+
110
+ s3_store(local_file, remote_file, 'application/pgp-signature; charset=us-ascii', cache_control)
111
+ File.unlink(local_file)
112
+
113
+ raise 'Signing the Release file failed.' unless system(gpg_sign)
114
+
115
+ local_file = "#{release_tmp.path}.asc"
116
+ remote_file = "#{filename}.gpg"
117
+ yield remote_file if block_given?
118
+ raise 'Unable to locate Release signature file' unless File.exist?(local_file)
119
+
120
+ s3_store(local_file, remote_file, 'application/pgp-signature; charset=us-ascii', cache_control)
121
+ File.unlink(local_file)
122
+ else
123
+ # remove an existing Release.gpg, if it was there
124
+ s3_remove("#{filename}.gpg")
125
+ end
139
126
 
140
- def validate_others
141
- to_apply = []
142
- self.components.each do |comp|
143
- %w(amd64 i386 armhf).each do |arch|
144
- next if self.files.has_key?("#{comp}/binary-#{arch}/Packages")
127
+ release_tmp.unlink
128
+ end
145
129
 
146
- m = Dpkg::S3::Manifest.new
147
- m.codename = self.codename
148
- m.component = comp
149
- m.architecture = arch
150
- if block_given?
151
- m.write_to_s3 { |f| yield f }
152
- else
153
- m.write_to_s3
130
+ def update_manifest(manifest)
131
+ components << manifest.component unless components.include?(manifest.component)
132
+ architectures << manifest.architecture unless architectures.include?(manifest.architecture)
133
+ files.merge!(manifest.files)
134
+ end
135
+
136
+ def validate_others(&block)
137
+ to_apply = []
138
+ components.each do |comp|
139
+ %w[amd64 i386 armhf].each do |arch|
140
+ next if files.key?("#{comp}/binary-#{arch}/Packages")
141
+
142
+ m = Dpkg::S3::Manifest.new
143
+ m.codename = codename
144
+ m.component = comp
145
+ m.architecture = arch
146
+ if block_given?
147
+ m.write_to_s3(&block)
148
+ else
149
+ m.write_to_s3
150
+ end
151
+ to_apply << m
152
+ end
154
153
  end
155
- to_apply << m
154
+
155
+ to_apply.each { |m| update_manifest(m) }
156
156
  end
157
157
  end
158
-
159
- to_apply.each { |m| self.update_manifest(m) }
160
158
  end
161
159
  end
@@ -40,7 +40,7 @@ Origin: <%= attributes[:deb_origin] %>
40
40
  <% end -%>
41
41
  Priority: <%= attributes[:deb_priority] %>
42
42
  Homepage: <%= url or "http://nourlgiven.example.com/" %>
43
- Filename: <%= url_filename(codename) %>
43
+ Filename: <%= url_filename_encoded(codename) %>
44
44
  <% if size -%>
45
45
  Size: <%= size %>
46
46
  <% end -%>
data/lib/dpkg/s3/utils.rb CHANGED
@@ -1,117 +1,115 @@
1
- # -*- encoding : utf-8 -*-
2
- require "base64"
3
- require "digest/md5"
4
- require "erb"
5
- require "tmpdir"
6
-
7
- module Dpkg::S3::Utils
8
- module_function
9
- def s3; @s3 end
10
- def s3= v; @s3 = v end
11
- def bucket; @bucket end
12
- def bucket= v; @bucket = v end
13
- def access_policy; @access_policy end
14
- def access_policy= v; @access_policy = v end
15
- def signing_key; @signing_key end
16
- def signing_key= v; @signing_key = v end
17
- def gpg_options; @gpg_options end
18
- def gpg_options= v; @gpg_options = v end
19
- def prefix; @prefix end
20
- def prefix= v; @prefix = v end
21
- def encryption; @encryption end
22
- def encryption= v; @encryption = v end
23
-
24
- class SafeSystemError < RuntimeError; end
25
- class AlreadyExistsError < RuntimeError; end
26
-
27
- def safesystem(*args)
28
- success = system(*args)
29
- if !success
30
- raise SafeSystemError, "'system(#{args.inspect})' failed with error code: #{$?.exitstatus}"
31
- end
32
- return success
33
- end
34
-
35
- def debianize_op(op)
36
- # Operators in debian packaging are <<, <=, =, >= and >>
37
- # So any operator like < or > must be replaced
38
- {:< => "<<", :> => ">>"}[op.to_sym] or op
39
- end
40
-
41
- def template(path)
42
- template_file = File.join(File.dirname(__FILE__), "templates", path)
43
- template_code = File.read(template_file)
44
- ERB.new(template_code, nil, "-")
45
- end
46
-
47
- def s3_path(path)
48
- File.join(*[Dpkg::S3::Utils.prefix, path].compact)
49
- end
50
-
51
- # from fog, Fog::AWS.escape
52
- def s3_escape(string)
53
- string.gsub(/([^a-zA-Z0-9_.\-~+]+)/) {
54
- "%" + $1.unpack("H2" * $1.bytesize).join("%").upcase
55
- }
56
- end
57
-
58
- def s3_exists?(path)
59
- Dpkg::S3::Utils.s3.head_object(
60
- :bucket => Dpkg::S3::Utils.bucket,
61
- :key => s3_path(path),
62
- )
63
- rescue Aws::S3::Errors::NotFound
64
- false
65
- end
66
-
67
- def s3_read(path)
68
- Dpkg::S3::Utils.s3.get_object(
69
- :bucket => Dpkg::S3::Utils.bucket,
70
- :key => s3_path(path),
71
- )[:body].read
72
- rescue Aws::S3::Errors::NoSuchKey
73
- false
74
- end
75
-
76
- def s3_store(path, filename=nil, content_type='application/x-debian-package', cache_control=nil, fail_if_exists=false)
77
- filename = File.basename(path) unless filename
78
- obj = s3_exists?(filename)
79
-
80
- file_md5 = Digest::MD5.file(path)
81
-
82
- # check if the object already exists
83
- if obj != false
84
- return if (file_md5.to_s == obj[:etag].gsub('"', '') or file_md5.to_s == obj[:metadata]['md5'])
85
- raise AlreadyExistsError, "file #{filename} already exists with different contents" if fail_if_exists
86
- end
87
-
88
- options = {
89
- :bucket => Dpkg::S3::Utils.bucket,
90
- :key => s3_path(filename),
91
- :acl => Dpkg::S3::Utils.access_policy,
92
- :content_type => content_type,
93
- :metadata => { "md5" => file_md5.to_s },
94
- }
95
- if !cache_control.nil?
96
- options[:cache_control] = cache_control
97
- end
98
-
99
- # specify if encryption is required
100
- options[:server_side_encryption] = "AES256" if Dpkg::S3::Utils.encryption
101
-
102
- # upload the file
103
- File.open(path) do |f|
104
- options[:body] = f
105
- Dpkg::S3::Utils.s3.put_object(options)
106
- end
107
- end
108
-
109
- def s3_remove(path)
110
- if s3_exists?(path)
111
- Dpkg::S3::Utils.s3.delete_object(
112
- :bucket =>Dpkg::S3::Utils.bucket,
113
- :key => s3_path(path),
114
- )
1
+ # frozen_string_literal: true
2
+
3
+ require 'base64'
4
+ require 'digest/md5'
5
+ require 'erb'
6
+ require 'tmpdir'
7
+
8
+ # Dpkg is the root module for all storage modules including S3
9
+ module Dpkg
10
+ # S3 storage module responsible for handling packages on S3 including upload and delete
11
+ module S3
12
+ # Utils contains helper functions used by the Package and Release modules
13
+ module Utils
14
+ extend self
15
+
16
+ attr_accessor :s3, :bucket, :access_policy, :signing_key, :gpg_options, :prefix, :encryption
17
+
18
+ class SafeSystemError < RuntimeError; end
19
+
20
+ class AlreadyExistsError < RuntimeError; end
21
+
22
+ def safesystem(*args)
23
+ success = system(*args)
24
+ unless success
25
+ raise SafeSystemError,
26
+ "'system(#{args.inspect})' failed with error code: #{$CHILD_STATUS.exitstatus}"
27
+ end
28
+
29
+ success
30
+ end
31
+
32
+ def debianize_op(operator)
33
+ # Operators in debian packaging are <<, <=, =, >= and >>
34
+ # So any operator like < or > must be replaced
35
+ { :< => '<<', :> => '>>' }[operator.to_sym] or operator
36
+ end
37
+
38
+ def template(path)
39
+ template_file = File.join(File.dirname(__FILE__), 'templates', path)
40
+ template_code = File.read(template_file)
41
+ ERB.new(template_code, nil, '-')
42
+ end
43
+
44
+ def s3_path(path)
45
+ File.join(*[Dpkg::S3::Utils.prefix, path].compact)
46
+ end
47
+
48
+ # from fog, Fog::AWS.escape
49
+ def s3_escape(string)
50
+ string.gsub(/([^a-zA-Z0-9_.\-~+]+)/) do
51
+ "%#{Regexp.last_match(1).unpack('H2' * Regexp.last_match(1).bytesize).join('%').upcase}"
52
+ end
53
+ end
54
+
55
+ def s3_exists?(path)
56
+ Dpkg::S3::Utils.s3.head_object(
57
+ bucket: Dpkg::S3::Utils.bucket,
58
+ key: s3_path(path)
59
+ )
60
+ rescue Aws::S3::Errors::NotFound
61
+ false
62
+ end
63
+
64
+ def s3_read(path)
65
+ Dpkg::S3::Utils.s3.get_object(
66
+ bucket: Dpkg::S3::Utils.bucket,
67
+ key: s3_path(path)
68
+ )[:body].read
69
+ rescue Aws::S3::Errors::NoSuchKey
70
+ false
71
+ end
72
+
73
+ def s3_store(path, filename = nil, content_type = 'application/x-debian-package',
74
+ cache_control = nil, fail_if_exists: false)
75
+ filename ||= File.basename(path)
76
+ obj = s3_exists?(filename)
77
+
78
+ file_md5 = Digest::MD5.file(path)
79
+
80
+ # check if the object already exists
81
+ if obj != false
82
+ return if (file_md5.to_s == obj[:etag].gsub('"', '')) || (file_md5.to_s == obj[:metadata]['md5'])
83
+ raise AlreadyExistsError, "file #{filename} already exists with different contents" if fail_if_exists
84
+ end
85
+
86
+ options = {
87
+ bucket: Dpkg::S3::Utils.bucket,
88
+ key: s3_path(filename),
89
+ acl: Dpkg::S3::Utils.access_policy,
90
+ content_type: content_type,
91
+ metadata: { 'md5' => file_md5.to_s }
92
+ }
93
+ options[:cache_control] = cache_control unless cache_control.nil?
94
+
95
+ # specify if encryption is required
96
+ options[:server_side_encryption] = 'AES256' if Dpkg::S3::Utils.encryption
97
+
98
+ # upload the file
99
+ File.open(path) do |f|
100
+ options[:body] = f
101
+ Dpkg::S3::Utils.s3.put_object(options)
102
+ end
103
+ end
104
+
105
+ def s3_remove(path)
106
+ return unless s3_exists?(path)
107
+
108
+ Dpkg::S3::Utils.s3.delete_object(
109
+ bucket: Dpkg::S3::Utils.bucket,
110
+ key: s3_path(path)
111
+ )
112
+ end
115
113
  end
116
114
  end
117
115
  end