dpkg-s3 0.3.1 → 0.4.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b810c961735475ddaa1c7c796f14c92f2625995082bd2800a6bd25ae36e9b112
- data.tar.gz: a20b8c3ca88bce3c73e46c5923da96a0baf75252b872cb350dc0c3573d624a7f
+ metadata.gz: 9387fafa37c357aa1ceafee3b3e1cccee3f12ff7d6d503bf5e07ac5d141b7f11
+ data.tar.gz: c80b9551e201f18fda1c0b466f233cb6912bd4ebadc4e829fd706a167cf26f23
  SHA512:
- metadata.gz: 846afd19f584a6476ce9f6a8fa6ec35019cfad11d761ebc0eed2e229cc1cdf70bd9b47ec3fae0961965e0fb43f22eef3d2cbcd3899a239d25af18c9887c07122
- data.tar.gz: 57e25b91f94625e813d6b473d7173130ed34834344bcb7f661215e245ed4392078a7a220debdd0d406ec4f1ebaf7750bc855cb3a9a249f9beec20525bccf0a1b
+ metadata.gz: df6087bebb91c0ae7237264df676cb86486e6a1d57d7c041659a5bdde57d78ae7bb2018bab9b2a8ec71f1f00ca4bc28aa96c52ae472563967b3b77593ff83539
+ data.tar.gz: 1165f3a4b48fb249a2d4e766eea5b5445e8ad00344c45fef98d322ccb705f1d40fb13782399e0e128615dbffe99aef353cd472e0a2f58e56143e141505e85221
data/README.md CHANGED
@@ -1,7 +1,7 @@
  # dpkg-s3
 
  <p align="left">
- <a href="https://github.com/gamunu/dpkg-s3/actions"><img alt="test status" src="https://github.com/gamunu/dpkg-s3/workflows/Ruby/badge.svg"></a>
+ <a href="https://github.com/gamunu/dpkg-s3/actions"><img alt="test status" src="https://github.com/gamunu/dpkg-s3/workflows/ruby/badge.svg"></a>
  </p>
 
  `dpkg-s3` is a simple utility to make creating and managing APT repositories on
data/bin/dpkg-s3 CHANGED
@@ -1,13 +1,28 @@
  #!/usr/bin/env ruby
+ # frozen_string_literal: true
 
  require 'pathname'
- $:.unshift File.join(Pathname.new(__FILE__).realpath,'../../lib')
+ $LOAD_PATH.unshift File.join(Pathname.new(__FILE__).realpath, '../../lib')
 
  require 'rubygems'
  require 'dpkg/s3/cli'
 
+ # String monkey patch class to add red and gray colors
+ class String
+ def red
+ "\e[31m#{self}\e[0m"
+ end
+
+ def gray
+ "\e[37m#{self}\e[0m"
+ end
+ end
+
  begin
  Dpkg::S3::CLI.start
- rescue
- $stderr.puts "\n\tError during processing: #{$!.message}\n\n"
- end
+ rescue StandardError => e
+ warn "\n\tError during processing: #{e.message}\n\n".red
+ warn "\n\tDebug: #{e.backtrace}\n\n".gray
+ rescue Interrupt
+ warn "\nOperation canceled. Please verify repository for broken packages".red
+ end
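The rewritten executable above hands everything to the Thor-based `Dpkg::S3::CLI.start`. As a minimal sketch of an equivalent programmatic invocation, using only the `upload` command and options defined in `data/lib/dpkg/s3/cli.rb` below (the bucket name and the .deb filename are hypothetical placeholders, not part of the gem):

```ruby
# Minimal sketch: drive the same Thor CLI that bin/dpkg-s3 wraps, passing an
# explicit argument array instead of ARGV. "my-bucket" and the .deb filename
# are placeholders; --bucket, --codename and --component are options defined in cli.rb.
require 'dpkg/s3/cli'

Dpkg::S3::CLI.start(
  %w[upload --bucket my-bucket --codename stable --component main my-package_1.0.0_amd64.deb]
)
```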
data/lib/dpkg/s3.rb CHANGED
@@ -1,6 +1,7 @@
- # -*- encoding : utf-8 -*-
+ # frozen_string_literal: true
+
  module Dpkg
  module S3
- VERSION = "0.3.1"
+ VERSION = '0.4.0'
  end
  end
data/lib/dpkg/s3/cli.rb CHANGED
@@ -1,641 +1,639 @@
- # -*- encoding : utf-8 -*-
- require "aws-sdk-s3"
- require "thor"
- require "dpkg/s3"
- require "dpkg/s3/utils"
- require "dpkg/s3/manifest"
- require "dpkg/s3/package"
- require "dpkg/s3/release"
- require "dpkg/s3/lock"
-
- class Dpkg::S3::CLI < Thor
- class_option :bucket,
- :type => :string,
- :aliases => "-b",
- :desc => "The name of the S3 bucket to upload to."
-
- class_option :prefix,
- :type => :string,
- :desc => "The path prefix to use when storing on S3."
-
- class_option :origin,
- :type => :string,
- :aliases => "-o",
- :desc => "The origin to use in the repository Release file."
-
- class_option :suite,
- :type => :string,
- :desc => "The suite to use in the repository Release file."
-
- class_option :codename,
- :default => "stable",
- :type => :string,
- :aliases => "-c",
- :desc => "The codename of the APT repository."
-
- class_option :component,
- :default => "main",
- :type => :string,
- :aliases => "-m",
- :desc => "The component of the APT repository."
-
- class_option :section,
- :type => :string,
- :aliases => "-s",
- :hide => true
-
- class_option :access_key_id,
- :type => :string,
- :desc => "The access key for connecting to S3."
-
- class_option :secret_access_key,
- :type => :string,
- :desc => "The secret key for connecting to S3."
-
- class_option :session_token,
- :type => :string,
- :desc => "The (optional) session token for connecting to S3."
-
- class_option :endpoint,
- :type => :string,
- :desc => "The URL endpoint to the S3 API."
-
- class_option :s3_region,
- :type => :string,
- :desc => "The region for connecting to S3.",
- :default => ENV["AWS_DEFAULT_REGION"] || "us-east-1"
-
- class_option :force_path_style,
- :default => false,
- :type => :boolean,
- :desc => "Use S3 path style instead of subdomains."
-
- class_option :proxy_uri,
- :type => :string,
- :desc => "The URI of the proxy to send service requests through."
-
- class_option :visibility,
- :default => "public",
- :type => :string,
- :aliases => "-v",
- :desc => "The access policy for the uploaded files. " +
- "Can be public, private, or authenticated."
-
- class_option :sign,
- :type => :string,
- :desc => "GPG Sign the Release file when uploading a package, " +
- "or when verifying it after removing a package. " +
- "Use --sign with your GPG key ID to use a specific key (--sign=6643C242C18FE05B)."
-
- class_option :gpg_options,
- :default => "",
- :type => :string,
- :desc => "Additional command line options to pass to GPG when signing."
-
- class_option :encryption,
- :default => false,
- :type => :boolean,
- :aliases => "-e",
- :desc => "Use S3 server side encryption."
-
- class_option :quiet,
- :type => :boolean,
- :aliases => "-q",
- :desc => "Doesn't output information, just returns status appropriately."
-
- class_option :cache_control,
- :type => :string,
- :aliases => "-C",
- :desc => "Add cache-control headers to S3 objects."
-
- desc "upload FILES",
- "Uploads the given files to a S3 bucket as an APT repository."
-
- option :arch,
- :type => :string,
- :aliases => "-a",
- :desc => "The architecture of the package in the APT repository."
-
- option :preserve_versions,
- :default => false,
- :type => :boolean,
- :aliases => "-p",
- :desc => "Whether to preserve other versions of a package " +
- "in the repository when uploading one."
-
- option :lock,
- :default => false,
- :type => :boolean,
- :aliases => "-l",
- :desc => "Whether to check for an existing lock on the repository " +
- "to prevent simultaneous updates "
-
- option :fail_if_exists,
- :default => false,
- :type => :boolean,
- :desc => "Whether to overwrite any existing package that has the same " +
- "filename in the pool or the same name and version in the manifest but " +
- "different contents."
-
- option :skip_package_upload,
- :default => false,
- :type => :boolean,
- :desc => "Whether to skip all package uploads." +
- "This is useful when hosting .deb files outside of the bucket."
-
- def self.exit_on_failure?
- true
- end
-
- def upload(*files)
- if files.nil? || files.empty?
- error("You must specify at least one file to upload")
- end
+ # frozen_string_literal: true
+
+ require 'aws-sdk-s3'
+ require 'thor'
+ require 'dpkg/s3'
+ require 'dpkg/s3/utils'
+ require 'dpkg/s3/manifest'
+ require 'dpkg/s3/package'
+ require 'dpkg/s3/release'
+ require 'dpkg/s3/lock'
+
+ module Dpkg
+ module S3
+ # CLI interface for dpkg-s3
+ class CLI < Thor
+ class_option :bucket,
+ type: :string,
+ aliases: '-b',
+ desc: 'The name of the S3 bucket to upload to.'
+
+ class_option :prefix,
+ type: :string,
+ desc: 'The path prefix to use when storing on S3.'
+
+ class_option :origin,
+ type: :string,
+ aliases: '-o',
+ desc: 'The origin to use in the repository Release file.'
+
+ class_option :suite,
+ type: :string,
+ desc: 'The suite to use in the repository Release file.'
+
+ class_option :codename,
+ default: 'stable',
+ type: :string,
+ aliases: '-c',
+ desc: 'The codename of the APT repository.'
+
+ class_option :component,
+ default: 'main',
+ type: :string,
+ aliases: '-m',
+ desc: 'The component of the APT repository.'
+
+ class_option :section,
+ type: :string,
+ aliases: '-s',
+ hide: true
+
+ class_option :access_key_id,
+ type: :string,
+ desc: 'The access key for connecting to S3.'
+
+ class_option :secret_access_key,
+ type: :string,
+ desc: 'The secret key for connecting to S3.'
+
+ class_option :session_token,
+ type: :string,
+ desc: 'The (optional) session token for connecting to S3.'
+
+ class_option :endpoint,
+ type: :string,
+ desc: 'The URL endpoint to the S3 API.'
+
+ class_option :s3_region,
+ type: :string,
+ desc: 'The region for connecting to S3.',
+ default: ENV['AWS_DEFAULT_REGION'] || 'us-east-1'
+
+ class_option :force_path_style,
+ default: false,
+ type: :boolean,
+ desc: 'Use S3 path style instead of subdomains.'
+
+ class_option :proxy_uri,
+ type: :string,
+ desc: 'The URI of the proxy to send service requests through.'
+
+ class_option :visibility,
+ default: 'public',
+ type: :string,
+ aliases: '-v',
+ desc: 'The access policy for the uploaded files. ' \
+ 'Can be public, private, or authenticated.'
+
+ class_option :sign,
+ type: :string,
+ desc: 'GPG Sign the Release file when uploading a package, ' \
+ 'or when verifying it after removing a package. ' \
+ 'Use --sign with your GPG key ID to use a specific key (--sign=6643C242C18FE05B).'
+
+ class_option :gpg_options,
+ default: '',
+ type: :string,
+ desc: 'Additional command line options to pass to GPG when signing.'
+
+ class_option :encryption,
+ default: false,
+ type: :boolean,
+ aliases: '-e',
+ desc: 'Use S3 server side encryption.'
+
+ class_option :quiet,
+ type: :boolean,
+ aliases: '-q',
+ desc: "Doesn't output information, just returns status appropriately."
+
+ class_option :cache_control,
+ type: :string,
+ aliases: '-C',
+ desc: 'Add cache-control headers to S3 objects.'
+
+ desc 'upload FILES',
+ 'Uploads the given files to a S3 bucket as an APT repository.'
+
+ option :arch,
+ type: :string,
+ aliases: '-a',
+ desc: 'The architecture of the package in the APT repository.'
+
+ option :preserve_versions,
+ default: false,
+ type: :boolean,
+ aliases: '-p',
+ desc: 'Whether to preserve other versions of a package ' \
+ 'in the repository when uploading one.'
+
+ option :lock,
+ default: false,
+ type: :boolean,
+ aliases: '-l',
+ desc: 'Whether to check for an existing lock on the repository ' \
+ 'to prevent simultaneous updates '
+
+ option :fail_if_exists,
+ default: false,
+ type: :boolean,
+ desc: 'Whether to overwrite any existing package that has the same ' \
+ 'filename in the pool or the same name and version in the manifest but ' \
+ 'different contents.'
+
+ option :skip_package_upload,
+ default: false,
+ type: :boolean,
+ desc: 'Whether to skip all package uploads.' \
+ 'This is useful when hosting .deb files outside of the bucket.'
+
+ def self.exit_on_failure?
+ true
+ end
 
- # make sure all the files exists
- if missing_file = files.find { |pattern| Dir.glob(pattern).empty? }
- error("File '#{missing_file}' doesn't exist")
- end
+ def upload(*files)
+ error('You must specify at least one file to upload') if files.nil? || files.empty?
 
- # configure AWS::S3
- configure_s3_client
-
- begin
- if options[:lock]
- log("Checking for existing lock file")
- if Dpkg::S3::Lock.locked?(options[:codename], component, options[:arch], options[:cache_control])
- lock = Dpkg::S3::Lock.current(options[:codename], component, options[:arch], options[:cache_control])
- log("Repository is locked by another user: #{lock.user} at host #{lock.host}")
- log("Attempting to obtain a lock")
- Dpkg::S3::Lock.wait_for_lock(options[:codename], component, options[:arch], options[:cache_control])
+ # make sure all the files exists
+ if (missing_file = files.find { |pattern| Dir.glob(pattern).empty? })
+ error("File '#{missing_file}' doesn't exist")
  end
- log("Locking repository for updates")
- Dpkg::S3::Lock.lock(options[:codename], component, options[:arch], options[:cache_control])
- @lock_acquired = true
- end
 
- # retrieve the existing manifests
- log("Retrieving existing manifests")
- release = Dpkg::S3::Release.retrieve(options[:codename], options[:origin], options[:suite], options[:cache_control])
- manifests = {}
- release.architectures.each do |arch|
- manifests[arch] = Dpkg::S3::Manifest.retrieve(options[:codename], component, arch, options[:cache_control], options[:fail_if_exists], options[:skip_package_upload])
- end
+ # configure AWS::S3
+ configure_s3_client
 
- packages_arch_all = []
+ begin
+ if options[:lock]
+ log('Checking for existing lock file')
+ if Dpkg::S3::Lock.locked?(options[:codename], component, options[:arch], options[:cache_control])
+ lock = Dpkg::S3::Lock.current(options[:codename], component, options[:arch], options[:cache_control])
+ log("Repository is locked by another user: #{lock.user} at host #{lock.host}")
+ log('Attempting to obtain a lock')
+ Dpkg::S3::Lock.wait_for_lock(options[:codename], component, options[:arch], options[:cache_control])
+ end
+ log('Locking repository for updates')
+ Dpkg::S3::Lock.lock(options[:codename], component, options[:arch], options[:cache_control])
+ @lock_acquired = true
+ end
 
- # examine all the files
- files.collect { |f| Dir.glob(f) }.flatten.each do |file|
- log("Examining package file #{File.basename(file)}")
- pkg = Dpkg::S3::Package.parse_file(file)
+ # retrieve the existing manifests
+ log('Retrieving existing manifests')
+ release = Dpkg::S3::Release.retrieve(options[:codename], options[:origin], options[:suite],
+ options[:cache_control])
+ manifests = {}
+ release.architectures.each do |arch|
+ manifests[arch] =
+ Dpkg::S3::Manifest.retrieve(options[:codename], component, arch, options[:cache_control],
+ options[:fail_if_exists], skip_upload: options[:skip_package_upload])
+ end
 
- # copy over some options if they weren't given
- arch = options[:arch] || pkg.architecture
+ packages_arch_all = []
 
- # If they've specified an arch type that doesn't match the package let them know
- if options.key?("arch") && options[:arch] != pkg.architecture
- warn("You specified architecture #{options[:arch]} but package #{pkg.name} has architecture type of #{pkg.architecture}")
- end
+ # examine all the files
+ files.collect { |f| Dir.glob(f) }.flatten.each do |file|
+ log("Examining package file #{File.basename(file)}")
+ pkg = Dpkg::S3::Package.parse_file(file)
 
- # validate we have them
- error("No architcture given and unable to determine one for #{file}. " +
- "Please specify one with --arch [i386|amd64|armhf].") unless arch
-
- # If the arch is all and the list of existing manifests is none, then
- # throw an error. This is mainly the case when initializing a brand new
- # repository. With "all", we won't know which architectures they're using.
- if arch == "all" && manifests.count == 0
- manifests['amd64'] = Dpkg::S3::Manifest.retrieve(options[:codename], component,'amd64', options[:cache_control], options[:fail_if_exists], options[:skip_package_upload])
- manifests['i386'] = Dpkg::S3::Manifest.retrieve(options[:codename], component,'i386', options[:cache_control], options[:fail_if_exists], options[:skip_package_upload])
- manifests['armhf'] = Dpkg::S3::Manifest.retrieve(options[:codename], component,'armhf', options[:cache_control], options[:fail_if_exists], options[:skip_package_upload])
-
- # error("Package #{File.basename(file)} had architecture \"all\", " +
- # "however noexisting package lists exist. This can often happen " +
- # "if the first package you are add to a new repository is an " +
- # "\"all\" architecture file. Please use --arch [i386|amd64|armhf] or " +
- # "another platform type to upload the file.")
- end
+ # copy over some options if they weren't given
+ arch = options[:arch] || pkg.architecture
 
- # retrieve the manifest for the arch if we don't have it already
- manifests[arch] ||= Dpkg::S3::Manifest.retrieve(options[:codename], component, arch, options[:cache_control], options[:fail_if_exists], options[:skip_package_upload])
+ # If they've specified an arch type that doesn't match the package let them know
+ if options.key?('arch') && options[:arch] != pkg.architecture
+ warn("You specified architecture #{options[:arch]} but package #{pkg.name} has architecture
+ type of #{pkg.architecture}")
+ end
 
- # add package in manifests
- begin
- manifests[arch].add(pkg, options[:preserve_versions])
- rescue Dpkg::S3::Utils::AlreadyExistsError => e
- error("Preparing manifest failed because: #{e}")
- end
+ # validate we have them
+ unless arch
+ error("No architcture given and unable to determine one for #{file}. " \
+ 'Please specify one with --arch [i386|amd64|armhf].')
+ end
 
- # If arch is all, we must add this package in all arch available
- if arch == 'all'
- packages_arch_all << pkg
- end
- end
+ # If the arch is all and the list of existing manifests is none, then
+ # throw an error. This is mainly the case when initializing a brand new
+ # repository. With "all", we won't know which architectures they're using.
+ if arch == 'all' && manifests.count.zero?
+ manifests['amd64'] =
+ Dpkg::S3::Manifest.retrieve(options[:codename], component, 'amd64', options[:cache_control],
+ options[:fail_if_exists], skip_upload: options[:skip_package_upload])
+ manifests['i386'] =
+ Dpkg::S3::Manifest.retrieve(options[:codename], component, 'i386', options[:cache_control],
+ options[:fail_if_exists], skip_upload: options[:skip_package_upload])
+ manifests['armhf'] =
+ Dpkg::S3::Manifest.retrieve(options[:codename], component, 'armhf', options[:cache_control],
+ options[:fail_if_exists], skip_upload: options[:skip_package_upload])
+
+ # error("Package #{File.basename(file)} had architecture \"all\", " +
+ # "however noexisting package lists exist. This can often happen " +
+ # "if the first package you are add to a new repository is an " +
+ # "\"all\" architecture file. Please use --arch [i386|amd64|armhf] or " +
+ # "another platform type to upload the file.")
+ end
 
- manifests.each do |arch, manifest|
- next if arch == 'all'
- packages_arch_all.each do |pkg|
- begin
- manifest.add(pkg, options[:preserve_versions], false)
- rescue Dpkg::S3::Utils::AlreadyExistsError => e
- error("Preparing manifest failed because: #{e}")
+ # retrieve the manifest for the arch if we don't have it already
+ manifests[arch] ||= Dpkg::S3::Manifest.retrieve(options[:codename], component, arch,
+ options[:cache_control], options[:fail_if_exists],
+ skip_upload: options[:skip_package_upload])
+
+ # add package in manifests
+ begin
+ manifests[arch].add(pkg, options[:preserve_versions])
+ rescue Dpkg::S3::Utils::AlreadyExistsError => e
+ error("Preparing manifest failed because: #{e}")
+ end
+
+ # If arch is all, we must add this package in all arch available
+ packages_arch_all << pkg if arch == 'all'
  end
- end
- end
 
- # upload the manifest
- log("Uploading packages and new manifests to S3")
- manifests.each_value do |manifest|
- begin
- manifest.write_to_s3 { |f| sublog("Transferring #{f}") }
- rescue Dpkg::S3::Utils::AlreadyExistsError => e
- error("Uploading manifest failed because: #{e}")
+ manifests.each do |arch, manifest|
+ next if arch == 'all'
+
+ packages_arch_all.each do |pkg|
+ begin
+ manifest.add(pkg, options[:preserve_versions], needs_uploading: false)
+ rescue Dpkg::S3::Utils::AlreadyExistsError => e
+ error("Preparing manifest failed because: #{e}")
+ end
+ end
+ end
+
+ # upload the manifest
+ log('Uploading packages and new manifests to S3')
+ manifests.each_value do |manifest|
+ begin
+ manifest.write_to_s3 { |f| sublog("Transferring #{f}") }
+ rescue Dpkg::S3::Utils::AlreadyExistsError => e
+ error("Uploading manifest failed because: #{e}")
+ end
+ release.update_manifest(manifest)
+ end
+ release.write_to_s3 { |f| sublog("Transferring #{f}") }
+
+ log('Update complete.')
+ ensure
+ if options[:lock] && @lock_acquired
+ Dpkg::S3::Lock.unlock(options[:codename], component, options[:arch], options[:cache_control])
+ log('Lock released.')
+ end
  end
- release.update_manifest(manifest)
  end
- release.write_to_s3 { |f| sublog("Transferring #{f}") }
 
- log("Update complete.")
- ensure
- if options[:lock] && @lock_acquired
- Dpkg::S3::Lock.unlock(options[:codename], component, options[:arch], options[:cache_control])
- log("Lock released.")
- end
- end
- end
+ desc 'list', 'Lists packages in given codename, component, and optionally architecture'
+
+ option :long,
+ type: :boolean,
+ aliases: '-l',
+ desc: 'Shows all package information in original format.',
+ default: false
+
+ option :arch,
+ type: :string,
+ aliases: '-a',
+ desc: 'The architecture of the package in the APT repository.'
+
+ def list
+ configure_s3_client
+
+ release = Dpkg::S3::Release.retrieve(options[:codename])
+ archs = release.architectures
+ archs &= [options[:arch]] if options[:arch] && options[:arch] != 'all'
+ widths = [0, 0]
+ rows = archs.map do |arch|
+ manifest = Dpkg::S3::Manifest.retrieve(options[:codename], component,
+ arch, options[:cache_control],
+ false, skip_upload: false)
+ manifest.packages.map do |package|
+ if options[:long]
+ package.generate(options[:codename])
+ else
+ [package.name, package.full_version, package.architecture].tap do |row|
+ row.each_with_index do |col, i|
+ widths[i] = [widths[i], col.size].max if widths[i]
+ end
+ end
+ end
+ end
+ end.flatten(1)
 
- desc "list", "Lists packages in given codename, component, and optionally architecture"
-
- option :long,
- :type => :boolean,
- :aliases => '-l',
- :desc => "Shows all package information in original format.",
- :default => false
-
- option :arch,
- :type => :string,
- :aliases => "-a",
- :desc => "The architecture of the package in the APT repository."
-
- def list
- configure_s3_client
-
- release = Dpkg::S3::Release.retrieve(options[:codename])
- archs = release.architectures
- archs &= [options[:arch]] if options[:arch] && options[:arch] != "all"
- widths = [0, 0]
- rows = archs.map { |arch|
- manifest = Dpkg::S3::Manifest.retrieve(options[:codename], component,
- arch, options[:cache_control],
- false, false)
- manifest.packages.map do |package|
  if options[:long]
- package.generate(options[:codename])
+ $stdout.puts rows.join("\n")
  else
- [package.name, package.full_version, package.architecture].tap do |row|
- row.each_with_index do |col, i|
- widths[i] = [widths[i], col.size].max if widths[i]
- end
+ rows.each do |row|
+ $stdout.puts format("% -#{widths[0]}<package>s % -#{widths[1]}<version>s %<arch>s",
+ package: row[0], version: row[1], arch: row[2])
  end
  end
  end
- }.flatten(1)
 
- if options[:long]
- $stdout.puts rows.join("\n")
- else
- rows.each do |row|
- $stdout.puts "% -#{widths[0]}s % -#{widths[1]}s %s" % row
- end
- end
- end
-
- desc "show PACKAGE VERSION ARCH", "Shows information about a package."
-
- def show(package_name, version, arch)
- if version.nil?
- error "You must specify the name of the package to show."
- end
- if version.nil?
- error "You must specify the version of the package to show."
- end
- if arch.nil?
- error "You must specify the architecture of the package to show."
- end
+ desc 'show PACKAGE VERSION ARCH', 'Shows information about a package.'
 
- configure_s3_client
-
- # retrieve the existing manifests
- manifest = Dpkg::S3::Manifest.retrieve(options[:codename], component, arch,
- options[:cache_control], false, false)
- package = manifest.packages.detect { |p|
- p.name == package_name && p.full_version == version
- }
- if package.nil?
- error "No such package found."
- end
+ def show(package_name, version, arch)
+ error 'You must specify the name of the package to show.' if version.nil?
+ error 'You must specify the version of the package to show.' if version.nil?
+ error 'You must specify the architecture of the package to show.' if arch.nil?
 
- puts package.generate(options[:codename])
- end
-
- desc "copy PACKAGE TO_CODENAME TO_COMPONENT ",
- "Copy the package named PACKAGE to given codename and component. If --versions is not specified, copy all versions of PACKAGE. Otherwise, only the specified versions will be copied. Source codename and component is given by --codename and --component options."
-
- option :cache_control,
- :type => :string,
- :aliases => "-C",
- :desc => "Add cache-control headers to S3 objects."
-
- option :arch,
- :type => :string,
- :aliases => "-a",
- :desc => "The architecture of the package in the APT repository."
-
- option :versions,
- :default => nil,
- :type => :array,
- :desc => "The space-delimited versions of PACKAGE to delete. If not " +
- "specified, ALL VERSIONS will be deleted. Fair warning. " +
- "E.g. --versions \"0.1 0.2 0.3\""
-
- option :preserve_versions,
- :default => false,
- :type => :boolean,
- :aliases => "-p",
- :desc => "Whether to preserve other versions of a package " +
- "in the repository when uploading one."
-
- option :fail_if_exists,
- :default => true,
- :type => :boolean,
- :desc => "Whether to overwrite any existing package that has the same " +
- "filename in the pool or the same name and version in the manifest."
-
- def copy(package_name, to_codename, to_component)
- if package_name.nil?
- error "You must specify a package name."
- end
- if to_codename.nil?
- error "You must specify a codename to copy to."
- end
- if to_component.nil?
- error "You must specify a component to copy to."
- end
-
- arch = options[:arch]
- if arch.nil?
- error "You must specify the architecture of the package to copy."
- end
+ configure_s3_client
 
- versions = options[:versions]
- if versions.nil?
- warn "===> WARNING: Copying all versions of #{package_name}"
- else
- log "Versions to copy: #{versions.join(', ')}"
- end
-
- configure_s3_client
-
- # retrieve the existing manifests
- log "Retrieving existing manifests"
- from_manifest = Dpkg::S3::Manifest.retrieve(options[:codename],
- component, arch,
- options[:cache_control],
- false, options[:skip_package_upload])
- to_release = Dpkg::S3::Release.retrieve(to_codename)
- to_manifest = Dpkg::S3::Manifest.retrieve(to_codename, to_component, arch,
- options[:cache_control],
- options[:fail_if_exists],
- options[:skip_package_upload])
- packages = from_manifest.packages.select { |p|
- p.name == package_name &&
- (versions.nil? || versions.include?(p.full_version))
- }
- if packages.size == 0
- error "No packages found in repository."
- end
+ # retrieve the existing manifests
+ manifest = Dpkg::S3::Manifest.retrieve(options[:codename], component, arch,
+ options[:cache_control], false, false)
+ package = manifest.packages.detect do |p|
+ p.name == package_name && p.full_version == version
+ end
+ error 'No such package found.' if package.nil?
 
- packages.each do |package|
- begin
- to_manifest.add package, options[:preserve_versions], false
- rescue Dpkg::S3::Utils::AlreadyExistsError => e
- error("Preparing manifest failed because: #{e}")
+ puts package.generate(options[:codename])
  end
- end
 
- begin
- to_manifest.write_to_s3 { |f| sublog("Transferring #{f}") }
- rescue Dpkg::S3::Utils::AlreadyExistsError => e
- error("Copying manifest failed because: #{e}")
- end
- to_release.update_manifest(to_manifest)
- to_release.write_to_s3 { |f| sublog("Transferring #{f}") }
+ desc 'copy PACKAGE TO_CODENAME TO_COMPONENT ',
+ 'Copy the package named PACKAGE to given codename and component. If --versions is not specified, '\
+ 'copy all versions of PACKAGE. Otherwise, only the specified versions will be copied. '\
+ 'Source codename and component is given by --codename and --component options.'
+
+ option :cache_control,
+ type: :string,
+ aliases: '-C',
+ desc: 'Add cache-control headers to S3 objects.'
+
+ option :arch,
+ type: :string,
+ aliases: '-a',
+ desc: 'The architecture of the package in the APT repository.'
+
+ option :versions,
+ default: nil,
+ type: :array,
+ desc: 'The space-delimited versions of PACKAGE to delete. If not ' \
+ 'specified, ALL VERSIONS will be deleted. Fair warning. ' \
+ 'E.g. --versions "0.1 0.2 0.3"'
+
+ option :preserve_versions,
+ default: false,
+ type: :boolean,
+ aliases: '-p',
+ desc: 'Whether to preserve other versions of a package ' \
+ 'in the repository when uploading one.'
+
+ option :fail_if_exists,
+ default: true,
+ type: :boolean,
+ desc: 'Whether to overwrite any existing package that has the same ' \
+ 'filename in the pool or the same name and version in the manifest.'
+
+ def copy(package_name, to_codename, to_component)
+ error 'You must specify a package name.' if package_name.nil?
+ error 'You must specify a codename to copy to.' if to_codename.nil?
+ error 'You must specify a component to copy to.' if to_component.nil?
+
+ arch = options[:arch]
+ error 'You must specify the architecture of the package to copy.' if arch.nil?
+
+ versions = options[:versions]
+ if versions.nil?
+ warn "===> WARNING: Copying all versions of #{package_name}"
+ else
+ log "Versions to copy: #{versions.join(', ')}"
+ end
 
- log "Copy complete."
- end
+ configure_s3_client
+
+ # retrieve the existing manifests
+ log 'Retrieving existing manifests'
+ from_manifest = Dpkg::S3::Manifest.retrieve(options[:codename],
+ component, arch,
+ options[:cache_control],
+ false, skip_upload: options[:skip_package_upload])
+ to_release = Dpkg::S3::Release.retrieve(to_codename)
+ to_manifest = Dpkg::S3::Manifest.retrieve(to_codename, to_component, arch,
+ options[:cache_control],
+ options[:fail_if_exists],
+ skip_upload: options[:skip_package_upload])
+ packages = from_manifest.packages.select do |p|
+ p.name == package_name &&
+ (versions.nil? || versions.include?(p.full_version))
+ end
+ error 'No packages found in repository.' if packages.size.zero?
 
- desc "delete PACKAGE",
- "Remove the package named PACKAGE. If --versions is not specified, delete" +
- "all versions of PACKAGE. Otherwise, only the specified versions will be " +
- "deleted."
-
- option :arch,
- :type => :string,
- :aliases => "-a",
- :desc => "The architecture of the package in the APT repository."
-
- option :versions,
- :default => nil,
- :type => :array,
- :desc => "The space-delimited versions of PACKAGE to delete. If not " +
- "specified, ALL VERSIONS will be deleted. Fair warning. " +
- "E.g. --versions \"0.1 0.2 0.3\""
-
- def delete(package)
- if package.nil?
- error("You must specify a package name.")
- end
+ packages.each do |package|
+ begin
+ to_manifest.add package, options[:preserve_versions], false
+ rescue Dpkg::S3::Utils::AlreadyExistsError => e
+ error("Preparing manifest failed because: #{e}")
+ end
+ end
 
- versions = options[:versions]
- if versions.nil?
- warn("===> WARNING: Deleting all versions of #{package}")
- else
- log("Versions to delete: #{versions.join(', ')}")
- end
+ begin
+ to_manifest.write_to_s3 { |f| sublog("Transferring #{f}") }
+ rescue Dpkg::S3::Utils::AlreadyExistsError => e
+ error("Copying manifest failed because: #{e}")
+ end
+ to_release.update_manifest(to_manifest)
+ to_release.write_to_s3 { |f| sublog("Transferring #{f}") }
 
- arch = options[:arch]
- if arch.nil?
- error("You must specify the architecture of the package to remove.")
- end
+ log 'Copy complete.'
+ end
 
- configure_s3_client
+ desc 'delete PACKAGE',
+ 'Remove the package named PACKAGE. If --versions is not specified, delete' \
+ 'all versions of PACKAGE. Otherwise, only the specified versions will be ' \
+ 'deleted.'
+
+ option :arch,
+ type: :string,
+ aliases: '-a',
+ desc: 'The architecture of the package in the APT repository.'
+
+ option :versions,
+ default: nil,
+ type: :array,
+ desc: 'The space-delimited versions of PACKAGE to delete. If not ' \
+ 'specified, ALL VERSIONS will be deleted. Fair warning. ' \
+ 'E.g. --versions "0.1 0.2 0.3"'
+
+ def delete(package)
+ error('You must specify a package name.') if package.nil?
+
+ versions = options[:versions]
+ if versions.nil?
+ warn("===> WARNING: Deleting all versions of #{package}")
+ else
+ log("Versions to delete: #{versions.join(', ')}")
+ end
 
- # retrieve the existing manifests
- log("Retrieving existing manifests")
- release = Dpkg::S3::Release.retrieve(options[:codename], options[:origin], options[:suite])
- if arch == 'all'
- selected_arch = release.architectures
- else
- selected_arch = [arch]
- end
- all_found = 0
- selected_arch.each { |ar|
- manifest = Dpkg::S3::Manifest.retrieve(options[:codename], component, ar, options[:cache_control], false, options[:skip_package_upload])
-
- deleted = manifest.delete_package(package, versions)
- all_found += deleted.length
- if deleted.length == 0
- if versions.nil?
+ arch = options[:arch]
+ error('You must specify the architecture of the package to remove.') if arch.nil?
+
+ configure_s3_client
+
+ # retrieve the existing manifests
+ log('Retrieving existing manifests')
+ release = Dpkg::S3::Release.retrieve(options[:codename], options[:origin], options[:suite])
+ selected_arch = if arch == 'all'
+ release.architectures
+ else
+ [arch]
+ end
+ all_found = 0
+ selected_arch.each do |ar|
+ manifest = Dpkg::S3::Manifest.retrieve(options[:codename], component, ar, options[:cache_control], false,
+ skip_upload: options[:skip_package_upload])
+
+ deleted = manifest.delete_package(package, versions)
+ all_found += deleted.length
+ if deleted.length.zero?
+ if versions.nil?
  sublog("No packages were deleted. #{package} not found in arch #{ar}.")
- next
+ else
+ sublog("No packages were deleted. #{package} versions #{versions.join(', ')}
+ could not be found in arch #{ar}.")
+ end
+ next
  else
- sublog("No packages were deleted. #{package} versions #{versions.join(', ')} could not be found in arch #{ar}.")
- next
- end
- else
- deleted.each { |p|
+ deleted.each do |p|
  sublog("Deleting #{p.name} version #{p.full_version} from arch #{ar}")
- }
- end
+ end
+ end
+
+ log('Uploading new manifests to S3')
+ manifest.write_to_s3 { |f| sublog("Transferring #{f}") }
+ release.update_manifest(manifest)
+ release.write_to_s3 { |f| sublog("Transferring #{f}") }
+
+ log('Update complete.')
+ end
+ return unless all_found.zero?
 
- log("Uploading new manifests to S3")
- manifest.write_to_s3 {|f| sublog("Transferring #{f}") }
- release.update_manifest(manifest)
- release.write_to_s3 {|f| sublog("Transferring #{f}") }
-
- log("Update complete.")
- }
- if all_found == 0
- if versions.nil?
- error("No packages were deleted. #{package} not found.")
- else
- error("No packages were deleted. #{package} versions #{versions.join(', ')} could not be found.")
+ if versions.nil?
+ error("No packages were deleted. #{package} not found.")
+ else
+ error("No packages were deleted. #{package} versions #{versions.join(', ')} could not be found.")
+ end
  end
- end
 
- end
+ desc 'verify', 'Verifies that the files in the package manifests exist'
+
+ option :fix_manifests,
+ default: false,
+ type: :boolean,
+ aliases: '-f',
+ desc: 'Whether to fix problems in manifests when verifying.'
 
+ def verify
+ configure_s3_client
 
- desc "verify", "Verifies that the files in the package manifests exist"
+ log('Retrieving existing manifests')
+ release = Dpkg::S3::Release.retrieve(options[:codename], options[:origin], options[:suite])
 
- option :fix_manifests,
- :default => false,
- :type => :boolean,
- :aliases => "-f",
- :desc => "Whether to fix problems in manifests when verifying."
+ release.architectures.each do |arch|
+ log("Checking for missing packages in: #{options[:codename]}/#{options[:component]} #{arch}")
+ manifest = Dpkg::S3::Manifest.retrieve(options[:codename], component,
+ arch, options[:cache_control], false,
+ skip_upload: options[:skip_package_upload])
+ missing_packages = []
 
- def verify
- configure_s3_client
+ manifest.packages.each do |p|
+ next if Dpkg::S3::Utils.s3_exists? p.url_filename_encoded(options[:codename])
 
- log("Retrieving existing manifests")
- release = Dpkg::S3::Release.retrieve(options[:codename], options[:origin], options[:suite])
+ sublog("The following packages are missing:\n\n") if missing_packages.empty?
+ puts(p.generate(options[:codename]))
+ puts('')
 
- release.architectures.each do |arch|
- log("Checking for missing packages in: #{options[:codename]}/#{options[:component]} #{arch}")
- manifest = Dpkg::S3::Manifest.retrieve(options[:codename], component,
- arch, options[:cache_control], false,
- options[:skip_package_upload])
- missing_packages = []
+ missing_packages << p
+ end
+
+ next unless options[:sign] || (options[:fix_manifests] && !missing_packages.empty?)
 
- manifest.packages.each do |p|
- unless Dpkg::S3::Utils.s3_exists? p.url_filename_encoded(options[:codename])
- sublog("The following packages are missing:\n\n") if missing_packages.empty?
- puts(p.generate(options[:codename]))
- puts("")
+ log("Removing #{missing_packages.length} package(s) from the manifest...")
+ missing_packages.each { |p| manifest.packages.delete(p) }
+ manifest.write_to_s3 { |f| sublog("Transferring #{f}") }
+ release.update_manifest(manifest)
+ release.write_to_s3 { |f| sublog("Transferring #{f}") }
 
- missing_packages << p
+ log('Update complete.')
  end
  end
 
- if options[:sign] || (options[:fix_manifests] && !missing_packages.empty?)
- log("Removing #{missing_packages.length} package(s) from the manifest...")
- missing_packages.each { |p| manifest.packages.delete(p) }
- manifest.write_to_s3 { |f| sublog("Transferring #{f}") }
- release.update_manifest(manifest)
- release.write_to_s3 { |f| sublog("Transferring #{f}") }
+ private
 
- log("Update complete.")
- end
- end
- end
+ def component
+ return @component if @component
 
- private
-
- def component
- return @component if @component
- @component = if (section = options[:section])
- warn("===> WARNING: The --section/-s argument is " \
- "deprecated, please use --component/-m.")
- section
- else
- options[:component]
- end
- end
+ @component = if (section = options[:section])
+ warn('===> WARNING: The --section/-s argument is ' \
+ 'deprecated, please use --component/-m.')
+ section
+ else
+ options[:component]
+ end
+ end
 
- def puts(*args)
- $stdout.puts(*args) unless options[:quiet]
- end
+ def puts(*args)
+ $stdout.puts(*args) unless options[:quiet]
+ end
 
- def log(message)
- puts ">> #{message}" unless options[:quiet]
- end
+ def log(message)
+ puts ">> #{message}" unless options[:quiet]
+ end
 
- def sublog(message)
- puts " -- #{message}" unless options[:quiet]
- end
+ def sublog(message)
+ puts " -- #{message}" unless options[:quiet]
+ end
 
- def error(message)
- $stderr.puts "!! #{message}" unless options[:quiet]
- exit 1
- end
+ def error(message)
+ warn "!! #{message}" unless options[:quiet]
+ exit 1
+ end
 
- def provider
- access_key_id = options[:access_key_id]
- secret_access_key = options[:secret_access_key]
- session_token = options[:session_token]
+ def provider
+ access_key_id = options[:access_key_id]
+ secret_access_key = options[:secret_access_key]
+ session_token = options[:session_token]
 
- if access_key_id.nil? ^ secret_access_key.nil?
- error("If you specify one of --access-key-id or --secret-access-key, you must specify the other.")
- end
- static_credentials = {}
- static_credentials[:access_key_id] = access_key_id if access_key_id
- static_credentials[:secret_access_key] = secret_access_key if secret_access_key
- static_credentials[:session_token] = session_token if session_token
+ if access_key_id.nil? ^ secret_access_key.nil?
+ error('If you specify one of --access-key-id or --secret-access-key, you must specify the other.')
+ end
+ static_credentials = {}
+ static_credentials[:access_key_id] = access_key_id if access_key_id
+ static_credentials[:secret_access_key] = secret_access_key if secret_access_key
+ static_credentials[:session_token] = session_token if session_token
 
- static_credentials
- end
+ static_credentials
+ end
 
- def configure_s3_client
- error("No value provided for required options '--bucket'") unless options[:bucket]
-
- settings = {
- :region => options[:s3_region],
- :http_proxy => options[:proxy_uri],
- :force_path_style => options[:force_path_style]
- }
- settings[:endpoint] = options[:endpoint] if options[:endpoint]
- settings.merge!(provider)
-
- Dpkg::S3::Utils.s3 = Aws::S3::Client.new(settings)
- Dpkg::S3::Utils.bucket = options[:bucket]
- Dpkg::S3::Utils.signing_key = options[:sign]
- Dpkg::S3::Utils.gpg_options = options[:gpg_options]
- Dpkg::S3::Utils.prefix = options[:prefix]
- Dpkg::S3::Utils.encryption = options[:encryption]
-
- # make sure we have a valid visibility setting
- Dpkg::S3::Utils.access_policy =
- case options[:visibility]
- when "public"
- "public-read"
- when "private"
- "private"
- when "authenticated"
- "authenticated-read"
- when "bucket_owner"
- "bucket-owner-full-control"
- else
- error("Invalid visibility setting given. Can be public, private, authenticated, or bucket_owner.")
+ def configure_s3_client
+ error("No value provided for required options '--bucket'") unless options[:bucket]
+
+ settings = {
+ region: options[:s3_region],
+ http_proxy: options[:proxy_uri],
+ force_path_style: options[:force_path_style]
+ }
+ settings[:endpoint] = options[:endpoint] if options[:endpoint]
+ settings.merge!(provider)
+
+ Dpkg::S3::Utils.s3 = Aws::S3::Client.new(settings)
+ Dpkg::S3::Utils.bucket = options[:bucket]
+ Dpkg::S3::Utils.signing_key = options[:sign]
+ Dpkg::S3::Utils.gpg_options = options[:gpg_options]
+ Dpkg::S3::Utils.prefix = options[:prefix]
+ Dpkg::S3::Utils.encryption = options[:encryption]
+
+ # make sure we have a valid visibility setting
+ Dpkg::S3::Utils.access_policy =
+ case options[:visibility]
+ when 'public'
+ 'public-read'
+ when 'private'
+ 'private'
+ when 'authenticated'
+ 'authenticated-read'
+ when 'bucket_owner'
+ 'bucket-owner-full-control'
+ else
+ error('Invalid visibility setting given. Can be public, private, authenticated, or bucket_owner.')
+ end
+ end
  end
+ end
  end
- end
  end