miasma-aws 0.3.10 → 0.3.12

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -1,6 +1,6 @@
- require 'stringio'
- require 'xmlsimple'
- require 'miasma'
+ require "stringio"
+ require "xmlsimple"
+ require "miasma"

  module Miasma
  module Models
@@ -9,11 +9,11 @@ module Miasma
  class Aws < Storage

  # Service name of the API
- API_SERVICE = 's3'.freeze
+ API_SERVICE = "s3".freeze
  # Service name of the API in eucalyptus
- EUCA_API_SERVICE = 'objectstorage'.freeze
+ EUCA_API_SERVICE = "objectstorage".freeze
  # Supported version of the Storage API
- API_VERSION = '2006-03-01'.freeze
+ API_VERSION = "2006-03-01".freeze

  include Contrib::AwsApiCore::ApiCommon
  include Contrib::AwsApiCore::RequestUtils
@@ -30,19 +30,17 @@ module Miasma
  # different than the usual token based fetching
  def all_result_pages(next_token, *result_key, &block)
  list = []
- options = next_token ? Smash.new('marker' => next_token) : Smash.new
+ options = next_token ? Smash.new("marker" => next_token) : Smash.new
  result = block.call(options)
  content = result.get(*result_key.dup)
- if(content.is_a?(Array))
+ if content.is_a?(Array)
  list += content
  else
  list << content
  end
  set = result.get(*result_key.slice(0, 2))
- if(set.is_a?(Hash) && set['IsTruncated'] && set['Contents'])
- content_key = (
- set['Contents'].respond_to?(:last) ? set['Contents'].last : set['Contents']
- )['Key']
+ if set.is_a?(Hash) && set["IsTruncated"] && set["Contents"]
+ content_key = (set["Contents"].respond_to?(:last) ? set["Contents"].last : set["Contents"])["Key"]
  list += all_result_pages(content_key, *result_key, &block)
  end
  list.compact
@@ -53,13 +51,13 @@ module Miasma
  def initialize(args)
  args = args.to_smash
  cache_region = args[:aws_region]
- args[:aws_region] = args.fetch(:aws_bucket_region, 'us-east-1')
+ args[:aws_region] = args.fetch(:aws_bucket_region, "us-east-1")
  super(args)
  aws_region = cache_region
- if(aws_bucket_region && aws_bucket_region != 'us-east-1')
+ if aws_bucket_region && aws_bucket_region != "us-east-1"
  self.aws_host = "s3-#{aws_bucket_region}.amazonaws.com"
  else
- self.aws_host = 's3.amazonaws.com'
+ self.aws_host = "s3.amazonaws.com"
  end
  end

@@ -68,24 +66,24 @@ module Miasma
  # @param bucket [Models::Storage::Bucket]
  # @return [Models::Storage::Bucket]
  def bucket_save(bucket)
- unless(bucket.persisted?)
+ unless bucket.persisted?
  req_args = Smash.new(
  :method => :put,
- :path => '/',
- :endpoint => bucket_endpoint(bucket)
+ :path => "/",
+ :endpoint => bucket_endpoint(bucket),
  )
- if(aws_bucket_region)
+ if aws_bucket_region
  req_args[:body] = XmlSimple.xml_out(
  Smash.new(
- 'CreateBucketConfiguration' => {
- 'LocationConstraint' => aws_bucket_region
- }
+ "CreateBucketConfiguration" => {
+ "LocationConstraint" => aws_bucket_region,
+ },
  ),
- 'AttrPrefix' => true,
- 'KeepRoot' => true
+ "AttrPrefix" => true,
+ "KeepRoot" => true,
  )
  req_args[:headers] = Smash.new(
- 'Content-Length' => req_args[:body].size.to_s
+ "Content-Length" => req_args[:body].size.to_s,
  )
  end
  request(req_args)
@@ -101,14 +99,13 @@ module Miasma
  # @return [Models::Storage::Bucket, NilClass]
  def bucket_get(ident)
  bucket = Bucket.new(self,
- :id => ident,
- :name => ident
- )
+ :id => ident,
+ :name => ident)
  begin
  bucket.reload
  bucket
  rescue Error::ApiError::RequestError => e
- if(e.response.status == 404)
+ if e.response.status == 404
  nil
  else
  raise
@@ -121,12 +118,12 @@ module Miasma
  # @param bucket [Models::Storage::Bucket]
  # @return [TrueClass, FalseClass]
  def bucket_destroy(bucket)
- if(bucket.persisted?)
+ if bucket.persisted?
  request(
- :path => '/',
+ :path => "/",
  :method => :delete,
  :endpoint => bucket_endpoint(bucket),
- :expects => 204
+ :expects => 204,
  )
  true
  else
@@ -139,15 +136,15 @@ module Miasma
  # @param bucket [Models::Storage::Bucket]
  # @return [Models::Storage::Bucket]
  def bucket_reload(bucket)
- if(bucket.persisted?)
+ if bucket.persisted?
  begin
  result = request(
- :path => '/',
+ :path => "/",
  :method => :head,
- :endpoint => bucket_endpoint(bucket)
+ :endpoint => bucket_endpoint(bucket),
  )
  rescue Error::ApiError::RequestError => e
- if(e.response.status == 404)
+ if e.response.status == 404
  bucket.data.clear
  bucket.dirty.clear
  else
@@ -171,18 +168,18 @@ module Miasma
  #
  # @return [Array<Models::Storage::Bucket>]
  def bucket_all
- result = all_result_pages(nil, :body, 'ListAllMyBucketsResult', 'Buckets', 'Bucket') do |options|
+ result = all_result_pages(nil, :body, "ListAllMyBucketsResult", "Buckets", "Bucket") do |options|
  request(
- :path => '/',
- :params => options
+ :path => "/",
+ :params => options,
  )
  end
  result.map do |bkt|
  Bucket.new(
  self,
- :id => bkt['Name'],
- :name => bkt['Name'],
- :created => bkt['CreationDate']
+ :id => bkt["Name"],
+ :name => bkt["Name"],
+ :created => bkt["CreationDate"],
  ).valid_state
  end
  end
@@ -192,21 +189,21 @@ module Miasma
  # @param args [Hash] filter options
  # @return [Array<Models::Storage::File>]
  def file_filter(bucket, args)
- if(args[:prefix])
+ if args[:prefix]
  result = request(
- :path => '/',
+ :path => "/",
  :endpoint => bucket_endpoint(bucket),
  :params => Smash.new(
- :prefix => args[:prefix]
- )
+ :prefix => args[:prefix],
+ ),
  )
- [result.get(:body, 'ListBucketResult', 'Contents')].flatten.compact.map do |file|
+ [result.get(:body, "ListBucketResult", "Contents")].flatten.compact.map do |file|
  File.new(
  bucket,
- :id => ::File.join(bucket.name, file['Key']),
- :name => file['Key'],
- :updated => file['LastModified'],
- :size => file['Size'].to_i
+ :id => ::File.join(bucket.name, file["Key"]),
+ :name => file["Key"],
+ :updated => file["LastModified"],
+ :size => file["Size"].to_i,
  ).valid_state
  end
  else
@@ -219,20 +216,20 @@ module Miasma
  # @param bucket [Bucket]
  # @return [Array<File>]
  def file_all(bucket)
- result = all_result_pages(nil, :body, 'ListBucketResult', 'Contents') do |options|
+ result = all_result_pages(nil, :body, "ListBucketResult", "Contents") do |options|
  request(
- :path => '/',
+ :path => "/",
  :params => options,
- :endpoint => bucket_endpoint(bucket)
+ :endpoint => bucket_endpoint(bucket),
  )
  end
  result.map do |file|
  File.new(
  bucket,
- :id => ::File.join(bucket.name, file['Key']),
- :name => file['Key'],
- :updated => file['LastModified'],
- :size => file['Size'].to_i
+ :id => ::File.join(bucket.name, file["Key"]),
+ :name => file["Key"],
+ :updated => file["LastModified"],
+ :size => file["Size"].to_i,
  ).valid_state
  end
  end
@@ -242,25 +239,25 @@ module Miasma
  # @param file [Models::Storage::File]
  # @return [Models::Storage::File]
  def file_save(file)
- if(file.dirty?)
+ if file.dirty?
  file.load_data(file.attributes)
  args = Smash.new
  headers = Smash[
  Smash.new(
- :content_type => 'Content-Type',
- :content_disposition => 'Content-Disposition',
- :content_encoding => 'Content-Encoding'
+ :content_type => "Content-Type",
+ :content_disposition => "Content-Disposition",
+ :content_encoding => "Content-Encoding",
  ).map do |attr, key|
- if(file.attributes[attr])
+ if file.attributes[attr]
  [key, file.attributes[attr]]
  end
  end.compact
  ]
- unless(headers.empty?)
+ unless headers.empty?
  args[:headers] = headers
  end
- if(file.attributes[:body].respond_to?(:read) &&
- file.attributes[:body].size >= Storage::MAX_BODY_SIZE_FOR_STRINGIFY)
+ if (file.attributes[:body].respond_to?(:read) &&
+ file.attributes[:body].size >= Storage::MAX_BODY_SIZE_FOR_STRINGIFY)
  upload_id = request(
  args.merge(
  Smash.new(
@@ -268,16 +265,16 @@ module Miasma
  :path => file_path(file),
  :endpoint => bucket_endpoint(file.bucket),
  :params => {
- :uploads => true
- }
+ :uploads => true,
+ },
  )
  )
- ).get(:body, 'InitiateMultipartUploadResult', 'UploadId')
+ ).get(:body, "InitiateMultipartUploadResult", "UploadId")
  begin
  count = 1
  parts = []
  file.body.rewind
- while(content = file.body.read(Storage::READ_BODY_CHUNK_SIZE * 1.5))
+ while content = file.body.read(Storage::READ_BODY_CHUNK_SIZE * 1.5)
  parts << [
  count,
  request(
@@ -285,69 +282,69 @@ module Miasma
  :path => file_path(file),
  :endpoint => bucket_endpoint(file.bucket),
  :headers => Smash.new(
- 'Content-Length' => content.size,
- 'Content-MD5' => Digest::MD5.base64digest(content)
+ "Content-Length" => content.size,
+ "Content-MD5" => Digest::MD5.base64digest(content),
  ),
  :params => Smash.new(
- 'partNumber' => count,
- 'uploadId' => upload_id
+ "partNumber" => count,
+ "uploadId" => upload_id,
  ),
- :body => content
- ).get(:headers, :etag)
+ :body => content,
+ ).get(:headers, :etag),
  ]
  count += 1
  end
  complete = XmlSimple.xml_out(
  Smash.new(
- 'CompleteMultipartUpload' => {
- 'Part' => parts.map{|part|
- {'PartNumber' => part.first, 'ETag' => part.last}
- }
- }
+ "CompleteMultipartUpload" => {
+ "Part" => parts.map { |part|
+ {"PartNumber" => part.first, "ETag" => part.last}
+ },
+ },
  ),
- 'AttrPrefix' => true,
- 'KeepRoot' => true
+ "AttrPrefix" => true,
+ "KeepRoot" => true,
  )
  result = request(
  :method => :post,
  :path => file_path(file),
  :endpoint => bucket_endpoint(file.bucket),
  :params => Smash.new(
- 'uploadId' => upload_id
+ "uploadId" => upload_id,
  ),
  :headers => Smash.new(
- 'Content-Length' => complete.size
+ "Content-Length" => complete.size,
  ),
- :body => complete
+ :body => complete,
  )
- file.etag = result.get(:body, 'CompleteMultipartUploadResult', 'ETag')
+ file.etag = result.get(:body, "CompleteMultipartUploadResult", "ETag")
  rescue => e
  request(
  :method => :delete,
  :path => file_path(file),
  :endpoint => bucket_endpoint(file.bucket),
  :params => {
- 'uploadId' => upload_id
+ "uploadId" => upload_id,
  },
- :expects => 204
+ :expects => 204,
  )
  raise
  end
  else
- if(file.attributes[:body].respond_to?(:readpartial))
- args.set(:headers, 'Content-Length', file.body.size.to_s)
+ if file.attributes[:body].respond_to?(:readpartial)
+ args.set(:headers, "Content-Length", file.body.size.to_s)
  file.body.rewind
  args[:body] = file.body.readpartial(file.body.size)
  file.body.rewind
  else
- args.set(:headers, 'Content-Length', 0)
+ args.set(:headers, "Content-Length", 0)
  end
  result = request(
  args.merge(
  Smash.new(
  :method => :put,
  :path => file_path(file),
- :endpoint => bucket_endpoint(file.bucket)
+ :endpoint => bucket_endpoint(file.bucket),
  )
  )
  )
@@ -364,12 +361,12 @@ module Miasma
  # @param file [Models::Storage::File]
  # @return [TrueClass, FalseClass]
  def file_destroy(file)
- if(file.persisted?)
+ if file.persisted?
  request(
  :method => :delete,
  :path => file_path(file),
  :endpoint => bucket_endpoint(file.bucket),
- :expects => 204
+ :expects => 204,
  )
  true
  else
@@ -382,11 +379,11 @@ module Miasma
  # @param file [Models::Storage::File]
  # @return [Models::Storage::File]
  def file_reload(file)
- if(file.persisted?)
+ if file.persisted?
  name = file.name
  result = request(
  :path => file_path(file),
- :endpoint => bucket_endpoint(file.bucket)
+ :endpoint => bucket_endpoint(file.bucket),
  )
  file.data.clear && file.dirty.clear
  info = result[:headers]
@@ -396,7 +393,7 @@ module Miasma
  :updated => info[:last_modified],
  :etag => info[:etag],
  :size => info[:content_length].to_i,
- :content_type => info[:content_type]
+ :content_type => info[:content_type],
  ).valid_state
  end
  file
@@ -407,16 +404,16 @@ module Miasma
  # @param timeout_secs [Integer] seconds available
  # @return [String] URL
  def file_url(file, timeout_secs)
- if(file.persisted?)
+ if file.persisted?
  signer.generate_url(
  :get, ::File.join(uri_escape(file.bucket.name), file_path(file)),
  :headers => Smash.new(
- 'Host' => aws_host
+ "Host" => aws_host,
  ),
  :params => Smash.new(
- 'X-Amz-Date' => Contrib::AwsApiCore.time_iso8601,
- 'X-Amz-Expires' => timeout_secs
- )
+ "X-Amz-Date" => Contrib::AwsApiCore.time_iso8601,
+ "X-Amz-Expires" => timeout_secs,
+ ),
  )
  else
  raise Error::ModelPersistError.new "#{file} has not been saved!"
@@ -429,18 +426,18 @@ module Miasma
  # @return [IO, HTTP::Response::Body]
  def file_body(file)
  file_content = nil
- if(file.persisted?)
+ if file.persisted?
  result = request(
  :path => file_path(file),
  :endpoint => bucket_endpoint(file.bucket),
- :disable_body_extraction => true
+ :disable_body_extraction => true,
  )
  content = result[:body]
  begin
- if(content.is_a?(String))
+ if content.is_a?(String)
  file_content = StringIO.new(content)
  else
- if(content.respond_to?(:stream!))
+ if content.respond_to?(:stream!)
  content.stream!
  end
  file_content = content
@@ -449,7 +446,7 @@ module Miasma
  file_content = StringIO.new(content.to_s)
  end
  else
- file_content = StringIO.new('')
+ file_content = StringIO.new("")
  end
  File::Streamable.new(file_content)
  end
@@ -463,18 +460,17 @@ module Miasma
  # happening (which implicitly forces :form) or :json is used
  # it will not properly checksum. (but that's probably okay)
  def update_request(con, opts)
- opts[:headers]['x-amz-content-sha256'] = Digest::SHA256.
- hexdigest(opts.fetch(:body, ''))
+ opts[:headers]["x-amz-content-sha256"] = Digest::SHA256.
+ hexdigest(opts.fetch(:body, ""))
  true
  end

  # @return [String] escaped file path
  def file_path(file)
- file.name.split('/').map do |part|
+ file.name.split("/").map do |part|
  uri_escape(part)
- end.join('/')
+ end.join("/")
  end
-
  end
  end
  end