s3 0.3.9 → 0.3.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/Gemfile CHANGED
@@ -1,4 +1,2 @@
- source :gemcutter
-
- # Specify your gem's dependencies in s3.gemspec
+ source "http://rubygems.org"
  gemspec
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- s3 (0.3.8)
+ s3 (0.3.10)
  proxies (~> 0.2.0)
 
  GEM
data/README.md CHANGED
@@ -4,8 +4,6 @@ S3 library provides access to [Amazon's Simple Storage Service](http://aws.amazo
 
  It supports both: European and US buckets through the [REST API](http://docs.amazonwebservices.com/AmazonS3/latest/API/APIRest.html).
 
- <a href="http://pledgie.com/campaigns/14173"><img alt="Click here to lend your support to: S3 and make a donation at www.pledgie.com!" src="http://pledgie.com/campaigns/14173.png?skin_name=chrome" border="0" /></a>
-
  ## Installation
 
  gem install s3
@@ -66,6 +64,9 @@ It supports both: European and US buckets through the [REST API](http://docs.ama
  new_object.save
  #=> true
 
+ Please note that new objects are created with "public-read" ACL by
+ default.
+
  ## See also
 
  * [gemcutter](http://gemcutter.org/gems/s3)
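
An aside on the README note added above: since new objects default to a "public-read" ACL, private data needs an explicit ACL before save. A minimal sketch, assuming the bucket name and key are placeholders and that the acl= writer accepts symbols such as :private (the paperclip extra removed below assigns symbols like :public_read the same way):

    require "s3"

    service = S3::Service.new(:access_key_id     => "...",
                              :secret_access_key => "...")
    bucket  = service.buckets.find("my-bucket")       # hypothetical bucket
    object  = bucket.objects.build("docs/report.pdf") # hypothetical key
    object.content = File.open("report.pdf")
    object.acl     = :private  # override the public-read default (assumed value)
    object.save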
data/lib/s3/bucket.rb CHANGED
@@ -19,11 +19,8 @@ module S3
 
  # Returns location of the bucket, e.g. "EU"
  def location(reload = false)
- if reload or @location.nil?
- @location = location_constraint
- else
- @location
- end
+ return @location if defined?(@location) and not reload
+ @location = location_constraint
  end
 
  # Compares the bucket with other bucket. Returns true if the names
@@ -88,8 +85,7 @@ module S3
  vhost? ? "" : "#@name/"
  end
 
- # Returns the objects in the bucket and caches the result (see
- # #reload method).
+ # Returns the objects in the bucket and caches the result
  def objects
  Proxy.new(lambda { list_bucket }, :owner => self, :extend => ObjectsExtension)
  end
@@ -116,13 +112,20 @@ module S3
 
  def list_bucket(options = {})
  response = bucket_request(:get, :params => options)
+ max_keys = options[:max_keys]
  objects_attributes = parse_list_bucket_result(response.body)
 
- # If there are more than 1000 objects S3 truncates listing
- # and we need to request another listing for the remaining objects.
+ # If there are more than 1000 objects S3 truncates listing and
+ # we need to request another listing for the remaining objects.
  while parse_is_truncated(response.body)
- marker = objects_attributes.last[:key]
- response = bucket_request(:get, :params => options.merge(:marker => marker))
+ next_request_options = {:marker => objects_attributes.last[:key]}
+
+ if max_keys
+ break if objects_attributes.length >= max_keys
+ next_request_options[:max_keys] = max_keys - objects_attributes.length
+ end
+
+ response = bucket_request(:get, :params => options.merge(next_request_options))
  objects_attributes += parse_list_bucket_result(response.body)
  end
 
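
The new list_bucket loop above implements S3's truncated-listing protocol: each response carries at most 1000 keys plus an IsTruncated flag, the next request passes the last key seen as the marker, and the new :max_keys option caps the total by requesting only the remainder. A standalone sketch of that contract, with the yielded block standing in for bucket_request plus parse_list_bucket_result (both hypothetical stand-ins here):

    # Collects keys page by page; the block returns [keys, truncated?]
    # for one request. Passing max_keys trims the overall result.
    def list_all_keys(max_keys = nil)
      keys   = []
      marker = nil
      loop do
        page, truncated = yield(:marker   => marker,
                                :max_keys => max_keys && max_keys - keys.length)
        keys += page
        break unless truncated
        break if max_keys && keys.length >= max_keys
        marker = page.last  # resume listing after the last key seen
      end
      max_keys ? keys.first(max_keys) : keys
    end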
data/lib/s3/buckets_extension.rb CHANGED
@@ -12,14 +12,13 @@ module S3
  end
  alias :find :find_first
 
- # Find all buckets in the service
+ # Finds all buckets in the service
  def find_all
  proxy_target
  end
 
- # Destroy all buckets in the service. Doesn't destroy non-empty
- # buckets by default, pass true to force destroy (USE WITH
- # CARE!).
+ # Destroys all buckets in the service. Doesn't destroy non-empty
+ # buckets by default, pass true to force destroy (USE WITH CARE!).
  def destroy_all(force = false)
  proxy_target.each { |bucket| bucket.destroy(force) }
  end
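
For the destroy_all docs touched above, usage looks roughly like this (a hedged sketch; credentials are placeholders, and the behavior on non-empty buckets is inferred from S3's BucketNotEmpty error):

    service = S3::Service.new(:access_key_id     => "...",
                              :secret_access_key => "...")
    service.buckets.destroy_all        # non-empty buckets fail to delete
    service.buckets.destroy_all(true)  # force: deletes contents too (USE WITH CARE!)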
data/lib/s3/object.rb CHANGED
@@ -70,10 +70,11 @@ module S3
  false
  end
 
- # Download the content of the object, and caches it. Pass true
- # to clear the cache and download the object again.
+ # Downloads the content of the object, and caches it. Pass true to
+ # clear the cache and download the object again.
  def content(reload = false)
- get_object if reload or @content.nil?
+ return @content if defined?(@content) and not reload
+ get_object
  @content
  end
 
data/lib/s3/service.rb CHANGED
@@ -75,15 +75,13 @@ module S3
  end
 
  def connection
- if @connection.nil?
- @connection = Connection.new(:access_key_id => @access_key_id,
- :secret_access_key => @secret_access_key,
- :use_ssl => @use_ssl,
- :timeout => @timeout,
- :debug => @debug,
- :proxy => @proxy)
- end
- @connection
+ return @connection if defined?(@connection)
+ @connection = Connection.new(:access_key_id => @access_key_id,
+ :secret_access_key => @secret_access_key,
+ :use_ssl => @use_ssl,
+ :timeout => @timeout,
+ :debug => @debug,
+ :proxy => @proxy)
  end
  end
  end
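
The same defined?-based memoization now appears in Bucket#location, Object#content, and Service#connection above. The point of the switch: an @ivar.nil? check re-runs the expensive call whenever the cached result is legitimately nil or false, while defined?(@ivar) only asks whether the variable was ever assigned, so even a nil result is computed once. A self-contained illustration (class and method names are hypothetical):

    class LocationCache
      def location_nil_check
        # re-runs the lookup on every call when the lookup returns nil
        @location = expensive_lookup if @location.nil?
        @location
      end

      def location_defined_check(reload = false)
        # serves the cached value even when it is nil; pass true to refresh
        return @location if defined?(@location) and not reload
        @location = expensive_lookup
      end

      private

      def expensive_lookup
        puts "hitting the network..."
        nil # e.g. US-standard buckets report no location constraint
      end
    end

    cache = LocationCache.new
    cache.location_defined_check  # prints once
    cache.location_defined_check  # cached; no second network hit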
data/lib/s3/signature.rb CHANGED
@@ -52,9 +52,9 @@ module S3
  expires = options[:expires_at]
 
  headers = options[:headers] || {}
- headers.merge!('date' => expires.to_i.to_s)
+ headers.merge!("date" => expires.to_i.to_s)
 
- options.merge!(:resource => "/#{bucket}/#{resource}",
+ options.merge!(:resource => "/#{bucket}/#{URI.escape(resource)}",
  :method => options[:method] || :get,
  :headers => headers)
  signature = canonicalized_signature(options)
@@ -117,7 +117,7 @@ module S3
  string_to_sign << canonicalized_amz_headers
  string_to_sign << canonicalized_resource
 
- digest = OpenSSL::Digest::Digest.new('sha1')
+ digest = OpenSSL::Digest::Digest.new("sha1")
  hmac = OpenSSL::HMAC.digest(digest, secret_access_key, string_to_sign)
  base64 = Base64.encode64(hmac)
  base64.chomp
@@ -137,7 +137,7 @@ module S3
  headers = []
 
  # 1. Convert each HTTP header name to lower-case. For example,
- # 'X-Amz-Date' becomes 'x-amz-date'.
+ # "X-Amz-Date" becomes "x-amz-date".
  request.each { |key, value| headers << [key.downcase, value] if key =~ /\Ax-amz-/io }
  #=> [["c", 0], ["a", 1], ["a", 2], ["b", 3]]
 
@@ -150,9 +150,9 @@ module S3
  # "header-name:comma-separated-value-list" pair as prescribed by
  # RFC 2616, section 4.2, without any white-space between
  # values. For example, the two metadata headers
- # 'x-amz-meta-username: fred' and 'x-amz-meta-username: barney'
- # would be combined into the single header 'x-amz-meta-username:
- # fred,barney'.
+ # "x-amz-meta-username: fred" and "x-amz-meta-username: barney"
+ # would be combined into the single header "x-amz-meta-username:
+ # fred,barney".
  combined_headers = headers.inject([]) do |new_headers, header|
  existing_header = new_headers.find { |h| h.first == header.first }
  if existing_header
@@ -174,8 +174,8 @@ module S3
  end
 
  # 5. Trim any white-space around the colon in the header. For
- # example, the header 'x-amz-meta-username: fred,barney' would
- # become 'x-amz-meta-username:fred,barney'
+ # example, the header "x-amz-meta-username: fred,barney" would
+ # become "x-amz-meta-username:fred,barney"
  joined_headers = unfolded_headers.map do |header|
  key = header.first.strip
  value = header.last.strip
@@ -221,22 +221,22 @@ module S3
  # ?acl, or ?torrent, append the sub-resource including question
  # mark.
  sub_resources = [
- 'acl',
- 'location',
- 'logging',
- 'notification',
- 'partNumber',
- 'policy',
- 'requestPayment',
- 'torrent',
- 'uploadId',
- 'uploads',
- 'versionId',
- 'versioning',
- 'versions',
- 'website'
+ "acl",
+ "location",
+ "logging",
+ "notification",
+ "partNumber",
+ "policy",
+ "requestPayment",
+ "torrent",
+ "uploadId",
+ "uploads",
+ "versionId",
+ "versioning",
+ "versions",
+ "website"
  ]
- string << "?#{$1}" if uri.query =~ /&?(#{sub_resources.join('|')})(?:&|=|\Z)/
+ string << "?#{$1}" if uri.query =~ /&?(#{sub_resources.join("|")})(?:&|=|\Z)/
  string
  end
  end
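
To make the signing pipeline above concrete, here is a standalone sketch of the query-string signature that the temporary-URL path builds: HMAC-SHA1 over the string to sign (per the S3 REST authentication spec the comments cite), Base64-encoded, then escaped into the URL. Credentials, bucket, and key are placeholders, and OpenSSL::Digest.new stands in for the older OpenSSL::Digest::Digest spelling seen in the diff:

    require "openssl"
    require "base64"
    require "cgi"

    access_key = "AKIA..."   # placeholder credentials
    secret_key = "secret"
    resource   = "/my-bucket/path/to/file.txt"
    expires    = Time.now.to_i + 3600

    # HTTP-Verb \n Content-MD5 \n Content-Type \n Expires \n CanonicalizedResource
    string_to_sign = ["GET", "", "", expires.to_s, resource].join("\n")

    hmac      = OpenSSL::HMAC.digest(OpenSSL::Digest.new("sha1"),
                                     secret_key, string_to_sign)
    signature = Base64.encode64(hmac).chomp

    url = "http://s3.amazonaws.com#{resource}" \
          "?AWSAccessKeyId=#{access_key}" \
          "&Expires=#{expires}" \
          "&Signature=#{CGI.escape(signature)}"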
data/lib/s3/version.rb CHANGED
@@ -1,3 +1,3 @@
  module S3
- VERSION = "0.3.9"
+ VERSION = "0.3.10"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: s3
  version: !ruby/object:Gem::Version
- version: 0.3.9
+ version: 0.3.10
  prerelease:
  platform: ruby
  authors:
@@ -9,11 +9,11 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2011-11-05 00:00:00.000000000Z
+ date: 2011-12-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: proxies
- requirement: &11258640 !ruby/object:Gem::Requirement
+ requirement: &10828500 !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ~>
@@ -21,10 +21,10 @@ dependencies:
  version: 0.2.0
  type: :runtime
  prerelease: false
- version_requirements: *11258640
+ version_requirements: *10828500
  - !ruby/object:Gem::Dependency
  name: test-unit
- requirement: &11258020 !ruby/object:Gem::Requirement
+ requirement: &10827960 !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -32,10 +32,10 @@ dependencies:
  version: '2.0'
  type: :development
  prerelease: false
- version_requirements: *11258020
+ version_requirements: *10827960
  - !ruby/object:Gem::Dependency
  name: mocha
- requirement: &11257380 !ruby/object:Gem::Requirement
+ requirement: &10827580 !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -43,10 +43,10 @@ dependencies:
  version: '0'
  type: :development
  prerelease: false
- version_requirements: *11257380
+ version_requirements: *10827580
  - !ruby/object:Gem::Dependency
  name: bundler
- requirement: &11256840 !ruby/object:Gem::Requirement
+ requirement: &10827020 !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -54,7 +54,7 @@ dependencies:
  version: 1.0.0
  type: :development
  prerelease: false
- version_requirements: *11256840
+ version_requirements: *10827020
  description: ! 'S3 library provides access to Amazon''s Simple Storage Service. It
  supports both: European and US buckets through REST API.'
  email:
@@ -69,8 +69,6 @@ files:
  - LICENSE
  - README.md
  - Rakefile
- - extra/s3_attachment_fu.rb
- - extra/s3_paperclip.rb
  - lib/s3.rb
  - lib/s3/bucket.rb
  - lib/s3/buckets_extension.rb
data/extra/s3_attachment_fu.rb DELETED
@@ -1,159 +0,0 @@
- require "singleton"
- require "s3"
-
- # S3 Backend for attachment-fu plugin. After installing attachment-fu
- # plugin, copy the file to:
- # +vendor/plugins/attachment-fu/lib/technoweenie/attachment_fu/backends+
- #
- # To configure S3Backend create initializer file in your Rails
- # application, e.g. +config/initializers/s3_backend.rb+.
- #
- # Technoweenie::AttachmentFu::Backends::S3Backend.configuration do |config|
- # config.access_key_id = "..." # your access key id
- # config.secret_access_key = "..." # your secret access key
- # config.bucket_name = "..." # default bucket name to store attachments
- # config.use_ssl = false # pass true if you want to communicate via SSL
- # end
-
- module Technoweenie
- module AttachmentFu
- module Backends
- module S3Backend
-
- # S3Backend configuration class
- class Configuration
- include Singleton
-
- ATTRIBUTES = [:access_key_id, :secret_access_key, :use_ssl, :bucket_name]
-
- attr_accessor *ATTRIBUTES
- end
-
- # Method used to configure S3Backend, see the example above
- def self.configuration
- if block_given?
- yield Configuration.instance
- end
- Configuration.instance
- end
-
- # :nodoc:
- def self.included(base)
- include S3
-
- service = Service.new(:access_key_id => configuration.access_key_id,
- :secret_access_key => configuration.secret_access_key,
- :use_ssl => configuration.use_ssl)
-
- bucket_name = base.attachment_options[:bucket_name] || configuration.bucket_name
-
- base.cattr_accessor :bucket
- base.bucket = service.buckets.build(bucket_name) # don't connect
-
- base.before_update :rename_file
- end
-
- # The attachment ID used in the full path of a file
- def attachment_path_id
- ((respond_to?(:parent_id) && parent_id) || id).to_s
- end
-
- # The pseudo hierarchy containing the file relative to the bucket name
- # Example: <tt>:table_name/:id</tt>
- def base_path
- [attachment_options[:path_prefix], attachment_path_id].join("/")
- end
-
- # The full path to the file relative to the bucket name
- # Example: <tt>:table_name/:id/:filename</tt>
- def full_filename(thumbnail = nil)
- [base_path, thumbnail_name_for(thumbnail)].join("/")
- end
-
- # All public objects are accessible via a GET request to the S3 servers. You can generate a
- # url for an object using the s3_url method.
- #
- # @photo.s3_url
- #
- # The resulting url is in the form: <tt>http(s)://:server/:bucket_name/:table_name/:id/:file</tt> where
- # the <tt>:server</tt> variable defaults to <tt>AWS::S3 URL::DEFAULT_HOST</tt> (s3.amazonaws.com) and can be
- # set using the configuration parameters in <tt>RAILS_ROOT/config/amazon_s3.yml</tt>.
- #
- # The optional thumbnail argument will output the thumbnail's filename (if any).
- def s3_url(thumbnail = nil)
- if attachment_options[:cname]
- ["#{s3_protocol}#{bucket.name}", full_filename(thumbnail)].join("/")
- else
- ["#{s3_protocol}#{s3_hostname}#{bucket.path_prefix}", full_filename(thumbnail)].join("/")
- end
- end
- alias :public_url :s3_url
- alias :public_filename :s3_url
-
- # Name of the bucket used to store attachments
- def bucket_name
- self.class.bucket.name
- end
-
- # :nodoc:
- def create_temp_file
- write_to_temp_file current_data
- end
-
- # :nodoc:
- def current_data
- # Object.value full_filename, bucket_name
- object = self.class.bucket.objects.find(full_filename)
- object.content
- end
-
- # Returns http:// or https:// depending on use_ssl setting
- def s3_protocol
- attachment_options[:use_ssl] ? "https://" : "http://"
- end
-
- # Returns hostname of the bucket
- # e.g. +bucketname.com.s3.amazonaws.com+. Additionally you can
- # pass :cname => true option in has_attachment method to
- # return CNAME only, e.g. +bucketname.com+
- def s3_hostname
- attachment_options[:cname] ? self.class.bucket.name : self.class.bucket.host
- end
-
- protected
-
- # Frees the space in S3 bucket, used by after_destroy callback
- def destroy_file
- object = self.class.bucket.objects.find(full_filename)
- object.destroy
- end
-
- # Renames file if filename has been changed - copy the file to
- # new key and delete old one
- def rename_file
- return unless filename_changed?
-
- old_full_filename = [base_path, filename_was].join("/")
-
- object = self.class.bucket.objects.find(old_full_filename)
- new_object = object.copy(:key => full_filename, :acl => attachment_options[:acl])
- object.destroy
- true
- end
-
- # Saves the file to storage
- def save_to_storage
- if save_attachment?
- object = self.class.bucket.objects.build(full_filename)
-
- object.content_type = content_type
- object.acl = attachment_options[:acl]
- object.content = temp_path ? File.open(temp_path) : temp_data
- object.save
- end
- true
- end
- end
- end
- end
- end
data/extra/s3_paperclip.rb DELETED
@@ -1,176 +0,0 @@
- # S3 backend for paperclip plugin. Copy the file to:
- # +config/initializers/+ directory
- #
- # Example configuration for CNAME bucket:
- #
- # has_attached_file :image,
- # :s3_host_alias => "bucket.domain.tld",
- # :s3_headers => { :cache_control => 10.years.from_now.httpdate },
- # :url => ":s3_alias_url",
- # :styles => {
- # :medium => "300x300>",
- # :thumb => "100x100>"
- # },
- # :storage => :s3,
- # :s3_credentials => {
- # :access_key_id => "...",
- # :secret_access_key => "..."
- # },
- # :bucket => "bucket.domain.tld",
- # :path => ":attachment/:id/:style.:extension"
- module Paperclip
- module Storage
- module S3
- def self.extended base
- begin
- require "s3"
- rescue LoadError => e
- e.message << " (You may need to install the s3 gem)"
- raise e
- end
-
- base.instance_eval do
- @s3_credentials = parse_credentials(@options[:s3_credentials])
- @bucket_name = @options[:bucket] || @s3_credentials[:bucket]
- @bucket_name = @bucket_name.call(self) if @bucket_name.is_a?(Proc)
- @s3_options = @options[:s3_options] || {}
- @s3_permissions = @options[:s3_permissions] || :public_read
- @s3_storage_class = @options[:s3_storage_class] || :standard
- @s3_protocol = @options[:s3_protocol] || (@s3_permissions == :public_read ? "http" : "https")
- @s3_headers = @options[:s3_headers] || {}
- @s3_host_alias = @options[:s3_host_alias]
- @url = ":s3_path_url" unless @url.to_s.match(/^:s3.*url$/)
- @service = ::S3::Service.new(@s3_options.merge(
- :access_key_id => @s3_credentials[:access_key_id],
- :secret_access_key => @s3_credentials[:secret_access_key],
- :use_ssl => @s3_protocol == "https"
- ))
- @bucket = @service.buckets.build(@bucket_name)
- end
- Paperclip.interpolates(:s3_alias_url) do |attachment, style|
- "#{attachment.s3_protocol}://#{attachment.s3_host_alias}/#{Paperclip::Storage::S3.encode_path(attachment.path(style)).gsub(%r{^/}, "")}"
- end
- Paperclip.interpolates(:s3_path_url) do |attachment, style|
- "#{attachment.s3_protocol}://s3.amazonaws.com/#{attachment.bucket_name}/#{Paperclip::Storage::S3.encode_path(attachment.path(style)).gsub(%r{^/}, "")}"
- end
- Paperclip.interpolates(:s3_domain_url) do |attachment, style|
- "#{attachment.s3_protocol}://#{attachment.bucket_name}.s3.amazonaws.com/#{Paperclip::Storage::S3.encode_path(attachment.path(style)).gsub(%r{^/}, "")}"
- end
- end
-
- def expiring_url(style_name = default_style, time = 3600)
- bucket.objects.build(path(style_name)).temporary_url(Time.now + time)
- end
-
- def bucket_name
- @bucket_name
- end
-
- def bucket
- @bucket
- end
-
- def s3_host_alias
- @s3_host_alias
- end
-
- def content_disposition(style = default_style)
- cd = @s3_headers[:content_disposition]
- cd.respond_to?(:call) ? cd.call(self, style) : cd
- end
-
- def parse_credentials creds
- creds = find_credentials(creds).stringify_keys
- (creds[RAILS_ENV] || creds).symbolize_keys
- end
-
- def exists?(style = default_style)
- if original_filename
- bucket.objects.build(path(style)).exists?
- else
- false
- end
- end
-
- def s3_protocol
- @s3_protocol
- end
-
- # Returns representation of the data of the file assigned to the given
- # style, in the format most representative of the current storage.
- def to_file style = default_style
- return @queued_for_write[style] if @queued_for_write[style]
- begin
- filename = path(style)
- extname = File.extname(filename)
- basename = File.basename(filename, extname)
- file = Tempfile.new([basename, extname])
- file.binmode if file.respond_to?(:binmode)
- file.write(bucket.objects.find(path(style)).content)
- file.rewind
- rescue ::S3::Error::NoSuchKey
- file.close if file.respond_to?(:close)
- file = nil
- end
- file
- end
-
- # Encodes all characters except forward-slash (/) and explicitly legal URL characters
- def self.encode_path(path)
- URI.encode(path, /[^#{URI::REGEXP::PATTERN::UNRESERVED}\/]/)
- end
-
- def encoded_path(style)
- Paperclip::Storage::S3.encode_path(path(style))
- end
-
- def flush_writes #:nodoc:
- @queued_for_write.each do |style, file|
- begin
- log("saving #{path(style)}")
- object = bucket.objects.build(path(style))
- file.rewind
- object.content = file.read
- object.acl = @s3_permissions
- object.storage_class = @s3_storage_class
- object.content_type = instance_read(:content_type)
- object.cache_control = @s3_headers[:cache_control]
- object.content_disposition = content_disposition(style)
- object.content_encoding = @s3_headers[:content_encoding]
- object.save
- rescue ::S3::Error::ResponseError => e
- raise
- end
- end
- @queued_for_write = {}
- end
-
- def flush_deletes #:nodoc:
- @queued_for_delete.each do |path|
- begin
- log("deleting #{path}")
- bucket.objects.find(path).destroy
- rescue ::S3::Error::ResponseError
- # Ignore this.
- end
- end
- @queued_for_delete = []
- end
-
- def find_credentials creds
- case creds
- when File
- YAML::load(ERB.new(File.read(creds.path)).result)
- when String
- YAML::load(ERB.new(File.read(creds)).result)
- when Hash
- creds
- else
- raise ArgumentError, "Credentials are not a path, file, or hash."
- end
- end
- private :find_credentials
-
- end
- end
- end