dm-paperclip 2.3.1 → 2.4.0

This diff shows the changes between two publicly released versions of the package, as published to their public registry; it is provided for informational purposes only.
@@ -25,6 +25,7 @@
  #
  # See the +has_attached_file+ documentation for more details.

+ require 'erb'
  require 'tempfile'

  require 'dm-core'
@@ -42,7 +43,7 @@ require 'dm-paperclip/attachment'
  # documentation for Paperclip::ClassMethods for more useful information.
  module Paperclip

- VERSION = "2.3.1"
+ VERSION = "2.4.0"

  # To configure Paperclip, put this code in an initializer, Rake task, or wherever:
  #
@@ -274,7 +274,7 @@ module Paperclip
  end

  def valid_assignment? file #:nodoc:
- if file.respond_to?(:[])
+ if file.is_a?(Hash) || (defined?(Mash) && file.is_a?(Mash))
  file[:filename] || file['filename']
  else
  file.nil? || (file.respond_to?(:original_filename) && file.respond_to?(:content_type))
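A rough sketch of the two assignment shapes this predicate now distinguishes (the names below are illustrative, not from the gem):

    # Hash-style upload params pass as long as they carry a filename:
    params_style = { :filename => "avatar.png", :content_type => "image/png" }

    # A file-like object passes when it quacks like an uploaded file:
    file_style = File.open("avatar.png", "rb")
    def file_style.original_filename; "avatar.png"; end
    def file_style.content_type;      "image/png";  end

    # nil remains a valid assignment (it clears the attachment).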
@@ -4,7 +4,8 @@ module IOStream

  # Returns a Tempfile containing the contents of the readable object.
  def to_tempfile
- tempfile = Tempfile.new("stream")
+ name = respond_to?(:original_filename) ? original_filename : (respond_to?(:path) ? path : "stream")
+ tempfile = Paperclip::Tempfile.new(File.basename(name))
  tempfile.binmode
  self.stream_to(tempfile)
  end
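A small sketch of what the new naming buys, assuming a local photo.png exists: the temporary copy now carries the original basename and extension instead of a generic "stream" name, which helps processors that look at file extensions.

    File.open("photo.png", "rb") do |file|
      tempfile = file.to_tempfile          # now a Paperclip::Tempfile
      File.basename(tempfile.path)         # keeps "photo" and ".png" in the generated name
    end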
@@ -25,7 +26,7 @@ module IOStream
  while self.read(in_blocks_of, buffer) do
  dstio.write(buffer)
  end
- dstio.rewind
+ dstio.rewind
  dstio
  end
  end
@@ -55,4 +56,4 @@ if defined? Tempfile
  end
  end
  end
- end
+ end
@@ -8,21 +8,21 @@ module Paperclip
  # * +path+: The location of the repository of attachments on disk. This can (and, in
  # almost all cases, should) be coordinated with the value of the +url+ option to
  # allow files to be saved into a place where Apache can serve them without
- # hitting your app. Defaults to
+ # hitting your app. Defaults to
  # ":rails_root/public/:attachment/:id/:style/:basename.:extension"
- # By default this places the files in the app's public directory which can be served
- # directly. If you are using capistrano for deployment, a good idea would be to
- # make a symlink to the capistrano-created system directory from inside your app's
+ # By default this places the files in the app's public directory which can be served
+ # directly. If you are using capistrano for deployment, a good idea would be to
+ # make a symlink to the capistrano-created system directory from inside your app's
  # public directory.
  # See Paperclip::Attachment#interpolate for more information on variable interpolaton.
  # :path => "/var/app/attachments/:class/:id/:style/:basename.:extension"
  module Filesystem
  def self.extended base
  end
-
- def exists?(style = default_style)
+
+ def exists?(style_name = default_style)
  if original_filename
- File.exist?(path(style))
+ File.exist?(path(style_name))
  else
  false
  end
@@ -30,18 +30,17 @@ module Paperclip

  # Returns representation of the data of the file assigned to the given
  # style, in the format most representative of the current storage.
- def to_file style = default_style
- @queued_for_write[style] || (File.new(path(style), 'rb') if exists?(style))
+ def to_file style_name = default_style
+ @queued_for_write[style_name] || (File.new(path(style_name), 'rb') if exists?(style_name))
  end
- alias_method :to_io, :to_file

  def flush_writes #:nodoc:
- @queued_for_write.each do |style, file|
+ @queued_for_write.each do |style_name, file|
  file.close
- FileUtils.mkdir_p(File.dirname(path(style)))
- log("saving #{path(style)}")
- FileUtils.mv(file.path, path(style))
- FileUtils.chmod(0644, path(style))
+ FileUtils.mkdir_p(File.dirname(path(style_name)))
+ log("saving #{path(style_name)}")
+ FileUtils.mv(file.path, path(style_name))
+ FileUtils.chmod(0644, path(style_name))
  end
  @queued_for_write = {}
  end
@@ -79,26 +78,26 @@ module Paperclip
  # database.yml file, so different environments can use different accounts:
  # development:
  # access_key_id: 123...
- # secret_access_key: 123...
+ # secret_access_key: 123...
  # test:
  # access_key_id: abc...
- # secret_access_key: abc...
+ # secret_access_key: abc...
  # production:
  # access_key_id: 456...
- # secret_access_key: 456...
+ # secret_access_key: 456...
  # This is not required, however, and the file may simply look like this:
  # access_key_id: 456...
- # secret_access_key: 456...
+ # secret_access_key: 456...
  # In which case, those access keys will be used in all environments. You can also
  # put your bucket name in this file, instead of adding it to the code directly.
- # This is useful when you want the same account but a different bucket for
+ # This is useful when you want the same account but a different bucket for
  # development versus production.
  # * +s3_permissions+: This is a String that should be one of the "canned" access
  # policies that S3 provides (more information can be found here:
  # http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html#RESTCannedAccessPolicies)
- # The default for Paperclip is "public-read".
- # * +s3_protocol+: The protocol for the URLs generated to your S3 assets. Can be either
- # 'http' or 'https'. Defaults to 'http' when your :s3_permissions are 'public-read' (the
+ # The default for Paperclip is :public_read.
+ # * +s3_protocol+: The protocol for the URLs generated to your S3 assets. Can be either
+ # 'http' or 'https'. Defaults to 'http' when your :s3_permissions are :public_read (the
  # default), and 'https' when your :s3_permissions are anything else.
  # * +s3_headers+: A hash of headers such as {'Expires' => 1.year.from_now.httpdate}
  # * +bucket+: This is the name of the S3 bucket that will store your files. Remember
@@ -112,7 +111,7 @@ module Paperclip
  # * +url+: There are three options for the S3 url. You can choose to have the bucket's name
  # placed domain-style (bucket.s3.amazonaws.com) or path-style (s3.amazonaws.com/bucket).
  # Lastly, you can specify a CNAME (which requires the CNAME to be specified as
- # :s3_alias_url. You can read more about CNAMEs and S3 at
+ # :s3_alias_url. You can read more about CNAMEs and S3 at
  # http://docs.amazonwebservices.com/AmazonS3/latest/index.html?VirtualHosting.html
  # Normally, this won't matter in the slightest and you can leave the default (which is
  # path-style, or :s3_path_url). But in some cases paths don't work and you need to use
@@ -128,17 +127,27 @@ module Paperclip
  # separate parts of your file name.
  module S3
  def self.extended base
- require 'right_aws'
+ begin
+ require 'aws/s3'
+ rescue LoadError => e
+ e.message << " (You may need to install the aws-s3 gem)"
+ raise e
+ end
+
  base.instance_eval do
  @s3_credentials = parse_credentials(@options[:s3_credentials])
  @bucket = @options[:bucket] || @s3_credentials[:bucket]
  @bucket = @bucket.call(self) if @bucket.is_a?(Proc)
  @s3_options = @options[:s3_options] || {}
- @s3_permissions = @options[:s3_permissions] || 'public-read'
- @s3_protocol = @options[:s3_protocol] || (@s3_permissions == 'public-read' ? 'http' : 'https')
+ @s3_permissions = @options[:s3_permissions] || :public_read
+ @s3_protocol = @options[:s3_protocol] || (@s3_permissions == :public_read ? 'http' : 'https')
  @s3_headers = @options[:s3_headers] || {}
  @s3_host_alias = @options[:s3_host_alias]
  @url = ":s3_path_url" unless @url.to_s.match(/^:s3.*url$/)
+ AWS::S3::Base.establish_connection!( @s3_options.merge(
+ :access_key_id => @s3_credentials[:access_key_id],
+ :secret_access_key => @s3_credentials[:secret_access_key]
+ ))
  end
  Paperclip.interpolates(:s3_alias_url) do |attachment, style|
  "#{attachment.s3_protocol}://#{attachment.s3_host_alias}/#{attachment.path(style).gsub(%r{^/}, "")}"
@@ -150,15 +159,9 @@ module Paperclip
  "#{attachment.s3_protocol}://#{attachment.bucket_name}.s3.amazonaws.com/#{attachment.path(style).gsub(%r{^/}, "")}"
  end
  end
-
- def s3
- @s3 ||= RightAws::S3.new(@s3_credentials[:access_key_id],
- @s3_credentials[:secret_access_key],
- @s3_options)
- end
-
- def s3_bucket
- @s3_bucket ||= s3.bucket(@bucket, true, @s3_permissions)
+
+ def expiring_url(time = 3600)
+ AWS::S3::S3Object.url_for(path, bucket_name, :expires_in => time )
  end

  def bucket_name
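The new expiring_url helper delegates straight to AWS::S3::S3Object.url_for, so a time-limited link can be generated like this (the avatar attachment mirrors the gem's own tests):

    dummy.avatar.expiring_url           # signed URL, valid for the default 3600 seconds
    dummy.avatar.expiring_url(10 * 60)  # signed URL, valid for ten minutes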
@@ -170,16 +173,26 @@ module Paperclip
  end

  def parse_credentials creds
- creds = find_credentials(creds).to_mash
+ creds = find_credentials(creds).to_mash.stringify_keys!
  if defined? Merb && Merb.respond_to?(:env)
- (creds[Merb.env] || creds)
+ (creds[Merb.env] || creds).symbolize_keys
+ elsif defined? RAILS_ENV
+ (creds[RAILS_ENV] || creds).symbolize_keys
+ elsif defined? Rails && Rails.respond_to(:env)
+ (creds[Rails.env] || creds).symbolize_keys
+ elsif defined? RACK_ENV
+ (creds[RACK_ENV] || creds).symbolize_keys
  else
- (creds[RAILS_ENV] || creds)
+ creds.symbolize_keys
  end
  end
-
+
  def exists?(style = default_style)
- s3_bucket.key(path(style)) ? true : false
+ if original_filename
+ AWS::S3::S3Object.exists?(path(style), bucket_name)
+ else
+ false
+ end
  end

  def s3_protocol
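The upshot, taken from the storage tests further down in this diff: environment-keyed credentials collapse to the current environment's block, and the keys come back symbolized either way (avatar here is just the attachment under test):

    # with the current environment set to "production"
    avatar.parse_credentials('production' => { :key => '12345' },
                             :development => { :key => '54321' })
    # => { :key => "12345" }

    # when no environment key matches, the whole hash is returned, symbolized
    avatar.parse_credentials(:test => "12345")  # => { :test => "12345" }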
@@ -189,18 +202,24 @@ module Paperclip
  # Returns representation of the data of the file assigned to the given
  # style, in the format most representative of the current storage.
  def to_file style = default_style
- @queued_for_write[style] || s3_bucket.key(path(style))
+ return @queued_for_write[style] if @queued_for_write[style]
+ file = Tempfile.new(path(style))
+ file.write(AWS::S3::S3Object.value(path(style), bucket_name))
+ file.rewind
+ return file
  end
- alias_method :to_io, :to_file

  def flush_writes #:nodoc:
  @queued_for_write.each do |style, file|
  begin
  log("saving #{path(style)}")
- key = s3_bucket.key(path(style))
- key.data = file
- key.put(nil, @s3_permissions, {'Content-type' => instance_read(:content_type)}.merge(@s3_headers))
- rescue RightAws::AwsError => e
+ AWS::S3::S3Object.store(path(style),
+ file,
+ bucket_name,
+ {:content_type => instance_read(:content_type),
+ :access => @s3_permissions,
+ }.merge(@s3_headers))
+ rescue AWS::S3::ResponseError => e
  raise
  end
  end
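Pulled together, an S3-backed attachment wired to the options documented earlier in this file might look roughly like this; the model name, bucket, header value, and credentials path are placeholders, not part of the gem:

    class Asset
      include DataMapper::Resource
      include DataMapper::Validate
      include Paperclip::Resource

      property :id, ::DataMapper::Types::Serial

      has_attached_file :avatar,
                        :storage        => :s3,
                        :bucket         => "my-bucket",
                        :s3_credentials => "config/s3.yml",   # a File, a String path, or a Hash all work
                        :s3_permissions => :public_read,      # now a symbol, matching aws-s3
                        :s3_headers     => { 'Cache-Control' => 'max-age=31557600' },
                        :path           => ":attachment/:id/:style/:basename.:extension",
                        :url            => ":s3_domain_url"
    end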
@@ -211,22 +230,20 @@ module Paperclip
  @queued_for_delete.each do |path|
  begin
  log("deleting #{path}")
- if file = s3_bucket.key(path)
- file.delete
- end
- rescue RightAws::AwsError
+ AWS::S3::S3Object.delete(path, bucket_name)
+ rescue AWS::S3::ResponseError
  # Ignore this.
  end
  end
  @queued_for_delete = []
  end
-
+
  def find_credentials creds
  case creds
  when File
- YAML.load_file(creds.path)
+ YAML::load(ERB.new(File.read(creds.path)).result)
  when String
- YAML.load_file(creds)
+ YAML::load(ERB.new(File.read(creds)).result)
  when Hash
  creds
  else
@@ -237,4 +254,4 @@ module Paperclip

  end
  end
- end
+ end
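Because find_credentials now runs the YAML through ERB before parsing it, a credentials file can pull its secrets from the environment. A minimal sketch, mirroring the S3_KEY/S3_SECRET/S3_BUCKET variables the test suite below relies on (the file name and variable names are up to you):

    access_key_id:     <%= ENV['S3_KEY'] %>
    secret_access_key: <%= ENV['S3_SECRET'] %>
    bucket:            <%= ENV['S3_BUCKET'] %>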
@@ -149,9 +149,12 @@ class AttachmentTest < Test::Unit::TestCase
  setup do
  rebuild_model

+ @tempfile = mock
+ @tempfile.expects(:size).returns(10)
+
  @not_file = mock
  @not_file.stubs(:nil?).returns(false)
- @not_file.expects(:to_tempfile).returns(self)
+ @not_file.expects(:to_tempfile).returns(@tempfile)
  @not_file.expects(:original_filename).returns("filename.png\r\n")
  @not_file.expects(:content_type).returns("image/png\r\n")
  @not_file.expects(:size).returns(10)
@@ -178,9 +181,12 @@ class AttachmentTest < Test::Unit::TestCase
  setup do
  rebuild_model

+ @tempfile = mock
+ @tempfile.expects(:size).returns(10)
+
  @not_file = mock
  @not_file.stubs(:nil?).returns(false)
- @not_file.expects(:to_tempfile).returns(self)
+ @not_file.expects(:to_tempfile).returns(@tempfile)
  @not_file.expects(:original_filename).returns("sheep_say_b_.png\r\n")
  @not_file.expects(:content_type).returns("image/png\r\n")
  @not_file.expects(:size).returns(10)
@@ -257,12 +263,12 @@ class AttachmentTest < Test::Unit::TestCase
  end

  should "return the proper path when filename has a single .'s" do
- assert_equal "./test/../tmp/avatars/dummies/original/#{@instance.id}/5k.png", @attachment.path
+ assert_equal File.expand_path("./test/../tmp/avatars/dummies/original/#{@instance.id}/5k.png"), File.expand_path(@attachment.path)
  end

  should "return the proper path when filename has multiple .'s" do
  @instance.stubs(:avatar_file_name).returns("5k.old.png")
- assert_equal "./test/../tmp/avatars/dummies/original/#{@instance.id}/5k.old.png", @attachment.path
+ assert_equal File.expand_path("./test/../tmp/avatars/dummies/original/#{@instance.id}/5k.old.png"), File.expand_path(@attachment.path)
  end

  context "when expecting three styles" do
@@ -299,7 +305,7 @@ class AttachmentTest < Test::Unit::TestCase

  should "commit the files to disk" do
  [:large, :medium, :small].each do |style|
- io = @attachment.to_io(style)
+ io = @attachment.to_file(style)
  assert File.exists?(io)
  assert ! io.is_a?(::Tempfile)
  end
@@ -310,7 +316,7 @@ class AttachmentTest < Test::Unit::TestCase
  [:medium, 100, 15, "GIF"],
  [:small, 32, 32, "JPEG"]].each do |style|
  cmd = "identify -format '%w %h %b %m' " +
- "#{@attachment.to_io(style.first).path}"
+ "#{@attachment.path(style.first)}"
  out = `#{cmd}`
  width, height, size, format = out.split(" ")
  assert_equal style[1].to_s, width.to_s
@@ -22,7 +22,8 @@ Merb.class_eval do
  def self.root
  "#{ROOT}"
  end
- def self.env
+ def self.env(str=nil)
+ ENV['RAILS_ENV'] = str if str
  ENV['RAILS_ENV']
  end
  end
@@ -55,7 +56,7 @@ def rebuild_model options = {}
  include DataMapper::Resource
  include DataMapper::Validate
  include Paperclip::Resource
- property :id, Integer, :serial => true
+ property :id, ::DataMapper::Types::Serial
  property :other, String
  has_attached_file :avatar, options
  end
@@ -63,8 +64,8 @@ def rebuild_model options = {}
  end

  def temporary_env(new_env)
- old_env = defined?(RAILS_ENV) ? RAILS_ENV : nil
- Object.const_set("RAILS_ENV", new_env)
+ old_env = Merb.env
+ Merb.env(new_env)
  yield
- Object.const_set("RAILS_ENV", old_env)
+ Merb.env(old_env)
  end
@@ -1,10 +1,4 @@
- require 'rubygems'
- require 'test/unit'
- require 'stringio'
- require 'tempfile'
- require 'shoulda'
-
- require File.join(File.dirname(__FILE__), '..', 'lib', 'dm-paperclip', 'iostream.rb')
+ require 'test/helper'

  class IOStreamTest < Test::Unit::TestCase
  context "IOStream" do
@@ -17,29 +11,46 @@ class IOStreamTest < Test::Unit::TestCase

  context "A file" do
  setup do
- @file = File.new(File.join(File.dirname(__FILE__), "fixtures", "5k.png"))
+ @file = File.new(File.join(File.dirname(__FILE__), "fixtures", "5k.png"), 'rb')
  end

+ teardown { @file.close }
+
  context "that is sent #stream_to" do

- [["/tmp/iostream.string.test", File],
- [Tempfile.new('iostream.test'), Tempfile]].each do |args|
+ context "and given a String" do
+ setup do
+ FileUtils.mkdir_p(File.join(ROOT, 'tmp'))
+ assert @result = @file.stream_to(File.join(ROOT, 'tmp', 'iostream.string.test'))
+ end

- context "and given a #{args[0].class.to_s}" do
- setup do
- assert @result = @file.stream_to(args[0])
- end
+ should "return a File" do
+ assert @result.is_a?(File)
+ end

- should "return a #{args[1].to_s}" do
- assert @result.is_a?(args[1])
- end
+ should "contain the same data as the original file" do
+ @file.rewind; @result.rewind
+ assert_equal @file.read, @result.read
+ end
+ end

- should "contain the same data as the original file" do
- @file.rewind; @result.rewind
- assert_equal @file.read, @result.read
- end
+ context "and given a Tempfile" do
+ setup do
+ tempfile = Tempfile.new('iostream.test')
+ tempfile.binmode
+ assert @result = @file.stream_to(tempfile)
+ end
+
+ should "return a Tempfile" do
+ assert @result.is_a?(Tempfile)
+ end
+
+ should "contain the same data as the original file" do
+ @file.rewind; @result.rewind
+ assert_equal @file.read, @result.read
  end
  end
+
  end

  context "that is sent #to_tempfile" do
@@ -47,8 +58,15 @@ class IOStreamTest < Test::Unit::TestCase
  assert @tempfile = @file.to_tempfile
  end

- should "convert it to a Tempfile" do
- assert @tempfile.is_a?(Tempfile)
+ should "convert it to a Paperclip Tempfile" do
+ assert @tempfile.is_a?(Paperclip::Tempfile)
+ end
+
+ should "have the name be based on the original_filename" do
+ name = File.basename(@file.path)
+ extension = File.extname(name)
+ basename = File.basename(name, extension)
+ assert_match %r[^#{Regexp.quote(basename)}.*?#{Regexp.quote(extension)}], File.basename(@tempfile.path)
  end

  should "have the Tempfile contain the same data as the file" do
@@ -57,4 +75,4 @@ class IOStreamTest < Test::Unit::TestCase
  end
  end
  end
- end
+ end
@@ -21,7 +21,7 @@ class PaperclipTest < Test::Unit::TestCase
  include DataMapper::Resource
  include DataMapper::Validate
  include Paperclip::Resource
- property :id, Integer, :serial => true
+ property :id, DataMapper::Types::Serial
  property :other, String
  has_attached_file :file
  end
@@ -1,13 +1,10 @@
- require 'rubygems'
- require 'test/unit'
- require 'shoulda'
- require 'right_aws'
-
- require File.join(File.dirname(__FILE__), '..', 'lib', 'dm-paperclip', 'geometry.rb')
+ require 'test/helper'
+ require 'aws/s3'

  class StorageTest < Test::Unit::TestCase
  context "Parsing S3 credentials" do
  setup do
+ AWS::S3::Base.stubs(:establish_connection!)
  rebuild_model :storage => :s3,
  :bucket => "testing",
  :s3_credentials => {:not => :important}
@@ -15,48 +12,133 @@ class StorageTest < Test::Unit::TestCase
  @dummy = Dummy.new
  @avatar = @dummy.avatar

- @current_env = ENV['RAILS_ENV']
+ @current_env = Merb.env
  end

  teardown do
- ENV['RAILS_ENV'] = @current_env
+ Merb.env(@current_env)
  end

- should "get the correct credentials when RAILS_ENV is production" do
- ENV['RAILS_ENV'] = 'production'
- assert_equal({'key' => "12345"},
+ should "get the correct credentials when environment is production" do
+ Merb.env("production")
+ assert_equal({:key => "12345"},
  @avatar.parse_credentials('production' => {:key => '12345'},
  :development => {:key => "54321"}))
  end

- should "get the correct credentials when RAILS_ENV is development" do
- ENV['RAILS_ENV'] = 'development'
- assert_equal({'key' => "54321"},
+ should "get the correct credentials when environment is development" do
+ Merb.env("development")
+ assert_equal({:key => "54321"},
  @avatar.parse_credentials('production' => {:key => '12345'},
  :development => {:key => "54321"}))
  end

  should "return the argument if the key does not exist" do
- ENV['RAILS_ENV'] = "not really an env"
- assert_equal({'test' => "12345"}, @avatar.parse_credentials(:test => "12345"))
+ Merb.env("not really an env")
+ assert_equal({:test => "12345"}, @avatar.parse_credentials(:test => "12345"))
+ end
+ end
+
+ context "" do
+ setup do
+ AWS::S3::Base.stubs(:establish_connection!)
+ rebuild_model :storage => :s3,
+ :s3_credentials => {},
+ :bucket => "bucket",
+ :path => ":attachment/:basename.:extension",
+ :url => ":s3_path_url"
+ @dummy = Dummy.new
+ @dummy.avatar = StringIO.new(".")
+ end
+
+ should "return a url based on an S3 path" do
+ assert_match %r{^http://s3.amazonaws.com/bucket/avatars/stringio.txt}, @dummy.avatar.url
+ end
+ end
+ context "" do
+ setup do
+ AWS::S3::Base.stubs(:establish_connection!)
+ rebuild_model :storage => :s3,
+ :s3_credentials => {},
+ :bucket => "bucket",
+ :path => ":attachment/:basename.:extension",
+ :url => ":s3_domain_url"
+ @dummy = Dummy.new
+ @dummy.avatar = StringIO.new(".")
+ end
+
+ should "return a url based on an S3 subdomain" do
+ assert_match %r{^http://bucket.s3.amazonaws.com/avatars/stringio.txt}, @dummy.avatar.url
+ end
+ end
+ context "" do
+ setup do
+ AWS::S3::Base.stubs(:establish_connection!)
+ rebuild_model :storage => :s3,
+ :s3_credentials => {
+ :production => { :bucket => "prod_bucket" },
+ :development => { :bucket => "dev_bucket" }
+ },
+ :s3_host_alias => "something.something.com",
+ :path => ":attachment/:basename.:extension",
+ :url => ":s3_alias_url"
+ @dummy = Dummy.new
+ @dummy.avatar = StringIO.new(".")
+ end
+
+ should "return a url based on the host_alias" do
+ assert_match %r{^http://something.something.com/avatars/stringio.txt}, @dummy.avatar.url
+ end
+ end
+
+ context "Generating a url with an expiration" do
+ setup do
+ AWS::S3::Base.stubs(:establish_connection!)
+ rebuild_model :storage => :s3,
+ :s3_credentials => {
+ :production => { :bucket => "prod_bucket" },
+ :development => { :bucket => "dev_bucket" }
+ },
+ :s3_host_alias => "something.something.com",
+ :path => ":attachment/:basename.:extension",
+ :url => ":s3_alias_url"
+
+ Merb.env("production")
+
+ @dummy = Dummy.new
+ @dummy.avatar = StringIO.new(".")
+
+ AWS::S3::S3Object.expects(:url_for).with("avatars/stringio.txt", "prod_bucket", { :expires_in => 3600 })
+
+ @dummy.avatar.expiring_url
+ end
+
+ should "should succeed" do
+ assert true
  end
  end

  context "Parsing S3 credentials with a bucket in them" do
  setup do
+ AWS::S3::Base.stubs(:establish_connection!)
  rebuild_model :storage => :s3,
  :s3_credentials => {
  :production => { :bucket => "prod_bucket" },
  :development => { :bucket => "dev_bucket" }
  }
  @dummy = Dummy.new
+ @old_env = Merb.env
  end

- should "get the right bucket in production", :before => lambda{ ENV.expects(:[]).returns('production') } do
+ teardown{ Merb.env(@old_env) }
+
+ should "get the right bucket in production" do
+ Merb.env("production")
  assert_equal "prod_bucket", @dummy.avatar.bucket_name
  end

- should "get the right bucket in development", :before => lambda{ ENV.expects(:[]).returns('development') } do
+ should "get the right bucket in development" do
+ Merb.env("development")
  assert_equal "dev_bucket", @dummy.avatar.bucket_name
  end
  end
@@ -82,11 +164,13 @@ class StorageTest < Test::Unit::TestCase

  context "when assigned" do
  setup do
- @file = File.new(File.join(File.dirname(__FILE__), 'fixtures', '5k.png'))
+ @file = File.new(File.join(File.dirname(__FILE__), 'fixtures', '5k.png'), 'rb')
  @dummy = Dummy.new
  @dummy.avatar = @file
  end

+ teardown { @file.close }
+
  should "not get a bucket to get a URL" do
  @dummy.avatar.expects(:s3).never
  @dummy.avatar.expects(:s3_bucket).never
@@ -95,15 +179,7 @@ class StorageTest < Test::Unit::TestCase

  context "and saved" do
  setup do
- @s3_mock = stub
- @bucket_mock = stub
- RightAws::S3.expects(:new).with("12345", "54321", {}).returns(@s3_mock)
- @s3_mock.expects(:bucket).with("testing", true, "public-read").returns(@bucket_mock)
- @key_mock = stub
- @bucket_mock.expects(:key).returns(@key_mock)
- @key_mock.expects(:data=)
- @key_mock.expects(:put)
- @dummy.id = 1
+ AWS::S3::S3Object.stubs(:store).with(@dummy.avatar.path, anything, 'testing', :content_type => 'image/png', :access => :public_read)
  @dummy.save
  end

@@ -114,13 +190,8 @@ class StorageTest < Test::Unit::TestCase

  context "and remove" do
  setup do
- @s3_mock = stub
- @bucket_mock = stub
- RightAws::S3.expects(:new).with("12345", "54321", {}).returns(@s3_mock)
- @s3_mock.expects(:bucket).with("testing", true, "public-read").returns(@bucket_mock)
- @key_mock = stub
- @bucket_mock.expects(:key).at_least(2).returns(@key_mock)
- @key_mock.expects(:delete)
+ AWS::S3::S3Object.stubs(:exists?).returns(true)
+ AWS::S3::S3Object.stubs(:delete)
  @dummy.destroy_attached_files
  end

@@ -130,6 +201,84 @@ class StorageTest < Test::Unit::TestCase
  end
  end
  end
+
+ context "An attachment with S3 storage and bucket defined as a Proc" do
+ setup do
+ AWS::S3::Base.stubs(:establish_connection!)
+ rebuild_model :storage => :s3,
+ :bucket => lambda { |attachment| "bucket_#{attachment.instance.other}" },
+ :s3_credentials => {:not => :important}
+ end
+
+ should "get the right bucket name" do
+ assert "bucket_a", Dummy.new(:other => 'a').avatar.bucket_name
+ assert "bucket_b", Dummy.new(:other => 'b').avatar.bucket_name
+ end
+ end
+
+ context "An attachment with S3 storage and specific s3 headers set" do
+ setup do
+ AWS::S3::Base.stubs(:establish_connection!)
+ rebuild_model :storage => :s3,
+ :bucket => "testing",
+ :path => ":attachment/:style/:basename.:extension",
+ :s3_credentials => {
+ 'access_key_id' => "12345",
+ 'secret_access_key' => "54321"
+ },
+ :s3_headers => {'Cache-Control' => 'max-age=31557600'}
+ end
+
+ context "when assigned" do
+ setup do
+ @file = File.new(File.join(File.dirname(__FILE__), 'fixtures', '5k.png'), 'rb')
+ @dummy = Dummy.new
+ @dummy.avatar = @file
+ end
+
+ teardown { @file.close }
+
+ context "and saved" do
+ setup do
+ AWS::S3::Base.stubs(:establish_connection!)
+ AWS::S3::S3Object.stubs(:store).with(@dummy.avatar.path,
+ anything,
+ 'testing',
+ :content_type => 'image/png',
+ :access => :public_read,
+ 'Cache-Control' => 'max-age=31557600')
+ @dummy.save
+ end
+
+ should "succeed" do
+ assert true
+ end
+ end
+ end
+ end
+
+ context "with S3 credentials in a YAML file" do
+ setup do
+ ENV['S3_KEY'] = 'env_key'
+ ENV['S3_BUCKET'] = 'env_bucket'
+ ENV['S3_SECRET'] = 'env_secret'
+
+ Merb.env('test')
+
+ rebuild_model :storage => :s3,
+ :s3_credentials => File.new(File.join(File.dirname(__FILE__), "fixtures/s3.yml"))
+
+ Dummy.auto_migrate!
+
+ @dummy = Dummy.new
+ end
+
+ should "run it the file through ERB" do
+ assert_equal 'env_bucket', @dummy.avatar.bucket_name
+ assert_equal 'env_key', AWS::S3::Base.connection.options[:access_key_id]
+ assert_equal 'env_secret', AWS::S3::Base.connection.options[:secret_access_key]
+ end
+ end

  unless ENV["S3_TEST_BUCKET"].blank?
  context "Using S3 for real, an attachment with S3 storage" do
@@ -140,7 +289,7 @@ class StorageTest < Test::Unit::TestCase
  :path => ":class/:attachment/:id/:style.:extension",
  :s3_credentials => File.new(File.join(File.dirname(__FILE__), "s3.yml"))

- Dummy.delete_all
+ Dummy.auto_migrate!
  @dummy = Dummy.new
  end

@@ -150,12 +299,14 @@ class StorageTest < Test::Unit::TestCase

  context "when assigned" do
  setup do
- @file = File.new(File.join(File.dirname(__FILE__), 'fixtures', '5k.png'))
+ @file = File.new(File.join(File.dirname(__FILE__), 'fixtures', '5k.png'), 'rb')
  @dummy.avatar = @file
  end

- should "still return a Tempfile when sent #to_io" do
- assert_equal Tempfile, @dummy.avatar.to_io.class
+ teardown { @file.close }
+
+ should "still return a Tempfile when sent #to_file" do
+ assert_equal Tempfile, @dummy.avatar.to_file.class
  end

  context "and saved" do
@@ -170,4 +321,4 @@ class StorageTest < Test::Unit::TestCase
  end
  end
  end
- end
+ end
metadata CHANGED
@@ -4,9 +4,9 @@ version: !ruby/object:Gem::Version
  prerelease: false
  segments:
  - 2
- - 3
- - 1
- version: 2.3.1
+ - 4
+ - 0
+ version: 2.4.0
  platform: ruby
  authors:
  - Ken Robertson
@@ -14,10 +14,71 @@ autorequire:
  bindir: bin
  cert_chain: []

- date: 2010-03-31 00:00:00 -07:00
+ date: 2010-04-21 00:00:00 -07:00
  default_executable:
- dependencies: []
-
+ dependencies:
+ - !ruby/object:Gem::Dependency
+ name: shoulda
+ prerelease: false
+ requirement: &id001 !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ segments:
+ - 0
+ version: "0"
+ type: :development
+ version_requirements: *id001
+ - !ruby/object:Gem::Dependency
+ name: mocha
+ prerelease: false
+ requirement: &id002 !ruby/object:Gem::Requirement
+ requirements:
+ - - "="
+ - !ruby/object:Gem::Version
+ segments:
+ - 0
+ - 9
+ - 8
+ version: 0.9.8
+ type: :development
+ version_requirements: *id002
+ - !ruby/object:Gem::Dependency
+ name: aws-s3
+ prerelease: false
+ requirement: &id003 !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ segments:
+ - 0
+ version: "0"
+ type: :development
+ version_requirements: *id003
+ - !ruby/object:Gem::Dependency
+ name: datamapper
+ prerelease: false
+ requirement: &id004 !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ segments:
+ - 0
+ version: "0"
+ type: :development
+ version_requirements: *id004
+ - !ruby/object:Gem::Dependency
+ name: do_sqlite3
+ prerelease: false
+ requirement: &id005 !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ segments:
+ - 0
+ version: "0"
+ type: :development
+ version_requirements: *id005
  description:
  email: ken@invalidlogic.com
  executables: []
@@ -82,7 +143,6 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: "0"
  requirements:
  - ImageMagick
- - data_mapper
  rubyforge_project: dm-paperclip
  rubygems_version: 1.3.6
  signing_key: