dm-paperclip 2.4.1 → 2.5.0

Files changed (48)
  1. data/Gemfile +29 -0
  2. data/Gemfile.lock +100 -0
  3. data/README.md +145 -0
  4. data/Rakefile +37 -71
  5. data/VERSION +1 -0
  6. data/dm-paperclip.gemspec +103 -0
  7. data/lib/dm-paperclip.rb +88 -74
  8. data/lib/dm-paperclip/attachment.rb +139 -102
  9. data/lib/dm-paperclip/callbacks.rb +55 -0
  10. data/lib/dm-paperclip/command_line.rb +86 -0
  11. data/lib/dm-paperclip/ext/blank.rb +24 -0
  12. data/lib/dm-paperclip/ext/class.rb +50 -0
  13. data/lib/dm-paperclip/ext/compatibility.rb +11 -0
  14. data/lib/dm-paperclip/ext/try_dup.rb +12 -0
  15. data/lib/dm-paperclip/geometry.rb +3 -5
  16. data/lib/dm-paperclip/interpolations.rb +57 -32
  17. data/lib/dm-paperclip/iostream.rb +12 -26
  18. data/lib/dm-paperclip/processor.rb +14 -4
  19. data/lib/dm-paperclip/storage.rb +2 -257
  20. data/lib/dm-paperclip/storage/filesystem.rb +73 -0
  21. data/lib/dm-paperclip/storage/s3.rb +209 -0
  22. data/lib/dm-paperclip/storage/s3/aws_library.rb +41 -0
  23. data/lib/dm-paperclip/storage/s3/aws_s3_library.rb +60 -0
  24. data/lib/dm-paperclip/style.rb +90 -0
  25. data/lib/dm-paperclip/thumbnail.rb +33 -24
  26. data/lib/dm-paperclip/upfile.rb +13 -5
  27. data/lib/dm-paperclip/validations.rb +40 -37
  28. data/lib/dm-paperclip/version.rb +4 -0
  29. data/test/attachment_test.rb +510 -67
  30. data/test/command_line_test.rb +138 -0
  31. data/test/fixtures/s3.yml +8 -0
  32. data/test/fixtures/twopage.pdf +0 -0
  33. data/test/fixtures/uppercase.PNG +0 -0
  34. data/test/geometry_test.rb +54 -19
  35. data/test/helper.rb +91 -28
  36. data/test/integration_test.rb +252 -79
  37. data/test/interpolations_test.rb +150 -0
  38. data/test/iostream_test.rb +8 -15
  39. data/test/paperclip_test.rb +222 -69
  40. data/test/processor_test.rb +10 -0
  41. data/test/storage_test.rb +102 -23
  42. data/test/style_test.rb +141 -0
  43. data/test/thumbnail_test.rb +106 -18
  44. data/test/upfile_test.rb +36 -0
  45. metadata +136 -121
  46. data/README.rdoc +0 -116
  47. data/init.rb +0 -1
  48. data/lib/dm-paperclip/callback_compatability.rb +0 -33

data/lib/dm-paperclip.rb
@@ -40,10 +40,20 @@ module Paperclip
   # on this blog post:
   # http://marsorange.com/archives/of-mogrify-ruby-tempfile-dynamic-class-definitions
   class Tempfile < ::Tempfile
-    # Replaces Tempfile's +make_tmpname+ with one that honors file extensions.
-    def make_tmpname(basename, n)
-      extension = File.extname(basename)
-      sprintf("%s,%d,%d%s", File.basename(basename, extension), $$, n, extension)
+    # This is Ruby 1.8.7's implementation.
+    if RUBY_VERSION <= "1.8.6"
+      def make_tmpname(basename, n)
+        case basename
+        when Array
+          prefix, suffix = *basename
+        else
+          prefix, suffix = basename, ''
+        end
+
+        t = Time.now.strftime("%y%m%d")
+        path = "#{prefix}#{t}-#{$$}-#{rand(0x100000000).to_s(36)}-#{n}#{suffix}"
+      end
     end
   end
+
 end
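
The new make_tmpname override matters because the S3 backend (see data/lib/dm-paperclip/storage/s3.rb below) now builds temp files from a [basename, extension] pair, and only Ruby 1.8.7+ understands that form natively. A rough sketch of the effect, not taken from the gem's test suite (the "avatar.png" name is invented for illustration):

require 'dm-paperclip'

# Paperclip::Tempfile subclasses ::Tempfile; with the prefix/suffix form the
# generated path keeps the original extension, so extension-sniffing
# processors still work on the downloaded copy.
tmp = Paperclip::Tempfile.new(["avatar", ".png"])
puts File.extname(tmp.path)   # => ".png"
tmp.close!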

data/lib/dm-paperclip/storage.rb
@@ -1,257 +1,2 @@
-module Paperclip
-  module Storage
-
-    # The default place to store attachments is in the filesystem. Files on the local
-    # filesystem can be very easily served by Apache without requiring a hit to your app.
-    # They also can be processed more easily after they've been saved, as they're just
-    # normal files. There is one Filesystem-specific option for has_attached_file.
-    # * +path+: The location of the repository of attachments on disk. This can (and, in
-    #   almost all cases, should) be coordinated with the value of the +url+ option to
-    #   allow files to be saved into a place where Apache can serve them without
-    #   hitting your app. Defaults to
-    #   ":rails_root/public/:attachment/:id/:style/:basename.:extension"
-    #   By default this places the files in the app's public directory which can be served
-    #   directly. If you are using capistrano for deployment, a good idea would be to
-    #   make a symlink to the capistrano-created system directory from inside your app's
-    #   public directory.
-    #   See Paperclip::Attachment#interpolate for more information on variable interpolaton.
-    #   :path => "/var/app/attachments/:class/:id/:style/:basename.:extension"
-    module Filesystem
-      def self.extended base
-      end
-
-      def exists?(style_name = default_style)
-        if original_filename
-          File.exist?(path(style_name))
-        else
-          false
-        end
-      end
-
-      # Returns representation of the data of the file assigned to the given
-      # style, in the format most representative of the current storage.
-      def to_file style_name = default_style
-        @queued_for_write[style_name] || (File.new(path(style_name), 'rb') if exists?(style_name))
-      end
-
-      def flush_writes #:nodoc:
-        @queued_for_write.each do |style_name, file|
-          file.close
-          FileUtils.mkdir_p(File.dirname(path(style_name)))
-          log("saving #{path(style_name)}")
-          FileUtils.mv(file.path, path(style_name))
-          FileUtils.chmod(0644, path(style_name))
-        end
-        @queued_for_write = {}
-      end
-
-      def flush_deletes #:nodoc:
-        @queued_for_delete.each do |path|
-          begin
-            log("deleting #{path}")
-            FileUtils.rm(path) if File.exist?(path)
-          rescue Errno::ENOENT => e
-            # ignore file-not-found, let everything else pass
-          end
-          begin
-            while(true)
-              path = File.dirname(path)
-              FileUtils.rmdir(path)
-            end
-          rescue Errno::EEXIST, Errno::ENOTEMPTY, Errno::ENOENT, Errno::EINVAL, Errno::ENOTDIR
-            # Stop trying to remove parent directories
-          rescue SystemCallError => e
-            log("There was an unexpected error while deleting directories: #{e.class}")
-            # Ignore it
-          end
-        end
-        @queued_for_delete = []
-      end
-    end
-
-    # Amazon's S3 file hosting service is a scalable, easy place to store files for
-    # distribution. You can find out more about it at http://aws.amazon.com/s3
-    # There are a few S3-specific options for has_attached_file:
-    # * +s3_credentials+: Takes a path, a File, or a Hash. The path (or File) must point
-    #   to a YAML file containing the +access_key_id+ and +secret_access_key+ that Amazon
-    #   gives you. You can 'environment-space' this just like you do to your
-    #   database.yml file, so different environments can use different accounts:
-    #     development:
-    #       access_key_id: 123...
-    #       secret_access_key: 123...
-    #     test:
-    #       access_key_id: abc...
-    #       secret_access_key: abc...
-    #     production:
-    #       access_key_id: 456...
-    #       secret_access_key: 456...
-    #   This is not required, however, and the file may simply look like this:
-    #     access_key_id: 456...
-    #     secret_access_key: 456...
-    #   In which case, those access keys will be used in all environments. You can also
-    #   put your bucket name in this file, instead of adding it to the code directly.
-    #   This is useful when you want the same account but a different bucket for
-    #   development versus production.
-    # * +s3_permissions+: This is a String that should be one of the "canned" access
-    #   policies that S3 provides (more information can be found here:
-    #   http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html#RESTCannedAccessPolicies)
-    #   The default for Paperclip is :public_read.
-    # * +s3_protocol+: The protocol for the URLs generated to your S3 assets. Can be either
-    #   'http' or 'https'. Defaults to 'http' when your :s3_permissions are :public_read (the
-    #   default), and 'https' when your :s3_permissions are anything else.
-    # * +s3_headers+: A hash of headers such as {'Expires' => 1.year.from_now.httpdate}
-    # * +bucket+: This is the name of the S3 bucket that will store your files. Remember
-    #   that the bucket must be unique across all of Amazon S3. If the bucket does not exist
-    #   Paperclip will attempt to create it. The bucket name will not be interpolated.
-    #   You can define the bucket as a Proc if you want to determine it's name at runtime.
-    #   Paperclip will call that Proc with attachment as the only argument.
-    # * +s3_host_alias+: The fully-qualified domain name (FQDN) that is the alias to the
-    #   S3 domain of your bucket. Used with the :s3_alias_url url interpolation. See the
-    #   link in the +url+ entry for more information about S3 domains and buckets.
-    # * +url+: There are three options for the S3 url. You can choose to have the bucket's name
-    #   placed domain-style (bucket.s3.amazonaws.com) or path-style (s3.amazonaws.com/bucket).
-    #   Lastly, you can specify a CNAME (which requires the CNAME to be specified as
-    #   :s3_alias_url. You can read more about CNAMEs and S3 at
-    #   http://docs.amazonwebservices.com/AmazonS3/latest/index.html?VirtualHosting.html
-    #   Normally, this won't matter in the slightest and you can leave the default (which is
-    #   path-style, or :s3_path_url). But in some cases paths don't work and you need to use
-    #   the domain-style (:s3_domain_url). Anything else here will be treated like path-style.
-    #   NOTE: If you use a CNAME for use with CloudFront, you can NOT specify https as your
-    #   :s3_protocol; This is *not supported* by S3/CloudFront. Finally, when using the host
-    #   alias, the :bucket parameter is ignored, as the hostname is used as the bucket name
-    #   by S3.
-    # * +path+: This is the key under the bucket in which the file will be stored. The
-    #   URL will be constructed from the bucket and the path. This is what you will want
-    #   to interpolate. Keys should be unique, like filenames, and despite the fact that
-    #   S3 (strictly speaking) does not support directories, you can still use a / to
-    #   separate parts of your file name.
-    module S3
-      def self.extended base
-        begin
-          require 'aws/s3'
-        rescue LoadError => e
-          e.message << " (You may need to install the aws-s3 gem)"
-          raise e
-        end
-
-        base.instance_eval do
-          @s3_credentials = parse_credentials(@options[:s3_credentials])
-          @bucket = @options[:bucket] || @s3_credentials[:bucket]
-          @bucket = @bucket.call(self) if @bucket.is_a?(Proc)
-          @s3_options = @options[:s3_options] || {}
-          @s3_permissions = @options[:s3_permissions] || :public_read
-          @s3_protocol = @options[:s3_protocol] || (@s3_permissions == :public_read ? 'http' : 'https')
-          @s3_headers = @options[:s3_headers] || {}
-          @s3_host_alias = @options[:s3_host_alias]
-          @url = ":s3_path_url" unless @url.to_s.match(/^:s3.*url$/)
-          AWS::S3::Base.establish_connection!( @s3_options.merge(
-            :access_key_id => @s3_credentials[:access_key_id],
-            :secret_access_key => @s3_credentials[:secret_access_key]
-          ))
-        end
-        Paperclip.interpolates(:s3_alias_url) do |attachment, style|
-          "#{attachment.s3_protocol}://#{attachment.s3_host_alias}/#{attachment.path(style).gsub(%r{^/}, "")}"
-        end
-        Paperclip.interpolates(:s3_path_url) do |attachment, style|
-          "#{attachment.s3_protocol}://s3.amazonaws.com/#{attachment.bucket_name}/#{attachment.path(style).gsub(%r{^/}, "")}"
-        end
-        Paperclip.interpolates(:s3_domain_url) do |attachment, style|
-          "#{attachment.s3_protocol}://#{attachment.bucket_name}.s3.amazonaws.com/#{attachment.path(style).gsub(%r{^/}, "")}"
-        end
-      end
-
-      def expiring_url(time = 3600)
-        AWS::S3::S3Object.url_for(path, bucket_name, :expires_in => time )
-      end
-
-      def bucket_name
-        @bucket
-      end
-
-      def s3_host_alias
-        @s3_host_alias
-      end
-
-      def parse_credentials creds
-        creds = find_credentials(creds).to_mash.stringify_keys!
-        if defined? Merb && Merb.respond_to?(:env)
-          (creds[Merb.env] || creds).symbolize_keys
-        elsif defined? RAILS_ENV
-          (creds[RAILS_ENV] || creds).symbolize_keys
-        elsif defined? Rails && Rails.respond_to(:env)
-          (creds[Rails.env] || creds).symbolize_keys
-        elsif defined? RACK_ENV
-          (creds[RACK_ENV] || creds).symbolize_keys
-        else
-          creds.symbolize_keys
-        end
-      end
-
-      def exists?(style = default_style)
-        if original_filename
-          AWS::S3::S3Object.exists?(path(style), bucket_name)
-        else
-          false
-        end
-      end
-
-      def s3_protocol
-        @s3_protocol
-      end
-
-      # Returns representation of the data of the file assigned to the given
-      # style, in the format most representative of the current storage.
-      def to_file style = default_style
-        return @queued_for_write[style] if @queued_for_write[style]
-        file = Tempfile.new(path(style))
-        file.write(AWS::S3::S3Object.value(path(style), bucket_name))
-        file.rewind
-        return file
-      end
-
-      def flush_writes #:nodoc:
-        @queued_for_write.each do |style, file|
-          begin
-            log("saving #{path(style)}")
-            AWS::S3::S3Object.store(path(style),
-                                    file,
-                                    bucket_name,
-                                    {:content_type => instance_read(:content_type),
-                                     :access => @s3_permissions,
-                                    }.merge(@s3_headers))
-          rescue AWS::S3::ResponseError => e
-            raise
-          end
-        end
-        @queued_for_write = {}
-      end
-
-      def flush_deletes #:nodoc:
-        @queued_for_delete.each do |path|
-          begin
-            log("deleting #{path}")
-            AWS::S3::S3Object.delete(path, bucket_name)
-          rescue AWS::S3::ResponseError
-            # Ignore this.
-          end
-        end
-        @queued_for_delete = []
-      end
-
-      def find_credentials creds
-        case creds
-        when File
-          YAML::load(ERB.new(File.read(creds.path)).result)
-        when String
-          YAML::load(ERB.new(File.read(creds)).result)
-        when Hash
-          creds
-        else
-          raise ArgumentError, "Credentials are not a path, file, or hash."
-        end
-      end
-      private :find_credentials
-
-    end
-  end
-end
+require 'dm-paperclip/storage/filesystem'
+require 'dm-paperclip/storage/s3'

data/lib/dm-paperclip/storage/filesystem.rb
@@ -0,0 +1,73 @@
+module Paperclip
+  module Storage
+    # The default place to store attachments is in the filesystem. Files on the local
+    # filesystem can be very easily served by Apache without requiring a hit to your app.
+    # They also can be processed more easily after they've been saved, as they're just
+    # normal files. There is one Filesystem-specific option for has_attached_file.
+    # * +path+: The location of the repository of attachments on disk. This can (and, in
+    #   almost all cases, should) be coordinated with the value of the +url+ option to
+    #   allow files to be saved into a place where Apache can serve them without
+    #   hitting your app. Defaults to
+    #   ":rails_root/public/:attachment/:id/:style/:basename.:extension"
+    #   By default this places the files in the app's public directory which can be served
+    #   directly. If you are using capistrano for deployment, a good idea would be to
+    #   make a symlink to the capistrano-created system directory from inside your app's
+    #   public directory.
+    #   See Paperclip::Attachment#interpolate for more information on variable interpolaton.
+    #   :path => "/var/app/attachments/:class/:id/:style/:basename.:extension"
+    module Filesystem
+      def self.extended base
+      end
+
+      def exists?(style_name = default_style)
+        if original_filename
+          File.exist?(path(style_name))
+        else
+          false
+        end
+      end
+
+      # Returns representation of the data of the file assigned to the given
+      # style, in the format most representative of the current storage.
+      def to_file style_name = default_style
+        @queued_for_write[style_name] || (File.new(path(style_name), 'rb') if exists?(style_name))
+      end
+
+      def flush_writes #:nodoc:
+        @queued_for_write.each do |style_name, file|
+          file.close
+          FileUtils.mkdir_p(File.dirname(path(style_name)))
+          log("saving #{path(style_name)}")
+          FileUtils.mv(file.path, path(style_name))
+          FileUtils.chmod(0644, path(style_name))
+        end
+        @queued_for_write = {}
+      end
+
+      def flush_deletes #:nodoc:
+        @queued_for_delete.each do |path|
+          begin
+            log("deleting #{path}")
+            FileUtils.rm(path) if File.exist?(path)
+          rescue Errno::ENOENT => e
+            # ignore file-not-found, let everything else pass
+          end
+          begin
+            while(true)
+              path = File.dirname(path)
+              FileUtils.rmdir(path)
+              break if File.exists?(path) # Ruby 1.9.2 does not raise if the removal failed.
+            end
+          rescue Errno::EEXIST, Errno::ENOTEMPTY, Errno::ENOENT, Errno::EINVAL, Errno::ENOTDIR
+            # Stop trying to remove parent directories
+          rescue SystemCallError => e
+            log("There was an unexpected error while deleting directories: #{e.class}")
+            # Ignore it
+          end
+        end
+        @queued_for_delete = []
+      end
+    end
+
+  end
+end
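
To make the :path/:url coordination described in the comments concrete, here is a hedged sketch of a DataMapper model using the filesystem backend. The Image model, attachment name, and paths are invented for illustration, and it assumes the backend is selected with :storage => :filesystem as in upstream Paperclip; the interpolation tokens are the ones shown in the documentation above.

require 'dm-core'
require 'dm-paperclip'

class Image
  include DataMapper::Resource
  include Paperclip::Resource

  property :id, Serial

  # :path is where the file lands on disk; :url is what the app links to.
  # Keeping them coordinated lets Apache serve the file without hitting the app.
  has_attached_file :photo,
    :storage => :filesystem,
    :path    => "/var/app/attachments/:class/:id/:style/:basename.:extension",
    :url     => "/attachments/:class/:id/:style/:basename.:extension"
end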

data/lib/dm-paperclip/storage/s3.rb
@@ -0,0 +1,209 @@
+require 'dm-paperclip/storage/s3/aws_s3_library'
+require 'dm-paperclip/storage/s3/aws_library'
+
+module Paperclip
+  module Storage
+    # Amazon's S3 file hosting service is a scalable, easy place to store
+    # files for distribution. You can find out more about it at
+    # http://aws.amazon.com/s3. There are a few S3-specific options for
+    # +has_attached_file+:
+    # * +s3_credentials+: Takes a path, a File, or a Hash. The path
+    #   (or File) must point to a YAML file containing the
+    #   +access_key_id+ and +secret_access_key+ that Amazon gives you.
+    #   You can 'environment-space' this just like you do to your
+    #   +database.yml+ file, so different environments can use different
+    #   accounts:
+    #
+    #     development:
+    #       access_key_id: 123...
+    #       secret_access_key: 123...
+    #     test:
+    #       access_key_id: abc...
+    #       secret_access_key: abc...
+    #     production:
+    #       access_key_id: 456...
+    #       secret_access_key: 456...
+    #
+    #   This is not required, however, and the file may simply look like
+    #   this:
+    #
+    #     access_key_id: 456...
+    #     secret_access_key: 456...
+    #
+    #   In which case, those access keys will be used in all environments.
+    #   You can also put your bucket name in this file, instead of adding
+    #   it to the code directly. This is useful when you want the same
+    #   account but a different bucket for development versus production.
+    # * +s3_permissions+: This is a String that should be one of the
+    #   "canned" access policies that S3 provides (more information can be
+    #   found here: http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html#RESTCannedAccessPolicies).
+    #   The default for Paperclip is +:public_read+.
+    # * +s3_protocol+: The protocol for the URLs generated to your S3
+    #   assets. Can be either 'http' or 'https'. Defaults to 'http' when
+    #   your +:s3_permissions+ are +:public_read+ (the default), and 'https'
+    #   when your +:s3_permissions+ are anything else.
+    # * +s3_headers+: A hash of headers such as:
+    #
+    #     {'Expires' => 1.year.from_now.httpdate}
+    #
+    # * +bucket+: This is the name of the S3 bucket that will store your
+    #   files. Remember that the bucket must be unique across all of
+    #   Amazon S3. If the bucket does not exist Paperclip will attempt to
+    #   create it. The bucket name will not be interpolated. You can define
+    #   the bucket as a Proc if you want to determine it's name at runtime.
+    #   Paperclip will call that Proc with attachment as the only argument.
+    # * +s3_host_alias+: The fully-qualified domain name (FQDN) that is the
+    #   alias to the S3 domain of your bucket. Used with the +:s3_alias_url+
+    #   url interpolation. See the link in the +url+ entry for more
+    #   information about S3 domains and buckets.
+    # * +url+: There are three options for the S3 url. You can choose to
+    #   have the bucket's name placed domain-style
+    #   (bucket.s3.amazonaws.com) or path-style (s3.amazonaws.com/bucket).
+    #   Lastly, you can specify a CNAME (which requires the CNAME to be
+    #   specified as +:s3_alias_url+. You can read more about CNAMEs and S3
+    #   at http://docs.amazonwebservices.com/AmazonS3/latest/index.html?VirtualHosting.html.
+    #   Normally, this won't matter in the slightest and you can leave the
+    #   default (which is path-style, or +:s3_path_url+). But in some cases
+    #   paths don't work and you need to use the domain-style
+    #   (+:s3_domain_url+). Anything else here will be treated like
+    #   path-style.
+    #   NOTE: If you use a CNAME for use with CloudFront, you can NOT
+    #   specify https as your +:s3_protocol+; This is *not supported* by
+    #   S3/CloudFront. Finally, when using the host alias, the +:bucket+
+    #   parameter is ignored, as the hostname is used as the bucket name
+    #   by S3.
+    # * +path+: This is the key under the bucket in which the file will be
+    #   stored. The URL will be constructed from the bucket and the path.
+    #   This is what you will want to interpolate. Keys should be unique,
+    #   like filenames, and despite the fact that S3 (strictly speaking)
+    #   does not support directories, you can still use a / to separate
+    #   parts of your file name.
+    module S3
+      # Libraries and mixins that provide S3 support
+      LIBRARIES = {
+        'aws/s3' => AwsS3Library,
+        'right_aws' => AwsLibrary,
+        'aws' => AwsLibrary
+      }
+
+      def self.extended(base)
+        # attempt to load one of the S3 libraries
+        s3_detected = LIBRARIES.any? do |path,mixin|
+          begin
+            require path
+
+            base.send :extend, mixin
+            true
+          rescue LoadError => e
+            false
+          end
+        end
+
+        unless s3_detected
+          raise(LoadError,"unable to load any S3 library (#{LIBRARIES.keys.join(', ')})",caller)
+        end
+
+        base.instance_eval do
+          @s3_credentials = parse_credentials(@options[:s3_credentials])
+          @bucket = @options[:bucket] || @s3_credentials[:bucket]
+          @bucket = @bucket.call(self) if @bucket.is_a?(Proc)
+          @s3_options = @options[:s3_options] || {}
+          @s3_permissions = @options[:s3_permissions] || :public_read
+          @s3_protocol = @options[:s3_protocol] || (@s3_permissions == :public_read ? 'http' : 'https')
+          @s3_headers = @options[:s3_headers] || {}
+          @s3_host_alias = @options[:s3_host_alias]
+          unless @url.to_s.match(/^:s3.*url$/)
+            @path = @path.gsub(/:url/, @url)
+            @url = ":s3_path_url"
+          end
+
+          s3_connect!
+        end
+        Paperclip.interpolates(:s3_alias_url) do |attachment, style|
+          "#{attachment.s3_protocol}://#{attachment.s3_host_alias}/#{attachment.path(style).gsub(%r{^/}, "")}"
+        end unless Paperclip::Interpolations.respond_to? :s3_alias_url
+        Paperclip.interpolates(:s3_path_url) do |attachment, style|
+          "#{attachment.s3_protocol}://s3.amazonaws.com/#{attachment.bucket_name}/#{attachment.path(style).gsub(%r{^/}, "")}"
+        end unless Paperclip::Interpolations.respond_to? :s3_path_url
+        Paperclip.interpolates(:s3_domain_url) do |attachment, style|
+          "#{attachment.s3_protocol}://#{attachment.bucket_name}.s3.amazonaws.com/#{attachment.path(style).gsub(%r{^/}, "")}"
+        end unless Paperclip::Interpolations.respond_to? :s3_domain_url
+      end
+
+      def expiring_url(time = 3600)
+        s3_expiring_url(path, time)
+      end
+
+      def bucket_name
+        @bucket
+      end
+
+      def s3_host_alias
+        @s3_host_alias
+      end
+
+      def parse_credentials(creds)
+        creds = DataMapper::Mash.new(find_credentials(creds)).stringify_keys!
+        (creds[Paperclip.config.env] || creds).symbolize_keys
+      end
+
+      def exists?(style = default_style)
+        if original_filename
+          s3_exists?(path(style))
+        else
+          false
+        end
+      end
+
+      def s3_protocol
+        @s3_protocol
+      end
+
+      # Returns representation of the data of the file assigned to the given
+      # style, in the format most representative of the current storage.
+      def to_file style = default_style
+        return @queued_for_write[style] if @queued_for_write[style]
+        filename = path(style)
+        extname = File.extname(filename)
+        basename = File.basename(filename, extname)
+        file = Tempfile.new([basename, extname])
+        file.binmode
+        s3_download(filename,file)
+        file.rewind
+        return file
+      end
+
+      def flush_writes #:nodoc:
+        @queued_for_write.each do |style, file|
+          log("saving #{path(style)}")
+          s3_store(path(style),file)
+        end
+
+        @queued_for_write = {}
+      end
+
+      def flush_deletes #:nodoc:
+        @queued_for_delete.each do |path|
+          log("deleting #{path}")
+          s3_delete(path)
+        end
+        @queued_for_delete = []
+      end
+
+      def find_credentials creds
+        case creds
+        when File
+          YAML::load(ERB.new(File.read(creds.path)).result)
+        when String, Pathname
+          YAML::load(ERB.new(File.read(creds)).result)
+        when Hash
+          creds
+        else
+          raise ArgumentError, "Credentials are not a path, file, or hash."
+        end
+      end
+      private :find_credentials
+
+    end
+  end
+end
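
Putting the S3 options together, here is a hedged sketch of an environment-spaced credentials file and a matching has_attached_file call. The Document model, bucket names, host alias, and config/s3.yml path are invented for illustration, and :storage => :s3 plus Paperclip::Resource follow the upstream Paperclip conventions; in 2.5.0 the environment block is chosen via Paperclip.config.env (see parse_credentials above), and any of the aws-s3, right_aws, or aws gems will satisfy the backend.

require 'time'
require 'dm-core'
require 'dm-paperclip'

# config/s3.yml (illustrative), environment-spaced like database.yml:
#
#   development:
#     access_key_id: AKIA...DEV
#     secret_access_key: ...
#     bucket: myapp-dev
#   production:
#     access_key_id: AKIA...PROD
#     secret_access_key: ...
#     bucket: myapp-assets
class Document
  include DataMapper::Resource
  include Paperclip::Resource

  property :id, Serial

  has_attached_file :asset,
    :storage        => :s3,
    :s3_credentials => "config/s3.yml",                       # path, File, or Hash
    :s3_permissions => :public_read,                          # the default canned ACL
    :s3_headers     => { 'Expires' => (Time.now + 31_536_000).httpdate },
    :s3_host_alias  => "assets.example.com",                  # pairs with :s3_alias_url
    :url            => ":s3_alias_url",
    :path           => ":class/:id/:style/:basename.:extension"
end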