paperclip-cloudfiles 2.3.1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. data/LICENSE +26 -0
  2. data/README.rdoc +176 -0
  3. data/Rakefile +105 -0
  4. data/generators/paperclip/USAGE +5 -0
  5. data/generators/paperclip/paperclip_generator.rb +27 -0
  6. data/generators/paperclip/templates/paperclip_migration.rb.erb +19 -0
  7. data/init.rb +1 -0
  8. data/lib/paperclip.rb +356 -0
  9. data/lib/paperclip/attachment.rb +414 -0
  10. data/lib/paperclip/callback_compatability.rb +33 -0
  11. data/lib/paperclip/geometry.rb +115 -0
  12. data/lib/paperclip/interpolations.rb +108 -0
  13. data/lib/paperclip/iostream.rb +59 -0
  14. data/lib/paperclip/matchers.rb +4 -0
  15. data/lib/paperclip/matchers/have_attached_file_matcher.rb +49 -0
  16. data/lib/paperclip/matchers/validate_attachment_content_type_matcher.rb +65 -0
  17. data/lib/paperclip/matchers/validate_attachment_presence_matcher.rb +48 -0
  18. data/lib/paperclip/matchers/validate_attachment_size_matcher.rb +85 -0
  19. data/lib/paperclip/processor.rb +49 -0
  20. data/lib/paperclip/storage.rb +369 -0
  21. data/lib/paperclip/thumbnail.rb +73 -0
  22. data/lib/paperclip/upfile.rb +49 -0
  23. data/shoulda_macros/paperclip.rb +117 -0
  24. data/tasks/paperclip_tasks.rake +79 -0
  25. data/test/attachment_test.rb +780 -0
  26. data/test/cloudfiles.yml +13 -0
  27. data/test/database.yml +4 -0
  28. data/test/fixtures/12k.png +0 -0
  29. data/test/fixtures/50x50.png +0 -0
  30. data/test/fixtures/5k.png +0 -0
  31. data/test/fixtures/bad.png +1 -0
  32. data/test/fixtures/s3.yml +8 -0
  33. data/test/fixtures/text.txt +0 -0
  34. data/test/fixtures/twopage.pdf +0 -0
  35. data/test/geometry_test.rb +177 -0
  36. data/test/helper.rb +111 -0
  37. data/test/integration_test.rb +483 -0
  38. data/test/interpolations_test.rb +124 -0
  39. data/test/iostream_test.rb +78 -0
  40. data/test/matchers/have_attached_file_matcher_test.rb +21 -0
  41. data/test/matchers/validate_attachment_content_type_matcher_test.rb +31 -0
  42. data/test/matchers/validate_attachment_presence_matcher_test.rb +23 -0
  43. data/test/matchers/validate_attachment_size_matcher_test.rb +51 -0
  44. data/test/paperclip_test.rb +319 -0
  45. data/test/processor_test.rb +10 -0
  46. data/test/storage_test.rb +549 -0
  47. data/test/thumbnail_test.rb +227 -0
  48. data/test/upfile_test.rb +28 -0
  49. metadata +175 -0
data/lib/paperclip/storage.rb
@@ -0,0 +1,369 @@
+ module Paperclip
+   module Storage
+
+     # The default place to store attachments is in the filesystem. Files on the local
+     # filesystem can be very easily served by Apache without requiring a hit to your app.
+     # They also can be processed more easily after they've been saved, as they're just
+     # normal files. There is one Filesystem-specific option for has_attached_file.
+     # * +path+: The location of the repository of attachments on disk. This can (and, in
+     #   almost all cases, should) be coordinated with the value of the +url+ option to
+     #   allow files to be saved into a place where Apache can serve them without
+     #   hitting your app. Defaults to
+     #   ":rails_root/public/:attachment/:id/:style/:basename.:extension"
+     #   By default this places the files in the app's public directory which can be served
+     #   directly. If you are using capistrano for deployment, a good idea would be to
+     #   make a symlink to the capistrano-created system directory from inside your app's
+     #   public directory.
+     #   See Paperclip::Attachment#interpolate for more information on variable interpolation.
+     #   :path => "/var/app/attachments/:class/:id/:style/:basename.:extension"
+     module Filesystem
+       def self.extended base
+       end
+
+       def exists?(style = default_style)
+         if original_filename
+           File.exist?(path(style))
+         else
+           false
+         end
+       end
+
+       # Returns representation of the data of the file assigned to the given
+       # style, in the format most representative of the current storage.
+       def to_file style = default_style
+         @queued_for_write[style] || (File.new(path(style), 'rb') if exists?(style))
+       end
+
+       def flush_writes #:nodoc:
+         @queued_for_write.each do |style, file|
+           file.close
+           FileUtils.mkdir_p(File.dirname(path(style)))
+           log("saving #{path(style)}")
+           FileUtils.mv(file.path, path(style))
+           FileUtils.chmod(0644, path(style))
+         end
+         @queued_for_write = {}
+       end
+
+       def flush_deletes #:nodoc:
+         @queued_for_delete.each do |path|
+           begin
+             log("deleting #{path}")
+             FileUtils.rm(path) if File.exist?(path)
+           rescue Errno::ENOENT => e
+             # ignore file-not-found, let everything else pass
+           end
+           begin
+             while(true)
+               path = File.dirname(path)
+               FileUtils.rmdir(path)
+             end
+           rescue Errno::EEXIST, Errno::ENOTEMPTY, Errno::ENOENT, Errno::EINVAL, Errno::ENOTDIR
+             # Stop trying to remove parent directories
+           rescue SystemCallError => e
+             log("There was an unexpected error while deleting directories: #{e.class}")
+             # Ignore it
+           end
+         end
+         @queued_for_delete = []
+       end
+     end
+
+     # Amazon's S3 file hosting service is a scalable, easy place to store files for
+     # distribution. You can find out more about it at http://aws.amazon.com/s3
+     # There are a few S3-specific options for has_attached_file:
+     # * +s3_credentials+: Takes a path, a File, or a Hash. The path (or File) must point
+     #   to a YAML file containing the +access_key_id+ and +secret_access_key+ that Amazon
+     #   gives you. You can 'environment-space' this just like you do to your
+     #   database.yml file, so different environments can use different accounts:
+     #     development:
+     #       access_key_id: 123...
+     #       secret_access_key: 123...
+     #     test:
+     #       access_key_id: abc...
+     #       secret_access_key: abc...
+     #     production:
+     #       access_key_id: 456...
+     #       secret_access_key: 456...
+     #   This is not required, however, and the file may simply look like this:
+     #     access_key_id: 456...
+     #     secret_access_key: 456...
+     #   In which case, those access keys will be used in all environments. You can also
+     #   put your bucket name in this file, instead of adding it to the code directly.
+     #   This is useful when you want the same account but a different bucket for
+     #   development versus production.
+     # * +s3_permissions+: This is a String that should be one of the "canned" access
+     #   policies that S3 provides (more information can be found here:
+     #   http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html#RESTCannedAccessPolicies)
+     #   The default for Paperclip is :public_read.
+     # * +s3_protocol+: The protocol for the URLs generated to your S3 assets. Can be either
+     #   'http' or 'https'. Defaults to 'http' when your :s3_permissions are :public_read (the
+     #   default), and 'https' when your :s3_permissions are anything else.
+     # * +s3_headers+: A hash of headers such as {'Expires' => 1.year.from_now.httpdate}
+     # * +bucket+: This is the name of the S3 bucket that will store your files. Remember
+     #   that the bucket must be unique across all of Amazon S3. If the bucket does not exist
+     #   Paperclip will attempt to create it. The bucket name will not be interpolated.
+     #   You can define the bucket as a Proc if you want to determine its name at runtime.
+     #   Paperclip will call that Proc with attachment as the only argument.
+     # * +s3_host_alias+: The fully-qualified domain name (FQDN) that is the alias to the
+     #   S3 domain of your bucket. Used with the :s3_alias_url url interpolation. See the
+     #   link in the +url+ entry for more information about S3 domains and buckets.
+     # * +url+: There are three options for the S3 url. You can choose to have the bucket's name
+     #   placed domain-style (bucket.s3.amazonaws.com) or path-style (s3.amazonaws.com/bucket).
+     #   Lastly, you can specify a CNAME (which requires the CNAME to be specified as
+     #   :s3_alias_url). You can read more about CNAMEs and S3 at
+     #   http://docs.amazonwebservices.com/AmazonS3/latest/index.html?VirtualHosting.html
+     #   Normally, this won't matter in the slightest and you can leave the default (which is
+     #   path-style, or :s3_path_url). But in some cases paths don't work and you need to use
+     #   the domain-style (:s3_domain_url). Anything else here will be treated like path-style.
+     #   NOTE: If you use a CNAME for use with CloudFront, you can NOT specify https as your
+     #   :s3_protocol; this is *not supported* by S3/CloudFront. Finally, when using the host
+     #   alias, the :bucket parameter is ignored, as the hostname is used as the bucket name
+     #   by S3.
+     # * +path+: This is the key under the bucket in which the file will be stored. The
+     #   URL will be constructed from the bucket and the path. This is what you will want
+     #   to interpolate. Keys should be unique, like filenames, and despite the fact that
+     #   S3 (strictly speaking) does not support directories, you can still use a / to
+     #   separate parts of your file name.
+     module S3
+       def self.extended base
+         begin
+           require 'aws/s3'
+         rescue LoadError => e
+           e.message << " (You may need to install the aws-s3 gem)"
+           raise e
+         end
+
+         base.instance_eval do
+           @s3_credentials = parse_credentials(@options[:s3_credentials])
+           @bucket         = @options[:bucket] || @s3_credentials[:bucket]
+           @bucket         = @bucket.call(self) if @bucket.is_a?(Proc)
+           @s3_options     = @options[:s3_options] || {}
+           @s3_permissions = @options[:s3_permissions] || :public_read
+           @s3_protocol    = @options[:s3_protocol] || (@s3_permissions == :public_read ? 'http' : 'https')
+           @s3_headers     = @options[:s3_headers] || {}
+           @s3_host_alias  = @options[:s3_host_alias]
+           @url            = ":s3_path_url" unless @url.to_s.match(/^:s3.*url$/)
+           AWS::S3::Base.establish_connection!( @s3_options.merge(
+             :access_key_id => @s3_credentials[:access_key_id],
+             :secret_access_key => @s3_credentials[:secret_access_key]
+           ))
+         end
+         Paperclip.interpolates(:s3_alias_url) do |attachment, style|
+           "#{attachment.s3_protocol}://#{attachment.s3_host_alias}/#{attachment.path(style).gsub(%r{^/}, "")}"
+         end
+         Paperclip.interpolates(:s3_path_url) do |attachment, style|
+           "#{attachment.s3_protocol}://s3.amazonaws.com/#{attachment.bucket_name}/#{attachment.path(style).gsub(%r{^/}, "")}"
+         end
+         Paperclip.interpolates(:s3_domain_url) do |attachment, style|
+           "#{attachment.s3_protocol}://#{attachment.bucket_name}.s3.amazonaws.com/#{attachment.path(style).gsub(%r{^/}, "")}"
+         end
+       end
+
+       def expiring_url(time = 3600)
+         AWS::S3::S3Object.url_for(path, bucket_name, :expires_in => time )
+       end
+
+       def bucket_name
+         @bucket
+       end
+
+       def s3_host_alias
+         @s3_host_alias
+       end
+
+       def parse_credentials creds
+         creds = find_credentials(creds).stringify_keys
+         (creds[RAILS_ENV] || creds).symbolize_keys
+       end
+
+       def exists?(style = default_style)
+         if original_filename
+           AWS::S3::S3Object.exists?(path(style), bucket_name)
+         else
+           false
+         end
+       end
+
+       def s3_protocol
+         @s3_protocol
+       end
+
+       # Returns representation of the data of the file assigned to the given
+       # style, in the format most representative of the current storage.
+       def to_file style = default_style
+         return @queued_for_write[style] if @queued_for_write[style]
+         file = Tempfile.new(path(style))
+         file.write(AWS::S3::S3Object.value(path(style), bucket_name))
+         file.rewind
+         return file
+       end
+
+       def flush_writes #:nodoc:
+         @queued_for_write.each do |style, file|
+           begin
+             log("saving #{path(style)}")
+             AWS::S3::S3Object.store(path(style),
+                                     file,
+                                     bucket_name,
+                                     {:content_type => instance_read(:content_type),
+                                      :access => @s3_permissions,
+                                     }.merge(@s3_headers))
+           rescue AWS::S3::ResponseError => e
+             raise
+           end
+         end
+         @queued_for_write = {}
+       end
+
+       def flush_deletes #:nodoc:
+         @queued_for_delete.each do |path|
+           begin
+             log("deleting #{path}")
+             AWS::S3::S3Object.delete(path, bucket_name)
+           rescue AWS::S3::ResponseError
+             # Ignore this.
+           end
+         end
+         @queued_for_delete = []
+       end
+
+       def find_credentials creds
+         case creds
+         when File
+           YAML::load(ERB.new(File.read(creds.path)).result)
+         when String
+           YAML::load(ERB.new(File.read(creds)).result)
+         when Hash
+           creds
+         else
+           raise ArgumentError, "Credentials are not a path, file, or hash."
+         end
+       end
+       private :find_credentials
+
+     end
+
+     # Rackspace's Cloud Files service is a scalable, easy place to store files for
+     # distribution, and is integrated into the Limelight CDN. You can find out more about
+     # it at http://www.rackspacecloud.com/cloud_hosting_products/files
+     #
+     # To install the Cloud Files gem, add the github gem source ("gem sources -a http://gems.github.com"), then
+     # do a "gem install rackspace-cloudfiles". For more information, see the github repository at http://github.com/rackspace/ruby-cloudfiles/
+     #
+     # There are a few Cloud Files-specific options for has_attached_file:
+     # * +cloudfiles_credentials+: Takes a path, a File, or a Hash. The path (or File) must point
+     #   to a YAML file containing the +username+ and +api_key+ that Rackspace
+     #   gives you. Rackspace customers using the cloudfiles gem >= 1.4.1 can also set a servicenet
+     #   variable to true to send traffic over the unbilled internal Rackspace service network.
+     #   You can 'environment-space' this just like you do to your
+     #   database.yml file, so different environments can use different accounts:
+     #     development:
+     #       username: hayley
+     #       api_key: a7f...
+     #     test:
+     #       username: katherine
+     #       api_key: 7fa...
+     #     production:
+     #       username: minter
+     #       api_key: 87k...
+     #       servicenet: true
+     #   This is not required, however, and the file may simply look like this:
+     #     username: minter...
+     #     api_key: 11q...
+     #   In which case, those access keys will be used in all environments. You can also
+     #   put your container name in this file, instead of adding it to the code directly.
+     #   This is useful when you want the same account but a different container for
+     #   development versus production.
+     # * +container+: This is the name of the Cloud Files container that will store your files.
+     #   This container should be marked "public" so that the files are available to the world at large.
+     #   If the container does not exist, it will be created and marked public.
+     # * +path+: This is the path under the container in which the file will be stored. The
+     #   CDN URL will be constructed from the CDN identifier for the container and the path. This is what
+     #   you will want to interpolate. Keys should be unique, like filenames, and despite the fact that
+     #   Cloud Files (strictly speaking) does not support directories, you can still use a / to
+     #   separate parts of your file name, and they will show up in the URL structure.
+     module CloudFile
+       def self.extended base
+         require 'cloudfiles'
+         base.instance_eval do
+           @cloudfiles_credentials = parse_credentials(@options[:cloudfiles_credentials])
+           @container_name         = @options[:container] || @cloudfiles_credentials[:container]
+           @cloudfiles_options     = @options[:cloudfiles_options] || {}
+           @@cdn_url             ||= cloudfiles_container.cdn_url
+           @path_filename          = ":cf_path_filename" unless @url.to_s.match(/^:cf.*filename$/)
+           @url                    = @@cdn_url + "/#{URI.encode(@path_filename).gsub(/&/,'%26')}"
+           @path                   = Paperclip::Attachment.default_options[:path] == @options[:path] ? ":attachment/:id/:style/:basename.:extension" : @options[:path]
+         end
+         Paperclip.interpolates(:cf_path_filename) do |attachment, style|
+           attachment.path(style)
+         end
+       end
+
+       def cloudfiles
+         @@cf ||= CloudFiles::Connection.new(@cloudfiles_credentials[:username], @cloudfiles_credentials[:api_key], true, @cloudfiles_credentials[:servicenet])
+       end
+
+       def cloudfiles_container
+         if @container
+           @container
+         else
+           @container = cloudfiles.create_container(@container_name)
+           @container.make_public
+           @container
+         end
+       end
+
+       def container_name
+         @container_name
+       end
+
+       def parse_credentials creds
+         creds = find_credentials(creds).stringify_keys
+         (creds[RAILS_ENV] || creds).symbolize_keys
+       end
+
+       def exists?(style = default_style)
+         cloudfiles_container.object_exists?(path(style))
+       end
+
+       # Returns representation of the data of the file assigned to the given
+       # style, in the format most representative of the current storage.
+       def to_file style = default_style
+         @queued_for_write[style] || cloudfiles_container.create_object(path(style))
+       end
+       alias_method :to_io, :to_file
+
+       def flush_writes #:nodoc:
+         @queued_for_write.each do |style, file|
+           object = cloudfiles_container.create_object(path(style),false)
+           object.write(file)
+         end
+         @queued_for_write = {}
+       end
+
+       def flush_deletes #:nodoc:
+         @queued_for_delete.each do |path|
+           cloudfiles_container.delete_object(path)
+         end
+         @queued_for_delete = []
+       end
+
+       def find_credentials creds
+         case creds
+         when File
+           YAML.load_file(creds.path)
+         when String
+           YAML.load_file(creds)
+         when Hash
+           creds
+         else
+           raise ArgumentError, "Credentials are not a path, file, or hash."
+         end
+       end
+       private :find_credentials
+
+     end
+
+   end
+ end
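
The comments above describe the three storage backends this file provides. As a rough sketch of how the Cloud Files backend is typically wired into a model, the snippet below uses only option names documented in the comments (:cloudfiles_credentials, :container, :path); the config/cloudfiles.yml location, the User model, and the :storage => :cloud_files key are assumptions for illustration — see data/README.rdoc in this gem for the exact form.

    # config/cloudfiles.yml (hypothetical path, 'environment-spaced' like database.yml)
    #   development:
    #     username: myuser
    #     api_key: abc123...
    #     container: dev_assets
    #   production:
    #     username: myuser
    #     api_key: abc123...
    #     container: assets
    #     servicenet: true

    class User < ActiveRecord::Base
      has_attached_file :avatar,
                        :storage                => :cloud_files,  # assumed key; the gem's README documents the exact value
                        :cloudfiles_credentials => "#{RAILS_ROOT}/config/cloudfiles.yml",
                        :container              => "assets",
                        :path                   => ":attachment/:id/:style/:basename.:extension"
    end

An S3 configuration looks analogous: :storage => :s3, :s3_credentials pointing at a similar YAML file, and :bucket in place of :container.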
data/lib/paperclip/thumbnail.rb
@@ -0,0 +1,73 @@
+ module Paperclip
+   # Handles thumbnailing images that are uploaded.
+   class Thumbnail < Processor
+
+     attr_accessor :current_geometry, :target_geometry, :format, :whiny, :convert_options, :source_file_options
+
+     # Creates a Thumbnail object set to work on the +file+ given. It
+     # will attempt to transform the image into one defined by +target_geometry+
+     # which is a "WxH"-style string. +format+ will be inferred from the +file+
+     # unless specified. Thumbnail creation will raise no errors unless
+     # +whiny+ is true (which it is, by default). If +convert_options+ is
+     # set, the options will be appended to the convert command upon image conversion.
+     def initialize file, options = {}, attachment = nil
+       super
+       geometry = options[:geometry]
+       @file = file
+       @crop = geometry[-1,1] == '#'
+       @target_geometry = Geometry.parse geometry
+       @current_geometry = Geometry.from_file @file
+       @source_file_options = options[:source_file_options]
+       @convert_options = options[:convert_options]
+       @whiny = options[:whiny].nil? ? true : options[:whiny]
+       @format = options[:format]
+
+       @current_format = File.extname(@file.path)
+       @basename = File.basename(@file.path, @current_format)
+     end
+
+     # Returns true if the +target_geometry+ is meant to crop.
+     def crop?
+       @crop
+     end
+
+     # Returns true if the image is meant to make use of additional convert options.
+     def convert_options?
+       !@convert_options.nil? && !@convert_options.empty?
+     end
+
+     # Performs the conversion of the +file+ into a thumbnail. Returns the Tempfile
+     # that contains the new image.
+     def make
+       src = @file
+       dst = Tempfile.new([@basename, @format].compact.join("."))
+       dst.binmode
+
+       command = <<-end_command
+         #{ source_file_options }
+         "#{ File.expand_path(src.path) }[0]"
+         #{ transformation_command }
+         "#{ File.expand_path(dst.path) }"
+       end_command
+
+       begin
+         success = Paperclip.run("convert", command.gsub(/\s+/, " "))
+       rescue PaperclipCommandLineError
+         raise PaperclipError, "There was an error processing the thumbnail for #{@basename}" if @whiny
+       end
+
+       dst
+     end
+
+     # Returns the command ImageMagick's +convert+ needs to transform the image
+     # into the thumbnail.
+     def transformation_command
+       scale, crop = @current_geometry.transformation_to(@target_geometry, crop?)
+       trans = ""
+       trans << " -resize \"#{scale}\"" unless scale.nil? || scale.empty?
+       trans << " -crop \"#{crop}\" +repage" if crop
+       trans << " #{convert_options}" if convert_options?
+       trans
+     end
+   end
+ end
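
To make the geometry handling above concrete, here is a small sketch of driving the processor directly; the file name is hypothetical and ImageMagick's `convert` must be on the PATH.

    # A trailing '#' in the geometry string switches the processor into crop mode.
    thumb = Paperclip::Thumbnail.new(File.new("photo.jpg"), :geometry => "100x100#", :format => :png)
    thumb.crop?                   # => true, because the geometry ends in '#'
    thumb.transformation_command  # => the -resize/-crop arguments derived from the source image's dimensions
    tempfile = thumb.make         # shells out to `convert` and returns a Tempfile holding the new image

In normal use these options come from the :styles hash given to has_attached_file; Paperclip instantiates this processor for each style when a file is assigned.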
data/lib/paperclip/upfile.rb
@@ -0,0 +1,49 @@
+ module Paperclip
+   # The Upfile module is a convenience module for adding uploaded-file-type methods
+   # to the +File+ class. Useful for testing.
+   #   user.avatar = File.new("test/test_avatar.jpg")
+   module Upfile
+
+     # Infer the MIME-type of the file from the extension.
+     def content_type
+       type = (self.path.match(/\.(\w+)$/)[1] rescue "octet-stream").downcase
+       case type
+       when %r"jp(e|g|eg)" then "image/jpeg"
+       when %r"tiff?" then "image/tiff"
+       when %r"png", "gif", "bmp" then "image/#{type}"
+       when "txt" then "text/plain"
+       when %r"html?" then "text/html"
+       when "js" then "application/js"
+       when "csv", "xml", "css" then "text/#{type}"
+       else "application/x-#{type}"
+       end
+     end
+
+     # Returns the file's normal name.
+     def original_filename
+       File.basename(self.path)
+     end
+
+     # Returns the size of the file.
+     def size
+       File.size(self)
+     end
+   end
+ end
+
+ if defined? StringIO
+   class StringIO
+     attr_accessor :original_filename, :content_type
+     def original_filename
+       @original_filename ||= "stringio.txt"
+     end
+     def content_type
+       @content_type ||= "text/plain"
+     end
+   end
+ end
+
+ class File #:nodoc:
+   include Paperclip::Upfile
+ end
+
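
Because File is reopened above to include Paperclip::Upfile, a plain File behaves enough like an uploaded file to be assigned to an attachment in tests. A quick illustration (the fixture path comes from this gem's test directory; the `user` model is assumed):

    file = File.new("test/fixtures/5k.png")
    file.content_type       # => "image/png", inferred from the ".png" extension
    file.original_filename  # => "5k.png"
    file.size               # => the file's size in bytes

    user.avatar = file      # assumes a model with `has_attached_file :avatar`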