paperclip-cloudfiles 2.3.2 → 2.3.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. data/README.rdoc +10 -3
  2. data/Rakefile +8 -4
  3. data/generators/paperclip/USAGE +2 -2
  4. data/generators/paperclip/paperclip_generator.rb +8 -8
  5. data/lib/generators/paperclip/USAGE +8 -0
  6. data/lib/generators/paperclip/paperclip_generator.rb +31 -0
  7. data/lib/generators/paperclip/templates/paperclip_migration.rb.erb +19 -0
  8. data/lib/paperclip/attachment.rb +38 -18
  9. data/lib/paperclip/command_line.rb +80 -0
  10. data/lib/paperclip/geometry.rb +7 -7
  11. data/lib/paperclip/interpolations.rb +8 -2
  12. data/lib/paperclip/iostream.rb +11 -25
  13. data/lib/paperclip/matchers/validate_attachment_content_type_matcher.rb +3 -3
  14. data/lib/paperclip/matchers/validate_attachment_presence_matcher.rb +0 -1
  15. data/lib/paperclip/matchers/validate_attachment_size_matcher.rb +0 -1
  16. data/lib/paperclip/processor.rb +15 -6
  17. data/lib/paperclip/railtie.rb +24 -0
  18. data/lib/paperclip/storage/cloud_files.rb +131 -0
  19. data/lib/paperclip/storage/filesystem.rb +73 -0
  20. data/lib/paperclip/storage/s3.rb +192 -0
  21. data/lib/paperclip/storage.rb +3 -371
  22. data/lib/paperclip/style.rb +11 -11
  23. data/lib/paperclip/thumbnail.rb +16 -15
  24. data/lib/paperclip/upfile.rb +5 -3
  25. data/lib/paperclip/version.rb +3 -0
  26. data/lib/paperclip.rb +78 -92
  27. data/lib/tasks/paperclip.rake +72 -0
  28. data/rails/init.rb +2 -0
  29. data/shoulda_macros/paperclip.rb +1 -2
  30. data/test/attachment_test.rb +74 -28
  31. data/test/command_line_test.rb +133 -0
  32. data/test/geometry_test.rb +2 -2
  33. data/test/helper.rb +22 -24
  34. data/test/integration_test.rb +10 -11
  35. data/test/interpolations_test.rb +7 -4
  36. data/test/iostream_test.rb +6 -13
  37. data/test/matchers/have_attached_file_matcher_test.rb +1 -1
  38. data/test/matchers/validate_attachment_content_type_matcher_test.rb +11 -1
  39. data/test/matchers/validate_attachment_presence_matcher_test.rb +1 -1
  40. data/test/matchers/validate_attachment_size_matcher_test.rb +1 -1
  41. data/test/paperclip_test.rb +54 -80
  42. data/test/processor_test.rb +1 -1
  43. data/test/storage_test.rb +32 -12
  44. data/test/style_test.rb +17 -17
  45. data/test/thumbnail_test.rb +18 -18
  46. data/test/upfile_test.rb +1 -1
  47. metadata +58 -31
  48. data/tasks/paperclip_tasks.rake +0 -79
data/lib/paperclip/storage/cloud_files.rb
@@ -0,0 +1,131 @@
+ module Paperclip
+   module Storage
+     # Rackspace's Cloud Files service is a scalable, easy place to store files for
+     # distribution, and is integrated into the Limelight CDN. You can find out more about
+     # it at http://www.rackspacecloud.com/cloud_hosting_products/files
+     #
+     # To install the Cloud Files gem, add the Gemcutter gem source ("gem sources -a http://gemcutter.org"), then
+     # do a "gem install cloudfiles". For more information, see the github repository at http://github.com/rackspace/ruby-cloudfiles/
+     #
+     # There are a few Cloud Files-specific options for has_attached_file:
+     # * +cloudfiles_credentials+: Takes a path, a File, or a Hash. The path (or File) must point
+     #   to a YAML file containing the +username+ and +api_key+ that Rackspace
+     #   gives you. Rackspace customers using the cloudfiles gem >= 1.4.1 can also set a servicenet
+     #   variable to true to send traffic over the unbilled internal Rackspace service network.
+     #   You can 'environment-space' this just like you do to your
+     #   database.yml file, so different environments can use different accounts:
+     #     development:
+     #       username: hayley
+     #       api_key: a7f...
+     #     test:
+     #       username: katherine
+     #       api_key: 7fa...
+     #     production:
+     #       username: minter
+     #       api_key: 87k...
+     #       servicenet: true
+     #   This is not required, however, and the file may simply look like this:
+     #     username: minter...
+     #     api_key: 11q...
+     #   In which case, those access keys will be used in all environments. You can also
+     #   put your container name in this file, instead of adding it to the code directly.
+     #   This is useful when you want the same account but a different container for
+     #   development versus production.
+     # * +container+: This is the name of the Cloud Files container that will store your files.
+     #   This container should be marked "public" so that the files are available to the world at large.
+     #   If the container does not exist, it will be created and marked public.
+     # * +path+: This is the path under the container in which the file will be stored. The
+     #   CDN URL will be constructed from the CDN identifier for the container and the path. This is what
+     #   you will want to interpolate. Keys should be unique, like filenames, and despite the fact that
+     #   Cloud Files (strictly speaking) does not support directories, you can still use a / to
+     #   separate parts of your file name, and they will show up in the URL structure.
+     module Cloud_files
+       def self.extended base
+         require 'cloudfiles'
+         @@container ||= {}
+         base.instance_eval do
+           @cloudfiles_credentials = parse_credentials(@options[:cloudfiles_credentials])
+           @container_name = @options[:container] || @cloudfiles_credentials[:container]
+           @container_name = @container_name.call(self) if @container_name.is_a?(Proc)
+           @cloudfiles_options = @options[:cloudfiles_options] || {}
+           @@cdn_url = cloudfiles_container.cdn_url
+           @path_filename = ":cf_path_filename" unless @url.to_s.match(/^:cf.*filename$/)
+           @url = @@cdn_url + "/#{URI.encode(@path_filename).gsub(/&/,'%26')}"
+           @path = (Paperclip::Attachment.default_options[:path] == @options[:path]) ? ":attachment/:id/:style/:basename.:extension" : @options[:path]
+         end
+         Paperclip.interpolates(:cf_path_filename) do |attachment, style|
+           attachment.path(style)
+         end
+       end
+
+       def cloudfiles
+         @@cf ||= CloudFiles::Connection.new(:username => @cloudfiles_credentials[:username], :api_key => @cloudfiles_credentials[:api_key], :snet => @cloudfiles_credentials[:servicenet])
+       end
+
+       def create_container
+         container = cloudfiles.create_container(@container_name)
+         container.make_public
+         container
+       end
+
+       def cloudfiles_container
+         @@container[@container_name] ||= create_container
+       end
+
+       def container_name
+         @container_name
+       end
+
+       def parse_credentials creds
+         creds = find_credentials(creds).stringify_keys
+         (creds[Rails.env] || creds).symbolize_keys
+       end
+
+       def exists?(style = default_style)
+         cloudfiles_container.object_exists?(path(style))
+       end
+
+       def read
+         self.data
+       end
+
+       # Returns representation of the data of the file assigned to the given
+       # style, in the format most representative of the current storage.
+       def to_file style = default_style
+         @queued_for_write[style] || cloudfiles_container.create_object(path(style))
+       end
+       alias_method :to_io, :to_file
+
+       def flush_writes #:nodoc:
+         @queued_for_write.each do |style, file|
+           object = cloudfiles_container.create_object(path(style),false)
+           object.write(file)
+         end
+         @queued_for_write = {}
+       end
+
+       def flush_deletes #:nodoc:
+         @queued_for_delete.each do |path|
+           cloudfiles_container.delete_object(path)
+         end
+         @queued_for_delete = []
+       end
+
+       def find_credentials creds
+         case creds
+         when File
+           YAML.load_file(creds.path)
+         when String
+           YAML.load_file(creds)
+         when Hash
+           creds
+         else
+           raise ArgumentError, "Credentials are not a path, file, or hash."
+         end
+       end
+       private :find_credentials
+
+     end
+
+   end
+ end
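
For orientation, here is a minimal sketch of a has_attached_file declaration wired to this storage module. The model, container name, and credentials path are hypothetical, and the :storage key is assumed to be :cloud_files to match the Cloud_files module name; only the option keys :cloudfiles_credentials, :container, and :path are taken from the documentation above.

  # Hypothetical model using the Cloud Files backend (names and paths are placeholders)
  class User < ActiveRecord::Base
    has_attached_file :avatar,
      :storage                => :cloud_files,
      :cloudfiles_credentials => "#{Rails.root}/config/rackspace_cloudfiles.yml", # YAML with username/api_key (and optionally container/servicenet)
      :container              => "user_avatars",                                  # created and marked public if it does not already exist
      :path                   => ":attachment/:id/:style/:basename.:extension"    # interpolated into the container's CDN URL
  end

The referenced YAML file can be flat or 'environment-spaced' as shown in the comment above, with a username and api_key (and, for cloudfiles >= 1.4.1, an optional servicenet flag) per environment.
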
data/lib/paperclip/storage/filesystem.rb
@@ -0,0 +1,73 @@
+ module Paperclip
+   module Storage
+     # The default place to store attachments is in the filesystem. Files on the local
+     # filesystem can be very easily served by Apache without requiring a hit to your app.
+     # They also can be processed more easily after they've been saved, as they're just
+     # normal files. There is one Filesystem-specific option for has_attached_file.
+     # * +path+: The location of the repository of attachments on disk. This can (and, in
+     #   almost all cases, should) be coordinated with the value of the +url+ option to
+     #   allow files to be saved into a place where Apache can serve them without
+     #   hitting your app. Defaults to
+     #   ":rails_root/public/:attachment/:id/:style/:basename.:extension"
+     #   By default this places the files in the app's public directory which can be served
+     #   directly. If you are using capistrano for deployment, a good idea would be to
+     #   make a symlink to the capistrano-created system directory from inside your app's
+     #   public directory.
+     #   See Paperclip::Attachment#interpolate for more information on variable interpolaton.
+     #   :path => "/var/app/attachments/:class/:id/:style/:basename.:extension"
+     module Filesystem
+       def self.extended base
+       end
+
+       def exists?(style_name = default_style)
+         if original_filename
+           File.exist?(path(style_name))
+         else
+           false
+         end
+       end
+
+       # Returns representation of the data of the file assigned to the given
+       # style, in the format most representative of the current storage.
+       def to_file style_name = default_style
+         @queued_for_write[style_name] || (File.new(path(style_name), 'rb') if exists?(style_name))
+       end
+
+       def flush_writes #:nodoc:
+         @queued_for_write.each do |style_name, file|
+           file.close
+           FileUtils.mkdir_p(File.dirname(path(style_name)))
+           log("saving #{path(style_name)}")
+           FileUtils.mv(file.path, path(style_name))
+           FileUtils.chmod(0644, path(style_name))
+         end
+         @queued_for_write = {}
+       end
+
+       def flush_deletes #:nodoc:
+         @queued_for_delete.each do |path|
+           begin
+             log("deleting #{path}")
+             FileUtils.rm(path) if File.exist?(path)
+           rescue Errno::ENOENT => e
+             # ignore file-not-found, let everything else pass
+           end
+           begin
+             while(true)
+               path = File.dirname(path)
+               FileUtils.rmdir(path)
+               break if File.exists?(path) # Ruby 1.9.2 does not raise if the removal failed.
+             end
+           rescue Errno::EEXIST, Errno::ENOTEMPTY, Errno::ENOENT, Errno::EINVAL, Errno::ENOTDIR
+             # Stop trying to remove parent directories
+           rescue SystemCallError => e
+             log("There was an unexpected error while deleting directories: #{e.class}")
+             # Ignore it
+           end
+         end
+         @queued_for_delete = []
+       end
+     end
+
+   end
+ end
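
Since the filesystem backend is the default, a typical declaration only overrides :path and :url. The sketch below is illustrative; the model and directory layout are hypothetical, while the interpolation tokens come from the comment above.

  # Hypothetical model using the default filesystem storage
  class Document < ActiveRecord::Base
    has_attached_file :upload,
      :path => ":rails_root/public/system/:attachment/:id/:style/:basename.:extension",
      :url  => "/system/:attachment/:id/:style/:basename.:extension"
  end

Keeping the files under public/system pairs well with the Capistrano suggestion above: symlink public/system to the shared system directory so attachments survive deploys and Apache can serve them directly.
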
data/lib/paperclip/storage/s3.rb
@@ -0,0 +1,192 @@
+ module Paperclip
+   module Storage
+     # Amazon's S3 file hosting service is a scalable, easy place to store files for
+     # distribution. You can find out more about it at http://aws.amazon.com/s3
+     # There are a few S3-specific options for has_attached_file:
+     # * +s3_credentials+: Takes a path, a File, or a Hash. The path (or File) must point
+     #   to a YAML file containing the +access_key_id+ and +secret_access_key+ that Amazon
+     #   gives you. You can 'environment-space' this just like you do to your
+     #   database.yml file, so different environments can use different accounts:
+     #     development:
+     #       access_key_id: 123...
+     #       secret_access_key: 123...
+     #     test:
+     #       access_key_id: abc...
+     #       secret_access_key: abc...
+     #     production:
+     #       access_key_id: 456...
+     #       secret_access_key: 456...
+     #   This is not required, however, and the file may simply look like this:
+     #     access_key_id: 456...
+     #     secret_access_key: 456...
+     #   In which case, those access keys will be used in all environments. You can also
+     #   put your bucket name in this file, instead of adding it to the code directly.
+     #   This is useful when you want the same account but a different bucket for
+     #   development versus production.
+     # * +s3_permissions+: This is a String that should be one of the "canned" access
+     #   policies that S3 provides (more information can be found here:
+     #   http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html#RESTCannedAccessPolicies)
+     #   The default for Paperclip is :public_read.
+     # * +s3_protocol+: The protocol for the URLs generated to your S3 assets. Can be either
+     #   'http' or 'https'. Defaults to 'http' when your :s3_permissions are :public_read (the
+     #   default), and 'https' when your :s3_permissions are anything else.
+     # * +s3_headers+: A hash of headers such as {'Expires' => 1.year.from_now.httpdate}
+     # * +bucket+: This is the name of the S3 bucket that will store your files. Remember
+     #   that the bucket must be unique across all of Amazon S3. If the bucket does not exist
+     #   Paperclip will attempt to create it. The bucket name will not be interpolated.
+     #   You can define the bucket as a Proc if you want to determine it's name at runtime.
+     #   Paperclip will call that Proc with attachment as the only argument.
+     # * +s3_host_alias+: The fully-qualified domain name (FQDN) that is the alias to the
+     #   S3 domain of your bucket. Used with the :s3_alias_url url interpolation. See the
+     #   link in the +url+ entry for more information about S3 domains and buckets.
+     # * +url+: There are three options for the S3 url. You can choose to have the bucket's name
+     #   placed domain-style (bucket.s3.amazonaws.com) or path-style (s3.amazonaws.com/bucket).
+     #   Lastly, you can specify a CNAME (which requires the CNAME to be specified as
+     #   :s3_alias_url. You can read more about CNAMEs and S3 at
+     #   http://docs.amazonwebservices.com/AmazonS3/latest/index.html?VirtualHosting.html
+     #   Normally, this won't matter in the slightest and you can leave the default (which is
+     #   path-style, or :s3_path_url). But in some cases paths don't work and you need to use
+     #   the domain-style (:s3_domain_url). Anything else here will be treated like path-style.
+     #   NOTE: If you use a CNAME for use with CloudFront, you can NOT specify https as your
+     #   :s3_protocol; This is *not supported* by S3/CloudFront. Finally, when using the host
+     #   alias, the :bucket parameter is ignored, as the hostname is used as the bucket name
+     #   by S3.
+     # * +path+: This is the key under the bucket in which the file will be stored. The
+     #   URL will be constructed from the bucket and the path. This is what you will want
+     #   to interpolate. Keys should be unique, like filenames, and despite the fact that
+     #   S3 (strictly speaking) does not support directories, you can still use a / to
+     #   separate parts of your file name.
+     module S3
+       def self.extended base
+         begin
+           require 'aws/s3'
+         rescue LoadError => e
+           e.message << " (You may need to install the aws-s3 gem)"
+           raise e
+         end
+
+         base.instance_eval do
+           @s3_credentials = parse_credentials(@options[:s3_credentials])
+           @bucket = @options[:bucket] || @s3_credentials[:bucket]
+           @bucket = @bucket.call(self) if @bucket.is_a?(Proc)
+           @s3_options = @options[:s3_options] || {}
+           @s3_permissions = @options[:s3_permissions] || :public_read
+           @s3_protocol = @options[:s3_protocol] || (@s3_permissions == :public_read ? 'http' : 'https')
+           @s3_headers = @options[:s3_headers] || {}
+           @s3_host_alias = @options[:s3_host_alias]
+           unless @url.to_s.match(/^:s3.*url$/)
+             @path = @path.gsub(/:url/, @url)
+             @url = ":s3_path_url"
+           end
+           AWS::S3::Base.establish_connection!( @s3_options.merge(
+             :access_key_id => @s3_credentials[:access_key_id],
+             :secret_access_key => @s3_credentials[:secret_access_key]
+           ))
+         end
+         Paperclip.interpolates(:s3_alias_url) do |attachment, style|
+           "#{attachment.s3_protocol}://#{attachment.s3_host_alias}/#{attachment.path(style).gsub(%r{^/}, "")}"
+         end
+         Paperclip.interpolates(:s3_path_url) do |attachment, style|
+           "#{attachment.s3_protocol}://s3.amazonaws.com/#{attachment.bucket_name}/#{attachment.path(style).gsub(%r{^/}, "")}"
+         end
+         Paperclip.interpolates(:s3_domain_url) do |attachment, style|
+           "#{attachment.s3_protocol}://#{attachment.bucket_name}.s3.amazonaws.com/#{attachment.path(style).gsub(%r{^/}, "")}"
+         end
+       end
+
+       def expiring_url(time = 3600)
+         AWS::S3::S3Object.url_for(path, bucket_name, :expires_in => time )
+       end
+
+       def bucket_name
+         @bucket
+       end
+
+       def s3_host_alias
+         @s3_host_alias
+       end
+
+       def parse_credentials creds
+         creds = find_credentials(creds).stringify_keys
+         (creds[Rails.env] || creds).symbolize_keys
+       end
+
+       def exists?(style = default_style)
+         if original_filename
+           AWS::S3::S3Object.exists?(path(style), bucket_name)
+         else
+           false
+         end
+       end
+
+       def s3_protocol
+         @s3_protocol
+       end
+
+       # Returns representation of the data of the file assigned to the given
+       # style, in the format most representative of the current storage.
+       def to_file style = default_style
+         return @queued_for_write[style] if @queued_for_write[style]
+         filename = path(style)
+         extname = File.extname(filename)
+         basename = File.basename(filename, extname)
+         file = Tempfile.new([basename, extname])
+         file.binmode
+         file.write(AWS::S3::S3Object.value(path(style), bucket_name))
+         file.rewind
+         return file
+       end
+
+       def create_bucket
+         AWS::S3::Bucket.create(bucket_name)
+       end
+
+       def flush_writes #:nodoc:
+         @queued_for_write.each do |style, file|
+           begin
+             log("saving #{path(style)}")
+             AWS::S3::S3Object.store(path(style),
+                                     file,
+                                     bucket_name,
+                                     {:content_type => instance_read(:content_type),
+                                      :access => @s3_permissions,
+                                     }.merge(@s3_headers))
+           rescue AWS::S3::NoSuchBucket => e
+             create_bucket
+             retry
+           rescue AWS::S3::ResponseError => e
+             raise
+           end
+         end
+         @queued_for_write = {}
+       end
+
+       def flush_deletes #:nodoc:
+         @queued_for_delete.each do |path|
+           begin
+             log("deleting #{path}")
+             AWS::S3::S3Object.delete(path, bucket_name)
+           rescue AWS::S3::ResponseError
+             # Ignore this.
+           end
+         end
+         @queued_for_delete = []
+       end
+
+       def find_credentials creds
+         case creds
+         when File
+           YAML::load(ERB.new(File.read(creds.path)).result)
+         when String, Pathname
+           YAML::load(ERB.new(File.read(creds)).result)
+         when Hash
+           creds
+         else
+           raise ArgumentError, "Credentials are not a path, file, or hash."
+         end
+       end
+       private :find_credentials
+
+     end
+   end
+ end
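
Finally, a hedged sketch of the S3 backend described above. The model, bucket name, and credentials path are placeholders; the option keys (:storage, :s3_credentials, :bucket, :path, :s3_headers, :url, :s3_host_alias) and the :s3_path_url / :s3_domain_url / :s3_alias_url interpolations are the ones documented in the comment, and the credentials YAML may be environment-spaced exactly like database.yml.

  # Hypothetical model using the :s3 storage backend (requires the aws-s3 gem)
  class Photo < ActiveRecord::Base
    has_attached_file :image,
      :storage        => :s3,
      :s3_credentials => "#{Rails.root}/config/s3.yml",            # access_key_id / secret_access_key, optionally per environment
      :bucket         => "my-app-photos",                          # created on demand if it does not exist
      :path           => ":attachment/:id/:style/:basename.:extension",
      :s3_headers     => { 'Expires' => 1.year.from_now.httpdate },
      :url            => ":s3_domain_url"                          # default is :s3_path_url; use :s3_alias_url together with :s3_host_alias
  end
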