mender_paperclip 2.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/LICENSE +26 -0
- data/README.md +402 -0
- data/Rakefile +86 -0
- data/generators/paperclip/USAGE +5 -0
- data/generators/paperclip/paperclip_generator.rb +27 -0
- data/generators/paperclip/templates/paperclip_migration.rb.erb +19 -0
- data/init.rb +4 -0
- data/lib/generators/paperclip/USAGE +8 -0
- data/lib/generators/paperclip/paperclip_generator.rb +33 -0
- data/lib/generators/paperclip/templates/paperclip_migration.rb.erb +19 -0
- data/lib/paperclip/attachment.rb +454 -0
- data/lib/paperclip/callback_compatibility.rb +61 -0
- data/lib/paperclip/geometry.rb +120 -0
- data/lib/paperclip/interpolations.rb +181 -0
- data/lib/paperclip/iostream.rb +45 -0
- data/lib/paperclip/matchers/have_attached_file_matcher.rb +57 -0
- data/lib/paperclip/matchers/validate_attachment_content_type_matcher.rb +81 -0
- data/lib/paperclip/matchers/validate_attachment_presence_matcher.rb +54 -0
- data/lib/paperclip/matchers/validate_attachment_size_matcher.rb +95 -0
- data/lib/paperclip/matchers.rb +33 -0
- data/lib/paperclip/missing_attachment_styles.rb +87 -0
- data/lib/paperclip/options.rb +79 -0
- data/lib/paperclip/processor.rb +58 -0
- data/lib/paperclip/railtie.rb +26 -0
- data/lib/paperclip/storage/filesystem.rb +81 -0
- data/lib/paperclip/storage/fog.rb +162 -0
- data/lib/paperclip/storage/s3.rb +262 -0
- data/lib/paperclip/storage.rb +3 -0
- data/lib/paperclip/style.rb +95 -0
- data/lib/paperclip/thumbnail.rb +105 -0
- data/lib/paperclip/upfile.rb +62 -0
- data/lib/paperclip/version.rb +3 -0
- data/lib/paperclip.rb +478 -0
- data/lib/tasks/paperclip.rake +97 -0
- data/rails/init.rb +2 -0
- data/shoulda_macros/paperclip.rb +124 -0
- data/test/attachment_test.rb +1120 -0
- data/test/database.yml +4 -0
- data/test/fixtures/12k.png +0 -0
- data/test/fixtures/50x50.png +0 -0
- data/test/fixtures/5k.png +0 -0
- data/test/fixtures/animated.gif +0 -0
- data/test/fixtures/bad.png +1 -0
- data/test/fixtures/fog.yml +8 -0
- data/test/fixtures/s3.yml +8 -0
- data/test/fixtures/spaced file.png +0 -0
- data/test/fixtures/text.txt +1 -0
- data/test/fixtures/twopage.pdf +0 -0
- data/test/fixtures/uppercase.PNG +0 -0
- data/test/fog_test.rb +191 -0
- data/test/geometry_test.rb +206 -0
- data/test/helper.rb +152 -0
- data/test/integration_test.rb +654 -0
- data/test/interpolations_test.rb +195 -0
- data/test/iostream_test.rb +71 -0
- data/test/matchers/have_attached_file_matcher_test.rb +24 -0
- data/test/matchers/validate_attachment_content_type_matcher_test.rb +87 -0
- data/test/matchers/validate_attachment_presence_matcher_test.rb +26 -0
- data/test/matchers/validate_attachment_size_matcher_test.rb +51 -0
- data/test/options_test.rb +68 -0
- data/test/paperclip_missing_attachment_styles_test.rb +80 -0
- data/test/paperclip_test.rb +329 -0
- data/test/processor_test.rb +10 -0
- data/test/storage/filesystem_test.rb +52 -0
- data/test/storage/s3_live_test.rb +51 -0
- data/test/storage/s3_test.rb +633 -0
- data/test/style_test.rb +180 -0
- data/test/thumbnail_test.rb +383 -0
- data/test/upfile_test.rb +53 -0
- metadata +243 -0

data/lib/paperclip/options.rb
@@ -0,0 +1,79 @@
+module Paperclip
+  class Options
+
+    attr_accessor :url, :path, :only_process, :normalized_styles, :default_url, :default_style,
+                  :storage, :use_timestamp, :whiny, :use_default_time_zone, :hash_digest, :hash_secret,
+                  :convert_options, :source_file_options, :preserve_files, :http_proxy
+
+    attr_accessor :s3_credentials, :s3_host_name, :s3_options, :s3_permissions, :s3_protocol,
+                  :s3_headers, :s3_host_alias, :bucket
+
+    attr_accessor :fog_directory, :fog_credentials, :fog_host, :fog_public, :fog_file
+
+    def initialize(attachment, hash)
+      @attachment = attachment
+
+      @url = hash[:url]
+      @url = @url.call(@attachment) if @url.is_a?(Proc)
+      @path = hash[:path]
+      @path = @path.call(@attachment) if @path.is_a?(Proc)
+      @styles = hash[:styles]
+      @only_process = hash[:only_process]
+      @normalized_styles = nil
+      @default_url = hash[:default_url]
+      @default_style = hash[:default_style]
+      @storage = hash[:storage]
+      @use_timestamp = hash[:use_timestamp]
+      @whiny = hash[:whiny_thumbnails] || hash[:whiny]
+      @use_default_time_zone = hash[:use_default_time_zone]
+      @use_file_command = hash[:use_file_command]
+      @hash_digest = hash[:hash_digest]
+      @hash_data = hash[:hash_data]
+      @hash_secret = hash[:hash_secret]
+      @convert_options = hash[:convert_options]
+      @source_file_options = hash[:source_file_options]
+      @processors = hash[:processors]
+      @preserve_files = hash[:preserve_files]
+      @http_proxy = hash[:http_proxy]
+
+      #s3 options
+      @s3_credentials = hash[:s3_credentials]
+      @s3_host_name = hash[:s3_host_name]
+      @bucket = hash[:bucket]
+      @s3_options = hash[:s3_options]
+      @s3_permissions = hash[:s3_permissions]
+      @s3_protocol = hash[:s3_protocol]
+      @s3_headers = hash[:s3_headers]
+      @s3_host_alias = hash[:s3_host_alias]
+
+      #fog options
+      @fog_directory = hash[:fog_directory]
+      @fog_credentials = hash[:fog_credentials]
+      @fog_host = hash[:fog_host]
+      @fog_public = hash[:fog_public]
+      @fog_file = hash[:fog_file]
+    end
+
+    def method_missing(method, *args, &blk)
+      if method.to_s[-1] == "="
+        instance_variable_set("@#{method[0..-2]}", args[0])
+      else
+        instance_variable_get("@#{method}")
+      end
+    end
+
+    def processors
+      @processors.respond_to?(:call) ? @processors.call(@attachment.instance) : @processors
+    end
+
+    def styles
+      if @styles.respond_to?(:call) || !@normalized_styles
+        @normalized_styles = ActiveSupport::OrderedHash.new
+        (@styles.respond_to?(:call) ? @styles.call(@attachment) : @styles).each do |name, args|
+          normalized_styles[name] = Paperclip::Style.new(name, args.dup, @attachment)
+        end
+      end
+      @normalized_styles
+    end
+  end
+end

data/lib/paperclip/processor.rb
@@ -0,0 +1,58 @@
+module Paperclip
+  # Paperclip processors allow you to modify attached files when they are
+  # attached in any way you are able. Paperclip itself uses command-line
+  # programs for its included Thumbnail processor, but custom processors
+  # are not required to follow suit.
+  #
+  # Processors are required to be defined inside the Paperclip module and
+  # are also required to be a subclass of Paperclip::Processor. There is
+  # only one method you *must* implement to properly be a subclass:
+  # #make, but #initialize may also be of use. Both methods accept 3
+  # arguments: the file that will be operated on (which is an instance of
+  # File), a hash of options that were defined in has_attached_file's
+  # style hash, and the Paperclip::Attachment itself.
+  #
+  # All #make needs to return is an instance of File (Tempfile is
+  # acceptable) which contains the results of the processing.
+  #
+  # See Paperclip.run for more information about using command-line
+  # utilities from within Processors.
+  class Processor
+    attr_accessor :file, :options, :attachment
+
+    def initialize file, options = {}, attachment = nil
+      @file = file
+      @options = options
+      @attachment = attachment
+    end
+
+    def make
+    end
+
+    def self.make file, options = {}, attachment = nil
+      new(file, options, attachment).make
+    end
+  end
+
+  # Due to how ImageMagick handles its image format conversion and how Tempfile
+  # handles its naming scheme, it is necessary to override how Tempfile makes
+  # its names so as to allow for file extensions. Idea taken from the comments
+  # on this blog post:
+  # http://marsorange.com/archives/of-mogrify-ruby-tempfile-dynamic-class-definitions
+  class Tempfile < ::Tempfile
+    # This is Ruby 1.8.7's implementation.
+    if RUBY_VERSION <= "1.8.6" || RUBY_PLATFORM =~ /java/
+      def make_tmpname(basename, n)
+        case basename
+        when Array
+          prefix, suffix = *basename
+        else
+          prefix, suffix = basename, ''
+        end
+
+        t = Time.now.strftime("%y%m%d")
+        path = "#{prefix}#{t}-#{$$}-#{rand(0x100000000).to_s(36)}-#{n}#{suffix}"
+      end
+    end
+  end
+end
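
The comments above spell out the processor contract: define the class inside the Paperclip module, subclass Paperclip::Processor, and have #make return a File or Tempfile with the processed result. The sketch below follows that contract under those assumptions; the Passthrough class and the usage comment are hypothetical examples, and a real processor would transform the data (the bundled Thumbnail processor does so by shelling out to ImageMagick).

    # Hypothetical example processor: copies the uploaded file unchanged.
    require 'paperclip'

    module Paperclip
      class Passthrough < Processor
        def make
          src = @file
          # The [basename, extname] form keeps the extension; the
          # Paperclip::Tempfile subclass above backports it for older rubies.
          dst = Tempfile.new([File.basename(src.path, '.*'), File.extname(src.path)])
          dst.binmode
          src.rewind
          dst.write(src.read)   # a real processor would transform the data here
          dst.rewind
          dst                   # return a File/Tempfile with the result
        end
      end
    end

    # Hypothetical usage in a model:
    #   has_attached_file :document, :styles => { :archived => {} },
    #                     :processors => [:passthrough]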

data/lib/paperclip/railtie.rb
@@ -0,0 +1,26 @@
+require 'paperclip'
+
+module Paperclip
+  if defined? Rails::Railtie
+    require 'rails'
+    class Railtie < Rails::Railtie
+      initializer 'paperclip.insert_into_active_record' do
+        ActiveSupport.on_load :active_record do
+          Paperclip::Railtie.insert
+        end
+      end
+      rake_tasks do
+        load "tasks/paperclip.rake"
+      end
+    end
+  end
+
+  class Railtie
+    def self.insert
+      ActiveRecord::Base.send(:include, Paperclip::Glue)
+      File.send(:include, Paperclip::Upfile)
+
+      Paperclip.options[:logger] = defined?(ActiveRecord) ? ActiveRecord::Base.logger : Rails.logger
+    end
+  end
+end

data/lib/paperclip/storage/filesystem.rb
@@ -0,0 +1,81 @@
+module Paperclip
+  module Storage
+    # The default place to store attachments is in the filesystem. Files on the local
+    # filesystem can be very easily served by Apache without requiring a hit to your app.
+    # They also can be processed more easily after they've been saved, as they're just
+    # normal files. There is one Filesystem-specific option for has_attached_file.
+    # * +path+: The location of the repository of attachments on disk. This can (and, in
+    #   almost all cases, should) be coordinated with the value of the +url+ option to
+    #   allow files to be saved into a place where Apache can serve them without
+    #   hitting your app. Defaults to
+    #   ":rails_root/public/:attachment/:id/:style/:basename.:extension"
+    #   By default this places the files in the app's public directory which can be served
+    #   directly. If you are using capistrano for deployment, a good idea would be to
+    #   make a symlink to the capistrano-created system directory from inside your app's
+    #   public directory.
+    #   See Paperclip::Attachment#interpolate for more information on variable interpolation.
+    #     :path => "/var/app/attachments/:class/:id/:style/:basename.:extension"
+    module Filesystem
+      def self.extended base
+      end
+
+      def exists?(style_name = default_style)
+        if original_filename
+          File.exist?(path(style_name))
+        else
+          false
+        end
+      end
+
+      # Returns representation of the data of the file assigned to the given
+      # style, in the format most representative of the current storage.
+      def to_file style_name = default_style
+        @queued_for_write[style_name] || (File.new(path(style_name), 'rb') if exists?(style_name))
+      end
+
+      def flush_writes #:nodoc:
+        @queued_for_write.each do |style_name, file|
+          file.close
+          FileUtils.mkdir_p(File.dirname(path(style_name)))
+          log("saving #{path(style_name)}")
+          begin
+            FileUtils.mv(file.path, path(style_name))
+          rescue SystemCallError
+            FileUtils.cp(file.path, path(style_name))
+            FileUtils.rm(file.path)
+          end
+          FileUtils.chmod(0666&~File.umask, path(style_name))
+        end
+
+        after_flush_writes # allows attachment to clean up temp files
+
+        @queued_for_write = {}
+      end
+
+      def flush_deletes #:nodoc:
+        @queued_for_delete.each do |path|
+          begin
+            log("deleting #{path}")
+            FileUtils.rm(path) if File.exist?(path)
+          rescue Errno::ENOENT => e
+            # ignore file-not-found, let everything else pass
+          end
+          begin
+            while(true)
+              path = File.dirname(path)
+              FileUtils.rmdir(path)
+              break if File.exists?(path) # Ruby 1.9.2 does not raise if the removal failed.
+            end
+          rescue Errno::EEXIST, Errno::ENOTEMPTY, Errno::ENOENT, Errno::EINVAL, Errno::ENOTDIR, Errno::EACCES
+            # Stop trying to remove parent directories
+          rescue SystemCallError => e
+            log("There was an unexpected error while deleting directories: #{e.class}")
+            # Ignore it
+          end
+        end
+        @queued_for_delete = []
+      end
+    end
+
+  end
+end
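
As the comments above note, +path+ and +url+ are normally coordinated so the web server can deliver stored files without hitting the app. A minimal sketch, assuming a Rails app with this gem installed; the User model, the avatar attachment, and the system/ directory layout are illustrative choices, not requirements:

    class User < ActiveRecord::Base
      # Filesystem storage is the default; :url and :path point at the same
      # location under public/ so files are served directly from disk.
      has_attached_file :avatar,
        :styles => { :thumb => "100x100>" },
        :url    => "/system/:attachment/:id/:style/:basename.:extension",
        :path   => ":rails_root/public/system/:attachment/:id/:style/:basename.:extension"
    end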

data/lib/paperclip/storage/fog.rb
@@ -0,0 +1,162 @@
+module Paperclip
+  module Storage
+    # fog is a modern and versatile cloud computing library for Ruby.
+    # Among others, it supports Amazon S3 to store your files. In
+    # contrast to the outdated AWS-S3 gem it is actively maintained and
+    # supports multiple locations.
+    # Amazon's S3 file hosting service is a scalable, easy place to
+    # store files for distribution. You can find out more about it at
+    # http://aws.amazon.com/s3 There are a few fog-specific options for
+    # has_attached_file, which will be explained using S3 as an example:
+    # * +fog_credentials+: Takes a Hash with your credentials. For S3,
+    #   you can use the following format:
+    #     aws_access_key_id: '<your aws_access_key_id>'
+    #     aws_secret_access_key: '<your aws_secret_access_key>'
+    #     provider: 'AWS'
+    #     region: 'eu-west-1'
+    # * +fog_directory+: This is the name of the S3 bucket that will
+    #   store your files. Remember that the bucket must be unique across
+    #   all of Amazon S3. If the bucket does not exist, Paperclip will
+    #   attempt to create it.
+    # * +path+: This is the key under the bucket in which the file will
+    #   be stored. The URL will be constructed from the bucket and the
+    #   path. This is what you will want to interpolate. Keys should be
+    #   unique, like filenames, and despite the fact that S3 (strictly
+    #   speaking) does not support directories, you can still use a / to
+    #   separate parts of your file name.
+    # * +fog_public+: (optional, defaults to true) Should the uploaded
+    #   files be public or not? (true/false)
+    # * +fog_host+: (optional) The fully-qualified domain name (FQDN)
+    #   that is the alias to the S3 domain of your bucket, e.g.
+    #   'http://images.example.com'. This can also be used in
+    #   conjunction with Cloudfront (http://aws.amazon.com/cloudfront)
+
+    module Fog
+      def self.extended base
+        begin
+          require 'fog'
+        rescue LoadError => e
+          e.message << " (You may need to install the fog gem)"
+          raise e
+        end unless defined?(Fog)
+
+        base.instance_eval do
+          unless @options.url.to_s.match(/^:fog.*url$/)
+            @options.path = @options.path.gsub(/:url/, @options.url)
+            @options.url = ':fog_public_url'
+          end
+          Paperclip.interpolates(:fog_public_url) do |attachment, style|
+            attachment.public_url(style)
+          end unless Paperclip::Interpolations.respond_to? :fog_public_url
+        end
+      end
+
+      def exists?(style = default_style)
+        if original_filename
+          !!directory.files.head(path(style))
+        else
+          false
+        end
+      end
+
+      def fog_credentials
+        @fog_credentials ||= parse_credentials(@options.fog_credentials)
+      end
+
+      def fog_file
+        @fog_file ||= @options.fog_file || {}
+      end
+
+      def fog_public
+        @fog_public ||= @options.fog_public || true
+      end
+
+      def flush_writes
+        for style, file in @queued_for_write do
+          log("saving #{path(style)}")
+          retried = false
+          begin
+            directory.files.create(fog_file.merge(
+              :body => file,
+              :key => path(style),
+              :public => fog_public
+            ))
+          rescue Excon::Errors::NotFound
+            raise if retried
+            retried = true
+            directory.save
+            retry
+          end
+        end
+
+        after_flush_writes # allows attachment to clean up temp files
+
+        @queued_for_write = {}
+      end
+
+      def flush_deletes
+        for path in @queued_for_delete do
+          log("deleting #{path}")
+          directory.files.new(:key => path).destroy
+        end
+        @queued_for_delete = []
+      end
+
+      # Returns representation of the data of the file assigned to the given
+      # style, in the format most representative of the current storage.
+      def to_file(style = default_style)
+        if @queued_for_write[style]
+          @queued_for_write[style]
+        else
+          body = directory.files.get(path(style)).body
+          filename = path(style)
+          extname = File.extname(filename)
+          basename = File.basename(filename, extname)
+          file = Tempfile.new([basename, extname])
+          file.binmode
+          file.write(body)
+          file.rewind
+          file
+        end
+      end
+
+      def public_url(style = default_style)
+        if @options.fog_host
+          host = (@options.fog_host =~ /%d/) ? @options.fog_host % (path(style).hash % 4) : @options.fog_host
+          "#{host}/#{path(style)}"
+        else
+          directory.files.new(:key => path(style)).public_url
+        end
+      end
+
+      def parse_credentials(creds)
+        creds = find_credentials(creds).stringify_keys
+        env = Object.const_defined?(:Rails) ? Rails.env : nil
+        (creds[env] || creds).symbolize_keys
+      end
+
+      private
+
+      def find_credentials(creds)
+        case creds
+        when File
+          YAML::load(ERB.new(File.read(creds.path)).result)
+        when String, Pathname
+          YAML::load(ERB.new(File.read(creds)).result)
+        when Hash
+          creds
+        else
+          raise ArgumentError, "Credentials are not a path, file, or hash."
+        end
+      end
+
+      def connection
+        @connection ||= ::Fog::Storage.new(fog_credentials)
+      end
+
+      def directory
+        @directory ||= connection.directories.new(:key => @options.fog_directory)
+      end
+    end
+  end
+end
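
Putting the options documented above together, a fog-backed attachment might be declared as in the sketch below. This is illustrative only: the Document model, the bucket name, and the credential placeholders are hypothetical, and the credentials could equally be loaded from a YAML file (see parse_credentials above).

    class Document < ActiveRecord::Base
      has_attached_file :upload,
        :storage         => :fog,
        :fog_directory   => "my-example-bucket",   # bucket name (hypothetical)
        :fog_public      => true,
        :fog_credentials => {
          :provider              => 'AWS',
          :aws_access_key_id     => '<your aws_access_key_id>',
          :aws_secret_access_key => '<your aws_secret_access_key>',
          :region                => 'eu-west-1'
        },
        :path => ":class/:attachment/:id/:style/:filename"
    end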

data/lib/paperclip/storage/s3.rb
@@ -0,0 +1,262 @@
+module Paperclip
+  module Storage
+    # Amazon's S3 file hosting service is a scalable, easy place to store files for
+    # distribution. You can find out more about it at http://aws.amazon.com/s3
+    # There are a few S3-specific options for has_attached_file:
+    # * +s3_credentials+: Takes a path, a File, or a Hash. The path (or File) must point
+    #   to a YAML file containing the +access_key_id+ and +secret_access_key+ that Amazon
+    #   gives you. You can 'environment-space' this just like you do to your
+    #   database.yml file, so different environments can use different accounts:
+    #     development:
+    #       access_key_id: 123...
+    #       secret_access_key: 123...
+    #     test:
+    #       access_key_id: abc...
+    #       secret_access_key: abc...
+    #     production:
+    #       access_key_id: 456...
+    #       secret_access_key: 456...
+    #   This is not required, however, and the file may simply look like this:
+    #     access_key_id: 456...
+    #     secret_access_key: 456...
+    #   In which case, those access keys will be used in all environments. You can also
+    #   put your bucket name in this file, instead of adding it to the code directly.
+    #   This is useful when you want the same account but a different bucket for
+    #   development versus production.
+    # * +s3_permissions+: This is a String that should be one of the "canned" access
+    #   policies that S3 provides (more information can be found here:
+    #   http://docs.amazonwebservices.com/AmazonS3/latest/dev/index.html?RESTAccessPolicy.html)
+    #   The default for Paperclip is :public_read.
+    #
+    #   You can set permissions on a per-style basis by doing the following:
+    #     :s3_permissions => {
+    #       :original => :private
+    #     }
+    #   Or globally:
+    #     :s3_permissions => :private
+    #
+    # * +s3_protocol+: The protocol for the URLs generated to your S3 assets. Can be either
+    #   'http' or 'https'. Defaults to 'http' when your :s3_permissions are :public_read (the
+    #   default), and 'https' when your :s3_permissions are anything else.
+    # * +s3_headers+: A hash of headers such as {'Expires' => 1.year.from_now.httpdate}
+    # * +bucket+: This is the name of the S3 bucket that will store your files. Remember
+    #   that the bucket must be unique across all of Amazon S3. If the bucket does not exist,
+    #   Paperclip will attempt to create it. The bucket name will not be interpolated.
+    #   You can define the bucket as a Proc if you want to determine its name at runtime.
+    #   Paperclip will call that Proc with attachment as the only argument.
+    # * +s3_host_alias+: The fully-qualified domain name (FQDN) that is the alias to the
+    #   S3 domain of your bucket. Used with the :s3_alias_url url interpolation. See the
+    #   link in the +url+ entry for more information about S3 domains and buckets.
+    # * +url+: There are four options for the S3 url. You can choose to have the bucket's name
+    #   placed domain-style (bucket.s3.amazonaws.com) or path-style (s3.amazonaws.com/bucket).
+    #   You can also specify a CNAME (which requires the CNAME to be specified as
+    #   :s3_alias_url). You can read more about CNAMEs and S3 at
+    #   http://docs.amazonwebservices.com/AmazonS3/latest/index.html?VirtualHosting.html
+    #   Normally, this won't matter in the slightest and you can leave the default (which is
+    #   path-style, or :s3_path_url). But in some cases paths don't work and you need to use
+    #   the domain-style (:s3_domain_url). Anything else here will be treated like path-style.
+    #   NOTE: If you use a CNAME for use with CloudFront, you can NOT specify https as your
+    #   :s3_protocol; this is *not supported* by S3/CloudFront. Finally, when using the host
+    #   alias, the :bucket parameter is ignored, as the hostname is used as the bucket name
+    #   by S3. The fourth option for the S3 url is :asset_host, which uses Rails' built-in
+    #   asset_host settings. NOTE: To get the full url from a paperclip'd object, use the
+    #   image_path helper; this is what image_tag uses to generate the url for an img tag.
+    # * +path+: This is the key under the bucket in which the file will be stored. The
+    #   URL will be constructed from the bucket and the path. This is what you will want
+    #   to interpolate. Keys should be unique, like filenames, and despite the fact that
+    #   S3 (strictly speaking) does not support directories, you can still use a / to
+    #   separate parts of your file name.
+    # * +s3_host_name+: If your bucket is in a region other than the default (Tokyo, for example), set the region's S3 host name here.
+    module S3
+      def self.extended base
+        begin
+          require 'aws/s3'
+        rescue LoadError => e
+          e.message << " (You may need to install the aws-s3 gem)"
+          raise e
+        end unless defined?(AWS::S3)
+
+        base.instance_eval do
+          @s3_options = @options.s3_options || {}
+          @s3_permissions = set_permissions(@options.s3_permissions)
+          @s3_protocol = @options.s3_protocol ||
+            Proc.new do |style|
+              (@s3_permissions[style.to_sym] || @s3_permissions[:default]) == :public_read ? 'http' : 'https'
+            end
+          @s3_headers = @options.s3_headers || {}
+
+          unless @options.url.to_s.match(/^:s3.*url$/) || @options.url == ":asset_host"
+            @options.path = @options.path.gsub(/:url/, @options.url)
+            @options.url = ":s3_path_url"
+          end
+          @options.url = @options.url.inspect if @options.url.is_a?(Symbol)
+
+          @http_proxy = @options.http_proxy || nil
+          if @http_proxy
+            @s3_options.merge!({:proxy => @http_proxy})
+          end
+
+          AWS::S3::Base.establish_connection!( @s3_options.merge(
+            :access_key_id => s3_credentials[:access_key_id],
+            :secret_access_key => s3_credentials[:secret_access_key]
+          ))
+        end
+        Paperclip.interpolates(:s3_alias_url) do |attachment, style|
+          "#{attachment.s3_protocol(style)}://#{attachment.s3_host_alias}/#{attachment.path(style).gsub(%r{^/}, "")}"
+        end unless Paperclip::Interpolations.respond_to? :s3_alias_url
+        Paperclip.interpolates(:s3_path_url) do |attachment, style|
+          "#{attachment.s3_protocol(style)}://#{attachment.s3_host_name}/#{attachment.bucket_name}/#{attachment.path(style).gsub(%r{^/}, "")}"
+        end unless Paperclip::Interpolations.respond_to? :s3_path_url
+        Paperclip.interpolates(:s3_domain_url) do |attachment, style|
+          "#{attachment.s3_protocol(style)}://#{attachment.bucket_name}.#{attachment.s3_host_name}/#{attachment.path(style).gsub(%r{^/}, "")}"
+        end unless Paperclip::Interpolations.respond_to? :s3_domain_url
+        Paperclip.interpolates(:asset_host) do |attachment, style|
+          "#{attachment.path(style).gsub(%r{^/}, "")}"
+        end unless Paperclip::Interpolations.respond_to? :asset_host
+      end
+
+      def expiring_url(time = 3600, style_name = default_style)
+        AWS::S3::S3Object.url_for(path(style_name), bucket_name, :expires_in => time, :use_ssl => (s3_protocol(style_name) == 'https'))
+      end
+
+      def s3_credentials
+        @s3_credentials ||= parse_credentials(@options.s3_credentials)
+      end
+
+      def s3_host_name
+        @options.s3_host_name || s3_credentials[:s3_host_name] || "s3.amazonaws.com"
+      end
+
+      def s3_host_alias
+        @s3_host_alias = @options.s3_host_alias
+        @s3_host_alias = @s3_host_alias.call(self) if @s3_host_alias.is_a?(Proc)
+        @s3_host_alias
+      end
+
+      def bucket_name
+        @bucket = @options.bucket || s3_credentials[:bucket]
+        @bucket = @bucket.call(self) if @bucket.is_a?(Proc)
+        @bucket
+      end
+
+      def using_http_proxy?
+        !!@http_proxy
+      end
+
+      def http_proxy_host
+        using_http_proxy? ? @http_proxy[:host] : nil
+      end
+
+      def http_proxy_port
+        using_http_proxy? ? @http_proxy[:port] : nil
+      end
+
+      def http_proxy_user
+        using_http_proxy? ? @http_proxy[:user] : nil
+      end
+
+      def http_proxy_password
+        using_http_proxy? ? @http_proxy[:password] : nil
+      end
+
+      def set_permissions permissions
+        if permissions.is_a?(Hash)
+          permissions[:default] = permissions[:default] || :public_read
+        else
+          permissions = { :default => permissions || :public_read }
+        end
+        permissions
+      end
+
+      def parse_credentials creds
+        creds = find_credentials(creds).stringify_keys
+        env = Object.const_defined?(:Rails) ? Rails.env : nil
+        (creds[env] || creds).symbolize_keys
+      end
+
+      def exists?(style = default_style)
+        if original_filename
+          AWS::S3::S3Object.exists?(path(style), bucket_name)
+        else
+          false
+        end
+      end
+
+      def s3_protocol(style = default_style)
+        if @s3_protocol.is_a?(Proc)
+          @s3_protocol.call(style)
+        else
+          @s3_protocol
+        end
+      end
+
+      # Returns representation of the data of the file assigned to the given
+      # style, in the format most representative of the current storage.
+      def to_file style = default_style
+        return @queued_for_write[style] if @queued_for_write[style]
+        filename = path(style)
+        extname = File.extname(filename)
+        basename = File.basename(filename, extname)
+        file = Tempfile.new([basename, extname])
+        file.binmode
+        file.write(AWS::S3::S3Object.value(path(style), bucket_name))
+        file.rewind
+        return file
+      end
+
+      def create_bucket
+        AWS::S3::Bucket.create(bucket_name)
+      end
+
+      def flush_writes #:nodoc:
+        @queued_for_write.each do |style, file|
+          begin
+            log("saving #{path(style)}")
+            AWS::S3::S3Object.store(path(style),
+                                    file,
+                                    bucket_name,
+                                    {:content_type => file.content_type.to_s.strip,
+                                     :access => (@s3_permissions[style] || @s3_permissions[:default]),
+                                    }.merge(@s3_headers))
+          rescue AWS::S3::NoSuchBucket => e
+            create_bucket
+            retry
+          rescue AWS::S3::ResponseError => e
+            raise
+          end
+        end
+
+        after_flush_writes # allows attachment to clean up temp files
+
+        @queued_for_write = {}
+      end
+
+      def flush_deletes #:nodoc:
+        @queued_for_delete.each do |path|
+          begin
+            log("deleting #{path}")
+            AWS::S3::S3Object.delete(path, bucket_name)
+          rescue AWS::S3::ResponseError
+            # Ignore this.
+          end
+        end
+        @queued_for_delete = []
+      end
+
+      def find_credentials creds
+        case creds
+        when File
+          YAML::load(ERB.new(File.read(creds.path)).result)
+        when String, Pathname
+          YAML::load(ERB.new(File.read(creds)).result)
+        when Hash
+          creds
+        else
+          raise ArgumentError, "Credentials are not a path, file, or hash."
+        end
+      end
+      private :find_credentials
+
+    end
+  end
+end
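
A sketch of an S3-backed attachment using the options documented above. It assumes a Rails app with the aws-s3 gem available; the Photo model, the bucket name, and the config/s3.yml path are hypothetical, and the YAML file would hold the environment-spaced access_key_id/secret_access_key pairs shown in the comments.

    class Photo < ActiveRecord::Base
      has_attached_file :image,
        :storage        => :s3,
        :s3_credentials => "#{Rails.root}/config/s3.yml",  # YAML credentials (hypothetical path)
        :bucket         => "my-example-bucket",
        :path           => ":class/:id/:style/:filename",
        :styles         => { :thumb => "200x200>" },
        :s3_permissions => { :original => :private },      # per-style; other styles fall back to :public_read
        :s3_headers     => { 'Cache-Control' => 'max-age=31557600' }
    end

    # A private original can still be handed out through a time-limited URL:
    #   photo.image.expiring_url(300, :original)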