s3 0.2.1 → 0.2.2
Sign up to get free protection for your applications and to get access to all the features.
- data/.document +1 -0
- data/README.rdoc +12 -1
- data/VERSION +1 -1
- data/extra/{s3_backend.rb → s3_attachment_fu.rb} +0 -0
- data/extra/s3_paperclip.rb +142 -0
- metadata +7 -6
data/.document
CHANGED
data/README.rdoc
CHANGED
@@ -1,9 +1,20 @@
|
|
1
1
|
= S3
|
2
2
|
|
3
|
+
S3 library provides access to Amazon's Simple Storage Service. It
|
4
|
+
supports both: European and US buckets through REST API.
|
5
|
+
|
3
6
|
* repository: http://github.com/qoobaa/s3
|
4
7
|
* issue tracker: http://github.com/qoobaa/s3/issues
|
5
8
|
* rdoc: http://qoobaa.github.com/s3
|
6
9
|
|
10
|
+
== Installation
|
11
|
+
|
12
|
+
If you don't have the Gemcutter sources yet:
|
13
|
+
gem sources -a http://gemcutter.org
|
14
|
+
|
15
|
+
To install the gem type:
|
16
|
+
sudo gem install s3
|
17
|
+
|
7
18
|
== Usage
|
8
19
|
|
9
20
|
Coming soon, see Rdoc documentation.
|
@@ -13,7 +24,7 @@ Coming soon, see Rdoc documentation.
|
|
13
24
|
You have to pass access key id (-a) and secret access key (-s) to the
|
14
25
|
command line tool. S3 reads ACCESS_KEY_ID and SECRET_ACCESS_KEY
|
15
26
|
environment variables and uses them by default, so if you don't want
|
16
|
-
to pass them each time, export them (e.g. in
|
27
|
+
to pass them each time, export them (e.g. in ~/.bashrc file).
|
17
28
|
|
18
29
|
* list buckets
|
19
30
|
s3 bucket
|
data/VERSION
CHANGED
@@ -1 +1 @@
|
|
1
|
-
0.2.1
|
1
|
+
0.2.2
|
File without changes
|
@@ -0,0 +1,142 @@
|
|
1
|
+
# S3 backend for paperclip plugin. Copy the file to:
|
2
|
+
# +config/initializers/+ directory
|
3
|
+
#
|
4
|
+
# Example configuration for CNAME bucket:
|
5
|
+
#
|
6
|
+
# has_attached_file :image,
|
7
|
+
# :s3_host_alias => "bucket.domain.tld",
|
8
|
+
# :url => ":s3_alias_url",
|
9
|
+
# :styles => {
|
10
|
+
# :medium => "300x300>",
|
11
|
+
# :thumb => "100x100>"
|
12
|
+
# },
|
13
|
+
# :storage => :s3,
|
14
|
+
# :s3_credentials => {
|
15
|
+
# :access_key_id => "...",
|
16
|
+
# :secret_access_key => "..."
|
17
|
+
# },
|
18
|
+
# :bucket => "bucket.domain.tld",
|
19
|
+
# :path => ":attachment/:id/:style.:extension"
|
20
|
+
|
21
|
+
module Paperclip
  module Storage
    # Storage backend for the Paperclip plugin built on the +s3+ gem
    # (qoobaa/s3), supporting both European and US buckets via the REST API.
    # It is mixed into a Paperclip attachment with +:storage => :s3+; see the
    # header comment of this file for a full +has_attached_file+ example.
    module S3
      # Hook invoked when an attachment instance is extended with this module.
      # Loads the s3 gem, reads all +:s3_*+ options, builds the service/bucket
      # handles, and registers the URL interpolations.
      def self.extended(base)
        begin
          require "s3"
        rescue LoadError => e
          e.message << " (You may need to install the s3 gem)"
          raise e
        end

        base.instance_eval do
          @s3_credentials = parse_credentials(@options[:s3_credentials])
          # Bucket may come from :bucket, from the credentials hash, or be
          # computed lazily by a Proc that receives the attachment.
          @bucket_name = @options[:bucket] || @s3_credentials[:bucket]
          @bucket_name = @bucket_name.call(self) if @bucket_name.is_a?(Proc)
          @s3_options = @options[:s3_options] || {}
          @s3_permissions = @options[:s3_permissions] || :public_read
          # Private objects default to https so signed access stays encrypted.
          @s3_protocol = @options[:s3_protocol] || (@s3_permissions == :public_read ? "http" : "https")
          @s3_headers = @options[:s3_headers] || {}
          @s3_host_alias = @options[:s3_host_alias]
          # Force an S3-style URL interpolation unless the user already chose one.
          @url = ":s3_path_url" unless @url.to_s.match(/^:s3.*url$/)
          @service = ::S3::Service.new(@s3_options.merge(:access_key_id => @s3_credentials[:access_key_id],
                                                         :secret_access_key => @s3_credentials[:secret_access_key],
                                                         :use_ssl => @s3_protocol == "https"))
          @bucket = @service.buckets.build(@bucket_name)
        end
        # CNAME-style URL: http(s)://<alias>/<path>
        Paperclip.interpolates(:s3_alias_url) do |attachment, style|
          "#{attachment.s3_protocol}://#{attachment.s3_host_alias}/#{attachment.path(style).gsub(%r{^/}, "")}"
        end
        # Path-style URL: http(s)://s3.amazonaws.com/<bucket>/<path>
        Paperclip.interpolates(:s3_path_url) do |attachment, style|
          "#{attachment.s3_protocol}://s3.amazonaws.com/#{attachment.bucket_name}/#{attachment.path(style).gsub(%r{^/}, "")}"
        end
        # Virtual-host-style URL: http(s)://<bucket>.s3.amazonaws.com/<path>
        Paperclip.interpolates(:s3_domain_url) do |attachment, style|
          "#{attachment.s3_protocol}://#{attachment.bucket_name}.s3.amazonaws.com/#{attachment.path(style).gsub(%r{^/}, "")}"
        end
      end

      # The ::S3::Bucket handle built in +extended+.
      def bucket
        @bucket
      end

      # Name of the configured bucket.
      def bucket_name
        @bucket.name
      end

      # CNAME host alias used by the :s3_alias_url interpolation (may be nil).
      def s3_host_alias
        @s3_host_alias
      end

      # Resolves +creds+ (path, File, or Hash) and, when the hash is keyed by
      # environment, picks the RAILS_ENV section; returns a symbol-keyed hash.
      def parse_credentials(creds)
        creds = find_credentials(creds).stringify_keys
        (creds[RAILS_ENV] || creds).symbolize_keys
      end

      # True when an object exists in the bucket for the given style.
      # Returns false when no file has been assigned yet.
      def exists?(style = default_style)
        if original_filename
          bucket.objects.build(path(style)).exists?
        else
          false
        end
      end

      # "http" or "https", as resolved in +extended+.
      def s3_protocol
        @s3_protocol
      end

      # Returns representation of the data of the file assigned to the given
      # style, in the format most representative of the current storage.
      # Prefers the not-yet-flushed queued file; otherwise downloads the
      # object's content into a Tempfile.
      def to_file(style = default_style)
        return @queued_for_write[style] if @queued_for_write[style]
        file = Tempfile.new(path(style))
        file.write(bucket.objects.find(path(style)).content)
        file.rewind
        return file
      end

      def flush_writes #:nodoc:
        @queued_for_write.each do |style, file|
          begin
            log("saving #{path(style)}")
            object = bucket.objects.build(path(style))
            object.content = file.read
            # BUGFIX: was +@s3_permisions+ (misspelled, always nil), so the
            # configured ACL was never applied to uploaded objects.
            object.acl = @s3_permissions
            object.content_type = instance_read(:content_type)
            object.content_disposition = @s3_headers[:content_disposition]
            object.content_encoding = @s3_headers[:content_encoding]
            object.save
          rescue ::S3::Error::ResponseError => e
            raise
          end
        end
        @queued_for_write = {}
      end

      def flush_deletes #:nodoc:
        @queued_for_delete.each do |path|
          begin
            log("deleting #{path}")
            bucket.objects.find(path).destroy
          rescue ::S3::Error::ResponseError
            # Ignore this: deleting an already-missing object is a no-op.
          end
        end
        @queued_for_delete = []
      end

      # Loads credentials from a File, a path String (ERB-evaluated YAML),
      # or passes a Hash through unchanged.
      # Raises ArgumentError for any other type.
      def find_credentials(creds)
        case creds
        when File
          YAML::load(ERB.new(File.read(creds.path)).result)
        when String
          YAML::load(ERB.new(File.read(creds)).result)
        when Hash
          creds
        else
          raise ArgumentError, "Credentials are not a path, file, or hash."
        end
      end
      private :find_credentials
    end
  end
end
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: s3
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.2.1
|
4
|
+
version: 0.2.2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- "Jakub Ku\xC5\xBAma"
|
@@ -10,7 +10,7 @@ autorequire:
|
|
10
10
|
bindir: bin
|
11
11
|
cert_chain: []
|
12
12
|
|
13
|
-
date: 2009-10-
|
13
|
+
date: 2009-10-18 00:00:00 +02:00
|
14
14
|
default_executable: s3
|
15
15
|
dependencies:
|
16
16
|
- !ruby/object:Gem::Dependency
|
@@ -40,7 +40,8 @@ files:
|
|
40
40
|
- Rakefile
|
41
41
|
- VERSION
|
42
42
|
- bin/s3
|
43
|
-
- extra/s3_backend.rb
|
43
|
+
- extra/s3_attachment_fu.rb
|
44
|
+
- extra/s3_paperclip.rb
|
44
45
|
- lib/s3.rb
|
45
46
|
- lib/s3/bucket.rb
|
46
47
|
- lib/s3/connection.rb
|
@@ -86,9 +87,9 @@ signing_key:
|
|
86
87
|
specification_version: 3
|
87
88
|
summary: Library for accessing S3 objects and buckets, with command line tool
|
88
89
|
test_files:
|
90
|
+
- test/object_test.rb
|
91
|
+
- test/test_helper.rb
|
89
92
|
- test/bucket_test.rb
|
93
|
+
- test/connection_test.rb
|
90
94
|
- test/service_test.rb
|
91
95
|
- test/signature_test.rb
|
92
|
-
- test/connection_test.rb
|
93
|
-
- test/test_helper.rb
|
94
|
-
- test/object_test.rb
|