dragonfly-s3_data_store 1.0.2 → 1.0.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 8db0a53665e761115c7791d464ec6f88c8d19e64
-  data.tar.gz: a8cbd2bbc9fa864f7c88fc1329f94f9d46c57816
+  metadata.gz: 7958747f8087147274b566b9a03b588016e1951c
+  data.tar.gz: baf40e36ace3f506553d1c227bfdae3e53a705ac
 SHA512:
-  metadata.gz: b195b31ba392d8cd217333e0428211bbcc6c96910564c92650ca0bdf67f4363e31513d4fdb17d9378bb4dffaf307322667b0f2010c7b3dd3827b1019e8d5dc4c
-  data.tar.gz: e2f60e2b217bc400b901c95cee0234b6ac20ce8619877f29cc7c8029b85ed98d632f34a507023760240dc1b2e607b9a0756c6dcef938cf6f5039f4f654f78c94
+  metadata.gz: 3a5a60312800230e4dcacd84486003e61114fab36c7a2922f28097f260dee1917b5f5f37de8fe930e32e12dcd298b56ebc65a079d1dba7f5134b72f1100f8763
+  data.tar.gz: 6b8c30b5863c92c0485f91f4d4b492149e559525a990b9cd44473e610fe2bb63158022327e7d8c8df2e45ba87afd8371bd6c663a665b3822a6a91ad254d84336
data/README.md CHANGED
@@ -9,10 +9,11 @@ gem 'dragonfly-s3_data_store'
 ```
 
 ## Usage
-
-In your dragonfly config block (with default options):
+Configuration (remember the require)
 
 ```ruby
+require 'dragonfly/s3_data_store'
+
 Dragonfly.app.configure do
   # ...
 
@@ -36,6 +37,7 @@ end
 :url_scheme       # defaults to "http"
 :url_host         # defaults to "<bucket-name>.s3.amazonaws.com", or "s3.amazonaws.com/<bucket-name>" if not a valid subdomain
 :use_iam_profile  # boolean - if true, no need for access_key_id or secret_access_key
+:root_path        # store all content under a subdirectory - uids will be relative to this - defaults to nil
 ```
 
 ### Per-storage options
@@ -48,8 +50,12 @@ or
 ```ruby
 class MyModel
   dragonfly_accessor :photo do
-    storage_path { "some/path/#{some_instance_method}/#{rand(100)}" }
-    storage_headers { {"x-amz-acl" => "public-read-write"} }
+    storage_options do |attachment|
+      {
+        path: "some/path/#{some_instance_method}/#{rand(100)}",
+        headers: {"x-amz-acl" => "public-read-write"}
+      }
+    end
   end
 end
 ```
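
Reviewer note: the README changes above add the explicit require, the new `:root_path` datastore option, and the `storage_options` per-attachment block. A minimal configuration sketch combining the require and `root_path` follows; the bucket name and credentials are placeholders, not values taken from this diff.

```ruby
require 'dragonfly/s3_data_store'

Dragonfly.app.configure do
  # With root_path set, every stored uid is prefixed with "attachments/" in the bucket.
  datastore :s3,
    bucket_name:       'my-bucket',           # placeholder
    access_key_id:     'YOUR_ACCESS_KEY_ID',  # placeholder
    secret_access_key: 'YOUR_SECRET_KEY',     # placeholder
    root_path:         'attachments'
end
```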
@@ -1,5 +1,5 @@
 module Dragonfly
   class S3DataStore
-    VERSION = "1.0.2"
+    VERSION = "1.0.3"
   end
 end
@@ -31,9 +31,10 @@ module Dragonfly
       @url_scheme = opts[:url_scheme] || 'http'
       @url_host = opts[:url_host]
       @use_iam_profile = opts[:use_iam_profile]
+      @root_path = opts[:root_path]
     end
 
-    attr_accessor :bucket_name, :access_key_id, :secret_access_key, :region, :storage_headers, :url_scheme, :url_host, :use_iam_profile
+    attr_accessor :bucket_name, :access_key_id, :secret_access_key, :region, :storage_headers, :url_scheme, :url_host, :use_iam_profile, :root_path
 
     def write(content, opts={})
       ensure_configured
@@ -45,7 +46,7 @@ module Dragonfly
 
       rescuing_socket_errors do
         content.file do |f|
-          storage.put_object(bucket_name, uid, f, full_storage_headers(headers, content.meta))
+          storage.put_object(bucket_name, full_path(uid), f, full_storage_headers(headers, content.meta))
         end
       end
 
@@ -54,27 +55,27 @@ module Dragonfly
 
     def read(uid)
       ensure_configured
-      response = rescuing_socket_errors{ storage.get_object(bucket_name, uid) }
+      response = rescuing_socket_errors{ storage.get_object(bucket_name, full_path(uid)) }
       [response.body, headers_to_meta(response.headers)]
     rescue Excon::Errors::NotFound => e
       nil
     end
 
     def destroy(uid)
-      rescuing_socket_errors{ storage.delete_object(bucket_name, uid) }
+      rescuing_socket_errors{ storage.delete_object(bucket_name, full_path(uid)) }
     rescue Excon::Errors::NotFound, Excon::Errors::Conflict => e
       Dragonfly.warn("#{self.class.name} destroy error: #{e}")
     end
 
     def url_for(uid, opts={})
-      if opts && opts[:expires]
-        storage.get_object_https_url(bucket_name, uid, opts[:expires])
+      if opts[:expires]
+        storage.get_object_https_url(bucket_name, full_path(uid), opts[:expires])
       else
         scheme = opts[:scheme] || url_scheme
         host = opts[:host] || url_host || (
          bucket_name =~ SUBDOMAIN_PATTERN ? "#{bucket_name}.s3.amazonaws.com" : "s3.amazonaws.com/#{bucket_name}"
        )
-        "#{scheme}://#{host}/#{uid}"
+        "#{scheme}://#{host}/#{full_path(uid)}"
       end
     end
 
@@ -135,6 +136,10 @@ module Dragonfly
       "#{Time.now.strftime '%Y/%m/%d/%H/%M/%S'}/#{rand(1000)}/#{name.gsub(/[^\w.]+/, '_')}"
     end
 
+    def full_path(uid)
+      File.join *[root_path, uid].compact
+    end
+
     def full_storage_headers(headers, meta)
       storage_headers.merge(meta_to_headers(meta)).merge(headers)
     end
@@ -165,4 +170,3 @@ module Dragonfly
 
   end
 end
-
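
Reviewer note: the substantive change in the datastore is that every S3 key now passes through the new `full_path` helper, which prepends `root_path` when one is configured and leaves the uid untouched otherwise. A standalone sketch of that behaviour, with illustrative sample values only:

```ruby
# Mirrors full_path(uid) from the diff: File.join *[root_path, uid].compact
def full_path(root_path, uid)
  File.join(*[root_path, uid].compact)
end

uid = "2014/01/07/12/00/00/123/photo.png"            # example uid shape only

full_path("some/root", uid)
# => "some/root/2014/01/07/12/00/00/123/photo.png"   # key actually sent to S3

full_path(nil, uid)
# => "2014/01/07/12/00/00/123/photo.png"             # unchanged when root_path is nil
```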
@@ -165,7 +165,51 @@ describe Dragonfly::S3DataStore do
         expect{ @data_store.write(content) }.not_to raise_error
       end
     end
+  end
+
+  describe "root_path" do
+    before do
+      content.name = "something.png"
+      @data_store.root_path = "some/path"
+    end
+
+    it "stores files in the provided sub directory" do
+      @data_store.storage.should_receive(:put_object).with(BUCKET_NAME, /^some\/path\/.*\/something\.png$/, anything, anything)
+      @data_store.write(content)
+    end
 
+    it "finds files in the provided sub directory" do
+      mock_response = double("response", body: "", headers: {})
+      uid = @data_store.write(content)
+      @data_store.storage.should_receive(:get_object).with(BUCKET_NAME, /^some\/path\/.*\/something\.png$/).and_return(mock_response)
+      @data_store.read(uid)
+    end
+
+    it "does not alter the uid" do
+      uid = @data_store.write(content)
+      uid.should include("something.png")
+      uid.should_not include("some/path")
+    end
+
+    it "destroys files in the provided sub directory" do
+      uid = @data_store.write(content)
+      @data_store.storage.should_receive(:delete_object).with(BUCKET_NAME, /^some\/path\/.*\/something\.png$/)
+      @data_store.destroy(uid)
+    end
+
+    describe "url_for" do
+      before do
+        @uid = @data_store.write(content)
+      end
+
+      it "returns the uid prefixed with the root_path" do
+        @data_store.url_for(@uid).should =~ /some\/path\/.*\/something\.png/
+      end
+
+      it "gives an expiring url" do
+        @data_store.url_for(@uid, :expires => 1301476942).should =~ /\/some\/path\/.*\/something\.png\?AWSAccessKeyId=/
+      end
+    end
   end
 
   describe "autocreating the bucket" do
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: dragonfly-s3_data_store
 version: !ruby/object:Gem::Version
-  version: 1.0.2
+  version: 1.0.3
 platform: ruby
 authors:
 - Mark Evans
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-12-03 00:00:00.000000000 Z
+date: 2014-01-07 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: dragonfly