dragonfly-s3_data_store 1.0.0

This diff shows the content of a publicly released package version as it appears in its public registry, and is provided for informational purposes only.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: de4b096e174cc3386bcdb840d6f942b94f8cbb6d
+   data.tar.gz: d11b8ef53ce003bf73b24c0db94c70a5fd0e5684
+ SHA512:
+   metadata.gz: 19319c9981cf83aed2f8147b568535afbc35de43c405ee2148e283b375870a9dc5d414085ad1b1679934e011988027fc2c20455b1485cb130ada64b16eac0e09
+   data.tar.gz: 81ee661b5d3c8a0fe7d6f39da1e8cd1c4ba59c9b36ed1c6ee9cd0a14008c27d87667870d70e98625b99ba43d5b154a5f2721defbc1bbbda646cc3f2331fd54a8
data/.gitignore ADDED
@@ -0,0 +1,19 @@
+ *.gem
+ *.rbc
+ .bundle
+ .config
+ .yardoc
+ Gemfile.lock
+ InstalledFiles
+ _yardoc
+ coverage
+ doc/
+ lib/bundler/man
+ pkg
+ rdoc
+ spec/reports
+ test/tmp
+ test/version_tmp
+ tmp
+ .s3_spec.yml
+
data/Gemfile ADDED
@@ -0,0 +1,2 @@
+ source 'https://rubygems.org'
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,22 @@
+ Copyright (c) 2013 Mark Evans
+
+ MIT License
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,90 @@
+ # Dragonfly::S3DataStore
+
+ Amazon AWS S3 data store for use with the [Dragonfly](http://github.com/markevans/dragonfly) gem.
+
+ ## Gemfile
+
+ ```ruby
+ gem 'dragonfly-s3_data_store'
+ ```
+
+ ## Usage
+
+ In your dragonfly config block (with default options):
+
+ ```ruby
+ Dragonfly.app.configure do
+   # ...
+
+   datastore :s3,
+     bucket_name: 'my-bucket',
+     access_key_id: 'blahblahblah',
+     secret_access_key: 'blublublublu'
+
+   # ...
+ end
+ ```
+
+ ### Available configuration options
+
+ ```ruby
+ :bucket_name
+ :access_key_id
+ :secret_access_key
+ :region            # default 'us-east-1', see Dragonfly::S3DataStore::REGIONS for options
+ :storage_headers   # defaults to {'x-amz-acl' => 'public-read'}, can be overridden per-write - see below
+ :url_scheme        # defaults to "http"
+ :url_host          # defaults to "<bucket-name>.s3.amazonaws.com"
+ :use_iam_profile   # boolean - if true, no need for access_key_id or secret_access_key
+ ```
+
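For illustration, a sketch combining several of the options above (the bucket name is a placeholder, and `use_iam_profile: true` stands in for explicit keys, as noted in the list):

```ruby
# Illustrative sketch only (not part of the gem's README).
Dragonfly.app.configure do
  datastore :s3,
    bucket_name: 'my-app-assets',                   # hypothetical bucket
    region: 'eu-west-1',                            # must be a key of Dragonfly::S3DataStore::REGIONS
    use_iam_profile: true,                          # credentials taken from the instance's IAM role
    url_scheme: 'https',                            # default is "http"
    storage_headers: {'x-amz-acl' => 'public-read'} # the default, shown explicitly
end
```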
+ ### Per-storage options
+ ```ruby
+ Dragonfly.app.store(some_file, path: 'some/path.txt', headers: {'x-amz-acl' => 'public-read-write'})
+ ```
+
+ or
+
+ ```ruby
+ class MyModel
+   dragonfly_accessor :photo do
+     storage_path { "some/path/#{some_instance_method}/#{rand(100)}" }
+     storage_headers { {"x-amz-acl" => "public-read-write"} }
+   end
+ end
+ ```
+
+ **BEWARE!!!!** You must make sure the path (which becomes the uid for the content) is unique and changes each time the content
+ is changed; otherwise you could run into caching problems, as the generated urls will be the same for the same uid.
+
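To honour this warning, one approach (an illustrative sketch, not from the gem; `id` and `updated_at` are assumed ActiveRecord attributes) is to bake a value that changes on every save into `storage_path`:

```ruby
# Sketch only: a uid that is unique per record and per update, so a
# replaced attachment always gets a fresh path (and hence a fresh URL).
# `id` and `updated_at` are assumed to be columns on the model.
class MyModel
  dragonfly_accessor :photo do
    storage_path { "photos/#{id}/#{updated_at.to_i}/#{rand(100)}" }
  end
end
```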
+ ### Serving directly from S3
+
+ You can get the S3 url using
+
+ ```ruby
+ Dragonfly.app.remote_url_for('some/uid')
+ ```
+
+ or
+
+ ```ruby
+ my_model.attachment.remote_url
+ ```
+
+ or with an expiring url:
+
+ ```ruby
+ my_model.attachment.remote_url(expires: 3.days.from_now)
+ ```
+
+ or with an https url:
+
+ ```ruby
+ my_model.attachment.remote_url(scheme: 'https') # also configurable for all urls with 'url_scheme'
+ ```
+
+ or with a custom host:
+
+ ```ruby
+ my_model.attachment.remote_url(host: 'custom.domain') # also configurable for all urls with 'url_host'
+ ```
data/Rakefile ADDED
@@ -0,0 +1 @@
+ require "bundler/gem_tasks"
data/dragonfly-s3_data_store.gemspec ADDED
@@ -0,0 +1,24 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'dragonfly/s3_data_store/version'
+
+ Gem::Specification.new do |spec|
+   spec.name          = "dragonfly-s3_data_store"
+   spec.version       = Dragonfly::S3DataStore::VERSION
+   spec.authors       = ["Mark Evans"]
+   spec.email         = ["mark@new-bamboo.co.uk"]
+   spec.description   = %q{S3 data store for Dragonfly}
+   spec.summary       = %q{Data store for storing Dragonfly content (e.g. images) on S3}
+   spec.homepage      = "https://github.com/markevans/dragonfly-s3_data_store"
+   spec.license       = "MIT"
+
+   spec.files         = `git ls-files`.split($/)
+   spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
+   spec.require_paths = ["lib"]
+
+   spec.add_runtime_dependency "dragonfly", "~> 1.0"
+   spec.add_runtime_dependency "fog"
+   spec.add_development_dependency "rspec", "~> 2.0"
+ end
data/lib/dragonfly/s3_data_store/version.rb ADDED
@@ -0,0 +1,5 @@
+ module Dragonfly
+   module S3DataStore
+     VERSION = "1.0.0"
+   end
+ end
data/lib/dragonfly/s3_data_store.rb ADDED
@@ -0,0 +1,163 @@
+ require 'fog'
+ require 'dragonfly'
+
+ Dragonfly::App.register_datastore(:s3){ Dragonfly::S3DataStore }
+
+ module Dragonfly
+   class S3DataStore
+
+     # Exceptions
+     class NotConfigured < RuntimeError; end
+
+     REGIONS = {
+       'us-east-1'      => 's3.amazonaws.com', # default
+       'us-west-1'      => 's3-us-west-1.amazonaws.com',
+       'us-west-2'      => 's3-us-west-2.amazonaws.com',
+       'ap-northeast-1' => 's3-ap-northeast-1.amazonaws.com',
+       'ap-southeast-1' => 's3-ap-southeast-1.amazonaws.com',
+       'eu-west-1'      => 's3-eu-west-1.amazonaws.com',
+       'sa-east-1'      => 's3-sa-east-1.amazonaws.com'
+     }
+
+     def initialize(opts={})
+       @bucket_name = opts[:bucket_name]
+       @access_key_id = opts[:access_key_id]
+       @secret_access_key = opts[:secret_access_key]
+       @region = opts[:region]
+       @storage_headers = opts[:storage_headers] || {'x-amz-acl' => 'public-read'}
+       @url_scheme = opts[:url_scheme] || 'http'
+       @url_host = opts[:url_host]
+       @use_iam_profile = opts[:use_iam_profile]
+     end
+
+     attr_accessor :bucket_name, :access_key_id, :secret_access_key, :region, :storage_headers, :url_scheme, :url_host, :use_iam_profile
+
+     def write(content, opts={})
+       ensure_configured
+       ensure_bucket_initialized
+
+       headers = {'Content-Type' => content.mime_type}
+       headers.merge!(opts[:headers]) if opts[:headers]
+       uid = opts[:path] || generate_uid(content.name || 'file')
+
+       rescuing_socket_errors do
+         content.file do |f|
+           storage.put_object(bucket_name, uid, f, full_storage_headers(headers, content.meta))
+         end
+       end
+
+       uid
+     end
+
+     def read(uid)
+       ensure_configured
+       response = rescuing_socket_errors{ storage.get_object(bucket_name, uid) }
+       [response.body, headers_to_meta(response.headers)]
+     rescue Excon::Errors::NotFound => e
+       nil
+     end
+
+     def destroy(uid)
+       rescuing_socket_errors{ storage.delete_object(bucket_name, uid) }
+     rescue Excon::Errors::NotFound, Excon::Errors::Conflict => e
+       Dragonfly.warn("#{self.class.name} destroy error: #{e}")
+     end
+
+     def url_for(uid, opts={})
+       if opts && opts[:expires]
+         storage.get_object_https_url(bucket_name, uid, opts[:expires])
+       else
+         scheme = opts[:scheme] || url_scheme
+         host = opts[:host] || url_host || "#{bucket_name}.s3.amazonaws.com"
+         "#{scheme}://#{host}/#{uid}"
+       end
+     end
+
+     def domain
+       REGIONS[get_region]
+     end
+
+     def storage
+       @storage ||= begin
+         storage = Fog::Storage.new({
+           :provider => 'AWS',
+           :aws_access_key_id => access_key_id,
+           :aws_secret_access_key => secret_access_key,
+           :region => region,
+           :use_iam_profile => use_iam_profile
+         }.reject {|name, option| option.nil?})
+         storage.sync_clock
+         storage
+       end
+     end
+
+     def bucket_exists?
+       rescuing_socket_errors{ storage.get_bucket_location(bucket_name) }
+       true
+     rescue Excon::Errors::NotFound => e
+       false
+     end
+
+     private
+
+     def ensure_configured
+       unless @configured
+         if use_iam_profile
+           raise NotConfigured, "You need to configure #{self.class.name} with bucket_name" if bucket_name.nil?
+         else
+           [:bucket_name, :access_key_id, :secret_access_key].each do |attr|
+             raise NotConfigured, "You need to configure #{self.class.name} with #{attr}" if send(attr).nil?
+           end
+         end
+         @configured = true
+       end
+     end
+
+     def ensure_bucket_initialized
+       unless @bucket_initialized
+         rescuing_socket_errors{ storage.put_bucket(bucket_name, 'LocationConstraint' => region) } unless bucket_exists?
+         @bucket_initialized = true
+       end
+     end
+
+     def get_region
+       reg = region || 'us-east-1'
+       raise "Invalid region #{reg} - should be one of #{valid_regions.join(', ')}" unless valid_regions.include?(reg)
+       reg
+     end
+
+     def generate_uid(name)
+       "#{Time.now.strftime '%Y/%m/%d/%H/%M/%S'}/#{rand(1000)}/#{name.gsub(/[^\w.]+/, '_')}"
+     end
+
+     def full_storage_headers(headers, meta)
+       storage_headers.merge(meta_to_headers(meta)).merge(headers)
+     end
+
+     def headers_to_meta(headers)
+       json = headers['x-amz-meta-json']
+       if json && !json.empty?
+         Serializer.json_decode(json)
+       elsif marshal_data = headers['x-amz-meta-extra']
+         Utils.stringify_keys(Serializer.marshal_b64_decode(marshal_data))
+       end
+     end
+
+     def meta_to_headers(meta)
+       {'x-amz-meta-json' => Serializer.json_encode(meta)}
+     end
+
+     def valid_regions
+       REGIONS.keys
+     end
+
+     def rescuing_socket_errors(&block)
+       yield
+     rescue Excon::Errors::SocketError => e
+       storage.reload
+       yield
+     end
+
+   end
+ end
+
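To make the datastore's contract concrete, a minimal standalone sketch mirroring the specs that follow (bucket and credentials are placeholders; `Fog.mock!` keeps everything in memory, as the specs themselves do):

```ruby
require 'dragonfly/s3_data_store'

Fog.mock! # in-memory Fog backend, no real AWS calls

store = Dragonfly::S3DataStore.new(
  bucket_name: 'test-bucket',     # placeholder
  access_key_id: 'XXXXXXXXX',     # placeholder
  secret_access_key: 'XXXXXXXXX', # placeholder
  region: 'eu-west-1'
)

content = Dragonfly::Content.new(Dragonfly.app, "eggheads")

uid = store.write(content)          # auto-creates the bucket, returns the generated uid
data, meta = store.read(uid)        # => ["eggheads", {...}] (meta round-trips via x-amz-meta-json)
store.url_for(uid, scheme: 'https') # => "https://test-bucket.s3.amazonaws.com/<uid>"
store.destroy(uid)
store.read(uid)                     # => nil once the object is gone
```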
data/spec/s3_data_store_spec.rb ADDED
@@ -0,0 +1,286 @@
+ require 'spec_helper'
+ require 'dragonfly/spec/data_store_examples'
+ require 'yaml'
+ require 'dragonfly/s3_data_store'
+
+ describe Dragonfly::S3DataStore do
+
+   # To run these tests, put a file ".s3_spec.yml" in the dragonfly root dir, like this:
+   # key: XXXXXXXXXX
+   # secret: XXXXXXXXXX
+   # enabled: true
+   if File.exist?(file = File.expand_path('../../.s3_spec.yml', __FILE__))
+     config = YAML.load_file(file)
+     KEY = config['key']
+     SECRET = config['secret']
+     enabled = config['enabled']
+   else
+     enabled = false
+   end
+
+   if enabled
+
+     # Make sure it's a new bucket name
+     BUCKET_NAME = "dragonfly-test-#{Time.now.to_i.to_s(36)}"
+
+     before(:each) do
+       @data_store = Dragonfly::S3DataStore.new(
+         :bucket_name => BUCKET_NAME,
+         :access_key_id => KEY,
+         :secret_access_key => SECRET,
+         :region => 'eu-west-1'
+       )
+     end
+
+   else
+
+     BUCKET_NAME = 'test-bucket'
+
+     before(:each) do
+       Fog.mock!
+       @data_store = Dragonfly::S3DataStore.new(
+         :bucket_name => BUCKET_NAME,
+         :access_key_id => 'XXXXXXXXX',
+         :secret_access_key => 'XXXXXXXXX',
+         :region => 'eu-west-1'
+       )
+     end
+
+   end
+
+   it_should_behave_like 'data_store'
+
+   let(:app) { Dragonfly.app }
+   let(:content) { Dragonfly::Content.new(app, "eggheads") }
+   let(:new_content) { Dragonfly::Content.new(app) }
+
+   describe "registering with a symbol" do
+     it "registers a symbol for configuring" do
+       app.configure do
+         datastore :s3
+       end
+       app.datastore.should be_a(Dragonfly::S3DataStore)
+     end
+   end
+
+   describe "write" do
+     it "should use the name from the content if set" do
+       content.name = 'doobie.doo'
+       uid = @data_store.write(content)
+       uid.should =~ /doobie\.doo$/
+       new_content.update(*@data_store.read(uid))
+       new_content.data.should == 'eggheads'
+     end
+
+     it "should work ok with files with funny names" do
+       content.name = "A Picture with many spaces in its name (at 20:00 pm).png"
+       uid = @data_store.write(content)
+       uid.should =~ /A_Picture_with_many_spaces_in_its_name_at_20_00_pm_\.png$/
+       new_content.update(*@data_store.read(uid))
+       new_content.data.should == 'eggheads'
+     end
+
+     it "should allow for setting the path manually" do
+       uid = @data_store.write(content, :path => 'hello/there')
+       uid.should == 'hello/there'
+       new_content.update(*@data_store.read(uid))
+       new_content.data.should == 'eggheads'
+     end
+
+     if enabled # Fog.mock! doesn't act consistently here
+       it "should reset the connection and try again if Fog throws a socket EOFError" do
+         @data_store.storage.should_receive(:put_object).exactly(:once).and_raise(Excon::Errors::SocketError.new(EOFError.new))
+         @data_store.storage.should_receive(:put_object).with(BUCKET_NAME, anything, anything, hash_including)
+         @data_store.write(content)
+       end
+
+       it "should just let it raise if Fog throws a socket EOFError again" do
+         @data_store.storage.should_receive(:put_object).and_raise(Excon::Errors::SocketError.new(EOFError.new))
+         @data_store.storage.should_receive(:put_object).and_raise(Excon::Errors::SocketError.new(EOFError.new))
+         expect{
+           @data_store.write(content)
+         }.to raise_error(Excon::Errors::SocketError)
+       end
+     end
+   end
+
+   describe "domain" do
+     it "should default to the US" do
+       @data_store.region = nil
+       @data_store.domain.should == 's3.amazonaws.com'
+     end
+
+     it "should return the correct domain" do
+       @data_store.region = 'eu-west-1'
+       @data_store.domain.should == 's3-eu-west-1.amazonaws.com'
+     end
+
+     it "does raise an error if an unknown region is given" do
+       @data_store.region = 'latvia-central'
+       lambda{
+         @data_store.domain
+       }.should raise_error
+     end
+   end
+
+   describe "not configuring stuff properly" do
+     it "should require a bucket name on write" do
+       @data_store.bucket_name = nil
+       proc{ @data_store.write(content) }.should raise_error(Dragonfly::S3DataStore::NotConfigured)
+     end
+
+     it "should require an access_key_id on write" do
+       @data_store.access_key_id = nil
+       proc{ @data_store.write(content) }.should raise_error(Dragonfly::S3DataStore::NotConfigured)
+     end
+
+     it "should require a secret access key on write" do
+       @data_store.secret_access_key = nil
+       proc{ @data_store.write(content) }.should raise_error(Dragonfly::S3DataStore::NotConfigured)
+     end
+
+     it "should require a bucket name on read" do
+       @data_store.bucket_name = nil
+       proc{ @data_store.read('asdf') }.should raise_error(Dragonfly::S3DataStore::NotConfigured)
+     end
+
+     it "should require an access_key_id on read" do
+       @data_store.access_key_id = nil
+       proc{ @data_store.read('asdf') }.should raise_error(Dragonfly::S3DataStore::NotConfigured)
+     end
+
+     it "should require a secret access key on read" do
+       @data_store.secret_access_key = nil
+       proc{ @data_store.read('asdf') }.should raise_error(Dragonfly::S3DataStore::NotConfigured)
+     end
+
+     if !enabled # this will fail since the specs are not running on an ec2 instance with an iam role defined
+       it 'should allow missing secret key and access key on write if iam profiles are allowed' do
+         # This is slightly brittle but it's annoying waiting for fog doing stuff
+         @data_store.storage.stub(:get_bucket_location => nil, :put_object => nil)
+
+         @data_store.use_iam_profile = true
+         @data_store.secret_access_key = nil
+         @data_store.access_key_id = nil
+         expect{ @data_store.write(content) }.not_to raise_error
+       end
+     end
+
+   end
+
+   describe "autocreating the bucket" do
+     it "should create the bucket on write if it doesn't exist" do
+       @data_store.bucket_name = "dragonfly-test-blah-blah-#{rand(100000000)}"
+       @data_store.write(content)
+     end
+
+     it "should not try to create the bucket on read if it doesn't exist" do
+       @data_store.bucket_name = "dragonfly-test-blah-blah-#{rand(100000000)}"
+       @data_store.send(:storage).should_not_receive(:put_bucket)
+       @data_store.read("gungle").should be_nil
+     end
+   end
+
+   describe "headers" do
+     before(:each) do
+       @data_store.storage_headers = {'x-amz-foo' => 'biscuithead'}
+     end
+
+     it "should allow configuring globally" do
+       @data_store.storage.should_receive(:put_object).with(BUCKET_NAME, anything, anything,
+         hash_including('x-amz-foo' => 'biscuithead')
+       )
+       @data_store.write(content)
+     end
+
+     it "should allow adding per-store" do
+       @data_store.storage.should_receive(:put_object).with(BUCKET_NAME, anything, anything,
+         hash_including('x-amz-foo' => 'biscuithead', 'hello' => 'there')
+       )
+       @data_store.write(content, :headers => {'hello' => 'there'})
+     end
+
+     it "should let the per-store one take precedence" do
+       @data_store.storage.should_receive(:put_object).with(BUCKET_NAME, anything, anything,
+         hash_including('x-amz-foo' => 'override!')
+       )
+       @data_store.write(content, :headers => {'x-amz-foo' => 'override!'})
+     end
+
+     it "should write setting the content type" do
+       @data_store.storage.should_receive(:put_object) do |_, __, ___, headers|
+         headers['Content-Type'].should == 'image/png'
+       end
+       content.name = 'egg.png'
+       @data_store.write(content)
+     end
+
+     it "allows overriding the content type" do
+       @data_store.storage.should_receive(:put_object) do |_, __, ___, headers|
+         headers['Content-Type'].should == 'text/plain'
+       end
+       content.name = 'egg.png'
+       @data_store.write(content, :headers => {'Content-Type' => 'text/plain'})
+     end
+   end
+
+   describe "urls for serving directly" do
+
+     before(:each) do
+       @uid = 'some/path/on/s3'
+     end
+
+     it "should use the bucket subdomain" do
+       @data_store.url_for(@uid).should == "http://#{BUCKET_NAME}.s3.amazonaws.com/some/path/on/s3"
+     end
+
+     it "should use the bucket subdomain for other regions too" do
+       @data_store.region = 'eu-west-1'
+       @data_store.url_for(@uid).should == "http://#{BUCKET_NAME}.s3.amazonaws.com/some/path/on/s3"
+     end
+
+     it "should give an expiring url" do
+       @data_store.url_for(@uid, :expires => 1301476942).should =~
+         %r{^https://#{BUCKET_NAME}\.#{@data_store.domain}/some/path/on/s3\?AWSAccessKeyId=#{@data_store.access_key_id}&Signature=[\w%]+&Expires=1301476942$}
+     end
+
+     it "should allow for using https" do
+       @data_store.url_for(@uid, :scheme => 'https').should == "https://#{BUCKET_NAME}.s3.amazonaws.com/some/path/on/s3"
+     end
+
+     it "should allow for always using https" do
+       @data_store.url_scheme = 'https'
+       @data_store.url_for(@uid).should == "https://#{BUCKET_NAME}.s3.amazonaws.com/some/path/on/s3"
+     end
+
+     it "should allow for customizing the host" do
+       @data_store.url_for(@uid, :host => 'customised.domain.com/and/path').should == "http://customised.domain.com/and/path/some/path/on/s3"
+     end
+
+     it "should allow the url_host to be customised permanently" do
+       url_host = 'customised.domain.com/and/path'
+       @data_store.url_host = url_host
+       @data_store.url_for(@uid).should == "http://#{url_host}/some/path/on/s3"
+     end
+
+   end
+
+   describe "meta" do
+     it "uses the x-amz-meta-json header for meta" do
+       uid = @data_store.write(content, :headers => {'x-amz-meta-json' => Dragonfly::Serializer.json_encode({'potato' => 44})})
+       c, meta = @data_store.read(uid)
+       meta['potato'].should == 44
+     end
+
+     it "works with the deprecated x-amz-meta-extra header (but stringifies its keys)" do
+       uid = @data_store.write(content, :headers => {
+         'x-amz-meta-extra' => Dragonfly::Serializer.marshal_b64_encode(:some => 'meta', :wo => 4),
+         'x-amz-meta-json' => nil
+       })
+       c, meta = @data_store.read(uid)
+       meta['some'].should == 'meta'
+       meta['wo'].should == 4
+     end
+   end
+
+ end
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,7 @@
+ RSpec.configure do |config|
+   config.treat_symbols_as_metadata_keys_with_true_values = true
+   config.run_all_when_everything_filtered = true
+   config.filter_run :focus
+   config.order = 'random'
+ end
+
metadata ADDED
@@ -0,0 +1,98 @@
+ --- !ruby/object:Gem::Specification
+ name: dragonfly-s3_data_store
+ version: !ruby/object:Gem::Version
+   version: 1.0.0
+ platform: ruby
+ authors:
+ - Mark Evans
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2013-11-25 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: dragonfly
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   name: fog
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '2.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '2.0'
+ description: S3 data store for Dragonfly
+ email:
+ - mark@new-bamboo.co.uk
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - LICENSE.txt
+ - README.md
+ - Rakefile
+ - dragonfly-s3_data_store.gemspec
+ - lib/dragonfly/s3_data_store.rb
+ - lib/dragonfly/s3_data_store/version.rb
+ - spec/s3_data_store_spec.rb
+ - spec/spec_helper.rb
+ homepage: https://github.com/markevans/dragonfly-s3_data_store
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.1.11
+ signing_key:
+ specification_version: 4
+ summary: Data store for storing Dragonfly content (e.g. images) on S3
+ test_files:
+ - spec/s3_data_store_spec.rb
+ - spec/spec_helper.rb