s4 0.0.3 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/CHANGELOG +3 -0
- data/README.md +37 -24
- data/lib/s4.rb +75 -71
- data/s4.gemspec +1 -0
- data/test/s4_test.rb +48 -37
- metadata +46 -51
data/CHANGELOG
ADDED
data/README.md
CHANGED
@@ -10,45 +10,58 @@ the basics (managing files in a bucket) in a very simple way with a
 Usage
 -----

-    $assets = S4.connect
-
-    $assets.upload
-    $assets.upload
-    $assets.list
-
-    $assets.download
-
-    $assets.delete
-    $assets.list
-
-    $assets.upload
+    $assets = S4.connect url: "s3://0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5EXAMPLE@s3.amazonaws.com/assets.mysite.com"
+
+    $assets.upload "puppy.jpg", "animals/puppy.jpg"
+    $assets.upload "penguin.jpg", "animals/penguin.jpg"
+    $assets.list "animals/" #=> [ "animals/puppy.jpg", "animals/penguin.jpg" ]
+
+    $assets.download "animals/penguin.jpg", "penguin.jpg"
+
+    $assets.delete "animals/penguin.jpg"
+    $assets.list "animals/" #=> [ "animals/puppy.jpg" ]
+
+    $assets.upload "ufo.jpg"
     $assets.list #=> [ "ufo.jpg", "animals/puppy.jpg" ]

+Without a URL given, S4 will attempt to read one from ENV["S3_URL"]:
+
+    $ export S3_URL="s3://0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5EXAMPLE@s3.amazonaws.com/assets.mysite.com"
+    ...
+    $assets = S4.connect
+
+Handy snippet for multiple buckets w/ the same account:
+
+    $ export S3_URL="s3://0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5EXAMPLE@s3.amazonaws.com/%s"
+    ...
+    $assets = S4.connect url: ENV["S3_URL"] % "assets"
+    $videos = S4.connect url: ENV["S3_URL"] % "videos"
+
 Low-level access:
-
-    $assets.get
-    File.open
+
+    $assets.get "animals/gigantic_penguin_movie.mp4" do |response|
+      File.open "gigantic_penguin_movie.mp4", "wb" do |io|
         response.read_body do |chunk|
-          io.write
+          io.write chunk
           puts "."
         end
       end
     end
-
-    $assets.put
+
+    $assets.put StringIO.new("My Novel -- By Ben Alavi...", "r"), "novel.txt", "text/plain"

 Create a bucket (returns the bucket if it already exists and is accessible):

-    $musics = S4.create
-
+    $musics = S4.create url: "s3://0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5EXAMPLE@s3.amazonaws.com/musics.mysite.com"
+
 Make a bucket into a static website:

-    $site = S4.connect
+    $site = S4.connect url: "s3://0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5EXAMPLE@s3.amazonaws.com/website.mysite.com"
     $site.website!
-    $site.put
-    Net::HTTP.get
+    $site.put StringIO.new("<!DOCTYPE html><html><head><title>My Website</title></head><body><h1><blink><font color="yellow">HELLO! WELCOME TO MY WEBSITE</font></blink></h1></body></html>", "r"), "index.html", "text/html"
+    Net::HTTP.get "http://#{$site.website}/" #=> ...HELLO! WELCOME TO MY WEBSITE...

-Plus a handful of other miscellaneous things...
+Plus a handful of other miscellaneous things (see [RDoc](http://rubydoc.info/gems/s4))...

 Acknowledgements
 ----------------
data/lib/s4.rb
CHANGED
@@ -6,7 +6,7 @@ require "json"

 # Simpler AWS S3 library
 class S4
-  VERSION = "0.0.3"
+  VERSION = "0.0.4"

   # sub-resource names which may appear in the query string and also must be
   # signed against.
@@ -15,11 +15,11 @@ class S4
   # Header over-rides which may appear in the query string and also must be
   # signed against (in addition those which begin w/ 'x-amz-')
   HeaderValues = %w( response-content-type response-content-language response-expires reponse-cache-control response-content-disposition response-content-encoding )
-
+
   # List of available ACLs on buckets, first is used as default
   # http://docs.amazonwebservices.com/AmazonS3/latest/API/index.html?RESTBucketPUT.html
   BucketACLs = %w( private public-read public-read-write authenticated-read bucket-owner-read bucket-owner-full-control )
-
+
   # Named policies
   Policy = {
     public_read: %Q{\{
@@ -35,18 +35,18 @@ class S4
   }.freeze

   attr_reader :connection, :access_key_id, :secret_access_key, :bucket, :host
-
+
   # Cannot call #new explicitly (no reason to), use #connect instead
   private_class_method :new
-
+
   class << self
     # Connect to an S3 bucket.
-    #
+    #
     # Pass your S3 connection parameters as URL, or read from ENV["S3_URL"] if
     # none is passed.
-    #
+    #
     # S3_URL format is s3://<access key id>:<secret access key>@s3.amazonaws.com/<bucket>
-    #
+    #
     # i.e.
     # bucket = S4.connect #=> Connects to ENV["S3_URL"]
     # bucket = S4.connect(url: "s3://0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5EXAMPLE@s3.amazonaws.com/bucket")
@@ -55,31 +55,31 @@ class S4
         s4.connect
       end
     end
-
+
     # Create a new S3 bucket.
-    #
+    #
     # See #connect for S3_URL parameters.
-    #
+    #
     # Will create the bucket on S3 and connect to it, or just connect if the
     # bucket already exists and is owned by you.
-    #
+    #
     # i.e.
     # bucket = S4.create
     def create(options={})
       init(options) do |s4|
         s4.create(options[:acl] || BucketACLs.first)
       end
-    end
-
+    end
+
     private
-
+
     def init(options={}, &block)
       new(options.has_key?(:url) ? options[:url] : ENV["S3_URL"]).tap do |s4|
         yield(s4) if block_given?
       end
     end
   end
-
+
   def initialize(s3_url=ENV["S3_URL"])
     raise ArgumentError, "No S3 URL provided. You can set ENV['S3_URL'], too." if s3_url.nil? || s3_url.empty?

@@ -95,31 +95,31 @@ class S4
     @host = url.host
     @bucket = url.path[1..-1]
   end
-
+
   # Connect to the S3 bucket.
-  #
+  #
   # Since S3 doesn't really require a persistent connection this really just
   # makes sure that it *can* connect (i.e. the bucket exists and you own it).
   def connect
     location
-  end
-
+  end
+
   # Create the S3 bucket.
-  #
+  #
   # If the bucket exists and you own it will not do anything, if it exists and
   # you don't own it will raise an error.
-  #
+  #
   # Optionally pass an ACL for the new bucket, see BucketACLs for valid ACLs.
-  #
+  #
   # Default ACL is "private"
   def create(acl=BucketACLs.first)
     raise ArgumentError.new("Invalid ACL '#{acl}' for bucket. Available ACLs are: #{BucketACLs.join(", ")}.") unless BucketACLs.include?(acl)
-
+
     uri = uri("/")
     req = Net::HTTP::Put.new(uri.request_uri)
-
+
     req.add_field "x-amz-acl", acl
-
+
     request uri, req
   end

@@ -132,7 +132,7 @@ class S4
   end

   # Download the file with the given filename to the given destination.
-  #
+  #
   # i.e.
   # bucket.download("images/palm_trees.jpg", "./palm_trees.jpg")
   def download(name, destination=nil)
@@ -152,64 +152,68 @@ class S4

   # Upload the file with the given filename to the given destination in your S3
   # bucket.
-  #
+  #
   # If no destination is given then uploads it with the same filename to the
   # root of your bucket.
-  #
+  #
   # i.e.
   # bucket.upload("./images/1996_animated_explosion.gif", "website_background.gif")
   def upload(name, destination=nil)
     put File.open(name, "rb"), destination || File.basename(name)
   end
-
+
   # Write an IO stream to a file in this bucket.
-  #
+  #
   # Will write file with content_type if given, otherwise will attempt to
   # determine content type by shelling out to POSIX `file` command (if IO
   # stream responds to #path). If no content_type could be determined, will
   # default to application/x-www-form-urlencoded.
-  #
+  #
   # i.e.
   # bucket.put(StringIO.new("Awesome!"), "awesome.txt", "text/plain")
   def put(io, name, content_type=nil)
     uri = uri(name)
     req = Net::HTTP::Put.new(uri.request_uri)
-
+
     content_type = `file -ib #{io.path}`.chomp if !content_type && io.respond_to?(:path)
-
+
     req.add_field "Content-Type", content_type
     req.add_field "Content-Length", io.size
     req.body_stream = io

-
+    target_uri = uri("/#{name}")
+
+    request(target_uri, req)
+
+    target_uri.to_s
   end

   # List bucket contents.
-  #
+  #
   # Optionally pass a prefix to list from (useful for paths).
-  #
+  #
   # i.e.
   # bucket.list("images/") #=> [ "birds.jpg", "bees.jpg" ]
   def list(prefix = "")
     REXML::Document.new(request(uri("", query: "prefix=#{prefix}"))).elements.collect("//Key", &:text)
   end
-
+
   # Turns this bucket into a S3 static website bucket.
-  #
+  #
   # IMPORTANT: by default a policy will be applied to the bucket allowing read
   # access to all files contained in the bucket.
-  #
+  #
   # i.e.
   # site = S4.connect(url: "s3://0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5EXAMPLE@s3.amazonaws.com/mywebsite")
   # site.website!
   # site.put(StringIO.new("<!DOCTYPE html><html><head><title>Robots!</title></head><body><h1>So many robots!!!</h1></body></html>", "r"), "index.html")
-  # Net::HTTP.get(URI.parse("http://mywebsite.s3.amazonaws.com/")) #=> ...<h1>So many robots!!!</h1>...
+  # Net::HTTP.get(URI.parse("http://mywebsite.s3.amazonaws.com/")) #=> ...<h1>So many robots!!!</h1>...
   def website!
     self.policy = Policy[:public_read] % bucket
-
+
     uri = uri("/", query: "website")
     req = Net::HTTP::Put.new(uri.request_uri)
-
+
     req.body = <<-XML
       <WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
        <IndexDocument>
@@ -220,23 +224,23 @@ class S4
        </ErrorDocument>
      </WebsiteConfiguration>
    XML
-
-    request uri, req
+
+    request uri, req
   end
-
+
   # The URL of the bucket for use as a website.
   def website
     "#{bucket}.s3-website-#{location}.amazonaws.com"
   end
-
+
   # Sets the given policy on the bucket.
-  #
+  #
   # Policy can be given as a string which will be applied as given, a hash
   # which will be converted to json, or the name of a pre-defined policy as a
   # symbol.
-  #
+  #
   # See S4::Policy for pre-defined policies.
-  #
+  #
   # i.e.
   # $s4 = S4.connect
   # $s4.policy = :public_read #=> apply named policy
@@ -244,38 +248,38 @@ class S4
   # $s4.policy = "{\"Statement\": \"...\"}" #=> apply policy as string
   def policy=(policy)
     policy = Policy[policy] % bucket if policy.is_a?(Symbol)
-
+
     uri = uri("/", query: "policy")
     req = Net::HTTP::Put.new(uri.request_uri)
-
+
     req.body = policy.is_a?(String) ? policy : policy.to_json
-
+
     request uri, req
   end
-
+
   # Gets the policy on the bucket.
   def policy
     request uri("/", query: "policy")
   end
-
+
   # Gets information about the buckets location.
   def location
     response = request uri("/", query: "location")
     location = REXML::Document.new(response).elements["LocationConstraint"].text
-
+
     location || "us-east-1"
   end
-
+
   def inspect
     "#<S4: bucket='#{bucket}'>"
   end
-
+
   private
-
+
   def connection
     @connection ||= Net::HTTP::Persistent.new("aws-s3/#{bucket}")
   end
-
+
   def uri(path, options={})
     URI::HTTP.build(options.merge(host: host, path: "/#{bucket}/#{URI.escape(path.sub(/^\//, ""))}"))
   end
@@ -283,7 +287,7 @@ class S4
   # Makes a request to the S3 API.
   def request(uri, request=nil)
     request ||= Net::HTTP::Get.new(uri.request_uri)
-
+
     connection.request(uri, sign(uri, request)) do |response|
       case response
       when Net::HTTPSuccess
@@ -310,9 +314,9 @@ class S4

   def signature(uri, request)
     query = signed_params(uri.query) if uri.query
-
+
     string_to_sign = "#{request.class::METHOD}\n\n#{request["Content-Type"]}\n#{request["Date"]}\n#{canonicalized_headers(request)}" + "#{uri.path}" + (query ? "?#{query}" : "")
-
+
     Base64.encode64(
       OpenSSL::HMAC.digest(
         OpenSSL::Digest::Digest.new("sha1"),
@@ -321,7 +325,7 @@ class S4
       )
     ).chomp
   end
-
+
   # Returns the given query string consisting only of query parameters which
   # need to be signed against, or nil if there are none in the query string.
   def signed_params(query)
@@ -332,31 +336,31 @@ class S4
       collect{ |pair| pair.join("=") }.
       sort.
       join("&")
-
+
     signed unless signed.empty?
   end
-
+
   def canonicalized_headers(request)
     headers = request.to_hash.
       reject{ |k, v| k !~ /x-amz-/ && !HeaderValues.include?(k) }.
       collect{ |k, v| "#{k}:#{v.join(",")}" }.
       sort.
       join("\n")
-
+
     "#{headers}\n" unless headers.empty?
   end
-
+
   # Base class of all S3 Errors
   class Error < ::RuntimeError
     attr_reader :code, :status, :response
-
+
     def initialize(response)
       @response = REXML::Document.new(response.body).elements["//Error"]

       @status = response.code
       @code = @response.elements["Code"].text
-
+
       super "#{@status}: #{@code} -- " + @response.elements["Message"].text
     end
-  end
+  end
 end
data/s4.gemspec
CHANGED
data/test/s4_test.rb
CHANGED
@@ -1,5 +1,5 @@
 raise "You need to have ENV[\"S3_URL\"] set for the tests to connect to your testing bucket on S3. Format is: 's3://<access key id>:<secret access key>@s3.amazonaws.com/<s4 test bucket>'." unless ENV["S3_URL"]
-raise "You need to have ENV[\"S4_NEW_BUCKET\"], which will be dynamically created and destroyed for testing bucket creation. i.e.: 's4-test-bucketthatdoesntexist'." unless ENV["
+raise "You need to have ENV[\"S4_NEW_BUCKET\"], which will be dynamically created and destroyed for testing bucket creation. i.e.: 's4-test-bucketthatdoesntexist'." unless ENV["S3_NEW_BUCKET"]

 require "contest"
 require "timecop"
@@ -30,28 +30,28 @@ class S4Test < Test::Unit::TestCase
     `s3cmd del 's3://#{TestBucket}/*' 2>&1`
     `s3cmd rb 's3://#{TestBucket}' 2>&1`
   end
-
+
   setup do
     FileUtils.rm_rf(output)
     FileUtils.mkdir_p(output)
   end
-
+
   context "connecting to S3" do
     should "return connected bucket if can connect" do
       s4 = S4.connect
       assert s4
     end
-
+
     should "bark when no URL is provided" do
       assert_raise(ArgumentError) { S4.connect(url: "") }
       assert_raise(ArgumentError) { S4.connect(url: nil) }
       assert_raise(URI::InvalidURIError) { S4.connect(url: "s3://foo:bar/baz") }
     end
-
+
     should "raise error if cannot connect" do
       `s3cmd del 's3://#{NewBucket}/*' 2>&1`
       `s3cmd rb 's3://#{NewBucket}' 2>&1`
-
+
       assert_raise(S4::Error) do
         S4.connect url: ENV["S3_URL"].sub(TestBucket, NewBucket)
       end
@@ -65,7 +65,7 @@ class S4Test < Test::Unit::TestCase
         S4.create url: ENV["S3_URL"].sub(TestBucket, "foo")
       end
     end
-
+
     should "capture code of S3 error" do
       begin
         S4.create url: ENV["S3_URL"].sub(TestBucket, "foo")
@@ -74,31 +74,31 @@ class S4Test < Test::Unit::TestCase
       end
     end
   end
-
-  context "creating a bucket" do
+
+  context "creating a bucket" do
     setup do
       `s3cmd del 's3://#{NewBucket}/*' 2>&1`
       `s3cmd rb 's3://#{NewBucket}' 2>&1`
     end
-
+
     should "create a bucket" do
       assert_equal "ERROR: Bucket '#{NewBucket}' does not exist", `s3cmd ls 's3://#{NewBucket}' 2>&1`.chomp
-
+
       S4.create url: ENV["S3_URL"].sub(TestBucket, NewBucket)
-
+
       assert_equal "", `s3cmd ls 's3://#{NewBucket}' 2>&1`.chomp
     end
-
+
     should "create bucket with public-read ACL" do
       # TODO...
     end
-
+
     should "raise if bucket creation failed" do
       assert_raise(S4::Error) do
         S4.create url: ENV["S3_URL"].sub(TestBucket, "foo")
       end
     end
-
+
     should "raise if given invalid ACL" do
       begin
         S4.create url: ENV["S3_URL"], acl: "foo"
@@ -107,27 +107,27 @@ class S4Test < Test::Unit::TestCase
       end
     end
   end
-
+
   context "making a website" do
     setup do
       delete_test_bucket
     end
-
+
     should "make bucket a website" do
       s4 = S4.create
-
+
       begin
         open("http://#{s4.website}/")
       rescue OpenURI::HTTPError => e
         assert_match /NoSuchWebsiteConfiguration/, e.io.read
       end
-
+
       s4.put(StringIO.new("<!DOCTYPE html><html><head><title>Robot Page</title></head><body><h1>Robots!</h1></body></html>", "r"), "index.html", "text/html")
       s4.put(StringIO.new("<!DOCTYPE html><html><head><title>404!</title></head><body><h1>Oh No 404!!!</h1></body></html>", "r"), "404.html", "text/html")
       s4.website!
-
+
       assert_match /Robots!/, open("http://#{s4.website}/").read
-
+
       begin
         open("http://#{s4.website}/foo.html")
       rescue OpenURI::HTTPError => e
@@ -136,55 +136,66 @@ class S4Test < Test::Unit::TestCase
       end
     end
   end
-
+
   context "setting policy on a bucket" do
     setup do
       delete_test_bucket
       @s4 = S4.create
     end
-
+
     should "make all objects public by policy" do
       @s4.upload(fixture("foo.txt"))
-
+
       begin
         open("http://s3.amazonaws.com/#{TestBucket}/foo.txt")
       rescue OpenURI::HTTPError => e
         assert_match /403 Forbidden/, e.message
       end
-
+
       @s4.policy = :public_read
-
+
       assert_equal "abc123", open("http://s3.amazonaws.com/#{TestBucket}/foo.txt").read
     end
   end
-
+
   context "uploading to bucket" do
     setup do
       delete_test_bucket
       @s4 = S4.create
       @s4.policy = :public_read
     end
-
+
     should "upload foo.txt" do
       @s4.upload(fixture("foo.txt"))
-
+
       foo = open("http://s3.amazonaws.com/#{TestBucket}/foo.txt")
-
+
       assert_equal "abc123", foo.read
-      assert_equal "text/plain", foo.content_type
+      assert_equal "text/plain", foo.content_type
     end
-
+
     should "use given content_type" do
-      @s4.put
+      @s4.put StringIO.new("abcdef", "r"), "bar.txt", "text/foobar"
       assert_equal "text/foobar", open("http://s3.amazonaws.com/#{TestBucket}/bar.txt").content_type
     end
+
+    should "upload to a path" do
+      @s4.put StringIO.new("zoinks!", "r"), "foo/bar.txt", "text/plain"
+      assert_equal "zoinks!", open("http://s3.amazonaws.com/#{TestBucket}/foo/bar.txt").read
+    end
+
+    should "return the URL to the uploaded file" do
+      url = @s4.put StringIO.new("zoinks!", "r"), "foo/bar.txt", "text/plain"
+      assert_kind_of URI::HTTP, URI.parse(url)
+      assert_equal "zoinks!", open(url).read
+    end
   end
-
+
   context "when connected" do
     setup do
       @s4 = S4.connect
     end
-
+
     should "download foo.txt" do
       `s3cmd put #{fixture("foo.txt")} s3://#{@s4.bucket}/foo.txt`
       @s4.download("foo.txt", output("foo.txt"))
@@ -198,7 +209,7 @@ class S4Test < Test::Unit::TestCase

       assert !File.exists?(output("foo.txt"))
     end
-
+
     should "return false when downloading non-existent files" do
       `s3cmd del 's3://#{@s4.bucket}/foo.txt'`
       assert_equal nil, @s4.download("foo.txt", output("foo.txt"))
@@ -223,7 +234,7 @@ class S4Test < Test::Unit::TestCase
     should "return list of items in bucket" do
       `s3cmd del 's3://#{@s4.bucket}/*'`
       `s3cmd del 's3://#{@s4.bucket}/abc/*'`
-
+
       `s3cmd put #{fixture("foo.txt")} s3://#{@s4.bucket}/foo.txt`
       `s3cmd put #{fixture("foo.txt")} s3://#{@s4.bucket}/bar.txt`
       `s3cmd put #{fixture("foo.txt")} s3://#{@s4.bucket}/baz.txt`
metadata
CHANGED
@@ -1,91 +1,86 @@
---- !ruby/object:Gem::Specification
+--- !ruby/object:Gem::Specification
 name: s4
-version: !ruby/object:Gem::Version
+version: !ruby/object:Gem::Version
+  version: 0.0.4
 prerelease:
-  version: 0.0.3
 platform: ruby
-authors:
+authors:
 - Ben Alavi
 autorequire:
 bindir: bin
 cert_chain: []
-
-
-dependencies:
-- !ruby/object:Gem::Dependency
+date: 2011-09-20 00:00:00.000000000 -03:00
+default_executable:
+dependencies:
+- !ruby/object:Gem::Dependency
   name: net-http-persistent
-
-  requirement: &id001 !ruby/object:Gem::Requirement
+  requirement: &2153639160 !ruby/object:Gem::Requirement
     none: false
-    requirements:
-    - -
-      - !ruby/object:Gem::Version
-        version:
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '1.7'
   type: :runtime
-  version_requirements: *id001
-- !ruby/object:Gem::Dependency
-  name: cutest
   prerelease: false
-
+  version_requirements: *2153639160
+- !ruby/object:Gem::Dependency
+  name: cutest
+  requirement: &2153638740 !ruby/object:Gem::Requirement
     none: false
-    requirements:
-    - -
-      - !ruby/object:Gem::Version
-        version:
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
   type: :development
-  version_requirements: *id002
-- !ruby/object:Gem::Dependency
-  name: timecop
   prerelease: false
-
+  version_requirements: *2153638740
+- !ruby/object:Gem::Dependency
+  name: timecop
+  requirement: &2153638200 !ruby/object:Gem::Requirement
     none: false
-    requirements:
+    requirements:
     - - ~>
-      - !ruby/object:Gem::Version
-        version:
+      - !ruby/object:Gem::Version
+        version: '0.3'
   type: :development
-
+  prerelease: false
+  version_requirements: *2153638200
 description: Simple API for AWS S3
-email:
+email:
 - ben.alavi@citrusbyte.com
 executables: []
-
 extensions: []
-
 extra_rdoc_files: []
-
-
+files:
+- CHANGELOG
 - README.md
 - rakefile
 - s4.gemspec
 - lib/s4.rb
 - test/s4_test.rb
+has_rdoc: true
 homepage: http://github.com/benalavi/s4
 licenses: []
-
 post_install_message:
 rdoc_options: []
-
-require_paths:
+require_paths:
 - lib
-required_ruby_version: !ruby/object:Gem::Requirement
+required_ruby_version: !ruby/object:Gem::Requirement
   none: false
-  requirements:
-  - -
-    - !ruby/object:Gem::Version
-      version:
-required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
   none: false
-  requirements:
-  - -
-    - !ruby/object:Gem::Version
-      version:
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
 requirements: []
-
 rubyforge_project:
-rubygems_version: 1.
+rubygems_version: 1.6.2
 signing_key:
 specification_version: 3
 summary: Simple API for AWS S3
 test_files: []
-
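The dependency entries above resolve to net-http-persistent >= 1.7 at runtime, plus cutest and timecop ~> 0.3 for development. A hypothetical Gemfile expressing the same constraints (Bundler usage and the source URL are assumptions; they are not part of the gem):

    # Hypothetical Gemfile -- mirrors the requirements listed in the metadata above.
    source "http://rubygems.org"

    gem "s4", "0.0.4"
    gem "net-http-persistent", ">= 1.7"

    group :development do
      gem "cutest"
      gem "timecop", "~> 0.3"
    end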