radosgw-s3 0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +7 -0
- data/Gemfile +2 -0
- data/LICENSE +202 -0
- data/README.rdoc +126 -0
- data/Rakefile +21 -0
- data/lib/ceph/radosgw.rb +35 -0
- data/lib/radosgw-s3.rb +29 -0
- data/lib/s3/bucket.rb +217 -0
- data/lib/s3/buckets_extension.rb +29 -0
- data/lib/s3/connection.rb +225 -0
- data/lib/s3/exceptions.rb +111 -0
- data/lib/s3/object.rb +262 -0
- data/lib/s3/objects_extension.rb +37 -0
- data/lib/s3/parser.rb +90 -0
- data/lib/s3/request.rb +31 -0
- data/lib/s3/service.rb +98 -0
- data/lib/s3/signature.rb +261 -0
- data/lib/s3/version.rb +3 -0
- data/radosgw-s3.gemspec +30 -0
- data/test/bucket_test.rb +245 -0
- data/test/connection_test.rb +215 -0
- data/test/object_test.rb +223 -0
- data/test/service_test.rb +133 -0
- data/test/signature_test.rb +228 -0
- data/test/test_helper.rb +3 -0
- metadata +157 -0
data/lib/s3/version.rb
ADDED
data/radosgw-s3.gemspec
ADDED
@@ -0,0 +1,30 @@
# -*- encoding: utf-8 -*-

# Load version requiring the canonical "s3/version", otherwise Ruby will think
# is a different file and complaint about a double declaration of S3::VERSION.
$LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
require "s3/version"

Gem::Specification.new do |s|
  s.name = "radosgw-s3"
  s.version = S3::VERSION
  s.platform = Gem::Platform::RUBY
  s.authors = ["Thomas Alrin, Kishorekumar Neelamegam, Rajthilak, Kuba Kuźma"]
  s.email = ["thomasalrin@megam.io", "nkishore@megam.io", "rajthilak@megam.io", "kuba@jah.pl"]
  s.homepage = "http://github.com/megamsys/radosgw-s3"
  s.summary = "Library for accessing ceph objects and buckets"
  s.description = "radosgw-s3 library provides access to your ceph-radosgw. It supports both: radosgw user creation and bucket operation using REST API."
  s.license = "Apache V2"

  s.add_dependency "proxies", "~> 0.2.0"
  s.add_development_dependency "rake"
  s.add_development_dependency "json"
  s.add_development_dependency "test-unit"
  s.add_development_dependency "mocha"
  s.add_development_dependency "bundler"

  s.files = `git ls-files`.split("\n")
  s.executables = `git ls-files`.split("\n").map{|f| f =~ /^bin\/(.*)/ ? $1 : nil}.compact
  s.require_path = "lib"
end
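The gemspec sets "lib" as the require path, so the library is loaded with a single require. A minimal usage sketch follows, assuming the option names exercised in the bundled tests; the credentials below are placeholders, not values from the package:

require "radosgw-s3"

# Placeholder credentials; option names mirror the S3::Service.new calls in data/test/bucket_test.rb.
service = S3::Service.new(
  :access_key_id     => "test",
  :secret_access_key => "secret",
  :use_vhost         => false   # path-style requests rather than virtual-host style
)
# The radosgw endpoint itself is passed to S3::Connection as :host (see data/test/connection_test.rb)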
data/test/bucket_test.rb
ADDED
@@ -0,0 +1,245 @@
require "test_helper"

class BucketTest < Test::Unit::TestCase
  def setup
    @bucket_vhost = S3::Bucket.send(:new, S3::Service.new(access_key_id: 'test', secret_access_key: 'secret'), "Data-Bucket")
    @bucket_path = S3::Bucket.send(:new, S3::Service.new(access_key_id: 'test', secret_access_key: 'secret', use_vhost: false), "Data_Bucket")
    @secure_bucket = S3::Bucket.send(:new, S3::Service.new(access_key_id: 'test', secret_access_key: 'secret', use_ssl: true), "Data-Secured")
    @bucket = @bucket_vhost

    @bucket_location = "EU"
    @bucket_location_body = <<-EOLocation
      <?xml version="1.0" encoding="UTF-8"?>\n<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">EU</LocationConstraint>
    EOLocation

    @response_location = Net::HTTPOK.new("1.1", "200", "OK")
    @response_location.stubs(:body).returns(@bucket_location_body)

    @bucket_owned_by_you_body = <<-EOOwnedByYou
      <?xml version="1.0" encoding="UTF-8"?>\n<Error> <Code>BucketAlreadyOwnedByYou</Code> <Message>Your previous request to create the named bucket succeeded and you already own it.</Message> <BucketName>bucket</BucketName> <RequestId>117D08EA0EC6E860</RequestId> <HostId>4VpMSvmJ+G5+DLtVox6O5cZNgdPlYcjCu3l0n4HjDe01vPxxuk5eTAtcAkUynRyV</HostId> </Error>
    EOOwnedByYou

    @response_owned_by_you = Net::HTTPConflict.new("1.1", "409", "Conflict")
    @response_owned_by_you.stubs(:body).returns(@bucket_owned_by_you_body)

    @bucket_already_exists_body = <<-EOAlreadyExists
      <?xml version="1.0" encoding="UTF-8"?>\n<Error> <Code>BucketAlreadyExists</Code> <Message>The requested bucket name is not available. The bucket namespace is shared by all users of the system. Please select a different name and try again.</Message> <BucketName>bucket</BucketName> <RequestId>4C154D32807C92BD</RequestId> <HostId>/xyHQgXcUXTZQhoO+NUBzbaxbFrIhKlyuaRHFnmcId0bMePvY9Zwg+dyk2LYE4g5</HostId> </Error>
    EOAlreadyExists

    @response_already_exists = Net::HTTPConflict.new("1.1", "409", "Conflict")
    @response_already_exists.stubs(:body).returns(@bucket_already_exists_body)

    @objects_list_empty = []
    @objects_list = [
      S3::Object.send(:new, @bucket, :key => "obj1"),
      S3::Object.send(:new, @bucket, :key => "obj2"),
      S3::Object.send(:new, @bucket, :key => "prefix/"),
      S3::Object.send(:new, @bucket, :key => "prefix/obj3")
    ]

    @objects_list_prefix = [
      S3::Object.send(:new, @bucket, :key => "prefix/"),
      S3::Object.send(:new, @bucket, :key => "prefix/obj3")
    ]

    @response_objects_list_empty_body = <<-EOEmpty
      <?xml version="1.0" encoding="UTF-8"?>\n<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"> <Name>bucket</Name> <Prefix></Prefix> <Marker></Marker> <MaxKeys>1000</MaxKeys> <IsTruncated>false</IsTruncated> </ListBucketResult>
    EOEmpty

    @response_objects_list_empty = Net::HTTPOK.new("1.1", "200", "OK")
    @response_objects_list_empty.stubs(:body).returns(@response_objects_list_empty_body)

    @response_objects_list_body = <<-EOObjects
      <?xml version="1.0" encoding="UTF-8"?>\n<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"> <Name>bucket</Name> <Prefix></Prefix> <Marker></Marker> <MaxKeys>1000</MaxKeys> <IsTruncated>false</IsTruncated> <Contents> <Key>obj1</Key> <LastModified>2009-07-03T10:17:33.000Z</LastModified> <ETag>"99519cdf14c255e580e1b7bca85a458c"</ETag> <Size>1729</Size> <Owner> <ID>df864aeb6f42be43f1d9e60aaabe3f15e245b035a4b79d1cfe36c4deaec67205</ID> <DisplayName>owner</DisplayName> </Owner> <StorageClass>STANDARD</StorageClass> </Contents> <Contents> <Key>obj2</Key> <LastModified>2009-07-03T11:17:33.000Z</LastModified> <ETag>"99519cdf14c255e586e1b12bca85a458c"</ETag> <Size>179</Size> <Owner> <ID>df864aeb6f42be43f1d9e60aaabe3f17e247b037a4b79d1cfe36c4deaec67205</ID> <DisplayName>owner</DisplayName> </Owner> <StorageClass>STANDARD</StorageClass> </Contents> <Contents> <Key>prefix/</Key> <LastModified>2009-07-03T10:17:33.000Z</LastModified> <ETag>"99519cdf14c255e580e1b7bca85a458c"</ETag> <Size>1729</Size> <Owner> <ID>df864aeb6f42be43f1d9e60aaabe3f15e245b035a4b79d1cfe36c4deaec67205</ID> <DisplayName>owner</DisplayName> </Owner> <StorageClass>STANDARD</StorageClass> </Contents> <Contents> <Key>prefix/obj3</Key> <LastModified>2009-07-03T10:17:33.000Z</LastModified> <ETag>"99519cdf14c255e580e1b7bca85a458c"</ETag> <Size>1729</Size> <Owner> <ID>df864aeb6f42be43f1d9e60aaabe3f15e245b035a4b79d1cfe36c4deaec67205</ID> <DisplayName>owner</DisplayName> </Owner> <StorageClass>STANDARD</StorageClass> </Contents> </ListBucketResult>
    EOObjects

    @response_objects_list = Net::HTTPOK.new("1.1", "200", "OK")
    @response_objects_list.stubs(:body).returns(@response_objects_list_body)

    @response_objects_list_body_prefix = <<-EOObjectsPrefix
      <?xml version="1.0" encoding="UTF-8"?>\n<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"> <Name>bucket</Name> <Prefix>prefix</Prefix> <Marker></Marker> <MaxKeys>1000</MaxKeys> <IsTruncated>false</IsTruncated> <Contents> <Key>prefix/</Key> <LastModified>2009-07-03T10:17:33.000Z</LastModified> <ETag>"99519cdf14c255e580e1b7bca85a458c"</ETag> <Size>1729</Size> <Owner> <ID>df864aeb6f42be43f1d9e60aaabe3f15e245b035a4b79d1cfe36c4deaec67205</ID> <DisplayName>owner</DisplayName> </Owner> <StorageClass>STANDARD</StorageClass> </Contents> <Contents> <Key>prefix/obj3</Key> <LastModified>2009-07-03T10:17:33.000Z</LastModified> <ETag>"99519cdf14c255e580e1b7bca85a458c"</ETag> <Size>1729</Size> <Owner> <ID>df864aeb6f42be43f1d9e60aaabe3f15e245b035a4b79d1cfe36c4deaec67205</ID> <DisplayName>owner</DisplayName> </Owner> <StorageClass>STANDARD</StorageClass> </Contents> </ListBucketResult>
    EOObjectsPrefix


    @response_objects_list_prefix = Net::HTTPOK.new("1.1", "200", "OK")
    @response_objects_list_prefix.stubs(:body).returns(@response_objects_list_body_prefix)
  end

  test "name valid" do
    assert_raise ArgumentError do S3::Bucket.send(:new, nil, "") end # should not be valid with empty name
    assert_raise ArgumentError do S3::Bucket.send(:new, nil, "10.0.0.1") end # should not be valid with IP as name
    assert_raise ArgumentError do S3::Bucket.send(:new, nil, "as") end # should not be valid with name shorter than 3 characters
    assert_raise ArgumentError do S3::Bucket.send(:new, nil, "a" * 256) end # should not be valid with name longer than 255 characters
    assert_raise ArgumentError do S3::Bucket.send(:new, nil, ".asdf") end # should not allow special characters as first character
    assert_raise ArgumentError do S3::Bucket.send(:new, nil, "-asdf") end # should not allow special characters as first character
    assert_raise ArgumentError do S3::Bucket.send(:new, nil, "_asdf") end # should not allow special characters as first character

    assert_nothing_raised do
      S3::Bucket.send(:new, nil, "a-a-")
      S3::Bucket.send(:new, nil, "a.a.")
      S3::Bucket.send(:new, nil, "a_a_")
    end
  end

  test "path prefix" do
    expected = ""
    actual = @bucket_vhost.path_prefix
    assert_equal expected, actual

    expected = "Data_Bucket/"
    actual = @bucket_path.path_prefix
    assert_equal expected, actual
  end

  test "host" do
    expected = "Data-Bucket.s3.amazonaws.com"
    actual = @bucket_vhost.host
    assert_equal expected, actual

    expected = "s3.amazonaws.com"
    actual = @bucket_path.host
    assert_equal expected, actual

    expected = "s3.amazonaws.com"
    actual = @secure_bucket.host
    assert_equal expected, actual
  end

  test "vhost" do
    assert @bucket_vhost.vhost?
    assert ! @bucket_path.vhost?
  end

  test "exists" do
    @bucket.expects(:retrieve).returns(@bucket_vhost)
    assert @bucket.exists?

    @bucket.expects(:retrieve).raises(S3::Error::NoSuchBucket.new(nil, nil))
    assert ! @bucket.exists?
  end

  test "location and parse location" do
    @bucket.expects(:bucket_request).with(:get, { :params => { :location => nil } }).returns(@response_location)

    expected = @bucket_location
    actual = @bucket.location
    assert_equal expected, actual

    @bucket.stubs(:bucket_request).with(:get, { :params => { :location => nil } })
    actual = @bucket.location
    assert_equal expected, actual
  end

  test "save" do
    @bucket.expects(:bucket_request).with(:put, { :headers => {} })
    assert @bucket.save
    # mock ensures that bucket_request was called
  end

  test "save failure owned by you" do
    @bucket.expects(:bucket_request).with(:put, { :headers => {} }).raises(S3::Error::BucketAlreadyOwnedByYou.new(409, @response_owned_by_you))
    assert_raise S3::Error::BucketAlreadyOwnedByYou do
      @bucket.save
    end

    @bucket.expects(:bucket_request).with(:put, { :headers => {} }).raises(S3::Error::BucketAlreadyExists.new(409, @response_already_exists))
    assert_raise S3::Error::BucketAlreadyExists do
      @bucket.save
    end
  end

  test "objects" do
    @bucket.expects(:list_bucket).returns(@objects_list_empty)
    expected = @objects_list_empty
    actual = @bucket.objects
    assert_equal expected, actual

    @bucket.stubs(:list_bucket).returns(@objects_list_empty)
    actual = @bucket.objects
    assert_equal expected, actual

    @bucket.stubs(:list_bucket).returns(@objects_list)

    expected = @objects_list
    actual = @bucket.objects
    assert_equal expected, actual

    @bucket.stubs(:list_bucket).with(:prefix=>'prefix').returns(@objects_list_prefix)
    expected = @objects_list_prefix
    actual = @bucket.objects(:prefix => 'prefix')
    assert_equal expected, actual
  end

  test "list bucket and parse objects" do
    @bucket.expects(:bucket_request).with(:get, :params => { :test=>true }).returns(@response_objects_list_empty)
    expected = @objects_list_empty
    actual = @bucket.objects.find_all(:test => true)
    assert_equal expected, actual

    @bucket.expects(:bucket_request).with(:get, :params => { :test => true }).returns(@response_objects_list)
    expected = @objects_list
    actual = @bucket.objects.find_all(:test => true)
    assert_equal expected, actual

    @bucket.expects(:bucket_request).with(:get, :params => { :test => true }).returns(@response_objects_list_prefix)
    expected = @objects_list_prefix
    actual = @bucket.objects(:prefix => "prefix").find_all(:test => true)
    assert_equal expected, actual
  end

  test "destroy" do
    @bucket.expects(:bucket_request).with(:delete)
    assert @bucket.destroy
  end

  test "objects build" do
    @bucket.stubs(:bucket_request)

    expected = "object_name"
    actual = @bucket.objects.build("object_name")
    assert_kind_of S3::Object, actual
    assert_equal expected, actual.key
  end

  test "objects find first" do
    assert_nothing_raised do
      S3::Object.any_instance.stubs(:retrieve).returns(S3::Object.send(:new, nil, :key => "obj2"))
      expected = "obj2"
      actual = @bucket.objects.find_first("obj2")
      assert_equal "obj2", actual.key
    end
  end

  test "objects find first fail" do
    assert_raise S3::Error::NoSuchKey do
      S3::Object.any_instance.stubs(:retrieve).raises(S3::Error::NoSuchKey.new(404, nil))
      @bucket.objects.find_first("obj3")
    end
  end

  test "objects find all on empty list" do
    @bucket.stubs(:list_bucket).returns(@objects_list_empty)
    assert_nothing_raised do
      expected = @objects_list_empty
      actual = @bucket.objects.find_all
      assert_equal expected, actual
    end
  end

  test "objects find all" do
    @bucket.stubs(:list_bucket).returns(@objects_list)
    assert_nothing_raised do
      expected = @objects_list
      actual = @bucket.objects.find_all
      assert_equal expected, actual
    end
  end

  test "objects destroy all" do
    @bucket.stubs(:list_bucket).returns(@objects_list)
    @bucket.objects.each do |obj|
      obj.expects(:destroy)
    end
    @bucket.objects.destroy_all
  end
end
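Taken together, these tests outline the bucket API surface: name validation on construction, save / exists? / destroy, and object listing with an optional :prefix. A minimal sketch of that flow, assuming a buckets proxy on the service (suggested by data/lib/s3/buckets_extension.rb) analogous to the objects proxy used above; bucket and key names are placeholders:

bucket = service.buckets.build("data-bucket")  # buckets proxy assumed, mirroring objects.build
bucket.save                                    # PUT; may raise S3::Error::BucketAlreadyExists or BucketAlreadyOwnedByYou
bucket.exists?                                 # retrieves the bucket, rescuing S3::Error::NoSuchBucket
bucket.objects(:prefix => "prefix").find_all   # lists keys under the prefix as S3::Object instances
bucket.destroy                                 # DELETE the bucket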
data/test/connection_test.rb
ADDED
@@ -0,0 +1,215 @@
require "test_helper"

class ConnectionTest < Test::Unit::TestCase
  def setup
    @connection = S3::Connection.new(
      :access_key_id => "12345678901234567890",
      :host => "192.168.1.248",
      :secret_access_key => "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDF"
    )
    @http_request = Net::HTTP.new("")
    @response_ok = Net::HTTPOK.new("1.1", "200", "OK")
    @response_not_found = Net::HTTPNotFound.new("1.1", "404", "Not Found")
    @response_error = Net::HTTPInternalServerError.new("1.1", "500", "Internal Server Error")
    @response_temporary_redirect = Net::HTTPInternalServerError.new("1.1", "307", "Temporary Redirect")
    @connection.stubs(:http).returns(@http_request)

    @http_request.stubs(:start).returns(@response_ok)
  end

  test "handle response not modify response when ok" do
    assert_nothing_raised do
      response = @connection.request(
        :get,
        :host => "s3.amazonaws.com",
        :path => "/"
      )
      assert_equal @response_ok, response
    end
  end

  test "handle response throws exception when error" do
    response_body = <<-EOFakeBody
      <?xml version=\"1.0\" encoding=\"UTF-8\"?>
      <Error>
        <Code>NoSuchBucket</Code>
        <Message>The specified bucket does not exist</Message>
      </Error>
    EOFakeBody

    @http_request.stubs(:start).returns(@response_not_found)
    @response_not_found.stubs(:body).returns(response_body)

    assert_raise S3::Error::NoSuchBucket do
      response = @connection.request(
        :get,
        :host => "data.example.com.s3.amazonaws.com",
        :path => "/"
      )
    end
  end

  test "handle response throws standard exception when error" do
    @http_request.stubs(:start).returns(@response_error)
    @response_error.stubs(:body)
    assert_raise S3::Error::ResponseError do
      response = @connection.request(
        :get,
        :host => "data.example.com.s3.amazonaws.com",
        :path => "/"
      )
    end

    @response_error.stubs(:body).returns("")
    assert_raise S3::Error::ResponseError do
      response = @connection.request(
        :get,
        :host => "data.example.com.s3.amazonaws.com",
        :path => "/"
      )
    end
  end

  test "parse params empty" do
    expected = ""
    actual = S3::Connection.parse_params({})
    assert_equal expected, actual
  end

  test "parse params only interesting params" do
    expected = ""
    actual = S3::Connection.parse_params(:param1 => "1", :maxkeys => "2")
    assert_equal expected, actual
  end

  test "parse params remove underscore" do
    expected = "max-keys=100"
    actual = S3::Connection.parse_params(:max_keys => 100)
    assert_equal expected, actual
  end

  test "parse params with and without values" do
    params = S3::Connection.parse_params(:max_keys => 100, :prefix => nil)

    splitted_params = params.split("&")
    assert_equal 2, splitted_params.length
    assert splitted_params.include?("max-keys=100")
    assert splitted_params.include?("prefix")
  end

  test "headers empty" do
    expected = {}
    actual = S3::Connection.parse_headers({})
    assert_equal expected, actual
  end

  test "parse only interesting headers" do
    expected = {}
    actual = S3::Connection.parse_headers(
      :accept => "text/*, text/html, text/html;level=1, */*",
      :accept_charset => "iso-8859-2, unicode-1-1;q=0.8"
    )
    assert_equal expected, actual
  end

  test "parse headers remove underscore" do
    expected = {
      "content-type" => nil,
      "x-amz-acl" => nil,
      "x-amz-storage-class" => nil,
      "if-modified-since" => nil,
      "if-unmodified-since" => nil,
      "if-match" => nil,
      "if-none-match" => nil,
      "content-disposition" => nil,
      "content-encoding" => nil
    }
    actual = S3::Connection.parse_headers(
      :content_type => nil,
      :x_amz_acl => nil,
      :x_amz_storage_class => nil,
      :if_modified_since => nil,
      :if_unmodified_since => nil,
      :if_match => nil,
      :if_none_match => nil,
      :content_disposition => nil,
      :content_encoding => nil
    )
    assert_equal expected, actual
  end

  test "parse headers with values" do
    expected = {
      "content-type" => "text/html",
      "x-amz-acl" => "public-read",
      "x-amz-storage-class" => "STANDARD",
      "if-modified-since" => "today",
      "if-unmodified-since" => "tomorrow",
      "if-match" => "1234",
      "if-none-match" => "1243",
      "content-disposition" => "inline",
      "content-encoding" => "gzip"
    }
    actual = S3::Connection.parse_headers(
      :content_type => "text/html",
      :x_amz_acl => "public-read",
      :x_amz_storage_class => "STANDARD",
      :if_modified_since => "today",
      :if_unmodified_since => "tomorrow",
      :if_match => "1234",
      :if_none_match => "1243",
      :content_disposition => "inline",
      :content_encoding => "gzip"
    )
    assert_equal expected, actual
  end

  test "parse headers with range" do
    expected = {
      "range" => "bytes=0-100"
    }
    actual = S3::Connection.parse_headers(
      :range => 0..100
    )
    assert_equal expected, actual
  end

  test "response.body is nil on TemporaryRedirect" do
    @http_request.stubs(:start).returns(@response_temporary_redirect)
    @response_temporary_redirect.stubs(:body).returns(nil)

    assert_nothing_raised do
      response = @connection.request(
        :get,
        :host => "data.example.com.s3.amazonaws.com",
        :path => "/"
      )
      assert_equal nil, response
    end
  end

  test "response body with new host on TemporaryRedirect" do
    response_body = <<-EOFakeBody
      "<?xml version=\"1.0\" encoding=\"UTF-8\"?>
      <Error>
        <Code>TemporaryRedirect</Code>
        <Message>Please re-send this request to the specified temporary endpoint. Continue to use the original request endpoint for future requests.</Message>
        <RequestId>24A0BB91158D470B</RequestId>
        <Bucket>data.example.com</Bucket>
        <HostId>DFcq9ktw5HvWZLduutz8fnVzqtXLwIZcAezc7mgyS7lJ2ux+RChY4qAJGa2fQDjV</HostId>
        <Endpoint>data.example.com.s3-external-3.amazonaws.com</Endpoint>
      </Error>"
    EOFakeBody

    @response_temporary_redirect.stubs(:body).returns(response_body)

    assert_nothing_raised do
      response = @connection.request(
        :get,
        :host => "data.example.com.s3.amazonaws.com",
        :path => "/"
      )
      assert_equal @response_ok, response
    end
  end
end
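At the lowest level every call goes through S3::Connection#request, which returns the Net::HTTP response on success and raises an S3::Error subclass otherwise. A minimal sketch against a radosgw endpoint, reusing the option names and placeholder values from the setup above:

connection = S3::Connection.new(
  :access_key_id     => "12345678901234567890",
  :secret_access_key => "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDF",
  :host              => "192.168.1.248"   # radosgw endpoint in place of s3.amazonaws.com
)

begin
  response = connection.request(:get, :host => "192.168.1.248", :path => "/")
rescue S3::Error::ResponseError => e
  # 4xx/5xx responses surface as S3::Error subclasses, e.g. S3::Error::NoSuchBucket
end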