s3-rb 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/Gemfile +9 -0
- data/Gemfile.lock +132 -0
- data/README.md +215 -0
- data/Rakefile +11 -0
- data/lib/s3/bucket_create_request.rb +26 -0
- data/lib/s3/bucket_delete_request.rb +11 -0
- data/lib/s3/bucket_head_request.rb +19 -0
- data/lib/s3/bucket_head_result.rb +8 -0
- data/lib/s3/bucket_list_request.rb +31 -0
- data/lib/s3/bucket_list_result.rb +31 -0
- data/lib/s3/bucket_methods.rb +45 -0
- data/lib/s3/error_result.rb +51 -0
- data/lib/s3/errors.rb +71 -0
- data/lib/s3/helpers.rb +95 -0
- data/lib/s3/module_methods.rb +37 -0
- data/lib/s3/multipart_abort_request.rb +14 -0
- data/lib/s3/multipart_complete_options.rb +9 -0
- data/lib/s3/multipart_complete_request.rb +38 -0
- data/lib/s3/multipart_complete_result.rb +11 -0
- data/lib/s3/multipart_create_request.rb +33 -0
- data/lib/s3/multipart_create_result.rb +10 -0
- data/lib/s3/multipart_list_request.rb +41 -0
- data/lib/s3/multipart_list_result.rb +32 -0
- data/lib/s3/multipart_methods.rb +87 -0
- data/lib/s3/multipart_parts_request.rb +40 -0
- data/lib/s3/multipart_parts_result.rb +33 -0
- data/lib/s3/multipart_upload_request.rb +25 -0
- data/lib/s3/multipart_upload_result.rb +9 -0
- data/lib/s3/object_copy_options.rb +24 -0
- data/lib/s3/object_copy_request.rb +37 -0
- data/lib/s3/object_copy_result.rb +9 -0
- data/lib/s3/object_delete_batch_request.rb +68 -0
- data/lib/s3/object_delete_batch_result.rb +47 -0
- data/lib/s3/object_delete_request.rb +18 -0
- data/lib/s3/object_get_request.rb +23 -0
- data/lib/s3/object_head_request.rb +35 -0
- data/lib/s3/object_head_result.rb +14 -0
- data/lib/s3/object_list_request.rb +47 -0
- data/lib/s3/object_list_result.rb +40 -0
- data/lib/s3/object_methods.rb +134 -0
- data/lib/s3/object_put_options.rb +26 -0
- data/lib/s3/object_put_request.rb +36 -0
- data/lib/s3/object_put_result.rb +9 -0
- data/lib/s3/presign_get_request.rb +64 -0
- data/lib/s3/presign_methods.rb +24 -0
- data/lib/s3/presign_put_request.rb +64 -0
- data/lib/s3/request.rb +167 -0
- data/lib/s3/response_methods.rb +13 -0
- data/lib/s3/schema_options.rb +19 -0
- data/lib/s3/service.rb +65 -0
- data/lib/s3/version.rb +3 -0
- data/lib/s3-rb.rb +1 -0
- data/lib/s3.rb +70 -0
- data/readme/buckets.md +115 -0
- data/readme/multipart.md +277 -0
- data/readme/objects.md +270 -0
- data/readme/presign.md +150 -0
- data/readme/testing.md +203 -0
- metadata +245 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: 8da7974f1a862a8b59312aa1b2a14e62acb50e0c395a59cb0e9163c81ca6c1d9
+  data.tar.gz: 271348a115ab248f843912064ce39ec87466165706304cbbfda8919f47e3d00b
+SHA512:
+  metadata.gz: 89d5da5bdae4ba11bf4fd3de8145d2f992a012b4d28f2cba73c9752af4d220f788fcefb8e45fe73aea16eb31ee3d6286fd989f83ba1768cb60e4950a9063ee8c
+  data.tar.gz: a57ec119c7b94cc518a10869e95f58937df0ef553cf2d7e0aafab14341980c96fcb54e5bcf0885566b0d3cb268ecd24b9cfc9cbb793147311177ca0e4a1047b5
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,132 @@
+PATH
+  remote: .
+  specs:
+    s3-rb (0.1.0)
+      dynamicschema (~> 2.0)
+      faraday (~> 2.0)
+      faraday-net_http_persistent (~> 2.0)
+      nokogiri (~> 1.0)
+
+GEM
+  remote: https://rubygems.org/
+  specs:
+    addressable (2.8.8)
+      public_suffix (>= 2.0.2, < 8.0)
+    aws-eventstream (1.4.0)
+    aws-partitions (1.1211.0)
+    aws-sdk-core (3.241.4)
+      aws-eventstream (~> 1, >= 1.3.0)
+      aws-partitions (~> 1, >= 1.992.0)
+      aws-sigv4 (~> 1.9)
+      base64
+      bigdecimal
+      jmespath (~> 1, >= 1.6.1)
+      logger
+    aws-sdk-kms (1.121.0)
+      aws-sdk-core (~> 3, >= 3.241.4)
+      aws-sigv4 (~> 1.5)
+    aws-sdk-s3 (1.213.0)
+      aws-sdk-core (~> 3, >= 3.241.4)
+      aws-sdk-kms (~> 1)
+      aws-sigv4 (~> 1.5)
+    aws-sigv4 (1.12.1)
+      aws-eventstream (~> 1, >= 1.0.2)
+    base64 (0.3.0)
+    benchmark-ips (2.14.0)
+    bigdecimal (4.0.1)
+    connection_pool (3.0.2)
+    crack (1.0.1)
+      bigdecimal
+      rexml
+    date (3.5.1)
+    debug (1.11.1)
+      irb (~> 1.10)
+      reline (>= 0.3.8)
+    dynamicschema (2.2.0)
+    erb (6.0.1)
+    faraday (2.14.0)
+      faraday-net_http (>= 2.0, < 3.5)
+      json
+      logger
+    faraday-net_http (3.4.2)
+      net-http (~> 0.5)
+    faraday-net_http_persistent (2.3.1)
+      faraday (~> 2.5)
+      net-http-persistent (>= 4.0.4, < 5)
+    hashdiff (1.2.1)
+    io-console (0.8.2)
+    irb (1.16.0)
+      pp (>= 0.6.0)
+      rdoc (>= 4.0.0)
+      reline (>= 0.4.2)
+    jmespath (1.6.2)
+    json (2.18.0)
+    logger (1.7.0)
+    minitest (5.27.0)
+    net-http (0.9.1)
+      uri (>= 0.11.1)
+    net-http-persistent (4.0.8)
+      connection_pool (>= 2.2.4, < 4)
+    nokogiri (1.19.0-aarch64-linux-gnu)
+      racc (~> 1.4)
+    nokogiri (1.19.0-aarch64-linux-musl)
+      racc (~> 1.4)
+    nokogiri (1.19.0-arm-linux-gnu)
+      racc (~> 1.4)
+    nokogiri (1.19.0-arm-linux-musl)
+      racc (~> 1.4)
+    nokogiri (1.19.0-arm64-darwin)
+      racc (~> 1.4)
+    nokogiri (1.19.0-x86_64-darwin)
+      racc (~> 1.4)
+    nokogiri (1.19.0-x86_64-linux-gnu)
+      racc (~> 1.4)
+    nokogiri (1.19.0-x86_64-linux-musl)
+      racc (~> 1.4)
+    pp (0.6.3)
+      prettyprint
+    prettyprint (0.2.0)
+    psych (5.3.1)
+      date
+      stringio
+    public_suffix (7.0.2)
+    racc (1.8.1)
+    rake (13.3.1)
+    rdoc (7.1.0)
+      erb
+      psych (>= 4.0.0)
+      tsort
+    reline (0.6.3)
+      io-console (~> 0.5)
+    rexml (3.4.4)
+    stringio (3.2.0)
+    tsort (0.2.0)
+    uri (1.1.1)
+    vcr (6.4.0)
+    webmock (3.26.1)
+      addressable (>= 2.8.0)
+      crack (>= 0.3.2)
+      hashdiff (>= 0.4.0, < 2.0.0)
+
+PLATFORMS
+  aarch64-linux-gnu
+  aarch64-linux-musl
+  arm-linux-gnu
+  arm-linux-musl
+  arm64-darwin
+  x86_64-darwin
+  x86_64-linux-gnu
+  x86_64-linux-musl
+
+DEPENDENCIES
+  aws-sdk-s3 (~> 1.0)
+  benchmark-ips (~> 2.0)
+  debug (~> 1.0)
+  minitest (~> 5.0)
+  rake (~> 13.0)
+  s3-rb!
+  vcr (~> 6.0)
+  webmock (~> 3.0)
+
+BUNDLED WITH
+   2.7.1
data/README.md
ADDED
@@ -0,0 +1,215 @@
+# Yet another S3 gem
+
+This is a lightweight, low dependency, high performance, high compatibility, 'bare metal' S3 API gem. I created it primarily because I wanted to avoid including Amazon's AWS API gem and all its various dependencies. The current implementation depends on Faraday, the Faraday 'net-http-persistent' adapter, Nokogiri, and the DynamicSchema gem. Although other S3 gems exist for Ruby I found that most of these have had no updates for many years and lacked compatibility with various alternate S3 providers.
+
+I chose to make this implementation 'bare metal' in that there is minimal abstraction layer atop the S3 API - not even an iterator for the object list. You are then free to build your own abstraction best suited to the semantics of your application or gem.
+
+At its most basic the implementation provides a request class per operation which returns a result structure. In some cases, for operations with many parameters, there is an options structure you can build. The operation is executed by 'submitting' an instance of the request.
+
+```ruby
+require 's3'
+
+# build the options for the put operation
+options = S3::ObjectPutOptions.build( content_type: 'text/plain',
+                                      acl: :public_read,
+                                      storage_class: :standard_ia )
+
+# create the request and submit it
+request = S3::ObjectPutRequest.new( access_key_id: 'AKIA...',
+                                    secret_access_key: '...',
+                                    region: 'us-east-1' )
+
+response = request.submit( bucket: 'my-bucket',
+                           key: 'hello.txt',
+                           body: 'Hello, World!',
+                           options: options )
+
+# check for success and read the result
+if response.success?
+  result = response.result
+  puts result.etag
+else
+  result = response.result
+  puts "error: #{ result.error_code } - #{ result.error_description }"
+end
+```
+
+The operation methods - similar to those in the AWS gem - then create a matching request object with the given arguments and submit a request, typically returning the same result structure.
+
+```ruby
+require 's3'
+
+# create the service
+s3 = S3::Service.new( access_key_id: 'AKIA...',
+                      secret_access_key: '...',
+                      region: 'us-east-1' )
+
+# call the method directly; it raises an exception on error
+result = s3.object_put( bucket: 'my-bucket',
+                        key: 'hello.txt',
+                        body: 'Hello, World!',
+                        content_type: 'text/plain',
+                        acl: :public_read,
+                        storage_class: :standard_ia )
+
+puts result.etag
+```
+
+## Installation
+
+Add to your Gemfile:
+
+```ruby
+gem 's3-rb'
+```
+
+Or install directly:
+
+```bash
+gem install s3-rb
+```
+
+## Quick Start
+
+```ruby
+require 's3'
+
+# create the service for AWS
+s3 = S3::Service.new( access_key_id: 'AKIA...',
+                      secret_access_key: '...',
+                      region: 'us-east-1' )
+
+# create a service for S3-compatible providers by specifying the endpoint
+s3 = S3::Service.new( access_key_id: '...',
+                      secret_access_key: '...',
+                      endpoint: 'https://s3.us-east-005.dream.io',
+                      region: 'us-east-005' )
+
+# enable connection pooling for better performance
+s3 = S3::Service.new( access_key_id: '...',
+                      secret_access_key: '...',
+                      region: 'us-east-1',
+                      connection_pool: 5 )
+
+# upload an object
+s3.object_put( bucket: 'my-bucket',
+               key: 'hello.txt',
+               body: 'Hello, World!',
+               content_type: 'text/plain' )
+
+# download an object
+content = s3.object_get( bucket: 'my-bucket', key: 'hello.txt' )
+
+# list objects
+result = s3.object_list( bucket: 'my-bucket', prefix: 'documents/' )
+result.each { | obj | puts "#{ obj.key } - #{ obj.size } bytes" }
+
+# handle pagination explicitly
+result = s3.object_list( bucket: 'my-bucket', max_keys: 100 )
+while result.truncated?
+  result = s3.object_list( bucket: 'my-bucket',
+                           max_keys: 100,
+                           continuation_token: result.next_continuation_token )
+  result.each { | obj | process( obj ) }
+end
+```
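The Quick Start above drives pagination by hand, and the README notes the gem deliberately ships no iterator for object listings. A minimal sketch of one such abstraction, assuming an `S3::Service` instance `s3` as created above; the `each_object` helper name is hypothetical and not part of the package:

```ruby
# Hypothetical wrapper around the documented pagination fields
# ( truncated?, next_continuation_token ); not part of s3-rb itself.
def each_object( s3, bucket:, prefix: nil, max_keys: 1000 )
  params = { bucket: bucket, prefix: prefix, max_keys: max_keys }.compact

  loop do
    result = s3.object_list( **params )
    result.each { | object | yield object }

    break unless result.truncated?
    params[ :continuation_token ] = result.next_continuation_token
  end
end

# usage
each_object( s3, bucket: 'my-bucket', prefix: 'documents/' ) { | obj | puts obj.key }
```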
+
+## Error Handling
+
+All S3 errors are raised as exceptions:
+
+```ruby
+begin
+  s3.object_get( bucket: 'my-bucket', key: 'missing.txt' )
+rescue S3::NoSuchKeyError => e
+  puts "object not found: #{ e.message }"
+rescue S3::AccessDeniedError => e
+  puts "access denied: #{ e.message }"
+rescue S3::Error => e
+  puts "s3 error: #{ e.code } - #{ e.message }"
+end
+```
+
+## Streaming
+
+For large files, use streaming to avoid loading everything into memory:
+
+```ruby
+# streaming upload from a file
+File.open( 'large-file.zip', 'rb' ) do | file |
+  s3.object_put( bucket: 'my-bucket',
+                 key: 'large-file.zip',
+                 body: file,
+                 content_type: 'application/zip' )
+end
+
+# streaming download to a file
+File.open( 'download.zip', 'wb' ) do | file |
+  s3.object_get( bucket: 'my-bucket', key: 'large-file.zip' ) do | chunk |
+    file.write( chunk )
+  end
+end
+```
+
+## Storage Classes and ACLs
+
+Pass symbols or lowercase strings - they're automatically normalized:
+
+```ruby
+# these are equivalent
+s3.object_put( bucket: 'b', key: 'k', body: 'x', storage_class: :standard_ia )
+s3.object_put( bucket: 'b', key: 'k', body: 'x', storage_class: 'standard_ia' )
+s3.object_put( bucket: 'b', key: 'k', body: 'x', storage_class: 'STANDARD_IA' )
+
+# same for ACLs
+s3.object_put( bucket: 'b', key: 'k', body: 'x', acl: :public_read )
+s3.object_put( bucket: 'b', key: 'k', body: 'x', acl: 'public-read' )
+```
+
+## API Reference
+
+### Bucket Operations
+
+| Method | Description |
+|--------|-------------|
+| `bucket_list` | List all buckets |
+| `bucket_create( bucket:, region:, acl: )` | Create a bucket |
+| `bucket_delete( bucket: )` | Delete a bucket |
+| `bucket_head( bucket: )` | Check bucket existence, get region |
+| `bucket_exists?( bucket: )` | Returns true/false |
+
+### Object Operations
+
+| Method | Description |
+|--------|-------------|
+| `object_list( bucket:, prefix:, ... )` | List objects |
+| `object_get( bucket:, key:, &block )` | Download object |
+| `object_put( bucket:, key:, body:, ... )` | Upload object |
+| `object_delete( bucket:, key: )` | Delete object |
+| `object_delete_batch( bucket:, keys: )` | Delete multiple objects |
+| `object_head( bucket:, key: )` | Get object metadata |
+| `object_exists?( bucket:, key: )` | Returns true/false |
+| `object_copy( source_bucket:, source_key:, bucket:, key:, ... )` | Copy object |
+| `object_metadata_set( bucket:, key:, metadata: )` | Update metadata |
+
+### Multipart Operations
+
+| Method | Description |
+|--------|-------------|
+| `multipart_create( bucket:, key:, ... )` | Initiate multipart upload |
+| `multipart_upload( bucket:, key:, upload_id:, part_number:, body: )` | Upload a part |
+| `multipart_complete( bucket:, key:, upload_id:, parts: )` | Complete upload |
+| `multipart_abort( bucket:, key:, upload_id: )` | Abort upload |
+| `multipart_list( bucket:, prefix: )` | List in-progress uploads |
+| `multipart_parts( bucket:, key:, upload_id: )` | List uploaded parts |
+
+### Presigned URLs
+
+| Method | Description |
+|--------|-------------|
+| `presign_get( bucket:, key:, expires_in: )` | Generate download URL |
+| `presign_put( bucket:, key:, expires_in:, content_type: )` | Generate upload URL |
+
+## License
+
+MIT License
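The multipart and presign tables above give signatures only; the package's worked examples live in readme/multipart.md and readme/presign.md, which are not reproduced in this diff. A rough sketch chaining the listed methods, in which the result accessors (`upload_id`, `etag`), the shape of the `parts:` payload, and the 5 MB part size are assumptions rather than details confirmed by this diff:

```ruby
# Illustrative multipart flow; accessors and payload shapes are assumed.
PART_SIZE = 5 * 1024 * 1024   # minimum S3 part size for all but the last part

create = s3.multipart_create( bucket: 'my-bucket', key: 'backup.tar' )
parts  = []

File.open( 'backup.tar', 'rb' ) do | file |
  part_number = 1
  while ( chunk = file.read( PART_SIZE ) )
    part = s3.multipart_upload( bucket: 'my-bucket', key: 'backup.tar',
                                upload_id: create.upload_id,
                                part_number: part_number, body: chunk )
    parts << { part_number: part_number, etag: part.etag }
    part_number += 1
  end
end

s3.multipart_complete( bucket: 'my-bucket', key: 'backup.tar',
                       upload_id: create.upload_id, parts: parts )

# presigned download URL for the finished object
url = s3.presign_get( bucket: 'my-bucket', key: 'backup.tar', expires_in: 3600 )
```

If an upload fails partway, `multipart_abort( bucket:, key:, upload_id: )` from the same table is the documented way to discard the in-progress upload.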
data/Rakefile
ADDED
data/lib/s3/bucket_create_request.rb
ADDED
@@ -0,0 +1,26 @@
+module S3
+  class BucketCreateRequest < Request
+    def submit( bucket:, region: nil, acl: nil )
+      headers = {}
+      headers[ 'x-amz-acl' ] = acl if acl
+
+      body = nil
+      location = region || @region
+
+      if location && location != 'us-east-1'
+        body = <<~XML
+          <?xml version="1.0" encoding="UTF-8"?>
+          <CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
+            <LocationConstraint>#{ location }</LocationConstraint>
+          </CreateBucketConfiguration>
+        XML
+      end
+
+      response = put( "/#{ bucket }", body: body, headers: headers )
+
+      build_result( response, TrueClass ) do
+        true
+      end
+    end
+  end
+end
data/lib/s3/bucket_head_request.rb
ADDED
@@ -0,0 +1,19 @@
+module S3
+  class BucketHeadRequest < Request
+    def submit( bucket: )
+      response = head( "/#{ bucket }" )
+
+      if response.status == 404
+        ResponseMethods.install( response, nil )
+      else
+        build_result( response, BucketHeadResult ) do
+          BucketHeadResult.new( parse_response( response ) )
+        end
+      end
+    end
+
+    def parse_response( response )
+      { region: response.headers[ 'x-amz-bucket-region' ] }
+    end
+  end
+end
data/lib/s3/bucket_list_request.rb
ADDED
@@ -0,0 +1,31 @@
+module S3
+  class BucketListRequest < Request
+    def submit
+      response = get( '/' )
+
+      build_result( response, BucketListResult ) do
+        BucketListResult.new( parse_response( response.body ) )
+      end
+    end
+
+    def parse_response( body )
+      document = parse_xml( body )
+      document.remove_namespaces!
+
+      buckets = document.xpath( '//Bucket' ).map do | node |
+        { name: node.at( 'Name' )&.text,
+          creation_date: Helpers.parse_iso8601( node.at( 'CreationDate' )&.text )
+        }
+      end
+
+      owner_node = document.at( '//Owner' )
+      owner_info = if owner_node
+        { id: owner_node.at( 'ID' )&.text,
+          display_name: owner_node.at( 'DisplayName' )&.text
+        }
+      end
+
+      { buckets: buckets, owner: owner_info }
+    end
+  end
+end
data/lib/s3/bucket_list_result.rb
ADDED
@@ -0,0 +1,31 @@
+require 'forwardable'
+
+module S3
+  OwnerSchema = DynamicSchema::Struct.define do
+    id String
+    display_name String
+  end
+
+  class Owner < OwnerSchema
+  end
+
+  BucketEntrySchema = DynamicSchema::Struct.define do
+    name String
+    creation_date Time
+  end
+
+  class BucketEntry < BucketEntrySchema
+  end
+
+  BucketListResultSchema = DynamicSchema::Struct.define do
+    buckets BucketEntry, array: true
+    owner Owner
+  end
+
+  class BucketListResult < BucketListResultSchema
+    extend Forwardable
+    include Enumerable
+
+    def_delegators :buckets, :each, :[], :count, :size, :length, :first, :last, :empty?
+  end
+end
data/lib/s3/bucket_methods.rb
ADDED
@@ -0,0 +1,45 @@
+module S3
+  module BucketMethods
+    def bucket_list
+      request = BucketListRequest.new( **request_options )
+      response = request.submit
+
+      raise_if_error( response )
+
+      response.result
+    end
+
+    def bucket_create( bucket:, region: nil, acl: nil )
+      request = BucketCreateRequest.new( **request_options )
+      response = request.submit( bucket: bucket, region: region, acl: acl )
+
+      raise_if_error( response )
+
+      response.result
+    end
+
+    def bucket_delete( bucket: )
+      request = BucketDeleteRequest.new( **request_options )
+      response = request.submit( bucket: bucket )
+
+      raise_if_error( response )
+
+      response.result
+    end
+
+    def bucket_head( bucket: )
+      request = BucketHeadRequest.new( **request_options )
+      response = request.submit( bucket: bucket )
+
+      return nil if response.status == 404
+
+      raise_if_error( response )
+
+      response.result
+    end
+
+    def bucket_exists?( bucket: )
+      !bucket_head( bucket: bucket ).nil?
+    end
+  end
+end
data/lib/s3/error_result.rb
ADDED
@@ -0,0 +1,51 @@
+module S3
+  class ErrorResult
+    attr_reader :error_type, :error_description, :error_code, :request_id, :resource
+
+    def initialize( status_code, attributes = nil )
+      @error_type, @error_description = status_code_to_error( status_code )
+
+      if attributes.is_a?( Hash )
+        @error_code = attributes[ :code ]
+        @error_description = attributes[ :message ] if attributes[ :message ]
+        @request_id = attributes[ :request_id ]
+        @resource = attributes[ :resource ]
+      end
+    end
+
+    def success?
+      false
+    end
+
+    private
+
+    def status_code_to_error( status_code )
+      case status_code
+      when 200
+        [ :unexpected_error,
+          "The response was successful but it did not include a valid payload." ]
+      when 400
+        [ :invalid_request_error,
+          "There was an issue with the format or content of your request." ]
+      when 401, 403
+        [ :authentication_error,
+          "There's an issue with your credentials or permissions." ]
+      when 404
+        [ :not_found_error,
+          "The requested resource was not found." ]
+      when 409
+        [ :conflict_error,
+          "There was a conflict with the current state of the resource." ]
+      when 429
+        [ :rate_limit_error,
+          "Your account has hit a rate limit." ]
+      when 500..599
+        [ :server_error,
+          "The S3 service encountered an unexpected server error." ]
+      else
+        [ :unknown_error,
+          "The S3 service returned an unexpected status code: '#{ status_code }'." ]
+      end
+    end
+  end
+end
data/lib/s3/errors.rb
ADDED
@@ -0,0 +1,71 @@
+module S3
+  class Error < StandardError
+    attr_reader :code, :request_id, :resource
+
+    def initialize( message = nil, code: nil, request_id: nil, resource: nil )
+      @code = code
+      @request_id = request_id
+      @resource = resource
+      super( message || code )
+    end
+  end
+
+  # Authentication & Authorization
+  class AuthenticationError < Error; end
+  class AccessDeniedError < Error; end
+
+  # Bucket
+  class BucketNotFoundError < Error; end
+  class BucketAlreadyExistsError < Error; end
+  class BucketNotEmptyError < Error; end
+  class InvalidBucketNameError < Error; end
+
+  # Object
+  class NoSuchKeyError < Error; end
+  class EntityTooLargeError < Error; end
+  class EntityTooSmallError < Error; end
+
+  # Multipart
+  class NoSuchUploadError < Error; end
+  class InvalidPartError < Error; end
+  class InvalidPartOrderError < Error; end
+
+  # Request
+  class InvalidRequestError < Error; end
+
+  # Service
+  class ServiceUnavailableError < Error; end
+  class InternalError < Error; end
+
+  # Network
+  class NetworkError < Error; end
+  class TimeoutError < Error; end
+
+  ERROR_CODE_MAP = {
+    'InvalidAccessKeyId' => AuthenticationError,
+    'SignatureDoesNotMatch' => AuthenticationError,
+    'AccessDenied' => AccessDeniedError,
+    'NoSuchBucket' => BucketNotFoundError,
+    'BucketAlreadyExists' => BucketAlreadyExistsError,
+    'BucketAlreadyOwnedByYou' => BucketAlreadyExistsError,
+    'BucketNotEmpty' => BucketNotEmptyError,
+    'InvalidBucketName' => InvalidBucketNameError,
+    'NoSuchKey' => NoSuchKeyError,
+    'EntityTooLarge' => EntityTooLargeError,
+    'EntityTooSmall' => EntityTooSmallError,
+    'NoSuchUpload' => NoSuchUploadError,
+    'InvalidPart' => InvalidPartError,
+    'InvalidPartOrder' => InvalidPartOrderError,
+    'MalformedXML' => InvalidRequestError,
+    'InvalidArgument' => InvalidRequestError,
+    'ServiceUnavailable' => ServiceUnavailableError,
+    'SlowDown' => ServiceUnavailableError,
+    'InternalError' => InternalError,
+    'RequestTimeout' => TimeoutError
+  }.freeze
+
+  def self.build_error( code:, message:, request_id: nil, resource: nil )
+    error_class = ERROR_CODE_MAP[ code ] || Error
+    error_class.new( message, code: code, request_id: request_id, resource: resource )
+  end
+end
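For reference, a minimal sketch of how the mapping above resolves error codes to exception classes, assuming `build_error` is invoked as `S3.build_error` with values parsed from an S3 error payload (the call site is not shown in this diff):

```ruby
# Unmapped codes fall back to the base S3::Error class.
error = S3.build_error( code: 'NoSuchKey',
                        message: 'The specified key does not exist.',
                        request_id: 'ABC123' )

error.class        # => S3::NoSuchKeyError
error.code         # => "NoSuchKey"
error.request_id   # => "ABC123"
raise error
```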