fakes3-docker 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,137 @@
1
+ require 'set'
2
+ module FakeS3
3
+ class S3MatchSet
4
+ attr_accessor :matches,:is_truncated,:common_prefixes
5
+ def initialize
6
+ @matches = []
7
+ @is_truncated = false
8
+ @common_prefixes = []
9
+ end
10
+ end
11
+
12
+ # This class has some of the semantics necessary for how buckets can return
13
+ # their items
14
+ #
15
+ # It is currently implemented naively as a sorted set + hash If you are going
16
+ # to try to put massive lists inside buckets and ls them, you will be sorely
17
+ # disappointed about this performance.
18
+ class SortedObjectList
19
+
20
+ def initialize
21
+ @sorted_set = SortedSet.new
22
+ @object_map = {}
23
+ @mutex = Mutex.new
24
+ end
25
+
26
+ def count
27
+ @sorted_set.count
28
+ end
29
+
30
+ def find(object_name)
31
+ @object_map[object_name]
32
+ end
33
+
34
+ # Add an S3 object into the sorted list
35
+ def add(s3_object)
36
+ return if !s3_object
37
+
38
+ @object_map[s3_object.name] = s3_object
39
+ @sorted_set << s3_object
40
+ end
41
+
42
+ def remove(s3_object)
43
+ return if !s3_object
44
+
45
+ @object_map.delete(s3_object.name)
46
+ @sorted_set.delete(s3_object)
47
+ end
48
+
49
+ # Return back a set of matches based on the passed in options
50
+ #
51
+ # options:
52
+ #
53
+ # :marker : a string to start the lexographical search (it is not included
54
+ # in the result)
55
+ # :max_keys : a maximum number of results
56
+ # :prefix : a string to filter the results by
57
+ # :delimiter : not supported yet
58
+ def list(options)
59
+ marker = options[:marker]
60
+ prefix = options[:prefix]
61
+ max_keys = options[:max_keys] || 1000
62
+ delimiter = options[:delimiter]
63
+
64
+ ms = S3MatchSet.new
65
+
66
+ marker_found = true
67
+ pseudo = nil
68
+ if marker
69
+ marker_found = false
70
+ if !@object_map[marker]
71
+ pseudo = S3Object.new
72
+ pseudo.name = marker
73
+ @sorted_set << pseudo
74
+ end
75
+ end
76
+
77
+ if delimiter
78
+ if prefix
79
+ base_prefix = prefix
80
+ else
81
+ base_prefix = ""
82
+ end
83
+ prefix_offset = base_prefix.length
84
+ end
85
+
86
+ count = 0
87
+ last_chunk = nil
88
+ @sorted_set.each do |s3_object|
89
+ if marker_found && (!prefix or s3_object.name.index(prefix) == 0)
90
+ if delimiter
91
+ name = s3_object.name
92
+ remainder = name.slice(prefix_offset, name.length)
93
+ chunks = remainder.split(delimiter, 2)
94
+ if chunks.length > 1
95
+ if (last_chunk != chunks[0])
96
+ # "All of the keys rolled up in a common prefix count as
97
+ # a single return when calculating the number of
98
+ # returns. See MaxKeys."
99
+ # (http://awsdocs.s3.amazonaws.com/S3/latest/s3-api.pdf)
100
+ count += 1
101
+ if count <= max_keys
102
+ ms.common_prefixes << base_prefix + chunks[0] + delimiter
103
+ last_chunk = chunks[0]
104
+ else
105
+ is_truncated = true
106
+ break
107
+ end
108
+ end
109
+
110
+ # Continue to the next key, since this one has a
111
+ # delimiter.
112
+ next
113
+ end
114
+ end
115
+
116
+ count += 1
117
+ if count <= max_keys
118
+ ms.matches << s3_object
119
+ else
120
+ is_truncated = true
121
+ break
122
+ end
123
+ end
124
+
125
+ if marker and marker == s3_object.name
126
+ marker_found = true
127
+ end
128
+ end
129
+
130
+ if pseudo
131
+ @sorted_set.delete(pseudo)
132
+ end
133
+
134
+ return ms
135
+ end
136
+ end
137
+ end
@@ -0,0 +1,4 @@
1
module FakeS3
  # Raised when a client invokes an S3 operation that FakeS3 does not
  # implement.
  class UnsupportedOperation < RuntimeError
  end
end
@@ -0,0 +1,3 @@
1
module FakeS3
  # Current gem release; keep in sync with the gemspec.
  VERSION = "0.2.4"
end
@@ -0,0 +1,222 @@
1
+ require 'builder'
2
+ require 'time'
3
+
4
module FakeS3
  # Serializes FakeS3 domain objects into the XML documents that the S3 REST
  # API returns: bucket listings, error responses, ACLs, and copy/multipart
  # results.
  class XmlAdapter
    # <ListAllMyBucketsResult> for all buckets owned by the (fake) account.
    def self.buckets(bucket_objects)
      output = ""
      xml = new_xml(output)
      xml.ListAllMyBucketsResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |lam|
        lam.Owner { |owner|
          owner.ID("123")
          owner.DisplayName("FakeS3")
        }
        lam.Buckets { |buckets|
          bucket_objects.each do |bucket|
            buckets.Bucket do |b|
              b.Name(bucket.name)
              b.CreationDate(bucket.creation_date.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
            end
          end
        }
      }
      output
    end

    # Render an error object exposing #code, #message and #resource.
    def self.error(error)
      error_xml(error.code, error.message) { |err| err.Resource(error.resource) }
    end

    # Example <Error> document:
    #
    #   <?xml version="1.0" encoding="UTF-8"?>
    #   <Error>
    #     <Code>NoSuchKey</Code>
    #     <Message>The resource you requested does not exist</Message>
    #     <Resource>/mybucket/myfoto.jpg</Resource>
    #     <RequestId>4442587FB7D0A2F9</RequestId>
    #   </Error>
    def self.error_no_such_bucket(name)
      error_xml("NoSuchBucket", "The resource you requested does not exist") { |err| err.Resource(name) }
    end

    def self.error_bucket_not_empty(name)
      error_xml("BucketNotEmpty", "The bucket you tried to delete is not empty.") { |err| err.Resource(name) }
    end

    # Uses <Key> instead of <Resource> and appends a <HostId> after the
    # request id, so it does not fit the error_xml skeleton.
    def self.error_no_such_key(name)
      output = ""
      xml = new_xml(output)
      xml.Error { |err|
        err.Code("NoSuchKey")
        err.Message("The specified key does not exist")
        err.Key(name)
        err.RequestId(1)
        err.HostId(2)
      }
      output
    end

    # Empty <ListBucketResult> skeleton for a bucket (no contents).
    def self.bucket(bucket)
      output = ""
      xml = new_xml(output)
      xml.ListBucketResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |lbr|
        lbr.Name(bucket.name)
        lbr.Prefix
        lbr.Marker
        lbr.MaxKeys("1000")
        lbr.IsTruncated("false")
      }
      output
    end

    # A bucket query gives back the bucket along with contents:
    #   <Contents>
    #     <Key>Nelson</Key>
    #     <LastModified>2006-01-01T12:00:00.000Z</LastModified>
    #     <ETag>&quot;828ef3fdfa96f00ad9f27c383fc9ac7f&quot;</ETag>
    #     <Size>5</Size>
    #     <StorageClass>STANDARD</StorageClass>
    #     <Owner>
    #       <ID>bcaf161ca5fb16fd081034f</ID>
    #       <DisplayName>webfile</DisplayName>
    #     </Owner>
    #   </Contents>
    def self.append_objects_to_list_bucket_result(lbr, objects)
      return if objects.nil? or objects.size == 0

      # BUG FIX: this previously dropped into ruby-debug (require
      # 'ruby-debug'; Debugger.start; debugger) whenever a nil slipped into
      # the list — leftover development scaffolding that would raise
      # LoadError in production. Skip nil entries defensively instead; a nil
      # here indicates an upstream bug in the object list.
      objects.compact.each do |s3_object|
        lbr.Contents { |contents|
          contents.Key(s3_object.name)
          contents.LastModified(s3_object.modified_date)
          contents.ETag("\"#{s3_object.md5}\"")
          contents.Size(s3_object.size)
          contents.StorageClass("STANDARD")

          contents.Owner { |owner|
            owner.ID("abc")
            owner.DisplayName("You")
          }
        }
      end
    end

    # Emit one <CommonPrefixes> element per rolled-up prefix.
    def self.append_common_prefixes_to_list_bucket_result(lbr, prefixes)
      return if prefixes.nil? or prefixes.size == 0

      prefixes.each do |common_prefix|
        lbr.CommonPrefixes { |contents| contents.Prefix(common_prefix) }
      end
    end

    # Full <ListBucketResult> for a query: echoes prefix/marker/max-keys,
    # then the matched objects and common prefixes.
    def self.bucket_query(bucket_query)
      output = ""
      bucket = bucket_query.bucket
      xml = new_xml(output)
      xml.ListBucketResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |lbr|
        lbr.Name(bucket.name)
        lbr.Prefix(bucket_query.prefix)
        lbr.Marker(bucket_query.marker)
        lbr.MaxKeys(bucket_query.max_keys)
        lbr.IsTruncated(bucket_query.is_truncated?)
        append_objects_to_list_bucket_result(lbr, bucket_query.matches)
        append_common_prefixes_to_list_bucket_result(lbr, bucket_query.common_prefixes)
      }
      output
    end

    # Canned ACL granting FULL_CONTROL to a single canonical user; the
    # object argument is accepted for interface compatibility but unused.
    def self.acl(object = nil)
      output = ""
      xml = new_xml(output)
      xml.AccessControlPolicy(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |acp|
        acp.Owner do |owner|
          owner.ID("abc")
          owner.DisplayName("You")
        end
        acp.AccessControlList do |acl|
          acl.Grant do |grant|
            grant.Grantee("xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance", "xsi:type" => "CanonicalUser") do |grantee|
              grantee.ID("abc")
              grantee.DisplayName("You")
            end
            grant.Permission("FULL_CONTROL")
          end
        end
      }
      output
    end

    # <CopyObjectResult>
    #   <LastModified>2009-10-28T22:32:00</LastModified>
    #   <ETag>"9b2cf535f27731c974343645a3985328"</ETag>
    # </CopyObjectResult>
    def self.copy_object_result(object)
      output = ""
      xml = new_xml(output)
      xml.CopyObjectResult { |result|
        result.LastModified(object.modified_date)
        result.ETag("\"#{object.md5}\"")
      }
      output
    end

    # <CompleteMultipartUploadResult>
    #   <Location>http://Example-Bucket.s3.amazonaws.com/Example-Object</Location>
    #   <Bucket>Example-Bucket</Bucket>
    #   <Key>Example-Object</Key>
    #   <ETag>"3858f62230ac3c915f300c664312c11f-9"</ETag>
    # </CompleteMultipartUploadResult>
    def self.complete_multipart_result(object)
      output = ""
      xml = new_xml(output)
      xml.CompleteMultipartUploadResult { |result|
        result.Location("TODO: implement")
        result.Bucket("TODO: implement")
        result.Key(object.name)
        result.ETag("\"#{object.md5}\"")
      }
      output
    end

    # Build a Builder::XmlMarkup targeting +output+ with the standard
    # <?xml version="1.0" encoding="UTF-8"?> declaration already written.
    # Extracted from the identical preamble every public method repeated.
    def self.new_xml(output)
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml
    end
    private_class_method :new_xml

    # Shared skeleton for <Error> documents: Code and Message first, any
    # error-specific elements added by the block, then RequestId — the same
    # element order the hand-rolled versions produced.
    def self.error_xml(code, message)
      output = ""
      xml = new_xml(output)
      xml.Error { |err|
        err.Code(code)
        err.Message(message)
        yield err if block_given?
        err.RequestId(1)
      }
      output
    end
    private_class_method :error_xml
  end
end
@@ -0,0 +1,59 @@
1
+ require 'test/test_helper'
2
+ require 'aws-sdk-v1'
3
+
4
# Integration tests exercising a locally running FakeS3 server through the
# AWS SDK for Ruby v1.
class AwsSdkCommandsTest < Test::Unit::TestCase
  # Build a v1 SDK client pointed at the local FakeS3 instance.
  def setup
    @s3 = AWS::S3.new(:access_key_id => '123',
                      :secret_access_key => 'abc',
                      :s3_endpoint => 'localhost',
                      :s3_port => 10453,
                      :use_ssl => false)
  end

  # copy_to should leave two independent keys in the bucket.
  def test_copy_to
    bucket = @s3.buckets["test_copy_to"]
    source = bucket.objects["key1"]
    source.write("asdf")

    assert source.exists?
    source.copy_to("key2")

    assert_equal 2, bucket.objects.count
  end

  # A write above the multipart threshold must round-trip intact.
  def test_multipart_upload
    bucket = @s3.buckets["test_multipart_upload"]
    uploaded = bucket.objects["key1"]
    uploaded.write("thisisaverybigfile", :multipart_threshold => 5)
    assert uploaded.exists?
    assert_equal "thisisaverybigfile", uploaded.read
  end

  # Storage-class and custom metadata headers should be persisted into the
  # on-disk .fakes3_metadataFFF/metadata YAML file.
  def test_metadata
    file_path = './test_root/test_metadata/metaobject'
    FileUtils.rm_rf file_path

    bucket = @s3.buckets["test_metadata"]
    stored = bucket.objects["metaobject"]
    stored.write(
      'data',
      # this is sent as header x-amz-storage-class
      :storage_class => 'REDUCED_REDUNDANCY',
      # this is sent as header x-amz-meta-custom1
      :metadata => {
        "custom1" => "foobar"
      }
    )
    assert stored.exists?
    metadata_file = YAML.load(IO.read("#{file_path}/.fakes3_metadataFFF/metadata"))

    assert metadata_file.has_key?(:custom_metadata), 'Metadata file does not contain a :custom_metadata key'
    assert metadata_file[:custom_metadata].has_key?('custom1'), ':custom_metadata does not contain field "custom1"'
    assert_equal 'foobar', metadata_file[:custom_metadata]['custom1'], '"custom1" does not equal expected value "foobar"'

    assert metadata_file.has_key?(:amazon_metadata), 'Metadata file does not contain an :amazon_metadata key'
    assert metadata_file[:amazon_metadata].has_key?('storage-class'), ':amazon_metadata does not contain field "storage-class"'
    assert_equal 'REDUCED_REDUNDANCY', metadata_file[:amazon_metadata]['storage-class'], '"storage-class" does not equal expected value "REDUCED_REDUNDANCY"'
  end
end
@@ -0,0 +1,25 @@
1
+ require 'test/test_helper'
2
+ require 'fileutils'
3
+
4
# Smoke test that drives FakeS3 through the boto command-line helper.
class BotoTest < Test::Unit::TestCase
  # Scratch file uploaded during the test.
  UPLOAD_PATH = "/tmp/fakes3_upload".freeze

  # Point botocmd.py at the locally running FakeS3 instance.
  def setup
    cmdpath = File.expand_path(File.join(File.dirname(__FILE__), 'botocmd.py'))
    @botocmd = "python #{cmdpath} -t localhost -p 10453"
  end

  def teardown
  end

  # Upload a copy of this test file via boto and verify the tool reports
  # that the object was stored.
  def test_store
    # Idiom fix: binary-safe copy via FileUtils instead of the previous
    # hand-rolled File.open read/write loop ('fileutils' is required at the
    # top of this file).
    FileUtils.cp(__FILE__, UPLOAD_PATH)
    output = `#{@botocmd} put #{UPLOAD_PATH} s3://s3cmd_bucket/upload`
    assert_match(/stored/, output)
  ensure
    # Robustness fix: previously the scratch file was only removed after a
    # passing assertion, so a failure leaked /tmp/fakes3_upload. rm_f also
    # tolerates the copy itself having failed.
    FileUtils.rm_f(UPLOAD_PATH)
  end
end