fake_dynamo 0.0.1
- data/.gitignore +17 -0
- data/.rspec +1 -0
- data/Gemfile +11 -0
- data/Guardfile +19 -0
- data/LICENSE +22 -0
- data/README.md +4 -0
- data/Rakefile +2 -0
- data/bin/fake_dynamo +17 -0
- data/fake_dynamo.gemspec +18 -0
- data/lib/fake_dynamo.rb +20 -0
- data/lib/fake_dynamo/api.yml +734 -0
- data/lib/fake_dynamo/attribute.rb +54 -0
- data/lib/fake_dynamo/db.rb +113 -0
- data/lib/fake_dynamo/exceptions.rb +57 -0
- data/lib/fake_dynamo/filter.rb +110 -0
- data/lib/fake_dynamo/item.rb +102 -0
- data/lib/fake_dynamo/key.rb +71 -0
- data/lib/fake_dynamo/key_schema.rb +26 -0
- data/lib/fake_dynamo/server.rb +43 -0
- data/lib/fake_dynamo/storage.rb +71 -0
- data/lib/fake_dynamo/table.rb +362 -0
- data/lib/fake_dynamo/validation.rb +155 -0
- data/lib/fake_dynamo/version.rb +3 -0
- data/spec/fake_dynamo/db_spec.rb +257 -0
- data/spec/fake_dynamo/filter_spec.rb +122 -0
- data/spec/fake_dynamo/item_spec.rb +97 -0
- data/spec/fake_dynamo/server_spec.rb +47 -0
- data/spec/fake_dynamo/table_spec.rb +435 -0
- data/spec/fake_dynamo/validation_spec.rb +63 -0
- data/spec/spec_helper.rb +28 -0
- metadata +105 -0
`data/lib/fake_dynamo/key_schema.rb` (new file, 26 lines):

```ruby
module FakeDynamo
  class KeySchema

    attr_accessor :hash_key, :range_key

    def initialize(data)
      extract_values(data)
    end

    def description
      description = { 'HashKeyElement' => hash_key.description }
      if range_key
        description['RangeKeyElement'] = range_key.description
      end
      description
    end

    private
    def extract_values(data)
      @hash_key = Attribute.from_data(data['HashKeyElement'])
      if range_key_element = data['RangeKeyElement']
        @range_key = Attribute.from_data(range_key_element)
      end
    end
  end
end
```
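For reference, a minimal sketch of how `KeySchema` is fed. The inner hash shapes (`AttributeName`/`AttributeType`) are an assumption based on the 2011-12-05-era DynamoDB wire format, since `Attribute.from_data` and `Attribute#description` live in `attribute.rb`, which is not shown here:

```ruby
require 'fake_dynamo'

# Hypothetical input; assumes Attribute.from_data parses this shape and
# Attribute#description round-trips it.
schema = FakeDynamo::KeySchema.new(
  'HashKeyElement'  => { 'AttributeName' => 'id',    'AttributeType' => 'S' },
  'RangeKeyElement' => { 'AttributeName' => 'score', 'AttributeType' => 'N' })

schema.range_key    # => the parsed range-key Attribute
schema.description  # => { 'HashKeyElement' => ..., 'RangeKeyElement' => ... }
```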
`data/lib/fake_dynamo/server.rb` (new file, 43 lines):

```ruby
require 'sinatra/base'

module FakeDynamo
  class Server < Sinatra::Base

    set :show_exceptions, false

    post '/' do
      status = 200
      content_type 'application/x-amz-json-1.0'
      begin
        data = JSON.parse(request.body.read)
        operation = extract_operation(request.env)
        puts "operation #{operation}"
        puts "data"
        pp data
        response = db.process(operation, data)
        storage.persist(operation, data)
      rescue FakeDynamo::Error => e
        response, status = e.response, e.status
      end
      puts "response"
      pp response
      [status, response.to_json]
    end

    def db
      DB.instance
    end

    def storage
      Storage.instance
    end

    def extract_operation(env)
      if env['HTTP_X_AMZ_TARGET'] =~ /DynamoDB_\d+\.([a-zA-Z]+)/
        $1
      else
        raise UnknownOperationException
      end
    end
  end
end
```
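A quick way to exercise the endpoint once the server is running (via `bin/fake_dynamo`). This is a sketch only: the port is Sinatra's default and purely an assumption, and `extract_operation` only requires the target header to match `DynamoDB_<digits>.<Operation>`:

```ruby
require 'json'
require 'net/http'

uri = URI('http://localhost:4567/')                    # port is an assumption
req = Net::HTTP::Post.new(uri)
req['X-Amz-Target'] = 'DynamoDB_20111205.ListTables'   # parsed by extract_operation
req['Content-Type'] = 'application/x-amz-json-1.0'
req.body = {}.to_json

res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(req) }
puts res.code, res.body
```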
`data/lib/fake_dynamo/storage.rb` (new file, 71 lines):

```ruby
module FakeDynamo
  class Storage

    class << self
      def instance
        @storage ||= Storage.new
      end
    end

    def initialize
      init_db
    end

    def write_commands
      %w[CreateTable DeleteItem DeleteTable PutItem UpdateItem UpdateTable]
    end

    def write_command?(command)
      write_commands.include?(command)
    end

    def db_path
      '/usr/local/var/fake_dynamo/db.fdb'
    end

    def init_db
      return if File.exists? db_path
      FileUtils.mkdir_p(File.dirname(db_path))
      FileUtils.touch(db_path)
    end

    def delete_db
      return unless File.exists? db_path
      FileUtils.rm(db_path)
    end

    def db
      DB.instance
    end

    def db_aof
      @aof ||= File.new(db_path, 'a')
    end

    def shutdown
      puts "shutting down fake_dynamo ..."
      @aof.close if @aof
    end

    def persist(operation, data)
      return unless write_command?(operation)
      db_aof.puts(operation)
      data = data.to_json
      db_aof.puts(data.size + 1)
      db_aof.puts(data)
    end

    def load_aof
      file = File.new(db_path, 'r')
      puts "Loading fake_dynamo data ..."
      loop do
        operation = file.readline.chomp
        size = Integer(file.readline.chomp)
        data = file.read(size)
        db.process(operation, JSON.parse(data))
      end
    rescue EOFError
      file.close
    end
  end
end
```
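Each persisted write becomes three newline-terminated records in the append-only file: the operation name, the payload length, and the JSON payload itself. The `+ 1` in `persist` covers the newline that `IO#puts` appends after the JSON, so `load_aof`'s `file.read(size)` also consumes that newline and lands exactly on the next operation name. A worked sketch of one record:

```ruby
require 'json'

json = { 'TableName' => 'users' }.to_json
json.size  # => 21

# What persist('DeleteTable', data) appends to db.fdb:
record = "DeleteTable\n#{json.size + 1}\n#{json}\n"
# "DeleteTable\n22\n{\"TableName\":\"users\"}\n"
#              ^^ json.size + 1: read(22) swallows the trailing newline too
```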
`data/lib/fake_dynamo/table.rb` (new file, 362 lines) is the largest piece. First the table-level operations:

```ruby
module FakeDynamo
  class Table
    include Validation
    include Filter

    attr_accessor :creation_date_time, :read_capacity_units, :write_capacity_units,
                  :name, :status, :key_schema, :items, :size_bytes, :last_increased_time,
                  :last_decreased_time

    def initialize(data)
      extract_values(data)
      init
    end

    def description
      {
        'TableDescription' => {
          'CreationDateTime' => creation_date_time,
          'KeySchema' => key_schema.description,
          'ProvisionedThroughput' => {
            'ReadCapacityUnits' => read_capacity_units,
            'WriteCapacityUnits' => write_capacity_units
          },
          'TableName' => name,
          'TableStatus' => status
        }
      }
    end

    def size_description
      { 'ItemCount' => items.count,
        'TableSizeBytes' => size_bytes }
    end

    def describe_table
      { 'Table' => description['TableDescription'] }.merge(size_description)
    end

    def activate
      @status = 'ACTIVE'
    end

    def delete
      @status = 'DELETING'
      description
    end

    def update(read_capacity_units, write_capacity_units)
      if @read_capacity_units > read_capacity_units
        @last_decreased_time = Time.now.to_i
      elsif @read_capacity_units < read_capacity_units
        @last_increased_time = Time.now.to_i
      end

      if @write_capacity_units > write_capacity_units
        @last_decreased_time = Time.now.to_i
      elsif @write_capacity_units < write_capacity_units
        @last_increased_time = Time.now.to_i
      end

      @read_capacity_units, @write_capacity_units = read_capacity_units, write_capacity_units

      response = description.merge(size_description)

      if last_increased_time
        response['TableDescription']['ProvisionedThroughput']['LastIncreaseDateTime'] = @last_increased_time
      end

      if last_decreased_time
        response['TableDescription']['ProvisionedThroughput']['LastDecreaseDateTime'] = @last_decreased_time
      end

      response['TableDescription']['TableStatus'] = 'UPDATING'
      response
    end
```
The item-level operations, continuing `table.rb`:

```ruby
    def put_item(data)
      item = Item.from_data(data['Item'], key_schema)
      old_item = @items[item.key]
      check_conditions(old_item, data['Expected'])
      @items[item.key] = item

      consumed_capacity.merge(return_values(data, old_item))
    end

    def get_item(data)
      response = consumed_capacity
      if item_hash = get_raw_item(data['Key'], data['AttributesToGet'])
        response.merge!('Item' => item_hash)
      end
      response
    end

    def get_raw_item(key_data, attributes_to_get)
      key = Key.from_data(key_data, key_schema)
      item = @items[key]

      if item
        filter_attributes(item, attributes_to_get)
      end
    end

    def filter_attributes(item, attributes_to_get)
      hash = item.as_hash
      if attributes_to_get
        hash.select! do |attribute, value|
          attributes_to_get.include? attribute
        end
      end
      hash
    end

    def delete_item(data)
      key = Key.from_data(data['Key'], key_schema)
      item = @items[key]
      check_conditions(item, data['Expected'])

      @items.delete(key) if item
      consumed_capacity.merge(return_values(data, item))
    end

    def update_item(data)
      key = Key.from_data(data['Key'], key_schema)
      item = @items[key]
      check_conditions(item, data['Expected'])

      unless item
        if create_item?(data)
          item = @items[key] = Item.from_key(key)
        else
          return consumed_capacity
        end
      end

      old_hash = item.as_hash
      data['AttributeUpdates'].each do |name, update_data|
        item.update(name, update_data)
      end

      consumed_capacity.merge(return_values(data, old_hash, item))
    end
```
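A hedged usage sketch of the write path. The typed attribute format (`{'S' => ...}`, `{'N' => ...}`) is assumed from the 2011-12-05 API; `Item.from_data` and `Key.from_data` live in `item.rb`/`key.rb`, so the exact payload shapes are assumptions here:

```ruby
require 'fake_dynamo'

table = FakeDynamo::Table.new(
  'TableName' => 'users',
  'KeySchema' => {
    'HashKeyElement' => { 'AttributeName' => 'id', 'AttributeType' => 'S' } },
  'ProvisionedThroughput' => { 'ReadCapacityUnits' => 5, 'WriteCapacityUnits' => 5 })

table.put_item('Item' => { 'id'  => { 'S' => 'user-1' },
                           'age' => { 'N' => '42' } })
# => { 'ConsumedCapacityUnits' => 1 }   (no ReturnValues requested)

table.get_item('Key' => { 'HashKeyElement' => { 'S' => 'user-1' } },
               'AttributesToGet' => ['age'])
# => { 'ConsumedCapacityUnits' => 1, 'Item' => { 'age' => { 'N' => '42' } } }
```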
Query and Scan, continuing `table.rb`:

```ruby
    def query(data)
      unless key_schema.range_key
        raise ValidationException, "Query can be performed only on a table with a HASH,RANGE key schema"
      end

      count_and_attributes_to_get_present?(data)
      validate_limit(data)

      hash_attribute = Attribute.from_hash(key_schema.hash_key.name, data['HashKeyValue'])
      matched_items = get_items_by_hash_key(hash_attribute)

      forward = data.has_key?('ScanIndexForward') ? data['ScanIndexForward'] : true

      if forward
        matched_items.sort! { |a, b| a.key.range <=> b.key.range }
      else
        matched_items.sort! { |a, b| b.key.range <=> a.key.range }
      end

      matched_items = drop_till_start(matched_items, data['ExclusiveStartKey'])

      if data['RangeKeyCondition']
        conditions = { key_schema.range_key.name => data['RangeKeyCondition'] }
      else
        conditions = {}
      end

      result, last_evaluated_item, _ = filter(matched_items, conditions, data['Limit'], true)

      response = {
        'Count' => result.size,
        'ConsumedCapacityUnits' => 1 }

      unless data['Count']
        response['Items'] = result.map { |r| filter_attributes(r, data['AttributesToGet']) }
      end

      if last_evaluated_item
        response['LastEvaluatedKey'] = last_evaluated_item.key.as_key_hash
      end
      response
    end

    def scan(data)
      count_and_attributes_to_get_present?(data)
      validate_limit(data)
      conditions = data['ScanFilter'] || {}
      all_items = drop_till_start(items.values, data['ExclusiveStartKey'])
      result, last_evaluated_item, scanned_count = filter(all_items, conditions, data['Limit'], false)
      response = {
        'Count' => result.size,
        'ScannedCount' => scanned_count,
        'ConsumedCapacityUnits' => 1 }

      unless data['Count']
        response['Items'] = result.map { |r| filter_attributes(r, data['AttributesToGet']) }
      end

      if last_evaluated_item
        response['LastEvaluatedKey'] = last_evaluated_item.key.as_key_hash
      end

      response
    end
```
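And the read side: a sketch of a Query payload against a table whose schema also defines a numeric range key (say `score`). The `ComparisonOperator` string is downcased and dispatched to `*_filter` methods in the `Filter` module; `gt_filter` below is inferred from that naming scheme, not shown in this diff:

```ruby
# Hypothetical request; assumes the table was created with a 'score' range key.
table.query(
  'HashKeyValue' => { 'S' => 'user-1' },
  'RangeKeyCondition' => {
    'AttributeValueList' => [{ 'N' => '10' }],
    'ComparisonOperator' => 'GT' },   # dispatched as gt_filter
  'ScanIndexForward' => false,        # highest range keys first
  'Limit' => 25)
# => { 'Count' => ..., 'ConsumedCapacityUnits' => 1, 'Items' => [...] }
#    plus 'LastEvaluatedKey' when the Limit cut the result short
```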
The remaining helpers and conditional-write checks, closing out `table.rb`:

```ruby
    def count_and_attributes_to_get_present?(data)
      if data['Count'] and data['AttributesToGet']
        raise ValidationException, "Cannot specify the AttributesToGet when choosing to get only the Count"
      end
    end

    def validate_limit(data)
      if data['Limit'] and data['Limit'] <= 0
        raise ValidationException, "Limit failed to satisfy constraint: Member must have value greater than or equal to 1"
      end
    end

    def drop_till_start(all_items, start_key_hash)
      if start_key_hash
        all_items.drop_while { |i| i.key.as_key_hash != start_key_hash }.drop(1)
      else
        all_items
      end
    end

    def filter(items, conditions, limit, fail_on_type_mismatch)
      limit ||= -1
      result = []
      last_evaluated_item = nil
      scanned_count = 0
      items.each do |item|
        select = true
        conditions.each do |attribute_name, condition|
          value = condition['AttributeValueList']
          comparison_op = condition['ComparisonOperator']
          unless self.send("#{comparison_op.downcase}_filter", value, item[attribute_name], fail_on_type_mismatch)
            select = false
            break
          end
        end

        if select
          result << item
          if (limit -= 1) == 0
            last_evaluated_item = item
            break
          end
        end

        scanned_count += 1
      end
      [result, last_evaluated_item, scanned_count]
    end

    def get_items_by_hash_key(hash_key)
      items.values.select do |i|
        i.key.primary == hash_key
      end
    end

    def create_item?(data)
      data['AttributeUpdates'].any? do |name, update_data|
        action = update_data['Action']
        ['PUT', 'ADD', nil].include? action
      end
    end

    def updated_attributes(data)
      data['AttributeUpdates'].map { |name, _| name }
    end

    def return_values(data, old_item, new_item = {})
      old_item ||= {}
      old_hash = old_item.kind_of?(Item) ? old_item.as_hash : old_item

      new_item ||= {}
      new_hash = new_item.kind_of?(Item) ? new_item.as_hash : new_item

      return_value = data['ReturnValues']
      result = case return_value
               when 'ALL_OLD'
                 old_hash
               when 'ALL_NEW'
                 new_hash
               when 'UPDATED_OLD'
                 updated = updated_attributes(data)
                 old_hash.select { |name, _| updated.include? name }
               when 'UPDATED_NEW'
                 updated = updated_attributes(data)
                 new_hash.select { |name, _| updated.include? name }
               when 'NONE', nil
                 {}
               else
                 raise 'unknown return value'
               end

      unless result.empty?
        { 'Attributes' => result }
      else
        {}
      end
    end

    def consumed_capacity
      { 'ConsumedCapacityUnits' => 1 }
    end

    def check_conditions(old_item, conditions)
      return unless conditions

      conditions.each do |name, predicate|
        exist = predicate['Exists']
        value = predicate['Value']

        if not value
          if exist.nil?
            raise ValidationException, "'Exists' is set to null. 'Exists' must be set to false when no Attribute value is specified"
          elsif exist
            raise ValidationException, "'Exists' is set to true. 'Exists' must be set to false when no Attribute value is specified"
          elsif !exist # false
            if old_item and old_item[name]
              raise ConditionalCheckFailedException
            end
          end
        else
          expected_attr = Attribute.from_hash(name, value)

          if exist.nil? or exist
            raise ConditionalCheckFailedException unless (old_item and old_item[name] == expected_attr)
          elsif !exist # false
            raise ValidationException, "Cannot expect an attribute to have a specified value while expecting it to not exist"
          end
        end
      end
    end

    private
    def init
      @creation_date_time = Time.now.to_i
      @status = 'CREATING'
      @items = {}
      @size_bytes = 0
    end

    def extract_values(data)
      @name = data['TableName']
      @key_schema = KeySchema.new(data['KeySchema'])
      set_throughput(data['ProvisionedThroughput'])
    end

    def set_throughput(throughput)
      @read_capacity_units = throughput['ReadCapacityUnits']
      @write_capacity_units = throughput['WriteCapacityUnits']
    end
  end
end
```
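Finally, the two shapes `check_conditions` accepts under `'Expected'`, reusing the hypothetical `users` table from the earlier sketch (same assumed wire format):

```ruby
# Write only if the stored item currently has status == 'draft':
table.put_item(
  'Item' => { 'id' => { 'S' => 'user-1' }, 'status' => { 'S' => 'live' } },
  'Expected' => { 'status' => { 'Value' => { 'S' => 'draft' } } })

# Write only if the item has no 'status' attribute yet:
table.put_item(
  'Item' => { 'id' => { 'S' => 'user-1' }, 'status' => { 'S' => 'live' } },
  'Expected' => { 'status' => { 'Exists' => false } })

# Either call raises ConditionalCheckFailedException when its expectation fails.
```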