fake_dynamo 0.1.4 → 0.2.0
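
The release moves fake_dynamo to the newer DynamoDB API: tables are defined with KeySchema plus AttributeDefinitions, queries use KeyConditions and Select, ConsumedCapacity and ItemCollectionMetrics are reported only on request, and local secondary indexes are supported.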

@@ -90,26 +90,32 @@ module FakeDynamo
 
     def batch_get_item(data)
       response = {}
+      consumed_capacity = {}
 
       data['RequestItems'].each do |table_name, table_data|
         table = find_table(table_name)
 
         unless response[table_name]
-          response[table_name] = { 'ConsumedCapacityUnits' => 1, 'Items' => [] }
+          response[table_name] = []
+          set_consumed_capacity(consumed_capacity, table, data)
         end
 
         table_data['Keys'].each do |key|
           if item_hash = table.get_raw_item(key, table_data['AttributesToGet'])
-            response[table_name]['Items'] << item_hash
+            response[table_name] << item_hash
          end
        end
      end
 
-      { 'Responses' => response, 'UnprocessedKeys' => {}}
+      response = { 'Responses' => response, 'UnprocessedKeys' => {} }
+      merge_consumed_capacity(consumed_capacity, response)
    end
 
    def batch_write_item(data)
      response = {}
+      consumed_capacity = {}
+      item_collection_metrics = {}
+      merge_metrics = false
      items = {}
      request_count = 0
 
@@ -118,6 +124,7 @@ module FakeDynamo
         table = find_table(table_name)
 
         items[table.name] ||= {}
+        item_collection_metrics[table.name] ||= []
 
         requests.each do |request|
           if request['PutRequest']
@@ -139,21 +146,47 @@ module FakeDynamo
       # real modification
       items.each do |table_name, requests|
         table = find_table(table_name)
+        item_collection_metrics[table.name] ||= []
+
         requests.each do |key, value|
           if value == :delete
             table.batch_delete(key)
           else
             table.batch_put(value)
           end
+
+          unless (metrics = Item.from_key(key).collection_metrics(data)).empty?
+            merge_metrics = true
+            item_collection_metrics[table.name] << metrics['ItemCollectionMetrics']
+          end
+
         end
-        response[table_name] = { 'ConsumedCapacityUnits' => 1 }
+        set_consumed_capacity(consumed_capacity, table, data)
       end
 
-      { 'Responses' => response, 'UnprocessedItems' => {} }
+      response = { 'UnprocessedItems' => {} }
+      response = merge_consumed_capacity(consumed_capacity, response)
+      if merge_metrics
+        response.merge!({'ItemCollectionMetrics' => item_collection_metrics})
+      end
+      response
     end
 
     private
 
+    def set_consumed_capacity(consumed_capacity, table, data)
+      unless (capacity = table.consumed_capacity(data)).empty?
+        consumed_capacity[table.name] = capacity['ConsumedCapacity']
+      end
+    end
+
+    def merge_consumed_capacity(consumed_capacity, response)
+      unless consumed_capacity.empty?
+        response['ConsumedCapacity'] = consumed_capacity.values
+      end
+      response
+    end
+
     def check_item_conflict(items, table_name, key)
       if items[table_name][key]
         raise ValidationException, 'Provided list of item keys contains duplicates'
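
With capacity reporting now opt-in, 'Responses' maps each table name straight to an array of item hashes instead of the old {'ConsumedCapacityUnits' => 1, 'Items' => [...]} wrapper. A minimal sketch of the new shape, assuming a DB instance db and a hypothetical table 'users' with hash key 'id':

    db.batch_get_item(
      'RequestItems' => {'users' => {'Keys' => [{'id' => {'S' => '1'}}]}},
      'ReturnConsumedCapacity' => 'TOTAL')
    # => {'Responses' => {'users' => [...]},   # bare item arrays, no 'Items' wrapper
    #     'UnprocessedKeys' => {},
    #     'ConsumedCapacity' => [{'CapacityUnits' => 1, 'TableName' => 'users'}]}

Without 'ReturnConsumedCapacity' => 'TOTAL' the ConsumedCapacity key is omitted entirely, since set_consumed_capacity stores nothing.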
@@ -104,5 +104,15 @@ module FakeDynamo
         attributes[name] = attribute
       end
     end
+
+    def collection_metrics(data)
+      if data['ReturnItemCollectionMetrics'] == 'SIZE'
+        { 'ItemCollectionMetrics' =>
+          { 'ItemCollectionKey' => key.primary.as_hash,
+            'SizeEstimateRangeGB' => [ 0, 1 ] } }
+      else
+        {}
+      end
+    end
   end
 end
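
Item#collection_metrics is equally opt-in: it reports only when the request sets ReturnItemCollectionMetrics to 'SIZE'. A sketch of both cases, assuming an item whose hash key is id = '1' and assuming Attribute#as_hash renders {name => {type => value}}:

    item.collection_metrics('ReturnItemCollectionMetrics' => 'SIZE')
    # => {'ItemCollectionMetrics' =>
    #      {'ItemCollectionKey' => {'id' => {'S' => '1'}},
    #       'SizeEstimateRangeGB' => [0, 1]}}
    item.collection_metrics({})
    # => {}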
@@ -9,10 +9,10 @@ module FakeDynamo
       def from_data(key_data, key_schema)
         key = Key.new
         validate_key_data(key_data, key_schema)
-        key.primary = Attribute.from_hash(key_schema.hash_key.name, key_data['HashKeyElement'])
+        key.primary = Attribute.from_hash(key_schema.hash_key.name, key_data[key_schema.hash_key.name])
 
         if key_schema.range_key
-          key.range = Attribute.from_hash(key_schema.range_key.name, key_data['RangeKeyElement'])
+          key.range = Attribute.from_hash(key_schema.range_key.name, key_data[key_schema.range_key.name])
         end
         key
       end
@@ -60,14 +60,6 @@ module FakeDynamo
       result
     end
 
-    def as_key_hash
-      result = { 'HashKeyElement' => { @primary.type => @primary.value }}
-      if @range
-        result.merge!({'RangeKeyElement' => { @range.type => @range.value }})
-      end
-      result
-    end
-
     def <=>(other)
       [primary, range] <=> [other.primary, other.range]
     end
@@ -3,23 +3,42 @@ module FakeDynamo
 
     attr_accessor :hash_key, :range_key
 
-    def initialize(data)
-      extract_values(data)
+    def initialize(key_schema, attribute_definitions)
+      extract_values(key_schema, attribute_definitions)
     end
 
     def description
-      description = { 'HashKeyElement' => hash_key.description }
+      description = [{'AttributeName' => hash_key.name, 'KeyType' => 'HASH'}]
       if range_key
-        description['RangeKeyElement'] = range_key.description
+        description << [{'AttributeName' => range_key.name, 'KeyType' => 'RANGE'}]
       end
       description
     end
 
+    def keys
+      result = [hash_key.name]
+      if range_key
+        result << range_key.name
+      end
+      result
+    end
+
     private
-    def extract_values(data)
-      @hash_key = Attribute.from_data(data['HashKeyElement'])
-      if range_key_element = data['RangeKeyElement']
-        @range_key = Attribute.from_data(range_key_element)
+    def extract_values(key_schema, attribute_definitions)
+      hash_key_name = find(key_schema, 'KeyType', 'HASH', 'AttributeName')
+      hash_key_type = find(attribute_definitions, 'AttributeName', hash_key_name, 'AttributeType')
+      @hash_key = Attribute.new(hash_key_name, nil, hash_key_type)
+      if range_key_name = find(key_schema, 'KeyType', 'RANGE', 'AttributeName', false)
+        range_key_type = find(attribute_definitions, 'AttributeName', range_key_name, 'AttributeType')
+        @range_key = Attribute.new(range_key_name, nil, range_key_type)
+      end
+    end
+
+    def find(list, key, value, pluck, raise_on_error = true)
+      if element = list.find { |e| e[key] == value }
+        element[pluck]
+      elsif raise_on_error
+        raise ValidationException, 'Some index key attributes are not defined in AttributeDefinitions'
       end
     end
   end
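
KeySchema.new now takes the newer API shape, a list of AttributeName/KeyType pairs plus the table's AttributeDefinitions, replacing the old HashKeyElement/RangeKeyElement hash. A sketch with hypothetical attribute names:

    schema = FakeDynamo::KeySchema.new(
      [{'AttributeName' => 'id',   'KeyType' => 'HASH'},
       {'AttributeName' => 'time', 'KeyType' => 'RANGE'}],
      [{'AttributeName' => 'id',   'AttributeType' => 'S'},
       {'AttributeName' => 'time', 'AttributeType' => 'N'}])
    schema.keys  # => ['id', 'time']

Naming a key attribute that is missing from AttributeDefinitions raises the ValidationException shown in find above.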
@@ -0,0 +1,27 @@
+module FakeDynamo
+  class LocalSecondaryIndex
+    extend Validation
+
+    attr_accessor :name, :key_schema, :projection
+
+    class << self
+      def from_data(index_data, attribute_definitions, table_key_schema)
+        index = LocalSecondaryIndex.new
+        index.name = index_data['IndexName']
+        index.key_schema = KeySchema.new(index_data['KeySchema'], attribute_definitions)
+        index.projection = Projection.from_data(index_data['Projection'])
+        validate_range_key(index.key_schema)
+        validate_hash_key(index.key_schema, table_key_schema)
+        index
+      end
+    end
+
+    def description
+      {'IndexName' => name,
+       'IndexSizeBytes' => 0,
+       'ItemCount' => 0,
+       'KeySchema' => key_schema.description,
+       'Projection' => projection.description}
+    end
+  end
+end
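
LocalSecondaryIndex.from_data consumes the matching CreateTable fragment. A sketch, where attribute_definitions and table_key_schema would come from the enclosing table data and all names are hypothetical:

    index = FakeDynamo::LocalSecondaryIndex.from_data(
      {'IndexName'  => 'time_index',
       'KeySchema'  => [{'AttributeName' => 'id',   'KeyType' => 'HASH'},
                        {'AttributeName' => 'time', 'KeyType' => 'RANGE'}],
       'Projection' => {'ProjectionType' => 'KEYS_ONLY'}},
      attribute_definitions, table_key_schema)

The validate_range_key and validate_hash_key hooks (from the Validation module) guard the index's key shape against the table's schema.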
@@ -0,0 +1,30 @@
+module FakeDynamo
+  class Projection
+    extend Validation
+    attr_accessor :type, :non_key_attributes
+
+    def initialize(type, non_key_attributes)
+      @type, @non_key_attributes = type, non_key_attributes
+    end
+
+    class << self
+      def from_data(data)
+        projection = Projection.new(data['ProjectionType'], data['NonKeyAttributes'])
+        validate_projection(projection)
+        projection
+      end
+    end
+
+    def description
+      {'ProjectionType' => type}.merge(non_key_attributes_description)
+    end
+
+    def non_key_attributes_description
+      if non_key_attributes
+        {'NonKeyAttributes' => @non_key_attributes}
+      else
+        {}
+      end
+    end
+  end
+end
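
Projection#description round-trips what from_data was given, dropping NonKeyAttributes when it is absent. A quick sketch ('author' is a hypothetical attribute name):

    FakeDynamo::Projection.from_data(
      'ProjectionType' => 'INCLUDE', 'NonKeyAttributes' => ['author']).description
    # => {'ProjectionType' => 'INCLUDE', 'NonKeyAttributes' => ['author']}

    FakeDynamo::Projection.from_data('ProjectionType' => 'ALL').description
    # => {'ProjectionType' => 'ALL'}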
@@ -4,8 +4,8 @@ module FakeDynamo
     include Filter
 
     attr_accessor :creation_date_time, :read_capacity_units, :write_capacity_units,
-      :name, :status, :key_schema, :items, :size_bytes, :last_increased_time,
-      :last_decreased_time
+      :name, :status, :attribute_definitions, :key_schema, :items, :size_bytes,
+      :local_secondary_indexes, :last_increased_time, :last_decreased_time
 
     def initialize(data)
       extract_values(data)
@@ -15,21 +15,48 @@ module FakeDynamo
     def description
       {
         'TableDescription' => {
+          'AttributeDefinitions' => attribute_definitions.map(&:description),
           'CreationDateTime' => creation_date_time,
           'KeySchema' => key_schema.description,
-          'ProvisionedThroughput' => {
-            'ReadCapacityUnits' => read_capacity_units,
-            'WriteCapacityUnits' => write_capacity_units
-          },
+          'ProvisionedThroughput' => throughput_description,
           'TableName' => name,
-          'TableStatus' => status
-        }
+          'TableStatus' => status,
+          'ItemCount' => items.count,
+          'TableSizeBytes' => size_bytes
+        }.merge(local_secondary_indexes_description)
       }
     end
 
+    def throughput_description
+      result = {
+        'NumberOfDecreasesToday' => 0,
+        'ReadCapacityUnits' => read_capacity_units,
+        'WriteCapacityUnits' => write_capacity_units
+      }
+
+      if last_increased_time
+        result['LastIncreaseDateTime'] = @last_increased_time
+      end
+
+      if last_decreased_time
+        result['LastDecreaseDateTime'] = @last_decreased_time
+      end
+
+      result
+    end
+
+    def local_secondary_indexes_description
+      if local_secondary_indexes
+        { 'LocalSecondaryIndexes' => local_secondary_indexes.map(&:description) }
+      else
+        {}
+      end
+    end
+
     def create_table_data
       {
         'TableName' => name,
+        'AttributeDefinitions' => attribute_definitions.map(&:description),
         'KeySchema' => key_schema.description,
         'ProvisionedThroughput' => {
           'ReadCapacityUnits' => read_capacity_units,
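
throughput_description folds the increase/decrease timestamps, previously bolted on inside update_throughput, into one place. A sketch of its output for a hypothetical table provisioned at 5 read / 10 write units that has never been resized:

    table.throughput_description
    # => {'NumberOfDecreasesToday' => 0,
    #     'ReadCapacityUnits' => 5,
    #     'WriteCapacityUnits' => 10}
    # 'LastIncreaseDateTime' / 'LastDecreaseDateTime' appear only after the
    # corresponding throughput change has happened.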
@@ -45,13 +72,8 @@ module FakeDynamo
       }
     end
 
-    def size_description
-      { 'ItemCount' => items.count,
-        'TableSizeBytes' => size_bytes }
-    end
-
     def describe_table
-      { 'Table' => description['TableDescription'] }.merge(size_description)
+      { 'Table' => description['TableDescription'] }
     end
 
     def activate
@@ -78,16 +100,7 @@ module FakeDynamo
 
       @read_capacity_units, @write_capacity_units = read_capacity_units, write_capacity_units
 
-      response = description.merge(size_description)
-
-      if last_increased_time
-        response['TableDescription']['ProvisionedThroughput']['LastIncreaseDateTime'] = @last_increased_time
-      end
-
-      if last_decreased_time
-        response['TableDescription']['ProvisionedThroughput']['LastDecreaseDateTime'] = @last_decreased_time
-      end
-
+      response = description
       response['TableDescription']['TableStatus'] = 'UPDATING'
       response
     end
@@ -98,7 +111,7 @@ module FakeDynamo
       check_conditions(old_item, data['Expected'])
       @items[item.key] = item
 
-      consumed_capacity.merge(return_values(data, old_item))
+      return_values(data, old_item).merge(item.collection_metrics(data))
     end
 
     def batch_put_request(data)
@@ -110,7 +123,7 @@ module FakeDynamo
     end
 
     def get_item(data)
-      response = consumed_capacity
+      response = consumed_capacity(data)
       if item_hash = get_raw_item(data['Key'], data['AttributesToGet'])
         response.merge!('Item' => item_hash)
       end
@@ -142,7 +155,12 @@ module FakeDynamo
       check_conditions(item, data['Expected'])
 
       @items.delete(key) if item
-      consumed_capacity.merge(return_values(data, item))
+      if !item
+        item = Item.from_key(key)
+        consumed_capacity(data).merge(item.collection_metrics(data))
+      else
+        return_values(data, item).merge(consumed_capacity(data)).merge(item.collection_metrics(data))
+      end
     end
 
     def batch_delete_request(data)
@@ -159,10 +177,11 @@ module FakeDynamo
       check_conditions(item, data['Expected'])
 
       unless item
+        item = Item.from_key(key)
         if create_item?(data)
-          item = @items[key] = Item.from_key(key)
+          @items[key] = item
         else
-          return consumed_capacity
+          return consumed_capacity(data).merge(item.collection_metrics(data))
         end
         item_created = true
       end
@@ -170,8 +189,10 @@ module FakeDynamo
       old_item = deep_copy(item)
       begin
         old_hash = item.as_hash
-        data['AttributeUpdates'].each do |name, update_data|
-          item.update(name, update_data)
+        if attribute_updates = data['AttributeUpdates']
+          attribute_updates.each do |name, update_data|
+            item.update(name, update_data)
+          end
         end
       rescue => e
         if item_created
@@ -182,7 +203,7 @@ module FakeDynamo
         raise e
       end
 
-      consumed_capacity.merge(return_values(data, old_hash, item))
+      return_values(data, old_hash, item).merge(item.collection_metrics(data))
     end
 
     def deep_copy(x)
@@ -190,66 +211,100 @@ module FakeDynamo
     end
 
     def query(data)
-      unless key_schema.range_key
-        raise ValidationException, "Query can be performed only on a table with a HASH,RANGE key schema"
+      range_key_present
+      select_and_attributes_to_get_present(data)
+      validate_limit(data)
+
+      index = nil
+      if index_name = data['IndexName']
+        index = local_secondary_indexes.find { |i| i.name == index_name }
+        raise ValidationException, "The provided starting key is invalid" unless index
+        schema = index.key_schema
+      else
+        schema = key_schema
       end
 
-      count_and_attributes_to_get_present?(data)
-      validate_limit(data)
+      hash_condition = data['KeyConditions'][schema.hash_key.name]
+      validate_hash_condition(hash_condition)
 
-      hash_attribute = Attribute.from_hash(key_schema.hash_key.name, data['HashKeyValue'])
+      hash_attribute = Attribute.from_hash(schema.hash_key.name, hash_condition['AttributeValueList'].first)
       matched_items = get_items_by_hash_key(hash_attribute)
 
       forward = data.has_key?('ScanIndexForward') ? data['ScanIndexForward'] : true
-      matched_items = drop_till_start(matched_items, data['ExclusiveStartKey'], forward)
+      matched_items = drop_till_start(matched_items, data['ExclusiveStartKey'], forward, schema)
 
-      if data['RangeKeyCondition']
-        conditions = {key_schema.range_key.name => data['RangeKeyCondition']}
+      if !(range_condition = data['KeyConditions'].clone.tap { |h| h.delete(schema.hash_key.name) }).empty?
+        validate_range_condition(range_condition, schema)
+        conditions = range_condition
       else
         conditions = {}
       end
 
-      result, last_evaluated_item, _ = filter(matched_items, conditions, data['Limit'], true)
-
-      response = {
-        'Count' => result.size,
-        'ConsumedCapacityUnits' => 1 }
+      results, last_evaluated_item, _ = filter(matched_items, conditions, data['Limit'], true)
 
-      unless data['Count']
-        response['Items'] = result.map { |r| filter_attributes(r, data['AttributesToGet']) }
-      end
+      response = {'Count' => results.size}.merge(consumed_capacity(data))
+      merge_items(response, data, results, index)
 
       if last_evaluated_item
-        response['LastEvaluatedKey'] = last_evaluated_item.key.as_key_hash
+        response['LastEvaluatedKey'] = last_evaluated_item.key.as_hash
       end
       response
     end
 
     def scan(data)
-      count_and_attributes_to_get_present?(data)
+      select_and_attributes_to_get_present(data)
       validate_limit(data)
       conditions = data['ScanFilter'] || {}
-      all_items = drop_till_start(items.values, data['ExclusiveStartKey'], true)
-      result, last_evaluated_item, scaned_count = filter(all_items, conditions, data['Limit'], false)
+      all_items = drop_till_start(items.values, data['ExclusiveStartKey'], true, key_schema)
+      results, last_evaluated_item, scaned_count = filter(all_items, conditions, data['Limit'], false)
       response = {
-        'Count' => result.size,
-        'ScannedCount' => scaned_count,
-        'ConsumedCapacityUnits' => 1 }
+        'Count' => results.size,
+        'ScannedCount' => scaned_count}.merge(consumed_capacity(data))
 
-      unless data['Count']
-        response['Items'] = result.map { |r| filter_attributes(r, data['AttributesToGet']) }
-      end
+      merge_items(response, data, results)
 
       if last_evaluated_item
-        response['LastEvaluatedKey'] = last_evaluated_item.key.as_key_hash
+        response['LastEvaluatedKey'] = last_evaluated_item.key.as_hash
+      end
+
+      response
+    end
+
+    def merge_items(response, data, results, index = nil)
+      if data['Select'] != 'COUNT'
+        attributes_to_get = nil # select everything
+
+        if data['AttributesToGet']
+          attributes_to_get = data['AttributesToGet']
+        elsif data['Select'] == 'ALL_PROJECTED_ATTRIBUTES'
+          attributes_to_get = projected_attributes(index)
+        end
+
+        response['Items'] = results.map { |r| filter_attributes(r, attributes_to_get) }
       end
 
       response
     end
 
-    def count_and_attributes_to_get_present?(data)
-      if data['Count'] and data['AttributesToGet']
-        raise ValidationException, "Cannot specify the AttributesToGet when choosing to get only the Count"
+    def projected_attributes(index)
+      if !index
+        raise ValidationException, "ALL_PROJECTED_ATTRIBUTES can be used only when Querying using an IndexName"
+      else
+        case index.projection.type
+        when 'ALL'
+          nil
+        when 'KEYS_ONLY'
+          (key_schema.keys + index.key_schema.keys).uniq
+        when 'INCLUDE'
+          (key_schema.keys + index.key_schema.keys + index.projection.non_key_attributes).uniq
+        end
+      end
+    end
+
+    def select_and_attributes_to_get_present(data)
+      select = data['Select']
+      if select and data['AttributesToGet'] and (select != 'SPECIFIC_ATTRIBUTES')
+        raise ValidationException, "Cannot specify the AttributesToGet when choosing to get only the #{select}"
      end
    end
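
Queries now carry every key condition, hash key included, under KeyConditions; the old top-level HashKeyValue and RangeKeyCondition fields are gone. A sketch against a hypothetical table with hash key 'id' and range key 'time', assuming the Filter module's usual ComparisonOperator/AttributeValueList condition shape:

    table.query(
      'KeyConditions' => {
        'id'   => {'AttributeValueList' => [{'S' => 'user1'}],
                   'ComparisonOperator' => 'EQ'},
        'time' => {'AttributeValueList' => [{'N' => '100'}],
                   'ComparisonOperator' => 'GT'}},
      'Select' => 'COUNT')
    # => {'Count' => ...}   # no 'Items' when Select is COUNT

Passing an IndexName switches the key schema to the matching local secondary index, and 'Select' => 'ALL_PROJECTED_ATTRIBUTES' narrows the returned attributes via projected_attributes.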
 
@@ -259,7 +314,7 @@ module FakeDynamo
       end
     end
 
-    def drop_till_start(all_items, start_key_hash, forward)
+    def drop_till_start(all_items, start_key_hash, forward, schema)
       all_items = all_items.sort_by { |item| item.key }
 
       unless forward
@@ -267,7 +322,7 @@ module FakeDynamo
       end
 
       if start_key_hash
-        start_key = Key.from_data(start_key_hash, key_schema)
+        start_key = Key.from_data(start_key_hash, schema)
         all_items.drop_while do |item|
           if forward
             item.key <= start_key
@@ -296,6 +351,8 @@ module FakeDynamo
           end
         end
 
+        scaned_count += 1
+
         if select
           result << item
           if (limit -= 1) == 0
@@ -303,8 +360,6 @@ module FakeDynamo
             break
           end
         end
-
-        scaned_count += 1
       end
       [result, last_evaluated_item, scaned_count]
     end
@@ -316,9 +371,13 @@ module FakeDynamo
     end
 
     def create_item?(data)
-      data['AttributeUpdates'].any? do |name, update_data|
-        action = update_data['Action']
-        ['PUT', 'ADD', nil].include? action
+      if attribute_updates = data['AttributeUpdates']
+        attribute_updates.any? do |name, update_data|
+          action = update_data['Action']
+          ['PUT', 'ADD', nil].include? action
+        end
+      else
+        true
       end
     end
 
@@ -352,17 +411,23 @@ module FakeDynamo
        raise 'unknown return value'
      end
 
-      unless result.empty?
-        { 'Attributes' => result }
+      result = unless result.empty?
+                 { 'Attributes' => result }
+               else
+                 {}
+               end
+
+      result.merge(consumed_capacity(data))
+    end
+
+    def consumed_capacity(data)
+      if data['ReturnConsumedCapacity'] == 'TOTAL'
+        {'ConsumedCapacity' => { 'CapacityUnits' => 1, 'TableName' => @name }}
      else
        {}
      end
    end
 
-    def consumed_capacity
-      { 'ConsumedCapacityUnits' => 1 }
-    end
-
    def check_conditions(old_item, conditions)
      return unless conditions
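
consumed_capacity is the single switch behind all the per-request capacity reporting above, keyed off ReturnConsumedCapacity. Its two cases, sketched for a hypothetical table named 'users' (the method may well be private in the actual class):

    table.consumed_capacity('ReturnConsumedCapacity' => 'TOTAL')
    # => {'ConsumedCapacity' => {'CapacityUnits' => 1, 'TableName' => 'users'}}
    table.consumed_capacity({})
    # => {}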
 
@@ -403,8 +468,12 @@ module FakeDynamo
 
     def extract_values(data)
       @name = data['TableName']
-      @key_schema = KeySchema.new(data['KeySchema'])
+      @key_schema = KeySchema.new(data['KeySchema'], data['AttributeDefinitions'])
+      set_local_secondary_indexes(data)
+      @attribute_definitions = data['AttributeDefinitions'].map(&Attribute.method(:from_data))
       set_throughput(data['ProvisionedThroughput'])
+
+      validate_attribute_definitions
     end
 
     def set_throughput(throughput)
@@ -412,5 +481,34 @@ module FakeDynamo
       @write_capacity_units = throughput['WriteCapacityUnits']
     end
 
+    def set_local_secondary_indexes(data)
+      if indexes_data = data['LocalSecondaryIndexes']
+        @local_secondary_indexes = indexes_data.map do |index|
+          LocalSecondaryIndex.from_data(index, data['AttributeDefinitions'], @key_schema)
+        end
+        validate_range_key(key_schema)
+        validate_index_names(@local_secondary_indexes)
+      end
+    end
+
+    def validate_attribute_definitions
+      attribute_keys = @attribute_definitions.map(&:name)
+      used_keys = @key_schema.keys
+      if @local_secondary_indexes
+        used_keys += @local_secondary_indexes.map(&:key_schema).map(&:keys).flatten
+      end
+
+      used_keys.uniq!
+
+      if used_keys.uniq.size != attribute_keys.size
+        raise ValidationException, "Some AttributeDefinitions are not used AttributeDefinitions: #{attribute_keys.inspect}, keys used: #{used_keys.inspect}"
+      end
+    end
+
+    def range_key_present
+      unless key_schema.range_key
+        raise ValidationException, "Query can be performed only on a table with a HASH,RANGE key schema"
+      end
+    end
   end
 end
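
validate_attribute_definitions requires every declared attribute to be consumed by the table's key schema or one of its index key schemas. A sketch of the failure mode, assuming the class is FakeDynamo::Table and all names are hypothetical:

    FakeDynamo::Table.new(
      'TableName' => 'users',
      'KeySchema' => [{'AttributeName' => 'id', 'KeyType' => 'HASH'}],
      'AttributeDefinitions' => [
        {'AttributeName' => 'id',     'AttributeType' => 'S'},
        {'AttributeName' => 'unused', 'AttributeType' => 'S'}],
      'ProvisionedThroughput' => {'ReadCapacityUnits' => 5,
                                  'WriteCapacityUnits' => 5})
    # raises ValidationException: "Some AttributeDefinitions are not used ..."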