clientside_aws 0.0.17
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +4 -0
- data/Dockerfile +46 -0
- data/Gemfile +23 -0
- data/Gemfile.lock +99 -0
- data/README.md +105 -0
- data/bin/clientside_aws_build +6 -0
- data/bin/clientside_aws_run +5 -0
- data/bin/clientside_aws_test +4 -0
- data/clientside_aws.gemspec +31 -0
- data/clientside_aws/dynamodb.rb +722 -0
- data/clientside_aws/ec2.rb +103 -0
- data/clientside_aws/elastic_transcoder.rb +179 -0
- data/clientside_aws/firehose.rb +13 -0
- data/clientside_aws/kinesis.rb +13 -0
- data/clientside_aws/mock/core.rb +7 -0
- data/clientside_aws/mock/firehose.rb +14 -0
- data/clientside_aws/mock/kinesis.rb +18 -0
- data/clientside_aws/mock/s3.rb +59 -0
- data/clientside_aws/mock/ses.rb +74 -0
- data/clientside_aws/mock/sns.rb +17 -0
- data/clientside_aws/s3.rb +223 -0
- data/clientside_aws/ses.rb +9 -0
- data/clientside_aws/sns.rb +41 -0
- data/clientside_aws/sqs.rb +233 -0
- data/docker/clientside-aws-run +3 -0
- data/docker/redis-server-run +2 -0
- data/index.rb +57 -0
- data/lib/clientside_aws.rb +27 -0
- data/lib/clientside_aws/configuration.rb +14 -0
- data/lib/clientside_aws/mock.rb +224 -0
- data/lib/clientside_aws/version.rb +3 -0
- data/public/images/jscruff.jpg +0 -0
- data/public/images/spacer.gif +0 -0
- data/public/images/stock_video.mp4 +0 -0
- data/spec/dynamodb_spec.rb +1069 -0
- data/spec/ec2_spec.rb +138 -0
- data/spec/firehose_spec.rb +16 -0
- data/spec/kinesis_spec.rb +22 -0
- data/spec/s3_spec.rb +219 -0
- data/spec/sns_spec.rb +72 -0
- data/spec/spec_helper.rb +71 -0
- data/spec/sqs_spec.rb +87 -0
- data/spec/test_client/test.rb +45 -0
- data/spec/transcoder_spec.rb +138 -0
- metadata +241 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: cc7c9075e9963aa0ea834be7548e3ebe0fe2bd72
|
4
|
+
data.tar.gz: fd64a23e57207414f358a7dd1f4455c4bdd5681d
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 5ee2fae298c383b15e79ac83980ea96aaaec520051b87d89e510919b284f3ddae0e0f7ff72882d0220ca75252f1f94337628e35316de5b19235b1b3d76bc3ecd
|
7
|
+
data.tar.gz: 30e522b64c96cfe5423880c6a00957208e03e09e8491cb40c9e99df5c4e16f122b850ea7f88b1e8d883bfa3772a9caea581498453d585e7f7de2e7a5321344e3
|
data/.gitignore
ADDED
data/Dockerfile
ADDED
@@ -0,0 +1,46 @@
|
|
1
|
+
FROM phusion/baseimage:0.9.22
|
2
|
+
MAINTAINER Perry Street Software
|
3
|
+
|
4
|
+
# Set correct environment variables.
|
5
|
+
ENV HOME /root
|
6
|
+
|
7
|
+
# Use baseimage-docker's init system.
|
8
|
+
CMD ["/sbin/my_init"]
|
9
|
+
|
10
|
+
RUN mkdir /mnt/redis
|
11
|
+
|
12
|
+
RUN apt-add-repository ppa:brightbox/ruby-ng -y
|
13
|
+
RUN apt-get update && apt-get install -y ruby2.4 ruby2.4-dev git-core build-essential zlib1g-dev
|
14
|
+
RUN apt-get install -y wget curl
|
15
|
+
|
16
|
+
RUN cd /opt ; wget "http://download.redis.io/releases/redis-2.8.24.tar.gz"
|
17
|
+
RUN cd /opt ; gunzip redis-2.8.24.tar.gz ; tar -xvf redis-2.8.24.tar
|
18
|
+
RUN cd /opt/redis-2.8.24 ; ./configure ; make ; make install
|
19
|
+
|
20
|
+
# Install for testing ffmpeg stuff
|
21
|
+
RUN apt-get install -y libav-tools
|
22
|
+
|
23
|
+
RUN mkdir /etc/service/redis-server
|
24
|
+
ADD docker/redis-server-run /etc/service/redis-server/run
|
25
|
+
RUN chmod 755 /etc/service/redis-server/run
|
26
|
+
|
27
|
+
# Add redis conf file
|
28
|
+
RUN mkdir /etc/redis
|
29
|
+
RUN cd /opt/redis-2.8.24 ; cat redis.conf | sed "s/dir \.\//dir \/mnt\/redis\//" > /etc/redis/redis.conf
|
30
|
+
|
31
|
+
# Now, fetch clientside aws
|
32
|
+
RUN gem install bundler
|
33
|
+
RUN cd /opt
|
34
|
+
COPY . /opt/clientside_aws/
|
35
|
+
RUN cd /opt/clientside_aws ; bundle install
|
36
|
+
|
37
|
+
RUN mkdir /etc/service/clientside-aws
|
38
|
+
ADD docker/clientside-aws-run /etc/service/clientside-aws/run
|
39
|
+
RUN chmod 755 /etc/service/clientside-aws/run
|
40
|
+
|
41
|
+
EXPOSE 4567
|
42
|
+
|
43
|
+
# Clean up APT when done.
|
44
|
+
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
45
|
+
|
46
|
+
WORKDIR /opt/clientside_aws
|
data/Gemfile
ADDED
@@ -0,0 +1,23 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
source 'https://rubygems.org'
|
4
|
+
|
5
|
+
# These are gem dependencies for our Docker image,
|
6
|
+
# not for our Gemfile which points clients to our docker image
|
7
|
+
gem 'aws-sdk', '2.10.26'
|
8
|
+
gem 'aws-sdk-v1', '1.66.0'
|
9
|
+
gem 'builder', '3.1.4'
|
10
|
+
gem 'httparty', '0.15.6'
|
11
|
+
gem 'json', '1.8.6'
|
12
|
+
gem 'json_pure', '1.8.6'
|
13
|
+
gem 'monkey-lib', '0.5.4'
|
14
|
+
gem 'pry'
|
15
|
+
gem 'rack', '1.5.2'
|
16
|
+
gem 'rack-cors', '0.2.9', require: 'rack/cors'
|
17
|
+
gem 'rack-test', '0.5.7'
|
18
|
+
gem 'redis', '3.2.0'
|
19
|
+
gem 'rspec', '2.14.1'
|
20
|
+
gem 'sinatra', '1.4.8'
|
21
|
+
gem 'sinatra-reloader', '0.5.0'
|
22
|
+
gem 'uuid', '2.3.1'
|
23
|
+
gem 'webmock', '3.1.0'
|
data/Gemfile.lock
ADDED
@@ -0,0 +1,99 @@
|
|
1
|
+
GEM
|
2
|
+
remote: https://rubygems.org/
|
3
|
+
specs:
|
4
|
+
addressable (2.5.2)
|
5
|
+
public_suffix (>= 2.0.2, < 4.0)
|
6
|
+
aws-sdk (2.10.26)
|
7
|
+
aws-sdk-resources (= 2.10.26)
|
8
|
+
aws-sdk-core (2.10.26)
|
9
|
+
aws-sigv4 (~> 1.0)
|
10
|
+
jmespath (~> 1.0)
|
11
|
+
aws-sdk-resources (2.10.26)
|
12
|
+
aws-sdk-core (= 2.10.26)
|
13
|
+
aws-sdk-v1 (1.66.0)
|
14
|
+
json (~> 1.4)
|
15
|
+
nokogiri (>= 1.4.4)
|
16
|
+
aws-sigv4 (1.0.2)
|
17
|
+
backports (3.10.0)
|
18
|
+
builder (3.1.4)
|
19
|
+
coderay (1.1.2)
|
20
|
+
crack (0.4.3)
|
21
|
+
safe_yaml (~> 1.0.0)
|
22
|
+
diff-lcs (1.3)
|
23
|
+
hashdiff (0.3.7)
|
24
|
+
httparty (0.15.6)
|
25
|
+
multi_xml (>= 0.5.2)
|
26
|
+
jmespath (1.3.1)
|
27
|
+
json (1.8.6)
|
28
|
+
json_pure (1.8.6)
|
29
|
+
macaddr (1.7.1)
|
30
|
+
systemu (~> 2.6.2)
|
31
|
+
method_source (0.9.0)
|
32
|
+
mini_portile2 (2.3.0)
|
33
|
+
monkey-lib (0.5.4)
|
34
|
+
backports
|
35
|
+
multi_xml (0.6.0)
|
36
|
+
nokogiri (1.8.1)
|
37
|
+
mini_portile2 (~> 2.3.0)
|
38
|
+
pry (0.11.1)
|
39
|
+
coderay (~> 1.1.0)
|
40
|
+
method_source (~> 0.9.0)
|
41
|
+
public_suffix (3.0.0)
|
42
|
+
rack (1.5.2)
|
43
|
+
rack-cors (0.2.9)
|
44
|
+
rack-protection (1.5.3)
|
45
|
+
rack
|
46
|
+
rack-test (0.5.7)
|
47
|
+
rack (>= 1.0)
|
48
|
+
redis (3.2.0)
|
49
|
+
rspec (2.14.1)
|
50
|
+
rspec-core (~> 2.14.0)
|
51
|
+
rspec-expectations (~> 2.14.0)
|
52
|
+
rspec-mocks (~> 2.14.0)
|
53
|
+
rspec-core (2.14.8)
|
54
|
+
rspec-expectations (2.14.5)
|
55
|
+
diff-lcs (>= 1.1.3, < 2.0)
|
56
|
+
rspec-mocks (2.14.6)
|
57
|
+
safe_yaml (1.0.4)
|
58
|
+
sinatra (1.4.8)
|
59
|
+
rack (~> 1.5)
|
60
|
+
rack-protection (~> 1.4)
|
61
|
+
tilt (>= 1.3, < 3)
|
62
|
+
sinatra-advanced-routes (0.5.3)
|
63
|
+
sinatra (~> 1.0)
|
64
|
+
sinatra-reloader (0.5.0)
|
65
|
+
sinatra (~> 1.0)
|
66
|
+
sinatra-advanced-routes (~> 0.5.0)
|
67
|
+
systemu (2.6.5)
|
68
|
+
tilt (2.0.8)
|
69
|
+
uuid (2.3.1)
|
70
|
+
macaddr (~> 1.0)
|
71
|
+
webmock (3.1.0)
|
72
|
+
addressable (>= 2.3.6)
|
73
|
+
crack (>= 0.3.2)
|
74
|
+
hashdiff
|
75
|
+
|
76
|
+
PLATFORMS
|
77
|
+
ruby
|
78
|
+
|
79
|
+
DEPENDENCIES
|
80
|
+
aws-sdk (= 2.10.26)
|
81
|
+
aws-sdk-v1 (= 1.66.0)
|
82
|
+
builder (= 3.1.4)
|
83
|
+
httparty (= 0.15.6)
|
84
|
+
json (= 1.8.6)
|
85
|
+
json_pure (= 1.8.6)
|
86
|
+
monkey-lib (= 0.5.4)
|
87
|
+
pry
|
88
|
+
rack (= 1.5.2)
|
89
|
+
rack-cors (= 0.2.9)
|
90
|
+
rack-test (= 0.5.7)
|
91
|
+
redis (= 3.2.0)
|
92
|
+
rspec (= 2.14.1)
|
93
|
+
sinatra (= 1.4.8)
|
94
|
+
sinatra-reloader (= 0.5.0)
|
95
|
+
uuid (= 2.3.1)
|
96
|
+
webmock (= 3.1.0)
|
97
|
+
|
98
|
+
BUNDLED WITH
|
99
|
+
1.15.4
|
data/README.md
ADDED
@@ -0,0 +1,105 @@
|
|
1
|
+
clientside_aws
|
2
|
+
===================
|
3
|
+
|
4
|
+
This code is meant to be used by developers who are attempting to build web applications on AWS but wish to run client-side testing and validation. Presently, this project mocks DynamoDB and SQS.
|
5
|
+
|
6
|
+
While creating and tearing down "free-tier" SQS and DynamoDB databases may be an acceptable solution for some, the time required (tens of seconds or minutes) quickly makes TDD (test-driven development) impractical. Just like we can use an in-memory sqlite3-based solution for mocking Mysql databases with ActiveRecord, we can now mock SQS and DynamoDB databases in memory using Redis.
|
7
|
+
|
8
|
+
To run this code, you will need ruby, sinatra, httparty, and the json and redis rubygems. I also use the sinatra/reloader gem to aid in development, but it is not necessary.
|
9
|
+
|
10
|
+
You will also need redis-server installed locally
|
11
|
+
|
12
|
+
Make sure redis-server is in your path
|
13
|
+
|
14
|
+
Then, from the command line, run:
|
15
|
+
|
16
|
+
ruby spec/dynamodb_spec.rb
|
17
|
+
or
|
18
|
+
|
19
|
+
ruby spec/sqs_spec.rb
|
20
|
+
|
21
|
+
That will run the unit tests against this code.
|
22
|
+
|
23
|
+
Overview
|
24
|
+
--------
|
25
|
+
|
26
|
+
This code works by overwriting the AWS service URLs in the aws-sdk gem, then monkeypatching the AWS::Core::Client request methods to use Rack's put, get, post and delete methods (see aws_mock.rb). This points to a Sinatra endpoint that processes the DynamoDB requests. Provided you are using the DynamoDB methods defined in aws-sdk when running tests and validations, the ruby client never knows it isn't talking to the real service.
|
27
|
+
|
28
|
+
I have not packaged this up as a gem, because it needs to be a standalone sinatra project so you can launch a server from the command line (see below). I am open to suggestions about how to make it easier/cleaner to include dynamodb_mock into your actual project; right now you have to use a require statement that has knowledge of your directory structure.
|
29
|
+
|
30
|
+
Adding to your project
|
31
|
+
---------------------------
|
32
|
+
|
33
|
+
First, if you plan on running any rspec unit tests, you should update the REDIS_PATH variable in spec_helper.rb to point to your redis binary.
|
34
|
+
|
35
|
+
To start clientside_aws stand-alone, from the command line, run:
|
36
|
+
|
37
|
+
cd ~/clientside_aws/
|
38
|
+
ruby index.rb -p 4568
|
39
|
+
|
40
|
+
This launches a Sinatra app, running on port 4568, that can respond to and support various services using the AWS protocol. You have your own, client-side SQS and DynamoDB server! If you are capable of mocking the requests in your language of choice to point to localhost:4568 you are ready to go. Included in this project is the code to mock in Ruby.
|
41
|
+
|
42
|
+
For example, here's how I added clientside_aws to my Sinatra project:
|
43
|
+
|
44
|
+
configure :development do
|
45
|
+
require 'clientside_aws'
|
46
|
+
DYNAMODB = AWS::DynamoDB.new(
|
47
|
+
:access_key_id => "...",
|
48
|
+
:secret_access_key => "...")
|
49
|
+
# more config
|
50
|
+
end
|
51
|
+
|
52
|
+
I can then access the DynamoDB API from my code using the standard ruby aws-sdk DynamoDB class, discussed in more detail here:
|
53
|
+
http://rubydoc.info/github/amazonwebservices/aws-sdk-for-ruby/master/AWS/DynamoDB
|
54
|
+
|
55
|
+
Assuming you are including the 'aws_mock' file, you can call DynamoDB just as you normally would in your code. For example:
|
56
|
+
|
57
|
+
dynamo_db = AWS::DynamoDB.new(
|
58
|
+
:access_key_id => "...",
|
59
|
+
:secret_access_key => "...")
|
60
|
+
|
61
|
+
visitors_table = dynamo_db.tables.create("visitors", 10, 5,
|
62
|
+
:hash_key => { :creator_id => :number },
|
63
|
+
:range_key => {:date => :number})
|
64
|
+
|
65
|
+
visitors_table.hash_key = [:creator_id, :number]
|
66
|
+
visitors_table.range_key = [:date, :number]
|
67
|
+
|
68
|
+
(0..10).each do |idx|
|
69
|
+
visitors_table.items.put(:creator_id => 1, :date => Time.now.to_f - (60 * idx), :target_id => 10 + idx)
|
70
|
+
end
|
71
|
+
|
72
|
+
You can check out the dynamodb_spec.rb file for more unit tests and sample DynamoDB ruby code.
|
73
|
+
|
74
|
+
If testing on a localhost(which you most likely are), you will need to add this line to your /etc/hosts file:
|
75
|
+
|
76
|
+
127.0.0.1 test.localhost
|
77
|
+
|
78
|
+
Amazon sticks the bucket to the front of the localhost to create a subdomain
|
79
|
+
|
80
|
+
TODO
|
81
|
+
--------------------
|
82
|
+
|
83
|
+
I am developing this code for my own test purposes as I go along. There are certainly bugs and I have not yet implemented all the DynamoDB methods. Code lacks support at this time for the following:
|
84
|
+
|
85
|
+
* Scan
|
86
|
+
* UpdateTable
|
87
|
+
* BatchGetItem
|
88
|
+
|
89
|
+
I also have very a limited test suite; I will expand as I can. Feel free to fork, add, and submit a pull request.
|
90
|
+
|
91
|
+
There are clearly many more AWS services one can mock up.
|
92
|
+
|
93
|
+
* * *
|
94
|
+
|
95
|
+
License
|
96
|
+
=======
|
97
|
+
MIT License (http://en.wikipedia.org/wiki/MIT_License). Some parts of this code were adapted from the aws-sdk project, which can be found at: https://github.com/amazonwebservices/aws-sdk-for-ruby and is itself licensed under the Apache 2.0 license.
|
98
|
+
|
99
|
+
Copyright (C) 2012 Perry Street Software, Inc.
|
100
|
+
|
101
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
102
|
+
|
103
|
+
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
104
|
+
|
105
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@@ -0,0 +1,31 @@
|
|
1
|
+
# coding: utf-8
|
2
|
+
lib = File.expand_path('../lib', __FILE__)
|
3
|
+
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
|
4
|
+
require 'clientside_aws/version'
|
5
|
+
|
6
|
+
Gem::Specification.new do |spec|
|
7
|
+
spec.name = 'clientside_aws'
|
8
|
+
spec.version = ClientsideAws::VERSION
|
9
|
+
spec.authors = ['Perry Street Software, Inc.']
|
10
|
+
spec.email = ['noreply@scruff.com']
|
11
|
+
spec.description = 'This code is meant to be used by developers who are attempting to build web applications on AWS but wish to run client-side testing and validation.'
|
12
|
+
spec.summary = 'Select AWS Services Replicated on Your Client'
|
13
|
+
spec.homepage = 'https://github.com/perrystreetsoftware/clientside_aws'
|
14
|
+
spec.license = 'MIT'
|
15
|
+
|
16
|
+
spec.files = `git ls-files`.split($INPUT_RECORD_SEPARATOR)
|
17
|
+
spec.executables = %w[clientside_aws_build clientside_aws_run]
|
18
|
+
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
|
19
|
+
spec.require_paths = ['lib']
|
20
|
+
|
21
|
+
spec.add_development_dependency 'rake', '10.1.0'
|
22
|
+
spec.add_development_dependency 'rack-test', '0.5.7'
|
23
|
+
spec.add_development_dependency 'rspec', '2.14.1'
|
24
|
+
spec.add_development_dependency 'bundler', '~> 1.3'
|
25
|
+
spec.add_dependency 'aws-sdk-v1', '1.59.0'
|
26
|
+
spec.add_dependency 'aws-sdk', '~> 2.0'
|
27
|
+
spec.add_dependency 'builder', '3.1.4'
|
28
|
+
spec.add_dependency 'httparty', '0.11.0'
|
29
|
+
spec.add_dependency 'json', '~> 1.8'
|
30
|
+
spec.add_dependency 'webmock', '3.1.0'
|
31
|
+
end
|
@@ -0,0 +1,722 @@
|
|
1
|
+
helpers do
|
2
|
+
def list_tables(_args)
|
3
|
+
tables = AWS_REDIS.smembers 'tables'
|
4
|
+
content_type 'application/x-amz-json-1.0'
|
5
|
+
{ 'TableNames' => tables,
|
6
|
+
'LastEvaluatedTableName' => nil }.to_json
|
7
|
+
end
|
8
|
+
|
9
|
+
def delete_table(args)
|
10
|
+
halt 500 unless args['TableName']
|
11
|
+
|
12
|
+
table_name = args['TableName']
|
13
|
+
key_schema = get_key_schema(table_name)
|
14
|
+
|
15
|
+
keys = AWS_REDIS.keys "tables.#{args['TableName']}.*"
|
16
|
+
AWS_REDIS.del *keys unless keys.empty?
|
17
|
+
AWS_REDIS.srem 'tables', table_name
|
18
|
+
|
19
|
+
{ Table: { CreationDateTime: (Time.now.to_i * 1000),
|
20
|
+
ItemCount: 0,
|
21
|
+
KeySchema: key_schema,
|
22
|
+
ProvisionedThroughput: {
|
23
|
+
LastIncreaseDateTime: (Time.now.to_i * 1000),
|
24
|
+
LastDecreaseDateTime: (Time.now.to_i * 1000),
|
25
|
+
ReadCapacityUnits: 10,
|
26
|
+
WriteCapacityUnits: 10
|
27
|
+
},
|
28
|
+
TableName: table_name,
|
29
|
+
TableStatus: 'DELETING' } }.to_json
|
30
|
+
end
|
31
|
+
|
32
|
+
def clear_from_secondary_indices(table_name, record_id)
|
33
|
+
# Note the keys operation is a table scan that is brutally inefficient
|
34
|
+
secondary_indexes = AWS_REDIS.smembers "tables.#{table_name}.secondary_indexes"
|
35
|
+
secondary_indexes.each do |si_raw|
|
36
|
+
si = JSON.parse(si_raw)
|
37
|
+
keys = AWS_REDIS.keys "tables.#{table_name}.secondary_index.#{si['IndexName']}.*"
|
38
|
+
keys.each do |key|
|
39
|
+
AWS_REDIS.srem key, record_id
|
40
|
+
end
|
41
|
+
end
|
42
|
+
end
|
43
|
+
|
44
|
+
def get_key_schema(table)
|
45
|
+
hashkey_json = AWS_REDIS.get "tables.#{table}.hashkey"
|
46
|
+
rangekey_json = AWS_REDIS.get "tables.#{table}.rangekey"
|
47
|
+
|
48
|
+
result = {}
|
49
|
+
if hashkey_json
|
50
|
+
hashkey = JSON.parse(hashkey_json)
|
51
|
+
result[:HashKeyElement] =
|
52
|
+
{ AttributeName: hashkey['AttributeName'], AttributeType: 'S' }
|
53
|
+
end
|
54
|
+
if rangekey_json
|
55
|
+
rangekey = JSON.parse(rangekey_json)
|
56
|
+
result[:RangeKeyElement] =
|
57
|
+
{ AttributeName: rangekey['AttributeName'], AttributeType: 'N' }
|
58
|
+
end
|
59
|
+
|
60
|
+
result
|
61
|
+
end
|
62
|
+
|
63
|
+
def describe_table(args)
|
64
|
+
table_name = args['TableName']
|
65
|
+
key_schema = get_key_schema(table_name)
|
66
|
+
halt 404 unless key_schema && !key_schema.keys.empty?
|
67
|
+
{ Table: { CreationDateTime: (Time.now.to_i * 1000),
|
68
|
+
ItemCount: 0,
|
69
|
+
KeySchema: key_schema,
|
70
|
+
ProvisionedThroughput: {
|
71
|
+
LastIncreaseDateTime: (Time.now.to_i * 1000),
|
72
|
+
LastDecreaseDateTime: (Time.now.to_i * 1000),
|
73
|
+
ReadCapacityUnits: 10,
|
74
|
+
WriteCapacityUnits: 10
|
75
|
+
},
|
76
|
+
TableName: table_name,
|
77
|
+
TableSizeBytes: 1,
|
78
|
+
TableStatus: 'ACTIVE' } }.to_json
|
79
|
+
end
|
80
|
+
|
81
|
+
def create_table(args)
|
82
|
+
halt 500, 'no table name' unless args['TableName']
|
83
|
+
halt 500, 'no key schema' unless args['KeySchema']
|
84
|
+
halt 500, 'no provisioned throughput' unless args['ProvisionedThroughput']
|
85
|
+
halt 500, 'already created' if AWS_REDIS.sismember('tables', args['TableName'])
|
86
|
+
|
87
|
+
AWS_REDIS.sadd 'tables', args['TableName']
|
88
|
+
AWS_REDIS.set "tables.#{args['TableName']}.auto_incr", 0
|
89
|
+
|
90
|
+
if args.key?('LocalSecondaryIndexes')
|
91
|
+
args['LocalSecondaryIndexes'].each do |si|
|
92
|
+
index_name = si['IndexName']
|
93
|
+
AWS_REDIS.sadd "tables.#{args['TableName']}.secondary_indexes", si.to_json
|
94
|
+
end
|
95
|
+
end
|
96
|
+
|
97
|
+
if args.key?('GlobalSecondaryIndexes')
|
98
|
+
args['GlobalSecondaryIndexes'].each do |si|
|
99
|
+
index_name = si['IndexName']
|
100
|
+
AWS_REDIS.sadd "tables.#{args['TableName']}.secondary_indexes", si.to_json
|
101
|
+
end
|
102
|
+
end
|
103
|
+
|
104
|
+
if args['KeySchema'].class == Array
|
105
|
+
args['KeySchema'].each do |ks|
|
106
|
+
key_defn = args['AttributeDefinitions'].select { |a| a['AttributeName'] == ks['AttributeName'] }.first
|
107
|
+
halt 500 unless key_defn
|
108
|
+
|
109
|
+
if ks['KeyType'] == 'HASH'
|
110
|
+
AWS_REDIS.set "tables.#{args['TableName']}.hashkey", key_defn.to_json
|
111
|
+
elsif ks['KeyType'] == 'RANGE'
|
112
|
+
AWS_REDIS.set "tables.#{args['TableName']}.rangekey", key_defn.to_json
|
113
|
+
end
|
114
|
+
end
|
115
|
+
else
|
116
|
+
args['KeySchema'].each do |k, v|
|
117
|
+
case k
|
118
|
+
when 'HashKeyElement'
|
119
|
+
AWS_REDIS.set "tables.#{args['TableName']}.hashkey", v.to_json
|
120
|
+
when 'RangeKeyElement'
|
121
|
+
AWS_REDIS.set "tables.#{args['TableName']}.rangekey", v.to_json
|
122
|
+
end
|
123
|
+
end
|
124
|
+
end
|
125
|
+
|
126
|
+
{
|
127
|
+
TableDescription: {
|
128
|
+
CreationDateTime: (Time.now.to_i * 1000),
|
129
|
+
KeySchema: args['KeySchema'],
|
130
|
+
ProvisionedThroughput: { ReadsPerSecond: args['ProvisionedThroughput']['ReadsPerSecond'],
|
131
|
+
WritesPerSecond: args['ProvisionedThroughput']['WritesPerSecond'] },
|
132
|
+
TableName: args['TableName'],
|
133
|
+
TableStatus: 'CREATING'
|
134
|
+
}
|
135
|
+
}.to_json
|
136
|
+
end
|
137
|
+
|
138
|
+
def update_item(args)
|
139
|
+
halt 500, 'no table name' unless args['TableName']
|
140
|
+
|
141
|
+
record_value = nil
|
142
|
+
record_id = get_record_id(args)
|
143
|
+
|
144
|
+
# No record, probably doing an add
|
145
|
+
if record_id.nil?
|
146
|
+
|
147
|
+
# Figure out the range key
|
148
|
+
rangekey_json = AWS_REDIS.get "tables.#{args['TableName']}.rangekey"
|
149
|
+
rangekey = JSON.parse(rangekey_json) if rangekey_json
|
150
|
+
|
151
|
+
# Add the range key and give a default value of zero
|
152
|
+
attribute_name = rangekey['AttributeName']
|
153
|
+
attribute_type = rangekey['AttributeType']
|
154
|
+
item_key = args['Key'].clone
|
155
|
+
item_key[attribute_name] = { attribute_type.to_s => 0 }
|
156
|
+
put_item('TableName' => args['TableName'], 'Item' => item_key)
|
157
|
+
|
158
|
+
record_id = get_record_id(args)
|
159
|
+
end
|
160
|
+
|
161
|
+
if record_id
|
162
|
+
record_value = JSON.parse(AWS_REDIS.get("tables.#{args['TableName']}.#{record_id}"))
|
163
|
+
args['AttributeUpdates'].each do |key, update|
|
164
|
+
if update['Action'] == 'ADD'
|
165
|
+
if update['Value'].key?('N')
|
166
|
+
increment_amount = update['Value']['N'].to_i
|
167
|
+
|
168
|
+
if record_value.key?(key)
|
169
|
+
halt 500, 'Incorrect type' unless record_value[key].key?('N')
|
170
|
+
record_value[key]['N'] = record_value[key]['N'].to_i + increment_amount
|
171
|
+
else # it's new, so add it
|
172
|
+
record_value[key] = update['Value']
|
173
|
+
end # record_value
|
174
|
+
elsif update['Value'].key?('S')
|
175
|
+
record_value[key] = update['Value']
|
176
|
+
end
|
177
|
+
elsif update['Action'] == 'DELETE'
|
178
|
+
record_value.delete(key)
|
179
|
+
end
|
180
|
+
end
|
181
|
+
|
182
|
+
AWS_REDIS.set "tables.#{args['TableName']}.#{record_id}",
|
183
|
+
record_value.to_json
|
184
|
+
end
|
185
|
+
|
186
|
+
{ Attributes: record_value, ConsumedCapacityUnits: 1 }.to_json
|
187
|
+
end
|
188
|
+
|
189
|
+
def put_item(args)
|
190
|
+
halt 500, 'no table name' unless args['TableName']
|
191
|
+
halt 500, 'no item' unless args['Item']
|
192
|
+
|
193
|
+
hashkey = rangekey = nil
|
194
|
+
hashkey_json = AWS_REDIS.get "tables.#{args['TableName']}.hashkey"
|
195
|
+
rangekey_json = AWS_REDIS.get "tables.#{args['TableName']}.rangekey"
|
196
|
+
|
197
|
+
hashkey = JSON.parse(hashkey_json) if hashkey_json
|
198
|
+
rangekey = JSON.parse(rangekey_json) if rangekey_json
|
199
|
+
|
200
|
+
halt 500 if hashkey.nil?
|
201
|
+
|
202
|
+
if args['Item'][hashkey['AttributeName']].key?('S')
|
203
|
+
hashkey_value = args['Item'][hashkey['AttributeName']]['S']
|
204
|
+
else
|
205
|
+
hashkey_value = BigDecimal(args['Item'][hashkey['AttributeName']]['N'])
|
206
|
+
end
|
207
|
+
|
208
|
+
if args.key?('Expected') && args['Expected'].key?('Name') &&
|
209
|
+
args['Expected']['Name'].key?('Exists') && args['Expected']['Name']['Exists'] == false
|
210
|
+
if AWS_REDIS.hexists "tables.#{args['TableName']}.hashkey_index", hashkey_value
|
211
|
+
halt 400, { '__type' => 'com.amazonaws.dynamodb.v20111205#ConditionalCheckFailedException', :message => 'The conditional request failed' }.to_json
|
212
|
+
end
|
213
|
+
end
|
214
|
+
|
215
|
+
halt 500 unless hashkey_value
|
216
|
+
|
217
|
+
if rangekey
|
218
|
+
rangekey_value = get_rangekey_value(args['Item'][rangekey['AttributeName']])
|
219
|
+
|
220
|
+
if AWS_REDIS.hexists("tables.#{args['TableName']}.hashkey_index.#{hashkey_value}", rangekey_value)
|
221
|
+
record_id = AWS_REDIS.hget "tables.#{args['TableName']}.hashkey_index.#{hashkey_value}", rangekey_value
|
222
|
+
clear_from_secondary_indices(args['TableName'], record_id)
|
223
|
+
else
|
224
|
+
record_id = AWS_REDIS.incr "tables.#{args['TableName']}.auto_incr"
|
225
|
+
AWS_REDIS.hset "tables.#{args['TableName']}.hashkey_index.#{hashkey_value}", rangekey_value, record_id
|
226
|
+
AWS_REDIS.lpush "tables.#{args['TableName']}.items", record_id
|
227
|
+
AWS_REDIS.hset "tables.#{args['TableName']}.hashkey_index", hashkey_value, record_id
|
228
|
+
end
|
229
|
+
else
|
230
|
+
record_id = AWS_REDIS.incr "tables.#{args['TableName']}.auto_incr"
|
231
|
+
AWS_REDIS.lpush "tables.#{args['TableName']}.items", record_id
|
232
|
+
AWS_REDIS.hset "tables.#{args['TableName']}.hashkey_index", hashkey_value, record_id
|
233
|
+
end
|
234
|
+
|
235
|
+
AWS_REDIS.set "tables.#{args['TableName']}.#{record_id}", args['Item'].to_json
|
236
|
+
|
237
|
+
# setup secondary indexes
|
238
|
+
secondary_indexes = AWS_REDIS.smembers "tables.#{args['TableName']}.secondary_indexes"
|
239
|
+
secondary_indexes.each do |raw|
|
240
|
+
lsi = JSON.parse(raw)
|
241
|
+
index_name = lsi['IndexName']
|
242
|
+
hashkey_value = nil
|
243
|
+
rangekey_value = nil
|
244
|
+
|
245
|
+
lsi['KeySchema'].each do |attrs|
|
246
|
+
attr_name = attrs['AttributeName']
|
247
|
+
key_type = attrs['KeyType']
|
248
|
+
|
249
|
+
if key_type == 'HASH'
|
250
|
+
if args['Item'][attrs['AttributeName']].key?('S')
|
251
|
+
hashkey_value = args['Item'][attrs['AttributeName']]['S']
|
252
|
+
else
|
253
|
+
hashkey_value = BigDecimal(args['Item'][attrs['AttributeName']]['N'])
|
254
|
+
end
|
255
|
+
else
|
256
|
+
if args['Item'][attrs['AttributeName']].key?('S')
|
257
|
+
rangekey_value = args['Item'][attrs['AttributeName']]['S']
|
258
|
+
else
|
259
|
+
rangekey_value = BigDecimal(args['Item'][attrs['AttributeName']]['N'])
|
260
|
+
end
|
261
|
+
end
|
262
|
+
end
|
263
|
+
|
264
|
+
# Secondary indexes store sets, not hmaps
|
265
|
+
# H => 1, R => 2, TS => 3
|
266
|
+
# H => 1, R => 3, TS => 3
|
267
|
+
# This means the secondary index on TS should have two records, {H => 1, R => 2} and {H => 1, R => 3}
|
268
|
+
# Storing as a hmap on H, TS would overwrite
|
269
|
+
|
270
|
+
AWS_REDIS.sadd "tables.#{args['TableName']}.secondary_index.#{index_name}.#{hashkey_value}/#{rangekey_value}", record_id
|
271
|
+
end
|
272
|
+
|
273
|
+
{
|
274
|
+
Attributes: args['Item'],
|
275
|
+
WritesUsed: 1
|
276
|
+
}.to_json
|
277
|
+
end
|
278
|
+
|
279
|
+
def get_record_id(args)
|
280
|
+
hashkey_value = nil
|
281
|
+
rangekey_value = nil
|
282
|
+
|
283
|
+
if !args['Key'].key?('HashKeyElement')
|
284
|
+
hashkey_json = AWS_REDIS.get "tables.#{args['TableName']}.hashkey"
|
285
|
+
rangekey_json = AWS_REDIS.get "tables.#{args['TableName']}.rangekey"
|
286
|
+
|
287
|
+
return nil unless hashkey_json
|
288
|
+
|
289
|
+
hashkey = JSON.parse(hashkey_json) if hashkey_json
|
290
|
+
rangekey = JSON.parse(rangekey_json) if rangekey_json
|
291
|
+
|
292
|
+
if args['Key'][hashkey['AttributeName']].key?('S')
|
293
|
+
hashkey_value = args['Key'][hashkey['AttributeName']]['S']
|
294
|
+
else
|
295
|
+
hashkey_value = BigDecimal(args['Key'][hashkey['AttributeName']]['N'])
|
296
|
+
end
|
297
|
+
|
298
|
+
if rangekey
|
299
|
+
if args['Key'][rangekey['AttributeName']].nil?
|
300
|
+
rangekey_value = nil
|
301
|
+
elsif args['Key'][rangekey['AttributeName']].key?('S')
|
302
|
+
rangekey_value = args['Key'][rangekey['AttributeName']]['S']
|
303
|
+
else
|
304
|
+
rangekey_value = BigDecimal(args['Key'][rangekey['AttributeName']]['N'])
|
305
|
+
end
|
306
|
+
end
|
307
|
+
else
|
308
|
+
halt 500 unless args['Key'].key?('HashKeyElement')
|
309
|
+
|
310
|
+
if args['Key']['HashKeyElement'].key?('S')
|
311
|
+
hashkey_value = args['Key']['HashKeyElement']['S']
|
312
|
+
else
|
313
|
+
hashkey_value = BigDecimal(args['Key']['HashKeyElement']['N'])
|
314
|
+
end
|
315
|
+
|
316
|
+
if args['Key'].key?('RangeKeyElement')
|
317
|
+
if args['Key']['RangeKeyElement'].key?('S')
|
318
|
+
rangekey_value = args['Key']['RangeKeyElement']['S']
|
319
|
+
else
|
320
|
+
rangekey_value = BigDecimal(args['Key']['RangeKeyElement']['N'])
|
321
|
+
end
|
322
|
+
end
|
323
|
+
end
|
324
|
+
|
325
|
+
raise 'no hashkey value' unless hashkey_value
|
326
|
+
|
327
|
+
record_id = nil
|
328
|
+
if rangekey_value.nil?
|
329
|
+
record_id = AWS_REDIS.hget("tables.#{args['TableName']}.hashkey_index", hashkey_value)
|
330
|
+
else
|
331
|
+
record_id = AWS_REDIS.hget("tables.#{args['TableName']}.hashkey_index.#{hashkey_value}", rangekey_value)
|
332
|
+
end
|
333
|
+
|
334
|
+
record_id
|
335
|
+
end
|
336
|
+
|
337
|
+
def get_item(args)
|
338
|
+
halt 500, 'no table name' unless args['TableName']
|
339
|
+
halt 500, 'no key' unless args['Key']
|
340
|
+
|
341
|
+
record_id = get_record_id(args)
|
342
|
+
record_value = record_id ? JSON.parse(AWS_REDIS.get("tables.#{args['TableName']}.#{record_id}")) : nil
|
343
|
+
|
344
|
+
record_value ? { Item: record_value, ReadsUsed: 1 }.to_json : {}.to_json
|
345
|
+
end
|
346
|
+
|
347
|
+
def convert_rangekey_value(rangekey_value, rangekey_type)
|
348
|
+
if rangekey_type == 'N'
|
349
|
+
return BigDecimal(rangekey_value)
|
350
|
+
else
|
351
|
+
return rangekey_value
|
352
|
+
end
|
353
|
+
end
|
354
|
+
|
355
|
+
def get_rangekey_value(rangekey)
|
356
|
+
rangekey_value = if rangekey.key?('N')
|
357
|
+
BigDecimal(rangekey['N'])
|
358
|
+
else
|
359
|
+
rangekey['S']
|
360
|
+
end
|
361
|
+
|
362
|
+
rangekey_value
|
363
|
+
end
|
364
|
+
|
365
|
+
# Handle a DynamoDB Query request. Supports both the 2011 V1 API
# (HashKeyValue / RangeKeyCondition) and the V2 API (KeyConditions,
# optionally IndexName for secondary-index queries).
#
# args - parsed request payload. Recognised keys: 'TableName',
#        'HashKeyValue', 'KeyConditions', 'RangeKeyCondition',
#        'IndexName', 'Limit', 'ScanIndexForward', 'ExclusiveStartKey'.
#
# Returns a JSON string with :Count, :Items, :ReadsUsed and — when a
# Limit truncated the result — :LastEvaluatedKey.
def query(args)
  halt 500, 'no table name' \
    unless args['TableName']
  halt 500, 'no hash key value' \
    unless args['HashKeyValue'] ||
           args['IndexName'] ||
           args['KeyConditions']

  key_conditions = args['KeyConditions']
  limit = args.key?('Limit') ? args['Limit'] : nil
  # DynamoDB default sort order is ascending.
  scan_index_forward = if args.key?('ScanIndexForward')
                         args['ScanIndexForward']
                       else
                         true
                       end

  # exclusive_start_key = nil
  last_evaluated_key = nil

  # The table's key schema is stored as JSON blobs in redis.
  hashkey_obj = \
    JSON.parse((AWS_REDIS.get "tables.#{args['TableName']}.hashkey"))
  hashkey_name = hashkey_obj['AttributeName']
  # hashkey_type = hashkey_obj['AttributeType']

  # V1 api sends HashKeyValue directly
  # NOTE(review): only a numeric ('N') V1 HashKeyValue is handled here;
  # a string ('S') V1 hash key would fall through to the KeyConditions
  # branch or raise — confirm whether 'S' V1 queries are expected.
  if args.key?('HashKeyValue') && args['HashKeyValue'].key?('N')
    hashkey_value = BigDecimal(args['HashKeyValue']['N'])
  elsif !key_conditions.nil?
    # V2 api sends hash key value inside a keyconditions hash
    hashkey_value = \
      get_rangekey_value( \
        key_conditions[hashkey_name]['AttributeValueList'].first
      )
  else
    raise 'Unknown hash key value'
  end

  # NOTE(review): assumes the table has a range key; a hash-only table
  # would make this JSON.parse receive nil — confirm against callers.
  rangekey_obj = \
    JSON.parse(AWS_REDIS.get("tables.#{args['TableName']}.rangekey"))
  rangekey_name = rangekey_obj['AttributeName']
  rangekey_type = rangekey_obj['AttributeType']

  exclusive_start_hashkey_value = nil
  exclusive_start_rangekey_value = nil

  # Pagination: remember where the previous page stopped (V1 key shape).
  unless args['ExclusiveStartKey'].nil?
    exclusive_start_hashkey_value = \
      args['ExclusiveStartKey']['HashKeyElement'].values.last
    exclusive_start_rangekey_value = \
      args['ExclusiveStartKey']['RangeKeyElement'].values.last
  end

  if args.key?('IndexName') # we are doing a new-style query
    # remove the hash-key from the conditions,
    # leaving only the key on which we are querying
    rangekey_name = key_conditions.keys.select { |k| k != hashkey_name }.first
    rangekey = key_conditions[rangekey_name]

    # Secondary-index entries are stored one redis set per
    # "<hashkey>/<rangekey>" pair; enumerate them all for this hash key.
    rangekeys = AWS_REDIS.keys "tables.#{args['TableName']}.secondary_index.#{args['IndexName']}.#{hashkey_value}/*"

    # NOTE(review): only GE / LE / LT comparison operators are handled
    # for secondary-index queries; any other operator leaves
    # valid_rangekeys undefined and will raise below — confirm intended.
    if rangekey['ComparisonOperator'] == 'GE'
      rangekey_value = get_rangekey_value(rangekey['AttributeValueList'].first)
      rangekey_type = rangekey['AttributeValueList'].first.keys.first # "N" or "S"

      valid_rangekeys = rangekeys.select do |rk|
        (convert_rangekey_value(rk.split('/').last, rangekey_type) <=> rangekey_value) >= 0
      end.sort do |a, b|
        convert_rangekey_value(a.split('/').last, rangekey_type) <=> convert_rangekey_value(b.split('/').last, rangekey_type)
      end
    elsif rangekey['ComparisonOperator'] == 'LE' || rangekey['ComparisonOperator'] == 'LT'
      rangekey_value = get_rangekey_value(rangekey['AttributeValueList'].first)
      rangekey_type = rangekey['AttributeValueList'].first.keys.first # "N" or "S"

      valid_rangekeys = rangekeys.select do |rk|
        if rangekey['ComparisonOperator'] == 'LE'
          (convert_rangekey_value(rk.split('/').last, rangekey_type) <=> rangekey_value) <= 0
        else
          (convert_rangekey_value(rk.split('/').last, rangekey_type) <=> rangekey_value) < 0
        end
      end.sort do |a, b|
        convert_rangekey_value(a.split('/').last, rangekey_type) <=> convert_rangekey_value(b.split('/').last, rangekey_type)
      end
    end

    # Each matching index set holds record ids; fetch the full records.
    if !valid_rangekeys.empty?
      record_ids = []
      valid_rangekeys.each do |rk|
        record_ids += AWS_REDIS.smembers(rk)
      end
      record_keys = record_ids.map { |record_id| "tables.#{args['TableName']}.#{record_id}" }
      items = !record_keys.empty? ? (AWS_REDIS.mget *record_keys).map { |i| JSON.parse(i) } : []
    else
      items = []
    end

  elsif (args.key?('KeyConditions') && args['KeyConditions'][rangekey_name]) ||
        args.key?('RangeKeyCondition')

    if args['KeyConditions']
      # New API v2: comes in as { table_name: {KeyCondtions}}
      key_conditions = args['KeyConditions']
    elsif args['RangeKeyCondition']
      # Old API v1: comes in as { KeyConditions }
      # So lets map it to the API v2
      key_conditions = {}
      key_conditions[rangekey_name] = args['RangeKeyCondition']
    end

    # All range keys for this hash key live in one redis hash.
    rangekeys = AWS_REDIS.hkeys "tables.#{args['TableName']}.hashkey_index.#{hashkey_value}"

    # First (and for BETWEEN, last) operand of the range condition.
    rangekey_value = \
      get_rangekey_value( \
        key_conditions[rangekey_name]['AttributeValueList'].first
      )
    last_rangekey_value = \
      get_rangekey_value( \
        key_conditions[rangekey_name]['AttributeValueList'].last
      )

    # Build the predicate used to filter the stored range keys.
    case key_conditions[rangekey_name]['ComparisonOperator']
    when 'LT'
      comparator = lambda do |rk|
        (rk <=> rangekey_value) == -1
      end
    when 'GT'
      comparator = lambda do |rk|
        (rk <=> rangekey_value) == +1
      end
    when 'GE'
      comparator = lambda do |rk|
        (rk <=> rangekey_value) >= 0
      end
    when 'LE'
      comparator = lambda do |rk|
        (rk <=> rangekey_value) <= 0
      end
    when 'EQ'
      comparator = lambda do |rk|
        (rk <=> rangekey_value).zero?
      end
    when 'BETWEEN'
      comparator = lambda do |rk|
        (rk <=> rangekey_value) >= 0 && (rk <=> last_rangekey_value) <= 0
      end
    end

    valid_rangekeys = \
      rangekeys.map do |rk|
        convert_rangekey_value(rk, rangekey_type)
      end.select(&comparator).sort

    record_ids = []

    # Map surviving range keys back to fully-qualified record keys.
    unless valid_rangekeys.empty?
      record_ids = \
        AWS_REDIS.hmget("tables.#{args['TableName']}.hashkey_index." \
                        "#{hashkey_value}", *valid_rangekeys).map do |rid|
          "tables.#{args['TableName']}.#{rid}"
        end
    end

    items = []
    unless record_ids.empty?
      items = (AWS_REDIS.mget record_ids).map do |i|
        JSON.parse(i)
      end
    end
  else
    # No range condition at all: return every record under the hash key.
    record_ids = AWS_REDIS.hvals("tables.#{args['TableName']}." \
                                 "hashkey_index.#{hashkey_value}")
    keys = record_ids.map do |item|
      "tables.#{args['TableName']}.#{item}"
    end
    items = !keys.empty? ? AWS_REDIS.mget(*keys).map { |i| JSON.parse(i) } : []
  end

  # Drop everything up to and including the ExclusiveStartKey record.
  if exclusive_start_hashkey_value && exclusive_start_rangekey_value

    # So we move through it correctly depending on asc or desc
    items.reverse! if scan_index_forward

    idx = 0
    items.each do |item|
      idx += 1
      hashkey_value_dict = item[hashkey_name]
      rangekey_value_dict = item[rangekey_name]
      hashkey_value_type = hashkey_value_dict.keys.first
      rangekey_value_type = rangekey_value_dict.keys.first

      hashkey_value = hashkey_value_dict.values.first
      rangekey_value = rangekey_value_dict.values.first

      if exclusive_start_hashkey_value == hashkey_value &&
         exclusive_start_rangekey_value == rangekey_value
        break
      end
    end

    # NOTE(review): if the start key is not found, idx == items.length
    # and the whole page is dropped — confirm this is the intended
    # behavior for a stale ExclusiveStartKey.
    items = items[idx..-1]
  end

  # Final ordering by range key, ascending or descending.
  if scan_index_forward && rangekey_name
    items.sort! do |a, b|
      get_rangekey_value(a[rangekey_name]) <=> get_rangekey_value(b[rangekey_name])
    end
  elsif !scan_index_forward && rangekey_name
    items.sort! do |a, b|
      get_rangekey_value(b[rangekey_name]) <=> get_rangekey_value(a[rangekey_name])
    end
  end

  # Apply Limit; items[limit] is the first record NOT returned, which
  # becomes the LastEvaluatedKey for the next page.
  if items && items.count > 0 && limit && limit < items.count
    hashkey_value_dict = items[limit][hashkey_name]
    rangekey_value_dict = items[limit][rangekey_name]
    hashkey_value_type = hashkey_value_dict.keys.first
    rangekey_value_type = rangekey_value_dict.keys.first

    hashkey_value = hashkey_value_dict.values.first
    rangekey_value = rangekey_value_dict.values.first

    last_evaluated_key = {
      HashKeyElement: { hashkey_value_type => hashkey_value },
      RangeKeyElement: { rangekey_value_type => rangekey_value }
    }
    items = items[0...limit] # apply limit
  end

  result = { Count: items.length, Items: items, ReadsUsed: 1 }

  # This should not be the last key returned, but instead the next key you would
  # have returned but didn't.

  result[:LastEvaluatedKey] = last_evaluated_key if last_evaluated_key

  result.to_json
end
|
601
|
+
|
602
|
+
# Handle a DynamoDB DeleteItem request: resolve the record id from the
# hash/range key, remove the index entries, delete the stored record,
# and purge it from any secondary indexes.
#
# args - payload with 'TableName' and 'Key'. The key may use the V1
#        shape ({'HashKeyElement' => ..., 'RangeKeyElement' => ...}) or
#        the V2 shape keyed by attribute name.
#
# Returns a JSON string { 'Item' => deleted-item-or-nil, 'ReadsUsed' => 1 }.
def delete_item(args)
  halt 500, 'no table name' unless args['TableName']
  halt 500, 'no key' unless args['Key']

  # Resolve the hash key value, parsing numerics into BigDecimal so
  # index lookups match how values were written.
  if args['Key'].key?('HashKeyElement')
    # V1 key shape: type tag ('N'/'S') is given inline.
    if args['Key']['HashKeyElement'].key?('N')
      hashkey_value = BigDecimal(args['Key']['HashKeyElement']['N'])
    else
      hashkey_value = args['Key']['HashKeyElement']['S']
    end
  else
    # V2 key shape: fetch the key schema from redis to learn the
    # hash-key attribute name and type.
    hashkey_raw = AWS_REDIS.get "tables.#{args['TableName']}.hashkey"
    hashkey = JSON.parse(hashkey_raw)
    if hashkey['AttributeType'] == 'N'
      hashkey_value = BigDecimal(args['Key'][hashkey['AttributeName']][hashkey['AttributeType']])
    else
      hashkey_value = args['Key'][hashkey['AttributeName']][hashkey['AttributeType']]
    end
  end

  # Resolve the range key value the same way (nil for hash-only tables).
  rangekey_value = nil
  if args['Key'].key?('RangeKeyElement')
    rangekey_value = get_rangekey_value(args['Key']['RangeKeyElement'])
  else
    rangekey_raw = AWS_REDIS.get "tables.#{args['TableName']}.rangekey"
    if rangekey_raw
      rangekey = JSON.parse(rangekey_raw)
      # NOTE(review): assumes the V2 key always carries the range-key
      # attribute when the table defines one — confirm against callers.
      if rangekey['AttributeType'] == 'N'
        rangekey_value = BigDecimal(args['Key'][rangekey['AttributeName']][rangekey['AttributeType']])
      else
        rangekey_value = args['Key'][rangekey['AttributeName']][rangekey['AttributeType']]
      end
    end
  end

  # Range-keyed tables index records in a per-hash-key redis hash;
  # hash-only tables use a single flat hashkey_index.
  if hashkey_value && rangekey_value
    record_id = AWS_REDIS.hget "tables.#{args['TableName']}.hashkey_index.#{hashkey_value}", rangekey_value
  else
    record_id = AWS_REDIS.hget "tables.#{args['TableName']}.hashkey_index", hashkey_value
  end

  item = nil
  if record_id
    # NOTE(review): the flat hashkey_index entry is removed even when
    # the record was looked up via the range-keyed index — confirm the
    # two index layouts never collide for the same hash key value.
    AWS_REDIS.hdel "tables.#{args['TableName']}.hashkey_index", hashkey_value
    if rangekey_value
      AWS_REDIS.hdel "tables.#{args['TableName']}.hashkey_index.#{hashkey_value}", rangekey_value
    end
    # Capture the record body for the response before deleting it.
    item = JSON.parse(AWS_REDIS.get("tables.#{args['TableName']}.#{record_id}"))
    AWS_REDIS.del "tables.#{args['TableName']}.#{record_id}"

    clear_from_secondary_indices(args['TableName'], record_id)
  end

  { 'Item' => item, 'ReadsUsed' => 1 }.to_json
end
|
657
|
+
|
658
|
+
# Handle a BatchWriteItem request.
#
# args - payload with 'RequestItems' => { table_name => [request, ...] },
#        where each request is e.g. {'DeleteRequest' => {'Key' => ...}}.
#
# NOTE(review): only 'DeleteRequest' entries are processed; any
# 'PutRequest' entries are silently ignored by this mock.
#
# Returns a JSON string with per-table Responses and an always-empty
# UnprocessedItems list.
def batch_write_item(args)
  responses = {}

  args['RequestItems'].each do |table_name, requests|
    requests.each do |request|
      request.each do |request_type, request_body|
        case request_type
        when 'DeleteRequest'
          delete_item('TableName' => table_name, 'Key' => request_body['Key'])
          # One flat-rate capacity unit per table, matching delete_item.
          responses[table_name] = { 'ConsumedCapacityUnits' => 1 }
        end
      end
    end
  end

  { Responses: responses, UnprocessedItems: [] }.to_json
end
|
678
|
+
end
|
679
|
+
|
680
|
+
# Entry point for all mocked DynamoDB operations. The operation name
# arrives in the X-Amz-Target header (e.g. 'DynamoDB_20111205.Query');
# we dispatch on its last dotted component.
post %r{/dynamodb\.([\w-]+?)\.amazonaws\.com/?} do
  req = Rack::Request.new(env)

  # Rack exposes the header under different names depending on how the
  # request was made; take the first one present.
  amz_target = nil
  %w(HTTP_X_AMZ_TARGET x-amz-target X-Amz-Target).each do |header_key|
    if env.key?(header_key)
      amz_target = env[header_key].split('.').last
      break
    end
  end

  # POST bodies carry the JSON payload; otherwise fall back to the
  # pre-parsed form hash.
  args = if env['REQUEST_METHOD'] == 'POST'
           JSON.parse(env['rack.input'].read)
         else
           env['rack.request.form_hash']
         end

  content_type 'application/x-amz-json-1.0'

  case amz_target
  when 'CreateTable'    then return create_table(args)
  when 'DeleteTable'    then return delete_table(args)
  when 'DescribeTable'  then return describe_table(args)
  when 'PutItem'        then return put_item(args)
  when 'GetItem'        then return get_item(args)
  when 'DeleteItem'     then return delete_item(args)
  when 'UpdateItem'     then return update_item(args)
  when 'Query'          then return query(args)
  when 'ListTables'     then return list_tables(args)
  when 'BatchWriteItem' then return batch_write_item(args)
  else
    halt 500, "unknown command #{req.inspect}"
  end
end
|