aws_test_dump 0.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.dockerignore +20 -0
- data/.editorconfig +19 -0
- data/.gitignore +21 -0
- data/.rspec +2 -0
- data/.travis.yml +5 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Dockerfile +11 -0
- data/Gemfile +6 -0
- data/LICENSE.txt +21 -0
- data/README.md +90 -0
- data/Rakefile +6 -0
- data/aws_test_dump.gemspec +27 -0
- data/bin/aws_test_dump +24 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/docker-compose.yml +40 -0
- data/lib/aws_test_dump.rb +392 -0
- data/lib/aws_test_dump/version.rb +3 -0
- metadata +133 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: 87d2bbc3615e526da41dc76ac1a04e19e7450a4d
|
4
|
+
data.tar.gz: b7a72b2dbbb9008ebb010a988fb54e0698e384af
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 656e91bd7ed8052557b1fdbbffe8e7d5cfb42b8f6939cb6e9277310c92f9f51dff6e2e1d6be7d08ba2c025c8a82a04e3cc0eff60c656c6e8afe3381c05418d62
|
7
|
+
data.tar.gz: a5f0c31fabe01848ae225ff493c5a57a62c6d598ef45e722494841e4afedae87d29b411e5fe03325df709826bd853db4291c2c5132a8b148395084ccef64ca50
|
data/.dockerignore
ADDED
data/.editorconfig
ADDED
@@ -0,0 +1,19 @@
|
|
1
|
+
root = true
|
2
|
+
|
3
|
+
[*]
|
4
|
+
trim_trailing_whitespace = true
|
5
|
+
insert_final_newline = true
|
6
|
+
|
7
|
+
[*.sh]
|
8
|
+
indent_style = space
|
9
|
+
indent_size = 2
|
10
|
+
|
11
|
+
# 2 spaces for ruby https://github.com/bbatsov/ruby-style-guide
|
12
|
+
[*.rb]
|
13
|
+
indent_style = space
|
14
|
+
indent_size = 2
|
15
|
+
|
16
|
+
# 2 spaces
|
17
|
+
[*.{js,coffee,yml,html,css,sass,scss,yaml}]
|
18
|
+
indent_style = space
|
19
|
+
indent_size = 2
|
data/.gitignore
ADDED
@@ -0,0 +1,21 @@
|
|
1
|
+
spec/tmp/
|
2
|
+
/.bundle/
|
3
|
+
/.yardoc
|
4
|
+
/Gemfile.lock
|
5
|
+
/_yardoc/
|
6
|
+
/coverage/
|
7
|
+
/doc/
|
8
|
+
/pkg/
|
9
|
+
/spec/reports/
|
10
|
+
/tmp/
|
11
|
+
.idea/**
|
12
|
+
.DS_Store
|
13
|
+
*.swp
|
14
|
+
tags/
|
15
|
+
tags
|
16
|
+
coverage/
|
17
|
+
stats/
|
18
|
+
|
19
|
+
# rspec failure tracking
|
20
|
+
.rspec_status
|
21
|
+
|
data/.rspec
ADDED
data/.travis.yml
ADDED
data/CODE_OF_CONDUCT.md
ADDED
@@ -0,0 +1,74 @@
|
|
1
|
+
# Contributor Covenant Code of Conduct
|
2
|
+
|
3
|
+
## Our Pledge
|
4
|
+
|
5
|
+
In the interest of fostering an open and welcoming environment, we as
|
6
|
+
contributors and maintainers pledge to making participation in our project and
|
7
|
+
our community a harassment-free experience for everyone, regardless of age, body
|
8
|
+
size, disability, ethnicity, gender identity and expression, level of experience,
|
9
|
+
nationality, personal appearance, race, religion, or sexual identity and
|
10
|
+
orientation.
|
11
|
+
|
12
|
+
## Our Standards
|
13
|
+
|
14
|
+
Examples of behavior that contributes to creating a positive environment
|
15
|
+
include:
|
16
|
+
|
17
|
+
* Using welcoming and inclusive language
|
18
|
+
* Being respectful of differing viewpoints and experiences
|
19
|
+
* Gracefully accepting constructive criticism
|
20
|
+
* Focusing on what is best for the community
|
21
|
+
* Showing empathy towards other community members
|
22
|
+
|
23
|
+
Examples of unacceptable behavior by participants include:
|
24
|
+
|
25
|
+
* The use of sexualized language or imagery and unwelcome sexual attention or
|
26
|
+
advances
|
27
|
+
* Trolling, insulting/derogatory comments, and personal or political attacks
|
28
|
+
* Public or private harassment
|
29
|
+
* Publishing others' private information, such as a physical or electronic
|
30
|
+
address, without explicit permission
|
31
|
+
* Other conduct which could reasonably be considered inappropriate in a
|
32
|
+
professional setting
|
33
|
+
|
34
|
+
## Our Responsibilities
|
35
|
+
|
36
|
+
Project maintainers are responsible for clarifying the standards of acceptable
|
37
|
+
behavior and are expected to take appropriate and fair corrective action in
|
38
|
+
response to any instances of unacceptable behavior.
|
39
|
+
|
40
|
+
Project maintainers have the right and responsibility to remove, edit, or
|
41
|
+
reject comments, commits, code, wiki edits, issues, and other contributions
|
42
|
+
that are not aligned to this Code of Conduct, or to ban temporarily or
|
43
|
+
permanently any contributor for other behaviors that they deem inappropriate,
|
44
|
+
threatening, offensive, or harmful.
|
45
|
+
|
46
|
+
## Scope
|
47
|
+
|
48
|
+
This Code of Conduct applies both within project spaces and in public spaces
|
49
|
+
when an individual is representing the project or its community. Examples of
|
50
|
+
representing a project or community include using an official project e-mail
|
51
|
+
address, posting via an official social media account, or acting as an appointed
|
52
|
+
representative at an online or offline event. Representation of a project may be
|
53
|
+
further defined and clarified by project maintainers.
|
54
|
+
|
55
|
+
## Enforcement
|
56
|
+
|
57
|
+
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
58
|
+
reported by contacting the project team at daniel.hanson82@gmail.com. All
|
59
|
+
complaints will be reviewed and investigated and will result in a response that
|
60
|
+
is deemed necessary and appropriate to the circumstances. The project team is
|
61
|
+
obligated to maintain confidentiality with regard to the reporter of an incident.
|
62
|
+
Further details of specific enforcement policies may be posted separately.
|
63
|
+
|
64
|
+
Project maintainers who do not follow or enforce the Code of Conduct in good
|
65
|
+
faith may face temporary or permanent repercussions as determined by other
|
66
|
+
members of the project's leadership.
|
67
|
+
|
68
|
+
## Attribution
|
69
|
+
|
70
|
+
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
71
|
+
available at [http://contributor-covenant.org/version/1/4][version]
|
72
|
+
|
73
|
+
[homepage]: http://contributor-covenant.org
|
74
|
+
[version]: http://contributor-covenant.org/version/1/4/
|
data/Dockerfile
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
|
|
1
|
+
The MIT License (MIT)
|
2
|
+
|
3
|
+
Copyright (c) 2017 Daniel Hanson
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
7
|
+
in the Software without restriction, including without limitation the rights
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
10
|
+
furnished to do so, subject to the following conditions:
|
11
|
+
|
12
|
+
The above copyright notice and this permission notice shall be included in
|
13
|
+
all copies or substantial portions of the Software.
|
14
|
+
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
21
|
+
THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,90 @@
|
|
1
|
+
# AwsTestDump
|
2
|
+
|
3
|
+
## Installation
|
4
|
+
|
5
|
+
Add this line to your application's Gemfile:
|
6
|
+
|
7
|
+
```ruby
|
8
|
+
gem 'aws_test_dump'
|
9
|
+
```
|
10
|
+
|
11
|
+
And then execute:
|
12
|
+
|
13
|
+
$ bundle
|
14
|
+
|
15
|
+
Or install it yourself as:
|
16
|
+
|
17
|
+
$ gem install aws_test_dump
|
18
|
+
|
19
|
+
## Usage
|
20
|
+
|
21
|
+
### generating test data
|
22
|
+
|
23
|
+
#### dynamo data
|
24
|
+
|
25
|
+
The dynamo schema and test data is generated and restored using the aws_test_dump script.
|
26
|
+
|
27
|
+
To generate the schema, with real AWS credentials, run:
|
28
|
+
|
29
|
+
`aws_test_dump schema_dump`
|
30
|
+
|
31
|
+
To generate the test data, make sure the appropriate environment variables are set and an appropriate entry is entered in the spec/test_data_dump_definition.rb file.
|
32
|
+
|
33
|
+
An example entry:
|
34
|
+
```ruby
|
35
|
+
{
|
36
|
+
table_name: 'relation-customer-toe-analysis',
|
37
|
+
key_conditions: {
|
38
|
+
'customer_id' => {
|
39
|
+
:attribute_value_list => [ENV['CUSTOMER_ID']],
|
40
|
+
:comparison_operator => 'EQ'
|
41
|
+
},
|
42
|
+
},
|
43
|
+
replace_these: {
|
44
|
+
'customer_id' => ENV['CUSTOMER_ID'],
|
45
|
+
},
|
46
|
+
replace_first: {
|
47
|
+
'toe_id' => ENV['TOE_ID'],
|
48
|
+
'analysis_id' => ENV['ANALYSIS_ID'],
|
49
|
+
},
|
50
|
+
},
|
51
|
+
```
|
52
|
+
To create data dump files for each entry in the DATA_DUMP_DEFINITIONS, run:
|
53
|
+
|
54
|
+
`aws_test_dump data_dump`
|
55
|
+
|
56
|
+
and to create a dump of a specific table:
|
57
|
+
|
58
|
+
`aws_test_dump data_dump staging-export-analyses`
|
59
|
+
|
60
|
+
|
61
|
+
#### s3 data
|
62
|
+
|
63
|
+
To dump test s3 files, run the following in a racksh session with production aws credentials
|
64
|
+
|
65
|
+
```ruby
|
66
|
+
require_relative 'aws_test_dump'
|
67
|
+
|
68
|
+
bucket_name = 'some_bucket_name'
|
69
|
+
key_name = 'some_s3_file.json'
|
70
|
+
s3_dump = AwsTestDump::S3FileDump.new(bucket_name, key_name)
|
71
|
+
s3_dump.run
|
72
|
+
```
|
73
|
+
|
74
|
+
make sure any new buckets are added to the links mapping in the compose file so they are hitting the fakes3 service.
|
75
|
+
|
76
|
+
## Development
|
77
|
+
|
78
|
+
After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
|
79
|
+
|
80
|
+
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
|
81
|
+
|
82
|
+
## Contributing
|
83
|
+
|
84
|
+
Bug reports and pull requests are welcome on GitHub at https://github.com/DanHanson82/ruby-aws-test-dump. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.
|
85
|
+
|
86
|
+
|
87
|
+
## License
|
88
|
+
|
89
|
+
The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
|
90
|
+
|
data/Rakefile
ADDED
@@ -0,0 +1,27 @@
|
|
1
|
+
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'aws_test_dump/version'

Gem::Specification.new do |spec|
  spec.name          = "aws_test_dump"
  spec.version       = AwsTestDump::VERSION
  spec.authors       = ["Daniel Hanson"]
  spec.email         = ["daniel.hanson82@gmail.com"]

  spec.summary       = %q{simple script for dumping and restoring aws test data for local testing}
  spec.homepage      = "https://github.com/DanHanson82/ruby-aws-test-dump"
  spec.license       = "MIT"

  # Ship everything tracked by git except test/spec/feature files.
  spec.files         = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.executables   = ["aws_test_dump"]
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency "aws-sdk", "~> 2"

  spec.add_development_dependency "bundler", "~> 1.14"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  # FIX: simplecov has never published a 3.x release (versions are 0.x),
  # so the previous "~> 3.0" constraint could never resolve.
  spec.add_development_dependency "simplecov", "~> 0.15"
end
|
data/bin/aws_test_dump
ADDED
@@ -0,0 +1,24 @@
|
|
1
|
+
#!/usr/bin/env ruby
# CLI entry point: dumps DynamoDB table schemas or table data to local
# YAML files for use in tests.
#
# Usage:
#   aws_test_dump schema_dump [-o DUMP_FILE]
#   aws_test_dump data_dump [TABLE_NAME]

require 'optparse'
require 'aws_test_dump'


action = ARGV[0]
table_name = ARGV[1]
dump_file = nil

ARGV.options do |opts|
  opts.on("-o", "--out=val", String) { |val| dump_file = val }
  opts.parse!
end


processor =
  case action
  when 'schema_dump'
    AwsTestDump::DynamoSchemaDump.new dump_file
  when 'data_dump'
    AwsTestDump::DynamoDataDump.new table_name
  else
    # FIX: previously raised the bare exception class, which told the user
    # nothing. Name the bad input and the accepted actions.
    raise NotValidOptionError, "unknown action #{action.inspect}; expected 'schema_dump' or 'data_dump'"
  end
processor.run
|
data/bin/console
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
#!/usr/bin/env ruby
# Interactive console for experimenting with the gem.

require "bundler/setup"
require "aws_test_dump"

# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.

# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start

require "irb"
IRB.start(__FILE__)
|
data/bin/setup
ADDED
data/docker-compose.yml
ADDED
@@ -0,0 +1,40 @@
|
|
1
|
+
version: "3"
|
2
|
+
services:
|
3
|
+
test:
|
4
|
+
build: .
|
5
|
+
command: rspec
|
6
|
+
volumes:
|
7
|
+
- .:/usr/src/app
|
8
|
+
depends_on:
|
9
|
+
- dynamo
|
10
|
+
- fakes3
|
11
|
+
links:
|
12
|
+
- fakes3
|
13
|
+
environment:
|
14
|
+
- DYNAMO_ENDPOINT=http://dynamo:5000
|
15
|
+
- FAKES3_ENDPOINT=http://fakes3:4000
|
16
|
+
- AWS_ACCESS_KEY_ID=potato
|
17
|
+
- AWS_SECRET_ACCESS_KEY=chorizo
|
18
|
+
- AWS_REGION=us-west-2
|
19
|
+
networks:
|
20
|
+
- fake-aws
|
21
|
+
|
22
|
+
dynamo:
|
23
|
+
image: fingershock/dynamodb-local
|
24
|
+
command: -inMemory -port 5000
|
25
|
+
ports:
|
26
|
+
- "5000:5000"
|
27
|
+
networks:
|
28
|
+
- fake-aws
|
29
|
+
|
30
|
+
fakes3:
|
31
|
+
image: jlynn/moto_server
|
32
|
+
command: s3 -p4000
|
33
|
+
ports:
|
34
|
+
- "4000:4000"
|
35
|
+
networks:
|
36
|
+
- fake-aws
|
37
|
+
|
38
|
+
networks:
|
39
|
+
fake-aws:
|
40
|
+
driver: bridge
|
@@ -0,0 +1,392 @@
|
|
1
|
+
require 'aws_test_dump/version'
|
2
|
+
|
3
|
+
|
4
|
+
# Raised by the CLI (bin/aws_test_dump) when the requested action is not
# one of the recognized dump/restore commands.
class NotValidOptionError < StandardError
end
|
6
|
+
|
7
|
+
|
8
|
+
# Recursively prunes +some_hash+ in place so that only entries whose key is
# listed in +key_names+ survive. Descends into nested hash values and into
# hashes contained inside array values. Returns the (mutated) hash.
def keep_keys(key_names, some_hash)
  some_hash.keep_if { |key, _| key_names.include?(key) }
  some_hash.each_value do |value|
    case value
    when Hash
      keep_keys(key_names, value)
    when Array
      value.each { |entry| keep_keys(key_names, entry) if entry.is_a?(Hash) }
    end
  end
  some_hash
end
|
19
|
+
|
20
|
+
|
21
|
+
module AwsTestDump
|
22
|
+
require 'aws-sdk-core'
|
23
|
+
require 'fileutils'
|
24
|
+
require 'yaml'
|
25
|
+
|
26
|
+
DATA_DUMP_DEFINITION = ENV['DATA_DUMP_DEFINITION'] || File.join(Dir.pwd, 'spec', 'test_data_dump_definition.rb')
|
27
|
+
begin
|
28
|
+
require_relative DATA_DUMP_DEFINITION
|
29
|
+
rescue LoadError
|
30
|
+
DATA_DUMP_DEFINITIONS = []
|
31
|
+
end
|
32
|
+
|
33
|
+
Aws.config[:region] = ENV['AWS_REGION']
|
34
|
+
|
35
|
+
DEFAULT_DUMP_FILE = File.join(Dir.pwd, 'spec', 'dynamo_schema_dump.yml')
|
36
|
+
DEFAULT_DATA_DUMP_DIR = File.join(Dir.pwd, 'spec', 'dynamo_data_dumps')
|
37
|
+
DEFAULT_S3_DUMP_DIR = File.join(Dir.pwd, 'spec', 's3_test_files')
|
38
|
+
DYNAMO_TABLE_FIELDS = %i(
|
39
|
+
local_secondary_indexes
|
40
|
+
global_secondary_indexes
|
41
|
+
|
42
|
+
index_name
|
43
|
+
projection
|
44
|
+
projection_type
|
45
|
+
non_key_attributes
|
46
|
+
|
47
|
+
attribute_definitions
|
48
|
+
key_schema
|
49
|
+
provisioned_throughput
|
50
|
+
|
51
|
+
attribute_name
|
52
|
+
attribute_type
|
53
|
+
key_type
|
54
|
+
|
55
|
+
table_name
|
56
|
+
read_capacity_units
|
57
|
+
write_capacity_units
|
58
|
+
)
|
59
|
+
|
60
|
+
# Abstract base for every dump/restore processor. Subclasses implement #run.
class BaseProcessor
  # Entry point; concrete processors must override this.
  def run
    raise NotImplementedError
  end
end
|
65
|
+
|
66
|
+
# Shared setup for processors that talk to DynamoDB: builds the client
# (honoring the DYNAMO_ENDPOINT override used against a local fake) and
# resolves the dump file path.
class BaseDynamoProcessor < BaseProcessor
  attr_accessor :dump_file

  def initialize(dump_file=nil)
    client_options = {}
    # Point at a local DynamoDB when the endpoint override is set.
    client_options[:endpoint] = ENV['DYNAMO_ENDPOINT'] if ENV['DYNAMO_ENDPOINT']
    @dynamo_client = Aws::DynamoDB::Client.new(**client_options)
    @dump_file = dump_file || DEFAULT_DUMP_FILE
  end
end
|
78
|
+
|
79
|
+
# Shared setup for processors that talk to S3: builds the client (honoring
# the FAKES3_ENDPOINT override for local testing) and computes where the
# object's local dump copy lives under DEFAULT_S3_DUMP_DIR.
class S3BaseProcessor < BaseProcessor
  attr_accessor :bucket_name, :key_name

  def initialize(bucket_name, key_name)
    client_options = {}
    # Point at a local fake S3 when the endpoint override is set.
    client_options[:endpoint] = ENV['FAKES3_ENDPOINT'] if ENV['FAKES3_ENDPOINT']
    @s3_client = Aws::S3::Client.new(**client_options)
    @bucket_name = bucket_name
    @key_name = key_name
    # Local mirror path: <dump dir>/<bucket>/<key>.
    @dump_file = File.join(DEFAULT_S3_DUMP_DIR, bucket_name, key_name)
    @file_contents = nil
  end
end
|
92
|
+
|
93
|
+
# Downloads a single S3 object and writes it to the local dump path,
# pretty-printing JSON payloads so the stored fixtures stay diff-friendly.
# NOTE(review): JSON is referenced but 'json' is not required in this
# module — presumably loaded transitively by aws-sdk-core; confirm.
class S3FileDump < S3BaseProcessor
  def run
    dump_data
  end

  # Lazily fetches the object body from S3; memoized after the first call.
  def file_contents
    @file_contents ||= begin
      body = @s3_client.get_object(bucket: @bucket_name, key: @key_name).body.read
      if @key_name.end_with?('.json')
        JSON.pretty_generate(JSON.parse(body))
      else
        body
      end
    end
  end

  # Writes the fetched contents to @dump_file, creating directories as needed.
  def dump_data
    target_dir = File.dirname(@dump_file)
    FileUtils.mkdir_p(target_dir) unless File.directory?(target_dir)
    File.open(@dump_file, 'w') { |file| file.write file_contents }
  end
end
|
116
|
+
|
117
|
+
# Uploads one locally dumped file back into the (fake) S3 service,
# creating the bucket first.
class S3FileRestore < S3BaseProcessor
  def run
    restore
  end

  # Lazily reads the local dump file; memoized after the first call.
  def file_contents
    @file_contents ||= File.read(@dump_file)
  end

  # Creates the bucket and uploads the file contents as the object body.
  def restore
    puts @bucket_name
    puts @key_name
    @s3_client.create_bucket(bucket: @bucket_name)
    @s3_client.put_object(bucket: @bucket_name, key: @key_name, body: file_contents)
  end
end
|
136
|
+
|
137
|
+
# Restores every file found under DEFAULT_S3_DUMP_DIR into the (fake) S3
# service. The first path component below the dump dir is the bucket name;
# the remainder of the path is the object key.
class S3Restore < BaseProcessor
  def initialize
    @s3_files = nil
    @s3_restore_processors = []
  end

  def run
    s3_restore_processors.each { |processor| processor.run }
  end

  # All regular files under the dump directory, discovered lazily.
  def s3_files
    if @s3_files.nil?
      @s3_files = Dir[File.join(DEFAULT_S3_DUMP_DIR, '**', '*')].reject { |p| File.directory?(p) }
    end
    @s3_files
  end

  # One S3FileRestore per dumped file, built lazily.
  def s3_restore_processors
    if @s3_restore_processors.empty?
      s3_files.each do |s3_file|
        relative_path = s3_file.split(DEFAULT_S3_DUMP_DIR)[1][1..-1]
        # FIX: split on the first separator only. The previous
        # relative_path.gsub(bucket, '') removed EVERY occurrence of the
        # bucket name, corrupting keys that happen to contain it
        # (e.g. bucket "data", key "data/file.json").
        bucket, key = relative_path.split('/', 2)
        @s3_restore_processors << S3FileRestore.new(bucket, key)
      end
    end
    @s3_restore_processors
  end
end
|
166
|
+
|
167
|
+
|
168
|
+
# Dumps one DynamoDB table's rows to "<dump dir>/<table>.yml". When the
# dump definition supplies :key_conditions a query is issued; otherwise
# the whole table is scanned.
class DynamoTableDump < BaseDynamoProcessor
  attr_accessor :table_name, :data_dump_definition

  def initialize(data_dump_definition=nil, dump_dir=nil, table_name=nil)
    super nil
    @data_dump_definition = data_dump_definition || {}
    # Explicit table_name wins; otherwise take it from the definition.
    @table_name = table_name || @data_dump_definition[:table_name]
    @dump_dir = dump_dir || DEFAULT_DATA_DUMP_DIR
    @dump_file = File.join(@dump_dir, "#{@table_name}.yml")
    @query_results = nil
  end

  def run
    dump_data
  end

  # Serializes {table_name:, data:} to @dump_file as YAML.
  def dump_data
    payload = { table_name: @table_name, data: query_results }
    FileUtils.mkdir_p(@dump_dir) unless File.directory?(@dump_dir)
    File.open(@dump_file, 'w') { |file| file.write payload.to_yaml }
  end

  # Query restricted by the definition's key conditions.
  def _query
    @dynamo_client.query(
      table_name: @table_name,
      select: 'ALL_ATTRIBUTES',
      key_conditions: @data_dump_definition[:key_conditions]
    )
  end

  # Full table scan, used when no key conditions are defined.
  def _scan
    @dynamo_client.scan(table_name: @table_name)
  end

  # Items fetched from DynamoDB; memoized after the first call.
  def query_results
    if @query_results.nil?
      response = @data_dump_definition[:key_conditions].nil? ? _scan : _query
      @query_results = response.items
    end
    @query_results
  end
end
|
216
|
+
|
217
|
+
# Drives DynamoTableDump over the configured DATA_DUMP_DEFINITIONS.
# With a table_name: dumps just that table (using its definition when one
# exists). Without one: dumps every defined table.
class DynamoDataDump < BaseDynamoProcessor
  attr_accessor :table_name, :data_dump_definitions

  def initialize(table_name=nil, dump_dir=nil)
    super nil
    @dump_dir = dump_dir || DEFAULT_DATA_DUMP_DIR
    @table_name = table_name
    @data_dump_definitions = nil
  end

  # Definitions to process: all of them, or just the one matching
  # @table_name (empty when no definition matches).
  def data_dump_definitions
    if @data_dump_definitions.nil?
      @data_dump_definitions =
        if @table_name.nil?
          DATA_DUMP_DEFINITIONS
        else
          match = DATA_DUMP_DEFINITIONS.find { |d| d[:table_name] == table_name }
          match.nil? ? [] : [match]
        end
    end
    @data_dump_definitions
  end

  def run
    if data_dump_definitions.empty?
      # No matching definition: fall back to a plain scan of the named table.
      return if @table_name.nil?
      DynamoTableDump.new(nil, @dump_dir, @table_name).run
    else
      data_dump_definitions.each do |definition|
        DynamoTableDump.new(definition, @dump_dir).run
      end
    end
  end
end
|
252
|
+
|
253
|
+
# Restores one table's rows from a YAML dump file produced by
# DynamoTableDump, applying any :replace_first / :replace_these overrides
# from the matching entry in DATA_DUMP_DEFINITIONS.
class DynamoTableDataRestore < BaseDynamoProcessor
  def initialize(dump_file)
    super dump_file
    @table_name = nil
    @data = nil
    @data_dump_definition = nil
  end

  def run
    data.each_with_index do |item, index|
      # :replace_first is merged into the first row only.
      item.merge!(data_dump_definition.fetch(:replace_first, {})) if index == 0
      # :replace_these is merged into every row.
      item.merge!(data_dump_definition.fetch(:replace_these, {}))
      @dynamo_client.put_item(table_name: table_name, item: item)
    end
  end

  # Loads the table name and row data out of the YAML dump file.
  # FIX: YAML.load_file closes the file handle; the previous
  # YAML.load(File.open(...)) left it open until GC.
  # NOTE(review): on Psych >= 4, loading symbol keys may require
  # permitted_classes — confirm against the target Ruby version.
  def parse_file
    file_contents = YAML.load_file(@dump_file)
    @table_name = file_contents[:table_name]
    @data = file_contents[:data]
  end

  # Dump definition matching this table, or {} when none is configured.
  def data_dump_definition
    if @data_dump_definition.nil?
      @data_dump_definition = DATA_DUMP_DEFINITIONS.find { |d| d[:table_name] == table_name } || {}
    end
    @data_dump_definition
  end

  def table_name
    parse_file if @table_name.nil?
    @table_name
  end

  def data
    parse_file if @data.nil?
    @data
  end
end
|
294
|
+
|
295
|
+
# Restores every YAML dump file in the dump directory into DynamoDB, one
# DynamoTableDataRestore per file.
class DynamoDataRestore < BaseDynamoProcessor

  def initialize(dump_dir=nil)
    # FIX: pass nil explicitly. The previous bare `super` forwarded
    # dump_dir into the superclass as its dump_file — a directory is not a
    # dump file, and every sibling class calls `super nil`.
    super nil
    @dump_dir = dump_dir || DEFAULT_DATA_DUMP_DIR
    @data_dump_files = []
  end

  # Regular files directly inside the dump directory, discovered lazily.
  def data_dump_files
    if @data_dump_files.empty?
      Dir.entries(@dump_dir).each do |entry|
        path = File.join(@dump_dir, entry)
        @data_dump_files << path if File.file?(path)
      end
    end
    @data_dump_files
  end

  def run
    data_dump_files.each do |dump_file|
      DynamoTableDataRestore.new(dump_file).run
    end
  end
end
|
321
|
+
|
322
|
+
# Dumps the schema of every DynamoDB table to a single YAML file, keeping
# only the fields listed in DYNAMO_TABLE_FIELDS so the dump can be fed
# back into create_table.
class DynamoSchemaDump < BaseDynamoProcessor
  attr_accessor :table_names, :schemata

  def initialize(dump_file=nil)
    super dump_file
    @table_names = nil
    @schemata = nil
  end

  def run
    dump_schemata
  end

  # Serializes the pruned schemas to @dump_file as YAML.
  def dump_schemata
    target_dir = File.dirname(@dump_file)
    FileUtils.mkdir_p(target_dir) unless File.directory?(target_dir)
    File.open(@dump_file, 'w') { |file| file.write schemata.to_yaml }
  end

  # Table names listed by DynamoDB; memoized after the first call.
  def table_names
    @table_names ||= @dynamo_client.list_tables.table_names
  end

  # One pruned schema hash per table; memoized after the first call.
  def schemata
    if @schemata.nil?
      @schemata = table_names.map do |table_name|
        schema = @dynamo_client.describe_table(table_name: table_name).to_h[:table]
        # keep_keys mutates in place and returns the same hash.
        keep_keys(DYNAMO_TABLE_FIELDS, schema)
      end
    end
    @schemata
  end
end
|
363
|
+
|
364
|
+
# Recreates DynamoDB tables from a YAML schema dump produced by
# DynamoSchemaDump. Restoring is idempotent: already-existing tables are
# skipped.
class DynamoSchemaRestore < BaseDynamoProcessor
  attr_accessor :schemata

  def initialize(dump_file=nil)
    super dump_file
    @schemata = nil
  end

  def run
    create_tables
  end

  # Creates each table, ignoring ones that already exist.
  def create_tables
    schemata.each do |schema|
      begin
        @dynamo_client.create_table(schema)
      rescue Aws::DynamoDB::Errors::ResourceInUseException
        # Table already exists — deliberate best-effort, keep going.
      end
    end
  end

  # Schemas parsed from the dump file; memoized after the first call.
  # FIX: YAML.load_file closes the file handle; the previous
  # YAML.load(File.open(...)) left it open until GC.
  def schemata
    if @schemata.nil?
      @schemata = YAML.load_file(@dump_file)
    end
    @schemata
  end
end
|
392
|
+
end
|
metadata
ADDED
@@ -0,0 +1,133 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: aws_test_dump
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Daniel Hanson
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2017-10-09 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: aws-sdk
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - "~>"
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '2'
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - "~>"
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '2'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: bundler
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - "~>"
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '1.14'
|
34
|
+
type: :development
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - "~>"
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '1.14'
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: rake
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - "~>"
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '10.0'
|
48
|
+
type: :development
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - "~>"
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '10.0'
|
55
|
+
- !ruby/object:Gem::Dependency
|
56
|
+
name: rspec
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - "~>"
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '3.0'
|
62
|
+
type: :development
|
63
|
+
prerelease: false
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - "~>"
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '3.0'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: simplecov
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - "~>"
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '3.0'
|
76
|
+
type: :development
|
77
|
+
prerelease: false
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - "~>"
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '3.0'
|
83
|
+
description:
|
84
|
+
email:
|
85
|
+
- daniel.hanson82@gmail.com
|
86
|
+
executables:
|
87
|
+
- aws_test_dump
|
88
|
+
extensions: []
|
89
|
+
extra_rdoc_files: []
|
90
|
+
files:
|
91
|
+
- ".dockerignore"
|
92
|
+
- ".editorconfig"
|
93
|
+
- ".gitignore"
|
94
|
+
- ".rspec"
|
95
|
+
- ".travis.yml"
|
96
|
+
- CODE_OF_CONDUCT.md
|
97
|
+
- Dockerfile
|
98
|
+
- Gemfile
|
99
|
+
- LICENSE.txt
|
100
|
+
- README.md
|
101
|
+
- Rakefile
|
102
|
+
- aws_test_dump.gemspec
|
103
|
+
- bin/aws_test_dump
|
104
|
+
- bin/console
|
105
|
+
- bin/setup
|
106
|
+
- docker-compose.yml
|
107
|
+
- lib/aws_test_dump.rb
|
108
|
+
- lib/aws_test_dump/version.rb
|
109
|
+
homepage: https://github.com/DanHanson82/ruby-aws-test-dump
|
110
|
+
licenses:
|
111
|
+
- MIT
|
112
|
+
metadata: {}
|
113
|
+
post_install_message:
|
114
|
+
rdoc_options: []
|
115
|
+
require_paths:
|
116
|
+
- lib
|
117
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
118
|
+
requirements:
|
119
|
+
- - ">="
|
120
|
+
- !ruby/object:Gem::Version
|
121
|
+
version: '0'
|
122
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
123
|
+
requirements:
|
124
|
+
- - ">="
|
125
|
+
- !ruby/object:Gem::Version
|
126
|
+
version: '0'
|
127
|
+
requirements: []
|
128
|
+
rubyforge_project:
|
129
|
+
rubygems_version: 2.4.8
|
130
|
+
signing_key:
|
131
|
+
specification_version: 4
|
132
|
+
summary: simple script for dumping and restoring aws test data for local testing
|
133
|
+
test_files: []
|