aws_docker_utils 0.0.5 → 0.0.6
- data/.rbenv-gemsets +1 -0
- data/README.md +38 -0
- data/bin/aws_docker_utils +4 -3
- data/lib/aws_docker_utils/aws_config_storage.rb +42 -0
- data/lib/aws_docker_utils/compressor.rb +14 -0
- data/lib/aws_docker_utils/controllers/base.rb +13 -0
- data/lib/aws_docker_utils/controllers/configurator.rb +40 -0
- data/lib/aws_docker_utils/controllers/s3.rb +33 -0
- data/lib/aws_docker_utils/docker/client.rb +18 -0
- data/lib/aws_docker_utils/providers/s3.rb +59 -0
- data/lib/aws_docker_utils/router.rb +20 -0
- data/lib/aws_docker_utils/version.rb +1 -1
- metadata +11 -5
- data/lib/collectors/to_s3.rb +0 -33
- data/lib/docker_compose/client.rb +0 -14
- data/lib/s3/client.rb +0 -49
- data/lib/utils.rb +0 -15
data/.rbenv-gemsets
ADDED
@@ -0,0 +1 @@
+aws_docker_utils
data/README.md
ADDED
@@ -0,0 +1,38 @@
+#### This is WIP
+
+
+#### Usage:
+
+##### Installation:
+```bash
+gem install aws_docker_utils
+```
+
+##### Set AWS credentials:
+```bash
+aws_docker_utils configure init
+```
+
+##### Reset (delete) stored AWS credentials:
+```bash
+aws_docker_utils configure reset
+```
+##### Create an SQL dump from a Docker container and upload it to AWS S3:
+```bash
+aws_docker_utils backup_file from \<container_name\> to \<s3_bucket_name\> as 'backup_database' using \<cmd\>
+```
+
+##### Example:
+```bash
+aws_docker_utils backup_file from database to 'my-super-bucket-name' as 'backup' using "pg_dump -U postgres -O my_app_production"
+```
+
+The SQL backup file will be extracted from the database container, compressed and pushed to the 'my-super-bucket-name' S3 bucket with a name like
+```bash
+backup_16-04-15_16-34.sql
+```
+
+If the AWS S3 bucket does not exist, it will be created.
+
+##### License
+Author: Bogdan Kulbida, under the MIT License, 2016.
data/bin/aws_docker_utils
CHANGED
@@ -1,7 +1,7 @@
 #!/usr/bin/env ruby
 
 require "docopt"
-require_relative "../lib/
+require_relative "../lib/aws_docker_utils/router.rb"
 
 module AwsDockerUtils
 
@@ -13,6 +13,8 @@ Author: Bogdan Kulbida.
 
 Usage:
   #{__FILE__} backup_file from <container_name> to <s3_bucket_name> as <as_task> using <cmd>
+  #{__FILE__} configure init
+  #{__FILE__} configure reset
   #{__FILE__} -h | --help
   #{__FILE__} --version
 
@@ -23,8 +25,7 @@ Options:
 DOCOPT
 
 begin
-
-  collector.push
+  Router.new(Docopt::docopt(doc)).route!.activate
 rescue Docopt::Exit => e
   puts e.message
 end
data/lib/aws_docker_utils/aws_config_storage.rb
ADDED
@@ -0,0 +1,42 @@
+require 'yaml'
+
+module AwsDockerUtils
+  class AwsConfigStorage
+
+    attr_accessor :config
+
+    CONFIG_FILE_PATH = "./aws_docker_utils.yml"
+
+    def initialize
+      self.config = fetch_config
+    end
+
+    def persist!(key, value)
+      `echo '#{key}: "#{value}"' >> #{file_name}`
+    end
+
+    def clear!
+      `echo "# AWS credentials:" > #{file_name}`
+    end
+
+    def valid?
+      @config && @config.fetch('access_key') && @config.fetch('secret_key')
+    end
+
+    private
+
+    def exists?
+      !`ls #{file_name}`.empty?
+    end
+
+    def fetch_config
+      clear! unless exists?
+      YAML::load_file(file_name)
+    end
+
+    def file_name
+      CONFIG_FILE_PATH
+    end
+
+  end
+end
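For context, a minimal sketch (not part of the diff) of how the new AwsConfigStorage is used: `configure init` appends plain `key: "value"` lines to `./aws_docker_utils.yml`, and the class reads them back with YAML. The require path and the credential values below are illustrative assumptions.

```ruby
# Sketch only: exercises AwsConfigStorage as introduced in this release.
# The require path and the credential values are assumptions for illustration.
require_relative "lib/aws_docker_utils/aws_config_storage"

storage = AwsDockerUtils::AwsConfigStorage.new   # writes ./aws_docker_utils.yml if it is missing
storage.persist!(:access_key, "AKIA_EXAMPLE")    # appends: access_key: "AKIA_EXAMPLE"
storage.persist!(:secret_key, "example-secret")  # appends: secret_key: "example-secret"
storage.persist!(:region, "us-east-1")           # appends: region: "us-east-1"

fresh = AwsDockerUtils::AwsConfigStorage.new     # re-reads the YAML file
fresh.valid?                                     # => truthy once access_key and secret_key are present
fresh.config.fetch("region")                     # => "us-east-1"
fresh.clear!                                     # resets the file to just "# AWS credentials:"
```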
data/lib/aws_docker_utils/controllers/configurator.rb
ADDED
@@ -0,0 +1,40 @@
+require 'io/console'
+require_relative '../aws_config_storage'
+
+module AwsDockerUtils
+  module Controllers
+
+    class Configurator
+
+      def initialize(opts={})
+        @opts = opts
+        @config = AwsConfigStorage.new
+      end
+
+      def activate
+        if @opts.fetch('init')
+          publish(:access_key, std_input("Please enter AWS user ACCESS KEY:"))
+          publish(:secret_key, std_input("Please enter AWS user SECRET KEY:"))
+          publish(:region, std_input("Please enter AWS preferred REGION:"))
+        else
+          @config.clear!
+        end
+      end
+
+      private
+
+      def std_input(request)
+        print request
+        STDIN.noecho do |b|
+          b.gets.chomp
+        end
+      end
+
+      def publish(type, value)
+        @config.persist!(type, value)
+      end
+
+    end
+
+  end
+end
data/lib/aws_docker_utils/controllers/s3.rb
ADDED
@@ -0,0 +1,33 @@
+require 'tempfile'
+require_relative "../compressor"
+require_relative "../docker/client"
+require_relative "../providers/s3"
+
+module AwsDockerUtils
+  module Controllers
+
+    class S3
+
+      def initialize(opts={})
+        @bucket_name = opts.fetch('<s3_bucket_name>')
+        @task_name = opts.fetch('<as_task>', "file")
+        @container_name = opts.fetch('<container_name>')
+        @cmd = opts.fetch('<cmd>')
+        @file_name = Tempfile.new("#{`date '+#{@task_name}_%y-%m-%d_%H-%M'`.chop}.sql")
+      end
+
+      def activate
+        Docker::Client.new(container_name: @container_name, cmd: "#{@cmd} > #{@file_name.path}").exec
+        s3 = Providers::S3.new(bucket_name: @bucket_name)
+        if s3.put(Compressor.new(@file_name.path).compress)
+          puts "Pushed to S3."
+        else
+          puts "Unable to do the job."
+        end
+        @file_name.close
+      end
+
+    end
+
+  end
+end
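A minimal sketch (not part of the diff) of how this controller is invoked; the keys mirror the docopt placeholders from the CLI's Usage string, the values are taken from the README example, and the require path is an assumption.

```ruby
# Sketch only: the docopt-style options hash the S3 controller expects.
# Values are illustrative (from the README example); the require path is assumed.
require_relative "lib/aws_docker_utils/controllers/s3"

opts = {
  '<container_name>' => 'database',
  '<s3_bucket_name>' => 'my-super-bucket-name',
  '<as_task>'        => 'backup',
  '<cmd>'            => 'pg_dump -U postgres -O my_app_production'
}

# Dumps via docker exec into a Tempfile, compresses it, then uploads it to S3.
AwsDockerUtils::Controllers::S3.new(opts).activate
```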
data/lib/aws_docker_utils/docker/client.rb
ADDED
@@ -0,0 +1,18 @@
+module AwsDockerUtils
+  module Docker
+
+    class Client
+
+      def initialize(opts={})
+        @container_name = opts[:container_name]
+        @cmd = opts[:cmd]
+      end
+
+      def exec
+        `docker exec $(docker ps -q --filter 'name=#{@container_name}') #{@cmd}`
+      end
+
+    end
+
+  end
+end
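A minimal sketch (not part of the diff) of the shell command Docker::Client#exec builds; the container name, command, and require path are illustrative assumptions.

```ruby
# Sketch only: shows the single shell line Client#exec interpolates and runs.
# Container name and command are illustrative; the require path is assumed.
require_relative "lib/aws_docker_utils/docker/client"

client = AwsDockerUtils::Docker::Client.new(
  container_name: 'database',
  cmd: 'pg_dump -U postgres -O my_app_production > /tmp/dump.sql'
)
client.exec
# Runs roughly:
#   docker exec $(docker ps -q --filter 'name=database') pg_dump -U postgres -O my_app_production > /tmp/dump.sql
# The redirection is part of the same backtick string, so the dump lands on the
# host filesystem, which is why Controllers::S3 can hand the Tempfile path to the
# compressor afterwards.
```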
data/lib/aws_docker_utils/providers/s3.rb
ADDED
@@ -0,0 +1,59 @@
+require 'yaml'
+require 'aws-sdk'
+
+module AwsDockerUtils
+  module Providers
+
+    class S3
+
+      DEFAULT_REGION = "us-east-1"
+
+      def initialize(opts={})
+        @bucket_name = opts.fetch(:bucket_name)
+
+        storage = AwsConfigStorage.new
+        client = if storage.valid?
+          config = storage.config
+          Aws::S3::Client.new(
+            region: (config.fetch("region").to_s || DEFAULT_REGION),
+            access_key_id: config.fetch("access_key").to_s,
+            secret_access_key: config.fetch("secret_key").to_s
+          )
+        else
+          Aws::S3::Client.new(region: DEFAULT_REGION)
+        end
+
+        @s3 = Aws::S3::Resource.new(client: client)
+      end
+
+      def put(file_path)
+        raise "Please set bucket name with constructor." if @bucket_name.nil?
+
+        bucket = create_bucket(@bucket_name)
+        obj = bucket.object(File.basename(file_path.gsub(/\.sql.+/, '.sql')))
+
+        if obj.upload_file(file_path)
+          return true
+        else
+          puts "could not upload file #{@file_path} to S3."
+        end
+
+        false
+      end
+
+      private
+
+      def create_bucket(bucket_name)
+        bucket = @s3.bucket(bucket_name)
+        unless bucket.exists?
+          puts 'Bucket does not exist. Creating...'
+          bucket.create
+          puts 'Done.'
+        end
+        bucket
+      end
+
+    end
+
+  end
+end
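A minimal sketch (not part of the diff) of the provider used on its own; the bucket name, file path, and require paths are illustrative, and AwsConfigStorage is required explicitly here because providers/s3.rb references it without requiring it (in the gem it is loaded via the controllers).

```ruby
# Sketch only: uploads a file with the new provider. Bucket name and path are
# illustrative; require paths are assumed.
require_relative "lib/aws_docker_utils/aws_config_storage"  # referenced but not required by providers/s3.rb
require_relative "lib/aws_docker_utils/providers/s3"

s3 = AwsDockerUtils::Providers::S3.new(bucket_name: 'my-super-bucket-name')

# The object key strips everything after ".sql", so a compressed
# "backup_16-04-15_16-34.sql.<ext>" is stored as "backup_16-04-15_16-34.sql".
s3.put('/tmp/backup_16-04-15_16-34.sql.gz')   # => true on success, false otherwise
```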
data/lib/aws_docker_utils/router.rb
ADDED
@@ -0,0 +1,20 @@
+require_relative "controllers/s3.rb"
+require_relative "controllers/configurator.rb"
+
+class Router
+
+  def initialize(opts={})
+    @opts = opts
+  end
+
+  def route!
+    case
+    when @opts.fetch('configure') then AwsDockerUtils::Controllers::Configurator.new(@opts)
+    when @opts.fetch('backup_file') then AwsDockerUtils::Controllers::S3.new(@opts)
+    else raise "NOP"
+    end
+  end
+
+  private
+
+end
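A minimal sketch (not part of the diff) of how the router turns the docopt hash from bin/aws_docker_utils into a controller; the hash below is a hand-written stand-in for Docopt::docopt(doc), and the require path is assumed.

```ruby
# Sketch only: a hand-built docopt-style hash instead of Docopt::docopt(doc);
# the require path is assumed.
require_relative "lib/aws_docker_utils/router"

# `aws_docker_utils configure init` would produce flags like these:
opts = { 'configure' => true, 'init' => true, 'backup_file' => false }

controller = Router.new(opts).route!   # => AwsDockerUtils::Controllers::Configurator
controller.activate                    # prompts for the AWS keys and region

# Anything that is neither `configure` nor `backup_file` raises "NOP":
# Router.new('configure' => false, 'backup_file' => false).route!
```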
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: aws_docker_utils
 version: !ruby/object:Gem::Version
-  version: 0.0.
+  version: 0.0.6
 prerelease:
 platform: ruby
 authors:
@@ -51,16 +51,22 @@ extensions: []
 extra_rdoc_files: []
 files:
 - .gitignore
+- .rbenv-gemsets
 - Gemfile
 - Gemfile.lock
+- README.md
 - aws_docker_utils.gemspec
 - bin/aws_docker_utils
 - lib/aws_docker_utils.rb
+- lib/aws_docker_utils/aws_config_storage.rb
+- lib/aws_docker_utils/compressor.rb
+- lib/aws_docker_utils/controllers/base.rb
+- lib/aws_docker_utils/controllers/configurator.rb
+- lib/aws_docker_utils/controllers/s3.rb
+- lib/aws_docker_utils/docker/client.rb
+- lib/aws_docker_utils/providers/s3.rb
+- lib/aws_docker_utils/router.rb
 - lib/aws_docker_utils/version.rb
-- lib/collectors/to_s3.rb
-- lib/docker_compose/client.rb
-- lib/s3/client.rb
-- lib/utils.rb
 homepage: http://rubygems.org/gems/aws_docker_utils
 licenses:
 - MIT
data/lib/collectors/to_s3.rb
DELETED
@@ -1,33 +0,0 @@
-require "utils"
-require "docker_compose/client"
-require "s3/client"
-
-module AwsDockerUtils
-  module Collectors
-    class ToS3
-
-      def initialize(opts={})
-        @bucket_name = opts.fetch('<s3_bucket_name>')
-        @task_name = opts.fetch('<as_task>', "file")
-        @container_name = opts.fetch('<container_name>')
-        @cmd = opts.fetch('<cmd>')
-
-        @file_name = "#{`date '+#{@task_name}_%y-%m-%d_%H-%M'`.chop}.sql"
-      end
-
-      def push
-        DockerCompose::Client.new(container_name: @container_name, cmd: "#{@cmd} > #{@file_name}").exec
-        file_name = Utils.compress(@file_name)
-
-        s3 = S3::Client.new(file_name: file_name, bucket_name: @bucket_name)
-        if s3.put(file_name)
-          Utils.rm(file_name)
-          puts "Pushed to S3."
-        else
-          puts "Unable to do the job."
-        end
-      end
-
-    end
-  end
-end
data/lib/s3/client.rb
DELETED
@@ -1,49 +0,0 @@
-require 'yaml'
-require 'aws-sdk'
-
-module S3
-  class Client
-
-    DEFAULT_REGION = "us-east-1"
-
-    def initialize(opts={})
-      @bucket_name = opts[:bucket_name]
-
-      config = YAML.load(File.read(File.join(File.dirname(__FILE__), "credentials.yml")))
-      client = Aws::S3::Client.new(
-        region: (config["region"] || DEFAULT_REGION),
-        access_key_id: config["access_key"],
-        secret_access_key: config["secret_key"]
-      )
-      @s3 = Aws::S3::Resource.new(client: client)
-    end
-
-    def put(file_path)
-      raise "Please set bucket name with constructor." if @bucket_name.nil?
-
-      bucket = create_bucket(@bucket_name)
-      obj = bucket.object(File.basename(file_path))
-
-      if obj.upload_file(file_path)
-        return true
-      else
-        puts "could not upload file #{@file_path} to S3."
-      end
-
-      false
-    end
-
-    private
-
-    def create_bucket(bucket_name)
-      bucket = @s3.bucket(bucket_name)
-      unless bucket.exists?
-        puts 'Bucket does not exist. Creating...'
-        bucket.create
-        puts 'Done.'
-      end
-      bucket
-    end
-
-  end
-end
data/lib/utils.rb
DELETED