hansolo 0.0.1.alpha.3 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.yardopts +1 -0
- data/README.md +79 -15
- data/bin/hansolo +2 -49
- data/bin/hansolo-databag +6 -0
- data/bin/hansolo-ssh +6 -0
- data/hansolo.gemspec +5 -0
- data/lib/hansolo/commands/base.rb +85 -0
- data/lib/hansolo/commands/data_bag.rb +82 -0
- data/lib/hansolo/commands/solo.rb +79 -0
- data/lib/hansolo/commands/ssh.rb +58 -0
- data/lib/hansolo/librarians/berkshelf.rb +15 -0
- data/lib/hansolo/librarians.rb +1 -0
- data/lib/hansolo/providers/aws/data_bags.rb +25 -0
- data/lib/hansolo/providers/aws/discovery.rb +58 -0
- data/lib/hansolo/providers/aws/solo.rb +27 -0
- data/lib/hansolo/providers/aws.rb +30 -0
- data/lib/hansolo/providers/default/data_bags.rb +29 -0
- data/lib/hansolo/providers/default/solo.rb +68 -0
- data/lib/hansolo/providers/default.rb +21 -0
- data/lib/hansolo/version.rb +1 -1
- data/lib/hansolo.rb +35 -199
- metadata +103 -5
data/.yardopts ADDED

@@ -0,0 +1 @@
+--markup markdown --title hansolo --protected -M github-markup -M redcarpet lib/**/*.rb
data/README.md CHANGED

@@ -2,7 +2,7 @@
 
 NOTE: This is alpha code.
 
-
+CLI tool to automate berkshelf and chef-solo deployment
 
 ## Installation
 
@@ -18,25 +18,89 @@ Or install it yourself as:
 
     $ gem install hansolo
 
-##
+## Usage
 
-
-"urls": [ "vagrant@localhost:2222" ],
-"runlist": [ "my_app::deploy" ],
-"app":"my_app",
-"keydir":"/Applications/Vagrant/embedded/gems/gems/vagrant-1.1.4/keys/vagrant",
-"aws_access_key_id":"AAAAAAAAAAAAAAAAAAAA",
-"aws_secret_access_key":"1111111111111111111111111111111111111111",
-"aws_bucket_name":"acme-data_bags",
-"aws_data_bag_keys":["my_app/stage/environment.json"]
-}
+`hansolo` provides three command line utilities for managing nodes with `chef-solo`.
 
-
+* `hansolo`: runs `rsync` to copy cookbooks and data bags to the target nodes, generates a manifest, and executes `chef-solo` against the generated manifest.
+* `hansolo-databag`: manages data bags.
+* `hansolo-ssh`: SSHs into one of the target nodes.
+
+To see what options can be supplied, run the command with `-h` or `--help`.
+
+
+## `Hanfile` options
+
+```ruby
+Hansolo.configure do |config|
+  # Path to SSH keys
+  config.keydir = '~/.ssh/chef'
+
+  # Gateway server if nodes are in a private network. Must be a valid ssh URI
+  # or URI instance.
+  config.gateway = 'ssh://user@gateway.example.com:20202'
+
+  # Name of the application
+  config.app = 'blog'
+
+  # Nodes to run `chef-solo` on. Can be a single ssh URI, an array of ssh
+  # URIs, or a URI instance.
+  config.target = 'ssh://user@blog.example.com'
+
+  # List of recipes to run.
+  config.runlist = ['recipe']
+
+  # Local path where cookbooks should be installed to using
+  # `Hansolo.librarian`. Defaults to `./tmp/cookbooks`
+  config.cookbooks_path = '/tmp/chef/cookbooks'
+
+  # Local path where data bags will be written when using `hansolo-databag`.
+  # Defaults to `./tmp/data_bags`
+  config.data_bags_path = '/tmp/chef/data_bags'
+
+  # Command to run on the node after SSHing.
+  config.post_ssh_command = 'export RAILS_ENV=production; cd /srv/blog/current'
+
+  # Which chef cookbook manager to use. Currently, only `:berkshelf` is
+  # supported.
+  config.librarian = :berkshelf
+
+  # SSH options to use when running `rsync` or `hansolo-ssh`.
+  # Defaults to `-q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no`.
+  config.ssh_options = '-vvv'
+end
+```
+
+## Providers
+
+`hansolo`'s behavior can be augmented by different providers by requiring them
+in a `Hanfile`. Currently, AWS is the only provider available.
+
+```ruby
+# Add AWS functionality to the toolset
+require 'hansolo/providers/aws'
+
+Hansolo.configure do |config|
+  # ...
+end
+```
+
+### AWS Provider
+
+The AWS provider augments `hansolo` to store data\_bags in S3 and adds the
+ability to query EC2 for the IP address of the gateway and/or target nodes.
 
-
+Data bags are stored in a bucket with the name `data_bags-:app`. The bucket is
+created if it does not exist.
 
-
+To have the IP address of the gateway queried, use the following URI scheme:
+`<tag_name>://user@<value>:port`. The `<tag_name>` is the name of any tag on
+the instance (e.g. `Name://user@bastion`).
 
+To query instances, set `Hansolo.target` to a hash with the keys `:user`,
+`:host`, and optionally `:port` (if not `22`). The `:host` key should be set to
+the tag to query, and that tag's value is matched against the application name
+(i.e. the equivalent of `role://user@api`).
 
 ## Contributing
 
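For illustration only (this block is not part of the released package), a `Hanfile` that combines the options documented above with the accessors added by `hansolo/providers/aws` later in this diff might look like the following; the application name, tag names, and credential sources are hypothetical:

```ruby
# Hypothetical Hanfile -- a sketch based on the README and on
# lib/hansolo/providers/aws.rb and discovery.rb in this release.
require 'hansolo/providers/aws'

Hansolo.configure do |config|
  config.app     = 'api'
  config.runlist = ['api::deploy']

  # Credentials and the S3 bucket the AWS provider uses for data bags.
  config.aws_access_key_id     = ENV['AWS_ACCESS_KEY_ID']
  config.aws_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY']
  config.bucket_name           = 'acme-data_bags'

  # Gateway resolved by EC2 tag lookup: the instance tagged Name=bastion.
  config.gateway = 'Name://deploy@bastion'

  # Target nodes resolved by tag: instances whose `role` tag equals the app name.
  config.target = { user: 'deploy', host: :role }
end
```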
data/bin/hansolo CHANGED

@@ -1,53 +1,6 @@
 #!/usr/bin/env ruby
 $: << File.expand_path(File.join(__FILE__, '..', '..', 'lib'))
 
-require 'hansolo'
-require 'optparse'
+require 'hansolo/commands/solo'
 
-
-
-optparse = OptionParser.new do |opts|
-  opts.banner = Hansolo::Cli.banner
-
-  opts.on( '-h', '--help', 'Display this screen' ) do
-    puts opts
-    puts "\n"
-    puts Hansolo::Cli.help
-    exit
-  end
-
-  opts.on( '-c', '--config file', String, 'Path to config file') do |filename|
-    conf_options = JSON.parse(File.read(filename)) if filename != '' and File.exists?(filename)
-  end
-
-  opts.on( '-u', '--urls a,b,c', Array, "Comma-sep list of urls, e.g.: user@host:port/dest/path") do |o|
-    options[:urls] = o
-  end
-
-  opts.on( '-k', '--keydir s', String, "Your local ssh key directory") do |o|
-    options[:keydir] = o
-  end
-
-  opts.on( '-a', '--app s', String, "The application name") do |o|
-    options[:app] = o
-  end
-
-  opts.on( '-s', '--stage s', String, "The stage name") do |o|
-    options[:stage] = o
-  end
-
-  opts.on( '-r', '--runlist a,b,c', Array, "The runlist you want to effect on the target(s)") do |o|
-    options[:runlist] = o
-  end
-end.parse!(ARGV)
-
-unless conf_options.any?
-  default_conf_filename = File.expand_path(File.join(".",".hansolo.json"))
-  conf_options = JSON.parse(File.read(default_conf_filename)) if File.exists?(default_conf_filename)
-end
-
-
-opts = conf_options.merge(options).inject({}){|m,(k,v)| m[k.to_sym] = v; m}
-
-h = Hansolo::Cli.new conf_options.merge(opts)
-h.all!
+Hansolo::Commands::Solo.run(ARGV)
data/bin/hansolo-databag ADDED

data/bin/hansolo-ssh ADDED
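The contents of the two new executables are not expanded in this diff (each is +6 -0 per the summary above). Given the rewritten `bin/hansolo`, they are presumably thin wrappers of the same shape; a hypothetical sketch of what `bin/hansolo-ssh` likely contains:

```ruby
#!/usr/bin/env ruby
# Presumed content only -- this file is collapsed in the diff above.
$: << File.expand_path(File.join(__FILE__, '..', '..', 'lib'))

require 'hansolo/commands/ssh'

Hansolo::Commands::SSH.run(ARGV)
```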
data/hansolo.gemspec CHANGED

@@ -20,9 +20,14 @@ Gem::Specification.new do |spec|
 
   spec.add_dependency "aws-sdk"
   spec.add_dependency "net-ssh"
+  spec.add_dependency "net-ssh-gateway"
   spec.add_dependency "json"
+  spec.add_dependency "terminal-table"
+  spec.add_dependency "cocaine"
 
   spec.add_development_dependency "bundler", "~> 1.3"
   spec.add_development_dependency "rake"
   spec.add_development_dependency "mocha"
+  spec.add_development_dependency "yard"
+  spec.add_development_dependency "redcarpet"
 end
data/lib/hansolo/commands/base.rb ADDED

@@ -0,0 +1,85 @@
+require 'optparse'
+require 'cocaine'
+require 'net/ssh'
+require 'net/ssh/gateway'
+require 'hansolo'
+require 'hansolo/providers/default'
+
+module Hansolo
+  module Commands
+    # Responsible for taking in command line options and reading in `Hanfile`.
+    # Any unique command line options should be added in a subclass. Provides
+    # minimal helpers for executing commands.
+    class Base
+      include Providers::DefaultBehavior
+
+      # @!attribute [r] bastion
+      #   @return [URI] attributes of the bastion server
+      attr_reader :bastion
+
+      # Run the command
+      # @see {#run}
+      def self.run(arguments)
+        new(arguments).run
+      end
+
+      # Sets up command
+      #
+      # * Loads the `Hanfile`
+      # * Parses command line arguments
+      # * Determines the {#bastion} if {Hansolo.gateway} is specified
+      def initialize(arguments)
+        load_hanfile!
+
+        setup_parser
+        parser.parse!(arguments)
+
+        determine_bastion if Hansolo.gateway
+      end
+
+      # Public interface to the command to be implemented in a subclass.
+      def run
+        raise NotImplementedError
+      end
+
+      private
+
+      def exec(command)
+        Hansolo.logger.debug(command)
+        Kernel.exec(command)
+      end
+
+      def call(command)
+        Hansolo.logger.debug(command)
+        %x{#{command}}
+      end
+
+      def parser
+        @parser ||= OptionParser.new
+      end
+
+      def load_hanfile!
+        load hanfile_path if File.exists?(hanfile_path)
+      end
+
+      def hanfile_path
+        @hanfile_path ||= File.expand_path('Hanfile')
+      end
+
+      def setup_parser
+        parser.on( '-h', '--help', 'display this screen' ) do
+          puts parser
+          exit
+        end
+
+        parser.on( '-t', '--target a,b,c', Array, "comma-sep list of urls, e.g.: user@host:port/dest/path") do |option|
+          Hansolo.target = option
+        end
+
+        parser.on( '-a', '--app s', String, "the application name") do |option|
+          Hansolo.app = option
+        end
+      end
+    end
+  end
+end
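`Base` is the extension point its comment describes: subclasses add their own flags by overriding `setup_parser` and implement `run`. A hypothetical subclass, not shipped in the gem, could look like this (the command name and `--verbose` flag are invented for illustration):

```ruby
# Hypothetical command built on Hansolo::Commands::Base -- illustration only.
require 'hansolo/commands/base'

module Hansolo
  module Commands
    class Status < Base
      # Print the nodes the Hanfile (or -t/--target) resolves to.
      def run
        hosts.each { |host| puts "#{host.user}@#{host.host}:#{host.port || 22}" }
      end

      private

      def setup_parser
        super
        parser.on('-v', '--verbose', 'print extra detail') { @verbose = true }
      end
    end
  end
end

Hansolo::Commands::Status.run(ARGV)
```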
data/lib/hansolo/commands/data_bag.rb ADDED

@@ -0,0 +1,82 @@
+require 'terminal-table'
+require 'hansolo/commands/base'
+require 'hansolo/providers/default/data_bags'
+
+module Hansolo
+  module Commands
+    class DataBag < Base
+      include Providers::DefaultBehavior::DataBags
+
+      attr_accessor :bag, :item, :changes
+
+      def run
+        changes.nil? ? print : write and print
+      end
+
+      def changes=(key_value_pairs)
+        @changes = key_value_pairs.inject({}) do |hash, pair|
+          key, value = pair.split('=', 2)
+          hash[key] = value
+          hash
+        end
+      end
+
+      private
+
+      def read(content = item_content)
+        JSON.parse(content)
+      end
+
+      def all
+        data_bags.map { |key, content| [key, read(content)] }
+      end
+
+      def write
+        content = read.merge(changes).delete_if { |k, v| v.nil? || v.strip.empty? }
+        content['id'] ||= item
+
+        write_to_storage(content.to_json)
+      end
+
+      def print
+        if !bag.nil? && !item.nil?
+          rows = read
+          rows.delete('id')
+
+          terminal_table = Terminal::Table.new(rows: rows, headings: ['key', 'value'])
+        else
+          terminal_table = Terminal::Table.new do |table|
+            table.headings = ['key', 'value']
+            all.each_with_index do |(bag_and_item, content), i|
+              table.add_separator if i != 0
+
+              table.add_row [{ value: ' ', colspan: 2, alignment: :center, border_y: ' ' }]
+              table.add_row [{ value: "BAG/ITEM: #{bag_and_item}", colspan: 2, alignment: :center }]
+
+              table.add_separator
+
+              content.delete('id')
+              content.each do |k, v|
+                table.add_row [k, v]
+              end
+            end
+          end
+        end
+
+        STDOUT.puts terminal_table
+      end
+
+      def setup_parser
+        super
+
+        parser.on('-b', '--data-bag-and-item BAG/ITEM', String, 'The data-bag and data-item, e.g. config/environment') do |option|
+          self.bag, self.item = option.split('/')
+        end
+
+        parser.on('--set CONFIG', Array, 'Set or unset (with an empty value) key-value pairs, e.g. foo=bar,key=value') do |option|
+          self.changes = option
+        end
+      end
+    end
+  end
+end
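The `--set` handling above splits each pair on the first `=`, merges the result over the existing item, and drops keys whose new value is blank. A standalone sketch of that merge (plain Ruby, not gem code; the item content is made up):

```ruby
require 'json'

# Existing data bag item content, as DataBag#read would return it.
existing = { 'id' => 'environment', 'rails_env' => 'staging', 'debug' => 'true' }

# Equivalent of `--set rails_env=production,debug=` (an empty value unsets the key).
changes = ['rails_env=production', 'debug='].inject({}) do |hash, pair|
  key, value = pair.split('=', 2)
  hash[key] = value
  hash
end

merged = existing.merge(changes).delete_if { |_k, v| v.nil? || v.strip.empty? }
merged['id'] ||= 'environment'

puts merged.to_json  # => {"id":"environment","rails_env":"production"}
```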
data/lib/hansolo/commands/solo.rb ADDED

@@ -0,0 +1,79 @@
+require 'hansolo/commands/base'
+require 'hansolo/providers/default/solo'
+
+module Hansolo
+  module Commands
+    class Solo < Base
+      include Providers::DefaultBehavior::Solo
+
+      # Puts cookbooks and data bags on the target nodes and runs `chef-solo`.
+      # Providers should implement the {#sync_data_bags} and {#sync_cookbooks}.
+      def run
+        sync_data_bags
+
+        Hansolo.librarian.install!
+        sync_cookbooks
+
+        execute_chef_solo
+      end
+
+      # SSH into each node to prepare and execute a `chef-solo` run.
+      def execute_chef_solo
+        threads = hosts.map do |host|
+          ssh = connect(host)
+
+          Thread.new do
+            ssh.exec! generate_manifest.command(manifest: manifest)
+            ssh.exec! generate_json.command(json: json)
+
+            ssh.exec! chef_solo do |channel, stream, line|
+              puts line
+            end
+
+            ssh.close
+          end
+        end
+
+        threads.map(&:join)
+      end
+
+      private
+
+      def chef_solo
+        'sudo chef-solo -c /tmp/solo.rb -j /tmp/deploy.json'
+      end
+
+      def connect(host)
+        if bastion.nil?
+          Net::SSH.new(host.host, host.user, port: host.port)
+        else
+          gateway.ssh(host.host, host.user, port: host.port)
+        end
+      end
+
+      def generate_manifest
+        Cocaine::CommandLine.new('echo', ':manifest > /tmp/solo.rb')
+      end
+
+      def generate_json
+        Cocaine::CommandLine.new('echo', ':json > /tmp/deploy.json')
+      end
+
+      def manifest
+        <<-MANIFEST
+          file_cache_path '/tmp'
+          cookbook_path '/tmp/cookbooks'
+          data_bag_path '/tmp/data_bags'
+        MANIFEST
+      end
+
+      def json
+        { :run_list => Hansolo.runlist }.to_json
+      end
+
+      def gateway
+        @gateway ||= Net::SSH::Gateway.new(bastion.host, bastion.user, port: bastion.port)
+      end
+    end
+  end
+end
data/lib/hansolo/commands/ssh.rb ADDED

@@ -0,0 +1,58 @@
+require 'hansolo/commands/base'
+
+module Hansolo::Commands
+  class SSH < Base
+    def run
+      if bastion.nil?
+        exec(ssh.command(ssh_params))
+      else
+        exec(bastion_ssh.command(bastion_params))
+      end
+    end
+
+    private
+
+    def post_ssh_command
+      "#{Hansolo.post_ssh_command}; bash -i"
+    end
+
+    def ssh
+      Cocaine::CommandLine.new('ssh', ssh_params)
+    end
+
+    def ssh_options
+      options = ":user@:host #{Hansolo.ssh_options} -p :port"
+      options << ' -t :command' if Hansolo.post_ssh_command
+      options
+    end
+
+    def ssh_params
+      @ssh_params ||= begin
+        uri = hosts.sample
+
+        {
+          user: uri.user,
+          host: uri.host,
+          port: uri.port.to_s,
+          command: post_ssh_command
+        }
+      end
+    end
+
+    def bastion_ssh
+      Cocaine::CommandLine.new('ssh', bastion_ssh_options)
+    end
+
+    def bastion_ssh_options
+      "-A -l :bastion_user #{Hansolo.ssh_options} -p :bastion_port :bastion_host -t \"ssh #{ssh_options}\""
+    end
+
+    def bastion_params
+      @bastion_params ||= {
+        bastion_user: bastion.user,
+        bastion_port: bastion.port.to_s,
+        bastion_host: bastion.host
+      }.merge(ssh_params)
+    end
+  end
+end
data/lib/hansolo/librarians/berkshelf.rb ADDED

@@ -0,0 +1,15 @@
+module Hansolo::Librarians
+  module Berkshelf
+    module_function
+
+    def install!
+      directory = Pathname.new("tmp/cookbooks/#{Hansolo.app}")
+      FileUtils.mkdir_p(directory)
+
+      files = Dir[directory.join('*')]
+      FileUtils.rm_rf(files)
+
+      system("berks install --path #{directory}")
+    end
+  end
+end
data/lib/hansolo/librarians.rb ADDED

@@ -0,0 +1 @@
+require 'hansolo/librarians/berkshelf'
data/lib/hansolo/providers/aws/data_bags.rb ADDED

@@ -0,0 +1,25 @@
+module Hansolo::Providers::AWS
+  module DataBags
+    def data_bags
+      objects = bucket.objects.with_prefix(Hansolo.app).to_a
+      objects.map do |o|
+        key = o.key.chomp('.json').sub("#{Hansolo.app}/", '')
+        [key, o.read]
+      end
+    end
+
+    def item_key
+      @item_key ||= "#{Hansolo.app}/#{bag}/#{item}.json"
+    end
+
+    def item_content
+      bucket.objects[item_key].read
+    rescue AWS::S3::Errors::NoSuchKey
+      "{}"
+    end
+
+    def write_to_storage(content)
+      bucket.objects[item_key].write(content)
+    end
+  end
+end
data/lib/hansolo/providers/aws/discovery.rb ADDED

@@ -0,0 +1,58 @@
+module Hansolo
+  module Providers
+    module AWS
+      module Discovery
+        def ec2
+          @ec2 ||= ::AWS::EC2.new(Hansolo.aws_credentials)
+        end
+
+        def s3
+          @s3 ||= ::AWS::S3.new(Hansolo.aws_credentials)
+        end
+
+        def determine_bastion
+          @bastion = begin
+            uri = super
+
+            return uri if uri.scheme == 'ssh'
+
+            instance = instances_by_tag(uri.scheme.to_s, uri.host).first
+            raise ArgumentError, "no gateway with #{uri.scheme} #{uri.host} found" if instance.nil?
+
+            URI.parse("ssh://#{uri.user}@#{instance.public_ip_address}:#{uri.port || 22}")
+          end
+        end
+
+        def hosts
+          @hosts ||= begin
+            target = Hansolo.target
+            return super unless target.is_a?(Hash)
+
+            target_instances = instances_by_tag(target[:host].to_s, Hansolo.app)
+
+            target_instances.map do |instance|
+              ip_address = instance.ip_address || instance.private_ip_address
+              URI.parse("ssh://#{target[:user]}@#{ip_address}:#{target[:port] || 22}")
+            end
+          end
+        end
+
+        private
+
+        def bucket
+          @bucket ||= begin
+            name = Hansolo.bucket_name
+
+            bucket = s3.buckets[name]
+            bucket = s3.buckets.create(name) unless bucket.exists?
+            bucket
+          end
+        end
+
+        def instances_by_tag(tag, value)
+          ec2.instances.tagged(tag).tagged_values(value)
+        end
+      end
+    end
+  end
+end
data/lib/hansolo/providers/aws/solo.rb ADDED

@@ -0,0 +1,27 @@
+module Hansolo::Providers::AWS
+  module Solo
+    def sync_data_bags
+      threads = hosts.map do |host|
+        Thread.new do
+          ssh = connect(host)
+
+          command = data_bag_items.inject([]) do |cmd, object|
+            path = Pathname.new('/tmp/data_bags').join(object.key)
+
+            cmd << "mkdir -p #{path.dirname}"
+            cmd << "echo '#{object.read}' > #{path}"
+          end
+
+          ssh.exec! command.join('; ')
+          ssh.close
+        end
+      end
+
+      threads.map(&:join)
+    end
+
+    def data_bag_items
+      bucket.objects.select { |o| o.key =~ /\.json$/ }
+    end
+  end
+end
data/lib/hansolo/providers/aws.rb ADDED

@@ -0,0 +1,30 @@
+require 'aws-sdk'
+require 'hansolo'
+require 'hansolo/providers/aws/data_bags'
+require 'hansolo/providers/aws/discovery'
+require 'hansolo/providers/aws/solo'
+
+module Hansolo
+  class << self
+    attr_accessor :aws_access_key_id, :aws_secret_access_key, :bucket_name
+  end
+
+  def self.aws_credentials
+    @aws_credentials ||= {
+      access_key_id: aws_access_key_id,
+      secret_access_key: aws_secret_access_key
+    }
+  end
+
+  class Commands::Base
+    include Providers::AWS::Discovery
+  end
+
+  class Commands::DataBag
+    include Providers::AWS::DataBags
+  end
+
+  class Commands::Solo
+    include Providers::AWS::Solo
+  end
+end
data/lib/hansolo/providers/default/data_bags.rb ADDED

@@ -0,0 +1,29 @@
+module Hansolo::Providers::DefaultBehavior
+  module DataBags
+    # Key-value pairs of the name of the data bag item to the item's content.
+    # @return [Hash]
+    def data_bags
+      @data_bags ||= Dir[Hansolo.data_bags_path.join('*', '**')].map { |path| [path.chomp('.json'), load_content(path)] }
+    end
+
+    # Path to the data bag item on disk
+    def item_path
+      Hansolo.data_bags_path.join(bag, "#{item}.json")
+    end
+
+    def load_content(path)
+      File.read(path)
+    end
+
+    def item_content
+      load_content(item_path)
+    rescue
+      '{}'
+    end
+
+    def write_to_storage(content)
+      FileUtils.mkdir_p(item_path.dirname)
+      File.open(item_path, 'w') { |f| f.write content }
+    end
+  end
+end
data/lib/hansolo/providers/default/solo.rb ADDED

@@ -0,0 +1,68 @@
+module Hansolo::Providers::DefaultBehavior
+  module Solo
+
+    # `rsync` data bags to the node
+    def sync_data_bags
+      rsync_resource(:data_bags)
+    end
+
+    # `rsync` cookbooks to the node
+    def sync_cookbooks
+      rsync_resource(:cookbooks)
+    end
+
+    private
+
+    def rsync_resource(resource)
+      threads = hosts.map do |host|
+        Thread.new { call rsync.command(rsync_params(host, resource)) }
+      end
+
+      threads.map(&:join)
+    end
+
+    def rsync
+      Cocaine::CommandLine.new('rsync', rsync_options)
+    end
+
+    def rsync_options
+      "--delete -av -e \"#{ssh_options}\" :source :destination"
+    end
+
+    def ssh_options
+      if !bastion.nil?
+        "ssh -A -l :bastion_user #{Hansolo.ssh_options} :bastion_host ssh -l :user #{Hansolo.ssh_options} -p :port"
+      else
+        "ssh -l :user #{Hansolo.ssh_options} -p :port"
+      end
+    end
+
+    def rsync_params(host, content)
+      params = {
+        user: host.user,
+        ssh_options: Hansolo.ssh_options,
+        port: host.port.to_s,
+        source: source(content),
+        destination: destination(host, content)
+      }
+
+      if !bastion.nil?
+        params.merge!(
+          bastion_user: bastion.user,
+          bastion_port: bastion.port.to_s,
+          bastion_host: bastion.host
+        )
+      end
+
+      params
+    end
+
+    def source(content)
+      "#{Hansolo.send("#{content}_path").join(Hansolo.app)}/"
+    end
+
+    def destination(host, content)
+      "#{host.user}@#{host.host}:/tmp/#{content}"
+    end
+  end
+end
data/lib/hansolo/providers/default.rb ADDED

@@ -0,0 +1,21 @@
+module Hansolo
+  module Providers
+    module DefaultBehavior
+      # Sets {Hansolo::Commands::Base#bastion}
+      # @return [URI, NilClass]
+      def determine_bastion
+        @bastion = case Hansolo.gateway
+                   when String then URI.parse(Hansolo.gateway)
+                   when URI then Hansolo.gateway
+                   else raise ArgumentError, 'pass in a String or URI object'
+                   end
+      end
+
+      # Builds an array of `URI` instances representing target nodes
+      # @return [Array<URI>]
+      def hosts
+        @hosts ||= Array(Hansolo.target).map { |target| URI.parse(target) }
+      end
+    end
+  end
+end
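`DefaultBehavior#hosts` above is the non-AWS path: `Hansolo.target` is a single ssh URI string or an array of them, each run through `URI.parse`. A quick standalone check of that behaviour (the addresses are made up):

```ruby
require 'uri'

targets = ['ssh://deploy@10.0.1.5', 'ssh://deploy@10.0.1.6:2222']
hosts   = Array(targets).map { |target| URI.parse(target) }

hosts.each { |h| puts "#{h.user}@#{h.host}:#{h.port || 22}" }
# deploy@10.0.1.5:22
# deploy@10.0.1.6:2222
```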
data/lib/hansolo/version.rb CHANGED
data/lib/hansolo.rb CHANGED

@@ -1,210 +1,46 @@
-require '
-require
-require '
+require 'logger'
+require "hansolo/version"
+require 'hansolo/librarians'
 
 module Hansolo
-  class
-    attr_accessor :keydir,
-
-
-
-
-
-
-
-
-
-      @aws_secret_access_key = args[:aws_secret_access_key]
-      @aws_access_key_id = args[:aws_access_key_id]
-
-      if (@aws_secret_access_key && @aws_access_key_id && @aws_bucket_name && @aws_data_bag_keys)
-        @s3conn = AWS::S3.new(:access_key_id => args[:aws_access_key_id],
-                              :secret_access_key => args[:aws_secret_access_key])
-      end
-    end
-
-    def self.banner
-      "Usage: hansolo [OPTS]"
-    end
-
-    def self.help
-      DATA.read
-    end
-
-    def tmpdir
-      '/tmp'
-    end
-
-    def all!
-      vendor_berkshelf!
-      rsync_cookbooks!
-      rsync_data_bags! if s3conn
-      solo!
-    end
-
-    def username(url)
-      @username ||= Util.parse_url(url)[:username]
-    end
-
-    def dest_cookbooks_dir(url)
-      File.join("/", "home", username(url), "cookbooks")
-    end
-
-    def dest_data_bags_dir(url)
-      File.join("/", "home", username(url), "data_bags")
-    end
-
-    def local_cookbooks_tmpdir
-      File.join(tmpdir, 'cookbooks.working')
-    end
-
-    def local_data_bags_tmpdir
-      File.join(tmpdir, 'data_bags.working')
-    end
-
-    def vendor_berkshelf!
-      Util.call_vendor_berkshelf(local_cookbooks_tmpdir)
-    end
-
-    def s3_bucket
-      s3_bucket = s3conn.buckets[aws_bucket_name]
-      if s3_bucket.exists?
-        s3_bucket
-      else
-        s3conn.buckets.create(aws_bucket_name)
-      end
-    end
-
-    #def s3_key_name
-    #"#{app}/#{stage}/environment.json"
-    #end
-
-    #def s3_item
-    #s3_bucket.objects[s3_key_name]
-    #end
-
-    def rsync_cookbooks!
-      raise ArgumentError, "missing urls array and keydir" unless (urls && keydir)
-      urls.each do |url|
-        opts = Util.parse_url(url).merge(keydir: keydir, sourcedir: local_cookbooks_tmpdir, destdir: dest_cookbooks_dir(url))
-        Util.call_rsync(opts)
-      end
-    end
-
-    def rsync_data_bags!
-      # Grab JSON file from S3, and place it into a conventional place
-      Util.call("mkdir -p #{File.join(local_data_bags_tmpdir, 'app')}")
-
-      aws_data_bag_keys.each do |key_name|
-        item = s3_bucket.objects[key_name]
-        base_key_name = File.basename(key_name)
-        File.open(File.join(local_data_bags_tmpdir, 'app', base_key_name), 'w') do |f|
-          f.write item.read
-        end if item.exists?
-      end
-
-      urls.each do |url|
-        opts = Util.parse_url(url).merge(keydir: keydir, sourcedir: local_data_bags_tmpdir, destdir: dest_data_bags_dir(url))
-        Util.call_rsync(opts)
-      end
-    end
-
-    def solo!
-      raise ArgumentError, "missing urls array and keydir" unless (urls && keydir)
-      urls.each { |url| Util.chef_solo(Util.parse_url(url).merge(keydir: keydir, cookbooks_dir: dest_cookbooks_dir(url), data_bags_dir: dest_data_bags_dir(url), runlist: runlist)) }
-    end
+  class << self
+    attr_accessor :keydir,
+                  :gateway,
+                  :app,
+                  :target,
+                  :runlist,
+                  :cookbooks_path,
+                  :data_bags_path,
+                  :post_ssh_command,
+                  :librarian,
+                  :ssh_options
   end
 
-
-
-
-
-  end
-
-  def self.call_vendor_berkshelf(tmpdir)
-    call("rm -rf #{tmpdir} && bundle exec berks install --path #{tmpdir}")
-  end
-
-  def self.call_rsync(args={})
-    cmd = "rsync -av -e 'ssh -l #{args[:username]} #{ssh_options(["-p #{args[:port]}", "-i #{args[:keydir]}"])}' "
-    cmd << "#{args[:sourcedir]}/ #{args[:username]}@#{args[:hostname]}:#{args[:destdir]}"
-    call cmd
-  end
-
-  def self.chef_solo(args={})
-    # on remote do:
-    # build a solo.rb
-    # build a tmp json file with the contents { "run_list": [ "recipe[my_app::default]" ] }
-    # chef-solo -c solo.rb -j tmp.json
+  LOGGER = Logger.new(STDOUT)
+  LOGGER.formatter = proc do |severity, datetime, progname, msg|
+    "* #{msg}\n"
+  end
 
-
-
-    puts ssh.exec! "echo '#{ { :run_list => args[:runlist] }.to_json }' > /tmp/deploy.json"
-    ssh.exec! 'PATH="$PATH:/opt/vagrant_ruby/bin" sudo chef-solo -l debug -c /tmp/solo.rb -j /tmp/deploy.json' do |ch, stream, line|
-      puts line
-    end
-  end
-  end
+  def self.configure
+    yield self
 
-
+    self.cookbooks_path ||= Pathname.new('tmp/cookbooks')
+    self.data_bags_path ||= Pathname.new('tmp/data_bags')
+    self.ssh_options ||= '-q -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+  end
 
-
-
-
-      "cookbook_path '#{cookbooks_dir}'",
-      "data_bag_path '#{data_bags_dir}'"
-    ].join("\n")
-  end
+  def self.logger
+    LOGGER
+  end
 
-
-
-
-
-
-
-  end
+  def self.librarians
+    {
+      berkshelf: Librarians::Berkshelf
+    }
+  end
+  private_class_method :librarians
 
-
-
-    [
-      "-q",
-      "-o StrictHostKeyChecking=no",
-      "-o UserKnownHostsFile=/dev/null"
-    ] + opts
-    ).join(' ')
-  end
+  def self.librarian=(librarian)
+    @librarian = librarians[librarian]
   end
 end
-
-require "hansolo/version"
-
-__END__
-This is a simple cli program to automate deploy using chef-solo and
-berkshelf.
-
-If you pass a filename, put in JSON for the configuration. So in .hansolo.json:
-
-{ "keydir": "/Applications/Vagrant/embedded/gems/gems/vagrant-1.1.4/keys/vagrant" }
-
-Then you can pass to the command as:
-
-    $ hansolo -c .hansolo.json
-
-NOTE: Command-line args trump config settings.
-
-Example Usage:
-
-    $ hansolo -s approval -t /tmp/myapp.cookbooks \
-
-      -k /Applications/Vagrant/embedded/gems/gems/vagrant-1.1.4/keys/vagrant \
-
-      -u user@host1:22/path,user@host2:22/path \
-
-      -r apt::default,myapp::deploy
-
-    $ hansolo -s approval -c .hansolo.json
-
-    $ hansolo -s approval
-
-NOTE: You don't need to pass -c if you use the filename .hansolo.json. Passing -c
-will override reading this default.
CHANGED
@@ -1,15 +1,15 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: hansolo
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.
|
5
|
-
prerelease:
|
4
|
+
version: 0.1.0
|
5
|
+
prerelease:
|
6
6
|
platform: ruby
|
7
7
|
authors:
|
8
8
|
- Brian Kaney
|
9
9
|
autorequire:
|
10
10
|
bindir: bin
|
11
11
|
cert_chain: []
|
12
|
-
date: 2013-
|
12
|
+
date: 2013-07-09 00:00:00.000000000 Z
|
13
13
|
dependencies:
|
14
14
|
- !ruby/object:Gem::Dependency
|
15
15
|
name: aws-sdk
|
@@ -43,6 +43,22 @@ dependencies:
|
|
43
43
|
- - ! '>='
|
44
44
|
- !ruby/object:Gem::Version
|
45
45
|
version: '0'
|
46
|
+
- !ruby/object:Gem::Dependency
|
47
|
+
name: net-ssh-gateway
|
48
|
+
requirement: !ruby/object:Gem::Requirement
|
49
|
+
none: false
|
50
|
+
requirements:
|
51
|
+
- - ! '>='
|
52
|
+
- !ruby/object:Gem::Version
|
53
|
+
version: '0'
|
54
|
+
type: :runtime
|
55
|
+
prerelease: false
|
56
|
+
version_requirements: !ruby/object:Gem::Requirement
|
57
|
+
none: false
|
58
|
+
requirements:
|
59
|
+
- - ! '>='
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
46
62
|
- !ruby/object:Gem::Dependency
|
47
63
|
name: json
|
48
64
|
requirement: !ruby/object:Gem::Requirement
|
@@ -59,6 +75,38 @@ dependencies:
|
|
59
75
|
- - ! '>='
|
60
76
|
- !ruby/object:Gem::Version
|
61
77
|
version: '0'
|
78
|
+
- !ruby/object:Gem::Dependency
|
79
|
+
name: terminal-table
|
80
|
+
requirement: !ruby/object:Gem::Requirement
|
81
|
+
none: false
|
82
|
+
requirements:
|
83
|
+
- - ! '>='
|
84
|
+
- !ruby/object:Gem::Version
|
85
|
+
version: '0'
|
86
|
+
type: :runtime
|
87
|
+
prerelease: false
|
88
|
+
version_requirements: !ruby/object:Gem::Requirement
|
89
|
+
none: false
|
90
|
+
requirements:
|
91
|
+
- - ! '>='
|
92
|
+
- !ruby/object:Gem::Version
|
93
|
+
version: '0'
|
94
|
+
- !ruby/object:Gem::Dependency
|
95
|
+
name: cocaine
|
96
|
+
requirement: !ruby/object:Gem::Requirement
|
97
|
+
none: false
|
98
|
+
requirements:
|
99
|
+
- - ! '>='
|
100
|
+
- !ruby/object:Gem::Version
|
101
|
+
version: '0'
|
102
|
+
type: :runtime
|
103
|
+
prerelease: false
|
104
|
+
version_requirements: !ruby/object:Gem::Requirement
|
105
|
+
none: false
|
106
|
+
requirements:
|
107
|
+
- - ! '>='
|
108
|
+
- !ruby/object:Gem::Version
|
109
|
+
version: '0'
|
62
110
|
- !ruby/object:Gem::Dependency
|
63
111
|
name: bundler
|
64
112
|
requirement: !ruby/object:Gem::Requirement
|
@@ -107,22 +155,72 @@ dependencies:
|
|
107
155
|
- - ! '>='
|
108
156
|
- !ruby/object:Gem::Version
|
109
157
|
version: '0'
|
158
|
+
- !ruby/object:Gem::Dependency
|
159
|
+
name: yard
|
160
|
+
requirement: !ruby/object:Gem::Requirement
|
161
|
+
none: false
|
162
|
+
requirements:
|
163
|
+
- - ! '>='
|
164
|
+
- !ruby/object:Gem::Version
|
165
|
+
version: '0'
|
166
|
+
type: :development
|
167
|
+
prerelease: false
|
168
|
+
version_requirements: !ruby/object:Gem::Requirement
|
169
|
+
none: false
|
170
|
+
requirements:
|
171
|
+
- - ! '>='
|
172
|
+
- !ruby/object:Gem::Version
|
173
|
+
version: '0'
|
174
|
+
- !ruby/object:Gem::Dependency
|
175
|
+
name: redcarpet
|
176
|
+
requirement: !ruby/object:Gem::Requirement
|
177
|
+
none: false
|
178
|
+
requirements:
|
179
|
+
- - ! '>='
|
180
|
+
- !ruby/object:Gem::Version
|
181
|
+
version: '0'
|
182
|
+
type: :development
|
183
|
+
prerelease: false
|
184
|
+
version_requirements: !ruby/object:Gem::Requirement
|
185
|
+
none: false
|
186
|
+
requirements:
|
187
|
+
- - ! '>='
|
188
|
+
- !ruby/object:Gem::Version
|
189
|
+
version: '0'
|
110
190
|
description: Tool to automate deployment using chef-solo and berkshelf
|
111
191
|
email:
|
112
192
|
- brian@vermonster.com
|
113
193
|
executables:
|
114
194
|
- hansolo
|
195
|
+
- hansolo-databag
|
196
|
+
- hansolo-ssh
|
115
197
|
extensions: []
|
116
198
|
extra_rdoc_files: []
|
117
199
|
files:
|
118
200
|
- .gitignore
|
201
|
+
- .yardopts
|
119
202
|
- Gemfile
|
120
203
|
- LICENSE.txt
|
121
204
|
- README.md
|
122
205
|
- Rakefile
|
123
206
|
- bin/hansolo
|
207
|
+
- bin/hansolo-databag
|
208
|
+
- bin/hansolo-ssh
|
124
209
|
- hansolo.gemspec
|
125
210
|
- lib/hansolo.rb
|
211
|
+
- lib/hansolo/commands/base.rb
|
212
|
+
- lib/hansolo/commands/data_bag.rb
|
213
|
+
- lib/hansolo/commands/solo.rb
|
214
|
+
- lib/hansolo/commands/ssh.rb
|
215
|
+
- lib/hansolo/librarians.rb
|
216
|
+
- lib/hansolo/librarians/berkshelf.rb
|
217
|
+
- lib/hansolo/providers/aws.rb
|
218
|
+
- lib/hansolo/providers/aws/data_bags.rb
|
219
|
+
- lib/hansolo/providers/aws/discovery.rb
|
220
|
+
- lib/hansolo/providers/aws/solo.rb
|
221
|
+
- lib/hansolo/providers/default.rb
|
222
|
+
- lib/hansolo/providers/default/data_bags.rb
|
223
|
+
- lib/hansolo/providers/default/solo.rb
|
126
224
|
- lib/hansolo/version.rb
|
127
225
|
- tests/hansolo_test.rb
|
128
226
|
homepage: ''
|
@@ -141,9 +239,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
141
239
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
142
240
|
none: false
|
143
241
|
requirements:
|
144
|
-
- - ! '
|
242
|
+
- - ! '>='
|
145
243
|
- !ruby/object:Gem::Version
|
146
|
-
version:
|
244
|
+
version: '0'
|
147
245
|
requirements: []
|
148
246
|
rubyforge_project:
|
149
247
|
rubygems_version: 1.8.23
|