bjn_inventory 1.3.0

Files changed (43)
  1. checksums.yaml +7 -0
  2. data/.gitignore +15 -0
  3. data/.rspec +2 -0
  4. data/.travis.yml +5 -0
  5. data/Gemfile +4 -0
  6. data/README.md +227 -0
  7. data/Rakefile +17 -0
  8. data/bin/ansible-from +48 -0
  9. data/bin/aws-ec2-source +46 -0
  10. data/bin/aws-rds-source +47 -0
  11. data/bin/console +14 -0
  12. data/bin/inventory_model +34 -0
  13. data/bin/refresh_inventory_data +51 -0
  14. data/bin/setup +8 -0
  15. data/bjn_inventory.gemspec +33 -0
  16. data/lib/bjn_inventory.rb +5 -0
  17. data/lib/bjn_inventory/ansible.rb +86 -0
  18. data/lib/bjn_inventory/array.rb +22 -0
  19. data/lib/bjn_inventory/bykey.rb +7 -0
  20. data/lib/bjn_inventory/context.rb +60 -0
  21. data/lib/bjn_inventory/data_files.rb +41 -0
  22. data/lib/bjn_inventory/default_logger.rb +15 -0
  23. data/lib/bjn_inventory/device.rb +272 -0
  24. data/lib/bjn_inventory/device/map.rb +18 -0
  25. data/lib/bjn_inventory/hash.rb +6 -0
  26. data/lib/bjn_inventory/inventory.rb +105 -0
  27. data/lib/bjn_inventory/inventory/source.rb +66 -0
  28. data/lib/bjn_inventory/list.rb +11 -0
  29. data/lib/bjn_inventory/metadata.rb +7 -0
  30. data/lib/bjn_inventory/source_command.rb +41 -0
  31. data/lib/bjn_inventory/source_command/aws_ec2.rb +58 -0
  32. data/lib/bjn_inventory/source_command/aws_rds.rb +92 -0
  33. data/lib/bjn_inventory/version.rb +3 -0
  34. data/lib/inventory.rb +12 -0
  35. data/tasks/package/_package.sh +131 -0
  36. data/tasks/package/_validate.sh +36 -0
  37. data/tasks/package/run.sh +41 -0
  38. data/tasks/package/validate.sh +41 -0
  39. data/tasks/package/validate/01version.sh +11 -0
  40. data/tasks/test/Dockerfile +14 -0
  41. data/tasks/test/run.sh +23 -0
  42. data/tools/packaging_tasks.rb +123 -0
  43. metadata +188 -0
data/bin/inventory_model ADDED
@@ -0,0 +1,34 @@
+ #!/usr/bin/env ruby
+
+ # Create a list of inventory devices based on the device model
+
+ require 'trollop'
+ require 'json'
+ require 'logger'
+ require 'syslog/logger'
+ require 'bjn_inventory'
+
+ parser = Trollop::Parser.new do
+   version BjnInventory::VERSION
+   banner <<-USAGE.gsub(/^\s{8}/,'')
+         Usage:
+         inventory-devices [options]
+         USAGE
+
+   opt :key, 'Specify the key of inventory hash', required: true, type: :string
+   opt :manifest, 'Specify the manifest that defines this inventory', required: true, type: :string
+   opt :debug, 'Enable debug output', :short => '-D'
+   opt :syslog, 'Log to Syslog', :short => '-S'
+   stop_on_unknown
+ end
+
+ opt = Trollop::with_standard_exception_handling parser do
+   parser.parse(ARGV)
+ end
+
+ logger = opt[:syslog] ? Syslog::Logger.new('inventory-devices') : Logger.new(STDERR)
+ logger.level = opt[:debug] ? Logger::DEBUG : Logger::WARN
+
+ manifest = JSON.parse(File.read(opt[:manifest]))
+ inventory = BjnInventory::Inventory.new(manifest.merge({logger: logger}))
+ puts JSON.pretty_generate(inventory.by(opt[:key]))
data/bin/refresh_inventory_data ADDED
@@ -0,0 +1,51 @@
+ #!/usr/bin/env ruby
+
+ require 'trollop'
+ require 'json'
+ require 'bjn_inventory'
+ require 'bjn_inventory/data_files'
+ require 'bjn_inventory/ansible'
+
+ parser = Trollop::Parser.new do
+   version BjnInventory::VERSION
+   banner <<-USAGE.gsub(/^\s{8}/,'')
+         Usage:
+         refresh_inventory_data [options]
+         USAGE
+
+   opt :ansible, 'Specify ansible groupings file', required: true, type: :string
+   opt :manifest, 'Specify the manifest that defines this inventory', required: true, type: :string
+   opt :debug, 'Enable debug output', :short => '-D'
+   opt :syslog, 'Log to Syslog', :short => '-S'
+   opt :datadir, 'Location of inventory data', default: '/var/cache/inventory'
+   stop_on_unknown
+ end
+
+ opt = Trollop::with_standard_exception_handling parser do
+   parser.parse(ARGV)
+ end
+
+ if opt[:syslog]
+   require 'syslog/logger'
+   logger = Syslog::Logger.new 'refresh_inventory_data'
+ else
+   logger = Logger.new STDERR
+ end
+
+ if opt[:debug]
+   logger.level = Logger::DEBUG
+ else
+   logger.level = Logger::WARN
+ end
+
+ manifest = JSON.parse(File.read(opt[:manifest]))
+ ansible_spec = JSON.parse(File.read(opt[:ansible]))
+
+ inventory = BjnInventory::Inventory.new(manifest.merge({logger: logger})).by('name')
+ devices_data = inventory
+ groups_data = BjnInventory::get_groups_data(inventory, ansible_spec)
+
+ BjnInventory::refresh_inventory_data('devices', devices_data, opt)
+ BjnInventory::refresh_inventory_data('groups', groups_data, opt)
+
+
data/bin/setup ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env bash
+ set -euo pipefail
+ IFS=$'\n\t'
+ set -vx
+
+ bundle install
+
+ # Do any other automated setup that you need to do here
data/bjn_inventory.gemspec ADDED
@@ -0,0 +1,33 @@
+ # coding: utf-8
+ # -*- mode: ruby; ruby-indent-level: 2; -*-
+
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'bjn_inventory/version'
+ require 'bjn_inventory/metadata'
+
+ Gem::Specification.new do |spec|
+   spec.name = "bjn_inventory"
+   spec.version = BjnInventory::VERSION
+   spec.authors = [BjnInventory::AUTHOR]
+   spec.email = [BjnInventory::EMAIL]
+
+   spec.summary = BjnInventory::SUMMARY
+   spec.homepage = BjnInventory::URL
+
+   spec.files = `git ls-files -z`.split("\x0").reject do |f|
+     f.match(%r{^(test|spec|features)/})
+   end
+   spec.bindir = "bin"
+   spec.executables = ["ansible-from", "aws-ec2-source", "aws-rds-source", "inventory_model", "refresh_inventory_data"]
+   spec.require_paths = ["lib"]
+
+   spec.add_dependency 'jsonpath', "~> 0.7.2"
+   spec.add_dependency 'trollop'
+   spec.add_dependency 'aws-sdk'
+
+   spec.add_development_dependency "bundler", "~> 1.9"
+   spec.add_development_dependency "rake", "~> 10.0"
+   spec.add_development_dependency "rspec", "~> 3.0"
+   spec.add_development_dependency "rspec_junit_formatter"
+ end
data/lib/bjn_inventory.rb ADDED
@@ -0,0 +1,5 @@
+ require "bjn_inventory/version"
+ require "bjn_inventory/device"
+ require "bjn_inventory/inventory"
+ require "bjn_inventory/bykey"
+ require "bjn_inventory/list"
data/lib/bjn_inventory/ansible.rb ADDED
@@ -0,0 +1,86 @@
+ require 'bjn_inventory'
+ require 'bjn_inventory/default_logger'
+
+ module BjnInventory
+
+   class ByKey
+
+     def _ansible_name(value)
+       value.gsub(/[^a-zA-Z0-9_-]+/, '_')
+     end
+
+     def _field_groups(fields, device, sep='__')
+       fields = [fields] unless fields.respond_to? :inject
+       value_map = fields.map do |field|
+         values = device[field]
+         values = [values] unless values.respond_to? :map
+         values.map { |val| _ansible_name(val) }
+       end
+       #
+       # So now we have an array of arrays, e.g.:
+       #   fields='region' =>
+       #     [['dc2']]
+       #
+       #   fields=['roles', 'region'] =>
+       #     [['web', 'db'],
+       #      ['dc2']]
+       #
+       groups =
+         if fields.length == 1
+           value_map.first
+         else
+           driving_array, *rest = value_map
+           driving_array.product(*rest).map { |compound_value| compound_value.join(sep) }
+         end
+       groups
+     end
+
+     # This basically builds an ansible inventory given a hash of hostvars
+     def to_ansible(*ansible_spec)
+       if ansible_spec[-1].respond_to? :to_hash
+         kwargs = ansible_spec.pop.stringify_keys
+       else
+         kwargs = { }
+       end
+       group_by = []
+       if kwargs['group_by']
+         group_by = kwargs['group_by']
+         group_by = [group_by] unless group_by.respond_to? :each
+       end
+       group_by.concat(ansible_spec)
+
+       if group_by.empty?
+         raise ArgumentError, "Expected group_by either as keyword or as argument list"
+       end
+
+       logger ||= BjnInventory::DefaultLogger.new
+       # We need at least one field to create groups
+       separator = kwargs['separator'] || '__'
+
+       ansible_inventory = { '_meta' => {'hostvars' => self.to_hash } }
+
+       self.each do |name, device_hash|
+         group_by.each do |group_field_spec|
+           group_field_spec = [group_field_spec] unless group_field_spec.respond_to? :all?
+           if group_field_spec.all? { |field| !device_hash[field].nil? && !device_hash[field].empty? }
+             field_groups = _field_groups(group_field_spec, device_hash, separator)
+             field_groups.each do |group_name|
+               ansible_inventory[group_name] = [ ] unless ansible_inventory.has_key? group_name
+               ansible_inventory[group_name] << name
+             end
+           end
+         end
+       end
+
+       if kwargs['groups']
+         ansible_inventory.merge! Hash[kwargs['groups'].map do |group, children|
+           [group, { "hosts" => [ ], "children" => children }]
+         end]
+       end
+
+       ansible_inventory
+     end
+
+   end
+
+ end
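
Note: a minimal usage sketch of ByKey#to_ansible (not taken from the gem; the host data and grouping spec below are invented). Positional field arguments and the 'group_by' keyword feed the same list, and compound specs join their values with the separator:

    require 'bjn_inventory'
    require 'bjn_inventory/ansible'

    inventory = BjnInventory::ByKey.new
    inventory['web01'] = { 'roles' => ['web'], 'region' => 'dc2' }
    inventory['db01']  = { 'roles' => ['db'],  'region' => 'dc2' }

    inventory.to_ansible('region', 'group_by' => [['roles', 'region']])
    # => { '_meta'    => { 'hostvars' => { 'web01' => {...}, 'db01' => {...} } },
    #      'web__dc2' => ['web01'],
    #      'dc2'      => ['web01', 'db01'],
    #      'db__dc2'  => ['db01'] }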
data/lib/bjn_inventory/array.rb ADDED
@@ -0,0 +1,22 @@
+ # Inventory array helper
+ class Array
+   # This array is treated as a list of hashes, with
+   # certain common key or name fields that you can
+   # search by.
+
+   def find_key(key, value)
+     keys = [key, key.upcase, key.downcase, key.capitalize]
+     keys.concat(keys.map(&:intern))
+     self.find do |hash|
+       keys.any? { |key| hash[key] == value }
+     end
+   end
+
+   def bykey(key)
+     find_key 'key', key
+   end
+
+   def byname(name)
+     find_key 'name', name
+   end
+ end
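
Note: an invented example of how these helpers match a key across its case and symbol variants:

    hosts = [{ 'Name' => 'web01' }, { name: 'db01' }]
    hosts.find_key('name', 'web01')   # => { 'Name' => 'web01' }
    hosts.byname('db01')              # => { name: 'db01' }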
data/lib/bjn_inventory/bykey.rb ADDED
@@ -0,0 +1,7 @@
+ module BjnInventory
+
+   class ByKey < Hash
+
+   end
+
+ end
data/lib/bjn_inventory/context.rb ADDED
@@ -0,0 +1,60 @@
+ require 'json'
+ require 'bjn_inventory/hash'
+
+ module BjnInventory
+
+   class Context < Hash
+
+     def initialize(initial_data={})
+       if initial_data.respond_to? :stringify_keys
+         self.merge! initial_data.stringify_keys
+       elsif File.directory? initial_data
+         self.load_directory('', initial_data)
+       elsif File.exists? initial_data
+         self.load_file('', initial_data)
+       else
+         raise Errno::ENOENT, "File not found to create context: #{initial_data.inspect}"
+       end
+     end
+
+     def load_directory(key, dir)
+       other = _load_directory(key, dir)
+       if key.empty?
+         self.merge! other['']
+       else
+         self.merge! _load_directory(key, dir)
+       end
+     end
+
+     def load_file(key, file)
+       if key.empty?
+         self.merge! JSON.parse(File.read(file))
+       else
+         self[key] = JSON.parse(File.read(file))
+       end
+     end
+
+     def _load_directory(key, dir)
+       { key =>
+         Dir.entries(dir).inject({}) do |h, entry|
+           unless entry.start_with? '.'
+             if File.directory? File.join(dir, entry)
+               h.merge! _load_directory(entry, File.join(dir, entry))
+             elsif entry =~ /\.json$/
+               key = entry.sub(/\.json$/, '')
+               value = JSON.parse(File.read(File.join(dir, entry)))
+               if h.has_key? key and h[key].respond_to? :merge! and value.is_a? Hash
+                 h[key].merge! value
+               else
+                 h[key] = value
+               end
+             end
+           end
+           h
+         end
+       }
+     end
+
+   end
+
+ end
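
Note: a sketch of the three ways a Context can be seeded (the file and directory names below are hypothetical). A hash is merged directly, a single JSON file is parsed in at the top level, and a directory becomes nested keys named after its JSON files and subdirectories:

    require 'bjn_inventory/context'

    BjnInventory::Context.new('site' => 'dc2')   # => { "site" => "dc2" }
    BjnInventory::Context.new('context.json')    # top-level keys from context.json

    # Given context/common.json and context/aws/regions.json:
    ctx = BjnInventory::Context.new('context')
    ctx['common']            # contents of common.json
    ctx['aws']['regions']    # contents of aws/regions.json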
data/lib/bjn_inventory/data_files.rb ADDED
@@ -0,0 +1,41 @@
+
+ module BjnInventory
+   def self.get_groups_data(inventory, ansible_spec)
+     ansible_inventory = inventory.to_ansible(ansible_spec)
+     groups_data = ansible_inventory.reject {|group, devices| group == '_meta'}
+     groups_data
+   end
+
+   def self.refresh_inventory_data(type, data, opt)
+     type_dir = "#{opt[:datadir]}/#{type}"
+     IO.write("#{opt[:datadir]}/#{type}.json", JSON.pretty_generate(data))
+     if !Dir.exist?(type_dir)
+       Dir.mkdir(type_dir)
+     end
+     refresh_inventory_entries(type, data, type_dir)
+   end
+
+   def self.refresh_inventory_entries(type, data, type_dir)
+     previous_entries = Dir.entries(type_dir).reject {|listing| (listing == '.' || listing == '..' || listing == "#{type}.json")}
+     previous_entries = previous_entries.map { |file| file.sub('.json', '') }
+     current_entries = data.keys
+
+     remove_entries = previous_entries - current_entries
+
+     remove_entries.each do |name|
+       entry = "#{type_dir}/#{name}.json"
+       File.delete(entry)
+     end
+
+     current_entries.each do |name|
+       entry = "#{type_dir}/#{name}.json"
+       tmp_file = "#{type_dir}/.#{name}.#{Process.pid}.tmp"
+       begin
+         IO.write(tmp_file, JSON.pretty_generate(data[name]))
+         IO.copy_stream(tmp_file, entry)
+       rescue
+       end
+       File.delete(tmp_file)
+     end
+   end
+ end
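
Note: an invented example of refreshing the 'devices' tree. One aggregate JSON file is written, plus one file per entry under a subdirectory, and files for entries that no longer exist are deleted:

    require 'json'
    require 'bjn_inventory/data_files'

    devices = { 'web01' => { 'region' => 'dc2', 'roles' => ['web'] } }
    BjnInventory.refresh_inventory_data('devices', devices, datadir: '/var/cache/inventory')
    # writes /var/cache/inventory/devices.json
    # and    /var/cache/inventory/devices/web01.json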
data/lib/bjn_inventory/default_logger.rb ADDED
@@ -0,0 +1,15 @@
+ require 'logger'
+
+ module BjnInventory
+
+   class DefaultLogger < Logger
+
+     def initialize(progname='bjn_inventory')
+       super($stderr)
+       self.progname = progname
+       self.level = Logger::WARN
+     end
+
+   end
+
+ end
data/lib/bjn_inventory/device.rb ADDED
@@ -0,0 +1,272 @@
+ require 'ipaddr'
+ require 'json'
+ require 'jsonpath'
+ require 'bjn_inventory/hash'
+ require 'bjn_inventory/array'
+
+ module BjnInventory
+
+   def self.map(origin, &block)
+     return block
+   end
+
+   DEFAULT_MODEL = {
+     name: nil,
+     service_level: nil,
+     environment: nil,
+     chef_runlist: [],
+     roles: [],
+     interfaces: [],
+     ipaddress: nil,
+     management_ipaddress: nil,
+     tags: [],
+     region: nil,
+     type: nil,
+     os: nil,
+     os_release: nil
+   }
+
+   class Device
+
+     @@model = DEFAULT_MODEL
+     @@context = { }
+
+     def self.use_context(data)
+       if data
+         @@context = @@context.merge(data.stringify_keys)
+       end
+     end
+
+     def self.use_model(model)
+       if model
+         @@model = model
+       else
+         @@model = DEFAULT_MODEL
+       end
+       self
+     end
+
+     def self.use_default_model()
+       @@model = DEFAULT_MODEL
+       self
+     end
+
+     def self.model()
+       @@model
+     end
+
+     def self.model=(new_model)
+       @@model = new_model
+     end
+
+     def self.maybe_block(args, block=nil, default=nil)
+       if args.length == 1
+         if args[0].respond_to? :call
+           return args[0]
+         else
+           # A constant
+           return Proc.new { |_dummy| args[0] }
+         end
+       elsif block
+         return block
+       end
+       return default
+     end
+
+     def generate_getter(field)
+       self.define_singleton_method(field.intern) do |*args, &block|
+         value = nil
+         new_block = Device.maybe_block(args, block)
+         if new_block
+           @rule[field.to_s] = new_block
+         else
+           if @rule[field.to_s]
+             begin
+               value = @rule[field.to_s].call(@data, self)
+             rescue Exception => err
+               identifier_phrase = " for #{self.guessed_identity}"
+               raise RuntimeError, "Error evaluating #{field.to_s} rule (#{err.class}: #{err.to_s})#{identifier_phrase}"
+             end
+           else
+             value = @data[field.to_s]
+           end
+         end
+         value
+       end
+     end
+
+     def guessed_identity
+       # Try to find a way to summarize the name or id; this needs to work
+       # regardless of rules (within rules), so it can't exercise the actual name/id methods
+       identifier_key = %w(name id fqdn device_name device_id hostname instance_id).find { |key| @data[key] }
+       if identifier_key
+         identifier_phrase = "#{identifier_key}=#{@data[identifier_key]}"
+       else
+         identifier_phrase = "(no-id)"
+       end
+       identifier_phrase
+     end
+
+     def generate_setter(field)
+       self.define_singleton_method((field.to_s + '=').intern) do |new_value|
+         @data[field.to_s] = new_value
+       end
+     end
+
+     # Creates a prototype using the specified origin rules
+     # file (or rules text), which can be used to create new
+     # objects based on data, kind of like a class (but it's
+     # not a class, it's a prototype).
+     def self.using(rules=nil)
+       prototype = self.new()
+       if rules
+         prototype.load_rules(rules)
+       end
+       # Define an instance method new() that copies
+       # the prototype
+       prototype.define_singleton_method(:new) do |hash_data={}|
+         self.clone().set(hash_data)
+       end
+       prototype
+     end
+
+     def initialize(hash_data={})
+       @@model.each do |field, _dummy|
+         generate_getter(field)
+         generate_setter(field)
+       end
+       @rule = { }
+       @data = @@model.stringify_keys
+       self.set(hash_data)
+     end
+
+     def set(hash_data={})
+       @data = @data.merge(hash_data.stringify_keys)
+       self
+     end
+
+     def context()
+       @@context
+     end
+
+     def load_rules(origin)
+       if File.exist? origin
+         rules_text = File.read(origin)
+       else
+         rules_text = origin
+       end
+       self.instance_eval(rules_text)
+     end
+
+     # DSL - Set origin name
+     def origin(origin_name=nil)
+       if origin_name
+         @origin = origin_name
+       else
+         @origin
+       end
+     end
+
+     # DSL
+     # Allows: map field: ruby { |data| data['alias'] }
+     # Allows: map field: jsonpath 'expr'
+     # but it is actually unnecessary
+     def map(fieldmap)
+       fieldmap.each do |field, value|
+         self.send(field.intern, value)
+       end
+     end
+
+     # DSL
+     # Syntactic sugar for Proc.new, basically.
+     # Allows: name ruby { |data| data['aliases'][0] || device['name'] }
+     # Allows: name ruby { |_dummy| name + '.domain.org' } ???
+     def ruby(&block)
+       block
+     end
+
+     # DSL
+     # Allows: name jsonpath '$.aliases[0]'
+     def jsonpath(expr)
+       Proc.new do |data|
+         value = JsonPath.on(data.to_json, expr).first
+         value
+       end
+     end
+
+     # DSL
+     # Allows: name synonym :model_field
+     def synonym(symbol)
+       Proc.new do |data, device|
+         device.send(symbol)
+       end
+     end
+
+     # DSL
+     # Allows: name always 'virtual'
+     def always(value)
+       Proc.new do |_data, _device|
+         value
+       end
+     end
+
+     def entry
+       @data
+     end
+
+     def validate()
+       # Raises exceptions if any of the getters barf, basically
+       @@model.each { |field, _default| self.send field.intern }
+       self
+     end
+
+     def merge(other)
+       # Use myself as prototype or create generic Device?
+       origins = self.origin.nil? ? [] : [@origin]
+
+       unless other.origin.nil?
+         origins.push other.origin
+       end
+
+       merged_data = Hash[@@model.map do |field, default|
+         mine = self.send field.intern
+         theirs = other.send field.intern
+
+         value = if mine.nil?
+                   theirs
+                 elsif theirs.nil?
+                   mine
+                 elsif default.respond_to? :merge
+                   # The default value is a hash
+                   begin
+                     mine.merge theirs
+                   rescue Exception => err
+                     raise RuntimeError, "Error hash-merging field value #{theirs.inspect} " +
+                                         "into #{mine.inspect} for #{self.guessed_identity}: #{err.to_s}"
+                   end
+                 elsif default.respond_to? :push
+                   # The default value is an array
+                   begin
+                     mine.concat theirs.reject { |el| mine.include? el }
+                   rescue Exception => err
+                     raise RuntimeError, "Error concatenating field value #{theirs.inspect} " +
+                                         "onto #{mine.inspect} for #{self.guessed_identity}: #{err.to_s}"
+                   end
+                 else
+                   theirs
+                 end
+         [field, value]
+       end]
+
+       device = Device.new(merged_data)
+       device.origin origins
+       device
+     end
+
+
+     def to_hash()
+       Hash[@@model.map { |field, _default| [field.to_s, self.send(field.intern)] }]
+     end
+   end
+
+ end
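
Note: to make the rules DSL above concrete, here is an invented origin-rules snippet and how Device.using applies it (the field names come from DEFAULT_MODEL; the source-data shape is made up):

    require 'bjn_inventory'

    rules = <<-RULES
      origin 'example-source'
      map name:   jsonpath('$.aliases[0]'),
          region: always('dc2')
      os synonym :type
    RULES

    prototype = BjnInventory::Device.using(rules)
    device    = prototype.new('aliases' => ['web01'], 'type' => 'linux')

    device.name     # => "web01"           (jsonpath rule)
    device.region   # => "dc2"             (always)
    device.os       # => "linux"           (synonym for the type field)
    device.origin   # => "example-source"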