puppet-runner 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.editorconfig +11 -0
- data/.gitignore +25 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +191 -0
- data/README.md +175 -0
- data/Rakefile +2 -0
- data/bin/puppet-runner +349 -0
- data/puppet-runner.gemspec +28 -0
- data/test/configs/defaults/confluence.yaml +20 -0
- data/test/configs/defaults/connector_proxy.yaml +4 -0
- data/test/configs/defaults/jira.yaml +3 -0
- data/test/configs/defaults/mysql.yaml +5 -0
- data/test/configs/hostname1.yaml +28 -0
- data/test/configs/hostname1_facts.yaml +34 -0
- data/test/configs/puppetfile_dictionary.yaml +506 -0
- data/test/configs/templates/confluence.yaml +43 -0
- data/test/configs/templates/connector_proxy.yaml +14 -0
- data/test/configs/templates/jira.yaml +36 -0
- data/test/configs/templates/mysql.yaml +18 -0
- metadata +174 -0
data/bin/puppet-runner
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
1
|
+
#!/usr/bin/env ruby
|
|
2
|
+
# Copyright 2015 Adaptavist.com Ltd.
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
require 'fileutils'
|
|
17
|
+
require 'yaml'
|
|
18
|
+
require 'set'
|
|
19
|
+
require 'pathname'
|
|
20
|
+
require 'deep_merge'
|
|
21
|
+
require 'docopt'
|
|
22
|
+
require 'colorize'
|
|
23
|
+
require 'facter'
|
|
24
|
+
|
|
25
|
+
# Docopt usage/help text. Docopt derives the CLI grammar from this string:
# argument placeholders must be UPPERCASE (or <angle-bracketed>) for docopt
# to treat an option as value-taking. Fixed: the -r description previously
# used lowercase "puppetfile_config", which docopt would not recognize as a
# placeholder (making --puppetfile_config look like a boolean flag), and a
# typo "must ontain" -> "must contain".
doc = <<DOCOPT
Adaptavist puppet runner

Usage:
  puppet-runner (prepare|all) [-c CONFIG_DIR] [-t TEMPLATES] [-d DESTINATION_DIR] [-f FACTS_DEST] [-s SERVERNAME] [-p PUPPET_APPLY] [-r PUPPETFILE_CONFIG] [-o PUPPETFILE_OUTPUT_PATH] [-e EYAML_KEY_PATH]
  puppet-runner start [-p PUPPET_APPLY]
  puppet-runner -h | --help

Options:
  -h --help                                                        Show this screen.
  -s SERVERNAME --servername SERVERNAME                            Custom identification of server, hostname fact if not provided
  -c CONFIG_DIR --config_dir CONFIG_DIR                            Hiera configuration directory, must contain <hostname>.yaml and <hostname>_facts.yaml
  -d DESTINATION_DIR --dest_dir DESTINATION_DIR                    Directory for result hiera config.
  -t TEMPLATES --templates TEMPLATES                               Directory containing templates and defaults folder with functionality templates and default facts
  -f FACTS_DEST --facts_dest_dir FACTS_DEST                        Destination directory to store result facts
  -p PUPPET_APPLY --puppet_apply PUPPET_APPLY                      Custom puppet apply command to run
  -r PUPPETFILE_CONFIG --puppetfile_config PUPPETFILE_CONFIG       Puppetfile composition config file
  -o PUPPETFILE_OUTPUT_PATH --puppetfile_output_path PUPPETFILE_OUTPUT_PATH  Result Puppetfile path
  -e EYAML_KEY_PATH --eyaml_key_path EYAML_KEY_PATH                Path to eyaml encryption key pair
Commands:
  all         Runs the following commands prepare, start
  start       Runs puppet apply
  prepare     Creates result hiera config as a composition of functionalities based on config, merges provided facts with defaults
DOCOPT
|
+
# Join a directory and a file name/glob pattern with a single "/" separator.
# Plain string joining (not File.join) so the pattern is appended verbatim.
def path_join_glob(dir, file_pattern)
  [dir, file_pattern].join("/")
end
|
|
54
|
+
|
|
55
|
+
# Emit an informational message to stdout, colored green (colorize gem).
def debug(msg)
  $stdout.puts(msg.green)
end
|
|
58
|
+
|
|
59
|
+
# Emit a warning message to stdout, colored yellow (colorize gem).
def warning(msg)
  $stdout.puts(msg.yellow)
end
|
|
62
|
+
|
|
63
|
+
# Extract fact values from a defaults hash.
#
# Each entry may be either a plain scalar (used as-is) or a Hash spec of the
# form {"value" => ..., "comment" => ...}, in which case only the "value"
# entry is kept. Returns an Array of single-pair hashes (callers fold it
# with `.reduce(Hash.new, :merge)`), or {} when input is nil/false.
def extract_value_from_hash(input)
  return {} unless input

  input.map do |fact_name, spec|
    fact_value = spec.is_a?(Hash) ? spec["value"] : spec
    { fact_name => fact_value }
  end
end
|
|
77
|
+
|
|
78
|
+
# Extract fact comments from a defaults hash.
#
# Mirrors extract_value_from_hash: a Hash spec contributes its "comment"
# entry; plain scalars and nil entries contribute nil (no comment).
# Returns an Array of single-pair hashes, or {} when input is nil/false.
def extract_comment_from_hash(input)
  return {} unless input

  input.map do |fact_name, spec|
    comment = spec.is_a?(Hash) ? spec["comment"] : nil
    { fact_name => comment }
  end
end
|
|
97
|
+
|
|
98
|
+
# Parse CLI arguments against the docopt usage string above.
# Docopt::Exit is raised for --help or for arguments that do not match the
# usage patterns; in either case abort with the usage message in red.
begin
  options = Docopt::docopt(doc)
rescue Docopt::Exit => e
  abort(e.message.red)
end
|
|
103
|
+
|
|
104
|
+
# Set to true by the prepare phase when required facts are missing, so the
# start phase below skips running puppet apply.
stop_apply = false

# "prepare" phase: compose the host's hiera config from functionality
# templates, merge default facts with host-provided facts, decrypt the
# merged facts, and generate a Puppetfile from declared module dependencies.
if options['all'] || options['prepare']
  # Docopt stores values under the long option name; the short-name lookups
  # are fallbacks. eyaml key path and hostname have built-in defaults.
  input_dir = options["--config_dir"] || options["-c"]
  dest_dir = options["--dest_dir"] || options["-d"]
  facts_dest_dir = options["--facts_dest_dir"] || options["-f"]
  templates = options["--templates"] || options["-t"]
  puppetfile_config_path = options["--puppetfile_config"] || options["-r"]
  puppetfile_output_path = options["--puppetfile_output_path"] || options["-o"]
  eyaml_key_path = options["--eyaml_key_path"] || options["-e"] || "/etc/puppet/config"
  hostname = options["--servername"] || options["-s"] || Facter.value("hostname")
  puts "Hostname #{hostname}"

  # <config_dir>/<hostname>.yaml drives the composition; templates dir holds
  # per-functionality hiera snippets, defaults dir holds their default facts.
  config_file_path = path_join_glob(input_dir, hostname+".yaml")
  templates_dir = path_join_glob(templates, "templates")
  def_facts_dir = path_join_glob(templates, "defaults")

  debug "Reading #{config_file_path}"
  # All inputs must exist before anything is written.
  if File.file? config_file_path and File.directory? templates_dir and File.directory? def_facts_dir and File.directory? dest_dir and File.directory? facts_dest_dir and File.file? puppetfile_config_path
    config = YAML.load_file(config_file_path)
  else
    # Backslash-continued double-quoted string: the message is one line.
    abort "Can not find config file #{config_file_path}. \
or #{templates_dir}. \
or #{def_facts_dir}. \
or #{dest_dir}. \
or #{facts_dest_dir}.".red
  end

  # Hash of ordered groups -> list of functionality names (or name => prefix
  # replacement map); see the example layout in the comment below.
  functionalities = config["functionalities"]

  output_file_path = path_join_glob(dest_dir, "#{hostname}.eyaml")
  output_encrypted_facts_file_path = "/tmp/#{hostname}_facts.eyaml"
  output_facts_file_path = path_join_glob(facts_dest_dir, "#{hostname}_facts.yaml")
  # Remove stale outputs from a previous run.
  if File.file? output_file_path
    FileUtils.rm output_file_path
  end
  if File.file? output_encrypted_facts_file_path
    FileUtils.rm output_encrypted_facts_file_path
  end
  if File.file? output_facts_file_path
    FileUtils.rm output_facts_file_path
  end

  debug "Writing to #{output_file_path}"
  result_template = {}          # deep-merged hiera config from all templates
  result_default_facts = {}     # merged default facts (prefix-substituted)
  prefixed_required_facts = Set.new  # required fact names after prefixing
  prefixed_facts_comments = {}  # fact name -> comment to inject in output yaml
  puppetfile_config = YAML.load_file(puppetfile_config_path) || {}
  puppetfile_dependencies = []  # union of "dependencies" from all templates
  # functionalities:
  #   # In honor of Henry...
  #   1_app:
  #     - confluence: "conf1"
  #     - confluence: "conf2"
  #     - jira
  #   2_database:
  #     - mysql

  # Groups are applied in sorted key order (hence the 0_/1_/2_ prefixes).
  functionalities.keys.sort.each do |key|
    next unless functionalities[key]

    functionalities[key].each do |to_add|
      # A Hash entry means {functionality_name => prefix replacement(s)};
      # a plain entry is just the functionality name.
      if to_add.is_a?(Hash)
        template_to_add = path_join_glob(templates_dir, "#{to_add.keys[0]}.yaml")
        facts_to_add = path_join_glob(def_facts_dir, "#{to_add.keys[0]}.yaml")
      else
        template_to_add = path_join_glob(templates_dir, "#{to_add}.yaml")
        facts_to_add = path_join_glob(def_facts_dir, "#{to_add}.yaml")
      end

      debug "Adding template #{template_to_add}"
      debug "Adding facts #{facts_to_add}"
      if File.file? template_to_add and File.file? facts_to_add
        # prefix is defined, must replace
        data = YAML.load_file(template_to_add) || {}
        default_facts = YAML.load_file(facts_to_add) || {}
        prefixes = data["prefixes"] || []
        required_facts = data["required_facts"] || []
        puppetfile_parts = data["dependencies"] || []
        # merge dependencies (set union, preserves first-seen order)
        puppetfile_dependencies = puppetfile_dependencies | puppetfile_parts

        # Prefix substitution is done textually: serialize the structures
        # with #to_s, gsub the prefixes, then eval the text back into Ruby
        # objects below. NOTE(review): eval of to_s output is fragile and
        # unsafe if templates contain untrusted content — confirm inputs are
        # trusted; a structural walk would be safer.
        data_as_string = data.to_s
        facts_as_string = extract_value_from_hash(default_facts).to_s

        fact_comments_as_string = extract_comment_from_hash(default_facts).to_s

        if to_add.is_a?(Hash)
          # if prefixes are not defined skip replacing
          if prefixes
            # in case of hash, replace each otherwise replace all with prefix
            if to_add.values[0].is_a?(Hash)
              # {prefix_key => replacement} map: substitute only prefixes
              # that the template declares AND the host config maps.
              to_add.values[0].keys.each do |prefix_key|
                prefixes.each do |prefix|
                  if prefix == prefix_key
                    replace_prefixes_with = to_add.values[0][prefix_key]
                    debug "will substiture: #{prefix} with #{replace_prefixes_with}"
                    # Hiera interpolations %{::prefix...} in the template
                    # are repointed at the replacement prefix.
                    data_as_string = data_as_string.gsub(/\%{::#{prefix}/, "\%{::#{replace_prefixes_with}")
                    facts_as_string = facts_as_string.gsub(/#{prefix}/, "#{replace_prefixes_with}")
                    fact_comments_as_string = fact_comments_as_string.gsub(/#{prefix}/, "#{replace_prefixes_with}")
                    # Set#merge mutates in place; map! also rewrites
                    # required_facts for subsequent prefixes.
                    prefixed_required_facts = prefixed_required_facts.merge(required_facts.map! { |item| item.gsub(/#{prefix}/, "#{replace_prefixes_with}") })
                  end
                end
              end

            else
              # Single replacement string: apply it to every declared prefix.
              replace_prefixes_with = to_add.values[0]
              prefixes.each do |prefix|
                debug "will substitute: #{prefix} with #{replace_prefixes_with}"
                data_as_string = data_as_string.gsub(/\%{::#{prefix}/, "\%{::#{replace_prefixes_with}")
                facts_as_string = facts_as_string.gsub(/#{prefix}/, "#{replace_prefixes_with}")
                fact_comments_as_string = fact_comments_as_string.gsub(/#{prefix}/, "#{replace_prefixes_with}")
                prefixed_required_facts = prefixed_required_facts.merge(required_facts.map! { |item| item.gsub(/#{prefix}/, "#{replace_prefixes_with}") })
              end
            end
          end

          # Turn the substituted text back into Ruby structures.
          template = eval (data_as_string)
          default_facts_prefixed = eval (facts_as_string)
          default_fact_comments = eval (fact_comments_as_string)
        else
          # No prefixing requested: use the parsed YAML directly.
          template = YAML.load_file(template_to_add)
          plain_facts = YAML.load_file(facts_to_add)
          default_facts_prefixed = extract_value_from_hash(plain_facts)
          default_fact_comments = extract_comment_from_hash(plain_facts)
          prefixed_required_facts = prefixed_required_facts.merge(required_facts)
        end
        result_template.deep_merge!(template)
        # default_facts_prefixed is Array of hashes as the result of map, this will create hash from it
        result_default_facts.merge!(default_facts_prefixed.reduce Hash.new, :merge)
        prefixed_facts_comments.merge!(default_fact_comments.reduce Hash.new, :merge)
      else
        abort "Can not find template in templates folder #{template_to_add} or #{facts_to_add}".red
      end
    end
  end
  # Write results
  File.open(output_file_path, 'w+') do |output_file|
    YAML.dump(result_template, output_file)
  end
  # Host-provided facts override the template defaults.
  custom_facts_path = path_join_glob(input_dir, "#{hostname}_facts.yaml")
  custom_facts = YAML.load_file(custom_facts_path) || {}
  File.open(output_encrypted_facts_file_path, 'w+') do |output_file|
    output_result_default_facts = result_default_facts.deep_merge!(custom_facts, {:merge_hash_arrays => true}).to_yaml
    # Inject each fact's comment as a "#..." line directly above the fact
    # key in the emitted YAML text.
    prefixed_facts_comments.each do |pattern, replacement|
      if replacement != nil
        output_result_default_facts.gsub!(pattern, "\##{replacement}\n#{pattern}")
      end
    end
    output_file.write(output_result_default_facts)
  end

  # decrypt facts file because Puppet doesn't appear to be able to read encrypted facts
  require 'hiera/backend/eyaml/plugins'
  require 'hiera/backend/eyaml/encryptors/pkcs7'
  require 'hiera/backend/eyaml/subcommands/decrypt'
  require 'hiera/backend/eyaml/options'
  Hiera::Backend::Eyaml::Encryptors::Pkcs7.register
  # NOTE: this reuses (shadows) the docopt `options` local from here on.
  options = {
    :eyaml=>output_encrypted_facts_file_path,
    :pkcs7_public_key =>"#{eyaml_key_path}/public_key.pkcs7.pem",
    :pkcs7_private_key=>"#{eyaml_key_path}/private_key.pkcs7.pem" }
  Hiera::Backend::Eyaml::Options.set(Hiera::Backend::Eyaml::Subcommands::Decrypt.validate options)

  # manually ensure multi-line encrypted values are output correctly into the new decrypted yaml file
  # this is just modifed source from http://www.rubydoc.info/gems/hiera-eyaml/2.0.8/Hiera%2FBackend%2FEyaml%2FSubcommands%2FDecrypt.execute
  File.open(output_facts_file_path, 'w') do |output_file|
    parser = Hiera::Backend::Eyaml::Parser::ParserFactory.encrypted_parser
    tokens = parser.parse(Hiera::Backend::Eyaml::Options[:input_data])
    case Hiera::Backend::Eyaml::Options[:source]
    when :eyaml
      decrypted = tokens.map{ |token|
        decrypted_value = token.to_decrypted
        encryption_indicator = 'DEC::PKCS7['
        four_spaces = '    '
        # Multi-line decrypted values are re-emitted as a YAML literal
        # block scalar ("|") with 4-space indentation.
        multiline_value = if decrypted_value.include? encryption_indicator and decrypted_value.include? "\n" then
          "|\n#{four_spaces}" + decrypted_value.gsub("\n", "\n#{four_spaces}")
        else
          decrypted_value
        end
        # Strip the eyaml DEC::PKCS7[...]! wrapper markers.
        multiline_value.gsub(encryption_indicator, '').gsub(']!', '')
      }
    else
      decrypted = tokens.map{ |token|
        case token.class.name
        when /::EncToken$/
          token.plain_text
        else
          token.match
        end
      }
    end
    output_file.write(decrypted.join)
  end

  # Emit the Puppetfile: one `mod` stanza per collected dependency, using
  # repo/ref details looked up in the puppetfile composition config.
  File.open(puppetfile_output_path, 'w+') do |output_file|
    header = "#!/usr/bin/env ruby\n\n"
    output_file.write(header)
    puppetfile_dependencies.each do |pup|
      dep = puppetfile_config[pup]
      if dep
        res = "mod \"#{dep['name']}\", \n" +
          "  :#{dep['repo_type']} => '#{dep['repo']}',\n" +
          "  :#{dep['ref_type']} => '#{dep['ref_value']}'\n\n"
        output_file.write(res)
      else
        warning "Can not find configuration for module #{pup} in config of puppet modules!"
      end
    end
  end

  # Defaults that resolved to nil are treated as missing required facts.
  nil_value_facts = result_default_facts.select{|key, val| val == nil}

  # Check that all required prefixed facts are present
  if (prefixed_required_facts && !prefixed_required_facts.empty?) or !nil_value_facts.empty?
    not_provided_required_facts = prefixed_required_facts - custom_facts.keys

    if !not_provided_required_facts.empty? or !nil_value_facts.empty?
      # Set#merge mutates in place, adding the nil-valued fact names to the
      # list reported below.
      not_provided_required_facts.merge(nil_value_facts.keys)
      warning "You have to provide all required fields, they will default to empty string and puppet will fail: "
      not_provided_required_facts.each do |f|
        warning "#{f}"
      end
      warning "Puppet apply will not run as it will fail without those facts provided!"
      # Suppress the start phase below.
      stop_apply = true
    end
  end
end
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
# start puppet
# "start" phase: run puppet apply (or a user-supplied command), unless the
# prepare phase flagged missing required facts via stop_apply.
if (options['start'] || options['all']) && !stop_apply
  # Default command sources RVM first so the expected ruby/puppet are used.
  puppet_command = "sudo su -c 'source /usr/local/rvm/scripts/rvm; puppet apply /etc/puppet/manifests/site.pp --confdir=/etc/puppet --verbose --detailed-exitcodes'"
  to_execute = options["--puppet_apply"] || options["-p"] || puppet_command
  debug "Running #{to_execute}"
  `#{to_execute}`
  exit_code = $?.exitstatus
  # With --detailed-exitcodes, 2 means "changes were applied successfully".
  # NOTE(review): exit code 0 (success, no changes) also raises here —
  # confirm that treating a no-op run as failure is intended.
  if exit_code != 2
    raise "execute_puppet exit status: #{exit_code}"
  end
end
|
|
349
|
+
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
# coding: utf-8
# Gem specification for puppet-runner: a preprocessor that composes hiera
# configuration from functionality templates and runs `puppet apply`.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

Gem::Specification.new do |spec|
  spec.name          = "puppet-runner"
  spec.version       = "0.0.13"
  spec.authors       = ["Martin Brehovsky"]
  spec.email         = ["mbrehovsky@adaptavist.com"]
  spec.summary       = %q{Preprocessor for hiera config}
  spec.description   = %q{Loads user config and created result hiera config and executes puppet apply with it.}
  spec.homepage      = "http://www.adaptavist.com"
  spec.license       = "Apache-2.0"

  # Package everything tracked by git; the single executable is bin/puppet-runner.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = ["puppet-runner"]
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
  # Runtime dependencies used by bin/puppet-runner.
  spec.add_dependency "docopt", ">= 0.5.0"
  spec.add_dependency "colorize", ">= 0.7.3"
  spec.add_dependency 'deep_merge'
  spec.add_dependency 'facter'
  spec.add_dependency 'hiera-eyaml'
end
|
|
28
|
+
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
---
# Default facts for the "confluence" functionality template.
# Keys left without a value (host name, license) resolve to nil; the runner
# reports nil-valued facts as required input the host config must provide.

confluence_host_name:
confluence_license:

confluence_instance_name: "confluence"
confluence_version: '5.2.5'
confluence_context_path: ""
confluence_app_shutport: 8012
confluence_app_port: 8090
confluence_admin_user: "admin"
confluence_admin_pass: "administrator"
confluence_admin_full_name: "Admin Admin"
confluence_admin_email: "admin@adaptavist.com"
confluence_database_name: "confluence"
confluence_database_user: "confluence"
confluence_database_pass: "confluence"
# JVM sizing defaults for the Confluence service.
confluence_JVM_MINIMUM_MEMORY: '512m'
confluence_JVM_MAXIMUM_MEMORY: '1024m'
confluence_JVM_MAX_PERM_SIZE: '256m'
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
---
# Test host composition: ordered functionality groups (0_, 1_, ... prefixes
# control apply order). A plain entry uses the template as-is; a quoted
# string value replaces the template's prefixes with that string; a nested
# map replaces each listed prefix individually.

functionalities:
  # In honor of Henry...
  0_global:
    # - baseusers
  1_app:
    - confluence: "conf1_"
    - confluence: "conf2_"
    - jira
  2_connectors:
    - connector_proxy:
        "application_": "conf1_"
        "connector_": "connector1_"
    - connector_proxy:
        "application_": "conf1_"
        "connector_": "connector2_"
    - connector_proxy:
        "application_": "conf2_"
        "connector_": "connector3_"
    - connector_proxy:
        "application_": "jira_"
        "connector_": "connector4_"
  3_database:
    - mysql: "jira1_"
    - mysql: "confluence1_"
    - mysql: "confluence2_"
|
|
28
|
+
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
---
# Host-provided facts for hostname1: two prefixed Confluence instances
# (conf1_, conf2_) matching the prefix replacements in hostname1.yaml.
# These override the template defaults during the prepare phase.
conf1_host_name: "avst-conf1.dyn.adaptavist.com"
conf1_license: "LICENSE1"
conf1_instance_name: "conf1"
conf1_version: '5.2.5'
conf1_context_path: ""
conf1_app_shutport: 8012
conf1_app_port: 8090
conf1_admin_user: "admin"
conf1_admin_pass: "administrator"
conf1_admin_full_name: "Admin Admin"
conf1_admin_email: "admin@adaptavist.com"
conf1_database_name: "confluence1"
conf1_database_user: "confluence1"
conf1_database_pass: "confluence1"
conf1_JVM_MINIMUM_MEMORY: '1024m'
conf1_JVM_MAXIMUM_MEMORY: '2048m'
conf1_JVM_MAX_PERM_SIZE: '512m'


conf2_host_name: "avst-conf2.dyn.adaptavist.com"
conf2_license: "LICENSE2"
conf2_instance_name: "conf2"
conf2_version: '5.2.5'
conf2_context_path: ""
conf2_app_shutport: 8012
conf2_app_port: 8090
conf2_admin_user: "admin"
conf2_admin_pass: "administrator"
conf2_admin_full_name: "Admin Admin"
conf2_admin_email: "admin@adaptavist.com"
conf2_database_name: "confluence2"
conf2_database_user: "confluence2"
conf2_database_pass: "confluence2"
|