dtk-node-agent 0.7.7 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -13
- data/README.md +21 -4
- data/bin/dtk-node-agent +17 -0
- data/lib/config/install.config +2 -2
- data/lib/dtk-node-agent/installer.rb +30 -25
- data/lib/dtk-node-agent/version.rb +18 -1
- metadata +23 -110
- data/mcollective_additions/debian.mcollective.init +0 -92
- data/mcollective_additions/plugins/README.md +0 -1
- data/mcollective_additions/plugins/v1.2/agent/discovery.rb +0 -39
- data/mcollective_additions/plugins/v1.2/agent/get_log_fragment.ddl +0 -15
- data/mcollective_additions/plugins/v1.2/agent/get_log_fragment.rb +0 -79
- data/mcollective_additions/plugins/v1.2/agent/git_access.ddl +0 -9
- data/mcollective_additions/plugins/v1.2/agent/git_access.rb +0 -79
- data/mcollective_additions/plugins/v1.2/agent/netstat.ddl +0 -9
- data/mcollective_additions/plugins/v1.2/agent/netstat.rb +0 -34
- data/mcollective_additions/plugins/v1.2/agent/puppet_apply.ddl +0 -9
- data/mcollective_additions/plugins/v1.2/agent/puppet_apply.rb +0 -630
- data/mcollective_additions/plugins/v1.2/agent/rpcutil.ddl +0 -204
- data/mcollective_additions/plugins/v1.2/agent/rpcutil.rb +0 -101
- data/mcollective_additions/plugins/v1.2/facts/pbuilder_facts.rb +0 -35
- data/mcollective_additions/plugins/v2.2/agent/action_agent.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/agent/action_agent.rb +0 -47
- data/mcollective_additions/plugins/v2.2/agent/dev_manager.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/agent/dev_manager.rb +0 -111
- data/mcollective_additions/plugins/v2.2/agent/discovery.rb +0 -39
- data/mcollective_additions/plugins/v2.2/agent/dtk_node_agent_git_client.rb +0 -94
- data/mcollective_additions/plugins/v2.2/agent/execute_tests.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/agent/execute_tests.rb +0 -111
- data/mcollective_additions/plugins/v2.2/agent/execute_tests_v2.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/agent/execute_tests_v2.rb +0 -131
- data/mcollective_additions/plugins/v2.2/agent/get_log_fragment.ddl +0 -15
- data/mcollective_additions/plugins/v2.2/agent/get_log_fragment.rb +0 -79
- data/mcollective_additions/plugins/v2.2/agent/git_access.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/agent/git_access.rb +0 -61
- data/mcollective_additions/plugins/v2.2/agent/netstat.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/agent/netstat.rb +0 -34
- data/mcollective_additions/plugins/v2.2/agent/ps.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/agent/ps.rb +0 -37
- data/mcollective_additions/plugins/v2.2/agent/puppet_apply.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/agent/puppet_apply.rb +0 -818
- data/mcollective_additions/plugins/v2.2/agent/puppet_cancel.ddl +0 -10
- data/mcollective_additions/plugins/v2.2/agent/puppet_cancel.rb +0 -78
- data/mcollective_additions/plugins/v2.2/agent/rpcutil.ddl +0 -204
- data/mcollective_additions/plugins/v2.2/agent/rpcutil.rb +0 -101
- data/mcollective_additions/plugins/v2.2/agent/ssh_agent.ddl +0 -13
- data/mcollective_additions/plugins/v2.2/agent/ssh_agent.rb +0 -97
- data/mcollective_additions/plugins/v2.2/agent/sync_agent_code.ddl +0 -10
- data/mcollective_additions/plugins/v2.2/agent/sync_agent_code.rb +0 -85
- data/mcollective_additions/plugins/v2.2/agent/tail.ddl +0 -11
- data/mcollective_additions/plugins/v2.2/agent/tail.rb +0 -67
- data/mcollective_additions/plugins/v2.2/audit/logfile.rb +0 -26
- data/mcollective_additions/plugins/v2.2/connector/r8stomp.rb +0 -238
- data/mcollective_additions/plugins/v2.2/connector/stomp.rb +0 -349
- data/mcollective_additions/plugins/v2.2/connector/stomp_em.rb +0 -191
- data/mcollective_additions/plugins/v2.2/data/agent_data.ddl +0 -22
- data/mcollective_additions/plugins/v2.2/data/agent_data.rb +0 -17
- data/mcollective_additions/plugins/v2.2/data/collective_data.ddl +0 -20
- data/mcollective_additions/plugins/v2.2/data/collective_data.rb +0 -9
- data/mcollective_additions/plugins/v2.2/data/fact_data.ddl +0 -28
- data/mcollective_additions/plugins/v2.2/data/fact_data.rb +0 -55
- data/mcollective_additions/plugins/v2.2/data/fstat_data.ddl +0 -89
- data/mcollective_additions/plugins/v2.2/data/fstat_data.rb +0 -56
- data/mcollective_additions/plugins/v2.2/discovery/flatfile.ddl +0 -11
- data/mcollective_additions/plugins/v2.2/discovery/flatfile.rb +0 -48
- data/mcollective_additions/plugins/v2.2/discovery/mc.ddl +0 -11
- data/mcollective_additions/plugins/v2.2/discovery/mc.rb +0 -30
- data/mcollective_additions/plugins/v2.2/discovery/stdin.ddl +0 -11
- data/mcollective_additions/plugins/v2.2/discovery/stdin.rb +0 -66
- data/mcollective_additions/plugins/v2.2/facts/pbuilder_facts.rb +0 -37
- data/mcollective_additions/plugins/v2.2/facts/yaml_facts.rb +0 -61
- data/mcollective_additions/plugins/v2.2/registration/agentlist.rb +0 -10
- data/mcollective_additions/plugins/v2.2/security/sshkey.ddl +0 -9
- data/mcollective_additions/plugins/v2.2/security/sshkey.rb +0 -362
- data/mcollective_additions/plugins/v2.2/util/puppetrunner.rb +0 -36
- data/mcollective_additions/plugins/v2.2/validator/array_validator.ddl +0 -7
- data/mcollective_additions/plugins/v2.2/validator/array_validator.rb +0 -9
- data/mcollective_additions/plugins/v2.2/validator/ipv4address_validator.ddl +0 -7
- data/mcollective_additions/plugins/v2.2/validator/ipv4address_validator.rb +0 -16
- data/mcollective_additions/plugins/v2.2/validator/ipv6address_validator.ddl +0 -7
- data/mcollective_additions/plugins/v2.2/validator/ipv6address_validator.rb +0 -16
- data/mcollective_additions/plugins/v2.2/validator/length_validator.ddl +0 -7
- data/mcollective_additions/plugins/v2.2/validator/length_validator.rb +0 -11
- data/mcollective_additions/plugins/v2.2/validator/regex_validator.ddl +0 -7
- data/mcollective_additions/plugins/v2.2/validator/regex_validator.rb +0 -9
- data/mcollective_additions/plugins/v2.2/validator/shellsafe_validator.ddl +0 -7
- data/mcollective_additions/plugins/v2.2/validator/shellsafe_validator.rb +0 -13
- data/mcollective_additions/plugins/v2.2/validator/typecheck_validator.ddl +0 -7
- data/mcollective_additions/plugins/v2.2/validator/typecheck_validator.rb +0 -28
- data/mcollective_additions/redhat.mcollective.init +0 -139
- data/mcollective_additions/redhat.mcollective.service +0 -14
- data/mcollective_additions/server.cfg +0 -22
- data/src/etc/logrotate.d/mcollective +0 -10
- data/src/etc/mcollective.default +0 -6
|
@@ -1,61 +0,0 @@
|
|
|
1
|
-
module MCollective
|
|
2
|
-
module Agent
|
|
3
|
-
class Git_access < RPC::Agent
|
|
4
|
-
action "add_rsa_info" do
|
|
5
|
-
ssh_folder_path = '/root/.ssh'
|
|
6
|
-
rsa_path = "#{ssh_folder_path}/id_rsa"
|
|
7
|
-
rsa_pub_path = "#{ssh_folder_path}/id_rsa.pub"
|
|
8
|
-
known_hosts = "#{ssh_folder_path}/known_hosts"
|
|
9
|
-
|
|
10
|
-
begin
|
|
11
|
-
# validate request
|
|
12
|
-
validate_request(request)
|
|
13
|
-
|
|
14
|
-
#create private rsa file if needed
|
|
15
|
-
unless donot_create_file?(:private,rsa_path,request[:agent_ssh_key_private])
|
|
16
|
-
File.open(rsa_path,"w",0600){|f|f.print request[:agent_ssh_key_private]}
|
|
17
|
-
end
|
|
18
|
-
|
|
19
|
-
#create public rsa file if needed
|
|
20
|
-
unless donot_create_file?(:public,rsa_pub_path,request[:agent_ssh_key_public])
|
|
21
|
-
File.open(rsa_pub_path,"w"){|f|f.print request[:agent_ssh_key_public]}
|
|
22
|
-
end
|
|
23
|
-
|
|
24
|
-
# add rsa_fingerprint to known hsots; server logic makes sure that is not requested twice so no duplicates
|
|
25
|
-
File.open(known_hosts,"a"){|f|f.print request[:server_ssh_rsa_fingerprint]}
|
|
26
|
-
|
|
27
|
-
reply.data = { :status => :succeeded}
|
|
28
|
-
rescue Exception => e
|
|
29
|
-
reply.data = { :status => :failed, :error => {:message => e.message}}
|
|
30
|
-
end
|
|
31
|
-
end
|
|
32
|
-
|
|
33
|
-
#TODO: move to using mcollective vallidation on ddl
|
|
34
|
-
def validate_request(req)
|
|
35
|
-
required_params = [:agent_ssh_key_public, :agent_ssh_key_private, :server_ssh_rsa_fingerprint]
|
|
36
|
-
missing_params = []
|
|
37
|
-
required_params.each do |param|
|
|
38
|
-
missing_params << param if req[param].nil?
|
|
39
|
-
end
|
|
40
|
-
|
|
41
|
-
unless missing_params.empty?
|
|
42
|
-
raise "Request is missing required param(s): #{missing_params.join(',')} please review your request."
|
|
43
|
-
end
|
|
44
|
-
end
|
|
45
|
-
|
|
46
|
-
def donot_create_file?(type,path,content)
|
|
47
|
-
# raises exception if these files already exists and content differs
|
|
48
|
-
if File.exists?(path)
|
|
49
|
-
existing = File.open(path).read
|
|
50
|
-
if existing == content
|
|
51
|
-
true
|
|
52
|
-
else
|
|
53
|
-
raise "RSA #{type} key already exists and differs from one in payload"
|
|
54
|
-
end
|
|
55
|
-
end
|
|
56
|
-
end
|
|
57
|
-
end
|
|
58
|
-
end
|
|
59
|
-
end
|
|
60
|
-
|
|
61
|
-
|
|
@@ -1,34 +0,0 @@
|
|
|
1
|
-
module MCollective
|
|
2
|
-
module Agent
|
|
3
|
-
class Netstat < RPC::Agent
|
|
4
|
-
metadata :name => "netstat info",
|
|
5
|
-
:description => "Agent to get netstat info",
|
|
6
|
-
:author => "Reactor8",
|
|
7
|
-
:license => "",
|
|
8
|
-
:version => "",
|
|
9
|
-
:url => "",
|
|
10
|
-
:timeout => 2
|
|
11
|
-
action "get_tcp_udp" do
|
|
12
|
-
output = `netstat -nltpu`
|
|
13
|
-
results = output.scan(/(^[a-z0-9]+)\s+(\d)\s+(\d)\s+([a-z0-9:.*]+)\s+([0-9:.*]+)\s+(LISTEN)?\s+([0-9a-zA-Z\/\-: ]+)/m)
|
|
14
|
-
|
|
15
|
-
netstat_result = []
|
|
16
|
-
results.each do |result|
|
|
17
|
-
netstat_packet = {}
|
|
18
|
-
netstat_packet.store(:protocol, result[0])
|
|
19
|
-
netstat_packet.store(:recv_q, result[1])
|
|
20
|
-
netstat_packet.store(:send_q, result[2])
|
|
21
|
-
netstat_packet.store(:local, result[3])
|
|
22
|
-
netstat_packet.store(:foreign, result[4])
|
|
23
|
-
netstat_packet.store(:state, result[5])
|
|
24
|
-
netstat_packet.store(:program, result[6].strip)
|
|
25
|
-
netstat_result << netstat_packet
|
|
26
|
-
end
|
|
27
|
-
|
|
28
|
-
reply[:data] = netstat_result
|
|
29
|
-
reply[:pbuilderid] = Facts["pbuilderid"]
|
|
30
|
-
reply[:status] = :ok
|
|
31
|
-
end
|
|
32
|
-
end
|
|
33
|
-
end
|
|
34
|
-
end
|
|
@@ -1,37 +0,0 @@
|
|
|
1
|
-
module MCollective
|
|
2
|
-
module Agent
|
|
3
|
-
class Ps < RPC::Agent
|
|
4
|
-
metadata :name => "ps info",
|
|
5
|
-
:description => "Agent to get ps info (running processes)",
|
|
6
|
-
:author => "Reactor8",
|
|
7
|
-
:license => "",
|
|
8
|
-
:version => "",
|
|
9
|
-
:url => "",
|
|
10
|
-
:timeout => 2
|
|
11
|
-
action "get_ps" do
|
|
12
|
-
output=`ps -ef`
|
|
13
|
-
output.gsub!(/^.+\]$/,'')
|
|
14
|
-
results = output.scan(/(\S+)[\s].*?(\S+)[\s].*?(\S+)[\s].*?(\S+)[\s].*?(\S+)[\s].*?(\S+)[\s].*?(\S+)[\s].*?(.+)/)
|
|
15
|
-
results.shift
|
|
16
|
-
ps_result = []
|
|
17
|
-
results.each do |result|
|
|
18
|
-
ps_packet = {}
|
|
19
|
-
ps_packet.store(:uid, result[0])
|
|
20
|
-
ps_packet.store(:pid, result[1])
|
|
21
|
-
ps_packet.store(:ppid, result[2])
|
|
22
|
-
ps_packet.store(:cpu, result[3])
|
|
23
|
-
ps_packet.store(:start_time, result[4])
|
|
24
|
-
ps_packet.store(:tty, result[5])
|
|
25
|
-
ps_packet.store(:time, result[6])
|
|
26
|
-
result[7] = (result[7][0...60].strip + '...') if result[7].strip.length > 60
|
|
27
|
-
ps_packet.store(:command, result[7])
|
|
28
|
-
ps_result << ps_packet
|
|
29
|
-
end
|
|
30
|
-
|
|
31
|
-
reply[:data] = ps_result
|
|
32
|
-
reply[:pbuilderid] = Facts["pbuilderid"]
|
|
33
|
-
reply[:status] = :ok
|
|
34
|
-
end
|
|
35
|
-
end
|
|
36
|
-
end
|
|
37
|
-
end
|
|
@@ -1,818 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env ruby
|
|
2
|
-
require 'rubygems'
|
|
3
|
-
require 'puppet'
|
|
4
|
-
require 'grit'
|
|
5
|
-
require 'tempfile'
|
|
6
|
-
require 'fileutils'
|
|
7
|
-
require 'etc'
|
|
8
|
-
require File.expand_path('dtk_node_agent_git_client',File.dirname(__FILE__))
|
|
9
|
-
|
|
10
|
-
#TODO: move to be shared by agents
|
|
11
|
-
PuppetApplyLogDir = "/var/log/puppet"
|
|
12
|
-
ModulePath = "/etc/puppet/modules"
|
|
13
|
-
DTKPuppetCacheBaseDir = "/usr/share/dtk/tasks"
|
|
14
|
-
DTKPuppetModulePath = "/usr/share/dtk/puppet-modules"
|
|
15
|
-
# make sure HOME variable is set correctly
|
|
16
|
-
ENV['HOME'] = Etc.getpwuid.dir
|
|
17
|
-
|
|
18
|
-
module MCollective
|
|
19
|
-
module Agent
|
|
20
|
-
class Puppet_apply < RPC::Agent
|
|
21
|
-
|
|
22
|
-
NUMBER_OF_RETRIES = 5
|
|
23
|
-
|
|
24
|
-
def initialize()
|
|
25
|
-
super()
|
|
26
|
-
@log = Log.instance
|
|
27
|
-
@reply_data = nil
|
|
28
|
-
end
|
|
29
|
-
|
|
30
|
-
def run_action
|
|
31
|
-
#validate :components_with_attributes
|
|
32
|
-
#validate :version_context
|
|
33
|
-
#validate :node_manifest
|
|
34
|
-
#validate :task_id, Fixnum
|
|
35
|
-
#validate :top_task_id, Fixnum
|
|
36
|
-
|
|
37
|
-
log_params()
|
|
38
|
-
@reply_data = nil
|
|
39
|
-
@msg_id = request.uniqid
|
|
40
|
-
@service_name = request[:service_name] || "UNKNOWN"
|
|
41
|
-
@task_info = [:task_id,:top_task_id].inject({}) do |h,k|
|
|
42
|
-
h.merge(k => request[k])
|
|
43
|
-
end.merge(:msg_id => @msg_id)
|
|
44
|
-
|
|
45
|
-
more_generic_response = Response.new()
|
|
46
|
-
puppet_run_response = nil
|
|
47
|
-
begin
|
|
48
|
-
unless git_server = Facts["git-server"]
|
|
49
|
-
raise "git-server is not set in facts"
|
|
50
|
-
end
|
|
51
|
-
response = pull_modules(request[:version_context],git_server)
|
|
52
|
-
return set_reply!(response) if response.failed?()
|
|
53
|
-
puppet_run_response = run(request)
|
|
54
|
-
rescue Exception => e
|
|
55
|
-
more_generic_response.set_status_failed!()
|
|
56
|
-
more_generic_response.merge!(error_info(e))
|
|
57
|
-
end
|
|
58
|
-
set_reply?(puppet_run_response || more_generic_response)
|
|
59
|
-
end
|
|
60
|
-
private
|
|
61
|
-
def pull_modules(version_context,git_server)
|
|
62
|
-
ret = Response.new
|
|
63
|
-
ENV['GIT_SHELL'] = nil #This is put in because if vcsrepo Puppet module used it sets this
|
|
64
|
-
error_backtrace = nil
|
|
65
|
-
begin
|
|
66
|
-
version_context.each do |vc|
|
|
67
|
-
[:repo,:implementation,:branch].each do |field|
|
|
68
|
-
unless vc[field]
|
|
69
|
-
raise "version context does not have :#{field} field"
|
|
70
|
-
end
|
|
71
|
-
end
|
|
72
|
-
|
|
73
|
-
FileUtils.mkdir_p(DTKPuppetModulePath) unless File.directory?(DTKPuppetModulePath)
|
|
74
|
-
|
|
75
|
-
module_name = vc[:implementation]
|
|
76
|
-
puppet_repo_dir = "#{DTKPuppetModulePath}/#{module_name}"
|
|
77
|
-
repo_dir = "#{ModulePath}/#{module_name}"
|
|
78
|
-
remote_repo = git_repo_full_url(git_server, vc[:repo])
|
|
79
|
-
|
|
80
|
-
opts = Hash.new
|
|
81
|
-
opts.merge!(:sha => vc[:sha]) if vc[:sha]
|
|
82
|
-
|
|
83
|
-
clean_and_clone = true
|
|
84
|
-
if File.exists?("#{puppet_repo_dir}/.git")
|
|
85
|
-
pull_err = trap_and_return_error do
|
|
86
|
-
pull_module(puppet_repo_dir, vc[:branch], opts)
|
|
87
|
-
end
|
|
88
|
-
# clean_and_clone set so if pull error then try again, this time cleaning dir and freshly cleaning
|
|
89
|
-
clean_and_clone = !pull_err.nil?
|
|
90
|
-
end
|
|
91
|
-
|
|
92
|
-
if clean_and_clone
|
|
93
|
-
begin
|
|
94
|
-
tries ||= NUMBER_OF_RETRIES
|
|
95
|
-
clean_and_clone_module(puppet_repo_dir, remote_repo,vc[:branch], opts)
|
|
96
|
-
rescue Exception => e
|
|
97
|
-
# to achieve idempotent behavior; fully remove directory if any problems
|
|
98
|
-
FileUtils.rm_rf puppet_repo_dir
|
|
99
|
-
unless (tries -= 1).zero?
|
|
100
|
-
@log.info("Re-trying last command becuase of error: #{e.message}, retries left: #{tries}")
|
|
101
|
-
sleep(1)
|
|
102
|
-
retry
|
|
103
|
-
end
|
|
104
|
-
# TODO: not used now
|
|
105
|
-
error_backtrace = backtrace_subset(e)
|
|
106
|
-
raise e
|
|
107
|
-
end
|
|
108
|
-
end
|
|
109
|
-
|
|
110
|
-
# remove symlink if exist already
|
|
111
|
-
if File.symlink?(repo_dir)
|
|
112
|
-
FileUtils.rm(repo_dir)
|
|
113
|
-
elsif File.directory?(repo_dir)
|
|
114
|
-
FileUtils.rm_r(repo_dir)
|
|
115
|
-
end
|
|
116
|
-
|
|
117
|
-
puppet_dir = "#{DTKPuppetModulePath}/#{module_name}/puppet"
|
|
118
|
-
|
|
119
|
-
if File.directory?(puppet_dir)
|
|
120
|
-
FileUtils.ln_sf(puppet_dir, repo_dir)
|
|
121
|
-
else
|
|
122
|
-
FileUtils.ln_sf("#{DTKPuppetModulePath}/#{module_name}", repo_dir)
|
|
123
|
-
end
|
|
124
|
-
end
|
|
125
|
-
ret.set_status_succeeded!()
|
|
126
|
-
rescue Exception => e
|
|
127
|
-
log_error(e)
|
|
128
|
-
ret.set_status_failed!()
|
|
129
|
-
ret.merge!(error_info(e))
|
|
130
|
-
ensure
|
|
131
|
-
#TODO: may mot be needed now switch to grit
|
|
132
|
-
#git library sets these vars; so reseting here
|
|
133
|
-
%w{GIT_DIR GIT_INDEX_FILE GIT_WORK_TREE}.each{|var|ENV[var]=nil}
|
|
134
|
-
end
|
|
135
|
-
ret
|
|
136
|
-
end
|
|
137
|
-
|
|
138
|
-
#
|
|
139
|
-
# Keep in mind that if we are using default format of git url the name of repo is added after ':' symbol.
|
|
140
|
-
# When using ssh style URL repo name is added after '/'
|
|
141
|
-
def git_repo_full_url(git_url, repo_name)
|
|
142
|
-
"#{git_url}/#{repo_name}"
|
|
143
|
-
end
|
|
144
|
-
|
|
145
|
-
# returns a trapped error
|
|
146
|
-
def trap_and_return_error(&body)
|
|
147
|
-
error = nil
|
|
148
|
-
begin
|
|
149
|
-
yield
|
|
150
|
-
rescue => e
|
|
151
|
-
error = e
|
|
152
|
-
end
|
|
153
|
-
error
|
|
154
|
-
end
|
|
155
|
-
|
|
156
|
-
def pull_module(repo_dir,branch,opts={})
|
|
157
|
-
git_repo = ::DTK::NodeAgent::GitClient.new(repo_dir)
|
|
158
|
-
git_repo.pull_and_checkout_branch?(branch,opts)
|
|
159
|
-
end
|
|
160
|
-
|
|
161
|
-
def clean_and_clone_module(repo_dir,remote_repo,branch,opts={})
|
|
162
|
-
FileUtils.rm_rf repo_dir if File.exists?(repo_dir)
|
|
163
|
-
git_repo = ::DTK::NodeAgent::GitClient.new(repo_dir,:create=>true)
|
|
164
|
-
git_repo.clone_branch(remote_repo,branch,opts)
|
|
165
|
-
end
|
|
166
|
-
|
|
167
|
-
def run(request)
|
|
168
|
-
cmps_with_attrs = request[:components_with_attributes]
|
|
169
|
-
node_manifest = request[:node_manifest]
|
|
170
|
-
inter_node_stage = request[:inter_node_stage]
|
|
171
|
-
puppet_version = request[:puppet_version]
|
|
172
|
-
|
|
173
|
-
if puppet_version
|
|
174
|
-
@log.info("Setting user provided puppet version '#{puppet_version}'")
|
|
175
|
-
puppet_version = "_#{puppet_version}_"
|
|
176
|
-
end
|
|
177
|
-
|
|
178
|
-
# Amar: Added task ID to current thread, so puppet apply can be canceled from puppet_cancel.rb when user requests cancel
|
|
179
|
-
task_id = request[:top_task_id]
|
|
180
|
-
Thread.current[:task_id] = task_id
|
|
181
|
-
clean_state()
|
|
182
|
-
ret = nil
|
|
183
|
-
runtime_errors = nil # in contast to compile errors
|
|
184
|
-
# TODO: harmonize request[:top_task_id] and top_task_id()
|
|
185
|
-
dtk_puppet_cache = DTKPuppetCache.new(@service_name,top_task_id())
|
|
186
|
-
log_file_path = dtk_puppet_cache.log_file_path(inter_node_stage)
|
|
187
|
-
log_file = nil
|
|
188
|
-
begin
|
|
189
|
-
save_stderr = nil
|
|
190
|
-
stderr_capture = nil
|
|
191
|
-
log_file = File.open(log_file_path,"a")
|
|
192
|
-
log_file.close
|
|
193
|
-
Puppet[:autoflush] = true
|
|
194
|
-
most_recent_link = puppet_last_log_link()
|
|
195
|
-
ln_s(log_file_path,most_recent_link)
|
|
196
|
-
|
|
197
|
-
# Amar: Node manifest contains list of generated puppet manifests
|
|
198
|
-
# This is done to support multiple puppet calls inside one puppet_apply agent call
|
|
199
|
-
node_manifest.each_with_index do |puppet_manifest, i|
|
|
200
|
-
execute_lines = puppet_manifest || ret_execute_lines(cmps_with_attrs)
|
|
201
|
-
execute_string = execute_lines.join("\n")
|
|
202
|
-
@log.info("\n----------------execute_string------------\n#{execute_string}\n----------------execute_string------------")
|
|
203
|
-
task_dir = dtk_puppet_cache.task_dir()
|
|
204
|
-
# set the link to last_task
|
|
205
|
-
ln_s(task_dir, dtk_puppet_cache.last_task_link())
|
|
206
|
-
|
|
207
|
-
manifest_path = dtk_puppet_cache.node_manifest_path(inter_node_stage,i+1)
|
|
208
|
-
File.open(manifest_path,"w"){|f| f << execute_string}
|
|
209
|
-
|
|
210
|
-
cmd_line =
|
|
211
|
-
[
|
|
212
|
-
"apply",
|
|
213
|
-
"-l", log_file_path,
|
|
214
|
-
"-d",
|
|
215
|
-
"--report", "true", "--reports", "r8report",
|
|
216
|
-
#"--storeconfigs_backend", "r8_storeconfig_backend",
|
|
217
|
-
"-e", execute_string
|
|
218
|
-
]
|
|
219
|
-
cmd = "/usr/bin/puppet"
|
|
220
|
-
save_stderr = $stderr
|
|
221
|
-
stderr_capture = Tempfile.new("stderr")
|
|
222
|
-
$stderr = stderr_capture
|
|
223
|
-
begin
|
|
224
|
-
Puppet::Node::Environment.clear()
|
|
225
|
-
Thread.current[:known_resource_types] = nil #TODO: when move up to later versions of puupet think can remove because Puppet::Node::Environment.clear() does this
|
|
226
|
-
Puppet::Util::CommandLine.new(cmd,cmd_line).execute
|
|
227
|
-
rescue SystemExit => exit
|
|
228
|
-
report_status = Report::get_status()
|
|
229
|
-
report_info = Report::get_report_info()
|
|
230
|
-
# For multiple puppet calls, if one fails, rest will not get executed
|
|
231
|
-
raise exit if report_status == :failed || report_info[:errors] || (i == node_manifest.size - 1)
|
|
232
|
-
end
|
|
233
|
-
end
|
|
234
|
-
rescue SystemExit => exit
|
|
235
|
-
report_status = Report::get_status()
|
|
236
|
-
report_info = Report::get_report_info()
|
|
237
|
-
exit_status = exit.status
|
|
238
|
-
@log.info("exit.status = #{exit_status}")
|
|
239
|
-
@log.info("report_status = #{report_status}")
|
|
240
|
-
@log.info("report_info = #{report_info.inspect}")
|
|
241
|
-
return_code = ((report_status == :failed || report_info[:errors]) ? 1 : exit_status)
|
|
242
|
-
ret ||= Response.new()
|
|
243
|
-
if return_code == 0
|
|
244
|
-
if dynamic_attr_info = has_dynamic_attributes?(cmps_with_attrs)
|
|
245
|
-
@log.info("dynamic_attributes = #{dynamic_attr_info.inspect}")
|
|
246
|
-
process_dynamic_attributes!(ret,dynamic_attr_info)
|
|
247
|
-
else
|
|
248
|
-
ret.set_status_succeeded!()
|
|
249
|
-
end
|
|
250
|
-
else
|
|
251
|
-
ret.set_status_failed!()
|
|
252
|
-
error_info = {
|
|
253
|
-
:return_code => return_code
|
|
254
|
-
}
|
|
255
|
-
if runtime_errors = (report_info||{})[:errors]
|
|
256
|
-
error_info[:errors] = runtime_errors.map{|e|e.merge(:type => "user_error")}
|
|
257
|
-
end
|
|
258
|
-
ret.merge!(error_info)
|
|
259
|
-
end
|
|
260
|
-
rescue Exception => e
|
|
261
|
-
log_error(e)
|
|
262
|
-
ret ||= Response.new()
|
|
263
|
-
ret.set_status_failed!()
|
|
264
|
-
ret.merge!(error_info(e))
|
|
265
|
-
ensure
|
|
266
|
-
# Amar: If puppet_apply thread was killed from puppet_cancel, ':is_canceled' flag is set on the thread,
|
|
267
|
-
# so puppet_apply can send status canceled in the response
|
|
268
|
-
ret ||= Response.new()
|
|
269
|
-
if Thread.current[:is_canceled]
|
|
270
|
-
@log.info("Setting cancel status...")
|
|
271
|
-
ret.set_status_canceled!()
|
|
272
|
-
return set_reply!(ret)
|
|
273
|
-
end
|
|
274
|
-
if save_stderr #test if this is nil as to whether did the stderr swap
|
|
275
|
-
$stderr = save_stderr
|
|
276
|
-
stderr_capture.rewind
|
|
277
|
-
stderr_msg = stderr_capture.read
|
|
278
|
-
stderr_capture.close
|
|
279
|
-
stderr_capture.unlink
|
|
280
|
-
# dont look for compile errors if runtime errors
|
|
281
|
-
unless runtime_errors
|
|
282
|
-
if err_message = compile_error_message?(return_code,stderr_msg,log_file_path)
|
|
283
|
-
ret[:errors] = [{:message => err_message, :type => "user_error" }]
|
|
284
|
-
ret.set_status_failed!()
|
|
285
|
-
Puppet::err stderr_msg
|
|
286
|
-
Puppet::info "(end)"
|
|
287
|
-
end
|
|
288
|
-
end
|
|
289
|
-
end
|
|
290
|
-
Puppet::Util::Log.close_all()
|
|
291
|
-
end
|
|
292
|
-
@log.info("Debuging response from mcollective (RESPONSE):")
|
|
293
|
-
@log.info(ret)
|
|
294
|
-
ret
|
|
295
|
-
end
|
|
296
|
-
|
|
297
|
-
def compile_error_message?(return_code,stderr_msg,log_file_path)
|
|
298
|
-
if stderr_msg and not stderr_msg.empty?
|
|
299
|
-
stderr_msg
|
|
300
|
-
elsif return_code != 0
|
|
301
|
-
rest_reverse = Array.new
|
|
302
|
-
error = nil
|
|
303
|
-
begin
|
|
304
|
-
File.open(log_file_path).read.split("\n").reverse_each do |line|
|
|
305
|
-
if line =~ /^.+Puppet \(err\):\s*(.+$)/
|
|
306
|
-
error = $1
|
|
307
|
-
break
|
|
308
|
-
else
|
|
309
|
-
rest_reverse << line
|
|
310
|
-
end
|
|
311
|
-
end
|
|
312
|
-
rescue
|
|
313
|
-
end
|
|
314
|
-
([error || 'Puppet catalog compile error'] + rest_reverse.reverse).join("\n")
|
|
315
|
-
end
|
|
316
|
-
end
|
|
317
|
-
|
|
318
|
-
def backtrace_subset(e)
|
|
319
|
-
e.backtrace[0..10]
|
|
320
|
-
end
|
|
321
|
-
|
|
322
|
-
def log_error(e)
|
|
323
|
-
log_error = ([e.inspect]+backtrace_subset(e)).join("\n")
|
|
324
|
-
@log.info("\n----------------error-----\n#{log_error}\n----------------error-----")
|
|
325
|
-
end
|
|
326
|
-
|
|
327
|
-
def error_info(e,backtrace=nil)
|
|
328
|
-
{
|
|
329
|
-
:error => {
|
|
330
|
-
:message => e.inspect,
|
|
331
|
-
:backtrace => backtrace||backtrace_subset(e)
|
|
332
|
-
}
|
|
333
|
-
}
|
|
334
|
-
end
|
|
335
|
-
|
|
336
|
-
#TODO: cleanup fn; need to fix on serevr side; inconsient use of symbol and string keys
|
|
337
|
-
#execute_lines
|
|
338
|
-
def ret_execute_lines(cmps_with_attrs)
|
|
339
|
-
ret = Array.new
|
|
340
|
-
@import_statement_modules = Array.new
|
|
341
|
-
cmps_with_attrs.each_with_index do |cmp_with_attrs,i|
|
|
342
|
-
stage = i+1
|
|
343
|
-
module_name = cmp_with_attrs["module_name"]
|
|
344
|
-
ret << "stage{#{quote_form(stage)} :}"
|
|
345
|
-
attrs = process_and_return_attr_name_val_pairs(cmp_with_attrs)
|
|
346
|
-
stage_assign = "stage => #{quote_form(stage)}"
|
|
347
|
-
case cmp_with_attrs["component_type"]
|
|
348
|
-
when "class"
|
|
349
|
-
cmp = cmp_with_attrs["name"]
|
|
350
|
-
raise "No component name" unless cmp
|
|
351
|
-
if imp_stmt = needs_import_statement?(cmp,module_name)
|
|
352
|
-
ret << imp_stmt
|
|
353
|
-
end
|
|
354
|
-
|
|
355
|
-
#TODO: see if need \" and quote form
|
|
356
|
-
attr_str_array = attrs.map{|k,v|"#{k} => #{process_val(v)}"} + [stage_assign]
|
|
357
|
-
attr_str = attr_str_array.join(", ")
|
|
358
|
-
ret << "class {\"#{cmp}\": #{attr_str}}"
|
|
359
|
-
when "definition"
|
|
360
|
-
defn = cmp_with_attrs["name"]
|
|
361
|
-
raise "No definition name" unless defn
|
|
362
|
-
name_attr = nil
|
|
363
|
-
attr_str_array = attrs.map do |k,v|
|
|
364
|
-
if k == "name"
|
|
365
|
-
name_attr = quote_form(v)
|
|
366
|
-
nil
|
|
367
|
-
else
|
|
368
|
-
"#{k} => #{process_val(v)}"
|
|
369
|
-
end
|
|
370
|
-
end.compact
|
|
371
|
-
attr_str = attr_str_array.join(", ")
|
|
372
|
-
raise "No name attribute for definition" unless name_attr
|
|
373
|
-
if imp_stmt = needs_import_statement?(defn,module_name)
|
|
374
|
-
ret << imp_stmt
|
|
375
|
-
end
|
|
376
|
-
#putting def in class because defs cannot go in stages
|
|
377
|
-
class_wrapper = "stage#{stage.to_s}"
|
|
378
|
-
ret << "class #{class_wrapper} {"
|
|
379
|
-
ret << "#{defn} {#{name_attr}: #{attr_str}}"
|
|
380
|
-
ret << "}"
|
|
381
|
-
ret << "class {\"#{class_wrapper}\": #{stage_assign}}"
|
|
382
|
-
end
|
|
383
|
-
end
|
|
384
|
-
size = cmps_with_attrs.size
|
|
385
|
-
if size > 1
|
|
386
|
-
ordering_statement = (1..cmps_with_attrs.size).map{|s|"Stage[#{s.to_s}]"}.join(" -> ")
|
|
387
|
-
ret << ordering_statement
|
|
388
|
-
end
|
|
389
|
-
|
|
390
|
-
if attr_val_stmts = get_attr_val_statements(cmps_with_attrs)
|
|
391
|
-
ret += attr_val_stmts
|
|
392
|
-
end
|
|
393
|
-
ret
|
|
394
|
-
end
|
|
395
|
-
|
|
396
|
-
#removes imported collections and puts them on global array
|
|
397
|
-
def process_and_return_attr_name_val_pairs(cmp_with_attrs)
|
|
398
|
-
ret = Hash.new
|
|
399
|
-
return ret unless attrs = cmp_with_attrs["attributes"]
|
|
400
|
-
cmp_name = cmp_with_attrs["name"]
|
|
401
|
-
attrs.each do |attr_info|
|
|
402
|
-
attr_name = attr_info["name"]
|
|
403
|
-
val = attr_info["value"]
|
|
404
|
-
case attr_info["type"]
|
|
405
|
-
when "attribute"
|
|
406
|
-
ret[attr_name] = val
|
|
407
|
-
when "imported_collection"
|
|
408
|
-
add_imported_collection(cmp_name,attr_name,val,{"resource_type" => attr_info["resource_type"], "import_coll_query" => attr_info["import_coll_query"]})
|
|
409
|
-
else raise "unexpected attribute type (#{attr_info["type"]})"
|
|
410
|
-
end
|
|
411
|
-
end
|
|
412
|
-
ret
|
|
413
|
-
end
|
|
414
|
-
|
|
415
|
-
def get_attr_val_statements(cmps_with_attrs)
|
|
416
|
-
ret = Array.new
|
|
417
|
-
cmps_with_attrs.each do |cmp_with_attrs|
|
|
418
|
-
(cmp_with_attrs["dynamic_attributes"]||[]).each do |dyn_attr|
|
|
419
|
-
if dyn_attr[:type] == "default_variable"
|
|
420
|
-
qualified_var = "#{cmp_with_attrs["name"]}::#{dyn_attr[:name]}"
|
|
421
|
-
ret << "r8::export_variable{'#{qualified_var}' :}"
|
|
422
|
-
end
|
|
423
|
-
end
|
|
424
|
-
end
|
|
425
|
-
ret.empty? ? nil : ret
|
|
426
|
-
end
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
def needs_import_statement?(cmp_or_def,module_name)
|
|
430
|
-
return nil if cmp_or_def =~ /::/
|
|
431
|
-
return nil if @import_statement_modules.include?(module_name)
|
|
432
|
-
@import_statement_modules << module_name
|
|
433
|
-
"import '#{module_name}'"
|
|
434
|
-
end
|
|
435
|
-
|
|
436
|
-
def process_val(val)
|
|
437
|
-
#a guarded val
|
|
438
|
-
if val.kind_of?(Hash) and val.size == 1 and val.keys.first == "__ref"
|
|
439
|
-
"$#{val.values.join("::")}"
|
|
440
|
-
else
|
|
441
|
-
quote_form(val)
|
|
442
|
-
end
|
|
443
|
-
end
|
|
444
|
-
|
|
445
|
-
def has_dynamic_attributes?(cmps_with_attrs)
|
|
446
|
-
ret = cmps_with_attrs.map do |cmp_with_attrs|
|
|
447
|
-
dyn_attrs = cmp_with_attrs["dynamic_attributes"]||[]
|
|
448
|
-
if !dyn_attrs.empty?
|
|
449
|
-
{
|
|
450
|
-
:cmp_ref => component_ref(cmp_with_attrs),
|
|
451
|
-
:dynamic_attrs => dyn_attrs
|
|
452
|
-
}
|
|
453
|
-
end
|
|
454
|
-
end.compact
|
|
455
|
-
!ret.empty? && ret
|
|
456
|
-
end
|
|
457
|
-
|
|
458
|
-
def process_dynamic_attributes!(ret,dynamic_attr_info)
|
|
459
|
-
dyn_attr_assigns = Array.new
|
|
460
|
-
missing_dyn_attrs = Array.new
|
|
461
|
-
dynamic_attr_info.each do |info|
|
|
462
|
-
cmp_ref = info[:cmp_ref]
|
|
463
|
-
info[:dynamic_attrs].each do |dyn_attr|
|
|
464
|
-
if dyn_attr_assign = dynamic_attr_response_el(cmp_ref,dyn_attr)
|
|
465
|
-
dyn_attr_assigns << dyn_attr_assign
|
|
466
|
-
else
|
|
467
|
-
missing_attr = {
|
|
468
|
-
:cmp_ref => cmp_ref,
|
|
469
|
-
:attr => dyn_attr[:name]
|
|
470
|
-
}
|
|
471
|
-
missing_dyn_attrs << missing_attr
|
|
472
|
-
end
|
|
473
|
-
end
|
|
474
|
-
end
|
|
475
|
-
if missing_dyn_attrs.empty?
|
|
476
|
-
ret.set_dynamic_attributes!(dyn_attr_assigns)
|
|
477
|
-
ret.set_status_succeeded!()
|
|
478
|
-
else
|
|
479
|
-
set_error_missing_dynamic_attrs!(ret,missing_dyn_attrs)
|
|
480
|
-
ret.set_status_failed!()
|
|
481
|
-
end
|
|
482
|
-
end
|
|
483
|
-
|
|
484
|
-
def set_error_missing_dynamic_attrs!(ret,missing_dyn_attrs)
|
|
485
|
-
errors = missing_dyn_attrs.map do |info|
|
|
486
|
-
err_message = "Dynamic Attribute (#{info[:attr]}) is not set by component (#{info[:cmp_ref]})"
|
|
487
|
-
{
|
|
488
|
-
:message => err_message,
|
|
489
|
-
:type => "user_error"
|
|
490
|
-
}
|
|
491
|
-
end
|
|
492
|
-
error_info = {
|
|
493
|
-
:return_code => 1,
|
|
494
|
-
:errors => errors
|
|
495
|
-
}
|
|
496
|
-
ret.merge!(error_info)
|
|
497
|
-
end
|
|
498
|
-
|
|
499
|
-
# Looks up the runtime value for one dynamic attribute and, when found, wraps
# it in the response-element shape the server expects; returns nil otherwise.
# Lookup strategy depends on dyn_attr[:type]:
#   "exported_resource" -> exported puppet resources
#   "default_variable"  -> exported variables
#   anything else       -> exported variables first, then the file-based
#                          "dynamic" fallback (the only other expected type)
def dynamic_attr_response_el(cmp_name,dyn_attr)
  val =
    case dyn_attr[:type]
    when "exported_resource"
      dynamic_attr_response_el__exported_resource(cmp_name,dyn_attr)
    when "default_variable"
      dynamic_attr_response_el__default_attribute(cmp_name,dyn_attr)
    else
      dynamic_attr_response_el__default_attribute(cmp_name,dyn_attr) ||
        dynamic_attr_response_el__dynamic(cmp_name,dyn_attr)
    end
  return nil unless val
  {
    :component_name => cmp_name,
    :attribute_name => dyn_attr[:name],
    :attribute_id => dyn_attr[:id],
    :attribute_val => val
  }
end
|
|
519
|
-
|
|
520
|
-
# Scans the resources exported by cmp_name for one whose title matches the
# (possibly variable-bearing) title pattern and returns its value.
# Returns nil when the component exported no resources or nothing matched.
def dynamic_attr_response_el__exported_resource(cmp_name,dyn_attr)
  cmp_exp_rscs = exported_resources(cmp_name)
  unless cmp_exp_rscs
    @log.info("no exported resources set for component #{cmp_name}")
    return nil
  end
  cmp_exp_rscs.each do |title,val|
    return val if exp_rsc_match(title,dyn_attr[:title_with_vars])
  end
  nil
end
|
|
531
|
-
|
|
532
|
-
# TODO: a more sophisticated matcher would take variable bindings into account.
# Whether an exported-resource title matches the title pattern: the pattern is
# translated to a regexp source (variables become wildcards) and anchored over
# the whole title. Returns a match index (truthy) or nil.
def exp_rsc_match(title,title_with_vars)
  regexp_str = regexp_string(title_with_vars)
  @log.info("debug: regexp_str = #{regexp_str}")
  return nil unless regexp_str
  title =~ Regexp.new("^#{regexp_str}$")
end
|
|
538
|
-
|
|
539
|
-
# Builds a regexp source string from a title pattern.
# A plain string is treated as a literal (only dots are escaped, matching the
# original behavior); array forms are ["variable", ...] -> ".+" wildcard and
# ["fn", "concat", ...] -> concatenation of translated parts.
# Returns nil for array forms that cannot be translated.
def regexp_string(title_with_vars)
  unless title_with_vars.kind_of?(Array)
    return title_with_vars.gsub(".","\\.")
  end
  case title_with_vars.first
  when "variable" then ".+"
  when "fn" then regexp_string__when_op(title_with_vars)
  else
    @log.info("unexpected first element in title with vars (#{title_with_vars.first})")
    nil
  end
end
|
|
552
|
-
|
|
553
|
-
# Translates a ["fn","concat",part,...] pattern into a regexp source string by
# concatenating the translation of each part. Only "concat" is supported;
# returns nil for other operations or when any part fails to translate.
def regexp_string__when_op(title_with_vars)
  unless title_with_vars[1] == "concat"
    @log.info("not treating operation (#{title_with_vars[1]})")
    return nil
  end
  pieces = []
  title_with_vars[2..-1].each do |part|
    re = regexp_string(part)
    return nil unless re
    pieces << re
  end
  pieces.join("")
end
|
|
563
|
-
|
|
564
|
-
# Reads a dynamically produced attribute value from the file the component
# wrote during the run. The path comes from the exported-files registry,
# falling back to the legacy /tmp/<cmp>.<attr> location.
# Returns the chomped file content, or nil when the file is missing,
# unreadable, or empty (best-effort by design).
def dynamic_attr_response_el__dynamic(cmp_name,dyn_attr)
  ret = nil
  attr_name = dyn_attr[:name]
  filepath = (exported_files(cmp_name)||{})[attr_name]
  #TODO; legacy; remove when deprecate
  filepath ||= "/tmp/#{cmp_name.gsub(/::/,".")}.#{attr_name}"
  begin
    val = File.open(filepath){|f|f.read}.chomp
    ret = val unless val.empty?
  rescue StandardError
    # deliberate swallow: absent/unreadable file just yields no value.
    # Narrowed from `rescue Exception`, which also swallowed signals and exit.
  end
  ret
end
|
|
577
|
-
|
|
578
|
-
# Fetches the value the component exported under the attribute's name via the
# thread-local exported-variables registry. Returns nil (with a log line)
# when the component exported nothing or has no entry for this attribute.
def dynamic_attr_response_el__default_attribute(cmp_name,dyn_attr)
  cmp_exp_vars = exported_variables(cmp_name)
  unless cmp_exp_vars
    @log.info("no exported varaibles for component #{cmp_name}")
    return nil
  end
  attr_name = dyn_attr[:name]
  unless cmp_exp_vars.has_key?(attr_name)
    @log.info("no exported variable entry for component #{cmp_name}, attribute #{dyn_attr[:name]})")
    return nil
  end
  cmp_exp_vars[attr_name]
end
|
|
593
|
-
|
|
594
|
-
# Clears the per-thread scratch state left behind by a previous puppet run so
# the next invocation on this thread starts fresh.
def clean_state()
  current = Thread.current
  [:exported_resources, :exported_variables, :report_status, :imported_collections].each do |key|
    current[key] = nil if current.keys.include?(key)
  end
end
|
|
599
|
-
# Resources exported by cmp_name during this run (thread-local); nil if none.
def exported_resources(cmp_name)
  all = Thread.current[:exported_resources] || {}
  all[cmp_name]
end
|
|
602
|
-
# Variables exported by cmp_name during this run (thread-local); nil if none.
def exported_variables(cmp_name)
  all = Thread.current[:exported_variables] || {}
  all[cmp_name]
end
|
|
605
|
-
# Files exported by cmp_name during this run (thread-local); nil if none.
def exported_files(cmp_name)
  all = Thread.current[:exported_files] || {}
  all[cmp_name]
end
|
|
608
|
-
# Stores a collection imported for cmp_name/attr_name in thread-local state,
# merging any extra context into the {"value" => val} record.
def add_imported_collection(cmp_name,attr_name,val,context={})
  collections = Thread.current[:imported_collections] ||= Hash.new
  per_cmp = collections[cmp_name] ||= Hash.new
  per_cmp[attr_name] = {"value" => val}.merge(context)
end
|
|
612
|
-
|
|
613
|
-
# Canonical puppet reference for a component: the bare name for "class"
# components, or name[name-attribute-value] for "definition" components.
# Raises when a definition lacks a "name" attribute or the type is unknown.
def component_ref(cmp_with_attrs)
  type = cmp_with_attrs["component_type"]
  if type == "class"
    cmp_with_attrs["name"]
  elsif type == "definition"
    defn = cmp_with_attrs["name"]
    name_attr = cmp_with_attrs["attributes"].find{|attr|attr["name"] == "name"}
    raise "Cannot find the name associated with definition #{defn}" unless name_attr
    "#{cmp_with_attrs["name"]}[#{name_attr["value"]}]"
  else
    raise "Reference to type #{cmp_with_attrs["component_type"]} not treated"
  end
end
|
|
627
|
-
|
|
628
|
-
# Puppet resource-name capitalization: "foo::bar_baz" -> "Foo::Bar_baz".
def self.capitalize_resource_name(name)
  parts = name.split('::')
  parts.map{|part| part.capitalize}.join("::")
end
|
|
631
|
-
# Instance-level convenience that delegates to the class-level helper.
def capitalize_resource_name(name)
  self.class.capitalize_resource_name(name)
end
|
|
634
|
-
|
|
635
|
-
# Name of the puppet definition used to set dynamic variables, plus its
# capitalized resource-reference form.
DynamicVarDefName = "r8_dynamic_vars::set_var"
DynamicVarDefNameRN = capitalize_resource_name(DynamicVarDefName)
|
|
637
|
-
|
|
638
|
-
# Renders a ruby object as puppet-manifest source text: hashes and arrays
# recurse, strings are double-quoted, nil becomes the literal "nil",
# everything else falls back to to_s.
def quote_form(obj)
  case obj
  when Hash
    pairs = obj.map{|k,v| "#{quote_form(k)} => #{quote_form(v)}"}
    "{#{pairs.join(",")}}"
  when Array
    "[#{obj.map{|el| quote_form(el)}.join(",")}]"
  when String
    "\"#{obj}\""
  when nil
    "nil"
  else
    obj.to_s
  end
end
|
|
651
|
-
|
|
652
|
-
# Publishes the response as the MCollective reply payload, caching it in
# @reply_data (unconditionally overwrites any earlier reply).
def set_reply!(response)
  reply.data = @reply_data = response.to_hash
end
|
|
655
|
-
# Like set_reply!, but keeps an already-cached @reply_data instead of
# overwriting it; reply.data is (re)assigned to the cached hash either way.
def set_reply?(response)
  reply.data = @reply_data ||= response.to_hash
end
|
|
658
|
-
# Logs the raw MCollective request parameters for debugging.
def log_params()
  @log.info("params: #{request.data.inspect}")
end
|
|
661
|
-
|
|
662
|
-
# Path of the symlink pointing at the most recent puppet-apply log
# (PuppetApplyLogDir is defined elsewhere in this file).
def puppet_last_log_link()
  "#{PuppetApplyLogDir}/last.log"
end
|
|
665
|
-
# Compact identification string built from whichever of msg_id, task_id and
# top_task_id are present in @task_info, e.g. "msg_id:1:task_id:2".
def id_info()
  parts = []
  [:msg_id,:task_id,:top_task_id].each do |key|
    parts << "#{key}:#{@task_info[key].to_s}" if @task_info.has_key?(key)
  end
  parts.join(":")
end
|
|
672
|
-
# Identifier used to name per-task artifacts: prefers top_task_id, falls back
# to task_id, then to the literal 'task'.
def top_task_id()
  id = @task_info[:top_task_id] || @task_info[:task_id] || 'task'
  "task_id_#{id}"
end
|
|
675
|
-
|
|
676
|
-
# Force-creates symlink `link` pointing at `target`, replacing any existing
# file or (possibly dangling) symlink at that path.
def ln_s(target,link)
  # File.exist? replaces File.exists?, which was removed in Ruby 3.2.
  # The symlink? check also cleans up dangling links the follow-through
  # exist? test would miss.
  File.delete(link) if File.exist?(link) || File.symlink?(link)
  FileUtils.ln_s(target,link,:force => true)
end
|
|
680
|
-
|
|
681
|
-
# Filesystem layout helper for cached per-task puppet artifacts (generated
# manifests and logs), kept under DTKPuppetCacheBaseDir/<service>/<task>.
class DTKPuppetCache
  BaseDir = DTKPuppetCacheBaseDir
  def initialize(service_name,top_task_id)
    @service_name = service_name
    @top_task_id = top_task_id
  end

  # Directory holding this task's artifacts; created on first access.
  def task_dir()
    @task_dir ||= mkdir_p("#{base_dir()}/#{@service_name}/#{@top_task_id}")
  end

  # Puppet log written during the given stage.
  def log_file_path(stage)
    "#{task_dir()}/stage-#{stage}-puppet.log"
  end
  # Generated site manifest for a stage/invocation pair.
  def node_manifest_path(stage,invocation)
    "#{task_dir()}/site-stage-#{stage}-invocation-#{invocation}.pp"
  end

  # Symlink expected to point at the most recent task directory.
  def last_task_link()
    "#{base_dir()}/last-task"
  end

  private
  # Root cache directory; created on first access.
  def base_dir()
    @base_dir ||= mkdir_p(BaseDir)
  end

  # mkdir -p that returns the path (FileUtils.mkdir_p returns an array).
  def mkdir_p(dir_path)
    FileUtils.mkdir_p(dir_path)
    dir_path
  end
end
|
|
713
|
-
|
|
714
|
-
#TODO: this should be common across Agents
# Hash-based response envelope exchanged with the server. Tracks an overall
# :status (:unknown / :succeeded / :failed / :canceled) alongside arbitrary
# payload keys.
class Response < Hash
  # Seeds the response from `hash`; status defaults to :unknown when the
  # caller did not supply one.
  def initialize(hash={})
    super()
    merge!(hash)
    self[:status] = :unknown unless hash.has_key?(:status)
  end

  # Plain-Hash copy (reply payloads must be plain data, not a subclass).
  def to_hash()
    {}.merge(self)
  end

  def failed?()
    self[:status] == :failed
  end

  def set_status_failed!()
    self[:status] = :failed
  end

  def set_status_succeeded!()
    self[:status] = :succeeded
  end

  def set_status_canceled!()
    self[:status] = :canceled
  end

  # Attaches resolved dynamic-attribute assignments to the payload.
  def set_dynamic_attributes!(dynamic_attributes)
    self[:dynamic_attributes] = dynamic_attributes
  end
end
|
|
743
|
-
# Response pre-marked :failed, carrying the error payload under :error.
class ResponseFailed < Response
  def initialize(error,info={})
    super({:status => :failed, :error => error}.merge(info))
  end
end
|
|
748
|
-
# Response pre-marked :succeeded, with any extra payload merged in.
class ResponseSucceeded < Response
  def initialize(info={})
    super({:status => :succeeded}.merge(info))
  end
end
|
|
753
|
-
end
|
|
754
|
-
end
|
|
755
|
-
|
|
756
|
-
# Thread-local channel through which the puppet report processor (the
# :r8report handler below) hands the run status and error details back to
# the agent after a puppet apply.
class Report
  def self.set_status(status)
    Thread.current[:report_status] = status.to_sym
  end

  # Last reported status; :failed when no report was processed on this thread.
  def self.get_status()
    Thread.current[:report_status] || :failed
  end

  def self.set_report_info(report_info)
    Thread.current[:report_info] = report_info
  end

  # Last reported details; empty hash when none were recorded.
  def self.get_report_info()
    Thread.current[:report_info] || {}
  end
end
|
|
770
|
-
end
|
|
771
|
-
|
|
772
|
-
#below is more complicated to allow reloading
# NOTE(fix): Module#constants returns Symbols on ruby >= 1.9, so the original
# `constants.include?('R8report')` String comparison never matched and the
# stale report module was never removed on reload; const_defined? accepts a
# String on all supported rubies.
if Puppet::Reports.const_defined?('R8report')
  Puppet::Reports.send(:remove_const,:R8report)
end
|
|
776
|
-
#TODO: needed to pass {:overwrite => true} to Puppet::Reports.genmodule so expanded def Puppet::Reports.register_report(:r8report)
# Local re-implementation of Puppet::Reports.register_report that tolerates
# re-registration (:overwrite => true), so this file can be reloaded without
# Puppet complaining about the already-generated report module.
def register_report(name,&block)
  name = name.intern
  mod = Puppet::Reports.genmodule(name, :overwrite=> true,:extend => Puppet::Util::Docs, :hash => Puppet::Reports.instance_hash(:report), :block => block)
  # Each report module must answer report_name with its registered name.
  mod.send(:define_method, :report_name) do
    name
  end
end
|
|
784
|
-
# Report processor puppet invokes after each run: forwards the run status and
# any :err-level log entries to the agent via MCollective::Report's
# thread-local state (see the Report class above in the original file).
register_report(:r8report) do
  desc "report for R8 agent"

  def process
    MCollective::Report.set_status(status)
    report_info = Hash.new
    # Only error-level log lines are shipped back to the server.
    errors = logs.select{|log_el|log_el.level == :err}
    unless errors.empty?
      report_info[:errors] = errors.map do |err|
        {
          "message" => err.message,
          "source" => err.source,
          "tags" => err.tags,
          "time" => err.time
        }
      end
    end
    MCollective::Report.set_report_info(report_info)
    self
  end
end
|
|
805
|
-
|
|
806
|
-
# Monkey-patch: upstream raises a DevError when global settings are
# initialized more than once; this agent drives puppet repeatedly inside one
# long-lived process, so re-initialization is made a silent no-op instead.
class Puppet::Settings
  def initialize_global_settings(args = [])
    #raise Puppet::DevError, "Attempting to initialize global default settings more than once!" if global_defaults_initialized?
    return if global_defaults_initialized?
    # The first two phases of the lifecycle of a puppet application are:
    # 1) Parse the command line options and handle any of them that are
    # registered, defined "global" puppet settings (mostly from defaults.rb).
    # 2) Parse the puppet config file(s).
    parse_global_options(args)
    parse_config_files
    @global_defaults_initialized = true
  end
end
|