choria-mcorpc-support 2.20.8 → 2.23.0.pre
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/mcollective.rb +1 -1
- data/lib/mcollective/agent/bolt_tasks.ddl +235 -0
- data/lib/mcollective/agent/bolt_tasks.json +347 -0
- data/lib/mcollective/agent/bolt_tasks.rb +176 -0
- data/lib/mcollective/agent/choria_util.ddl +152 -0
- data/lib/mcollective/agent/choria_util.json +244 -0
- data/lib/mcollective/agent/rpcutil.ddl +7 -3
- data/lib/mcollective/agent/rpcutil.json +333 -0
- data/lib/mcollective/agent/scout.ddl +169 -0
- data/lib/mcollective/agent/scout.json +224 -0
- data/lib/mcollective/agents.rb +7 -6
- data/lib/mcollective/aggregate.rb +4 -4
- data/lib/mcollective/aggregate/average.rb +2 -2
- data/lib/mcollective/aggregate/base.rb +2 -2
- data/lib/mcollective/aggregate/result.rb +3 -3
- data/lib/mcollective/aggregate/result/collection_result.rb +2 -2
- data/lib/mcollective/aggregate/result/numeric_result.rb +2 -2
- data/lib/mcollective/aggregate/sum.rb +2 -2
- data/lib/mcollective/aggregate/summary.rb +3 -4
- data/lib/mcollective/application.rb +57 -21
- data/lib/mcollective/application/choria.rb +249 -0
- data/lib/mcollective/application/completion.rb +6 -6
- data/lib/mcollective/application/describe_filter.rb +20 -20
- data/lib/mcollective/application/facts.rb +19 -11
- data/lib/mcollective/application/federation.rb +239 -0
- data/lib/mcollective/application/find.rb +4 -4
- data/lib/mcollective/application/help.rb +3 -3
- data/lib/mcollective/application/inventory.rb +3 -341
- data/lib/mcollective/application/ping.rb +3 -77
- data/lib/mcollective/application/playbook.rb +207 -0
- data/lib/mcollective/application/plugin.rb +106 -106
- data/lib/mcollective/application/rpc.rb +3 -108
- data/lib/mcollective/application/tasks.rb +416 -0
- data/lib/mcollective/applications.rb +11 -10
- data/lib/mcollective/audit/choria.rb +33 -0
- data/lib/mcollective/cache.rb +2 -4
- data/lib/mcollective/client.rb +11 -10
- data/lib/mcollective/config.rb +21 -34
- data/lib/mcollective/connector/base.rb +2 -1
- data/lib/mcollective/connector/nats.ddl +9 -0
- data/lib/mcollective/connector/nats.rb +450 -0
- data/lib/mcollective/data.rb +8 -3
- data/lib/mcollective/data/agent_data.rb +1 -1
- data/lib/mcollective/data/base.rb +6 -5
- data/lib/mcollective/data/bolt_task_data.ddl +90 -0
- data/lib/mcollective/data/bolt_task_data.rb +32 -0
- data/lib/mcollective/data/collective_data.rb +1 -1
- data/lib/mcollective/data/fact_data.rb +6 -6
- data/lib/mcollective/data/fstat_data.rb +2 -4
- data/lib/mcollective/data/result.rb +7 -2
- data/lib/mcollective/ddl/agentddl.rb +5 -17
- data/lib/mcollective/ddl/base.rb +11 -14
- data/lib/mcollective/discovery.rb +12 -26
- data/lib/mcollective/discovery/choria.ddl +11 -0
- data/lib/mcollective/discovery/choria.rb +223 -0
- data/lib/mcollective/discovery/flatfile.rb +7 -8
- data/lib/mcollective/discovery/mc.rb +2 -2
- data/lib/mcollective/discovery/stdin.rb +17 -18
- data/lib/mcollective/exceptions.rb +13 -0
- data/lib/mcollective/facts/base.rb +9 -9
- data/lib/mcollective/facts/yaml_facts.rb +12 -12
- data/lib/mcollective/generators.rb +3 -3
- data/lib/mcollective/generators/agent_generator.rb +3 -4
- data/lib/mcollective/generators/base.rb +14 -15
- data/lib/mcollective/generators/data_generator.rb +5 -6
- data/lib/mcollective/log.rb +2 -2
- data/lib/mcollective/logger/base.rb +3 -2
- data/lib/mcollective/logger/console_logger.rb +10 -10
- data/lib/mcollective/logger/file_logger.rb +7 -7
- data/lib/mcollective/logger/syslog_logger.rb +11 -15
- data/lib/mcollective/matcher.rb +14 -14
- data/lib/mcollective/matcher/parser.rb +31 -41
- data/lib/mcollective/matcher/scanner.rb +69 -74
- data/lib/mcollective/message.rb +10 -17
- data/lib/mcollective/monkey_patches.rb +2 -4
- data/lib/mcollective/optionparser.rb +1 -0
- data/lib/mcollective/pluginmanager.rb +3 -5
- data/lib/mcollective/pluginpackager.rb +1 -3
- data/lib/mcollective/pluginpackager/agent_definition.rb +10 -11
- data/lib/mcollective/pluginpackager/forge_packager.rb +7 -9
- data/lib/mcollective/pluginpackager/standard_definition.rb +1 -2
- data/lib/mcollective/registration/base.rb +18 -16
- data/lib/mcollective/rpc.rb +2 -4
- data/lib/mcollective/rpc/actionrunner.rb +16 -18
- data/lib/mcollective/rpc/agent.rb +26 -43
- data/lib/mcollective/rpc/audit.rb +1 -0
- data/lib/mcollective/rpc/client.rb +67 -85
- data/lib/mcollective/rpc/helpers.rb +55 -62
- data/lib/mcollective/rpc/progress.rb +2 -2
- data/lib/mcollective/rpc/reply.rb +17 -19
- data/lib/mcollective/rpc/request.rb +7 -5
- data/lib/mcollective/rpc/result.rb +6 -8
- data/lib/mcollective/rpc/stats.rb +49 -58
- data/lib/mcollective/security/base.rb +29 -36
- data/lib/mcollective/security/choria.rb +765 -0
- data/lib/mcollective/shell.rb +9 -4
- data/lib/mcollective/signer/base.rb +28 -0
- data/lib/mcollective/signer/choria.rb +185 -0
- data/lib/mcollective/ssl.rb +8 -6
- data/lib/mcollective/util.rb +58 -55
- data/lib/mcollective/util/bolt_support.rb +176 -0
- data/lib/mcollective/util/bolt_support/plan_runner.rb +167 -0
- data/lib/mcollective/util/bolt_support/task_result.rb +94 -0
- data/lib/mcollective/util/bolt_support/task_results.rb +128 -0
- data/lib/mcollective/util/choria.rb +1103 -0
- data/lib/mcollective/util/indifferent_hash.rb +12 -0
- data/lib/mcollective/util/natswrapper.rb +242 -0
- data/lib/mcollective/util/playbook.rb +435 -0
- data/lib/mcollective/util/playbook/data_stores.rb +201 -0
- data/lib/mcollective/util/playbook/data_stores/base.rb +99 -0
- data/lib/mcollective/util/playbook/data_stores/consul_data_store.rb +88 -0
- data/lib/mcollective/util/playbook/data_stores/environment_data_store.rb +33 -0
- data/lib/mcollective/util/playbook/data_stores/etcd_data_store.rb +42 -0
- data/lib/mcollective/util/playbook/data_stores/file_data_store.rb +106 -0
- data/lib/mcollective/util/playbook/data_stores/shell_data_store.rb +103 -0
- data/lib/mcollective/util/playbook/inputs.rb +265 -0
- data/lib/mcollective/util/playbook/nodes.rb +207 -0
- data/lib/mcollective/util/playbook/nodes/mcollective_nodes.rb +86 -0
- data/lib/mcollective/util/playbook/nodes/pql_nodes.rb +40 -0
- data/lib/mcollective/util/playbook/nodes/shell_nodes.rb +55 -0
- data/lib/mcollective/util/playbook/nodes/terraform_nodes.rb +65 -0
- data/lib/mcollective/util/playbook/nodes/yaml_nodes.rb +47 -0
- data/lib/mcollective/util/playbook/playbook_logger.rb +47 -0
- data/lib/mcollective/util/playbook/puppet_logger.rb +51 -0
- data/lib/mcollective/util/playbook/report.rb +152 -0
- data/lib/mcollective/util/playbook/task_result.rb +55 -0
- data/lib/mcollective/util/playbook/tasks.rb +196 -0
- data/lib/mcollective/util/playbook/tasks/base.rb +45 -0
- data/lib/mcollective/util/playbook/tasks/graphite_event_task.rb +64 -0
- data/lib/mcollective/util/playbook/tasks/mcollective_task.rb +356 -0
- data/lib/mcollective/util/playbook/tasks/shell_task.rb +93 -0
- data/lib/mcollective/util/playbook/tasks/slack_task.rb +105 -0
- data/lib/mcollective/util/playbook/tasks/webhook_task.rb +136 -0
- data/lib/mcollective/util/playbook/template_util.rb +98 -0
- data/lib/mcollective/util/playbook/uses.rb +169 -0
- data/lib/mcollective/util/tasks_support.rb +733 -0
- data/lib/mcollective/util/tasks_support/cli.rb +260 -0
- data/lib/mcollective/util/tasks_support/default_formatter.rb +138 -0
- data/lib/mcollective/util/tasks_support/json_formatter.rb +108 -0
- data/lib/mcollective/validator.rb +8 -3
- data/lib/mcollective/validator/bolt_task_name_validator.ddl +7 -0
- data/lib/mcollective/validator/bolt_task_name_validator.rb +11 -0
- data/lib/mcollective/validator/length_validator.rb +1 -3
- data/lib/mcollective/validator/typecheck_validator.rb +4 -0
- metadata +67 -4
@@ -0,0 +1,733 @@
|
|
1
|
+
require "digest"
|
2
|
+
require "uri"
|
3
|
+
require "tempfile"
|
4
|
+
|
5
|
+
module MCollective
|
6
|
+
module Util
|
7
|
+
class TasksSupport
|
8
|
+
# @return [String] directory where downloaded task files are cached
attr_reader :cache_dir

# @return [Util::Choria] the Choria helper used for config and network access
attr_reader :choria

# @param choria [Util::Choria] Choria helper instance
# @param cache_dir [String, nil] override for the task cache directory,
#   defaults to the "choria.tasks_cache" configuration option
def initialize(choria, cache_dir=nil)
  @choria = choria
  @cache_dir = cache_dir || @choria.get_option("choria.tasks_cache")
end
|
14
|
+
|
15
|
+
# Constructs the CLI helper used by the tasks application
#
# @param format [:json, :default] the output format to use
# @param verbose [Boolean] whether verbose output is wanted
# @return [CLI]
def cli(format, verbose)
  # loaded lazily, the CLI helpers are only needed on user facing code paths
  require_relative "tasks_support/cli"

  CLI.new(self, format, verbose)
end
|
23
|
+
|
24
|
+
# Maps a Puppet data type expression onto types mcollective understands
#
# Properly parsing Puppet types is not possible here so this is a best
# effort string match; for situations too complex for it users can always
# supply raw JSON via --input instead.
#
# @param type [String] a puppet type expression like "Optional[Array[String]]"
# @return [Array(Class, Boolean, Boolean)] the data type, whether it is an
#   array input and whether it is required
def puppet_type_to_ruby(type)
  is_array = false
  is_required = true

  # the regexes deliberately leave the bracket unclosed so nested
  # expressions keep their inner type text
  if (optional = type.match(/Optional\[(.+)/))
    type = optional[1]
    is_required = false
  end

  if (wrapped = type.match(/Array\[(.+)/))
    type = wrapped[1]
    is_array = true
  end

  case type
  when /Integer/, /Float/
    [Numeric, is_array, is_required]
  when /Hash/
    [Hash, is_array, is_required]
  when /Boolean/
    [:boolean, is_array, is_required]
  else
    [String, is_array, is_required]
  end
end
|
56
|
+
|
57
|
+
# Determines if this machine can run bolt tasks
#
# True only when the task wrapper binary exists and is executable.
#
# @note this should eventually also check for a compatible Puppet version
# @return [Boolean]
def tasks_compatible?
  wrapper = wrapper_path

  File.exist?(wrapper) && File.executable?(wrapper)
end
|
64
|
+
|
65
|
+
# The AIO Puppet binary directory for the current platform
#
# @return [String] path holding wrappers and other AIO binaries
def aio_bin_path
  return 'C:\Program Files\Puppet Labs\Puppet\bin' if Util.windows?

  "/opt/puppetlabs/puppet/bin"
end
|
73
|
+
|
74
|
+
# Locates the AIO task wrapper executable
#
# Older AIO releases shipped "task_wrapper" while newer ones ship
# "execution_wrapper"; the legacy name is preferred when it exists.
#
# @return [String]
def aio_wrapper_path
  legacy_name, current_name =
    if Util.windows?
      ["task_wrapper.exe", "execution_wrapper.exe"]
    else
      ["task_wrapper", "execution_wrapper"]
    end

  legacy = File.join(aio_bin_path, legacy_name)

  return legacy if File.exist?(legacy)

  File.join(aio_bin_path, current_name)
end
|
90
|
+
|
91
|
+
# The task wrapper executable to use
#
# Configurable via "choria.tasks.wrapper_path", falling back to the
# AIO install location.
#
# @return [String]
def wrapper_path
  @choria.get_option("choria.tasks.wrapper_path", aio_wrapper_path)
end
|
97
|
+
|
98
|
+
# Location of the PowerShell shim used by the "powershell" input method
#
# @return [String]
def ps_shim_path
  File.join(aio_bin_path, "PowershellShim.ps1")
end
|
104
|
+
|
105
|
+
# Wraps an executable path in the interpreter its platform needs
#
# On anything but Windows files are executed directly.  On Windows the
# interpreter is picked from the file extension, mirroring pxp-agent:
# https://github.com/puppetlabs/pxp-agent/blob/3e7cada3cedf7f78703781d44e70010d0c5ad209/lib/src/modules/task.cc#L98-L107
#
# @see https://github.com/puppetlabs/puppet-specifications/tree/730a2aa23e58b93387d194dbac64af508bdeab01/tasks#task-execution
# @param path [String] path to the task executable
# @return [Array<String>] the command and its arguments
def platform_specific_command(path)
  return [path] unless Util.windows?

  case File.extname(path)
  when ".rb"
    ["ruby", path]
  when ".pp"
    ["puppet", "apply", path]
  when ".ps1"
    ["powershell", "-NoProfile", "-NonInteractive", "-NoLogo", "-ExecutionPolicy", "Bypass", "-File", path]
  else
    [path]
  end
end
|
127
|
+
|
128
|
+
# Given a task file list checks all files are correctly cached
#
# @note this checks all files, though for now there's only ever one file
# @see #task_file?
# @param files [Array] files list from the task metadata
# @return [Boolean]
def cached?(files)
  # all? short-circuits and avoids building an intermediate array,
  # unlike the previous map {}.all? form
  files.all? {|file| task_file?(file)}
end
|
137
|
+
|
138
|
+
# Determines the input method for a task
#
# Explicitly configured methods win; otherwise PowerShell scripts default
# to "powershell" and everything else to "both".
#
# @param task [Hash] task specification
# @return ["powershell", "both", "stdin", "environment"]
def task_input_method(task)
  # the spec allows only one executable, so the first file's extension decides
  extension = File.extname(task["files"][0]["filename"])

  method = task["input_method"]
  method = "powershell" if method.nil? && extension == ".ps1"

  method || "both"
end
|
152
|
+
|
153
|
+
# Builds the command that will execute a task out of the spool
#
# @param spooldir [String] path to the spool for this specific request
# @param task [Hash] task specification
# @return [Array<String>] the command and its arguments
def task_command(spooldir, task)
  executable = File.join(spooldir, "files", task["files"][0]["filename"])

  command = platform_specific_command(executable)

  # powershell input goes via the shim which becomes the actual executable
  command.unshift(ps_shim_path) if task_input_method(task) == "powershell"

  command
end
|
168
|
+
|
169
|
+
# Calculates the environment hash a task should run with
#
# Identification variables are always set; when the input method is
# "environment" or "both" every input key is additionally exposed as a
# PT_ prefixed variable along with PT__installdir.
#
# @param task [Hash] task specification
# @param task_id [String] task id - usually the mcollective request id
# @param task_caller [String] the caller invoking the task
# @return [Hash]
def task_environment(task, task_id, task_caller)
  environment = {
    "_task" => task["task"],
    "_choria_task_id" => task_id,
    "_choria_task_caller" => task_caller
  }

  return environment unless task["input"]
  return environment unless ["both", "environment"].include?(task_input_method(task))

  JSON.parse(task["input"]).each do |input, value|
    environment["PT_%s" % input] = value.to_s
  end

  environment["PT__installdir"] = File.join(request_spooldir(task_id), "files")

  environment
end
|
193
|
+
|
194
|
+
# The spool directory belonging to a specific request
#
# @param requestid [String] task request id
# @return [String] directory path
def request_spooldir(requestid)
  File.join(choria.tasks_spool_dir, requestid)
end
|
201
|
+
|
202
|
+
# Creates and populates the spool directory for a request
#
# @param requestid [String] unique mco request id
# @param task [Hash] task specification
# @return [String] path to the created spool dir
# @raise [StandardError] should it not be able to make the directory
def create_request_spooldir(requestid, task)
  spooldir = request_spooldir(requestid)

  FileUtils.mkdir_p(spooldir, :mode => 0o0750)

  populate_spooldir(spooldir, task)

  spooldir
end
|
217
|
+
|
218
|
+
# Copies all cached task files into a spool directory
#
# @param spooldir [String] path to the spool dir
# @param task [Hash] task specification
def populate_spooldir(spooldir, task)
  task["files"].each do |file|
    destination = File.join(spooldir, "files", file["filename"])

    FileUtils.mkdir_p(File.dirname(destination), :mode => 0o0750)
    FileUtils.cp(task_file_name(file), destination)
  end
end
|
229
|
+
|
230
|
+
# The standard input to feed a task, when its input method uses stdin
#
# @param task [Hash] task specification
# @return [String, nil] nil when the input method does not involve stdin
def task_input(task)
  return task["input"] if ["both", "powershell", "stdin"].include?(task_input_method(task))

  nil
end
|
237
|
+
|
238
|
+
# Runs the wrapper command detached from mcollective
#
# Tasks always run detached since they can take arbitrarily long; callers
# poll for completion up to the agent timeout, and because the request id
# doubles as the spool name a task can still be inspected - or acted on -
# after a timeout.
#
# @param command [String] the wrapper executable to run
#   (NOTE(review): was documented as Array<String>, but the only caller
#   passes wrapper_path as a single String and Process.spawn is given it
#   as one argument - confirm before changing)
# @param environment [Hash] environment to run with
# @param stdin [String, nil] stdin to send to the command
# @param spooldir [String] path to the spool for this specific request
# @return [Integer] the pid that was spawned
def spawn_command(command, environment, stdin, spooldir)
  stdin_path = File.join(spooldir, "wrapper_stdin")
  stdout_path = File.join(spooldir, "wrapper_stdout")
  stderr_path = File.join(spooldir, "wrapper_stderr")
  pid_path = File.join(spooldir, "wrapper_pid")

  options = {
    :chdir => "/",
    :in => :close,
    :out => stdout_path,
    :err => stderr_path
  }

  if stdin
    File.open(stdin_path, "w") {|io| io.print(stdin)}
    options[:in] = stdin_path
  end

  pid = Process.spawn(environment, command, options)

  # wait for the stdout redirection target to appear so the pid file is
  # only written once the process is actually running
  sleep 0.1 until File.exist?(stdout_path)

  File.open(pid_path, "w") {|io| io.write(pid)}

  Process.detach(pid)

  pid
end
|
282
|
+
|
283
|
+
# Determines if a task already ran by checking if its spool exists
#
# @param requestid [String] request id for the task
# @return [Boolean]
def task_ran?(requestid)
  File.directory?(request_spooldir(requestid))
end
|
290
|
+
|
291
|
+
# Determines if a task is completed
#
# The wrapper writes the exit code to a file only after the command has
# exited, so completion means either that file has content or the wrapper
# itself wrote something to its stderr - a wrapper level failure.
#
# @param requestid [String] request id for the task
# @return [Boolean]
def task_complete?(requestid)
  spool = request_spooldir(requestid)
  exitcode = File.join(spool, "exitcode")
  wrapper_stderr = File.join(spool, "wrapper_stderr")

  (File.exist?(wrapper_stderr) && file_size(wrapper_stderr) > 0) ||
    (File.exist?(exitcode) && file_size(exitcode) > 0)
end
|
305
|
+
|
306
|
+
# Blocks until the given task completes
#
# @param requestid [String] request id for the task
def wait_for_task_completion(requestid)
  sleep 0.1 until task_complete?(requestid)
end
|
312
|
+
|
313
|
+
# Given a task spec runs it via the Puppet wrappers
#
# The task is run in the background and this method waits for it to
# finish, but should the thread this method runs in be killed the process
# will continue and one can later check again using the request id
#
# @note before this is run be sure to download the task files first
# @param requestid [String] the task requestid
# @param task [Hash] task specification
# @param wait [Boolean] should we wait for the task to complete
# @param callerid [String] the mcollective callerid who is running the task
# @return [Hash] the task result as per {#task_status}
# @raise [StandardError] when calling the wrapper fails etc
def run_task_command(requestid, task, wait=true, callerid="local")
  raise("The task wrapper %s does not exist, please upgrade Puppet" % wrapper_path) unless File.exist?(wrapper_path)
  raise("Task %s is not available or does not match the specification, please download it" % task["task"]) unless cached?(task["files"])
  # BUG FIX: requestid was previously passed as a second argument to
  # raise() - a TypeError at runtime - instead of into the format string
  raise("Task spool for request %s already exist, cannot rerun" % requestid) if task_ran?(requestid)

  spool = create_request_spooldir(requestid, task)
  command = task_command(spool, task)

  Log.debug("Trying to spawn task %s in spool %s using command %s" % [task["task"], spool, command])

  # the wrapper receives what to run and where to write results as JSON on stdin
  wrapper_input = {
    "executable" => command[0],
    "arguments" => command[1..-1],
    "input" => task_input(task),
    "stdout" => File.join(spool, "stdout"),
    "stderr" => File.join(spool, "stderr"),
    "exitcode" => File.join(spool, "exitcode")
  }

  # record request metadata so task_status can report caller, task and start time
  File.open(File.join(spool, "choria.json"), "w") do |meta|
    data = {
      "start_time" => Time.now.utc.to_i,
      "caller" => callerid,
      "task" => task["task"],
      "request" => wrapper_input
    }

    meta.print(data.to_json)
  end

  pid = spawn_command(wrapper_path, task_environment(task, requestid, callerid), wrapper_input.to_json, spool)

  Log.info("Spawned task %s in spool %s with pid %s" % [task["task"], spool, pid])

  wait_for_task_completion(requestid) if wait

  task_status(requestid)
end
|
364
|
+
|
365
|
+
# Determines how long a task ran for
#
# Tasks that had wrapper failures report a 0 run time; still running
# tasks report the elapsed time so far and so the value grows on each
# invocation.
#
# @param requestid [String] the request id for the task
# @return [Float]
def task_runtime(requestid)
  spool = request_spooldir(requestid)
  wrapper_stderr = File.join(spool, "wrapper_stderr")
  wrapper_pid = File.join(spool, "wrapper_pid")
  exitcode = File.join(spool, "exitcode")

  completed = task_complete?(requestid)

  if completed && File.exist?(exitcode)
    # completed: interval between the pid file and the exit code file
    Float(File::Stat.new(exitcode).mtime - File::Stat.new(wrapper_pid).mtime)
  elsif completed && file_size(wrapper_stderr) > 0
    # the wrapper itself failed, the task never really ran
    0.0
  else
    Float(Time.now - File::Stat.new(wrapper_pid).mtime)
  end
end
|
387
|
+
|
388
|
+
# Builds the task stdout object according to the Tasks spec version 1
#
# JSON parsable hash output is passed through as-is, any other output is
# wrapped under "_output".  Wrapper failures and non zero exits are
# reported under "_error".
#
# @note https://github.com/puppetlabs/puppet-specifications/blob/730a2aa23e58b93387d194dbac64af508bdeab01/tasks/README.md#output-handling
# @param stdout [String] the stdout from the script
# @param completed [Boolean] if the task is done running
# @param exitcode [Integer] the exitcode from the script
# @param wrapper_output [String] the wrapper output
# @return [Hash, String] NOTE(review): the wrapper-error path returns a
#   JSON encoded String while all other paths return a Hash; behavior is
#   preserved here - confirm callers before unifying the return type
def create_task_stdout(stdout, completed, exitcode, wrapper_output)
  reply = {}

  unless wrapper_output.empty?
    reply["_error"] = {
      "kind" => "choria.tasks/wrapper-error",
      "msg" => "The task wrapper failed to run",
      "details" => {
        "wrapper_output" => wrapper_output
      }
    }

    return reply.to_json
  end

  begin
    parsed = JSON.parse(stdout)

    if parsed.is_a?(Hash)
      reply = parsed
    else
      reply["_output"] = stdout
    end
  rescue
    reply["_output"] = stdout
  end

  if exitcode != 0 && completed && !reply["_error"]
    reply["_error"] = {
      "kind" => "choria.tasks/task-error",
      "msg" => "The task errored with a code %d" % exitcode,
      "details" => {
        "exitcode" => exitcode
      }
    }
  end

  reply
end
|
438
|
+
|
439
|
+
# Determines if a task failed based on its status
#
# A task failed when the wrapper never spawned, has no pid, wrote to its
# own stderr, exited non zero after completing, or produced an "_error"
# in its stdout structure.
#
# @param status [Hash] the status as produced by {#task_status}
# @return [Boolean]
def task_failed?(status)
  !status["wrapper_spawned"] ||
    !status["wrapper_pid"] ||
    !status["wrapper_error"].empty? ||
    (status["exitcode"] != 0 && status["completed"]) ||
    status["stdout"].include?("_error")
end
|
452
|
+
|
453
|
+
# Gathers the full status of a task request from its spool
#
# @param requestid [String] request id for the task
# @return [Hash] the task status
# @raise [StandardError] when no task was run for this request
def task_status(requestid)
  raise("Task %s have not been requested" % requestid) unless task_ran?(requestid)

  spool = request_spooldir(requestid)
  stdout_path = File.join(spool, "stdout")
  stderr_path = File.join(spool, "stderr")
  exitcode_path = File.join(spool, "exitcode")
  wrapper_stderr = File.join(spool, "wrapper_stderr")
  wrapper_pid = File.join(spool, "wrapper_pid")
  meta_path = File.join(spool, "choria.json")

  status = {
    "spool" => spool,
    "task" => nil,
    "caller" => nil,
    "stdout" => "",
    "stderr" => "",
    "exitcode" => -1,
    "runtime" => task_runtime(requestid),
    "start_time" => Time.at(0).utc,
    "wrapper_spawned" => false,
    "wrapper_error" => "",
    "wrapper_pid" => nil,
    "completed" => task_complete?(requestid)
  }

  status["exitcode"] = Integer(File.read(exitcode_path)) if File.exist?(exitcode_path)

  if task_ran?(requestid)
    status["stdout"] = File.read(stdout_path) if File.exist?(stdout_path)
    status["stderr"] = File.read(stderr_path) if File.exist?(stderr_path)

    # an empty wrapper stderr means the wrapper itself started cleanly
    status["wrapper_spawned"] = File.exist?(wrapper_stderr) && file_size(wrapper_stderr) == 0
    status["wrapper_error"] = File.read(wrapper_stderr) if File.exist?(wrapper_stderr) && file_size(wrapper_stderr) > 0

    if File.exist?(wrapper_pid) && file_size(wrapper_pid) > 0
      status["start_time"] = File::Stat.new(wrapper_pid).mtime.utc
      status["wrapper_pid"] = Integer(File.read(wrapper_pid))
    end
  end

  # metadata written at spawn time is authoritative for caller/task/start
  if File.exist?(meta_path)
    metadata = JSON.parse(File.read(meta_path))

    status["start_time"] = Time.at(metadata["start_time"]).utc
    status["caller"] = metadata["caller"]
    status["task"] = metadata["task"]
  end

  status["stdout"] = create_task_stdout(
    status["stdout"],
    status["completed"],
    status["exitcode"],
    status["wrapper_error"]
  )

  status
end
|
515
|
+
|
516
|
+
# Retrieves the list of known tasks in an environment
#
# @param environment [String] the environment to query
# @return [Array<Hash>] the v3 task list, sorted by task name
# @raise [StandardError] on http failure
def tasks(environment)
  resp = http_get("/puppet/v3/tasks?environment=%s" % [environment])

  # BUG FIX: this previously interpolated $!, but no exception is in
  # flight on a non-200 reply so both values were nil; report the HTTP
  # response instead, matching task_metadata
  raise("Failed to retrieve task list: %s: %s" % [resp.code, resp.body]) unless resp.code == "200"

  tasks = JSON.parse(resp.body)

  tasks.sort_by {|t| t["name"]}
end
|
530
|
+
|
531
|
+
# Retrieves the list of known task names
#
# @param environment [String] the environment to query
# @return [Array<String>] list of task names
# @raise [StandardError] on http failure
def task_names(environment)
  tasks(environment).map {|task| task["name"]}
end
|
539
|
+
|
540
|
+
# Parses a task name like module::task into its two pieces
#
# A bare module name resolves to its "init" task.
#
# @param task [String]
# @return [Array<String>] 2 part array, first the module name then the task name
# @raise [StandardError] for invalid task names
def parse_task(task)
  segments = task.split("::")

  segments.size == 1 ? segments + ["init"] : segments
end
|
552
|
+
|
553
|
+
# Determines the cache path for a task file
#
# Files are stored under their SHA256 so identical content is shared
# between tasks.
#
# @param file [Hash] a file hash as per the task metadata
# @return [String] the full cache path for the file
def task_file_name(file)
  File.join(cache_dir, file["sha256"])
end
|
560
|
+
|
561
|
+
# Does a HTTP GET against the Puppet Server
#
# @param path [String] the path to get
# @param headers [Hash] additional headers to pass
# @return [Net::HTTPResponse] the response (yielded to the block when one is given)
def http_get(path, headers={}, &blk)
  transport = choria.https(choria.puppet_server, true)
  request = choria.http_get(path)

  headers.each {|header, value| request[header] = value}

  transport.request(request, &blk)
end
|
576
|
+
|
577
|
+
# Requests task metadata from the Puppet Server
#
# @param task [String] a task name like module::task
# @param environment [String] the puppet environment like production
# @return [Hash] the metadata according to the v3 spec, with "metadata",
#   its "parameters" and "files" keys guaranteed to exist
# @raise [StandardError] when the request failed
def task_metadata(task, environment)
  module_name, task_name = parse_task(task)
  path = "/puppet/v3/tasks/%s/%s?environment=%s" % [module_name, task_name, environment]

  resp = http_get(path)

  raise("Failed to request task metadata: %s: %s" % [resp.code, resp.body]) unless resp.code == "200"

  result = JSON.parse(resp.body)

  # normalise the optional sections so callers need no nil guards
  result["metadata"] ||= {}
  result["metadata"]["parameters"] ||= {}
  result["files"] ||= []

  result
end
|
599
|
+
|
600
|
+
# Validates that the inputs would be acceptable to the task
#
# @note copied from PAL TaskSignature#runnable_with?
# @param inputs [Hash] of keys and values
# @param task [Hash] task metadata
# @return [Array(Boolean, String)] validity flag and a reason when invalid
def validate_task_inputs(inputs, task)
  parameters = task["metadata"]["parameters"]

  return [true, ""] unless parameters
  return [true, ""] if parameters.empty? && inputs.empty?

  # deliberately loaded lazily, puppet is only needed when validating
  require "puppet"

  signature = {}

  parameters.each do |name, spec|
    signature[name] = Puppet::Pops::Types::TypeParser.singleton.parse(spec["type"])
  end

  signature_type = Puppet::Pops::Types::TypeFactory.struct(signature)

  return [true, ""] if signature_type.instance?(inputs)

  describer = Puppet::Pops::Types::TypeMismatchDescriber.singleton
  mismatches = describer.describe_struct_signature(signature_type, inputs).flatten.map(&:format).join("\n")

  [false, "\nInvalid input: \n\t%s" % [mismatches]]
end
|
628
|
+
|
629
|
+
# Calculates the hex SHA256 digest for a specific file
#
# @param file_path [String] a full path to the file to check
# @return [String]
# @raise [StandardError] when the file does not exist
def file_sha256(file_path)
  Digest::SHA256.hexdigest(File.binread(file_path))
end
|
637
|
+
|
638
|
+
# Determines the size of a specific file
#
# @param file_path [String] a full path to the file to check
# @return [Integer] bytes, -1 when the file cannot be stat'ed
def file_size(file_path)
  File.size(file_path)
rescue
  -1
end
|
647
|
+
|
648
|
+
# Validates a cached task file against its metadata
#
# Existence, size and SHA256 must all match the file specification.
#
# @param file [Hash] a file hash as per the task metadata
# @return [Boolean]
def task_file?(file)
  file_name = task_file_name(file)

  Log.debug("Checking if file %s is cached using %s" % [file_name, file.pretty_inspect])

  File.exist?(file_name) &&
    file_size(file_name) == file["size_bytes"] &&
    file_sha256(file_name) == file["sha256"]
end
|
663
|
+
|
664
|
+
# Downloads a task file into the cache
#
# The content is fetched from the Puppet Server, written to a temp file
# and only moved into the cache once fully downloaded.
#
# @note does not first check if the cache is ok, unconditionally downloads
# @see #task_file?
# @param file [Hash] a file hash as per the task metadata
# @raise [StandardError] when downloading fails
def cache_task_file(file)
  path = [file["uri"]["path"], URI.encode_www_form(file["uri"]["params"])].join("?")
  file_name = task_file_name(file)

  Log.debug("Caching task to %s" % file_name)

  http_get(path, "Accept" => "application/octet-stream") do |resp|
    raise("Failed to request task content %s: %s: %s" % [path, resp.code, resp.body]) unless resp.code == "200"

    FileUtils.mkdir_p(cache_dir, :mode => 0o0750)

    # a directory at the target path would break the later move
    FileUtils.rm_rf(file_name) if File.directory?(file_name)

    staged = Tempfile.new("tasks_%s" % file["filename"])
    staged.binmode

    resp.read_body {|segment| staged.write(segment)}

    staged.close

    FileUtils.chmod(0o0750, staged.path)
    FileUtils.mv(staged.path, file_name)
  end
end
|
696
|
+
|
697
|
+
# Downloads and caches a file set
#
# Already valid cache entries are skipped; each download is attempted
# twice before the last error is re-raised.
#
# @param files [Array] the files description
# @return [Boolean] indicating download success
# @raise [StandardError] on download failures
def download_files(files)
  Log.info("Downloading %d task files" % files.size)

  files.each do |file|
    next if task_file?(file)

    attempt = 0

    begin
      return false if attempt == 2

      attempt += 1

      Log.debug("Downloading task file %s (try %s/2)" % [file["filename"], attempt])

      cache_task_file(file)
    rescue
      Log.error(msg = "Could not download task file: %s: %s" % [$!.class, $!.to_s])

      sleep 0.1

      retry if attempt < 2

      raise(msg)
    end
  end

  true
end
|
731
|
+
end
|
732
|
+
end
|
733
|
+
end
|