smart_proxy_remote_execution_ssh 0.5.3 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/smart_proxy_remote_execution_ssh/actions/pull_script.rb +36 -11
- data/lib/smart_proxy_remote_execution_ssh/api.rb +9 -7
- data/lib/smart_proxy_remote_execution_ssh/plugin.rb +5 -1
- data/lib/smart_proxy_remote_execution_ssh/runners/polling_script_runner.rb +15 -7
- data/lib/smart_proxy_remote_execution_ssh/runners/script_runner.rb +88 -76
- data/lib/smart_proxy_remote_execution_ssh/version.rb +1 -1
- data/lib/smart_proxy_remote_execution_ssh.rb +27 -14
- data/settings.d/remote_execution_ssh.yml.example +7 -0
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a3e83401d99929917142c15969fce19bf17e6fffc82000da554c457567016f8d
+  data.tar.gz: 9dbdec23d2203557204fb60f45cadc884d640d49b436eb291ad0cc4769182e2c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3cc070d29a185dc80cee33c3b69bf06772c226721ed9f42fba167a702a917ac04b50729ee3354d430b2a7c840581e98b75236cb4298e77aeee749fd9af6f56f2
+  data.tar.gz: a040bf6621ea2e35b421bc9098a5ec2569bc24e51ea6ae5346eec8c9e05d91e2560b96843d553589f8bbab10f009725947e76b306c5bb785c16ae0f3006baed8

data/lib/smart_proxy_remote_execution_ssh/actions/pull_script.rb
CHANGED

@@ -56,37 +56,52 @@ module Proxy::RemoteExecution::Ssh::Actions
         # Client was notified or is already running, dealing with this situation
         # is only supported if mqtt is available
         # Otherwise we have to wait it out
-
-        # if input[:with_mqtt]
+        mqtt_cancel if input[:with_mqtt]
       end
       suspend
     end

     def mqtt_start(otp_password)
-      payload =
-
-        message_id: SecureRandom.uuid,
-        version: 1,
-        sent: DateTime.now.iso8601,
-        directive: 'foreman',
+      payload = mqtt_payload_base.merge(
+        content: "#{input[:proxy_url]}/ssh/jobs/#{input[:job_uuid]}",
         metadata: {
+          'event': 'start',
           'job_uuid': input[:job_uuid],
           'username': execution_plan_id,
           'password': otp_password,
           'return_url': "#{input[:proxy_url]}/ssh/jobs/#{input[:job_uuid]}/update",
         },
-
-      }
+      )
       mqtt_notify payload
       output[:state] = :notified
     end

+    def mqtt_cancel
+      cleanup
+      payload = mqtt_payload_base.merge(
+        metadata: {
+          'event': 'cancel',
+          'job_uuid': input[:job_uuid]
+        }
+      )
+      mqtt_notify payload
+    end
+
     def mqtt_notify(payload)
-
+      with_mqtt_client do |c|
         c.publish(mqtt_topic, JSON.dump(payload), false, 1)
       end
     end

+    def with_mqtt_client(&block)
+      MQTT::Client.connect(settings.mqtt_broker, settings.mqtt_port,
+                           :ssl => settings.mqtt_tls,
+                           :cert_file => ::Proxy::SETTINGS.foreman_ssl_cert,
+                           :key_file => ::Proxy::SETTINGS.foreman_ssl_key,
+                           :ca_file => ::Proxy::SETTINGS.foreman_ssl_ca,
+                           &block)
+    end
+
     def host_name
       alternative_names = input.fetch(:alternative_names, {})

@@ -106,5 +121,15 @@ module Proxy::RemoteExecution::Ssh::Actions
     def job_storage
       Proxy::RemoteExecution::Ssh.job_storage
     end
+
+    def mqtt_payload_base
+      {
+        type: 'data',
+        message_id: SecureRandom.uuid,
+        version: 1,
+        sent: DateTime.now.iso8601,
+        directive: 'foreman'
+      }
+    end
   end
 end
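
The new mqtt_payload_base helper centralizes the envelope fields that both mqtt_start and mqtt_cancel merge their metadata into. As a rough, standalone Ruby sketch (not gem code; the job UUID below is invented), this is approximately what mqtt_cancel builds and what mqtt_notify then hands to MQTT::Client#publish as JSON:

    require 'json'
    require 'securerandom'
    require 'date'

    job_uuid = '3a0e0f4b-1f3c-4e5d-9c7a-2b6d8f1e5a90' # hypothetical; in the action it is input[:job_uuid]

    payload = {
      type: 'data',
      message_id: SecureRandom.uuid,
      version: 1,
      sent: DateTime.now.iso8601,
      directive: 'foreman',
      metadata: { 'event': 'cancel', 'job_uuid': job_uuid }
    }

    puts JSON.dump(payload) # roughly what gets published to the host's MQTT topic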

data/lib/smart_proxy_remote_execution_ssh/api.rb
CHANGED

@@ -13,14 +13,16 @@ module Proxy::RemoteExecution
         File.read(Ssh.public_key_file)
       end

-
-
-
-
-
+      if Proxy::RemoteExecution::Ssh::Plugin.settings.cockpit_integration
+        post "/session" do
+          do_authorize_any
+          session = Cockpit::Session.new(env)
+          unless session.valid?
+            return [ 400, "Invalid request: /ssh/session requires connection upgrade to 'raw'" ]
+          end
+          session.hijack!
+          101
         end
-        session.hijack!
-        101
       end

       delete '/known_hosts/:name' do |name|
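
The Cockpit /session route is now defined only when cockpit_integration is enabled. A toy Sinatra-style illustration of the pattern (this is not the smart-proxy API; the class and constant are invented for the example): when the flag is off, the conditional body never runs, so the route is simply never registered and such requests fall through to a 404.

    require 'sinatra/base'

    class ToyApi < Sinatra::Base
      COCKPIT_ENABLED = false # stand-in for Plugin.settings.cockpit_integration

      # With the flag disabled this block never executes,
      # so POST /session is not part of the app at all.
      if COCKPIT_ENABLED
        post '/session' do
          [400, "Invalid request: /ssh/session requires connection upgrade to 'raw'"]
        end
      end
    end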

data/lib/smart_proxy_remote_execution_ssh/plugin.rb
CHANGED

@@ -12,15 +12,19 @@ module Proxy::RemoteExecution::Ssh
                      :remote_working_dir => '/var/tmp',
                      :local_working_dir => '/var/tmp',
                      :kerberos_auth => false,
+                     :cockpit_integration => true,
                      # When set to nil, makes REX use the runner's default interval
                      # :runner_refresh_interval => nil,
                      :ssh_log_level => :error,
                      :cleanup_working_dirs => true,
                      # :mqtt_broker => nil,
                      # :mqtt_port => nil,
+                     # :mqtt_tls => nil,
                      :mode => :ssh

-
+    capability(proc { 'cockpit' if settings.cockpit_integration })
+
+    plugin :script, Proxy::RemoteExecution::Ssh::VERSION
     after_activation do
       require 'smart_proxy_dynflow'
       require 'smart_proxy_remote_execution_ssh/version'
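
A quick standalone illustration (not the smart-proxy plugin API) of why the capability is declared as a proc rather than a plain string: the proc is evaluated when capabilities are queried, so it reflects the cockpit_integration setting in effect at that moment. The OpenStruct below is only a stand-in for the real settings object.

    require 'ostruct'

    settings = OpenStruct.new(cockpit_integration: true)
    capability = proc { 'cockpit' if settings.cockpit_integration }

    puts capability.call.inspect # => "cockpit"
    settings.cockpit_integration = false
    puts capability.call.inspect # => nil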

data/lib/smart_proxy_remote_execution_ssh/runners/polling_script_runner.rb
CHANGED

@@ -36,13 +36,12 @@ module Proxy::RemoteExecution::Ssh::Runners
     def initialization_script
       close_stdin = '</dev/null'
       close_fds = close_stdin + ' >/dev/null 2>/dev/null'
-      main_script = "(#{@remote_script} #{close_stdin} 2>&1; echo $?>#{@base_dir}/init_exit_code) >#{@base_dir}/output"
+      main_script = "(#{@remote_script_wrapper} #{@remote_script} #{close_stdin} 2>&1; echo $?>#{@base_dir}/init_exit_code) >#{@base_dir}/output"
       control_script_finish = "#{@control_script_path} init-script-finish"
       <<-SCRIPT.gsub(/^ +\| /, '')
       | export CONTROL_SCRIPT="#{@control_script_path}"
-      | #{"chown #{@user_method.effective_user}
+      | #{"chown #{@user_method.effective_user} #{@base_dir}" if @user_method.cli_command_prefix}
       | #{@user_method.cli_command_prefix} sh -c '#{main_script}; #{control_script_finish}' #{close_fds} &
-      | echo $! > '#{@base_dir}/pid'
       SCRIPT
     end

@@ -51,18 +50,23 @@ module Proxy::RemoteExecution::Ssh::Runners
     end

     def refresh
-      err = output = nil
       begin
-
+        pm = run_sync("#{@user_method.cli_command_prefix} #{@retrieval_script}")
       rescue StandardError => e
         @logger.info("Error while connecting to the remote host on refresh: #{e.message}")
       end

-      process_retrieved_data(
+      process_retrieved_data(pm.stdout.to_s.chomp, pm.stderr.to_s.chomp)
     ensure
       destroy_session
     end

+    def kill
+      run_sync("pkill -P $(cat #{@pid_path})")
+    rescue StandardError => e
+      publish_exception('Unexpected error', e, false)
+    end
+
     def process_retrieved_data(output, err)
       return if output.nil? || output.empty?

@@ -127,7 +131,11 @@ module Proxy::RemoteExecution::Ssh::Runners
     end

     def cleanup
-
+      if @cleanup_working_dirs
+        ensure_remote_command("rm -rf #{remote_command_dir}",
+                              publish: true,
+                              error: "Unable to remove working directory #{remote_command_dir} on remote system, exit code: %{exit_code}")
+      end
     end

     def destroy_session
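
refresh now consumes the ProcessManager object returned by run_sync instead of a plain (exit code, output, error) tuple. A hedged usage sketch of that object, assuming the smart_proxy_dynflow gem is installed; the accessors mirror the calls visible in the diff above:

    require 'smart_proxy_dynflow/process_manager'

    # Run a harmless local command to completion and inspect the result.
    pm = Proxy::Dynflow::ProcessManager.new(['uname', '-r'])
    pm.run!
    puts "exit status: #{pm.status}"
    puts "stdout:      #{pm.stdout.to_s.chomp}"
    puts "stderr:      #{pm.stderr.to_s.chomp}"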

data/lib/smart_proxy_remote_execution_ssh/runners/script_runner.rb
CHANGED

@@ -1,5 +1,6 @@
 require 'fileutils'
-require 'smart_proxy_dynflow/runner/
+require 'smart_proxy_dynflow/runner/process_manager_command'
+require 'smart_proxy_dynflow/process_manager'

 module Proxy::RemoteExecution::Ssh::Runners
   class EffectiveUserMethod

@@ -12,9 +13,9 @@ module Proxy::RemoteExecution::Ssh::Runners
       @password_sent = false
     end

-    def on_data(received_data,
+    def on_data(received_data, io_buffer)
       if received_data.match(login_prompt)
-
+        io_buffer.add_data(effective_user_password + "\n")
         @password_sent = true
       end
     end

@@ -90,7 +91,7 @@ module Proxy::RemoteExecution::Ssh::Runners

   # rubocop:disable Metrics/ClassLength
   class ScriptRunner < Proxy::Dynflow::Runner::Base
-    include Proxy::Dynflow::Runner::
+    include Proxy::Dynflow::Runner::ProcessManagerCommand
     attr_reader :execution_timeout_interval

     EXPECTED_POWER_ACTION_MESSAGES = ['restart host', 'shutdown host'].freeze

@@ -110,7 +111,7 @@ module Proxy::RemoteExecution::Ssh::Runners

       @client_private_key_file = settings.ssh_identity_key_file
       @local_working_dir = options.fetch(:local_working_dir, settings.local_working_dir)
-      @remote_working_dir = options.fetch(:remote_working_dir, settings.remote_working_dir)
+      @remote_working_dir = options.fetch(:remote_working_dir, settings.remote_working_dir.shellescape)
       @cleanup_working_dirs = options.fetch(:cleanup_working_dirs, settings.cleanup_working_dirs)
       @first_execution = options.fetch(:first_execution, false)
       @user_method = user_method

@@ -141,6 +142,8 @@ module Proxy::RemoteExecution::Ssh::Runners

     def start
       Proxy::RemoteExecution::Utils.prune_known_hosts!(@host, @ssh_port, logger) if @first_execution
+      establish_connection
+      preflight_checks
       prepare_start
       script = initialization_script
       logger.debug("executing script:\n#{indent_multiline(script)}")
@@ -154,32 +157,63 @@ module Proxy::RemoteExecution::Ssh::Runners
       run_async(*args)
     end

+    def preflight_checks
+      ensure_remote_command(cp_script_to_remote("#!/bin/sh\nexec true", 'test'),
+                            publish: true,
+                            error: 'Failed to execute script on remote machine, exit code: %{exit_code}.'
+      )
+      unless @user_method.is_a? NoopUserMethod
+        path = cp_script_to_remote("#!/bin/sh\nexec #{@user_method.cli_command_prefix} true", 'effective-user-test')
+        ensure_remote_command(path,
+                              error: 'Failed to change to effective user, exit code: %{exit_code}',
+                              publish: true,
+                              tty: true,
+                              close_stdin: false)
+      end
+    end
+
+    def establish_connection
+      # run_sync ['-f', '-N'] would be cleaner, but ssh does not close its
+      # stderr which trips up the process manager which expects all FDs to be
+      # closed
+      ensure_remote_command(
+        'true',
+        publish: true,
+        error: 'Failed to establish connection to remote host, exit code: %{exit_code}'
+      )
+    end
+
     def prepare_start
       @remote_script = cp_script_to_remote
       @output_path = File.join(File.dirname(@remote_script), 'output')
       @exit_code_path = File.join(File.dirname(@remote_script), 'exit_code')
+      @pid_path = File.join(File.dirname(@remote_script), 'pid')
+      @remote_script_wrapper = upload_data("echo $$ > #{@pid_path}; exec \"$@\";", File.join(File.dirname(@remote_script), 'script-wrapper'), 555)
     end

     # the script that initiates the execution
     def initialization_script
       su_method = @user_method.instance_of?(SuUserMethod)
       # pipe the output to tee while capturing the exit code in a file
-
-
-
+      <<~SCRIPT
+        sh <<EOF | /usr/bin/tee #{@output_path}
+        #{@remote_script_wrapper} #{@user_method.cli_command_prefix}#{su_method ? "'#{@remote_script} < /dev/null '" : "#{@remote_script} < /dev/null"}
+        echo \\$?>#{@exit_code_path}
+        EOF
+        exit $(cat #{@exit_code_path})
       SCRIPT
     end

     def refresh
-      return if @
+      return if @process_manager.nil?
       super
     ensure
       check_expecting_disconnect
     end

     def kill
-      if @
-        run_sync("pkill -
+      if @process_manager&.started?
+        run_sync("pkill -P $(cat #{@pid_path})")
       else
         logger.debug('connection closed')
       end
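
To make the generated bootstrap easier to read, here is a small illustrative Ruby snippet (not gem code) that renders the same heredoc with hypothetical paths substituted for the instance variables and with no effective-user prefix. The escaped \$? is the important detail: it keeps the exit-status expansion inside the inner `sh` that reads the heredoc, instead of the outer shell that builds it.

    # Hypothetical remote working directory standing in for File.dirname(@remote_script).
    base   = '/var/tmp/foreman-ssh-cmd-example'
    script = <<~SCRIPT
      sh <<EOF | /usr/bin/tee #{base}/output
      #{base}/script-wrapper #{base}/script < /dev/null
      echo \\$?>#{base}/exit_code
      EOF
      exit $(cat #{base}/exit_code)
    SCRIPT
    puts script
    # The wrapper uploaded in prepare_start is the one-liner
    #   echo $$ > /var/tmp/foreman-ssh-cmd-example/pid; exec "$@";
    # so the pid file records the running script's PID, and kill's
    # `pkill -P $(cat .../pid)` signals that script's child processes.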
@@ -197,28 +231,28 @@ module Proxy::RemoteExecution::Ssh::Runners
     end

     def close_session
-      @session = nil
       raise 'Control socket file does not exist' unless File.exist?(local_command_file("socket"))
       @logger.debug("Sending exit request for session #{@ssh_user}@#{@host}")
-      args = ['/usr/bin/ssh', @host, "-o", "
-
-
-
-
+      args = ['/usr/bin/ssh', @host, "-o", "ControlPath=#{local_command_file("socket")}", "-O", "exit"].flatten
+      pm = Proxy::Dynflow::ProcessManager.new(args)
+      pm.on_stdout { |data| @logger.debug "[close_session]: #{data.chomp}"; data }
+      pm.on_stderr { |data| @logger.debug "[close_session]: #{data.chomp}"; data }
+      pm.run!
     end

     def close
-      run_sync("rm -rf
+      run_sync("rm -rf #{remote_command_dir}") if should_cleanup?
     rescue StandardError => e
       publish_exception('Error when removing remote working dir', e, false)
     ensure
-      close_session if @
+      close_session if @process_manager
       FileUtils.rm_rf(local_command_dir) if Dir.exist?(local_command_dir) && @cleanup_working_dirs
     end

-    def publish_data(data, type)
+    def publish_data(data, type, pm = nil)
+      pm ||= @process_manager
       super(data.force_encoding('UTF-8'), type) unless @user_method.filter_password?(data)
-      @user_method.on_data(data,
+      @user_method.on_data(data, pm.stdin) if pm
     end

     private
@@ -228,24 +262,7 @@ module Proxy::RemoteExecution::Ssh::Runners
     end

     def should_cleanup?
-      @
-    end
-
-    # Creates session with three pipes - one for reading and two for
-    # writing. Similar to `Open3.popen3` method but without creating
-    # a separate thread to monitor it.
-    def session(args, in_stream: true, out_stream: true, err_stream: true)
-      @session = true
-
-      in_read, in_write = in_stream ? IO.pipe : '/dev/null'
-      out_read, out_write = out_stream ? IO.pipe : [nil, '/dev/null']
-      err_read, err_write = err_stream ? IO.pipe : [nil, '/dev/null']
-      command_pid = spawn(*args, :in => in_read, :out => out_write, :err => err_write)
-      in_read.close if in_stream
-      out_write.close if out_stream
-      err_write.close if err_stream
-
-      return command_pid, in_write, out_read, err_read
+      @process_manager && @cleanup_working_dirs
     end

     def ssh_options(with_pty = false)
@@ -280,42 +297,31 @@ module Proxy::RemoteExecution::Ssh::Runners
     # available. The yielding doesn't happen automatically, but as
     # part of calling the `refresh` method.
     def run_async(command)
-      raise 'Async command already in progress' if @started
+      raise 'Async command already in progress' if @process_manager&.started?

-      @started = false
       @user_method.reset
-
-      @started = true
+      initialize_command(*get_args(command, true))

-
+      true
     end

     def run_started?
-      @started && @user_method.sent_all_data?
+      @process_manager&.started? && @user_method.sent_all_data?
     end

-    def
-
-
-
-
-      stdout += out_io.read until out_io.eof? rescue
-      out_io.close
+    def run_sync(command, stdin: nil, publish: false, close_stdin: true, tty: false)
+      pm = Proxy::Dynflow::ProcessManager.new(get_args(command, tty))
+      if publish
+        pm.on_stdout { |data| publish_data(data, 'stdout', pm); '' }
+        pm.on_stderr { |data| publish_data(data, 'stderr', pm); '' }
       end
-
-
-
-
-
-
-
-    def run_sync(command, stdin = nil)
-      pid, tx, rx, err = session(get_args(command))
-      tx.puts(stdin) unless stdin.nil?
-      tx.close
-      stdout, stderr = read_output_debug(err, rx)
-      exit_status = Process.wait2(pid)[1].exitstatus
-      return exit_status, stdout, stderr
+      pm.start!
+      unless pm.status
+        pm.stdin.io.puts(stdin) if stdin
+        pm.stdin.io.close if close_stdin
+        pm.run!
+      end
+      pm
     end

     def prepare_known_hosts
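
The two call-site styles in this diff suggest the convention for the on_stdout/on_stderr hooks: the block's return value is what stays in the manager's buffer, so run_sync returns '' after publishing (nothing is buffered twice) while close_session returns the data to keep it readable afterwards. A small hedged sketch, again assuming smart_proxy_dynflow is installed:

    require 'smart_proxy_dynflow/process_manager'

    pm = Proxy::Dynflow::ProcessManager.new(['sh', '-c', 'echo out; echo err >&2'])
    pm.on_stdout { |data| print "streamed: #{data}"; '' } # handle immediately, keep buffer empty
    pm.on_stderr { |data| data }                          # leave the data in the buffer
    pm.run!
    puts "buffered stderr: #{pm.stderr.to_s.chomp}"
    puts "exit code:       #{pm.status}"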
@@ -362,15 +368,14 @@ module Proxy::RemoteExecution::Ssh::Runners
       # We use tee here to pipe stdin coming from ssh to a file at $path, while silencing its output
       # This is used to write to $path with elevated permissions, solutions using cat and output redirection
       # would not work, because the redirection would happen in the non-elevated shell.
-      command = "tee
+      command = "tee #{path} >/dev/null && chmod #{permissions} #{path}"

       @logger.debug("Sending data to #{path} on remote host:\n#{data}")
-
-
-
-
-
-      end
+      ensure_remote_command(command,
+                            publish: true,
+                            stdin: data,
+                            error: "Unable to upload file to #{path} on remote system, exit code: %{exit_code}"
+      )

       path
     end

@@ -382,9 +387,16 @@ module Proxy::RemoteExecution::Ssh::Runners
     end

     def ensure_remote_directory(path)
-
-
-
+      ensure_remote_command("mkdir -p #{path}",
+                            publish: true,
+                            error: "Unable to create directory #{path} on remote system, exit code: %{exit_code}"
+      )
+    end
+
+    def ensure_remote_command(cmd, error: nil, **kwargs)
+      if (pm = run_sync(cmd, **kwargs)).status != 0
+        msg = error || 'Failed to run command %{command} on remote machine, exit code: %{exit_code}'
+        raise(msg % { command: cmd, exit_code: pm.status })
       end
     end

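
The error strings threaded through ensure_remote_command rely on Ruby's String#% with named references, which tolerates extra keys in the argument hash. A one-line check of how the default message expands (the command shown is just an example):

    msg = 'Failed to run command %{command} on remote machine, exit code: %{exit_code}'
    puts msg % { command: 'mkdir -p /var/tmp/foreman-ssh-cmd', exit_code: 1 }
    # => Failed to run command mkdir -p /var/tmp/foreman-ssh-cmd on remote machine, exit code: 1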

data/lib/smart_proxy_remote_execution_ssh.rb
CHANGED

@@ -7,21 +7,8 @@ module Proxy::RemoteExecution
   module Ssh
     class << self
       def validate!
-        unless private_key_file
-          raise "settings for `ssh_identity_key` not set"
-        end
-
-        unless File.exist?(private_key_file)
-          raise "Ssh public key file #{private_key_file} doesn't exist.\n"\
-            "You can generate one with `ssh-keygen -t rsa -b 4096 -f #{private_key_file} -N ''`"
-        end
-
-        unless File.exist?(public_key_file)
-          raise "Ssh public key file #{public_key_file} doesn't exist"
-        end
-
         validate_mode!
-
+        validate_ssh_settings!
         validate_mqtt_settings!
       end

@@ -60,6 +47,28 @@ module Proxy::RemoteExecution

         raise 'mqtt_broker has to be set when pull-mqtt mode is used' if Plugin.settings.mqtt_broker.nil?
         raise 'mqtt_port has to be set when pull-mqtt mode is used' if Plugin.settings.mqtt_port.nil?
+
+        if Plugin.settings.mqtt_tls.nil?
+          Plugin.settings.mqtt_tls = [:foreman_ssl_cert, :foreman_ssl_key, :foreman_ssl_ca].all? { |key| ::Proxy::SETTINGS[key] }
+        end
+      end
+
+      def validate_ssh_settings!
+        return unless requires_configured_ssh?
+        unless private_key_file
+          raise "settings for `ssh_identity_key` not set"
+        end
+
+        unless File.exist?(private_key_file)
+          raise "SSH public key file #{private_key_file} doesn't exist.\n"\
+            "You can generate one with `ssh-keygen -t rsa -b 4096 -f #{private_key_file} -N ''`"
+        end
+
+        unless File.exist?(public_key_file)
+          raise "SSH public key file #{public_key_file} doesn't exist"
+        end
+
+        validate_ssh_log_level!
       end

       def validate_ssh_log_level!

@@ -83,6 +92,10 @@ module Proxy::RemoteExecution
         Plugin.settings.ssh_log_level = Plugin.settings.ssh_log_level.to_sym
       end

+      def requires_configured_ssh?
+        %i[ssh ssh-async].include?(Plugin.settings.mode) || Plugin.settings.cockpit_integration
+      end
+
       def job_storage
         @job_storage ||= Proxy::RemoteExecution::Ssh::JobStorage.new
       end
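
The new mqtt_tls default boils down to: use TLS only when all three of smart-proxy's SSL settings are present. Restated as a standalone snippet with made-up values:

    # Stand-in hash for ::Proxy::SETTINGS; the paths are hypothetical.
    proxy_settings = {
      foreman_ssl_cert: '/etc/foreman-proxy/ssl_cert.pem',
      foreman_ssl_key:  '/etc/foreman-proxy/ssl_key.pem',
      foreman_ssl_ca:   nil
    }

    mqtt_tls = [:foreman_ssl_cert, :foreman_ssl_key, :foreman_ssl_ca].all? { |key| proxy_settings[key] }
    puts mqtt_tls # => false, because foreman_ssl_ca is not set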

data/settings.d/remote_execution_ssh.yml.example
CHANGED

@@ -5,6 +5,8 @@
 :remote_working_dir: '/var/tmp'
 # :kerberos_auth: false

+# :cockpit_integration: true
+
 # Mode of operation, one of ssh, ssh-async, pull, pull-mqtt
 :mode: ssh

@@ -24,3 +26,8 @@
 # MQTT configuration, need to be set if mode is set to pull-mqtt
 # :mqtt_broker: localhost
 # :mqtt_port: 1883
+
+# Use of SSL can be forced either way by explicitly setting mqtt_tls setting. If
+# unset, SSL gets used if smart-proxy's foreman_ssl_cert, foreman_ssl_key and
+# foreman_ssl_ca settings are set available.
+# :mqtt_tls:
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: smart_proxy_remote_execution_ssh
 version: !ruby/object:Gem::Version
-  version: 0.5.3
+  version: 0.6.0
 platform: ruby
 authors:
 - Ivan Nečas
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-04-
+date: 2022-04-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler

@@ -114,14 +114,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.8'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.8'
 - !ruby/object:Gem::Dependency
   name: net-ssh
   requirement: !ruby/object:Gem::Requirement