smart_proxy_remote_execution_ssh 0.5.1 → 0.6.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 2515f08b67e5b567e50182d278d73d2db8466e7a12cb6e8000ad8e4f8ce6bf65
4
- data.tar.gz: 686ba55bf862fc81f3154ebadbaafa3702416109e81641bdcb2442873c5a451b
3
+ metadata.gz: a3e83401d99929917142c15969fce19bf17e6fffc82000da554c457567016f8d
4
+ data.tar.gz: 9dbdec23d2203557204fb60f45cadc884d640d49b436eb291ad0cc4769182e2c
5
5
  SHA512:
6
- metadata.gz: 9f9e84c78cab2997b711ee559440ad9323c6c3605726f6360a94415ccda3f7a08db0450fba8fe5479e06b649a8e6e8e545be72f35751762c50bf8ef5bc32696e
7
- data.tar.gz: 0705716e01985cd8b5a3aa2ffd4104f6d4ce0938dc8a0e9b105f65f149d62c1c030916250cf0f1c35ed51bfaceb36964d0b959e5e13f42c70fb0a42389a01a2e
6
+ metadata.gz: 3cc070d29a185dc80cee33c3b69bf06772c226721ed9f42fba167a702a917ac04b50729ee3354d430b2a7c840581e98b75236cb4298e77aeee749fd9af6f56f2
7
+ data.tar.gz: a040bf6621ea2e35b421bc9098a5ec2569bc24e51ea6ae5346eec8c9e05d91e2560b96843d553589f8bbab10f009725947e76b306c5bb785c16ae0f3006baed8
@@ -56,37 +56,52 @@ module Proxy::RemoteExecution::Ssh::Actions
56
56
  # Client was notified or is already running, dealing with this situation
57
57
  # is only supported if mqtt is available
58
58
  # Otherwise we have to wait it out
59
- # TODO
60
- # if input[:with_mqtt]
59
+ mqtt_cancel if input[:with_mqtt]
61
60
  end
62
61
  suspend
63
62
  end
64
63
 
65
64
  def mqtt_start(otp_password)
66
- payload = {
67
- type: 'data',
68
- message_id: SecureRandom.uuid,
69
- version: 1,
70
- sent: DateTime.now.iso8601,
71
- directive: 'foreman',
65
+ payload = mqtt_payload_base.merge(
66
+ content: "#{input[:proxy_url]}/ssh/jobs/#{input[:job_uuid]}",
72
67
  metadata: {
68
+ 'event': 'start',
73
69
  'job_uuid': input[:job_uuid],
74
70
  'username': execution_plan_id,
75
71
  'password': otp_password,
76
72
  'return_url': "#{input[:proxy_url]}/ssh/jobs/#{input[:job_uuid]}/update",
77
73
  },
78
- content: "#{input[:proxy_url]}/ssh/jobs/#{input[:job_uuid]}",
79
- }
74
+ )
80
75
  mqtt_notify payload
81
76
  output[:state] = :notified
82
77
  end
83
78
 
79
+ def mqtt_cancel
80
+ cleanup
81
+ payload = mqtt_payload_base.merge(
82
+ metadata: {
83
+ 'event': 'cancel',
84
+ 'job_uuid': input[:job_uuid]
85
+ }
86
+ )
87
+ mqtt_notify payload
88
+ end
89
+
84
90
  def mqtt_notify(payload)
85
- MQTT::Client.connect(settings.mqtt_broker, settings.mqtt_port) do |c|
91
+ with_mqtt_client do |c|
86
92
  c.publish(mqtt_topic, JSON.dump(payload), false, 1)
87
93
  end
88
94
  end
89
95
 
96
+ def with_mqtt_client(&block)
97
+ MQTT::Client.connect(settings.mqtt_broker, settings.mqtt_port,
98
+ :ssl => settings.mqtt_tls,
99
+ :cert_file => ::Proxy::SETTINGS.foreman_ssl_cert,
100
+ :key_file => ::Proxy::SETTINGS.foreman_ssl_key,
101
+ :ca_file => ::Proxy::SETTINGS.foreman_ssl_ca,
102
+ &block)
103
+ end
104
+
90
105
  def host_name
91
106
  alternative_names = input.fetch(:alternative_names, {})
92
107
 
@@ -106,5 +121,15 @@ module Proxy::RemoteExecution::Ssh::Actions
106
121
  def job_storage
107
122
  Proxy::RemoteExecution::Ssh.job_storage
108
123
  end
124
+
125
+ def mqtt_payload_base
126
+ {
127
+ type: 'data',
128
+ message_id: SecureRandom.uuid,
129
+ version: 1,
130
+ sent: DateTime.now.iso8601,
131
+ directive: 'foreman'
132
+ }
133
+ end
109
134
  end
110
135
  end
@@ -13,14 +13,16 @@ module Proxy::RemoteExecution
13
13
  File.read(Ssh.public_key_file)
14
14
  end
15
15
 
16
- post "/session" do
17
- do_authorize_any
18
- session = Cockpit::Session.new(env)
19
- unless session.valid?
20
- return [ 400, "Invalid request: /ssh/session requires connection upgrade to 'raw'" ]
16
+ if Proxy::RemoteExecution::Ssh::Plugin.settings.cockpit_integration
17
+ post "/session" do
18
+ do_authorize_any
19
+ session = Cockpit::Session.new(env)
20
+ unless session.valid?
21
+ return [ 400, "Invalid request: /ssh/session requires connection upgrade to 'raw'" ]
22
+ end
23
+ session.hijack!
24
+ 101
21
25
  end
22
- session.hijack!
23
- 101
24
26
  end
25
27
 
26
28
  delete '/known_hosts/:name' do |name|
@@ -12,15 +12,19 @@ module Proxy::RemoteExecution::Ssh
12
12
  :remote_working_dir => '/var/tmp',
13
13
  :local_working_dir => '/var/tmp',
14
14
  :kerberos_auth => false,
15
+ :cockpit_integration => true,
15
16
  # When set to nil, makes REX use the runner's default interval
16
17
  # :runner_refresh_interval => nil,
17
- :ssh_log_level => :fatal,
18
+ :ssh_log_level => :error,
18
19
  :cleanup_working_dirs => true,
19
20
  # :mqtt_broker => nil,
20
21
  # :mqtt_port => nil,
22
+ # :mqtt_tls => nil,
21
23
  :mode => :ssh
22
24
 
23
- plugin :ssh, Proxy::RemoteExecution::Ssh::VERSION
25
+ capability(proc { 'cockpit' if settings.cockpit_integration })
26
+
27
+ plugin :script, Proxy::RemoteExecution::Ssh::VERSION
24
28
  after_activation do
25
29
  require 'smart_proxy_dynflow'
26
30
  require 'smart_proxy_remote_execution_ssh/version'
@@ -36,12 +36,12 @@ module Proxy::RemoteExecution::Ssh::Runners
36
36
  def initialization_script
37
37
  close_stdin = '</dev/null'
38
38
  close_fds = close_stdin + ' >/dev/null 2>/dev/null'
39
- main_script = "(#{@remote_script} #{close_stdin} 2>&1; echo $?>#{@base_dir}/init_exit_code) >#{@base_dir}/output"
39
+ main_script = "(#{@remote_script_wrapper} #{@remote_script} #{close_stdin} 2>&1; echo $?>#{@base_dir}/init_exit_code) >#{@base_dir}/output"
40
40
  control_script_finish = "#{@control_script_path} init-script-finish"
41
41
  <<-SCRIPT.gsub(/^ +\| /, '')
42
42
  | export CONTROL_SCRIPT="#{@control_script_path}"
43
- | sh -c '#{main_script}; #{control_script_finish}' #{close_fds} &
44
- | echo $! > '#{@base_dir}/pid'
43
+ | #{"chown #{@user_method.effective_user} #{@base_dir}" if @user_method.cli_command_prefix}
44
+ | #{@user_method.cli_command_prefix} sh -c '#{main_script}; #{control_script_finish}' #{close_fds} &
45
45
  SCRIPT
46
46
  end
47
47
 
@@ -50,18 +50,23 @@ module Proxy::RemoteExecution::Ssh::Runners
50
50
  end
51
51
 
52
52
  def refresh
53
- err = output = nil
54
53
  begin
55
- _, output, err = run_sync("#{@user_method.cli_command_prefix} #{@retrieval_script}")
54
+ pm = run_sync("#{@user_method.cli_command_prefix} #{@retrieval_script}")
56
55
  rescue StandardError => e
57
56
  @logger.info("Error while connecting to the remote host on refresh: #{e.message}")
58
57
  end
59
58
 
60
- process_retrieved_data(output, err)
59
+ process_retrieved_data(pm.stdout.to_s.chomp, pm.stderr.to_s.chomp)
61
60
  ensure
62
61
  destroy_session
63
62
  end
64
63
 
64
+ def kill
65
+ run_sync("pkill -P $(cat #{@pid_path})")
66
+ rescue StandardError => e
67
+ publish_exception('Unexpected error', e, false)
68
+ end
69
+
65
70
  def process_retrieved_data(output, err)
66
71
  return if output.nil? || output.empty?
67
72
 
@@ -126,7 +131,11 @@ module Proxy::RemoteExecution::Ssh::Runners
126
131
  end
127
132
 
128
133
  def cleanup
129
- run_sync("rm -rf \"#{remote_command_dir}\"") if @cleanup_working_dirs
134
+ if @cleanup_working_dirs
135
+ ensure_remote_command("rm -rf #{remote_command_dir}",
136
+ publish: true,
137
+ error: "Unable to remove working directory #{remote_command_dir} on remote system, exit code: %{exit_code}")
138
+ end
130
139
  end
131
140
 
132
141
  def destroy_session
@@ -1,5 +1,6 @@
1
1
  require 'fileutils'
2
- require 'smart_proxy_dynflow/runner/command'
2
+ require 'smart_proxy_dynflow/runner/process_manager_command'
3
+ require 'smart_proxy_dynflow/process_manager'
3
4
 
4
5
  module Proxy::RemoteExecution::Ssh::Runners
5
6
  class EffectiveUserMethod
@@ -12,9 +13,9 @@ module Proxy::RemoteExecution::Ssh::Runners
12
13
  @password_sent = false
13
14
  end
14
15
 
15
- def on_data(received_data, ssh_channel)
16
+ def on_data(received_data, io_buffer)
16
17
  if received_data.match(login_prompt)
17
- ssh_channel.puts(effective_user_password)
18
+ io_buffer.add_data(effective_user_password + "\n")
18
19
  @password_sent = true
19
20
  end
20
21
  end
@@ -90,7 +91,7 @@ module Proxy::RemoteExecution::Ssh::Runners
90
91
 
91
92
  # rubocop:disable Metrics/ClassLength
92
93
  class ScriptRunner < Proxy::Dynflow::Runner::Base
93
- include Proxy::Dynflow::Runner::Command
94
+ include Proxy::Dynflow::Runner::ProcessManagerCommand
94
95
  attr_reader :execution_timeout_interval
95
96
 
96
97
  EXPECTED_POWER_ACTION_MESSAGES = ['restart host', 'shutdown host'].freeze
@@ -110,7 +111,7 @@ module Proxy::RemoteExecution::Ssh::Runners
110
111
 
111
112
  @client_private_key_file = settings.ssh_identity_key_file
112
113
  @local_working_dir = options.fetch(:local_working_dir, settings.local_working_dir)
113
- @remote_working_dir = options.fetch(:remote_working_dir, settings.remote_working_dir)
114
+ @remote_working_dir = options.fetch(:remote_working_dir, settings.remote_working_dir.shellescape)
114
115
  @cleanup_working_dirs = options.fetch(:cleanup_working_dirs, settings.cleanup_working_dirs)
115
116
  @first_execution = options.fetch(:first_execution, false)
116
117
  @user_method = user_method
@@ -141,6 +142,8 @@ module Proxy::RemoteExecution::Ssh::Runners
141
142
 
142
143
  def start
143
144
  Proxy::RemoteExecution::Utils.prune_known_hosts!(@host, @ssh_port, logger) if @first_execution
145
+ establish_connection
146
+ preflight_checks
144
147
  prepare_start
145
148
  script = initialization_script
146
149
  logger.debug("executing script:\n#{indent_multiline(script)}")
@@ -154,32 +157,63 @@ module Proxy::RemoteExecution::Ssh::Runners
154
157
  run_async(*args)
155
158
  end
156
159
 
160
+ def preflight_checks
161
+ ensure_remote_command(cp_script_to_remote("#!/bin/sh\nexec true", 'test'),
162
+ publish: true,
163
+ error: 'Failed to execute script on remote machine, exit code: %{exit_code}.'
164
+ )
165
+ unless @user_method.is_a? NoopUserMethod
166
+ path = cp_script_to_remote("#!/bin/sh\nexec #{@user_method.cli_command_prefix} true", 'effective-user-test')
167
+ ensure_remote_command(path,
168
+ error: 'Failed to change to effective user, exit code: %{exit_code}',
169
+ publish: true,
170
+ tty: true,
171
+ close_stdin: false)
172
+ end
173
+ end
174
+
175
+ def establish_connection
176
+ # run_sync ['-f', '-N'] would be cleaner, but ssh does not close its
177
+ # stderr which trips up the process manager which expects all FDs to be
178
+ # closed
179
+ ensure_remote_command(
180
+ 'true',
181
+ publish: true,
182
+ error: 'Failed to establish connection to remote host, exit code: %{exit_code}'
183
+ )
184
+ end
185
+
157
186
  def prepare_start
158
187
  @remote_script = cp_script_to_remote
159
188
  @output_path = File.join(File.dirname(@remote_script), 'output')
160
189
  @exit_code_path = File.join(File.dirname(@remote_script), 'exit_code')
190
+ @pid_path = File.join(File.dirname(@remote_script), 'pid')
191
+ @remote_script_wrapper = upload_data("echo $$ > #{@pid_path}; exec \"$@\";", File.join(File.dirname(@remote_script), 'script-wrapper'), 555)
161
192
  end
162
193
 
163
194
  # the script that initiates the execution
164
195
  def initialization_script
165
196
  su_method = @user_method.instance_of?(SuUserMethod)
166
197
  # pipe the output to tee while capturing the exit code in a file
167
- <<-SCRIPT.gsub(/^\s+\| /, '')
168
- | sh -c "(#{@user_method.cli_command_prefix}#{su_method ? "'#{@remote_script} < /dev/null '" : "#{@remote_script} < /dev/null"}; echo \\$?>#{@exit_code_path}) | /usr/bin/tee #{@output_path}
169
- | exit \\$(cat #{@exit_code_path})"
198
+ <<~SCRIPT
199
+ sh <<EOF | /usr/bin/tee #{@output_path}
200
+ #{@remote_script_wrapper} #{@user_method.cli_command_prefix}#{su_method ? "'#{@remote_script} < /dev/null '" : "#{@remote_script} < /dev/null"}
201
+ echo \\$?>#{@exit_code_path}
202
+ EOF
203
+ exit $(cat #{@exit_code_path})
170
204
  SCRIPT
171
205
  end
172
206
 
173
207
  def refresh
174
- return if @session.nil?
208
+ return if @process_manager.nil?
175
209
  super
176
210
  ensure
177
211
  check_expecting_disconnect
178
212
  end
179
213
 
180
214
  def kill
181
- if @session
182
- run_sync("pkill -f #{remote_command_file('script')}")
215
+ if @process_manager&.started?
216
+ run_sync("pkill -P $(cat #{@pid_path})")
183
217
  else
184
218
  logger.debug('connection closed')
185
219
  end
@@ -197,26 +231,28 @@ module Proxy::RemoteExecution::Ssh::Runners
197
231
  end
198
232
 
199
233
  def close_session
200
- @session = nil
201
234
  raise 'Control socket file does not exist' unless File.exist?(local_command_file("socket"))
202
235
  @logger.debug("Sending exit request for session #{@ssh_user}@#{@host}")
203
- args = ['/usr/bin/ssh', @host, "-o", "User=#{@ssh_user}", "-o", "ControlPath=#{local_command_file("socket")}", "-O", "exit"].flatten
204
- *, err = session(args, in_stream: false, out_stream: false)
205
- read_output_debug(err)
236
+ args = ['/usr/bin/ssh', @host, "-o", "ControlPath=#{local_command_file("socket")}", "-O", "exit"].flatten
237
+ pm = Proxy::Dynflow::ProcessManager.new(args)
238
+ pm.on_stdout { |data| @logger.debug "[close_session]: #{data.chomp}"; data }
239
+ pm.on_stderr { |data| @logger.debug "[close_session]: #{data.chomp}"; data }
240
+ pm.run!
206
241
  end
207
242
 
208
243
  def close
209
- run_sync("rm -rf \"#{remote_command_dir}\"") if should_cleanup?
244
+ run_sync("rm -rf #{remote_command_dir}") if should_cleanup?
210
245
  rescue StandardError => e
211
246
  publish_exception('Error when removing remote working dir', e, false)
212
247
  ensure
213
- close_session if @session
248
+ close_session if @process_manager
214
249
  FileUtils.rm_rf(local_command_dir) if Dir.exist?(local_command_dir) && @cleanup_working_dirs
215
250
  end
216
251
 
217
- def publish_data(data, type)
252
+ def publish_data(data, type, pm = nil)
253
+ pm ||= @process_manager
218
254
  super(data.force_encoding('UTF-8'), type) unless @user_method.filter_password?(data)
219
- @user_method.on_data(data, @command_in)
255
+ @user_method.on_data(data, pm.stdin) if pm
220
256
  end
221
257
 
222
258
  private
@@ -226,24 +262,7 @@ module Proxy::RemoteExecution::Ssh::Runners
226
262
  end
227
263
 
228
264
  def should_cleanup?
229
- @session && @cleanup_working_dirs
230
- end
231
-
232
- # Creates session with three pipes - one for reading and two for
233
- # writing. Similar to `Open3.popen3` method but without creating
234
- # a separate thread to monitor it.
235
- def session(args, in_stream: true, out_stream: true, err_stream: true)
236
- @session = true
237
-
238
- in_read, in_write = in_stream ? IO.pipe : '/dev/null'
239
- out_read, out_write = out_stream ? IO.pipe : [nil, '/dev/null']
240
- err_read, err_write = err_stream ? IO.pipe : [nil, '/dev/null']
241
- command_pid = spawn(*args, :in => in_read, :out => out_write, :err => err_write)
242
- in_read.close if in_stream
243
- out_write.close if out_stream
244
- err_write.close if err_stream
245
-
246
- return command_pid, in_write, out_read, err_read
265
+ @process_manager && @cleanup_working_dirs
247
266
  end
248
267
 
249
268
  def ssh_options(with_pty = false)
@@ -278,42 +297,31 @@ module Proxy::RemoteExecution::Ssh::Runners
278
297
  # available. The yielding doesn't happen automatically, but as
279
298
  # part of calling the `refresh` method.
280
299
  def run_async(command)
281
- raise 'Async command already in progress' if @started
300
+ raise 'Async command already in progress' if @process_manager&.started?
282
301
 
283
- @started = false
284
302
  @user_method.reset
285
- @command_pid, @command_in, @command_out = session(get_args(command, with_pty: true), err_stream: false)
286
- @started = true
303
+ initialize_command(*get_args(command, true))
287
304
 
288
- return true
305
+ true
289
306
  end
290
307
 
291
308
  def run_started?
292
- @started && @user_method.sent_all_data?
309
+ @process_manager&.started? && @user_method.sent_all_data?
293
310
  end
294
311
 
295
- def read_output_debug(err_io, out_io = nil)
296
- stdout = ''
297
- debug_str = ''
298
-
299
- if out_io
300
- stdout += out_io.read until out_io.eof? rescue
301
- out_io.close
312
+ def run_sync(command, stdin: nil, publish: false, close_stdin: true, tty: false)
313
+ pm = Proxy::Dynflow::ProcessManager.new(get_args(command, tty))
314
+ if publish
315
+ pm.on_stdout { |data| publish_data(data, 'stdout', pm); '' }
316
+ pm.on_stderr { |data| publish_data(data, 'stderr', pm); '' }
302
317
  end
303
- debug_str += err_io.read until err_io.eof? rescue
304
- err_io.close
305
- debug_str.lines.each { |line| @logger.debug(line.strip) }
306
-
307
- return stdout, debug_str
308
- end
309
-
310
- def run_sync(command, stdin = nil)
311
- pid, tx, rx, err = session(get_args(command))
312
- tx.puts(stdin) unless stdin.nil?
313
- tx.close
314
- stdout, stderr = read_output_debug(err, rx)
315
- exit_status = Process.wait2(pid)[1].exitstatus
316
- return exit_status, stdout, stderr
318
+ pm.start!
319
+ unless pm.status
320
+ pm.stdin.io.puts(stdin) if stdin
321
+ pm.stdin.io.close if close_stdin
322
+ pm.run!
323
+ end
324
+ pm
317
325
  end
318
326
 
319
327
  def prepare_known_hosts
@@ -360,15 +368,14 @@ module Proxy::RemoteExecution::Ssh::Runners
360
368
  # We use tee here to pipe stdin coming from ssh to a file at $path, while silencing its output
361
369
  # This is used to write to $path with elevated permissions, solutions using cat and output redirection
362
370
  # would not work, because the redirection would happen in the non-elevated shell.
363
- command = "tee '#{path}' >/dev/null && chmod '#{permissions}' '#{path}'"
371
+ command = "tee #{path} >/dev/null && chmod #{permissions} #{path}"
364
372
 
365
373
  @logger.debug("Sending data to #{path} on remote host:\n#{data}")
366
- status, _out, err = run_sync(command, data)
367
-
368
- @logger.warn("Output on stderr while uploading #{path}:\n#{err}") unless err.empty?
369
- if status != 0
370
- raise "Unable to upload file to #{path} on remote system: exit code: #{status}"
371
- end
374
+ ensure_remote_command(command,
375
+ publish: true,
376
+ stdin: data,
377
+ error: "Unable to upload file to #{path} on remote system, exit code: %{exit_code}"
378
+ )
372
379
 
373
380
  path
374
381
  end
@@ -380,9 +387,16 @@ module Proxy::RemoteExecution::Ssh::Runners
380
387
  end
381
388
 
382
389
  def ensure_remote_directory(path)
383
- exit_code, _output, err = run_sync("mkdir -p #{path}")
384
- if exit_code != 0
385
- raise "Unable to create directory on remote system #{path}: exit code: #{exit_code}\n #{err}"
390
+ ensure_remote_command("mkdir -p #{path}",
391
+ publish: true,
392
+ error: "Unable to create directory #{path} on remote system, exit code: %{exit_code}"
393
+ )
394
+ end
395
+
396
+ def ensure_remote_command(cmd, error: nil, **kwargs)
397
+ if (pm = run_sync(cmd, **kwargs)).status != 0
398
+ msg = error || 'Failed to run command %{command} on remote machine, exit code: %{exit_code}'
399
+ raise(msg % { command: cmd, exit_code: pm.status })
386
400
  end
387
401
  end
388
402
 
@@ -1,7 +1,7 @@
1
1
  module Proxy
2
2
  module RemoteExecution
3
3
  module Ssh
4
- VERSION = '0.5.1'
4
+ VERSION = '0.6.0'
5
5
  end
6
6
  end
7
7
  end
@@ -7,21 +7,8 @@ module Proxy::RemoteExecution
7
7
  module Ssh
8
8
  class << self
9
9
  def validate!
10
- unless private_key_file
11
- raise "settings for `ssh_identity_key` not set"
12
- end
13
-
14
- unless File.exist?(private_key_file)
15
- raise "Ssh public key file #{private_key_file} doesn't exist.\n"\
16
- "You can generate one with `ssh-keygen -t rsa -b 4096 -f #{private_key_file} -N ''`"
17
- end
18
-
19
- unless File.exist?(public_key_file)
20
- raise "Ssh public key file #{public_key_file} doesn't exist"
21
- end
22
-
23
10
  validate_mode!
24
- validate_ssh_log_level!
11
+ validate_ssh_settings!
25
12
  validate_mqtt_settings!
26
13
  end
27
14
 
@@ -47,7 +34,7 @@ module Proxy::RemoteExecution
47
34
  when :ssh
48
35
  Plugin.logger.warn('Deprecated option async_ssh used together with ssh mode, switching mode to ssh-async.')
49
36
  Plugin.settings.mode = :'ssh-async'
50
- when :'async-ssh'
37
+ when :'ssh-async'
51
38
  # This is a noop
52
39
  else
53
40
  Plugin.logger.warn('Deprecated option async_ssh used together with incompatible mode, ignoring.')
@@ -60,6 +47,28 @@ module Proxy::RemoteExecution
60
47
 
61
48
  raise 'mqtt_broker has to be set when pull-mqtt mode is used' if Plugin.settings.mqtt_broker.nil?
62
49
  raise 'mqtt_port has to be set when pull-mqtt mode is used' if Plugin.settings.mqtt_port.nil?
50
+
51
+ if Plugin.settings.mqtt_tls.nil?
52
+ Plugin.settings.mqtt_tls = [:foreman_ssl_cert, :foreman_ssl_key, :foreman_ssl_ca].all? { |key| ::Proxy::SETTINGS[key] }
53
+ end
54
+ end
55
+
56
+ def validate_ssh_settings!
57
+ return unless requires_configured_ssh?
58
+ unless private_key_file
59
+ raise "settings for `ssh_identity_key` not set"
60
+ end
61
+
62
+ unless File.exist?(private_key_file)
63
+ raise "SSH private key file #{private_key_file} doesn't exist.\n"\
64
+ "You can generate one with `ssh-keygen -t rsa -b 4096 -f #{private_key_file} -N ''`"
65
+ end
66
+
67
+ unless File.exist?(public_key_file)
68
+ raise "SSH public key file #{public_key_file} doesn't exist"
69
+ end
70
+
71
+ validate_ssh_log_level!
63
72
  end
64
73
 
65
74
  def validate_ssh_log_level!
@@ -83,6 +92,10 @@ module Proxy::RemoteExecution
83
92
  Plugin.settings.ssh_log_level = Plugin.settings.ssh_log_level.to_sym
84
93
  end
85
94
 
95
+ def requires_configured_ssh?
96
+ %i[ssh ssh-async].include?(Plugin.settings.mode) || Plugin.settings.cockpit_integration
97
+ end
98
+
86
99
  def job_storage
87
100
  @job_storage ||= Proxy::RemoteExecution::Ssh::JobStorage.new
88
101
  end
@@ -5,6 +5,8 @@
5
5
  :remote_working_dir: '/var/tmp'
6
6
  # :kerberos_auth: false
7
7
 
8
+ # :cockpit_integration: true
9
+
8
10
  # Mode of operation, one of ssh, ssh-async, pull, pull-mqtt
9
11
  :mode: ssh
10
12
 
@@ -16,7 +18,7 @@
16
18
  # Defines the verbosity of logging coming from ssh command
17
19
  # one of :debug, :info, :error, :fatal
18
20
  # must be lower than general log level
19
- # :ssh_log_level: fatal
21
+ # :ssh_log_level: error
20
22
 
21
23
  # Remove working directories on job completion
22
24
  # :cleanup_working_dirs: true
@@ -24,3 +26,8 @@
24
26
  # MQTT configuration, need to be set if mode is set to pull-mqtt
25
27
  # :mqtt_broker: localhost
26
28
  # :mqtt_port: 1883
29
+
30
+ # Use of SSL can be forced either way by explicitly setting mqtt_tls setting. If
31
+ # unset, SSL gets used if smart-proxy's foreman_ssl_cert, foreman_ssl_key and
32
+ # foreman_ssl_ca settings are all set.
33
+ # :mqtt_tls:
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: smart_proxy_remote_execution_ssh
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.5.1
4
+ version: 0.6.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ivan Nečas
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2022-01-04 00:00:00.000000000 Z
11
+ date: 2022-04-19 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: bundler
@@ -114,14 +114,14 @@ dependencies:
114
114
  requirements:
115
115
  - - "~>"
116
116
  - !ruby/object:Gem::Version
117
- version: '0.5'
117
+ version: '0.8'
118
118
  type: :runtime
119
119
  prerelease: false
120
120
  version_requirements: !ruby/object:Gem::Requirement
121
121
  requirements:
122
122
  - - "~>"
123
123
  - !ruby/object:Gem::Version
124
- version: '0.5'
124
+ version: '0.8'
125
125
  - !ruby/object:Gem::Dependency
126
126
  name: net-ssh
127
127
  requirement: !ruby/object:Gem::Requirement
@@ -203,7 +203,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
203
203
  - !ruby/object:Gem::Version
204
204
  version: '0'
205
205
  requirements: []
206
- rubygems_version: 3.1.2
206
+ rubygems_version: 3.1.4
207
207
  signing_key:
208
208
  specification_version: 4
209
209
  summary: Ssh remote execution provider for Foreman Smart-Proxy