smart_proxy_remote_execution_ssh 0.4.1 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/smart_proxy_remote_execution_ssh/actions/pull_script.rb +110 -0
- data/lib/smart_proxy_remote_execution_ssh/actions/run_script.rb +14 -1
- data/lib/smart_proxy_remote_execution_ssh/actions.rb +6 -0
- data/lib/smart_proxy_remote_execution_ssh/api.rb +49 -0
- data/lib/smart_proxy_remote_execution_ssh/cockpit.rb +82 -71
- data/lib/smart_proxy_remote_execution_ssh/job_storage.rb +51 -0
- data/lib/smart_proxy_remote_execution_ssh/net_ssh_compat.rb +228 -0
- data/lib/smart_proxy_remote_execution_ssh/plugin.rb +10 -5
- data/lib/smart_proxy_remote_execution_ssh/runners/polling_script_runner.rb +1 -2
- data/lib/smart_proxy_remote_execution_ssh/runners/script_runner.rb +83 -132
- data/lib/smart_proxy_remote_execution_ssh/utils.rb +24 -0
- data/lib/smart_proxy_remote_execution_ssh/version.rb +1 -1
- data/lib/smart_proxy_remote_execution_ssh.rb +35 -0
- data/settings.d/remote_execution_ssh.yml.example +9 -3
- metadata +21 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e2f91ec259ca105bcc18a5cecf4342019f4023d44cbff5695998b5e57c25e325
+  data.tar.gz: 51ccf299324d4adf10c07475dca2749d885c258913c8d9e7593d8b8149e6d18d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 74313e91e51ff5d58fce0bc880131b12984a750110a2d76c88875a6b824eec40a2ba9d7336dfda34df7c11d465ded62c14d4d33ba10cf4bed1b2e0309aff9a2b
+  data.tar.gz: 92f85e971f10d9e45418e75d0325f3e548e65954160be9f86cb154adde6bc3a512b4e3a5429d162291704b49a50a5570fcdeb4b129f735b8f8f737938a0194d0
data/lib/smart_proxy_remote_execution_ssh/actions/pull_script.rb
ADDED

@@ -0,0 +1,110 @@
+require 'mqtt'
+require 'json'
+
+module Proxy::RemoteExecution::Ssh::Actions
+  class PullScript < Proxy::Dynflow::Action::Runner
+    JobDelivered = Class.new
+
+    execution_plan_hooks.use :cleanup, :on => :stopped
+
+    def plan(action_input, mqtt: false)
+      super(action_input)
+      input[:with_mqtt] = mqtt
+    end
+
+    def run(event = nil)
+      if event == JobDelivered
+        output[:state] = :delivered
+        suspend
+      else
+        super
+      end
+    end
+
+    def init_run
+      otp_password = if input[:with_mqtt]
+                       ::Proxy::Dynflow::OtpManager.generate_otp(execution_plan_id)
+                     end
+      input[:job_uuid] = job_storage.store_job(host_name, execution_plan_id, run_step_id, input[:script])
+      output[:state] = :ready_for_pickup
+      output[:result] = []
+      mqtt_start(otp_password) if input[:with_mqtt]
+      suspend
+    end
+
+    def cleanup(_plan = nil)
+      job_storage.drop_job(execution_plan_id, run_step_id)
+      Proxy::Dynflow::OtpManager.passwords.delete(execution_plan_id)
+    end
+
+    def process_external_event(event)
+      output[:state] = :running
+      data = event.data
+      continuous_output = Proxy::Dynflow::ContinuousOutput.new
+      Array(data['output']).each { |line| continuous_output.add_output(line, 'stdout') } if data.key?('output')
+      exit_code = data['exit_code'].to_i if data['exit_code']
+      process_update(Proxy::Dynflow::Runner::Update.new(continuous_output, exit_code))
+    end
+
+    def kill_run
+      case output[:state]
+      when :ready_for_pickup
+        # If the job is not running yet on the client, wipe it from storage
+        cleanup
+        # TODO: Stop the action
+      when :notified, :running
+        # Client was notified or is already running, dealing with this situation
+        # is only supported if mqtt is available
+        # Otherwise we have to wait it out
+        # TODO
+        # if input[:with_mqtt]
+      end
+      suspend
+    end
+
+    def mqtt_start(otp_password)
+      payload = {
+        type: 'data',
+        message_id: SecureRandom.uuid,
+        version: 1,
+        sent: DateTime.now.iso8601,
+        directive: 'foreman',
+        metadata: {
+          'job_uuid': input[:job_uuid],
+          'username': execution_plan_id,
+          'password': otp_password,
+          'return_url': "#{input[:proxy_url]}/ssh/jobs/#{input[:job_uuid]}/update",
+        },
+        content: "#{input[:proxy_url]}/ssh/jobs/#{input[:job_uuid]}",
+      }
+      mqtt_notify payload
+      output[:state] = :notified
+    end
+
+    def mqtt_notify(payload)
+      MQTT::Client.connect(settings.mqtt_broker, settings.mqtt_port) do |c|
+        c.publish(mqtt_topic, JSON.dump(payload), false, 1)
+      end
+    end
+
+    def host_name
+      alternative_names = input.fetch(:alternative_names, {})
+
+      alternative_names[:consumer_uuid] ||
+        alternative_names[:fqdn] ||
+        input[:hostname]
+    end
+
+    def mqtt_topic
+      "yggdrasil/#{host_name}/data/in"
+    end
+
+    def settings
+      Proxy::RemoteExecution::Ssh::Plugin.settings
+    end
+
+    def job_storage
+      Proxy::RemoteExecution::Ssh.job_storage
+    end
+  end
+end
data/lib/smart_proxy_remote_execution_ssh/actions/run_script.rb
CHANGED

@@ -3,7 +3,20 @@ require 'smart_proxy_dynflow/action/runner'
 
 module Proxy::RemoteExecution::Ssh
   module Actions
-    class RunScript <
+    class RunScript < ::Dynflow::Action
+      def plan(*args)
+        mode = Proxy::RemoteExecution::Ssh::Plugin.settings.mode
+        case mode
+        when :ssh, :'ssh-async'
+          plan_action(ScriptRunner, *args)
+        when :pull, :'pull-mqtt'
+          plan_action(PullScript, *args,
+                      mqtt: mode == :'pull-mqtt')
+        end
+      end
+    end
+
+    class ScriptRunner < Proxy::Dynflow::Action::Runner
       def initiate_runner
         additional_options = {
           :step_id => run_step_id,
data/lib/smart_proxy_remote_execution_ssh/api.rb
CHANGED

@@ -1,11 +1,13 @@
 require 'net/ssh'
 require 'base64'
+require 'smart_proxy_dynflow/runner'
 
 module Proxy::RemoteExecution
   module Ssh
 
     class Api < ::Sinatra::Base
       include Sinatra::Authorization::Helpers
+      include Proxy::Dynflow::Helpers
 
       get "/pubkey" do
         File.read(Ssh.public_key_file)
@@ -37,6 +39,53 @@ module Proxy::RemoteExecution
        end
         204
       end
+
+      # Payload is a hash where
+      # exit_code: Integer | NilClass
+      # output: String
+      post '/jobs/:job_uuid/update' do |job_uuid|
+        do_authorize_with_ssl_client
+
+        with_authorized_job(job_uuid) do |job_record|
+          data = MultiJson.load(request.body.read)
+          notify_job(job_record, ::Proxy::Dynflow::Runner::ExternalEvent.new(data))
+        end
+      end
+
+      get '/jobs' do
+        do_authorize_with_ssl_client
+
+        MultiJson.dump(Proxy::RemoteExecution::Ssh.job_storage.job_uuids_for_host(https_cert_cn))
+      end
+
+      get "/jobs/:job_uuid" do |job_uuid|
+        do_authorize_with_ssl_client
+
+        with_authorized_job(job_uuid) do |job_record|
+          notify_job(job_record, Actions::PullScript::JobDelivered)
+          job_record[:job]
+        end
+      end
+
+      private
+
+      def notify_job(job_record, event)
+        world.event(job_record[:execution_plan_uuid], job_record[:run_step_id], event)
+      end
+
+      def with_authorized_job(uuid)
+        if (job = authorized_job(uuid))
+          yield job
+        else
+          halt 404
+        end
+      end
+
+      def authorized_job(uuid)
+        job_record = Proxy::RemoteExecution::Ssh.job_storage.find_job(uuid) || {}
+        return job_record if authorize_with_token(clear: false, task_id: job_record[:execution_plan_uuid]) ||
+                             job_record[:hostname] == https_cert_cn
+      end
     end
   end
 end
data/lib/smart_proxy_remote_execution_ssh/cockpit.rb
CHANGED

@@ -1,11 +1,11 @@
-require '
+require 'smart_proxy_remote_execution_ssh/net_ssh_compat'
 require 'forwardable'
 
 module Proxy::RemoteExecution
   module Cockpit
     # A wrapper class around different kind of sockets to comply with Net::SSH event loop
     class BufferedSocket
-      include
+      include Proxy::RemoteExecution::NetSSHCompat::BufferedIO
       extend Forwardable
 
       # The list of methods taken from OpenSSL::SSL::SocketForwarder for the object to act like a socket
@@ -52,14 +52,14 @@ module Proxy::RemoteExecution
      end
      def_delegators(:@socket, :read_nonblock, :write_nonblock, :close)
 
-      def recv(
+      def recv(count)
        res = ""
        begin
          # To drain a SSLSocket before we can go back to the event
          # loop, we need to repeatedly call read_nonblock; a single
          # call is not enough.
          loop do
-            res += @socket.read_nonblock(
+            res += @socket.read_nonblock(count)
          end
        rescue IO::WaitReadable
          # Sometimes there is no payload after reading everything
@@ -95,8 +95,8 @@ module Proxy::RemoteExecution
      end
      def_delegators(:@socket, :read_nonblock, :write_nonblock, :close)
 
-      def recv(
-        @socket.read_nonblock(
+      def recv(count)
+        @socket.read_nonblock(count)
      end
 
      def send(mesg, flags)
@@ -113,6 +113,7 @@ module Proxy::RemoteExecution
 
      def initialize(env)
        @env = env
+        @open_ios = []
      end
 
      def valid?
@@ -127,6 +128,7 @@ module Proxy::RemoteExecution
        begin
          @env['rack.hijack'].call
        rescue NotImplementedError
+          # This is fine
        end
        @socket = @env['rack.hijack_io']
      end
@@ -137,15 +139,11 @@ module Proxy::RemoteExecution
      private
 
      def ssh_on_socket
-        with_error_handling {
+        with_error_handling { system_ssh_loop }
      end
 
      def with_error_handling
        yield
-      rescue Net::SSH::AuthenticationFailed => e
-        send_error(401, e.message)
-      rescue Errno::EHOSTUNREACH
-        send_error(400, "No route to #{host}")
      rescue SystemCallError => e
        send_error(400, e.message)
      rescue SocketError => e
@@ -161,50 +159,62 @@ module Proxy::RemoteExecution
        end
      end
 
-      def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          err_buf += "Process was terminated with signal #{data.read_string}.\r\n"
-          ch.close
-        end
-
-        ch.on_extended_data do |ch2, type, data|
-          err_buf += data
-        end
-      end
+      def system_ssh_loop
+        in_read, in_write = IO.pipe
+        out_read, out_write = IO.pipe
+        err_read, err_write = IO.pipe
+
+        pid = spawn(*script_runner.send(:get_args, command), :in => in_read, :out => out_write, :err => err_write)
+        [in_read, out_write, err_write].each(&:close)
+
+        send_start
+        # Not SSL buffer, but the interface kinda matches
+        out_buf = MiniSSLBufferedSocket.new(out_read)
+        err_buf = MiniSSLBufferedSocket.new(err_read)
+        in_buf = MiniSSLBufferedSocket.new(in_write)
+
+        inner_system_ssh_loop out_buf, err_buf, in_buf, pid
+      end
+
+      def inner_system_ssh_loop2(out_buf, err_buf, in_buf, pid)
+        err_buf_raw = ''
+        readers = [buf_socket, out_buf, err_buf]
+        loop do
+          # Prime the sockets for reading
+          ready_readers, ready_writers = IO.select(readers, [buf_socket, in_buf], nil, 300)
+          (ready_readers || []).each { |reader| reader.close if reader.fill.zero? }
+
+          proxy_data(out_buf, in_buf)
+
+          if out_buf.closed?
+            code = Process.wait2(pid).last.exitstatus
+            send_start if code.zero? # TODO: Why?
+            err_buf_raw += "Process exited with code #{code}.\r\n"
+            break
          end
 
-
-
+          if err_buf.available.positive?
+            err_buf_raw += err_buf.read_available
+          end
+
+          flush_pending_writes(ready_writers || [])
+        end
+      rescue # rubocop:disable Style/RescueStandardError
+        send_error(400, err_buf_raw) unless @started
+      ensure
+        [out_buff, err_buf, in_buf].each(&:close)
+      end
+
+      def proxy_data(out_buf, in_buf)
+        { out_buf => buf_socket, buf_socket => in_buf }.each do |src, dst|
+          dst.enqueue(src.read_available) if src.available.positive?
+          dst.close if src.closed?
+        end
+      end
+
+      def flush_pending_writes(writers)
+        writers.each do |writer|
+          writer.respond_to?(:send_pending) ? writer.send_pending : writer.flush
        end
      end
 
@@ -215,6 +225,7 @@ module Proxy::RemoteExecution
        buf_socket.enqueue("Connection: upgrade\r\n")
        buf_socket.enqueue("Upgrade: raw\r\n")
        buf_socket.enqueue("\r\n")
+        buf_socket.send_pending
      end
    end
 
@@ -223,6 +234,7 @@ module Proxy::RemoteExecution
        buf_socket.enqueue("Connection: close\r\n")
        buf_socket.enqueue("\r\n")
        buf_socket.enqueue(msg)
+        buf_socket.send_pending
      end
 
      def params
@@ -234,34 +246,33 @@ module Proxy::RemoteExecution
      end
 
      def buf_socket
-        @
+        @buf_socket ||= BufferedSocket.build(@socket)
      end
 
      def command
        params["command"]
      end
 
-      def ssh_user
-        params["ssh_user"]
-      end
-
      def host
        params["hostname"]
      end
 
-      def
-
-
-
-
-
-
-
-        ret
-        ret[:
-        ret[:
-        ret[:
-        ret[:
+      def script_runner
+        @script_runner ||= Proxy::RemoteExecution::Ssh::Runners::ScriptRunner.build(
+          runner_params,
+          suspended_action: nil
+        )
+      end
+
+      def runner_params
+        ret = { secrets: {} }
+        ret[:secrets][:ssh_password] = params["ssh_password"] if params["ssh_password"]
+        ret[:secrets][:key_passphrase] = params["ssh_key_passphrase"] if params["ssh_key_passphrase"]
+        ret[:ssh_port] = params["ssh_port"] if params["ssh_port"]
+        ret[:ssh_user] = params["ssh_user"]
+        # For compatibility only
+        ret[:script] = nil
+        ret[:hostname] = host
        ret
      end
    end
data/lib/smart_proxy_remote_execution_ssh/job_storage.rb
ADDED

@@ -0,0 +1,51 @@
+# lib/job_storage.rb
+require 'sequel'
+
+module Proxy::RemoteExecution::Ssh
+  class JobStorage
+    def initialize
+      @db = Sequel.sqlite
+      @db.create_table :jobs do
+        DateTime :timestamp, null: false, default: Sequel::CURRENT_TIMESTAMP
+        String :uuid, fixed: true, size: 36, primary_key: true, null: false
+        String :hostname, null: false, index: true
+        String :execution_plan_uuid, fixed: true, size: 36, null: false, index: true
+        Integer :run_step_id, null: false
+        String :job, text: true
+      end
+    end
+
+    def find_job(uuid)
+      jobs.where(uuid: uuid).first
+    end
+
+    def job_uuids_for_host(hostname)
+      jobs_for_host(hostname).order(:timestamp)
+                             .select_map(:uuid)
+    end
+
+    def store_job(hostname, execution_plan_uuid, run_step_id, job, uuid: SecureRandom.uuid, timestamp: Time.now.utc)
+      jobs.insert(timestamp: timestamp,
+                  uuid: uuid,
+                  hostname: hostname,
+                  execution_plan_uuid: execution_plan_uuid,
+                  run_step_id: run_step_id,
+                  job: job)
+      uuid
+    end
+
+    def drop_job(execution_plan_uuid, run_step_id)
+      jobs.where(execution_plan_uuid: execution_plan_uuid, run_step_id: run_step_id).delete
+    end
+
+    private
+
+    def jobs_for_host(hostname)
+      jobs.where(hostname: hostname)
+    end
+
+    def jobs
+      @db[:jobs]
+    end
+  end
+end
data/lib/smart_proxy_remote_execution_ssh/net_ssh_compat.rb
ADDED

@@ -0,0 +1,228 @@
+module Proxy::RemoteExecution
+  module NetSSHCompat
+    class Buffer
+      # exposes the raw content of the buffer
+      attr_reader :content
+
+      # the current position of the pointer in the buffer
+      attr_accessor :position
+
+      # Creates a new buffer, initialized to the given content. The position
+      # is initialized to the beginning of the buffer.
+      def initialize(content = +'')
+        @content = content.to_s
+        @position = 0
+      end
+
+      # Returns the length of the buffer's content.
+      def length
+        @content.length
+      end
+
+      # Returns the number of bytes available to be read (e.g., how many bytes
+      # remain between the current position and the end of the buffer).
+      def available
+        length - position
+      end
+
+      # Returns a copy of the buffer's content.
+      def to_s
+        (@content || "").dup
+      end
+
+      # Returns +true+ if the buffer contains no data (e.g., it is of zero length).
+      def empty?
+        @content.empty?
+      end
+
+      # Resets the pointer to the start of the buffer. Subsequent reads will
+      # begin at position 0.
+      def reset!
+        @position = 0
+      end
+
+      # Returns true if the pointer is at the end of the buffer. Subsequent
+      # reads will return nil, in this case.
+      def eof?
+        @position >= length
+      end
+
+      # Resets the buffer, making it empty. Also, resets the read position to
+      # 0.
+      def clear!
+        @content = +''
+        @position = 0
+      end
+
+      # Consumes n bytes from the buffer, where n is the current position
+      # unless otherwise specified. This is useful for removing data from the
+      # buffer that has previously been read, when you are expecting more data
+      # to be appended. It helps to keep the size of buffers down when they
+      # would otherwise tend to grow without bound.
+      #
+      # Returns the buffer object itself.
+      def consume!(count = position)
+        if count >= length
+          # OPTIMIZE: a fairly common case
+          clear!
+        elsif count.positive?
+          @content = @content[count..-1] || +''
+          @position -= count
+          @position = 0 if @position.negative?
+        end
+        self
+      end
+
+      # Appends the given text to the end of the buffer. Does not alter the
+      # read position. Returns the buffer object itself.
+      def append(text)
+        @content << text
+        self
+      end
+
+      # Reads and returns the next +count+ bytes from the buffer, starting from
+      # the read position. If +count+ is +nil+, this will return all remaining
+      # text in the buffer. This method will increment the pointer.
+      def read(count = nil)
+        count ||= length
+        count = length - @position if @position + count > length
+        @position += count
+        @content[@position - count, count]
+      end
+
+      # Writes the given data literally into the string. Does not alter the
+      # read position. Returns the buffer object.
+      def write(*data)
+        data.each { |datum| @content << datum.dup.force_encoding('BINARY') }
+        self
+      end
+    end
+
+    module BufferedIO
+      # This module is used to extend sockets and other IO objects, to allow
+      # them to be buffered for both read and write. This abstraction makes it
+      # quite easy to write a select-based event loop
+      # (see Net::SSH::Connection::Session#listen_to).
+      #
+      # The general idea is that instead of calling #read directly on an IO that
+      # has been extended with this module, you call #fill (to add pending input
+      # to the internal read buffer), and then #read_available (to read from that
+      # buffer). Likewise, you don't call #write directly, you call #enqueue to
+      # add data to the write buffer, and then #send_pending or #wait_for_pending_sends
+      # to actually send the data across the wire.
+      #
+      # In this way you can easily use the object as an argument to IO.select,
+      # calling #fill when it is available for read, or #send_pending when it is
+      # available for write, and then call #enqueue and #read_available during
+      # the idle times.
+      #
+      #     socket = TCPSocket.new(address, port)
+      #     socket.extend(Net::SSH::BufferedIo)
+      #
+      #     ssh.listen_to(socket)
+      #
+      #     ssh.loop do
+      #       if socket.available > 0
+      #         puts socket.read_available
+      #         socket.enqueue("response\n")
+      #       end
+      #     end
+      #
+      # Note that this module must be used to extend an instance, and should not
+      # be included in a class. If you do want to use it via an include, then you
+      # must make sure to invoke the private #initialize_buffered_io method in
+      # your class' #initialize method:
+      #
+      #     class Foo < IO
+      #       include Net::SSH::BufferedIo
+      #
+      #       def initialize
+      #         initialize_buffered_io
+      #         # ...
+      #       end
+      #     end
+
+      # Tries to read up to +n+ bytes of data from the remote end, and appends
+      # the data to the input buffer. It returns the number of bytes read, or 0
+      # if no data was available to be read.
+      def fill(count = 8192)
+        input.consume!
+        data = recv(count)
+        input.append(data)
+        return data.length
+      rescue EOFError => e
+        @input_errors << e
+        return 0
+      end
+
+      # Read up to +length+ bytes from the input buffer. If +length+ is nil,
+      # all available data is read from the buffer. (See #available.)
+      def read_available(length = nil)
+        input.read(length || available)
+      end
+
+      # Returns the number of bytes available to be read from the input buffer.
+      # (See #read_available.)
+      def available
+        input.available
+      end
+
+      # Enqueues data in the output buffer, to be written when #send_pending
+      # is called. Note that the data is _not_ sent immediately by this method!
+      def enqueue(data)
+        output.append(data)
+      end
+
+      # Sends as much of the pending output as possible. Returns +true+ if any
+      # data was sent, and +false+ otherwise.
+      def send_pending
+        if output.length.positive?
+          sent = send(output.to_s, 0)
+          output.consume!(sent)
+          return sent.positive?
+        else
+          return false
+        end
+      end
+
+      # Calls #send_pending repeatedly, if necessary, blocking until the output
+      # buffer is empty.
+      def wait_for_pending_sends
+        send_pending
+        while output.length.positive?
+          result = IO.select(nil, [self]) || next
+          next unless result[1].any?
+
+          send_pending
+        end
+      end
+
+      private
+
+      #--
+      # Can't use attr_reader here (after +private+) without incurring the
+      # wrath of "ruby -w". We hates it.
+      #++
+
+      def input
+        @input
+      end
+
+      def output
+        @output
+      end
+
+      # Initializes the intput and output buffers for this object. This method
+      # is called automatically when the module is mixed into an object via
+      # Object#extend (see Net::SSH::BufferedIo.extended), but must be called
+      # explicitly in the +initialize+ method of any class that uses
+      # Module#include to add this module.
+      def initialize_buffered_io
+        @input = Buffer.new
+        @input_errors = []
+        @output = Buffer.new
+        @output_errors = []
+      end
+    end
+  end
+end
data/lib/smart_proxy_remote_execution_ssh/plugin.rb
CHANGED

@@ -1,6 +1,7 @@
 module Proxy::RemoteExecution::Ssh
   class Plugin < Proxy::Plugin
-    SSH_LOG_LEVELS = %w[debug info
+    SSH_LOG_LEVELS = %w[debug info error fatal].freeze
+    MODES = %i[ssh async-ssh pull pull-mqtt].freeze
 
     http_rackup_path File.expand_path("http_config.ru", File.expand_path("../", __FILE__))
     https_rackup_path File.expand_path("http_config.ru", File.expand_path("../", __FILE__))
@@ -11,11 +12,13 @@ module Proxy::RemoteExecution::Ssh
                      :remote_working_dir => '/var/tmp',
                      :local_working_dir => '/var/tmp',
                      :kerberos_auth => false,
-                     :async_ssh => false,
                      # When set to nil, makes REX use the runner's default interval
                      # :runner_refresh_interval => nil,
                      :ssh_log_level => :fatal,
-                     :cleanup_working_dirs => true
+                     :cleanup_working_dirs => true,
+                     # :mqtt_broker => nil,
+                     # :mqtt_port => nil,
+                     :mode => :ssh
 
     plugin :ssh, Proxy::RemoteExecution::Ssh::VERSION
     after_activation do
@@ -23,10 +26,12 @@ module Proxy::RemoteExecution::Ssh
       require 'smart_proxy_remote_execution_ssh/version'
       require 'smart_proxy_remote_execution_ssh/cockpit'
       require 'smart_proxy_remote_execution_ssh/api'
-      require 'smart_proxy_remote_execution_ssh/actions
+      require 'smart_proxy_remote_execution_ssh/actions'
       require 'smart_proxy_remote_execution_ssh/dispatcher'
      require 'smart_proxy_remote_execution_ssh/log_filter'
      require 'smart_proxy_remote_execution_ssh/runners'
+      require 'smart_proxy_remote_execution_ssh/utils'
+      require 'smart_proxy_remote_execution_ssh/job_storage'
 
      Proxy::RemoteExecution::Ssh.validate!
 
@@ -40,7 +45,7 @@ module Proxy::RemoteExecution::Ssh
     def self.runner_class
       @runner_class ||= if simulate?
                           Runners::FakeScriptRunner
-                        elsif settings
+                        elsif settings.mode == :'ssh-async'
                           Runners::PollingScriptRunner
                         else
                           Runners::ScriptRunner
data/lib/smart_proxy_remote_execution_ssh/runners/script_runner.rb
CHANGED

@@ -1,12 +1,5 @@
-require 'net/ssh'
 require 'fileutils'
-
-# Rubocop can't make up its mind what it wants
-# rubocop:disable Lint/SuppressedException, Lint/RedundantCopDisableDirective
-begin
-  require 'net/ssh/krb'
-rescue LoadError; end
-# rubocop:enable Lint/SuppressedException, Lint/RedundantCopDisableDirective
+require 'smart_proxy_dynflow/runner/command'
 
 module Proxy::RemoteExecution::Ssh::Runners
   class EffectiveUserMethod
@@ -21,7 +14,7 @@ module Proxy::RemoteExecution::Ssh::Runners
 
     def on_data(received_data, ssh_channel)
       if received_data.match(login_prompt)
-        ssh_channel.
+        ssh_channel.puts(effective_user_password)
        @password_sent = true
      end
    end
@@ -97,11 +90,11 @@ module Proxy::RemoteExecution::Ssh::Runners
 
  # rubocop:disable Metrics/ClassLength
  class ScriptRunner < Proxy::Dynflow::Runner::Base
+    include Proxy::Dynflow::Runner::Command
    attr_reader :execution_timeout_interval
 
    EXPECTED_POWER_ACTION_MESSAGES = ['restart host', 'shutdown host'].freeze
    DEFAULT_REFRESH_INTERVAL = 1
-    MAX_PROCESS_RETRIES = 4
 
    def initialize(options, user_method, suspended_action: nil)
      super suspended_action: suspended_action
@@ -119,6 +112,7 @@ module Proxy::RemoteExecution::Ssh::Runners
      @local_working_dir = options.fetch(:local_working_dir, settings.local_working_dir)
      @remote_working_dir = options.fetch(:remote_working_dir, settings.remote_working_dir)
      @cleanup_working_dirs = options.fetch(:cleanup_working_dirs, settings.cleanup_working_dirs)
+      @first_execution = options.fetch(:first_execution, false)
      @user_method = user_method
    end
 
@@ -146,12 +140,13 @@ module Proxy::RemoteExecution::Ssh::Runners
    end
 
    def start
+      Proxy::RemoteExecution::Utils.prune_known_hosts!(@host, @ssh_port, logger) if @first_execution
      prepare_start
      script = initialization_script
      logger.debug("executing script:\n#{indent_multiline(script)}")
      trigger(script)
-    rescue StandardError => e
-      logger.error("error while
+    rescue StandardError, NotImplementedError => e
+      logger.error("error while initializing command #{e.class} #{e.message}:\n #{e.backtrace.join("\n")}")
      publish_exception('Error initializing command', e)
    end
 
@@ -177,12 +172,7 @@ module Proxy::RemoteExecution::Ssh::Runners
 
    def refresh
      return if @session.nil?
-
-      with_retries do
-        with_disconnect_handling do
-          @session.process(0)
-        end
-      end
+      super
    ensure
      check_expecting_disconnect
    end
@@ -206,32 +196,13 @@ module Proxy::RemoteExecution::Ssh::Runners
      execution_timeout_interval
    end
 
-    def
-
-
-
-
-
-
-        if tries <= MAX_PROCESS_RETRIES
-          logger.error('Retrying')
-          retry
-        else
-          publish_exception('Unexpected error', e)
-        end
-      end
-    end
-
-    def with_disconnect_handling
-      yield
-    rescue IOError, Net::SSH::Disconnect => e
-      @session.shutdown!
-      check_expecting_disconnect
-      if @expecting_disconnect
-        publish_exit_status(0)
-      else
-        publish_exception('Unexpected disconnect', e)
-      end
+    def close_session
+      @session = nil
+      raise 'Control socket file does not exist' unless File.exist?(local_command_file("socket"))
+      @logger.debug("Sending exit request for session #{@ssh_user}@#{@host}")
+      args = ['/usr/bin/ssh', @host, "-o", "User=#{@ssh_user}", "-o", "ControlPath=#{local_command_file("socket")}", "-O", "exit"].flatten
+      *, err = session(args, in_stream: false, out_stream: false)
+      read_output_debug(err)
    end
 
    def close
@@ -239,12 +210,13 @@ module Proxy::RemoteExecution::Ssh::Runners
    rescue StandardError => e
      publish_exception('Error when removing remote working dir', e, false)
    ensure
-
+      close_session if @session
      FileUtils.rm_rf(local_command_dir) if Dir.exist?(local_command_dir) && @cleanup_working_dirs
    end
 
    def publish_data(data, type)
-      super(data.force_encoding('UTF-8'), type)
+      super(data.force_encoding('UTF-8'), type) unless @user_method.filter_password?(data)
+      @user_method.on_data(data, @command_in)
    end
 
    private
@@ -254,38 +226,54 @@ module Proxy::RemoteExecution::Ssh::Runners
    end
 
    def should_cleanup?
-      @session &&
-    end
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      ssh_options
-      ssh_options
-      ssh_options
-      ssh_options
-
+      @session && @cleanup_working_dirs
+    end
+
+    # Creates session with three pipes - one for reading and two for
+    # writing. Similar to `Open3.popen3` method but without creating
+    # a separate thread to monitor it.
+    def session(args, in_stream: true, out_stream: true, err_stream: true)
+      @session = true
+
+      in_read, in_write = in_stream ? IO.pipe : '/dev/null'
+      out_read, out_write = out_stream ? IO.pipe : [nil, '/dev/null']
+      err_read, err_write = err_stream ? IO.pipe : [nil, '/dev/null']
+      command_pid = spawn(*args, :in => in_read, :out => out_write, :err => err_write)
+      in_read.close if in_stream
+      out_write.close if out_stream
+      err_write.close if err_stream
+
+      return command_pid, in_write, out_read, err_read
+    end
+
+    def ssh_options(with_pty = false)
+      ssh_options = []
+      ssh_options << "-tt" if with_pty
+      ssh_options << "-o User=#{@ssh_user}"
+      ssh_options << "-o Port=#{@ssh_port}" if @ssh_port
+      ssh_options << "-o IdentityFile=#{@client_private_key_file}" if @client_private_key_file
+      ssh_options << "-o IdentitiesOnly=yes"
+      ssh_options << "-o StrictHostKeyChecking=no"
+      ssh_options << "-o PreferredAuthentications=#{available_authentication_methods.join(',')}"
+      ssh_options << "-o UserKnownHostsFile=#{prepare_known_hosts}" if @host_public_key
+      ssh_options << "-o NumberOfPasswordPrompts=1"
+      ssh_options << "-o LogLevel=#{settings[:ssh_log_level]}"
+      ssh_options << "-o ControlMaster=auto"
+      ssh_options << "-o ControlPath=#{local_command_file("socket")}"
+      ssh_options << "-o ControlPersist=yes"
    end
 
    def settings
      Proxy::RemoteExecution::Ssh::Plugin.settings
    end
 
+    def get_args(command, with_pty = false)
+      args = []
+      args = [{'SSHPASS' => @key_passphrase}, '/usr/bin/sshpass', '-P', 'passphrase', '-e'] if @key_passphrase
+      args = [{'SSHPASS' => @ssh_password}, '/usr/bin/sshpass', '-e'] if @ssh_password
+      args += ['/usr/bin/ssh', @host, ssh_options(with_pty), command].flatten
+    end
+
    # Initiates run of the remote command and yields the data when
    # available. The yielding doesn't happen automatically, but as
    # part of calling the `refresh` method.
@@ -294,30 +282,9 @@ module Proxy::RemoteExecution::Ssh::Runners
 
      @started = false
      @user_method.reset
+      @command_pid, @command_in, @command_out = session(get_args(command, with_pty: true), err_stream: false)
+      @started = true
 
-      session.open_channel do |channel|
-        channel.request_pty
-        channel.on_data do |ch, data|
-          publish_data(data, 'stdout') unless @user_method.filter_password?(data)
-          @user_method.on_data(data, ch)
-        end
-        channel.on_extended_data { |ch, type, data| publish_data(data, 'stderr') }
-        # standard exit of the command
-        channel.on_request('exit-status') { |ch, data| publish_exit_status(data.read_long) }
-        # on signal: sending the signal value (such as 'TERM')
-        channel.on_request('exit-signal') do |ch, data|
-          publish_exit_status(data.read_string)
-          ch.close
-          # wait for the channel to finish so that we know at the end
-          # that the session is inactive
-          ch.wait
-        end
-        channel.exec(command) do |_, success|
-          @started = true
-          raise('Error initializing command') unless success
-        end
-      end
-      session.process(0) { !run_started? }
      return true
    end
 
@@ -325,36 +292,27 @@ module Proxy::RemoteExecution::Ssh::Runners
      @started && @user_method.sent_all_data?
    end
 
-    def
+    def read_output_debug(err_io, out_io = nil)
      stdout = ''
-
-
-
-
-
-      ch.on_data do |c, data|
-        stdout.concat(data)
-      end
-      ch.on_extended_data { |_, _, data| stderr.concat(data) }
-      ch.on_request('exit-status') { |_, data| exit_status = data.read_long }
-      # Send data to stdin if we have some
-      ch.send_data(stdin) unless stdin.nil?
-      # on signal: sending the signal value (such as 'TERM')
-      ch.on_request('exit-signal') do |_, data|
-        exit_status = data.read_string
-        ch.close
-        ch.wait
-      end
-      ch.exec command do |_, success|
-        raise 'could not execute command' unless success
-
-        started = true
-      end
+      debug_str = ''
+
+      if out_io
+        stdout += out_io.read until out_io.eof? rescue
+        out_io.close
      end
-
-
-
-
+      debug_str += err_io.read until err_io.eof? rescue
+      err_io.close
+      debug_str.lines.each { |line| @logger.debug(line.strip) }
+
+      return stdout, debug_str
+    end
+
+    def run_sync(command, stdin = nil)
+      pid, tx, rx, err = session(get_args(command))
+      tx.puts(stdin) unless stdin.nil?
+      tx.close
+      stdout, stderr = read_output_debug(err, rx)
+      exit_status = Process.wait2(pid)[1].exitstatus
      return exit_status, stdout, stderr
    end
 
@@ -371,7 +329,7 @@ module Proxy::RemoteExecution::Ssh::Runners
    end
 
    def local_command_file(filename)
-      File.join(local_command_dir, filename)
+      File.join(ensure_local_directory(local_command_dir), filename)
    end
 
    def remote_command_dir
@@ -453,15 +411,8 @@ module Proxy::RemoteExecution::Ssh::Runners
 
    def available_authentication_methods
      methods = %w[publickey] # Always use pubkey auth as fallback
-      if settings[:kerberos_auth]
-        if defined? Net::SSH::Kerberos
-          methods << 'gssapi-with-mic'
-        else
-          @logger.warn('Kerberos authentication requested but not available')
-        end
-      end
+      methods << 'gssapi-with-mic' if settings[:kerberos_auth]
      methods.unshift('password') if @ssh_password
-
      methods
    end
  end
data/lib/smart_proxy_remote_execution_ssh/utils.rb
ADDED

@@ -0,0 +1,24 @@
+require 'open3'
+
+module Proxy::RemoteExecution
+  module Utils
+    class << self
+      def prune_known_hosts!(hostname, port, logger = Logger.new($stdout))
+        return if Net::SSH::KnownHosts.search_for(hostname).empty?
+
+        target = if port == 22
+                   hostname
+                 else
+                   "[#{hostname}]:#{port}"
+                 end
+
+        Open3.popen3('ssh-keygen', '-R', target) do |_stdin, stdout, _stderr, wait_thr|
+          wait_thr.join
+          stdout.read
+        end
+      rescue Errno::ENOENT => e
+        logger.warn("Could not remove #{hostname} from know_hosts: #{e}")
+      end
+    end
+  end
+end
data/lib/smart_proxy_remote_execution_ssh.rb
CHANGED

@@ -20,7 +20,9 @@ module Proxy::RemoteExecution
          raise "Ssh public key file #{public_key_file} doesn't exist"
        end
 
+        validate_mode!
        validate_ssh_log_level!
+        validate_mqtt_settings!
      end
 
      def private_key_file
@@ -31,6 +33,35 @@ module Proxy::RemoteExecution
        File.expand_path("#{private_key_file}.pub")
      end
 
+      def validate_mode!
+        Plugin.settings.mode = Plugin.settings.mode.to_sym
+
+        unless Plugin::MODES.include? Plugin.settings.mode
+          raise "Mode has to be one of #{Plugin::MODES.join(', ')}, given #{Plugin.settings.mode}"
+        end
+
+        if Plugin.settings.async_ssh
+          Plugin.logger.warn('Option async_ssh is deprecated, use ssh-async mode instead.')
+
+          case Plugin.settings.mode
+          when :ssh
+            Plugin.logger.warn('Deprecated option async_ssh used together with ssh mode, switching mode to ssh-async.')
+            Plugin.settings.mode = :'ssh-async'
+          when :'async-ssh'
+            # This is a noop
+          else
+            Plugin.logger.warn('Deprecated option async_ssh used together with incompatible mode, ignoring.')
+          end
+        end
+      end
+
+      def validate_mqtt_settings!
+        return unless Plugin.settings.mode == :'pull-mqtt'
+
+        raise 'mqtt_broker has to be set when pull-mqtt mode is used' if Plugin.settings.mqtt_broker.nil?
+        raise 'mqtt_port has to be set when pull-mqtt mode is used' if Plugin.settings.mqtt_port.nil?
+      end
+
      def validate_ssh_log_level!
        wanted_level = Plugin.settings.ssh_log_level.to_s
        levels = Plugin::SSH_LOG_LEVELS
@@ -51,6 +82,10 @@ module Proxy::RemoteExecution
 
        Plugin.settings.ssh_log_level = Plugin.settings.ssh_log_level.to_sym
      end
+
+      def job_storage
+        @job_storage ||= Proxy::RemoteExecution::Ssh::JobStorage.new
+      end
    end
  end
 end
data/settings.d/remote_execution_ssh.yml.example
CHANGED

@@ -4,17 +4,23 @@
 :local_working_dir: '/var/tmp'
 :remote_working_dir: '/var/tmp'
 # :kerberos_auth: false
-
+
+# Mode of operation, one of ssh, ssh-async, pull, pull-mqtt
+:mode: ssh
 
 # Defines how often (in seconds) should the runner check
 # for new data leave empty to use the runner's default
 # (1 second for regular, 60 seconds with async_ssh enabled)
 # :runner_refresh_interval:
 
-# Defines the verbosity of logging coming from
-# one of :debug, :info, :
+# Defines the verbosity of logging coming from ssh command
+# one of :debug, :info, :error, :fatal
 # must be lower than general log level
 # :ssh_log_level: fatal
 
 # Remove working directories on job completion
 # :cleanup_working_dirs: true
+
+# MQTT configuration, need to be set if mode is set to pull-mqtt
+# :mqtt_broker: localhost
+# :mqtt_port: 1883
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: smart_proxy_remote_execution_ssh
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.5.0
 platform: ruby
 authors:
 - Ivan Nečas
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-
+date: 2021-11-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -124,6 +124,20 @@ dependencies:
        version: '0.5'
 - !ruby/object:Gem::Dependency
   name: net-ssh
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 4.2.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 4.2.0
+- !ruby/object:Gem::Dependency
+  name: mqtt
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -149,6 +163,8 @@ files:
 - README.md
 - bundler.d/remote_execution_ssh.rb
 - lib/smart_proxy_remote_execution_ssh.rb
+- lib/smart_proxy_remote_execution_ssh/actions.rb
+- lib/smart_proxy_remote_execution_ssh/actions/pull_script.rb
 - lib/smart_proxy_remote_execution_ssh/actions/run_script.rb
 - lib/smart_proxy_remote_execution_ssh/api.rb
 - lib/smart_proxy_remote_execution_ssh/async_scripts/control.sh
@@ -156,12 +172,15 @@ files:
 - lib/smart_proxy_remote_execution_ssh/cockpit.rb
 - lib/smart_proxy_remote_execution_ssh/dispatcher.rb
 - lib/smart_proxy_remote_execution_ssh/http_config.ru
+- lib/smart_proxy_remote_execution_ssh/job_storage.rb
 - lib/smart_proxy_remote_execution_ssh/log_filter.rb
+- lib/smart_proxy_remote_execution_ssh/net_ssh_compat.rb
 - lib/smart_proxy_remote_execution_ssh/plugin.rb
 - lib/smart_proxy_remote_execution_ssh/runners.rb
 - lib/smart_proxy_remote_execution_ssh/runners/fake_script_runner.rb
 - lib/smart_proxy_remote_execution_ssh/runners/polling_script_runner.rb
 - lib/smart_proxy_remote_execution_ssh/runners/script_runner.rb
+- lib/smart_proxy_remote_execution_ssh/utils.rb
 - lib/smart_proxy_remote_execution_ssh/version.rb
 - lib/smart_proxy_remote_execution_ssh/webrick_ext.rb
 - settings.d/remote_execution_ssh.yml.example