smart_proxy_remote_execution_ssh 0.3.2 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/{bundler.plugins.d → bundler.d}/remote_execution_ssh.rb +0 -0
- data/lib/smart_proxy_remote_execution_ssh/actions/pull_script.rb +110 -0
- data/lib/smart_proxy_remote_execution_ssh/actions/run_script.rb +34 -0
- data/lib/smart_proxy_remote_execution_ssh/actions.rb +6 -0
- data/lib/smart_proxy_remote_execution_ssh/api.rb +49 -0
- data/lib/smart_proxy_remote_execution_ssh/async_scripts/control.sh +110 -0
- data/lib/smart_proxy_remote_execution_ssh/async_scripts/retrieve.sh +151 -0
- data/lib/smart_proxy_remote_execution_ssh/cockpit.rb +87 -71
- data/lib/smart_proxy_remote_execution_ssh/dispatcher.rb +10 -0
- data/lib/smart_proxy_remote_execution_ssh/job_storage.rb +51 -0
- data/lib/smart_proxy_remote_execution_ssh/log_filter.rb +14 -0
- data/lib/smart_proxy_remote_execution_ssh/net_ssh_compat.rb +228 -0
- data/lib/smart_proxy_remote_execution_ssh/plugin.rb +32 -10
- data/lib/smart_proxy_remote_execution_ssh/runners/fake_script_runner.rb +87 -0
- data/lib/smart_proxy_remote_execution_ssh/runners/polling_script_runner.rb +139 -0
- data/lib/smart_proxy_remote_execution_ssh/runners/script_runner.rb +420 -0
- data/lib/smart_proxy_remote_execution_ssh/runners.rb +7 -0
- data/lib/smart_proxy_remote_execution_ssh/utils.rb +24 -0
- data/lib/smart_proxy_remote_execution_ssh/version.rb +1 -1
- data/lib/smart_proxy_remote_execution_ssh.rb +60 -2
- data/settings.d/remote_execution_ssh.yml.example +12 -3
- metadata +33 -5
data/lib/smart_proxy_remote_execution_ssh/cockpit.rb

@@ -1,11 +1,11 @@
-require '
+require 'smart_proxy_remote_execution_ssh/net_ssh_compat'
 require 'forwardable'
 
 module Proxy::RemoteExecution
   module Cockpit
     # A wrapper class around different kind of sockets to comply with Net::SSH event loop
     class BufferedSocket
-      include
+      include Proxy::RemoteExecution::NetSSHCompat::BufferedIO
       extend Forwardable
 
       # The list of methods taken from OpenSSL::SSL::SocketForwarder for the object to act like a socket
@@ -52,14 +52,14 @@ module Proxy::RemoteExecution
       end
       def_delegators(:@socket, :read_nonblock, :write_nonblock, :close)
 
-      def recv(
+      def recv(count)
        res = ""
        begin
          # To drain a SSLSocket before we can go back to the event
          # loop, we need to repeatedly call read_nonblock; a single
          # call is not enough.
          loop do
-            res += @socket.read_nonblock(
+            res += @socket.read_nonblock(count)
          end
        rescue IO::WaitReadable
          # Sometimes there is no payload after reading everything
@@ -95,8 +95,8 @@ module Proxy::RemoteExecution
       end
       def_delegators(:@socket, :read_nonblock, :write_nonblock, :close)
 
-      def recv(
-        @socket.read_nonblock(
+      def recv(count)
+        @socket.read_nonblock(count)
       end
 
       def send(mesg, flags)
@@ -113,6 +113,7 @@ module Proxy::RemoteExecution
 
       def initialize(env)
        @env = env
+        @open_ios = []
       end
 
       def valid?
@@ -127,6 +128,7 @@ module Proxy::RemoteExecution
        begin
          @env['rack.hijack'].call
        rescue NotImplementedError
+          # This is fine
        end
        @socket = @env['rack.hijack_io']
       end
@@ -137,15 +139,11 @@ module Proxy::RemoteExecution
       private
 
       def ssh_on_socket
-        with_error_handling {
+        with_error_handling { system_ssh_loop }
       end
 
       def with_error_handling
        yield
-      rescue Net::SSH::AuthenticationFailed => e
-        send_error(401, e.message)
-      rescue Errno::EHOSTUNREACH
-        send_error(400, "No route to #{host}")
       rescue SystemCallError => e
        send_error(400, e.message)
       rescue SocketError => e
@@ -161,50 +159,67 @@ module Proxy::RemoteExecution
        end
       end
 
-      def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          ch.on_request('exit-signal') do |ch, data|
-            err_buf += "Process was terminated with signal #{data.read_string}.\r\n"
-            ch.close
-          end
-
-          ch.on_extended_data do |ch2, type, data|
-            err_buf += data
-          end
-      end
+      def system_ssh_loop
+        in_read, in_write = IO.pipe
+        out_read, out_write = IO.pipe
+        err_read, err_write = IO.pipe
+
+        # Force the script runner to initialize its logger
+        script_runner.logger
+        pid = spawn(*script_runner.send(:get_args, command), :in => in_read, :out => out_write, :err => err_write)
+        [in_read, out_write, err_write].each(&:close)
+
+        send_start
+        # Not SSL buffer, but the interface kinda matches
+        out_buf = MiniSSLBufferedSocket.new(out_read)
+        err_buf = MiniSSLBufferedSocket.new(err_read)
+        in_buf = MiniSSLBufferedSocket.new(in_write)
+
+        inner_system_ssh_loop out_buf, err_buf, in_buf, pid
+      end
+
+      def inner_system_ssh_loop(out_buf, err_buf, in_buf, pid)
+        err_buf_raw = ''
+        readers = [buf_socket, out_buf, err_buf]
+        loop do
+          # Prime the sockets for reading
+          ready_readers, ready_writers = IO.select(readers, [buf_socket, in_buf], nil, 300)
+          (ready_readers || []).each { |reader| reader.close if reader.fill.zero? }
+
+          proxy_data(out_buf, in_buf)
+          if buf_socket.closed?
+            script_runner.close_session
          end
 
-
-
+          if out_buf.closed?
+            code = Process.wait2(pid).last.exitstatus
+            send_start if code.zero? # TODO: Why?
+            err_buf_raw += "Process exited with code #{code}.\r\n"
+            break
+          end
+
+          if err_buf.available.positive?
+            err_buf_raw += err_buf.read_available
+          end
+
+          flush_pending_writes(ready_writers || [])
+        end
+      rescue # rubocop:disable Style/RescueStandardError
+        send_error(400, err_buf_raw) unless @started
+      ensure
+        [out_buf, err_buf, in_buf].each(&:close)
+      end
+
+      def proxy_data(out_buf, in_buf)
+        { out_buf => buf_socket, buf_socket => in_buf }.each do |src, dst|
+          dst.enqueue(src.read_available) if src.available.positive?
+          dst.close if src.closed?
+        end
+      end
+
+      def flush_pending_writes(writers)
+        writers.each do |writer|
+          writer.respond_to?(:send_pending) ? writer.send_pending : writer.flush
        end
       end
 
@@ -215,6 +230,7 @@ module Proxy::RemoteExecution
          buf_socket.enqueue("Connection: upgrade\r\n")
          buf_socket.enqueue("Upgrade: raw\r\n")
          buf_socket.enqueue("\r\n")
+          buf_socket.send_pending
        end
       end
 
@@ -223,6 +239,7 @@ module Proxy::RemoteExecution
        buf_socket.enqueue("Connection: close\r\n")
        buf_socket.enqueue("\r\n")
        buf_socket.enqueue(msg)
+        buf_socket.send_pending
       end
 
       def params
@@ -234,34 +251,33 @@ module Proxy::RemoteExecution
       end
 
       def buf_socket
-        @
+        @buf_socket ||= BufferedSocket.build(@socket)
       end
 
       def command
        params["command"]
       end
 
-      def ssh_user
-        params["ssh_user"]
-      end
-
       def host
        params["hostname"]
       end
 
-      def
-
-
-
-
-
-
-
-        ret
-        ret[:
-        ret[:
-        ret[:
-        ret[:
+      def script_runner
+        @script_runner ||= Proxy::RemoteExecution::Ssh::Runners::ScriptRunner.build(
+          runner_params,
+          suspended_action: nil
+        )
+      end
+
+      def runner_params
+        ret = { secrets: {} }
+        ret[:secrets][:ssh_password] = params["ssh_password"] if params["ssh_password"]
+        ret[:secrets][:key_passphrase] = params["ssh_key_passphrase"] if params["ssh_key_passphrase"]
+        ret[:ssh_port] = params["ssh_port"] if params["ssh_port"]
+        ret[:ssh_user] = params["ssh_user"]
+        # For compatibility only
+        ret[:script] = nil
+        ret[:hostname] = host
        ret
       end
     end
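The rewritten cockpit session no longer drives a Net::SSH channel: it spawns the ssh client as a child process and shuttles bytes between the hijacked Rack socket and the child's pipes with IO.select. A minimal sketch of that proxy pattern, using plain IO objects and hypothetical client_io/child_io handles rather than the gem's MiniSSLBufferedSocket wrappers:

    # Sketch only: bidirectional byte proxy between two already-open IO objects,
    # e.g. the hijacked HTTP socket (client_io) and a spawned process pipe (child_io).
    def proxy_loop(client_io, child_io)
      loop do
        readers, = IO.select([client_io, child_io], nil, nil, 300)
        (readers || []).each do |io|
          begin
            data = io.read_nonblock(8192)
          rescue IO::WaitReadable
            next                    # nothing to read right now
          rescue EOFError
            return                  # one side closed; stop proxying
          end
          target = io.equal?(client_io) ? child_io : client_io
          target.write(data)
        end
      end
    end

The gem's inner_system_ssh_loop layers buffering (enqueue/send_pending), stderr capture for error reporting, and exit-code handling on top of this basic shape.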
data/lib/smart_proxy_remote_execution_ssh/dispatcher.rb

@@ -0,0 +1,10 @@
+require 'smart_proxy_dynflow/runner/dispatcher'
+
+module Proxy::RemoteExecution::Ssh
+  class Dispatcher < ::Proxy::Dynflow::Runner::Dispatcher
+    def refresh_interval
+      @refresh_interval ||= Plugin.settings[:runner_refresh_interval] ||
+                            Plugin.runner_class::DEFAULT_REFRESH_INTERVAL
+    end
+  end
+end
data/lib/smart_proxy_remote_execution_ssh/job_storage.rb

@@ -0,0 +1,51 @@
+# lib/job_storage.rb
+require 'sequel'
+
+module Proxy::RemoteExecution::Ssh
+  class JobStorage
+    def initialize
+      @db = Sequel.sqlite
+      @db.create_table :jobs do
+        DateTime :timestamp, null: false, default: Sequel::CURRENT_TIMESTAMP
+        String :uuid, fixed: true, size: 36, primary_key: true, null: false
+        String :hostname, null: false, index: true
+        String :execution_plan_uuid, fixed: true, size: 36, null: false, index: true
+        Integer :run_step_id, null: false
+        String :job, text: true
+      end
+    end
+
+    def find_job(uuid)
+      jobs.where(uuid: uuid).first
+    end
+
+    def job_uuids_for_host(hostname)
+      jobs_for_host(hostname).order(:timestamp)
+                             .select_map(:uuid)
+    end
+
+    def store_job(hostname, execution_plan_uuid, run_step_id, job, uuid: SecureRandom.uuid, timestamp: Time.now.utc)
+      jobs.insert(timestamp: timestamp,
+                  uuid: uuid,
+                  hostname: hostname,
+                  execution_plan_uuid: execution_plan_uuid,
+                  run_step_id: run_step_id,
+                  job: job)
+      uuid
+    end
+
+    def drop_job(execution_plan_uuid, run_step_id)
+      jobs.where(execution_plan_uuid: execution_plan_uuid, run_step_id: run_step_id).delete
+    end
+
+    private
+
+    def jobs_for_host(hostname)
+      jobs.where(hostname: hostname)
+    end
+
+    def jobs
+      @db[:jobs]
+    end
+  end
+end
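JobStorage keeps rendered job scripts in an in-memory SQLite table, keyed by host and execution plan. A usage sketch, assuming the sequel and sqlite3 gems are installed; the hostname and script content below are made up:

    require 'securerandom'

    storage = Proxy::RemoteExecution::Ssh::JobStorage.new

    # Queue a script for a host (illustrative values only)
    uuid = storage.store_job('client.example.com', SecureRandom.uuid, 1, "#!/bin/sh\nuptime\n")

    storage.job_uuids_for_host('client.example.com')  # => [uuid]
    storage.find_job(uuid)[:job]                      # => "#!/bin/sh\nuptime\n"

Because Sequel.sqlite with no arguments opens an in-memory database, stored jobs do not survive a proxy restart.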
data/lib/smart_proxy_remote_execution_ssh/log_filter.rb

@@ -0,0 +1,14 @@
+module Proxy::RemoteExecution::Ssh
+  class LogFilter < ::Logger
+    def initialize(base_logger)
+      @base_logger = base_logger
+    end
+
+    def add(severity, *args, &block)
+      severity ||= ::Logger::UNKNOWN
+      return true if @base_logger.nil? || severity < @level
+
+      @base_logger.add(severity, *args, &block)
+    end
+  end
+end
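LogFilter wraps another logger and forwards only records at or above its own threshold (or nothing at all when no base logger is given). A brief usage sketch, assuming the threshold is assigned through the Logger#level= accessor inherited from ::Logger:

    require 'logger'

    base   = ::Logger.new($stdout)
    filter = Proxy::RemoteExecution::Ssh::LogFilter.new(base)
    filter.level = ::Logger::WARN

    filter.info('connection opened')    # dropped: INFO is below the WARN threshold
    filter.error('connection refused')  # forwarded to the wrapped logger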
data/lib/smart_proxy_remote_execution_ssh/net_ssh_compat.rb

@@ -0,0 +1,228 @@
+module Proxy::RemoteExecution
+  module NetSSHCompat
+    class Buffer
+      # exposes the raw content of the buffer
+      attr_reader :content
+
+      # the current position of the pointer in the buffer
+      attr_accessor :position
+
+      # Creates a new buffer, initialized to the given content. The position
+      # is initialized to the beginning of the buffer.
+      def initialize(content = +'')
+        @content = content.to_s
+        @position = 0
+      end
+
+      # Returns the length of the buffer's content.
+      def length
+        @content.length
+      end
+
+      # Returns the number of bytes available to be read (e.g., how many bytes
+      # remain between the current position and the end of the buffer).
+      def available
+        length - position
+      end
+
+      # Returns a copy of the buffer's content.
+      def to_s
+        (@content || "").dup
+      end
+
+      # Returns +true+ if the buffer contains no data (e.g., it is of zero length).
+      def empty?
+        @content.empty?
+      end
+
+      # Resets the pointer to the start of the buffer. Subsequent reads will
+      # begin at position 0.
+      def reset!
+        @position = 0
+      end
+
+      # Returns true if the pointer is at the end of the buffer. Subsequent
+      # reads will return nil, in this case.
+      def eof?
+        @position >= length
+      end
+
+      # Resets the buffer, making it empty. Also, resets the read position to
+      # 0.
+      def clear!
+        @content = +''
+        @position = 0
+      end
+
+      # Consumes n bytes from the buffer, where n is the current position
+      # unless otherwise specified. This is useful for removing data from the
+      # buffer that has previously been read, when you are expecting more data
+      # to be appended. It helps to keep the size of buffers down when they
+      # would otherwise tend to grow without bound.
+      #
+      # Returns the buffer object itself.
+      def consume!(count = position)
+        if count >= length
+          # OPTIMIZE: a fairly common case
+          clear!
+        elsif count.positive?
+          @content = @content[count..-1] || +''
+          @position -= count
+          @position = 0 if @position.negative?
+        end
+        self
+      end
+
+      # Appends the given text to the end of the buffer. Does not alter the
+      # read position. Returns the buffer object itself.
+      def append(text)
+        @content << text
+        self
+      end
+
+      # Reads and returns the next +count+ bytes from the buffer, starting from
+      # the read position. If +count+ is +nil+, this will return all remaining
+      # text in the buffer. This method will increment the pointer.
+      def read(count = nil)
+        count ||= length
+        count = length - @position if @position + count > length
+        @position += count
+        @content[@position - count, count]
+      end
+
+      # Writes the given data literally into the string. Does not alter the
+      # read position. Returns the buffer object.
+      def write(*data)
+        data.each { |datum| @content << datum.dup.force_encoding('BINARY') }
+        self
+      end
+    end
+
+    module BufferedIO
+      # This module is used to extend sockets and other IO objects, to allow
+      # them to be buffered for both read and write. This abstraction makes it
+      # quite easy to write a select-based event loop
+      # (see Net::SSH::Connection::Session#listen_to).
+      #
+      # The general idea is that instead of calling #read directly on an IO that
+      # has been extended with this module, you call #fill (to add pending input
+      # to the internal read buffer), and then #read_available (to read from that
+      # buffer). Likewise, you don't call #write directly, you call #enqueue to
+      # add data to the write buffer, and then #send_pending or #wait_for_pending_sends
+      # to actually send the data across the wire.
+      #
+      # In this way you can easily use the object as an argument to IO.select,
+      # calling #fill when it is available for read, or #send_pending when it is
+      # available for write, and then call #enqueue and #read_available during
+      # the idle times.
+      #
+      #   socket = TCPSocket.new(address, port)
+      #   socket.extend(Net::SSH::BufferedIo)
+      #
+      #   ssh.listen_to(socket)
+      #
+      #   ssh.loop do
+      #     if socket.available > 0
+      #       puts socket.read_available
+      #       socket.enqueue("response\n")
+      #     end
+      #   end
+      #
+      # Note that this module must be used to extend an instance, and should not
+      # be included in a class. If you do want to use it via an include, then you
+      # must make sure to invoke the private #initialize_buffered_io method in
+      # your class' #initialize method:
+      #
+      #   class Foo < IO
+      #     include Net::SSH::BufferedIo
+      #
+      #     def initialize
+      #       initialize_buffered_io
+      #       # ...
+      #     end
+      #   end
+
+      # Tries to read up to +n+ bytes of data from the remote end, and appends
+      # the data to the input buffer. It returns the number of bytes read, or 0
+      # if no data was available to be read.
+      def fill(count = 8192)
+        input.consume!
+        data = recv(count)
+        input.append(data)
+        return data.length
+      rescue EOFError => e
+        @input_errors << e
+        return 0
+      end
+
+      # Read up to +length+ bytes from the input buffer. If +length+ is nil,
+      # all available data is read from the buffer. (See #available.)
+      def read_available(length = nil)
+        input.read(length || available)
+      end
+
+      # Returns the number of bytes available to be read from the input buffer.
+      # (See #read_available.)
+      def available
+        input.available
+      end
+
+      # Enqueues data in the output buffer, to be written when #send_pending
+      # is called. Note that the data is _not_ sent immediately by this method!
+      def enqueue(data)
+        output.append(data)
+      end
+
+      # Sends as much of the pending output as possible. Returns +true+ if any
+      # data was sent, and +false+ otherwise.
+      def send_pending
+        if output.length.positive?
+          sent = send(output.to_s, 0)
+          output.consume!(sent)
+          return sent.positive?
+        else
+          return false
+        end
+      end
+
+      # Calls #send_pending repeatedly, if necessary, blocking until the output
+      # buffer is empty.
+      def wait_for_pending_sends
+        send_pending
+        while output.length.positive?
+          result = IO.select(nil, [self]) || next
+          next unless result[1].any?
+
+          send_pending
+        end
+      end
+
+      private
+
+      #--
+      # Can't use attr_reader here (after +private+) without incurring the
+      # wrath of "ruby -w". We hates it.
+      #++
+
+      def input
+        @input
+      end
+
+      def output
+        @output
+      end
+
+      # Initializes the intput and output buffers for this object. This method
+      # is called automatically when the module is mixed into an object via
+      # Object#extend (see Net::SSH::BufferedIo.extended), but must be called
+      # explicitly in the +initialize+ method of any class that uses
+      # Module#include to add this module.
+      def initialize_buffered_io
+        @input = Buffer.new
+        @input_errors = []
+        @output = Buffer.new
+        @output_errors = []
+      end
+    end
+  end
+end
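NetSSHCompat reimplements just enough of Net::SSH's Buffer and BufferedIo behaviour for cockpit.rb to drop its net-ssh dependency. A minimal sketch of the intended select-loop usage, extending a TCPSocket as the module comment describes; host and port are placeholders, and since this compat copy defines no extended-hook, the buffers are initialized by hand:

    require 'socket'

    socket = TCPSocket.new('proxy.example.com', 8000)               # placeholder endpoint
    socket.extend(Proxy::RemoteExecution::NetSSHCompat::BufferedIO)
    socket.__send__(:initialize_buffered_io)                        # private; not invoked automatically on extend

    socket.enqueue("PING\n")          # buffered, not yet written
    socket.send_pending               # flush the write buffer to the wire

    if IO.select([socket], nil, nil, 5)
      socket.fill                                                   # pull pending bytes into the input buffer
      puts socket.read_available if socket.available.positive?
    end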
data/lib/smart_proxy_remote_execution_ssh/plugin.rb

@@ -1,5 +1,8 @@
 module Proxy::RemoteExecution::Ssh
   class Plugin < Proxy::Plugin
+    SSH_LOG_LEVELS = %w[debug info error fatal].freeze
+    MODES = %i[ssh async-ssh pull pull-mqtt].freeze
+
     http_rackup_path File.expand_path("http_config.ru", File.expand_path("../", __FILE__))
     https_rackup_path File.expand_path("http_config.ru", File.expand_path("../", __FILE__))
 
@@ -9,7 +12,13 @@ module Proxy::RemoteExecution::Ssh
                      :remote_working_dir => '/var/tmp',
                      :local_working_dir => '/var/tmp',
                      :kerberos_auth => false,
-
+                     # When set to nil, makes REX use the runner's default interval
+                     # :runner_refresh_interval => nil,
+                     :ssh_log_level => :fatal,
+                     :cleanup_working_dirs => true,
+                     # :mqtt_broker => nil,
+                     # :mqtt_port => nil,
+                     :mode => :ssh
 
     plugin :ssh, Proxy::RemoteExecution::Ssh::VERSION
     after_activation do
@@ -17,17 +26,30 @@ module Proxy::RemoteExecution::Ssh
       require 'smart_proxy_remote_execution_ssh/version'
       require 'smart_proxy_remote_execution_ssh/cockpit'
       require 'smart_proxy_remote_execution_ssh/api'
-
-
-
-
-
-
-        # Dynflow core is not available in the proxy, will be handled
-        # by standalone Dynflow core
-      end
+      require 'smart_proxy_remote_execution_ssh/actions'
+      require 'smart_proxy_remote_execution_ssh/dispatcher'
+      require 'smart_proxy_remote_execution_ssh/log_filter'
+      require 'smart_proxy_remote_execution_ssh/runners'
+      require 'smart_proxy_remote_execution_ssh/utils'
+      require 'smart_proxy_remote_execution_ssh/job_storage'
 
       Proxy::RemoteExecution::Ssh.validate!
+
+      Proxy::Dynflow::TaskLauncherRegistry.register('ssh', Proxy::Dynflow::TaskLauncher::Batch)
+    end
+
+    def self.simulate?
+      @simulate ||= %w[yes true 1].include? ENV.fetch('REX_SIMULATE', '').downcase
+    end
+
+    def self.runner_class
+      @runner_class ||= if simulate?
+                          Runners::FakeScriptRunner
+                        elsif settings.mode == :'ssh-async'
+                          Runners::PollingScriptRunner
+                        else
+                          Runners::ScriptRunner
+                        end
     end
   end
 end
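The plugin now picks its runner class from settings plus an environment override. An illustrative check, assuming a running smart proxy with this plugin loaded and the shipped defaults:

    plugin = Proxy::RemoteExecution::Ssh::Plugin

    plugin.settings.mode   # => :ssh
    plugin.runner_class    # => Proxy::RemoteExecution::Ssh::Runners::ScriptRunner

    # With REX_SIMULATE set to "yes", "true" or "1" in the proxy's environment,
    # runner_class resolves to Runners::FakeScriptRunner; a mode of :'ssh-async'
    # selects Runners::PollingScriptRunner, per the condition in runner_class.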