openbolt 5.0.0.rc1 → 5.1.0
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Puppetfile +18 -12
- data/bolt-modules/boltlib/lib/puppet/functions/apply_prep.rb +5 -3
- data/bolt-modules/boltlib/lib/puppet/functions/download_file.rb +1 -0
- data/bolt-modules/boltlib/lib/puppet/functions/get_resources.rb +2 -0
- data/bolt-modules/boltlib/lib/puppet/functions/puppetdb_command.rb +1 -1
- data/bolt-modules/boltlib/lib/puppet/functions/run_container.rb +2 -2
- data/bolt-modules/boltlib/lib/puppet/functions/run_script.rb +1 -0
- data/bolt-modules/boltlib/lib/puppet/functions/set_resources.rb +2 -2
- data/bolt-modules/boltlib/lib/puppet/functions/upload_file.rb +1 -0
- data/bolt-modules/boltlib/lib/puppet/functions/wait.rb +1 -1
- data/bolt-modules/boltlib/lib/puppet/functions/wait_until_available.rb +1 -0
- data/bolt-modules/ctrl/lib/puppet/functions/ctrl/do_until.rb +1 -0
- data/lib/bolt/analytics.rb +1 -1
- data/lib/bolt/application.rb +17 -22
- data/lib/bolt/applicator.rb +4 -0
- data/lib/bolt/bolt_option_parser.rb +10 -8
- data/lib/bolt/cli.rb +7 -6
- data/lib/bolt/config/options.rb +59 -67
- data/lib/bolt/config/transport/base.rb +1 -0
- data/lib/bolt/config/transport/options.rb +59 -59
- data/lib/bolt/config.rb +8 -6
- data/lib/bolt/executor.rb +9 -24
- data/lib/bolt/fiber_executor.rb +3 -1
- data/lib/bolt/inventory/group.rb +3 -0
- data/lib/bolt/inventory/inventory.rb +2 -0
- data/lib/bolt/inventory/options.rb +7 -7
- data/lib/bolt/inventory/target.rb +3 -2
- data/lib/bolt/inventory.rb +1 -0
- data/lib/bolt/logger.rb +2 -0
- data/lib/bolt/module.rb +1 -0
- data/lib/bolt/module_installer/puppetfile.rb +4 -4
- data/lib/bolt/module_installer/resolver.rb +2 -2
- data/lib/bolt/module_installer/specs/forge_spec.rb +4 -4
- data/lib/bolt/module_installer/specs/git_spec.rb +6 -6
- data/lib/bolt/module_installer/specs/id/gitclone.rb +1 -0
- data/lib/bolt/module_installer/specs/id/github.rb +2 -1
- data/lib/bolt/module_installer/specs/id/gitlab.rb +2 -1
- data/lib/bolt/module_installer.rb +3 -1
- data/lib/bolt/outputter/human.rb +9 -4
- data/lib/bolt/outputter/rainbow.rb +1 -0
- data/lib/bolt/pal/yaml_plan/parameter.rb +2 -2
- data/lib/bolt/pal/yaml_plan/step/resources.rb +1 -1
- data/lib/bolt/pal.rb +7 -4
- data/lib/bolt/plan_creator.rb +5 -4
- data/lib/bolt/plugin/cache.rb +2 -0
- data/lib/bolt/plugin/module.rb +7 -0
- data/lib/bolt/plugin/puppet_connect_data.rb +1 -0
- data/lib/bolt/plugin/task.rb +3 -0
- data/lib/bolt/plugin.rb +4 -0
- data/lib/bolt/project.rb +3 -3
- data/lib/bolt/project_manager/config_migrator.rb +3 -3
- data/lib/bolt/project_manager/inventory_migrator.rb +1 -1
- data/lib/bolt/project_manager/module_migrator.rb +7 -6
- data/lib/bolt/project_manager.rb +11 -11
- data/lib/bolt/puppetdb/config.rb +4 -0
- data/lib/bolt/puppetdb/instance.rb +1 -0
- data/lib/bolt/rerun.rb +1 -0
- data/lib/bolt/resource_instance.rb +1 -1
- data/lib/bolt/result.rb +2 -1
- data/lib/bolt/shell/bash.rb +2 -1
- data/lib/bolt/shell/powershell.rb +4 -3
- data/lib/bolt/shell.rb +1 -1
- data/lib/bolt/task/run.rb +1 -0
- data/lib/bolt/task.rb +3 -0
- data/lib/bolt/transport/docker/connection.rb +2 -0
- data/lib/bolt/transport/jail/connection.rb +2 -0
- data/lib/bolt/transport/lxd/connection.rb +2 -0
- data/lib/bolt/transport/lxd.rb +1 -1
- data/lib/bolt/transport/podman/connection.rb +2 -0
- data/lib/bolt/transport/remote.rb +1 -0
- data/lib/bolt/transport/ssh/connection.rb +1 -1
- data/lib/bolt/transport/winrm/connection.rb +4 -3
- data/lib/bolt/util/format.rb +1 -0
- data/lib/bolt/util.rb +7 -4
- data/lib/bolt/validator.rb +1 -1
- data/lib/bolt/version.rb +1 -1
- data/lib/bolt_spec/plans/action_stubs.rb +5 -0
- data/lib/bolt_spec/plans/mock_executor.rb +2 -4
- data/libexec/apply_catalog.rb +2 -1
- data/libexec/custom_facts.rb +1 -1
- data/libexec/query_resources.rb +1 -1
- metadata +63 -77
- data/lib/bolt/config/transport/orch.rb +0 -41
- data/lib/bolt/transport/orch/connection.rb +0 -111
- data/lib/bolt/transport/orch.rb +0 -271
- data/lib/bolt_server/acl.rb +0 -39
- data/lib/bolt_server/base_config.rb +0 -112
- data/lib/bolt_server/config.rb +0 -64
- data/lib/bolt_server/file_cache.rb +0 -200
- data/lib/bolt_server/request_error.rb +0 -11
- data/lib/bolt_server/schemas/action-check_node_connections.json +0 -14
- data/lib/bolt_server/schemas/action-run_command.json +0 -12
- data/lib/bolt_server/schemas/action-run_script.json +0 -47
- data/lib/bolt_server/schemas/action-run_task.json +0 -20
- data/lib/bolt_server/schemas/action-upload_file.json +0 -47
- data/lib/bolt_server/schemas/partials/target-any.json +0 -10
- data/lib/bolt_server/schemas/partials/target-ssh.json +0 -88
- data/lib/bolt_server/schemas/partials/target-winrm.json +0 -67
- data/lib/bolt_server/schemas/partials/task.json +0 -94
- data/lib/bolt_server/schemas/transport-ssh.json +0 -25
- data/lib/bolt_server/schemas/transport-winrm.json +0 -19
- data/lib/bolt_server/transport_app.rb +0 -554
data/lib/bolt/transport/orch.rb
DELETED
@@ -1,271 +0,0 @@
-# frozen_string_literal: true
-
-require 'base64'
-require 'find'
-require 'json'
-require 'pathname'
-require_relative '../../bolt/transport/base'
-require_relative 'orch/connection'
-
-module Bolt
-  module Transport
-    class Orch < Base
-      BOLT_COMMAND_TASK = Struct.new(:name).new('bolt_shim::command').freeze
-      BOLT_SCRIPT_TASK = Struct.new(:name).new('bolt_shim::script').freeze
-      BOLT_UPLOAD_TASK = Struct.new(:name).new('bolt_shim::upload').freeze
-
-      attr_writer :plan_context
-
-      def provided_features
-        ['puppet-agent']
-      end
-
-      def initialize(*args)
-        # lazy-load expensive gem code
-        require 'orchestrator_client'
-
-        @connections = {}
-        super
-      end
-
-      def finish_plan(result)
-        if result.is_a? Bolt::PlanResult
-          @connections.each_value do |conn|
-            conn.finish_plan(result)
-          rescue StandardError => e
-            @logger.trace("Failed to finish plan on #{conn.key}: #{e.message}")
-          end
-        end
-      end
-
-      # It's safe to create connections here for now because the
-      # batches/threads are per connection.
-      def get_connection(conn_opts)
-        key = Connection.get_key(conn_opts)
-        unless (conn = @connections[key])
-          conn = @connections[key] = Connection.new(conn_opts, @plan_context, logger)
-        end
-        conn
-      end
-
-      def process_run_results(targets, results, task_name, position = [])
-        targets_by_name = Hash[targets.map { |t| t.host || t.name }.zip(targets)]
-        results.map do |node_result|
-          target = targets_by_name[node_result['name']]
-          state = node_result['state']
-          result = node_result['result']
-
-          # If it's finished or already has a proper error simply pass it to the
-          # the result otherwise make sure an error is generated
-          if state == 'finished' || (result && result['_error'])
-            if result['_error']
-              unless result['_error'].is_a?(Hash)
-                result['_error'] = { 'kind' => 'puppetlabs.tasks/task-error',
-                                     'issue_code' => 'TASK_ERROR',
-                                     'msg' => result['_error'],
-                                     'details' => {} }
-              end
-
-              result['_error']['details'] ||= {}
-              unless result['_error']['details'].is_a?(Hash)
-                deets = result['_error']['details']
-                result['_error']['details'] = { 'msg' => deets }
-              end
-              file_line = %w[file line].zip(position).to_h.compact
-              result['_error']['details'].merge!(file_line) unless result['_error']['details']['file']
-            end
-
-            Bolt::Result.new(target, value: result, action: 'task', object: task_name)
-          elsif state == 'skipped'
-            details = %w[file line].zip(position).to_h.compact
-            Bolt::Result.new(
-              target,
-              value: { '_error' => {
-                'kind' => 'puppetlabs.tasks/skipped-node',
-                'msg' => "Target #{target.safe_name} was skipped",
-                'details' => details
-              } },
-              action: 'task', object: task_name
-            )
-          else
-            # Make a generic error with a unkown exit_code
-            Bolt::Result.for_task(target, result.to_json, '', 'unknown', task_name, position)
-          end
-        end
-      end
-
-      def batch_command(targets, command, options = {}, position = [], &callback)
-        if options[:env_vars] && !options[:env_vars].empty?
-          raise NotImplementedError, "pcp transport does not support setting environment variables"
-        end
-
-        params = {
-          'command' => command
-        }
-        results = run_task_job(targets,
-                               BOLT_COMMAND_TASK,
-                               params,
-                               options,
-                               position,
-                               &callback)
-        callback ||= proc {}
-        results.map! { |result| unwrap_bolt_result(result.target, result, 'command', command) }
-        results.each do |result|
-          callback.call(type: :node_result, result: result)
-        end
-      end
-
-      def batch_script(targets, script, arguments, options = {}, position = [], &callback)
-        if options[:env_vars] && !options[:env_vars].empty?
-          raise NotImplementedError, "pcp transport does not support setting environment variables"
-        end
-
-        content = File.open(script, &:read)
-        content = Base64.encode64(content)
-        params = {
-          'content' => content,
-          'arguments' => arguments,
-          'name' => Pathname(script).basename.to_s
-        }
-        callback ||= proc {}
-        results = run_task_job(targets, BOLT_SCRIPT_TASK, params, options, position, &callback)
-        results.map! { |result| unwrap_bolt_result(result.target, result, 'script', script) }
-        results.each do |result|
-          callback.call(type: :node_result, result: result)
-        end
-      end
-
-      def pack(directory)
-        # lazy-load expensive gem code
-        require 'minitar'
-        require 'zlib'
-
-        start_time = Time.now
-        io = StringIO.new
-        output = Minitar::Output.new(Zlib::GzipWriter.new(io))
-        Find.find(directory) do |file|
-          next unless File.file?(file)
-
-          tar_path = Pathname.new(file).relative_path_from(Pathname.new(directory))
-          @logger.trace("Packing #{file} to #{tar_path}")
-          stat = File.stat(file)
-          content = File.binread(file)
-          output.tar.add_file_simple(
-            tar_path.to_s,
-            data: content,
-            size: content.size,
-            mode: stat.mode & 0o777,
-            mtime: stat.mtime
-          )
-        end
-
-        duration = Time.now - start_time
-        @logger.trace("Packed upload in #{duration * 1000} ms")
-
-        output.close
-        io.string
-      ensure
-        # Closes both tar and sgz.
-        output&.close
-      end
-
-      def batch_upload(targets, source, destination, options = {}, position = [], &callback)
-        stat = File.stat(source)
-        content = if stat.directory?
-                    pack(source)
-                  else
-                    File.open(source, &:read)
-                  end
-        content = Base64.encode64(content)
-        mode = File.stat(source).mode
-        params = {
-          'path' => destination,
-          'content' => content,
-          'mode' => mode,
-          'directory' => stat.directory?
-        }
-        callback ||= proc {}
-        results = run_task_job(targets, BOLT_UPLOAD_TASK, params, options, position, &callback)
-        results.map! do |result|
-          if result.error_hash
-            result
-          else
-            Bolt::Result.for_upload(result.target, source, destination)
-          end
-        end
-        results.each do |result|
-          callback&.call(type: :node_result, result: result)
-        end
-      end
-
-      def batch_download(targets, *_args)
-        error = {
-          'kind' => 'bolt/not-supported-error',
-          'msg' => 'pcp transport does not support downloading files',
-          'details' => {}
-        }
-
-        targets.map do |target|
-          Bolt::Result.new(target, error: error, action: 'download')
-        end
-      end
-
-      def batches(targets)
-        targets.group_by { |target| Connection.get_key(target.options) }.values
-      end
-
-      def run_task_job(targets, task, arguments, options, position)
-        targets.each do |target|
-          yield(type: :node_start, target: target) if block_given?
-        end
-
-        begin
-          # unpack any Sensitive data
-          arguments = unwrap_sensitive_args(arguments)
-          results = get_connection(targets.first.options).run_task(targets, task, arguments, options)
-
-          process_run_results(targets, results, task.name, position)
-        rescue OrchestratorClient::ApiError => e
-          targets.map do |target|
-            Bolt::Result.new(target, error: e.data)
-          end
-        rescue StandardError => e
-          targets.map do |target|
-            Bolt::Result.from_exception(target, e, action: 'task')
-          end
-        end
-      end
-
-      def batch_task(targets, task, arguments, options = {}, position = [], &callback)
-        callback ||= proc {}
-        results = run_task_job(targets, task, arguments, options, position, &callback)
-        results.each do |result|
-          callback.call(type: :node_result, result: result)
-        end
-      end
-
-      def batch_task_with(_targets, _task, _target_mapping, _options = {}, _position = [])
-        raise NotImplementedError, "pcp transport does not support run_task_with()"
-      end
-
-      def batch_connected?(targets)
-        resp = get_connection(targets.first.options).query_inventory(targets)
-        resp['items'].all? { |node| node['connected'] }
-      end
-
-      # run_task generates a result that makes sense for a generic task which
-      # needs to be unwrapped to extract stdout/stderr/exitcode.
-      #
-      def unwrap_bolt_result(target, result, action, obj)
-        if result.error_hash
-          # something went wrong return the failure
-          return result
-        end
-
-        # If we get here, there's no error so we don't need the file or line
-        # number
-        Bolt::Result.for_command(target, result.value, action, obj, [])
-      end
-    end
-  end
-end
data/lib/bolt_server/acl.rb
DELETED
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'rails/auth/rack'
-
-module BoltServer
-  class ACL < Rails::Auth::ErrorPage::Middleware
-    class X509Matcher
-      def initialize(options)
-        @options = options.freeze
-      end
-
-      def match(env)
-        certificate = Rails::Auth::X509::Certificate.new(env['puma.peercert'])
-        # This can be extended fairly easily to search OpenSSL::X509::Certificate#extensions for subjectAltNames.
-        @options.all? { |name, value| certificate[name] == value }
-      end
-    end
-
-    def initialize(app, allowlist)
-      acls = []
-      allowlist.each do |entry|
-        acls << {
-          'resources' => [
-            {
-              'method' => 'ALL',
-              'path' => '/.*'
-            }
-          ],
-          'allow_x509_subject' => {
-            'cn' => entry
-          }
-        }
-      end
-      acl = Rails::Auth::ACL.new(acls, matchers: { allow_x509_subject: X509Matcher })
-      mid = Rails::Auth::ACL::Middleware.new(app, acl: acl)
-      super(mid, page_body: 'Access denied')
-    end
-  end
-end
data/lib/bolt_server/base_config.rb
DELETED
@@ -1,112 +0,0 @@
-# frozen_string_literal: true
-
-require 'hocon'
-require 'bolt/error'
-
-module BoltServer
-  class BaseConfig
-    def config_keys
-      %w[host port ssl-cert ssl-key ssl-ca-cert
-         ssl-cipher-suites loglevel logfile allowlist
-         environments-codedir
-         environmentpath basemodulepath]
-    end
-
-    def env_keys
-      %w[ssl-cert ssl-key ssl-ca-cert loglevel]
-    end
-
-    def defaults
-      { 'host' => '127.0.0.1',
-        'loglevel' => 'warn',
-        'ssl-cipher-suites' => %w[ECDHE-ECDSA-AES256-GCM-SHA384
-                                  ECDHE-RSA-AES256-GCM-SHA384
-                                  ECDHE-ECDSA-CHACHA20-POLY1305
-                                  ECDHE-RSA-CHACHA20-POLY1305
-                                  ECDHE-ECDSA-AES128-GCM-SHA256
-                                  ECDHE-RSA-AES128-GCM-SHA256
-                                  ECDHE-ECDSA-AES256-SHA384
-                                  ECDHE-RSA-AES256-SHA384
-                                  ECDHE-ECDSA-AES128-SHA256
-                                  ECDHE-RSA-AES128-SHA256] }
-    end
-
-    def ssl_keys
-      %w[ssl-cert ssl-key ssl-ca-cert]
-    end
-
-    def required_keys
-      ssl_keys
-    end
-
-    def service_name
-      raise "Method service_name must be defined in the service class"
-    end
-
-    def initialize(config = nil)
-      @data = defaults
-      @data = @data.merge(config.select { |key, _| config_keys.include?(key) }) if config
-      @config_path = nil
-    end
-
-    def load_file_config(path)
-      @config_path = path
-      begin
-        # This lets us get the actual config values without needing to
-        # know the service name
-        parsed_hocon = Hocon.load(path)[service_name]
-      rescue Hocon::ConfigError => e
-        raise "Hocon data in '#{path}' failed to load.\n Error: '#{e.message}'"
-      rescue Errno::EACCES
-        raise "Your user doesn't have permission to read #{path}"
-      end
-
-      raise "Could not find service config at #{path}" if parsed_hocon.nil?
-
-      parsed_hocon = parsed_hocon.select { |key, _| config_keys.include?(key) }
-
-      @data = @data.merge(parsed_hocon)
-    end
-
-    def load_env_config
-      raise "load_env_config should be defined in the service class"
-    end
-
-    def natural?(num)
-      num.is_a?(Integer) && num.positive?
-    end
-
-    def validate
-      required_keys.each do |k|
-        # Handled nested config
-        if k.is_a?(Array)
-          next unless @data.dig(*k).nil?
-        else
-          next unless @data[k].nil?
-        end
-        raise Bolt::ValidationError, "You must configure #{k} in #{@config_path}"
-      end
-
-      unless natural?(@data['port'])
-        raise Bolt::ValidationError, "Configured 'port' must be a valid integer greater than 0"
-      end
-      ssl_keys.each do |sk|
-        unless File.file?(@data[sk]) && File.readable?(@data[sk])
-          raise Bolt::ValidationError, "Configured #{sk} must be a valid filepath"
-        end
-      end
-
-      unless @data['ssl-cipher-suites'].is_a?(Array)
-        raise Bolt::ValidationError, "Configured 'ssl-cipher-suites' must be an array of cipher suite names"
-      end
-
-      unless @data['allowlist'].nil? || @data['allowlist'].is_a?(Array)
-        raise Bolt::ValidationError, "Configured 'allowlist' must be an array of names"
-      end
-    end
-
-    def [](key)
-      @data[key]
-    end
-  end
-end
data/lib/bolt_server/config.rb
DELETED
@@ -1,64 +0,0 @@
-# frozen_string_literal: true
-
-require 'hocon'
-require 'bolt_server/base_config'
-require 'bolt/error'
-
-module BoltServer
-  class Config < BoltServer::BaseConfig
-    def config_keys
-      super + %w[concurrency cache-dir file-server-conn-timeout
-                 file-server-uri environments-codedir
-                 environmentpath basemodulepath builtin-content-dir]
-    end
-
-    def env_keys
-      super + %w[concurrency file-server-conn-timeout file-server-uri]
-    end
-
-    def int_keys
-      %w[concurrency file-server-conn-timeout]
-    end
-
-    def defaults
-      super.merge(
-        'port' => 62658,
-        'concurrency' => 100,
-        'cache-dir' => "/opt/puppetlabs/server/data/bolt-server/cache",
-        'file-server-conn-timeout' => 120
-      )
-    end
-
-    def required_keys
-      super + %w[file-server-uri]
-    end
-
-    def service_name
-      'bolt-server'
-    end
-
-    def load_env_config
-      env_keys.each do |key|
-        transformed_key = "BOLT_#{key.tr('-', '_').upcase}"
-        next unless ENV.key?(transformed_key)
-        @data[key] = if int_keys.include?(key)
-                       ENV[transformed_key].to_i
-                     else
-                       ENV[transformed_key]
-                     end
-      end
-    end
-
-    def validate
-      super
-
-      unless natural?(@data['concurrency'])
-        raise Bolt::ValidationError, "Configured 'concurrency' must be a positive integer"
-      end
-
-      unless natural?(@data['file-server-conn-timeout'])
-        raise Bolt::ValidationError, "Configured 'file-server-conn-timeout' must be a positive integer"
-      end
-    end
-  end
-end
data/lib/bolt_server/file_cache.rb
DELETED
@@ -1,200 +0,0 @@
-# frozen_string_literal: true
-
-require 'concurrent/atomic/read_write_lock'
-require 'concurrent/executor/single_thread_executor'
-require 'concurrent/promise'
-require 'concurrent/timer_task'
-require 'digest'
-require 'fileutils'
-require 'net/http'
-require 'logging'
-require 'timeout'
-
-require 'bolt/error'
-
-module BoltServer
-  class FileCache
-    class Error < Bolt::Error
-      def initialize(msg)
-        super(msg, 'bolt-server/file-cache-error')
-      end
-    end
-
-    PURGE_TIMEOUT = 60 * 60
-    PURGE_INTERVAL = 24 * PURGE_TIMEOUT
-    PURGE_TTL = 7 * PURGE_INTERVAL
-
-    def initialize(config,
-                   executor: Concurrent::SingleThreadExecutor.new,
-                   purge_interval: PURGE_INTERVAL,
-                   purge_timeout: PURGE_TIMEOUT,
-                   purge_ttl: PURGE_TTL,
-                   cache_dir_mutex: Concurrent::ReadWriteLock.new,
-                   do_purge: true)
-      @executor = executor
-      @cache_dir = config['cache-dir']
-      @config = config
-      @logger = Bolt::Logger.logger(self)
-      @cache_dir_mutex = cache_dir_mutex
-
-      if do_purge
-        @purge = Concurrent::TimerTask.new(execution_interval: purge_interval,
-                                           run_now: true) { expire(purge_ttl, purge_timeout) }
-        @purge.execute
-      end
-    end
-
-    def tmppath
-      File.join(@cache_dir, 'tmp')
-    end
-
-    def setup
-      FileUtils.mkdir_p(@cache_dir)
-      FileUtils.mkdir_p(tmppath)
-      self
-    end
-
-    def ssl_cert
-      @ssl_cert ||= File.read(@config['ssl-cert'])
-    end
-
-    def ssl_key
-      @ssl_key ||= File.read(@config['ssl-key'])
-    end
-
-    def client
-      # rubocop:disable Naming/VariableNumber
-      @client ||= begin
-        uri = URI(@config['file-server-uri'])
-        https = Net::HTTP.new(uri.host, uri.port)
-        https.use_ssl = true
-        https.ssl_version = :TLSv1_2
-        https.ca_file = @config['ssl-ca-cert']
-        https.cert = OpenSSL::X509::Certificate.new(ssl_cert)
-        https.key = OpenSSL::PKey::RSA.new(ssl_key)
-        https.verify_mode = OpenSSL::SSL::VERIFY_PEER
-        https.open_timeout = @config['file-server-conn-timeout']
-        https
-      end
-      # rubocop:enable Naming/VariableNumber
-    end
-
-    def request_file(path, params, file)
-      uri = "#{@config['file-server-uri'].chomp('/')}#{path}"
-      uri = URI(uri)
-      uri.query = URI.encode_www_form(params)
-
-      req = Net::HTTP::Get.new(uri)
-
-      begin
-        client.request(req) do |resp|
-          if resp.code != "200"
-            msg = "Failed to download file: #{resp.body}"
-            @logger.warn resp.body
-            raise Error, msg
-          end
-          resp.read_body do |chunk|
-            file.write(chunk)
-          end
-        end
-      rescue StandardError => e
-        if e.is_a?(Bolt::Error)
-          raise e
-        else
-          @logger.warn e
-          raise Error, "Failed to download file: #{e.message}"
-        end
-      end
-    ensure
-      file.close
-    end
-
-    def check_file(file_path, sha)
-      File.exist?(file_path) && Digest::SHA256.file(file_path) == sha
-    end
-
-    def serial_execute(&block)
-      promise = Concurrent::Promise.new(executor: @executor, &block).execute.wait
-      raise promise.reason if promise.rejected?
-      promise.value
-    end
-
-    # Create a cache dir if necessary and update it's last write time. Returns the dir.
-    # Acquires @cache_dir_mutex to ensure we don't try to purge the directory at the same time.
-    # Uses the directory mtime because it's simpler to ensure the directory exists and update
-    # mtime in a single place than with a file in a directory that may not exist.
-    def create_cache_dir(sha)
-      file_dir = File.join(@cache_dir, sha)
-      @cache_dir_mutex.with_read_lock do
-        # mkdir_p doesn't error if the file exists
-        FileUtils.mkdir_p(file_dir, mode: 0o750)
-        FileUtils.touch(file_dir)
-      end
-      file_dir
-    end
-
-    def download_file(file_path, sha, uri)
-      if check_file(file_path, sha)
-        @logger.debug("File was downloaded while queued: #{file_path}")
-        return file_path
-      end
-
-      @logger.debug("Downloading file: #{file_path}")
-
-      tmpfile = Tempfile.new(sha, tmppath)
-      request_file(uri['path'], uri['params'], tmpfile)
-
-      if Digest::SHA256.file(tmpfile.path) == sha
-        # mv doesn't error if the file exists
-        FileUtils.mv(tmpfile.path, file_path)
-        @logger.debug("Downloaded file: #{file_path}")
-        file_path
-      else
-        msg = "Downloaded file did not match checksum for: #{file_path}"
-        @logger.warn msg
-        raise Error, msg
-      end
-    end
-
-    # If the file doesn't exist or is invalid redownload it
-    # This downloads, validates and moves into place
-    def update_file(file_data)
-      sha = file_data['sha256']
-      file_dir = create_cache_dir(file_data['sha256'])
-      file_path = File.join(file_dir, File.basename(file_data['filename']))
-      if check_file(file_path, sha)
-        @logger.debug("Using prexisting file: #{file_path}")
-        return file_path
-      end
-
-      @logger.debug("Queueing download for: #{file_path}")
-      serial_execute { download_file(file_path, sha, file_data['uri']) }
-    end
-
-    def expire(purge_ttl, purge_timeout)
-      expired_time = Time.now - purge_ttl
-      Timeout.timeout(purge_timeout) do
-        @cache_dir_mutex.with_write_lock do
-          Dir.glob(File.join(@cache_dir, '*')).select { |f| File.directory?(f) }.each do |dir|
-            if (mtime = File.mtime(dir)) < expired_time && dir != tmppath
-              @logger.debug("Removing #{dir}, last used at #{mtime}")
-              FileUtils.remove_dir(dir)
-            end
-          end
-        end
-      end
-    end
-
-    def get_cached_project_file(versioned_project, file_name)
-      file_dir = create_cache_dir(versioned_project)
-      file_path = File.join(file_dir, file_name)
-      serial_execute { File.read(file_path) if File.exist?(file_path) }
-    end
-
-    def cache_project_file(versioned_project, file_name, data)
-      file_dir = create_cache_dir(versioned_project)
-      file_path = File.join(file_dir, file_name)
-      serial_execute { File.open(file_path, 'w') { |f| f.write(data) } }
-    end
-  end
-end