hybrid_platforms_conductor 32.13.1 → 32.15.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +48 -0
- data/README.md +9 -2
- data/bin/get_impacted_nodes +1 -1
- data/bin/setup +6 -1
- data/docs/plugins.md +1 -0
- data/docs/plugins/platform_handler/serverless_chef.md +111 -0
- data/docs/tutorial.md +6 -6
- data/docs/tutorial/01_installation.md +14 -12
- data/docs/tutorial/02_first_node.md +14 -12
- data/docs/tutorial/03_scale.md +14 -12
- data/docs/tutorial/04_test.md +14 -12
- data/docs/tutorial/05_extend_with_plugins.md +14 -12
- data/lib/hybrid_platforms_conductor/deployer.rb +2 -1
- data/lib/hybrid_platforms_conductor/hpc_plugins/platform_handler/serverless_chef.rb +535 -0
- data/lib/hybrid_platforms_conductor/hpc_plugins/platform_handler/serverless_chef/dsl_parser.rb +51 -0
- data/lib/hybrid_platforms_conductor/hpc_plugins/platform_handler/serverless_chef/recipes_tree_builder.rb +232 -0
- data/lib/hybrid_platforms_conductor/hpc_plugins/test/vulnerabilities.rb +1 -0
- data/lib/hybrid_platforms_conductor/nodes_handler.rb +9 -5
- data/lib/hybrid_platforms_conductor/version.rb +1 -1
- data/spec/hybrid_platforms_conductor_test.rb +3 -0
- data/spec/hybrid_platforms_conductor_test/api/deployer/provisioner_spec.rb +23 -0
- data/spec/hybrid_platforms_conductor_test/api/nodes_handler/cmdbs_plugins_api_spec.rb +11 -0
- data/spec/hybrid_platforms_conductor_test/api/platform_handlers/serverless_chef/config_dsl_spec.rb +17 -0
- data/spec/hybrid_platforms_conductor_test/api/platform_handlers/serverless_chef/deploy_output_parsing_spec.rb +94 -0
- data/spec/hybrid_platforms_conductor_test/api/platform_handlers/serverless_chef/diff_impacts_spec.rb +317 -0
- data/spec/hybrid_platforms_conductor_test/api/platform_handlers/serverless_chef/inventory_spec.rb +65 -0
- data/spec/hybrid_platforms_conductor_test/api/platform_handlers/serverless_chef/packaging_spec.rb +292 -0
- data/spec/hybrid_platforms_conductor_test/api/platform_handlers/serverless_chef/services_deployment_spec.rb +272 -0
- data/spec/hybrid_platforms_conductor_test/helpers/serverless_chef_helpers.rb +53 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/1_node/chef_versions.yml +3 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/1_node/nodes/node.json +14 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/1_node/policyfiles/test_policy.rb +3 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/data_bags/chef_versions.yml +3 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/data_bags/data_bags/my_bag/my_item.json +4 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/data_bags/nodes/node.json +14 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/data_bags/policyfiles/test_policy.rb +3 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/hpc_test/chef_versions.yml +3 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/hpc_test/cookbooks/hpc_test/recipes/after_run.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/hpc_test/cookbooks/hpc_test/recipes/before_run.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/hpc_test/nodes/node.json +10 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/hpc_test/policyfiles/test_policy.rb +3 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/cookbooks/test_cookbook_1/recipes/default.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/cookbooks/test_cookbook_2/libraries/default.rb +4 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/cookbooks/test_cookbook_2/recipes/default.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/cookbooks/test_cookbook_2/recipes/other_recipe.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/cookbooks/test_cookbook_2/resources/my_resource.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/nodes/node1.json +10 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/nodes/node2.json +10 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/policyfiles/test_policy_1.rb +4 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/recipes/policyfiles/test_policy_2.rb +4 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/config.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/cookbooks/test_cookbook_1/recipes/default.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/nodes/node1.json +10 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/nodes/node2.json +10 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/other_cookbooks/test_cookbook_2/libraries/default.rb +4 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/other_cookbooks/test_cookbook_2/recipes/default.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/other_cookbooks/test_cookbook_2/recipes/other_recipe.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/other_cookbooks/test_cookbook_2/resources/my_resource.rb +1 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/policyfiles/test_policy_1.rb +4 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_cookbooks/policyfiles/test_policy_2.rb +4 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_nodes/chef_versions.yml +3 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_nodes/nodes/local.json +10 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_nodes/nodes/node1.json +10 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_nodes/nodes/node2.json +10 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_nodes/policyfiles/test_policy_1.rb +3 -0
- data/spec/hybrid_platforms_conductor_test/serverless_chef_repositories/several_nodes/policyfiles/test_policy_2.rb +3 -0
- metadata +192 -143
data/lib/hybrid_platforms_conductor/deployer.rb
@@ -318,8 +318,9 @@ module HybridPlatformsConductor
             actions_executor: @actions_executor
           )
           instance.with_running_instance(stop_on_exit: true, destroy_on_exit: !reuse_instance, port: 22) do
-            # Test-provisioned nodes have SSH Session Exec capabilities
+            # Test-provisioned nodes have SSH Session Exec capabilities and are not local
             sub_executable.nodes_handler.override_metadata_of node, :ssh_session_exec, 'true'
+            sub_executable.nodes_handler.override_metadata_of node, :local_node, false
             # Test-provisioned nodes use default sudo
             sub_executable.config.sudo_procs.replace(sub_executable.config.sudo_procs.map do |sudo_proc_info|
               {
data/lib/hybrid_platforms_conductor/hpc_plugins/platform_handler/serverless_chef.rb (new file)
@@ -0,0 +1,535 @@
+require 'fileutils'
+require 'json'
+require 'yaml'
+require 'hybrid_platforms_conductor/platform_handler'
+require 'hybrid_platforms_conductor/hpc_plugins/platform_handler/serverless_chef/dsl_parser'
+require 'hybrid_platforms_conductor/hpc_plugins/platform_handler/serverless_chef/recipes_tree_builder'
+
+module HybridPlatformsConductor
+
+  module HpcPlugins
+
+    module PlatformHandler
+
+      # Handle a Chef repository without using a Chef Infra Server.
+      # Inventory is read from nodes/*.json.
+      # Services are defined from policy files in policyfiles/*.rb.
+      # Roles are not supported as they are considered made obsolete with the usage of policies by the Chef community.
+      # Required Chef versions are taken from a chef_versions.yml file containing the following keys:
+      # * *workstation* (String): The Chef Workstation version to be installed during setup (can be specified as major.minor only)
+      # * *client* (String): The Chef Infra Client version to be installed during nodes deployment (can be specified as major.minor only)
+      class ServerlessChef < HybridPlatformsConductor::PlatformHandler
+
+        # Add a Mixin to the DSL parsing the platforms configuration file.
+        # This can be used by any plugin to add plugin-specific configuration getters and setters, accessible later from NodesHandler instances.
+        # An optional initializer can also be given.
+        # [API] - Those calls are optional
+        module MyDSLExtension
+
+          # The list of library helpers we know include some recipes.
+          # This is used when parsing some recipe code: if such a helper is encountered then we assume a dependency on a given recipe.
+          # Hash< Symbol, Array<String> >: List of recipes definitions per helper name.
+          attr_reader :known_helpers_including_recipes
+
+          # Initialize the DSL
+          def init_serverless_chef
+            @known_helpers_including_recipes = {}
+          end
+
+          # Define helpers including recipes
+          #
+          # Parameters::
+          # * *included_recipes* (Hash< Symbol, Array<String> >): List of recipes definitions per helper name.
+          def helpers_including_recipes(included_recipes)
+            @known_helpers_including_recipes.merge!(included_recipes)
+          end
+
+        end
+        self.extend_config_dsl_with MyDSLExtension, :init_serverless_chef
+
+        # Constructor
+        #
+        # Parameters::
+        # * *platform_type* (Symbol): Platform type
+        # * *repository_path* (String): Repository path
+        # * *logger* (Logger): Logger to be used [default: Logger.new(STDOUT)]
+        # * *logger_stderr* (Logger): Logger to be used for stderr [default: Logger.new(STDERR)]
+        # * *config* (Config): Config to be used. [default: Config.new]
+        # * *cmd_runner* (CmdRunner): Command executor to be used. [default: CmdRunner.new]
+        def initialize(
+          platform_type,
+          repository_path,
+          logger: Logger.new(STDOUT),
+          logger_stderr: Logger.new(STDERR),
+          config: Config.new,
+          cmd_runner: CmdRunner.new
+        )
+          super
+          # Mutex for getting the full recipes tree
+          @recipes_tree_mutex = Mutex.new
+        end
+
+        # Setup the platform, install dependencies...
+        # [API] - This method is optional.
+        # [API] - @cmd_runner is accessible.
+        def setup
+          required_version = YAML.load_file("#{@repository_path}/chef_versions.yml")['workstation']
+          Bundler.with_unbundled_env do
+            exit_status, stdout, _stderr = @cmd_runner.run_cmd '/opt/chef-workstation/bin/chef --version', expected_code: [0, 127]
+            existing_version =
+              if exit_status == 127
+                'not installed'
+              else
+                expected_match = stdout.match(/^Chef Workstation version: (.+)\.\d+$/)
+                expected_match.nil? ? 'unreadable' : expected_match[1]
+              end
+            log_debug "Current Chef version: #{existing_version}. Required version: #{required_version}"
+            @cmd_runner.run_cmd "curl -L https://omnitruck.chef.io/install.sh | sudo bash -s -- -P chef-workstation -v #{required_version}" unless existing_version == required_version
+          end
+        end
+
+        # Get the list of known nodes.
+        # [API] - This method is mandatory.
+        #
+        # Result::
+        # * Array<String>: List of node names
+        def known_nodes
+          Dir.glob("#{@repository_path}/nodes/*.json").map { |file| File.basename(file, '.json') }
+        end
+
+        # Get the metadata of a given node.
+        # [API] - This method is mandatory.
+        #
+        # Parameters::
+        # * *node* (String): Node to read metadata from
+        # Result::
+        # * Hash<Symbol,Object>: The corresponding metadata
+        def metadata_for(node)
+          (json_for(node)['normal'] || {}).transform_keys(&:to_sym)
+        end
+
+        # Return the services for a given node
+        # [API] - This method is mandatory.
+        #
+        # Parameters::
+        # * *node* (String): node to read configuration from
+        # Result::
+        # * Array<String>: The corresponding services
+        def services_for(node)
+          [json_for(node)['policy_name']]
+        end
+
+        # Get the list of services we can deploy
+        # [API] - This method is mandatory.
+        #
+        # Result::
+        # * Array<String>: The corresponding services
+        def deployable_services
+          Dir.glob("#{@repository_path}/policyfiles/*.rb").map { |file| File.basename(file, '.rb') }
+        end
+
+        # Package the repository, ready to be deployed on artefacts or directly to a node.
+        # [API] - This method is optional.
+        # [API] - @cmd_runner is accessible.
+        # [API] - @actions_executor is accessible.
+        #
+        # Parameters::
+        # * *services* (Hash< String, Array<String> >): Services to be deployed, per node
+        # * *secrets* (Hash): Secrets to be used for deployment
+        # * *local_environment* (Boolean): Are we deploying to a local environment?
+        def package(services:, secrets:, local_environment:)
+          # Make a stamp of the info that has been packaged, so that we don't package it again if useless
+          package_info = {
+            secrets: secrets,
+            commit: info[:commit].nil? ? Time.now.utc.strftime('%F %T') : info[:commit][:id],
+            other_files:
+              if info[:status].nil?
+                {}
+              else
+                Hash[
+                  (info[:status][:added_files] + info[:status][:changed_files] + info[:status][:untracked_files]).
+                    sort.
+                    map { |f| [f, File.mtime("#{@repository_path}/#{f}").strftime('%F %T')] }
+                ]
+              end,
+            deleted_files: info[:status].nil? ? [] : info[:status][:deleted_files].sort
+          }
+          # Each service is packaged individually.
+          services.values.flatten.sort.uniq.each do |service|
+            package_dir = "dist/#{local_environment ? 'local' : 'prod'}/#{service}"
+            package_info_file = "#{@repository_path}/#{package_dir}/hpc_package.info"
+            current_package_info = File.exist?(package_info_file) ? JSON.parse(File.read(package_info_file)).transform_keys(&:to_sym) : {}
+            unless current_package_info == package_info
+              Bundler.with_unbundled_env do
+                policy_file = "policyfiles/#{service}.rb"
+                if local_environment
+                  local_policy_file = "policyfiles/#{service}.local.rb"
+                  # In local mode, we always regenerate the lock file as we may modify the run list
+                  run_list = known_cookbook_paths.any? { |cookbook_path| File.exist?("#{@repository_path}/#{cookbook_path}/hpc_test/recipes/before_run.rb") } ? ['hpc_test::before_run'] : []
+                  dsl_parser = DslParser.new
+                  dsl_parser.parse("#{@repository_path}/#{policy_file}")
+                  run_list.concat dsl_parser.calls.find { |call_info| call_info[:method] == :run_list }[:args].flatten
+                  run_list << 'hpc_test::after_run' if known_cookbook_paths.any? { |cookbook_path| File.exist?("#{@repository_path}/#{cookbook_path}/hpc_test/recipes/after_run.rb") }
+                  File.write("#{@repository_path}/#{local_policy_file}", File.read("#{@repository_path}/#{policy_file}") + "\nrun_list #{run_list.map { |recipe| "'#{recipe}'" }.join(', ')}\n")
+                  policy_file = local_policy_file
+                end
+                lock_file = "#{File.dirname(policy_file)}/#{File.basename(policy_file, '.rb')}.lock.json"
+                # If the policy lock file does not exist, generate it
+                @cmd_runner.run_cmd "cd #{@repository_path} && /opt/chef-workstation/bin/chef install #{policy_file}" unless File.exist?("#{@repository_path}/#{lock_file}")
+                extra_cp_data_bags = File.exist?("#{@repository_path}/data_bags") ? " && cp -ar data_bags/ #{package_dir}/" : ''
+                @cmd_runner.run_cmd "cd #{@repository_path} && \
+                  sudo rm -rf #{package_dir} && \
+                  /opt/chef-workstation/bin/chef export #{policy_file} #{package_dir}#{extra_cp_data_bags}"
+              end
+              unless @cmd_runner.dry_run
+                # Create secrets file
+                secrets_file = "#{@repository_path}/#{package_dir}/data_bags/hpc_secrets/hpc_secrets.json"
+                FileUtils.mkdir_p(File.dirname(secrets_file))
+                File.write(secrets_file, secrets.merge(id: 'hpc_secrets').to_json)
+                # Remember the package info
+                File.write(package_info_file, package_info.to_json)
+              end
+            end
+          end
+        end
+
+        # Prepare deployments.
+        # This method is called just before getting and executing the actions to be deployed.
+        # It is called once per platform.
+        # [API] - This method is optional.
+        # [API] - @cmd_runner is accessible.
+        # [API] - @actions_executor is accessible.
+        #
+        # Parameters::
+        # * *services* (Hash< String, Array<String> >): Services to be deployed, per node
+        # * *secrets* (Hash): Secrets to be used for deployment
+        # * *local_environment* (Boolean): Are we deploying to a local environment?
+        # * *why_run* (Boolean): Are we deploying in why-run mode?
+        def prepare_for_deploy(services:, secrets:, local_environment:, why_run:)
+          @local_env = local_environment
+        end
+
+        # Get the list of actions to perform to deploy on a given node.
+        # Those actions can be executed in parallel with other deployments on other nodes. They must be thread safe.
+        # [API] - This method is mandatory.
+        # [API] - @cmd_runner is accessible.
+        # [API] - @actions_executor is accessible.
+        #
+        # Parameters::
+        # * *node* (String): Node to deploy on
+        # * *service* (String): Service to be deployed
+        # * *use_why_run* (Boolean): Do we use a why-run mode? [default = true]
+        # Result::
+        # * Array< Hash<Symbol,Object> >: List of actions to be done
+        def actions_to_deploy_on(node, service, use_why_run: true)
+          package_dir = "#{@repository_path}/dist/#{@local_env ? 'local' : 'prod'}/#{service}"
+          # Generate the nodes attributes file
+          unless @cmd_runner.dry_run
+            FileUtils.mkdir_p "#{package_dir}/nodes"
+            File.write("#{package_dir}/nodes/#{node}.json", (known_nodes.include?(node) ? metadata_for(node) : {}).merge(@nodes_handler.metadata_of(node)).to_json)
+          end
+          client_options = [
+            '--local-mode',
+            '--chef-license', 'accept',
+            '--json-attributes', "nodes/#{node}.json"
+          ]
+          client_options << '--why-run' if use_why_run
+          if @nodes_handler.get_use_local_chef_of(node)
+            # Just run the chef-client directly from the packaged repository
+            [{ bash: "cd #{package_dir} && sudo SSL_CERT_DIR=/etc/ssl/certs /opt/chef-workstation/bin/chef-client #{client_options.join(' ')}" }]
+          else
+            # Upload the package and run it from the node
+            package_name = File.basename(package_dir)
+            chef_versions_file = "#{@repository_path}/chef_versions.yml"
+            raise "Missing file #{chef_versions_file} specifying the Chef Infra Client version to be deployed" unless File.exist?(chef_versions_file)
+            required_chef_client_version = YAML.load_file(chef_versions_file)['client']
+            sudo = (@actions_executor.connector(:ssh).ssh_user == 'root' ? '' : "#{@nodes_handler.sudo_on(node)} ")
+            [
+              {
+                # Install dependencies
+                remote_bash: [
+                  'set -e',
+                  'set -o pipefail',
+                  "if [ -n \"$(command -v apt)\" ]; then #{sudo}apt update && #{sudo}apt install -y curl build-essential ; else #{sudo}yum groupinstall 'Development Tools' && #{sudo}yum install -y curl ; fi",
+                  'mkdir -p ./hpc_deploy',
+                  'rm -rf ./hpc_deploy/tmp',
+                  'mkdir -p ./hpc_deploy/tmp',
+                  'curl --location https://omnitruck.chef.io/install.sh --output ./hpc_deploy/install.sh',
+                  'chmod a+x ./hpc_deploy/install.sh',
+                  "#{sudo}TMPDIR=./hpc_deploy/tmp ./hpc_deploy/install.sh -d /opt/artefacts -v #{required_chef_client_version} -s once"
+                ]
+              },
+              {
+                scp: { package_dir => './hpc_deploy' },
+                remote_bash: [
+                  'set -e',
+                  "cd ./hpc_deploy/#{package_name}",
+                  "#{sudo}SSL_CERT_DIR=/etc/ssl/certs /opt/chef/bin/chef-client #{client_options.join(' ')}",
+                  'cd ..'
+                ] + (log_debug? ? [] : ["#{sudo}rm -rf ./hpc_deploy/#{package_name}"])
+              }
+            ]
+          end
+        end
+
+        # Parse stdout and stderr of a given deploy run and get the list of tasks with their status
+        # [API] - This method is mandatory.
+        #
+        # Parameters::
+        # * *stdout* (String): stdout to be parsed
+        # * *stderr* (String): stderr to be parsed
+        # Result::
+        # * Array< Hash<Symbol,Object> >: List of task properties. The following properties should be returned, among free ones:
+        #   * *name* (String): Task name
+        #   * *status* (Symbol): Task status. Should be one of:
+        #     * *:changed*: The task has been changed
+        #     * *:identical*: The task has not been changed
+        #   * *diffs* (String): Differences, if any
+        def parse_deploy_output(stdout, stderr)
+          tasks = []
+          current_task = nil
+          stdout.split("\n").each do |line|
+            # Remove control chars and spaces around
+            case line.gsub(/\e\[[^\x40-\x7E]*[\x40-\x7E]/, '').strip
+            when /^\* (\w+\[[^\]]+\]) action (.+)$/
+              # New task
+              task_name = $1
+              task_action = $2
+              current_task = {
+                name: task_name,
+                action: task_action,
+                status: :identical
+              }
+              tasks << current_task
+            when /^- (.+)$/
+              # Diff on the current task
+              diff_description = $1
+              unless current_task.nil?
+                current_task[:diffs] = '' unless current_task.key?(:diffs)
+                current_task[:diffs] << "#{diff_description}\n"
+                current_task[:status] = :changed
+              end
+            end
+          end
+          tasks
+        end
+
+        # Get the list of impacted nodes and services from a files diff.
+        # [API] - This method is optional
+        #
+        # Parameters::
+        # * *files_diffs* (Hash< String, Hash< Symbol, Object > >): List of diffs info, per file name having a diff. Diffs info have the following properties:
+        #   * *moved_to* (String): The new file path, in case it has been moved [optional]
+        #   * *diff* (String): The diff content
+        # Result::
+        # * Array<String>: The list of nodes impacted by this diff
+        # * Array<String>: The list of services impacted by this diff
+        # * Boolean: Are there some files that have a global impact (meaning all nodes are potentially impacted by this diff)?
+        def impacts_from(files_diffs)
+          impacted_nodes = []
+          impacted_services = []
+          # List of impacted [cookbook, recipe]
+          # Array< [Symbol, Symbol] >
+          impacted_recipes = []
+          impacted_global = false
+          files_diffs.keys.sort.each do |impacted_file|
+            if impacted_file =~ /^policyfiles\/([^\/]+)\.rb$/
+              log_debug "[#{impacted_file}] - Impacted service: #{$1}"
+              impacted_services << $1
+            elsif impacted_file =~ /^policyfiles\/([^\/]+)\.lock.json$/
+              log_debug "[#{impacted_file}] - Impacted service: #{$1}"
+              impacted_services << $1
+            elsif impacted_file =~ /^nodes\/([^\/]+)\.json/
+              log_debug "[#{impacted_file}] - Impacted node: #{$1}"
+              impacted_nodes << $1
+            else
+              cookbook_path = known_cookbook_paths.find { |cookbooks_path| impacted_file =~ /^#{Regexp.escape(cookbooks_path)}\/.+$/ }
+              if cookbook_path.nil?
+                # Global file
+                log_debug "[#{impacted_file}] - Global file impacted"
+                impacted_global = true
+              else
+                # File belonging to a cookbook
+                cookbook_name, file_path = impacted_file.match(/^#{cookbook_path}\/(\w+)\/(.+)$/)[1..2]
+                cookbook = cookbook_name.to_sym
+                # Small helper to register a recipe
+                register = proc do |source, recipe_name, cookbook_name: cookbook|
+                  cookbook_name = cookbook_name.to_sym if cookbook_name.is_a?(String)
+                  log_debug "[#{impacted_file}] - Impacted recipe from #{source}: #{cookbook_name}::#{recipe_name}"
+                  impacted_recipes << [cookbook_name, recipe_name.to_sym]
+                end
+                case file_path
+                when /recipes\/(.+)\.rb/
+                  register.call('direct', $1)
+                when /attributes\/.+\.rb/, 'metadata.rb'
+                  # Consider all recipes are impacted
+                  Dir.glob("#{@repository_path}/#{cookbook_path}/#{cookbook}/recipes/*.rb") do |recipe_path|
+                    register.call('attributes', File.basename(recipe_path, '.rb'))
+                  end
+                when /(templates|files)\/(.+)/
+                  # Find recipes using this file name
+                  included_file = File.basename($2)
+                  template_regexp = /["']#{Regexp.escape(included_file)}["']/
+                  Dir.glob("#{@repository_path}/#{cookbook_path}/#{cookbook}/recipes/*.rb") do |recipe_path|
+                    register.call("included file #{included_file}", File.basename(recipe_path, '.rb')) if File.read(recipe_path) =~ template_regexp
+                  end
+                when /resources\/(.+)/
+                  # Find any recipe using this resource
+                  included_resource = "#{cookbook}_#{File.basename($1, '.rb')}"
+                  resource_regexp = /(\W|^)#{Regexp.escape(included_resource)}(\W|$)/
+                  known_cookbook_paths.each do |cookbooks_path|
+                    Dir.glob("#{@repository_path}/#{cookbooks_path}/**/recipes/*.rb") do |recipe_path|
+                      if File.read(recipe_path) =~ resource_regexp
+                        cookbook_name, recipe_name = recipe_path.match(/#{cookbooks_path}\/(\w+)\/recipes\/(\w+)\.rb/)[1..2]
+                        register.call("included resource #{included_resource}", recipe_name, cookbook_name: cookbook_name)
+                      end
+                    end
+                  end
+                when /libraries\/(.+)/
+                  # Find any recipe using methods from this library
+                  lib_methods_regexps = File.read("#{@repository_path}/#{impacted_file}").scan(/(\W|^)def\s+(\w+)(\W|$)/).map { |_grp1, method_name, _grp2| /(\W|^)#{Regexp.escape(method_name)}(\W|$)/ }
+                  known_cookbook_paths.each do |cookbooks_path|
+                    Dir.glob("#{@repository_path}/#{cookbooks_path}/**/recipes/*.rb") do |recipe_path|
+                      file_content = File.read(recipe_path)
+                      found_lib_regexp = lib_methods_regexps.find { |regexp| file_content =~ regexp }
+                      unless found_lib_regexp.nil?
+                        cookbook_name, recipe_name = recipe_path.match(/#{cookbooks_path}\/(\w+)\/recipes\/(\w+)\.rb/)[1..2]
+                        register.call("included library helper #{found_lib_regexp.source[6..-7]}", recipe_name, cookbook_name: cookbook_name)
+                      end
+                    end
+                  end
+                when 'README.md', 'README.rdoc', 'CHANGELOG.md', '.rubocop.yml'
+                  # Ignore them
+                else
+                  log_warn "[#{impacted_file}] - Unknown impact for cookbook file belonging to #{cookbook}"
+                  # Consider all recipes are impacted by default
+                  Dir.glob("#{@repository_path}/#{cookbook_path}/#{cookbook}/recipes/*.rb") do |recipe_path|
+                    register.call('attributes', File.basename(recipe_path, '.rb'))
+                  end
+                end
+              end
+            end
+          end
+
+          # Devise the impacted services from the impacted recipes we just found.
+          impacted_recipes.uniq!
+          log_debug "* #{impacted_recipes.size} impacted recipes:\n#{impacted_recipes.map { |(cookbook, recipe)| "#{cookbook}::#{recipe}" }.sort.join("\n")}"
+
+          recipes_tree = full_recipes_tree
+          [
+            impacted_nodes,
+            (
+              impacted_services +
+                # Gather the list of services using the impacted recipes
+                impacted_recipes.map do |(cookbook, recipe)|
+                  recipe_info = recipes_tree.dig cookbook, recipe
+                  recipe_info.nil? ? [] : recipe_info[:used_by_policies]
+                end.flatten
+            ).sort.uniq,
+            impacted_global
+          ]
+        end
+
+        # Return the list of possible cookbook paths from this repository only.
+        # Returned paths are relative to the repository path.
+        #
+        # Result::
+        # * Array<String>: Known cookbook paths
+        def known_cookbook_paths
+          # Keep a cache of it for performance.
+          unless defined?(@cookbook_paths)
+            config_file = "#{@repository_path}/config.rb"
+            @cookbook_paths = (
+              ['cookbooks'] +
+                if File.exist?(config_file)
+                  # Read the knife configuration to get cookbook paths
+                  dsl_parser = DslParser.new
+                  dsl_parser.parse(config_file)
+                  cookbook_path_call = dsl_parser.calls.find { |call_info| call_info[:method] == :cookbook_path }
+                  cookbook_path_call.nil? ? [] : cookbook_path_call[:args].first
+                else
+                  []
+                end
+            ).
+              map do |dir|
+                # Only keep dirs that actually exist and are part of our repository
+                full_path = dir.start_with?('/') ? dir : File.expand_path("#{@repository_path}/#{dir}")
+                full_path.start_with?(@repository_path) && File.exist?(full_path) ? full_path.gsub("#{@repository_path}/", '') : nil
+              end.
+              compact.
+              sort.
+              uniq
+          end
+          @cookbook_paths
+        end
+
+        # Get the run list of a given policy
+        #
+        # Parameters::
+        # * *policy* (String): Policy to get the run list from
+        # Result::
+        # * Array<[String or nil, Symbol, Symbol]>: Run list of the given policy, as [cookbook_dir, cookbook, recipe]
+        def policy_run_list(policy)
+          # Read the policy file
+          dsl_parser = DslParser.new
+          policy_file = "#{@repository_path}/policyfiles/#{policy}.rb"
+          dsl_parser.parse(policy_file)
+          run_list_call = dsl_parser.calls.find { |call_info| call_info[:method] == :run_list }
+          raise "Policy #{policy} has no run list defined in #{policy_file}" if run_list_call.nil?
+          run_list_call[:args].map { |recipe_def| decode_recipe(recipe_def) }
+        end
+
+        # Return the cookbook directory, cookbook name and recipe name from which a recipe definition is found.
+        # The following forms are handled:
+        # * cookbook
+        # * cookbook::recipe
+        # * recipe[cookbook]
+        # * recipe[cookbook::recipe]
+        #
+        # Parameters::
+        # * *recipe_def* (String): Recipe definition (cookbook or cookbook::recipe).
+        # Result::
+        # * String: The cookbook directory, or nil if unknown
+        # * Symbol: The cookbook name
+        # * Symbol: The recipe name
+        def decode_recipe(recipe_def)
+          recipe_def = $1 if recipe_def =~ /^recipe\[(.+)\]$/
+          cookbook, recipe = recipe_def.split('::').map(&:to_sym)
+          recipe = :default if recipe.nil?
+          # Find the cookbook it belongs to
+          cookbook_dir = known_cookbook_paths.find { |cookbook_path| File.exist?("#{@repository_path}/#{cookbook_path}/#{cookbook}") }
+          raise "Unknown recipe #{cookbook}::#{recipe} from cookbook #{@repository_path}/#{cookbook_dir}/#{cookbook}." if !cookbook_dir.nil? && !File.exist?("#{@repository_path}/#{cookbook_dir}/#{cookbook}/recipes/#{recipe}.rb")
+          return cookbook_dir, cookbook, recipe
+        end
+
+        private
+
+        # Return the JSON associated to a node
+        #
+        # Parameters::
+        # * *node* (String): The node to search for
+        # Result::
+        # * Hash: JSON object of this node
+        def json_for(node)
+          JSON.parse(File.read("#{@repository_path}/nodes/#{node}.json"))
+        end
+
+        # Get the full recipes tree.
+        # Keep it in a cache for performance.
+        #
+        # Result::
+        # * Hash: The recipes tree. See RecipesTreeBuilder#full_recipes_tree for the detailed signature
+        def full_recipes_tree
+          @recipes_tree_mutex.synchronize do
+            @recipes_tree = RecipesTreeBuilder.new(@config, self).full_recipes_tree unless defined?(@recipes_tree)
+          end
+          @recipes_tree
+        end
+
+      end
+
+    end
+
+  end
+
+end
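
For reference, the setup method above reads the workstation key of chef_versions.yml and actions_to_deploy_on reads its client key. A minimal sketch of such a file, with purely illustrative version numbers:

  ---
  workstation: '21.5'
  client: '17.0'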
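
The inventory methods above (known_nodes, metadata_for, services_for) imply the following shape for a nodes/&lt;node&gt;.json file. Only the normal and policy_name keys are read by this handler; the metadata entries under normal are hypothetical examples:

  {
    "normal": {
      "hostname": "my-node.example.com",
      "description": "Example node"
    },
    "policy_name": "test_policy"
  }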
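
The MyDSLExtension mixin above makes helpers_including_recipes available in the platforms configuration DSL. A minimal sketch of its use, with hypothetical helper and recipe names, declaring that any recipe calling the my_app_setup helper also depends on my_cookbook::app_config:

  helpers_including_recipes(
    my_app_setup: ['my_cookbook::app_config']
  )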