tobsch-krane 1.0.0
This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.buildkite/pipeline.nightly.yml +43 -0
- data/.github/probots.yml +2 -0
- data/.gitignore +20 -0
- data/.rubocop.yml +17 -0
- data/.shopify-build/VERSION +1 -0
- data/.shopify-build/kubernetes-deploy.yml +53 -0
- data/1.0-Upgrade.md +185 -0
- data/CHANGELOG.md +431 -0
- data/CODE_OF_CONDUCT.md +46 -0
- data/CONTRIBUTING.md +164 -0
- data/Gemfile +16 -0
- data/ISSUE_TEMPLATE.md +25 -0
- data/LICENSE.txt +21 -0
- data/README.md +655 -0
- data/Rakefile +36 -0
- data/bin/ci +21 -0
- data/bin/setup +16 -0
- data/bin/test +47 -0
- data/dev.yml +28 -0
- data/dev/flamegraph-from-tests +35 -0
- data/exe/krane +5 -0
- data/krane.gemspec +44 -0
- data/lib/krane.rb +7 -0
- data/lib/krane/bindings_parser.rb +88 -0
- data/lib/krane/cli/deploy_command.rb +75 -0
- data/lib/krane/cli/global_deploy_command.rb +54 -0
- data/lib/krane/cli/krane.rb +91 -0
- data/lib/krane/cli/render_command.rb +41 -0
- data/lib/krane/cli/restart_command.rb +34 -0
- data/lib/krane/cli/run_command.rb +54 -0
- data/lib/krane/cli/version_command.rb +13 -0
- data/lib/krane/cluster_resource_discovery.rb +113 -0
- data/lib/krane/common.rb +23 -0
- data/lib/krane/concerns/template_reporting.rb +29 -0
- data/lib/krane/concurrency.rb +18 -0
- data/lib/krane/container_logs.rb +106 -0
- data/lib/krane/deferred_summary_logging.rb +95 -0
- data/lib/krane/delayed_exceptions.rb +14 -0
- data/lib/krane/deploy_task.rb +363 -0
- data/lib/krane/deploy_task_config_validator.rb +29 -0
- data/lib/krane/duration_parser.rb +27 -0
- data/lib/krane/ejson_secret_provisioner.rb +154 -0
- data/lib/krane/errors.rb +28 -0
- data/lib/krane/formatted_logger.rb +57 -0
- data/lib/krane/global_deploy_task.rb +210 -0
- data/lib/krane/global_deploy_task_config_validator.rb +12 -0
- data/lib/krane/kubeclient_builder.rb +156 -0
- data/lib/krane/kubectl.rb +120 -0
- data/lib/krane/kubernetes_resource.rb +621 -0
- data/lib/krane/kubernetes_resource/cloudsql.rb +43 -0
- data/lib/krane/kubernetes_resource/config_map.rb +22 -0
- data/lib/krane/kubernetes_resource/cron_job.rb +18 -0
- data/lib/krane/kubernetes_resource/custom_resource.rb +87 -0
- data/lib/krane/kubernetes_resource/custom_resource_definition.rb +98 -0
- data/lib/krane/kubernetes_resource/daemon_set.rb +90 -0
- data/lib/krane/kubernetes_resource/deployment.rb +213 -0
- data/lib/krane/kubernetes_resource/horizontal_pod_autoscaler.rb +65 -0
- data/lib/krane/kubernetes_resource/ingress.rb +18 -0
- data/lib/krane/kubernetes_resource/job.rb +60 -0
- data/lib/krane/kubernetes_resource/network_policy.rb +22 -0
- data/lib/krane/kubernetes_resource/persistent_volume_claim.rb +80 -0
- data/lib/krane/kubernetes_resource/pod.rb +269 -0
- data/lib/krane/kubernetes_resource/pod_disruption_budget.rb +23 -0
- data/lib/krane/kubernetes_resource/pod_set_base.rb +71 -0
- data/lib/krane/kubernetes_resource/pod_template.rb +20 -0
- data/lib/krane/kubernetes_resource/replica_set.rb +92 -0
- data/lib/krane/kubernetes_resource/resource_quota.rb +22 -0
- data/lib/krane/kubernetes_resource/role.rb +22 -0
- data/lib/krane/kubernetes_resource/role_binding.rb +22 -0
- data/lib/krane/kubernetes_resource/secret.rb +24 -0
- data/lib/krane/kubernetes_resource/service.rb +104 -0
- data/lib/krane/kubernetes_resource/service_account.rb +22 -0
- data/lib/krane/kubernetes_resource/stateful_set.rb +70 -0
- data/lib/krane/label_selector.rb +42 -0
- data/lib/krane/oj.rb +4 -0
- data/lib/krane/options_helper.rb +39 -0
- data/lib/krane/remote_logs.rb +60 -0
- data/lib/krane/render_task.rb +118 -0
- data/lib/krane/renderer.rb +118 -0
- data/lib/krane/resource_cache.rb +68 -0
- data/lib/krane/resource_deployer.rb +265 -0
- data/lib/krane/resource_watcher.rb +171 -0
- data/lib/krane/restart_task.rb +228 -0
- data/lib/krane/rollout_conditions.rb +103 -0
- data/lib/krane/runner_task.rb +212 -0
- data/lib/krane/runner_task_config_validator.rb +18 -0
- data/lib/krane/statsd.rb +65 -0
- data/lib/krane/task_config.rb +22 -0
- data/lib/krane/task_config_validator.rb +96 -0
- data/lib/krane/template_sets.rb +173 -0
- data/lib/krane/version.rb +4 -0
- data/pull_request_template.md +8 -0
- data/screenshots/deploy-demo.gif +0 -0
- data/screenshots/migrate-logs.png +0 -0
- data/screenshots/missing-secret-fail.png +0 -0
- data/screenshots/success.png +0 -0
- data/screenshots/test-output.png +0 -0
- metadata +375 -0
data/lib/krane/rollout_conditions.rb
ADDED
@@ -0,0 +1,103 @@
# frozen_string_literal: true
module Krane
  class RolloutConditionsError < StandardError
  end

  class RolloutConditions
    VALID_FAILURE_CONDITION_KEYS = [:path, :value, :error_msg_path, :custom_error_msg]
    VALID_SUCCESS_CONDITION_KEYS = [:path, :value]

    class << self
      def from_annotation(conditions_string)
        return new(default_conditions) if conditions_string.downcase.strip == "true"

        conditions = JSON.parse(conditions_string).slice('success_conditions', 'failure_conditions')
        conditions.deep_symbolize_keys!

        # Create JsonPath objects
        conditions[:success_conditions]&.each do |query|
          query.slice!(*VALID_SUCCESS_CONDITION_KEYS)
          query[:path] = JsonPath.new(query[:path]) if query.key?(:path)
        end
        conditions[:failure_conditions]&.each do |query|
          query.slice!(*VALID_FAILURE_CONDITION_KEYS)
          query[:path] = JsonPath.new(query[:path]) if query.key?(:path)
          query[:error_msg_path] = JsonPath.new(query[:error_msg_path]) if query.key?(:error_msg_path)
        end

        new(conditions)
      rescue JSON::ParserError => e
        raise RolloutConditionsError, "Rollout conditions are not valid JSON: #{e}"
      rescue StandardError => e
        raise RolloutConditionsError,
          "Error parsing rollout conditions. " \
          "This is most likely caused by an invalid JsonPath expression. Failed with: #{e}"
      end

      def default_conditions
        {
          success_conditions: [
            {
              path: JsonPath.new('$.status.conditions[?(@.type == "Ready")].status'),
              value: "True",
            },
          ],
          failure_conditions: [
            {
              path: JsonPath.new('$.status.conditions[?(@.type == "Failed")].status'),
              value: "True",
              error_msg_path: JsonPath.new('$.status.conditions[?(@.type == "Failed")].message'),
            },
          ],
        }
      end
    end

    def initialize(conditions)
      @success_conditions = conditions.fetch(:success_conditions, [])
      @failure_conditions = conditions.fetch(:failure_conditions, [])
    end

    def rollout_successful?(instance_data)
      @success_conditions.all? do |query|
        query[:path].first(instance_data) == query[:value]
      end
    end

    def rollout_failed?(instance_data)
      @failure_conditions.any? do |query|
        query[:path].first(instance_data) == query[:value]
      end
    end

    def failure_messages(instance_data)
      @failure_conditions.map do |query|
        next unless query[:path].first(instance_data) == query[:value]
        query[:custom_error_msg].presence || query[:error_msg_path]&.first(instance_data)
      end.compact
    end

    def validate!
      errors = validate_conditions(@success_conditions, 'success_conditions')
      errors += validate_conditions(@failure_conditions, 'failure_conditions', required: false)
      raise RolloutConditionsError, errors.join(", ") unless errors.empty?
    end

    private

    def validate_conditions(conditions, source_key, required: true)
      return [] unless conditions.present? || required
      errors = []
      errors << "#{source_key} should be Array but found #{conditions.class}" unless conditions.is_a?(Array)
      return errors if errors.present?
      errors << "#{source_key} must contain at least one entry" if conditions.empty?
      return errors if errors.present?

      conditions.each do |query|
        missing = [:path, :value].reject { |k| query.key?(k) }
        errors << "Missing required key(s) for #{source_key.singularize}: #{missing}" if missing.present?
      end
      errors
    end
  end
end
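Editor's note: the class above parses the rollout conditions krane lets you attach to custom resources (configured through an annotation on the CRD) and evaluates them against live instance data. A minimal sketch of exercising it directly; the require lines, JSON paths, and values are illustrative assumptions, not taken from this package:

require 'krane/common'              # assumed to load the ActiveSupport core extensions used below
require 'krane/rollout_conditions'
require 'jsonpath'
require 'json'

# Illustrative payload: succeed once .status.observedGeneration is 1,
# fail when .status.failed is true (paths and values are made up).
annotation = {
  success_conditions: [{ path: "$.status.observedGeneration", value: 1 }],
  failure_conditions: [
    { path: "$.status.failed", value: true, error_msg_path: "$.status.message" },
  ],
}.to_json

conditions = Krane::RolloutConditions.from_annotation(annotation)
conditions.validate!

instance = { "status" => { "observedGeneration" => 1 } }
conditions.rollout_successful?(instance)  # => true
conditions.rollout_failed?(instance)      # => false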
data/lib/krane/runner_task.rb
ADDED
@@ -0,0 +1,212 @@
# frozen_string_literal: true
require 'tempfile'

require 'krane/common'
require 'krane/kubeclient_builder'
require 'krane/kubectl'
require 'krane/resource_cache'
require 'krane/resource_watcher'
require 'krane/kubernetes_resource'
require 'krane/kubernetes_resource/pod'
require 'krane/runner_task_config_validator'

module Krane
  # Run a pod that exits upon completing a task
  class RunnerTask
    class TaskTemplateMissingError < TaskConfigurationError; end

    attr_reader :pod_name

    # Initializes the runner task
    #
    # @param namespace [String] Kubernetes namespace (*required*)
    # @param context [String] Kubernetes context / cluster (*required*)
    # @param logger [Object] Logger object (defaults to an instance of Krane::FormattedLogger)
    # @param global_timeout [Integer] Timeout in seconds
    def initialize(namespace:, context:, logger: nil, global_timeout: nil)
      @logger = logger || Krane::FormattedLogger.build(namespace, context)
      @task_config = Krane::TaskConfig.new(context, namespace, @logger)
      @namespace = namespace
      @context = context
      @global_timeout = global_timeout
    end

    # Runs the task, returning a boolean representing success or failure
    #
    # @return [Boolean]
    def run(*args)
      run!(*args)
      true
    rescue DeploymentTimeoutError, FatalDeploymentError
      false
    end

    # Runs the task, raising exceptions in case of issues
    #
    # @param template [String] The filename of the template you'll be rendering (*required*)
    # @param command [Array<String>] Override the default command in the container image
    # @param arguments [Array<String>] Override the default arguments for the command
    # @param env_vars [Array<String>] List of env vars
    # @param verify_result [Boolean] Wait for completion and verify pod success
    #
    # @return [nil]
    def run!(template:, command:, arguments:, env_vars: [], verify_result: true)
      start = Time.now.utc
      @logger.reset

      @logger.phase_heading("Initializing task")

      @logger.info("Validating configuration")
      verify_config!(template)
      @logger.info("Using namespace '#{@namespace}' in context '#{@context}'")

      pod = build_pod(template, command, arguments, env_vars, verify_result)
      validate_pod(pod)

      @logger.phase_heading("Running pod")
      create_pod(pod)

      if verify_result
        @logger.phase_heading("Streaming logs")
        watch_pod(pod)
      else
        record_status_once(pod)
      end
      StatsD.client.distribution('task_runner.duration', StatsD.duration(start), tags: statsd_tags('success'))
      @logger.print_summary(:success)
    rescue DeploymentTimeoutError
      StatsD.client.distribution('task_runner.duration', StatsD.duration(start), tags: statsd_tags('timeout'))
      @logger.print_summary(:timed_out)
      raise
    rescue FatalDeploymentError
      StatsD.client.distribution('task_runner.duration', StatsD.duration(start), tags: statsd_tags('failure'))
      @logger.print_summary(:failure)
      raise
    end

    private

    def create_pod(pod)
      @logger.info("Creating pod '#{pod.name}'")
      pod.deploy_started_at = Time.now.utc
      kubeclient.create_pod(pod.to_kubeclient_resource)
      @pod_name = pod.name
      @logger.info("Pod creation succeeded")
    rescue Kubeclient::HttpError => e
      msg = "Failed to create pod: #{e.class.name}: #{e.message}"
      @logger.summary.add_paragraph(msg)
      raise FatalDeploymentError, msg
    end

    def build_pod(template_name, command, args, env_vars, verify_result)
      task_template = get_template(template_name)
      @logger.info("Using template '#{template_name}'")
      pod_template = build_pod_definition(task_template)
      set_container_overrides!(pod_template, command, args, env_vars)
      ensure_valid_restart_policy!(pod_template, verify_result)
      Pod.new(namespace: @namespace, context: @context, logger: @logger, stream_logs: true,
        definition: pod_template.to_hash.deep_stringify_keys, statsd_tags: [])
    end

    def validate_pod(pod)
      pod.validate_definition(kubectl)
    end

    def watch_pod(pod)
      rw = ResourceWatcher.new(resources: [pod], timeout: @global_timeout,
        operation_name: "run", task_config: @task_config)
      rw.run(delay_sync: 1, reminder_interval: 30.seconds)
      raise DeploymentTimeoutError if pod.deploy_timed_out?
      raise FatalDeploymentError if pod.deploy_failed?
    end

    def record_status_once(pod)
      cache = ResourceCache.new(@task_config)
      pod.sync(cache)
      warning = <<~STRING
        #{ColorizedString.new('Result verification is disabled for this task.').yellow}
        The following status was observed immediately after pod creation:
        #{pod.pretty_status}
      STRING
      @logger.summary.add_paragraph(warning)
    end

    def verify_config!(task_template)
      task_config_validator = RunnerTaskConfigValidator.new(task_template, @task_config, kubectl,
        kubeclient_builder)
      unless task_config_validator.valid?
        @logger.summary.add_action("Configuration invalid")
        @logger.summary.add_paragraph([task_config_validator.errors].map { |err| "- #{err}" }.join("\n"))
        raise Krane::TaskConfigurationError
      end
    end

    def get_template(template_name)
      pod_template = kubeclient.get_pod_template(template_name, @namespace)
      pod_template.template
    rescue Kubeclient::ResourceNotFoundError
      msg = "Pod template `#{template_name}` not found in namespace `#{@namespace}`, context `#{@context}`"
      @logger.summary.add_paragraph(msg)
      raise TaskTemplateMissingError, msg
    rescue Kubeclient::HttpError => error
      raise FatalKubeAPIError, "Error retrieving pod template: #{error.class.name}: #{error.message}"
    end

    def build_pod_definition(base_template)
      pod_definition = base_template.dup
      pod_definition.kind = 'Pod'
      pod_definition.apiVersion = 'v1'
      pod_definition.metadata.namespace = @namespace

      unique_name = pod_definition.metadata.name + "-" + SecureRandom.hex(8)
      @logger.warn("Name is too long, using '#{unique_name[0..62]}'") if unique_name.length > 63
      pod_definition.metadata.name = unique_name[0..62]

      pod_definition
    end

    def set_container_overrides!(pod_definition, command, args, env_vars)
      container = pod_definition.spec.containers.find { |cont| cont.name == 'task-runner' }
      if container.nil?
        message = "Pod spec does not contain a template container called 'task-runner'"
        @logger.summary.add_paragraph(message)
        raise TaskConfigurationError, message
      end

      container.command = command if command
      container.args = args if args

      env_args = env_vars.map do |env|
        key, value = env.split('=', 2)
        { name: key, value: value }
      end
      container.env ||= []
      container.env = container.env.map(&:to_h) + env_args
    end

    def ensure_valid_restart_policy!(template, verify)
      restart_policy = template.spec.restartPolicy
      if verify && restart_policy != "Never"
        @logger.warn("Changed Pod RestartPolicy from '#{restart_policy}' to 'Never'. Disable "\
          "result verification to use '#{restart_policy}'.")
        template.spec.restartPolicy = "Never"
      end
    end

    def kubectl
      @kubectl ||= Kubectl.new(task_config: @task_config, log_failure_by_default: true)
    end

    def kubeclient
      @kubeclient ||= kubeclient_builder.build_v1_kubeclient(@context)
    end

    def kubeclient_builder
      @kubeclient_builder ||= KubeclientBuilder.new
    end

    def statsd_tags(status)
      %W(namespace:#{@namespace} context:#{@context} status:#{status})
    end
  end
end
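Editor's note: this is the machinery behind the gem's run command. A hedged sketch of driving it from Ruby; the namespace, context, PodTemplate name, command, and env var below are placeholders, not values shipped with this package:

require 'krane/runner_task'

task = Krane::RunnerTask.new(
  namespace: "my-namespace",   # placeholder
  context: "my-context",       # placeholder
  global_timeout: 300          # seconds
)

# Fetches the named PodTemplate, overrides its 'task-runner' container,
# creates the pod, and (with verify_result) streams logs until it terminates.
success = task.run(
  template: "task-runner-template",   # placeholder PodTemplate name
  command: ["rake"],
  arguments: ["db:migrate"],
  env_vars: ["ENVIRONMENT=production"]
)
exit(1) unless success

The env_vars entries are plain "KEY=value" strings; set_container_overrides! splits them on the first "=" and appends them to the container's env list.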
data/lib/krane/runner_task_config_validator.rb
ADDED
@@ -0,0 +1,18 @@
# frozen_string_literal: true
module Krane
  class RunnerTaskConfigValidator < TaskConfigValidator
    def initialize(template, *arguments)
      super(*arguments)
      @template = template
      @validations += %i(validate_template)
    end

    private

    def validate_template
      if @template.blank?
        @errors << "Task template name can't be nil"
      end
    end
  end
end
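Editor's note: this subclass only appends a template presence check to the shared validations. A tiny illustrative sketch, assuming task_config, kubectl, and kubeclient_builder are already built (as in RunnerTask#verify_config!) and that the shared kubeconfig/context/namespace checks pass:

# With a nil template name, the extra validation is what fails.
validator = Krane::RunnerTaskConfigValidator.new(nil, task_config, kubectl, kubeclient_builder)
validator.valid?   # => false
validator.errors   # => ["Task template name can't be nil"]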
data/lib/krane/statsd.rb
ADDED
@@ -0,0 +1,65 @@
# frozen_string_literal: true
require 'statsd-instrument'
require 'logger'

module Krane
  class StatsD
    PREFIX = "Krane"

    def self.duration(start_time)
      (Time.now.utc - start_time).round(1)
    end

    def self.client
      @client ||= begin
        sink = if ::StatsD::Instrument::Environment.current.env.fetch('STATSD_ENV', nil) == 'development'
          ::StatsD::Instrument::LogSink.new(Logger.new($stderr))
        elsif (addr = ::StatsD::Instrument::Environment.current.env.fetch('STATSD_ADDR', nil))
          ::StatsD::Instrument::UDPSink.for_addr(addr)
        else
          ::StatsD::Instrument::NullSink.new
        end
        ::StatsD::Instrument::Client.new(prefix: PREFIX, sink: sink, default_sample_rate: 1.0)
      end
    end

    module MeasureMethods
      def measure_method(method_name, metric = nil)
        unless method_defined?(method_name) || private_method_defined?(method_name)
          raise NotImplementedError, "Cannot instrument undefined method #{method_name}"
        end

        unless const_defined?("InstrumentationProxy")
          const_set("InstrumentationProxy", Module.new)
          should_prepend = true
        end

        metric ||= "#{method_name}.duration"
        self::InstrumentationProxy.send(:define_method, method_name) do |*args, &block|
          begin
            start_time = Time.now.utc
            super(*args, &block)
          rescue
            error = true
            raise
          ensure
            dynamic_tags = send(:statsd_tags) if respond_to?(:statsd_tags, true)
            dynamic_tags ||= {}
            if error
              dynamic_tags[:error] = error if dynamic_tags.is_a?(Hash)
              dynamic_tags << "error:#{error}" if dynamic_tags.is_a?(Array)
            end

            Krane::StatsD.client.distribution(
              metric,
              Krane::StatsD.duration(start_time),
              tags: dynamic_tags
            )
          end
        end

        prepend(self::InstrumentationProxy) if should_prepend
      end
    end
  end
end
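Editor's note: MeasureMethods is the hook other classes in the gem extend to time their methods. A hedged sketch of wiring it into one's own class; the class, method, metric name, and tags below are made up for illustration:

class MyDeployStep
  extend Krane::StatsD::MeasureMethods

  def sync_resources
    # ... work to be timed ...
  end
  # Must be called after the method is defined; prepends an InstrumentationProxy
  # module that wraps sync_resources and emits a duration distribution.
  measure_method(:sync_resources, "my_deploy_step.sync.duration")

  private

  # Optional: picked up via respond_to?(:statsd_tags, true) and attached to the metric.
  def statsd_tags
    { component: "my_deploy_step" }
  end
end

Note that the metric is only actually shipped anywhere when STATSD_ENV or STATSD_ADDR selects a real sink; otherwise the NullSink silently discards it.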
data/lib/krane/task_config.rb
ADDED
@@ -0,0 +1,22 @@
# frozen_string_literal: true

require 'krane/cluster_resource_discovery'

module Krane
  class TaskConfig
    attr_reader :context, :namespace, :logger

    def initialize(context, namespace, logger = nil)
      @context = context
      @namespace = namespace
      @logger = logger || FormattedLogger.build(@namespace, @context)
    end

    def global_kinds
      @global_kinds ||= begin
        cluster_resource_discoverer = ClusterResourceDiscovery.new(task_config: self)
        cluster_resource_discoverer.fetch_resources(namespaced: false).map { |g| g["kind"] }
      end
    end
  end
end
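Editor's note: TaskConfig is the small value object threaded through kubectl wrappers, validators, and resource discovery. A sketch of standalone use; the context and namespace values are placeholders, and the listed kinds are only an example of what a cluster might return:

config = Krane::TaskConfig.new("my-context", "my-namespace")
config.logger.info("Connecting to #{config.context}")

# Lazily discovers cluster-scoped resource kinds via ClusterResourceDiscovery
# and memoizes the result, e.g. ["Namespace", "Node", "CustomResourceDefinition", ...]
config.global_kinds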
data/lib/krane/task_config_validator.rb
ADDED
@@ -0,0 +1,96 @@
# frozen_string_literal: true
module Krane
  class TaskConfigValidator
    DEFAULT_VALIDATIONS = %i(
      validate_kubeconfig
      validate_context_exists_in_kubeconfig
      validate_context_reachable
      validate_server_version
      validate_namespace_exists
    ).freeze

    delegate :context, :namespace, :logger, to: :@task_config

    def initialize(task_config, kubectl, kubeclient_builder, only: nil)
      @task_config = task_config
      @kubectl = kubectl
      @kubeclient_builder = kubeclient_builder
      @errors = nil
      @validations = only || DEFAULT_VALIDATIONS
    end

    def valid?
      @errors = []
      @validations.each do |validator_name|
        break if @errors.present?
        send(validator_name)
      end
      @errors.empty?
    end

    def errors
      valid?
      @errors
    end

    private

    def validate_kubeconfig
      @errors += @kubeclient_builder.validate_config_files
    end

    def validate_context_exists_in_kubeconfig
      unless context.present?
        return @errors << "Context can not be blank"
      end

      _, err, st = @kubectl.run("config", "get-contexts", context, "-o", "name",
        use_namespace: false, use_context: false, log_failure: false)

      unless st.success?
        @errors << if err.match("error: context #{context} not found")
          "Context #{context} missing from your kubeconfig file(s)"
        else
          "Something went wrong. #{err} "
        end
      end
    end

    def validate_context_reachable
      _, err, st = @kubectl.run("get", "namespaces", "-o", "name",
        use_namespace: false, log_failure: false)

      unless st.success?
        @errors << "Something went wrong connecting to #{context}. #{err} "
      end
    end

    def validate_namespace_exists
      unless namespace.present?
        return @errors << "Namespace can not be blank"
      end

      _, err, st = @kubectl.run("get", "namespace", "-o", "name", namespace,
        use_namespace: false, log_failure: false)

      unless st.success?
        @errors << if err.match("Error from server [(]NotFound[)]: namespace")
          "Could not find Namespace: #{namespace} in Context: #{context}"
        else
          "Could not connect to kubernetes cluster. #{err}"
        end
      end
    end

    def validate_server_version
      if @kubectl.server_version < Gem::Version.new(MIN_KUBE_VERSION)
        logger.warn(server_version_warning(@kubectl.server_version))
      end
    end

    def server_version_warning(server_version)
      "Minimum cluster version requirement of #{MIN_KUBE_VERSION} not met. "\
      "Using #{server_version} could result in unexpected behavior as it is no longer tested against"
    end
  end
end
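Editor's note: putting the pieces together, a hedged sketch of how a caller might run only a subset of these validations via the only: option; the object construction mirrors what the task classes above do internally, and the context/namespace names are illustrative:

task_config = Krane::TaskConfig.new("my-context", "my-namespace")
kubeclient_builder = Krane::KubeclientBuilder.new
kubectl = Krane::Kubectl.new(task_config: task_config, log_failure_by_default: true)

validator = Krane::TaskConfigValidator.new(
  task_config, kubectl, kubeclient_builder,
  only: %i(validate_kubeconfig validate_context_exists_in_kubeconfig)
)

unless validator.valid?
  task_config.logger.fatal(validator.errors.join(", "))
  raise Krane::TaskConfigurationError
end

Because valid? stops at the first validation that records an error, errors usually contains a single, most-specific message rather than a cascade of follow-on failures.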