kennel 1.75.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
+ # frozen_string_literal: true
+ module Kennel
+   module OptionalValidations
+     def self.included(base)
+       base.settings :validate
+       base.defaults(validate: -> { true })
+     end
+
+     private
+
+     def validate_json(data)
+       bad = Kennel::Utils.all_keys(data).grep_v(Symbol)
+       return if bad.empty?
+       invalid!(
+         "Only use Symbols as hash keys to avoid permanent diffs when updating.\n" \
+         "Change these keys to be symbols (usually 'foo' => 1 --> 'foo': 1)\n" \
+         "#{bad.map(&:inspect).join("\n")}"
+       )
+     end
+   end
+ end
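
For context (not part of the diff): validate_json relies on Kennel::Utils.all_keys, which, judging from its use here, collects hash keys at any depth. A minimal illustrative sketch with made-up data:

  data = { name: "x", options: { "thresholds" => { critical: 1 } } }
  Kennel::Utils.all_keys(data).grep_v(Symbol) # => ["thresholds"], so invalid! is raised with the message above

The validate setting registered in self.included defaults to true; passing validate: -> { false } overrides it per resource (the code that consults the setting lives outside this file).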
@@ -0,0 +1,34 @@
+ # frozen_string_literal: true
+ require "benchmark"
+
+ module Kennel
+   class Progress
+     # print what we are doing and a spinner until it is done ... then show how long it took
+     def self.progress(name)
+       Kennel.err.print "#{name} ... "
+
+       animation = "-\\|/"
+       count = 0
+       stop = false
+       result = nil
+
+       spinner = Thread.new do
+         loop do
+           break if stop
+           Kennel.err.print animation[count % animation.size]
+           sleep 0.2
+           Kennel.err.print "\b"
+           count += 1
+         end
+       end
+
+       time = Benchmark.realtime { result = yield }
+
+       stop = true
+       spinner.join
+       Kennel.err.print "#{time.round(2)}s\n"
+
+       result
+     end
+   end
+ end
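
Usage note (not from the diff, but matching the call sites in Syncer below): wrap any slow block and the caller gets the block's return value back, while stderr shows the spinner and the elapsed time.

  definitions = Kennel::Progress.progress("Downloading definitions") do
    download_definitions # placeholder for whatever slow work is being timed
  end
  # stderr: "Downloading definitions ... 2.31s" (spinner animates while the block runs; the duration is illustrative)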
@@ -0,0 +1,86 @@
+ # frozen_string_literal: true
+ module Kennel
+   module SettingsAsMethods
+     SETTING_OVERRIDABLE_METHODS = [].freeze
+
+     def self.included(base)
+       base.extend ClassMethods
+       base.instance_variable_set(:@settings, [])
+     end
+
+     module ClassMethods
+       def settings(*names)
+         duplicates = (@settings & names)
+         if duplicates.any?
+           raise ArgumentError, "Settings #{duplicates.map(&:inspect).join(", ")} are already defined"
+         end
+
+         overrides = ((instance_methods - self::SETTING_OVERRIDABLE_METHODS) & names)
+         if overrides.any?
+           raise ArgumentError, "Settings #{overrides.map(&:inspect).join(", ")} are already used as methods"
+         end
+
+         @settings.concat names
+
+         names.each do |name|
+           next if method_defined?(name)
+           define_method name do
+             raise_with_location ArgumentError, "'#{name}' on #{self.class} was not set or passed as option"
+           end
+         end
+       end
+
+       def defaults(options)
+         options.each do |name, block|
+           validate_setting_exist name
+           define_method name, &block
+         end
+       end
+
+       private
+
+       def validate_setting_exist(name)
+         return if !@settings || @settings.include?(name)
+         supported = @settings.map(&:inspect)
+         raise ArgumentError, "Unsupported setting #{name.inspect}, supported settings are #{supported.join(", ")}"
+       end
+
+       def inherited(child)
+         super
+         child.instance_variable_set(:@settings, (@settings || []).dup)
+       end
+     end
+
+     def initialize(options = {})
+       super()
+
+       unless options.is_a?(Hash)
+         raise ArgumentError, "Expected #{self.class.name}.new options to be a Hash, got a #{options.class}"
+       end
+
+       options.each do |k, v|
+         next if v.class == Proc
+         raise ArgumentError, "Expected #{self.class.name}.new option :#{k} to be Proc, for example `#{k}: -> { 12 }`"
+       end
+
+       options.each do |name, block|
+         self.class.send :validate_setting_exist, name
+         define_singleton_method name, &block
+       end
+
+       # need expand_path so it works with rake and when run individually
+       pwd = /^#{Regexp.escape(Dir.pwd)}\//
+       @invocation_location = caller.detect do |l|
+         if found = File.expand_path(l).sub!(pwd, "")
+           break found
+         end
+       end
+     end
+
+     def raise_with_location(error, message)
+       message = message.dup
+       message << " on #{@invocation_location}" if @invocation_location
+       raise error, message
+     end
+   end
+ end
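
Illustrative sketch of the DSL this module provides (Widget is hypothetical, not part of kennel):

  class Widget
    include Kennel::SettingsAsMethods

    settings :name, :timeout
    defaults(timeout: -> { 30 })
  end

  Widget.new(name: -> { "w1" }).name # => "w1"
  Widget.new.timeout                 # => 30, from defaults
  Widget.new(name: "w1")             # ArgumentError ("Expected Widget.new option :name to be Proc ...")
  Widget.new.name                    # ArgumentError pointing at the caller's file:line via raise_with_location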
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+ module Kennel
+   module SubclassTracking
+     def recursive_subclasses
+       subclasses + subclasses.flat_map(&:recursive_subclasses)
+     end
+
+     def subclasses
+       @subclasses ||= []
+     end
+
+     private
+
+     def inherited(child)
+       super
+       subclasses << child
+     end
+   end
+ end
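
Illustrative sketch (Base/Child/Grandchild are hypothetical): the module is extended so a base class can enumerate everything defined beneath it, which is how Models::Record.subclasses is used in Syncer#download_definitions below.

  class Base
    extend Kennel::SubclassTracking
  end
  class Child < Base; end
  class Grandchild < Child; end

  Base.subclasses           # => [Child]
  Base.recursive_subclasses # => [Child, Grandchild]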
@@ -0,0 +1,260 @@
+ # frozen_string_literal: true
+ module Kennel
+   class Syncer
+     CACHE_FILE = "tmp/cache/details" # keep in sync with .travis.yml caching
+     TRACKING_FIELDS = [:message, :description].freeze
+     DELETE_ORDER = ["dashboard", "slo", "monitor"].freeze # dashboards reference monitors + slos, slos reference monitors
+
+     def initialize(api, expected, project: nil)
+       @api = api
+       @project_filter = project
+       @expected = expected
+       if @project_filter
+         original = @expected
+         @expected = @expected.select { |e| e.project.kennel_id == @project_filter }
+         if @expected.empty?
+           possible = original.map { |e| e.project.kennel_id }.uniq.sort
+           raise "#{@project_filter} does not match any projects, try any of these:\n#{possible.join("\n")}"
+         end
+       end
+       @expected.each { |e| add_tracking_id e }
+       calculate_diff
+       prevent_irreversible_partial_updates
+     end
+
+     def plan
+       Kennel.out.puts "Plan:"
+       if noop?
+         Kennel.out.puts Utils.color(:green, "Nothing to do")
+       else
+         print_plan "Create", @create, :green
+         print_plan "Update", @update, :yellow
+         print_plan "Delete", @delete, :red
+       end
+     end
+
+     def confirm
+       ENV["CI"] || !STDIN.tty? || Utils.ask("Execute Plan ?") unless noop?
+     end
+
+     def update
+       changed = (@create + @update).map { |_, e| e } unless @create.empty?
+
+       @create.each do |_, e|
+         e.resolve_linked_tracking_ids!({}, force: true)
+
+         reply = @api.create e.class.api_resource, e.as_json
+         id = reply.fetch(:id)
+
+         # resolve ids we could previously not resolve
+         changed.delete e
+         resolve_linked_tracking_ids! from: [reply], to: changed
+
+         Kennel.out.puts "Created #{e.class.api_resource} #{tracking_id(e.as_json)} #{e.url(id)}"
+       end
+
+       @update.each do |id, e|
+         e.resolve_linked_tracking_ids!({}, force: true)
+         @api.update e.class.api_resource, id, e.as_json
+         Kennel.out.puts "Updated #{e.class.api_resource} #{tracking_id(e.as_json)} #{e.url(id)}"
+       end
+
+       @delete.each do |id, _, a|
+         @api.delete a.fetch(:api_resource), id
+         Kennel.out.puts "Deleted #{a.fetch(:api_resource)} #{tracking_id(a)} #{id}"
+       end
+     end
+
+     private
+
+     def noop?
+       @create.empty? && @update.empty? && @delete.empty?
+     end
+
+     def calculate_diff
+       @update = []
+       @delete = []
+
+       actual = Progress.progress("Downloading definitions") { download_definitions }
+       resolve_linked_tracking_ids! from: actual, to: @expected
+       filter_by_project! actual
+
+       Progress.progress "Diffing" do
+         items = actual.map do |a|
+           e = matching_expected(a)
+           if e && @expected.delete(e)
+             [e, a]
+           else
+             [nil, a]
+           end
+         end
+
+         details_cache do |cache|
+           # fill details of things we need to compare (only do this part in parallel for safety & balancing)
+           Utils.parallel(items.select { |e, _| e && e.class::API_LIST_INCOMPLETE }) { |_, a| fill_details(a, cache) }
+         end
+
+         # pick out things to update or delete
+         items.each do |e, a|
+           id = a.fetch(:id)
+           if e
+             diff = e.diff(a)
+             @update << [id, e, a, diff] if diff.any?
+           elsif tracking_id(a) # was previously managed
+             @delete << [id, nil, a]
+           end
+         end
+
+         ensure_all_ids_found
+         @create = @expected.map { |e| [nil, e] }
+         @create.sort_by! { |_, e| -DELETE_ORDER.index(e.class.api_resource) }
+       end
+
+       @delete.sort_by! { |_, _, a| DELETE_ORDER.index a.fetch(:api_resource) }
+     end
+
+     # Make diff work even though we cannot mass-fetch definitions
+     def fill_details(a, cache)
+       resource = a.fetch(:api_resource)
+       args = [resource, a.fetch(:id)]
+       full = cache.fetch(args, a[:modified] || a.fetch(:modified_at)) do
+         @api.show(*args)
+       end
+       a.merge!(full)
+     end
+
+     def details_cache(&block)
+       cache = FileCache.new CACHE_FILE, Kennel::VERSION
+       cache.open(&block)
+     end
+
+     def download_definitions
+       Utils.parallel(Models::Record.subclasses.map(&:api_resource)) do |api_resource|
+         results = @api.list(api_resource, with_downtimes: false) # lookup monitors without adding unnecessary downtime information
+         results = results[results.keys.first] if results.is_a?(Hash) # dashboards are nested in {dashboards: []}
+         results.each { |c| c[:api_resource] = api_resource } # store api resource for later diffing
+       end.flatten(1)
+     end
+
+     def ensure_all_ids_found
+       @expected.each do |e|
+         next unless id = e.id
+         raise "Unable to find existing #{e.class.api_resource} with id #{id}"
+       end
+     end
+
+     def matching_expected(a)
+       # index list by all the things we look up by: tracking id and actual id
+       @lookup_map ||= @expected.each_with_object({}) do |e, all|
+         keys = [tracking_id(e.as_json)]
+         keys << "#{e.class.api_resource}:#{e.id}" if e.id
+         keys.compact.each do |key|
+           raise "Lookup #{key} is duplicated" if all[key]
+           all[key] = e
+         end
+       end
+
+       @lookup_map["#{a.fetch(:api_resource)}:#{a.fetch(:id)}"] || @lookup_map[tracking_id(a)]
+     end
+
+     def print_plan(step, list, color)
+       return if list.empty?
+       list.each do |_, e, a, diff|
+         api_resource = (e ? e.class.api_resource : a.fetch(:api_resource))
+         Kennel.out.puts Utils.color(color, "#{step} #{api_resource} #{e&.tracking_id || tracking_id(a)}")
+         print_diff(diff) if diff # only for update
+       end
+     end
+
+     def print_diff(diff)
+       diff.each do |type, field, old, new|
+         if type == "+"
+           temp = Utils.pretty_inspect(new)
+           new = Utils.pretty_inspect(old)
+           old = temp
+         else # ~ and -
+           old = Utils.pretty_inspect(old)
+           new = Utils.pretty_inspect(new)
+         end
+
+         if (old + new).size > 100
+           Kennel.out.puts "  #{type}#{field}"
+           Kennel.out.puts "    #{old} ->"
+           Kennel.out.puts "    #{new}"
+         else
+           Kennel.out.puts "  #{type}#{field} #{old} -> #{new}"
+         end
+       end
+     end
+
+     # Do not add tracking-id when working with existing ids on a branch,
+     # so resources do not get deleted from merges to master.
+     # Also make sure the diff still makes sense, by kicking out the now noop-update.
+     #
+     # Note: ideally we'd never add tracking in the first place, but at that point we do not know the diff yet
+     def prevent_irreversible_partial_updates
+       return unless @project_filter
+       @update.select! do |_, e, _, diff|
+         next true unless e.id # short circuit for performance
+
+         diff.select! do |field_diff|
+           (_, field, old, new) = field_diff
+           next true unless tracking_field?(field)
+
+           if (old_tracking = tracking_value(old))
+             old_tracking == tracking_value(new) || raise("do not update! (atm unreachable)")
+           else
+             field_diff[3] = remove_tracking_id(e) # make plan output match update
+             old != field_diff[3]
+           end
+         end
+
+         !diff.empty?
+       end
+     end
+
+     def resolve_linked_tracking_ids!(from:, to:)
+       map = from.each_with_object({}) { |a, lookup| lookup[tracking_id(a)] = a.fetch(:id) }
+       to.each { |e| map[e.tracking_id] ||= :new }
+       to.each { |e| e.resolve_linked_tracking_ids!(map, force: false) }
+     end
+
+     def filter_by_project!(definitions)
+       return unless @project_filter
+       definitions.select! do |a|
+         id = tracking_id(a)
+         !id || id.start_with?("#{@project_filter}:")
+       end
+     end
+
+     def add_tracking_id(e)
+       json = e.as_json
+       field = tracking_field(json)
+       raise "remove the \"-- Managed by kennel\" line from #{field} to copy a resource" if tracking_value(json[field])
+       json[field] = "#{json[field]}\n-- Managed by kennel #{e.tracking_id} in #{e.project.class.file_location}, do not modify manually".lstrip
+     end
+
+     def remove_tracking_id(e)
+       json = e.as_json
+       field = tracking_field(json)
+       value = json[field]
+       json[field] = value.dup.sub!(/\n?-- Managed by kennel .*/, "") || raise("did not find tracking id in #{value}")
+     end
+
+     def tracking_id(a)
+       tracking_value a[tracking_field(a)]
+     end
+
+     def tracking_value(content)
+       content.to_s[/-- Managed by kennel (\S+:\S+)/, 1]
+     end
+
+     def tracking_field(a)
+       TRACKING_FIELDS.detect { |f| a.key?(f) }
+     end
+
+     def tracking_field?(field)
+       TRACKING_FIELDS.include?(field.to_sym)
+     end
+   end
+ end
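
For orientation (not from the diff): kennel's top-level plan/update entry points drive this class roughly like the hypothetical sketch below, where api and expected are assumed to be the API client and the generated records.

  syncer = Kennel::Syncer.new(api, expected, project: ENV["PROJECT"])
  syncer.plan                     # prints the Create/Update/Delete sections
  syncer.update if syncer.confirm # confirm auto-accepts on CI or when STDIN is not a tty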
@@ -0,0 +1,148 @@
+ # frozen_string_literal: true
+ require "English"
+ require "kennel"
+ require "kennel/unmuted_alerts"
+ require "kennel/importer"
+
+ module Kennel
+   module Tasks
+     class << self
+       def abort(message = nil)
+         Kennel.err.puts message if message
+         raise SystemExit.new(1), message
+       end
+     end
+   end
+ end
+
+ namespace :kennel do
+   desc "Ensure there are no uncommitted changes that would be hidden from PR reviewers"
+   task no_diff: :generate do
+     result = `git status --porcelain generated/`.strip
+     Kennel::Tasks.abort "Diff found:\n#{result}\nrun `rake generate` and commit the diff to fix" unless result == ""
+     Kennel::Tasks.abort "Error during diffing" unless $CHILD_STATUS.success?
+   end
+
+   # ideally do this on every run, but it's slow (~1.5s) and brittle (might not find all + might find false-positives)
+   # https://help.datadoghq.com/hc/en-us/requests/254114 for automatic validation
+   desc "Verify that all used monitor mentions are valid"
+   task validate_mentions: :environment do
+     known = Kennel.send(:api)
+       .send(:request, :get, "/monitor/notifications")
+       .fetch(:handles)
+       .values
+       .flatten(1)
+       .map { |v| v.fetch(:value) }
+
+     known += ENV["KNOWN"].to_s.split(",")
+
+     bad = []
+     Dir["generated/**/*.json"].each do |f|
+       next unless message = JSON.parse(File.read(f))["message"]
+       used = message.scan(/\s(@[^\s{,'"]+)/).flatten(1)
+         .grep(/^@.*@|^@.*-/) # ignore @here etc handles ... datadog uses @foo@bar.com for emails and @foo-bar for integrations
+       (used - known).each { |v| bad << [f, v] }
+     end
+
+     if bad.any?
+       url = Kennel::Utils.path_to_url "/account/settings"
+       puts "Invalid mentions found, either ignore them by adding to `KNOWN` env var or add them via #{url}"
+       bad.each { |f, v| puts "Invalid mention #{v} in monitor message of #{f}" }
+       Kennel::Tasks.abort
+     end
+   end
+
+   desc "generate local definitions"
+   task generate: :environment do
+     Kennel.generate
+   end
+
+   # also generate parts so users see and commit updated generated files automatically
+   desc "show planned datadog changes (scope with PROJECT=name)"
+   task plan: :generate do
+     Kennel.plan
+   end
+
+   desc "update datadog (scope with PROJECT=name)"
+   task update_datadog: :environment do
+     Kennel.update
+   end
+
+   desc "update on push to the default branch, otherwise show plan"
+   task :ci do
+     branch = (ENV["TRAVIS_BRANCH"] || ENV["GITHUB_REF"]).to_s.sub(/^refs\/heads\//, "")
+     on_default_branch = (branch == (ENV["DEFAULT_BRANCH"] || "master"))
+     is_push = (ENV["TRAVIS_PULL_REQUEST"] == "false" || ENV["GITHUB_EVENT_NAME"] == "push")
+     task_name =
+       if on_default_branch && is_push
+         "kennel:update_datadog"
+       else
+         "kennel:plan" # show plan in CI logs
+       end
+
+     Rake::Task[task_name].invoke
+   end
+
+   desc "show unmuted alerts filtered by TAG, for example TAG=team:foo"
+   task alerts: :environment do
+     tag = ENV["TAG"] || Kennel::Tasks.abort("Call with TAG=foo:bar")
+     Kennel::UnmutedAlerts.print(Kennel.send(:api), tag)
+   end
+
+   desc "show monitors with no data by TAG, for example TAG=team:foo"
+   task nodata: :environment do
+     tag = ENV["TAG"] || Kennel::Tasks.abort("Call with TAG=foo:bar")
+     monitors = Kennel.send(:api).list("monitor", monitor_tags: tag, group_states: "no data")
+     monitors.select! { |m| m[:overall_state] == "No Data" }
+     monitors.reject! { |m| m[:tags].include? "nodata:ignore" }
+     if monitors.any?
+       Kennel.err.puts <<~TEXT
+         This is a useful task to find monitors that have mis-spelled metrics or never received data at any time.
+         To ignore monitors with nodata, tag the monitor with "nodata:ignore"
+
+       TEXT
+     end
+
+     monitors.each do |m|
+       Kennel.out.puts m[:name]
+       Kennel.out.puts Kennel::Utils.path_to_url("/monitors/#{m[:id]}")
+       Kennel.out.puts
+     end
+   end
+
+   desc "Convert existing resources to copy-pasteable definitions (call with URL= or call with RESOURCE= and ID=)"
+   task import: :environment do
+     if (id = ENV["ID"]) && (resource = ENV["RESOURCE"])
+       id = Integer(id) if id =~ /^\d+$/ # dashboards can have alphanumeric ids
+     elsif (url = ENV["URL"])
+       resource, id = Kennel::Models::Record.parse_any_url(url) || Kennel::Tasks.abort("Unable to parse url")
+     else
+       possible_resources = Kennel::Models::Record.subclasses.map(&:api_resource)
+       Kennel::Tasks.abort("Call with URL= or call with RESOURCE=#{possible_resources.join(" or ")} and ID=")
+     end
+
+     Kennel.out.puts Kennel::Importer.new(Kennel.send(:api)).import(resource, id)
+   end
+
+   desc "Dump ALL of datadog config as raw json ... useful for grep/search TYPE=slo|monitor|dashboard"
+   task dump: :environment do
+     Kennel.send(:api).list(ENV.fetch("TYPE")).each do |r|
+       Kennel.out.puts JSON.pretty_generate(r)
+     end
+   end
+
+   task :environment do
+     require "kennel"
+     gem "dotenv"
+     require "dotenv"
+     source = ".env"
+
+     # warn when users have things like DATADOG_TOKEN already set and it will not be loaded from .env
+     unless ENV["KENNEL_SILENCE_UPDATED_ENV"]
+       updated = Dotenv.parse(source).select { |k, v| ENV[k] && ENV[k] != v }
+       warn "Environment variables #{updated.keys.join(", ")} need to be unset to be sourced from #{source}" if updated.any?
+     end
+
+     Dotenv.load(source)
+   end
+ end
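
Usage sketch (hypothetical Rakefile, assuming the conventional kennel/tasks require path; the env vars come from the desc strings above): requiring this file from a project's Rakefile exposes the kennel: tasks.

  # Rakefile
  require "kennel/tasks"

  # then, from the shell:
  #   PROJECT=my_project bundle exec rake kennel:plan
  #   RESOURCE=monitor ID=123 bundle exec rake kennel:import
  #   TAG=team:foo bundle exec rake kennel:alerts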