kennel 1.87.1 → 1.90.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 42e4d1fd80bfa23ccbed32dcc6cfeb05fda2a2748bc29b302e6f982273302551
4
- data.tar.gz: 6ffc6ddafa5518868cc913fd05add8f11eb49729ba4ca498b357241ab80111f3
3
+ metadata.gz: 861edba684922b252c0af4cbbdecbe258e28fc0dcba9a31f8c77e14ea3391110
4
+ data.tar.gz: fc11becb68eb696db030e517d99e12191eae10024c74ca852c7c308646517ef4
5
5
  SHA512:
6
- metadata.gz: 76f2b875dd8e918db62b0ee13a6ad01acd6a187f11127824176cdee2f8665ec0ffa4d5ad3a0f8fbfd97fbf980faae8039cdbca8f2b078ef81e091a9a34c3ed0e
7
- data.tar.gz: a612a2d02f684f847ef6c167d2a3ef9fc815f80c1fd618106c8824017cede188c77dec3700d1a7afd46c481cd3c8fa7c37358af6a9737c1703a68491d159a5b1
6
+ metadata.gz: 660e66d7a1ae02a0f67d723ddd1beccddea63d9bf62658b4327ec2622ba32f161a6c9336b86a1295b8037195ed513c4514c0c1759e6eb71764665d11feeb19fe
7
+ data.tar.gz: a2f479c8579e94a7a796e59ea6be3433c795f1db35892a5ce57a345ac7449c87a344632ad8c35fd27256da70d5c49eb227dd70f89c7566ea4084085277f25e94
data/Readme.md CHANGED
@@ -52,6 +52,7 @@ end
52
52
  ```
53
53
 
54
54
  <!-- NOT IN template/Readme.md -->
55
+
55
56
  ## Installation
56
57
 
57
58
  - create a new private `kennel` repo for your organization (do not fork this repo)
@@ -114,6 +115,7 @@ end
114
115
  - use [datadog monitor UI](https://app.datadoghq.com/monitors/manage) to find a monitor
115
116
  - get the `id` from the url
116
117
  - run `URL='https://app.datadoghq.com/monitors/123' bundle exec rake kennel:import` and copy the output
118
+ - import task also works with SLO alerts, e.g. `URL='https://app.datadoghq.com/slo/edit/123abc456def123/alerts/789' bundle exec rake kennel:import`
117
119
  - find or create a project in `projects/`
118
120
  - add the monitor to `parts: [` list, for example:
119
121
  ```Ruby
@@ -292,9 +294,12 @@ https://foo.datadog.com/monitor/123
292
294
 
293
295
  <!-- NOT IN template/Readme.md -->
294
296
 
295
-
296
297
  ## Development
297
298
 
299
+ ### Benchmarking
300
+
301
+ Setting `FORCE_GET_CACHE=true` will cache all get requests, which makes benchmarking improvements more reliable.
302
+
298
303
  ### Integration testing
299
304
 
300
305
  ```Bash
data/lib/kennel.rb CHANGED
@@ -55,7 +55,7 @@ module Kennel
55
55
 
56
56
  def store(parts)
57
57
  Progress.progress "Storing" do
58
- old = Dir["generated/**/*"]
58
+ old = Dir["generated/#{project_filter || "**"}/*"]
59
59
  used = []
60
60
 
61
61
  Utils.parallel(parts, max: 2) do |part|
@@ -83,7 +83,7 @@ module Kennel
83
83
  end
84
84
 
85
85
  def syncer
86
- @syncer ||= Syncer.new(api, generated, project: ENV["PROJECT"])
86
+ @syncer ||= Syncer.new(api, generated, project: project_filter)
87
87
  end
88
88
 
89
89
  def api
@@ -94,9 +94,21 @@ module Kennel
94
94
  @generated ||= begin
95
95
  Progress.progress "Generating" do
96
96
  load_all
97
+ known = []
97
98
  parts = Models::Project.recursive_subclasses.flat_map do |project_class|
98
- project_class.new.validated_parts
99
+ project = project_class.new
100
+ kennel_id = project.kennel_id
101
+ if project_filter
102
+ known << kennel_id
103
+ next [] if kennel_id != project_filter
104
+ end
105
+ project.validated_parts
99
106
  end
107
+
108
+ if project_filter && parts.empty?
109
+ raise "#{project_filter} does not match any projects, try any of these:\n#{known.uniq.sort.join("\n")}"
110
+ end
111
+
100
112
  parts.group_by(&:tracking_id).each do |tracking_id, same|
101
113
  next if same.size == 1
102
114
  raise <<~ERROR
@@ -109,6 +121,10 @@ module Kennel
109
121
  end
110
122
  end
111
123
 
124
+ def project_filter
125
+ ENV["PROJECT"]
126
+ end
127
+
112
128
  def load_all
113
129
  ["teams", "parts", "projects"].each do |folder|
114
130
  Dir["#{folder}/**/*.rb"].sort.each { |f| require "./#{f}" }
data/lib/kennel/api.rb CHANGED
@@ -1,6 +1,7 @@
1
1
  # frozen_string_literal: true
2
+ # encapsulates knowledge around how the api works
3
+ # especially 1-off weirdness that should not leak into other parts of the code
2
4
  module Kennel
3
- # encapsulates knowledge around how the api works
4
5
  class Api
5
6
  CACHE_FILE = "tmp/cache/details"
6
7
 
@@ -11,34 +12,24 @@ module Kennel
11
12
  end
12
13
 
13
14
  def show(api_resource, id, params = {})
14
- reply = request :get, "/api/v1/#{api_resource}/#{id}", params: params
15
- api_resource == "slo" ? reply[:data] : reply
15
+ response = request :get, "/api/v1/#{api_resource}/#{id}", params: params
16
+ response = response.fetch(:data) if api_resource == "slo"
17
+ response
16
18
  end
17
19
 
18
20
  def list(api_resource, params = {})
19
- if api_resource == "slo"
20
- raise ArgumentError if params[:limit] || params[:offset]
21
- limit = 1000
22
- offset = 0
23
- all = []
24
-
25
- loop do
26
- result = request :get, "/api/v1/#{api_resource}", params: params.merge(limit: limit, offset: offset)
27
- data = result.fetch(:data)
28
- all.concat data
29
- break all if data.size < limit
30
- offset += limit
31
- end
32
- else
33
- result = request :get, "/api/v1/#{api_resource}", params: params
34
- result = result.fetch(:dashboards) if api_resource == "dashboard"
35
- result
21
+ with_pagination api_resource == "slo", params do |paginated_params|
22
+ response = request :get, "/api/v1/#{api_resource}", params: paginated_params
23
+ response = response.fetch(:dashboards) if api_resource == "dashboard"
24
+ response = response.fetch(:data) if api_resource == "slo"
25
+ response
36
26
  end
37
27
  end
38
28
 
39
29
  def create(api_resource, attributes)
40
- reply = request :post, "/api/v1/#{api_resource}", body: attributes
41
- api_resource == "slo" ? reply[:data].first : reply
30
+ response = request :post, "/api/v1/#{api_resource}", body: attributes
31
+ response = response.fetch(:data).first if api_resource == "slo"
32
+ response
42
33
  end
43
34
 
44
35
  def update(api_resource, id, attributes)
@@ -53,7 +44,6 @@ module Kennel
53
44
  end
54
45
 
55
46
  def fill_details!(api_resource, list)
56
- return unless api_resource == "dashboard"
57
47
  details_cache do |cache|
58
48
  Utils.parallel(list) { |a| fill_detail!(api_resource, a, cache) }
59
49
  end
@@ -61,6 +51,21 @@ module Kennel
61
51
 
62
52
  private
63
53
 
54
+ def with_pagination(enabled, params)
55
+ return yield params unless enabled
56
+ raise ArgumentError if params[:limit] || params[:offset]
57
+ limit = 1000
58
+ offset = 0
59
+ all = []
60
+
61
+ loop do
62
+ response = yield params.merge(limit: limit, offset: offset)
63
+ all.concat response
64
+ return all if response.size < limit
65
+ offset += limit
66
+ end
67
+ end
68
+
64
69
  # Make diff work even though we cannot mass-fetch definitions
65
70
  def fill_detail!(api_resource, a, cache)
66
71
  args = [api_resource, a.fetch(:id)]
@@ -74,34 +79,52 @@ module Kennel
74
79
  end
75
80
 
76
81
  def request(method, path, body: nil, params: {}, ignore_404: false)
77
- params = params.merge(application_key: @app_key, api_key: @api_key)
78
- query = Faraday::FlatParamsEncoder.encode(params)
79
- response = nil
80
- tries = 2
81
-
82
- tries.times do |i|
83
- response = Utils.retry Faraday::ConnectionFailed, Faraday::TimeoutError, times: 2 do
84
- @client.send(method, "#{path}?#{query}") do |request|
85
- request.body = JSON.generate(body) if body
86
- request.headers["Content-type"] = "application/json"
82
+ path = "#{path}?#{Faraday::FlatParamsEncoder.encode(params)}" if params.any?
83
+ with_cache ENV["FORCE_GET_CACHE"] && method == :get, path do
84
+ response = nil
85
+ tries = 2
86
+
87
+ tries.times do |i|
88
+ response = Utils.retry Faraday::ConnectionFailed, Faraday::TimeoutError, times: 2 do
89
+ @client.send(method, path) do |request|
90
+ request.body = JSON.generate(body) if body
91
+ request.headers["Content-type"] = "application/json"
92
+ request.headers["DD-API-KEY"] = @api_key
93
+ request.headers["DD-APPLICATION-KEY"] = @app_key
94
+ end
87
95
  end
96
+
97
+ break if i == tries - 1 || method != :get || response.status < 500
98
+ Kennel.err.puts "Retrying on server error #{response.status} for #{path}"
88
99
  end
89
100
 
90
- break if i == tries - 1 || method != :get || response.status < 500
91
- Kennel.err.puts "Retrying on server error #{response.status} for #{path}"
92
- end
101
+ if !response.success? && (response.status != 404 || !ignore_404)
102
+ message = +"Error #{response.status} during #{method.upcase} #{path}\n"
103
+ message << "request:\n#{JSON.pretty_generate(body)}\nresponse:\n" if body
104
+ message << response.body
105
+ raise message
106
+ end
93
107
 
94
- if !response.success? && (response.status != 404 || !ignore_404)
95
- message = +"Error #{response.status} during #{method.upcase} #{path}\n"
96
- message << "request:\n#{JSON.pretty_generate(body)}\nresponse:\n" if body
97
- message << response.body
98
- raise message
108
+ if response.body.empty?
109
+ {}
110
+ else
111
+ JSON.parse(response.body, symbolize_names: true)
112
+ end
99
113
  end
114
+ end
100
115
 
101
- if response.body.empty?
102
- {}
116
+ # allow caching all requests to speedup/benchmark logic that includes repeated requests
117
+ def with_cache(enabled, key)
118
+ return yield unless enabled
119
+ dir = "tmp/cache"
120
+ FileUtils.mkdir_p(dir) unless File.directory?(dir)
121
+ file = "#{dir}/#{key.delete("/?=")}" # TODO: encode nicely
122
+ if File.exist?(file)
123
+ Marshal.load(File.read(file)) # rubocop:disable Security/MarshalLoad
103
124
  else
104
- JSON.parse(response.body, symbolize_names: true)
125
+ result = yield
126
+ File.write(file, Marshal.dump(result))
127
+ result
105
128
  end
106
129
  end
107
130
  end
@@ -1,8 +1,8 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  # cache that reads everything from a single file
4
- # to avoid doing multiple disk reads while iterating all definitions
5
- # it also replaces updated keys and has an overall expiry to not keep deleted things forever
4
+ # - avoids doing multiple disk reads while iterating all definitions
5
+ # - has a global expiry to not keep deleted resources forever
6
6
  module Kennel
7
7
  class FileCache
8
8
  def initialize(file, cache_version)
@@ -22,10 +22,11 @@ module Kennel
22
22
 
23
23
  def fetch(key, key_version)
24
24
  old_value, old_version = @data[key]
25
- return old_value if old_version == [key_version, @cache_version]
25
+ expected_version = [key_version, @cache_version]
26
+ return old_value if old_version == expected_version
26
27
 
27
28
  new_value = yield
28
- @data[key] = [new_value, [key_version, @cache_version], @expires]
29
+ @data[key] = [new_value, expected_version, @expires]
29
30
  new_value
30
31
  end
31
32
 
@@ -46,8 +47,11 @@ module Kennel
46
47
  File.write(@file, Marshal.dump(@data))
47
48
  end
48
49
 
50
+ # keep the cache small to make loading it fast (5MB ~= 100ms)
51
+ # - delete expired keys
52
+ # - delete what would be deleted anyway when updating
49
53
  def expire_old_data
50
- @data.reject! { |_, (_, _, ex)| ex < @now }
54
+ @data.reject! { |_, (_, (_, cv), expires)| expires < @now || cv != @cache_version }
51
55
  end
52
56
  end
53
57
  end
@@ -3,7 +3,7 @@
3
3
  module Kennel
4
4
  class Importer
5
5
  TITLES = [:name, :title].freeze
6
- SORT_ORDER = [*TITLES, :id, :kennel_id, :type, :tags, :query, *Syncer::TRACKING_FIELDS, :template_variables].freeze
6
+ SORT_ORDER = [*TITLES, :id, :kennel_id, :type, :tags, :query, *Models::Record.subclasses.map { |k| k::TRACKING_FIELDS }, :template_variables].freeze
7
7
 
8
8
  def initialize(api)
9
9
  @api = api
@@ -31,11 +31,10 @@ module Kennel
31
31
  title.tr!(Kennel::Models::Record::LOCK, "") # avoid double lock icon
32
32
 
33
33
  # calculate or reuse kennel_id
34
- # TODO: this is copy-pasted from syncer, need to find a nice way to reuse it
35
- tracking_field = Syncer::TRACKING_FIELDS.detect { |f| data[f] }
36
34
  data[:kennel_id] =
37
- if tracking_field && data[tracking_field].sub!(/\n?-- Managed by kennel (\S+:\S+).*/, "")
38
- $1.split(":").last
35
+ if tracking_id = model.parse_tracking_id(data)
36
+ model.remove_tracking_id(data)
37
+ tracking_id.split(":").last
39
38
  else
40
39
  Kennel::Utils.parameterize(title)
41
40
  end
@@ -67,9 +66,12 @@ module Kennel
67
66
  when "dashboard"
68
67
  widgets = data[:widgets]&.flat_map { |widget| widget.dig(:definition, :widgets) || [widget] }
69
68
  widgets&.each do |widget|
70
- dry_up_query!(widget)
69
+ convert_widget_to_compact_format!(widget)
70
+ dry_up_widget_metadata!(widget)
71
71
  (widget.dig(:definition, :markers) || []).each { |m| m[:label]&.delete! " " }
72
72
  end
73
+ else
74
+ # noop
73
75
  end
74
76
 
75
77
  data.delete(:tags) if data[:tags] == [] # do not create super + [] call
@@ -91,7 +93,7 @@ module Kennel
91
93
  private
92
94
 
93
95
  # reduce duplication in imports by using dry `q: :metadata` when possible
94
- def dry_up_query!(widget)
96
+ def dry_up_widget_metadata!(widget)
95
97
  (widget.dig(:definition, :requests) || []).each do |request|
96
98
  next unless request.is_a?(Hash)
97
99
  next unless metadata = request[:metadata]
@@ -104,6 +106,20 @@ module Kennel
104
106
  end
105
107
  end
106
108
 
109
+ # new api format is very verbose, so use old dry format when possible
110
+ def convert_widget_to_compact_format!(widget)
111
+ (widget.dig(:definition, :requests) || []).each do |request|
112
+ next unless request.is_a?(Hash)
113
+ next if request[:formulas] && request[:formulas] != [{ formula: "query1" }]
114
+ next if request[:queries]&.size != 1
115
+ next if request[:queries].any? { |q| q[:data_source] != "metrics" }
116
+ next if widget.dig(:definition, :type) != request[:response_format]
117
+ request.delete(:formulas)
118
+ request.delete(:response_format)
119
+ request[:q] = request.delete(:queries).first.fetch(:query)
120
+ end
121
+ end
122
+
107
123
  def pretty_print(hash)
108
124
  sort_widgets hash
109
125
 
@@ -11,7 +11,7 @@ module Kennel
11
11
 
12
12
  def kennel_id
13
13
  name = self.class.name
14
- if name.start_with?("Kennel::")
14
+ if name.start_with?("Kennel::") # core objects would always generate the same id
15
15
  raise_with_location ArgumentError, "Set :kennel_id"
16
16
  end
17
17
  @kennel_id ||= Utils.snake_case name
@@ -8,6 +8,7 @@ module Kennel
8
8
  READONLY_ATTRIBUTES = superclass::READONLY_ATTRIBUTES + [
9
9
  :author_handle, :author_name, :modified_at, :url, :is_read_only, :notify_list
10
10
  ]
11
+ TRACKING_FIELD = :description
11
12
  REQUEST_DEFAULTS = {
12
13
  style: { line_width: "normal", palette: "dog_classic", line_type: "solid" }
13
14
  }.freeze
@@ -9,6 +9,7 @@ module Kennel
9
9
  READONLY_ATTRIBUTES = superclass::READONLY_ATTRIBUTES + [
10
10
  :multi, :matching_downtimes, :overall_state_modified, :overall_state, :restricted_roles
11
11
  ]
12
+ TRACKING_FIELD = :message
12
13
 
13
14
  MONITOR_DEFAULTS = {
14
15
  priority: nil
@@ -25,6 +26,7 @@ module Kennel
25
26
  groupby_simple_monitor: false
26
27
  }.freeze
27
28
  DEFAULT_ESCALATION_MESSAGE = ["", nil].freeze
29
+ ALLOWED_PRIORITY_CLASSES = [NilClass, Integer].freeze
28
30
 
29
31
  settings(
30
32
  :query, :name, :message, :escalation_message, :critical, :type, :renotify_interval, :warning, :timeout_h, :evaluation_delay,
@@ -127,9 +129,15 @@ module Kennel
127
129
  Utils.path_to_url "/monitors##{id}/edit"
128
130
  end
129
131
 
130
- # datadog uses / for show and # for edit as separator in it's links
131
132
  def self.parse_url(url)
132
- return unless id = url[/\/monitors[\/#](\d+)/, 1]
133
+ # datadog uses / for show and # for edit as separator in its links
134
+ id = url[/\/monitors[\/#](\d+)/, 1]
135
+
136
+ # slo alert url
137
+ id ||= url[/\/slo\/edit\/[a-z\d]{10,}\/alerts\/(\d+)/, 1]
138
+
139
+ return unless id
140
+
133
141
  Integer(id)
134
142
  end
135
143
 
@@ -217,6 +225,10 @@ module Kennel
217
225
  end
218
226
  end
219
227
  end
228
+
229
+ unless ALLOWED_PRIORITY_CLASSES.include?(priority.class)
230
+ invalid! "priority needs to be an Integer"
231
+ end
220
232
  end
221
233
  end
222
234
  end
@@ -3,8 +3,10 @@ module Kennel
3
3
  module Models
4
4
  class Record < Base
5
5
  LOCK = "\u{1F512}"
6
+ TRACKING_FIELDS = [:message, :description].freeze
6
7
  READONLY_ATTRIBUTES = [
7
- :deleted, :id, :created, :created_at, :creator, :org_id, :modified, :modified_at, :api_resource
8
+ :deleted, :id, :created, :created_at, :creator, :org_id, :modified, :modified_at,
9
+ :klass, :tracking_id # added by syncer.rb
8
10
  ].freeze
9
11
 
10
12
  settings :id, :kennel_id
@@ -22,6 +24,18 @@ module Kennel
22
24
  subclasses.map { |s| [s.api_resource, s] }.to_h
23
25
  end
24
26
 
27
+ def parse_tracking_id(a)
28
+ a[self::TRACKING_FIELD].to_s[/-- Managed by kennel (\S+:\S+)/, 1]
29
+ end
30
+
31
+ # TODO: combine with parse into a single method or a single regex
32
+ def remove_tracking_id(a)
33
+ value = a[self::TRACKING_FIELD]
34
+ a[self::TRACKING_FIELD] =
35
+ value.dup.sub!(/\n?-- Managed by kennel .*/, "") ||
36
+ raise("did not find tracking id in #{value}")
37
+ end
38
+
25
39
  private
26
40
 
27
41
  def normalize(_expected, actual)
@@ -60,19 +74,40 @@ module Kennel
60
74
  end
61
75
 
62
76
  def tracking_id
63
- "#{project.kennel_id}:#{kennel_id}"
77
+ @tracking_id ||= begin
78
+ id = "#{project.kennel_id}:#{kennel_id}"
79
+ raise ValidationError, "#{id} kennel_id cannot include whitespace" if id.match?(/\s/) # <-> parse_tracking_id
80
+ id
81
+ end
64
82
  end
65
83
 
66
84
  def resolve_linked_tracking_ids!(*)
67
85
  end
68
86
 
87
+ def add_tracking_id
88
+ json = as_json
89
+ if self.class.parse_tracking_id(json)
90
+ invalid! "remove \"-- Managed by kennel\" line it from #{self.class::TRACKING_FIELD} to copy a resource"
91
+ end
92
+ json[self.class::TRACKING_FIELD] =
93
+ "#{json[self.class::TRACKING_FIELD]}\n" \
94
+ "-- Managed by kennel #{tracking_id} in #{project.class.file_location}, do not modify manually".lstrip
95
+ end
96
+
97
+ def remove_tracking_id
98
+ self.class.remove_tracking_id(as_json)
99
+ end
100
+
69
101
  private
70
102
 
71
103
  def resolve_link(tracking_id, type, id_map, force:)
72
104
  id = id_map[tracking_id]
73
105
  if id == :new
74
106
  if force
75
- invalid! "#{type} #{tracking_id} was referenced but is also created by the current run.\nIt could not be created because of a circular dependency, try creating only some of the resources"
107
+ invalid!(
108
+ "#{type} #{tracking_id} was referenced but is also created by the current run.\n" \
109
+ "It could not be created because of a circular dependency, try creating only some of the resources"
110
+ )
76
111
  else
77
112
  nil # will be re-resolved after the linked object was created
78
113
  end
@@ -3,6 +3,7 @@ module Kennel
3
3
  module Models
4
4
  class Slo < Record
5
5
  READONLY_ATTRIBUTES = superclass::READONLY_ATTRIBUTES + [:type_id, :monitor_tags]
6
+ TRACKING_FIELD = :description
6
7
  DEFAULTS = {
7
8
  description: nil,
8
9
  query: nil,
@@ -63,7 +64,7 @@ module Kennel
63
64
  end
64
65
 
65
66
  def self.parse_url(url)
66
- url[/\/slo(\?.*slo_id=|\/edit\/)([a-z\d]{10,})/, 2]
67
+ url[/\/slo(\?.*slo_id=|\/edit\/)([a-z\d]{10,})(&|$)/, 2]
67
68
  end
68
69
 
69
70
  def resolve_linked_tracking_ids!(id_map, **args)
@@ -29,6 +29,8 @@ module Kennel
29
29
  Kennel.err.print "#{time.round(2)}s\n"
30
30
 
31
31
  result
32
+ ensure
33
+ stop = true
32
34
  end
33
35
  end
34
36
  end
data/lib/kennel/syncer.rb CHANGED
@@ -1,22 +1,13 @@
1
1
  # frozen_string_literal: true
2
2
  module Kennel
3
3
  class Syncer
4
- TRACKING_FIELDS = [:message, :description].freeze
5
4
  DELETE_ORDER = ["dashboard", "slo", "monitor"].freeze # dashboards references monitors + slos, slos reference monitors
5
+ LINE_UP = "\e[1A\033[K" # go up and clear
6
6
 
7
7
  def initialize(api, expected, project: nil)
8
8
  @api = api
9
9
  @project_filter = project
10
10
  @expected = expected
11
- if @project_filter
12
- original = @expected
13
- @expected = @expected.select { |e| e.project.kennel_id == @project_filter }
14
- if @expected.empty?
15
- possible = original.map { |e| e.project.kennel_id }.uniq.sort
16
- raise "#{@project_filter} does not match any projects, try any of these:\n#{possible.join("\n")}"
17
- end
18
- end
19
- @expected.each { |e| add_tracking_id e }
20
11
  calculate_diff
21
12
  prevent_irreversible_partial_updates
22
13
  end
@@ -38,20 +29,28 @@ module Kennel
38
29
 
39
30
  def update
40
31
  each_resolved @create do |_, e|
32
+ message = "#{e.class.api_resource} #{e.tracking_id}"
33
+ Kennel.out.puts "Creating #{message}"
41
34
  reply = @api.create e.class.api_resource, e.as_json
35
+ cache_metadata reply, e.class
42
36
  id = reply.fetch(:id)
43
- populate_id_map [reply] # allow resolving ids we could previously no resolve
44
- Kennel.out.puts "Created #{e.class.api_resource} #{tracking_id(e.as_json)} #{e.class.url(id)}"
37
+ populate_id_map [], [reply] # allow resolving ids we could previously not resolve
38
+ Kennel.out.puts "#{LINE_UP}Created #{message} #{e.class.url(id)}"
45
39
  end
46
40
 
47
41
  each_resolved @update do |id, e|
42
+ message = "#{e.class.api_resource} #{e.tracking_id} #{e.class.url(id)}"
43
+ Kennel.out.puts "Updating #{message}"
48
44
  @api.update e.class.api_resource, id, e.as_json
49
- Kennel.out.puts "Updated #{e.class.api_resource} #{tracking_id(e.as_json)} #{e.class.url(id)}"
45
+ Kennel.out.puts "#{LINE_UP}Updated #{message}"
50
46
  end
51
47
 
52
48
  @delete.each do |id, _, a|
53
- @api.delete a.fetch(:api_resource), id
54
- Kennel.out.puts "Deleted #{a.fetch(:api_resource)} #{tracking_id(a)} #{id}"
49
+ klass = a.fetch(:klass)
50
+ message = "#{klass.api_resource} #{a.fetch(:tracking_id)} #{id}"
51
+ Kennel.out.puts "Deleting #{message}"
52
+ @api.delete klass.api_resource, id
53
+ Kennel.out.puts "#{LINE_UP}Deleted #{message}"
55
54
  end
56
55
  end
57
56
 
@@ -99,14 +98,13 @@ module Kennel
99
98
 
100
99
  actual = Progress.progress("Downloading definitions") { download_definitions }
101
100
 
102
- # resolve dependencies to avoid diff
103
- populate_id_map actual
104
- @expected.each { |e| @id_map[e.tracking_id] ||= :new }
105
- resolve_linked_tracking_ids! @expected
101
+ Progress.progress "Diffing" do
102
+ populate_id_map @expected, actual
103
+ filter_actual_by_project! actual
104
+ resolve_linked_tracking_ids! @expected # resolve dependencies to avoid diff
106
105
 
107
- filter_by_project! actual
106
+ @expected.each(&:add_tracking_id) # avoid diff with actual
108
107
 
109
- Progress.progress "Diffing" do
110
108
  items = actual.map do |a|
111
109
  e = matching_expected(a)
112
110
  if e && @expected.delete(e)
@@ -117,9 +115,8 @@ module Kennel
117
115
  end
118
116
 
119
117
  # fill details of things we need to compare
120
- detailed = Hash.new { |h, k| h[k] = [] }
121
- items.each { |e, a| detailed[a[:api_resource]] << a if e }
122
- detailed.each { |api_resource, actuals| @api.fill_details! api_resource, actuals }
118
+ details = items.map { |e, a| a if e && e.class.api_resource == "dashboard" }.compact
119
+ @api.fill_details! "dashboard", details
123
120
 
124
121
  # pick out things to update or delete
125
122
  items.each do |e, a|
@@ -127,26 +124,30 @@ module Kennel
127
124
  if e
128
125
  diff = e.diff(a)
129
126
  @update << [id, e, a, diff] if diff.any?
130
- elsif tracking_id(a) # was previously managed
127
+ elsif a.fetch(:tracking_id) # was previously managed
131
128
  @delete << [id, nil, a]
132
129
  end
133
130
  end
134
131
 
135
132
  ensure_all_ids_found
136
133
  @create = @expected.map { |e| [nil, e] }
134
+ @delete.sort_by! { |_, _, a| DELETE_ORDER.index a.fetch(:klass).api_resource }
137
135
  end
138
-
139
- @delete.sort_by! { |_, _, a| DELETE_ORDER.index a.fetch(:api_resource) }
140
136
  end
141
137
 
142
138
  def download_definitions
143
- Utils.parallel(Models::Record.subclasses.map(&:api_resource)) do |api_resource|
144
- results = @api.list(api_resource, with_downtimes: false) # lookup monitors without adding unnecessary downtime information
139
+ Utils.parallel(Models::Record.subclasses) do |klass|
140
+ results = @api.list(klass.api_resource, with_downtimes: false) # lookup monitors without adding unnecessary downtime information
145
141
  results = results[results.keys.first] if results.is_a?(Hash) # dashboards are nested in {dashboards: []}
146
- results.each { |c| c[:api_resource] = api_resource } # store api resource for later diffing
142
+ results.each { |a| cache_metadata(a, klass) }
147
143
  end.flatten(1)
148
144
  end
149
145
 
146
+ def cache_metadata(a, klass)
147
+ a[:klass] = klass
148
+ a[:tracking_id] = a.fetch(:klass).parse_tracking_id(a)
149
+ end
150
+
150
151
  def ensure_all_ids_found
151
152
  @expected.each do |e|
152
153
  next unless id = e.id
@@ -158,7 +159,7 @@ module Kennel
158
159
  def matching_expected(a)
159
160
  # index list by all the thing we look up by: tracking id and actual id
160
161
  @lookup_map ||= @expected.each_with_object({}) do |e, all|
161
- keys = [tracking_id(e.as_json)]
162
+ keys = [e.tracking_id]
162
163
  keys << "#{e.class.api_resource}:#{e.id}" if e.id
163
164
  keys.compact.each do |key|
164
165
  raise "Lookup #{key} is duplicated" if all[key]
@@ -166,14 +167,15 @@ module Kennel
166
167
  end
167
168
  end
168
169
 
169
- @lookup_map["#{a.fetch(:api_resource)}:#{a.fetch(:id)}"] || @lookup_map[tracking_id(a)]
170
+ klass = a.fetch(:klass)
171
+ @lookup_map["#{klass.api_resource}:#{a.fetch(:id)}"] || @lookup_map[a.fetch(:tracking_id)]
170
172
  end
171
173
 
172
174
  def print_plan(step, list, color)
173
175
  return if list.empty?
174
176
  list.each do |_, e, a, diff|
175
- api_resource = (e ? e.class.api_resource : a.fetch(:api_resource))
176
- Kennel.out.puts Utils.color(color, "#{step} #{api_resource} #{e&.tracking_id || tracking_id(a)}")
177
+ klass = (e ? e.class : a.fetch(:klass))
178
+ Kennel.out.puts Utils.color(color, "#{step} #{klass.api_resource} #{e&.tracking_id || a.fetch(:tracking_id)}")
177
179
  print_diff(diff) if diff # only for update
178
180
  end
179
181
  end
@@ -199,76 +201,47 @@ module Kennel
199
201
  end
200
202
  end
201
203
 
202
- # Do not add tracking-id when working with existing ids on a branch,
203
- # so resource do not get deleted from running an update on master (for example merge->CI).
204
- # Also make sure the diff still makes sense, by kicking out the now noop-update.
205
- #
206
- # Note: ideally we'd never add tracking in the first place, but at that point we do not know the diff yet
204
+ # - do not add tracking-id when working with existing ids on a branch,
205
+ # so resources do not get deleted when running an update on master (for example merge->CI)
206
+ # - make sure the diff is clean, by kicking out the now noop-update
207
+ # - ideally we'd never add tracking in the first place, but when adding tracking we do not know the diff yet
207
208
  def prevent_irreversible_partial_updates
208
209
  return unless @project_filter
209
210
  @update.select! do |_, e, _, diff|
210
- next true unless e.id # short circuit for performance
211
+ next true unless e.id # safe to add tracking when not having id
211
212
 
212
213
  diff.select! do |field_diff|
213
- (_, field, old, new) = field_diff
214
- next true unless tracking_field?(field)
214
+ (_, field, actual) = field_diff
215
+ # TODO: refactor this so TRACKING_FIELD stays record-private
216
+ next true if e.class::TRACKING_FIELD != field.to_sym # need to sym here because Hashdiff produces strings
217
+ next true if e.class.parse_tracking_id(field.to_sym => actual) # already has tracking id
215
218
 
216
- if (old_tracking = tracking_value(old))
217
- old_tracking == tracking_value(new) || raise("do not update! (atm unreachable)")
218
- else
219
- field_diff[3] = remove_tracking_id(e) # make plan output match update
220
- old != field_diff[3]
221
- end
219
+ field_diff[3] = e.remove_tracking_id # make `rake plan` output match what we are sending
220
+ actual != field_diff[3] # discard diff if now nothing changes
222
221
  end
223
222
 
224
223
  !diff.empty?
225
224
  end
226
225
  end
227
226
 
228
- def populate_id_map(actual)
229
- actual.each { |a| @id_map[tracking_id(a)] = a.fetch(:id) }
227
+ def populate_id_map(expected, actual)
228
+ actual.each do |a|
229
+ next unless tracking_id = a.fetch(:tracking_id)
230
+ @id_map[tracking_id] = a.fetch(:id)
231
+ end
232
+ expected.each { |e| @id_map[e.tracking_id] ||= :new }
230
233
  end
231
234
 
232
235
  def resolve_linked_tracking_ids!(list, force: false)
233
236
  list.each { |e| e.resolve_linked_tracking_ids!(@id_map, force: force) }
234
237
  end
235
238
 
236
- def filter_by_project!(definitions)
239
+ def filter_actual_by_project!(actual)
237
240
  return unless @project_filter
238
- definitions.select! do |a|
239
- id = tracking_id(a)
240
- !id || id.start_with?("#{@project_filter}:")
241
+ actual.select! do |a|
242
+ tracking_id = a.fetch(:tracking_id)
243
+ !tracking_id || tracking_id.start_with?("#{@project_filter}:")
241
244
  end
242
245
  end
243
-
244
- def add_tracking_id(e)
245
- json = e.as_json
246
- field = tracking_field(json)
247
- raise "remove \"-- Managed by kennel\" line it from #{field} to copy a resource" if tracking_value(json[field])
248
- json[field] = "#{json[field]}\n-- Managed by kennel #{e.tracking_id} in #{e.project.class.file_location}, do not modify manually".lstrip
249
- end
250
-
251
- def remove_tracking_id(e)
252
- json = e.as_json
253
- field = tracking_field(json)
254
- value = json[field]
255
- json[field] = value.dup.sub!(/\n?-- Managed by kennel .*/, "") || raise("did not find tracking id in #{value}")
256
- end
257
-
258
- def tracking_id(a)
259
- tracking_value a[tracking_field(a)]
260
- end
261
-
262
- def tracking_value(content)
263
- content.to_s[/-- Managed by kennel (\S+:\S+)/, 1]
264
- end
265
-
266
- def tracking_field(a)
267
- TRACKING_FIELDS.detect { |f| a.key?(f) }
268
- end
269
-
270
- def tracking_field?(field)
271
- TRACKING_FIELDS.include?(field.to_sym)
272
- end
273
246
  end
274
247
  end
data/lib/kennel/tasks.rb CHANGED
@@ -138,7 +138,7 @@ namespace :kennel do
138
138
  resources.each do |resource|
139
139
  Kennel::Progress.progress("Downloading #{resource}") do
140
140
  list = api.list(resource)
141
- api.fill_details!(resource, list)
141
+ api.fill_details!(resource, list) if resource == "dashboard"
142
142
  end
143
143
  list.each do |r|
144
144
  r[:api_resource] = resource
@@ -36,7 +36,8 @@ module Kennel
36
36
 
37
37
  def widget_queries(widget)
38
38
  requests = widget.dig(:definition, :requests) || []
39
- (requests.is_a?(Hash) ? requests.values : requests).map { |r| r[:q] } # hostmap widgets have hash requests
39
+ return requests.values.map { |r| r[:q] } if requests.is_a?(Hash) # hostmap widgets have hash requests
40
+ requests.flat_map { |r| r[:q] || r[:queries]&.map { |q| q[:query] } } # old format with q: or queries: [{query:}]
40
41
  end
41
42
  end
42
43
  end
@@ -1,4 +1,4 @@
1
1
  # frozen_string_literal: true
2
2
  module Kennel
3
- VERSION = "1.87.1"
3
+ VERSION = "1.90.0"
4
4
  end
data/template/Readme.md CHANGED
@@ -96,6 +96,7 @@ end
96
96
  - use [datadog monitor UI](https://app.datadoghq.com/monitors/manage) to find a monitor
97
97
  - get the `id` from the url
98
98
  - run `URL='https://app.datadoghq.com/monitors/123' bundle exec rake kennel:import` and copy the output
99
+ - import task also works with SLO alerts, e.g. `URL='https://app.datadoghq.com/slo/edit/123abc456def123/alerts/789' bundle exec rake kennel:import`
99
100
  - find or create a project in `projects/`
100
101
  - add the monitor to `parts: [` list, for example:
101
102
  ```Ruby
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: kennel
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.87.1
4
+ version: 1.90.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Michael Grosser
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2021-05-14 00:00:00.000000000 Z
11
+ date: 2021-07-02 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: faraday