kennel 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 9b0f8c0885f03a37f8cd2c60d8aadb1596953267
+   data.tar.gz: 91428cc5840c0530f25103c5d94569badbd5c832
+ SHA512:
+   metadata.gz: c8399a1eaa4f9f59e16e60c781f20bf9d682f49df8ba43d37bd5d103125922d97cb6371aa583212ac498759ea331084cd26371bb951d4a955244a8779862bcac
+   data.tar.gz: bfa9c9132785e9a01066cfc1c2c9fcfec66a41648636f0551bfd3f8ec94e0036a3c4b5883fcc227973b5c1f89d5d100c12d78597c51ad4f1808189d290e927bf
data/lib/kennel.rb ADDED
@@ -0,0 +1,70 @@
+ # frozen_string_literal: true
+ require "faraday"
+ require "json"
+ require "English"
+
+ require "kennel/utils"
+ require "kennel/progress"
+ require "kennel/syncer"
+ require "kennel/api"
+ require "kennel/github_reporter"
+ require "kennel/subclass_tracking"
+ require "kennel/file_cache"
+
+ require "kennel/models/base"
+ require "kennel/models/monitor"
+ require "kennel/models/dash"
+ require "kennel/models/project"
+ require "kennel/models/team"
+
+ module Kennel
+   class << self
+     def generate
+       FileUtils.rm_rf("generated")
+       generated.each do |part|
+         path = "generated/#{part.tracking_id.sub(":", "/")}.json"
+         FileUtils.mkdir_p(File.dirname(path))
+         File.write(path, JSON.pretty_generate(part.as_json) << "\n")
+       end
+     end
+
+     def plan
+       syncer.plan
+     end
+
+     def update
+       syncer.plan
+       syncer.update if syncer.confirm
+     end
+
+     def report_plan_to_github
+       reporter = GithubReporter.new(ENV.fetch("GITHUB_TOKEN"))
+       reporter.report { plan }
+     end
+
+     private
+
+     def syncer
+       @syncer ||= Syncer.new(api, generated)
+     end
+
+     def api
+       @api ||= Api.new(ENV.fetch("DATADOG_APP_KEY"), ENV.fetch("DATADOG_API_KEY"))
+     end
+
+     def generated
+       @generated ||= begin
+         Progress.progress "Generating" do
+           load_all
+           Models::Project.recursive_subclasses.flat_map do |project_class|
+             project_class.new.parts
+           end
+         end
+       end
+     end
+
+     def load_all
+       Dir["{parts,teams,projects}/**/*.rb"].each { |f| require "./#{f}" }
+     end
+   end
+ end
data/lib/kennel/api.rb ADDED
@@ -0,0 +1,43 @@
+ # frozen_string_literal: true
+ module Kennel
+   class Api
+     def initialize(app_key, api_key)
+       @app_key = app_key
+       @api_key = api_key
+       @client = Faraday.new(url: "https://app.datadoghq.com")
+     end
+
+     def show(api_resource, id)
+       request :get, "/api/v1/#{api_resource}/#{id}"
+     end
+
+     def list(api_resource, params)
+       request :get, "/api/v1/#{api_resource}", params: params
+     end
+
+     def create(api_resource, attributes)
+       request :post, "/api/v1/#{api_resource}", body: attributes
+     end
+
+     def update(api_resource, id, attributes)
+       request :put, "/api/v1/#{api_resource}/#{id}", body: attributes
+     end
+
+     def delete(api_resource, id)
+       request :delete, "/api/v1/#{api_resource}/#{id}"
+     end
+
+     private
+
+     def request(method, path, body: nil, params: {})
+       params = params.merge(application_key: @app_key, api_key: @api_key)
+       query = Faraday::FlatParamsEncoder.encode(params)
+       response = @client.send(method, "#{path}?#{query}") do |request|
+         request.body = JSON.generate(body) if body
+         request.headers["Content-type"] = "application/json"
+       end
+       raise "Error #{method} #{path} -> #{response.status}:\n#{response.body}" unless response.success?
+       JSON.parse(response.body, symbolize_names: true)
+     end
+   end
+ end
data/lib/kennel/file_cache.rb ADDED
@@ -0,0 +1,35 @@
+ # frozen_string_literal: true
+
+ # cache that reads everything from a single file
+ # to avoid doing multiple disk reads while iterating all definitions
+ # it also replaces updated keys and has an overall expiry to not keep deleted things forever
+ module Kennel
+   class FileCache
+     def initialize(file)
+       @file = file
+       @data =
+         begin
+           Marshal.load(File.read(@file)) # rubocop:disable Security/MarshalLoad
+         rescue StandardError
+           {}
+         end
+       @now = Time.now.to_i
+       @expires = @now + (30 * 24 * 60 * 60) # 1 month
+       @data.reject! { |_, (_, _, ex)| ex < @now } # expire old data
+     end
+
+     def fetch(key, version)
+       old_value, old_version = @data[key]
+       return old_value if old_version == version
+
+       new_value = yield
+       @data[key] = [new_value, version, @expires]
+       new_value
+     end
+
+     def persist
+       FileUtils.mkdir_p(File.dirname(@file))
+       File.write(@file, Marshal.dump(@data))
+     end
+   end
+ end
data/lib/kennel/github_reporter.rb ADDED
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+ module Kennel
+   class GithubReporter
+     def initialize(token)
+       @token = token
+       @git_sha = Utils.capture_sh("git rev-parse HEAD").strip
+       origin = ENV["PROJECT_REPOSITORY"] || Utils.capture_sh("git remote -v").split("\n").first
+       @repo_part = origin[%r{github\.com[:/](.+?)(\.git|$)}, 1] || raise("no origin found")
+     end
+
+     def report
+       output = Utils.strip_shell_control(Utils.capture_stdout { yield }.strip)
+     ensure
+       comment "```\n#{output || "Error"}\n```"
+     end
+
+     private
+
+     # https://developer.github.com/v3/repos/comments/#create-a-commit-comment
+     def comment(body)
+       post "commits/#{@git_sha}/comments", body: body
+     end
+
+     def post(path, data)
+       url = "https://api.github.com/repos/#{@repo_part}/#{path}"
+       response = Faraday.post(url, data.to_json, authorization: "token #{@token}")
+       raise "failed to POST to github:\n#{url} -> #{response.status}\n#{response.body}" unless response.status == 201
+     end
+   end
+ end
data/lib/kennel/models/base.rb ADDED
@@ -0,0 +1,94 @@
+ # frozen_string_literal: true
+ require "hashdiff"
+
+ module Kennel
+   module Models
+     class Base
+       LOCK = "\u{1F512}"
+       READONLY_ATTRIBUTES = [
+         :deleted, :matching_downtimes, :id, :created, :created_at, :creator, :org_id, :modified,
+         :overall_state_modified, :overall_state, :api_resource
+       ].freeze
+
+       class << self
+         include SubclassTracking
+
+         def settings(*names)
+           duplicates = (@set & names)
+           if duplicates.any?
+             raise ArgumentError, "Settings #{duplicates.map(&:inspect).join(", ")} are already defined"
+           end
+
+           @set.concat names
+           names.each do |name|
+             next if method_defined?(name)
+             define_method name do
+               raise ArgumentError, "Trying to call #{name} for #{self.class} but it was never set or passed as option"
+             end
+           end
+         end
+
+         def defaults(options)
+           options.each do |name, block|
+             validate_setting_exists name
+             define_method name, &block
+           end
+         end
+
+         def inherited(child)
+           super
+           child.instance_variable_set(:@set, (@set || []).dup)
+         end
+
+         def validate_setting_exists(name)
+           return if !@set || @set.include?(name)
+           supported = @set.map(&:inspect)
+           raise ArgumentError, "Unsupported setting #{name.inspect}, supported settings are #{supported.join(", ")}"
+         end
+       end
+
+       def initialize(options = {})
+         validate_options(options)
+
+         options.each do |name, block|
+           self.class.validate_setting_exists name
+           define_singleton_method name, &block
+         end
+       end
+
+       def kennel_id
+         @kennel_id ||= Utils.snake_case self.class.name
+       end
+
+       def name
+         self.class.name
+       end
+
+       def diff(actual)
+         expected = as_json
+         expected.delete(:id)
+
+         READONLY_ATTRIBUTES.each { |k| actual.delete k }
+
+         diff = HashDiff.diff(actual, expected, use_lcs: false)
+         diff if diff.any?
+       end
+
+       def tracking_id
+         "#{project.kennel_id}:#{kennel_id}"
+       end
+
+       private
+
+       def validate_options(options)
+         unless options.is_a?(Hash)
+           raise ArgumentError, "Expected #{self.class.name}.new options to be a Hash, got a #{options.class}"
+         end
+         options.each do |k, v|
+           next if v.class == Proc
+           raise ArgumentError, "Expected #{self.class.name}.new option :#{k} to be Proc, for example `#{k}: -> { 12 }`"
+         end
+       end
+     end
+   end
+ end
data/lib/kennel/models/dash.rb ADDED
@@ -0,0 +1,86 @@
+ # frozen_string_literal: true
+ module Kennel
+   module Models
+     class Dash < Base
+       settings :id, :title, :description, :graphs, :kennel_id, :definitions, :template_variables
+
+       defaults(
+         id: -> { nil },
+         description: -> { "" },
+         definitions: -> { [] },
+         graphs: -> { [] },
+         template_variables: -> { [] }
+       )
+
+       attr_reader :project
+
+       def initialize(project, *args)
+         @project = project
+         super(*args)
+       end
+
+       def self.api_resource
+         "dash"
+       end
+
+       def as_json
+         return @json if @json
+         @json = {
+           id: id,
+           title: "#{title}#{LOCK}",
+           description: description,
+           read_only: false,
+           template_variables: render_template_variables,
+           graphs: render_graphs
+         }
+
+         validate_json(@json)
+
+         @json
+       end
+
+       def diff(actual)
+         actual.delete :resource
+         actual.delete :created_by
+         actual[:template_variables] ||= []
+         actual[:graphs].each do |g|
+           g[:definition].delete(:status)
+           g[:definition][:requests].each { |r| r.delete(:aggregator) }
+         end
+         super
+       end
+
+       def url(id)
+         Utils.path_to_url "/dash/#{id}"
+       end
+
+       private
+
+       def validate_json(data)
+         variables = data.fetch(:template_variables).map { |v| "$#{v.fetch(:name)}" }
+         queries = data[:graphs].flat_map { |g| g[:definition][:requests].map { |r| r.fetch(:q) } }
+         bad = queries.grep_v(/(#{variables.map { |v| Regexp.escape(v) }.join("|")})\b/)
+         if bad.any?
+           raise "#{tracking_id} queries #{bad.join(", ")} must use the template variables #{variables.join(", ")}"
+         end
+       end
+
+       def render_template_variables
+         template_variables.map do |v|
+           v.is_a?(String) ? { default: "*", prefix: v, name: v } : v
+         end
+       end
+
+       def render_graphs
+         all = definitions.map do |title, viz, type, query|
+           { title: title, definition: { viz: viz, requests: [{ q: query, type: type }] } }
+         end + graphs
+
+         all.each do |g|
+           g[:definition][:requests].each { |r| r[:conditional_formats] ||= [] }
+           g[:definition][:autoscale] = true unless g[:definition].key?(:autoscale)
+         end
+       end
+     end
+   end
+ end
data/lib/kennel/models/monitor.rb ADDED
@@ -0,0 +1,159 @@
+ # frozen_string_literal: true
+ module Kennel
+   module Models
+     class Monitor < Base
+       RENOTIFY_INTERVALS = [0, 10, 20, 30, 40, 50, 60, 90, 120, 180, 240, 300, 360, 720, 1440].freeze # minutes
+       QUERY_INTERVALS = ["1m", "5m", "10m", "15m", "30m", "1h", "2h", "4h", "24h"].freeze
+       METRIC_TYPES = ["query alert", "metric alert"].freeze
+
+       settings(
+         :query, :name, :message, :escalation_message, :critical, :kennel_id, :type, :renotify_interval, :warning,
+         :ok, :id, :no_data_timeframe, :notify_no_data, :tags, :multi
+       )
+       defaults(
+         message: -> { "" },
+         escalation_message: -> { "" },
+         type: -> { "metric alert" }, # TODO: can we auto-determine that ?
+         renotify_interval: -> { 120 },
+         warning: -> { nil },
+         ok: -> { nil },
+         id: -> { nil },
+         notify_no_data: -> { true },
+         no_data_timeframe: -> { notify_no_data ? 60 : nil },
+         tags: -> { [] },
+         multi: -> { !METRIC_TYPES.include?(type) || query.include?(" by ") }
+       )
+
+       attr_reader :project
+
+       def initialize(project, *args)
+         @project = project
+         super(*args)
+       end
+
+       def kennel_id
+         if self.class == Kennel::Models::Monitor
+           raise "Need to set :kennel_id when defining monitors from Kennel::Models::Monitor"
+         end
+         super
+       end
+
+       def as_json
+         return @as_json if @as_json
+         data = {
+           name: "#{name}#{LOCK}",
+           type: type,
+           query: query,
+           message: <<~TEXT.strip,
+             #{message}
+
+             @slack-#{project.team.slack}
+           TEXT
+           tags: @project.tags + tags,
+           multi: multi,
+           options: {
+             timeout_h: 0,
+             notify_no_data: notify_no_data,
+             no_data_timeframe: no_data_timeframe,
+             notify_audit: true,
+             require_full_window: true,
+             new_host_delay: 300,
+             include_tags: true,
+             escalation_message: Utils.presence(escalation_message.strip),
+             evaluation_delay: nil,
+             locked: false, # setting this to true prevents any edit and breaks updates when using replace workflow
+             renotify_interval: renotify_interval || 0,
+             thresholds: {
+               critical: critical
+             }
+           }
+         }
+
+         options = data[:options]
+         thresholds = options[:thresholds]
+
+         data[:id] = id if id
+
+         # warning and ok are optional
+         thresholds[:warning] = warning if warning
+         thresholds[:ok] = ok if ok
+
+         # metric and query values are stored as float by datadog
+         if ["metric alert", "query alert"].include? data.fetch(:type)
+           thresholds.each { |k, v| thresholds[k] = Float(v) }
+         end
+
+         validate_json(data)
+
+         @as_json = data
+       end
+
+       def self.api_resource
+         "monitor"
+       end
+
+       def url(id)
+         Utils.path_to_url "/monitors##{id}/edit"
+       end
+
+       def diff(actual)
+         options = actual.fetch(:options)
+         options.delete(:silenced) # we do not manage silenced, so ignore it when diffing
+         options[:escalation_message] ||= nil # unset field is not returned and would break the diff
+
+         # fields are not returned when set to true
+         if ["service check", "event alert"].include?(actual[:type])
+           options[:include_tags] = true unless options.key?(:include_tags)
+           options[:require_full_window] = true unless options.key?(:require_full_window)
+         end
+
+         # setting 0 results in thresholds not getting returned from the api
+         if actual[:type] == "event alert"
+           options[:thresholds] ||= { critical: 0 }
+         end
+
+         # fields are not returned when set to true
+         if actual[:type] == "service check"
+           options[:thresholds][:critical] ||= 1
+           options[:thresholds][:warning] ||= 1
+           options[:thresholds][:ok] ||= 1
+         end
+
+         # nil or "" are not returned from the api
+         options[:evaluation_delay] ||= nil
+
+         # datadog uses these types randomly
+         actual[:type] = type if METRIC_TYPES.include?(actual[:type])
+
+         super
+       end
+
+       private
+
+       def validate_json(data)
+         type = data.fetch(:type)
+
+         if type == "service check" && [ok, warning, critical].compact.map(&:class).uniq != [Integer]
+           raise "#{tracking_id} :ok, :warning and :critical must be integers for service check type"
+         end
+
+         if query_value = data.fetch(:query)[/\s*[<>]\s*(\d+(\.\d+)?)\s*$/, 1]
+           if Float(query_value) != Float(data.dig(:options, :thresholds, :critical))
+             raise "#{tracking_id} critical and value used in query must match"
+           end
+         end
+
+         unless RENOTIFY_INTERVALS.include? data.dig(:options, :renotify_interval)
+           raise "#{tracking_id} renotify_interval must be one of #{RENOTIFY_INTERVALS.join(", ")}"
+         end
+
+         if ["metric alert", "query alert"].include?(type)
+           interval = data.fetch(:query)[/\(last_(\S+?)\)/, 1]
+           unless QUERY_INTERVALS.include?(interval)
+             raise "#{tracking_id} query interval was #{interval}, but must be one of #{QUERY_INTERVALS.join(", ")}"
+           end
+         end
+       end
+     end
+   end
+ end
data/lib/kennel/models/project.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+ module Kennel
+   module Models
+     class Project < Base
+       settings :team, :parts, :tags
+       defaults(
+         tags: -> { ["service:#{kennel_id}"] }
+       )
+
+       def self.file_location
+         @file_location ||= begin
+           method_in_file = instance_methods(false).first
+           instance_method(method_in_file).source_location.first.sub("#{Bundler.root}/", "")
+         end
+       end
+     end
+   end
+ end
data/lib/kennel/models/team.rb ADDED
@@ -0,0 +1,9 @@
+ # frozen_string_literal: true
+ module Kennel
+   module Models
+     class Team < Base
+       # TODO: validate slack has no leading #
+       settings :slack, :email
+     end
+   end
+ end
data/lib/kennel/progress.rb ADDED
@@ -0,0 +1,34 @@
+ # frozen_string_literal: true
+ require "benchmark"
+
+ module Kennel
+   class Progress
+     # print what we are doing and a spinner until it is done ... then show how long it took
+     def self.progress(name)
+       print "#{name} ... "
+
+       animation = "-\\|/"
+       count = 0
+       stop = false
+       result = nil
+
+       spinner = Thread.new do
+         loop do
+           break if stop
+           print animation[count % animation.size]
+           sleep 0.2
+           print "\b"
+           count += 1
+         end
+       end
+
+       time = Benchmark.realtime { result = yield }
+
+       stop = true
+       spinner.join
+       print "#{time.round(2)}s\n"
+
+       result
+     end
+   end
+ end
data/lib/kennel/subclass_tracking.rb ADDED
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+ module Kennel
+   module SubclassTracking
+     def recursive_subclasses
+       subclasses + subclasses.flat_map(&:recursive_subclasses)
+     end
+
+     def subclasses
+       @subclasses ||= []
+     end
+
+     private
+
+     def inherited(child)
+       super
+       subclasses << child
+     end
+   end
+ end
data/lib/kennel/syncer.rb ADDED
@@ -0,0 +1,176 @@
+ # frozen_string_literal: true
+ module Kennel
+   class Syncer
+     CACHE_FILE = "tmp/cache/details" # keep in sync with .travis.yml caching
+
+     def initialize(api, expected)
+       @api = api
+       @expected = expected
+       @expected.each { |e| add_tracking_id e }
+       calculate_diff
+     end
+
+     def plan
+       puts "Plan:"
+       if noop?
+         puts Utils.color(:green, "Nothing to do.")
+       else
+         print_plan "Create", @create, :green
+         print_plan "Update", @update, :yellow
+         print_plan "Delete", @delete, :red
+       end
+     end
+
+     def confirm
+       !STDIN.tty? || Utils.ask("Execute Plan ?") unless noop?
+     end
+
+     def update
+       @create.each do |_, e|
+         reply = @api.create e.class.api_resource, e.as_json
+         puts "Created #{e.class.api_resource} #{tracking_id(e.as_json)} #{e.url(reply.fetch(:id))}"
+       end
+
+       @update.each do |id, e|
+         @api.update e.class.api_resource, id, e.as_json
+         puts "Updated #{e.class.api_resource} #{tracking_id(e.as_json)} #{e.url(id)}"
+       end
+
+       @delete.each do |id, _, a|
+         @api.delete a.fetch(:api_resource), id
+         puts "Deleted #{a.fetch(:api_resource)} #{tracking_id(a)} #{id}"
+       end
+     end
+
+     private
+
+     def noop?
+       @create.empty? && @update.empty? && @delete.empty?
+     end
+
+     def calculate_diff
+       @update = []
+       @delete = []
+
+       actual = Progress.progress "Downloading definitions" do
+         download_definitions
+       end
+
+       Progress.progress "Diffing" do
+         details_cache do |cache|
+           actual.each do |a|
+             id = a.fetch(:id)
+
+             if e = delete_matching_expected(a)
+               fill_details(a, cache) if a[:api_resource] == "dash"
+               if diff = e.diff(a)
+                 @update << [id, e, a, diff]
+               end
+             elsif tracking_id(a) # was previously managed
+               @delete << [id, nil, a]
+             end
+           end
+         end
+
+         ensure_all_ids_found
+         @create = @expected.map { |e| [nil, e] }
+       end
+     end
+
+     # Hack to get diff to work until we can mass-fetch definitions
+     def fill_details(a, cache)
+       args = [a.fetch(:api_resource), a.fetch(:id)]
+       full = cache.fetch(args, a.fetch(:modified)) do
+         @api.show(*args).fetch(a.fetch(:api_resource).to_sym)
+       end
+       a.merge!(full)
+     end
+
+     def details_cache
+       cache = FileCache.new CACHE_FILE
+       yield cache
+       cache.persist
+     end
+
+     def download_definitions
+       api_resources = Models::Base.subclasses.map do |m|
+         next unless m.respond_to?(:api_resource)
+         m.api_resource
+       end
+
+       api_resources.compact.uniq.flat_map do |api_resource|
+         # lookup monitors without adding unnecessary downtime information
+         results = @api.list(api_resource, with_downtimes: false)
+         if results.is_a?(Hash) # dashes
+           results = results.fetch("#{api_resource}es".to_sym) # dash reply is returned under dashes
+           results.each { |r| r[:id] = Integer(r.fetch(:id)) }
+         end
+         results.each { |c| c[:api_resource] = api_resource }
+       end
+     end
+
+     def ensure_all_ids_found
+       @expected.each do |e|
+         next unless id = e.id
+         raise "Unable to find existing #{e.class.api_resource} with id #{id}"
+       end
+     end
+
+     def delete_matching_expected(a)
+       # index list by all the things we look up by: tracking id and actual id
+       @lookup_map ||= @expected.each_with_object({}) do |e, all|
+         keys = [tracking_id(e.as_json)]
+         keys << "#{e.class.api_resource}:#{e.id}" if e.id
+         keys.compact.each do |key|
+           raise "Lookup #{key} is duplicated" if all[key]
+           all[key] = e
+         end
+       end
+
+       e = @lookup_map["#{a.fetch(:api_resource)}:#{a.fetch(:id)}"] || @lookup_map[tracking_id(a)]
+       @expected.delete(e) if e
+     end
+
+     def print_plan(step, list, color)
+       return if list.empty?
+       list.each do |_, e, a, diff|
+         puts Utils.color(color, "#{step} #{tracking_id(e&.as_json || a)}")
+         print_diff(diff) if diff # only for update
+       end
+     end
+
+     def print_diff(diff)
+       diff.each do |type, field, old, new|
+         if type == "+"
+           temp = new.inspect
+           new = old.inspect
+           old = temp
+         else # ~ and -
+           old = old.inspect
+           new = new.inspect
+         end
+
+         if (old + new).size > 100
+           puts " #{type}#{field}"
+           puts " #{old} ->"
+           puts " #{new}"
+         else
+           puts " #{type}#{field} #{old} -> #{new}"
+         end
+       end
+     end
+
+     def add_tracking_id(e)
+       e.as_json[tracking_field(e.as_json)] +=
+         "\n-- Managed by kennel #{e.tracking_id} in #{e.project.class.file_location}, do not modify manually"
+     end
+
+     def tracking_id(a)
+       a[tracking_field(a)][/-- Managed by kennel (\S+:\S+)/, 1]
+     end
+
+     def tracking_field(a)
+       a[:message] ? :message : :description
+     end
+   end
+ end
data/lib/kennel/utils.rb ADDED
@@ -0,0 +1,58 @@
+ # frozen_string_literal: true
+ module Kennel
+   module Utils
+     class << self
+       def snake_case(string)
+         string.gsub(/::/, "_") # Foo::Bar -> foo_bar
+           .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2') # FOOBar -> foo_bar
+           .gsub(/([a-z\d])([A-Z])/, '\1_\2') # fooBar -> foo_bar
+           .downcase
+       end
+
+       def presence(value)
+         value.empty? ? nil : value
+       end
+
+       def ask(question)
+         printf color(:red, "#{question} - press 'y' to continue: ")
+         begin
+           STDIN.gets.chomp == "y"
+         rescue Interrupt # do not show a backtrace if user decides to Ctrl+C here
+           printf "\n"
+           exit 1
+         end
+       end
+
+       def color(color, text)
+         code = { red: 31, green: 32, yellow: 33 }.fetch(color)
+         "\e[#{code}m#{text}\e[0m"
+       end
+
+       def strip_shell_control(text)
+         text.gsub(/\e\[\d+m(.*?)\e\[0m/, "\\1").tr("\b", "")
+       end
+
+       def capture_stdout
+         $stdout = StringIO.new
+         yield
+         $stdout.string
+       ensure
+         $stdout = STDOUT
+       end
+
+       def capture_sh(command)
+         result = `#{command} 2>&1`
+         raise "Command failed:\n#{command}\n#{result}" unless $CHILD_STATUS.success?
+         result
+       end
+
+       def path_to_url(path)
+         if subdomain = ENV["DATADOG_SUBDOMAIN"]
+           "https://#{subdomain}.datadoghq.com#{path}"
+         else
+           path
+         end
+       end
+     end
+   end
+ end
data/lib/kennel/version.rb ADDED
@@ -0,0 +1,4 @@
+ # frozen_string_literal: true
+ module Kennel
+   VERSION = "0.1.0"
+ end
metadata ADDED
@@ -0,0 +1,86 @@
+ --- !ruby/object:Gem::Specification
+ name: kennel
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Michael Grosser
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2017-12-09 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: faraday
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: hashdiff
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description:
+ email: michael@grosser.it
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/kennel.rb
+ - lib/kennel/api.rb
+ - lib/kennel/file_cache.rb
+ - lib/kennel/github_reporter.rb
+ - lib/kennel/models/base.rb
+ - lib/kennel/models/dash.rb
+ - lib/kennel/models/monitor.rb
+ - lib/kennel/models/project.rb
+ - lib/kennel/models/team.rb
+ - lib/kennel/progress.rb
+ - lib/kennel/subclass_tracking.rb
+ - lib/kennel/syncer.rb
+ - lib/kennel/utils.rb
+ - lib/kennel/version.rb
+ homepage: https://github.com/grosser/kennel
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: 2.4.2
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.6.13
+ signing_key:
+ specification_version: 4
+ summary: Keep datadog monitors/dashboards/etc in version control, avoid chaotic management
+   via UI
+ test_files: []