flapjack 0.7.2 → 0.7.3
- data/CHANGELOG.md +7 -0
- data/etc/flapjack_config.yaml.example +2 -0
- data/features/notification_rules.feature +4 -4
- data/features/steps/events_steps.rb +28 -15
- data/lib/flapjack/coordinator.rb +9 -1
- data/lib/flapjack/data/contact.rb +1 -0
- data/lib/flapjack/data/notification_rule.rb +197 -93
- data/lib/flapjack/executive.rb +11 -20
- data/lib/flapjack/gateways/api.rb +247 -363
- data/lib/flapjack/gateways/web.rb +28 -16
- data/lib/flapjack/gateways/web/views/_foot.haml +2 -2
- data/lib/flapjack/gateways/web/views/entity.haml +8 -2
- data/lib/flapjack/gateways/web/views/self_stats.haml +1 -1
- data/lib/flapjack/patches.rb +20 -1
- data/lib/flapjack/pikelet.rb +5 -14
- data/lib/flapjack/utility.rb +16 -0
- data/lib/flapjack/version.rb +1 -1
- data/spec/lib/flapjack/coordinator_spec.rb +29 -0
- data/spec/lib/flapjack/data/contact_spec.rb +1 -4
- data/spec/lib/flapjack/data/notification_rule_spec.rb +102 -9
- data/spec/lib/flapjack/gateways/api_spec.rb +9 -11
- data/spec/lib/flapjack/gateways/web_spec.rb +15 -19
- data/spec/lib/flapjack/pikelet_spec.rb +2 -56
- metadata +8 -2
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,12 @@
 ## Flapjack Changelog
 
+# 0.7.3 - 2013-05-14
+- Bug: Web and api gateways have configurable http timeout gh-170 (@jessereynolds)
+- Bug: Support POSTs to API larger than ~112 KB gh-169 (@jessereynolds)
+- Bug: Validate notification rules before adding, updating gh-146 (@ali-graham)
+- Bug: Web UI very slow with large number of keys gh-164 (@jessereynolds)
+- Bug: crash in executive should exit flapjack gh-143 (@ali-graham)
+
 # 0.7.2 - 2013-05-06
 - Feature: executive instance keys now expire after 7 days, touched every event gh-111 (@jessereynolds)
 - Feature: slightly less sucky looking web UI, also now includes entity listing screens (@jessereynolds)
data/etc/flapjack_config.yaml.example
CHANGED
@@ -80,12 +80,14 @@ development:
   web:
     enabled: yes
     port: 5080
+    timeout: 300
     access_log: "log/web_access.log"
     logger:
       level: INFO
   api:
     enabled: yes
     port: 5081
+    timeout: 300
     access_log: "log/api_access.log"
     logger:
       level: INFO
data/features/notification_rules.feature
CHANGED
@@ -3,9 +3,9 @@ Feature: Notification rules on a per contact basis
 
   Background:
     Given the following users exist:
-      | id | first_name | last_name | email             | sms          |
-      | 1  | Malak      | Al-Musawi | malak@example.com | +61400000001 |
-      | 2  | Imani      | Farooq    | imani@example.com | +61400000002 |
+      | id | first_name | last_name | email             | sms          | timezone      |
+      | 1  | Malak      | Al-Musawi | malak@example.com | +61400000001 | Asia/Baghdad  |
+      | 2  | Imani      | Farooq    | imani@example.com | +61400000002 | Europe/Moscow |
 
     And the following entities exist:
       | id | name | contacts |
@@ -25,7 +25,7 @@ Feature: Notification rules on a per contact basis
 
   @time_restrictions @time
   Scenario: Alerts only during specified time restrictions
-    Given the timezone is
+    Given the timezone is Asia/Baghdad
     And the time is February 1 2013 6:59
     And the check is check 'ping' on entity 'foo'
     And the check is in an ok state
data/features/steps/events_steps.rb
CHANGED
@@ -111,6 +111,19 @@ def submit_acknowledgement(entity, check)
   submit_event(event)
 end
 
+def icecube_schedule_to_time_restriction(sched, time_zone)
+  tr = sched.to_hash
+  tr[:start_time] = time_zone.utc_to_local(tr[:start_date][:time]).strftime "%Y-%m-%d %H:%M:%S"
+  tr[:end_time] = time_zone.utc_to_local(tr[:end_time][:time]).strftime "%Y-%m-%d %H:%M:%S"
+
+  # rewrite IceCube::WeeklyRule to Weekly, etc
+  tr[:rrules].each {|rrule|
+    rrule[:rule_type] = /^.*\:\:(.*)Rule$/.match(rrule[:rule_type])[1]
+  }
+
+  tr
+end
+
 Given /^an entity '([\w\.\-]+)' exists$/ do |entity|
   Flapjack::Data::Entity.add({'id' => '5000',
                               'name' => entity},
@@ -240,7 +253,7 @@ Given /^the following users exist:$/ do |contacts|
                                 'last_name' => contact['last_name'],
                                 'email' => contact['email'],
                                 'media' => media},
-                                :redis => @redis )
+                                :redis => @redis ).timezone = contact['timezone']
   end
 end
 
@@ -258,27 +271,27 @@ Given /^user (\d+) has the following notification rules:$/ do |contact_id, rules
     entity_tags = rule['entity_tags'].split(',').map { |x| x.strip }
     warning_media = rule['warning_media'].split(',').map { |x| x.strip }
     critical_media = rule['critical_media'].split(',').map { |x| x.strip }
-    warning_blackhole = rule['warning_blackhole'].downcase == 'true'
-    critical_blackhole = rule['critical_blackhole'].downcase == 'true'
+    warning_blackhole = (rule['warning_blackhole'].downcase == 'true')
+    critical_blackhole = (rule['critical_blackhole'].downcase == 'true')
+    timezone = Flapjack::Data::Contact.find_by_id(contact_id, :redis => @redis).timezone
     time_restrictions = []
     rule['time_restrictions'].split(',').map { |x| x.strip }.each do |time_restriction|
       case time_restriction
       when '8-18 weekdays'
-
-        time_zone = ActiveSupport::TimeZone.new("America/New_York")
-        weekdays_8_18 = IceCube::Schedule.new(time_zone.local(2013,2,1,8,0,0), :duration => 60 * 60 * 10)
+        weekdays_8_18 = IceCube::Schedule.new(timezone.local(2013,2,1,8,0,0), :duration => 60 * 60 * 10)
         weekdays_8_18.add_recurrence_rule(IceCube::Rule.weekly.day(:monday, :tuesday, :wednesday, :thursday, :friday))
-        time_restrictions <<
+        time_restrictions << icecube_schedule_to_time_restriction(weekdays_8_18, timezone)
       end
     end
-
-
-
-
-
-
-
-
+    rule_data = {:contact_id => contact_id,
+                 :entities => entities,
+                 :entity_tags => entity_tags,
+                 :warning_media => warning_media,
+                 :critical_media => critical_media,
+                 :warning_blackhole => warning_blackhole,
+                 :critical_blackhole => critical_blackhole,
+                 :time_restrictions => time_restrictions}
+    Flapjack::Data::NotificationRule.add(rule_data, :redis => @redis)
   end
 end
 
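
Condensed from the step above, a hypothetical round trip through the new icecube_schedule_to_time_restriction helper; the timezone value is borrowed from the feature's contact table, and the requires (active_support, ice_cube) are assumed to be those already used by this steps file:

# build the '8-18 weekdays' schedule and convert it into the hash format
# stored on a notification rule as a time restriction
tz = ActiveSupport::TimeZone.new("Europe/Moscow")
weekdays_8_18 = IceCube::Schedule.new(tz.local(2013, 2, 1, 8, 0, 0), :duration => 60 * 60 * 10)
weekdays_8_18.add_recurrence_rule(
  IceCube::Rule.weekly.day(:monday, :tuesday, :wednesday, :thursday, :friday))
restriction = icecube_schedule_to_time_restriction(weekdays_8_18, tz)
# restriction now holds "%Y-%m-%d %H:%M:%S" strings under :start_time and
# :end_time, with rrule types rewritten from "IceCube::WeeklyRule" to "Weekly"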
data/lib/flapjack/coordinator.rb
CHANGED
@@ -128,11 +128,19 @@ module Flapjack
 
     # passed a hash with {PIKELET_TYPE => PIKELET_CFG, ...}
     def add_pikelets(pikelets_data = {})
+      start_piks = []
       pikelets_data.each_pair do |type, cfg|
         next unless pikelet = Flapjack::Pikelet.create(type,
           :config => cfg, :redis_config => @redis_options)
+        start_piks << pikelet
         @pikelets << pikelet
-
+      end
+      begin
+        start_piks.each {|pik| pik.start }
+      rescue Exception => e
+        trace = e.backtrace.join("\n")
+        @logger.fatal "#{e.message}\n#{trace}"
+        stop
       end
     end
 
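
This is the fix for gh-143 in the changelog above: pikelets are now all created before any of them is started, and the second pass that calls start is wrapped in a rescue, so an exception raised while starting any pikelet (the executive included) is logged with its backtrace and answered with stop, shutting the coordinator down rather than leaving a partially started flapjack running.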
data/lib/flapjack/data/notification_rule.rb
CHANGED
@@ -29,13 +29,11 @@ module Flapjack
       # sanity check
       return unless redis.exists("notification_rule:#{rule_id}")
 
-
-      rule.refresh
-      rule
+      self.new({:id => rule_id.to_s}, {:redis => redis})
     end
 
     # replacing save! etc
-    def self.add(rule_data, options)
+    def self.add(rule_data, options = {})
       raise "Redis connection not set" unless redis = options[:redis]
 
       rule_id = SecureRandom.uuid
@@ -43,129 +41,235 @@ module Flapjack
       self.find_by_id(rule_id, :redis => redis)
     end
 
-
-
-
-
-
-      tr[:start_date] = tr[:start_time].dup
-      tr.delete(:start_time)
-
-      if tr[:start_date].is_a?(String)
-        tr[:start_date] = { :time => tr[:start_date] }
-      end
-      if tr[:start_date].is_a?(Hash)
-        tr[:start_date][:time] = time_zone.parse(tr[:start_date][:time])
-        tr[:start_date][:zone] = time_zone.name
-      end
-
-      if tr[:end_time].is_a?(String)
-        tr[:end_time] = { :time => tr[:end_time] }
-      end
-      if tr[:end_time].is_a?(Hash)
-        tr[:end_time][:time] = time_zone.parse(tr[:end_time][:time])
-        tr[:end_time][:zone] = time_zone.name
-      end
-
-      # rewrite Weekly to IceCube::WeeklyRule, etc
-      tr[:rrules].each {|rrule|
-        rrule[:rule_type] = "IceCube::#{rrule[:rule_type]}Rule"
-      }
-
-      tr
-    end
-
-    def self.time_restriction_from_ice_cube_hash(tr, time_zone)
-      tr[:start_date] = time_zone.utc_to_local(tr[:start_date][:time]).strftime "%Y-%m-%d %H:%M:%S"
-      tr[:end_time] = time_zone.utc_to_local(tr[:end_time][:time]).strftime "%Y-%m-%d %H:%M:%S"
-
-      # rewrite IceCube::WeeklyRule to Weekly, etc
-      tr[:rrules].each {|rrule|
-        rrule[:rule_type] = /^.*\:\:(.*)Rule$/.match(rrule[:rule_type])[1]
-      }
-
-      tr[:start_time] = tr[:start_date].dup
-      tr.delete(:start_date)
-
-      tr
-    end
-
-    def refresh
-      rule_data = @redis.hgetall("notification_rule:#{@id}")
-
-      @contact_id = rule_data['contact_id']
-      @entity_tags = Yajl::Parser.parse(rule_data['entity_tags'] || '')
-      @entities = Yajl::Parser.parse(rule_data['entities'] || '')
-      @time_restrictions = Yajl::Parser.parse(rule_data['time_restrictions'] || '')
-      @warning_media = Yajl::Parser.parse(rule_data['warning_media'] || '')
-      @critical_media = Yajl::Parser.parse(rule_data['critical_media'] || '')
-      @warning_blackhole = ((rule_data['warning_blackhole'] || 'false').downcase == 'true')
-      @critical_blackhole = ((rule_data['critical_blackhole'] || 'false').downcase == 'true')
-
+    def update(rule_data)
+      return false unless self.class.add_or_update(rule_data.merge(:id => @id),
+        :redis => @redis)
+      refresh
+      true
     end
 
-
-
-
+    # NB: ice_cube doesn't have much rule data validation, and has
+    # problems with infinite loops if the data can't logically match; see
+    # https://github.com/seejohnrun/ice_cube/issues/127 &
+    # https://github.com/seejohnrun/ice_cube/issues/137
+    # We may want to consider some sort of timeout-based check around
+    # anything that could fall into that.
+    #
+    # We don't want to replicate IceCube's from_hash behaviour here,
+    # but we do need to apply some sanity checking on the passed data.
+    def self.time_restriction_to_icecube_schedule(tr, timezone)
+      return unless !tr.nil? && tr.is_a?(Hash)
+      return if timezone.nil? && !timezone.is_a?(ActiveSupport::TimeZone)
+      return unless tr = prepare_time_restriction(tr, timezone)
+
+      IceCube::Schedule.from_hash(tr)
     end
 
     def to_json(*args)
-
-
-
-
-
-      hash.to_json
+      self.class.hashify(:id, :contact_id, :entity_tags, :entities,
+          :time_restrictions, :warning_media, :critical_media,
+          :warning_blackhole, :critical_blackhole) {|k|
+        [k, self.send(k)]
+      }.to_json
     end
 
     # tags or entity names match?
     # nil @entity_tags and nil @entities matches
     def match_entity?(event)
-      return true if (@entity_tags.nil? or @entity_tags.empty?) and
-        (@entities.nil? or @entities.empty?)
-      return true if @entities.include?(event.split(':').first)
       # TODO: return true if event's entity tags match entity tag list on the rule
-
+      ((@entity_tags.nil? || @entity_tags.empty?) && (@entities.nil? || @entities.empty?)) ||
+        (@entities.include?(event.split(':').first))
     end
 
     def blackhole?(severity)
-
-
-      return false
+      ('warning'.eql?(severity.downcase) && @warning_blackhole) ||
+        ('critical'.eql?(severity.downcase) && @critical_blackhole)
     end
 
     def media_for_severity(severity)
       case severity
       when 'warning'
-
+        @warning_media
       when 'critical'
-
+        @critical_media
       end
-      media_list
     end
 
     private
 
     def initialize(rule_data, opts = {})
       @redis ||= opts[:redis]
-      @logger = opts[:logger]
       raise "a redis connection must be supplied" unless @redis
-      @
+      @logger = opts[:logger]
+      @id = rule_data[:id]
+      refresh
     end
 
     def self.add_or_update(rule_data, options = {})
-      raise ":id is a required key in rule_data" unless rule_data[:id]
-
       redis = options[:redis]
+      raise "a redis connection must be supplied" unless redis
+
+      return unless self.validate_data(rule_data, options)
+
+      # whitelisting fields, rather than passing through submitted data directly
+      json_rule_data = {
+        :id => rule_data[:id].to_s,
+        :contact_id => rule_data[:contact_id].to_s,
+        :entities => Yajl::Encoder.encode(rule_data[:entities]),
+        :entity_tags => Yajl::Encoder.encode(rule_data[:entity_tags]),
+        :time_restrictions => Yajl::Encoder.encode(rule_data[:time_restrictions]),
+        :warning_media => Yajl::Encoder.encode(rule_data[:warning_media]),
+        :critical_media => Yajl::Encoder.encode(rule_data[:critical_media]),
+        :warning_blackhole => rule_data[:warning_blackhole],
+        :critical_blackhole => rule_data[:critical_blackhole],
+      }
+
+      redis.sadd("contact_notification_rules:#{json_rule_data[:contact_id]}",
+        json_rule_data[:id])
+      redis.hmset("notification_rule:#{json_rule_data[:id]}",
+        *json_rule_data.flatten)
+      true
+    end
+
+    def self.prepare_time_restriction(time_restriction, timezone = nil)
+      # this will hand back a 'deep' copy
+      tr = symbolize(time_restriction)
+
+      return unless tr.has_key?(:start_time) && tr.has_key?(:end_time)
+
+      parsed_time = proc {|t|
+        if t.is_a?(Time)
+          t
+        else
+          begin; (timezone || Time).parse(t); rescue ArgumentError; nil; end
+        end
+      }
+
+      start_time = case tr[:start_time]
+      when String, Time
+        parsed_time.call(tr.delete(:start_time).dup)
+      when Hash
+        time_hash = tr.delete(:start_time).dup
+        parsed_time.call(time_hash[:time])
+      end
+
+      end_time = case tr[:end_time]
+      when String, Time
+        parsed_time.call(tr.delete(:end_time).dup)
+      when Hash
+        time_hash = tr.delete(:end_time).dup
+        parsed_time.call(time_hash[:time])
+      end
+
+      return unless start_time && end_time
+
+      tr[:start_date] = timezone ?
+                          {:time => start_time, :zone => timezone.name} :
+                          start_time
+
+      tr[:end_date] = timezone ?
+                        {:time => end_time, :zone => timezone.name} :
+                        end_time
+
+      tr[:duration] = end_time - start_time
+
+      # check that rrule types are valid IceCube rule types
+      return unless tr[:rrules].is_a?(Array) &&
+        tr[:rrules].all? {|rr| rr.is_a?(Hash)} &&
+        (tr[:rrules].map {|rr| rr[:rule_type]} -
+         ['Daily', 'Hourly', 'Minutely', 'Monthly', 'Secondly',
+          'Weekly', 'Yearly']).empty?
+
+      # rewrite Weekly to IceCube::WeeklyRule, etc
+      tr[:rrules].each {|rrule|
+        rrule[:rule_type] = "IceCube::#{rrule[:rule_type]}Rule"
+      }
 
-
-
-
-
-
+      # TODO does this need to check classes for the following values?
+      # "validations": {
+      #   "day": [1,2,3,4,5]
+      # },
+      # "interval": 1,
+      # "week_start": 0
 
-
-
+      tr
+    end
+
+    def self.validate_data(d, options = {})
+      # hash with validation => error_message
+      validations = {proc { d.has_key?(:id) } =>
+                     "id not set",
+
+                     proc { d.has_key?(:entities) &&
+                            d[:entities].is_a?(Array) &&
+                            d[:entities].all? {|e| e.is_a?(String)} } =>
+                     "entities must be a list of strings",
+
+                     proc { d.has_key?(:entity_tags) &&
+                            d[:entity_tags].is_a?(Array) &&
+                            d[:entity_tags].all? {|et| et.is_a?(String)}} =>
+                     "entity_tags must be a list of strings",
+
+                     proc { (d.has_key?(:entities) &&
+                             d[:entities].is_a?(Array) &&
+                             (d[:entities].size > 0)) ||
+                            (d.has_key?(:entity_tags) &&
+                             d[:entity_tags].is_a?(Array) &&
+                             (d[:entity_tags].size > 0)) } =>
+                     "entities or entity tags must have at least one value",
+
+                     proc { d.has_key?(:time_restrictions) &&
+                            d[:time_restrictions].all? {|tr|
+                              !!prepare_time_restriction(symbolize(tr))
+                            }
+                          } =>
+                     "time restrictions are invalid",
+
+                     # TODO should the media types be checked against a whitelist?
+                     proc { d.has_key?(:warning_media) &&
+                            d[:warning_media].is_a?(Array) &&
+                            d[:warning_media].all? {|et| et.is_a?(String)}} =>
+                     "warning_media must be a list of strings",
+
+                     proc { d.has_key?(:critical_media) &&
+                            d[:critical_media].is_a?(Array) &&
+                            d[:critical_media].all? {|et| et.is_a?(String)}} =>
+                     "critical_media must be a list of strings",
+
+                     proc { d.has_key?(:warning_blackhole) &&
+                            [TrueClass, FalseClass].include?(d[:warning_blackhole].class) } =>
+                     "warning_blackhole must be true or false",
+
+                     proc { d.has_key?(:critical_blackhole) &&
+                            [TrueClass, FalseClass].include?(d[:critical_blackhole].class) } =>
+                     "critical_blackhole must be true or false",
+                    }
+
+      errors = validations.keys.inject([]) {|ret,vk|
+        ret << "Rule #{validations[vk]}" unless vk.call
+        ret
+      }
+
+      return true if errors.empty?
+
+      if logger = options[:logger]
+        error_str = errors.join(", ")
+        logger.info "validation error: #{error_str}"
+      end
+      false
+    end
+
+    def refresh
+      rule_data = @redis.hgetall("notification_rule:#{@id}")
+
+      @contact_id = rule_data['contact_id']
+      @entity_tags = Yajl::Parser.parse(rule_data['entity_tags'] || '')
+      @entities = Yajl::Parser.parse(rule_data['entities'] || '')
+      @time_restrictions = Yajl::Parser.parse(rule_data['time_restrictions'] || '')
+      @warning_media = Yajl::Parser.parse(rule_data['warning_media'] || '')
+      @critical_media = Yajl::Parser.parse(rule_data['critical_media'] || '')
+      @warning_blackhole = ((rule_data['warning_blackhole'] || 'false').downcase == 'true')
+      @critical_blackhole = ((rule_data['critical_blackhole'] || 'false').downcase == 'true')
    end
 
  end
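
For reference, a hypothetical rule_data hash that satisfies the validate_data checks added above (gh-146). The :id key is omitted here on the assumption that add fills it in via the SecureRandom.uuid seen earlier in this file; the contact id and entity name are placeholders, and a reachable Redis plus flapjack's usual requires are assumed:

require 'redis'
require 'flapjack/data/notification_rule'

redis = Redis.new

rule_data = {
  :contact_id         => '21',
  :entities           => ['foo-app-01'],
  :entity_tags        => [],
  :warning_media      => ['email'],
  :critical_media     => ['email', 'sms'],
  :warning_blackhole  => false,
  :critical_blackhole => false,
  :time_restrictions  => [
    {:start_time => '2013-02-01 08:00:00',
     :end_time   => '2013-02-01 18:00:00',
     :rrules     => [{:rule_type => 'Weekly'}]}
  ]
}

# on success this hands back the stored rule via find_by_id; if validation
# fails, nothing is written to redis and nil comes back instead
Flapjack::Data::NotificationRule.add(rule_data, :redis => redis)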