pulse_meter_core 0.4.13

Files changed (81)
  1. data/.gitignore +19 -0
  2. data/.rbenv-version +1 -0
  3. data/.rspec +1 -0
  4. data/.rvmrc +1 -0
  5. data/.travis.yml +8 -0
  6. data/Gemfile +2 -0
  7. data/LICENSE +22 -0
  8. data/README.md +40 -0
  9. data/Rakefile +20 -0
  10. data/lib/pulse_meter/command_aggregator/async.rb +83 -0
  11. data/lib/pulse_meter/command_aggregator/sync.rb +18 -0
  12. data/lib/pulse_meter/command_aggregator/udp.rb +48 -0
  13. data/lib/pulse_meter/mixins/dumper.rb +87 -0
  14. data/lib/pulse_meter/mixins/utils.rb +155 -0
  15. data/lib/pulse_meter/observer.rb +118 -0
  16. data/lib/pulse_meter/observer/extended.rb +32 -0
  17. data/lib/pulse_meter/sensor.rb +61 -0
  18. data/lib/pulse_meter/sensor/base.rb +88 -0
  19. data/lib/pulse_meter/sensor/configuration.rb +106 -0
  20. data/lib/pulse_meter/sensor/counter.rb +39 -0
  21. data/lib/pulse_meter/sensor/hashed_counter.rb +36 -0
  22. data/lib/pulse_meter/sensor/hashed_indicator.rb +24 -0
  23. data/lib/pulse_meter/sensor/indicator.rb +35 -0
  24. data/lib/pulse_meter/sensor/multi.rb +97 -0
  25. data/lib/pulse_meter/sensor/timeline.rb +236 -0
  26. data/lib/pulse_meter/sensor/timeline_reduce.rb +68 -0
  27. data/lib/pulse_meter/sensor/timelined/average.rb +32 -0
  28. data/lib/pulse_meter/sensor/timelined/counter.rb +23 -0
  29. data/lib/pulse_meter/sensor/timelined/hashed_counter.rb +31 -0
  30. data/lib/pulse_meter/sensor/timelined/hashed_indicator.rb +30 -0
  31. data/lib/pulse_meter/sensor/timelined/indicator.rb +23 -0
  32. data/lib/pulse_meter/sensor/timelined/max.rb +19 -0
  33. data/lib/pulse_meter/sensor/timelined/median.rb +14 -0
  34. data/lib/pulse_meter/sensor/timelined/min.rb +19 -0
  35. data/lib/pulse_meter/sensor/timelined/multi_percentile.rb +34 -0
  36. data/lib/pulse_meter/sensor/timelined/percentile.rb +22 -0
  37. data/lib/pulse_meter/sensor/timelined/uniq_counter.rb +22 -0
  38. data/lib/pulse_meter/sensor/timelined/zset_based.rb +37 -0
  39. data/lib/pulse_meter/sensor/uniq_counter.rb +24 -0
  40. data/lib/pulse_meter/server.rb +0 -0
  41. data/lib/pulse_meter/server/command_line_options.rb +0 -0
  42. data/lib/pulse_meter/server/config_options.rb +0 -0
  43. data/lib/pulse_meter/server/sensors.rb +0 -0
  44. data/lib/pulse_meter/udp_server.rb +45 -0
  45. data/lib/pulse_meter_core.rb +66 -0
  46. data/pulse_meter_core.gemspec +33 -0
  47. data/spec/pulse_meter/command_aggregator/async_spec.rb +53 -0
  48. data/spec/pulse_meter/command_aggregator/sync_spec.rb +25 -0
  49. data/spec/pulse_meter/command_aggregator/udp_spec.rb +45 -0
  50. data/spec/pulse_meter/mixins/dumper_spec.rb +162 -0
  51. data/spec/pulse_meter/mixins/utils_spec.rb +212 -0
  52. data/spec/pulse_meter/observer/extended_spec.rb +92 -0
  53. data/spec/pulse_meter/observer_spec.rb +207 -0
  54. data/spec/pulse_meter/sensor/base_spec.rb +106 -0
  55. data/spec/pulse_meter/sensor/configuration_spec.rb +103 -0
  56. data/spec/pulse_meter/sensor/counter_spec.rb +54 -0
  57. data/spec/pulse_meter/sensor/hashed_counter_spec.rb +43 -0
  58. data/spec/pulse_meter/sensor/hashed_indicator_spec.rb +39 -0
  59. data/spec/pulse_meter/sensor/indicator_spec.rb +43 -0
  60. data/spec/pulse_meter/sensor/multi_spec.rb +137 -0
  61. data/spec/pulse_meter/sensor/timeline_spec.rb +88 -0
  62. data/spec/pulse_meter/sensor/timelined/average_spec.rb +6 -0
  63. data/spec/pulse_meter/sensor/timelined/counter_spec.rb +6 -0
  64. data/spec/pulse_meter/sensor/timelined/hashed_counter_spec.rb +8 -0
  65. data/spec/pulse_meter/sensor/timelined/hashed_indicator_spec.rb +8 -0
  66. data/spec/pulse_meter/sensor/timelined/indicator_spec.rb +6 -0
  67. data/spec/pulse_meter/sensor/timelined/max_spec.rb +7 -0
  68. data/spec/pulse_meter/sensor/timelined/median_spec.rb +7 -0
  69. data/spec/pulse_meter/sensor/timelined/min_spec.rb +7 -0
  70. data/spec/pulse_meter/sensor/timelined/multi_percentile_spec.rb +21 -0
  71. data/spec/pulse_meter/sensor/timelined/percentile_spec.rb +17 -0
  72. data/spec/pulse_meter/sensor/timelined/uniq_counter_spec.rb +9 -0
  73. data/spec/pulse_meter/sensor/uniq_counter_spec.rb +28 -0
  74. data/spec/pulse_meter/udp_server_spec.rb +36 -0
  75. data/spec/pulse_meter_spec.rb +73 -0
  76. data/spec/shared_examples/timeline_sensor.rb +439 -0
  77. data/spec/shared_examples/timelined_subclass.rb +23 -0
  78. data/spec/spec_helper.rb +37 -0
  79. data/spec/support/matchers.rb +34 -0
  80. data/spec/support/observered.rb +40 -0
  81. metadata +342 -0
@@ -0,0 +1,6 @@
+ require 'spec_helper'
+
+ describe PulseMeter::Sensor::Timelined::Indicator do
+   it_should_behave_like "timeline sensor"
+   it_should_behave_like "timelined subclass", [1, 5, 2], 2
+ end
@@ -0,0 +1,7 @@
+ require 'spec_helper'
+
+ describe PulseMeter::Sensor::Timelined::Max do
+   it_should_behave_like "timeline sensor"
+   it_should_behave_like "timelined subclass", [1, 2, -1, -1, 5, 0], 5
+   it_should_behave_like "timelined subclass", [1], 1
+ end
@@ -0,0 +1,7 @@
+ require 'spec_helper'
+
+ describe PulseMeter::Sensor::Timelined::Median do
+   it_should_behave_like "timeline sensor"
+   it_should_behave_like "timelined subclass", [5, 4, 3, 2, 1], 3
+   it_should_behave_like "timelined subclass", [1], 1
+ end
@@ -0,0 +1,7 @@
+ require 'spec_helper'
+
+ describe PulseMeter::Sensor::Timelined::Min do
+   it_should_behave_like "timeline sensor"
+   it_should_behave_like "timelined subclass", [1, 2, -1, -1, 5, 0], -1
+   it_should_behave_like "timelined subclass", [1], 1
+ end
@@ -0,0 +1,21 @@
+ require 'spec_helper'
+
+ describe PulseMeter::Sensor::Timelined::MultiPercentile do
+   it_should_behave_like "timeline sensor", {:p => [0.8]}
+   it_should_behave_like "timelined subclass", [5, 4, 2, 2, 2, 2, 2, 2, 2, 1], {0.8 => 2, 0.5 => 2}.to_json, {:p => [0.8, 0.5]}
+   it_should_behave_like "timelined subclass", [1], {0.8 => 1}.to_json, {:p => [0.8]}
+
+   let(:init_values) {{:ttl => 1, :raw_data_ttl => 1, :interval => 1, :reduce_delay => 1}}
+   let(:name) {"percentile"}
+
+   it "should raise exception when extra parameter is not array of percentiles" do
+     expect {described_class.new(name, init_values.merge({:p => :bad}))}.to raise_exception(ArgumentError)
+   end
+
+   it "should raise exception when one of percentiles is not between 0 and 1" do
+     expect {described_class.new(name, init_values.merge({:p => [0.5, -1]}))}.to raise_exception(ArgumentError)
+     expect {described_class.new(name, init_values.merge({:p => [0.5, 1.1]}))}.to raise_exception(ArgumentError)
+     expect {described_class.new(name, init_values.merge({:p => [0.5, 0.1]}))}.not_to raise_exception(ArgumentError)
+   end
+
+ end
@@ -0,0 +1,17 @@
+ require 'spec_helper'
+
+ describe PulseMeter::Sensor::Timelined::Percentile do
+   it_should_behave_like "timeline sensor", {:p => 0.8}
+   it_should_behave_like "timelined subclass", [5, 4, 2, 2, 2, 2, 2, 2, 2, 1], 2, {:p => 0.8}
+   it_should_behave_like "timelined subclass", [1], 1, {:p => 0.8}
+
+   let(:init_values) {{:ttl => 1, :raw_data_ttl => 1, :interval => 1, :reduce_delay => 1}}
+   let(:name) {"percentile"}
+
+   it "should raise exception when percentile is not between 0 and 1" do
+     expect {described_class.new(name, init_values.merge({:p => -1}))}.to raise_exception(ArgumentError)
+     expect {described_class.new(name, init_values.merge({:p => 1.1}))}.to raise_exception(ArgumentError)
+     expect {described_class.new(name, init_values.merge({:p => 0.1}))}.not_to raise_exception(ArgumentError)
+   end
+
+ end
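The spec above fixes the constructor shape for a single-percentile sensor: new(name, options), where :p must lie between 0 and 1 and the timeline options exercised in init_values (:ttl, :raw_data_ttl, :interval, :reduce_delay) are passed alongside it. A minimal usage sketch under those assumptions (the require path, Redis wiring, and concrete option values are illustrative, not taken from the gem's documentation):

    # Hypothetical setup; only the option keys and the new(name, options)
    # signature come from the spec above.
    require 'pulse_meter_core'
    require 'redis'

    PulseMeter.redis = Redis.new

    latency = PulseMeter::Sensor::Timelined::Percentile.new(:request_latency,
      :interval     => 60,     # bucket width, seconds
      :ttl          => 86400,  # lifetime of reduced values
      :raw_data_ttl => 3600,   # lifetime of raw, not-yet-reduced data
      :reduce_delay => 30,     # grace period before reduction
      :p            => 0.95    # must be between 0 and 1, else ArgumentError
    )

    latency.event(123)         # record one observation into the current bucket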
@@ -0,0 +1,9 @@
+ require 'spec_helper'
+
+ describe PulseMeter::Sensor::Timelined::UniqCounter do
+   it_should_behave_like "timeline sensor"
+   it_should_behave_like "timelined subclass", [:foo, :bar], 2
+   it_should_behave_like "timelined subclass", [:foo, :bar, :foo], 2
+   data = (1..100).map {rand(200)}
+   it_should_behave_like "timelined subclass", data, data.uniq.count
+ end
@@ -0,0 +1,28 @@
+ require 'spec_helper'
+
+ describe PulseMeter::Sensor::UniqCounter do
+   let(:name){ :some_counter }
+   let(:sensor){ described_class.new(name) }
+   let(:redis){ PulseMeter.redis }
+
+   describe "#event" do
+     it "should count unique values" do
+       expect{ sensor.event(:first) }.to change{sensor.value}.to(1)
+       expect{ sensor.event(:first) }.not_to change{sensor.value}
+       expect{ sensor.event(:second) }.to change{sensor.value}.from(1).to(2)
+     end
+   end
+
+   describe "#value" do
+     it "should have initial value 0" do
+       sensor.value.should == 0
+     end
+
+     it "should return count of unique values" do
+       data = (1..100).map {rand(200)}
+       data.each {|e| sensor.event(e)}
+       sensor.value.should == data.uniq.count
+     end
+   end
+
+ end
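The spec above pins down the whole public surface of the non-timelined unique counter: new(name), #event, and #value, with duplicates ignored. A minimal usage sketch (the sensor name and event values are illustrative; the require path and Redis wiring are assumptions):

    require 'pulse_meter_core'
    require 'redis'

    PulseMeter.redis = Redis.new

    visitors = PulseMeter::Sensor::UniqCounter.new(:unique_visitors)
    visitors.event("alice")   # counted
    visitors.event("bob")     # counted
    visitors.event("alice")   # duplicate, does not change the value
    visitors.value            # => 2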
@@ -0,0 +1,36 @@
+ require 'spec_helper'
+
+ describe PulseMeter::UDPServer do
+   let(:host){'127.0.0.1'}
+   let(:port){33333}
+   let(:udp_sock){mock(:socket)}
+   let(:redis){PulseMeter.redis}
+   before do
+     UDPSocket.should_receive(:new).and_return(udp_sock)
+     udp_sock.should_receive(:bind).with(host, port).and_return(nil)
+     udp_sock.should_receive("do_not_reverse_lookup=").with(true).and_return(nil)
+     @server = described_class.new(host, port)
+   end
+
+   describe "#start" do
+     let(:data){
+       [
+         ["set", "xxxx", "zzzz"],
+         ["set", "yyyy", "zzzz"]
+       ].to_json
+     }
+     it "should process proper incoming commands" do
+       udp_sock.should_receive(:recvfrom).with(described_class::MAX_PACKET).and_return(data)
+       @server.start(1)
+       redis.get("xxxx").should == "zzzz"
+       redis.get("yyyy").should == "zzzz"
+     end
+
+     it "should suppress JSON errors" do
+       udp_sock.should_receive(:recvfrom).with(described_class::MAX_PACKET).and_return("xxx")
+       expect{ @server.start(1) }.not_to raise_exception
+     end
+   end
+
+ end
+
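Per the spec above, the server is constructed with a host and port, reads JSON-encoded arrays of Redis commands from its socket, applies them, and swallows malformed JSON. A sketch of driving it the same way the spec does (the require path and Redis setup are assumptions; start(1) mirrors the single receive cycle the spec exercises, and other calling conventions are not shown because the spec does not cover them):

    require 'pulse_meter_core'
    require 'redis'

    PulseMeter.redis = Redis.new

    server = PulseMeter::UDPServer.new('127.0.0.1', 33333)
    server.start(1)   # process one incoming datagram, as in the spec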
@@ -0,0 +1,73 @@
+ require 'spec_helper'
+
+ describe PulseMeter do
+   describe "::redis=" do
+     it "should store redis" do
+       PulseMeter.redis = 'redis'
+       PulseMeter.redis.should == 'redis'
+     end
+   end
+   describe "::redis" do
+     it "should retrieve redis" do
+       PulseMeter.redis = 'redis'
+       PulseMeter.redis.should == 'redis'
+     end
+   end
+   describe "::command_aggregator=" do
+     context "when :async passed" do
+       it "should set async command_aggregator to be used" do
+         PulseMeter.command_aggregator = :async
+         PulseMeter.command_aggregator.should be_kind_of(PulseMeter::CommandAggregator::Async)
+       end
+     end
+     context "when :sync passed" do
+       it "should set sync command_aggregator to be used" do
+         PulseMeter.command_aggregator = :sync
+         PulseMeter.command_aggregator.should be_kind_of(PulseMeter::CommandAggregator::Sync)
+       end
+     end
+     context "otherwise" do
+       it "should set command_aggregator to the passed value" do
+         PulseMeter.command_aggregator = :xxx
+         PulseMeter.command_aggregator.should == :xxx
+       end
+     end
+   end
+
+   describe "::command_aggregator" do
+     it "should return current command_aggregator" do
+       PulseMeter.command_aggregator = :async
+       PulseMeter.command_aggregator.should be_kind_of(PulseMeter::CommandAggregator::Async)
+       PulseMeter.command_aggregator = :sync
+       PulseMeter.command_aggregator.should be_kind_of(PulseMeter::CommandAggregator::Sync)
+     end
+
+     it "should always return the same command_aggregator for each type" do
+       PulseMeter.command_aggregator = :async
+       ca1 = PulseMeter.command_aggregator
+       PulseMeter.command_aggregator = :sync
+       PulseMeter.command_aggregator = :async
+       ca2 = PulseMeter.command_aggregator
+       ca1.should == ca2
+     end
+   end
+
+   describe "::logger" do
+     it "should return PulseMeter logger" do
+       PulseMeter.logger = 123
+       PulseMeter.logger.should == 123
+     end
+
+     it "should return default logger" do
+       PulseMeter.logger = nil
+       PulseMeter.logger.should be_kind_of(Logger)
+     end
+   end
+
+   describe "::error" do
+     it "should delegate error message to logger" do
+       PulseMeter.logger.should_receive(:error)
+       PulseMeter.error("foo")
+     end
+   end
+ end
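The top-level spec documents the module-level switches: PulseMeter.redis= stores the connection used by all sensors, PulseMeter.command_aggregator= accepts :async or :sync (any other value is kept as given), and PulseMeter.logger falls back to a standard Logger when set to nil. A minimal configuration sketch (the Redis instance and log destination are illustrative):

    require 'pulse_meter_core'
    require 'redis'
    require 'logger'

    PulseMeter.redis = Redis.new               # backend for all sensors
    PulseMeter.command_aggregator = :async     # or :sync
    PulseMeter.logger = Logger.new($stderr)    # nil would fall back to a default Logger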
@@ -0,0 +1,439 @@
+ shared_examples_for "timeline sensor" do |extra_init_values, default_event|
+   class Dummy
+     include PulseMeter::Mixins::Dumper
+     def name; :dummy end
+     def redis; PulseMeter.redis; end
+   end
+
+   let(:name){ :some_value_with_history }
+   let(:ttl){ 10000 }
+   let(:raw_data_ttl){ 3000 }
+   let(:interval){ 5 }
+   let(:reduce_delay){ 3 }
+   let(:good_init_values){ {:ttl => ttl, :raw_data_ttl => raw_data_ttl, :interval => interval, :reduce_delay => reduce_delay}.merge(extra_init_values || {}) }
+   let!(:sensor){ described_class.new(name, good_init_values) }
+   let(:dummy) {Dummy.new}
+   let(:base_class){ PulseMeter::Sensor::Base }
+   let(:redis){ PulseMeter.redis }
+   let(:sample_event) {default_event || 123}
+
+   before(:each) do
+     @interval_id = (Time.now.to_i / interval) * interval
+     @prev_interval_id = (Time.now.to_i / interval) * interval - interval
+
+     @raw_data_key = sensor.raw_data_key(@interval_id)
+     @prev_raw_data_key = sensor.raw_data_key(@prev_interval_id)
+
+     @next_raw_data_key = sensor.raw_data_key(@interval_id + interval)
+
+     @start_of_interval = Time.at(@interval_id)
+     @start_of_prev_interval = Time.at(@prev_interval_id)
+   end
+
+   describe "#dump" do
+     it "should be dumped successfully" do
+       expect {sensor.dump!}.not_to raise_exception
+     end
+   end
+
+   describe ".restore" do
+     before do
+       # no need to call sensor.dump! explicitly for it
+       # will be called automatically after creation
+       @restored = base_class.restore(sensor.name)
+     end
+
+     it "should restore #{described_class} instance" do
+       @restored.should be_instance_of(described_class)
+     end
+
+     it "should restore object with the same data" do
+       def inner_data(obj)
+         obj.instance_variables.sort.map {|v| obj.instance_variable_get(v)}
+       end
+
+       inner_data(sensor).should == inner_data(@restored)
+     end
+   end
+
+   describe "#event" do
+     it "should write events to redis" do
+       expect{
+         sensor.event(sample_event)
+       }.to change{ redis.keys('*').count }.by(1)
+     end
+
+     it "should write data so that it totally expires after :raw_data_ttl" do
+       key_count = redis.keys('*').count
+       sensor.event(sample_event)
+       Timecop.freeze(Time.now + raw_data_ttl + 1) do
+         redis.keys('*').count.should == key_count
+       end
+     end
+
+     it "should write data to bucket indicated by truncated timestamp" do
+       expect{
+         Timecop.freeze(@start_of_interval) do
+           sensor.event(sample_event)
+         end
+       }.to change{ redis.ttl(@raw_data_key) }
+     end
+
+     it "returns true if event processed correctly" do
+       sensor.event(sample_event).should be_true
+     end
+
+     it "catches StandardErrors and returns false" do
+       sensor.stub(:aggregate_event) {raise StandardError}
+       sensor.event(sample_event).should be_false
+     end
+   end
+
+   describe "#event_at" do
+     let(:now) {Time.now}
+     it "should write events to redis" do
+       expect{
+         sensor.event_at(now, sample_event)
+       }.to change{ redis.keys('*').count }.by(1)
+     end
+
+     it "should write data so that it totally expires after :raw_data_ttl" do
+       key_count = redis.keys('*').count
+       sensor.event_at(now, sample_event)
+       Timecop.freeze(now + raw_data_ttl + 1) do
+         redis.keys('*').count.should == key_count
+       end
+     end
+
+     it "should write data to bucket indicated by passed time" do
+       expect{
+         Timecop.freeze(@start_of_interval) do
+           sensor.event_at(@start_of_prev_interval, sample_event)
+         end
+       }.to change{ redis.ttl(@prev_raw_data_key) }
+     end
+   end
+
+   describe "#summarize" do
+     it "should convert data stored by raw_data_key to a value defined only by stored data" do
+       Timecop.freeze(@start_of_interval) do
+         sensor.event(sample_event)
+       end
+       Timecop.freeze(@start_of_interval + interval) do
+         sensor.event(sample_event)
+       end
+       sensor.summarize(@raw_data_key).should == sensor.summarize(@next_raw_data_key)
+       sensor.summarize(@raw_data_key).should_not be_nil
+     end
+   end
+
+   describe "#reduce" do
+     it "should store summarized value into data_key" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       val = sensor.summarize(@raw_data_key)
+       val.should_not be_nil
+       sensor.reduce(@interval_id)
+       redis.get(sensor.data_key(@interval_id)).should == val.to_s
+     end
+
+     it "should remove original raw_data_key" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       expect{
+         sensor.reduce(@interval_id)
+       }.to change{ redis.keys(sensor.raw_data_key(@interval_id)).count }.from(1).to(0)
+     end
+
+     it "should expire stored summarized data" do
+       Timecop.freeze(@start_of_interval) do
+         sensor.event(sample_event)
+         sensor.reduce(@interval_id)
+         redis.keys(sensor.data_key(@interval_id)).count.should == 1
+       end
+       Timecop.freeze(@start_of_interval + ttl + 1) do
+         redis.keys(sensor.data_key(@interval_id)).count.should == 0
+       end
+     end
+
+     it "should not store data if there is no corresponding raw data" do
+       Timecop.freeze(@start_of_interval) do
+         sensor.reduce(@interval_id)
+         redis.keys(sensor.data_key(@interval_id)).count.should == 0
+       end
+     end
+
+     it "should not store summarized data if it already exists" do
+       data_key = sensor.data_key(@interval_id)
+       redis.set(data_key, :dummy)
+       Timecop.freeze(@start_of_interval) do
+         sensor.event(sample_event)
+         sensor.reduce(@interval_id)
+         redis.get(data_key).should == "dummy"
+       end
+     end
+   end
+
+   describe "#reduce_all_raw" do
+     it "should reduce all data older than reduce_delay" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       val0 = sensor.summarize(@raw_data_key)
+       Timecop.freeze(@start_of_interval + interval){ sensor.event(sample_event) }
+       val1 = sensor.summarize(@next_raw_data_key)
+       expect{
+         Timecop.freeze(@start_of_interval + interval + interval + reduce_delay + 1) do
+           sensor.reduce_all_raw
+         end
+       }.to change{ redis.keys(sensor.raw_data_key('*')).count }.from(2).to(0)
+
+       redis.get(sensor.data_key(@interval_id)).should == val0.to_s
+       redis.get(sensor.data_key(@interval_id + interval)).should == val1.to_s
+     end
+
+     it "creates up to MAX_INTERVALS compressed data pieces from previously uncompressed data" do
+       max_count = described_class::MAX_INTERVALS
+       start = @start_of_interval - reduce_delay - max_count * interval
+       (max_count + 100).times do |i|
+         Timecop.freeze(start + i * interval) {sensor.event(sample_event)}
+       end
+
+       Timecop.freeze(@start_of_interval) do
+         expect {
+           sensor.reduce_all_raw
+         }.to change {redis.keys(sensor.data_key('*')).count}.from(0).to(max_count)
+       end
+     end
+
+     it "should not reduce fresh data" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+
+       expect{
+         Timecop.freeze(@start_of_interval + interval + reduce_delay - 1) { sensor.reduce_all_raw }
+       }.not_to change{ redis.keys(sensor.raw_data_key('*')).count }
+
+       expect{
+         Timecop.freeze(@start_of_interval + interval + reduce_delay - 1) { sensor.reduce_all_raw }
+       }.not_to change{ redis.keys(sensor.data_key('*')).count }
+     end
+   end
+
+   describe ".reduce_all_raw" do
+     it "should silently skip objects without reduce logic" do
+       dummy.dump!
+       expect {described_class.reduce_all_raw}.not_to raise_exception
+     end
+
+     it "should send reduce_all_raw to all dumped objects" do
+       described_class.any_instance.should_receive(:reduce_all_raw)
+       described_class.reduce_all_raw
+     end
+   end
+
+   describe "#timeline_within" do
+     it "should raise exception unless both arguments are Time objects" do
+       [:q, nil, -1].each do |bad_value|
+         expect{ sensor.timeline_within(Time.now, bad_value) }.to raise_exception(ArgumentError)
+         expect{ sensor.timeline_within(bad_value, Time.now) }.to raise_exception(ArgumentError)
+       end
+     end
+
+     it "should return an array of SensorData objects corresponding to stored data for passed interval" do
+       sensor.event(sample_event)
+       now = Time.now
+       timeline = sensor.timeline_within(now - 1, now)
+       timeline.should be_kind_of(Array)
+       timeline.each{|i| i.should be_kind_of(SensorData) }
+     end
+
+     it "should return array of results containing as many results as there are sensor interval beginnings in the passed interval" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       Timecop.freeze(@start_of_interval + interval){ sensor.event(sample_event) }
+
+       future = @start_of_interval + 3600
+       Timecop.freeze(future) do
+         sensor.timeline_within(
+           Time.at(@start_of_interval + interval - 1),
+           Time.at(@start_of_interval + interval + 1)
+         ).size.should == 1
+
+         sensor.timeline_within(
+           Time.at(@start_of_interval - 1),
+           Time.at(@start_of_interval + interval + 1)
+         ).size.should == 2
+       end
+
+       Timecop.freeze(@start_of_interval + interval + 2) do
+         sensor.timeline_within(
+           Time.at(@start_of_interval + interval + 1),
+           Time.at(@start_of_interval + interval + 2)
+         ).size.should == 0
+       end
+     end
+
+     context "to avoid getting too much data" do
+       let(:max) {PulseMeter::Sensor::Timeline::MAX_TIMESPAN_POINTS}
+
+       it "should skip some points not to exceed MAX_TIMESPAN_POINTS" do
+         count = max * 2
+         sensor.timeline_within(
+           Time.at(@start_of_interval - 1),
+           Time.at(@start_of_interval + count * interval)
+         ).size.should < max
+       end
+
+       it "should not skip any points when timeline original size is less than MAX_TIMESPAN_POINTS" do
+         count = max - 1
+         sensor.timeline_within(
+           Time.at(@start_of_interval - 1),
+           Time.at(@start_of_interval + count * interval)
+         ).size.should == count
+       end
+
+       it "should give full data in case skip_optimization parameter set to true" do
+         count = max * 2
+         sensor.timeline_within(
+           Time.at(@start_of_interval - 1),
+           Time.at(@start_of_interval + count * interval),
+           true
+         ).size.should == count
+       end
+     end
+   end
+
+   describe "#timeline" do
+     it "should raise exception if passed interval is not a positive integer" do
+       [:q, nil, -1].each do |bad_interval|
+         expect{ sensor.timeline(bad_interval) }.to raise_exception(ArgumentError)
+       end
+     end
+
+     it "should request timeline within interval from given number of seconds ago till now" do
+       Timecop.freeze do
+         now = Time.now
+         ago = interval * 100
+         sensor.timeline(ago).should == sensor.timeline_within(now - ago, now)
+       end
+     end
+
+     it "should return array of results containing as many results as there are sensor interval beginnings in the passed interval" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       Timecop.freeze(@start_of_interval + interval){ sensor.event(sample_event) }
+
+       Timecop.freeze(@start_of_interval + interval + 1) do
+         sensor.timeline(2).size.should == 1
+       end
+       Timecop.freeze(@start_of_interval + interval + 2) do
+         sensor.timeline(1).size.should == 0
+       end
+       Timecop.freeze(@start_of_interval + interval + 1) do
+         sensor.timeline(2 + interval).size.should == 2
+       end
+     end
+   end
+
+   describe "#drop_within" do
+     it "should raise exception unless both arguments are Time objects" do
+       [:q, nil, -1].each do |bad_value|
+         expect{ sensor.drop_within(Time.now, bad_value) }.to raise_exception(ArgumentError)
+         expect{ sensor.drop_within(bad_value, Time.now) }.to raise_exception(ArgumentError)
+       end
+     end
+
+     it "should drop as many raw results as there are sensor interval beginnings in the passed interval" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       Timecop.freeze(@start_of_interval + interval){ sensor.event(sample_event) }
+
+       future = @start_of_interval + interval * 3
+       Timecop.freeze(future) do
+         sensor.drop_within(
+           Time.at(@start_of_interval + interval - 1),
+           Time.at(@start_of_interval + interval + 1)
+         ).should == 1
+
+         data = sensor.timeline_within(
+           Time.at(@start_of_interval + interval - 1),
+           Time.at(@start_of_interval + interval + 1)
+         )
+         data.size.should == 1
+         data.first.value.should be_nil # since data is dropped
+
+       end
+
+       Timecop.freeze(@start_of_interval + interval + 2) do
+         sensor.drop_within(
+           Time.at(@start_of_interval + interval + 1),
+           Time.at(@start_of_interval + interval + 2)
+         ).should == 0
+       end
+     end
+
+     it "should drop as many reduced results as there are sensor interval beginnings in the passed interval" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       Timecop.freeze(@start_of_interval + interval){ sensor.event(sample_event) }
+
+       future = @start_of_interval
+       Timecop.freeze(future) do
+         sensor.reduce_all_raw
+         sensor.drop_within(
+           Time.at(@start_of_interval + interval - 1),
+           Time.at(@start_of_interval + interval + 1)
+         ).should == 1
+
+         data = sensor.timeline_within(
+           Time.at(@start_of_interval + interval - 1),
+           Time.at(@start_of_interval + interval + 1)
+         )
+         data.size.should == 1
+         data.first.value.should be_nil # since data is dropped
+
+       end
+
+       Timecop.freeze(@start_of_interval + interval + 2) do
+         sensor.drop_within(
+           Time.at(@start_of_interval + interval + 1),
+           Time.at(@start_of_interval + interval + 2)
+         ).should == 0
+       end
+     end
+   end
+
+   describe "SensorData value for an interval" do
+     def check_sensor_data(sensor, value)
+       data = sensor.timeline(2).first
+       data.value.should be_generally_equal(sensor.deflate_safe(value))
+       data.start_time.to_i.should == @interval_id
+     end
+
+     it "should contain summarized value stored by data_key for reduced intervals" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       sensor.reduce(@interval_id)
+       Timecop.freeze(@start_of_interval + 1){
+         check_sensor_data(sensor, redis.get(sensor.data_key(@interval_id)))
+       }
+     end
+
+     it "should contain summarized value based on raw data for intervals not yet reduced" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       Timecop.freeze(@start_of_interval + 1){
+         check_sensor_data(sensor, sensor.summarize(@raw_data_key))
+       }
+     end
+
+     it "should contain nil for intervals without any data" do
+       Timecop.freeze(@start_of_interval + 1) {
+         check_sensor_data(sensor, nil)
+       }
+     end
+   end
+
+   describe "#cleanup" do
+     it "should remove all sensor data (raw data, reduced data, annotations) from redis" do
+       Timecop.freeze(@start_of_interval){ sensor.event(sample_event) }
+       sensor.reduce(@interval_id)
+       Timecop.freeze(@start_of_interval + interval){ sensor.event(sample_event) }
+       sensor.annotate("Fooo sensor")
+
+       sensor.cleanup
+       redis.keys('*').should be_empty
+     end
+   end
+
+ end
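Taken together, the shared examples describe the lifecycle of every timelined sensor: #event/#event_at write raw data into per-interval buckets, #reduce/#reduce_all_raw fold buckets older than :reduce_delay into summarized values stored under data_key, and #timeline/#timeline_within read the result back as SensorData points with start_time and value. A minimal lifecycle sketch, assuming a timelined counter and the same option keys the examples use (the concrete values, require path, and Redis setup are illustrative):

    require 'pulse_meter_core'
    require 'redis'

    PulseMeter.redis = Redis.new

    requests = PulseMeter::Sensor::Timelined::Counter.new(:requests,
      :interval     => 5,      # bucket width in seconds, as in the shared examples
      :ttl          => 10_000, # lifetime of reduced values
      :raw_data_ttl => 3_000,  # lifetime of raw data awaiting reduction
      :reduce_delay => 3       # how long an interval stays "fresh"
    )

    requests.event(1)                       # goes into the current bucket
    requests.reduce_all_raw                 # typically run periodically by a worker
    requests.timeline(60).each do |point|   # SensorData for the last 60 seconds
      puts "#{point.start_time}: #{point.value}"
    end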