fresh_redis 0.0.5 → 0.0.6
- data/Gemfile +7 -0
- data/Gemfile.lock +9 -1
- data/README.md +19 -1
- data/fresh_redis.gemspec +1 -0
- data/lib/fresh_redis/hash.rb +10 -6
- data/lib/fresh_redis/key.rb +10 -8
- data/lib/fresh_redis/version.rb +1 -1
- data/lib/fresh_redis.rb +0 -14
- data/spec/fresh_redis/hash_spec.rb +38 -65
- data/spec/fresh_redis/key_spec.rb +14 -11
- data/spec/fresh_redis/string_spec.rb +23 -19
- metadata +20 -4
data/Gemfile
CHANGED
@@ -2,3 +2,10 @@ source 'https://rubygems.org'
 
 # Specify your gem's dependencies in fresh_redis.gemspec
 gemspec
+
+# HAX to allow native file change detection to work on linux AND OSX
+# from https://github.com/carlhuda/bundler/issues/663#issuecomment-2849045
+group :development do
+  gem 'rb-fsevent', :require => RUBY_PLATFORM.include?('darwin') && 'rb-fsevent'
+  gem 'rb-inotify', :require => RUBY_PLATFORM.include?('linux') && 'rb-inotify'
+end
data/Gemfile.lock
CHANGED
@@ -1,13 +1,14 @@
 PATH
   remote: .
   specs:
-    fresh_redis (0.0.5)
+    fresh_redis (0.0.6)
       redis
 
 GEM
   remote: https://rubygems.org/
   specs:
     diff-lcs (1.1.3)
+    ffi (1.1.5)
     guard (1.4.0)
       listen (>= 0.4.2)
       thor (>= 0.14.6)
@@ -17,6 +18,9 @@ GEM
     listen (0.5.3)
     mock_redis (0.5.2)
     rake (0.9.2.2)
+    rb-fsevent (0.9.2)
+    rb-inotify (0.8.8)
+      ffi (>= 0.5.0)
     redis (3.0.2)
     rspec (2.11.0)
       rspec-core (~> 2.11.0)
@@ -27,6 +31,7 @@ GEM
       diff-lcs (~> 1.1.3)
     rspec-mocks (2.11.3)
     thor (0.16.0)
+    timecop (0.5.2)
 
 PLATFORMS
   ruby
@@ -36,4 +41,7 @@ DEPENDENCIES
   guard-rspec (= 2.1.0)
   mock_redis (= 0.5.2)
   rake (= 0.9.2.2)
+  rb-fsevent
+  rb-inotify
   rspec
+  timecop (= 0.5.2)
data/README.md
CHANGED
@@ -24,7 +24,7 @@ Or install it yourself as:
 
 ## Usage
 
-### Simple
+### Simple counts
 
 ```ruby
 require "redis"
@@ -45,6 +45,14 @@ fresh.fsum "failed_login" # will return 3
 fresh.fsum "failed_login" # will return 2, cause the first incr has expired by now
 ```
 
+### Hash operations
+
+```ruby
+# TODO
+```
+
+TODO note about handling of deletes/nil values and :force option on `fhdel` operation
+
 ### Tweaking _"freshness"_ and _"granularity"_.
 
 Think of it like stock rotation at your local supermarket. Freshness is how long we'll keep food around for before throwing it out, granularity is what batches we'll throw old food out together as. Something like _"we'll keep food around for a week, but we'll throw out everything for the same day at the same time."_ This is a performance trade off. Smaller granularity means more precise expiration of data, at the expense of having to store, retrieve, and check more buckets of data to get the aggregate value.
@@ -61,6 +69,14 @@ fresh.fincr "recent_posts:#{user.id}", :freshness => 600, :granularity => 30
 fresh.fsum "recent_posts:#{user.id}", :freshness => 600, :granularity => 30
 ```
 
+# Recipes
+
+## Tracking user signin attempts count over the last hour
+TODO
+
+## Tracking dropped requests for the last day
+TODO
+
 ## Contributing
 
 1. Fork it
@@ -71,3 +87,5 @@ fresh.fsum "recent_posts:#{user.id}", :freshness => 600, :granularity => 30
 
 ## Who the hell?
 I blame [@madlep](http://twitter.com/madlep) aka Julian Doherty. Send hate mail to [madlep@madlep.com](mailto:madlep@madlep.com), or deface [madlep.com](http://madlep.com) in protest
+
+Thanks to [chendo](https://github.com/chendo) for initial hash operations.
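The README section added above leaves the hash operations documentation as a TODO. Purely as an illustrative sketch (not taken from the README), here is how those operations might be used, inferred from the method signatures in lib/fresh_redis/hash.rb and the expectations in spec/fresh_redis/hash_spec.rb further down; the key and field names are invented for the example:

```ruby
require "redis"
require "fresh_redis"

fresh = FreshRedis.new(Redis.new)

# Write a field into the hash bucket for the current normalized timestamp
fresh.fhset "recent_sessions", "user:42", "10.0.0.1"

# Most recent value for the field across all live buckets
fresh.fhget "recent_sessions", "user:42"     # => "10.0.0.1"

# All field/value pairs merged across all live buckets
fresh.fhgetall "recent_sessions"             # => {"user:42" => "10.0.0.1"}

# Remove the field from every live bucket
fresh.fhdel "recent_sessions", "user:42"

# :freshness and :granularity options are accepted, as with fincr/fsum
fresh.fhset "recent_sessions", "user:42", "10.0.0.1", :freshness => 600, :granularity => 30
```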
data/fresh_redis.gemspec
CHANGED
data/lib/fresh_redis/hash.rb
CHANGED
@@ -4,7 +4,7 @@ class FreshRedis
     def fhset(key, hash_key, value, options={})
       key = build_key(key, options)
       @redis.multi do
-        @redis.hset(key.redis_key, hash_key,
+        @redis.hset(key.redis_key, hash_key, value)
         @redis.expire(key.redis_key, key.freshness)
       end
     end
@@ -19,9 +19,7 @@ class FreshRedis
       }
 
       # find the first non-nil value
-
-
-      un_n(most_recent_value)
+      bucket_values.find{|e| e }
     end
 
     def fhgetall(key, options={})
@@ -37,11 +35,17 @@ class FreshRedis
         acc.merge(bucket_hash)
       }
 
-      merged_values.reject{ |key, value|
+      merged_values.reject{ |key, value| !value }
     end
 
     def fhdel(key, hash_key, options={})
-
+      key = build_key(key, options)
+
+      bucket_values = @redis.pipelined {
+        key.timestamp_buckets.each do |bucket_key|
+          @redis.hdel(bucket_key, hash_key)
+        end
+      }
     end
   end
 end
data/lib/fresh_redis/key.rb
CHANGED
@@ -1,5 +1,8 @@
+require 'time'
+
 class FreshRedis
   class Key
+    # TODO remove concept of time from a key. Just be about redis key, freshness, granularity
 
     DEFAULT_OPTIONS = {
       :freshness => 60 * 60, # 1 hour
@@ -14,27 +17,27 @@ class FreshRedis
       base_key = args[0]
 
       options = DEFAULT_OPTIONS.merge(args[1] || {})
-      options[:t] ||= Time.now.to_i
 
-      self.new(base_key, options[:
+      self.new(base_key, options[:freshness], options[:granularity])
     end
 
     attr_reader :freshness
 
-    def initialize(base_key,
+    def initialize(base_key, freshness, granularity)
       @base_key = base_key
-      @t = t
       @freshness = freshness
       @granularity = granularity
     end
 
     def redis_key
-      [@base_key, normalize_time(
+      [@base_key, normalize_time(Time.now.to_i, @granularity)].join(":")
     end
 
     def timestamp_buckets
-
-
+      t = Time.now.to_i
+
+      from = normalize_time(t - @freshness, @granularity)
+      to = normalize_time(t, @granularity)
       (from..to).step(@granularity).map{|timestamp| [@base_key, timestamp].join(":") }
     end
 
@@ -42,7 +45,6 @@ class FreshRedis
       same = true
      same &= Key === other
       same &= @base_key == other.instance_variable_get(:@base_key)
-      same &= @t == other.instance_variable_get(:@t)
       same &= @freshness == other.instance_variable_get(:@freshness)
       same &= @granularity == other.instance_variable_get(:@granularity)
       same
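As context for the Key rework above: timestamp_buckets now derives its range from Time.now instead of a stored @t. A minimal standalone sketch of the bucket-key scheme follows, assuming normalize_time floors a Unix timestamp to a multiple of the granularity (normalize_time itself is not shown in this diff, so that behaviour is inferred from the specs):

```ruby
# Assumed stand-in for FreshRedis::Key#normalize_time: floor to the granularity
def normalize_time(t, granularity)
  t - (t % granularity)
end

base_key    = "failed_login"
freshness   = 600  # keep 10 minutes of data
granularity = 60   # one bucket per minute
t = Time.now.to_i

from = normalize_time(t - freshness, granularity)
to   = normalize_time(t, granularity)

# One Redis key per bucket, oldest to newest, e.g. "failed_login:1350456780"
buckets = (from..to).step(granularity).map { |ts| [base_key, ts].join(":") }
```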
data/lib/fresh_redis/version.rb
CHANGED
data/lib/fresh_redis.rb
CHANGED
@@ -7,8 +7,6 @@ class FreshRedis
   include Hash
   include String
 
-  NIL_VALUE = "__FR_NIL__"
-
   def initialize(redis, options={})
     @redis = redis
     @options = options
@@ -19,16 +17,4 @@ class FreshRedis
     Key.build(base_key, options)
   end
 
-  private
-  def n(value)
-    value || NIL_VALUE
-  end
-
-  def un_n(value)
-    n?(value) ? nil : value
-  end
-
-  def n?(value)
-    value == NIL_VALUE
-  end
 end
data/spec/fresh_redis/hash_spec.rb
CHANGED
@@ -1,112 +1,85 @@
 require 'fresh_redis'
 require 'mock_redis'
+require 'timecop'
 
 describe FreshRedis do
   subject{ FreshRedis.new(mock_redis) }
-  let(:mock_redis) { MockRedis.new }
-  let(:now) { Time.new(2012, 9, 27, 15, 40, 56, "+10:00")
-  let(:normalized_now_minute) { Time.new(2012, 9, 27, 15, 40, 0, "+10:00")
-  let(:normalized_one_minute_ago) { Time.new(2012, 9, 27, 15, 39, 0, "+10:00")
-  let(:normalized_two_minutes_ago) { Time.new(2012, 9, 27, 15, 38, 0, "+10:00")
-  let(:normalized_old) { Time.new(2012, 9, 27, 14, 38, 0, "+10:00")
+  let!(:mock_redis) { MockRedis.new }
+  let(:now) { Time.new(2012, 9, 27, 15, 40, 56, "+10:00") }
+  let(:normalized_now_minute) { Time.new(2012, 9, 27, 15, 40, 0, "+10:00") }
+  let(:normalized_one_minute_ago) { Time.new(2012, 9, 27, 15, 39, 0, "+10:00") }
+  let(:normalized_two_minutes_ago) { Time.new(2012, 9, 27, 15, 38, 0, "+10:00") }
+  let(:normalized_old) { Time.new(2012, 9, 27, 14, 38, 0, "+10:00") }
 
   context "hash keys" do
+    before(:each) { Timecop.travel(now) }
+    after(:each) { Timecop.return }
 
     describe "#fhset" do
       it "sets a value for a key in a hash for the normalized timestamp" do
-        subject.fhset "foo", "bar", "value"
-        subject.fhset "foo", "bar", "newer_value"
-        subject.fhset "foo", "bar", "different_bucket"
+        Timecop.freeze(now - 3) { subject.fhset "foo", "bar", "value" }
+        Timecop.freeze(now) { subject.fhset "foo", "bar", "newer_value" }
+        Timecop.freeze(now - 60) { subject.fhset "foo", "bar", "different_bucket" } # different normalized key
 
-        mock_redis.data["foo:#{normalized_now_minute}"].should == {"bar" => "newer_value"}
+        mock_redis.data["foo:#{normalized_now_minute.to_i}"].should == {"bar" => "newer_value"}
       end
 
-      it "sets a
-        subject.fhset "foo", "bar", nil
+      it "sets a nil value ok" do
+        Timecop.freeze(now) { subject.fhset "foo", "bar", nil }
 
-        mock_redis.data["foo:#{normalized_now_minute}"].should == {"bar" =>
+        mock_redis.data["foo:#{normalized_now_minute.to_i}"].should == {"bar" => "" }
      end
 
       it "sets the freshness as the expiry" do
         # relying on mock_redis's time handling here - which converts to/from using Time.now Possible flakey temporal breakage potential
-        subject.fhset "foo", "bar", "baz", :freshness => 3600
+        subject.fhset "foo", "bar", "baz", :freshness => 3600
 
-        mock_redis.ttl("foo:#{normalized_now_minute}").should == 3600
+        mock_redis.ttl("foo:#{normalized_now_minute.to_i}").should == 3600
       end
     end
 
     describe "#fhdel" do
-      it "
-        subject.fhset "foo", "bar", "value"
-        subject.fhset "foo", "
+      it "removes the field from all timestamp buckets" do
+        Timecop.travel(now) { subject.fhset "foo", "bar", "value" }
+        Timecop.travel(now) { subject.fhset "foo", "baz", "don't touch" }
+        Timecop.travel(now - 60) { subject.fhset "foo", "bar", "different_bucket" }
+        Timecop.travel(now - 60) { subject.fhset "foo", "baz", "I shouldn't be returned" }
 
-        subject.fhdel "foo", "bar"
+        Timecop.travel(now) { subject.fhdel "foo", "bar" } # Should only change the most recent bucket
 
-        mock_redis.data["foo:#{normalized_now_minute}"].should == {
-        mock_redis.data["foo:#{normalized_one_minute_ago}"].should == {
+        mock_redis.data["foo:#{normalized_now_minute.to_i}"].should == {"baz" => "don't touch"}
+        mock_redis.data["foo:#{normalized_one_minute_ago.to_i}"].should == {"baz" => "I shouldn't be returned"}
       end
     end
 
     describe "#fhget" do
       it "gets the most recent value for the field across timestamped buckets" do
-        mock_redis.hset "foo:#{normalized_now_minute}", "notbar", "francis"
-        mock_redis.hset "foo:#{normalized_one_minute_ago}", "bar", "bill"
-        mock_redis.hset "foo:#{normalized_two_minutes_ago}", "bar", "louis"
+        mock_redis.hset "foo:#{normalized_now_minute.to_i}", "notbar", "francis"
+        mock_redis.hset "foo:#{normalized_one_minute_ago.to_i}", "bar", "bill"
+        mock_redis.hset "foo:#{normalized_two_minutes_ago.to_i}", "bar", "louis"
 
-        subject.fhget("foo", "bar"
-      end
-
-      it "returns nil if the most recent value is the nil placeholder" do
-        mock_redis.hset "foo:#{normalized_now_minute}", "notbar", "francis"
-        mock_redis.hset "foo:#{normalized_one_minute_ago}", "bar", FreshRedis::NIL_VALUE
-        mock_redis.hset "foo:#{normalized_two_minutes_ago}", "bar", "louis"
-
-        subject.fhget("foo", "bar", :t => now).should be_nil
-      end
-
-      it "returns the most recent value if a nil placeholder value in an earlier bucket has been overwritten in a later bucket" do
-        mock_redis.hset "foo:#{normalized_now_minute}", "bar", "francis"
-        mock_redis.hset "foo:#{normalized_one_minute_ago}", "bar", FreshRedis::NIL_VALUE
-        mock_redis.hset "foo:#{normalized_two_minutes_ago}", "bar", "louis"
-
-        subject.fhget("foo", "bar", :t => now).should == "francis"
+        subject.fhget("foo", "bar").should == "bill"
       end
 
       it "returns nil if value is not found" do
-        subject.fhget("foo", "bar"
+        subject.fhget("foo", "bar").should be_nil
       end
 
       it "returns nil if the value is in a bucket that has expired" do
         # this should be handled by redis expiry anyway, but verify code is behaving as expected and not querying more data than needed
-        mock_redis.hset "foo:#{normalized_old}", "bar", "louis"
-        subject.fhget("foo", "bar"
+        mock_redis.hset "foo:#{normalized_old.to_i}", "bar", "louis"
+        subject.fhget("foo", "bar").should be_nil
      end
     end
 
     describe "#fhgetall" do
       it "merges the values for all keys across timestamp buckets" do
-        mock_redis.hset "foo:#{normalized_now_minute}", "bar", "francis"
-        mock_redis.hset "foo:#{normalized_one_minute_ago}", "baz", "zoey"
-        mock_redis.hset "foo:#{
-
-        subject.fhgetall("foo", :t => now).should == { "bar" => "francis", "baz" => "zoey", "boz" => "louis" }
-      end
-
-      it "removes keys that have a nil placeholder value as the most recent value" do
-        mock_redis.hset "foo:#{normalized_now_minute}", "bar", FreshRedis::NIL_VALUE
-        mock_redis.hset "foo:#{normalized_one_minute_ago}", "bar", "zoey"
-        mock_redis.hset "foo:#{normalized_one_minute_ago}", "baz", "louis"
-
-        subject.fhgetall("foo", :t => now).should == { "baz" => "louis" }
-      end
-
-      it "returns the most recent value if a nil placeholder value in an earlier bucket has been overwritten in a later bucket" do
-        mock_redis.hset "foo:#{normalized_now_minute}", "bar", "francis"
-        mock_redis.hset "foo:#{normalized_one_minute_ago}", "bar", FreshRedis::NIL_VALUE
-        mock_redis.hset "foo:#{normalized_two_minutes_ago}", "bar", "louis"
-        mock_redis.hset "foo:#{normalized_one_minute_ago}", "baz", "bill"
+        mock_redis.hset "foo:#{normalized_now_minute.to_i}", "bar", "francis"
+        mock_redis.hset "foo:#{normalized_one_minute_ago.to_i}", "baz", "zoey"
+        mock_redis.hset "foo:#{normalized_one_minute_ago.to_i}", "bar", "I shouldn't be returned"
+        mock_redis.hset "foo:#{normalized_two_minutes_ago.to_i}", "boz", "louis"
 
-        subject.fhgetall("foo"
+        subject.fhgetall("foo").should == { "bar" => "francis", "baz" => "zoey", "boz" => "louis" }
       end
     end
   end
data/spec/fresh_redis/key_spec.rb
CHANGED
@@ -1,8 +1,12 @@
 require 'fresh_redis'
+require 'timecop'
 
 describe FreshRedis::Key do
-  let(:now) { Time.new(2012, 9, 27, 15, 40, 56, "+10:00")
-  let(:normalized_now_minute) { Time.new(2012, 9, 27, 15, 40, 0, "+10:00")
+  let(:now) { Time.new(2012, 9, 27, 15, 40, 56, "+10:00") }
+  let(:normalized_now_minute) { Time.new(2012, 9, 27, 15, 40, 0, "+10:00") }
+
+  before(:each) { Timecop.travel(now) }
+  after(:each) { Timecop.return }
 
   describe ".build" do
     it "complains if no args" do
@@ -10,13 +14,13 @@ describe FreshRedis::Key do
     end
 
     it "just returns the key if a FreshRedis::Key is provided" do
-      key = FreshRedis::Key.new("key", 123, 456
+      key = FreshRedis::Key.new("key", 123, 456)
       FreshRedis::Key.build(key).should == key
     end
 
     it "constructs a FreshRedis::Key with the provided options" do
-      key = FreshRedis::Key.build("key", :
-      key.should == FreshRedis::Key.new("key", 123, 456
+      key = FreshRedis::Key.build("key", :freshness => 123, :granularity => 456)
+      key.should == FreshRedis::Key.new("key", 123, 456)
     end
 
 
@@ -24,7 +28,6 @@ describe FreshRedis::Key do
       key = FreshRedis::Key.build("key")
       key.should == FreshRedis::Key.new(
         "key",
-        Time.now.to_i,
         FreshRedis::Key::DEFAULT_OPTIONS[:freshness],
         FreshRedis::Key::DEFAULT_OPTIONS[:granularity]
       )
@@ -34,12 +37,12 @@ describe FreshRedis::Key do
 
   describe "#redis_key" do
     it "should append the normalized timestamp to the key" do
-      FreshRedis::Key.build("foo", :
+      FreshRedis::Key.build("foo", :granularity => 60).redis_key.should == "foo:#{normalized_now_minute.to_i}"
     end
   end
 
   describe "#timestamp_buckets" do
-    let(:buckets) { FreshRedis::Key.build("foo", :
+    let(:buckets) { FreshRedis::Key.build("foo", :freshness => 600, :granularity => 60).timestamp_buckets }
     it "generates an enumerable over the range" do
       buckets.should be_kind_of(Enumerable)
     end
@@ -49,15 +52,15 @@ describe FreshRedis::Key do
     end
 
     it "has the first timestamp as the maximum freshness" do
-      buckets.first.should == ["foo", normalized_now_minute - 600].join(":")
+      buckets.first.should == ["foo", normalized_now_minute.to_i - 600].join(":")
     end
 
     it "has now as the maximum freshness" do
-      buckets.to_a.last.should == ["foo", normalized_now_minute].join(":")
+      buckets.to_a.last.should == ["foo", normalized_now_minute.to_i].join(":")
     end
 
     it "steps through the normalized timestamps split up by granularity" do
-      buckets.each_with_index{|b, i| b.should == ["foo", normalized_now_minute - 600 + i * 60].join(":") }
+      buckets.each_with_index{|b, i| b.should == ["foo", normalized_now_minute.to_i - 600 + i * 60].join(":") }
     end
   end
 end
data/spec/fresh_redis/string_spec.rb
CHANGED
@@ -1,43 +1,47 @@
 require 'fresh_redis'
 require 'mock_redis'
+require 'timecop'
 
 describe FreshRedis do
   subject{ FreshRedis.new(mock_redis) }
   let(:mock_redis) { MockRedis.new }
-  let(:now) { Time.new(2012, 9, 27, 15, 40, 56, "+10:00")
-  let(:normalized_now_minute) { Time.new(2012, 9, 27, 15, 40, 0, "+10:00")
+  let(:now) { Time.new(2012, 9, 27, 15, 40, 56, "+10:00") }
+  let(:normalized_now_minute) { Time.new(2012, 9, 27, 15, 40, 0, "+10:00") }
+
+  before(:each) { Timecop.travel(now) }
+  after(:each) { Timecop.return }
 
   context "string keys" do
     describe "#fincr" do
       it "should increment the key for the normalized timestamp" do
-        subject.fincr "foo"
-        subject.fincr "foo"
-        subject.fincr "foo"
-        mock_redis.data["foo:#{normalized_now_minute}"].to_i.should == 2
+        Timecop.freeze(now) { subject.fincr "foo" }
+        Timecop.freeze(now + 3) { subject.fincr "foo" }
+        Timecop.freeze(now + 60) { subject.fincr "foo" } # different normalized key
+        mock_redis.data["foo:#{normalized_now_minute.to_i}"].to_i.should == 2
      end
 
       it "should set the freshness as the expiry" do
         # relying on mock_redis's time handling here - which converts to/from using Time.now Possible flakey temporal breakage potential
-        subject.fincr "foo", :freshness => 3600
-        mock_redis.ttl("foo:#{normalized_now_minute}").should == 3600
+        subject.fincr "foo", :freshness => 3600
+        mock_redis.ttl("foo:#{normalized_now_minute.to_i}").should == 3600
       end
     end
 
     describe "#fsum" do
       subject{ FreshRedis.new(mock_redis, :granularity => 10, :freshness => 60) }
       it "should add the values of keys for specified freshness and granularity" do
-
-
-
-
-
-
-
-
-
-
+        Timecop.freeze(now - 60 - 10) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 1) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 2) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 3) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 5) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 8) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 13) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 21) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 34) { subject.fincr "foo" }
+        Timecop.freeze(now - 60 + 55) { subject.fincr "foo" }
 
-        subject.fsum("foo"
+        subject.fsum("foo").should == 9
       end
     end
 
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fresh_redis
 version: !ruby/object:Gem::Version
-  version: 0.0.5
+  version: 0.0.6
 prerelease:
 platform: ruby
 authors:
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2012-10-
+date: 2012-10-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: redis
@@ -91,6 +91,22 @@ dependencies:
     - - '='
       - !ruby/object:Gem::Version
         version: 0.9.2.2
+- !ruby/object:Gem::Dependency
+  name: timecop
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: 0.5.2
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: 0.5.2
 description: Aggregate, expiring, recent data in Redis
 email:
 - madlep@madlep.com
@@ -130,7 +146,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
       segments:
       - 0
-      hash: -
+      hash: -3713423432962146166
 required_rubygems_version: !ruby/object:Gem::Requirement
   none: false
   requirements:
@@ -139,7 +155,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
       segments:
       - 0
-      hash: -
+      hash: -3713423432962146166
 requirements: []
 rubyforge_project:
 rubygems_version: 1.8.23