memcache-client 1.6.3 → 1.6.5
- data/History.txt +23 -0
- data/README.rdoc +11 -9
- data/Rakefile +10 -1
- data/lib/memcache.rb +88 -29
- data/test/test_mem_cache.rb +67 -6
- metadata +4 -16
- data/lib/continuum.rb +0 -77
- data/lib/memcache_util.rb +0 -102
data/History.txt
CHANGED
@@ -1,3 +1,26 @@
+= 1.6.5 (2009-02-27)
+
+* Change memcache-client to multithreaded by default. The mutex does not add significant
+  overhead and it is far too easy, now that Sinatra, Rails and Merb are all thread-safe, to
+  use memcache-client in a thread-unsafe manner. Remove some unnecessary mutexing and add
+  a test to verify heavily multithreaded usage does not act unexpectedly.
+
+* Add optional support for the SystemTimer gem when running on Ruby 1.8.x. This gem is
+  highly recommended - it ensures timeouts actually work and halves the overhead of using
+  timeouts. Using this gem, Ruby 1.8.x is actually faster in my performance tests
+  than Ruby 1.9.x. Just "gem install SystemTimer" and it should be picked up automatically.
+
+= 1.6.4 (2009-02-19)
+
+* Remove native code altogether. The speedup was only 10% on Ruby 1.8.6 and did not work
+  on Ruby 1.9.1.
+
+* Removed memcache_util.rb from the distribution. If you are using it, please copy the code
+  into your own project. The file will live in the github repository for a few more months
+  for this purpose. http://github.com/mperham/memcache-client/raw/7a276089aa3c914e47e3960f9740ac7377204970/lib/memcache_util.rb
+
+* Roll continuum.rb into memcache.rb. The project is again a single Ruby file, with no dependencies.
+
 = 1.6.3 (2009-02-14)
 
 * Remove gem native extension in preference to RubyInline. This allows the gem to install
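A quick sketch of what the 1.6.5 behavior described above looks like in practice (the server address is illustrative; SystemTimer is optional and only matters on Ruby 1.8.x):

  # Optional, Ruby 1.8.x only:  gem install SystemTimer
  require 'memcache'

  # As of 1.6.5 :multithread defaults to true, so one client instance can be
  # shared across threads without passing any extra options.
  CACHE = MemCache.new 'localhost:11211'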
data/README.rdoc
CHANGED
@@ -16,22 +16,24 @@ Just install the gem:
 
 With one server:
 
-  CACHE = MemCache.new 'localhost:11211'
+  CACHE = MemCache.new 'localhost:11211'
 
 Or with multiple servers:
 
-  CACHE = MemCache.new %w[one.example.com:11211 two.example.com:11211]
-
+  CACHE = MemCache.new %w[one.example.com:11211 two.example.com:11211]
+
+
+== Tuning memcache-client
+
+The MemCache.new method takes a number of options which can be useful at times. Please
+read the source comments there for an overview.
 
-See MemCache.new for details. Please note memcache-client is not thread-safe
-by default. You should create a separate instance for each thread in your
-process.
 
 == Using memcache-client with Rails
 
-
-
-
+Rails 2.1+ includes memcache-client out of the box. See ActiveSupport::Cache::MemCacheStore
+and the Rails.cache method for more details.
+
 
 == Questions?
 
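To complement the new "Tuning memcache-client" section, here is a sketch of passing the options that appear in the DEFAULT_OPTIONS diff further down; the namespace value and the logger are illustrative:

  require 'logger'
  require 'memcache'

  CACHE = MemCache.new %w[one.example.com:11211 two.example.com:11211],
                       :namespace   => 'my_app',   # illustrative prefix added to every key
                       :readonly    => false,      # raise on writes when true
                       :multithread => true,       # the 1.6.5 default
                       :failover    => true,       # try another server when one is down
                       :timeout     => 0.5,        # socket read timeout in seconds; nil disables it
                       :logger      => Logger.new($stdout)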
data/Rakefile
CHANGED
@@ -11,6 +11,15 @@ task :install => [:gem] do
   sh "sudo gem install memcache-client-*.gem"
 end
 
+task :clean do
+  sh "rm -f memcache-client-*.gem"
+end
+
+task :publish => [:clean, :gem, :install] do
+  require 'lib/memcache'
+  sh "rubyforge add_release seattlerb memcache-client #{MemCache::VERSION} memcache-client-#{MemCache::VERSION}.gem"
+end
+
 Rake::RDocTask.new do |rd|
   rd.main = "README.rdoc"
   rd.rdoc_files.include("README.rdoc", "lib/**/*.rb")
@@ -23,4 +32,4 @@ task :default => :test
 
 task :rcov do
   `rcov -Ilib test/*.rb`
-end
+end
data/lib/memcache.rb
CHANGED
@@ -2,11 +2,27 @@ $TESTING = defined?($TESTING) && $TESTING
 
 require 'socket'
 require 'thread'
-require 'timeout'
 require 'zlib'
 require 'digest/sha1'
 
-
+begin
+  # Try to use the SystemTimer gem instead of Ruby's timeout library
+  # when running on something that looks like Ruby 1.8.x. See:
+  # http://ph7spot.com/articles/system_timer
+  # We don't want to bother trying to load SystemTimer on jruby and
+  # ruby 1.9+.
+  if !defined?(RUBY_ENGINE)
+    require 'system_timer'
+    MemCacheTimer = SystemTimer
+  else
+    require 'timeout'
+    MemCacheTimer = Timeout
+  end
+rescue LoadError => e
+  puts "[memcache-client] Could not load SystemTimer gem, falling back to Ruby's slower/unsafe timeout library: #{e.message}"
+  require 'timeout'
+  MemCacheTimer = Timeout
+end
 
 ##
 # A Ruby client library for memcached.
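The MemCacheTimer constant set up above gives the rest of the client a single timeout API regardless of which library was loaded; a minimal sketch (the 0.5 second limit and the sleep are illustrative):

  require 'memcache'   # defines MemCacheTimer as either SystemTimer or Timeout

  # SystemTimer and Timeout both respond to timeout(seconds) { ... }, so the
  # caller does not need to know which implementation was picked at load time.
  MemCacheTimer.timeout(0.5) do
    sleep 0.1          # stand-in for a blocking socket call
  end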
@@ -17,7 +33,7 @@ class MemCache
   ##
   # The version of MemCache you are using.
 
-  VERSION = '1.6.3'
+  VERSION = '1.6.5'
 
   ##
   # Default options for the cache object.
@@ -25,7 +41,7 @@ class MemCache
   DEFAULT_OPTIONS = {
     :namespace   => nil,
     :readonly    => false,
-    :multithread => false,
+    :multithread => true,
     :failover    => true,
     :timeout     => 0.5,
     :logger      => nil,
@@ -57,7 +73,7 @@ class MemCache
   attr_reader :servers
 
   ##
-  # Socket timeout limit with this client, defaults to 0.
+  # Socket timeout limit with this client, defaults to 0.5 sec.
   # Set to nil to disable timeouts.
 
   attr_reader :timeout
@@ -81,12 +97,14 @@ class MemCache
   #
   # [:namespace]    Prepends this value to all keys added or retrieved.
   # [:readonly]     Raises an exception on cache writes when true.
-  # [:multithread]  Wraps cache access in a Mutex for thread safety.
+  # [:multithread]  Wraps cache access in a Mutex for thread safety. Defaults to true.
   # [:failover]     Should the client try to failover to another server if the
   #                 first server is down? Defaults to true.
-  # [:timeout]      Time to use as the socket read timeout. Defaults to 0.
-  #                 set to nil to disable timeouts (this is a major performance penalty in Ruby 1.8
+  # [:timeout]      Time to use as the socket read timeout. Defaults to 0.5 sec,
+  #                 set to nil to disable timeouts (this is a major performance penalty in Ruby 1.8,
+  #                 "gem install SystemTimer' to remove most of the penalty).
   # [:logger]       Logger to use for info/debug output, defaults to nil
+  #
   # Other options are ignored.
 
   def initialize(*args)
@@ -120,6 +138,8 @@ class MemCache
 
     logger.info { "memcache-client #{VERSION} #{Array(servers).inspect}" } if logger
 
+    Thread.current[:memcache_client] = self.object_id if !@multithread
+
     self.servers = servers
   end
 
@@ -160,9 +180,6 @@ class MemCache
         weight ||= DEFAULT_WEIGHT
         Server.new self, host, port, weight
       else
-        if server.multithread != @multithread then
-          raise ArgumentError, "can't mix threaded and non-threaded servers"
-        end
         server
       end
     end
@@ -220,6 +237,8 @@ class MemCache
   #   cache["a"] = 1
   #   cache["b"] = 2
   #   cache.get_multi "a", "b" # => { "a" => 1, "b" => 2 }
+  #
+  # Note that get_multi assumes the values are marshalled.
 
   def get_multi(*keys)
     raise MemCacheError, 'No active servers' unless active?
@@ -353,7 +372,6 @@ class MemCache
     raise MemCacheError, "Update of readonly cache" if @readonly
 
     begin
-      @mutex.lock if @multithread
       @servers.each do |server|
         with_socket_management(server) do |socket|
           socket.write "flush_all\r\n"
@@ -364,8 +382,6 @@ class MemCache
       end
     rescue IndexError => err
       handle_error nil, err
-    ensure
-      @mutex.unlock if @multithread
     end
   end
 
@@ -606,6 +622,8 @@ class MemCache
   # failures (but does still apply to unexpectedly lost connections etc.).
 
   def with_socket_management(server, &block)
+    check_multithread_status!
+
     @mutex.lock if @multithread
     retried = false
 
@@ -619,7 +637,7 @@ class MemCache
 
       block.call(socket)
 
-    rescue SocketError => err
+    rescue SocketError, Timeout::Error => err
       logger.warn { "Socket failure: #{err.message}" } if logger
       server.mark_dead(err)
       handle_error(server, err)
@@ -697,6 +715,18 @@ class MemCache
     ((total_servers * Continuum::POINTS_PER_SERVER * server.weight) / Float(total_weight)).floor
   end
 
+  def check_multithread_status!
+    return if @multithread
+
+    if Thread.current[:memcache_client] != self.object_id
+      raise MemCacheError, <<-EOM
+        You are accessing this memcache-client instance from multiple threads but have not enabled multithread support.
+        Normally:  MemCache.new(['localhost:11211'], :multithread => true)
+        In Rails:  config.cache_store = [:mem_cache_store, 'localhost:11211', { :multithread => true }]
+      EOM
+    end
+  end
+
   ##
   # This class represents a memcached server instance.
 
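The guard above is what produces the new multithread error; a sketch of the situation it catches, based on the test_multithread_error test further down (the address is illustrative, and the second thread is expected to raise):

  require 'memcache'

  cache = MemCache.new(['localhost:11211'], :multithread => false)
  cache.set 'a', 1          # fine: same thread that created the client

  Thread.new do
    begin
      cache.set 'b', 2      # trips check_multithread_status!
    rescue MemCache::MemCacheError => e
      puts e.message        # explains how to enable :multithread
    end
  end.join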
@@ -740,7 +770,6 @@ class MemCache
 
     attr_reader :status
 
-    attr_reader :multithread
     attr_reader :logger
 
     ##
@@ -755,9 +784,6 @@ class MemCache
       @port   = port.to_i
       @weight = weight.to_i
 
-      @multithread = memcache.multithread
-      @mutex = Mutex.new
-
       @sock   = nil
       @retry  = nil
       @status = 'NOT CONNECTED'
@@ -787,7 +813,6 @@ class MemCache
     # Returns the connected socket object on success or nil on failure.
 
     def socket
-      @mutex.lock if @multithread
       return @sock if @sock and not @sock.closed?
 
       @sock = nil
@@ -810,8 +835,6 @@ class MemCache
       end
 
       return @sock
-    ensure
-      @mutex.unlock if @multithread
     end
 
     ##
@@ -819,13 +842,10 @@ class MemCache
     # object. The server is not considered dead.
 
     def close
-      @mutex.lock if @multithread
       @sock.close if @sock && !@sock.closed?
       @sock   = nil
       @retry  = nil
       @status = "NOT CONNECTED"
-    ensure
-      @mutex.unlock if @multithread
     end
 
     ##
@@ -854,26 +874,26 @@ end
 class TCPTimeoutSocket
 
   def initialize(host, port, timeout)
-
+    MemCacheTimer.timeout(MemCache::Server::CONNECT_TIMEOUT) do
       @sock = TCPSocket.new(host, port)
       @len = timeout
     end
   end
 
   def write(*args)
-
+    MemCacheTimer.timeout(@len) do
       @sock.write(*args)
     end
   end
 
   def gets(*args)
-
+    MemCacheTimer.timeout(@len) do
       @sock.gets(*args)
     end
   end
 
   def read(*args)
-
+    MemCacheTimer.timeout(@len) do
       @sock.read(*args)
     end
   end
@@ -894,3 +914,42 @@ class TCPTimeoutSocket
     @sock.close
   end
 end
+
+module Continuum
+  POINTS_PER_SERVER = 160 # this is the default in libmemcached
+
+  # Find the closest index in Continuum with value <= the given value
+  def self.binary_search(ary, value, &block)
+    upper = ary.size - 1
+    lower = 0
+    idx = 0
+
+    while(lower <= upper) do
+      idx = (lower + upper) / 2
+      comp = ary[idx].value <=> value
+
+      if comp == 0
+        return idx
+      elsif comp > 0
+        upper = idx - 1
+      else
+        lower = idx + 1
+      end
+    end
+    return upper
+  end
+
+  class Entry
+    attr_reader :value
+    attr_reader :server
+
+    def initialize(val, srv)
+      @value = val
+      @server = srv
+    end
+
+    def inspect
+      "<#{value}, #{server.host}:#{server.port}>"
+    end
+  end
+end
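Since continuum.rb is now rolled into memcache.rb, the Continuum helpers above are reachable from a plain require; a small sketch of the binary search over sorted hash-ring entries (the values are made up, and a real ring would carry Server objects rather than nil):

  require 'memcache'

  # Entries are sorted ascending by #value; binary_search returns the index of
  # the closest entry whose value is <= the lookup value.
  ring = [10, 20, 30].map { |v| Continuum::Entry.new(v, nil) }

  Continuum.binary_search(ring, 20)   # => 1  (exact match)
  Continuum.binary_search(ring, 25)   # => 1  (closest value <= 25)
  Continuum.binary_search(ring, 5)    # => -1 (below the smallest entry)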
data/test/test_mem_cache.rb
CHANGED
@@ -10,9 +10,10 @@ rescue LoadError => e
   puts "Some tests require flexmock, please run `gem install flexmock`"
 end
 
+Thread.abort_on_exception = true
 $TESTING = true
 
-require File.dirname(__FILE__) + '/../lib/memcache'
+require File.dirname(__FILE__) + '/../lib/memcache' if not defined?(MemCache)
 
 class MemCache
 
@@ -79,7 +80,7 @@ end
 
 class FakeServer
 
-
+  attr_accessor :host, :port, :socket, :weight, :multithread, :status
 
   def initialize(socket = nil)
     @closed = false
@@ -87,7 +88,7 @@ class FakeServer
     @port = 11211
     @socket = socket || FakeSocket.new
     @weight = 1
-    @multithread = false
+    @multithread = true
     @status = "CONNECTED"
   end
 
@@ -117,9 +118,9 @@ class TestMemCache < Test::Unit::TestCase
 
   def test_performance
     requirement(memcached_running?, 'A real memcached server must be running for performance testing') do
-      host = Socket.gethostname
 
-      cache = MemCache.new(['localhost:11211',"
+      cache = MemCache.new(['localhost:11211',"127.0.0.1:11211"])
+      cache.flush_all
       cache.add('a', 1, 120)
       with = xprofile 'get' do
         1000.times do
@@ -129,7 +130,7 @@ class TestMemCache < Test::Unit::TestCase
       puts ''
       puts "1000 gets with socket timeout: #{with} sec"
 
-      cache = MemCache.new(['localhost:11211',"
+      cache = MemCache.new(['localhost:11211',"127.0.0.1:11211"], :timeout => nil)
       cache.add('a', 1, 120)
       without = xprofile 'get' do
         1000.times do
@@ -333,6 +334,34 @@ class TestMemCache < Test::Unit::TestCase
     assert !server.alive?
   end
 
+  def test_multithread_error
+    server = FakeServer.new
+    server.multithread = false
+
+    @cache = MemCache.new(['localhost:1'], :multithread => false)
+
+    server.socket.data.write "bogus response\r\nbogus response\r\n"
+    server.socket.data.rewind
+
+    @cache.servers = []
+    @cache.servers << server
+
+    assert_nothing_raised do
+      @cache.set 'a', 1
+    end
+
+    passed = true
+    Thread.new do
+      begin
+        @cache.set 'b', 2
+        passed = false
+      rescue MemCache::MemCacheError => me
+        passed = me.message =~ /multiple threads/
+      end
+    end
+    assert passed
+  end
+
   def test_initialize
     cache = MemCache.new :namespace => 'my_namespace', :readonly => true
 
@@ -946,5 +975,37 @@ class TestMemCache < Test::Unit::TestCase
     return server
   end
 
+  def test_crazy_multithreaded_access
+    requirement(memcached_running?, 'A real memcached server must be running for performance testing') do
+
+      cache = MemCache.new(['localhost:11211', '127.0.0.1:11211'])
+      cache.flush_all
+      workers = []
+
+      # Have a bunch of threads perform a bunch of operations at the same time.
+      # Verify the result of each operation to ensure the request and response
+      # are not intermingled between threads.
+      10.times do
+        workers << Thread.new do
+          100.times do
+            cache.set('a', 9)
+            cache.set('b', 11)
+            cache.add('c', 10, 0, true)
+            assert_equal "NOT_STORED\r\n", cache.add('a', 11)
+            assert_equal({ 'a' => 9, 'b' => 11 }, cache.get_multi(['a', 'b']))
+            inc = cache.incr('c', 10)
+            assert_equal 0, inc % 5
+            assert inc > 14
+            assert cache.decr('c', 5) > 14
+            assert_equal 11, cache.get('b')
+          end
+        end
+      end
+
+      workers.each { |w| w.join }
+      cache.flush_all
+    end
+  end
+
 end
 
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: memcache-client
 version: !ruby/object:Gem::Version
-  version: 1.6.3
+  version: 1.6.5
 platform: ruby
 authors:
 - Eric Hodel
@@ -11,20 +11,10 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2009-02-
+date: 2009-02-27 00:00:00 -06:00
 default_executable:
-dependencies:
-
-  name:
-  - RubyInline
-  type: :runtime
-  version_requirement:
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: "0"
-    version:
+dependencies: []
+
 description: A Ruby library for accessing memcached.
 email: mperham@gmail.com
 executables: []
@@ -38,9 +28,7 @@ files:
 - LICENSE.txt
 - History.txt
 - Rakefile
-- lib/continuum.rb
 - lib/memcache.rb
-- lib/memcache_util.rb
 has_rdoc: false
 homepage: http://github.com/mperham/memcache-client
 post_install_message:
data/lib/continuum.rb
DELETED
@@ -1,77 +0,0 @@
-module Continuum
-  POINTS_PER_SERVER = 160 # this is the default in libmemcached
-
-  class << self
-
-    begin
-      require 'inline'
-      inline do |builder|
-        builder.c <<-EOM
-        int binary_search(VALUE ary, unsigned int r) {
-            int upper = RARRAY_LEN(ary) - 1;
-            int lower = 0;
-            int idx = 0;
-            ID value = rb_intern("value");
-
-            while (lower <= upper) {
-                idx = (lower + upper) / 2;
-
-                VALUE continuumValue = rb_funcall(RARRAY_PTR(ary)[idx], value, 0);
-                unsigned int l = NUM2UINT(continuumValue);
-                if (l == r) {
-                    return idx;
-                }
-                else if (l > r) {
-                    upper = idx - 1;
-                }
-                else {
-                    lower = idx + 1;
-                }
-            }
-            return upper;
-        }
-        EOM
-      end
-    rescue Exception => e
-      puts "Unable to generate native code, falling back to Ruby: #{e.message}"
-
-      # slow but pure ruby version
-      # Find the closest index in Continuum with value <= the given value
-      def binary_search(ary, value, &block)
-        upper = ary.size - 1
-        lower = 0
-        idx = 0
-
-        while(lower <= upper) do
-          idx = (lower + upper) / 2
-          comp = ary[idx].value <=> value
-
-          if comp == 0
-            return idx
-          elsif comp > 0
-            upper = idx - 1
-          else
-            lower = idx + 1
-          end
-        end
-        return upper
-      end
-
-    end
-  end
-
-
-  class Entry
-    attr_reader :value
-    attr_reader :server
-
-    def initialize(val, srv)
-      @value = val
-      @server = srv
-    end
-
-    def inspect
-      "<#{value}, #{server.host}:#{server.port}>"
-    end
-  end
-end
data/lib/memcache_util.rb
DELETED
@@ -1,102 +0,0 @@
-##
-# A utility wrapper around the MemCache client to simplify cache access. All
-# methods silently ignore MemCache errors.
-
-module Cache
-
-  ##
-  # Try to return a logger object that does not rely
-  # on ActiveRecord for logging.
-  def self.logger
-    @logger ||= if defined? Rails.logger # Rails 2.1 +
-      Rails.logger
-    elsif defined? RAILS_DEFAULT_LOGGER # Rails 1.2.2 +
-      RAILS_DEFAULT_LOGGER
-    else
-      ActiveRecord::Base.logger # ... very old Rails.
-    end
-  end
-  ##
-  # Returns the object at +key+ from the cache if successful, or nil if either
-  # the object is not in the cache or if there was an error attermpting to
-  # access the cache.
-  #
-  # If there is a cache miss and a block is given the result of the block will
-  # be stored in the cache with optional +expiry+, using the +add+ method rather
-  # than +set+.
-
-  def self.get(key, expiry = 0)
-    start_time = Time.now
-    value = CACHE.get key
-    elapsed = Time.now - start_time
-    logger.debug('MemCache Get (%0.6f) %s' % [elapsed, key])
-    if value.nil? and block_given? then
-      value = yield
-      add key, value, expiry
-    end
-    value
-  rescue MemCache::MemCacheError => err
-    logger.debug "MemCache Error: #{err.message}"
-    if block_given? then
-      value = yield
-      put key, value, expiry
-    end
-    value
-  end
-
-  ##
-  # Sets +value+ in the cache at +key+, with an optional +expiry+ time in
-  # seconds.
-
-  def self.put(key, value, expiry = 0)
-    start_time = Time.now
-    CACHE.set key, value, expiry
-    elapsed = Time.now - start_time
-    logger.debug('MemCache Set (%0.6f) %s' % [elapsed, key])
-    value
-  rescue MemCache::MemCacheError => err
-    ActiveRecord::Base.logger.debug "MemCache Error: #{err.message}"
-    nil
-  end
-
-  ##
-  # Sets +value+ in the cache at +key+, with an optional +expiry+ time in
-  # seconds. If +key+ already exists in cache, returns nil.
-
-  def self.add(key, value, expiry = 0)
-    start_time = Time.now
-    response = CACHE.add key, value, expiry
-    elapsed = Time.now - start_time
-    logger.debug('MemCache Add (%0.6f) %s' % [elapsed, key])
-    (response == "STORED\r\n") ? value : nil
-  rescue MemCache::MemCacheError => err
-    ActiveRecord::Base.logger.debug "MemCache Error: #{err.message}"
-    nil
-  end
-
-  ##
-  # Deletes +key+ from the cache in +delay+ seconds.
-
-  def self.delete(key, delay = nil)
-    start_time = Time.now
-    CACHE.delete key, delay
-    elapsed = Time.now - start_time
-    logger.debug('MemCache Delete (%0.6f) %s' %
-                 [elapsed, key])
-    nil
-  rescue MemCache::MemCacheError => err
-    logger.debug "MemCache Error: #{err.message}"
-    nil
-  end
-
-  ##
-  # Resets all connections to MemCache servers.
-
-  def self.reset
-    CACHE.reset
-    logger.debug 'MemCache Connections Reset'
-    nil
-  end
-
-end
-
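For anyone copying the removed memcache_util.rb into their own project as the 1.6.4 notes suggest, its interface is unchanged. A sketch of typical use inside a Rails-style app (Cache.logger expects Rails or ActiveRecord to supply a logger; the key, expiry, and block body are illustrative):

  require 'memcache'
  require 'memcache_util'      # your local copy of the removed file

  CACHE = MemCache.new 'localhost:11211'

  # Read-through caching: on a miss the block runs, the result is stored via
  # add with the given expiry (300 seconds here), and the value is returned.
  name = Cache.get('user:42:name', 300) do
    'value that was expensive to compute'
  end

  Cache.put    'user:42:name', 'fresh value', 300
  Cache.delete 'user:42:name'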