memcache-client 1.6.5 → 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/{History.txt → History.rdoc} +26 -0
- data/lib/memcache.rb +203 -14
- data/test/test_mem_cache.rb +118 -2
- metadata +3 -3
data/{History.txt → History.rdoc}
CHANGED
@@ -1,3 +1,29 @@
+= 1.7.0 (2009-03-08)
+
+* Go through the memcached protocol document and implement any commands not already implemented:
+  - cas
+  - append
+  - prepend
+  - replace
+
+  Append and prepend only work with raw data since it makes no sense to concatenate two Marshalled
+  values together. The cas functionality should be considered a prototype. Since I don't have an
+  application which uses +cas+, I'm not sure what semantic sugar the API should provide. Should it
+  retry if the value was changed? Should it massage the returned string into true/false? Feedback
+  would be appreciated.
+
+* Add fetch method which provides a method very similar to ActiveSupport::Cache::Store#fetch,
+  basically a wrapper around get and add. (djanowski)
+
+* Implement the flush_all delay parameter, to allow a large memcached farm to be flushed gradually.
+
+* Implement the noreply flag, which tells memcached not to reply in operations which don't
+  need a reply, i.e. set/add/delete/flush_all.
+
+* The only known functionality not implemented anymore is the <flags> parameter to the storage
+  commands. This would require modification of the API method signatures. If someone can come
+  up with a clean way to implement it, I would be happy to consider including it.
+
 = 1.6.5 (2009-02-27)
 
 * Change memcache-client to multithreaded by default. The mutex does not add significant
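For orientation, a short usage sketch of the 1.7.0 additions listed above. The server address, keys and values are invented for illustration; the method signatures are taken from the diff that follows.

  require 'memcache'

  cache = MemCache.new('localhost:11211')

  # fetch: get with a block fallback; the block result is stored via add on a miss
  greeting = cache.fetch('greeting', 60) { 'hello' }

  # append/prepend operate on raw values only
  cache.set('log', 'b', 0, true)
  cache.append('log', 'c')
  cache.prepend('log', 'a')     # raw value is now "abc"

  # cas: optimistic update; nil on a miss, "STORED\r\n" or "EXISTS\r\n" otherwise
  cache.set('counter', 'x')
  cache.cas('counter') { |value| value + 'x' }

  # flush a farm gradually: the Nth server is flushed N*30 seconds from now
  cache.flush_all(30)

  # fire-and-forget writes (memcached 1.2.5 and later only)
  quiet = MemCache.new('localhost:11211', :no_reply => true)
  quiet.set('greeting', 'hi')   # no reply is read back from the server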
data/lib/memcache.rb
CHANGED
@@ -33,7 +33,7 @@ class MemCache
   ##
   # The version of MemCache you are using.
 
-  VERSION = '1.6.5'
+  VERSION = '1.7.0'
 
   ##
   # Default options for the cache object.
@@ -45,6 +45,7 @@ class MemCache
     :failover => true,
     :timeout => 0.5,
     :logger => nil,
+    :no_reply => false,
   }
 
   ##
@@ -89,6 +90,12 @@ class MemCache
 
   attr_reader :logger
 
+  ##
+  # Don't send or look for a reply from the memcached server for write operations.
+  # Please note this feature only works in memcached 1.2.5 and later. Earlier
+  # versions will reply with "ERROR".
+  attr_reader :no_reply
+
   ##
   # Accepts a list of +servers+ and a list of +opts+. +servers+ may be
   # omitted. See +servers=+ for acceptable server list arguments.
@@ -104,6 +111,9 @@ class MemCache
   # set to nil to disable timeouts (this is a major performance penalty in Ruby 1.8,
   # "gem install SystemTimer' to remove most of the penalty).
   # [:logger] Logger to use for info/debug output, defaults to nil
+  # [:no_reply] Don't bother looking for a reply for write operations (i.e. they
+  #             become 'fire and forget'), memcached 1.2.5 and later only, speeds up
+  #             set/add/delete/incr/decr significantly.
   #
   # Other options are ignored.
 
@@ -134,6 +144,7 @@ class MemCache
     @timeout = opts[:timeout]
     @failover = opts[:failover]
     @logger = opts[:logger]
+    @no_reply = opts[:no_reply]
     @mutex = Mutex.new if @multithread
 
     logger.info { "memcache-client #{VERSION} #{Array(servers).inspect}" } if logger
@@ -212,8 +223,8 @@ class MemCache
 
   def get(key, raw = false)
     with_server(key) do |server, cache_key|
+      logger.debug { "get #{key} from #{server.inspect}: #{value ? value.to_s.size : 'nil'}" } if logger
       value = cache_get server, cache_key
-      logger.debug { "GET #{key} from #{server.inspect}: #{value ? value.to_s.size : 'nil'}" } if logger
       return nil if value.nil?
       value = Marshal.load value unless raw
       return value
@@ -222,6 +233,25 @@ class MemCache
     handle_error nil, err
   end
 
+  ##
+  # Performs a +get+ with the given +key+. If
+  # the value does not exist and a block was given,
+  # the block will be called and the result saved via +add+.
+  #
+  # If you do not provide a block, using this
+  # method is the same as using +get+.
+  #
+  def fetch(key, expiry = 0, raw = false)
+    value = get(key, raw)
+
+    if value.nil? && block_given?
+      value = yield
+      add(key, value, expiry, raw)
+    end
+
+    value
+  end
+
   ##
   # Retrieves multiple values from memcached in parallel, if possible.
   #
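In effect, fetch above is just sugar for a get followed by an add on a miss. A rough caller-side equivalent, with a made-up key, TTL and block body (build_report is a hypothetical expensive computation):

  # Without fetch (memcache-client <= 1.6.x), the caller writes the miss path by hand:
  value = cache.get('expensive-report')
  if value.nil?
    value = build_report
    cache.add('expensive-report', value, 300)
  end

  # With 1.7.0 the same logic collapses to:
  value = cache.fetch('expensive-report', 300) { build_report }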
@@ -303,15 +333,62 @@ class MemCache
     with_server(key) do |server, cache_key|
 
       value = Marshal.dump value unless raw
-      logger.debug { "SET #{key} to #{server.inspect}: #{value ? value.to_s.size : 'nil'}" } if logger
-
       data = value.to_s
+      logger.debug { "set #{key} to #{server.inspect}: #{data.size}" } if logger
+
       raise MemCacheError, "Value too large, memcached can only store 1MB of data per key" if data.size > ONE_MB
 
-      command = "set #{cache_key} 0 #{expiry} #{data.size}\r\n#{data}\r\n"
+      command = "set #{cache_key} 0 #{expiry} #{data.size}#{noreply}\r\n#{data}\r\n"
 
       with_socket_management(server) do |socket|
         socket.write command
+        break nil if @no_reply
+        result = socket.gets
+        raise_on_error_response! result
+
+        if result.nil?
+          server.close
+          raise MemCacheError, "lost connection to #{server.host}:#{server.port}"
+        end
+
+        result
+      end
+    end
+  end
+
+  ##
+  # "cas" is a check and set operation which means "store this data but
+  # only if no one else has updated since I last fetched it." This can
+  # be used as a form of optimistic locking.
+  #
+  # Works in block form like so:
+  #   cache.cas('some-key') do |value|
+  #     value + 1
+  #   end
+  #
+  # Returns:
+  # +nil+ if the value was not found on the memcached server.
+  # +STORED+ if the value was updated successfully
+  # +EXISTS+ if the value was updated by someone else since last fetch
+
+  def cas(key, expiry=0, raw=false)
+    raise MemCacheError, "Update of readonly cache" if @readonly
+    raise MemCacheError, "A block is required" unless block_given?
+
+    (value, token) = gets(key, raw)
+    return nil unless value
+    updated = yield value
+
+    with_server(key) do |server, cache_key|
+      logger.debug { "cas #{key} to #{server.inspect}: #{data.size}" } if logger
+
+      value = Marshal.dump updated unless raw
+      data = value.to_s
+      command = "cas #{cache_key} 0 #{expiry} #{value.size} #{token}#{noreply}\r\n#{value}\r\n"
+
+      with_socket_management(server) do |socket|
+        socket.write command
+        break nil if @no_reply
         result = socket.gets
         raise_on_error_response! result
 
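The changelog above asks what semantic sugar cas should grow; one common answer is a retry loop around the raw STORED/EXISTS strings. A minimal caller-side sketch, not part of the library: the compare_and_swap helper name and retry limit are invented for illustration.

  # Retry an optimistic update a few times before giving up.
  # Returns true when stored, false when the key is missing or contention persists.
  def compare_and_swap(cache, key, attempts = 3)
    attempts.times do
      result = cache.cas(key) { |value| yield value }
      return false if result.nil?          # key not present on the server
      return true  if result == "STORED\r\n"
      # "EXISTS\r\n" means someone updated the value since our gets; try again
    end
    false
  end

  # compare_and_swap(cache, 'visits') { |n| n + 1 }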
@@ -331,17 +408,79 @@ class MemCache
   # If +raw+ is true, +value+ will not be Marshalled.
   #
   # Readers should call this method in the event of a cache miss, not
-  # MemCache#set
+  # MemCache#set.
 
   def add(key, value, expiry = 0, raw = false)
     raise MemCacheError, "Update of readonly cache" if @readonly
     with_server(key) do |server, cache_key|
       value = Marshal.dump value unless raw
-      logger.debug { "
-      command = "add #{cache_key} 0 #{expiry} #{value.to_s.size}\r\n#{value}\r\n"
+      logger.debug { "add #{key} to #{server}: #{value ? value.to_s.size : 'nil'}" } if logger
+      command = "add #{cache_key} 0 #{expiry} #{value.to_s.size}#{noreply}\r\n#{value}\r\n"
 
       with_socket_management(server) do |socket|
         socket.write command
+        break nil if @no_reply
+        result = socket.gets
+        raise_on_error_response! result
+        result
+      end
+    end
+  end
+
+  ##
+  # Add +key+ to the cache with value +value+ that expires in +expiry+
+  # seconds, but only if +key+ already exists in the cache.
+  # If +raw+ is true, +value+ will not be Marshalled.
+  def replace(key, value, expiry = 0, raw = false)
+    raise MemCacheError, "Update of readonly cache" if @readonly
+    with_server(key) do |server, cache_key|
+      value = Marshal.dump value unless raw
+      logger.debug { "replace #{key} to #{server}: #{value ? value.to_s.size : 'nil'}" } if logger
+      command = "replace #{cache_key} 0 #{expiry} #{value.to_s.size}#{noreply}\r\n#{value}\r\n"
+
+      with_socket_management(server) do |socket|
+        socket.write command
+        break nil if @no_reply
+        result = socket.gets
+        raise_on_error_response! result
+        result
+      end
+    end
+  end
+
+  ##
+  # Append - 'add this data to an existing key after existing data'
+  # Please note the value is always passed to memcached as raw since it
+  # doesn't make a lot of sense to concatenate marshalled data together.
+  def append(key, value)
+    raise MemCacheError, "Update of readonly cache" if @readonly
+    with_server(key) do |server, cache_key|
+      logger.debug { "append #{key} to #{server}: #{value ? value.to_s.size : 'nil'}" } if logger
+      command = "append #{cache_key} 0 0 #{value.to_s.size}#{noreply}\r\n#{value}\r\n"
+
+      with_socket_management(server) do |socket|
+        socket.write command
+        break nil if @no_reply
+        result = socket.gets
+        raise_on_error_response! result
+        result
+      end
+    end
+  end
+
+  ##
+  # Prepend - 'add this data to an existing key before existing data'
+  # Please note the value is always passed to memcached as raw since it
+  # doesn't make a lot of sense to concatenate marshalled data together.
+  def prepend(key, value)
+    raise MemCacheError, "Update of readonly cache" if @readonly
+    with_server(key) do |server, cache_key|
+      logger.debug { "prepend #{key} to #{server}: #{value ? value.to_s.size : 'nil'}" } if logger
+      command = "prepend #{cache_key} 0 0 #{value.to_s.size}#{noreply}\r\n#{value}\r\n"
+
+      with_socket_management(server) do |socket|
+        socket.write command
+        break nil if @no_reply
         result = socket.gets
         raise_on_error_response! result
         result
|
|
356
495
|
raise MemCacheError, "Update of readonly cache" if @readonly
|
357
496
|
with_server(key) do |server, cache_key|
|
358
497
|
with_socket_management(server) do |socket|
|
359
|
-
|
498
|
+
logger.debug { "delete #{cache_key} on #{server}" } if logger
|
499
|
+
socket.write "delete #{cache_key} #{expiry}#{noreply}\r\n"
|
500
|
+
break nil if @no_reply
|
360
501
|
result = socket.gets
|
361
502
|
raise_on_error_response! result
|
362
503
|
result
|
@@ -366,19 +507,29 @@ class MemCache
 
   ##
   # Flush the cache from all memcache servers.
-
-  def flush_all
+  # A non-zero value for +delay+ will ensure that the flush
+  # is propogated slowly through your memcached server farm.
+  # The Nth server will be flushed N*delay seconds from now,
+  # asynchronously so this method returns quickly.
+  # This prevents a huge database spike due to a total
+  # flush all at once.
+
+  def flush_all(delay=0)
     raise MemCacheError, 'No active servers' unless active?
     raise MemCacheError, "Update of readonly cache" if @readonly
 
     begin
+      delay_time = 0
       @servers.each do |server|
         with_socket_management(server) do |socket|
-          socket.write "flush_all\r\n"
+          logger.debug { "flush_all #{delay_time} on #{server}" } if logger
+          socket.write "flush_all #{delay_time}#{noreply}\r\n"
+          break nil if @no_reply
           result = socket.gets
           raise_on_error_response! result
           result
         end
+        delay_time += delay
       end
     rescue IndexError => err
       handle_error nil, err
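To make the N*delay behaviour above concrete, here is roughly what flush_all(30) sends to a three-server farm; the server names are invented, and the arithmetic mirrors the delay_time accumulator in the diff:

  servers = ['cache-a:11211', 'cache-b:11211', 'cache-c:11211']  # hypothetical farm
  delay   = 30
  servers.each_with_index do |server, idx|
    # The idx-th server receives "flush_all #{idx * delay}\r\n", so the flushes
    # land at t=0s, t=30s and t=60s instead of hitting the backend all at once.
    puts "#{server} <- flush_all #{idx * delay}"
  end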
@@ -530,7 +681,8 @@ class MemCache
 
   def cache_decr(server, cache_key, amount)
     with_socket_management(server) do |socket|
-      socket.write "decr #{cache_key} #{amount}\r\n"
+      socket.write "decr #{cache_key} #{amount}#{noreply}\r\n"
+      break nil if @no_reply
       text = socket.gets
       raise_on_error_response! text
       return nil if text == "NOT_FOUND\r\n"
@@ -566,6 +718,38 @@ class MemCache
     end
   end
 
+  def gets(key, raw = false)
+    with_server(key) do |server, cache_key|
+      logger.debug { "gets #{key} from #{server.inspect}: #{value ? value.to_s.size : 'nil'}" } if logger
+      result = with_socket_management(server) do |socket|
+        socket.write "gets #{cache_key}\r\n"
+        keyline = socket.gets # "VALUE <key> <flags> <bytes> <cas token>\r\n"
+
+        if keyline.nil? then
+          server.close
+          raise MemCacheError, "lost connection to #{server.host}:#{server.port}"
+        end
+
+        raise_on_error_response! keyline
+        return nil if keyline == "END\r\n"
+
+        unless keyline =~ /(\d+) (\w+)\r/ then
+          server.close
+          raise MemCacheError, "unexpected response #{keyline.inspect}"
+        end
+        value = socket.read $1.to_i
+        socket.read 2 # "\r\n"
+        socket.gets   # "END\r\n"
+        [value, $2]
+      end
+      result[0] = Marshal.load result[0] unless raw
+      result
+    end
+  rescue TypeError => err
+    handle_error nil, err
+  end
+
+
   ##
   # Fetches +cache_keys+ from +server+ using a multi-get.
 
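The private gets above is what feeds cas its token. A tiny sketch of the response line it parses; the key, byte count and token value are invented for illustration:

  # memcached answers `gets my_namespace:key\r\n` with something like:
  #   VALUE my_namespace:key 0 3 42\r\n   (flags=0, 3 data bytes, cas token 42)
  #   foo\r\n
  #   END\r\n
  keyline = "VALUE my_namespace:key 0 3 42\r\n"
  if keyline =~ /(\d+) (\w+)\r/
    bytes, cas_token = $1.to_i, $2   # => 3 and "42"
  end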
@@ -599,7 +783,8 @@ class MemCache
 
   def cache_incr(server, cache_key, amount)
     with_socket_management(server) do |socket|
-      socket.write "incr #{cache_key} #{amount}\r\n"
+      socket.write "incr #{cache_key} #{amount}#{noreply}\r\n"
+      break nil if @no_reply
       text = socket.gets
       raise_on_error_response! text
       return nil if text == "NOT_FOUND\r\n"
@@ -679,6 +864,10 @@ class MemCache
     raise new_error
   end
 
+  def noreply
+    @no_reply ? ' noreply' : ''
+  end
+
   ##
   # Performs setup for making a request with +key+ from memcached. Returns
   # the server to fetch the key from and the complete key to use.
data/test/test_mem_cache.rb
CHANGED
@@ -478,6 +478,49 @@ class TestMemCache < Test::Unit::TestCase
     assert_equal '0123456789', value
   end
 
+  def test_fetch_without_a_block
+    server = FakeServer.new
+    server.socket.data.write "END\r\n"
+    server.socket.data.rewind
+
+    @cache.servers = [server]
+
+    flexmock(@cache).should_receive(:get).with('key', false).and_return(nil)
+
+    value = @cache.fetch('key', 1)
+    assert_equal nil, value
+  end
+
+  def test_fetch_miss
+    server = FakeServer.new
+    server.socket.data.write "END\r\n"
+    server.socket.data.rewind
+
+    @cache.servers = [server]
+
+    flexmock(@cache).should_receive(:get).with('key', false).and_return(nil)
+    flexmock(@cache).should_receive(:add).with('key', 'value', 1, false)
+
+    value = @cache.fetch('key', 1) { 'value' }
+
+    assert_equal 'value', value
+  end
+
+  def test_fetch_hit
+    server = FakeServer.new
+    server.socket.data.write "END\r\n"
+    server.socket.data.rewind
+
+    @cache.servers = [server]
+
+    flexmock(@cache).should_receive(:get).with('key', false).and_return('value')
+    flexmock(@cache).should_receive(:add).never
+
+    value = @cache.fetch('key', 1) { raise 'Should not be called.' }
+
+    assert_equal 'value', value
+  end
+
   def test_get_bad_key
     util_setup_fake_server
     assert_raise ArgumentError do @cache.get 'k y' end
@@ -752,6 +795,49 @@ class TestMemCache < Test::Unit::TestCase
     assert_match /object too large for cache/, e.message
   end
 
+  def test_prepend
+    server = FakeServer.new
+    server.socket.data.write "STORED\r\n"
+    server.socket.data.rewind
+    @cache.servers = []
+    @cache.servers << server
+
+    @cache.prepend 'key', 'value'
+
+    dumped = Marshal.dump('value')
+
+    expected = "prepend my_namespace:key 0 0 5\r\nvalue\r\n"
+    assert_equal expected, server.socket.written.string
+  end
+
+  def test_append
+    server = FakeServer.new
+    server.socket.data.write "STORED\r\n"
+    server.socket.data.rewind
+    @cache.servers = []
+    @cache.servers << server
+
+    @cache.append 'key', 'value'
+
+    expected = "append my_namespace:key 0 0 5\r\nvalue\r\n"
+    assert_equal expected, server.socket.written.string
+  end
+
+  def test_replace
+    server = FakeServer.new
+    server.socket.data.write "STORED\r\n"
+    server.socket.data.rewind
+    @cache.servers = []
+    @cache.servers << server
+
+    @cache.replace 'key', 'value', 150
+
+    dumped = Marshal.dump('value')
+
+    expected = "replace my_namespace:key 0 150 #{dumped.length}\r\n#{dumped}\r\n"
+    assert_equal expected, server.socket.written.string
+  end
+
   def test_add
     server = FakeServer.new
     server.socket.data.write "STORED\r\n"
@@ -859,12 +945,24 @@ class TestMemCache < Test::Unit::TestCase
 
     @cache.flush_all
 
-    expected = "flush_all\r\n"
+    expected = "flush_all 0\r\n"
     @cache.servers.each do |server|
       assert_equal expected, server.socket.written.string
     end
   end
 
+  def test_flush_all_with_delay
+    @cache.servers = []
+    3.times { @cache.servers << FakeServer.new }
+
+    @cache.flush_all(10)
+
+    @cache.servers.each_with_index do |server, idx|
+      expected = "flush_all #{idx*10}\r\n"
+      assert_equal expected, server.socket.written.string
+    end
+  end
+
   def test_flush_all_failure
     socket = FakeSocket.new
 
@@ -881,7 +979,7 @@ class TestMemCache < Test::Unit::TestCase
       @cache.flush_all
     end
 
-    assert_match /flush_all\r\n/, socket.written.string
+    assert_match /flush_all 0\r\n/, socket.written.string
   end
 
   def test_stats
@@ -982,6 +1080,12 @@ class TestMemCache < Test::Unit::TestCase
     cache.flush_all
     workers = []
 
+    cache.set('f', 'zzz')
+    assert_equal "STORED\r\n", (cache.cas('f') do |value|
+      value << 'z'
+    end)
+    assert_equal 'zzzz', cache.get('f')
+
     # Have a bunch of threads perform a bunch of operations at the same time.
     # Verify the result of each operation to ensure the request and response
     # are not intermingled between threads.
@@ -991,6 +1095,14 @@ class TestMemCache < Test::Unit::TestCase
           cache.set('a', 9)
           cache.set('b', 11)
           cache.add('c', 10, 0, true)
+          cache.set('d', 'a', 100, true)
+          cache.set('e', 'x', 100, true)
+          cache.set('f', 'zzz')
+          assert_not_nil(cache.cas('f') do |value|
+            value << 'z'
+          end)
+          cache.append('d', 'b')
+          cache.prepend('e', 'y')
           assert_equal "NOT_STORED\r\n", cache.add('a', 11)
           assert_equal({ 'a' => 9, 'b' => 11 }, cache.get_multi(['a', 'b']))
           inc = cache.incr('c', 10)
@@ -998,6 +1110,10 @@ class TestMemCache < Test::Unit::TestCase
           assert inc > 14
           assert cache.decr('c', 5) > 14
           assert_equal 11, cache.get('b')
+          d = cache.get('d', true)
+          assert_match /\Aab+\Z/, d
+          e = cache.get('e', true)
+          assert_match /\Ay+x\Z/, e
         end
       end
     end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: memcache-client
 version: !ruby/object:Gem::Version
-  version: 1.6.5
+  version: 1.7.0
 platform: ruby
 authors:
 - Eric Hodel
@@ -11,7 +11,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2009-
+date: 2009-03-07 23:00:00 -06:00
 default_executable:
 dependencies: []
 
@@ -26,7 +26,7 @@ extra_rdoc_files: []
 files:
 - README.rdoc
 - LICENSE.txt
-- History.txt
+- History.rdoc
 - Rakefile
 - lib/memcache.rb
 has_rdoc: false