xunch 0.0.6 → 0.0.9

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 11095eadd1ac6cf93128811cd1bab2d818e345b1
- data.tar.gz: 8e60835af4879fd33dfa4fe7cfc67d009548d5c2
+ metadata.gz: 5b731a19b58bfaa3292a7c859ff34428b219d7a5
+ data.tar.gz: 7feaf350a70b458efc0fa9d9b9b78c5602ee8d5c
  SHA512:
- metadata.gz: 6b7ca3c02e48930d5d7b551a8393c677493d3ac74ba6dc69fbf54249233ff2804d62f353e93324b62449b255b76635e39d25a8a4ba98dca17ee3a697e9d934fa
- data.tar.gz: fed73c21df8039c4622c9ca91328f66aa5cc933f61a2230af0bd5c870804b41a1e502abeee45f875652168bcb04158947c2a2c1a6f734e183351d69f8e1041a3
+ metadata.gz: 0e842d7bfa0ee9b0c317cbed99de8e30e6fc338941f1b7a728e69a6e59ded2ad8c912dfdc6144174a0bc0cadb4bcc3488a21ef672d3784479fe0eae514741852
+ data.tar.gz: 4a7d6f78ca09f0f10ec83070cb63b5ec1a7e50f931ebf515688e4491c2cb6e344e3e22363da721a1cf47ca5b7570139780c2e3ef73574accc77efe4f371097c8
@@ -6,10 +6,12 @@ module Xunch
  @shard_redis = ShardRedis.new(@options[:regex],shard_infos)
  end
 
+ # Delete the specified key from the cache
  def evict(key)
  @shard_redis.del(assembleKey(key))
  end
-
+
+ # Delete the specified group of keys from the cache
  def batch_evict(keys)
  new_keys = []
  keys.each { |key|
@@ -18,6 +20,12 @@ module Xunch
  @shard_redis.batch_del(new_keys)
  end
 
+ # Get the remaining expiration time of a key, in seconds
+ def ttl(key)
+ new_key = assembleKey(key)
+ @shard_redis.ttl(new_key)
+ end
+
  def destroy
  @shard_redis.destroy
  end
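For orientation, a minimal usage sketch of the `evict`, `batch_evict`, and `ttl` methods added above. The config path and the cache name `"track"` are taken from the gem's own tests; the keys are made up for illustration.

```ruby
# Hypothetical sketch: the YAML path, cache name, and keys are examples only.
caches = Xunch::CacheBuilder.build("test/xunch.yaml")
cache  = caches["track"]

cache.evict(42)                  # drop a single cached key
cache.batch_evict([42, 43, 44])  # drop a group of keys in one call
seconds = cache.ttl(42)          # remaining expiration time in seconds (new in this release)
```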
@@ -49,7 +57,7 @@ module Xunch
 
  if(options["cache_class"] != nil)
  cache_class = eval(options["cache_class"])
- elsif options["type"] != CacheType::LISTOBJECT && options["type"] != CacheType::LISTFIELDOBJECT
+ elsif options["type"] != CacheType::LISTID && options["type"] != CacheType::LISTOBJECT && options["type"] != CacheType::LISTFIELDOBJECT
  raise ArgumentError, "cache_class is nil"
  end
  use_options.store(:cache_class, cache_class)
@@ -39,6 +39,9 @@ module Xunch
  when CacheType::FIELDOBJECT
  cache = Xunch::FieldObjectCache.new(cache_config,shard_infos)
  caches[cache_config["name"]] = cache
+ when CacheType::LISTID
+ cache = Xunch::ListIdCache.new(cache_config,shard_infos)
+ caches[cache_config["name"]] = cache
  when CacheType::LISTOBJECT
  lazy_caches[cache_config] = shard_infos
  when CacheType::LISTFIELDOBJECT
@@ -16,8 +16,8 @@ module Xunch
  #
  def get(key, page, size)
  raise "key can not be nil." unless key != nil
- raise "page must be a positive number." unless page > 0
- raise "size must be a positive number and less than 100." unless page != nil or size < 100
+ raise "page must be a positive number." unless page != nil and page > 0
+ raise "size must be a positive number and less than 100." unless size != nil and size > 0 and size <= 100
  start = (page - 1) * size;
  stop = page * size - 1;
  new_key = assembleKey(key)
@@ -29,6 +29,10 @@ module Xunch
  end
 
  def put(key, values)
+ putex(key, values, @options[:expire_time])
+ end
+
+ def putex(key, values, ttl)
  raise "key can not be nil." unless key != nil
  raise "values can not be nil." unless values != nil
  sub_keys = []
@@ -38,15 +42,15 @@ module Xunch
  }
  temp_key = assembleTempKey(key)
  new_key = assembleKey(key)
- @delegate.multi_putex(values,@options[:expire_time])
- @shard_redis.lset(temp_key,new_key,sub_keys,@options[:expire_time])
+ @delegate.multi_putex(values,ttl)
+ @shard_redis.lset(temp_key,new_key,sub_keys,ttl)
  end
 
- def remove(key, sub_key)
+ def remove(key, *sub_keys)
  raise "key can not be nil." unless key != nil
- raise "sub_key can not be nil." unless sub_key != nil
+ raise "sub_key can not be nil." unless sub_keys != nil
  new_key = assembleKey(key)
- @shard_redis.lremove(new_key,sub_key)
+ @shard_redis.lremove(new_key,*sub_keys)
  end
 
  def size(key)
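A hedged sketch of how the new `putex` and variadic `remove` might be called on a list cache. The cache name `"tracklist"` comes from the test config; the key, values, and sub keys are invented for the example.

```ruby
# Illustrative only: key, values, and sub keys are placeholders.
caches     = Xunch::CacheBuilder.build("test/xunch.yaml")
list_cache = caches["tracklist"]
values     = @cache_objects           # any array of objects the cache knows how to serialize

list_cache.put("hot", values)         # keeps the configured :expire_time
list_cache.putex("hot", values, 600)  # same write with an explicit TTL in seconds
list_cache.remove("hot", 7, 8, 9)     # several sub keys removed in one call
```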
@@ -0,0 +1,61 @@
+ module Xunch
+ # The list cache does not currently support concurrent writes, and there is no plan to support them.
+ # The main use case is single-threaded writes of the discovery page's hot lists with concurrent reads.
+ # A remove interface is also provided to help drop tracks, users, and albums that no longer exist from the list.
+ class ListIdCache < Cache
+
+ def initialize(options, shard_infos)
+ super(options,shard_infos)
+ end
+
+ # Query interface
+ # @key the key of the list
+ # @page page number
+ # @size page size
+ #
+ def get(key, page, size)
+ raise "key can not be nil." unless key != nil
+ raise "page must be a positive number." unless page != nil and page > 0
+ raise "size must be a positive number and less than 100." unless size != nil and size > 0 and size <= 100
+ start = (page - 1) * size;
+ stop = page * size - 1;
+ new_key = assembleKey(key)
+ ids = @shard_redis.lrange(new_key,start,stop)
+ ids
+ end
+
+ def put(key, ids)
+ putex(key, ids, @options[:expire_time])
+ end
+
+ def putex(key, ids, ttl)
+ raise "key can not be nil." unless key != nil
+ raise "ids can not be nil." unless ids != nil
+ sub_keys = []
+ ids.each { | id |
+ raise "id in ids can not be nil." unless ids != nil
+ sub_keys.push(id.to_s)
+ }
+ if sub_keys.length == 0
+ return nil
+ end
+ temp_key = assembleTempKey(key)
+ new_key = assembleKey(key)
+ @shard_redis.lset(temp_key,new_key,sub_keys,ttl)
+ end
+
+ def remove(key, *sub_keys)
+ raise "key can not be nil." unless key != nil
+ raise "sub_key can not be nil." unless sub_keys != nil
+ new_key = assembleKey(key)
+ @shard_redis.lremove(new_key,*sub_keys)
+ end
+
+ def size(key)
+ raise "key can not be nil." unless key != nil
+ new_key = assembleKey(key)
+ @shard_redis.llen(new_key)
+ end
+
+ end
+ end
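A rough usage sketch of the new ListIdCache. It assumes a `list_id` entry exists in the cache configuration; the cache name `"hotlist"`, the list key, and the ids below are hypothetical.

```ruby
# Hypothetical sketch: assumes a LISTID cache named "hotlist" is configured in the YAML file.
caches   = Xunch::CacheBuilder.build("test/xunch.yaml")
id_cache = caches["hotlist"]

id_cache.put("discovery", [101, 102, 103])    # stores the ids with the configured expire_time
id_cache.putex("discovery", [101, 102], 300)  # same write with an explicit TTL in seconds
page = id_cache.get("discovery", 1, 20)       # page 1, up to 20 ids (returned as strings)
id_cache.remove("discovery", "102")           # drop an id that no longer exists
id_cache.size("discovery")                    # current list length
```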
@@ -16,8 +16,8 @@ module Xunch
  #
  def get(key, page, size)
  raise "key can not be nil." unless key != nil
- raise "page must be a positive number." unless page > 0
- raise "size must be a positive number and less than 100." unless page != nil or size < 100
+ raise "page must be a positive number." unless page != nil and page > 0
+ raise "size must be a positive number and less than 100." unless size != nil and size > 0 and size <= 100
  start = (page - 1) * size;
  stop = page * size - 1;
  new_key = assembleKey(key)
@@ -29,6 +29,10 @@ module Xunch
  end
 
  def put(key, values)
+ putex(key, values, @options[:expire_time])
+ end
+
+ def putex(key, values, ttl)
  raise "key can not be nil." unless key != nil
  raise "values can not be nil." unless values != nil
  sub_keys = []
@@ -38,15 +42,15 @@ module Xunch
  }
  temp_key = assembleTempKey(key)
  new_key = assembleKey(key)
- @delegate.multi_putex(values,@options[:expire_time])
- @shard_redis.lset(temp_key,new_key,sub_keys,@options[:expire_time])
+ @delegate.multi_putex(values,ttl)
+ @shard_redis.lset(temp_key,new_key,sub_keys,ttl)
  end
 
- def remove(key, sub_key)
+ def remove(key, *sub_keys)
  raise "key can not be nil." unless key != nil
- raise "sub_key can not be nil." unless sub_key != nil
+ raise "sub_key can not be nil." unless sub_keys != nil
  new_key = assembleKey(key)
- @shard_redis.lremove(new_key,sub_key)
+ @shard_redis.lremove(new_key,*sub_keys)
  end
 
  def size(key)
@@ -55,5 +59,9 @@ module Xunch
  @shard_redis.llen(new_key)
  end
 
+ def delegate
+ @delegate
+ end
+
  end
  end
@@ -104,7 +104,7 @@ module Xunch
  }
  end
  else
- redis.mset(values)
+ redis.mapped_mset(hash)
  end
  end
  end
@@ -187,9 +187,21 @@ module Xunch
  end
  end
 
- def lrem(key,value)
- with do | redis |
- redis.lrem(key,1,value)
+ def lrem(key,*value)
+ if(value.length > 3)
+ with do | redis |
+ redis.pipelined do
+ value.each{ |v|
+ redis.lrem(key,1,v)
+ }
+ end
+ end
+ else
+ with do | redis |
+ value.each{ |v|
+ redis.lrem(key,1,v)
+ }
+ end
  end
  end
 
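The reworked `lrem` above issues one LREM per value and, when more than three values are given, wraps them in a redis-rb pipeline so the commands share round trips. A standalone sketch of that pattern with plain redis-rb (the gem's own code goes through its `with` connection-pool helper instead); this block style, where commands are sent on the connection itself, matches the older redis-rb used here, while newer redis-rb versions pass a pipeline object into the block.

```ruby
require "redis"

redis  = Redis.new
values = %w[101 102 103 104]

# One LREM per value; the pipeline sends them in a single round trip.
redis.pipelined do
  values.each { |v| redis.lrem("hotlist", 1, v) }
end
```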
@@ -203,7 +215,7 @@ module Xunch
  }
  result = redis.rename(temp_key,new_key)
  if(ttl > 0)
- redis.expire(new_key,ttl)
+ redis.expire(new_key,ttl)
  end
  end
  end
@@ -46,12 +46,12 @@ module Xunch
 
  def expire(key, ttl)
  redis = get_shard(key)
- redis.pexpire(key, ttl)
+ redis.expire(key, ttl)
  end
 
- def get_expire(key)
+ def ttl(key)
  redis = get_shard(key)
- redis.pttl(key)
+ redis.ttl(key)
  end
 
  def get(key)
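The change above switches from PEXPIRE/PTTL (milliseconds) to EXPIRE/TTL (seconds), which matches the new `Cache#ttl` contract and the `assert_equal(3000,ttl)` check in the new CacheTest. A small reminder of the difference in plain redis-rb, using an arbitrary example key.

```ruby
require "redis"

redis = Redis.new
redis.set("xunch:example", "value")

redis.expire("xunch:example", 3000)   # set TTL in seconds
redis.ttl("xunch:example")            # => 3000 (seconds remaining)

redis.pexpire("xunch:example", 3000)  # set TTL in milliseconds
redis.pttl("xunch:example")           # => ~3000 (milliseconds remaining)
```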
@@ -240,12 +240,13 @@ module Xunch
 
  def lremove(key,value)
  redis = get_shard(key)
- redis.lrem(key)
+ redis.lrem(key,*value)
  end
 
  def lset(temp_key, new_key, sub_keys, ttl)
  redis = get_shard(new_key)
- redis.lset(temp_key,new_key,sub_keys,ttl)
+ result = redis.lset(temp_key,new_key,sub_keys,ttl)
+ result[result.length - 2]
  end
 
  def lrange(key, start, stop)
@@ -27,7 +27,7 @@ module Xunch
  rescue SocketError => e
  raise e
  end
- if respose == nil || respose.code != "200"
+ if respose == nil || (respose.code != "200" && respose.code != "404")
  return false
  else
  return true
@@ -4,5 +4,6 @@ module Xunch
  FIELDOBJECT = 'field_object'
  LISTOBJECT = 'list_object'
  LISTFIELDOBJECT = 'list_field_object'
+ LISTID = "list_id"
  end
  end
data/lib/xunch.rb CHANGED
@@ -22,4 +22,5 @@ require 'xunch/cache/object_cache'
  require 'xunch/cache/field_object_cache'
  require 'xunch/cache/list_field_object_cache'
  require 'xunch/cache/list_object_cache'
+ require 'xunch/cache/list_id_cache'
  require 'xunch/cache/cache_builder'
@@ -11,14 +11,24 @@ class CacheBuilderTest < Test::Unit::TestCase
  puts "setup"
  end
 
- def test_build
+ def test_build_type
+ puts "CacheBuilderTest test_build_type start."
  root = File.expand_path("../..", __FILE__)
  file = File.join(root, 'test/xunch.yaml')
  caches = Xunch::CacheBuilder.build(file)
  caches.each { |key ,value|
- puts key
- puts value
+ case key
+ when "track"
+ assert(value.class.name == Xunch::ObjectCache.name,"track cache type is not Xunch::ObjectCache")
+ when "trackfield"
+ assert(value.class.name == Xunch::FieldObjectCache.name,"track cache type is not Xunch::FieldObjectCache")
+ when "tracklist"
+ assert(value.class.name == Xunch::ListObjectCache.name,"track cache type is not Xunch::ListObjectCache")
+ when "fieldtracklist"
+ assert(value.class.name == Xunch::ListFieldObjectCache.name,"track cache type is not Xunch::ListFieldObjectCache")
+ end
  }
+ puts "CacheBuilderTest test_build_type stop."
  end
 
  def teardown
@@ -0,0 +1,57 @@
+ $:.unshift File.expand_path("../../lib", __FILE__)
+ $:.unshift File.expand_path("../../test", __FILE__)
+ require "test/unit"
+ require 'xunch'
+ require 'yaml'
+ require 'test_helper'
+
+ class CacheTest < Test::Unit::TestCase
+ include Test::Unit::Assertions
+ def setup
+ root = File.expand_path("../..", __FILE__)
+ file = File.join(root, 'test/xunch.yaml')
+ caches = Xunch::CacheBuilder.build(file)
+ @object_cache = caches["track"]
+ hash = TestHelper.build_objects
+ @cache_object = hash["object"]
+ @cache_objects = hash["objects"]
+ @key = hash["key"]
+ puts "setup"
+ end
+
+ def test_evict
+ puts "CacheTest test_evict method start"
+ @object_cache.putex(@cache_object,3000)
+ @object_cache.get(@cache_object.id)
+ @object_cache.evict(@cache_object.id)
+ object = @object_cache.get(@cache_object.id)
+ assert_equal(nil,object)
+ puts "CacheTest test_evict method stop"
+ end
+
+ def test_batch_evict
+ puts "CacheTest test_batch_evict method start"
+ @object_cache.putex(@cache_object,3000)
+ @object_cache.get(@cache_object.id)
+ @object_cache.evict(@cache_object.id)
+ object = @object_cache.get(@cache_object.id)
+ assert_equal(nil,object)
+ puts "CacheTest test_batch_evict method stop"
+ end
+
+ def test_ttl
+ puts "CacheTest test_ttl method start"
+ @object_cache.putex(@cache_object,3000)
+ ttl = @object_cache.ttl(@cache_object.id)
+ @object_cache.evict(@cache_object.id)
+ assert_equal(3000,ttl)
+ ttl = @object_cache.ttl(@cache_object.id)
+ assert_equal(-1,ttl)
+ puts "CacheTest test_ttl method stop"
+ end
+
+ def teardown
+ super
+ puts "CacheTest teardown"
+ end
+ end
@@ -3,99 +3,26 @@ $:.unshift File.expand_path("../../test", __FILE__)
  require "test/unit"
  require "xunch"
  require 'yaml'
- require 'track_record_origin'
- require 'bigdecimal'
-
+ require 'test_helper'
 
  class FieldObjectCacheTest < Test::Unit::TestCase
  include Test::Unit::Assertions
  def setup
+ super
  root = File.expand_path("../..", __FILE__)
  file = File.join(root, 'test/xunch.yaml')
  caches = Xunch::CacheBuilder.build(file)
  @field_object_cache = caches["trackfield"]
  @fields = ["createdAt","updatedAt","approvedAt","isCrawler","isPublic","mp3size","longitude","trackId","playPath"]
- @cache_object = TrackRecordOrigin.find(1)
- @cache_objects = [@cache_object]
- @keys = [1]
- for i in 2 .. 100 do
- new_cache_object = TrackRecordOrigin.new
- new_cache_object.track_id = @cache_object.track_id
- new_cache_object.track_uid = @cache_object.track_uid
- new_cache_object.track_upload_source = @cache_object.track_upload_source
- new_cache_object.op_type = @cache_object.op_type
- new_cache_object.is_publish = @cache_object.is_publish
- new_cache_object.upload_source = @cache_object.upload_source
- new_cache_object.uid = @cache_object.uid
- new_cache_object.nickname = @cache_object.nickname
- new_cache_object.avatar_path = @cache_object.avatar_path
- new_cache_object.is_v = @cache_object.is_v
- new_cache_object.human_category_id = @cache_object.human_category_id
- new_cache_object.title = @cache_object.title
- new_cache_object.intro = @cache_object.intro
- new_cache_object.user_source = @cache_object.user_source
- new_cache_object.category_id = @cache_object.category_id
- new_cache_object.duration = @cache_object.duration
- new_cache_object.play_path = @cache_object.play_path
- new_cache_object.play_path_32 = @cache_object.play_path_32
- new_cache_object.play_path_64 = @cache_object.play_path_64
- new_cache_object.play_path_128 = @cache_object.play_path_128
- new_cache_object.transcode_state = @cache_object.transcode_state
- new_cache_object.download_path = @cache_object.download_path
- new_cache_object.cover_path = @cache_object.cover_path
- new_cache_object.album_id = @cache_object.album_id
- new_cache_object.album_title = @cache_object.album_title
- new_cache_object.album_cover_path = @cache_object.album_cover_path
- new_cache_object.tags = @cache_object.tags
- new_cache_object.ignore_tags = @cache_object.ignore_tags
- new_cache_object.extra_tags = @cache_object.extra_tags
- new_cache_object.singer = @cache_object.singer
- new_cache_object.singer_category = @cache_object.singer_category
- new_cache_object.author = @cache_object.author
- new_cache_object.composer = @cache_object.composer
- new_cache_object.arrangement = @cache_object.arrangement
- new_cache_object.post_production = @cache_object.post_production
- new_cache_object.lyric_path = @cache_object.lyric_path
- new_cache_object.lyric = @cache_object.lyric
- new_cache_object.language = @cache_object.language
- new_cache_object.resinger = @cache_object.resinger
- new_cache_object.announcer = @cache_object.announcer
- new_cache_object.is_public = @cache_object.is_public
- new_cache_object.access_password = @cache_object.access_password
- new_cache_object.allow_download = @cache_object.allow_download
- new_cache_object.allow_comment = @cache_object.allow_comment
- new_cache_object.is_crawler = @cache_object.is_crawler
- new_cache_object.inet_aton_ip = @cache_object.inet_aton_ip
- new_cache_object.longitude = @cache_object.longitude
- new_cache_object.latitude = @cache_object.latitude
- new_cache_object.music_category = @cache_object.music_category
- new_cache_object.order_num = @cache_object.order_num
- new_cache_object.is_pick = @cache_object.is_pick
- new_cache_object.rich_intro = @cache_object.rich_intro
- new_cache_object.short_intro = @cache_object.short_intro
- new_cache_object.comment_content = @cache_object.comment_content
- new_cache_object.comment_id = @cache_object.comment_id
- new_cache_object.dig_status = @cache_object.dig_status
- new_cache_object.approved_at = @cache_object.approved_at
- new_cache_object.is_deleted = @cache_object.is_deleted
- new_cache_object.mp3size = @cache_object.mp3size
- new_cache_object.mp3size_32 = @cache_object.mp3size_32
- new_cache_object.mp3size_64 = @cache_object.mp3size_64
- new_cache_object.waveform = @cache_object.waveform
- new_cache_object.upload_id = @cache_object.upload_id
- new_cache_object.updated_at = @cache_object.updated_at
- new_cache_object.created_at = @cache_object.created_at
- new_cache_object.source_url = @cache_object.source_url
- new_cache_object.status = @cache_object.status
- new_cache_object.explore_height = @cache_object.explore_height
- new_cache_object.id = i
- @cache_objects.push new_cache_object
- @keys.push new_cache_object.id
- end
+ hash = TestHelper.build_objects
+ @cache_object = hash["object"]
+ @cache_objects = hash["objects"]
+ @keys = hash["keys"]
  puts "setup"
  end
 
  def test_get_set
+ puts "FieldObjectCacheTest test_get_set method start"
  @field_object_cache.evict(1)
  object = @field_object_cache.get(1)
  assert_equal(nil,object)
@@ -182,11 +109,13 @@ class FieldObjectCacheTest < Test::Unit::TestCase
  # assert_equal(@cache_object.source_url,object.source_url)
  # assert_equal(@cache_object.status,object.status)
  # assert_equal(@cache_object.explore_height,object.explore_height)
+ puts "FieldObjectCacheTest test_get_set method stop"
  end
 
  def test_get_set_benchmark
+ puts "FieldObjectCacheTest test_get_set_benchmark method start"
  @field_object_cache.evict(1)
- times = 1000
+ times = TestHelper::TIMES
  start = Time.now
  for i in 1 .. times do
  @field_object_cache.put(@cache_object)
@@ -200,9 +129,11 @@ class FieldObjectCacheTest < Test::Unit::TestCase
  end
  stop = Time.now
  puts "#{times} times get operation total use #{stop-start} seconds"
+ puts "FieldObjectCacheTest test_get_set_benchmark method stop"
  end
 
  def test_mget_mset
+ puts "FieldObjectCacheTest test_mget_mset method start"
  @field_object_cache.batch_evict @keys
  result = @field_object_cache.multi_put(@cache_objects)
  for i in 0 .. result.length / 2 - 1 do
@@ -295,12 +226,14 @@ class FieldObjectCacheTest < Test::Unit::TestCase
  # assert_equal(@cache_objects[i].status,objects[i].status)
  # assert_equal(@cache_objects[i].explore_height,objects[i].explore_height)
  end
+ puts "FieldObjectCacheTest test_mget_mset method stop"
  end
 
  def test_mget_mset_benchmark
+ puts "FieldObjectCacheTest test_mget_mset_benchmark method start"
  @field_object_cache.batch_evict @keys
 
- times = 1000
+ times = TestHelper::TIMES
  start = Time.now
  for i in 1 .. times do
  @field_object_cache.multi_put(@cache_objects)
@@ -314,13 +247,17 @@ class FieldObjectCacheTest < Test::Unit::TestCase
  end
  stop = Time.now
  puts "#{times} times multi_get operation total use #{stop-start} seconds"
+ puts "FieldObjectCacheTest test_mget_mset_benchmark method stop"
  end
 
  def test_evict
+ puts "FieldObjectCacheTest test_evict method start"
  @field_object_cache.batch_evict(@keys)
+ puts "FieldObjectCacheTest test_evict method stop"
  end
 
  def test_get_set_with_field
+ puts "FieldObjectCacheTest test_get_set_with_field method start"
  # you must convert fields first
 
  @field_object_cache.evict(1)
@@ -348,11 +285,13 @@ class FieldObjectCacheTest < Test::Unit::TestCase
  assert_equal(@cache_object.mp3size,object.mp3size)
  assert_equal(@cache_object.updated_at,object.updated_at)
  assert_equal(@cache_object.created_at,object.created_at)
+ puts "FieldObjectCacheTest test_get_set_with_field method stop"
  end
 
  def test_get_set_with_field_benchmark
+ puts "FieldObjectCacheTest test_get_set_with_field_benchmark method start"
  @field_object_cache.evict(1)
- times = 1000
+ times = TestHelper::TIMES
  start = Time.now
  for i in 1 .. times do
  @field_object_cache.put_with_field(@cache_object,@fields)
@@ -366,9 +305,11 @@ class FieldObjectCacheTest < Test::Unit::TestCase
  end
  stop = Time.now
  puts "#{times} times get_with_field operation total use #{stop-start} seconds"
+ puts "FieldObjectCacheTest test_get_set_with_field_benchmark method stop"
  end
 
  def test_mget_mset_with_field
+ puts "FieldObjectCacheTest test_mget_mset_with_field method start"
  @field_object_cache.batch_evict @keys
  result = @field_object_cache.multi_put_with_field(@cache_objects,@fields)
  for i in 0 .. result.length / 2 - 1 do
@@ -401,11 +342,13 @@ class FieldObjectCacheTest < Test::Unit::TestCase
  assert_equal(@cache_objects[i].updated_at,objects[i].updated_at)
  assert_equal(@cache_objects[i].created_at,objects[i].created_at)
  end
+ puts "FieldObjectCacheTest test_mget_mset_with_field method stop"
  end
 
  def test_mget_mset_with_field_benchmark
+ puts "FieldObjectCacheTest test_mget_mset_with_field_benchmark method start"
  @field_object_cache.batch_evict @keys
- times = 1000
+ times = TestHelper::TIMES
  start = Time.now
  for i in 1 .. times do
  @field_object_cache.multi_put_with_field(@cache_objects,@fields)
@@ -419,6 +362,7 @@ class FieldObjectCacheTest < Test::Unit::TestCase
  end
  stop = Time.now
  puts "#{times} times multi_get_with_field operation total use #{stop-start} seconds"
+ puts "FieldObjectCacheTest test_mget_mset_with_field_benchmark method stop"
  end
 
  def teardown