cache_fu 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/LICENSE ADDED
@@ -0,0 +1,18 @@
+ Copyright (c) 2007 Chris Wanstrath
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README ADDED
@@ -0,0 +1,18 @@
+ == cache_fu
+
+ A rewrite of acts_as_cached.
+ This version is only compatible with Rails 3 and above.
+
+ == Changes from acts_as_cached 1
+
+ - You can no longer set a 'ttl' method on a class. Instead,
+   pass :ttl to acts_as_cached:
+   >> acts_as_cached :ttl => 15.minutes
+
+ - The is_cached? method is aliased as cached?
+
+ - set_cache on an instance can take a ttl
+   >> @story.set_cache(15.days)
+
+
+ Chris Wanstrath [ chris[at]ozmm[dot]org ]
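
Putting the README's notes together, here is a minimal usage sketch. The Story model, the finder fallback, and the specific ttl values are illustrative assumptions, not part of this release:

    class Story < ActiveRecord::Base
      acts_as_cached :ttl => 15.minutes   # per-class ttl, replacing the old 'ttl' class method
    end

    story = Story.get_cache(1)    # reads "Story:1" from the cache, falling back to Story.find(1)
    story.cached?                 # true once the record is in the cache
    story.set_cache(15.days)      # instance-level ttl override
    story.expire_cache            # drop the cached copy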
@@ -0,0 +1,40 @@
+ ##
+ # Copy this file to vendor/plugins/acts_as_cached/extensions.rb if you
+ # wish to extend acts_as_cached with your own instance or class methods.
+ #
+ # You can, of course, do this directly in your cached classes,
+ # but keeping your custom methods here allows you to define
+ # methods for all cached objects DRYly.
+ module ActsAsCached
+   module Extensions
+     module ClassMethods
+       ##
+       # All acts_as_cached classes will be extended with
+       # this method.
+       #
+       # >> Story.multi_get_cache(13, 353, 1231, 505)
+       # => [<Story:13>, <Story:353>, ...]
+       def multi_get_cache(*ids)
+         ids.flatten.map { |id| get_cache(id) }
+       end
+     end
+
+     module InstanceMethods
+       ##
+       # All instances of an acts_as_cached class will be
+       # extended with this method.
+       #
+       # >> story = Story.get_cache(1)
+       # => <Story:1>
+       # >> story.reset_included_caches
+       # => true
+       def reset_included_caches
+         return false unless associations = cache_config[:include]
+         associations.each do |association|
+           Array(send(association)).each { |item| item.reset_cache }
+         end
+         true
+       end
+     end
+   end
+ end
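
With these extensions installed, usage might look like the following sketch; the Story model and its cached :include configuration are assumptions for illustration:

    stories = Story.multi_get_cache(13, 353, 1231)   # one get_cache call per id
    stories.first.reset_included_caches              # re-caches every association listed under
                                                     # cache_config[:include] and returns true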
@@ -0,0 +1,34 @@
+ defaults:
+   ttl: 1800
+   readonly: false
+   urlencode: false
+   c_threshold: 10000
+   compression: true
+   debug: false
+   namespace: app
+   sessions: false
+   session_servers: false
+   fragments: false
+   memory: 64
+   servers: localhost:11211
+   benchmarking: true
+   raise_errors: true
+   fast_hash: false
+   fastest_hash: false
+
+ development:
+   sessions: false
+   fragments: false
+   servers: localhost:11211
+
+ # turn off caching
+ test:
+   disabled: true
+
+ production:
+   memory: 256
+   benchmarking: false
+   servers:
+     - 192.185.254.121:11211
+     - 192.185.254.138:11211
+     - 192.185.254.160:11211
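
The file is ERB-processed, environment-keyed YAML: the defaults section is merged with the section matching Rails.env, and the test section turns caching off entirely (see Config#setup later in this diff). A rough sketch of loading it by hand, assuming an initializer path that is not part of this package:

    # config/initializers/cache_fu.rb (hypothetical location)
    require 'erb'
    require 'yaml'

    yaml   = ERB.new(File.read(Rails.root.join('config', 'memcached.yml'))).result
    config = ActsAsCached::Config.setup(YAML.load(yaml))   # merged, symbolized config hash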
@@ -0,0 +1,81 @@
+ #!/usr/bin/env ruby
+ # By atmos@atmos.org
+ # This goes in your script/ directory.
+ # It parses your memcached.yml file and hooks you up with some info, and
+ # it keeps you from having to mess with stale memcached daemons.
+ require 'yaml'
+ require 'timeout'
+ require 'erb'
+
+ class MemcachedCtl
+   attr_accessor :memcached, :memory, :pids, :servers, :ip_address, :ethernet_device
+
+   def initialize
+     env = ENV['RAILS_ENV'] || 'development'
+     self.memcached = `which memcached`.chomp
+     self.servers = [ ]
+     self.pids = { }
+     self.ethernet_device = ENV['ETH'] || 'eth0'
+     self.ip_address = get_ip_address || '0.0.0.0'
+     self.memory = '128'
+
+     config = YAML.load(ERB.new(IO.read(File.expand_path(File.dirname(__FILE__) + "/../config/memcached.yml"))).result)
+     self.servers = [ config['defaults']['servers'] ].flatten rescue ['127.0.0.1:11211']
+     self.servers = [ config[env]['servers'] ].flatten if config[env]['servers']
+     self.servers.reject! { |server| host, port = server.split(/:/); self.ip_address == host }
+     self.memory = config[env]['memory'] unless config[env]['memory'].nil?
+
+     each_server do |host, port|
+       `ps auwwx | grep memcached | grep '\\-l #{ip_address} \\-p #{port}' | grep -v grep`.split(/\n/).each do |line|
+         self.pids[port] = line.split(/\s+/)[1]
+       end
+       self.pids[port] ||= 'Down'
+     end
+   end
+
+   def execute(cmd)
+     send(cmd) rescue usage
+   end
+
+   def restart; stop; sleep 1; start end
+
+   def status
+     each_server { |host, port| puts "Port #{port} -> #{pids[port] =~ /\d+/ ? 'Up' : 'Down'}" }
+   end
+
+   def kill
+     each_server { |host, port| `kill -9 #{pids[port]} > /dev/null 2>&1` if pids[port] =~ /\d+/ }
+   end
+
+   def stop; kill end
+
+   def start
+     each_server do |host, port|
+       `#{memcached} -d -m #{memory} -l #{ip_address} -p #{port}`
+       STDERR.puts "Try memcached_ctl status" unless $? == 0
+     end
+   end
+
+   def usage
+     methods = %w[start stop restart kill status]
+     puts "Usage: script/memcached_ctl [ " + (methods * ' | ') + " ]"
+   end
+
+   protected
+   def each_server
+     servers.each do |server|
+       host, port = server.split(/:/)
+       yield host, port
+     end
+   end
+
+   def get_ip_address # works on Linux; you may need to tweak this on other OSes
+     line = `/sbin/ifconfig #{ethernet_device} | grep inet | grep -v inet6`.chomp
+     if line =~ /\s*inet addr:((\d+\.){3}\d+)\s+.*/
+       self.ip_address = $1
+     end
+   end
+ end
+ ###########################################################################
+
+ MemcachedCtl.new.execute(ARGV.first)
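
The control script is meant to live in script/ and be driven by its first argument; status, for example, reports each configured port as Up or Down based on the memcached pids it finds. A trivial sketch of invoking it from Ruby (the install path is an assumption):

    # e.g. from a Rake task or deploy hook
    system('ruby', 'script/memcached_ctl', 'status')   # also: start / stop / restart / kill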
@@ -0,0 +1,82 @@
+ require 'benchmark'
+
+ module ActsAsCached
+   module Benchmarking #:nodoc:
+     def self.cache_runtime
+       @@cache_runtime ||= 0.0
+     end
+
+     def self.cache_reset_runtime
+       @@cache_runtime = nil
+     end
+
+     def cache_benchmark(title, log_level = Logger::DEBUG, use_silence = true)
+       return yield unless logger && logger.level == log_level
+       result = nil
+
+       seconds = Benchmark.realtime {
+         result = use_silence ? ActionController::Base.silence { yield } : yield
+       }
+
+       @@cache_runtime ||= 0.0
+       @@cache_runtime += seconds
+
+       logger.add(log_level, "==> #{title} (#{'%.5f' % seconds})")
+       result
+     end
+
+     def fetch_cache_with_benchmarking(*args)
+       cache_benchmark "Got #{cache_key args.first} from cache." do
+         fetch_cache_without_benchmarking(*args)
+       end
+     end
+
+     def set_cache_with_benchmarking(*args)
+       cache_benchmark "Set #{cache_key args.first} to cache." do
+         set_cache_without_benchmarking(*args)
+       end
+     end
+
+     def expire_cache_with_benchmarking(*args)
+       cache_benchmark "Deleted #{cache_key args.first} from cache." do
+         expire_cache_without_benchmarking(*args)
+       end
+     end
+
+     def self.add_to(klass)
+       return if klass.respond_to? :fetch_cache_with_benchmarking
+       klass.extend self
+
+       class << klass
+         alias_method_chain :fetch_cache, :benchmarking
+         alias_method_chain :set_cache, :benchmarking
+         alias_method_chain :expire_cache, :benchmarking
+
+         def logger; Rails.logger end unless respond_to? :logger
+       end
+     end
+   end
+ end
+
+ module ActsAsCached
+   module MemcacheRuntime
+     extend ActiveSupport::Concern
+     protected
+
+     def append_info_to_payload(payload)
+       super
+       payload[:memcache_runtime] = ActsAsCached::Benchmarking.cache_runtime
+       ActsAsCached::Benchmarking.cache_reset_runtime
+     end
+
+     module ClassMethods
+       def log_process_action(payload)
+         messages, memcache_runtime = super, payload[:memcache_runtime]
+         # cache_runtime is accumulated in seconds, so convert to ms for the log line
+         messages << ("Memcache: %.1fms" % (memcache_runtime.to_f * 1000)) if memcache_runtime
+         messages
+       end
+     end
+   end
+ end
+
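
Benchmarking is opt-in per class via Benchmarking.add_to, which uses alias_method_chain to wrap fetch_cache, set_cache, and expire_cache. A sketch of wiring it up by hand (Story is illustrative):

    ActsAsCached::Benchmarking.add_to(Story)

    Story.get_cache(1)   # the underlying fetch/set calls are now timed and, with the logger at
                         # the benchmark log level, logged roughly as
                         # "==> Got Story:1 from cache. (0.00123)"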
@@ -0,0 +1,307 @@
+ module ActsAsCached
+   module ClassMethods
+     @@nil_sentinel = :_nil
+
+     def cache_config
+       config = ActsAsCached::Config.class_config[cache_name] ||= {}
+       if name == cache_name
+         config
+       else
+         # STI: subclasses get a copy of the base class config
+         ActsAsCached::Config.class_config[name] ||= config.dup
+       end
+     end
+
+     def cache_options
+       cache_config[:options] ||= {}
+     end
+
+     def get_cache(*args)
+       options = args.last.is_a?(Hash) ? args.pop : {}
+       args = args.flatten
+
+       ##
+       # head off to get_caches if we were passed multiple cache_ids
+       if args.size > 1
+         return get_caches(args, options)
+       else
+         cache_id = args.first
+       end
+
+       if (item = fetch_cache(cache_id)).nil?
+         set_cache(cache_id, block_given? ? yield : fetch_cachable_data(cache_id), options[:ttl])
+       else
+         @@nil_sentinel == item ? nil : item
+       end
+     end
+
+     ##
+     # This method accepts an array of cache_ids which it will use to call
+     # get_multi on your cache store. Any misses will be fetched and saved to
+     # the cache, and a hash keyed by cache_id will ultimately be returned.
+     #
+     # If your cache store does not support #get_multi, an exception will be raised.
+     def get_caches(*args)
+       raise NoGetMulti unless cache_store.respond_to? :get_multi
+
+       options   = args.last.is_a?(Hash) ? args.pop : {}
+       cache_ids = args.flatten.map(&:to_s)
+       keys      = cache_keys(cache_ids)
+
+       # Map memcache keys to object cache_ids in { memcache_key => object_id } format
+       keys_map = Hash[*keys.zip(cache_ids).flatten]
+
+       # Call get_multi and figure out which keys were missed based on what was a hit
+       hits = ActsAsCached.config[:disabled] ? {} : (cache_store(:get_multi, *keys) || {})
+
+       # Misses can take the form of key => nil
+       hits.delete_if { |key, value| value.nil? }
+
+       misses = keys - hits.keys
+       hits.each { |k, v| hits[k] = nil if v == @@nil_sentinel }
+
+       # Return our hash if there are no misses
+       return hits.values.index_by(&:cache_id) if misses.empty?
+
+       # Find any missed records
+       needed_ids     = keys_map.values_at(*misses)
+       missed_records = Array(fetch_cachable_data(needed_ids))
+
+       # Cache the missed records
+       missed_records.each { |missed_record| missed_record.set_cache(options[:ttl]) }
+
+       # Return all records as a hash indexed by object cache_id
+       (hits.values + missed_records).index_by(&:cache_id)
+     end
+
+     # Simple wrapper for get_caches that
+     # returns the items as an ordered array
+     def get_caches_as_list(*args)
+       cache_ids = args.last.is_a?(Hash) ? args.first : args
+       cache_ids = [cache_ids].flatten
+       hash      = get_caches(*args)
+
+       cache_ids.map do |key|
+         hash[key]
+       end
+     end
+
+     def set_cache(cache_id, value, ttl = nil)
+       value.tap do |v|
+         v = @@nil_sentinel if v.nil?
+         cache_store(:set, cache_key(cache_id), v, ttl || cache_config[:ttl] || 1500)
+       end
+     end
+
+     def expire_cache(cache_id = nil)
+       cache_store(:delete, cache_key(cache_id))
+       true
+     end
+     alias :clear_cache :expire_cache
+
+     def reset_cache(cache_id = nil)
+       set_cache(cache_id, fetch_cachable_data(cache_id))
+     end
+
+     ##
+     # Encapsulates the pattern of writing custom cache methods
+     # which do nothing but wrap custom finders.
+     #
+     #   >> Story.caches(:find_popular)
+     #
+     # is the same as
+     #
+     #   def self.cached_find_popular
+     #     get_cache(:find_popular) { find_popular }
+     #   end
+     #
+     # The method also accepts a :ttl and/or a :with key.
+     # The :ttl value controls how long this method will
+     # stay cached, while the :with key's value will be passed along
+     # to the method. The :with value also becomes part of the cache key,
+     # so two near-identical #caches calls with different :with values use
+     # different caches.
+     #
+     #   >> Story.caches(:find_popular, :with => :today)
+     #
+     # is the same as
+     #
+     #   def self.cached_find_popular
+     #     get_cache("find_popular:today") { find_popular(:today) }
+     #   end
+     #
+     # If your target method accepts multiple parameters, pass an array via :withs.
+     #
+     #   >> Story.caches(:find_popular, :withs => [ :one, :two ])
+     #
+     # is the same as
+     #
+     #   def self.cached_find_popular
+     #     get_cache("find_popular:onetwo") { find_popular(:one, :two) }
+     #   end
+     def caches(method, options = {})
+       if options.keys.include?(:with)
+         with = options.delete(:with)
+         get_cache("#{method}:#{with}", options) { send(method, with) }
+       elsif withs = options.delete(:withs)
+         get_cache("#{method}:#{withs}", options) { send(method, *withs) }
+       else
+         get_cache(method, options) { send(method) }
+       end
+     end
+     alias :cached :caches
+
+     def cached?(cache_id = nil)
+       fetch_cache(cache_id).nil? ? false : true
+     end
+     alias :is_cached? :cached?
+
+     def fetch_cache(cache_id)
+       return if ActsAsCached.config[:skip_gets]
+
+       autoload_missing_constants do
+         cache_store(:get, cache_key(cache_id))
+       end
+     end
+
+     def fetch_cachable_data(cache_id = nil)
+       finder = cache_config[:finder] || :find
+       return send(finder) unless cache_id
+
+       args = [cache_id]
+       args << cache_options.dup unless cache_options.blank?
+       send(finder, *args)
+     end
+
+     def cache_namespace
+       cache_store.respond_to?(:namespace) ? cache_store(:namespace) : (CACHE.instance_variable_get('@options') && CACHE.instance_variable_get('@options')[:namespace])
+     end
+
+     # Memcache-client automatically prepends the namespace, plus a colon, onto keys, so we take that into account for the max key length.
+     # Rob Sanheim
+     def max_key_length
+       unless @max_key_length
+         key_size = cache_config[:key_size] || 250
+         @max_key_length = cache_namespace ? (key_size - cache_namespace.length - 1) : key_size
+       end
+       @max_key_length
+     end
+
+     def cache_name
+       @cache_name ||= respond_to?(:base_class) ? base_class.name : name
+     end
+
+     def cache_keys(*cache_ids)
+       cache_ids.flatten.map { |cache_id| cache_key(cache_id) }
+     end
+
+     def cache_key(cache_id)
+       [cache_name, cache_config[:version], cache_id].compact.join(':').gsub(' ', '_')[0..(max_key_length - 1)]
+     end
+
+     def cache_store(method = nil, *args)
+       return cache_config[:store] unless method
+
+       load_constants = %w( get get_multi ).include? method.to_s
+
+       swallow_or_raise_cache_errors(load_constants) do
+         cache_config[:store].send(method, *args)
+       end
+     end
+
+     def swallow_or_raise_cache_errors(load_constants = false, &block)
+       load_constants ? autoload_missing_constants(&block) : yield
+     rescue TypeError => error
+       if error.to_s.include? 'Proc'
+         raise MarshalError, "Most likely an association callback defined with a Proc was triggered; see http://ar.rubyonrails.com/classes/ActiveRecord/Associations/ClassMethods.html (Association Callbacks) for details on converting it to a method-based callback"
+       else
+         raise error
+       end
+     rescue Exception => error
+       if ActsAsCached.config[:raise_errors]
+         raise error
+       else
+         Rails.logger.debug "MemCache Error: #{error.message}" rescue nil
+         nil
+       end
+     end
+
+     def autoload_missing_constants
+       yield
+     rescue ArgumentError, MemCache::MemCacheError => error
+       lazy_load ||= Hash.new { |hash, hash_key| hash[hash_key] = true; false }
+       if error.to_s[/undefined class|referred/] && !lazy_load[error.to_s.split.last.sub(/::$/, '').constantize] then retry
+       else raise error end
+     end
+   end
+
+   module InstanceMethods
+     def self.included(base)
+       base.send :delegate, :cache_config,  :to => 'self.class'
+       base.send :delegate, :cache_options, :to => 'self.class'
+     end
+
+     def get_cache(key = nil, options = {}, &block)
+       self.class.get_cache(cache_id(key), options, &block)
+     end
+
+     def set_cache(ttl = nil)
+       self.class.set_cache(cache_id, self, ttl)
+     end
+
+     def reset_cache(key = nil)
+       self.class.reset_cache(cache_id(key))
+     end
+
+     def expire_cache(key = nil)
+       self.class.expire_cache(cache_id(key))
+     end
+     alias :clear_cache :expire_cache
+
+     def cached?(key = nil)
+       self.class.cached? cache_id(key)
+     end
+
+     def cache_key
+       self.class.cache_key(cache_id)
+     end
+
+     def cache_id(key = nil)
+       id = send(cache_config[:cache_id] || :id)
+       key.nil? ? id : "#{id}:#{key}"
+     end
+
+     def caches(method, options = {})
+       key = "#{id}:#{method}"
+       if options.keys.include?(:with)
+         with = options.delete(:with)
+         self.class.get_cache("#{key}:#{with}", options) { send(method, with) }
+       elsif withs = options.delete(:withs)
+         self.class.get_cache("#{key}:#{withs}", options) { send(method, *withs) }
+       else
+         self.class.get_cache(key, options) { send(method) }
+       end
+     end
+     alias :cached :caches
+
+     # Ryan King
+     def set_cache_with_associations
+       Array(cache_options[:include]).each do |assoc|
+         send(assoc).reload
+       end if cache_options[:include]
+       set_cache
+     end
+
+     # Lourens Naud
+     def expire_cache_with_associations(*associations_to_sweep)
+       (Array(cache_options[:include]) + associations_to_sweep).flatten.uniq.compact.each do |assoc|
+         Array(send(assoc)).compact.each { |item| item.expire_cache if item.respond_to?(:expire_cache) }
+       end
+       expire_cache
+     end
+   end
+
+   class MarshalError < StandardError; end
+   class MemCache; end
+   class MemCache::MemCacheError < StandardError; end
+ end
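
A few illustrative calls tying the class and instance methods above together; Story, find_popular, and formatted_body are hypothetical:

    Story.get_caches(1, 2, 3)                      # uses get_multi; returns a hash keyed by cache_id
    Story.get_caches_as_list(1, 2, 3)              # same lookup, returned as an ordered array
    Story.caches(:find_popular, :with => :today)   # cached under "Story:find_popular:today"

    story = Story.get_cache(1)
    story.caches(:formatted_body)                  # cached under "Story:1:formatted_body"
    story.expire_cache_with_associations           # expires the record plus any cached :include associations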
@@ -0,0 +1,111 @@
+ module ActsAsCached
+   module Config
+     extend self
+
+     @@class_config = {}
+     mattr_reader :class_config
+
+     def valued_keys
+       [ :store, :version, :pages, :per_page, :ttl, :finder, :cache_id, :find_by, :key_size ]
+     end
+
+     def setup(options)
+       config = options['defaults']
+
+       case options[Rails.env]
+       when Hash   then config.update(options[Rails.env])
+       when String then config[:disabled] = true
+       end
+
+       config.symbolize_keys!
+
+       setup_benchmarking! if config[:benchmarking] && !config[:disabled]
+
+       setup_cache_store! config
+       config
+     end
+
+     def setup_benchmarking!
+       ActiveSupport.on_load(:action_controller) do
+         include ActsAsCached::MemcacheRuntime
+       end
+     end
+
+     def setup_cache_store!(config)
+       config[:store] =
+         if config[:store].nil?
+           setup_memcache config
+         elsif config[:store].respond_to? :constantize
+           config[:store].constantize.new
+         else
+           config[:store]
+         end
+     end
+
+     def setup_memcache(config)
+       config[:namespace] << "-#{Rails.env}"
+
+       # if someone (e.g., interlock) already set up memcached, then
+       # we need to stop here
+       return CACHE if Object.const_defined?(:CACHE)
+
+       silence_warnings do
+         Object.const_set :CACHE, memcache_client(config)
+         Object.const_set :SESSION_CACHE, memcache_client(config) if config[:session_servers]
+       end
+
+       CACHE.respond_to?(:servers=) ? (CACHE.servers = Array(config.delete(:servers))) : CACHE.instance_variable_set('@servers', Array(config.delete(:servers)))
+       CACHE.instance_variable_get('@options')[:namespace] = config[:namespace] if CACHE.instance_variable_get('@options')
+
+       SESSION_CACHE.servers = Array(config[:session_servers]) if config[:session_servers]
+
+       setup_session_store   if config[:sessions]
+       setup_fragment_store! if config[:fragments]
+       setup_fast_hash!      if config[:fast_hash]
+       setup_fastest_hash!   if config[:fastest_hash]
+
+       CACHE
+     end
+
+     def memcache_client(config)
+       (config[:client] || "MemCache").classify.constantize.new(config)
+     end
+
+     def setup_session_store
+       return # Set up the session store as usual in config/application.rb
+
+       ActionController::Base.session_store = :mem_cache_store
+       cache = defined?(SESSION_CACHE) ? SESSION_CACHE : CACHE
+       ActionController::Session::AbstractStore::DEFAULT_OPTIONS.update(
+         :memcache_server => cache.servers,
+         :readonly        => cache.readonly?,
+         :failover        => cache.failover,
+         :timeout         => cache.timeout,
+         :logger          => cache.logger,
+         :namespace       => cache.namespace
+       )
+     end
+
+     def setup_fragment_store!
+       ActsAsCached::FragmentCache.setup!
+     end
+
+     # Breaks compatibility with non-Ruby memcache clients in exchange for a speedup.
+     # Consistent across all platforms.
+     def setup_fast_hash!
+       def CACHE.hash_for(key)
+         (0...key.length).inject(0) do |sum, i|
+           sum + key[i]
+         end
+       end
+     end
+
+     # Breaks compatibility with non-Ruby memcache clients in exchange for a speedup.
+     # NOT consistent across all platforms: Object#hash gives different results
+     # on different architectures. Only use this if all your apps run on the
+     # same arch.
+     def setup_fastest_hash!
+       def CACHE.hash_for(key) key.hash end
+     end
+   end
+ end
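
setup_cache_store! also accepts a custom store: a class name given as a string (for example via the YAML 'store:' key) is constantized and instantiated, and an already-built object is used as-is, provided it answers the calls cache_store makes (get, set, delete, plus get_multi if you want get_caches). A minimal in-memory sketch, purely illustrative and ignoring ttl:

    class HashStore
      def initialize
        @data = {}             # plain Hash standing in for memcached
      end

      def get(key)
        @data[key]
      end

      def set(key, value, ttl = nil)
        @data[key] = value     # ttl is accepted but ignored in this sketch
      end

      def delete(key)
        @data.delete(key)
      end

      def get_multi(*keys)
        keys.each_with_object({}) { |key, hash| hash[key] = @data[key] }
      end
    end

Naming it via store: HashStore in memcached.yml, or passing an instance programmatically, routes all cache_store calls to it.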