revo-cache_fu 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,305 @@
+ module ActsAsCached
+   module ClassMethods
+     @@nil_sentinel = :_nil
+
+     def cache_config
+       config = ActsAsCached::Config.class_config[cache_name] ||= {}
+       if name == cache_name
+         config
+       else
+         # sti
+         ActsAsCached::Config.class_config[name] ||= config.dup
+       end
+     end
+
+     def cache_options
+       cache_config[:options] ||= {}
+     end
+
+     def get_cache(*args)
+       options = args.last.is_a?(Hash) ? args.pop : {}
+       args = args.flatten
+
+       ##
+       # head off to get_caches if we were passed multiple cache_ids
+       if args.size > 1
+         return get_caches(args, options)
+       else
+         cache_id = args.first
+       end
+
+       if (item = fetch_cache(cache_id)).nil?
+         set_cache(cache_id, block_given? ? yield : fetch_cachable_data(cache_id), options[:ttl])
+       else
+         @@nil_sentinel == item ? nil : item
+       end
+     end
+
+     ##
+     # This method accepts an array of cache_ids which it will use to call
+     # get_multi on your cache store. Any misses will be fetched and saved to
+     # the cache, and a hash keyed by cache_id will ultimately be returned.
+     #
+     # If your cache store does not support #get_multi an exception will be raised.
+     def get_caches(*args)
+       raise NoGetMulti unless cache_store.respond_to? :get_multi
+
+       options = args.last.is_a?(Hash) ? args.pop : {}
+       cache_ids = args.flatten.map(&:to_s)
+       keys = cache_keys(cache_ids)
+
+       # Map memcache keys to object cache_ids in { memcache_key => object_id } format
+       keys_map = Hash[*keys.zip(cache_ids).flatten]
+
+       # Call get_multi and figure out which keys were missed based on what was a hit
+       hits = ActsAsCached.config[:disabled] ? {} : (cache_store(:get_multi, *keys) || {})
+
+       # Misses can take the form of key => nil
+       hits.delete_if { |key, value| value.nil? }
+
+       misses = keys - hits.keys
+       hits.each { |k, v| hits[k] = nil if v == @@nil_sentinel }
+
+       # Return our hash if there are no misses
+       return hits.values.index_by(&:cache_id) if misses.empty?
+
+       # Find any missed records
+       needed_ids = keys_map.values_at(*misses)
+       missed_records = Array(fetch_cachable_data(needed_ids))
+
+       # Cache the missed records
+       missed_records.each { |missed_record| missed_record.set_cache(options[:ttl]) }
+
+       # Return all records as a hash indexed by object cache_id
+       (hits.values + missed_records).index_by(&:cache_id)
+     end
+
+     # simple wrapper for get_caches that
+     # returns the items as an ordered array
+     def get_caches_as_list(*args)
+       cache_ids = args.last.is_a?(Hash) ? args.first : args
+       cache_ids = [cache_ids].flatten
+       hash = get_caches(*args)
+
+       cache_ids.map do |key|
+         hash[key]
+       end
+     end
+
+     def set_cache(cache_id, value, ttl = nil)
+       returning(value) do |v|
+         v = @@nil_sentinel if v.nil?
+         cache_store(:set, cache_key(cache_id), v, ttl || cache_config[:ttl] || 1500)
+       end
+     end
+
+     def expire_cache(cache_id = nil)
+       cache_store(:delete, cache_key(cache_id))
+       true
+     end
+     alias :clear_cache :expire_cache
+
+     def reset_cache(cache_id = nil)
+       set_cache(cache_id, fetch_cachable_data(cache_id))
+     end
+
+     ##
+     # Encapsulates the pattern of writing custom cache methods
+     # which do nothing but wrap custom finders.
+     #
+     #   => Story.caches(:find_popular)
+     #
+     # is the same as
+     #
+     #   def self.cached_find_popular
+     #     get_cache(:find_popular) { find_popular }
+     #   end
+     #
+     # The method also accepts both a :ttl and/or a :with key.
+     # Obviously the :ttl value controls how long this method will
+     # stay cached, while the :with key's value will be passed along
+     # to the method. The hash of the :with key will be stored with the key,
+     # making two near-identical #caches calls with different :with values utilize
+     # different caches.
+     #
+     #   => Story.caches(:find_popular, :with => :today)
+     #
+     # is the same as
+     #
+     #   def self.cached_find_popular
+     #     get_cache("find_popular:today") { find_popular(:today) }
+     #   end
+     #
+     # If your target method accepts multiple parameters, pass :withs an array.
+     #
+     #   => Story.caches(:find_popular, :withs => [ :one, :two ])
+     #
+     # is the same as
+     #
+     #   def self.cached_find_popular
+     #     get_cache("find_popular:onetwo") { find_popular(:one, :two) }
+     #   end
+     def caches(method, options = {})
+       if options.keys.include?(:with)
+         with = options.delete(:with)
+         get_cache("#{method}:#{with}", options) { send(method, with) }
+       elsif withs = options.delete(:withs)
+         get_cache("#{method}:#{withs}", options) { send(method, *withs) }
+       else
+         get_cache(method, options) { send(method) }
+       end
+     end
+     alias :cached :caches
+
+     def cached?(cache_id = nil)
+       fetch_cache(cache_id).nil? ? false : true
+     end
+     alias :is_cached? :cached?
+
+     def fetch_cache(cache_id)
+       return if ActsAsCached.config[:skip_gets]
+
+       autoload_missing_constants do
+         cache_store(:get, cache_key(cache_id))
+       end
+     end
+
+     def fetch_cachable_data(cache_id = nil)
+       finder = cache_config[:finder] || :find
+       return send(finder) unless cache_id
+
+       args = [cache_id]
+       args << cache_options.dup unless cache_options.blank?
+       send(finder, *args)
+     end
+
+     def cache_namespace
+       cache_store(:namespace)
+     end
+
+     # Memcache-client automatically prepends the namespace, plus a colon, onto keys, so we take that into account for the max key length.
+     # Rob Sanheim
+     def max_key_length
+       unless @max_key_length
+         key_size = cache_config[:key_size] || 250
+         @max_key_length = cache_namespace ? (key_size - cache_namespace.length - 1) : key_size
+       end
+       @max_key_length
+     end
+
+     def cache_name
+       @cache_name ||= respond_to?(:base_class) ? base_class.name : name
+     end
+
+     def cache_keys(*cache_ids)
+       cache_ids.flatten.map { |cache_id| cache_key(cache_id) }
+     end
+
+     def cache_key(cache_id)
+       [cache_name, cache_config[:version], cache_id].compact.join(':').gsub(' ', '_')[0..(max_key_length - 1)]
+     end
+
+     def cache_store(method = nil, *args)
+       return cache_config[:store] unless method
+
+       load_constants = %w( get get_multi ).include? method.to_s
+
+       swallow_or_raise_cache_errors(load_constants) do
+         cache_config[:store].send(method, *args)
+       end
+     end
+
+     def swallow_or_raise_cache_errors(load_constants = false, &block)
+       load_constants ? autoload_missing_constants(&block) : yield
+     rescue TypeError => error
+       if error.to_s.include? 'Proc'
+         raise MarshalError, "Most likely an association callback defined with a Proc is triggered, see http://ar.rubyonrails.com/classes/ActiveRecord/Associations/ClassMethods.html (Association Callbacks) for details on converting this to a method based callback"
+       else
+         raise error
+       end
+     rescue Exception => error
+       if ActsAsCached.config[:raise_errors]
+         raise error
+       else
+         RAILS_DEFAULT_LOGGER.debug "MemCache Error: #{error.message}" rescue nil
+         nil
+       end
+     end
+
+     def autoload_missing_constants
+       yield
+     rescue ArgumentError, MemCache::MemCacheError => error
+       lazy_load ||= Hash.new { |hash, hash_key| hash[hash_key] = true; false }
+       if error.to_s[/undefined class|referred/] && !lazy_load[error.to_s.split.last.constantize] then retry
+       else raise error end
+     end
+   end
+
+   module InstanceMethods
+     def self.included(base)
+       base.send :delegate, :cache_config, :to => 'self.class'
+       base.send :delegate, :cache_options, :to => 'self.class'
+     end
+
+     def get_cache(key = nil, options = {}, &block)
+       self.class.get_cache(cache_id(key), options, &block)
+     end
+
+     def set_cache(ttl = nil)
+       self.class.set_cache(cache_id, self, ttl)
+     end
+
+     def reset_cache(key = nil)
+       self.class.reset_cache(cache_id(key))
+     end
+
+     def expire_cache(key = nil)
+       self.class.expire_cache(cache_id(key))
+     end
+     alias :clear_cache :expire_cache
+
+     def cached?(key = nil)
+       self.class.cached? cache_id(key)
+     end
+
+     def cache_key
+       self.class.cache_key(cache_id)
+     end
+
+     def cache_id(key = nil)
+       id = send(cache_config[:cache_id] || :id)
+       key.nil? ? id : "#{id}:#{key}"
+     end
+
+     def caches(method, options = {})
+       key = "#{id}:#{method}"
+       if options.keys.include?(:with)
+         with = options.delete(:with)
+         self.class.get_cache("#{key}:#{with}", options) { send(method, with) }
+       elsif withs = options.delete(:withs)
+         self.class.get_cache("#{key}:#{withs}", options) { send(method, *withs) }
+       else
+         self.class.get_cache(key, options) { send(method) }
+       end
+     end
+     alias :cached :caches
+
+     # Ryan King
+     def set_cache_with_associations
+       Array(cache_options[:include]).each do |assoc|
+         send(assoc).reload
+       end if cache_options[:include]
+       set_cache
+     end
+
+     # Lourens Naud
+     def expire_cache_with_associations(*associations_to_sweep)
+       (Array(cache_options[:include]) + associations_to_sweep).flatten.uniq.compact.each do |assoc|
+         Array(send(assoc)).compact.each { |item| item.expire_cache if item.respond_to?(:expire_cache) }
+       end
+       expire_cache
+     end
+   end
+
+   class MarshalError < StandardError; end
+ end
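
The class- and instance-level methods above are typically exercised as below. A minimal usage sketch: the Story model and its find_popular finder are the hypothetical examples used in the doc comments, not part of this package, and the sketch assumes acts_as_cached has already been mixed in and configured.

    # Class-level read-through caching (assumes Story has acts_as_cached and a find_popular finder).
    Story.get_cache(1)                                 # caches Story.find(1) under "Story:1"
    Story.get_cache(:weird) { Story.find_weird_ones }  # caches an arbitrary block under "Story:weird"
    Story.caches(:find_popular, :with => :today)       # same as get_cache("find_popular:today") { find_popular(:today) }
    Story.get_caches_as_list(1, 2, 3)                  # one get_multi round trip, results in argument order

    # Instance-level helpers delegate to the class methods using the record's cache_id.
    story = Story.find(1)
    story.expire_cache   # deletes "Story:1"
    story.reset_cache    # re-fetches the record and rewrites the cache entry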
@@ -0,0 +1,97 @@
+ module ActsAsCached
+   module Config
+     extend self
+
+     @@class_config = {}
+     mattr_reader :class_config
+
+     def valued_keys
+       [ :store, :version, :pages, :per_page, :ttl, :finder, :cache_id, :find_by, :key_size ]
+     end
+
+     def setup(options)
+       config = options['defaults']
+
+       case options[RAILS_ENV]
+       when Hash then config.update(options[RAILS_ENV])
+       when String then config[:disabled] = true
+       end
+
+       config.symbolize_keys!
+
+       setup_benchmarking! if config[:benchmarking] && !config[:disabled]
+
+       setup_cache_store! config
+       config
+     end
+
+     def setup_benchmarking!
+       Benchmarking.inject_into_logs!
+     end
+
+     def setup_cache_store!(config)
+       config[:store] =
+         if config[:store].nil?
+           setup_memcache config
+         elsif config[:store].respond_to? :constantize
+           config[:store].constantize.new
+         else
+           config[:store]
+         end
+     end
+
+     def setup_memcache(config)
+       config[:namespace] << "-#{RAILS_ENV}"
+
+       # if someone (e.g., interlock) already set up memcached, then
+       # we need to stop here
+       return CACHE if Object.const_defined?(:CACHE)
+
+       silence_warnings do
+         Object.const_set :CACHE, memcache_client(config)
+         Object.const_set :SESSION_CACHE, memcache_client(config) if config[:session_servers]
+       end
+
+       CACHE.servers = Array(config.delete(:servers))
+       SESSION_CACHE.servers = Array(config[:session_servers]) if config[:session_servers]
+
+       setup_session_store if config[:sessions]
+       setup_fragment_store! if config[:fragments]
+       setup_fast_hash! if config[:fast_hash]
+       setup_fastest_hash! if config[:fastest_hash]
+
+       CACHE
+     end
+
+     def memcache_client(config)
+       (config[:client] || "MemCache").classify.constantize.new(config)
+     end
+
+     def setup_session_store
+       ActionController::Base.session_store = :mem_cache_store
+       ActionController::CgiRequest::DEFAULT_SESSION_OPTIONS.update 'cache' => defined?(SESSION_CACHE) ? SESSION_CACHE : CACHE
+     end
+
+     def setup_fragment_store!
+       ActsAsCached::FragmentCache.setup!
+     end
+
+     # break compatibility with non-ruby memcache clients in exchange for speedup.
+     # consistent across all platforms.
+     def setup_fast_hash!
+       def CACHE.hash_for(key)
+         (0...key.length).inject(0) do |sum, i|
+           sum + key[i]
+         end
+       end
+     end
+
+     # break compatibility with non-ruby memcache clients in exchange for speedup.
+     # NOT consistent across all platforms. Object#hash gives different results
+     # on different architectures. only use if all your apps are running the
+     # same arch.
+     def setup_fastest_hash!
+       def CACHE.hash_for(key) key.hash end
+     end
+   end
+ end
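
Config.setup expects a hash with a 'defaults' section plus optional per-environment sections; cache_fu conventionally loads this from a YAML file (the memcache.yml mentioned in the fragment-cache comments below). An illustrative sketch in Ruby, using only keys the code above actually reads; the values themselves are made up:

    ActsAsCached.config = {
      'defaults' => {
        'namespace'    => 'my_app',                # setup_memcache appends "-#{RAILS_ENV}"
        'servers'      => ['localhost:11211'],
        'ttl'          => 1.hour,
        'sessions'     => false,                   # setup_session_store when true
        'fragments'    => false,                   # setup_fragment_store! when true
        'benchmarking' => false
      },
      # A Hash for the current RAILS_ENV is merged over the defaults;
      # a String value disables caching for that environment.
      'production' => { 'servers' => ['10.0.0.1:11211', '10.0.0.2:11211'] },
      'test'       => 'disabled'
    }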
@@ -0,0 +1,30 @@
+ module ActsAsCached
+   module Disabled
+     def fetch_cache_with_disabled(*args)
+       nil
+     end
+
+     def set_cache_with_disabled(*args)
+       args[1]
+     end
+
+     def expire_cache_with_disabled(*args)
+       true
+     end
+
+     def self.add_to(klass)
+       return if klass.respond_to? :fetch_cache_with_disabled
+       klass.extend self
+
+       class << klass
+         alias_method_chain :fetch_cache, :disabled
+         alias_method_chain :set_cache, :disabled
+         alias_method_chain :expire_cache, :disabled
+       end
+
+       class << CACHE
+         include FragmentCache::DisabledExtensions
+       end if ActsAsCached.config[:fragments] && defined?(FragmentCache::DisabledExtensions)
+     end
+   end
+ end
@@ -0,0 +1,124 @@
+ module ActsAsCached
+   module FragmentCache
+     def self.setup!
+       class << CACHE
+         include Extensions
+       end
+
+       setup_fragment_cache_cache
+       setup_rails_for_memcache_fragments
+       setup_rails_for_action_cache_options
+     end
+
+     # add :ttl option to cache helper and set cache store memcache object
+     def self.setup_rails_for_memcache_fragments
+       if ::ActionView.const_defined?(:Template)
+         # Rails 2.1+
+         ::ActionController::Base.cache_store = CACHE
+       else
+         # Rails < svn r8619
+         ::ActionView::Helpers::CacheHelper.class_eval do
+           def cache(name = {}, options = nil, &block)
+             @controller.cache_erb_fragment(block, name, options)
+           end
+         end
+         ::ActionController::Base.fragment_cache_store = CACHE
+       end
+     end
+
+     def self.setup_fragment_cache_cache
+       Object.const_set(:FragmentCacheCache, Class.new { acts_as_cached :store => CACHE })
+     end
+
+     # add :ttl option to caches_action on the per action level by passing in a hash instead of an array
+     #
+     # Examples:
+     #   caches_action :index # will use the default ttl from your memcache.yml, or 25 minutes
+     #   caches_action :index => { :ttl => 5.minutes } # cache index action with 5 minute ttl
+     #   caches_action :page, :feed, :index => { :ttl => 2.hours } # cache index action with 2 hours ttl, all others use default
+     #
+     def self.setup_rails_for_action_cache_options
+       ::ActionController::Caching::Actions::ActionCacheFilter.class_eval do
+         # convert all actions into a hash keyed by action name, with a value of a ttl hash (to match other cache APIs)
+         def initialize(*actions, &block)
+           if [].respond_to?(:extract_options!)
+             # edge
+             @options = actions.extract_options!
+             @actions = actions.inject(@options.except(:cache_path)) do |hsh, action|
+               action.is_a?(Hash) ? hsh.merge(action) : hsh.merge(action => { :ttl => nil })
+             end
+             @options.slice!(:cache_path)
+           else
+             # 1.2.5
+             @actions = actions.inject({}) do |hsh, action|
+               action.is_a?(Hash) ? hsh.merge(action) : hsh.merge(action => { :ttl => nil })
+             end
+           end
+         end
+
+         # override to skip caching/rendering on evaluated if option
+         def before(controller)
+           return unless @actions.include?(controller.action_name.intern)
+
+           # maintaining edge and 1.2.x compatibility with this branch
+           if @options
+             action_cache_path = ActionController::Caching::Actions::ActionCachePath.new(controller, path_options_for(controller, @options))
+           else
+             action_cache_path = ActionController::Caching::Actions::ActionCachePath.new(controller)
+           end
+
+           # should probably be like ActiveRecord::Validations.evaluate_condition. color me lazy.
+           if conditional = @actions[controller.action_name.intern][:if]
+             conditional = conditional.respond_to?(:call) ? conditional.call(controller) : controller.send(conditional)
+           end
+           @actions.delete(controller.action_name.intern) if conditional == false
+
+           cache = controller.read_fragment(action_cache_path.path)
+           if cache && (conditional || conditional.nil?)
+             controller.rendered_action_cache = true
+             if method(:set_content_type!).arity == 2
+               set_content_type!(controller, action_cache_path.extension)
+             else
+               set_content_type!(action_cache_path)
+             end
+             controller.send(:render, :text => cache)
+             false
+           else
+             # 1.2.x compatibility
+             controller.action_cache_path = action_cache_path if controller.respond_to? :action_cache_path
+           end
+         end
+
+         # override to pass along the ttl hash
+         def after(controller)
+           return if !@actions.include?(controller.action_name.intern) || controller.rendered_action_cache
+           # 1.2.x compatibility
+           path = controller.respond_to?(:action_cache_path) ? controller.action_cache_path.path : ActionController::Caching::Actions::ActionCachePath.path_for(controller)
+           controller.write_fragment(path, controller.response.body, action_ttl(controller))
+         end
+
+         private
+           def action_ttl(controller)
+             @actions[controller.action_name.intern]
+           end
+       end
+     end
+
+     module Extensions
+       def read(*args)
+         return if ActsAsCached.config[:skip_gets]
+         FragmentCacheCache.cache_store(:get, args.first)
+       end
+
+       def write(name, content, options = {})
+         ttl = (options.is_a?(Hash) ? options[:ttl] : nil) || ActsAsCached.config[:ttl] || 25.minutes
+         FragmentCacheCache.cache_store(:set, name, content, ttl)
+       end
+     end
+
+     module DisabledExtensions
+       def read(*args) nil end
+       def write(*args) "" end
+     end
+   end
+ end
@@ -0,0 +1,44 @@
+ module ActsAsCached
+   module LocalCache
+     @@local_cache = {}
+     mattr_accessor :local_cache
+
+     def fetch_cache_with_local_cache(*args)
+       @@local_cache[cache_key(args.first)] ||= fetch_cache_without_local_cache(*args)
+     end
+
+     def set_cache_with_local_cache(*args)
+       @@local_cache[cache_key(args.first)] = set_cache_without_local_cache(*args)
+     end
+
+     def expire_cache_with_local_cache(*args)
+       @@local_cache.delete(cache_key(args.first))
+       expire_cache_without_local_cache(*args)
+     end
+     alias :clear_cache_with_local_cache :expire_cache_with_local_cache
+
+     def cached_with_local_cache?(*args)
+       !!@@local_cache[cache_key(args.first)] || cached_without_local_cache?(*args)
+     end
+
+     def self.add_to(klass)
+       return if klass.ancestors.include? self
+       klass.send :include, self
+
+       klass.class_eval do
+         %w( fetch_cache set_cache expire_cache clear_cache cached? ).each do |target|
+           alias_method_chain target, :local_cache
+         end
+       end
+     end
+   end
+ end
+
+ module ActionController
+   class Base
+     def local_cache_for_request
+       ActsAsCached::LocalCache.add_to ActsAsCached::ClassMethods
+       ActsAsCached::LocalCache.local_cache = {}
+     end
+   end
+ end
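
LocalCache memoizes fetches in a per-process hash, and local_cache_for_request (added to ActionController::Base above) installs the alias chain and clears that hash. A sketch of the apparent intended use, resetting the hash on every request; the before_filter wiring below is an assumption, not something shipped in this package:

    class ApplicationController < ActionController::Base
      # Assumption: clear the in-memory cache at the start of each request so
      # memoized entries never outlive the request that populated them.
      before_filter :local_cache_for_request
    end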
@@ -0,0 +1,17 @@
+ class Memcached
+   # A legacy compatibility wrapper for the Memcached class. It has basic compatibility with the <b>memcache-client</b> API.
+   class Rails < ::Memcached
+     def initialize(config)
+       super(config.delete(:servers), config.slice(DEFAULTS.keys))
+     end
+
+     def servers=(servers)
+
+     end
+
+     def delete(key, expiry = 0)
+       super(key)
+     rescue NotFound
+     end
+   end
+ end
@@ -0,0 +1,8 @@
+ Capistrano.configuration(:must_exist).load do
+   %w(start stop restart kill status).each do |cmd|
+     desc "#{cmd} your memcached servers"
+     task "memcached_#{cmd}".to_sym, :roles => :app do
+       run "RAILS_ENV=production #{ruby} #{current_path}/script/memcached_ctl #{cmd}"
+     end
+   end
+ end
@@ -0,0 +1,51 @@
+ require 'acts_as_cached/config'
+ require 'acts_as_cached/cache_methods'
+ require 'acts_as_cached/fragment_cache'
+ require 'acts_as_cached/benchmarking'
+ require 'acts_as_cached/disabled'
+ require 'acts_as_cached/local_cache'
+ require 'acts_as_cached/memcached_rails'
+
+ module ActsAsCached
+   @@config = {}
+   mattr_reader :config
+
+   def self.config=(options)
+     @@config = Config.setup options
+   end
+
+   def self.skip_cache_gets=(boolean)
+     ActsAsCached.config[:skip_gets] = boolean
+   end
+
+   module Mixin
+     def acts_as_cached(options = {})
+       extend ClassMethods
+       include InstanceMethods
+
+       extend Extensions::ClassMethods if defined? Extensions::ClassMethods
+       include Extensions::InstanceMethods if defined? Extensions::InstanceMethods
+
+       options.symbolize_keys!
+
+       options[:store] ||= ActsAsCached.config[:store]
+       options[:ttl] ||= ActsAsCached.config[:ttl]
+
+       # convert the find_by shorthand
+       if find_by = options.delete(:find_by)
+         options[:finder] = "find_by_#{find_by}".to_sym
+         options[:cache_id] = find_by
+       end
+
+       cache_config.replace options.reject { |key,| not Config.valued_keys.include? key }
+       cache_options.replace options.reject { |key,| Config.valued_keys.include? key }
+
+       Disabled.add_to self and return if ActsAsCached.config[:disabled]
+       Benchmarking.add_to self if ActsAsCached.config[:benchmarking]
+     end
+   end
+
+   class CacheException < StandardError; end
+   class NoCacheStore < CacheException; end
+   class NoGetMulti < CacheException; end
+ end
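
Putting it together: a model opts in with acts_as_cached, and the options map onto the handling above (:ttl and :store land in cache_config; :find_by expands to a :finder plus :cache_id). A minimal sketch with a hypothetical Story model, assuming the plugin's init has mixed ActsAsCached::Mixin into ActiveRecord::Base and a store has been configured:

    class Story < ActiveRecord::Base
      # :find_by => :permalink becomes :finder => :find_by_permalink and
      # :cache_id => :permalink, so keys look like "Story:<permalink>".
      acts_as_cached :ttl => 15.minutes, :find_by => :permalink
    end

    Story.get_cache('my-first-story')   # find_by_permalink('my-first-story'), read-through cached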