cache_sweeper 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/LICENSE.txt +21 -0
- data/README.md +586 -0
- data/lib/cache_sweeper/async_worker.rb +121 -0
- data/lib/cache_sweeper/base.rb +34 -0
- data/lib/cache_sweeper/dsl.rb +32 -0
- data/lib/cache_sweeper/flush_middleware.rb +152 -0
- data/lib/cache_sweeper/loader.rb +368 -0
- data/lib/cache_sweeper/logger.rb +104 -0
- data/lib/cache_sweeper/railtie.rb +13 -0
- data/lib/cache_sweeper/version.rb +3 -0
- data/lib/cache_sweeper.rb +108 -0
- data/lib/tasks/cache_sweeper.rake +4 -0
- metadata +104 -0
@@ -0,0 +1,121 @@
|
|
1
|
+
# Sidekiq worker for async cache invalidation
module CacheSweeper
  class AsyncWorker
    # Sidekiq is optional: when it is not loaded (tests, plain Ruby apps)
    # the worker degrades to synchronous execution in .perform_async below.
    if defined?(Sidekiq)
      include Sidekiq::Worker
    end

    # Deletes the given cache keys and logs start/completion/error events.
    #
    # keys    - a single cache key or an array of keys.
    # trigger - symbol describing what scheduled the job (:instant, :request, ...),
    #           carried through to the log payloads only.
    #
    # Re-raises any error after logging so Sidekiq's retry machinery still sees it.
    def perform(keys, trigger = :instant)
      start_time = Time.current
      keys_array = Array(keys)
      log_job_start(keys_array, trigger)

      deleted_count = CacheSweeper.delete_cache_keys(keys_array, {
        job_id: jid,
        mode: :async,
        trigger: trigger
      })
      failed_count = keys_array.length - deleted_count

      duration = (Time.current - start_time) * 1000
      log_job_completion(keys_array, deleted_count, failed_count, duration, [], trigger)

    rescue => e
      duration = (Time.current - start_time) * 1000
      log_job_error(e, keys_array, duration, trigger)
      raise e
    end

    # Schedules the job through Sidekiq when available, otherwise performs
    # the work inline (e.g. in test environments without Sidekiq).
    def self.perform_async(keys, trigger = :instant)
      if defined?(Sidekiq) && self.ancestors.include?(Sidekiq::Worker)
        log_job_scheduling(keys, :sidekiq, trigger)
        # BUG FIX: previously `self.set(sidekiq_opts).super(keys, trigger)`,
        # which raised at runtime — `sidekiq_opts` is not defined in this
        # scope, and `super` cannot be invoked as a method on the Setter
        # returned by `set`. Delegate to Sidekiq's own perform_async via the
        # `super` keyword instead; callers needing per-job options already
        # use `AsyncWorker.set(opts).perform_async(...)` directly.
        super(keys, trigger)
      else
        # In test environment or when Sidekiq is not available, perform synchronously
        log_job_scheduling(keys, :synchronous, trigger)
        new.perform(keys, trigger)
      end
    end

    private

    # Structured log entry emitted before any deletion work starts.
    def log_job_start(keys, trigger)
      CacheSweeper::Logger.log_async_jobs("Async job started", :info, {
        job_id: jid,
        keys_count: keys.length,
        keys: keys,
        trigger: trigger
      })
    end

    # Logs the outcome of a single deletion attempt.
    # NOTE(review): no caller inside this class uses this helper — presumably
    # kept for CacheSweeper.delete_cache_keys integration; verify before removing.
    def log_cache_deletion(keys, status, error = nil)
      case status
      when :success
        CacheSweeper::Logger.log_async_jobs("Cache deleted: #{Array(keys).length} keys", :debug, {
          job_id: jid,
          keys_count: Array(keys).length,
          keys: Array(keys),
          status: 'success'
        })
      when :rails_not_available
        CacheSweeper::Logger.log_async_jobs("Rails cache not available for #{Array(keys).length} keys", :warn, {
          job_id: jid,
          keys_count: Array(keys).length,
          keys: Array(keys),
          status: 'rails_not_available'
        })
      when :error
        CacheSweeper::Logger.log_error(error, {
          job_id: jid,
          keys_count: Array(keys).length,
          keys: Array(keys),
          status: 'error',
          error_type: 'cache_delete_error'
        })
      end
    end

    # Emits both an info log and a performance metric once the job finishes.
    def log_job_completion(keys, deleted_count, failed_count, duration, errors, trigger)
      CacheSweeper::Logger.log_async_jobs("Async job completed", :info, {
        job_id: jid,
        keys_count: keys.length,
        deleted_count: deleted_count,
        failed_count: failed_count,
        duration_ms: duration.round(3),
        errors: errors,
        trigger: trigger
      })

      CacheSweeper::Logger.log_performance("async_cache_deletion", duration, {
        job_id: jid,
        keys_count: keys.length,
        deleted_count: deleted_count,
        failed_count: failed_count,
        trigger: trigger
      })
    end

    # Logs an unrecoverable job failure before the exception is re-raised.
    def log_job_error(error, keys, duration, trigger)
      CacheSweeper::Logger.log_error(error, {
        job_id: jid,
        keys: keys,
        duration_ms: duration.round(3),
        error_type: 'async_job_error',
        trigger: trigger
      })
    end

    # Class-level log helper used by .perform_async (the `private` marker
    # above does not apply to singleton methods, so this stays callable).
    def self.log_job_scheduling(keys, method, trigger)
      CacheSweeper::Logger.log_async_jobs("Async job scheduled", :info, {
        method: method,
        keys_count: Array(keys).length,
        keys: Array(keys),
        trigger: trigger
      })
    end

    # Lazily generated job id for log correlation when the job runs outside
    # Sidekiq. NOTE(review): this also shadows Sidekiq's own `jid` accessor
    # when Sidekiq::Worker is included — confirm that is intentional.
    def jid
      @jid ||= SecureRandom.hex(8)
    end
  end
end
|
@@ -0,0 +1,34 @@
|
|
1
|
+
# Base class for all sweepers
module CacheSweeper
  class Base
    include CacheSweeper::DSL

    class << self
      # Per-sweeper configuration, read by Loader's resolve_* helpers as the
      # middle layer between rule-level and global settings.
      attr_accessor :trigger, :mode, :queue, :sidekiq_options

      # Declarative configuration entry point. Each supported option is only
      # written when explicitly present, so an omitted key leaves the current
      # (or inherited nil) value untouched. The chosen mode is validated last.
      def sweeper_options(options = {})
        %i[trigger mode queue sidekiq_options].each do |opt|
          instance_variable_set("@#{opt}", options[opt]) if options.key?(opt)
        end
        CacheSweeper.validate_async_mode(@mode, "sweeper #{self.name}")
      end
    end

    # Evaluates a rule's :if condition against this sweeper instance.
    # Procs run via instance_exec (so they see sweeper helpers), symbols and
    # strings dispatch to an instance method of the same name, and anything
    # else counts as "no condition" and passes.
    def call_condition(condition, *args)
      if condition.is_a?(Proc)
        instance_exec(*args, &condition)
      elsif condition.is_a?(Symbol) || condition.is_a?(String)
        send(condition, *args)
      else
        true
      end
    end
  end
end
|
@@ -0,0 +1,32 @@
|
|
1
|
+
# lib/cache_sweeper/dsl.rb
module CacheSweeper
  module DSL
    # Including this module grants the class the `watch` macro below.
    def self.included(base)
      base.extend ClassMethods
    end

    module ClassMethods
      # Registers a cache-invalidation rule on the including sweeper class.
      # All options are optional; unspecified ones fall back to sweeper- or
      # global-level configuration when the rule is later resolved.
      def watch(association = nil, attributes: nil, if: nil, keys: nil, trigger: nil, mode: nil, queue: nil, sidekiq_options: nil, callback: nil, on: nil)
        CacheSweeper.validate_async_mode(mode, "rule in #{self.name}")
        # `if` is a Ruby keyword, so the keyword argument cannot be referenced
        # by name; it has to be read back out of the local binding.
        condition = binding.local_variable_get(:if)
        rule = {
          association: association,
          attributes: attributes,
          condition: condition,
          keys: keys,
          trigger: trigger,
          mode: mode,
          queue: queue,
          sidekiq_options: sidekiq_options,
          callback: callback,
          on: on,
          sweeper_class: self
        }
        (@cache_sweeper_rules ||= []) << rule
      end

      # Rules registered on this class (empty array when none were declared).
      def cache_sweeper_rules
        @cache_sweeper_rules || []
      end
    end
  end
end
|
@@ -0,0 +1,152 @@
|
|
1
|
+
require 'request_store'
|
2
|
+
|
3
|
+
# Middleware to flush pending cache keys at the end of each request if batching is enabled
#
# Batches are collected in RequestStore under :cache_sweeper_request_pending
# by CacheSweeper::Loader.invalidate_cache; each entry is a hash of
# { keys:, mode:, sidekiq_options: }.
class CacheSweeperFlushMiddleware
  def initialize(app)
    @app = app
  end

  # Rack entry point: run the downstream app, then process every batch that
  # accumulated during the request. :async batches are handed to the
  # AsyncWorker; everything else is deleted inline. The batch store is
  # always cleared in `ensure` so keys never leak into the next request.
  def call(env)
    start_time = Time.current
    request_id = SecureRandom.hex(8)

    log_request_start(request_id, env)

    status, headers, response = @app.call(env)

    # At end of request, flush all request-level batched keys
    pending = RequestStore.store[:cache_sweeper_request_pending] || []

    if pending.any?
      log_request_flush_start(request_id, pending)

      async_jobs_scheduled = 0
      instant_deletions = 0
      failed_deletions = 0
      errors = []

      pending.each do |entry|
        keys, mode, sidekiq_options = entry.values_at(:keys, :mode, :sidekiq_options)

        begin
          if mode == :async
            CacheSweeper::AsyncWorker.set(sidekiq_options || {}).perform_async(keys, :request)
            async_jobs_scheduled += 1
            log_batch_processing(request_id, keys, :async_scheduled, sidekiq_options)
          else
            deleted_count = CacheSweeper.delete_cache_keys(keys, {
              request_id: request_id,
              mode: :inline,
              trigger: :request
            })
            instant_deletions += deleted_count
            failed_deletions += Array(keys).length - deleted_count
          end
        rescue => e
          # A failing batch must not prevent the remaining batches from flushing.
          errors << { keys: keys, error: e.message }
          log_batch_processing(request_id, keys, :batch_error, sidekiq_options, e)
        end
      end

      log_request_flush_completion(request_id, {
        total_batches: pending.length,
        async_jobs_scheduled: async_jobs_scheduled,
        instant_deletions: instant_deletions,
        failed_deletions: failed_deletions,
        errors: errors
      })
    else
      log_request_no_flush(request_id)
    end

    [status, headers, response]
  rescue => e
    duration = (Time.current - start_time) * 1000
    log_request_error(request_id, e, duration)
    raise e
  ensure
    RequestStore.store[:cache_sweeper_request_pending] = []
    duration = (Time.current - start_time) * 1000
    log_request_completion(request_id, duration)
  end

  private

  def log_request_start(request_id, env)
    CacheSweeper::Logger.log_middleware("Request started", :debug, {
      request_id: request_id,
      method: env['REQUEST_METHOD'],
      path: env['PATH_INFO']
    })
  end

  # Summarizes the pending batches before they are flushed.
  def log_request_flush_start(request_id, pending)
    total_keys = pending.sum { |entry| Array(entry[:keys]).length }
    # BUG FIX: batches are stored as { keys:, mode:, sidekiq_options: }
    # (see the destructuring in #call), so async batches must be detected
    # via entry[:mode]. The previous `entry[:async]` lookup always returned
    # nil, reporting 0 async batches regardless of the actual mix.
    async_count = pending.count { |entry| entry[:mode] == :async }
    instant_count = pending.length - async_count

    CacheSweeper::Logger.log_middleware("Request flush started", :info, {
      request_id: request_id,
      batch_count: pending.length,
      total_keys: total_keys,
      async_batches: async_count,
      instant_batches: instant_count
    })
  end

  # Per-batch outcome logging. Only :async_scheduled and :batch_error are
  # produced by #call today; the other statuses are kept for completeness.
  def log_batch_processing(request_id, keys, status, sidekiq_options = nil, error = nil)
    case status
    when :async_scheduled
      CacheSweeper::Logger.log_middleware("Batch scheduled async", :debug, {
        request_id: request_id,
        keys: Array(keys),
        sidekiq_options: sidekiq_options
      })
    when :rails_not_available
      CacheSweeper::Logger.log_middleware("Rails cache not available", :warn, {
        request_id: request_id,
        keys: Array(keys)
      })
    when :error
      CacheSweeper::Logger.log_error(error, {
        request_id: request_id,
        keys: Array(keys),
        error_type: 'batch_processing_error'
      })
    when :batch_error
      CacheSweeper::Logger.log_error(error, {
        request_id: request_id,
        keys: Array(keys),
        sidekiq_options: sidekiq_options,
        error_type: 'batch_error'
      })
    end
  end

  def log_request_flush_completion(request_id, stats)
    CacheSweeper::Logger.log_middleware("Request flush completed", :info, {
      request_id: request_id,
      **stats
    })
  end

  def log_request_no_flush(request_id)
    CacheSweeper::Logger.log_middleware("No cache flush needed", :debug, {
      request_id: request_id
    })
  end

  def log_request_error(request_id, error, duration)
    CacheSweeper::Logger.log_error(error, {
      request_id: request_id,
      duration_ms: duration.round(3),
      error_type: 'middleware_error'
    })
  end

  def log_request_completion(request_id, duration)
    CacheSweeper::Logger.log_performance("request_processing", duration, {
      request_id: request_id
    })
  end
end
|
@@ -0,0 +1,368 @@
|
|
1
|
+
require 'request_store'
require 'set'

module CacheSweeper
  # Loader: discovers sweeper classes, attaches ActiveRecord callbacks to the
  # models they watch, and routes cache invalidation to the configured
  # trigger/mode (instant vs request-batched, inline vs async).
  module Loader
    # Directory (relative to Rails.root) scanned by load_sweepers!.
    SWEEPER_PATH = 'app/sweepers'.freeze

    class << self
      # Configuration resolution methods
      # Precedence for all three: rule option > sweeper-class option > global.
      def resolve_trigger(rule, sweeper)
        rule[:trigger] || sweeper.trigger || CacheSweeper.trigger
      end

      def resolve_mode(rule, sweeper)
        rule[:mode] || sweeper.mode || CacheSweeper.mode
      end

      def resolve_queue(rule, sweeper)
        rule[:queue] || sweeper.queue || CacheSweeper.queue
      end

      # Builds the Sidekiq options hash for a rule, layering rule options
      # over sweeper options over globals, then folding in the queue.
      def resolve_sidekiq_options(rule, sweeper)
        # Merge options in order of precedence: rule > sweeper > global
        options = CacheSweeper.sidekiq_options.dup
        options.merge!(sweeper.sidekiq_options) if sweeper.sidekiq_options
        options.merge!(rule[:sidekiq_options]) if rule[:sidekiq_options]

        # Add queue to options if specified
        queue = resolve_queue(rule, sweeper)
        options[:queue] = queue if queue && queue != :default

        options
      end
    end

    # Accumulates a key in the per-request store. :request keys are deleted
    # inline at flush time; :job keys are handed to the AsyncWorker instead.
    # Any other mode is silently ignored.
    def self.collect_pending_key(key, sweeper = nil, mode = :request)
      if mode == :request
        RequestStore.store["cache_sweeper_pending_keys_#{sweeper&.name || 'global'}"] ||= Set.new
        RequestStore.store["cache_sweeper_pending_keys_#{sweeper&.name || 'global'}"] << key
      elsif mode == :job
        RequestStore.store["cache_sweeper_job_keys_#{sweeper&.name || 'global'}"] ||= Set.new
        RequestStore.store["cache_sweeper_job_keys_#{sweeper&.name || 'global'}"] << key
      end
    end

    # Flushes both the request-level and job-level key sets collected by
    # collect_pending_key for one sweeper (or the 'global' bucket), resetting
    # each set after it is processed and logging a performance metric.
    def self.flush_pending_keys(sweeper: nil)
      start_time = Time.current
      sweeper_name = sweeper&.name || 'global'

      CacheSweeper::Logger.log_cache_operations("Flushing pending keys for sweeper: #{sweeper_name}", :debug, {
        sweeper: sweeper_name
      })

      # Flush request-level keys
      request_key = "cache_sweeper_pending_keys_#{sweeper_name}"
      keys = RequestStore.store[request_key]
      if keys&.any?
        if defined?(Rails) && Rails.respond_to?(:cache)
          deleted_count = CacheSweeper.delete_cache_keys(keys, {
            sweeper: sweeper_name,
            mode: :inline,
            trigger: :request
          })
          failed_count = keys.length - deleted_count
        else
          # No Rails cache available: count everything as failed.
          deleted_count = 0
          failed_count = keys.length
        end

        CacheSweeper::Logger.log_cache_operations("Flushed request-level keys", :info, {
          sweeper: sweeper_name,
          deleted_count: deleted_count,
          failed_count: failed_count,
          total_keys: keys.length
        })

        RequestStore.store[request_key] = Set.new
      end

      # Flush job-level keys
      job_key = "cache_sweeper_job_keys_#{sweeper_name}"
      job_keys = RequestStore.store[job_key]
      if job_keys&.any?
        begin
          CacheSweeper::AsyncWorker.perform_async(job_keys.to_a, :request)
          CacheSweeper::Logger.log_cache_operations("Scheduled async job for pending keys", :info, {
            sweeper: sweeper_name,
            keys_count: job_keys.length,
            keys: job_keys.to_a
          })
        rescue => e
          # Scheduling failure is logged but not re-raised; the set is still
          # cleared below, so these keys are dropped rather than retried.
          CacheSweeper::Logger.log_error(e, {
            sweeper: sweeper_name,
            keys: job_keys.to_a,
            error_type: 'async_job_schedule_error'
          })
        end
        RequestStore.store[job_key] = Set.new
      end

      duration = (Time.current - start_time) * 1000
      CacheSweeper::Logger.log_performance("flush_pending_keys", duration, {
        sweeper: sweeper_name,
        request_keys_flushed: keys&.length || 0,
        job_keys_scheduled: job_keys&.length || 0
      })
    end

    # Requires every *_sweeper.rb under app/sweepers so that sweeper classes
    # register themselves as CacheSweeper::Base descendants.
    def self.load_sweepers!
      Dir[Rails.root.join(SWEEPER_PATH, '**', '*_sweeper.rb')].each { |file| require_dependency file }
    end

    # Walks every loaded sweeper's rules and attaches model callbacks.
    # The watched model is derived from the sweeper name by stripping the
    # 'Sweeper' suffix (FooSweeper -> Foo); rules with an :association watch
    # the associated model instead. Missing models/associations are logged
    # and skipped so one bad rule cannot abort the whole attachment pass.
    def self.hook_sweepers!
      start_time = Time.current
      CacheSweeper::Logger.log_initialization("Starting model attachment process")

      sweeper_count = 0
      rule_count = 0
      error_count = 0
      CacheSweeper::Base.descendants.each do |sweeper|
        sweeper_count += 1
        CacheSweeper::Logger.log_initialization("Processing sweeper: #{sweeper.name}", { sweeper: sweeper.name, rule_count: sweeper.cache_sweeper_rules.length })

        sweeper.cache_sweeper_rules.each do |rule|
          rule_count += 1
          association = rule[:association]
          attributes = rule[:attributes]
          condition = rule[:condition]
          keys = rule[:keys]

          begin
            if association.nil?
              # Direct rule: callback goes on the sweeper's own model.
              model_name = sweeper.name.sub('Sweeper', '')
              model = model_name.constantize
              CacheSweeper::Logger.log_initialization("Attaching direct model callback: #{model_name}", {
                sweeper: sweeper.name,
                model: model_name,
                attributes: attributes,
                callback: rule[:callback] || :after_commit,
                events: rule[:on] || [:create, :update, :destroy]
              })
              attach_callbacks(model, sweeper, attributes, condition, keys, rule)
            else
              # Association rule: callback goes on the associated model,
              # resolved through the parent model's reflection.
              parent_model_name = sweeper.name.sub('Sweeper', '')
              parent_model = parent_model_name.constantize
              assoc_reflection = parent_model.reflect_on_association(association)

              unless assoc_reflection
                CacheSweeper::Logger.warn("Association not found: #{parent_model_name}##{association}", {
                  sweeper: sweeper.name,
                  parent_model: parent_model_name,
                  association: association
                })
                next
              end

              assoc_model = assoc_reflection.klass
              CacheSweeper::Logger.log_initialization("Attaching association callback: #{parent_model_name}##{association} -> #{assoc_model.name}", {
                sweeper: sweeper.name,
                parent_model: parent_model_name,
                association: association,
                assoc_model: assoc_model.name,
                attributes: attributes,
                callback: rule[:callback] || :after_commit,
                events: rule[:on] || [:create, :update, :destroy]
              })
              attach_callbacks(assoc_model, sweeper, attributes, condition, keys, rule, parent_model, association)
            end
          rescue NameError => e
            error_count += 1
            CacheSweeper::Logger.log_error(e, {
              sweeper: sweeper.name,
              association: association,
              error_type: 'model_not_found'
            })
            # Model doesn't exist yet, skip for now
            next
          rescue => e
            error_count += 1
            CacheSweeper::Logger.log_error(e, {
              sweeper: sweeper.name,
              association: association,
              error_type: 'attachment_error'
            })
            next
          end
        end
      end

      duration = (Time.current - start_time) * 1000
      CacheSweeper::Logger.log_initialization("Model attachment completed", {
        sweeper_count: sweeper_count,
        rule_count: rule_count,
        error_count: error_count,
        duration_ms: duration.round(3)
      })
    end

    # Builds the callback lambda for one rule and registers it on `model`
    # via the rule's callback type (default :after_commit) and events.
    # Inside the lambda, `next` aborts processing of the current record only.
    def self.attach_callbacks(model, sweeper, attributes, condition, keys, rule, parent_model = nil, association = nil)
      callback = lambda do |record|
        start_time = Time.current
        sweeper_instance = sweeper.new

        # Log rule execution start
        # NOTE(review): the event label below is a heuristic — a destroyed
        # record that had previous_changes is labeled 'update'; confirm this
        # matches the intended semantics.
        CacheSweeper::Logger.log_rule_execution(rule, record, 'started', {
          event: record.previous_changes.keys.any? ? 'update' : (record.persisted? ? 'create' : 'destroy'),
          changed_attributes: record.saved_changes.keys
        })

        begin
          # Check attribute changes
          # Skip the rule unless at least one watched attribute changed.
          if attributes
            changed = (record.saved_changes.keys.map(&:to_sym) & attributes.map(&:to_sym)).any?
            CacheSweeper::Logger.log_rule_execution(rule, record, "attribute_check: #{changed}", {
              watched_attributes: attributes,
              changed_attributes: record.saved_changes.keys,
              relevant_changes: record.saved_changes.keys.map(&:to_sym) & attributes.map(&:to_sym)
            })
            next unless changed
          end

          # Check condition
          # Rule-level :if, evaluated on a fresh sweeper instance.
          if condition
            condition_result = sweeper_instance.call_condition(condition, record)
            CacheSweeper::Logger.log_rule_execution(rule, record, "condition_check: #{condition_result}", {
              condition: condition.class.name,
              condition_result: condition_result
            })
            next unless condition_result
          end

          # Generate cache keys
          # Proc keys are called with the record; anything else is coerced
          # to an array as-is. A failing key Proc aborts this record only.
          cache_keys = if keys.is_a?(Proc)
            begin
              generated_keys = keys.call(record)
              CacheSweeper::Logger.log_rule_execution(rule, record, "keys_generated", {
                keys_count: Array(generated_keys).length,
                keys: Array(generated_keys)
              })
              generated_keys
            rescue => e
              CacheSweeper::Logger.log_error(e, {
                sweeper: sweeper.name,
                record_class: record.class.name,
                record_id: record.id,
                error_type: 'key_generation_error'
              })
              next
            end
          else
            Array(keys)
          end

          # Execute cache invalidation
          if parent_model && association
            # For association rules, we need to find the parent records
            # that have this record in their association
            parents = parent_model.joins(association).where(association => { id: record.id })
            CacheSweeper::Logger.log_rule_execution(rule, record, "association_processing", {
              parent_count: parents.count,
              association: association
            })
            # NOTE(review): `parent` is never used inside this loop — the same
            # cache_keys are invalidated once per matching parent. Presumably
            # the parent was meant to feed key generation; verify intent.
            Array(parents).each do |parent|
              CacheSweeper::Loader.invalidate_cache(cache_keys, record, rule)
            end
          else
            CacheSweeper::Loader.invalidate_cache(cache_keys, record, rule)
          end

          # Log successful completion
          duration = (Time.current - start_time) * 1000
          CacheSweeper::Logger.log_rule_execution(rule, record, "completed", {
            duration_ms: duration.round(3),
            cache_keys_processed: Array(cache_keys).length
          })

        rescue => e
          # Swallow the error so a sweeper failure never breaks the model's
          # save/destroy; the failure is only logged.
          duration = (Time.current - start_time) * 1000
          CacheSweeper::Logger.log_error(e, {
            sweeper: sweeper.name,
            record_class: record.class.name,
            record_id: record.id,
            duration_ms: duration.round(3),
            error_type: 'rule_execution_error'
          })
        end
      end

      callback_type = rule[:callback] || :after_commit
      events = rule[:on] || [:create, :update, :destroy]
      # NOTE(review): `on:` is only supported by commit/rollback callbacks in
      # ActiveRecord; a rule with e.g. callback: :after_save would raise here.
      model.send(callback_type, callback, on: events)
    end

    # Routes invalidation of `keys` per the rule's resolved trigger and mode:
    # :request trigger batches into RequestStore for the flush middleware;
    # otherwise :async mode schedules an AsyncWorker job and anything else
    # deletes inline. Errors are logged, never raised to the caller.
    # NOTE(review): the `*args` splat absorbs the record passed by
    # attach_callbacks; it is never read — confirm it can be dropped.
    def self.invalidate_cache(keys, *args, rule)
      start_time = Time.current
      sweeper = rule[:sweeper_class]
      trigger = resolve_trigger(rule, sweeper)
      mode = resolve_mode(rule, sweeper)
      sidekiq_opts = resolve_sidekiq_options(rule, sweeper)

      CacheSweeper::Logger.debug("Cache invalidation started", {
        sweeper: sweeper&.name,
        trigger: trigger,
        mode: mode,
        keys_count: Array(keys).length
      })

      begin
        if trigger == :request
          # Defer: the flush middleware processes these entries at the end
          # of the request ({ keys:, mode:, sidekiq_options: } shape).
          RequestStore.store[:cache_sweeper_request_pending] ||= []
          RequestStore.store[:cache_sweeper_request_pending] << { keys: keys, mode: mode, sidekiq_options: sidekiq_opts }
          CacheSweeper::Logger.log_cache_operations("Batched for request: #{Array(keys).inspect} (mode: #{mode})", :info, {
            keys: Array(keys),
            mode: mode,
            batch_size: RequestStore.store[:cache_sweeper_request_pending].length
          })
        else
          if mode == :async
            CacheSweeper::AsyncWorker.set(sidekiq_opts).perform_async(keys, trigger)
            CacheSweeper::Logger.log_cache_operations("Scheduled async job for: #{Array(keys).inspect}", :info, {
              keys: Array(keys),
              sidekiq_options: sidekiq_opts
            })
          else
            if defined?(Rails) && Rails.respond_to?(:cache)
              deleted_count = CacheSweeper.delete_cache_keys(keys, {
                sweeper: sweeper&.name,
                mode: :inline,
                trigger: :instant
              })
              failed_count = Array(keys).length - deleted_count
            else
              CacheSweeper::Logger.log_cache_operations("Rails cache not available for #{Array(keys).length} keys", :warn, {
                keys: Array(keys)
              })
              deleted_count = 0
              failed_count = Array(keys).length
            end

            CacheSweeper::Logger.log_cache_operations("Instant deletion completed", :info, {
              deleted_count: deleted_count,
              failed_count: failed_count,
              total_keys: Array(keys).length
            })
          end
        end

        duration = (Time.current - start_time) * 1000
        CacheSweeper::Logger.log_performance("cache_invalidation", duration, {
          trigger: trigger,
          mode: mode,
          keys_count: Array(keys).length
        })

      rescue => e
        duration = (Time.current - start_time) * 1000
        CacheSweeper::Logger.log_error(e, {
          sweeper: sweeper&.name,
          trigger: trigger,
          mode: mode,
          keys: Array(keys),
          duration_ms: duration.round(3),
          error_type: 'cache_invalidation_error'
        })
      end
    end
  end
end
|