tsikol 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +22 -0
  3. data/CONTRIBUTING.md +84 -0
  4. data/LICENSE +21 -0
  5. data/README.md +579 -0
  6. data/Rakefile +12 -0
  7. data/docs/README.md +69 -0
  8. data/docs/api/middleware.md +721 -0
  9. data/docs/api/prompt.md +858 -0
  10. data/docs/api/resource.md +651 -0
  11. data/docs/api/server.md +509 -0
  12. data/docs/api/test-helpers.md +591 -0
  13. data/docs/api/tool.md +527 -0
  14. data/docs/cookbook/authentication.md +651 -0
  15. data/docs/cookbook/caching.md +877 -0
  16. data/docs/cookbook/dynamic-tools.md +970 -0
  17. data/docs/cookbook/error-handling.md +887 -0
  18. data/docs/cookbook/logging.md +1044 -0
  19. data/docs/cookbook/rate-limiting.md +717 -0
  20. data/docs/examples/code-assistant.md +922 -0
  21. data/docs/examples/complete-server.md +726 -0
  22. data/docs/examples/database-manager.md +1198 -0
  23. data/docs/examples/devops-tools.md +1382 -0
  24. data/docs/examples/echo-server.md +501 -0
  25. data/docs/examples/weather-service.md +822 -0
  26. data/docs/guides/completion.md +472 -0
  27. data/docs/guides/getting-started.md +462 -0
  28. data/docs/guides/middleware.md +823 -0
  29. data/docs/guides/project-structure.md +434 -0
  30. data/docs/guides/prompts.md +920 -0
  31. data/docs/guides/resources.md +720 -0
  32. data/docs/guides/sampling.md +804 -0
  33. data/docs/guides/testing.md +863 -0
  34. data/docs/guides/tools.md +627 -0
  35. data/examples/README.md +92 -0
  36. data/examples/advanced_features.rb +129 -0
  37. data/examples/basic-migrated/app/prompts/weather_chat.rb +44 -0
  38. data/examples/basic-migrated/app/resources/weather_alerts.rb +18 -0
  39. data/examples/basic-migrated/app/tools/get_current_weather.rb +34 -0
  40. data/examples/basic-migrated/app/tools/get_forecast.rb +30 -0
  41. data/examples/basic-migrated/app/tools/get_weather_by_coords.rb +48 -0
  42. data/examples/basic-migrated/server.rb +25 -0
  43. data/examples/basic.rb +73 -0
  44. data/examples/full_featured.rb +175 -0
  45. data/examples/middleware_example.rb +112 -0
  46. data/examples/sampling_example.rb +104 -0
  47. data/examples/weather-service/app/prompts/weather/chat.rb +90 -0
  48. data/examples/weather-service/app/resources/weather/alerts.rb +59 -0
  49. data/examples/weather-service/app/tools/weather/get_current.rb +82 -0
  50. data/examples/weather-service/app/tools/weather/get_forecast.rb +90 -0
  51. data/examples/weather-service/server.rb +28 -0
  52. data/exe/tsikol +6 -0
  53. data/lib/tsikol/cli/templates/Gemfile.erb +10 -0
  54. data/lib/tsikol/cli/templates/README.md.erb +38 -0
  55. data/lib/tsikol/cli/templates/gitignore.erb +49 -0
  56. data/lib/tsikol/cli/templates/prompt.rb.erb +53 -0
  57. data/lib/tsikol/cli/templates/resource.rb.erb +29 -0
  58. data/lib/tsikol/cli/templates/server.rb.erb +24 -0
  59. data/lib/tsikol/cli/templates/tool.rb.erb +60 -0
  60. data/lib/tsikol/cli.rb +203 -0
  61. data/lib/tsikol/error_handler.rb +141 -0
  62. data/lib/tsikol/health.rb +198 -0
  63. data/lib/tsikol/http_transport.rb +72 -0
  64. data/lib/tsikol/lifecycle.rb +149 -0
  65. data/lib/tsikol/middleware.rb +168 -0
  66. data/lib/tsikol/prompt.rb +101 -0
  67. data/lib/tsikol/resource.rb +53 -0
  68. data/lib/tsikol/router.rb +190 -0
  69. data/lib/tsikol/server.rb +660 -0
  70. data/lib/tsikol/stdio_transport.rb +108 -0
  71. data/lib/tsikol/test_helpers.rb +261 -0
  72. data/lib/tsikol/tool.rb +111 -0
  73. data/lib/tsikol/version.rb +5 -0
  74. data/lib/tsikol.rb +72 -0
  75. metadata +219 -0
@@ -0,0 +1,877 @@
1
+ # Caching Recipe
2
+
3
+ This recipe shows various caching strategies to improve performance and reduce load on your MCP server.
4
+
5
+ ## Basic In-Memory Caching
6
+
7
+ ### Simple Cache for Tools
8
+
9
+ ```ruby
10
+ require 'digest'
+
+ class CachedTool < Tsikol::Tool
11
+ def initialize
12
+ super
13
+ @cache = {}
14
+ @cache_ttl = 300 # 5 minutes
15
+ end
16
+
17
+ parameter :query do
18
+ type :string
19
+ required
20
+ description "Query to process"
21
+ end
22
+
23
+ def execute(query:)
24
+ cache_key = generate_cache_key(query)
25
+
26
+ # Check cache
27
+ if cached = get_cached(cache_key)
28
+ log :debug, "Cache hit", key: cache_key
29
+ return cached[:value]
30
+ end
31
+
32
+ # Cache miss - compute result
33
+ log :debug, "Cache miss", key: cache_key
34
+ result = expensive_computation(query)
35
+
36
+ # Store in cache
37
+ set_cached(cache_key, result)
38
+
39
+ result
40
+ end
41
+
42
+ private
43
+
44
+ def generate_cache_key(query)
45
+ Digest::SHA256.hexdigest(query)
46
+ end
47
+
48
+ def get_cached(key)
49
+ entry = @cache[key]
50
+ return nil unless entry
51
+
52
+ # Check if expired
53
+ if Time.now - entry[:cached_at] > @cache_ttl
54
+ @cache.delete(key)
55
+ return nil
56
+ end
57
+
58
+ entry
59
+ end
60
+
61
+ def set_cached(key, value)
62
+ @cache[key] = {
63
+ value: value,
64
+ cached_at: Time.now
65
+ }
66
+
67
+ # Limit cache size
68
+ cleanup_cache if @cache.size > 1000
69
+ end
70
+
71
+ def cleanup_cache
72
+ # Remove oldest entries
73
+ sorted = @cache.sort_by { |_, v| v[:cached_at] }
74
+ sorted.first(@cache.size - 800).each { |k, _| @cache.delete(k) }
75
+ end
76
+
77
+ def expensive_computation(query)
78
+ # Simulate expensive operation
79
+ sleep(0.5)
80
+ "Result for: #{query}"
81
+ end
82
+ end
83
+ ```
84
+
85
+ ### Thread-Safe Memory Cache
86
+
87
+ ```ruby
88
+ require 'concurrent'
89
+
90
+ class ThreadSafeCache
91
+ def initialize(options = {})
92
+ @max_size = options[:max_size] || 1000
93
+ @ttl = options[:ttl] || 300
94
+ @cache = Concurrent::Map.new
95
+ @access_count = Concurrent::Map.new
96
+ @last_cleanup = Concurrent::AtomicReference.new(Time.now)
97
+ end
98
+
99
+ def fetch(key, &block)
100
+ # Try to get from cache
101
+ if entry = @cache[key]
102
+ if valid_entry?(entry)
103
+ track_access(key)
104
+ return entry[:value]
105
+ else
106
+ @cache.delete(key)
107
+ end
108
+ end
109
+
110
+ # Cache miss - compute value
111
+ value = yield
112
+
113
+ # Store in cache
114
+ @cache[key] = {
115
+ value: value,
116
+ cached_at: Time.now,
117
+ ttl: @ttl
118
+ }
119
+
120
+ # Cleanup if needed
121
+ cleanup if should_cleanup?
122
+
123
+ value
124
+ end
125
+
126
+ def clear
127
+ @cache.clear
128
+ @access_count.clear
129
+ end
130
+
131
+ def size
+ @cache.size
+ end
+
+ # Needed by SmartCachingMiddleware's pattern invalidation further below
+ def delete(key)
+ @cache.delete(key)
+ @access_count.delete(key)
+ end
+
+ def each_key(&block)
+ @cache.each_key(&block)
+ end
134
+
135
+ def stats
136
+ {
137
+ size: @cache.size,
138
+ max_size: @max_size,
139
+ access_counts: @access_count.each_pair.to_h
140
+ }
141
+ end
142
+
143
+ private
144
+
145
+ def valid_entry?(entry)
146
+ Time.now - entry[:cached_at] <= entry[:ttl]
147
+ end
148
+
149
+ def track_access(key)
150
+ @access_count.compute(key) { |old| (old || 0) + 1 }
151
+ end
152
+
153
+ def should_cleanup?
154
+ @cache.size > @max_size ||
155
+ (Time.now - @last_cleanup.get > 60) # Cleanup every minute
156
+ end
157
+
158
+ def cleanup
159
+ return unless @last_cleanup.compare_and_set(@last_cleanup.get, Time.now)
160
+
161
+ # Remove expired entries
162
+ @cache.each_pair do |key, entry|
163
+ @cache.delete(key) unless valid_entry?(entry)
164
+ end
165
+
166
+ # If still over size, remove least accessed
167
+ if @cache.size > @max_size
168
+ entries_to_remove = @cache.size - (@max_size * 0.8).to_i
169
+
170
+ sorted = @cache.keys.sort_by { |k| @access_count[k] || 0 }
171
+ sorted.first(entries_to_remove).each do |key|
172
+ @cache.delete(key)
173
+ @access_count.delete(key)
174
+ end
175
+ end
176
+ end
177
+ end
178
+
179
+ # Usage in tools
180
+ class CachedDatabaseTool < Tsikol::Tool
181
+ def initialize
182
+ super
183
+ @cache = ThreadSafeCache.new(max_size: 500, ttl: 600)
184
+ end
185
+
186
+ def execute(query:)
187
+ @cache.fetch(query) do
188
+ # Expensive database query
189
+ perform_database_query(query)
190
+ end
191
+ end
192
+ end
193
+ ```
194
+
195
+ ## Caching Middleware
196
+
197
+ ### Request/Response Cache
198
+
199
+ ```ruby
200
+ require 'digest'
+ require 'json'
+
+ class CachingMiddleware < Tsikol::Middleware
201
+ def initialize(app, options = {})
202
+ @app = app
203
+ @cache = ThreadSafeCache.new(options)
204
+ @cacheable_methods = options[:cacheable_methods] || default_cacheable_methods
205
+ @cache_key_generator = options[:cache_key_generator] || method(:default_cache_key)
206
+ end
207
+
208
+ def call(request)
209
+ return @app.call(request) unless cacheable?(request)
210
+
211
+ cache_key = @cache_key_generator.call(request)
212
+
213
+ @cache.fetch(cache_key) do
214
+ log :debug, "Cache miss", method: request["method"], key: cache_key
215
+
216
+ response = @app.call(request)
217
+
218
+ # Only cache successful responses
219
+ if response[:result] && !response[:error]
220
+ response
221
+ else
222
+ # Don't cache errors, but return them
223
+ throw :cache_skip, response
224
+ end
225
+ end
226
+ rescue UncaughtThrowError => e
227
+ e.value # Return the response that was thrown
228
+ end
229
+
230
+ private
231
+
232
+ def default_cacheable_methods
233
+ [
234
+ "resources/read", # Resources are good cache candidates
235
+ "prompts/get", # Prompts rarely change
236
+ "completion/complete" # Completions for same input
237
+ ]
238
+ end
239
+
240
+ def cacheable?(request)
241
+ @cacheable_methods.include?(request["method"])
242
+ end
243
+
244
+ def default_cache_key(request)
245
+ # Include method and parameters in cache key
246
+ data = {
247
+ method: request["method"],
248
+ params: normalize_params(request["params"])
249
+ }
250
+
251
+ Digest::SHA256.hexdigest(data.to_json)
252
+ end
253
+
254
+ def normalize_params(params)
255
+ # Sort hash keys for consistent cache keys
256
+ return params unless params.is_a?(Hash)
257
+
258
+ params.sort.to_h.transform_values do |value|
259
+ value.is_a?(Hash) ? normalize_params(value) : value
260
+ end
261
+ end
262
+ end
263
+ ```
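+
+ Wiring the middleware into a server is a one-liner. A minimal sketch, assuming the same `Tsikol.start` / `use` DSL shown in the cache-warming example later in this recipe (the component names are placeholders):
+
+ ```ruby
+ Tsikol.start(name: "cached-server") do
+   # Cache resource reads and prompt lookups for five minutes, up to 2000 entries
+   use CachingMiddleware,
+       ttl: 300,
+       max_size: 2_000,
+       cacheable_methods: ["resources/read", "prompts/get"]
+
+   resource ConfigResource
+   tool QueryTool
+ end
+ ```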
264
+
265
+ ### Selective Cache Invalidation
266
+
267
+ ```ruby
268
+ class SmartCachingMiddleware < CachingMiddleware
269
+ def initialize(app, options = {})
270
+ super
271
+ @invalidation_rules = options[:invalidation_rules] || default_invalidation_rules
272
+ @cache_dependencies = Concurrent::Map.new
273
+ end
274
+
275
+ def call(request)
276
+ # Check if this request should invalidate cache
277
+ if should_invalidate?(request)
278
+ invalidate_related_cache(request)
279
+ end
280
+
281
+ # Normal caching logic
282
+ super
283
+ end
284
+
285
+ private
286
+
287
+ def default_invalidation_rules
288
+ {
289
+ "tools/call" => ->(request) {
290
+ # Invalidate cache when certain tools are called
291
+ tool_name = request.dig("params", "name")
292
+ case tool_name
293
+ when "update_config"
294
+ ["resources/read:config/*"]
295
+ when "clear_cache"
296
+ ["*"] # Clear everything
297
+ else
298
+ []
299
+ end
300
+ }
301
+ }
302
+ end
303
+
304
+ def should_invalidate?(request)
305
+ @invalidation_rules.key?(request["method"])
306
+ end
307
+
308
+ def invalidate_related_cache(request)
309
+ rule = @invalidation_rules[request["method"]]
310
+ patterns = rule.call(request)
311
+
312
+ patterns.each do |pattern|
313
+ if pattern == "*"
314
+ @cache.clear
315
+ log :info, "Cache cleared"
316
+ else
317
+ invalidate_pattern(pattern)
318
+ end
319
+ end
320
+ end
321
+
322
+ def invalidate_pattern(pattern)
+ # Simple glob matching. This only works when cache keys are human-readable
+ # (e.g. "method:uri"), so supply a custom :cache_key_generator instead of
+ # the hashed default when relying on pattern invalidation.
+ @cache.each_key do |key|
+ if key_matches_pattern?(key, pattern)
+ @cache.delete(key)
+ log :debug, "Cache invalidated", key: key, pattern: pattern
+ end
+ end
+ end
+
+ def key_matches_pattern?(key, pattern)
+ File.fnmatch(pattern, key)
+ end
331
+ end
332
+ ```
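+
+ Both the invalidation rules and the key generator are injectable. A configuration sketch, assuming the `use` DSL from the examples below and a hypothetical `import_data` tool; the hashed default key generator is replaced because pattern invalidation needs human-readable keys:
+
+ ```ruby
+ # Inside a Tsikol.start block
+ use SmartCachingMiddleware,
+     cacheable_methods: ["resources/read"],
+     # Keep keys readable so patterns like "resources/read:data/*" can match
+     cache_key_generator: ->(request) {
+       "#{request["method"]}:#{request.dig("params", "uri")}"
+     },
+     invalidation_rules: {
+       "tools/call" => ->(request) {
+         request.dig("params", "name") == "import_data" ? ["resources/read:data/*"] : []
+       }
+     }
+ ```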
333
+
334
+ ## Redis-Based Caching
335
+
336
+ ### Distributed Cache
337
+
338
+ ```ruby
339
+ require 'redis'
+ require 'json'
+ require 'zlib'
+ require 'base64'
341
+
342
+ class RedisCache
343
+ def initialize(options = {})
344
+ @redis = options[:redis] || Redis.new
345
+ @ttl = options[:ttl] || 300
346
+ @prefix = options[:prefix] || "mcp:cache"
347
+ @compress = options[:compress] || false
348
+ @compress_threshold = options[:compress_threshold] || 1024 # bytes
349
+ end
350
+
351
+ def fetch(key, ttl: @ttl)
352
+ redis_key = make_key(key)
353
+
354
+ # Try to get from cache
355
+ if cached = @redis.get(redis_key)
356
+ return deserialize(cached)
357
+ end
358
+
359
+ # Cache miss - compute value
360
+ value = yield
361
+
362
+ # Store in cache
363
+ set(key, value, ttl: ttl)
364
+
365
+ value
366
+ end
367
+
368
+ def get(key)
369
+ redis_key = make_key(key)
370
+ cached = @redis.get(redis_key)
371
+ deserialize(cached) if cached
372
+ end
373
+
374
+ def set(key, value, ttl: @ttl)
375
+ redis_key = make_key(key)
376
+ serialized = serialize(value)
377
+
378
+ if ttl > 0
379
+ @redis.setex(redis_key, ttl, serialized)
380
+ else
381
+ @redis.set(redis_key, serialized)
382
+ end
383
+ end
384
+
385
+ def delete(key)
386
+ redis_key = make_key(key)
387
+ @redis.del(redis_key)
388
+ end
389
+
390
+ def clear_pattern(pattern)
391
+ redis_pattern = make_key(pattern)
392
+
393
+ # Use SCAN to avoid blocking on large datasets
394
+ cursor = 0
395
+ loop do
396
+ cursor, keys = @redis.scan(cursor, match: redis_pattern, count: 100)
397
+ @redis.del(*keys) unless keys.empty?
398
+ break if cursor == "0"
399
+ end
400
+ end
401
+
402
+ private
403
+
404
+ def make_key(key)
405
+ "#{@prefix}:#{key}"
406
+ end
407
+
408
+ def serialize(value)
409
+ json = value.to_json
410
+
411
+ if @compress && json.bytesize > @compress_threshold
412
+ compressed = Zlib.deflate(json)
413
+ "gzip:#{Base64.strict_encode64(compressed)}"
414
+ else
415
+ json
416
+ end
417
+ end
418
+
419
+ def deserialize(data)
420
+ if data.start_with?("gzip:")
421
+ compressed = Base64.strict_decode64(data[5..-1])
422
+ json = Zlib.inflate(compressed)
423
+ else
424
+ json = data
425
+ end
426
+
427
+ # Symbolize keys so cached responses match the symbol-keyed hashes
+ # used elsewhere in these examples
+ JSON.parse(json, symbolize_names: true)
428
+ end
429
+ end
430
+
431
+ # Redis caching middleware
432
+ class RedisCachingMiddleware < Tsikol::Middleware
433
+ def initialize(app, options = {})
434
+ @app = app
435
+ # Accept an injected cache (as in the usage example below) or build one
+ @cache = options[:cache] || RedisCache.new(options)
436
+ @cacheable_methods = options[:cacheable_methods] || []
437
+ end
438
+
439
+ def call(request)
+ return @app.call(request) unless cacheable?(request)
+
+ cache_key = generate_cache_key(request)
+ ttl = determine_ttl(request)
+
+ # Serve from cache when possible
+ if cached = @cache.get(cache_key)
+ return cached
+ end
+
+ response = @app.call(request)
+
+ # Only cache successful responses; errors pass through uncached
+ @cache.set(cache_key, response, ttl: ttl) if response[:result] && !response[:error]
+
+ response
+ end
452
+
453
+ private
+
+ def cacheable?(request)
+ @cacheable_methods.include?(request["method"])
+ end
+
+ # Hash of method plus params, mirroring CachingMiddleware's default key
+ def generate_cache_key(request)
+ data = { method: request["method"], params: request["params"] }
+ Digest::SHA256.hexdigest(data.to_json)
+ end
+
+ def determine_ttl(request)
456
+ case request["method"]
457
+ when "resources/read"
458
+ # Resources might change less frequently
459
+ 600 # 10 minutes
460
+ when "completion/complete"
461
+ # Completions are more stable
462
+ 3600 # 1 hour
463
+ else
464
+ 300 # 5 minutes default
465
+ end
466
+ end
467
+ end
468
+ ```
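+
+ A configuration sketch for a shared deployment; the Redis URL environment variable and the `v1` prefix are assumptions of this example:
+
+ ```ruby
+ redis = Redis.new(url: ENV.fetch("REDIS_URL", "redis://localhost:6379/0"))
+
+ cache = RedisCache.new(
+   redis: redis,
+   prefix: "mcp:cache:v1",   # bump the prefix to invalidate everything at once
+   ttl: 600,
+   compress: true,           # deflate values larger than the threshold
+   compress_threshold: 1024
+ )
+
+ # Drop all cached resource reads, e.g. after a deploy
+ cache.clear_pattern("resources/read:*")
+ ```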
469
+
470
+ ## Content-Based Caching
471
+
472
+ ### ETags and Conditional Requests
473
+
474
+ ```ruby
475
+ class ETagResource < Tsikol::Resource
476
+ def read
477
+ content = generate_content
478
+ etag = generate_etag(content)
479
+
480
+ # Check if client has matching ETag
481
+ if client_etag == etag
482
+ return not_modified_response(etag)
483
+ end
484
+
485
+ # Return content with ETag
486
+ {
487
+ content: content,
488
+ etag: etag,
489
+ last_modified: last_modified_time
490
+ }.to_json
491
+ end
492
+
493
+ private
494
+
495
+ def generate_etag(content)
496
+ Digest::MD5.hexdigest(content)
497
+ end
498
+
499
+ def client_etag
500
+ # In real implementation, extract from request headers
501
+ Thread.current[:mcp_request]&.dig("params", "if_none_match")
502
+ end
503
+
504
+ def not_modified_response(etag)
505
+ {
506
+ status: 304,
507
+ message: "Not Modified",
508
+ etag: etag
509
+ }.to_json
510
+ end
511
+
512
+ def last_modified_time
513
+ # Track when content actually changes
514
+ @last_modified ||= Time.now
515
+ end
516
+ end
517
+ ```
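+
+ On the client side, the ETag from a previous read is sent back so the resource can short-circuit. The `if_none_match` parameter is a convention of this example rather than part of the MCP protocol:
+
+ ```ruby
+ request = {
+   "jsonrpc" => "2.0",
+   "id" => 42,
+   "method" => "resources/read",
+   "params" => {
+     "uri" => "config",
+     "if_none_match" => "5d41402abc4b2a76b9719d911017c592" # ETag from the last read
+   }
+ }
+
+ response = server.handle_request(request)
+ # => the 304-style "Not Modified" payload when the content hash is unchanged
+ ```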
518
+
519
+ ### Cache Warming
520
+
521
+ ```ruby
522
+ class CacheWarmer
523
+ def initialize(server, options = {})
524
+ @server = server
525
+ @cache = options[:cache]
526
+ @warmup_paths = options[:warmup_paths] || []
527
+ @interval = options[:interval] || 300 # 5 minutes
528
+ end
529
+
530
+ def start
531
+ Thread.new do
532
+ loop do
533
+ warm_cache
534
+ sleep @interval
535
+ end
536
+ end
537
+ end
538
+
539
+ def warm_cache
540
+ @warmup_paths.each do |path|
541
+ begin
542
+ case path[:type]
543
+ when :resource
544
+ warm_resource(path[:uri])
545
+ when :tool
546
+ warm_tool(path[:name], path[:params])
547
+ end
548
+ rescue => e
549
+ log :error, "Cache warming failed", path: path, error: e.message
550
+ end
551
+ end
552
+ end
553
+
554
+ private
555
+
556
+ def warm_resource(uri)
557
+ request = {
558
+ "jsonrpc" => "2.0",
559
+ "id" => "cache_warm_#{Time.now.to_i}",
560
+ "method" => "resources/read",
561
+ "params" => { "uri" => uri }
562
+ }
563
+
564
+ response = @server.handle_request(request)
565
+ log :debug, "Warmed resource cache", uri: uri
566
+ end
567
+
568
+ def warm_tool(name, params)
569
+ request = {
570
+ "jsonrpc" => "2.0",
571
+ "id" => "cache_warm_#{Time.now.to_i}",
572
+ "method" => "tools/call",
573
+ "params" => { "name" => name, "arguments" => params }
574
+ }
575
+
576
+ response = @server.handle_request(request)
577
+ log :debug, "Warmed tool cache", tool: name
578
+ end
579
+ end
580
+
581
+ # Usage
582
+ Tsikol.start(name: "cached-server") do
583
+ cache = RedisCache.new
584
+
585
+ # Add caching middleware
586
+ use RedisCachingMiddleware,
587
+ cache: cache,
588
+ cacheable_methods: ["resources/read", "tools/call"]
589
+
590
+ # Start cache warmer
591
+ after_start do
592
+ warmer = CacheWarmer.new(self,
593
+ cache: cache,
594
+ warmup_paths: [
595
+ { type: :resource, uri: "config" },
596
+ { type: :resource, uri: "system/status" },
597
+ { type: :tool, name: "popular_query", params: { query: "common" } }
598
+ ]
599
+ )
600
+ warmer.start
601
+ end
602
+
603
+ # Your components
604
+ resource ConfigResource
605
+ tool QueryTool
606
+ end
607
+ ```
608
+
609
+ ## Cache Strategies
610
+
611
+ ### Write-Through Cache
612
+
613
+ ```ruby
614
+ class WriteThroughCache
615
+ def initialize(cache, data_source)
616
+ @cache = cache
617
+ @data_source = data_source
618
+ end
619
+
620
+ def read(key)
621
+ @cache.fetch(key) do
622
+ @data_source.read(key)
623
+ end
624
+ end
625
+
626
+ def write(key, value)
627
+ # Write to data source first
628
+ @data_source.write(key, value)
629
+
630
+ # Then update cache
631
+ @cache.set(key, value)
632
+
633
+ value
634
+ end
635
+
636
+ def delete(key)
637
+ # Delete from data source
638
+ @data_source.delete(key)
639
+
640
+ # Then remove from cache
641
+ @cache.delete(key)
642
+ end
643
+ end
644
+ ```
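+
+ A minimal usage sketch; `SettingsStore` is a hypothetical data source that responds to `read`, `write`, and `delete`, and the cache is the `RedisCache` defined above:
+
+ ```ruby
+ store = WriteThroughCache.new(
+   RedisCache.new(ttl: 300, prefix: "mcp:settings"),
+   SettingsStore.new
+ )
+
+ store.write("theme", "dark")   # persisted first, then cached
+ store.read("theme")            # served from the cache
+ store.delete("theme")          # removed from both
+ ```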
645
+
646
+ ### Write-Behind Cache
647
+
648
+ ```ruby
649
+ class WriteBehindCache
650
+ def initialize(cache, data_source, options = {})
651
+ @cache = cache
652
+ @data_source = data_source
653
+ @write_delay = options[:write_delay] || 5
654
+ @batch_size = options[:batch_size] || 100
655
+ @write_queue = Queue.new
656
+
657
+ start_write_thread
658
+ end
659
+
660
+ def read(key)
+ # Writes update the cache before they are queued, so reads can be served
+ # straight from the cache, falling back to the data source on a miss
+ @cache.fetch(key) do
+ @data_source.read(key)
+ end
+ end
670
+
671
+ def write(key, value)
672
+ # Update cache immediately
673
+ @cache.set(key, value)
674
+
675
+ # Queue for later write
676
+ @write_queue << { key: key, value: value, time: Time.now }
677
+
678
+ value
679
+ end
680
+
681
+ private
682
+
683
+ def start_write_thread
684
+ Thread.new do
685
+ loop do
686
+ batch = []
687
+
688
+ # Collect writes for this batch. Queue#pop(timeout:) needs Ruby >= 3.2
+ # and returns nil when the timeout expires (it does not raise)
+ deadline = Time.now + @write_delay
+ while Time.now < deadline && batch.size < @batch_size
+ item = @write_queue.pop(timeout: deadline - Time.now)
+ break unless item # timed out - process what we have
+ batch << item
698
+ end
699
+
700
+ # Write batch to data source
701
+ write_batch(batch) unless batch.empty?
702
+ end
703
+ end
704
+ end
705
+
706
+ def write_batch(batch)
707
+ @data_source.write_batch(batch)
708
+ rescue => e
709
+ log :error, "Write-behind failed", error: e.message
710
+ # Re-queue failed writes
711
+ batch.each { |item| @write_queue << item }
712
+ end
713
+ end
714
+ ```
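+
+ One caveat with write-behind: queued writes are lost if the process exits before the background thread flushes them. A sketch of a flush helper to call at shutdown; `AnalyticsStore` is a hypothetical data source with the `write_batch` method used above:
+
+ ```ruby
+ class WriteBehindCache
+   # Drain anything still queued; call from at_exit or the server's shutdown hook
+   def flush
+     batch = []
+     batch << @write_queue.pop(true) until @write_queue.empty?
+     write_batch(batch) unless batch.empty?
+   rescue ThreadError
+     # queue emptied concurrently - flush whatever was collected
+     write_batch(batch) unless batch.empty?
+   end
+ end
+
+ cache = WriteBehindCache.new(RedisCache.new, AnalyticsStore.new, write_delay: 5)
+ at_exit { cache.flush }
+ ```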
715
+
716
+ ## Cache Monitoring
717
+
718
+ ```ruby
719
+ class CacheMetrics
720
+ def initialize(cache)
721
+ @cache = cache
722
+ @hits = Concurrent::AtomicFixnum.new(0)
723
+ @misses = Concurrent::AtomicFixnum.new(0)
724
+ @errors = Concurrent::AtomicFixnum.new(0)
725
+ @total_time = Concurrent::AtomicFixnum.new(0)
726
+ end
727
+
728
+ def track_fetch(key)
729
+ start = Time.now
730
+ hit = false
731
+
732
+ begin
733
+ value = yield
734
+ # NOTE: the wrapped block computes the value on a miss too, so this only
+ # detects nil results; precise hit/miss stats need a signal from the cache
+ hit = !value.nil?
735
+ value
736
+ rescue => e
737
+ @errors.increment
738
+ raise
739
+ ensure
740
+ duration = ((Time.now - start) * 1000).to_i
741
+ @total_time.increment(duration)
742
+
743
+ if hit
744
+ @hits.increment
745
+ else
746
+ @misses.increment
747
+ end
748
+ end
749
+ end
750
+
751
+ def stats
752
+ total = @hits.value + @misses.value
753
+ hit_rate = total > 0 ? (@hits.value.to_f / total * 100).round(2) : 0
754
+ avg_time = total > 0 ? (@total_time.value.to_f / total).round(2) : 0
755
+
756
+ {
757
+ hits: @hits.value,
758
+ misses: @misses.value,
759
+ hit_rate: hit_rate,
760
+ errors: @errors.value,
761
+ avg_response_time_ms: avg_time,
762
+ cache_size: @cache.size
763
+ }
764
+ end
765
+ end
766
+
767
+ # Monitored cache middleware
768
+ class MonitoredCachingMiddleware < CachingMiddleware
769
+ def initialize(app, options = {})
770
+ super
771
+ @metrics = CacheMetrics.new(@cache)
772
+ end
773
+
774
+ def call(request)
775
+ if request["method"] == "cache/stats"
776
+ return cache_stats_response(request["id"])
777
+ end
778
+
779
+ return @app.call(request) unless cacheable?(request)
780
+
781
+ cache_key = @cache_key_generator.call(request)
782
+
783
+ @metrics.track_fetch(cache_key) do
784
+ @cache.fetch(cache_key) do
785
+ @app.call(request)
786
+ end
787
+ end
788
+ end
789
+
790
+ private
791
+
792
+ def cache_stats_response(id)
793
+ {
794
+ jsonrpc: "2.0",
795
+ id: id,
796
+ result: @metrics.stats
797
+ }
798
+ end
799
+ end
800
+ ```
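+
+ With the monitored middleware in place, a client or health check can request the counters directly. The `cache/stats` method is this middleware's own extension, not a standard MCP method, and the numbers below are illustrative:
+
+ ```ruby
+ stats_request = {
+   "jsonrpc" => "2.0",
+   "id" => 7,
+   "method" => "cache/stats"
+ }
+
+ server.handle_request(stats_request)
+ # => { jsonrpc: "2.0", id: 7,
+ #      result: { hits: 120, misses: 30, hit_rate: 80.0, errors: 0,
+ #                avg_response_time_ms: 3.2, cache_size: 42 } }
+ ```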
801
+
802
+ ## Testing Cache Behavior
803
+
804
+ ```ruby
805
+ require 'minitest/autorun'
806
+
807
+ class CacheTest < Minitest::Test
808
+ def setup
809
+ @cache = ThreadSafeCache.new(ttl: 1) # 1 second TTL
810
+ end
811
+
812
+ def test_cache_hit
813
+ call_count = 0
814
+
815
+ # First call - cache miss
816
+ result1 = @cache.fetch("key") do
817
+ call_count += 1
818
+ "value"
819
+ end
820
+
821
+ # Second call - cache hit
822
+ result2 = @cache.fetch("key") do
823
+ call_count += 1
824
+ "value"
825
+ end
826
+
827
+ assert_equal "value", result1
828
+ assert_equal "value", result2
829
+ assert_equal 1, call_count # Block only called once
830
+ end
831
+
832
+ def test_cache_expiration
833
+ @cache.fetch("key") { "value1" }
834
+
835
+ # Wait for expiration
836
+ sleep(1.1)
837
+
838
+ # Should compute new value
839
+ result = @cache.fetch("key") { "value2" }
840
+ assert_equal "value2", result
841
+ end
842
+
843
+ def test_cache_size_limit
844
+ cache = ThreadSafeCache.new(max_size: 3)
845
+
846
+ # Fill cache
847
+ cache.fetch("key1") { "value1" }
848
+ cache.fetch("key2") { "value2" }
849
+ cache.fetch("key3") { "value3" }
850
+
851
+ assert_equal 3, cache.size
852
+
853
+ # Adding 4th item should trigger cleanup
854
+ cache.fetch("key4") { "value4" }
855
+
856
+ assert cache.size <= 3
857
+ end
858
+ end
859
+ ```
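+
+ The caching middleware can be exercised the same way with a stub downstream app. A sketch, assuming the middleware can be instantiated directly around any callable and that the `log` helper used above is available:
+
+ ```ruby
+ class CachingMiddlewareTest < Minitest::Test
+   def test_identical_requests_hit_the_cache
+     calls = 0
+     app = ->(request) { calls += 1; { result: { data: "ok" } } }
+
+     middleware = CachingMiddleware.new(app, ttl: 60)
+     request = { "method" => "resources/read", "params" => { "uri" => "config" } }
+
+     first  = middleware.call(request)
+     second = middleware.call(request)
+
+     assert_equal first, second
+     assert_equal 1, calls # downstream app only called once
+   end
+
+   def test_non_cacheable_methods_pass_through
+     calls = 0
+     app = ->(request) { calls += 1; { result: {} } }
+     middleware = CachingMiddleware.new(app, ttl: 60)
+     request = { "method" => "tools/call", "params" => { "name" => "anything" } }
+
+     2.times { middleware.call(request) }
+
+     assert_equal 2, calls # never cached
+   end
+ end
+ ```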
860
+
861
+ ## Best Practices
862
+
863
+ 1. **Cache appropriate data** - Not everything benefits from caching
864
+ 2. **Set reasonable TTLs** - Balance freshness with performance
865
+ 3. **Monitor cache performance** - Track hit rates and response times
866
+ 4. **Handle cache failures gracefully** - Don't let cache errors break functionality (see the sketch after this list)
867
+ 5. **Consider cache warming** - Pre-populate frequently accessed data
868
+ 6. **Use cache invalidation wisely** - Know when to clear stale data
869
+ 7. **Size caches appropriately** - Prevent memory issues
870
+ 8. **Use distributed caching** - Share cache state across multi-server deployments
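+
+ For point 4, the simplest safeguard is to treat the cache as optional: if the backing store is down, bypass it instead of failing the request. A sketch wrapping the `RedisCache` from earlier; `Redis::BaseError` is redis-rb's error superclass and `expensive_lookup` is a placeholder:
+
+ ```ruby
+ class FaultTolerantCache
+   def initialize(cache)
+     @cache = cache
+   end
+
+   # Fall back to computing the value whenever the cache itself errors out
+   def fetch(key, **opts, &block)
+     @cache.fetch(key, **opts, &block)
+   rescue Redis::BaseError => e
+     warn "Cache unavailable, bypassing: #{e.message}"
+     yield
+   end
+ end
+
+ cache = FaultTolerantCache.new(RedisCache.new)
+ cache.fetch("resources/read:config") { expensive_lookup }
+ ```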
871
+
872
+ ## Next Steps
873
+
874
+ - Add [Monitoring](monitoring.md) for cache metrics
875
+ - Implement [Rate Limiting](rate-limiting.md) with caching
876
+ - Review [Performance](performance.md) optimization
877
+ - Set up [Logging](logging.md) for cache debugging