familia 1.2.0 → 2.0.0.pre.pre

This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (115)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci.yml +68 -0
  3. data/.github/workflows/docs.yml +64 -0
  4. data/.gitignore +4 -0
  5. data/.pre-commit-config.yaml +3 -1
  6. data/.rubocop.yml +16 -9
  7. data/.rubocop_todo.yml +177 -31
  8. data/.yardopts +9 -0
  9. data/CLAUDE.md +141 -0
  10. data/Gemfile +15 -2
  11. data/Gemfile.lock +76 -34
  12. data/README.md +39 -23
  13. data/bin/irb +3 -0
  14. data/docs/connection_pooling.md +317 -0
  15. data/familia.gemspec +9 -5
  16. data/lib/familia/base.rb +19 -9
  17. data/lib/familia/connection.rb +232 -65
  18. data/lib/familia/core_ext.rb +1 -1
  19. data/lib/familia/datatype/commands.rb +59 -0
  20. data/lib/familia/{redistype → datatype}/serialization.rb +9 -13
  21. data/lib/familia/{redistype → datatype}/types/hashkey.rb +25 -25
  22. data/lib/familia/{redistype → datatype}/types/list.rb +13 -13
  23. data/lib/familia/{redistype → datatype}/types/sorted_set.rb +20 -20
  24. data/lib/familia/{redistype → datatype}/types/string.rb +22 -21
  25. data/lib/familia/{redistype → datatype}/types/unsorted_set.rb +11 -11
  26. data/lib/familia/datatype.rb +243 -0
  27. data/lib/familia/errors.rb +5 -2
  28. data/lib/familia/features/expiration.rb +33 -34
  29. data/lib/familia/features/quantization.rb +9 -3
  30. data/lib/familia/features/safe_dump.rb +2 -3
  31. data/lib/familia/features.rb +2 -2
  32. data/lib/familia/horreum/class_methods.rb +97 -110
  33. data/lib/familia/horreum/commands.rb +46 -51
  34. data/lib/familia/horreum/connection.rb +82 -0
  35. data/lib/familia/horreum/{relations_management.rb → related_fields_management.rb} +37 -35
  36. data/lib/familia/horreum/serialization.rb +61 -198
  37. data/lib/familia/horreum/settings.rb +6 -17
  38. data/lib/familia/horreum/utils.rb +11 -10
  39. data/lib/familia/horreum.rb +69 -60
  40. data/lib/familia/logging.rb +12 -12
  41. data/lib/familia/multi_result.rb +72 -0
  42. data/lib/familia/refinements.rb +7 -44
  43. data/lib/familia/settings.rb +11 -11
  44. data/lib/familia/utils.rb +123 -90
  45. data/lib/familia/version.rb +4 -21
  46. data/lib/familia.rb +17 -12
  47. data/lib/middleware/database_middleware.rb +150 -0
  48. data/try/configuration/scenarios_try.rb +65 -0
  49. data/try/core/connection_try.rb +58 -0
  50. data/try/core/errors_try.rb +93 -0
  51. data/try/core/extensions_try.rb +26 -0
  52. data/try/{10_familia_try.rb → core/familia_extended_try.rb} +11 -10
  53. data/try/{00_familia_try.rb → core/familia_try.rb} +5 -3
  54. data/try/core/middleware_try.rb +68 -0
  55. data/try/core/refinements_try.rb +39 -0
  56. data/try/core/settings_try.rb +76 -0
  57. data/try/core/tools_try.rb +54 -0
  58. data/try/core/utils_try.rb +189 -0
  59. data/try/{26_redis_bool_try.rb → datatypes/boolean_try.rb} +4 -2
  60. data/try/datatypes/datatype_base_try.rb +69 -0
  61. data/try/{25_redis_type_hash_try.rb → datatypes/hash_try.rb} +5 -3
  62. data/try/{23_redis_type_list_try.rb → datatypes/list_try.rb} +5 -3
  63. data/try/{22_redis_type_set_try.rb → datatypes/set_try.rb} +5 -3
  64. data/try/{21_redis_type_zset_try.rb → datatypes/sorted_set_try.rb} +6 -4
  65. data/try/{24_redis_type_string_try.rb → datatypes/string_try.rb} +8 -8
  66. data/try/edge_cases/empty_identifiers_try.rb +48 -0
  67. data/try/{92_symbolize_try.rb → edge_cases/hash_symbolization_try.rb} +12 -7
  68. data/try/edge_cases/json_serialization_try.rb +85 -0
  69. data/try/edge_cases/race_conditions_try.rb +60 -0
  70. data/try/edge_cases/reserved_keywords_try.rb +59 -0
  71. data/try/{93_string_coercion_try.rb → edge_cases/string_coercion_try.rb} +60 -59
  72. data/try/edge_cases/ttl_side_effects_try.rb +51 -0
  73. data/try/features/expiration_try.rb +86 -0
  74. data/try/features/quantization_try.rb +90 -0
  75. data/try/{35_feature_safedump_try.rb → features/safe_dump_advanced_try.rb} +7 -6
  76. data/try/features/safe_dump_try.rb +137 -0
  77. data/try/{test_helpers.rb → helpers/test_helpers.rb} +25 -60
  78. data/try/{27_redis_horreum_try.rb → horreum/base_try.rb} +39 -14
  79. data/try/horreum/class_methods_try.rb +41 -0
  80. data/try/horreum/commands_try.rb +49 -0
  81. data/try/{29_redis_horreum_initialization_try.rb → horreum/initialization_try.rb} +9 -7
  82. data/try/horreum/relations_try.rb +146 -0
  83. data/try/{28_redis_horreum_serialization_try.rb → horreum/serialization_try.rb} +13 -11
  84. data/try/horreum/settings_try.rb +43 -0
  85. data/try/integration/cross_component_try.rb +46 -0
  86. data/try/{41_customer_safedump_try.rb → models/customer_safe_dump_try.rb} +9 -7
  87. data/try/{40_customer_try.rb → models/customer_try.rb} +20 -17
  88. data/try/models/datatype_base_try.rb +101 -0
  89. data/try/{30_familia_object_try.rb → models/familia_object_try.rb} +18 -16
  90. data/try/performance/benchmarks_try.rb +55 -0
  91. data/try/pooling/README.md +20 -0
  92. data/try/pooling/configurable_stress_test_try.rb +435 -0
  93. data/try/pooling/connection_pool_test_try.rb +273 -0
  94. data/try/pooling/lib/atomic_saves_v3_connection_pool_helpers.rb +192 -0
  95. data/try/pooling/lib/connection_pool_metrics.rb +372 -0
  96. data/try/pooling/lib/connection_pool_stress_test.rb +959 -0
  97. data/try/pooling/lib/connection_pool_threading_models.rb +421 -0
  98. data/try/pooling/lib/visualize_stress_results.rb +434 -0
  99. data/try/pooling/pool_siege_try.rb +509 -0
  100. data/try/pooling/run_stress_tests_try.rb +482 -0
  101. data/try/prototypes/atomic_saves_v1_context_proxy.rb +121 -0
  102. data/try/prototypes/atomic_saves_v2_connection_switching.rb +161 -0
  103. data/try/prototypes/atomic_saves_v3_connection_pool.rb +189 -0
  104. data/try/prototypes/atomic_saves_v4.rb +105 -0
  105. data/try/prototypes/lib/atomic_saves_v2_connection_switching_helpers.rb +124 -0
  106. data/try/prototypes/lib/atomic_saves_v3_connection_pool_helpers.rb +192 -0
  107. metadata +140 -43
  108. data/.github/workflows/ruby.yml +0 -71
  109. data/VERSION.yml +0 -4
  110. data/lib/familia/redistype/commands.rb +0 -59
  111. data/lib/familia/redistype.rb +0 -228
  112. data/lib/familia/tools.rb +0 -68
  113. data/lib/redis_middleware.rb +0 -109
  114. data/try/20_redis_type_try.rb +0 -70
  115. data/try/91_json_bug_try.rb +0 -86
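
Of note in the list above: connection pooling is the headline change of this release, with new documentation (docs/connection_pooling.md, item 14), a reworked lib/familia/connection.rb (item 17), and an entire stress-test suite under try/pooling/ (items 91-100). Those tests appear to exercise the standard checkout/checkin idiom of the connection_pool gem; here is a minimal sketch of that idiom (the Redis URL, pool settings, and key names are illustrative, and the actual Familia 2.0 wiring lives in lib/familia/connection.rb, which this excerpt does not show):

    require 'redis'
    require 'connection_pool'

    # A fixed-size pool: checkout blocks for up to `timeout` seconds when
    # every connection is in use, then raises ConnectionPool::TimeoutError.
    pool = ConnectionPool.new(size: 10, timeout: 5) do
      Redis.new(url: 'redis://localhost:6379/0')
    end

    # Each thread borrows a connection only for the duration of the block.
    threads = 20.times.map do |i|
      Thread.new do
        pool.with do |conn|
          conn.set("stress:#{i}", i)
          conn.get("stress:#{i}")
        end
      end
    end
    threads.each(&:join)

Pool exhaustion (more concurrent borrowers than `size`) is exactly the condition the pool_exhaustion metrics in the hunk below are designed to observe.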
data/try/pooling/lib/connection_pool_metrics.rb
@@ -0,0 +1,372 @@
+ # try/pooling/lib/connection_pool_metrics.rb
+ #
+ # Enhanced Metrics Collection and Reporting for Connection Pool Stress Tests
+ #
+ # This module provides detailed metrics collection, analysis, and export
+ # capabilities for stress test results. Outputs include CSV, summary reports,
+ # and simple ASCII visualizations.
+
+ require 'csv'
+ require 'json'
+
+ module ConnectionPoolMetrics
+   # Enhanced metrics collector with detailed tracking
+   class DetailedMetricsCollector < MetricsCollector
+     def initialize
+       super
+       @metrics[:connection_acquisitions] = []
+       @metrics[:thread_states] = []
+       @metrics[:pool_exhaustion_events] = []
+       @mutex = Mutex.new
+     end
+
+     def record_connection_acquisition(thread_id, wait_time, acquired)
+       @mutex.synchronize do
+         @metrics[:connection_acquisitions] << {
+           thread_id: thread_id,
+           wait_time: wait_time,
+           acquired: acquired,
+           timestamp: Time.now.to_f
+         }
+       end
+     end
+
+     def record_thread_state(thread_id, state, context = {})
+       @mutex.synchronize do
+         @metrics[:thread_states] << {
+           thread_id: thread_id,
+           state: state, # :waiting, :running, :completed, :failed
+           context: context,
+           timestamp: Time.now.to_f
+         }
+       end
+     end
+
+     def record_pool_exhaustion(wait_time, thread_count_waiting)
+       @mutex.synchronize do
+         @metrics[:pool_exhaustion_events] << {
+           wait_time: wait_time,
+           threads_waiting: thread_count_waiting,
+           timestamp: Time.now.to_f
+         }
+       end
+     end
+
+     def detailed_summary
+       summary = super
+
+       # Add connection acquisition stats
+       acquisitions = @metrics[:connection_acquisitions]
+       if acquisitions.any?
+         wait_times = acquisitions.map { |a| a[:wait_time] }
+         summary[:connection_stats] = {
+           total_acquisitions: acquisitions.size,
+           successful_acquisitions: acquisitions.count { |a| a[:acquired] },
+           avg_wait_time: wait_times.sum.to_f / wait_times.size,
+           max_wait_time: wait_times.max,
+           min_wait_time: wait_times.min,
+           p95_wait_time: percentile(wait_times, 0.95),
+           p99_wait_time: percentile(wait_times, 0.99)
+         }
+       end
+
+       # Add pool exhaustion stats
+       if @metrics[:pool_exhaustion_events].any?
+         summary[:pool_exhaustion] = {
+           total_events: @metrics[:pool_exhaustion_events].size,
+           max_threads_waiting: @metrics[:pool_exhaustion_events].map { |e| e[:threads_waiting] }.max
+         }
+       end
+
+       # Add operation breakdown by type
+       operations_by_type = @metrics[:operations].group_by { |op| op[:type] }
+       summary[:operations_by_type] = {}
+
+       operations_by_type.each do |type, ops|
+         successful = ops.count { |op| op[:success] }
+         durations = ops.map { |op| op[:duration] }
+
+         summary[:operations_by_type][type] = {
+           count: ops.size,
+           success_rate: (successful.to_f / ops.size * 100).round(2),
+           avg_duration: durations.sum.to_f / durations.size,
+           p95_duration: percentile(durations, 0.95),
+           p99_duration: percentile(durations, 0.99)
+         }
+       end
+
+       summary
+     end
+
+     def export_detailed_csv(filename_prefix = "stress_test")
+       timestamp = Time.now.strftime("%Y%m%d_%H%M%S")
+
+       # Export operations
+       CSV.open("#{filename_prefix}_operations_#{timestamp}.csv", "w") do |csv|
+         csv << ['timestamp', 'type', 'duration', 'success', 'wait_time']
+         @metrics[:operations].each do |op|
+           csv << [op[:timestamp], op[:type], op[:duration], op[:success], op[:wait_time]]
+         end
+       end
+
+       # Export errors
+       if @metrics[:errors].any?
+         CSV.open("#{filename_prefix}_errors_#{timestamp}.csv", "w") do |csv|
+           csv << ['timestamp', 'error_type', 'message', 'context']
+           @metrics[:errors].each do |err|
+             csv << [err[:timestamp], err[:error], err[:message], err[:context].to_json]
+           end
+         end
+       end
+
+       # Export pool stats
+       if @metrics[:pool_stats].any?
+         CSV.open("#{filename_prefix}_pool_stats_#{timestamp}.csv", "w") do |csv|
+           csv << ['timestamp', 'available', 'size', 'utilization']
+           @metrics[:pool_stats].each do |stat|
+             csv << [stat[:timestamp], stat[:available], stat[:size], stat[:utilization]]
+           end
+         end
+       end
+
+       # Export summary
+       CSV.open("#{filename_prefix}_summary_#{timestamp}.csv", "w") do |csv|
+         csv << ['metric', 'value']
+         flatten_hash(detailed_summary).each do |key, value|
+           csv << [key, value]
+         end
+       end
+
+       puts "Exported CSV files with prefix: #{filename_prefix}_*_#{timestamp}.csv"
+     end
+
+     def generate_ascii_report
+       summary = detailed_summary
+
+       report = []
+       report << "\n" + "=" * 80
+       report << "CONNECTION POOL STRESS TEST REPORT"
+       report << "=" * 80
+
+       # Overall Summary
+       report << "\nOVERALL SUMMARY:"
+       report << "-" * 40
+       report << sprintf("Total Operations: %d", summary[:total_operations])
+       report << sprintf("Success Rate: %.2f%%", summary[:success_rate])
+       report << sprintf("Average Duration: %.4f seconds", summary[:avg_duration])
+
+       # Connection Statistics
+       if summary[:connection_stats]
+         report << "\nCONNECTION ACQUISITION STATISTICS:"
+         report << "-" * 40
+         stats = summary[:connection_stats]
+         report << sprintf("Total Acquisitions: %d", stats[:total_acquisitions])
+         report << sprintf("Successful: %d (%.2f%%)",
+                           stats[:successful_acquisitions],
+                           stats[:successful_acquisitions].to_f / stats[:total_acquisitions] * 100)
+         report << sprintf("Avg Wait Time: %.4f seconds", stats[:avg_wait_time])
+         report << sprintf("Max Wait Time: %.4f seconds", stats[:max_wait_time])
+         report << sprintf("P95 Wait Time: %.4f seconds", stats[:p95_wait_time])
+         report << sprintf("P99 Wait Time: %.4f seconds", stats[:p99_wait_time])
+       end
+
+       # Operations by Type
+       if summary[:operations_by_type] && summary[:operations_by_type].any?
+         report << "\nOPERATIONS BY TYPE:"
+         report << "-" * 40
+         report << sprintf("%-15s %10s %10s %10s %10s", "Type", "Count", "Success%", "Avg(ms)", "P95(ms)")
+
+         summary[:operations_by_type].each do |type, stats|
+           report << sprintf("%-15s %10d %10.2f %10.2f %10.2f",
+                             type,
+                             stats[:count],
+                             stats[:success_rate],
+                             stats[:avg_duration] * 1000,
+                             stats[:p95_duration] * 1000)
+         end
+       end
+
+       # Pool Utilization Graph
+       if @metrics[:pool_stats].any?
+         report << "\nPOOL UTILIZATION OVER TIME:"
+         report << "-" * 40
+         report << generate_utilization_graph
+       end
+
+       # Error Summary
+       if summary[:errors_by_type] && summary[:errors_by_type].any?
+         report << "\nERROR SUMMARY:"
+         report << "-" * 40
+         summary[:errors_by_type].each do |error_type, count|
+           report << sprintf("%-30s: %d", error_type, count)
+         end
+       end
+
+       report << "\n" + "=" * 80
+
+       report.join("\n")
+     end
+
+     private
+
+     def percentile(values, percentile)
+       return 0 if values.empty?
+       sorted = values.sort
+       index = (percentile * (sorted.length - 1)).round
+       sorted[index]
+     end
+
+     def flatten_hash(hash, prefix = '')
+       hash.each_with_object({}) do |(key, value), result|
+         new_key = prefix.empty? ? key.to_s : "#{prefix}.#{key}"
+         if value.is_a?(Hash)
+           result.merge!(flatten_hash(value, new_key))
+         else
+           result[new_key] = value
+         end
+       end
+     end
+
+     def generate_utilization_graph
+       return "No pool stats available" if @metrics[:pool_stats].empty?
+
+       # Sample data points for ASCII graph
+       samples = 50
+       stats = @metrics[:pool_stats]
+       sample_interval = [(stats.size.to_f / samples).ceil, 1].max
+
+       sampled_stats = []
+       (0...stats.size).step(sample_interval) do |i|
+         sampled_stats << stats[i]
+       end
+
+       # Create ASCII graph
+       graph_height = 10
+       graph = Array.new(graph_height + 1) { ' ' * samples }
+
+       sampled_stats.each_with_index do |stat, i|
+         height = (stat[:utilization] / 100.0 * graph_height).round
+         (0..height).each do |h|
+           graph[graph_height - h][i] = '*'
+         end
+       end
+
+       # Add scale
+       result = []
+       result << "100% |" + graph[0]
+       (1...graph_height).each do |i|
+         percent = 100 - (i * 10)
+         result << sprintf("%3d%% |", percent) + graph[i]
+       end
+       result << "  0% |" + graph[graph_height]
+       result << "     +" + "-" * samples
+       result << "      " + "Time →"
+
+       result.join("\n")
+     end
+   end
+
+   # Test result aggregator for multiple runs
+   class ResultAggregator
+     def initialize
+       @results = []
+     end
+
+     def add_result(config, metrics_summary, model_info = {})
+       @results << {
+         timestamp: Time.now,
+         config: config,
+         summary: metrics_summary,
+         model: model_info
+       }
+     end
+
+     def export_comparison_csv(filename = "comparison_results.csv")
+       CSV.open(filename, "w") do |csv|
+         # Headers
+         headers = ['timestamp', 'model', 'threads', 'ops_per_thread', 'pool_size',
+                    'pool_timeout', 'scenario', 'success_rate', 'avg_duration',
+                    'avg_wait_time', 'max_pool_util', 'errors']
+         csv << headers
+
+         # Data rows
+         @results.each do |result|
+           csv << [
+             result[:timestamp].strftime("%Y-%m-%d %H:%M:%S"),
+             result[:model][:name] || 'default',
+             result[:config][:thread_count],
+             result[:config][:operations_per_thread],
+             result[:config][:pool_size],
+             result[:config][:pool_timeout],
+             result[:config][:scenario],
+             result[:summary][:success_rate],
+             result[:summary][:avg_duration],
+             result[:summary][:avg_wait_time],
+             result[:summary][:max_pool_utilization],
+             result[:summary][:failed_operations]
+           ]
+         end
+       end
+
+       puts "Comparison results exported to: #{filename}"
+     end
+
+     def generate_comparison_report
+       report = []
+       report << "\nCOMPARISON REPORT"
+       report << "=" * 80
+
+       # Group by scenario
+       by_scenario = @results.group_by { |r| r[:config][:scenario] }
+
+       by_scenario.each do |scenario, results|
+         report << "\nScenario: #{scenario}"
+         report << "-" * 40
+
+         # Find best and worst performers
+         sorted = results.sort_by { |r| -r[:summary][:success_rate] }
+         best = sorted.first
+         worst = sorted.last
+
+         report << sprintf("Best performer: %s (%.2f%% success rate)",
+                           best[:model][:name] || 'default',
+                           best[:summary][:success_rate])
+         report << sprintf("Worst performer: %s (%.2f%% success rate)",
+                           worst[:model][:name] || 'default',
+                           worst[:summary][:success_rate])
+       end
+
+       report.join("\n")
+     end
+   end
+ end
+
+ # Example usage
+ if __FILE__ == $0
+   # Create detailed metrics collector
+   metrics = ConnectionPoolMetrics::DetailedMetricsCollector.new
+
+   # Simulate some operations
+   10.times do |i|
+     metrics.record_operation(:read, rand(0.001..0.1), rand < 0.95, rand(0.0..0.01))
+     metrics.record_connection_acquisition(i, rand(0.0..0.5), rand < 0.9)
+   end
+
+   5.times do |i|
+     metrics.record_pool_stats(rand(0..10), 10)
+   end
+
+   # Generate reports
+   puts metrics.generate_ascii_report
+   metrics.export_detailed_csv("test_run")
+
+   # Test aggregator
+   aggregator = ConnectionPoolMetrics::ResultAggregator.new
+   aggregator.add_result(
+     { thread_count: 10, pool_size: 5, scenario: :test },
+     metrics.detailed_summary,
+     { name: 'test_model' }
+   )
+
+   aggregator.export_comparison_csv
+ end
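
One caveat when reading this hunk in isolation: it defines DetailedMetricsCollector < MetricsCollector but never requires the base class, so the file only loads after the harness has defined MetricsCollector, presumably in the adjacent connection_pool_stress_test.rb (item 96 in the file list). For reference, here is a hypothetical, minimal stand-in for that base class, inferred purely from the calls the subclass and the example-usage block make; it is not the shipped implementation:

    # Hypothetical MetricsCollector sketch: inferred from usage in the hunk
    # above, not the released base class.
    class MetricsCollector
      def initialize
        # The subclass's initialize calls `super`, then adds its own keys.
        @metrics = { operations: [], errors: [], pool_stats: [] }
      end

      # Invoked as record_operation(:read, duration, success, wait_time)
      # in the example-usage block.
      def record_operation(type, duration, success, wait_time = nil)
        @metrics[:operations] << { type: type, duration: duration, success: success,
                                   wait_time: wait_time, timestamp: Time.now.to_f }
      end

      # Invoked as record_pool_stats(available, size); utilization is the
      # percentage of the pool currently checked out.
      def record_pool_stats(available, size)
        @metrics[:pool_stats] << { available: available, size: size,
                                   utilization: (size - available).to_f / size * 100,
                                   timestamp: Time.now.to_f }
      end

      # The subclass's detailed_summary begins with `summary = super`, so the
      # base class must define detailed_summary as well, returning at least
      # the keys the ASCII report and comparison CSV read.
      def detailed_summary
        ops = @metrics[:operations]
        failed = ops.count { |op| !op[:success] }
        {
          total_operations: ops.size,
          failed_operations: failed,
          success_rate: ops.empty? ? 0.0 : (ops.size - failed) * 100.0 / ops.size,
          avg_duration: ops.empty? ? 0.0 : ops.sum { |op| op[:duration] } / ops.size,
          errors_by_type: @metrics[:errors].group_by { |e| e[:error] }.transform_values(&:size)
        }
      end
    end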