behavior_analytics 0.1.0 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. checksums.yaml +4 -4
  2. data/README.md +146 -5
  3. data/behavior_analytics.gemspec +3 -1
  4. data/db/migrate/002_enhance_behavior_events_v2.rb +46 -0
  5. data/lib/behavior_analytics/analytics/cohorts.rb +242 -0
  6. data/lib/behavior_analytics/analytics/engine.rb +15 -0
  7. data/lib/behavior_analytics/analytics/funnels.rb +176 -0
  8. data/lib/behavior_analytics/analytics/retention.rb +186 -0
  9. data/lib/behavior_analytics/context.rb +38 -2
  10. data/lib/behavior_analytics/debug/inspector.rb +82 -0
  11. data/lib/behavior_analytics/event.rb +7 -1
  12. data/lib/behavior_analytics/export/csv_exporter.rb +102 -0
  13. data/lib/behavior_analytics/export/json_exporter.rb +55 -0
  14. data/lib/behavior_analytics/hooks/callback.rb +50 -0
  15. data/lib/behavior_analytics/hooks/manager.rb +106 -0
  16. data/lib/behavior_analytics/hooks/webhook.rb +114 -0
  17. data/lib/behavior_analytics/integrations/rails/middleware.rb +99 -0
  18. data/lib/behavior_analytics/integrations/rails.rb +123 -2
  19. data/lib/behavior_analytics/jobs/active_event_job.rb +37 -0
  20. data/lib/behavior_analytics/jobs/delayed_event_job.rb +29 -0
  21. data/lib/behavior_analytics/jobs/sidekiq_event_job.rb +37 -0
  22. data/lib/behavior_analytics/observability/metrics.rb +112 -0
  23. data/lib/behavior_analytics/observability/tracer.rb +85 -0
  24. data/lib/behavior_analytics/processors/async_processor.rb +24 -0
  25. data/lib/behavior_analytics/processors/background_job_processor.rb +72 -0
  26. data/lib/behavior_analytics/query.rb +89 -4
  27. data/lib/behavior_analytics/replay/engine.rb +108 -0
  28. data/lib/behavior_analytics/replay/processor.rb +107 -0
  29. data/lib/behavior_analytics/reporting/generator.rb +125 -0
  30. data/lib/behavior_analytics/sampling/strategy.rb +54 -0
  31. data/lib/behavior_analytics/schema/definition.rb +71 -0
  32. data/lib/behavior_analytics/schema/validator.rb +113 -0
  33. data/lib/behavior_analytics/storage/active_record_adapter.rb +183 -10
  34. data/lib/behavior_analytics/storage/elasticsearch_adapter.rb +185 -0
  35. data/lib/behavior_analytics/storage/in_memory_adapter.rb +234 -5
  36. data/lib/behavior_analytics/storage/kafka_adapter.rb +127 -0
  37. data/lib/behavior_analytics/storage/redis_adapter.rb +211 -0
  38. data/lib/behavior_analytics/streaming/event_stream.rb +77 -0
  39. data/lib/behavior_analytics/throttling/limiter.rb +97 -0
  40. data/lib/behavior_analytics/tracker.rb +130 -4
  41. data/lib/behavior_analytics/version.rb +1 -1
  42. data/lib/behavior_analytics.rb +139 -2
  43. metadata +33 -3
@@ -21,8 +21,70 @@ module BehaviorAnalytics
21
21
  events = filter_by_date_range(events, options[:since], options[:until]) if options[:since] || options[:until]
22
22
  events = filter_by_event_name(events, options[:event_name]) if options[:event_name]
23
23
  events = filter_by_event_type(events, options[:event_type]) if options[:event_type]
24
+
25
+ # Apply metadata filters
26
+ if options[:metadata_filters]
27
+ events = filter_by_metadata(events, options[:metadata_filters])
28
+ end
29
+
30
+ # Apply path filtering
31
+ if options[:path]
32
+ events = events.select { |e| get_metadata_value(e, "path") == options[:path] }
33
+ end
34
+
35
+ if options[:path_pattern]
36
+ pattern = Regexp.new(options[:path_pattern].gsub('%', '.*'))
37
+ events = events.select { |e| path = get_metadata_value(e, "path"); path && pattern.match?(path) }
38
+ end
39
+
40
+ # Apply method filtering
41
+ if options[:method]
42
+ events = events.select { |e| get_metadata_value(e, "method")&.upcase == options[:method].upcase }
43
+ end
44
+
45
+ # Apply status code filtering
46
+ if options[:status_code]
47
+ events = events.select { |e| get_metadata_value(e, "status_code")&.to_s == options[:status_code].to_s }
48
+ end
49
+
50
+ # Apply where conditions
51
+ if options[:where_conditions]
52
+ events = apply_where_conditions(events, options[:where_conditions])
53
+ end
54
+
55
+ # Apply aggregations and group by
56
+ if options[:group_by] && !options[:group_by].empty?
57
+ return apply_group_by(events, options[:group_by], options[:aggregations])
58
+ elsif options[:aggregations] && !options[:aggregations].empty?
59
+ return apply_aggregations(events, options[:aggregations])
60
+ end
61
+
62
+ # Apply having conditions (after aggregation - handled in group_by/aggregations)
63
+ # Note: In-memory adapter applies having before returning grouped results
64
+
65
+ # Apply distinct
66
+ if options[:distinct]
67
+ seen = {}
68
+ events = events.select do |e|
69
+ value = get_field_value(e, options[:distinct])
70
+ key = value.to_s
71
+ if seen[key]
72
+ false
73
+ else
74
+ seen[key] = true
75
+ true
76
+ end
77
+ end
78
+ end
24
79
 
25
- events = events.sort_by { |e| e[:created_at] }.reverse
80
+ # Apply order by
81
+ if options[:order_by]
82
+ events = apply_order_by(events, options[:order_by])
83
+ else
84
+ events = events.sort_by { |e| e[:created_at] || Time.at(0) }.reverse
85
+ end
86
+
87
+ # Apply limit
26
88
  events = events.first(options[:limit]) if options[:limit]
27
89
 
28
90
  events
@@ -35,7 +97,35 @@ module BehaviorAnalytics
35
97
  end
36
98
 
37
99
  def event_count(context, options = {})
38
- events_for_context(context, options).count
100
+ # For count, we don't need aggregations/group_by, so use simplified version
101
+ context.validate!
102
+ events = filter_by_context(@events, context)
103
+
104
+ events = filter_by_date_range(events, options[:since], options[:until]) if options[:since] || options[:until]
105
+ events = filter_by_event_name(events, options[:event_name]) if options[:event_name]
106
+ events = filter_by_event_type(events, options[:event_type]) if options[:event_type]
107
+
108
+ if options[:metadata_filters]
109
+ events = filter_by_metadata(events, options[:metadata_filters])
110
+ end
111
+
112
+ if options[:path]
113
+ events = events.select { |e| get_metadata_value(e, "path") == options[:path] }
114
+ end
115
+
116
+ if options[:method]
117
+ events = events.select { |e| get_metadata_value(e, "method")&.upcase == options[:method].upcase }
118
+ end
119
+
120
+ if options[:status_code]
121
+ events = events.select { |e| get_metadata_value(e, "status_code")&.to_s == options[:status_code].to_s }
122
+ end
123
+
124
+ if options[:where_conditions]
125
+ events = apply_where_conditions(events, options[:where_conditions])
126
+ end
127
+
128
+ events.count
39
129
  end
40
130
 
41
131
  def unique_users(context, options = {})
@@ -53,9 +143,26 @@ module BehaviorAnalytics
53
143
 
54
144
  def filter_by_context(events, context)
55
145
  events.select do |event|
56
- matches_tenant = event[:tenant_id] == context.tenant_id
57
- matches_user = context.user_id.nil? || event[:user_id] == context.user_id || event[:user_id].nil?
58
- matches_user_type = context.user_type.nil? || event[:user_type] == context.user_type || event[:user_type].nil?
146
+ # Support different business cases:
147
+ # - Multi-tenant: must match tenant_id
148
+ # - Single-tenant: match user_id (tenant_id may be nil)
149
+ # - API-only: no strict matching required
150
+
151
+ matches_tenant = if context.has_tenant?
152
+ event[:tenant_id] == context.tenant_id
153
+ else
154
+ true # No tenant filter if context doesn't have tenant
155
+ end
156
+
157
+ matches_user = if context.has_user?
158
+ event[:user_id] == context.user_id
159
+ else
160
+ true # No user filter if context doesn't have user
161
+ end
162
+
163
+ matches_user_type = context.user_type.nil? ||
164
+ event[:user_type] == context.user_type ||
165
+ event[:user_type].nil?
59
166
 
60
167
  matches_tenant && matches_user && matches_user_type
61
168
  end
@@ -76,6 +183,128 @@ module BehaviorAnalytics
76
183
  event_type_sym = event_type.is_a?(Symbol) ? event_type : event_type.to_sym
77
184
  events.select { |e| e[:event_type] == event_type_sym || e[:event_type].to_sym == event_type_sym }
78
185
  end
186
+
187
+ def filter_by_metadata(events, metadata_filters)
188
+ events.select do |event|
189
+ metadata_filters.all? do |key, value|
190
+ get_metadata_value(event, key) == value || get_metadata_value(event, key).to_s == value.to_s
191
+ end
192
+ end
193
+ end
194
+
195
+ def get_metadata_value(event, key)
196
+ metadata = event[:metadata] || event["metadata"] || {}
197
+ metadata[key.to_sym] || metadata[key.to_s] || metadata[key]
198
+ end
199
+
200
+ def get_field_value(event, field)
201
+ event[field.to_sym] || event[field.to_s] || event[field]
202
+ end
203
+
204
+ def apply_where_conditions(events, where_conditions)
205
+ where_conditions.reduce(events) do |filtered, condition|
206
+ if condition[:raw]
207
+ # For raw conditions, we'd need to evaluate them - simplified version
208
+ # In production, you might want to use a proper expression evaluator
209
+ filtered
210
+ else
211
+ condition.reduce(filtered) do |result, (key, value)|
212
+ next result if key == :raw
213
+ result.select { |e| get_field_value(e, key) == value }
214
+ end
215
+ end
216
+ end
217
+ end
218
+
219
      # Group events by one or more fields and optionally compute aggregates
      # per group (in-memory analogue of SQL GROUP BY). Returns an array of
      # result hashes, one per group, keyed by the grouping fields plus either
      # the requested aggregate columns (named "<function>_<field>") or a
      # plain :count when no aggregations were asked for.
      # NOTE(review): function names are matched as *strings*; a symbol such
      # as :sum falls into the else branch and yields a count — confirm the
      # query layer always passes strings.
      def apply_group_by(events, group_by_fields, aggregations = [])
        grouped = events.group_by do |event|
          group_by_fields.map { |field| get_field_value(event, field) }
        end

        if aggregations && !aggregations.empty?
          grouped.map do |keys, group_events|
            result = {}
            group_by_fields.each_with_index do |field, idx|
              result[field.to_sym] = keys[idx]
            end
            aggregations.each do |agg|
              field = agg[:field]
              func = agg[:function]
              # nils are dropped first; remaining non-numeric values count
              # as 0 toward sum/avg.
              values = group_events.map { |e| get_field_value(e, field) }.compact
              result["#{func}_#{field}".to_sym] = case func
              when "sum"
                values.sum { |v| v.is_a?(Numeric) ? v : 0 }
              when "avg", "average"
                values.empty? ? 0 : values.sum { |v| v.is_a?(Numeric) ? v : 0 }.to_f / values.size
              when "min"
                values.min
              when "max"
                values.max
              when "count"
                values.size
              else
                # Unknown function: fall back to counting non-nil values.
                values.size
              end
            end
            result
          end
        else
          # No aggregations requested: return group keys plus the group size.
          grouped.map do |keys, group_events|
            result = {}
            group_by_fields.each_with_index do |field, idx|
              result[field.to_sym] = keys[idx]
            end
            result[:count] = group_events.size
            result
          end
        end
      end
262
+
263
+ def apply_aggregations(events, aggregations)
264
+ result = {}
265
+ aggregations.each do |agg|
266
+ field = agg[:field]
267
+ func = agg[:function]
268
+ values = events.map { |e| get_field_value(e, field) }.compact
269
+ result["#{func}_#{field}".to_sym] = case func
270
+ when "sum"
271
+ values.sum { |v| v.is_a?(Numeric) ? v : 0 }
272
+ when "avg", "average"
273
+ values.empty? ? 0 : values.sum { |v| v.is_a?(Numeric) ? v : 0 }.to_f / values.size
274
+ when "min"
275
+ values.min
276
+ when "max"
277
+ values.max
278
+ when "count"
279
+ values.size
280
+ else
281
+ values.size
282
+ end
283
+ end
284
+ [result]
285
+ end
286
+
287
+ def apply_order_by(events, order_by)
288
+ field = order_by[:field]
289
+ direction = order_by[:direction] || :desc
290
+
291
+ events.sort do |a, b|
292
+ a_val = get_field_value(a, field)
293
+ b_val = get_field_value(b, field)
294
+
295
+ comparison = if a_val.nil? && b_val.nil?
296
+ 0
297
+ elsif a_val.nil?
298
+ 1
299
+ elsif b_val.nil?
300
+ -1
301
+ else
302
+ a_val <=> b_val
303
+ end
304
+
305
+ direction == :desc ? -comparison : comparison
306
+ end
307
+ end
79
308
  end
80
309
  end
81
310
  end
@@ -0,0 +1,127 @@
1
# frozen_string_literal: true

begin
  require "ruby-kafka"
rescue LoadError
  raise LoadError, "ruby-kafka gem is required for KafkaAdapter. Please add 'ruby-kafka' to your Gemfile."
end

require "json"
require "time" # Time.parse is used for :since/:until filtering

module BehaviorAnalytics
  module Storage
    # Storage adapter that publishes behavior events to a Kafka topic and
    # reads them back through a consumer group. Kafka is a streaming log,
    # not a queryable store, so the read-side methods are best-effort
    # approximations (consumer-group offsets mean repeated queries may not
    # re-see already-consumed messages) intended for streaming pipelines
    # rather than ad-hoc analytics.
    class KafkaAdapter < Adapter
      def initialize(kafka: nil, topic: "behavior_events", producer: nil)
        @kafka = kafka || Kafka.new(seed_brokers: ["localhost:9092"])
        @topic = topic
        @producer = producer || @kafka.producer
      end

      # Serialize each event to JSON and publish it, keyed by tenant so one
      # tenant's events stay ordered within a partition.
      # Raises BehaviorAnalytics::Error on any delivery failure.
      def save_events(events)
        return if events.empty?

        events.each do |event|
          event_hash = event.is_a?(Hash) ? event : event.to_h
          # ruby-kafka expects a String partition key; tenant ids may be
          # integers, so coerce explicitly.
          key = (event_hash[:tenant_id] || "default").to_s
          value = serialize_event(event_hash)

          @producer.produce(value, topic: @topic, key: key)
        end

        @producer.deliver_messages
      rescue StandardError => e
        raise Error, "Failed to save events to Kafka: #{e.message}"
      end

      # Consume messages matching the context/options filters. Stops after
      # options[:limit] matches or when the consumer times out
      # (options[:timeout], default 5s max wait per fetch).
      def events_for_context(context, options = {})
        context.validate!

        # One consumer group per tenant/user scope — simplified; production
        # deployments should manage consumer groups explicitly.
        group_id = if context.has_tenant?
          "behavior_analytics_#{context.tenant_id}"
        elsif context.has_user?
          "behavior_analytics_user_#{context.user_id}"
        else
          "behavior_analytics_global"
        end

        consumer = @kafka.consumer(group_id: group_id)
        consumer.subscribe(@topic)

        events = []
        timeout = options[:timeout] || 5

        begin
          consumer.each_message(max_wait_time: timeout) do |message|
            event = deserialize_event(message.value)

            if matches_context?(event, context, options)
              events << event
              break if options[:limit] && events.size >= options[:limit]
            end
          end
        rescue Kafka::Error
          # Timeout or other Kafka errors: return whatever was collected.
        ensure
          consumer.stop
        end

        events
      end

      # Kafka does not support per-message deletion; retention is governed
      # by the topic's retention policy, so this is intentionally a no-op.
      def delete_old_events(before_date)
      end

      def event_count(context, options = {})
        events_for_context(context, options).count
      end

      def unique_users(context, options = {})
        events = events_for_context(context, options)
        events.map { |e| e[:user_id] }.compact.uniq.count
      end

      private

      def serialize_event(event_hash)
        JSON.generate(event_hash)
      end

      # JSON round-trip note: hash keys are re-symbolized, but *values* that
      # were symbols (event_type, event_name) come back as Strings.
      def deserialize_event(data)
        JSON.parse(data, symbolize_names: true)
      end

      # Apply context scoping plus the option filters a query may carry.
      def matches_context?(event, context, options)
        if context.has_tenant?
          return false unless event[:tenant_id] == context.tenant_id
        end

        if context.has_user?
          return false unless event[:user_id] == context.user_id
        end

        return false if context.user_type && event[:user_type] != context.user_type

        # Fix: compare names/types as strings — after the JSON round trip the
        # stored values are Strings, so a caller passing :page_view would
        # otherwise never match anything.
        return false if options[:event_name] && event[:event_name].to_s != options[:event_name].to_s
        return false if options[:event_type] && event[:event_type].to_s != options[:event_type].to_s

        if options[:since] || options[:until]
          raw_time = event[:created_at]
          # An event without a timestamp cannot satisfy a time filter
          # (previously Time.parse("") raised ArgumentError here).
          return false if raw_time.nil?

          event_time = Time.parse(raw_time.to_s)
          return false if options[:since] && event_time < options[:since]
          return false if options[:until] && event_time > options[:until]
        end

        true
      end
    end
  end
end
@@ -0,0 +1,211 @@
1
# frozen_string_literal: true

begin
  require "redis"
rescue LoadError
  raise LoadError, "Redis gem is required for RedisAdapter. Please add 'redis' to your Gemfile."
end

require "json"
require "time"          # Time.parse in filtering / cleanup
require "securerandom"  # fallback event ids

module BehaviorAnalytics
  module Storage
    # Storage adapter backed by Redis. Each event lives under its own key
    # ("<prefix>:event:<id>") with secondary index SETs per tenant, user and
    # event type so lookups do not have to scan the whole keyspace.
    class RedisAdapter < Adapter
      # NOTE(review): a nil ttl currently falls back to 86400s (1 day) in
      # save_events rather than "no expiry" — confirm that is intended.
      def initialize(redis: nil, key_prefix: "behavior_analytics", ttl: nil)
        @redis = redis || Redis.new
        @key_prefix = key_prefix
        @ttl = ttl
      end

      # Persist each event under its own key (SETEX) and register it in the
      # tenant/user/type index sets. Raises BehaviorAnalytics::Error on
      # failure.
      def save_events(events)
        return if events.empty?

        events.each do |event|
          event_hash = (event.is_a?(Hash) ? event : event.to_h).dup
          # Fix: assign a stable id up front so the event key and every index
          # entry refer to the SAME id. Previously an event without :id was
          # stored under a fresh uuid while indexed under nil, making it
          # unreachable through the indexes.
          event_hash[:id] ||= SecureRandom.uuid

          @redis.setex(event_key(event_hash), @ttl || 86400, serialize_event(event_hash))
          index_event(event_hash)
        end
      rescue StandardError => e
        raise Error, "Failed to save events to Redis: #{e.message}"
      end

      # Resolve candidate ids from the indexes, fetch the events, then apply
      # the filters Redis cannot express, plus optional ordering and limit.
      def events_for_context(context, options = {})
        context.validate!

        event_ids = find_event_ids(context, options)

        events = event_ids.map do |id|
          deserialize_event(@redis.get("#{@key_prefix}:event:#{id}"))
        end.compact

        events = filter_events(events, context, options)

        events = sort_events(events, options[:order_by]) if options[:order_by]
        events = events.first(options[:limit]) if options[:limit]

        events
      end

      # TTLs already expire events; this additionally scans and removes
      # anything created before +before_date+, keeping the indexes in sync.
      def delete_old_events(before_date)
        pattern = "#{@key_prefix}:event:*"
        @redis.scan_each(match: pattern) do |key|
          event = deserialize_event(@redis.get(key))
          if event && event[:created_at] && Time.parse(event[:created_at].to_s) < before_date
            @redis.del(key)
            remove_from_indexes(event)
          end
        end
      end

      # NOTE(review): counts index candidates only — option filters that
      # filter_events would apply (since/until, metadata…) are not applied
      # here, so this can over-count relative to events_for_context.
      def event_count(context, options = {})
        context.validate!
        find_event_ids(context, options).count
      end

      def unique_users(context, options = {})
        context.validate!
        event_ids = find_event_ids(context, options)
        events = event_ids.map { |id| deserialize_event(@redis.get("#{@key_prefix}:event:#{id}")) }.compact
        events.map { |e| e[:user_id] }.compact.uniq.count
      end

      private

      def event_key(event_hash)
        id = event_hash[:id] || SecureRandom.uuid
        "#{@key_prefix}:event:#{id}"
      end

      def serialize_event(event_hash)
        JSON.generate(event_hash)
      end

      def deserialize_event(data)
        return nil unless data

        JSON.parse(data, symbolize_names: true)
      end

      # Register the event id in each applicable secondary index.
      def index_event(event_hash)
        tenant_id = event_hash[:tenant_id]
        user_id = event_hash[:user_id]
        event_type = event_hash[:event_type]

        @redis.sadd("#{@key_prefix}:tenant:#{tenant_id}", event_hash[:id]) if tenant_id
        @redis.sadd("#{@key_prefix}:user:#{user_id}", event_hash[:id]) if user_id
        @redis.sadd("#{@key_prefix}:type:#{event_type}", event_hash[:id]) if event_type
      end

      def remove_from_indexes(event_hash)
        tenant_id = event_hash[:tenant_id]
        user_id = event_hash[:user_id]
        event_type = event_hash[:event_type]

        @redis.srem("#{@key_prefix}:tenant:#{tenant_id}", event_hash[:id]) if tenant_id
        @redis.srem("#{@key_prefix}:user:#{user_id}", event_hash[:id]) if user_id
        @redis.srem("#{@key_prefix}:type:#{event_type}", event_hash[:id]) if event_type
      end

      # Pick the cheapest starting index for the context, then intersect with
      # the other applicable indexes:
      # - multi-tenant: tenant index (optionally ∩ user ∩ type)
      # - single-tenant: user index
      # - API-only: type index, else full keyspace scan (least efficient)
      def find_event_ids(context, options)
        if context.has_tenant?
          ids = @redis.smembers("#{@key_prefix}:tenant:#{context.tenant_id}").to_a
        elsif context.has_user?
          ids = @redis.smembers("#{@key_prefix}:user:#{context.user_id}").to_a
        else
          if options[:event_type]
            ids = @redis.smembers("#{@key_prefix}:type:#{options[:event_type]}").to_a
          else
            ids = []
            @redis.scan_each(match: "#{@key_prefix}:event:*") do |key|
              ids << key.split(":").last
            end
          end
        end

        if context.has_tenant? && context.has_user?
          user_ids = @redis.smembers("#{@key_prefix}:user:#{context.user_id}").to_a
          ids = ids & user_ids
        end

        if options[:event_type] && context.has_tenant?
          type_ids = @redis.smembers("#{@key_prefix}:type:#{options[:event_type]}").to_a
          ids = ids & type_ids
        end

        ids
      end

      # In-process filtering for everything the index lookups cannot express.
      def filter_events(events, context, options)
        events.select do |event|
          matches = true

          if context.has_tenant?
            matches &&= event[:tenant_id] == context.tenant_id
          end

          if context.has_user?
            matches &&= event[:user_id] == context.user_id
          end

          matches &&= event[:user_type] == context.user_type if context.user_type
          matches &&= event[:event_name] == options[:event_name] if options[:event_name]

          if options[:since]
            matches &&= Time.parse(event[:created_at].to_s) >= options[:since]
          end

          if options[:until]
            matches &&= Time.parse(event[:created_at].to_s) <= options[:until]
          end

          if options[:metadata_filters]
            options[:metadata_filters].each do |key, value|
              metadata = event[:metadata] || {}
              matches &&= (metadata[key.to_sym] == value || metadata[key.to_s] == value)
            end
          end

          matches
        end
      end

      # Single-field sort; nil-vs-nil or incomparable pairs compare equal.
      def sort_events(events, order_by)
        field = order_by[:field]
        direction = order_by[:direction] || :desc

        events.sort do |a, b|
          a_val = a[field.to_sym] || a[field.to_s]
          b_val = b[field.to_sym] || b[field.to_s]
          comparison = (a_val <=> b_val) || 0
          direction == :desc ? -comparison : comparison
        end
      end
    end
  end
end
@@ -0,0 +1,77 @@
1
# frozen_string_literal: true

module BehaviorAnalytics
  module Streaming
    # A tiny in-process pub/sub bus for behavior events. Subscribers register
    # a callback plus an optional filter (a Proc, a Hash of field => value,
    # or an event-type Symbol/String); publish delivers each event to every
    # subscriber whose filter accepts it. A failing subscriber is logged and
    # never interrupts delivery to the remaining subscribers.
    class EventStream
      attr_reader :subscribers, :filters

      def initialize
        @subscribers = []
        @filters = []
        @mutex = Mutex.new
      end

      # Register +block+ as a subscriber, optionally guarded by +filter+.
      # Returns self so calls can be chained.
      def subscribe(filter: nil, &block)
        @mutex.synchronize do
          @subscribers << { filter: filter, callback: block }
        end
        self
      end

      # Deliver +event+ to all matching subscribers.
      # Fix: the subscriber list is snapshotted under the mutex but callbacks
      # run OUTSIDE it — previously a callback that called subscribe or
      # unsubscribe_all would deadlock, because Ruby's Mutex is not
      # reentrant.
      def publish(event)
        current = @mutex.synchronize { @subscribers.dup }

        current.each do |subscriber|
          next unless should_deliver?(event, subscriber[:filter])

          begin
            subscriber[:callback].call(event)
          rescue StandardError => e
            handle_subscriber_error(e, event, subscriber)
          end
        end
      end

      def unsubscribe_all
        @mutex.synchronize do
          @subscribers.clear
        end
      end

      private

      # Dispatch on the filter's shape; an unrecognized filter delivers.
      def should_deliver?(event, filter)
        return true unless filter

        case filter
        when Proc
          filter.call(event)
        when Hash
          filter.all? { |key, value| event_matches?(event, key, value) }
        when Symbol, String
          event[:event_type] == filter || event[:event_type].to_s == filter.to_s
        else
          true
        end
      end

      # Hash filters match top-level fields first, then fall back to
      # metadata; string-coerced equality also counts as a match.
      def event_matches?(event, key, value)
        event_value = event[key.to_sym] || event[key.to_s] || get_metadata_value(event, key.to_s)
        event_value == value || event_value.to_s == value.to_s
      end

      def get_metadata_value(event, key)
        metadata = event[:metadata] || event["metadata"] || {}
        metadata[key.to_sym] || metadata[key.to_s] || metadata[key]
      end

      # Log and swallow so one bad subscriber cannot break the others.
      def handle_subscriber_error(error, event, subscriber)
        if defined?(Rails) && Rails.logger
          Rails.logger.error("BehaviorAnalytics: Subscriber error: #{error.message}")
        end
      end
    end
  end
end