behavior_analytics 0.1.0 → 2.0.0

This diff shows the changes between publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only.
Files changed (40)
  1. checksums.yaml +4 -4
  2. data/behavior_analytics.gemspec +3 -1
  3. data/db/migrate/002_enhance_behavior_events_v2.rb +46 -0
  4. data/lib/behavior_analytics/analytics/cohorts.rb +242 -0
  5. data/lib/behavior_analytics/analytics/engine.rb +15 -0
  6. data/lib/behavior_analytics/analytics/funnels.rb +176 -0
  7. data/lib/behavior_analytics/analytics/retention.rb +186 -0
  8. data/lib/behavior_analytics/debug/inspector.rb +82 -0
  9. data/lib/behavior_analytics/export/csv_exporter.rb +102 -0
  10. data/lib/behavior_analytics/export/json_exporter.rb +55 -0
  11. data/lib/behavior_analytics/hooks/callback.rb +50 -0
  12. data/lib/behavior_analytics/hooks/manager.rb +106 -0
  13. data/lib/behavior_analytics/hooks/webhook.rb +114 -0
  14. data/lib/behavior_analytics/integrations/rails/middleware.rb +99 -0
  15. data/lib/behavior_analytics/integrations/rails.rb +106 -0
  16. data/lib/behavior_analytics/jobs/active_event_job.rb +37 -0
  17. data/lib/behavior_analytics/jobs/delayed_event_job.rb +29 -0
  18. data/lib/behavior_analytics/jobs/sidekiq_event_job.rb +37 -0
  19. data/lib/behavior_analytics/observability/metrics.rb +112 -0
  20. data/lib/behavior_analytics/observability/tracer.rb +85 -0
  21. data/lib/behavior_analytics/processors/async_processor.rb +24 -0
  22. data/lib/behavior_analytics/processors/background_job_processor.rb +72 -0
  23. data/lib/behavior_analytics/query.rb +87 -2
  24. data/lib/behavior_analytics/replay/engine.rb +108 -0
  25. data/lib/behavior_analytics/replay/processor.rb +107 -0
  26. data/lib/behavior_analytics/reporting/generator.rb +125 -0
  27. data/lib/behavior_analytics/sampling/strategy.rb +54 -0
  28. data/lib/behavior_analytics/schema/definition.rb +71 -0
  29. data/lib/behavior_analytics/schema/validator.rb +113 -0
  30. data/lib/behavior_analytics/storage/active_record_adapter.rb +168 -8
  31. data/lib/behavior_analytics/storage/elasticsearch_adapter.rb +175 -0
  32. data/lib/behavior_analytics/storage/in_memory_adapter.rb +214 -2
  33. data/lib/behavior_analytics/storage/kafka_adapter.rb +112 -0
  34. data/lib/behavior_analytics/storage/redis_adapter.rb +175 -0
  35. data/lib/behavior_analytics/streaming/event_stream.rb +77 -0
  36. data/lib/behavior_analytics/throttling/limiter.rb +97 -0
  37. data/lib/behavior_analytics/tracker.rb +130 -4
  38. data/lib/behavior_analytics/version.rb +1 -1
  39. data/lib/behavior_analytics.rb +138 -2
  40. metadata +33 -3
data/lib/behavior_analytics/storage/kafka_adapter.rb
@@ -0,0 +1,112 @@
+ # frozen_string_literal: true
+
+ begin
+   require "ruby-kafka"
+ rescue LoadError
+   raise LoadError, "ruby-kafka gem is required for KafkaAdapter. Please add 'ruby-kafka' to your Gemfile."
+ end
+
+ module BehaviorAnalytics
+   module Storage
+     class KafkaAdapter < Adapter
+       def initialize(kafka: nil, topic: "behavior_events", producer: nil)
+         @kafka = kafka || Kafka.new(seed_brokers: ["localhost:9092"])
+         @topic = topic
+         @producer = producer || @kafka.producer
+       end
+
+       def save_events(events)
+         return if events.empty?
+
+         events.each do |event|
+           event_hash = event.is_a?(Hash) ? event : event.to_h
+           key = event_hash[:tenant_id] || "default"
+           value = serialize_event(event_hash)
+
+           @producer.produce(value, topic: @topic, key: key)
+         end
+
+         @producer.deliver_messages
+       rescue StandardError => e
+         raise Error, "Failed to save events to Kafka: #{e.message}"
+       end
+
+       def events_for_context(context, options = {})
+         context.validate!
+
+         # Kafka is primarily for streaming, so we need a consumer
+         # This is a simplified version - in production you'd use a proper consumer group
+         consumer = @kafka.consumer(group_id: "behavior_analytics_#{context.tenant_id}")
+         consumer.subscribe(@topic)
+
+         events = []
+         timeout = options[:timeout] || 5
+
+         begin
+           consumer.each_message(max_wait_time: timeout) do |message|
+             event = deserialize_event(message.value)
+
+             if matches_context?(event, context, options)
+               events << event
+               break if options[:limit] && events.size >= options[:limit]
+             end
+           end
+         rescue Kafka::Error
+           # Timeout or other Kafka errors
+         ensure
+           consumer.stop
+         end
+
+         events
+       end
+
+       def delete_old_events(before_date)
+         # Kafka doesn't support deletion of old messages directly
+         # Messages are retained based on retention policy
+         # This is a no-op for Kafka
+       end
+
+       def event_count(context, options = {})
+         events_for_context(context, options).count
+       end
+
+       def unique_users(context, options = {})
+         events = events_for_context(context, options)
+         events.map { |e| e[:user_id] }.compact.uniq.count
+       end
+
+       private
+
+       def serialize_event(event_hash)
+         require "json"
+         JSON.generate(event_hash)
+       end
+
+       def deserialize_event(data)
+         require "json"
+         JSON.parse(data, symbolize_names: true)
+       end
+
+       def matches_context?(event, context, options)
+         return false unless event[:tenant_id] == context.tenant_id
+         return false if context.user_id && event[:user_id] != context.user_id
+         return false if context.user_type && event[:user_type] != context.user_type
+         return false if options[:event_name] && event[:event_name] != options[:event_name]
+         return false if options[:event_type] && event[:event_type] != options[:event_type]
+
+         if options[:since]
+           event_time = Time.parse(event[:created_at].to_s)
+           return false if event_time < options[:since]
+         end
+
+         if options[:until]
+           event_time = Time.parse(event[:created_at].to_s)
+           return false if event_time > options[:until]
+         end
+
+         true
+       end
+     end
+   end
+ end
+
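For orientation, a minimal wiring sketch (not from the package docs): it assumes a broker at the adapter's default of localhost:9092, and the event hash fields are illustrative.

```ruby
require "behavior_analytics"

# Uses the adapter's default Kafka client (seed broker localhost:9092).
adapter = BehaviorAnalytics::Storage::KafkaAdapter.new(topic: "behavior_events")

# Each event is JSON-encoded, keyed by tenant_id, and sent via deliver_messages.
adapter.save_events([
  { tenant_id: "acme", user_id: 42, event_type: :auth, event_name: "login" }
])
```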
data/lib/behavior_analytics/storage/redis_adapter.rb
@@ -0,0 +1,175 @@
+ # frozen_string_literal: true
+
+ begin
+   require "redis"
+ rescue LoadError
+   raise LoadError, "Redis gem is required for RedisAdapter. Please add 'redis' to your Gemfile."
+ end
+
+ module BehaviorAnalytics
+   module Storage
+     class RedisAdapter < Adapter
+       def initialize(redis: nil, key_prefix: "behavior_analytics", ttl: nil)
+         @redis = redis || Redis.new
+         @key_prefix = key_prefix
+         @ttl = ttl
+       end
+
+       def save_events(events)
+         return if events.empty?
+
+         events.each do |event|
+           event_hash = event.is_a?(Hash) ? event : event.to_h
+           key = event_key(event_hash)
+           @redis.setex(key, @ttl || 86400, serialize_event(event_hash))
+
+           # Add to index sets
+           index_event(event_hash)
+         end
+       rescue StandardError => e
+         raise Error, "Failed to save events to Redis: #{e.message}"
+       end
+
+       def events_for_context(context, options = {})
+         context.validate!
+
+         # Get event IDs from index
+         event_ids = find_event_ids(context, options)
+
+         # Fetch events
+         events = event_ids.map do |id|
+           deserialize_event(@redis.get("#{@key_prefix}:event:#{id}"))
+         end.compact
+
+         # Apply filters that can't be done in Redis
+         events = filter_events(events, context, options)
+
+         # Apply limit and ordering
+         events = sort_events(events, options[:order_by]) if options[:order_by]
+         events = events.first(options[:limit]) if options[:limit]
+
+         events
+       end
+
+       def delete_old_events(before_date)
+         # Redis TTL handles expiration, but we can also scan and delete
+         pattern = "#{@key_prefix}:event:*"
+         @redis.scan_each(match: pattern) do |key|
+           event = deserialize_event(@redis.get(key))
+           if event && event[:created_at] && Time.parse(event[:created_at].to_s) < before_date
+             @redis.del(key)
+             remove_from_indexes(event)
+           end
+         end
+       end
+
+       def event_count(context, options = {})
+         context.validate!
+         find_event_ids(context, options).count
+       end
+
+       def unique_users(context, options = {})
+         context.validate!
+         event_ids = find_event_ids(context, options)
+         events = event_ids.map { |id| deserialize_event(@redis.get("#{@key_prefix}:event:#{id}")) }.compact
+         events.map { |e| e[:user_id] }.compact.uniq.count
+       end
+
+       private
+
+       def event_key(event_hash)
+         id = event_hash[:id] || SecureRandom.uuid
+         "#{@key_prefix}:event:#{id}"
+       end
+
+       def serialize_event(event_hash)
+         require "json"
+         JSON.generate(event_hash)
+       end
+
+       def deserialize_event(data)
+         return nil unless data
+         require "json"
+         JSON.parse(data, symbolize_names: true)
+       end
+
+       def index_event(event_hash)
+         tenant_id = event_hash[:tenant_id]
+         user_id = event_hash[:user_id]
+         event_type = event_hash[:event_type]
+
+         @redis.sadd("#{@key_prefix}:tenant:#{tenant_id}", event_hash[:id])
+         @redis.sadd("#{@key_prefix}:user:#{user_id}", event_hash[:id]) if user_id
+         @redis.sadd("#{@key_prefix}:type:#{event_type}", event_hash[:id]) if event_type
+       end
+
+       def remove_from_indexes(event_hash)
+         tenant_id = event_hash[:tenant_id]
+         user_id = event_hash[:user_id]
+         event_type = event_hash[:event_type]
+
+         @redis.srem("#{@key_prefix}:tenant:#{tenant_id}", event_hash[:id])
+         @redis.srem("#{@key_prefix}:user:#{user_id}", event_hash[:id]) if user_id
+         @redis.srem("#{@key_prefix}:type:#{event_type}", event_hash[:id]) if event_type
+       end
+
+       def find_event_ids(context, options)
+         # Start with tenant index
+         ids = @redis.smembers("#{@key_prefix}:tenant:#{context.tenant_id}").to_a
+
+         # Intersect with user index if specified
+         if context.user_id
+           user_ids = @redis.smembers("#{@key_prefix}:user:#{context.user_id}").to_a
+           ids = ids & user_ids
+         end
+
+         # Intersect with event type if specified
+         if options[:event_type]
+           type_ids = @redis.smembers("#{@key_prefix}:type:#{options[:event_type]}").to_a
+           ids = ids & type_ids
+         end
+
+         ids
+       end
+
+       def filter_events(events, context, options)
+         events.select do |event|
+           matches = true
+
+           matches &&= event[:user_type] == context.user_type if context.user_type
+           matches &&= event[:event_name] == options[:event_name] if options[:event_name]
+
+           if options[:since]
+             matches &&= Time.parse(event[:created_at].to_s) >= options[:since]
+           end
+
+           if options[:until]
+             matches &&= Time.parse(event[:created_at].to_s) <= options[:until]
+           end
+
+           if options[:metadata_filters]
+             options[:metadata_filters].each do |key, value|
+               metadata = event[:metadata] || {}
+               matches &&= (metadata[key.to_sym] == value || metadata[key.to_s] == value)
+             end
+           end
+
+           matches
+         end
+       end
+
+       def sort_events(events, order_by)
+         field = order_by[:field]
+         direction = order_by[:direction] || :desc
+
+         events.sort do |a, b|
+           a_val = a[field.to_sym] || a[field.to_s]
+           b_val = b[field.to_sym] || b[field.to_s]
+           comparison = (a_val <=> b_val) || 0
+           direction == :desc ? -comparison : comparison
+         end
+       end
+     end
+   end
+ end
+
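A minimal sketch, assuming a local Redis; the one-week TTL is illustrative (the adapter falls back to 86400 seconds when `ttl` is nil). Note the index sets store the event's `:id`, so supplying one keeps index lookups consistent with the stored key.

```ruby
require "behavior_analytics"
require "redis"

adapter = BehaviorAnalytics::Storage::RedisAdapter.new(
  redis: Redis.new(url: "redis://localhost:6379/0"),
  key_prefix: "behavior_analytics",
  ttl: 7 * 24 * 3600 # keep events for a week
)

adapter.save_events([
  { id: "evt-1", tenant_id: "acme", user_id: 42, event_type: :page_view,
    event_name: "viewed_dashboard", created_at: Time.now }
])
```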
data/lib/behavior_analytics/streaming/event_stream.rb
@@ -0,0 +1,77 @@
+ # frozen_string_literal: true
+
+ module BehaviorAnalytics
+   module Streaming
+     class EventStream
+       attr_reader :subscribers, :filters
+
+       def initialize
+         @subscribers = []
+         @filters = []
+         @mutex = Mutex.new
+       end
+
+       def subscribe(filter: nil, &block)
+         @mutex.synchronize do
+           @subscribers << { filter: filter, callback: block }
+         end
+         self
+       end
+
+       def publish(event)
+         @mutex.synchronize do
+           @subscribers.each do |subscriber|
+             if should_deliver?(event, subscriber[:filter])
+               begin
+                 subscriber[:callback].call(event)
+               rescue StandardError => e
+                 handle_subscriber_error(e, event, subscriber)
+               end
+             end
+           end
+         end
+       end
+
+       def unsubscribe_all
+         @mutex.synchronize do
+           @subscribers.clear
+         end
+       end
+
+       private
+
+       def should_deliver?(event, filter)
+         return true unless filter
+
+         case filter
+         when Proc
+           filter.call(event)
+         when Hash
+           filter.all? { |key, value| event_matches?(event, key, value) }
+         when Symbol, String
+           event[:event_type] == filter || event[:event_type].to_s == filter.to_s
+         else
+           true
+         end
+       end
+
+       def event_matches?(event, key, value)
+         event_value = event[key.to_sym] || event[key.to_s] || get_metadata_value(event, key.to_s)
+         event_value == value || event_value.to_s == value.to_s
+       end
+
+       def get_metadata_value(event, key)
+         metadata = event[:metadata] || event["metadata"] || {}
+         metadata[key.to_sym] || metadata[key.to_s] || metadata[key]
+       end
+
+       def handle_subscriber_error(error, event, subscriber)
+         # Log error but don't stop other subscribers
+         if defined?(Rails) && Rails.logger
+           Rails.logger.error("BehaviorAnalytics: Subscriber error: #{error.message}")
+         end
+       end
+     end
+   end
+ end
+
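A short sketch of the three filter forms `should_deliver?` accepts (Symbol/String, Hash, and Proc); the event hash is illustrative.

```ruby
require "behavior_analytics"

stream = BehaviorAnalytics::Streaming::EventStream.new

stream.subscribe(filter: :page_view) { |e| puts "page view: #{e[:event_name]}" }
stream.subscribe(filter: { tenant_id: "acme" }) { |e| puts "acme event: #{e[:event_type]}" }
stream.subscribe(filter: ->(e) { e[:duration_ms].to_i > 500 }) { |e| puts "slow: #{e[:event_name]}" }

# Matches the first two subscribers; too fast for the third.
stream.publish(tenant_id: "acme", event_type: :page_view,
               event_name: "viewed_dashboard", duration_ms: 120)
```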
data/lib/behavior_analytics/throttling/limiter.rb
@@ -0,0 +1,97 @@
+ # frozen_string_literal: true
+
+ module BehaviorAnalytics
+   module Throttling
+     class Limiter
+       def initialize(options = {})
+         @per_tenant_limits = options[:per_tenant] || {}
+         @per_user_limits = options[:per_user] || {}
+         @global_limit = options[:global]
+         @window_size = options[:window_size] || 60 # seconds
+         @counters = {}
+         @mutex = Mutex.new
+       end
+
+       def check_limit(context, event = nil)
+         return { allowed: true } unless should_check_limits?
+
+         @mutex.synchronize do
+           # Check global limit
+           if @global_limit && !check_global_limit
+             return { allowed: false, reason: "global_limit_exceeded" }
+           end
+
+           # Check per-tenant limit
+           if context.tenant_id && @per_tenant_limits[context.tenant_id]
+             limit = @per_tenant_limits[context.tenant_id]
+             if !check_limit_for_key("tenant:#{context.tenant_id}", limit)
+               return { allowed: false, reason: "tenant_limit_exceeded", tenant_id: context.tenant_id }
+             end
+           end
+
+           # Check per-user limit
+           if context.user_id && @per_user_limits[context.user_id]
+             limit = @per_user_limits[context.user_id]
+             if !check_limit_for_key("user:#{context.user_id}", limit)
+               return { allowed: false, reason: "user_limit_exceeded", user_id: context.user_id }
+             end
+           end
+
+           { allowed: true }
+         end
+       end
+
+       def record_event(context)
+         return unless should_check_limits?
+
+         @mutex.synchronize do
+           increment_counter("global") if @global_limit
+           increment_counter("tenant:#{context.tenant_id}") if context.tenant_id && @per_tenant_limits[context.tenant_id]
+           increment_counter("user:#{context.user_id}") if context.user_id && @per_user_limits[context.user_id]
+         end
+       end
+
+       def reset_counters
+         @mutex.synchronize do
+           @counters.clear
+         end
+       end
+
+       private
+
+       def should_check_limits?
+         @global_limit || !@per_tenant_limits.empty? || !@per_user_limits.empty?
+       end
+
+       def check_global_limit
+         check_limit_for_key("global", @global_limit)
+       end
+
+       def check_limit_for_key(key, limit)
+         counter = get_counter(key)
+         counter < limit
+       end
+
+       def get_counter(key)
+         counter = @counters[key]
+
+         # Reset counter if window expired
+         if counter && counter[:expires_at] < Time.now
+           @counters.delete(key)
+           counter = nil
+         end
+
+         counter ||= { count: 0, expires_at: Time.now + @window_size }
+         @counters[key] = counter
+
+         counter[:count]
+       end
+
+       def increment_counter(key)
+         counter = get_counter(key)
+         @counters[key][:count] += 1
+       end
+     end
+   end
+ end
+
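A minimal sketch of the limiter's option shape, taken from the initializer above; the `Struct` stands in for the gem's context object, which the limiter only queries for `tenant_id` and `user_id`.

```ruby
require "behavior_analytics"

Ctx = Struct.new(:tenant_id, :user_id, keyword_init: true)

limiter = BehaviorAnalytics::Throttling::Limiter.new(
  global: 10_000,                  # events per window across all tenants
  per_tenant: { "acme" => 1_000 }, # keyed by tenant_id
  per_user: { 42 => 100 },         # keyed by user_id
  window_size: 60                  # seconds (the default)
)

ctx = Ctx.new(tenant_id: "acme", user_id: 42)
verdict = limiter.check_limit(ctx)    # => { allowed: true }
limiter.record_event(ctx) if verdict[:allowed]
```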
data/lib/behavior_analytics/tracker.rb
@@ -5,13 +5,22 @@ require "thread"

  module BehaviorAnalytics
    class Tracker
-     attr_reader :storage_adapter, :batch_size, :flush_interval, :context_resolver
+     attr_reader :storage_adapter, :batch_size, :flush_interval, :context_resolver, :async_processor, :event_stream

      def initialize(options = {})
        @storage_adapter = options[:storage_adapter] || BehaviorAnalytics.configuration.storage_adapter || Storage::InMemoryAdapter.new
        @batch_size = options[:batch_size] || BehaviorAnalytics.configuration.batch_size
        @flush_interval = options[:flush_interval] || BehaviorAnalytics.configuration.flush_interval
        @context_resolver = options[:context_resolver] || BehaviorAnalytics.configuration.context_resolver
+       @async_processor = options[:async_processor] || BehaviorAnalytics.configuration.async_processor
+       @use_async = options.fetch(:use_async, BehaviorAnalytics.configuration.use_async || false)
+       @event_stream = options[:event_stream] || BehaviorAnalytics.configuration.event_stream || Streaming::EventStream.new
+       @hooks_manager = options[:hooks_manager] || BehaviorAnalytics.configuration.hooks_manager || Hooks::Manager.new
+       @sampling_strategy = options[:sampling_strategy] || BehaviorAnalytics.configuration.sampling_strategy
+       @rate_limiter = options[:rate_limiter] || BehaviorAnalytics.configuration.rate_limiter
+       @schema_validator = options[:schema_validator] || BehaviorAnalytics.configuration.schema_validator
+       @metrics = options[:metrics] || BehaviorAnalytics.configuration.metrics || Observability::Metrics.new
+       @tracer = options[:tracer] || BehaviorAnalytics.configuration.tracer

        @buffer = []
        @mutex = Mutex.new
@@ -23,7 +32,50 @@ module BehaviorAnalytics
        context = normalize_context(context)
        context.validate!

-       event = Event.new(
+       # Check rate limiting
+       if @rate_limiter
+         limit_check = @rate_limiter.check_limit(context)
+         unless limit_check[:allowed]
+           raise Error, "Rate limit exceeded: #{limit_check[:reason]}"
+         end
+       end
+
+       # Check sampling
+       if @sampling_strategy
+         event_data = {
+           tenant_id: context.tenant_id,
+           user_id: context.user_id,
+           event_name: event_name,
+           event_type: event_type
+         }
+         unless @sampling_strategy.should_sample?(event_data, context)
+           return # Skip this event
+         end
+       end
+
+       # Build event data
+       event_data = {
+         tenant_id: context.tenant_id,
+         user_id: context.user_id,
+         user_type: context.user_type,
+         event_name: event_name,
+         event_type: event_type,
+         metadata: metadata.merge(context.filters),
+         session_id: options[:session_id],
+         ip: options[:ip],
+         user_agent: options[:user_agent],
+         duration_ms: options[:duration_ms]
+       }
+
+       # Validate schema if validator is configured
+       if @schema_validator
+         validation_result = @schema_validator.validate(event_data)
+         unless validation_result[:valid]
+           raise Error, "Event validation failed: #{validation_result[:errors].join(', ')}"
+         end
+       end
+
+       event = Event.new(event_data)
          tenant_id: context.tenant_id,
          user_id: context.user_id,
          user_type: context.user_type,
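The sampling check above only requires an object responding to `should_sample?(event_data, context)`. The gem ships its own strategy in `sampling/strategy.rb`; the sampler below is a hypothetical drop-in that satisfies the same contract, assuming the gem's default configuration supplies the remaining tracker settings.

```ruby
require "behavior_analytics"

# Hypothetical sampler: keeps roughly `rate` of events at random.
class RandomSampler
  def initialize(rate)
    @rate = rate
  end

  def should_sample?(_event_data, _context)
    rand < @rate
  end
end

tracker = BehaviorAnalytics::Tracker.new(sampling_strategy: RandomSampler.new(0.1))
```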
@@ -36,7 +88,56 @@ module BehaviorAnalytics
          duration_ms: options[:duration_ms]
        )

-       add_to_buffer(event)
+       # Execute before_track hooks
+       begin
+         @hooks_manager.execute_before_track(event.to_h, context.to_h)
+       rescue StandardError => e
+         @hooks_manager.execute_on_error(e, event.to_h, context.to_h)
+         raise if BehaviorAnalytics.configuration.raise_on_hook_error
+       end
+
+       begin
+         # Start tracing if enabled
+         span = @tracer&.start_span("track_event", tags: {
+           event_name: event_name,
+           event_type: event_type.to_s,
+           tenant_id: context.tenant_id
+         })
+
+         add_to_buffer(event)
+
+         # Record metrics
+         @metrics.increment_counter("events.tracked", tags: {
+           event_type: event_type.to_s,
+           tenant_id: context.tenant_id.to_s
+         })
+
+         # Debug logging
+         if BehaviorAnalytics.configuration.debug_mode
+           BehaviorAnalytics.configuration.debug("Event tracked: #{event_name}", context: context.to_h)
+         end
+
+         # Publish to event stream
+         @event_stream.publish(event.to_h) if @event_stream
+
+         # Execute after_track hooks
+         @hooks_manager.execute_after_track(event.to_h, context.to_h)
+
+         # Record event for rate limiting
+         @rate_limiter.record_event(context) if @rate_limiter
+
+         # Finish tracing
+         @tracer&.finish_span(span[:id]) if span
+       rescue StandardError => e
+         # Record error metrics
+         @metrics.increment_counter("events.errors", tags: {
+           event_type: event_type.to_s,
+           tenant_id: context.tenant_id.to_s
+         })
+
+         @hooks_manager.execute_on_error(e, event.to_h, context.to_h)
+         raise
+       end
      end

      def track_api_call(context:, method:, path:, status_code:, duration_ms: nil, **options)
@@ -76,7 +177,24 @@

        return if events_to_flush.empty?

-       @storage_adapter.save_events(events_to_flush)
+       start_time = Time.now
+
+       begin
+         if @use_async && @async_processor
+           @async_processor.process_async(events_to_flush)
+         else
+           @storage_adapter.save_events(events_to_flush)
+         end
+
+         # Record flush metrics
+         duration_ms = ((Time.now - start_time) * 1000).to_i
+         @metrics.record_histogram("flush.duration_ms", duration_ms)
+         @metrics.increment_counter("flush.count", value: events_to_flush.size)
+       rescue StandardError => e
+         @metrics.increment_counter("flush.errors")
+         raise
+       end
+
        restart_flush_timer
      end

@@ -88,6 +206,14 @@
        Query.new(@storage_adapter)
      end

+     def subscribe_to_stream(filter: nil, &block)
+       @event_stream.subscribe(filter: filter, &block)
+     end
+
+     def inspector
+       @inspector ||= Debug::Inspector.new(self)
+     end
+
      private

      def normalize_context(context)
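Taken together, the tracker hunks suggest the wiring below. This is a hedged sketch, not shipped documentation: the option names come from the initializer diff, `process_async(events)` is the only method `flush` calls on an async processor, and `ThreadProcessor` is a hypothetical stand-in for the gem's own processors.

```ruby
require "behavior_analytics"

# Hypothetical processor: flush only requires #process_async(events).
class ThreadProcessor
  def initialize(adapter)
    @adapter = adapter
  end

  def process_async(events)
    # Fire-and-forget; a real processor would pool threads and retry.
    Thread.new { @adapter.save_events(events) }
  end
end

adapter = BehaviorAnalytics::Storage::InMemoryAdapter.new
tracker = BehaviorAnalytics::Tracker.new(
  storage_adapter: adapter,
  async_processor: ThreadProcessor.new(adapter),
  use_async: true,
  rate_limiter: BehaviorAnalytics::Throttling::Limiter.new(global: 10_000)
)

# New in 2.0: stream subscriptions and a debug inspector on the tracker.
tracker.subscribe_to_stream(filter: :auth) { |e| puts "auth: #{e[:event_name]}" }
tracker.inspector
```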
data/lib/behavior_analytics/version.rb
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module BehaviorAnalytics
-   VERSION = "0.1.0"
+   VERSION = "2.0.0"
  end