amplitude-experiment 1.1.5 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/amplitude-experiment.gemspec +1 -0
- data/lib/amplitude/client.rb +54 -0
- data/lib/amplitude/config.rb +78 -0
- data/lib/amplitude/constants.rb +45 -0
- data/lib/amplitude/event.rb +244 -0
- data/lib/amplitude/exception.rb +15 -0
- data/lib/amplitude/http_client.rb +161 -0
- data/lib/amplitude/plugin.rb +131 -0
- data/lib/amplitude/processor.rb +100 -0
- data/lib/amplitude/storage.rb +146 -0
- data/lib/amplitude/timeline.rb +98 -0
- data/lib/amplitude/utils.rb +29 -0
- data/lib/amplitude/workers.rb +101 -0
- data/lib/amplitude-experiment.rb +6 -0
- data/lib/amplitude.rb +12 -0
- data/lib/experiment/local/assignment/assignment.rb +21 -0
- data/lib/experiment/local/assignment/assignment_config.rb +12 -0
- data/lib/experiment/local/assignment/assignment_filter.rb +15 -0
- data/lib/experiment/local/assignment/assignment_service.rb +48 -0
- data/lib/experiment/local/client.rb +19 -6
- data/lib/experiment/local/config.rb +6 -1
- data/lib/experiment/remote/client.rb +3 -5
- data/lib/experiment/util/hash.rb +15 -0
- data/lib/experiment/util/lru_cache.rb +107 -0
- data/lib/experiment/version.rb +1 -1
- metadata +35 -2
data/lib/amplitude/plugin.rb
ADDED
@@ -0,0 +1,131 @@
+require 'securerandom'
+module AmplitudeAnalytics
+  # Plugin
+  class Plugin
+    attr_reader :plugin_type
+
+    def initialize(plugin_type)
+      @plugin_type = plugin_type
+    end
+
+    def setup(client)
+      # Setup plugins with client instance parameter
+    end
+
+    def execute(event)
+      # Process event with plugin instance
+    end
+  end
+
+  # EventPlugin
+  class EventPlugin < Plugin
+    def execute(event)
+      track(event)
+    end
+
+    def track(event)
+      event
+    end
+  end
+
+  # DestinationPlugin
+  class DestinationPlugin < EventPlugin
+    attr_reader :timeline
+
+    def initialize
+      super(PluginType::DESTINATION)
+      @timeline = Timeline.new
+    end
+
+    def setup(client)
+      @timeline.setup(client)
+    end
+
+    def add(plugin)
+      @timeline.add(plugin)
+      self
+    end
+
+    def remove(plugin)
+      @timeline.remove(plugin)
+      self
+    end
+
+    def execute(event)
+      event = @timeline.process(event)
+      super(event)
+    end
+
+    def shutdown
+      @timeline.shutdown
+    end
+  end
+
+  # AmplitudeDestinationPlugin
+  class AmplitudeDestinationPlugin < DestinationPlugin
+    attr_reader :workers
+
+    def initialize
+      super
+      @workers = Workers.new
+      @storage = nil
+      @configuration = nil
+    end
+
+    def setup(client)
+      @configuration = client.configuration
+      @storage = client.configuration.storage
+      @workers.setup(client.configuration, @storage)
+      @storage.setup(client.configuration, @workers)
+    end
+
+    def verify_event(event)
+      return false unless event.is_a?(BaseEvent) && event.event_type && (event.user_id || event.device_id)
+
+      true
+    end
+
+    def execute(event)
+      event = @timeline.process(event)
+      raise InvalidEventError, 'Invalid event.' unless verify_event(event)
+
+      @storage.push(event)
+    end
+
+    def flush
+      @workers.flush
+    end
+
+    def shutdown
+      @timeline.shutdown
+      @workers.stop
+    end
+  end
+
+  # ContextPlugin
+  class ContextPlugin < Plugin
+    attr_accessor :configuration
+
+    def initialize
+      super(PluginType::BEFORE)
+      @context_string = "#{SDK_LIBRARY}/#{SDK_VERSION}"
+      @configuration = nil
+    end
+
+    def setup(client)
+      @configuration = client.configuration
+    end
+
+    def apply_context_data(event)
+      event.library = @context_string
+    end
+
+    def execute(event)
+      event.time ||= AmplitudeAnalytics.current_milliseconds
+      event.insert_id ||= SecureRandom.uuid
+      event.ingestion_metadata ||= @configuration.ingestion_metadata if @configuration.ingestion_metadata
+      apply_context_data(event)
+      event
+    end
+  end
+end
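The hunk above defines the Analytics-side plugin pipeline: Plugin carries a plugin_type, EventPlugin#execute delegates to track, and DestinationPlugin routes every event through its own Timeline before forwarding it. A minimal sketch of a custom enrichment-style plugin built on this API (assumptions: PluginType::ENRICHMENT is defined in the new amplitude/constants.rb, which is not expanded in this diff, and the event exposes the same accessors ContextPlugin uses above):

require 'amplitude'

# Hypothetical plugin that back-fills a timestamp before events reach a destination.
class TimestampPlugin < AmplitudeAnalytics::EventPlugin
  def initialize
    super(AmplitudeAnalytics::PluginType::ENRICHMENT)
  end

  # EventPlugin#execute calls track; return the (possibly modified) event.
  def track(event)
    event.time ||= AmplitudeAnalytics.current_milliseconds
    event
  end
end

destination = AmplitudeAnalytics::AmplitudeDestinationPlugin.new
destination.add(TimestampPlugin.new) # DestinationPlugin#add returns self, so calls can chain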
data/lib/amplitude/processor.rb
ADDED
@@ -0,0 +1,100 @@
+module AmplitudeAnalytics
+  # ResponseProcessor
+  class ResponseProcessor
+    def initialize
+      @configuration = nil
+      @storage = nil
+    end
+
+    def setup(configuration, storage)
+      @configuration = configuration
+      @storage = storage
+    end
+
+    def process_response(res, events)
+      case res.status
+      when HttpStatus::SUCCESS
+        callback(events, res.code, 'Event sent successfully.')
+        log(events, res.code, 'Event sent successfully.')
+      when HttpStatus::TIMEOUT, HttpStatus::FAILED
+        push_to_storage(events, 0, res)
+      when HttpStatus::PAYLOAD_TOO_LARGE
+        if events.length == 1
+          callback(events, res.code, res.error)
+          log(events, res.code, res.error)
+        else
+          @configuration.increase_flush_divider
+          push_to_storage(events, 0, res)
+        end
+      when HttpStatus::INVALID_REQUEST
+        raise InvalidAPIKeyError, res.error if res.error.start_with?('Invalid API key:')
+
+        if res.missing_field
+          callback(events, res.code, "Request missing required field #{res.missing_field}")
+          log(events, res.code, "Request missing required field #{res.missing_field}")
+        else
+          invalid_index_set = res.invalid_or_silenced_index
+          events_for_retry = []
+          events_for_callback = []
+          events.each_with_index do |event, index|
+            if invalid_index_set.include?(index)
+              events_for_callback << event
+            else
+              events_for_retry << event
+            end
+          end
+          callback(events_for_callback, res.code, res.error)
+          log(events_for_callback, res.code, res.error)
+          push_to_storage(events_for_retry, 0, res)
+        end
+      when HttpStatus::TOO_MANY_REQUESTS
+        events_for_callback = []
+        events_for_retry_delay = []
+        events_for_retry = []
+        events.each_with_index do |event, index|
+          if res.throttled_events&.include?(index)
+            if res.exceed_daily_quota(event)
+              events_for_callback << event
+            else
+              events_for_retry_delay << event
+            end
+          else
+            events_for_retry << event
+          end
+        end
+        callback(events_for_callback, res.code, 'Exceeded daily quota')
+        push_to_storage(events_for_retry_delay, 30_000, res)
+        push_to_storage(events_for_retry, 0, res)
+      else
+        callback(events, res.code, res.error || 'Unknown error')
+        log(events, res.code, res.error || 'Unknown error')
+      end
+    end
+
+    def push_to_storage(events, delay, res)
+      events.each do |event|
+        event.retry += 1
+        success, message = @storage.push(event, delay)
+        unless success
+          callback([event], res.code, message)
+          log([event], res.code, message)
+        end
+      end
+    end
+
+    def callback(events, code, message)
+      events.each do |event|
+        @configuration.callback.call(event, code, message) if @configuration.callback.respond_to?(:call)
+        event.callback(code, message)
+      rescue StandardError => e
+        @configuration.logger.exception("Error callback for event #{event}: #{e.message}")
+      end
+    end
+
+    def log(events, code, message)
+      events.each do |event|
+        @configuration.logger.info("#{message}, response code: #{code}, event: #{event}")
+      end
+    end
+  end
+end
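ResponseProcessor fans each HTTP result back out per event: it invokes the configuration-level callback when it responds to call, then the event's own callback, and re-queues retryable events via push_to_storage. A sketch of wiring a configuration-level callback; treating AmplitudeAnalytics::Config.new as accepting api_key: and callback: keywords is an assumption here, since config.rb is added in this release but not expanded in this diff:

require 'amplitude'

# Assumed keyword interface for Config; the diff only shows the processor
# reading configuration.callback and calling it with (event, code, message).
on_result = lambda do |event, code, message|
  warn "amplitude delivery: #{code} #{message} (#{event})"
end

config = AmplitudeAnalytics::Config.new(api_key: 'YOUR-API-KEY', callback: on_result)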
data/lib/amplitude/storage.rb
ADDED
@@ -0,0 +1,146 @@
+require 'monitor'
+module AmplitudeAnalytics
+  # Storage
+  class Storage
+    def push(_event, _delay = 0)
+      raise NotImplementedError, 'push method must be implemented in subclasses'
+    end
+
+    def pull(_batch_size)
+      raise NotImplementedError, 'pull method must be implemented in subclasses'
+    end
+
+    def pull_all
+      raise NotImplementedError, 'pull_all method must be implemented in subclasses'
+    end
+  end
+
+  # StorageProvider class
+  class StorageProvider
+    def storage
+      raise NotImplementedError, 'get_storage method must be implemented in subclasses'
+    end
+  end
+
+  # InMemoryStorage class
+  class InMemoryStorage < Storage
+    attr_reader :total_events, :ready_queue, :workers, :buffer_data, :monitor
+
+    def initialize
+      super
+      @total_events = 0
+      @buffer_data = []
+      @ready_queue = []
+      @monitor = Monitor.new
+      @buffer_lock_cv = @monitor.new_cond
+      @configuration = nil
+      @workers = nil
+    end
+
+    def lock
+      @buffer_lock_cv
+    end
+
+    def max_retry
+      @configuration.flush_max_retries
+    end
+
+    def wait_time
+      if @ready_queue.any?
+        0
+      elsif @buffer_data.any?
+        [@buffer_data[0][0] - AmplitudeAnalytics.current_milliseconds, @configuration.flush_interval_millis].min
+      else
+        @configuration.flush_interval_millis
+      end
+    end
+
+    def setup(configuration, workers)
+      @configuration = configuration
+      @workers = workers
+    end
+
+    def push(event, delay = 0)
+      return false, 'Destination buffer full. Retry temporarily disabled' if event.retry && @total_events >= MAX_BUFFER_CAPACITY
+
+      return false, "Event reached max retry times #{max_retry}." if event.retry >= max_retry
+
+      total_delay = delay + retry_delay(event.retry)
+      insert_event(total_delay, event)
+      @workers.start
+      [true, nil]
+    end
+
+    def pull(batch_size)
+      current_time = AmplitudeAnalytics.current_milliseconds
+      @monitor.synchronize do
+        result = @ready_queue.shift(batch_size)
+        index = 0
+        while index < @buffer_data.length && index < batch_size - result.length &&
+              current_time >= @buffer_data[index][0]
+          event = @buffer_data[index][1]
+          result << event
+          index += 1
+        end
+        @buffer_data.slice!(0, index)
+        @total_events -= result.length
+        result
+      end
+    end
+
+    def pull_all
+      @monitor.synchronize do
+        result = @ready_queue + @buffer_data.map { |element| element[1] }
+        @buffer_data.clear
+        @ready_queue.clear
+        @total_events = 0
+        result
+      end
+    end
+
+    def insert_event(total_delay, event)
+      current_time = AmplitudeAnalytics.current_milliseconds
+      @monitor.synchronize do
+        @ready_queue << @buffer_data.shift[1] while @buffer_data.any? && @buffer_data[0][0] <= current_time
+
+        if total_delay == 0
+          @ready_queue << event
+        else
+          time_stamp = current_time + total_delay
+          left = 0
+          right = @buffer_data.length - 1
+          while left <= right
+            mid = (left + right) / 2
+            if @buffer_data[mid][0] > time_stamp
+              right = mid - 1
+            else
+              left = mid + 1
+            end
+          end
+          @buffer_data.insert(left, [time_stamp, event])
+        end
+
+        @total_events += 1
+
+        lock.signal if @ready_queue.length >= @configuration.flush_queue_size
+      end
+    end
+
+    def retry_delay(ret)
+      if ret > max_retry
+        3200
+      elsif ret <= 0
+        0
+      else
+        100 * (2**((ret - 1) / 2))
+      end
+    end
+  end
+
+  # InMemoryStorageProvider class
+  class InMemoryStorageProvider < StorageProvider
+    def storage
+      InMemoryStorage.new
+    end
+  end
+end
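retry_delay sets the backoff applied when an event is re-queued: integer division makes the delay double every second retry, starting at 100 ms, and the delay is pinned at 3200 ms once the retry count exceeds flush_max_retries. A standalone check of the formula (12 is an illustrative max-retry value, not necessarily the gem's default):

# Reproduces InMemoryStorage#retry_delay outside the gem.
def retry_delay(ret, max_retry)
  if ret > max_retry
    3200
  elsif ret <= 0
    0
  else
    100 * (2**((ret - 1) / 2))
  end
end

delays = (1..6).map { |r| retry_delay(r, 12) }
p delays # => [100, 100, 200, 200, 400, 400]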
data/lib/amplitude/timeline.rb
ADDED
@@ -0,0 +1,98 @@
+require_relative 'constants'
+
+module AmplitudeAnalytics
+  # Timeline
+  class Timeline
+    attr_accessor :configuration
+    attr_reader :plugins
+
+    def initialize(configuration = nil)
+      @locks = {
+        PluginType::BEFORE => Mutex.new,
+        PluginType::ENRICHMENT => Mutex.new,
+        PluginType::DESTINATION => Mutex.new
+      }
+      @plugins = {
+        PluginType::BEFORE => [],
+        PluginType::ENRICHMENT => [],
+        PluginType::DESTINATION => []
+      }
+      @configuration = configuration
+    end
+
+    def logger
+      @configuration&.logger
+      Logger.new($stdout, progname: LOGGER_NAME)
+    end
+
+    def setup(client)
+      @configuration = client.configuration
+    end
+
+    def add(plugin)
+      @locks[plugin.plugin_type].synchronize do
+        @plugins[plugin.plugin_type] << plugin
+      end
+    end
+
+    def remove(plugin)
+      @locks.each_key do |plugin_type|
+        @locks[plugin_type].synchronize do
+          @plugins[plugin_type].reject! { |p| p == plugin }
+        end
+      end
+    end
+
+    def flush
+      destination_futures = []
+      @locks[PluginType::DESTINATION].synchronize do
+        @plugins[PluginType::DESTINATION].each do |destination|
+          destination_futures << destination.flush
+        rescue StandardError
+          logger.exception('Error for flush events')
+        end
+      end
+      destination_futures
+    end
+
+    def process(event)
+      if @configuration&.opt_out
+        logger.info('Skipped event for opt out config')
+        return event
+      end
+
+      before_result = apply_plugins(PluginType::BEFORE, event)
+      enrich_result = apply_plugins(PluginType::ENRICHMENT, before_result)
+      apply_plugins(PluginType::DESTINATION, enrich_result)
+      enrich_result
+    end
+
+    def apply_plugins(plugin_type, event)
+      result = event
+      @locks[plugin_type].synchronize do
+        @plugins[plugin_type].each do |plugin|
+          break unless result
+
+          begin
+            if plugin.plugin_type == PluginType::DESTINATION
+              plugin.execute(Marshal.load(Marshal.dump(result)))
+            else
+              result = plugin.execute(result)
+            end
+          rescue InvalidEventError
+            logger.error("Invalid event body #{event}")
+          rescue StandardError
+            logger.error("Error for apply #{PluginType.name(plugin_type)} plugin for event #{event}")
+          end
+        end
+      end
+      result
+    end
+
+    def shutdown
+      @locks[PluginType::DESTINATION].synchronize do
+        @plugins[PluginType::DESTINATION].each(&:shutdown)
+      end
+    end
+  end
+end
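Timeline#process applies plugins in a fixed order (BEFORE, then ENRICHMENT, then DESTINATION) and hands destination plugins a deep copy made with Marshal, so a destination cannot mutate the event object returned to the caller. The copy semantics in isolation:

# Destination plugins receive Marshal.load(Marshal.dump(event)), so in-place
# edits inside a destination do not leak back into the pipeline result.
original = { 'event_properties' => { 'plan' => 'free' } }
copy = Marshal.load(Marshal.dump(original))
copy['event_properties']['plan'] = 'pro'
p original['event_properties']['plan'] # => "free"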
data/lib/amplitude/utils.rb
ADDED
@@ -0,0 +1,29 @@
+require 'logger'
+require 'time'
+
+# Amplitude
+module AmplitudeAnalytics
+  def self.logger
+    @logger ||= Logger.new($stdout, progname: LOGGER_NAME)
+  end
+
+  def self.current_milliseconds
+    (Time.now.to_f * 1000).to_i
+  end
+
+  def self.truncate(obj)
+    case obj
+    when Hash
+      if obj.length > MAX_PROPERTY_KEYS
+        logger.error("Too many properties. #{MAX_PROPERTY_KEYS} maximum.")
+        return {}
+      end
+      obj.each { |key, value| obj[key] = truncate(value) }
+    when Array
+      obj.map! { |element| truncate(element) }
+    when String
+      obj = obj[0, MAX_STRING_LENGTH]
+    end
+    obj
+  end
+end
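AmplitudeAnalytics.truncate walks hashes and arrays recursively, drops a hash outright when it has more than MAX_PROPERTY_KEYS entries, and clips strings to MAX_STRING_LENGTH characters; both constants come from the new amplitude/constants.rb, which is not expanded in this diff. A short usage sketch:

require 'amplitude'

props = { 'note' => 'x' * 10_000, 'tags' => ['y' * 10_000] }
trimmed = AmplitudeAnalytics.truncate(props)
trimmed['note'].length    # => MAX_STRING_LENGTH
trimmed['tags'][0].length # => MAX_STRING_LENGTH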
data/lib/amplitude/workers.rb
ADDED
@@ -0,0 +1,101 @@
+require 'json'
+require 'concurrent'
+
+module AmplitudeAnalytics
+  # Workers
+  class Workers
+    attr_reader :is_active, :is_started, :storage, :configuration, :threads_pool, :consumer_lock, :response_processor, :http_client
+
+    def initialize
+      @threads_pool = Concurrent::ThreadPoolExecutor.new(max_threads: 16)
+      @is_active = true
+      @consumer_lock = Mutex.new
+      @is_started = false
+      @configuration = nil
+      @storage = nil
+      @response_processor = ResponseProcessor.new
+      @http_client = HttpClient.new
+    end
+
+    def setup(configuration, storage)
+      @configuration = configuration
+      @storage = storage
+      @response_processor = ResponseProcessor.new
+      @response_processor.setup(configuration, storage)
+    end
+
+    def start
+      @consumer_lock.synchronize do
+        unless @is_started
+          @is_started = true
+          Thread.new { buffer_consumer }
+        end
+      end
+    end
+
+    def stop
+      flush
+      @is_active = false
+      @is_started = true
+      @threads_pool.shutdown
+    end
+
+    def flush
+      events = @storage.pull_all unless @storage.nil?
+      Concurrent::Future.execute do
+        send(events) if events && !events.empty?
+      end
+    end
+
+    def send(events)
+      url = @configuration.server_url
+      payload = get_payload(events)
+      res = @http_client.post(url, payload)
+      begin
+        @response_processor.process_response(res, events)
+      rescue InvalidAPIKeyError
+        @configuration.logger.error('Invalid API Key')
+      end
+    end
+
+    def get_payload(events)
+      payload_body = {
+        'api_key' => @configuration.api_key,
+        'events' => []
+      }
+
+      events.each do |event|
+        event_body = event.event_body
+        payload_body['events'] << event_body if event_body
+      end
+      payload_body['options'] = @configuration.options if @configuration.options
+      JSON.dump(payload_body).encode('utf-8')
+    end
+
+    def buffer_consumer
+      if @is_active
+        @storage.monitor.synchronize do
+          @storage.lock.wait(@configuration.flush_interval_millis.to_f / 1000)
+
+          loop do
+            break unless @storage.total_events.positive?
+
+            events = @storage.pull(@configuration.flush_queue_size)
+            if events
+              @threads_pool.post { send(events) }
+            else
+              wait_time = @storage.wait_time.to_f / 1000
+              @storage.lock.wait(wait_time) if wait_time > 0
+            end
+          end
+        end
+      end
+    rescue StandardError => e
+      @configuration.logger.error("Consumer thread error: #{e}")
+    ensure
+      @consumer_lock.synchronize do
+        @is_started = false
+      end
+    end
+  end
+end
data/lib/amplitude-experiment.rb
CHANGED
@@ -9,6 +9,12 @@ require 'experiment/remote/client'
 require 'experiment/local/client'
 require 'experiment/local/config'
 require 'experiment/local/fetcher'
+require 'experiment/local/assignment/assignment'
+require 'experiment/local/assignment/assignment_filter'
+require 'experiment/local/assignment/assignment_service'
+require 'experiment/local/assignment/assignment_config'
+require 'experiment/util/lru_cache'
+require 'experiment/util/hash'
 
 # Amplitude Experiment Module
 module AmplitudeExperiment
data/lib/amplitude.rb
ADDED
@@ -0,0 +1,12 @@
+require 'amplitude/client'
+require 'amplitude/config'
+require 'amplitude/constants'
+require 'amplitude/event'
+require 'amplitude/exception'
+require 'amplitude/http_client'
+require 'amplitude/plugin'
+require 'amplitude/processor'
+require 'amplitude/storage'
+require 'amplitude/timeline'
+require 'amplitude/utils'
+require 'amplitude/workers'
data/lib/experiment/local/assignment/assignment.rb
ADDED
@@ -0,0 +1,21 @@
+module AmplitudeExperiment
+  DAY_MILLIS = 86_400_000
+  # Assignment
+  class Assignment
+    attr_accessor :user, :results, :timestamp
+
+    def initialize(user, results)
+      @user = user
+      @results = results
+      @timestamp = (Time.now.to_f * 1000).to_i
+    end
+
+    def canonicalize
+      sb = "#{@user&.user_id&.strip} #{@user&.device_id&.strip} "
+      results.sort.to_h.each do |key, value|
+        sb += "#{key.strip} #{value['variant']&.fetch('key', '')&.strip} "
+      end
+      sb
+    end
+  end
+end
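Assignment#canonicalize builds a stable key from the user identifiers plus the flag results sorted by flag key; the filter in assignment_filter.rb dedupes on this string. A sketch using a Struct as a stand-in for the Experiment user object (the real AmplitudeExperiment::User is defined elsewhere in the gem and not shown in this diff):

require 'amplitude-experiment'

FakeUser = Struct.new(:user_id, :device_id) # stand-in for AmplitudeExperiment::User
results = {
  'flag-b' => { 'variant' => { 'key' => 'control' } },
  'flag-a' => { 'variant' => { 'key' => 'treatment' } }
}

assignment = AmplitudeExperiment::Assignment.new(FakeUser.new('user-1', 'device-1'), results)
p assignment.canonicalize
# => "user-1 device-1 flag-a treatment flag-b control "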
data/lib/experiment/local/assignment/assignment_config.rb
ADDED
@@ -0,0 +1,12 @@
+module AmplitudeExperiment
+  # AssignmentConfig
+  class AssignmentConfig < AmplitudeAnalytics::Config
+    attr_accessor :api_key, :cache_capacity
+
+    def initialize(api_key, cache_capacity = 65_536, **kwargs)
+      super(**kwargs)
+      @api_key = api_key
+      @cache_capacity = cache_capacity
+    end
+  end
+end
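AssignmentConfig layers the Analytics api_key and an LRU cache capacity on top of AmplitudeAnalytics::Config; remaining keyword arguments are forwarded to the parent initializer, whose options live in config.rb and are not expanded in this diff. A minimal construction sketch:

require 'amplitude-experiment'

# The second argument overrides the default cache capacity of 65_536 entries.
config = AmplitudeExperiment::AssignmentConfig.new('analytics-api-key', 1_024)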
data/lib/experiment/local/assignment/assignment_filter.rb
ADDED
@@ -0,0 +1,15 @@
+module AmplitudeExperiment
+  # AssignmentFilter
+  class AssignmentFilter
+    def initialize(size, ttl_millis = DAY_MILLIS)
+      @cache = LRUCache.new(size, ttl_millis)
+    end
+
+    def should_track(assignment)
+      canonical_assignment = assignment.canonicalize
+      track = @cache.get(canonical_assignment).nil?
+      @cache.put(canonical_assignment, 0) if track
+      track
+    end
+  end
+end
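AssignmentFilter#should_track deduplicates assignment tracking: the first sighting of a canonicalized assignment is cached (with a one-day TTL by default, per DAY_MILLIS) and reported as trackable, while repeats within the TTL are suppressed. A sketch, again with a Struct standing in for the Experiment user:

require 'amplitude-experiment'

FakeUser = Struct.new(:user_id, :device_id) # stand-in for AmplitudeExperiment::User
results = { 'flag-a' => { 'variant' => { 'key' => 'treatment' } } }

filter = AmplitudeExperiment::AssignmentFilter.new(100)
first  = AmplitudeExperiment::Assignment.new(FakeUser.new('user-1', nil), results)
again  = AmplitudeExperiment::Assignment.new(FakeUser.new('user-1', nil), results)

p filter.should_track(first) # => true  (cached as seen)
p filter.should_track(again) # => false (same canonical key within the TTL)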