splitclient-rb 7.2.3.pre.rc2-java → 7.3.0.pre.rc1-java
- checksums.yaml +4 -4
- data/.rubocop.yml +12 -0
- data/CHANGES.txt +6 -0
- data/lib/splitclient-rb.rb +24 -9
- data/lib/splitclient-rb/cache/adapters/redis_adapter.rb +4 -0
- data/lib/splitclient-rb/cache/fetchers/segment_fetcher.rb +8 -7
- data/lib/splitclient-rb/cache/fetchers/split_fetcher.rb +7 -7
- data/lib/splitclient-rb/cache/repositories/events/memory_repository.rb +6 -3
- data/lib/splitclient-rb/cache/repositories/events_repository.rb +4 -3
- data/lib/splitclient-rb/cache/repositories/impressions/memory_repository.rb +8 -0
- data/lib/splitclient-rb/cache/repositories/impressions/redis_repository.rb +2 -0
- data/lib/splitclient-rb/cache/repositories/repository.rb +0 -4
- data/lib/splitclient-rb/cache/repositories/segments_repository.rb +20 -0
- data/lib/splitclient-rb/cache/repositories/splits_repository.rb +4 -0
- data/lib/splitclient-rb/cache/senders/localhost_repo_cleaner.rb +1 -3
- data/lib/splitclient-rb/cache/stores/sdk_blocker.rb +9 -0
- data/lib/splitclient-rb/clients/split_client.rb +59 -25
- data/lib/splitclient-rb/engine/api/client.rb +3 -2
- data/lib/splitclient-rb/engine/api/events.rb +10 -1
- data/lib/splitclient-rb/engine/api/impressions.rb +19 -2
- data/lib/splitclient-rb/engine/api/segments.rb +20 -18
- data/lib/splitclient-rb/engine/api/splits.rb +10 -10
- data/lib/splitclient-rb/engine/api/telemetry_api.rb +39 -0
- data/lib/splitclient-rb/engine/auth_api_client.rb +21 -8
- data/lib/splitclient-rb/engine/common/impressions_manager.rb +27 -3
- data/lib/splitclient-rb/engine/metrics/binary_search_latency_tracker.rb +3 -65
- data/lib/splitclient-rb/engine/push_manager.rb +10 -2
- data/lib/splitclient-rb/engine/sync_manager.rb +42 -20
- data/lib/splitclient-rb/engine/synchronizer.rb +13 -12
- data/lib/splitclient-rb/split_config.rb +46 -21
- data/lib/splitclient-rb/split_factory.rb +31 -13
- data/lib/splitclient-rb/split_factory_registry.rb +12 -0
- data/lib/splitclient-rb/sse/event_source/client.rb +10 -1
- data/lib/splitclient-rb/sse/notification_manager_keeper.rb +17 -3
- data/lib/splitclient-rb/sse/sse_handler.rb +10 -6
- data/lib/splitclient-rb/telemetry/domain/constants.rb +42 -0
- data/lib/splitclient-rb/telemetry/domain/structs.rb +31 -0
- data/lib/splitclient-rb/telemetry/evaluation_consumer.rb +14 -0
- data/lib/splitclient-rb/telemetry/evaluation_producer.rb +21 -0
- data/lib/splitclient-rb/telemetry/init_consumer.rb +14 -0
- data/lib/splitclient-rb/telemetry/init_producer.rb +19 -0
- data/lib/splitclient-rb/telemetry/memory/memory_evaluation_consumer.rb +32 -0
- data/lib/splitclient-rb/telemetry/memory/memory_evaluation_producer.rb +24 -0
- data/lib/splitclient-rb/telemetry/memory/memory_init_consumer.rb +28 -0
- data/lib/splitclient-rb/telemetry/memory/memory_init_producer.rb +34 -0
- data/lib/splitclient-rb/telemetry/memory/memory_runtime_consumer.rb +112 -0
- data/lib/splitclient-rb/telemetry/memory/memory_runtime_producer.rb +81 -0
- data/lib/splitclient-rb/telemetry/memory/memory_synchronizer.rb +192 -0
- data/lib/splitclient-rb/telemetry/redis/redis_evaluation_producer.rb +38 -0
- data/lib/splitclient-rb/telemetry/redis/redis_init_producer.rb +37 -0
- data/lib/splitclient-rb/telemetry/redis/redis_synchronizer.rb +28 -0
- data/lib/splitclient-rb/telemetry/runtime_consumer.rb +24 -0
- data/lib/splitclient-rb/telemetry/runtime_producer.rb +24 -0
- data/lib/splitclient-rb/telemetry/storages/memory.rb +139 -0
- data/lib/splitclient-rb/telemetry/sync_task.rb +38 -0
- data/lib/splitclient-rb/telemetry/synchronizer.rb +29 -0
- data/lib/splitclient-rb/version.rb +1 -1
- metadata +24 -9
- data/lib/splitclient-rb/cache/repositories/metrics/memory_repository.rb +0 -163
- data/lib/splitclient-rb/cache/repositories/metrics/redis_repository.rb +0 -131
- data/lib/splitclient-rb/cache/repositories/metrics_repository.rb +0 -23
- data/lib/splitclient-rb/cache/senders/metrics_sender.rb +0 -55
- data/lib/splitclient-rb/engine/api/metrics.rb +0 -61
- data/lib/splitclient-rb/engine/metrics/metrics.rb +0 -80
- data/lib/splitclient-rb/redis_metrics_fixer.rb +0 -36

data/lib/splitclient-rb/engine/api/client.rb

@@ -8,12 +8,13 @@ module SplitIoClient
       RUBY_ENCODING = '1.9'.respond_to?(:force_encoding)

       def initialize(config)
-        @config = config
+        @config = config
       end

-      def get_api(url, api_key, params = {})
+      def get_api(url, api_key, params = {}, cache_control_headers = false)
         api_client.get(url, params) do |req|
           req.headers = common_headers(api_key).merge('Accept-Encoding' => 'gzip')
+          req.headers = req.headers.merge('Cache-Control' => 'no-cache') if cache_control_headers

           req.options[:timeout] = @config.read_timeout
           req.options[:open_timeout] = @config.connection_timeout
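
The new cache_control_headers flag on Client#get_api is what lets the split and segment fetchers ask for an uncached copy (for example, after a streaming update). A minimal caller sketch; the config and api_key objects are assumed here, not part of the diff:

    client = SplitIoClient::Api::Client.new(config)

    # Default call: no extra header is added.
    client.get_api("#{config.base_uri}/splitChanges", api_key, since: -1)

    # Passing true as the fourth argument merges 'Cache-Control' => 'no-cache'
    # into the request headers, forcing a fresh fetch.
    client.get_api("#{config.base_uri}/splitChanges", api_key, { since: -1 }, true)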

data/lib/splitclient-rb/engine/api/events.rb

@@ -3,9 +3,10 @@
 module SplitIoClient
   module Api
     class Events < Client
-      def initialize(api_key, config)
+      def initialize(api_key, config, telemetry_runtime_producer)
         super(config)
         @api_key = api_key
+        @telemetry_runtime_producer = telemetry_runtime_producer
       end

       def post(events)
@@ -14,6 +15,8 @@ module SplitIoClient
           return
         end

+        start = Time.now
+
         events.each_slice(@config.events_queue_size) do |events_slice|
           response = post_api(
             "#{@config.events_uri}/events/bulk",
@@ -23,7 +26,13 @@ module SplitIoClient

           if response.success?
             @config.split_logger.log_if_debug("Events reported: #{events_slice.size}")
+
+            bucket = BinarySearchLatencyTracker.get_bucket((Time.now - start) * 1000.0)
+            @telemetry_runtime_producer.record_sync_latency(Telemetry::Domain::Constants::EVENT_SYNC, bucket)
+            @telemetry_runtime_producer.record_successful_sync(Telemetry::Domain::Constants::EVENT_SYNC, (Time.now.to_f * 1000.0).to_i)
           else
+            @telemetry_runtime_producer.record_sync_error(Telemetry::Domain::Constants::EVENT_SYNC, response.status)
+
             @config.logger.error("Unexpected status code while posting events: #{response.status}." \
               ' - Check your API key and base URI')
             raise 'Split SDK failed to connect to backend to post events'
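
The same record-latency / record-success / record-error pattern recurs in every poster and fetcher touched by this release: time the request, bucket the elapsed milliseconds with BinarySearchLatencyTracker.get_bucket, then report either a latency plus a success timestamp or an error with the HTTP status. A condensed sketch of that pattern as a hypothetical helper (timed_sync is not part of the SDK), assumed to live inside the SplitIoClient namespace so the tracker resolves as in the diff:

    def timed_sync(telemetry, sync_constant)
      start = Time.now
      response = yield                     # the actual post_api/get_api call

      if response.success?
        # elapsed milliseconds -> latency bucket index
        bucket = BinarySearchLatencyTracker.get_bucket((Time.now - start) * 1000.0)
        telemetry.record_sync_latency(sync_constant, bucket)
        # success timestamp is epoch milliseconds
        telemetry.record_successful_sync(sync_constant, (Time.now.to_f * 1000.0).to_i)
      else
        telemetry.record_sync_error(sync_constant, response.status)
      end

      response
    end

The events poster above effectively does this around its post_api call with Telemetry::Domain::Constants::EVENT_SYNC; the impressions, segments, splits, auth and telemetry calls below repeat it with their own constants.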

data/lib/splitclient-rb/engine/api/impressions.rb

@@ -3,9 +3,10 @@
 module SplitIoClient
   module Api
     class Impressions < Client
-      def initialize(api_key, config)
+      def initialize(api_key, config, telemetry_runtime_producer)
+        super(config)
         @api_key = api_key
-        @config = config
+        @telemetry_runtime_producer = telemetry_runtime_producer
       end

       def post(impressions)
@@ -14,11 +15,19 @@ module SplitIoClient
           return
         end

+        start = Time.now
+
         response = post_api("#{@config.events_uri}/testImpressions/bulk", @api_key, impressions, impressions_headers)

         if response.success?
           @config.split_logger.log_if_debug("Impressions reported: #{total_impressions(impressions)}")
+
+          bucket = BinarySearchLatencyTracker.get_bucket((Time.now - start) * 1000.0)
+          @telemetry_runtime_producer.record_sync_latency(Telemetry::Domain::Constants::IMPRESSIONS_SYNC, bucket)
+          @telemetry_runtime_producer.record_successful_sync(Telemetry::Domain::Constants::IMPRESSIONS_SYNC, (Time.now.to_f * 1000.0).to_i)
         else
+          @telemetry_runtime_producer.record_sync_error(Telemetry::Domain::Constants::IMPRESSIONS_SYNC, response.status)
+
           @config.logger.error("Unexpected status code while posting impressions: #{response.status}." \
             ' - Check your API key and base URI')
           raise 'Split SDK failed to connect to backend to post impressions'
@@ -31,11 +40,19 @@ module SplitIoClient
           return
         end

+        start = Time.now
+
         response = post_api("#{@config.events_uri}/testImpressions/count", @api_key, impressions_count)

         if response.success?
           @config.split_logger.log_if_debug("Impressions count sent: #{impressions_count[:pf].length}")
+
+          bucket = BinarySearchLatencyTracker.get_bucket((Time.now - start) * 1000.0)
+          @telemetry_runtime_producer.record_sync_latency(Telemetry::Domain::Constants::IMPRESSION_COUNT_SYNC, bucket)
+          @telemetry_runtime_producer.record_successful_sync(Telemetry::Domain::Constants::IMPRESSION_COUNT_SYNC, (Time.now.to_f * 1000.0).to_i)
         else
+          @telemetry_runtime_producer.record_sync_error(Telemetry::Domain::Constants::IMPRESSION_COUNT_SYNC, response.status)
+
           @config.logger.error("Unexpected status code while posting impressions count: #{response.status}." \
             ' - Check your API key and base URI')
           raise 'Split SDK failed to connect to backend to post impressions'

data/lib/splitclient-rb/engine/api/segments.rb

@@ -4,24 +4,20 @@ module SplitIoClient
   module Api
     # Retrieves segment changes from the Split Backend
    class Segments < Client
-      METRICS_PREFIX = 'segmentChangeFetcher'
-
-      def initialize(api_key, metrics, segments_repository, config)
+      def initialize(api_key, segments_repository, config, telemetry_runtime_producer)
         super(config)
-        @metrics = metrics
         @api_key = api_key
         @segments_repository = segments_repository
+        @telemetry_runtime_producer = telemetry_runtime_producer
       end

-      def fetch_segments_by_names(names)
-        start = Time.now
-
+      def fetch_segments_by_names(names, cache_control_headers = false)
         return if names.nil? || names.empty?

         names.each do |name|
           since = @segments_repository.get_change_number(name)
           loop do
-            segment = fetch_segment_changes(name, since)
+            segment = fetch_segment_changes(name, since, cache_control_headers)
             @segments_repository.add_to_segment(segment)

             @config.split_logger.log_if_debug("Segment #{name} fetched before: #{since}, \
@@ -32,19 +28,17 @@ module SplitIoClient
             since = @segments_repository.get_change_number(name)
           end
         end
-
-        latency = (Time.now - start) * 1000.0
-        @metrics.time(METRICS_PREFIX + '.time', latency)
       end

       private

-      def fetch_segment_changes(name, since)
-        response = get_api("#{@config.base_uri}/segmentChanges/#{name}", @api_key, since: since)
+      def fetch_segment_changes(name, since, cache_control_headers = false)
+        start = Time.now
+        response = get_api("#{@config.base_uri}/segmentChanges/#{name}", @api_key, { since: since }, cache_control_headers)
+
         if response.success?
           segment = JSON.parse(response.body, symbolize_names: true)
           @segments_repository.set_change_number(name, segment[:till])
-          @metrics.count(METRICS_PREFIX + '.status.' + response.status.to_s, 1)

           @config.split_logger.log_if_debug("\'#{segment[:name]}\' segment retrieved.")
           unless segment[:added].empty?
@@ -55,15 +49,23 @@ module SplitIoClient
           end
           @config.split_logger.log_if_transport("Segment changes response: #{segment.to_s}")

+          bucket = BinarySearchLatencyTracker.get_bucket((Time.now - start) * 1000.0)
+          @telemetry_runtime_producer.record_sync_latency(Telemetry::Domain::Constants::SEGMENT_SYNC, bucket)
+          @telemetry_runtime_producer.record_successful_sync(Telemetry::Domain::Constants::SEGMENT_SYNC, (Time.now.to_f * 1000.0).to_i)
+
           segment
         elsif response.status == 403
-          @config.logger.error('Factory Instantiation: You passed a browser type api_key, ' \
-            'please grab an api key from the Split console that is of type sdk')
-          @config.valid_mode = false
+          @telemetry_runtime_producer.record_sync_error(Telemetry::Domain::Constants::SEGMENT_SYNC, response.status)
+
+          @config.logger.error('Factory Instantiation: You passed a browser type api_key, ' \
+            'please grab an api key from the Split console that is of type sdk')
+          @config.valid_mode = false
         else
+          @telemetry_runtime_producer.record_sync_error(Telemetry::Domain::Constants::SEGMENT_SYNC, response.status)
+
           @config.logger.error("Unexpected status code while fetching segments: #{response.status}." \
             "Since #{since} - Check your API key and base URI")
-          @metrics.count(METRICS_PREFIX + '.status.' + response.status.to_s, 1)
+
           raise 'Split SDK failed to connect to backend to fetch segments'
         end
       end

data/lib/splitclient-rb/engine/api/splits.rb

@@ -4,35 +4,35 @@ module SplitIoClient
   module Api
     # Retrieves split definitions from the Split Backend
     class Splits < Client
-      METRICS_PREFIX = 'splitChangeFetcher'
-
-      def initialize(api_key, metrics, config)
+      def initialize(api_key, config, telemetry_runtime_producer)
         super(config)
         @api_key = api_key
-        @metrics = metrics
+        @telemetry_runtime_producer = telemetry_runtime_producer
       end

-      def since(since)
+      def since(since, cache_control_headers = false)
         start = Time.now

-        response = get_api("#{@config.base_uri}/splitChanges", @api_key, since: since)
+        response = get_api("#{@config.base_uri}/splitChanges", @api_key, { since: since }, cache_control_headers)
         if response.success?
           result = splits_with_segment_names(response.body)

-          @metrics.count(METRICS_PREFIX + '.status.' + response.status.to_s, 1)
           unless result[:splits].empty?
             @config.split_logger.log_if_debug("#{result[:splits].length} splits retrieved. since=#{since}")
           end
           @config.split_logger.log_if_transport("Split changes response: #{result.to_s}")

-          latency = (Time.now - start) * 1000.0
-          @metrics.time(METRICS_PREFIX + '.time', latency)
+          bucket = BinarySearchLatencyTracker.get_bucket((Time.now - start) * 1000.0)
+          @telemetry_runtime_producer.record_sync_latency(Telemetry::Domain::Constants::SPLIT_SYNC, bucket)
+          @telemetry_runtime_producer.record_successful_sync(Telemetry::Domain::Constants::SPLIT_SYNC, (Time.now.to_f * 1000.0).to_i)

           result
         else
-          @metrics.count(METRICS_PREFIX + '.status.' + response.status.to_s, 1)
+          @telemetry_runtime_producer.record_sync_error(Telemetry::Domain::Constants::SPLIT_SYNC, response.status)
+
           @config.logger.error("Unexpected status code while fetching splits: #{response.status}. " \
             'Check your API key and base URI')
+
           raise 'Split SDK failed to connect to backend to fetch split definitions'
         end
       end

data/lib/splitclient-rb/engine/api/telemetry_api.rb

@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module SplitIoClient
+  module Api
+    class TelemetryApi < Client
+      def initialize(config, api_key, telemetry_runtime_producer)
+        super(config)
+        @api_key = api_key
+        @telemetry_runtime_producer = telemetry_runtime_producer
+      end
+
+      def record_init(config_init)
+        post_telemetry("#{@config.telemetry_service_url}/metrics/config", config_init, 'init')
+      end
+
+      def record_stats(stats)
+        post_telemetry("#{@config.telemetry_service_url}/metrics/usage", stats, 'stats')
+      end
+
+      private
+
+      def post_telemetry(url, obj, method)
+        start = Time.now
+        response = post_api(url, @api_key, obj)
+
+        if response.success?
+          @config.split_logger.log_if_debug("Telemetry post succeeded: record #{method}.")
+
+          bucket = BinarySearchLatencyTracker.get_bucket((Time.now - start) * 1000.0)
+          @telemetry_runtime_producer.record_sync_latency(Telemetry::Domain::Constants::TELEMETRY_SYNC, bucket)
+          @telemetry_runtime_producer.record_successful_sync(Telemetry::Domain::Constants::TELEMETRY_SYNC, (Time.now.to_f * 1000.0).to_i)
+        else
+          @telemetry_runtime_producer.record_sync_error(Telemetry::Domain::Constants::TELEMETRY_SYNC, response.status)
+          @config.logger.error("Unexpected status code while posting telemetry #{method}: #{response.status}.")
+        end
+      end
+    end
+  end
+end
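
A short usage sketch for the new endpoint class; the config, api_key and runtime_producer objects and the two payload variables are assumed here, not taken from the diff:

    telemetry_api = SplitIoClient::Api::TelemetryApi.new(config, api_key, runtime_producer)

    telemetry_api.record_init(init_payload)    # POSTs to <telemetry_service_url>/metrics/config
    telemetry_api.record_stats(stats_payload)  # POSTs to <telemetry_service_url>/metrics/usage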

data/lib/splitclient-rb/engine/auth_api_client.rb

@@ -6,22 +6,21 @@ require 'cgi'
 module SplitIoClient
   module Engine
     class AuthApiClient
-      def initialize(config)
+      def initialize(config, telemetry_runtime_producer)
         @config = config
         @api_client = SplitIoClient::Api::Client.new(@config)
+        @telemetry_runtime_producer = telemetry_runtime_producer
       end

       def authenticate(api_key)
+        start = Time.now
         response = @api_client.get_api(@config.auth_service_url, api_key)

-        return process_success(response) if response.success?
+        return process_success(response, start) if response.success?

-        if response.status >= 400 && response.status < 500
-          @config.logger.debug("Error connecting to: #{@config.auth_service_url}. Response status: #{response.status}")
-
-          return { push_enabled: false, retry: false }
-        end
+        return process_error(response) if response.status >= 400 && response.status < 500

+        @telemetry_runtime_producer.record_sync_error(Telemetry::Domain::Constants::TOKEN_SYNC, response.status.to_i)
         @config.logger.debug("Error connecting to: #{@config.auth_service_url}. Response status: #{response.status}")
         { push_enabled: false, retry: true }
       rescue StandardError => e
@@ -51,9 +50,21 @@ module SplitIoClient
         JWT.decode token, nil, false
       end

-      def process_success(response)
+      def process_error(response)
+        @config.logger.debug("Error connecting to: #{@config.auth_service_url}. Response status: #{response.status}")
+        @telemetry_runtime_producer.record_auth_rejections if response.status == 401
+
+        { push_enabled: false, retry: false }
+      end
+
+      def process_success(response, start)
         @config.logger.debug("Success connection to: #{@config.auth_service_url}") if @config.debug_enabled

+        bucket = BinarySearchLatencyTracker.get_bucket((Time.now - start) * 1000.0)
+        @telemetry_runtime_producer.record_sync_latency(Telemetry::Domain::Constants::TOKEN_SYNC, bucket)
+        timestamp = (Time.now.to_f * 1000.0).to_i
+        @telemetry_runtime_producer.record_successful_sync(Telemetry::Domain::Constants::TOKEN_SYNC, timestamp)
+
         body_json = JSON.parse(response.body, symbolize_names: true)
         push_enabled = body_json[:pushEnabled]
         token = body_json[:token]
@@ -62,6 +73,8 @@ module SplitIoClient
         decoded_token = decode_token(token)
         channels = channels(decoded_token)
         exp = expiration(decoded_token)
+
+        @telemetry_runtime_producer.record_token_refreshes
         end

         { push_enabled: push_enabled, token: token, channels: channels, exp: exp, retry: false }
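
The refactor keeps the return contract intact: a 4xx now goes through process_error (push disabled, no retry, with an auth rejection recorded on 401), while other failures still return retry: true. A sketch of how a caller might branch on the returned hash; start_sse, schedule_auth_retry and stop_push are hypothetical helpers used only for illustration:

    result = auth_api_client.authenticate(api_key)

    if result[:push_enabled]
      # token, channels and exp are only present on a successful response
      start_sse(result[:token], result[:channels], result[:exp])
    elsif result[:retry]
      schedule_auth_retry   # 5xx or unexpected status: try again later
    else
      stop_push             # 4xx: give up on push (a 401 also records an auth rejection)
    end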

data/lib/splitclient-rb/engine/common/impressions_manager.rb

@@ -4,11 +4,12 @@ module SplitIoClient
   module Engine
     module Common
       class ImpressionManager
-        def initialize(config, impressions_repository, impression_counter)
+        def initialize(config, impressions_repository, impression_counter, telemetry_runtime_producer)
           @config = config
           @impressions_repository = impressions_repository
           @impression_counter = impression_counter
           @impression_observer = SplitIoClient::Observers::ImpressionObserver.new
+          @telemetry_runtime_producer = telemetry_runtime_producer
         end

         # added param time for test
@@ -29,18 +30,41 @@ module SplitIoClient

           impression_router.add_bulk(impressions)

+          dropped = 0
+          queued = 0
+          dedupe = 0
+
           if optimized? && !redis?
             optimized_impressions = impressions.select { |imp| should_queue_impression?(imp[:i]) }
-            @impressions_repository.add_bulk(optimized_impressions) unless optimized_impressions.empty?
+
+            unless optimized_impressions.empty?
+              dropped = @impressions_repository.add_bulk(optimized_impressions)
+              dedupe = impressions.length - optimized_impressions.length
+              queued = optimized_impressions.length - dropped
+            end
           else
-            @impressions_repository.add_bulk(impressions)
+            dropped = @impressions_repository.add_bulk(impressions)
+            queued = impressions.length - dropped
           end
+
+          record_stats(queued, dropped, dedupe)
         rescue StandardError => error
           @config.log_found_exception(__method__.to_s, error)
         end

         private

+        def record_stats(queued, dropped, dedupe)
+          return if redis?
+
+          imp_queued = Telemetry::Domain::Constants::IMPRESSIONS_QUEUED
+          imp_dropped = Telemetry::Domain::Constants::IMPRESSIONS_DROPPED
+          imp_dedupe = Telemetry::Domain::Constants::IMPRESSIONS_DEDUPE
+          @telemetry_runtime_producer.record_impressions_stats(imp_queued, queued) unless queued.zero?
+          @telemetry_runtime_producer.record_impressions_stats(imp_dropped, dropped) unless dropped.zero?
+          @telemetry_runtime_producer.record_impressions_stats(imp_dedupe, dedupe) unless dedupe.zero?
+        end
+
         # added param time for test
         def impression_data(matching_key, bucketing_key, split_name, treatment, time = nil)
           {
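
A worked example of the queued/dropped/dedupe arithmetic above, with illustrative numbers only: 10 impressions arrive, 7 survive deduplication, and the repository reports 2 dropped.

    impressions_size = 10
    optimized_size   = 7                                 # passed should_queue_impression?
    dropped          = 2                                 # value returned by add_bulk
    dedupe           = impressions_size - optimized_size # => 3 de-duplicated
    queued           = optimized_size - dropped          # => 5 actually queued
    # record_stats(5, 2, 3) then reports each non-zero counter to the runtime producer.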

data/lib/splitclient-rb/engine/metrics/binary_search_latency_tracker.rb

@@ -42,75 +42,13 @@ module SplitIoClient

     MAX_LATENCY = 7481828

-
-
-    def initialize
-      @latencies = Array.new(BUCKETS.length, 0)
-    end
-
-    #
-    # Increment the internal counter for the bucket this latency falls into.
-    # @param millis
-    #
-    def add_latency_millis(millis, return_index = false)
-      index = find_bucket_index(millis * 1000)
-
-      return index if return_index
-
-      @latencies[index] += 1
-      @latencies
-    end
-
-    # Increment the internal counter for the bucket this latency falls into.
-    # @param micros
-    def add_latency_micros(micros, return_index = false)
-      index = find_bucket_index(micros)
-
-      return index if return_index
-
-      @latencies[index] += 1
-      @latencies
-    end
-
-    # Returns the list of latencies buckets as an array.
-    #
-    #
-    # @return the list of latencies buckets as an array.
-    def get_latencies
-      @latencies
-    end
-
-    def get_latency(index)
-      return @latencies[index]
-    end
-
-    def clear
-      @latencies = Array.new(BUCKETS.length, 0)
-    end
-
-    #
-    # Returns the counts in the bucket this latency falls into.
-    # The latencies will not be updated.
-    # @param latency
-    # @return the bucket content for the latency.
-    #
-    def get_bucket_for_latency_millis(latency)
-      return @latencies[find_bucket_index(latency * 1000)]
-    end
-
-    #
-    # Returns the counts in the bucket this latency falls into.
-    # The latencies will not be updated.
-    # @param latency
-    # @return the bucket content for the latency.
-    #
-    def get_bucket_for_latency_micros(latency)
-      return @latencies[find_bucket_index(latency)]
+    def self.get_bucket(latency)
+      return find_bucket_index(latency * 1000)
     end

     private

-    def find_bucket_index(micros)
+    def self.find_bucket_index(micros)
       if (micros > MAX_LATENCY) then
         return BUCKETS.length - 1
       end