launchdarkly-server-sdk 5.5.7
- checksums.yaml +7 -0
- data/.circleci/config.yml +134 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.gitignore +15 -0
- data/.hound.yml +2 -0
- data/.rspec +2 -0
- data/.rubocop.yml +600 -0
- data/.simplecov +4 -0
- data/.yardopts +9 -0
- data/CHANGELOG.md +261 -0
- data/CODEOWNERS +1 -0
- data/CONTRIBUTING.md +37 -0
- data/Gemfile +3 -0
- data/Gemfile.lock +102 -0
- data/LICENSE.txt +13 -0
- data/README.md +56 -0
- data/Rakefile +5 -0
- data/azure-pipelines.yml +51 -0
- data/ext/mkrf_conf.rb +11 -0
- data/launchdarkly-server-sdk.gemspec +40 -0
- data/lib/ldclient-rb.rb +29 -0
- data/lib/ldclient-rb/cache_store.rb +45 -0
- data/lib/ldclient-rb/config.rb +411 -0
- data/lib/ldclient-rb/evaluation.rb +455 -0
- data/lib/ldclient-rb/event_summarizer.rb +55 -0
- data/lib/ldclient-rb/events.rb +468 -0
- data/lib/ldclient-rb/expiring_cache.rb +77 -0
- data/lib/ldclient-rb/file_data_source.rb +312 -0
- data/lib/ldclient-rb/flags_state.rb +76 -0
- data/lib/ldclient-rb/impl.rb +13 -0
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +158 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +228 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +155 -0
- data/lib/ldclient-rb/impl/store_client_wrapper.rb +47 -0
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
- data/lib/ldclient-rb/in_memory_store.rb +100 -0
- data/lib/ldclient-rb/integrations.rb +55 -0
- data/lib/ldclient-rb/integrations/consul.rb +38 -0
- data/lib/ldclient-rb/integrations/dynamodb.rb +47 -0
- data/lib/ldclient-rb/integrations/redis.rb +55 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +230 -0
- data/lib/ldclient-rb/interfaces.rb +153 -0
- data/lib/ldclient-rb/ldclient.rb +424 -0
- data/lib/ldclient-rb/memoized_value.rb +32 -0
- data/lib/ldclient-rb/newrelic.rb +17 -0
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +46 -0
- data/lib/ldclient-rb/polling.rb +78 -0
- data/lib/ldclient-rb/redis_store.rb +87 -0
- data/lib/ldclient-rb/requestor.rb +101 -0
- data/lib/ldclient-rb/simple_lru_cache.rb +25 -0
- data/lib/ldclient-rb/stream.rb +141 -0
- data/lib/ldclient-rb/user_filter.rb +51 -0
- data/lib/ldclient-rb/util.rb +50 -0
- data/lib/ldclient-rb/version.rb +3 -0
- data/scripts/gendocs.sh +11 -0
- data/scripts/release.sh +27 -0
- data/spec/config_spec.rb +63 -0
- data/spec/evaluation_spec.rb +739 -0
- data/spec/event_summarizer_spec.rb +63 -0
- data/spec/events_spec.rb +642 -0
- data/spec/expiring_cache_spec.rb +76 -0
- data/spec/feature_store_spec_base.rb +213 -0
- data/spec/file_data_source_spec.rb +255 -0
- data/spec/fixtures/feature.json +37 -0
- data/spec/fixtures/feature1.json +36 -0
- data/spec/fixtures/user.json +9 -0
- data/spec/flags_state_spec.rb +81 -0
- data/spec/http_util.rb +109 -0
- data/spec/in_memory_feature_store_spec.rb +12 -0
- data/spec/integrations/consul_feature_store_spec.rb +42 -0
- data/spec/integrations/dynamodb_feature_store_spec.rb +105 -0
- data/spec/integrations/store_wrapper_spec.rb +276 -0
- data/spec/ldclient_spec.rb +471 -0
- data/spec/newrelic_spec.rb +5 -0
- data/spec/polling_spec.rb +120 -0
- data/spec/redis_feature_store_spec.rb +95 -0
- data/spec/requestor_spec.rb +214 -0
- data/spec/segment_store_spec_base.rb +95 -0
- data/spec/simple_lru_cache_spec.rb +24 -0
- data/spec/spec_helper.rb +9 -0
- data/spec/store_spec.rb +10 -0
- data/spec/stream_spec.rb +60 -0
- data/spec/user_filter_spec.rb +91 -0
- data/spec/util_spec.rb +17 -0
- data/spec/version_spec.rb +7 -0
- metadata +375 -0
data/lib/ldclient-rb/event_summarizer.rb
@@ -0,0 +1,55 @@
module LaunchDarkly
  # @private
  EventSummary = Struct.new(:start_date, :end_date, :counters)

  # Manages the state of summarizable information for the EventProcessor, including the
  # event counters and user deduplication. Note that the methods of this class are
  # deliberately not thread-safe; the EventProcessor is responsible for enforcing
  # synchronization across both the summarizer and the event queue.
  #
  # @private
  class EventSummarizer
    def initialize
      clear
    end

    # Adds this event to our counters, if it is a type of event we need to count.
    def summarize_event(event)
      if event[:kind] == "feature"
        counter_key = {
          key: event[:key],
          version: event[:version],
          variation: event[:variation]
        }
        c = @counters[counter_key]
        if c.nil?
          @counters[counter_key] = {
            value: event[:value],
            default: event[:default],
            count: 1
          }
        else
          c[:count] = c[:count] + 1
        end
        time = event[:creationDate]
        if !time.nil?
          @start_date = time if @start_date == 0 || time < @start_date
          @end_date = time if time > @end_date
        end
      end
    end

    # Returns a snapshot of the current summarized event data, and resets this state.
    def snapshot
      ret = EventSummary.new(@start_date, @end_date, @counters)
      ret
    end

    def clear
      @start_date = 0
      @end_date = 0
      @counters = {}
    end
  end
end
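For orientation, here is a minimal usage sketch of the summarizer above. This is an editorial illustration, not part of the gem's files; the flag key, version, and timestamps are made up. Each "feature" event is rolled up under a counter key of flag key, version, and variation, and the snapshot carries the counters plus the time range they cover.

# Hypothetical example, assuming the gem is installed and its internal
# (@private) EventSummarizer class is loaded via the main entry point.
require "ldclient-rb"

summarizer = LaunchDarkly::EventSummarizer.new

# Two evaluations of the same flag/version/variation collapse into one counter.
2.times do |i|
  summarizer.summarize_event({
    kind: "feature",
    key: "my-flag",
    version: 7,
    variation: 1,
    value: true,
    default: false,
    creationDate: 1_555_555_555_000 + i
  })
end

summary = summarizer.snapshot
# summary.counters ==
#   { { key: "my-flag", version: 7, variation: 1 } => { value: true, default: false, count: 2 } }
summarizer.clear  # reset state before reuse, as EventBuffer#clear does below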
data/lib/ldclient-rb/events.rb
@@ -0,0 +1,468 @@
require "concurrent"
require "concurrent/atomics"
require "concurrent/executors"
require "thread"
require "time"

module LaunchDarkly
  MAX_FLUSH_WORKERS = 5
  CURRENT_SCHEMA_VERSION = 3
  USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName,
    :avatar, :name ]

  private_constant :MAX_FLUSH_WORKERS
  private_constant :CURRENT_SCHEMA_VERSION
  private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS

  # @private
  class NullEventProcessor
    def add_event(event)
    end

    def flush
    end

    def stop
    end
  end

  # @private
  class EventMessage
    def initialize(event)
      @event = event
    end
    attr_reader :event
  end

  # @private
  class FlushMessage
  end

  # @private
  class FlushUsersMessage
  end

  # @private
  class SynchronousMessage
    def initialize
      @reply = Concurrent::Semaphore.new(0)
    end

    def completed
      @reply.release
    end

    def wait_for_completion
      @reply.acquire
    end
  end

  # @private
  class TestSyncMessage < SynchronousMessage
  end

  # @private
  class StopMessage < SynchronousMessage
  end

  # @private
  class EventProcessor
    def initialize(sdk_key, config, client = nil)
      @queue = Queue.new
      @flush_task = Concurrent::TimerTask.new(execution_interval: config.flush_interval) do
        @queue << FlushMessage.new
      end
      @flush_task.execute
      @users_flush_task = Concurrent::TimerTask.new(execution_interval: config.user_keys_flush_interval) do
        @queue << FlushUsersMessage.new
      end
      @users_flush_task.execute
      @stopped = Concurrent::AtomicBoolean.new(false)

      EventDispatcher.new(@queue, sdk_key, config, client)
    end

    def add_event(event)
      event[:creationDate] = (Time.now.to_f * 1000).to_i
      @queue << EventMessage.new(event)
    end

    def flush
      # flush is done asynchronously
      @queue << FlushMessage.new
    end

    def stop
      # final shutdown, which includes a final flush, is done synchronously
      if @stopped.make_true
        @flush_task.shutdown
        @users_flush_task.shutdown
        @queue << FlushMessage.new
        stop_msg = StopMessage.new
        @queue << stop_msg
        stop_msg.wait_for_completion
      end
    end

    # exposed only for testing
    def wait_until_inactive
      sync_msg = TestSyncMessage.new
      @queue << sync_msg
      sync_msg.wait_for_completion
    end
  end

  # @private
  class EventDispatcher
    def initialize(queue, sdk_key, config, client)
      @sdk_key = sdk_key
      @config = config

      if client
        @client = client
      else
        @client = Util.new_http_client(@config.events_uri, @config)
      end

      @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity)
      @formatter = EventOutputFormatter.new(config)
      @disabled = Concurrent::AtomicBoolean.new(false)
      @last_known_past_time = Concurrent::AtomicReference.new(0)

      buffer = EventBuffer.new(config.capacity, config.logger)
      flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS)

      Thread.new { main_loop(queue, buffer, flush_workers) }
    end

    private

    def now_millis()
      (Time.now.to_f * 1000).to_i
    end

    def main_loop(queue, buffer, flush_workers)
      running = true
      while running do
        begin
          message = queue.pop
          case message
          when EventMessage
            dispatch_event(message.event, buffer)
          when FlushMessage
            trigger_flush(buffer, flush_workers)
          when FlushUsersMessage
            @user_keys.clear
          when TestSyncMessage
            synchronize_for_testing(flush_workers)
            message.completed
          when StopMessage
            do_shutdown(flush_workers)
            running = false
            message.completed
          end
        rescue => e
          Util.log_exception(@config.logger, "Unexpected error in event processor", e)
        end
      end
    end

    def do_shutdown(flush_workers)
      flush_workers.shutdown
      flush_workers.wait_for_termination
      begin
        @client.finish
      rescue
      end
    end

    def synchronize_for_testing(flush_workers)
      # Used only by unit tests. Wait until all active flush workers have finished.
      flush_workers.wait_all
    end

    def dispatch_event(event, buffer)
      return if @disabled.value

      # Always record the event in the summary.
      buffer.add_to_summary(event)

      # Decide whether to add the event to the payload. Feature events may be added twice, once for
      # the event (if tracked) and once for debugging.
      will_add_full_event = false
      debug_event = nil
      if event[:kind] == "feature"
        will_add_full_event = event[:trackEvents]
        if should_debug_event(event)
          debug_event = event.clone
          debug_event[:debug] = true
        end
      else
        will_add_full_event = true
      end

      # For each user we haven't seen before, we add an index event - unless this is already
      # an identify event for that user.
      if !(will_add_full_event && @config.inline_users_in_events)
        if event.has_key?(:user) && !notice_user(event[:user]) && event[:kind] != "identify"
          buffer.add_event({
            kind: "index",
            creationDate: event[:creationDate],
            user: event[:user]
          })
        end
      end

      buffer.add_event(event) if will_add_full_event
      buffer.add_event(debug_event) if !debug_event.nil?
    end

    # Add to the set of users we've noticed, and return true if the user was already known to us.
    def notice_user(user)
      if user.nil? || !user.has_key?(:key)
        true
      else
        @user_keys.add(user[:key].to_s)
      end
    end

    def should_debug_event(event)
      debug_until = event[:debugEventsUntilDate]
      if !debug_until.nil?
        last_past = @last_known_past_time.value
        debug_until > last_past && debug_until > now_millis
      else
        false
      end
    end

    def trigger_flush(buffer, flush_workers)
      if @disabled.value
        return
      end

      payload = buffer.get_payload
      if !payload.events.empty? || !payload.summary.counters.empty?
        # If all available worker threads are busy, success will be false and no job will be queued.
        success = flush_workers.post do
          begin
            resp = EventPayloadSendTask.new.run(@sdk_key, @config, @client, payload, @formatter)
            handle_response(resp) if !resp.nil?
          rescue => e
            Util.log_exception(@config.logger, "Unexpected error in event processor", e)
          end
        end
        buffer.clear if success # Reset our internal state, these events now belong to the flush worker
      end
    end

    def handle_response(res)
      status = res.code.to_i
      if status >= 400
        message = Util.http_error_message(status, "event delivery", "some events were dropped")
        @config.logger.error { "[LDClient] #{message}" }
        if !Util.http_error_recoverable?(status)
          @disabled.value = true
        end
      else
        if !res["date"].nil?
          begin
            res_time = (Time.httpdate(res["date"]).to_f * 1000).to_i
            @last_known_past_time.value = res_time
          rescue ArgumentError
          end
        end
      end
    end
  end

  # @private
  FlushPayload = Struct.new(:events, :summary)

  # @private
  class EventBuffer
    def initialize(capacity, logger)
      @capacity = capacity
      @logger = logger
      @capacity_exceeded = false
      @events = []
      @summarizer = EventSummarizer.new
    end

    def add_event(event)
      if @events.length < @capacity
        @logger.debug { "[LDClient] Enqueueing event: #{event.to_json}" }
        @events.push(event)
        @capacity_exceeded = false
      else
        if !@capacity_exceeded
          @capacity_exceeded = true
          @logger.warn { "[LDClient] Exceeded event queue capacity. Increase capacity to avoid dropping events." }
        end
      end
    end

    def add_to_summary(event)
      @summarizer.summarize_event(event)
    end

    def get_payload
      return FlushPayload.new(@events, @summarizer.snapshot)
    end

    def clear
      @events = []
      @summarizer.clear
    end
  end

  # @private
  class EventPayloadSendTask
    def run(sdk_key, config, client, payload, formatter)
      events_out = formatter.make_output_events(payload.events, payload.summary)
      res = nil
      body = events_out.to_json
      (0..1).each do |attempt|
        if attempt > 0
          config.logger.warn { "[LDClient] Will retry posting events after 1 second" }
          sleep(1)
        end
        begin
          client.start if !client.started?
          config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" }
          uri = URI(config.events_uri + "/bulk")
          req = Net::HTTP::Post.new(uri)
          req.content_type = "application/json"
          req.body = body
          req["Authorization"] = sdk_key
          req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION
          req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s
          req["Connection"] = "keep-alive"
          res = client.request(req)
        rescue StandardError => exn
          config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." }
          next
        end
        status = res.code.to_i
        if status < 200 || status >= 300
          if Util.http_error_recoverable?(status)
            next
          end
        end
        break
      end
      # used up our retries, return the last response if any
      res
    end
  end

  # @private
  class EventOutputFormatter
    def initialize(config)
      @inline_users = config.inline_users_in_events
      @user_filter = UserFilter.new(config)
    end

    # Transforms events into the format used for event sending.
    def make_output_events(events, summary)
      events_out = events.map { |e| make_output_event(e) }
      if !summary.counters.empty?
        events_out.push(make_summary_event(summary))
      end
      events_out
    end

    private

    def process_user(event)
      filtered = @user_filter.transform_user_props(event[:user])
      Util.stringify_attrs(filtered, USER_ATTRS_TO_STRINGIFY_FOR_EVENTS)
    end

    def make_output_event(event)
      case event[:kind]
      when "feature"
        is_debug = event[:debug]
        out = {
          kind: is_debug ? "debug" : "feature",
          creationDate: event[:creationDate],
          key: event[:key],
          value: event[:value]
        }
        out[:default] = event[:default] if event.has_key?(:default)
        out[:variation] = event[:variation] if event.has_key?(:variation)
        out[:version] = event[:version] if event.has_key?(:version)
        out[:prereqOf] = event[:prereqOf] if event.has_key?(:prereqOf)
        if @inline_users || is_debug
          out[:user] = process_user(event)
        else
          out[:userKey] = event[:user].nil? ? nil : event[:user][:key]
        end
        out[:reason] = event[:reason] if !event[:reason].nil?
        out
      when "identify"
        {
          kind: "identify",
          creationDate: event[:creationDate],
          key: event[:user].nil? ? nil : event[:user][:key].to_s,
          user: process_user(event)
        }
      when "custom"
        out = {
          kind: "custom",
          creationDate: event[:creationDate],
          key: event[:key]
        }
        out[:data] = event[:data] if event.has_key?(:data)
        if @inline_users
          out[:user] = process_user(event)
        else
          out[:userKey] = event[:user].nil? ? nil : event[:user][:key]
        end
        out
      when "index"
        {
          kind: "index",
          creationDate: event[:creationDate],
          user: process_user(event)
        }
      else
        event
      end
    end

    # Transforms the summary data into the format used for event sending.
    def make_summary_event(summary)
      flags = {}
      summary[:counters].each { |ckey, cval|
        flag = flags[ckey[:key]]
        if flag.nil?
          flag = {
            default: cval[:default],
            counters: []
          }
          flags[ckey[:key]] = flag
        end
        c = {
          value: cval[:value],
          count: cval[:count]
        }
        if !ckey[:variation].nil?
          c[:variation] = ckey[:variation]
        end
        if ckey[:version].nil?
          c[:unknown] = true
        else
          c[:version] = ckey[:version]
        end
        flag[:counters].push(c)
      }
      {
        kind: "summary",
        startDate: summary[:start_date],
        endDate: summary[:end_date],
        features: flags
      }
    end
  end
end
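And a rough sketch of how the processor above is driven. Again this is an editorial illustration rather than part of the gem's files: in normal use LDClient constructs the EventProcessor internally, and the SDK key and option values shown here are placeholders.

# Hypothetical example only. The processor enqueues events on its internal
# queue; the EventDispatcher thread summarizes them, adds index events for
# previously unseen users, and posts the payload to <events_uri>/bulk on each flush.
require "ldclient-rb"

config = LaunchDarkly::Config.new(capacity: 1000, flush_interval: 5)
processor = LaunchDarkly::EventProcessor.new("YOUR_SDK_KEY", config)

processor.add_event({ kind: "identify", key: "user-1", user: { key: "user-1" } })
processor.add_event({ kind: "custom", key: "checkout", user: { key: "user-1" }, data: { amount: 9.99 } })

processor.flush  # asynchronous: posts a FlushMessage and returns immediately
processor.stop   # synchronous: performs a final flush, then shuts the dispatcher down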