launchdarkly-server-sdk 6.3.0 → 8.0.0

Files changed (67)
  1. checksums.yaml +4 -4
  2. data/README.md +3 -4
  3. data/lib/ldclient-rb/config.rb +112 -62
  4. data/lib/ldclient-rb/context.rb +444 -0
  5. data/lib/ldclient-rb/evaluation_detail.rb +26 -22
  6. data/lib/ldclient-rb/events.rb +256 -146
  7. data/lib/ldclient-rb/flags_state.rb +26 -15
  8. data/lib/ldclient-rb/impl/big_segments.rb +18 -18
  9. data/lib/ldclient-rb/impl/broadcaster.rb +78 -0
  10. data/lib/ldclient-rb/impl/context.rb +96 -0
  11. data/lib/ldclient-rb/impl/context_filter.rb +145 -0
  12. data/lib/ldclient-rb/impl/data_source.rb +188 -0
  13. data/lib/ldclient-rb/impl/data_store.rb +59 -0
  14. data/lib/ldclient-rb/impl/dependency_tracker.rb +102 -0
  15. data/lib/ldclient-rb/impl/diagnostic_events.rb +9 -10
  16. data/lib/ldclient-rb/impl/evaluator.rb +386 -142
  17. data/lib/ldclient-rb/impl/evaluator_bucketing.rb +40 -41
  18. data/lib/ldclient-rb/impl/evaluator_helpers.rb +50 -0
  19. data/lib/ldclient-rb/impl/evaluator_operators.rb +26 -55
  20. data/lib/ldclient-rb/impl/event_sender.rb +7 -6
  21. data/lib/ldclient-rb/impl/event_summarizer.rb +68 -0
  22. data/lib/ldclient-rb/impl/event_types.rb +136 -0
  23. data/lib/ldclient-rb/impl/flag_tracker.rb +58 -0
  24. data/lib/ldclient-rb/impl/integrations/consul_impl.rb +19 -7
  25. data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +38 -30
  26. data/lib/ldclient-rb/impl/integrations/file_data_source.rb +24 -11
  27. data/lib/ldclient-rb/impl/integrations/redis_impl.rb +109 -12
  28. data/lib/ldclient-rb/impl/migrations/migrator.rb +287 -0
  29. data/lib/ldclient-rb/impl/migrations/tracker.rb +136 -0
  30. data/lib/ldclient-rb/impl/model/clause.rb +45 -0
  31. data/lib/ldclient-rb/impl/model/feature_flag.rb +255 -0
  32. data/lib/ldclient-rb/impl/model/preprocessed_data.rb +64 -0
  33. data/lib/ldclient-rb/impl/model/segment.rb +132 -0
  34. data/lib/ldclient-rb/impl/model/serialization.rb +54 -44
  35. data/lib/ldclient-rb/impl/repeating_task.rb +3 -4
  36. data/lib/ldclient-rb/impl/sampler.rb +25 -0
  37. data/lib/ldclient-rb/impl/store_client_wrapper.rb +102 -8
  38. data/lib/ldclient-rb/impl/store_data_set_sorter.rb +2 -2
  39. data/lib/ldclient-rb/impl/unbounded_pool.rb +1 -1
  40. data/lib/ldclient-rb/impl/util.rb +59 -1
  41. data/lib/ldclient-rb/in_memory_store.rb +9 -2
  42. data/lib/ldclient-rb/integrations/consul.rb +2 -2
  43. data/lib/ldclient-rb/integrations/dynamodb.rb +2 -2
  44. data/lib/ldclient-rb/integrations/file_data.rb +4 -4
  45. data/lib/ldclient-rb/integrations/redis.rb +5 -5
  46. data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +287 -62
  47. data/lib/ldclient-rb/integrations/test_data.rb +18 -14
  48. data/lib/ldclient-rb/integrations/util/store_wrapper.rb +20 -9
  49. data/lib/ldclient-rb/interfaces.rb +600 -14
  50. data/lib/ldclient-rb/ldclient.rb +314 -134
  51. data/lib/ldclient-rb/memoized_value.rb +1 -1
  52. data/lib/ldclient-rb/migrations.rb +230 -0
  53. data/lib/ldclient-rb/non_blocking_thread_pool.rb +1 -1
  54. data/lib/ldclient-rb/polling.rb +52 -6
  55. data/lib/ldclient-rb/reference.rb +274 -0
  56. data/lib/ldclient-rb/requestor.rb +9 -11
  57. data/lib/ldclient-rb/stream.rb +96 -34
  58. data/lib/ldclient-rb/util.rb +97 -14
  59. data/lib/ldclient-rb/version.rb +1 -1
  60. data/lib/ldclient-rb.rb +3 -4
  61. metadata +65 -23
  62. data/lib/ldclient-rb/event_summarizer.rb +0 -55
  63. data/lib/ldclient-rb/file_data_source.rb +0 -23
  64. data/lib/ldclient-rb/impl/event_factory.rb +0 -126
  65. data/lib/ldclient-rb/newrelic.rb +0 -17
  66. data/lib/ldclient-rb/redis_store.rb +0 -88
  67. data/lib/ldclient-rb/user_filter.rb +0 -52
data/lib/ldclient-rb/events.rb
@@ -1,5 +1,8 @@
+require "ldclient-rb/impl/context_filter"
 require "ldclient-rb/impl/diagnostic_events"
 require "ldclient-rb/impl/event_sender"
+require "ldclient-rb/impl/event_summarizer"
+require "ldclient-rb/impl/event_types"
 require "ldclient-rb/impl/util"
 
 require "concurrent"
@@ -18,7 +21,7 @@ require "time"
 # On a separate worker thread, EventDispatcher consumes events from the inbox. These are considered
 # "input events" because they may or may not actually be sent to LaunchDarkly; most flag evaluation
 # events are not sent, but are counted and the counters become part of a single summary event.
-# EventDispatcher updates those counters, creates "index" events for any users that have not been seen
+# EventDispatcher updates those counters, creates "index" events for any contexts that have not been seen
 # recently, and places any events that will be sent to LaunchDarkly into the "outbox" queue.
 #
 # When it is time to flush events to LaunchDarkly, the contents of the outbox are handed off to
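The comment above describes the inbox/outbox pipeline in prose. As a rough sketch of that pattern only (the names below are invented, not the SDK's): the public API pushes lightweight messages onto a bounded queue, and a single worker thread drains it, updating summary counters and filling an outbox for the flush workers.

inbox = SizedQueue.new(100)                    # bounded, like config.capacity

worker = Thread.new do
  loop do
    message = inbox.pop                        # blocks until a message arrives
    break if message == :stop
    # a real dispatcher would update summary counters here and move
    # index/full events into the outbox for the flush workers
  end
end

inbox.push(kind: :eval, key: "example-flag")   # an "input event"
inbox.push(:stop)
worker.join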
@@ -26,16 +29,35 @@ require "time"
 #
 
 module LaunchDarkly
-  MAX_FLUSH_WORKERS = 5
-  USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName,
-    :avatar, :name ]
+  module EventProcessorMethods
+    def record_eval_event(
+      context,
+      key,
+      version = nil,
+      variation = nil,
+      value = nil,
+      reason = nil,
+      default = nil,
+      track_events = false,
+      debug_until = nil,
+      prereq_of = nil,
+      sampling_ratio = nil,
+      exclude_from_summaries = false
+    )
+    end
 
-  private_constant :MAX_FLUSH_WORKERS
-  private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS
+    def record_identify_event(context)
+    end
 
-  # @private
-  class NullEventProcessor
-    def add_event(event)
+    def record_custom_event(
+      context,
+      key,
+      data = nil,
+      metric_value = nil
+    )
+    end
+
+    def record_migration_op_event(event)
     end
 
     def flush
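The new EventProcessorMethods module above defines the interface that replaces the old single add_event(event) entry point. A hypothetical caller of the 8.0.0 interface, assuming a context built with LDContext.create from the context.rb file added in this release (the EventProcessor constructor arguments follow the hunks below):

# hypothetical usage; EventProcessor is normally constructed by LDClient
processor = LaunchDarkly::EventProcessor.new("my-sdk-key", LaunchDarkly::Config.new)
context = LaunchDarkly::LDContext.create({ key: "user-123", kind: "user" })

processor.record_identify_event(context)
processor.record_custom_event(context, "purchase", { items: 3 }, 29.99)
processor.flush
processor.stop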
@@ -45,12 +67,12 @@ module LaunchDarkly
     end
   end
 
+  MAX_FLUSH_WORKERS = 5
+  private_constant :MAX_FLUSH_WORKERS
+
   # @private
-  class EventMessage
-    def initialize(event)
-      @event = event
-    end
-    attr_reader :event
+  class NullEventProcessor
+    include EventProcessorMethods
   end
 
   # @private
@@ -58,7 +80,7 @@
   end
 
   # @private
-  class FlushUsersMessage
+  class FlushContextsMessage
   end
 
   # @private
@@ -70,7 +92,7 @@
     def initialize
       @reply = Concurrent::Semaphore.new(0)
     end
-
+
     def completed
       @reply.release
     end
@@ -90,6 +112,8 @@
 
   # @private
   class EventProcessor
+    include EventProcessorMethods
+
     def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test_properties = nil)
       raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key
       @logger = config.logger
@@ -98,10 +122,10 @@
         post_to_inbox(FlushMessage.new)
       end
       @flush_task.execute
-      @users_flush_task = Concurrent::TimerTask.new(execution_interval: config.user_keys_flush_interval) do
-        post_to_inbox(FlushUsersMessage.new)
+      @contexts_flush_task = Concurrent::TimerTask.new(execution_interval: config.context_keys_flush_interval) do
+        post_to_inbox(FlushContextsMessage.new)
       end
-      @users_flush_task.execute
+      @contexts_flush_task.execute
       if !diagnostic_accumulator.nil?
         interval = test_properties && test_properties.has_key?(:diagnostic_recording_interval) ?
           test_properties[:diagnostic_recording_interval] :
@@ -116,16 +140,42 @@
       @stopped = Concurrent::AtomicBoolean.new(false)
       @inbox_full = Concurrent::AtomicBoolean.new(false)
 
-      event_sender = test_properties && test_properties.has_key?(:event_sender) ?
-        test_properties[:event_sender] :
-        Impl::EventSender.new(sdk_key, config, client ? client : Util.new_http_client(config.events_uri, config))
+      event_sender = (test_properties || {})[:event_sender] ||
+        Impl::EventSender.new(sdk_key, config, client || Util.new_http_client(config.events_uri, config))
+
+      @timestamp_fn = (test_properties || {})[:timestamp_fn] || proc { Impl::Util.current_time_millis }
 
       EventDispatcher.new(@inbox, sdk_key, config, diagnostic_accumulator, event_sender)
     end
 
-    def add_event(event)
-      event[:creationDate] = Impl::Util.current_time_millis
-      post_to_inbox(EventMessage.new(event))
+    def record_eval_event(
+      context,
+      key,
+      version = nil,
+      variation = nil,
+      value = nil,
+      reason = nil,
+      default = nil,
+      track_events = false,
+      debug_until = nil,
+      prereq_of = nil,
+      sampling_ratio = nil,
+      exclude_from_summaries = false
+    )
+      post_to_inbox(LaunchDarkly::Impl::EvalEvent.new(timestamp, context, key, version, variation, value, reason,
+        default, track_events, debug_until, prereq_of, sampling_ratio, exclude_from_summaries))
+    end
+
+    def record_identify_event(context)
+      post_to_inbox(LaunchDarkly::Impl::IdentifyEvent.new(timestamp, context))
+    end
+
+    def record_custom_event(context, key, data = nil, metric_value = nil)
+      post_to_inbox(LaunchDarkly::Impl::CustomEvent.new(timestamp, context, key, data, metric_value))
+    end
+
+    def record_migration_op_event(event)
+      post_to_inbox(event)
     end
 
     def flush
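Because the creation timestamp now comes from the injectable @timestamp_fn above rather than being stamped in add_event, a test can pin event timestamps. A hypothetical test setup using the :timestamp_fn key from the test_properties hash (the fifth constructor argument shown earlier):

fixed_millis = 1_700_000_000_000
processor = LaunchDarkly::EventProcessor.new(
  "my-sdk-key", LaunchDarkly::Config.new, nil, nil,
  { timestamp_fn: proc { fixed_millis } }
)
# every record_* call will now stamp its event with fixed_millis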
@@ -137,8 +187,8 @@
       # final shutdown, which includes a final flush, is done synchronously
       if @stopped.make_true
         @flush_task.shutdown
-        @users_flush_task.shutdown
-        @diagnostic_event_task.shutdown if !@diagnostic_event_task.nil?
+        @contexts_flush_task.shutdown
+        @diagnostic_event_task.shutdown unless @diagnostic_event_task.nil?
         # Note that here we are not calling post_to_inbox, because we *do* want to wait if the inbox
         # is full; an orderly shutdown can't happen unless these messages are received.
         @inbox << FlushMessage.new
@@ -155,9 +205,11 @@
       sync_msg.wait_for_completion
     end
 
-    private
+    private def timestamp
+      @timestamp_fn.call()
+    end
 
-    def post_to_inbox(message)
+    private def post_to_inbox(message)
       begin
         @inbox.push(message, non_block=true)
       rescue ThreadError
@@ -179,14 +231,15 @@
       @config = config
       @diagnostic_accumulator = config.diagnostic_opt_out? ? nil : diagnostic_accumulator
       @event_sender = event_sender
+      @sampler = LaunchDarkly::Impl::Sampler.new(Random.new)
 
-      @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity)
+      @context_keys = SimpleLRUCacheSet.new(config.context_keys_capacity)
       @formatter = EventOutputFormatter.new(config)
       @disabled = Concurrent::AtomicBoolean.new(false)
       @last_known_past_time = Concurrent::AtomicReference.new(0)
-      @deduplicated_users = 0
+      @deduplicated_contexts = 0
       @events_in_last_batch = 0
-
+
       outbox = EventBuffer.new(config.capacity, config.logger)
       flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS)
 
@@ -209,12 +262,10 @@
         begin
           message = inbox.pop
           case message
-          when EventMessage
-            dispatch_event(message.event, outbox)
           when FlushMessage
            trigger_flush(outbox, flush_workers)
-          when FlushUsersMessage
-            @user_keys.clear
+          when FlushContextsMessage
+            @context_keys.clear
           when DiagnosticEventMessage
            send_and_reset_diagnostics(outbox, diagnostic_event_workers)
           when TestSyncMessage
@@ -224,6 +275,8 @@
            do_shutdown(flush_workers, diagnostic_event_workers)
            running = false
            message.completed
+          else
+            dispatch_event(message, outbox)
           end
         rescue => e
           Util.log_exception(@config.logger, "Unexpected error in event processor", e)
@@ -234,7 +287,7 @@
     def do_shutdown(flush_workers, diagnostic_event_workers)
       flush_workers.shutdown
       flush_workers.wait_for_termination
-      if !diagnostic_event_workers.nil?
+      unless diagnostic_event_workers.nil?
         diagnostic_event_workers.shutdown
         diagnostic_event_workers.wait_for_termination
       end
@@ -244,58 +297,52 @@
     def synchronize_for_testing(flush_workers, diagnostic_event_workers)
       # Used only by unit tests. Wait until all active flush workers have finished.
       flush_workers.wait_all
-      diagnostic_event_workers.wait_all if !diagnostic_event_workers.nil?
+      diagnostic_event_workers.wait_all unless diagnostic_event_workers.nil?
     end
 
     def dispatch_event(event, outbox)
       return if @disabled.value
 
       # Always record the event in the summary.
-      outbox.add_to_summary(event)
+      outbox.add_to_summary(event) unless event.exclude_from_summaries
 
       # Decide whether to add the event to the payload. Feature events may be added twice, once for
       # the event (if tracked) and once for debugging.
       will_add_full_event = false
       debug_event = nil
-      if event[:kind] == "feature"
-        will_add_full_event = event[:trackEvents]
+      if event.is_a?(LaunchDarkly::Impl::EvalEvent)
+        will_add_full_event = event.track_events
         if should_debug_event(event)
-          debug_event = event.clone
-          debug_event[:debug] = true
+          debug_event = LaunchDarkly::Impl::DebugEvent.new(event)
         end
       else
         will_add_full_event = true
       end
 
-      # For each user we haven't seen before, we add an index event - unless this is already
-      # an identify event for that user.
-      if !(will_add_full_event && @config.inline_users_in_events)
-        if event.has_key?(:user) && !notice_user(event[:user]) && event[:kind] != "identify"
-          outbox.add_event({
-            kind: "index",
-            creationDate: event[:creationDate],
-            user: event[:user]
-          })
-        end
+      # For each context we haven't seen before, we add an index event - unless this is already
+      # an identify event for that context.
+      if !event.context.nil? && !notice_context(event.context) && !event.is_a?(LaunchDarkly::Impl::IdentifyEvent) && !event.is_a?(LaunchDarkly::Impl::MigrationOpEvent)
+        outbox.add_event(LaunchDarkly::Impl::IndexEvent.new(event.timestamp, event.context))
       end
 
-      outbox.add_event(event) if will_add_full_event
-      outbox.add_event(debug_event) if !debug_event.nil?
+      outbox.add_event(event) if will_add_full_event && @sampler.sample(event.sampling_ratio.nil? ? 1 : event.sampling_ratio)
+      outbox.add_event(debug_event) if !debug_event.nil? && @sampler.sample(event.sampling_ratio.nil? ? 1 : event.sampling_ratio)
     end
 
-    # Add to the set of users we've noticed, and return true if the user was already known to us.
-    def notice_user(user)
-      if user.nil? || !user.has_key?(:key)
-        true
-      else
-        known = @user_keys.add(user[:key].to_s)
-        @deduplicated_users += 1 if known
-        known
-      end
+    #
+    # Add to the set of contexts we've noticed, and return true if the context
+    # was already known to us.
+    # @param context [LaunchDarkly::LDContext]
+    # @return [Boolean]
+    #
+    def notice_context(context)
+      known = @context_keys.add(context.fully_qualified_key)
+      @deduplicated_contexts += 1 if known
+      known
     end
 
     def should_debug_event(event)
-      debug_until = event[:debugEventsUntilDate]
+      debug_until = event.debug_until
       if !debug_until.nil?
         last_past = @last_known_past_time.value
         debug_until > last_past && debug_until > Impl::Util.current_time_millis
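notice_context above relies on SimpleLRUCacheSet#add returning true when the key was already present. A simplified stand-in that illustrates only that contract (not the SDK's implementation):

class TinyLRUSet
  def initialize(capacity)
    @capacity = capacity
    @keys = {}                             # insertion order doubles as recency order
  end

  def add(key)
    known = !@keys.delete(key).nil?        # re-inserting moves the key to the back
    @keys[key] = true
    @keys.shift if @keys.size > @capacity  # evict the least recently used key
    known
  end
end

lru = TinyLRUSet.new(1000)
lru.add("user-123")   # => false: first sighting, so an index event is emitted
lru.add("user-123")   # => true: already known, counted in @deduplicated_contexts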
@@ -309,7 +356,7 @@
         return
       end
 
-      payload = outbox.get_payload
+      payload = outbox.get_payload
       if !payload.events.empty? || !payload.summary.counters.empty?
         count = payload.events.length + (payload.summary.counters.empty? ? 0 : 1)
         @events_in_last_batch = count
@@ -319,7 +366,7 @@
           events_out = @formatter.make_output_events(payload.events, payload.summary)
           result = @event_sender.send_event_data(events_out.to_json, "#{events_out.length} events", false)
           @disabled.value = true if result.must_shutdown
-          if !result.time_from_server.nil?
+          unless result.time_from_server.nil?
            @last_known_past_time.value = (result.time_from_server.to_f * 1000).to_i
           end
         rescue => e
@@ -335,8 +382,8 @@
     def send_and_reset_diagnostics(outbox, diagnostic_event_workers)
       return if @diagnostic_accumulator.nil?
       dropped_count = outbox.get_and_clear_dropped_count
-      event = @diagnostic_accumulator.create_periodic_event_and_reset(dropped_count, @deduplicated_users, @events_in_last_batch)
-      @deduplicated_users = 0
+      event = @diagnostic_accumulator.create_periodic_event_and_reset(dropped_count, @deduplicated_contexts, @events_in_last_batch)
+      @deduplicated_contexts = 0
       @events_in_last_batch = 0
       send_diagnostic_event(event, diagnostic_event_workers)
     end
@@ -365,17 +412,16 @@
       @capacity_exceeded = false
       @dropped_events = 0
       @events = []
-      @summarizer = EventSummarizer.new
+      @summarizer = LaunchDarkly::Impl::EventSummarizer.new
     end
 
     def add_event(event)
       if @events.length < @capacity
-        @logger.debug { "[LDClient] Enqueueing event: #{event.to_json}" }
         @events.push(event)
         @capacity_exceeded = false
       else
         @dropped_events += 1
-        if !@capacity_exceeded
+        unless @capacity_exceeded
           @capacity_exceeded = true
           @logger.warn { "[LDClient] Exceeded event queue capacity. Increase capacity to avoid dropping events." }
         end
@@ -387,7 +433,7 @@
     end
 
     def get_payload
-      return FlushPayload.new(@events, @summarizer.snapshot)
+      FlushPayload.new(@events, @summarizer.snapshot)
     end
 
     def get_and_clear_dropped_count
@@ -404,113 +450,177 @@
 
   # @private
   class EventOutputFormatter
+    FEATURE_KIND = 'feature'
+    IDENTIFY_KIND = 'identify'
+    CUSTOM_KIND = 'custom'
+    INDEX_KIND = 'index'
+    DEBUG_KIND = 'debug'
+    MIGRATION_OP_KIND = 'migration_op'
+    SUMMARY_KIND = 'summary'
+
     def initialize(config)
-      @inline_users = config.inline_users_in_events
-      @user_filter = UserFilter.new(config)
+      @context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attributes)
     end
 
     # Transforms events into the format used for event sending.
     def make_output_events(events, summary)
       events_out = events.map { |e| make_output_event(e) }
-      if !summary.counters.empty?
+      unless summary.counters.empty?
         events_out.push(make_summary_event(summary))
       end
       events_out
     end
 
-    private
+    private def make_output_event(event)
+      case event
 
-    def process_user(event)
-      filtered = @user_filter.transform_user_props(event[:user])
-      Util.stringify_attrs(filtered, USER_ATTRS_TO_STRINGIFY_FOR_EVENTS)
-    end
+      when LaunchDarkly::Impl::EvalEvent
+        out = {
+          kind: FEATURE_KIND,
+          creationDate: event.timestamp,
+          key: event.key,
+          value: event.value,
+        }
+        out[:default] = event.default unless event.default.nil?
+        out[:variation] = event.variation unless event.variation.nil?
+        out[:version] = event.version unless event.version.nil?
+        out[:prereqOf] = event.prereq_of unless event.prereq_of.nil?
+        out[:contextKeys] = event.context.keys
+        out[:reason] = event.reason unless event.reason.nil?
+        out
 
-    def make_output_event(event)
-      case event[:kind]
-      when "feature"
-        is_debug = event[:debug]
+      when LaunchDarkly::Impl::MigrationOpEvent
         out = {
-          kind: is_debug ? "debug" : "feature",
-          creationDate: event[:creationDate],
-          key: event[:key],
-          value: event[:value]
+          kind: MIGRATION_OP_KIND,
+          creationDate: event.timestamp,
+          contextKeys: event.context.keys,
+          operation: event.operation.to_s,
+          evaluation: {
+            key: event.key,
+            value: event.evaluation.value,
+          },
         }
-        out[:default] = event[:default] if event.has_key?(:default)
-        out[:variation] = event[:variation] if event.has_key?(:variation)
-        out[:version] = event[:version] if event.has_key?(:version)
-        out[:prereqOf] = event[:prereqOf] if event.has_key?(:prereqOf)
-        out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind)
-        if @inline_users || is_debug
-          out[:user] = process_user(event)
-        else
-          out[:userKey] = event[:user][:key]
+
+        out[:evaluation][:version] = event.version unless event.version.nil?
+        out[:evaluation][:default] = event.default unless event.default.nil?
+        out[:evaluation][:variation] = event.evaluation.variation_index unless event.evaluation.variation_index.nil?
+        out[:evaluation][:reason] = event.evaluation.reason unless event.evaluation.reason.nil?
+        out[:samplingRatio] = event.sampling_ratio unless event.sampling_ratio.nil? || event.sampling_ratio == 1
+
+        measurements = []
+
+        unless event.invoked.empty?
+          measurements << {
+            "key": "invoked",
+            "values": event.invoked.map { |origin| [origin, true] }.to_h,
+          }
+        end
+
+        unless event.consistency_check.nil?
+          measurement = {
+            "key": "consistent",
+            "value": event.consistency_check,
+          }
+
+          unless event.consistency_check_ratio.nil? || event.consistency_check_ratio == 1
+            measurement[:samplingRatio] = event.consistency_check_ratio
+          end
+
+          measurements << measurement
+        end
+
+
+        unless event.latencies.empty?
+          measurements << {
+            "key": "latency_ms",
+            "values": event.latencies,
+          }
         end
-        out[:reason] = event[:reason] if !event[:reason].nil?
+
+        unless event.errors.empty?
+          measurements << {
+            "key": "error",
+            "values": event.errors.map { |origin| [origin, true] }.to_h,
+          }
+        end
+        out[:measurements] = measurements unless measurements.empty?
+
         out
-      when "identify"
+
+      when LaunchDarkly::Impl::IdentifyEvent
         {
-          kind: "identify",
-          creationDate: event[:creationDate],
-          key: event[:user][:key].to_s,
-          user: process_user(event)
+          kind: IDENTIFY_KIND,
+          creationDate: event.timestamp,
+          key: event.context.fully_qualified_key,
+          context: @context_filter.filter(event.context),
         }
-      when "custom"
+
+      when LaunchDarkly::Impl::CustomEvent
         out = {
-          kind: "custom",
-          creationDate: event[:creationDate],
-          key: event[:key]
+          kind: CUSTOM_KIND,
+          creationDate: event.timestamp,
+          key: event.key,
         }
-        out[:data] = event[:data] if event.has_key?(:data)
-        if @inline_users
-          out[:user] = process_user(event)
-        else
-          out[:userKey] = event[:user][:key]
-        end
-        out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue)
-        out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind)
+        out[:data] = event.data unless event.data.nil?
+        out[:contextKeys] = event.context.keys
+        out[:metricValue] = event.metric_value unless event.metric_value.nil?
         out
-      when "index"
+
+      when LaunchDarkly::Impl::IndexEvent
         {
-          kind: "index",
-          creationDate: event[:creationDate],
-          user: process_user(event)
+          kind: INDEX_KIND,
+          creationDate: event.timestamp,
+          context: @context_filter.filter(event.context),
         }
+
+      when LaunchDarkly::Impl::DebugEvent
+        original = event.eval_event
+        out = {
+          kind: DEBUG_KIND,
+          creationDate: original.timestamp,
+          key: original.key,
+          context: @context_filter.filter(original.context),
+          value: original.value,
+        }
+        out[:default] = original.default unless original.default.nil?
+        out[:variation] = original.variation unless original.variation.nil?
+        out[:version] = original.version unless original.version.nil?
+        out[:prereqOf] = original.prereq_of unless original.prereq_of.nil?
+        out[:reason] = original.reason unless original.reason.nil?
+        out
+
       else
-        event
+        nil
       end
     end
 
     # Transforms the summary data into the format used for event sending.
-    def make_summary_event(summary)
+    private def make_summary_event(summary)
       flags = {}
-      summary[:counters].each { |ckey, cval|
-        flag = flags[ckey[:key]]
-        if flag.nil?
-          flag = {
-            default: cval[:default],
-            counters: []
-          }
-          flags[ckey[:key]] = flag
-        end
-        c = {
-          value: cval[:value],
-          count: cval[:count]
-        }
-        if !ckey[:variation].nil?
-          c[:variation] = ckey[:variation]
-        end
-        if ckey[:version].nil?
-          c[:unknown] = true
-        else
-          c[:version] = ckey[:version]
+      summary.counters.each do |flagKey, flagInfo|
+        counters = []
+        flagInfo.versions.each do |version, variations|
+          variations.each do |variation, counter|
+            c = {
+              value: counter.value,
+              count: counter.count,
+            }
+            c[:variation] = variation unless variation.nil?
+            if version.nil?
+              c[:unknown] = true
+            else
+              c[:version] = version
+            end
+            counters.push(c)
+          end
         end
-        flag[:counters].push(c)
-      }
+        flags[flagKey] = { default: flagInfo.default, counters: counters, contextKinds: flagInfo.context_kinds.to_a }
+      end
       {
-        kind: "summary",
+        kind: SUMMARY_KIND,
         startDate: summary[:start_date],
         endDate: summary[:end_date],
-        features: flags
+        features: flags,
       }
     end
   end
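For reference, make_summary_event above produces a payload shaped roughly like this (field names taken from the code; flag keys and counts invented for illustration):

{
  kind: "summary",
  startDate: 1_700_000_000_000,
  endDate: 1_700_000_060_000,
  features: {
    "example-flag" => {
      default: false,
      counters: [
        { value: true, count: 10, variation: 0, version: 42 },
        { value: false, count: 2, unknown: true },   # flag version not known
      ],
      contextKinds: ["user"],
    },
  },
}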