launchdarkly-server-sdk 6.2.5 → 6.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 8f1b216e5ae2518b79aae9613ecd04b02ed525ea2d25084df805eae7d92cb219
- data.tar.gz: 317db7e14a292968b245e39e3cd4be33206fd8085c8ebd04be43f1c987434228
+ metadata.gz: 70c2bfa2af852ab863bdd25cceedb4ac6eca57bfb4d6ccd9df70653b73a862e0
+ data.tar.gz: 38d1bef24f2ce6cced2342264e0b70dde44ee79dcdfb4199ff90b9b4319303eb
  SHA512:
- metadata.gz: 5d9182ccde6530f67df30fd8a238e9b510e89dc235cf76c45140cb6eaa36b8e342a44a98595b8c86f171c8f6f1cc74291f3c2ebf11ac274f2a1e46d64798318c
- data.tar.gz: 4b54fc711795c06e3bdabb9dd593af8499aa8e8c28eec1f9db508a5a6c5288cd0268b7ce9d324c892425ffcf080f0ad21e78060a74e0186f19998bd40f076943
+ metadata.gz: f44affa6d4f7beb1c21be04f91caa39acd708a44fc642768c5c42419e8a2295f59592531c1e07f5ed8e407bbb1a97817b42c6be1e54ef2a80e20b1e014b57836
+ data.tar.gz: fe229a2d10add2afbc5fe539ca494fe8e73ae9b586571436e9bae9a3275721735ac755b35b40b6e432b69fec91803191bfe2c3bff1b876f30f3ef9de950af3eb
data/README.md CHANGED
@@ -4,7 +4,6 @@ LaunchDarkly Server-side SDK for Ruby
  [![Gem Version](https://badge.fury.io/rb/launchdarkly-server-sdk.svg)](http://badge.fury.io/rb/launchdarkly-server-sdk)
 
  [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master)
- [![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master)
  [![RubyDoc](https://img.shields.io/static/v1?label=docs+-+all+versions&message=reference&color=00add8)](https://www.rubydoc.info/gems/launchdarkly-server-sdk)
  [![GitHub Pages](https://img.shields.io/static/v1?label=docs+-+latest&message=reference&color=00add8)](https://launchdarkly.github.io/ruby-server-sdk)
 
@@ -21,6 +21,7 @@ module LaunchDarkly
  # @option opts [Integer] :capacity (10000) See {#capacity}.
  # @option opts [Float] :flush_interval (30) See {#flush_interval}.
  # @option opts [Float] :read_timeout (10) See {#read_timeout}.
+ # @option opts [Float] :initial_reconnect_delay (1) See {#initial_reconnect_delay}.
  # @option opts [Float] :connect_timeout (2) See {#connect_timeout}.
  # @option opts [Object] :cache_store See {#cache_store}.
  # @option opts [Object] :feature_store See {#feature_store}.
@@ -42,6 +43,7 @@ module LaunchDarkly
  # @option opts [String] :wrapper_name See {#wrapper_name}.
  # @option opts [String] :wrapper_version See {#wrapper_version}.
  # @option opts [#open] :socket_factory See {#socket_factory}.
+ # @option opts [BigSegmentsConfig] :big_segments See {#big_segments}.
  #
  def initialize(opts = {})
  @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/")
@@ -53,6 +55,7 @@ module LaunchDarkly
  @flush_interval = opts[:flush_interval] || Config.default_flush_interval
  @connect_timeout = opts[:connect_timeout] || Config.default_connect_timeout
  @read_timeout = opts[:read_timeout] || Config.default_read_timeout
+ @initial_reconnect_delay = opts[:initial_reconnect_delay] || Config.default_initial_reconnect_delay
  @feature_store = opts[:feature_store] || Config.default_feature_store
  @stream = opts.has_key?(:stream) ? opts[:stream] : Config.default_stream
  @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd
@@ -73,6 +76,7 @@ module LaunchDarkly
  @wrapper_name = opts[:wrapper_name]
  @wrapper_version = opts[:wrapper_version]
  @socket_factory = opts[:socket_factory]
+ @big_segments = opts[:big_segments] || BigSegmentsConfig.new(store: nil)
  end
 
  #
@@ -178,6 +182,13 @@ module LaunchDarkly
  #
  attr_reader :read_timeout
 
+ #
+ # The initial delay before reconnecting after an error in the SSE client.
+ # This only applies to the streaming connection.
+ # @return [Float]
+ #
+ attr_reader :initial_reconnect_delay
+
  #
  # The connect timeout for network connections in seconds.
  # @return [Float]
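The new `initial_reconnect_delay` option sets how long the SDK's streaming (SSE) connection waits before reconnecting after an error, and is passed to `Config` like any other option. A minimal sketch under the assumption of a standard client setup; the 0.5-second value and the SDK key placeholder are illustrative only:

```ruby
require "ldclient-rb"

# Sketch only: wait 0.5 seconds (instead of the default 1) before the first
# reconnect attempt when the streaming connection drops.
config = LaunchDarkly::Config.new(initial_reconnect_delay: 0.5)
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)
```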
@@ -258,10 +269,21 @@ module LaunchDarkly
  # object.
  #
  # @return [LaunchDarkly::Interfaces::DataSource|lambda]
- # @see FileDataSource
+ # @see LaunchDarkly::Integrations::FileData
+ # @see LaunchDarkly::Integrations::TestData
  #
  attr_reader :data_source
 
+ #
+ # Configuration options related to Big Segments.
+ #
+ # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly
+ # documentation: https://docs.launchdarkly.com/home/users/big-segments
+ #
+ # @return [BigSegmentsConfig]
+ #
+ attr_reader :big_segments
+
  # @deprecated This is replaced by {#data_source}.
  attr_reader :update_processor
 
@@ -382,6 +404,14 @@ module LaunchDarkly
  10
  end
 
+ #
+ # The default value for {#initial_reconnect_delay}.
+ # @return [Float] 1
+ #
+ def self.default_initial_reconnect_delay
+ 1
+ end
+
  #
  # The default value for {#connect_timeout}.
  # @return [Float] 10
@@ -484,4 +514,68 @@ module LaunchDarkly
  60
  end
  end
+
+ #
+ # Configuration options related to Big Segments.
+ #
+ # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly
+ # documentation: https://docs.launchdarkly.com/home/users/big-segments
+ #
+ # If your application uses Big Segments, you will need to create a `BigSegmentsConfig` that at a
+ # minimum specifies what database integration to use, and then pass the `BigSegmentsConfig`
+ # object as the `big_segments` parameter when creating a {Config}.
+ #
+ # @example Configuring Big Segments with Redis
+ # store = LaunchDarkly::Integrations::Redis::new_big_segments_store(redis_url: "redis://my-server")
+ # config = LaunchDarkly::Config.new(big_segments:
+ # LaunchDarkly::BigSegmentsConfig.new(store: store))
+ # client = LaunchDarkly::LDClient.new(my_sdk_key, config)
+ #
+ class BigSegmentsConfig
+ DEFAULT_USER_CACHE_SIZE = 1000
+ DEFAULT_USER_CACHE_TIME = 5
+ DEFAULT_STATUS_POLL_INTERVAL = 5
+ DEFAULT_STALE_AFTER = 2 * 60
+
+ #
+ # Constructor for setting Big Segments options.
+ #
+ # @param store [LaunchDarkly::Interfaces::BigSegmentStore] the data store implementation
+ # @param user_cache_size [Integer] See {#user_cache_size}.
+ # @param user_cache_time [Float] See {#user_cache_time}.
+ # @param status_poll_interval [Float] See {#status_poll_interval}.
+ # @param stale_after [Float] See {#stale_after}.
+ #
+ def initialize(store:, user_cache_size: nil, user_cache_time: nil, status_poll_interval: nil, stale_after: nil)
+ @store = store
+ @user_cache_size = user_cache_size.nil? ? DEFAULT_USER_CACHE_SIZE : user_cache_size
+ @user_cache_time = user_cache_time.nil? ? DEFAULT_USER_CACHE_TIME : user_cache_time
+ @status_poll_interval = status_poll_interval.nil? ? DEFAULT_STATUS_POLL_INTERVAL : status_poll_interval
+ @stale_after = stale_after.nil? ? DEFAULT_STALE_AFTER : stale_after
+ end
+
+ # The implementation of {LaunchDarkly::Interfaces::BigSegmentStore} that will be used to
+ # query the Big Segments database.
+ # @return [LaunchDarkly::Interfaces::BigSegmentStore]
+ attr_reader :store
+
+ # The maximum number of users whose Big Segment state will be cached by the SDK at any given time.
+ # @return [Integer]
+ attr_reader :user_cache_size
+
+ # The maximum length of time (in seconds) that the Big Segment state for a user will be cached
+ # by the SDK.
+ # @return [Float]
+ attr_reader :user_cache_time
+
+ # The interval (in seconds) at which the SDK will poll the Big Segment store to make sure it is
+ # available and to determine how long ago it was updated.
+ # @return [Float]
+ attr_reader :status_poll_interval
+
+ # The maximum length of time between updates of the Big Segments data before the data is
+ # considered out of date.
+ # @return [Float]
+ attr_reader :stale_after
+ end
  end
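Beyond choosing a store, `BigSegmentsConfig` lets you tune the per-user cache and the staleness threshold defined above. A hedged sketch that reuses the Redis store helper from the class documentation; the specific numbers are illustrative assumptions, not recommendations, and the Redis integration assumes the `redis` gem is available:

```ruby
require "ldclient-rb"

# Sketch: a Redis-backed Big Segment store with a larger user cache and a
# tighter staleness threshold than the defaults (1000 users / 120 seconds).
store = LaunchDarkly::Integrations::Redis.new_big_segments_store(redis_url: "redis://my-server")
big_segments = LaunchDarkly::BigSegmentsConfig.new(
  store: store,
  user_cache_size: 5000,  # cache Big Segment state for up to 5000 users
  user_cache_time: 10,    # seconds before a cached user entry expires
  stale_after: 60         # treat segment data older than 60 seconds as stale
)
config = LaunchDarkly::Config.new(big_segments: big_segments)
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)
```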
@@ -110,27 +110,42 @@ module LaunchDarkly
 
  # Indicates the general category of the reason. Will always be one of the class constants such
  # as {#OFF}.
+ # @return [Symbol]
  attr_reader :kind
 
  # The index of the rule that was matched (0 for the first rule in the feature flag). If
  # {#kind} is not {#RULE_MATCH}, this will be `nil`.
+ # @return [Integer|nil]
  attr_reader :rule_index
 
  # A unique string identifier for the matched rule, which will not change if other rules are added
  # or deleted. If {#kind} is not {#RULE_MATCH}, this will be `nil`.
+ # @return [String]
  attr_reader :rule_id
 
  # A boolean or nil value representing if the rule or fallthrough has an experiment rollout.
+ # @return [Boolean|nil]
  attr_reader :in_experiment
 
  # The key of the prerequisite flag that did not return the desired variation. If {#kind} is not
  # {#PREREQUISITE_FAILED}, this will be `nil`.
+ # @return [String]
  attr_reader :prerequisite_key
 
  # A value indicating the general category of error. This should be one of the class constants such
  # as {#ERROR_FLAG_NOT_FOUND}. If {#kind} is not {#ERROR}, it will be `nil`.
+ # @return [Symbol]
  attr_reader :error_kind
 
+ # Describes the validity of Big Segment information, if and only if the flag evaluation required
+ # querying at least one Big Segment. Otherwise it returns `nil`. Possible values are defined by
+ # {BigSegmentsStatus}.
+ #
+ # Big Segments are a specific kind of user segments. For more information, read the LaunchDarkly
+ # documentation: https://docs.launchdarkly.com/home/users/big-segments
+ # @return [Symbol]
+ attr_reader :big_segments_status
+
  # Returns an instance whose {#kind} is {#OFF}.
  # @return [EvaluationReason]
  def self.off
@@ -196,11 +211,13 @@ module LaunchDarkly
  def ==(other)
  if other.is_a? EvaluationReason
  @kind == other.kind && @rule_index == other.rule_index && @rule_id == other.rule_id &&
- @prerequisite_key == other.prerequisite_key && @error_kind == other.error_kind
+ @prerequisite_key == other.prerequisite_key && @error_kind == other.error_kind &&
+ @big_segments_status == other.big_segments_status
  elsif other.is_a? Hash
  @kind.to_s == other[:kind] && @rule_index == other[:ruleIndex] && @rule_id == other[:ruleId] &&
  @prerequisite_key == other[:prerequisiteKey] &&
- (other[:errorKind] == @error_kind.nil? ? nil : @error_kind.to_s)
+ (other[:errorKind] == @error_kind.nil? ? nil : @error_kind.to_s) &&
+ (other[:bigSegmentsStatus] == @big_segments_status.nil? ? nil : @big_segments_status.to_s)
  end
  end
 
@@ -242,7 +259,7 @@ module LaunchDarkly
  # enabled for a flag and the application called variation_detail, or 2. experimentation is
  # enabled for an evaluation. We can't reuse these hashes because an application could call
  # as_json and then modify the result.
- case @kind
+ ret = case @kind
  when :RULE_MATCH
  if @in_experiment
  { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, inExperiment: @in_experiment }
@@ -262,6 +279,10 @@ module LaunchDarkly
  else
  { kind: @kind }
  end
+ if !@big_segments_status.nil?
+ ret[:bigSegmentsStatus] = @big_segments_status
+ end
+ ret
  end
 
  # Same as {#as_json}, but converts the JSON structure into a string.
@@ -285,14 +306,24 @@ module LaunchDarkly
  @prerequisite_key
  when :errorKind
  @error_kind.nil? ? nil : @error_kind.to_s
+ when :bigSegmentsStatus
+ @big_segments_status.nil? ? nil : @big_segments_status.to_s
  else
  nil
  end
  end
 
- private
+ def with_big_segments_status(big_segments_status)
+ return self if @big_segments_status == big_segments_status
+ EvaluationReason.new(@kind, @rule_index, @rule_id, @prerequisite_key, @error_kind, @in_experiment, big_segments_status)
+ end
 
- def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_experiment=nil)
+ #
+ # Constructor that sets all properties. Applications should not normally use this constructor,
+ # but should use class methods like {#off} to avoid creating unnecessary instances.
+ #
+ def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_experiment=nil,
+ big_segments_status = nil)
  @kind = kind.to_sym
  @rule_index = rule_index
  @rule_id = rule_id
@@ -301,11 +332,10 @@ module LaunchDarkly
  @prerequisite_key.freeze if !prerequisite_key.nil?
  @error_kind = error_kind
  @in_experiment = in_experiment
+ @big_segments_status = big_segments_status
  end
 
- private_class_method :new
-
- def self.make_error(error_kind)
+ private_class_method def self.make_error(error_kind)
  new(:ERROR, nil, nil, nil, error_kind)
  end
 
@@ -321,4 +351,33 @@ module LaunchDarkly
  ERROR_EXCEPTION => make_error(ERROR_EXCEPTION)
  }
  end
+
+ #
+ # Defines the possible values of {EvaluationReason#big_segments_status}.
+ #
+ module BigSegmentsStatus
+ #
+ # Indicates that the Big Segment query involved in the flag evaluation was successful, and
+ # that the segment state is considered up to date.
+ #
+ HEALTHY = :HEALTHY
+
+ #
+ # Indicates that the Big Segment query involved in the flag evaluation was successful, but
+ # that the segment state may not be up to date.
+ #
+ STALE = :STALE
+
+ #
+ # Indicates that Big Segments could not be queried for the flag evaluation because the SDK
+ # configuration did not include a Big Segment store.
+ #
+ NOT_CONFIGURED = :NOT_CONFIGURED
+
+ #
+ # Indicates that the Big Segment query involved in the flag evaluation failed, for instance
+ # due to a database error.
+ #
+ STORE_ERROR = :STORE_ERROR
+ end
  end
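When an evaluation touches a Big Segment, the evaluation reason now carries `big_segments_status`. A brief sketch of checking it after a `variation_detail` call; the flag key, user hash, and use of an existing `client` are placeholder assumptions, and the exact detail/reason API is as documented for this SDK rather than shown in this diff:

```ruby
# Sketch only: "my-flag-key" and the user hash are placeholders.
detail = client.variation_detail("my-flag-key", { key: "user-key-1" }, false)

status = detail.reason.big_segments_status
if !status.nil? && status != LaunchDarkly::BigSegmentsStatus::HEALTHY
  # The evaluation consulted a Big Segment but the store was stale, unconfigured,
  # or returned an error, so the result may not reflect current segment data.
  puts "Big Segment status: #{status}"
end
```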
@@ -1,314 +1,23 @@
- require 'concurrent/atomics'
- require 'json'
- require 'yaml'
- require 'pathname'
+ require "ldclient-rb/integrations/file_data"
 
  module LaunchDarkly
- # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the
- # file data source or who don't need auto-updating, we only enable auto-update if the 'listen'
- # gem has been provided by the host app.
- # @private
- @@have_listen = false
- begin
- require 'listen'
- @@have_listen = true
- rescue LoadError
- end
-
- # @private
- def self.have_listen?
- @@have_listen
- end
-
- #
- # Provides a way to use local files as a source of feature flag state. This allows using a
- # predetermined feature flag state without an actual LaunchDarkly connection.
- #
- # Reading flags from a file is only intended for pre-production environments. Production
- # environments should always be configured to receive flag updates from LaunchDarkly.
- #
- # To use this component, call {FileDataSource#factory}, and store its return value in the
- # {Config#data_source} property of your LaunchDarkly client configuration. In the options
- # to `factory`, set `paths` to the file path(s) of your data file(s):
- #
- # file_source = FileDataSource.factory(paths: [ myFilePath ])
- # config = LaunchDarkly::Config.new(data_source: file_source)
- #
- # This will cause the client not to connect to LaunchDarkly to get feature flags. The
- # client may still make network connections to send analytics events, unless you have disabled
- # this with {Config#send_events} or {Config#offline?}.
- #
- # Flag data files can be either JSON or YAML. They contain an object with three possible
- # properties:
- #
- # - `flags`: Feature flag definitions.
- # - `flagValues`: Simplified feature flags that contain only a value.
- # - `segments`: User segment definitions.
- #
- # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application
- # and is subject to change. Rather than trying to construct these objects yourself, it is simpler
- # to request existing flags directly from the LaunchDarkly server in JSON format, and use this
- # output as the starting point for your file. In Linux you would do this:
- #
- # ```
- # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all
- # ```
  #
- # The output will look something like this (but with many more properties):
+ # Deprecated entry point for the file data source feature.
  #
- # {
- # "flags": {
- # "flag-key-1": {
- # "key": "flag-key-1",
- # "on": true,
- # "variations": [ "a", "b" ]
- # }
- # },
- # "segments": {
- # "segment-key-1": {
- # "key": "segment-key-1",
- # "includes": [ "user-key-1" ]
- # }
- # }
- # }
+ # The new preferred usage is {LaunchDarkly::Integrations::FileData#data_source}.
  #
- # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported
- # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to
- # set specific flag keys to specific values. For that, you can use a much simpler format:
- #
- # {
- # "flagValues": {
- # "my-string-flag-key": "value-1",
- # "my-boolean-flag-key": true,
- # "my-integer-flag-key": 3
- # }
- # }
- #
- # Or, in YAML:
- #
- # flagValues:
- # my-string-flag-key: "value-1"
- # my-boolean-flag-key: true
- # my-integer-flag-key: 1
- #
- # It is also possible to specify both "flags" and "flagValues", if you want some flags
- # to have simple values and others to have complex behavior. However, it is an error to use the
- # same flag key or segment key more than once, either in a single file or across multiple files.
- #
- # If the data source encounters any error in any file-- malformed content, a missing file, or a
- # duplicate key-- it will not load flags from any of the files.
+ # @deprecated This is replaced by {LaunchDarkly::Integrations::FileData}.
  #
  class FileDataSource
  #
- # Returns a factory for the file data source component.
- #
- # @param options [Hash] the configuration options
- # @option options [Array] :paths The paths of the source files for loading flag data. These
- # may be absolute paths or relative to the current working directory.
- # @option options [Boolean] :auto_update True if the data source should watch for changes to
- # the source file(s) and reload flags whenever there is a change. Auto-updating will only
- # work if all of the files you specified have valid directory paths at startup time.
- # Note that the default implementation of this feature is based on polling the filesystem,
- # which may not perform well. If you install the 'listen' gem (not included by default, to
- # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be
- # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability.
- # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for
- # file modifications - used only if auto_update is true, and if the native file-watching
- # mechanism from 'listen' is not being used. The default value is 1 second.
- # @return an object that can be stored in {Config#data_source}
+ # Deprecated entry point for the file data source feature.
  #
- def self.factory(options={})
- return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) }
- end
- end
-
- # @private
- class FileDataSourceImpl
- def initialize(feature_store, logger, options={})
- @feature_store = feature_store
- @logger = logger
- @paths = options[:paths] || []
- if @paths.is_a? String
- @paths = [ @paths ]
- end
- @auto_update = options[:auto_update]
- if @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests
- # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449).
- # Therefore, on that platform we'll fall back to file polling instead.
- if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.")
- @use_listen = false
- else
- @use_listen = true
- end
- end
- @poll_interval = options[:poll_interval] || 1
- @initialized = Concurrent::AtomicBoolean.new(false)
- @ready = Concurrent::Event.new
- end
-
- def initialized?
- @initialized.value
- end
-
- def start
- ready = Concurrent::Event.new
-
- # We will return immediately regardless of whether the file load succeeded or failed -
- # the difference can be detected by checking "initialized?"
- ready.set
-
- load_all
-
- if @auto_update
- # If we're going to watch files, then the start event will be set the first time we get
- # a successful load.
- @listener = start_listener
- end
-
- ready
- end
-
- def stop
- @listener.stop if !@listener.nil?
- end
-
- private
-
- def load_all
- all_data = {
- FEATURES => {},
- SEGMENTS => {}
- }
- @paths.each do |path|
- begin
- load_file(path, all_data)
- rescue => exn
- Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn)
- return
- end
- end
- @feature_store.init(all_data)
- @initialized.make_true
- end
-
- def load_file(path, all_data)
- parsed = parse_content(IO.read(path))
- (parsed[:flags] || {}).each do |key, flag|
- add_item(all_data, FEATURES, flag)
- end
- (parsed[:flagValues] || {}).each do |key, value|
- add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value))
- end
- (parsed[:segments] || {}).each do |key, segment|
- add_item(all_data, SEGMENTS, segment)
- end
- end
-
- def parse_content(content)
- # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while
- # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least
- # for all the samples of actual flag data that we've tested).
- symbolize_all_keys(YAML.safe_load(content))
- end
-
- def symbolize_all_keys(value)
- # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and
- # the SDK expects all objects to be formatted that way.
- if value.is_a?(Hash)
- value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h
- elsif value.is_a?(Array)
- value.map{ |v| symbolize_all_keys(v) }
- else
- value
- end
- end
-
- def add_item(all_data, kind, item)
- items = all_data[kind]
- raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash
- key = item[:key].to_sym
- if !items[key].nil?
- raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once"
- end
- items[key] = item
- end
-
- def make_flag_with_value(key, value)
- {
- key: key,
- on: true,
- fallthrough: { variation: 0 },
- variations: [ value ]
- }
- end
-
- def start_listener
- resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s }
- if @use_listen
- start_listener_with_listen_gem(resolved_paths)
- else
- FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger)
- end
- end
-
- def start_listener_with_listen_gem(resolved_paths)
- path_set = resolved_paths.to_set
- dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq
- opts = { latency: @poll_interval }
- l = Listen.to(*dir_paths, opts) do |modified, added, removed|
- paths = modified + added + removed
- if paths.any? { |p| path_set.include?(p) }
- load_all
- end
- end
- l.start
- l
- end
-
+ # The new preferred usage is {LaunchDarkly::Integrations::FileData#data_source}.
  #
- # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available.
+ # @deprecated This is replaced by {LaunchDarkly::Integrations::FileData#data_source}.
  #
- class FileDataSourcePoller
- def initialize(resolved_paths, interval, reloader, logger)
- @stopped = Concurrent::AtomicBoolean.new(false)
- get_file_times = Proc.new do
- ret = {}
- resolved_paths.each do |path|
- begin
- ret[path] = File.mtime(path)
- rescue Errno::ENOENT
- ret[path] = nil
- end
- end
- ret
- end
- last_times = get_file_times.call
- @thread = Thread.new do
- while true
- sleep interval
- break if @stopped.value
- begin
- new_times = get_file_times.call
- changed = false
- last_times.each do |path, old_time|
- new_time = new_times[path]
- if !new_time.nil? && new_time != old_time
- changed = true
- break
- end
- end
- reloader.call if changed
- rescue => exn
- Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn)
- end
- end
- end
- end
-
- def stop
- @stopped.make_true
- @thread.run # wakes it up if it's sleeping
- end
+ def self.factory(options={})
+ LaunchDarkly::Integrations::FileData.data_source(options)
  end
  end
  end
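As the diff shows, the deprecated `FileDataSource.factory` now simply delegates to `LaunchDarkly::Integrations::FileData.data_source`, which accepts the same options hash (`paths`, `auto_update`, `poll_interval`). A brief sketch of the new preferred usage; the file path and option values are placeholder assumptions:

```ruby
require "ldclient-rb"

# Sketch: load flag data from a local JSON/YAML file instead of connecting
# to LaunchDarkly for flags; events may still be sent unless disabled.
file_source = LaunchDarkly::Integrations::FileData.data_source(
  paths: ["./flags/local-flags.json"],  # placeholder path
  auto_update: true                     # reload when the file changes
)
config = LaunchDarkly::Config.new(data_source: file_source, send_events: false)
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)
```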