launchdarkly-server-sdk 6.2.5 → 6.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,117 @@
+require "ldclient-rb/config"
+require "ldclient-rb/expiring_cache"
+require "ldclient-rb/impl/repeating_task"
+require "ldclient-rb/interfaces"
+require "ldclient-rb/util"
+
+require "digest"
+
+module LaunchDarkly
+  module Impl
+    BigSegmentMembershipResult = Struct.new(:membership, :status)
+
+    class BigSegmentStoreManager
+      # use this as a singleton whenever a membership query returns nil; it's safe to reuse it because
+      # we will never modify the membership properties after they're queried
+      EMPTY_MEMBERSHIP = {}
+
+      def initialize(big_segments_config, logger)
+        @store = big_segments_config.store
+        @stale_after_millis = big_segments_config.stale_after * 1000
+        @status_provider = BigSegmentStoreStatusProviderImpl.new(-> { get_status })
+        @logger = logger
+        @last_status = nil
+
+        if !@store.nil?
+          @cache = ExpiringCache.new(big_segments_config.user_cache_size, big_segments_config.user_cache_time)
+          @poll_worker = RepeatingTask.new(big_segments_config.status_poll_interval, 0, -> { poll_store_and_update_status }, logger)
+          @poll_worker.start
+        end
+      end
+
+      attr_reader :status_provider
+
+      def stop
+        @poll_worker.stop if !@poll_worker.nil?
+        @store.stop if !@store.nil?
+      end
+
+      def get_user_membership(user_key)
+        return nil if !@store
+        membership = @cache[user_key]
+        if !membership
+          begin
+            membership = @store.get_membership(BigSegmentStoreManager.hash_for_user_key(user_key))
+            membership = EMPTY_MEMBERSHIP if membership.nil?
+            @cache[user_key] = membership
+          rescue => e
+            LaunchDarkly::Util.log_exception(@logger, "Big Segment store membership query returned error", e)
+            return BigSegmentMembershipResult.new(nil, BigSegmentsStatus::STORE_ERROR)
+          end
+        end
+        poll_store_and_update_status if !@last_status
+        if !@last_status.available
+          return BigSegmentMembershipResult.new(membership, BigSegmentsStatus::STORE_ERROR)
+        end
+        BigSegmentMembershipResult.new(membership, @last_status.stale ? BigSegmentsStatus::STALE : BigSegmentsStatus::HEALTHY)
+      end
+
+      def get_status
+        @last_status || poll_store_and_update_status
+      end
+
+      def poll_store_and_update_status
+        new_status = Interfaces::BigSegmentStoreStatus.new(false, false) # default to "unavailable" if we don't get a new status below
+        if !@store.nil?
+          begin
+            metadata = @store.get_metadata
+            new_status = Interfaces::BigSegmentStoreStatus.new(true, !metadata || is_stale(metadata.last_up_to_date))
+          rescue => e
+            LaunchDarkly::Util.log_exception(@logger, "Big Segment store status query returned error", e)
+          end
+        end
+        @last_status = new_status
+        @status_provider.update_status(new_status)
+
+        new_status
+      end
+
+      def is_stale(timestamp)
+        !timestamp || ((Impl::Util.current_time_millis - timestamp) >= @stale_after_millis)
+      end
+
+      def self.hash_for_user_key(user_key)
+        Digest::SHA256.base64digest(user_key)
+      end
+    end
+
+    #
+    # Default implementation of the BigSegmentStoreStatusProvider interface.
+    #
+    # There isn't much to this because the real implementation is in BigSegmentStoreManager - we pass in a lambda
+    # that allows us to get the current status from that class. Also, the standard Observer methods such as
+    # add_observer are provided for us because BigSegmentStoreStatusProvider mixes in Observer, so all we need
+    # to do to make notifications happen is to call the Observer methods "changed" and "notify_observers".
+    #
+    class BigSegmentStoreStatusProviderImpl
+      include LaunchDarkly::Interfaces::BigSegmentStoreStatusProvider
+
+      def initialize(status_fn)
+        @status_fn = status_fn
+        @last_status = nil
+      end
+
+      def status
+        @status_fn.call
+      end
+
+      def update_status(new_status)
+        if !@last_status || new_status != @last_status
+          @last_status = new_status
+          changed
+          notify_observers(new_status)
+        end
+      end
+    end
+  end
+end
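For orientation, and not part of the diff itself: the manager above only relies on its store object exposing get_metadata, get_membership, and stop, and it hashes user keys with hash_for_user_key before querying. A minimal sketch of that contract, with an illustrative in-memory store and made-up segment data, might look like this:

    # Illustrative sketch only (not an SDK class): the store contract BigSegmentStoreManager consumes.
    require "ldclient-rb"
    require "digest"

    class InMemoryBigSegmentStore
      def initialize(memberships)
        # memberships maps a hashed user key to { "segment-key.g1" => true/false }
        @memberships = memberships
      end

      def get_metadata
        # last_up_to_date is a millisecond timestamp; the manager compares it against stale_after
        LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new((Time.now.to_f * 1000).to_i)
      end

      def get_membership(user_hash)
        @memberships[user_hash] # nil means the user is in no Big Segments
      end

      def stop
      end
    end

    # The manager hashes user keys the same way as hash_for_user_key above:
    store = InMemoryBigSegmentStore.new({
      Digest::SHA256.base64digest("user-key") => { "beta-testers.g1" => true }
    })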
@@ -16,16 +16,28 @@ module LaunchDarkly
       # flag data - or nil if the flag is unknown or deleted
       # @param get_segment [Function] similar to `get_flag`, but is used to query a user segment.
       # @param logger [Logger] the client's logger
-      def initialize(get_flag, get_segment, logger)
+      def initialize(get_flag, get_segment, get_big_segments_membership, logger)
         @get_flag = get_flag
         @get_segment = get_segment
+        @get_big_segments_membership = get_big_segments_membership
         @logger = logger
       end

-      # Used internally to hold an evaluation result and the events that were generated from prerequisites. The
-      # `detail` property is an EvaluationDetail. The `events` property can be either an array of feature request
-      # events or nil.
-      EvalResult = Struct.new(:detail, :events)
+      # Used internally to hold an evaluation result and additional state that may be accumulated during an
+      # evaluation. It's simpler and a bit more efficient to represent these as mutable properties rather than
+      # trying to use a pure functional approach, and since we're not exposing this object to any application code
+      # or retaining it anywhere, we don't have to be quite as strict about immutability.
+      #
+      # The big_segments_status and big_segments_membership properties are not used by the caller; they are used
+      # during an evaluation to cache the result of any Big Segments query that we've done for this user, because
+      # we don't want to do multiple queries for the same user if multiple Big Segments are referenced in the same
+      # evaluation.
+      EvalResult = Struct.new(
+        :detail, # the EvaluationDetail representing the evaluation result
+        :events, # an array of evaluation events generated by prerequisites, or nil
+        :big_segments_status,
+        :big_segments_membership
+      )

       # Helper function used internally to construct an EvaluationDetail for an error result.
       def self.error_result(errorKind, value = nil)
@@ -42,30 +54,38 @@ module LaunchDarkly
       # evaluated; the caller is responsible for constructing the feature event for the top-level evaluation
       # @return [EvalResult] the evaluation result
       def evaluate(flag, user, event_factory)
+        result = EvalResult.new
         if user.nil? || user[:key].nil?
-          return EvalResult.new(Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED), [])
+          result.detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED)
+          return result
         end
-
-        # If the flag doesn't have any prerequisites (which most flags don't) then it cannot generate any feature
-        # request events for prerequisites and we can skip allocating an array.
-        if flag[:prerequisites] && !flag[:prerequisites].empty?
-          events = []
-        else
-          events = nil
+
+        detail = eval_internal(flag, user, result, event_factory)
+        if !result.big_segments_status.nil?
+          # If big_segments_status is non-nil at the end of the evaluation, it means a query was done at
+          # some point and we will want to include the status in the evaluation reason.
+          detail = EvaluationDetail.new(detail.value, detail.variation_index,
+            detail.reason.with_big_segments_status(result.big_segments_status))
         end
+        result.detail = detail
+        return result
+      end

-        detail = eval_internal(flag, user, events, event_factory)
-        return EvalResult.new(detail, events.nil? || events.empty? ? nil : events)
+      def self.make_big_segment_ref(segment) # method is visible for testing
+        # The format of Big Segment references is independent of what store implementation is being
+        # used; the store implementation receives only this string and does not know the details of
+        # the data model. The Relay Proxy will use the same format when writing to the store.
+        "#{segment[:key]}.g#{segment[:generation]}"
       end

       private

-      def eval_internal(flag, user, events, event_factory)
+      def eval_internal(flag, user, state, event_factory)
         if !flag[:on]
           return get_off_value(flag, EvaluationReason::off)
         end

-        prereq_failure_reason = check_prerequisites(flag, user, events, event_factory)
+        prereq_failure_reason = check_prerequisites(flag, user, state, event_factory)
         if !prereq_failure_reason.nil?
           return get_off_value(flag, prereq_failure_reason)
         end
@@ -83,7 +103,7 @@ module LaunchDarkly
         rules = flag[:rules] || []
         rules.each_index do |i|
           rule = rules[i]
-          if rule_match_user(rule, user)
+          if rule_match_user(rule, user, state)
             reason = rule[:_reason] # try to use cached reason for this rule
             reason = EvaluationReason::rule_match(i, rule[:id]) if reason.nil?
             return get_value_for_variation_or_rollout(flag, rule, user, reason)
@@ -98,7 +118,7 @@ module LaunchDarkly
         return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough)
       end

-      def check_prerequisites(flag, user, events, event_factory)
+      def check_prerequisites(flag, user, state, event_factory)
         (flag[:prerequisites] || []).each do |prerequisite|
           prereq_ok = true
           prereq_key = prerequisite[:key]
@@ -109,14 +129,15 @@ module LaunchDarkly
             prereq_ok = false
           else
             begin
-              prereq_res = eval_internal(prereq_flag, user, events, event_factory)
+              prereq_res = eval_internal(prereq_flag, user, state, event_factory)
               # Note that if the prerequisite flag is off, we don't consider it a match no matter what its
               # off variation was. But we still need to evaluate it in order to generate an event.
               if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation]
                 prereq_ok = false
               end
               event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag)
-              events.push(event)
+              state.events = [] if state.events.nil?
+              state.events.push(event)
             rescue => exn
               Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn)
               prereq_ok = false
@@ -130,23 +151,23 @@ module LaunchDarkly
         nil
       end

-      def rule_match_user(rule, user)
+      def rule_match_user(rule, user, state)
         return false if !rule[:clauses]

         (rule[:clauses] || []).each do |clause|
-          return false if !clause_match_user(clause, user)
+          return false if !clause_match_user(clause, user, state)
         end

         return true
       end

-      def clause_match_user(clause, user)
+      def clause_match_user(clause, user, state)
         # In the case of a segment match operator, we check if the user is in any of the segments,
         # and possibly negate
         if clause[:op].to_sym == :segmentMatch
           result = (clause[:values] || []).any? { |v|
             segment = @get_segment.call(v)
-            !segment.nil? && segment_match_user(segment, user)
+            !segment.nil? && segment_match_user(segment, user, state)
           }
           clause[:negate] ? !result : result
         else
@@ -168,11 +189,42 @@ module LaunchDarkly
           clause[:negate] ? !result : result
       end

-      def segment_match_user(segment, user)
+      def segment_match_user(segment, user, state)
         return false unless user[:key]
+        segment[:unbounded] ? big_segment_match_user(segment, user, state) : simple_segment_match_user(segment, user, true)
+      end

-        return true if segment[:included].include?(user[:key])
-        return false if segment[:excluded].include?(user[:key])
+      def big_segment_match_user(segment, user, state)
+        if !segment[:generation]
+          # Big segment queries can only be done if the generation is known. If it's unset,
+          # that probably means the data store was populated by an older SDK that doesn't know
+          # about the generation property and therefore dropped it from the JSON data. We'll treat
+          # that as a "not configured" condition.
+          state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED
+          return false
+        end
+        if !state.big_segments_status
+          result = @get_big_segments_membership.nil? ? nil : @get_big_segments_membership.call(user[:key])
+          if result
+            state.big_segments_membership = result.membership
+            state.big_segments_status = result.status
+          else
+            state.big_segments_membership = nil
+            state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED
+          end
+        end
+        segment_ref = Evaluator.make_big_segment_ref(segment)
+        membership = state.big_segments_membership
+        included = membership.nil? ? nil : membership[segment_ref]
+        return included if !included.nil?
+        simple_segment_match_user(segment, user, false)
+      end
+
+      def simple_segment_match_user(segment, user, use_includes_and_excludes)
+        if use_includes_and_excludes
+          return true if segment[:included].include?(user[:key])
+          return false if segment[:excluded].include?(user[:key])
+        end

         (segment[:rules] || []).each do |r|
           return true if segment_rule_match_user(r, user, segment[:key], segment[:salt])
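To make the membership lookup above concrete, here is a small worked example (values are illustrative, not from the diff) of the reference string produced by make_big_segment_ref and of how a cached membership hash answers the inclusion question before any fallback to the segment's own rules:

    # Illustrative values only.
    segment = { key: "beta-users", generation: 2, unbounded: true }
    segment_ref = "#{segment[:key]}.g#{segment[:generation]}"   # => "beta-users.g2"

    membership = { "beta-users.g2" => true, "retired-segment.g1" => false }
    membership[segment_ref]
    # true  => user is explicitly included in the Big Segment
    # false => user is explicitly excluded
    # nil   => fall through to simple_segment_match_user(segment, user, false)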
@@ -4,10 +4,7 @@ module LaunchDarkly
   module Impl
     module Integrations
       module DynamoDB
-        #
-        # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper.
-        #
-        class DynamoDBFeatureStoreCore
+        class DynamoDBStoreImplBase
           begin
             require "aws-sdk-dynamodb"
             AWS_SDK_ENABLED = true
@@ -19,29 +16,50 @@ module LaunchDarkly
               AWS_SDK_ENABLED = false
             end
           end
-
+
           PARTITION_KEY = "namespace"
           SORT_KEY = "key"

-          VERSION_ATTRIBUTE = "version"
-          ITEM_JSON_ATTRIBUTE = "item"
-
           def initialize(table_name, opts)
             if !AWS_SDK_ENABLED
-              raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem")
+              raise RuntimeError.new("can't use #{description} without the aws-sdk or aws-sdk-dynamodb gem")
             end
-
+
             @table_name = table_name
-            @prefix = opts[:prefix]
+            @prefix = opts[:prefix] ? (opts[:prefix] + ":") : ""
             @logger = opts[:logger] || Config.default_logger
-
+
             if !opts[:existing_client].nil?
               @client = opts[:existing_client]
             else
               @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {})
             end
+
+            @logger.info("#{description}: using DynamoDB table \"#{table_name}\"")
+          end
+
+          def stop
+            # AWS client doesn't seem to have a close method
+          end

-            @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"")
+          protected def description
+            "DynamoDB"
+          end
+        end
+
+        #
+        # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper.
+        #
+        class DynamoDBFeatureStoreCore < DynamoDBStoreImplBase
+          VERSION_ATTRIBUTE = "version"
+          ITEM_JSON_ATTRIBUTE = "item"
+
+          def initialize(table_name, opts)
+            super(table_name, opts)
+          end
+
+          def description
+            "DynamoDBFeatureStore"
           end

           def init_internal(all_data)
@@ -124,14 +142,10 @@ module LaunchDarkly
             !resp.item.nil? && resp.item.length > 0
           end

-          def stop
-            # AWS client doesn't seem to have a close method
-          end
-
           private

           def prefixed_namespace(base_str)
-            (@prefix.nil? || @prefix == "") ? base_str : "#{@prefix}:#{base_str}"
+            @prefix + base_str
           end

           def namespace_for_kind(kind)
@@ -208,6 +222,56 @@ module LaunchDarkly
           end
         end

+        class DynamoDBBigSegmentStore < DynamoDBStoreImplBase
+          KEY_METADATA = 'big_segments_metadata';
+          KEY_USER_DATA = 'big_segments_user';
+          ATTR_SYNC_TIME = 'synchronizedOn';
+          ATTR_INCLUDED = 'included';
+          ATTR_EXCLUDED = 'excluded';
+
+          def initialize(table_name, opts)
+            super(table_name, opts)
+          end
+
+          def description
+            "DynamoDBBigSegmentStore"
+          end
+
+          def get_metadata
+            key = @prefix + KEY_METADATA
+            data = @client.get_item(
+              table_name: @table_name,
+              key: {
+                PARTITION_KEY => key,
+                SORT_KEY => key
+              }
+            )
+            timestamp = data.item && data.item[ATTR_SYNC_TIME] ?
+              data.item[ATTR_SYNC_TIME] : nil
+            LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(timestamp)
+          end
+
+          def get_membership(user_hash)
+            data = @client.get_item(
+              table_name: @table_name,
+              key: {
+                PARTITION_KEY => @prefix + KEY_USER_DATA,
+                SORT_KEY => user_hash
+              })
+            return nil if !data.item
+            excluded_refs = data.item[ATTR_EXCLUDED] || []
+            included_refs = data.item[ATTR_INCLUDED] || []
+            if excluded_refs.empty? && included_refs.empty?
+              nil
+            else
+              membership = {}
+              excluded_refs.each { |ref| membership[ref] = false }
+              included_refs.each { |ref| membership[ref] = true }
+              membership
+            end
+          end
+        end
+
         class DynamoDBUtil
           #
           # Calls client.batch_write_item as many times as necessary to submit all of the given requests.
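For reference, the read path in DynamoDBBigSegmentStore above implies table items shaped roughly as follows; the table name, prefix, and segment references here are hypothetical, and in practice the Relay Proxy writes these items rather than application code:

    # Illustrative sketch of items that get_metadata/get_membership above would read.
    require "aws-sdk-dynamodb"
    require "digest"

    client = Aws::DynamoDB::Client.new
    table  = "my-ld-table"     # hypothetical table name
    prefix = "ld-prefix:"      # matches opts[:prefix] = "ld-prefix"

    # Metadata item: when the store was last synchronized (milliseconds).
    client.put_item(table_name: table, item: {
      "namespace"      => prefix + "big_segments_metadata",
      "key"            => prefix + "big_segments_metadata",
      "synchronizedOn" => 1_700_000_000_000
    })

    # Per-user item, keyed by the SHA-256/base64 hash of the user key.
    client.put_item(table_name: table, item: {
      "namespace" => prefix + "big_segments_user",
      "key"       => Digest::SHA256.base64digest("user-key"),
      "included"  => ["beta-users.g2"],
      "excluded"  => ["retired-segment.g1"]
    })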
@@ -0,0 +1,212 @@
+require 'ldclient-rb/in_memory_store'
+require 'ldclient-rb/util'
+
+require 'concurrent/atomics'
+require 'json'
+require 'yaml'
+require 'pathname'
+
+module LaunchDarkly
+  module Impl
+    module Integrations
+      class FileDataSourceImpl
+        # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the
+        # file data source or who don't need auto-updating, we only enable auto-update if the 'listen'
+        # gem has been provided by the host app.
+        @@have_listen = false
+        begin
+          require 'listen'
+          @@have_listen = true
+        rescue LoadError
+        end
+
+        def initialize(feature_store, logger, options={})
+          @feature_store = feature_store
+          @logger = logger
+          @paths = options[:paths] || []
+          if @paths.is_a? String
+            @paths = [ @paths ]
+          end
+          @auto_update = options[:auto_update]
+          if @auto_update && @@have_listen && !options[:force_polling] # force_polling is used only for tests
+            # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449).
+            # Therefore, on that platform we'll fall back to file polling instead.
+            if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.")
+              @use_listen = false
+            else
+              @use_listen = true
+            end
+          end
+          @poll_interval = options[:poll_interval] || 1
+          @initialized = Concurrent::AtomicBoolean.new(false)
+          @ready = Concurrent::Event.new
+        end
+
+        def initialized?
+          @initialized.value
+        end
+
+        def start
+          ready = Concurrent::Event.new
+
+          # We will return immediately regardless of whether the file load succeeded or failed -
+          # the difference can be detected by checking "initialized?"
+          ready.set
+
+          load_all
+
+          if @auto_update
+            # If we're going to watch files, then the start event will be set the first time we get
+            # a successful load.
+            @listener = start_listener
+          end
+
+          ready
+        end
+
+        def stop
+          @listener.stop if !@listener.nil?
+        end
+
+        private
+
+        def load_all
+          all_data = {
+            FEATURES => {},
+            SEGMENTS => {}
+          }
+          @paths.each do |path|
+            begin
+              load_file(path, all_data)
+            rescue => exn
+              LaunchDarkly::Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn)
+              return
+            end
+          end
+          @feature_store.init(all_data)
+          @initialized.make_true
+        end
+
+        def load_file(path, all_data)
+          parsed = parse_content(IO.read(path))
+          (parsed[:flags] || {}).each do |key, flag|
+            add_item(all_data, FEATURES, flag)
+          end
+          (parsed[:flagValues] || {}).each do |key, value|
+            add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value))
+          end
+          (parsed[:segments] || {}).each do |key, segment|
+            add_item(all_data, SEGMENTS, segment)
+          end
+        end
+
+        def parse_content(content)
+          # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while
+          # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least
+          # for all the samples of actual flag data that we've tested).
+          symbolize_all_keys(YAML.safe_load(content))
+        end
+
+        def symbolize_all_keys(value)
+          # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and
+          # the SDK expects all objects to be formatted that way.
+          if value.is_a?(Hash)
+            value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h
+          elsif value.is_a?(Array)
+            value.map{ |v| symbolize_all_keys(v) }
+          else
+            value
+          end
+        end
+
+        def add_item(all_data, kind, item)
+          items = all_data[kind]
+          raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash
+          key = item[:key].to_sym
+          if !items[key].nil?
+            raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once"
+          end
+          items[key] = item
+        end
+
+        def make_flag_with_value(key, value)
+          {
+            key: key,
+            on: true,
+            fallthrough: { variation: 0 },
+            variations: [ value ]
+          }
+        end
+
+        def start_listener
+          resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s }
+          if @use_listen
+            start_listener_with_listen_gem(resolved_paths)
+          else
+            FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger)
+          end
+        end
+
+        def start_listener_with_listen_gem(resolved_paths)
+          path_set = resolved_paths.to_set
+          dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq
+          opts = { latency: @poll_interval }
+          l = Listen.to(*dir_paths, opts) do |modified, added, removed|
+            paths = modified + added + removed
+            if paths.any? { |p| path_set.include?(p) }
+              load_all
+            end
+          end
+          l.start
+          l
+        end
+
+        #
+        # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available.
+        #
+        class FileDataSourcePoller
+          def initialize(resolved_paths, interval, reloader, logger)
+            @stopped = Concurrent::AtomicBoolean.new(false)
+            get_file_times = Proc.new do
+              ret = {}
+              resolved_paths.each do |path|
+                begin
+                  ret[path] = File.mtime(path)
+                rescue Errno::ENOENT
+                  ret[path] = nil
+                end
+              end
+              ret
+            end
+            last_times = get_file_times.call
+            @thread = Thread.new do
+              while true
+                sleep interval
+                break if @stopped.value
+                begin
+                  new_times = get_file_times.call
+                  changed = false
+                  last_times.each do |path, old_time|
+                    new_time = new_times[path]
+                    if !new_time.nil? && new_time != old_time
+                      changed = true
+                      break
+                    end
+                  end
+                  reloader.call if changed
+                rescue => exn
+                  LaunchDarkly::Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn)
+                end
+              end
+            end
+          end
+
+          def stop
+            @stopped.make_true
+            @thread.run # wakes it up if it's sleeping
+          end
+        end
+      end
+    end
+  end
+end
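As a usage note (illustrative, not taken from the diff): load_file above accepts three top-level keys - flags, flagValues, and segments - and parse_content runs both YAML and JSON through the same YAML parser before symbolizing keys. A minimal data file in that shape, and the parse step it goes through, might look like this:

    # Illustrative flag data file content in the shape load_file/parse_content expect; keys and values are made up.
    require "yaml"

    content = <<~DATA
      flagValues:
        my-string-flag: "value-1"   # shorthand; becomes a full flag via make_flag_with_value
      flags:
        full-flag:
          key: full-flag
          on: true
          fallthrough: { variation: 0 }
          variations: [ true, false ]
      segments:
        my-segment:
          key: my-segment
          included: [ "user-a" ]
    DATA

    parsed = YAML.safe_load(content)   # the data source then symbolizes keys recursively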