launchdarkly-server-sdk 8.10.2 → 8.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: a997ea769f00a058e79a185c3f41aa4d6e6ab05a45e09214678619e088430f53
4
- data.tar.gz: 5b2f8e637dcbf4d85f3db656a4cf02fcd665f78933447f8e0182b71faa151d97
3
+ metadata.gz: 70e4c89fb374c852caa88d772317ded544a0954755461a18c039bbc05cbb23f3
4
+ data.tar.gz: 2c7991d3cbeecb23698c1053642d69d1c894a3c4b9bb40fc4db968bb1aea1f35
5
5
  SHA512:
6
- metadata.gz: caddd7ba23f0ac71f12416a63e22b9238fd8d0f2ac080aa264637850bfbc9ac5c761b406befd68e5bc47605e2e9e6c0aeff84a66ada0f241bdd11593ca2eb2a7
7
- data.tar.gz: c152fb05a1ea42e75870377295d377abe77b9946545029d59828cdd342b0454a4420eb4dfd20484e57ecd6b13f2e80a0bc5837f2900aed35ae541107e758a7f2
6
+ metadata.gz: b3b250c43337773b9795c1f358872985662e5a050b9bed23d951c797afa06aa7542f60ecc2b7a3afc09a664835e40779675c2d45d96705d8b8a02cff0f269b0b
7
+ data.tar.gz: 6987659ccf43b8a5c1144a34f2a766ceb8d004f8d4f259e2193307d852c25d7ade2dc73aebe5deb8888b971c943c2c605d192e0ca96989bb7701d5ed1b6e1719
@@ -45,6 +45,7 @@ module LaunchDarkly
45
45
  # @option opts [String] :payload_filter_key See {#payload_filter_key}
46
46
  # @option opts [Boolean] :omit_anonymous_contexts See {#omit_anonymous_contexts}
47
47
  # @option hooks [Array<Interfaces::Hooks::Hook>]
48
+ # @option plugins [Array<Interfaces::Plugins::Plugin>]
48
49
  #
49
50
  def initialize(opts = {})
50
51
  @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/")
@@ -79,6 +80,7 @@ module LaunchDarkly
79
80
  @application = LaunchDarkly::Impl::Util.validate_application_info(opts[:application] || {}, @logger)
80
81
  @payload_filter_key = LaunchDarkly::Impl::Util.validate_payload_filter_key(opts[:payload_filter_key] , @logger)
81
82
  @hooks = (opts[:hooks] || []).keep_if { |hook| hook.is_a? Interfaces::Hooks::Hook }
83
+ @plugins = (opts[:plugins] || []).keep_if { |plugin| plugin.is_a? Interfaces::Plugins::Plugin }
82
84
  @omit_anonymous_contexts = opts.has_key?(:omit_anonymous_contexts) && opts[:omit_anonymous_contexts]
83
85
  @data_source_update_sink = nil
84
86
  @instance_id = nil
@@ -412,6 +414,14 @@ module LaunchDarkly
412
414
  #
413
415
  attr_reader :hooks
414
416
 
417
+ #
418
+ # Initial set of plugins for the client.
419
+ #
420
+ # Plugins provide an interface which allows for initialization, access to credentials, and hook registration
421
+ # in a single interface.
422
+ #
423
+ attr_reader :plugins
424
+
415
425
  #
416
426
  # Sets whether anonymous contexts should be omitted from index and identify events.
417
427
  #
@@ -0,0 +1,153 @@
1
+ require "observer"
2
+
3
+ module LaunchDarkly
4
+ module Interfaces
5
+ module BigSegmentStore
6
+ #
7
+ # Returns information about the overall state of the store. This method will be called only
8
+ # when the SDK needs the latest state, so it should not be cached.
9
+ #
10
+ # @return [BigSegmentStoreMetadata]
11
+ #
12
+ def get_metadata
13
+ end
14
+
15
+ #
16
+ # Queries the store for a snapshot of the current segment state for a specific context.
17
+ #
18
+ # The context_hash is a base64-encoded string produced by hashing the context key as defined by
19
+ # the Big Segments specification; the store implementation does not need to know the details
20
+ # of how this is done, because it deals only with already-hashed keys, but the string can be
21
+ # assumed to only contain characters that are valid in base64.
22
+ #
23
+ # The return value should be either a Hash, or nil if the context is not referenced in any big
24
+ # segments. Each key in the Hash is a "segment reference", which is how segments are
25
+ # identified in Big Segment data. This string is not identical to the segment key-- the SDK
26
+ # will add other information. The store implementation should not be concerned with the
27
+ # format of the string. Each value in the Hash is true if the context is explicitly included in
28
+ # the segment, false if the context is explicitly excluded from the segment-- and is not also
29
+ # explicitly included (that is, if both an include and an exclude existed in the data, the
30
+ # include would take precedence). If the context's status in a particular segment is undefined,
31
+ # there should be no key or value for that segment.
32
+ #
33
+ # This Hash may be cached by the SDK, so it should not be modified after it is created. It
34
+ # is a snapshot of the segment membership state at one point in time.
35
+ #
36
+ # @param context_hash [String]
37
+ # @return [Hash] true/false values for Big Segments that reference this context
38
+ #
39
+ def get_membership(context_hash)
40
+ end
41
+
42
+ #
43
+ # Performs any necessary cleanup to shut down the store when the client is being shut down.
44
+ #
45
+ # @return [void]
46
+ #
47
+ def stop
48
+ end
49
+ end
50
+
51
+ #
52
+ # Values returned by {BigSegmentStore#get_metadata}.
53
+ #
54
+ class BigSegmentStoreMetadata
55
+ def initialize(last_up_to_date)
56
+ @last_up_to_date = last_up_to_date
57
+ end
58
+
59
+ # The Unix epoch millisecond timestamp of the last update to the {BigSegmentStore}. It is
60
+ # nil if the store has never been updated.
61
+ #
62
+ # @return [Integer|nil]
63
+ attr_reader :last_up_to_date
64
+ end
65
+
66
+ #
67
+ # Information about the status of a Big Segment store, provided by {BigSegmentStoreStatusProvider}.
68
+ #
69
+ # Big Segments are a specific type of segments. For more information, read the LaunchDarkly
70
+ # documentation: https://docs.launchdarkly.com/home/users/big-segments
71
+ #
72
+ class BigSegmentStoreStatus
73
+ def initialize(available, stale)
74
+ @available = available
75
+ @stale = stale
76
+ end
77
+
78
+ # True if the Big Segment store is able to respond to queries, so that the SDK can evaluate
79
+ # whether a context is in a segment or not.
80
+ #
81
+ # If this property is false, the store is not able to make queries (for instance, it may not have
82
+ # a valid database connection). In this case, the SDK will treat any reference to a Big Segment
83
+ # as if no contexts are included in that segment. Also, the {EvaluationReason} associated
84
+ # with any flag evaluation that references a Big Segment when the store is not available will
85
+ # have a `big_segments_status` of `STORE_ERROR`.
86
+ #
87
+ # @return [Boolean]
88
+ attr_reader :available
89
+
90
+ # True if the Big Segment store is available, but has not been updated within the amount of time
91
+ # specified by {BigSegmentsConfig#stale_after}.
92
+ #
93
+ # This may indicate that the LaunchDarkly Relay Proxy, which populates the store, has stopped
94
+ # running or has become unable to receive fresh data from LaunchDarkly. Any feature flag
95
+ # evaluations that reference a Big Segment will be using the last known data, which may be out
96
+ # of date. Also, the {EvaluationReason} associated with those evaluations will have a
97
+ # `big_segments_status` of `STALE`.
98
+ #
99
+ # @return [Boolean]
100
+ attr_reader :stale
101
+
102
+ def ==(other)
103
+ self.available == other.available && self.stale == other.stale
104
+ end
105
+ end
106
+
107
+ #
108
+ # An interface for querying the status of a Big Segment store.
109
+ #
110
+ # The Big Segment store is the component that receives information about Big Segments, normally
111
+ # from a database populated by the LaunchDarkly Relay Proxy. Big Segments are a specific type
112
+ # of segments. For more information, read the LaunchDarkly documentation:
113
+ # https://docs.launchdarkly.com/home/users/big-segments
114
+ #
115
+ # An implementation of this interface is returned by {LDClient#big_segment_store_status_provider}.
116
+ # Application code never needs to implement this interface.
117
+ #
118
+ # There are two ways to interact with the status. One is to simply get the current status; if its
119
+ # `available` property is true, then the SDK is able to evaluate context membership in Big Segments,
120
+ # and the `stale` property indicates whether the data might be out of date.
121
+ #
122
+ # The other way is to subscribe to status change notifications. Applications may wish to know if
123
+ # there is an outage in the Big Segment store, or if it has become stale (the Relay Proxy has
124
+ # stopped updating it with new data), since then flag evaluations that reference a Big Segment
125
+ # might return incorrect values. To allow finding out about status changes as soon as possible,
126
+ # `BigSegmentStoreStatusProvider` mixes in Ruby's
127
+ # [Observable](https://docs.ruby-lang.org/en/2.5.0/Observable.html) module to provide standard
128
+ # methods such as `add_observer`. Observers will be called with a new {BigSegmentStoreStatus}
129
+ # value whenever the status changes.
130
+ #
131
+ # @example Getting the current status
132
+ # status = client.big_segment_store_status_provider.status
133
+ #
134
+ # @example Subscribing to status notifications
135
+ # client.big_segment_store_status_provider.add_observer(self, :big_segments_status_changed)
136
+ #
137
+ # def big_segments_status_changed(new_status)
138
+ # puts "Big segment store status is now: #{new_status}"
139
+ # end
140
+ #
141
+ module BigSegmentStoreStatusProvider
142
+ include Observable
143
+ #
144
+ # Gets the current status of the store, if known.
145
+ #
146
+ # @return [BigSegmentStoreStatus] the status, or nil if the SDK has not yet queried the Big
147
+ # Segment store status
148
+ #
149
+ def status
150
+ end
151
+ end
152
+ end
153
+ end
@@ -0,0 +1,265 @@
1
+ module LaunchDarkly
2
+ module Interfaces
3
+ #
4
+ # Mixin that defines the required methods of a data source implementation. This is the
5
+ # component that delivers feature flag data from LaunchDarkly to the LDClient by putting
6
+ # the data in the {FeatureStore}. It is expected to run concurrently on its own thread.
7
+ #
8
+ # The client has its own standard implementation, which uses either a streaming connection or
9
+ # polling depending on your configuration. Normally you will not need to use another one
10
+ # except for testing purposes. Two such test fixtures are {LaunchDarkly::Integrations::FileData}
11
+ # and {LaunchDarkly::Integrations::TestData}.
12
+ #
13
+ module DataSource
14
+ #
15
+ # Checks whether the data source has finished initializing. Initialization is considered done
16
+ # once it has received one complete data set from LaunchDarkly.
17
+ #
18
+ # @return [Boolean] true if initialization is complete
19
+ #
20
+ def initialized?
21
+ end
22
+
23
+ #
24
+ # Puts the data source into an active state. Normally this means it will make its first
25
+ # connection attempt to LaunchDarkly. If `start` has already been called, calling it again
26
+ # should simply return the same value as the first call.
27
+ #
28
+ # @return [Concurrent::Event] an Event which will be set once initialization is complete
29
+ #
30
+ def start
31
+ end
32
+
33
+ #
34
+ # Puts the data source into an inactive state and releases all of its resources.
35
+ # This state should be considered permanent (`start` does not have to work after `stop`).
36
+ #
37
+ def stop
38
+ end
39
+
40
+ #
41
+ # An interface for querying the status of the SDK's data source. The data
42
+ # source is the component that receives updates to feature flag data;
43
+ # normally this is a streaming connection, but it could be polling or
44
+ # file data depending on your configuration.
45
+ #
46
+ # An implementation of this interface is returned by
47
+ # {LaunchDarkly::LDClient#data_source_status_provider}. Application code
48
+ # never needs to implement this interface.
49
+ #
50
+ module StatusProvider
51
+ #
52
+ # Returns the current status of the data source.
53
+ #
54
+ # All of the built-in data source implementations are guaranteed to update this status whenever they
55
+ # successfully initialize, encounter an error, or recover after an error.
56
+ #
57
+ # For a custom data source implementation, it is the responsibility of the data source to push
58
+ # status updates to the SDK; if it does not do so, the status will always be reported as
59
+ # {Status::INITIALIZING}.
60
+ #
61
+ # @return [Status]
62
+ #
63
+ def status
64
+ end
65
+
66
+ #
67
+ # Subscribes for notifications of status changes.
68
+ #
69
+ # The listener will be notified whenever any property of the status has changed. See {Status} for an
70
+ # explanation of the meaning of each property and what could cause it to change.
71
+ #
72
+ # Notifications will be dispatched on a worker thread. It is the listener's responsibility to return as soon as
73
+ # possible so as not to block subsequent notifications.
74
+ #
75
+ # @param listener [#update] the listener to add
76
+ #
77
+ def add_listener(listener) end
78
+
79
+ #
80
+ # Unsubscribes from notifications of status changes.
81
+ #
82
+ def remove_listener(listener) end
83
+ end
84
+
85
+ #
86
+ # Interface that a data source implementation will use to push data into
87
+ # the SDK.
88
+ #
89
+ # The data source interacts with this object, rather than manipulating
90
+ # the data store directly, so that the SDK can perform any other
91
+ # necessary operations that must happen when data is updated.
92
+ #
93
+ module UpdateSink
94
+ #
95
+ # Initializes (or re-initializes) the store with the specified set of entities. Any
96
+ # existing entries will be removed. Implementations can assume that this data set is up to
97
+ # date-- there is no need to perform individual version comparisons between the existing
98
+ # objects and the supplied features.
99
+ #
100
+ # If possible, the store should update the entire data set atomically. If that is not possible,
101
+ # it should iterate through the outer hash and then the inner hash using the existing iteration
102
+ # order of those hashes (the SDK will ensure that the items were inserted into the hashes in
103
+ # the correct order), storing each item, and then delete any leftover items at the very end.
104
+ #
105
+ # @param all_data [Hash] a hash where each key is one of the data kind objects, and each
106
+ # value is in turn a hash of string keys to entities
107
+ # @return [void]
108
+ #
109
+ def init(all_data) end
110
+
111
+ #
112
+ # Attempt to add an entity, or update an existing entity with the same key. An update
113
+ # should only succeed if the new item's `:version` is greater than the old one;
114
+ # otherwise, the method should do nothing.
115
+ #
116
+ # @param kind [Object] the kind of entity to add or update
117
+ # @param item [Hash] the entity to add or update
118
+ # @return [void]
119
+ #
120
+ def upsert(kind, item) end
121
+
122
+ #
123
+ # Attempt to delete an entity if it exists. Deletion should only succeed if the
124
+ # `version` parameter is greater than the existing entity's `:version`; otherwise, the
125
+ # method should do nothing.
126
+ #
127
+ # @param kind [Object] the kind of entity to delete
128
+ # @param key [String] the unique key of the entity
129
+ # @param version [Integer] the entity must have a lower version than this to be deleted
130
+ # @return [void]
131
+ #
132
+ def delete(kind, key, version) end
133
+
134
+ #
135
+ # Informs the SDK of a change in the data source's status.
136
+ #
137
+ # Data source implementations should use this method if they have any
138
+ # concept of being in a valid state, a temporarily disconnected state,
139
+ # or a permanently stopped state.
140
+ #
141
+ # If `new_state` is different from the previous state, and/or
142
+ # `new_error` is non-null, the SDK will start returning the new status
143
+ # (adding a timestamp for the change) from {StatusProvider#status}, and
144
+ # will trigger status change events to any registered listeners.
145
+ #
146
+ # A special case is that if `new_state` is {Status::INTERRUPTED}, but the
147
+ # previous state was {Status::INITIALIZING}, the state will remain at
148
+ # {Status::INITIALIZING} because {Status::INTERRUPTED} is only meaningful
149
+ # after a successful startup.
150
+ #
151
+ # @param new_state [Symbol]
152
+ # @param new_error [ErrorInfo, nil]
153
+ #
154
+ def update_status(new_state, new_error) end
155
+ end
156
+
157
+ #
158
+ # Information about the data source's status and about the last status change.
159
+ #
160
+ class Status
161
+ #
162
+ # The initial state of the data source when the SDK is being initialized.
163
+ #
164
+ # If it encounters an error that requires it to retry initialization, the state will remain at
165
+ # {INITIALIZING} until it either succeeds and becomes {VALID}, or permanently fails and
166
+ # becomes {OFF}.
167
+ #
168
+
169
+ INITIALIZING = :initializing
170
+
171
+ #
172
+ # Indicates that the data source is currently operational and has not had any problems since the
173
+ # last time it received data.
174
+ #
175
+ # In streaming mode, this means that there is currently an open stream connection and that at least
176
+ # one initial message has been received on the stream. In polling mode, it means that the last poll
177
+ # request succeeded.
178
+ #
179
+ VALID = :valid
180
+
181
+ #
182
+ # Indicates that the data source encountered an error that it will attempt to recover from.
183
+ #
184
+ # In streaming mode, this means that the stream connection failed, or had to be dropped due to some
185
+ # other error, and will be retried after a backoff delay. In polling mode, it means that the last poll
186
+ # request failed, and a new poll request will be made after the configured polling interval.
187
+ #
188
+ INTERRUPTED = :interrupted
189
+
190
+ #
191
+ # Indicates that the data source has been permanently shut down.
192
+ #
193
+ # This could be because it encountered an unrecoverable error (for instance, the LaunchDarkly service
194
+ # rejected the SDK key; an invalid SDK key will never become valid), or because the SDK client was
195
+ # explicitly shut down.
196
+ #
197
+ OFF = :off
198
+
199
+ # @return [Symbol] The basic state
200
+ attr_reader :state
201
+ # @return [Time] timestamp of the last state transition
202
+ attr_reader :state_since
203
+ # @return [ErrorInfo, nil] a description of the last error or nil if no errors have occurred since startup
204
+ attr_reader :last_error
205
+
206
+ def initialize(state, state_since, last_error)
207
+ @state = state
208
+ @state_since = state_since
209
+ @last_error = last_error
210
+ end
211
+ end
212
+
213
+ #
214
+ # A description of an error condition that the data source encountered.
215
+ #
216
+ class ErrorInfo
217
+ #
218
+ # An unexpected error, such as an uncaught exception, further described by {#message}.
219
+ #
220
+ UNKNOWN = :unknown
221
+
222
+ #
223
+ # An I/O error such as a dropped connection.
224
+ #
225
+ NETWORK_ERROR = :network_error
226
+
227
+ #
228
+ # The LaunchDarkly service returned an HTTP response with an error status, available with
229
+ # {#status_code}.
230
+ #
231
+ ERROR_RESPONSE = :error_response
232
+
233
+ #
234
+ # The SDK received malformed data from the LaunchDarkly service.
235
+ #
236
+ INVALID_DATA = :invalid_data
237
+
238
+ #
239
+ # The data source itself is working, but when it tried to put an update into the data store, the data
240
+ # store failed (so the SDK may not have the latest data).
241
+ #
242
+ # Data source implementations do not need to report this kind of error; it will be automatically
243
+ # reported by the SDK when exceptions are detected.
244
+ #
245
+ STORE_ERROR = :store_error
246
+
247
+ # @return [Symbol] the general category of the error
248
+ attr_reader :kind
249
+ # @return [Integer] an HTTP status or zero
250
+ attr_reader :status_code
251
+ # @return [String, nil] message an error message if applicable, or nil
252
+ attr_reader :message
253
+ # @return [Time] time the error timestamp
254
+ attr_reader :time
255
+
256
+ def initialize(kind, status_code, message, time)
257
+ @kind = kind
258
+ @status_code = status_code
259
+ @message = message
260
+ @time = time
261
+ end
262
+ end
263
+ end
264
+ end
265
+ end
@@ -0,0 +1,113 @@
1
+ module LaunchDarkly
2
+ module Interfaces
3
+ module DataStore
4
+ #
5
+ # An interface for querying the status of a persistent data store.
6
+ #
7
+ # An implementation of this interface is returned by {LaunchDarkly::LDClient#data_store_status_provider}.
8
+ # Application code should not implement this interface.
9
+ #
10
+ module StatusProvider
11
+ #
12
+ # Returns the current status of the store.
13
+ #
14
+ # This is only meaningful for persistent stores, or any custom data store implementation that makes use of
15
+ # the status reporting mechanism provided by the SDK. For the default in-memory store, the status will always
16
+ # be reported as "available".
17
+ #
18
+ # @return [Status] the latest status
19
+ #
20
+ def status
21
+ end
22
+
23
+ #
24
+ # Indicates whether the current data store implementation supports status monitoring.
25
+ #
26
+ # This is normally true for all persistent data stores, and false for the default in-memory store. A true value
27
+ # means that any listeners added with {#add_listener} can expect to be notified if there is any error in
28
+ # storing data, and then notified again when the error condition is resolved. A false value means that the
29
+ # status is not meaningful and listeners should not expect to be notified.
30
+ #
31
+ # @return [Boolean] true if status monitoring is enabled
32
+ #
33
+ def monitoring_enabled?
34
+ end
35
+
36
+ #
37
+ # Subscribes for notifications of status changes.
38
+ #
39
+ # Applications may wish to know if there is an outage in a persistent data store, since that could mean that
40
+ # flag evaluations are unable to get the flag data from the store (unless it is currently cached) and therefore
41
+ # might return default values.
42
+ #
43
+ # If the SDK receives an exception while trying to query or update the data store, then it notifies listeners
44
+ # that the store appears to be offline ({Status#available} is false) and begins polling the store
45
+ # at intervals until a query succeeds. Once it succeeds, it notifies listeners again with {Status#available}
46
+ # set to true.
47
+ #
48
+ # This method has no effect if the data store implementation does not support status tracking, such as if you
49
+ # are using the default in-memory store rather than a persistent store.
50
+ #
51
+ # @param listener [#update] the listener to add
52
+ #
53
+ def add_listener(listener)
54
+ end
55
+
56
+ #
57
+ # Unsubscribes from notifications of status changes.
58
+ #
59
+ # This method has no effect if the data store implementation does not support status tracking, such as if you
60
+ # are using the default in-memory store rather than a persistent store.
61
+ #
62
+ # @param listener [Object] the listener to remove; if no such listener was added, this does nothing
63
+ #
64
+ def remove_listener(listener)
65
+ end
66
+ end
67
+
68
+ #
69
+ # Interface that a data store implementation can use to report information back to the SDK.
70
+ #
71
+ module UpdateSink
72
+ #
73
+ # Reports a change in the data store's operational status.
74
+ #
75
+ # This is what makes the status monitoring mechanisms in {StatusProvider} work.
76
+ #
77
+ # @param status [Status] the updated status properties
78
+ #
79
+ def update_status(status)
80
+ end
81
+ end
82
+
83
+ class Status
84
+ def initialize(available, stale)
85
+ @available = available
86
+ @stale = stale
87
+ end
88
+
89
+ #
90
+ # Returns true if the SDK believes the data store is now available.
91
+ #
92
+ # This property is normally true. If the SDK receives an exception while trying to query or update the data
93
+ # store, then it sets this property to false (notifying listeners, if any) and polls the store at intervals
94
+ # until a query succeeds. Once it succeeds, it sets the property back to true (again notifying listeners).
95
+ #
96
+ # @return [Boolean] true if store is available
97
+ #
98
+ attr_reader :available
99
+
100
+ #
101
+ # Returns true if the store may be out of date due to a previous
102
+ # outage, so the SDK should attempt to refresh all feature flag data
103
+ # and rewrite it to the store.
104
+ #
105
+ # This property is not meaningful to application code.
106
+ #
107
+ # @return [Boolean] true if data should be rewritten
108
+ #
109
+ attr_reader :stale
110
+ end
111
+ end
112
+ end
113
+ end