launchdarkly-server-sdk 6.2.5 → 6.3.0

This diff shows the changes between two publicly released versions of the package, as they appear in the public registry. It is provided for informational purposes only.
@@ -5,10 +5,7 @@ module LaunchDarkly
  module Impl
  module Integrations
  module Redis
- #
- # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper.
- #
- class RedisFeatureStoreCore
+ class RedisStoreImplBase
  begin
  require "redis"
  require "connection_pool"
@@ -19,22 +16,14 @@ module LaunchDarkly
 
  def initialize(opts)
  if !REDIS_ENABLED
- raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool")
+ raise RuntimeError.new("can't use #{description} because one of these gems is missing: redis, connection_pool")
  end
 
- @redis_opts = opts[:redis_opts] || Hash.new
- if opts[:redis_url]
- @redis_opts[:url] = opts[:redis_url]
- end
- if !@redis_opts.include?(:url)
- @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
- end
- max_connections = opts[:max_connections] || 16
- @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do
- ::Redis.new(@redis_opts)
- end
+ @pool = create_redis_pool(opts)
+
  # shutdown pool on close unless the client passed a custom pool and specified not to shutdown
  @pool_shutdown_on_close = (!opts[:pool] || opts.fetch(:pool_shutdown_on_close, true))
+
  @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix
  @logger = opts[:logger] || Config.default_logger
  @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented
@@ -42,10 +31,53 @@ module LaunchDarkly
  @stopped = Concurrent::AtomicBoolean.new(false)
 
  with_connection do |redis|
- @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \
- and prefix: #{@prefix}")
+ @logger.info("#{description}: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} and prefix: #{@prefix}")
+ end
+ end
+
+ def stop
+ if @stopped.make_true
+ return unless @pool_shutdown_on_close
+ @pool.shutdown { |redis| redis.close }
+ end
+ end
+
+ protected def description
+ "Redis"
+ end
+
+ protected def with_connection
+ @pool.with { |redis| yield(redis) }
+ end
+
+ private def create_redis_pool(opts)
+ redis_opts = opts[:redis_opts] ? opts[:redis_opts].clone : Hash.new
+ if opts[:redis_url]
+ redis_opts[:url] = opts[:redis_url]
+ end
+ if !redis_opts.include?(:url)
+ redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
+ end
+ max_connections = opts[:max_connections] || 16
+ return opts[:pool] || ConnectionPool.new(size: max_connections) do
+ ::Redis.new(redis_opts)
  end
  end
+ end
+
+ #
+ # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper.
+ #
+ class RedisFeatureStoreCore < RedisStoreImplBase
+ def initialize(opts)
+ super(opts)
+
+ @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented
+ end
+
+ def description
+ "RedisFeatureStore"
+ end
 
  def init_internal(all_data)
  count = 0
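
The connection-pool handling factored into `create_redis_pool` above keeps the previous behavior: a caller can pass `:redis_url`/`:redis_opts`/`:max_connections`, or supply its own pool via `:pool` and opt out of shutdown with `:pool_shutdown_on_close`. A hedged sketch (not part of the diff) of the custom-pool case, assuming `new_feature_store` forwards these options to `RedisFeatureStoreCore`:

```ruby
require "connection_pool"
require "redis"
require "ldclient-rb"

# Assumption: LaunchDarkly::Integrations::Redis.new_feature_store passes these
# options through to RedisFeatureStoreCore (and thus RedisStoreImplBase#initialize).
pool = ConnectionPool.new(size: 8) { Redis.new(url: "redis://localhost:6379/0") }

store = LaunchDarkly::Integrations::Redis.new_feature_store(
  pool: pool,
  pool_shutdown_on_close: false  # keep the pool alive after the SDK client closes
)
```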
@@ -103,8 +135,7 @@ module LaunchDarkly
  else
  final_item = old_item
  action = new_item[:deleted] ? "delete" : "update"
- @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \
- in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
+ @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
  end
  redis.unwatch
  end
@@ -117,13 +148,6 @@ module LaunchDarkly
  with_connection { |redis| redis.exists?(inited_key) }
  end
 
- def stop
- if @stopped.make_true
- return unless @pool_shutdown_on_close
- @pool.shutdown { |redis| redis.close }
- end
- end
-
  private
 
  def before_update_transaction(base_key, key)
@@ -142,14 +166,43 @@ module LaunchDarkly
  @prefix + ":$inited"
  end
 
- def with_connection
- @pool.with { |redis| yield(redis) }
- end
-
  def get_redis(redis, kind, key)
  Model.deserialize(kind, redis.hget(items_key(kind), key))
  end
  end
+
+ #
+ # Internal implementation of the Redis big segment store.
+ #
+ class RedisBigSegmentStore < RedisStoreImplBase
+ KEY_LAST_UP_TO_DATE = ':big_segments_synchronized_on'
+ KEY_USER_INCLUDE = ':big_segment_include:'
+ KEY_USER_EXCLUDE = ':big_segment_exclude:'
+
+ def description
+ "RedisBigSegmentStore"
+ end
+
+ def get_metadata
+ value = with_connection { |redis| redis.get(@prefix + KEY_LAST_UP_TO_DATE) }
+ Interfaces::BigSegmentStoreMetadata.new(value.nil? ? nil : value.to_i)
+ end
+
+ def get_membership(user_hash)
+ with_connection do |redis|
+ included_refs = redis.smembers(@prefix + KEY_USER_INCLUDE + user_hash)
+ excluded_refs = redis.smembers(@prefix + KEY_USER_EXCLUDE + user_hash)
+ if !included_refs && !excluded_refs
+ nil
+ else
+ membership = {}
+ excluded_refs.each { |ref| membership[ref] = false }
+ included_refs.each { |ref| membership[ref] = true }
+ membership
+ end
+ end
+ end
+ end
  end
  end
  end
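
For orientation, the sketch below (not part of the diff) shows the Redis keys that `RedisBigSegmentStore` reads, built from the `KEY_*` constants above, and how `get_membership` folds the include/exclude sets into one hash: excludes are written first, so an include wins if a reference appears in both sets. The prefix value and user hash are illustrative.

```ruby
require "redis"

redis = Redis.new(url: "redis://localhost:6379/0")
prefix = "launchdarkly"          # illustrative prefix
user_hash = "example-user-hash"  # hypothetical hashed user key

# get_metadata reads the synchronization timestamp:
synced_on = redis.get("#{prefix}:big_segments_synchronized_on")

# get_membership reads both sets for the user...
included = redis.smembers("#{prefix}:big_segment_include:#{user_hash}")
excluded = redis.smembers("#{prefix}:big_segment_exclude:#{user_hash}")

# ...and merges them, excludes first, includes second (includes take precedence):
membership = {}
excluded.each { |ref| membership[ref] = false }
included.each { |ref| membership[ref] = true }
```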
@@ -0,0 +1,40 @@
+ require 'concurrent/atomics'
+ require 'ldclient-rb/interfaces'
+
+ module LaunchDarkly
+ module Impl
+ module Integrations
+ module TestData
+ # @private
+ class TestDataSource
+ include LaunchDarkly::Interfaces::DataSource
+
+ def initialize(feature_store, test_data)
+ @feature_store = feature_store
+ @test_data = test_data
+ end
+
+ def initialized?
+ true
+ end
+
+ def start
+ ready = Concurrent::Event.new
+ ready.set
+ init_data = @test_data.make_init_data
+ @feature_store.init(init_data)
+ ready
+ end
+
+ def stop
+ @test_data.closed_instance(self)
+ end
+
+ def upsert(kind, item)
+ @feature_store.upsert(kind, item)
+ end
+ end
+ end
+ end
+ end
+ end
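
`TestDataSource` is the `@private` data-source half of the SDK's test fixture: `start` seeds the feature store from `make_init_data` and reports ready immediately, and `upsert` lets the fixture push later changes into the store. The sketch below is a hedged illustration of reaching it through the public `TestData` builder; the builder methods shown (`data_source`, `flag`, `variation_for_all_users`, `update`) are not part of this diff and are assumptions about the companion API.

```ruby
require "ldclient-rb"

# Assumed public factory for the test fixture (not shown in this diff).
td = LaunchDarkly::Integrations::TestData.data_source

config = LaunchDarkly::Config.new(data_source: td, send_events: false)
client = LaunchDarkly::LDClient.new("fake-sdk-key", config)

# Updating the fixture flows through TestDataSource#upsert into the feature store.
td.update(td.flag("sample-flag").variation_for_all_users(true))  # assumed builder methods
```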
@@ -0,0 +1,47 @@
+ require "ldclient-rb/util"
+
+ require "concurrent/atomics"
+
+ module LaunchDarkly
+ module Impl
+ class RepeatingTask
+ def initialize(interval, start_delay, task, logger)
+ @interval = interval
+ @start_delay = start_delay
+ @task = task
+ @logger = logger
+ @stopped = Concurrent::AtomicBoolean.new(false)
+ @worker = nil
+ end
+
+ def start
+ @worker = Thread.new do
+ if @start_delay
+ sleep(@start_delay)
+ end
+ while !@stopped.value do
+ started_at = Time.now
+ begin
+ @task.call
+ rescue => e
+ LaunchDarkly::Util.log_exception(@logger, "Uncaught exception from repeating task", e)
+ end
+ delta = @interval - (Time.now - started_at)
+ if delta > 0
+ sleep(delta)
+ end
+ end
+ end
+ end
+
+ def stop
+ if @stopped.make_true
+ if @worker && @worker.alive? && @worker != Thread.current
+ @worker.run # causes the thread to wake up if it's currently in a sleep
+ @worker.join
+ end
+ end
+ end
+ end
+ end
+ end
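
`RepeatingTask` is an internal scheduling helper: `start` spawns a worker thread that waits for the optional start delay, then invokes the task, logs (rather than propagates) any exception, and sleeps only for whatever remains of the interval; `stop` flips the atomic flag and wakes the sleeping worker. A small illustrative sketch of the class as shown above (values are arbitrary, and it assumes the gem's top-level require loads the Impl classes):

```ruby
require "logger"
require "ldclient-rb"

logger = Logger.new($stdout)

# Run the block every 5 seconds, after an initial 1-second delay.
task = LaunchDarkly::Impl::RepeatingTask.new(5, 1, -> { logger.info("tick") }, logger)
task.start

sleep(12)  # let it tick a couple of times
task.stop  # wakes the worker out of its sleep and joins it
```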
@@ -1,7 +1,10 @@
-
  module LaunchDarkly
  module Impl
  module Util
+ def self.is_bool(aObject)
+ [true,false].include? aObject
+ end
+
  def self.current_time_millis
  (Time.now.to_f * 1000).to_i
  end
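
The new `is_bool` helper simply reports whether a value is literally `true` or `false`; for instance (illustrative, not from the diff):

```ruby
LaunchDarkly::Impl::Util.is_bool(true)    # => true
LaunchDarkly::Impl::Util.is_bool(false)   # => true
LaunchDarkly::Impl::Util.is_bool("true")  # => false -- a string is not a boolean
LaunchDarkly::Impl::Util.is_bool(nil)     # => false
```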
@@ -3,6 +3,13 @@ require "ldclient-rb/integrations/util/store_wrapper"
 
  module LaunchDarkly
  module Integrations
+ #
+ # Integration with [Consul](https://www.consul.io/).
+ #
+ # Note that in order to use this integration, you must first install the gem `diplomat`.
+ #
+ # @since 5.5.0
+ #
  module Consul
  #
  # Default value for the `prefix` option for {new_feature_store}.
@@ -3,6 +3,14 @@ require "ldclient-rb/integrations/util/store_wrapper"
 
  module LaunchDarkly
  module Integrations
+ #
+ # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/).
+ #
+ # Note that in order to use this integration, you must first install one of the AWS SDK gems: either
+ # `aws-sdk-dynamodb`, or the full `aws-sdk`.
+ #
+ # @since 5.5.0
+ #
  module DynamoDB
  #
  # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can
@@ -40,7 +48,44 @@ module LaunchDarkly
  #
  def self.new_feature_store(table_name, opts)
  core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts)
- return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
+ LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
+ end
+
+ #
+ # Creates a DynamoDB-backed Big Segment store.
+ #
+ # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly
+ # documentation: https://docs.launchdarkly.com/home/users/big-segments
+ #
+ # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or
+ # the full `aws-sdk`. Then, put the object returned by this method into the `store` property of your
+ # Big Segments configuration (see `Config`).
+ #
+ # @example Configuring Big Segments
+ # store = LaunchDarkly::Integrations::DynamoDB::new_big_segment_store("my-table-name")
+ # config = LaunchDarkly::Config.new(big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store)
+ # client = LaunchDarkly::LDClient.new(my_sdk_key, config)
+ #
+ # Note that the specified table must already exist in DynamoDB. It must have a partition key called
+ # "namespace", and a sort key called "key" (both strings). The SDK does not create the table
+ # automatically because it has no way of knowing what additional properties (such as permissions
+ # and throughput) you would want it to have.
+ #
+ # By default, the DynamoDB client will try to get your AWS credentials and region name from
+ # environment variables and/or local configuration files, as described in the AWS SDK documentation.
+ # You can also specify any supported AWS SDK options in `dynamodb_opts`-- or, provide an
+ # already-configured DynamoDB client in `existing_client`.
+ #
+ # @param opts [Hash] the configuration options (these are all the same as for `new_feature_store`,
+ # except that there are no caching parameters)
+ # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`)
+ # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use
+ # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly
+ # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
+ # @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object
+ #
+ def self.new_big_segment_store(table_name, opts)
+ LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBBigSegmentStore.new(table_name, opts)
  end
  end
  end
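
Per the doc comment above, the DynamoDB table must already exist with a string partition key `namespace` and a string sort key `key`. A hedged sketch (not from the diff) of creating such a table with the `aws-sdk-dynamodb` gem and then constructing the store; the table name, region, billing mode, and prefix are illustrative choices:

```ruby
require "aws-sdk-dynamodb"
require "ldclient-rb"

dynamodb = Aws::DynamoDB::Client.new(region: "us-east-1")
dynamodb.create_table(
  table_name: "my-table-name",
  attribute_definitions: [
    { attribute_name: "namespace", attribute_type: "S" },
    { attribute_name: "key", attribute_type: "S" },
  ],
  key_schema: [
    { attribute_name: "namespace", key_type: "HASH" },  # partition key
    { attribute_name: "key", key_type: "RANGE" },       # sort key
  ],
  billing_mode: "PAY_PER_REQUEST"
)

store = LaunchDarkly::Integrations::DynamoDB.new_big_segment_store(
  "my-table-name",
  { prefix: "ld" }  # options hash, as required by the signature shown above
)
```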
@@ -0,0 +1,108 @@
+ require 'ldclient-rb/impl/integrations/file_data_source'
+
+ module LaunchDarkly
+ module Integrations
+ #
+ # Provides a way to use local files as a source of feature flag state. This allows using a
+ # predetermined feature flag state without an actual LaunchDarkly connection.
+ #
+ # Reading flags from a file is only intended for pre-production environments. Production
+ # environments should always be configured to receive flag updates from LaunchDarkly.
+ #
+ # To use this component, call {FileData#data_source}, and store its return value in the
+ # {Config#data_source} property of your LaunchDarkly client configuration. In the options
+ # to `data_source`, set `paths` to the file path(s) of your data file(s):
+ #
+ # file_source = LaunchDarkly::Integrations::FileData.data_source(paths: [ myFilePath ])
+ # config = LaunchDarkly::Config.new(data_source: file_source)
+ #
+ # This will cause the client not to connect to LaunchDarkly to get feature flags. The
+ # client may still make network connections to send analytics events, unless you have disabled
+ # this with {Config#send_events} or {Config#offline?}.
+ #
+ # Flag data files can be either JSON or YAML. They contain an object with three possible
+ # properties:
+ #
+ # - `flags`: Feature flag definitions.
+ # - `flagValues`: Simplified feature flags that contain only a value.
+ # - `segments`: User segment definitions.
+ #
+ # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application
+ # and is subject to change. Rather than trying to construct these objects yourself, it is simpler
+ # to request existing flags directly from the LaunchDarkly server in JSON format, and use this
+ # output as the starting point for your file. In Linux you would do this:
+ #
+ # ```
+ # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all
+ # ```
+ #
+ # The output will look something like this (but with many more properties):
+ #
+ # {
+ # "flags": {
+ # "flag-key-1": {
+ # "key": "flag-key-1",
+ # "on": true,
+ # "variations": [ "a", "b" ]
+ # }
+ # },
+ # "segments": {
+ # "segment-key-1": {
+ # "key": "segment-key-1",
+ # "includes": [ "user-key-1" ]
+ # }
+ # }
+ # }
+ #
+ # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported
+ # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to
+ # set specific flag keys to specific values. For that, you can use a much simpler format:
+ #
+ # {
+ # "flagValues": {
+ # "my-string-flag-key": "value-1",
+ # "my-boolean-flag-key": true,
+ # "my-integer-flag-key": 3
+ # }
+ # }
+ #
+ # Or, in YAML:
+ #
+ # flagValues:
+ # my-string-flag-key: "value-1"
+ # my-boolean-flag-key: true
+ # my-integer-flag-key: 1
+ #
+ # It is also possible to specify both "flags" and "flagValues", if you want some flags
+ # to have simple values and others to have complex behavior. However, it is an error to use the
+ # same flag key or segment key more than once, either in a single file or across multiple files.
+ #
+ # If the data source encounters any error in any file-- malformed content, a missing file, or a
+ # duplicate key-- it will not load flags from any of the files.
+ #
+ module FileData
+ #
+ # Returns a factory for the file data source component.
+ #
+ # @param options [Hash] the configuration options
+ # @option options [Array] :paths The paths of the source files for loading flag data. These
+ # may be absolute paths or relative to the current working directory.
+ # @option options [Boolean] :auto_update True if the data source should watch for changes to
+ # the source file(s) and reload flags whenever there is a change. Auto-updating will only
+ # work if all of the files you specified have valid directory paths at startup time.
+ # Note that the default implementation of this feature is based on polling the filesystem,
+ # which may not perform well. If you install the 'listen' gem (not included by default, to
+ # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be
+ # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability.
+ # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for
+ # file modifications - used only if auto_update is true, and if the native file-watching
+ # mechanism from 'listen' is not being used. The default value is 1 second.
+ # @return an object that can be stored in {Config#data_source}
+ #
+ def self.data_source(options={})
+ return lambda { |sdk_key, config|
+ Impl::Integrations::FileDataSourceImpl.new(config.feature_store, config.logger, options) }
+ end
+ end
+ end
+ end
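
An end-to-end sketch (not part of the diff) that wires `FileData` into a client using the options documented above; the file path, flag key, and user are placeholders:

```ruby
require "ldclient-rb"

file_source = LaunchDarkly::Integrations::FileData.data_source(
  paths: ["./flags.yml"],
  auto_update: true,   # reload when a source file changes
  poll_interval: 1     # used only when the 'listen' gem is not installed
)

config = LaunchDarkly::Config.new(data_source: file_source, send_events: false)
client = LaunchDarkly::LDClient.new("fake-sdk-key", config)

value = client.variation("my-boolean-flag-key", { key: "example-user" }, false)
```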
@@ -2,6 +2,14 @@ require "ldclient-rb/redis_store" # eventually we will just refer to impl/integ
 
  module LaunchDarkly
  module Integrations
+ #
+ # Integration with [Redis](https://redis.io/).
+ #
+ # Note that in order to use this integration, you must first install the `redis` and `connection-pool`
+ # gems.
+ #
+ # @since 5.5.0
+ #
  module Redis
  #
  # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of
@@ -53,6 +61,38 @@ module LaunchDarkly
  def self.new_feature_store(opts)
  return RedisFeatureStore.new(opts)
  end
+
+ #
+ # Creates a Redis-backed Big Segment store.
+ #
+ # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly
+ # documentation: https://docs.launchdarkly.com/home/users/big-segments
+ #
+ # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then,
+ # put the object returned by this method into the `store` property of your Big Segments configuration
+ # (see `Config`).
+ #
+ # @example Configuring Big Segments
+ # store = LaunchDarkly::Integrations::Redis::new_big_segment_store(redis_url: "redis://my-server")
+ # config = LaunchDarkly::Config.new(big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store)
+ # client = LaunchDarkly::LDClient.new(my_sdk_key, config)
+ #
+ # @param opts [Hash] the configuration options (these are all the same as for `new_feature_store`,
+ # except that there are no caching parameters)
+ # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`)
+ # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`)
+ # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly
+ # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
+ # @option opts [Integer] :max_connections size of the Redis connection pool
+ # @option opts [Object] :pool custom connection pool, if desired
+ # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool;
+ # this is true by default, and should be set to false only if you are managing the pool yourself and want its
+ # lifecycle to be independent of the SDK client
+ # @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object
+ #
+ def self.new_big_segment_store(opts)
+ return LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore.new(opts)
+ end
  end
  end
  end
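
The `@example` block above omits a closing parenthesis on the `Config.new(...)` line; here is a balanced, hedged version of the same configuration (the Redis URL and SDK key are placeholders):

```ruby
require "ldclient-rb"

store  = LaunchDarkly::Integrations::Redis.new_big_segment_store(redis_url: "redis://my-server")
config = LaunchDarkly::Config.new(
  big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store)
)
client = LaunchDarkly::LDClient.new("my-sdk-key", config)
```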