featureflip 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/featureflip/client.rb +247 -0
- data/lib/featureflip/config.rb +66 -0
- data/lib/featureflip/data_source/polling.rb +44 -0
- data/lib/featureflip/data_source/streaming.rb +121 -0
- data/lib/featureflip/errors.rb +5 -0
- data/lib/featureflip/evaluation/bucketing.rb +14 -0
- data/lib/featureflip/evaluation/condition_evaluator.rb +88 -0
- data/lib/featureflip/evaluation/evaluator.rb +89 -0
- data/lib/featureflip/events/event.rb +5 -0
- data/lib/featureflip/events/event_processor.rb +63 -0
- data/lib/featureflip/http/client.rb +141 -0
- data/lib/featureflip/models/evaluation_detail.rb +9 -0
- data/lib/featureflip/models/flag.rb +38 -0
- data/lib/featureflip/models/segment.rb +5 -0
- data/lib/featureflip/store/flag_store.rb +44 -0
- data/lib/featureflip/version.rb +3 -0
- data/lib/featureflip.rb +92 -0
- metadata +103 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA256:
|
|
3
|
+
metadata.gz: 193c7bdc8f5d87d2bd2705848bbe245c3defb24f7e00328166f5add6fa251f4c
|
|
4
|
+
data.tar.gz: 9f33416673fde7b3621dc8cbcf06dc605a773c15ce49a2ed6df4fff6f2bec7a7
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: 54f640cbb297fd817c0475fce44baae2603da0216c456c7fc76323c7ee88e86588f9763d42a18c8ec8e17eab734095e229165d5d9cd997693f420bf95f4ad30e
|
|
7
|
+
data.tar.gz: f472d47b4c48cbb546b64db5a3dff43cdd1251a97dfb37f559522bc0ff0f820a4f52d7188eed949e4f38f48010492e2b9ccc2a376f1fe8ad693adc7bf4855185
|
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
require "time"    # Time#iso8601 (used for event timestamps) is defined by this stdlib extension
require "timeout"
|
|
2
|
+
|
|
3
|
+
module Featureflip
  # Main SDK entry point. Owns the flag store, the evaluation engine, the
  # data source (streaming with polling fallback) and the analytics event
  # pipeline.
  #
  # The constructor performs network I/O (the initial flag fetch) and can
  # raise ConfigurationError / InitializationError; use Client.for_testing
  # for an offline instance backed by a fixed flag => value hash.
  class Client
    attr_reader :initialized
    alias_method :initialized?, :initialized

    # @param sdk_key [String, nil] falls back to ENV["FEATUREFLIP_SDK_KEY"]
    # @param config [Config, nil] defaults to Config.new
    # @raise [ConfigurationError] when no SDK key can be resolved
    # @raise [InitializationError] when the initial flag fetch fails
    def initialize(sdk_key: nil, config: nil)
      @sdk_key = sdk_key || ENV["FEATUREFLIP_SDK_KEY"]
      raise ConfigurationError, "SDK key is required. Pass sdk_key parameter or set FEATUREFLIP_SDK_KEY env var." unless @sdk_key

      @config = config || Config.new
      @store = Store::FlagStore.new
      @evaluator = Evaluation::Evaluator.new
      @initialized = false
      @closed = false
      @test_mode = false
      @test_values = {}
      @http_client = nil
      @streaming_handler = nil
      @polling_handler = nil
      @event_processor = nil

      # Blocking: fetches the initial ruleset and starts background workers.
      initialize!
    end

    # Typed variation helpers.
    # NOTE(review): none of these verify that the resolved value matches the
    # requested type — they all defer to the same untyped evaluate_flag.
    def bool_variation(key, context, default_value)
      evaluate_flag(key, context, default_value)
    end

    def string_variation(key, context, default_value)
      evaluate_flag(key, context, default_value)
    end

    def number_variation(key, context, default_value)
      evaluate_flag(key, context, default_value)
    end

    def json_variation(key, context, default_value)
      evaluate_flag(key, context, default_value)
    end

    # Full evaluation with reason metadata. Never raises: any evaluation
    # error yields the default value with reason "Error".
    def variation_detail(key, context, default_value)
      context = normalize_context(context)

      if @test_mode
        value = @test_values.fetch(key, default_value)
        reason = @test_values.key?(key) ? "Fallthrough" : "FlagNotFound"
        return Models::EvaluationDetail.new(value: value, reason: reason)
      end

      flag = @store.get_flag(key)
      unless flag
        # Unknown flags are still recorded (variation nil) for analytics.
        record_evaluation(key, context, nil)
        return Models::EvaluationDetail.new(value: default_value, reason: "FlagNotFound")
      end

      result = @evaluator.evaluate(flag, context, get_segment: method(:get_segment))
      # A nil evaluated value (e.g. unknown variation key) falls back to the default.
      value = result.value.nil? ? default_value : result.value
      record_evaluation(key, context, result.variation_key)

      Models::EvaluationDetail.new(
        value: value,
        reason: result.reason,
        rule_id: result.rule_id,
        variation_key: result.variation_key
      )
    rescue StandardError
      Models::EvaluationDetail.new(value: default_value, reason: "Error")
    end

    # Queues a custom analytics event; no-op when events are disabled.
    # Time#iso8601 comes from the "time" stdlib extension required at the
    # top of this file.
    def track(event_key, context, metadata = nil)
      return unless @event_processor

      context = normalize_context(context)
      @event_processor.queue_event({
        type: "Custom",
        flagKey: event_key,
        userId: context["user_id"]&.to_s,
        metadata: metadata || {},
        timestamp: Time.now.utc.iso8601
      })
    end

    # Queues an identify event for the given context.
    def identify(context)
      return unless @event_processor

      context = normalize_context(context)
      @event_processor.queue_event({
        type: "Identify",
        flagKey: "$identify",
        userId: context["user_id"]&.to_s,
        timestamp: Time.now.utc.iso8601
      })
    end

    # Forces delivery of any buffered analytics events.
    def flush
      @event_processor&.flush
    end

    # Permanently shuts down all background workers. A closed client can
    # still evaluate from its cached flags but receives no further updates.
    def close
      @closed = true
      @streaming_handler&.stop
      @streaming_handler = nil
      @polling_handler&.stop
      @polling_handler = nil
      @event_processor&.stop
      @event_processor = nil
    end

    # Stops and restarts the data source and event pipeline (e.g. after a
    # config change). No-op once #close has been called.
    # NOTE(review): unlike #close, the stopped handlers are not nil-ed here
    # before replacements are started — confirm that is intentional.
    def restart
      return if @closed

      @streaming_handler&.stop
      @polling_handler&.stop
      @event_processor&.stop

      if @config.streaming
        start_streaming
      else
        start_polling
      end
      start_event_processor if @config.send_events
    end

    # Builds an offline client whose evaluations come from +flags+
    # (flag key => value). No network activity, no events.
    def self.for_testing(flags)
      # allocate + instance_variable_set deliberately bypasses #initialize
      # (and therefore the blocking initial network fetch).
      instance = allocate
      instance.instance_variable_set(:@sdk_key, "test-key")
      instance.instance_variable_set(:@config, Config.new)
      instance.instance_variable_set(:@store, Store::FlagStore.new)
      instance.instance_variable_set(:@evaluator, Evaluation::Evaluator.new)
      instance.instance_variable_set(:@initialized, true)
      instance.instance_variable_set(:@closed, false)
      instance.instance_variable_set(:@test_mode, true)
      instance.instance_variable_set(:@test_values, flags.dup)
      instance.instance_variable_set(:@http_client, nil)
      instance.instance_variable_set(:@streaming_handler, nil)
      instance.instance_variable_set(:@polling_handler, nil)
      instance.instance_variable_set(:@event_processor, nil)
      instance
    end

    private

    # Wires up the HTTP client, performs the blocking initial fetch and
    # starts the background data source / event processor.
    def initialize!
      @http_client = Http::Client.new(@sdk_key, @config)
      fetch_initial_flags
      start_data_source
      start_event_processor if @config.send_events
    end

    # Blocking initial flag download, bounded by config.init_timeout.
    # @raise [InitializationError] on timeout or any fetch failure
    def fetch_initial_flags
      Timeout.timeout(@config.init_timeout) do
        flags, segments = @http_client.get_flags
        @store.init(flags, segments)
        @initialized = true
      end
    rescue Timeout::Error
      raise InitializationError, "Initialization timed out after #{@config.init_timeout}s"
    rescue InitializationError
      raise
    rescue StandardError => e
      raise InitializationError, "Failed to initialize: #{e.message}"
    end

    # Chooses streaming or polling per config. Does nothing once closed.
    def start_data_source
      return if @closed

      if @config.streaming
        start_streaming
      else
        start_polling
      end
    end

    def start_streaming
      @streaming_handler = DataSource::StreamingHandler.new(
        sdk_key: @sdk_key,
        config: @config,
        http_client: @http_client,
        on_flag_updated: ->(flag) { @store.upsert(flag) },
        on_flag_deleted: ->(key) { @store.remove_flag(key) },
        # Segment changes trigger a full re-sync of flags + segments.
        on_segment_updated: ->(flags, segments) { @store.init(flags, segments) },
        on_error: ->(_err) { },
        on_give_up: -> { fallback_to_polling }
      )
      @streaming_handler.start
    end

    # Invoked by the streaming handler after its retry budget is exhausted.
    def fallback_to_polling
      @config.logger&.warn("Featureflip: streaming retries exhausted, falling back to polling")
      @streaming_handler = nil
      start_polling
    end

    def start_polling
      @polling_handler = DataSource::PollingHandler.new(
        http_client: @http_client,
        config: @config,
        on_update: ->(flags, segments) { @store.init(flags, segments) },
        on_error: ->(_err) { }
      )
      @polling_handler.start
    end

    def start_event_processor
      @event_processor = Events::EventProcessor.new(
        @http_client,
        flush_interval: @config.flush_interval,
        flush_batch_size: @config.flush_batch_size
      )
      @event_processor.start
    end

    # Shared implementation behind the typed *_variation helpers; always
    # returns a plain value, falling back to +default_value+ on any error.
    def evaluate_flag(key, context, default_value)
      if @test_mode
        return @test_values.fetch(key, default_value)
      end

      detail = variation_detail(key, context, default_value)
      detail.value
    rescue StandardError
      default_value
    end

    # Segment lookup passed to the evaluator.
    def get_segment(key)
      @store.get_segment(key)
    end

    # Accepts nil or a hash with symbol/string keys; returns a
    # string-keyed hash (empty hash for nil).
    def normalize_context(context)
      return {} if context.nil?
      context.transform_keys(&:to_s)
    end

    # Emits an "Evaluation" analytics event (best-effort).
    def record_evaluation(key, context, variation_key)
      return unless @event_processor

      @event_processor.queue_event({
        type: "Evaluation",
        flagKey: key,
        userId: context["user_id"]&.to_s,
        variation: variation_key,
        timestamp: Time.now.utc.iso8601
      })
    end
  end
end
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
module Featureflip
  # SDK configuration with validated defaults. All attributes are writable,
  # but #validate! is only run automatically from the constructor.
  class Config
    attr_accessor :sdk_key, :base_url, :streaming, :poll_interval, :flush_interval,
                  :flush_batch_size, :init_timeout, :connect_timeout, :read_timeout,
                  :max_stream_retries, :send_events, :logger

    def initialize(
      sdk_key: nil,
      base_url: "https://eval.featureflip.io",
      streaming: true,
      poll_interval: 30,
      flush_interval: 30,
      flush_batch_size: 100,
      init_timeout: 10,
      connect_timeout: 5,
      read_timeout: 10,
      max_stream_retries: 5,
      send_events: true,
      logger: nil
    )
      @sdk_key            = sdk_key
      @base_url           = base_url
      @streaming          = streaming
      @poll_interval      = poll_interval
      @flush_interval     = flush_interval
      @flush_batch_size   = flush_batch_size
      @init_timeout       = init_timeout
      @connect_timeout    = connect_timeout
      @read_timeout       = read_timeout
      @max_stream_retries = max_stream_retries
      @send_events        = send_events
      @logger             = logger || default_logger

      validate!
    end

    # Normalizes derived fields, then checks invariants.
    # @raise [ConfigurationError] when a numeric setting is not positive
    def validate!
      normalize_base_url!
      ensure_positive_settings!
    end

    private

    # Strip trailing slashes so later path concatenation is predictable.
    def normalize_base_url!
      @base_url = @base_url.to_s.gsub(%r{/+$}, "")
    end

    # Every interval / size / timeout must be strictly positive.
    def ensure_positive_settings!
      %i[poll_interval flush_interval flush_batch_size init_timeout connect_timeout read_timeout].each do |field|
        value = send(field)
        raise ConfigurationError, "#{field} must be positive, got #{value}" if value <= 0
      end
    end

    # Prefer the Rails logger when running inside Rails; otherwise log to
    # stdout at INFO level.
    def default_logger
      if defined?(Rails) && Rails.respond_to?(:logger)
        Rails.logger
      else
        require "logger"
        ::Logger.new($stdout, level: ::Logger::INFO)
      end
    end
  end
end
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
module Featureflip
  module DataSource
    # Background poller that periodically downloads the full flag/segment
    # payload via the HTTP client and hands it to +on_update+.
    class PollingHandler
      # @param http_client [#get_flags] returns [flags, segments]
      # @param config [Config] provides #poll_interval (seconds)
      # @param on_update [#call] invoked with (flags, segments) after each successful poll
      # @param on_error [#call] invoked with the exception when a poll fails
      def initialize(http_client:, config:, on_update:, on_error:)
        @http_client = http_client
        @config = config
        @on_update = on_update
        @on_error = on_error
        @stop_flag = false
        @thread = nil
      end

      # Spawns the polling thread. Safe to call again after #stop.
      def start
        @stop_flag = false
        @thread = Thread.new { run }
      end

      # Signals the polling thread to exit and waits up to 5s for it.
      def stop
        @stop_flag = true
        begin
          # Interrupt the 1s sleep so shutdown is prompt. The previous
          # `rescue nil` modifier swallowed *all* StandardErrors; only
          # ThreadError (thread already dead) is expected here.
          @thread&.wakeup
        rescue ThreadError
          # Thread already terminated — nothing to wake.
        end
        @thread&.join(5)
        @thread = nil
      end

      private

      # Poll loop: fetch, notify, then sleep the poll interval in 1s slices
      # so #stop can interrupt a long interval quickly.
      def run
        until @stop_flag
          begin
            flags, segments = @http_client.get_flags
            @on_update.call(flags, segments)
          rescue StandardError => e
            @on_error.call(e)
          end
          elapsed = 0
          while elapsed < @config.poll_interval && !@stop_flag
            sleep(1)
            elapsed += 1
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
require "net/http"
|
|
2
|
+
require "uri"
|
|
3
|
+
require "json"
|
|
4
|
+
|
|
5
|
+
module Featureflip
  module DataSource
    # Server-Sent Events (SSE) subscriber for near-real-time flag updates.
    # Runs on a background thread; reconnects with exponential backoff and
    # invokes +on_give_up+ once config.max_stream_retries is exceeded.
    class StreamingHandler
      def initialize(sdk_key:, config:, http_client:, on_flag_updated:, on_flag_deleted:, on_segment_updated:, on_error:, on_give_up: nil)
        @sdk_key = sdk_key
        @config = config
        @http_client = http_client
        @on_flag_updated = on_flag_updated
        @on_flag_deleted = on_flag_deleted
        @on_segment_updated = on_segment_updated
        @on_error = on_error
        @on_give_up = on_give_up
        @stop_flag = false
        @thread = nil
        @retry_count = 0
        # Accumulators for the SSE event currently being parsed.
        @current_event_type = nil
        @current_data = nil
      end

      # Spawns the streaming thread and resets the retry budget.
      def start
        @stop_flag = false
        @retry_count = 0
        @thread = Thread.new { run }
      end

      # Signals the thread to exit and waits up to 5s for it to finish.
      def stop
        @stop_flag = true
        @thread&.wakeup rescue nil
        @thread&.join(5)
        @thread = nil
      end

      private

      # Connect loop: each failed connection counts against the retry
      # budget; a successful connection resets it (inside #connect).
      def run
        until @stop_flag
          begin
            connect
          rescue StandardError => e
            break if @stop_flag
            @on_error.call(e)
            @retry_count += 1
            if @retry_count > @config.max_stream_retries
              @on_give_up&.call
              break
            end
            # Exponential backoff: 1s, 2s, 4s, ... capped at 30s.
            delay = [2**(@retry_count - 1), 30].min
            sleep(delay)
          end
        end
      end

      # Opens one long-lived SSE connection and feeds each received line to
      # the parser. Returns when the server closes the stream or #stop is
      # requested; raises on connection failure (handled by #run).
      def connect
        uri = URI("#{@config.base_url}/v1/sdk/stream")
        http = Net::HTTP.new(uri.host, uri.port)
        http.use_ssl = uri.scheme == "https"
        http.open_timeout = @config.connect_timeout
        http.read_timeout = 300 # 5 min — detect silent TCP drops

        req = Net::HTTP::Get.new(uri.request_uri)
        req["Authorization"] = @sdk_key
        req["Accept"] = "text/event-stream"
        req["User-Agent"] = "featureflip-ruby/#{Featureflip::VERSION}"

        http.request(req) do |response|
          unless response.is_a?(Net::HTTPSuccess)
            raise Featureflip::Error, "SSE connection failed: #{response.code}"
          end

          # Connected — reset backoff and parser state.
          @retry_count = 0
          @current_event_type = nil
          @current_data = nil

          response.read_body do |chunk|
            break if @stop_flag
            # NOTE(review): a line split across two chunks would be parsed
            # as two separate lines here — confirm the server emits
            # line-aligned chunks, or add a carry-over buffer.
            chunk.each_line do |line|
              process_sse_line(line.strip)
            end
          end
        end
      end

      # Minimal SSE parser: collects "event:" / "data:" fields and
      # dispatches on the blank line that terminates an event.
      def process_sse_line(line)
        if line.start_with?("event: ")
          @current_event_type = line[7..]
        elsif line.start_with?("data: ")
          @current_data = line[6..]
        elsif line.empty? && @current_event_type && @current_data
          handle_event(@current_event_type, @current_data)
          @current_event_type = nil
          @current_data = nil
        end
      end

      # Applies one server event. Flag events re-fetch the full flag (the
      # SSE payload carries only the key); segment events trigger a full
      # re-sync via get_flags. Errors are swallowed deliberately so one bad
      # event cannot kill the stream.
      def handle_event(event_type, data)
        case event_type
        when "flag.created", "flag.updated"
          payload = JSON.parse(data)
          key = payload["key"]
          return if key.nil? || key.empty?
          flag = @http_client.get_flag(key)
          @on_flag_updated.call(flag)
        when "flag.deleted"
          payload = JSON.parse(data)
          key = payload["key"]
          return if key.nil? || key.empty?
          @on_flag_deleted.call(key)
        when "segment.updated"
          flags, segments = @http_client.get_flags
          @on_segment_updated.call(flags, segments)
        end
      rescue StandardError
        # Swallow event processing errors
      end
    end
  end
end
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
require "digest/md5"
|
|
2
|
+
|
|
3
|
+
module Featureflip
  module Evaluation
    # Deterministic percentage bucketing used for weighted rollouts.
    module Bucketing
      # Maps (salt, value) to a stable integer in 0..99. MD5 is used purely
      # as a uniform mixing function, not for security.
      def self.compute_bucket(salt, value)
        digest = Digest::MD5.digest("#{salt}:#{value}")
        # "V" consumes the first four bytes as a little-endian unsigned int.
        digest.unpack1("V") % 100
      end
    end
  end
end
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
module Featureflip
  module Evaluation
    # Evaluates targeting conditions against a context hash. All string
    # comparisons are case-insensitive; numeric operators coerce via Float().
    class ConditionEvaluator
      # Evaluates one condition. A missing context attribute matches only
      # when the condition is negated.
      def evaluate_condition(condition, context)
        raw = context[condition.attribute]
        return condition.negate if raw.nil?

        candidate = raw.to_s.downcase
        expected = condition.values.map { |v| v.to_s.downcase }
        matched = operator_match?(condition.operator, candidate, expected)
        condition.negate ? !matched : matched
      end

      # Combines a flat list of conditions: "And" requires all to pass,
      # anything else behaves as "Or". An empty list always passes.
      def evaluate_conditions(conditions, logic, context)
        return true if conditions.empty?

        combinator = logic == "And" ? :all? : :any?
        conditions.public_send(combinator) { |c| evaluate_condition(c, context) }
      end

      # Every group must pass; each group combines its own conditions with
      # its own operator. nil/empty groups always pass.
      def evaluate_condition_groups(condition_groups, context)
        (condition_groups || []).all? do |group|
          evaluate_conditions(group.conditions, group.operator, context)
        end
      end

      private

      # Dispatches a single (already downcased) value against the targets.
      # Unknown operators never match.
      def operator_match?(operator, value, targets)
        case operator
        when "Equals"             then targets.include?(value)
        when "NotEquals"          then targets.none? { |t| value == t }
        when "Contains"           then targets.any? { |t| value.include?(t) }
        when "NotContains"        then targets.none? { |t| value.include?(t) }
        when "StartsWith"         then targets.any? { |t| value.start_with?(t) }
        when "EndsWith"           then targets.any? { |t| value.end_with?(t) }
        when "In"                 then targets.include?(value)
        when "NotIn"              then !targets.include?(value)
        when "MatchesRegex"       then regex_match?(value, targets)
        when "GreaterThan"        then numeric_compare?(value, targets[0], :>)
        when "GreaterThanOrEqual" then numeric_compare?(value, targets[0], :>=)
        when "LessThan"           then numeric_compare?(value, targets[0], :<)
        when "LessThanOrEqual"    then numeric_compare?(value, targets[0], :<=)
        when "Before"             then !targets.empty? && value < targets[0]
        when "After"              then !targets.empty? && value > targets[0]
        else
          false
        end
      end

      # True when any target compiles to a regex that matches; invalid
      # patterns are treated as non-matching rather than raising.
      def regex_match?(value, targets)
        targets.any? do |pattern|
          Regexp.new(pattern, Regexp::IGNORECASE).match?(value)
        rescue RegexpError
          false
        end
      end

      # Numeric comparison; non-numeric operands simply fail the condition.
      def numeric_compare?(value, target, op)
        Float(value).send(op, Float(target))
      rescue ArgumentError, TypeError
        false
      end
    end
  end
end
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
require_relative "condition_evaluator"
|
|
2
|
+
require_relative "bucketing"
|
|
3
|
+
|
|
4
|
+
module Featureflip
  module Evaluation
    # Core flag evaluation: disabled check, prioritized rule matching,
    # then the fallthrough serve.
    class Evaluator
      def initialize
        @condition_evaluator = ConditionEvaluator.new
      end

      # Returns a Models::EvaluationDetail for +flag+ against +context+.
      # +get_segment+ (optional callable) resolves segment keys for
      # segment-targeted rules; an unresolvable segment never matches.
      def evaluate(flag, context, get_segment: nil)
        return off_detail(flag) unless flag.enabled

        flag.rules.sort_by(&:priority).each do |rule|
          next unless rule_matches?(rule, context, get_segment)

          key = resolve_serve(rule.serve, context)
          return Models::EvaluationDetail.new(
            value: flag.get_variation(key)&.value,
            reason: "RuleMatch",
            rule_id: rule.id,
            variation_key: key
          )
        end

        key = resolve_serve(flag.fallthrough, context)
        Models::EvaluationDetail.new(
          value: flag.get_variation(key)&.value,
          reason: "Fallthrough",
          variation_key: key
        )
      end

      private

      # Detail returned when the flag is switched off entirely.
      def off_detail(flag)
        Models::EvaluationDetail.new(
          value: flag.get_variation(flag.off_variation)&.value,
          reason: "FlagDisabled",
          variation_key: flag.off_variation
        )
      end

      # A segment-backed rule matches via the segment's own conditions;
      # otherwise the rule's condition groups decide.
      def rule_matches?(rule, context, get_segment)
        if rule.segment_key && get_segment
          segment = get_segment.call(rule.segment_key)
          return false unless segment

          @condition_evaluator.evaluate_conditions(
            segment.conditions, segment.condition_logic, context
          )
        else
          @condition_evaluator.evaluate_condition_groups(
            rule.condition_groups, context
          )
        end
      end

      # "Fixed" serves name a single variation; anything else is treated as
      # a weighted rollout.
      def resolve_serve(serve, context)
        serve.type == "Fixed" ? (serve.variation || "") : rollout_key(serve, context)
      end

      # Deterministically assigns the context to a weighted variation.
      def rollout_key(serve, context)
        attribute = serve.bucket_by || "userId"
        seed = context[attribute]
        # "userId" and "user_id" are aliases for the built-in identifier.
        seed = context["user_id"] if seed.nil? && attribute == "userId"
        seed = context["userId"] if seed.nil? && attribute == "user_id"

        bucket = Bucketing.compute_bucket(serve.salt || "", seed.nil? ? "" : seed.to_s)

        total = 0
        (serve.variations || []).each do |weighted|
          total += weighted.weight
          return weighted.key if bucket < total
        end
        # Weights summing to under 100 spill into the final variation.
        serve.variations&.last&.key || ""
      end
    end
  end
end
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
module Featureflip
  module Events
    # Buffers analytics events and flushes them in batches, either when the
    # batch size is reached or on a periodic timer. The queue is guarded by
    # a mutex; delivery happens outside the lock.
    class EventProcessor
      # @param http_client [#post_events] transport used for delivery
      # @param flush_interval [Integer] seconds between periodic flushes
      # @param flush_batch_size [Integer] queue size that forces an immediate flush
      def initialize(http_client, flush_interval: 30, flush_batch_size: 100)
        @http_client = http_client
        @flush_interval = flush_interval
        @flush_batch_size = flush_batch_size
        @queue = []
        @mutex = Mutex.new
        @stop_flag = false
        @thread = nil
      end

      # Enqueues one event hash; flushes synchronously once the batch fills.
      def queue_event(event)
        should_flush = false
        @mutex.synchronize do
          @queue << event
          should_flush = @queue.length >= @flush_batch_size
        end
        # Flush outside the lock so post_events never blocks other producers.
        flush if should_flush
      end

      # Drains the queue and posts the batch. Delivery is best-effort:
      # failures drop the batch rather than raising to callers.
      def flush
        events_to_send = nil
        @mutex.synchronize do
          return if @queue.empty?
          events_to_send = @queue.dup
          @queue.clear
        end

        return unless events_to_send&.any?

        @http_client.post_events(events_to_send)
      rescue StandardError
        # Events are best-effort — drop on failure
      end

      # Starts the background flush timer (1s ticks so #stop stays responsive).
      def start
        @stop_flag = false
        @thread = Thread.new do
          elapsed = 0
          until @stop_flag
            sleep(1)
            elapsed += 1
            queue_size = @mutex.synchronize { @queue.length }
            if elapsed >= @flush_interval || queue_size >= @flush_batch_size
              elapsed = 0
              flush unless @stop_flag
            end
          end
        end
      end

      # Stops the timer thread, then performs one final synchronous flush so
      # queued events are not lost on shutdown.
      def stop
        @stop_flag = true
        begin
          # Cut the 1s sleep short. The previous `rescue nil` modifier
          # swallowed *all* StandardErrors; only ThreadError (thread already
          # dead) is expected from #wakeup.
          @thread&.wakeup
        rescue ThreadError
          # Thread already finished — nothing to wake.
        end
        @thread&.join(5)
        @thread = nil
        flush
      end
    end
  end
end
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
require "json"
|
|
2
|
+
require "net/http"
|
|
3
|
+
require "uri"
|
|
4
|
+
|
|
5
|
+
module Featureflip
  module Http
    # Thin net/http wrapper for the Featureflip REST API: full ruleset
    # download, single-flag fetch, and event delivery. Each call opens a
    # fresh connection; transient failures are retried once.
    class Client
      def initialize(sdk_key, config)
        @sdk_key = sdk_key
        @config = config
        @base_url = config.base_url
      end

      # Downloads the full ruleset.
      # @return [Array(Array, Array)] parsed [flags, segments]
      def get_flags
        response = request(:get, "/v1/sdk/flags")
        data = JSON.parse(response.body)
        flags = (data["flags"] || []).map { |f| parse_flag(f) }
        segments = (data["segments"] || []).map { |s| parse_segment(s) }
        [flags, segments]
      end

      # Fetches a single flag by key.
      # NOTE(review): +key+ is interpolated into the URL path unescaped —
      # a key containing '/', '?' or '#' would corrupt the request URI.
      # Confirm keys are validated upstream, or escape here.
      def get_flag(key)
        response = request(:get, "/v1/sdk/flags/#{key}")
        parse_flag(JSON.parse(response.body))
      end

      # Posts a batch of analytics events.
      def post_events(events)
        request(:post, "/v1/sdk/events", { events: events })
      end

      def close
        # No persistent connection to close with net/http
      end

      private

      # One HTTP round trip with a single retry on 5xx responses and on
      # common connection/timeout errors (1s pause between attempts).
      # @raise [Featureflip::Error] on any non-success response
      def request(method, path, body = nil, retries: 1)
        uri = URI("#{@base_url}#{path}")
        http = Net::HTTP.new(uri.host, uri.port)
        http.use_ssl = uri.scheme == "https"
        http.open_timeout = @config.connect_timeout
        http.read_timeout = @config.read_timeout

        req = case method
              when :get
                Net::HTTP::Get.new(uri.request_uri)
              when :post
                r = Net::HTTP::Post.new(uri.request_uri)
                r.body = JSON.generate(body)
                r
              end

        req["Authorization"] = @sdk_key
        req["Content-Type"] = "application/json"
        req["User-Agent"] = "featureflip-ruby/#{Featureflip::VERSION}"

        response = http.request(req)

        # Server errors get one bounded retry; client errors fail fast below.
        if response.is_a?(Net::HTTPServerError) && retries > 0
          sleep(1)
          return request(method, path, body, retries: retries - 1)
        end

        unless response.is_a?(Net::HTTPSuccess)
          raise Featureflip::Error, "HTTP #{response.code}: #{path}"
        end

        response
      rescue IOError, Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::ETIMEDOUT,
             Net::OpenTimeout, Net::ReadTimeout => e
        raise if retries <= 0
        sleep(1)
        request(method, path, body, retries: retries - 1)
      end

      # --- Wire format (camelCase JSON) -> model parsing -----------------

      def parse_flag(data)
        Models::FlagConfiguration.new(
          key: data["key"],
          version: data["version"],
          type: data["type"],
          enabled: data["enabled"],
          variations: (data["variations"] || []).map { |v| Models::Variation.new(key: v["key"], value: v["value"]) },
          rules: (data["rules"] || []).map { |r| parse_rule(r) },
          # NOTE(review): assumes "fallthrough" (and "serve" in each rule)
          # is always present — parse_serve raises NoMethodError on nil.
          fallthrough: parse_serve(data["fallthrough"]),
          off_variation: data["offVariation"]
        )
      end

      def parse_rule(data)
        condition_groups = (data["conditionGroups"] || []).map { |g| parse_condition_group(g) }

        Models::TargetingRule.new(
          id: data["id"],
          priority: data["priority"],
          condition_groups: condition_groups,
          serve: parse_serve(data["serve"]),
          segment_key: data["segmentKey"]
        )
      end

      def parse_condition_group(data)
        Models::ConditionGroup.new(
          operator: data["operator"] || "And",
          conditions: (data["conditions"] || []).map { |c| parse_condition(c) }
        )
      end

      def parse_condition(data)
        Models::Condition.new(
          attribute: data["attribute"],
          operator: data["operator"],
          values: data["values"],
          negate: data["negate"] || false
        )
      end

      def parse_serve(data)
        variations = if data["variations"]
          data["variations"].map { |v| Models::WeightedVariation.new(key: v["key"], weight: v["weight"]) }
        end

        Models::ServeConfig.new(
          type: data["type"],
          variation: data["variation"],
          bucket_by: data["bucketBy"],
          salt: data["salt"],
          variations: variations
        )
      end

      def parse_segment(data)
        Models::Segment.new(
          key: data["key"],
          version: data["version"],
          conditions: (data["conditions"] || []).map { |c| parse_condition(c) },
          condition_logic: data["conditionLogic"] || "And"
        )
      end
    end
  end
end
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
module Featureflip
  module Models
    # A single selectable flag value.
    Variation = Struct.new(:key, :value, keyword_init: true)

    # One slice of a rollout; +weight+ is a percentage share of traffic.
    WeightedVariation = Struct.new(:key, :weight, keyword_init: true)

    # One targeting predicate; +negate+ inverts the match result.
    # attribute, operator and values are required keywords.
    Condition = Struct.new(:attribute, :operator, :values, :negate, keyword_init: true) do
      def initialize(attribute:, operator:, values:, negate: false)
        super(attribute: attribute, operator: operator, values: values, negate: negate)
      end
    end

    # How a matched rule serves a value: a fixed variation, or a weighted
    # rollout bucketed by +bucket_by+ with the given +salt+.
    ServeConfig = Struct.new(:type, :variation, :bucket_by, :salt, :variations, keyword_init: true) do
      def initialize(type:, variation: nil, bucket_by: nil, salt: nil, variations: nil)
        super(type: type, variation: variation, bucket_by: bucket_by, salt: salt, variations: variations)
      end
    end

    # A group of conditions combined with +operator+ ("And" by default).
    ConditionGroup = Struct.new(:operator, :conditions, keyword_init: true) do
      def initialize(operator: "And", conditions: [])
        super(operator: operator, conditions: conditions)
      end
    end

    # An ordered targeting rule; may target a named segment instead of
    # carrying its own condition groups.
    TargetingRule = Struct.new(:id, :priority, :condition_groups, :serve, :segment_key, keyword_init: true) do
      def initialize(id:, priority:, condition_groups:, serve:, segment_key: nil)
        super(id: id, priority: priority, condition_groups: condition_groups, serve: serve, segment_key: segment_key)
      end
    end

    # Full configuration for one flag.
    FlagConfiguration = Struct.new(:key, :version, :type, :enabled, :variations, :rules, :fallthrough, :off_variation, keyword_init: true) do
      # Looks up a Variation by key; builds the lookup hash lazily on first use.
      def get_variation(key)
        @variations_by_key ||= variations.to_h { |v| [v.key, v] }
        @variations_by_key[key]
      end
    end
  end
end
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
module Featureflip
  module Store
    # Thread-safe in-memory cache of flags and segments, indexed by key.
    # Every accessor takes the same lock, so readers always observe a
    # consistent snapshot.
    class FlagStore
      def initialize
        @lock = Mutex.new
        @flags = {}
        @segments = {}
      end

      # Atomically replaces the entire store contents with the given
      # flag and segment collections.
      def init(flags, segments)
        @lock.synchronize do
          @flags = flags.each_with_object({}) { |flag, index| index[flag.key] = flag }
          @segments = segments.each_with_object({}) { |segment, index| index[segment.key] = segment }
        end
      end

      # Returns the flag stored under +key+, or nil.
      def get_flag(key)
        @lock.synchronize { @flags[key] }
      end

      # Returns the segment stored under +key+, or nil.
      def get_segment(key)
        @lock.synchronize { @segments[key] }
      end

      # Snapshot array of every stored flag.
      def all_flags
        @lock.synchronize { @flags.values }
      end

      # Stores +flag+ unless an entry with an equal-or-newer version is
      # already present (out-of-order updates are dropped).
      def upsert(flag)
        @lock.synchronize do
          current = @flags[flag.key]
          @flags[flag.key] = flag unless current && current.version >= flag.version
        end
      end

      # Deletes and returns the flag under +key+ (nil when absent).
      def remove_flag(key)
        @lock.synchronize { @flags.delete(key) }
      end
    end
  end
end
|
data/lib/featureflip.rb
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
require_relative "featureflip/version"
|
|
2
|
+
require_relative "featureflip/errors"
|
|
3
|
+
require_relative "featureflip/config"
|
|
4
|
+
require_relative "featureflip/models/flag"
|
|
5
|
+
require_relative "featureflip/models/segment"
|
|
6
|
+
require_relative "featureflip/models/evaluation_detail"
|
|
7
|
+
require_relative "featureflip/evaluation/bucketing"
|
|
8
|
+
require_relative "featureflip/evaluation/condition_evaluator"
|
|
9
|
+
require_relative "featureflip/evaluation/evaluator"
|
|
10
|
+
require_relative "featureflip/store/flag_store"
|
|
11
|
+
require_relative "featureflip/http/client"
|
|
12
|
+
require_relative "featureflip/events/event"
|
|
13
|
+
require_relative "featureflip/events/event_processor"
|
|
14
|
+
require_relative "featureflip/data_source/streaming"
|
|
15
|
+
require_relative "featureflip/data_source/polling"
|
|
16
|
+
require_relative "featureflip/client"
|
|
17
|
+
|
|
18
|
+
module Featureflip
  # Guards creation/replacement of the process-wide default client.
  @mutex = Mutex.new

  class << self
    # The shared client created by +configure+, or nil when unconfigured.
    attr_reader :default_client

    # Builds (or rebuilds) the global default client.
    #
    # Yields a Config for the caller to populate, validates it, then
    # constructs the client. The new client is built before the old one is
    # torn down, so a failed reconfigure leaves the previous client intact;
    # on success the previous client is closed rather than leaked.
    def configure
      @mutex.synchronize do
        config = Config.new
        yield config if block_given?
        config.validate!
        client = Client.new(sdk_key: config.sdk_key, config: config)
        @default_client&.close
        @config = config
        @default_client = client
      end
    end

    # Evaluates a boolean flag for +context+, falling back to
    # +default_value+. Raises Error when not configured.
    def bool_variation(key, context, default_value)
      active_client.bool_variation(key, context, default_value)
    end

    # Evaluates a string flag for +context+.
    def string_variation(key, context, default_value)
      active_client.string_variation(key, context, default_value)
    end

    # Evaluates a numeric flag for +context+.
    def number_variation(key, context, default_value)
      active_client.number_variation(key, context, default_value)
    end

    # Evaluates a JSON flag for +context+.
    def json_variation(key, context, default_value)
      active_client.json_variation(key, context, default_value)
    end

    # Evaluates a flag and returns the full evaluation detail.
    def variation_detail(key, context, default_value)
      active_client.variation_detail(key, context, default_value)
    end

    # Records a custom event against +context+ with optional metadata.
    def track(event_key, context, metadata = nil)
      active_client.track(event_key, context, metadata)
    end

    # Registers +context+ with the event pipeline.
    def identify(context)
      active_client.identify(context)
    end

    # Flushes any buffered analytics events.
    def flush
      active_client.flush
    end

    # Shuts down and discards the default client. Safe to call when no
    # client is configured.
    def close
      @mutex.synchronize do
        return unless @default_client
        @default_client.close
        @default_client = nil
      end
    end

    # Restarts the default client's data sources.
    def restart
      active_client.restart
    end

    private

    # Snapshots @default_client before use so a concurrent +close+ cannot
    # nil it between the configured-check and the delegated call (the old
    # check-then-dereference pattern raced with close).
    def active_client
      client = @default_client
      raise Error, "Featureflip not configured. Call Featureflip.configure first." unless client
      client
    end

    # Kept for backward compatibility with any internal callers.
    def ensure_configured!
      active_client
      nil
    end
  end
end
|
metadata
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: featureflip
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.1.0
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- Featureflip
|
|
8
|
+
autorequire:
|
|
9
|
+
bindir: bin
|
|
10
|
+
cert_chain: []
|
|
11
|
+
date: 2026-04-02 00:00:00.000000000 Z
|
|
12
|
+
dependencies:
|
|
13
|
+
- !ruby/object:Gem::Dependency
|
|
14
|
+
name: rspec
|
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
|
16
|
+
requirements:
|
|
17
|
+
- - "~>"
|
|
18
|
+
- !ruby/object:Gem::Version
|
|
19
|
+
version: '3.13'
|
|
20
|
+
type: :development
|
|
21
|
+
prerelease: false
|
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
23
|
+
requirements:
|
|
24
|
+
- - "~>"
|
|
25
|
+
- !ruby/object:Gem::Version
|
|
26
|
+
version: '3.13'
|
|
27
|
+
- !ruby/object:Gem::Dependency
|
|
28
|
+
name: webmock
|
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
|
30
|
+
requirements:
|
|
31
|
+
- - "~>"
|
|
32
|
+
- !ruby/object:Gem::Version
|
|
33
|
+
version: '3.23'
|
|
34
|
+
type: :development
|
|
35
|
+
prerelease: false
|
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
37
|
+
requirements:
|
|
38
|
+
- - "~>"
|
|
39
|
+
- !ruby/object:Gem::Version
|
|
40
|
+
version: '3.23'
|
|
41
|
+
- !ruby/object:Gem::Dependency
|
|
42
|
+
name: simplecov
|
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
|
44
|
+
requirements:
|
|
45
|
+
- - "~>"
|
|
46
|
+
- !ruby/object:Gem::Version
|
|
47
|
+
version: '0.22'
|
|
48
|
+
type: :development
|
|
49
|
+
prerelease: false
|
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
51
|
+
requirements:
|
|
52
|
+
- - "~>"
|
|
53
|
+
- !ruby/object:Gem::Version
|
|
54
|
+
version: '0.22'
|
|
55
|
+
description: Server-side SDK for evaluating feature flags with Featureflip
|
|
56
|
+
email:
|
|
57
|
+
executables: []
|
|
58
|
+
extensions: []
|
|
59
|
+
extra_rdoc_files: []
|
|
60
|
+
files:
|
|
61
|
+
- lib/featureflip.rb
|
|
62
|
+
- lib/featureflip/client.rb
|
|
63
|
+
- lib/featureflip/config.rb
|
|
64
|
+
- lib/featureflip/data_source/polling.rb
|
|
65
|
+
- lib/featureflip/data_source/streaming.rb
|
|
66
|
+
- lib/featureflip/errors.rb
|
|
67
|
+
- lib/featureflip/evaluation/bucketing.rb
|
|
68
|
+
- lib/featureflip/evaluation/condition_evaluator.rb
|
|
69
|
+
- lib/featureflip/evaluation/evaluator.rb
|
|
70
|
+
- lib/featureflip/events/event.rb
|
|
71
|
+
- lib/featureflip/events/event_processor.rb
|
|
72
|
+
- lib/featureflip/http/client.rb
|
|
73
|
+
- lib/featureflip/models/evaluation_detail.rb
|
|
74
|
+
- lib/featureflip/models/flag.rb
|
|
75
|
+
- lib/featureflip/models/segment.rb
|
|
76
|
+
- lib/featureflip/store/flag_store.rb
|
|
77
|
+
- lib/featureflip/version.rb
|
|
78
|
+
homepage: https://featureflip.io
|
|
79
|
+
licenses:
|
|
80
|
+
- Apache-2.0
|
|
81
|
+
metadata:
|
|
82
|
+
homepage_uri: https://featureflip.io
|
|
83
|
+
documentation_uri: https://featureflip.io/docs/sdks/ruby/
|
|
84
|
+
post_install_message:
|
|
85
|
+
rdoc_options: []
|
|
86
|
+
require_paths:
|
|
87
|
+
- lib
|
|
88
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
89
|
+
requirements:
|
|
90
|
+
- - ">="
|
|
91
|
+
- !ruby/object:Gem::Version
|
|
92
|
+
version: 3.2.0
|
|
93
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
94
|
+
requirements:
|
|
95
|
+
- - ">="
|
|
96
|
+
- !ruby/object:Gem::Version
|
|
97
|
+
version: '0'
|
|
98
|
+
requirements: []
|
|
99
|
+
rubygems_version: 3.5.22
|
|
100
|
+
signing_key:
|
|
101
|
+
specification_version: 4
|
|
102
|
+
summary: Featureflip feature flag SDK for Ruby
|
|
103
|
+
test_files: []
|