openfeature-provider-flagd 0.1.5__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- openfeature/.gitignore +2 -0
- openfeature/contrib/provider/flagd/config.py +214 -23
- openfeature/contrib/provider/flagd/provider.py +88 -12
- openfeature/contrib/provider/flagd/resolvers/__init__.py +1 -47
- openfeature/contrib/provider/flagd/resolvers/grpc.py +229 -17
- openfeature/contrib/provider/flagd/resolvers/in_process.py +40 -31
- openfeature/contrib/provider/flagd/resolvers/process/connector/__init__.py +11 -0
- openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py +107 -0
- openfeature/contrib/provider/flagd/resolvers/process/connector/grpc_watcher.py +218 -0
- openfeature/contrib/provider/flagd/resolvers/process/custom_ops.py +58 -19
- openfeature/contrib/provider/flagd/resolvers/process/flags.py +50 -6
- openfeature/contrib/provider/flagd/resolvers/process/targeting.py +35 -0
- openfeature/contrib/provider/flagd/resolvers/protocol.py +47 -0
- openfeature/contrib/provider/flagd/sync_metadata_hook.py +14 -0
- openfeature/schemas/protobuf/flagd/evaluation/v1/evaluation_pb2.py +72 -0
- openfeature/schemas/protobuf/flagd/evaluation/v1/evaluation_pb2.pyi +450 -0
- openfeature/schemas/protobuf/flagd/evaluation/v1/evaluation_pb2_grpc.py +358 -0
- openfeature/schemas/protobuf/flagd/evaluation/v1/evaluation_pb2_grpc.pyi +155 -0
- openfeature/schemas/protobuf/flagd/sync/v1/sync_pb2.py +50 -0
- openfeature/schemas/protobuf/flagd/sync/v1/sync_pb2.pyi +148 -0
- openfeature/schemas/protobuf/flagd/sync/v1/sync_pb2_grpc.py +186 -0
- openfeature/schemas/protobuf/flagd/sync/v1/sync_pb2_grpc.pyi +86 -0
- openfeature/schemas/protobuf/schema/v1/schema_pb2.py +72 -0
- openfeature/schemas/protobuf/schema/v1/schema_pb2.pyi +451 -0
- openfeature/schemas/protobuf/schema/v1/schema_pb2_grpc.py +358 -0
- openfeature/schemas/protobuf/schema/v1/schema_pb2_grpc.pyi +156 -0
- openfeature/schemas/protobuf/sync/v1/sync_service_pb2.py +47 -0
- openfeature/schemas/protobuf/sync/v1/sync_service_pb2.pyi +174 -0
- openfeature/schemas/protobuf/sync/v1/sync_service_pb2_grpc.py +143 -0
- openfeature/schemas/protobuf/sync/v1/sync_service_pb2_grpc.pyi +70 -0
- {openfeature_provider_flagd-0.1.5.dist-info → openfeature_provider_flagd-0.2.1.dist-info}/METADATA +116 -15
- openfeature_provider_flagd-0.2.1.dist-info/RECORD +36 -0
- {openfeature_provider_flagd-0.1.5.dist-info → openfeature_provider_flagd-0.2.1.dist-info}/WHEEL +1 -1
- {openfeature_provider_flagd-0.1.5.dist-info → openfeature_provider_flagd-0.2.1.dist-info}/licenses/LICENSE +1 -1
- openfeature/contrib/provider/flagd/proto/flagd/evaluation/v1/evaluation_pb2.py +0 -62
- openfeature/contrib/provider/flagd/proto/flagd/evaluation/v1/evaluation_pb2_grpc.py +0 -267
- openfeature/contrib/provider/flagd/proto/flagd/sync/v1/sync_pb2.py +0 -40
- openfeature/contrib/provider/flagd/proto/flagd/sync/v1/sync_pb2_grpc.py +0 -135
- openfeature/contrib/provider/flagd/proto/schema/v1/schema_pb2.py +0 -62
- openfeature/contrib/provider/flagd/proto/schema/v1/schema_pb2_grpc.py +0 -267
- openfeature/contrib/provider/flagd/proto/sync/v1/sync_service_pb2.py +0 -37
- openfeature/contrib/provider/flagd/proto/sync/v1/sync_service_pb2_grpc.py +0 -102
- openfeature/contrib/provider/flagd/resolvers/process/file_watcher.py +0 -89
- openfeature_provider_flagd-0.1.5.dist-info/RECORD +0 -22
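Beyond the regenerated protobuf stubs (moved from the vendored openfeature/contrib/provider/flagd/proto package to openfeature/schemas/protobuf), the hunks below show two main changes: the gRPC resolver gains provider events, reconnect handling, and an optional LRU cache, and the in-process resolver gains pluggable flag-state connectors (a file watcher and a gRPC sync watcher). As a rough orientation only, here is a sketch of a Config carrying the options those hunks read; the keyword names mirror the attributes accessed in the diff and are an assumption about the actual constructor in config.py, which is not shown in this section.

    # Sketch only: keyword names assumed from the attributes read below
    # (config.cache, config.max_cache_size, config.deadline_ms,
    # config.offline_flag_source_path, ...), not copied from config.py.
    from openfeature.contrib.provider.flagd.config import CacheType, Config

    config = Config(
        host="localhost",
        port=8013,
        deadline_ms=500,                 # also used as the blocking-init timeout
        cache=CacheType.LRU,             # enables the cachebox LRUCache in GrpcResolver
        max_cache_size=1000,
        offline_flag_source_path=None,   # a JSON/YAML path selects the FileWatcher connector
    )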
openfeature/contrib/provider/flagd/resolvers/grpc.py

@@ -1,36 +1,230 @@
+import logging
+import threading
+import time
 import typing
 
 import grpc
+from cachebox import BaseCacheImpl, LRUCache
+from google.protobuf.json_format import MessageToDict
 from google.protobuf.struct_pb2 import Struct
+from grpc import ChannelConnectivity
 
 from openfeature.evaluation_context import EvaluationContext
+from openfeature.event import ProviderEventDetails
 from openfeature.exception import (
+    ErrorCode,
     FlagNotFoundError,
     GeneralError,
     InvalidContextError,
     ParseError,
+    ProviderNotReadyError,
     TypeMismatchError,
 )
-from openfeature.flag_evaluation import FlagResolutionDetails
+from openfeature.flag_evaluation import FlagResolutionDetails, Reason
+from openfeature.schemas.protobuf.flagd.evaluation.v1 import (
+    evaluation_pb2,
+    evaluation_pb2_grpc,
+)
 
-from ..config import Config
+from ..config import CacheType, Config
 from ..flag_type import FlagType
-
+
+if typing.TYPE_CHECKING:
+    from google.protobuf.message import Message
 
 T = typing.TypeVar("T")
 
+logger = logging.getLogger("openfeature.contrib")
+
 
 class GrpcResolver:
-    def __init__(
+    def __init__(
+        self,
+        config: Config,
+        emit_provider_ready: typing.Callable[[ProviderEventDetails], None],
+        emit_provider_error: typing.Callable[[ProviderEventDetails], None],
+        emit_provider_stale: typing.Callable[[ProviderEventDetails], None],
+        emit_provider_configuration_changed: typing.Callable[
+            [ProviderEventDetails], None
+        ],
+    ):
+        self.active = False
         self.config = config
-
-
+        self.emit_provider_ready = emit_provider_ready
+        self.emit_provider_error = emit_provider_error
+        self.emit_provider_stale = emit_provider_stale
+        self.emit_provider_configuration_changed = emit_provider_configuration_changed
+        self.cache: typing.Optional[BaseCacheImpl] = (
+            LRUCache(maxsize=self.config.max_cache_size)
+            if self.config.cache == CacheType.LRU
+            else None
         )
-
-        self.
+
+        self.retry_grace_period = config.retry_grace_period
+        self.streamline_deadline_seconds = config.stream_deadline_ms * 0.001
+        self.deadline = config.deadline_ms * 0.001
+        self.connected = False
+        self.channel = self._generate_channel(config)
+        self.stub = evaluation_pb2_grpc.ServiceStub(self.channel)
+
+        self.thread: typing.Optional[threading.Thread] = None
+        self.timer: typing.Optional[threading.Timer] = None
+
+        self.start_time = time.time()
+
+    def _generate_channel(self, config: Config) -> grpc.Channel:
+        target = f"{config.host}:{config.port}"
+        # Create the channel with the service config
+        options = [
+            ("grpc.keepalive_time_ms", config.keep_alive_time),
+            ("grpc.initial_reconnect_backoff_ms", config.retry_backoff_ms),
+            ("grpc.max_reconnect_backoff_ms", config.retry_backoff_max_ms),
+            ("grpc.min_reconnect_backoff_ms", config.deadline_ms),
+        ]
+        if config.tls:
+            channel_args = {
+                "options": options,
+                "credentials": grpc.ssl_channel_credentials(),
+            }
+            if config.cert_path:
+                with open(config.cert_path, "rb") as f:
+                    channel_args["credentials"] = grpc.ssl_channel_credentials(f.read())
+
+            channel = grpc.secure_channel(target, **channel_args)
+
+        else:
+            channel = grpc.insecure_channel(
+                target,
+                options=options,
+            )
+
+        return channel
+
+    def initialize(self, evaluation_context: EvaluationContext) -> None:
+        self.connect()
 
     def shutdown(self) -> None:
+        self.active = False
+        self.channel.unsubscribe(self._state_change_callback)
         self.channel.close()
+        if self.timer and self.timer.is_alive():
+            logger.debug("gRPC error timer cancelled due to shutdown")
+            self.timer.cancel()
+        if self.cache:
+            self.cache.clear()
+
+    def connect(self) -> None:
+        self.active = True
+
+        # Run monitoring in a separate thread
+        self.monitor_thread = threading.Thread(
+            target=self.monitor, daemon=True, name="FlagdGrpcServiceMonitorThread"
+        )
+        self.monitor_thread.start()
+        ## block until ready or deadline reached
+        timeout = self.deadline + time.time()
+        while not self.connected and time.time() < timeout:
+            time.sleep(0.05)
+        logger.debug("Finished blocking gRPC state initialization")
+
+        if not self.connected:
+            raise ProviderNotReadyError(
+                "Blocking init finished before data synced. Consider increasing startup deadline to avoid inconsistent evaluations."
+            )
+
+    def monitor(self) -> None:
+        self.channel.subscribe(self._state_change_callback, try_to_connect=True)
+
+    def _state_change_callback(self, new_state: ChannelConnectivity) -> None:
+        logger.debug(f"gRPC state change: {new_state}")
+        if (
+            new_state == grpc.ChannelConnectivity.READY
+            or new_state == grpc.ChannelConnectivity.IDLE
+        ):
+            if not self.thread or not self.thread.is_alive():
+                self.thread = threading.Thread(
+                    target=self.listen,
+                    daemon=True,
+                    name="FlagdGrpcServiceWorkerThread",
+                )
+                self.thread.start()
+
+            if self.timer and self.timer.is_alive():
+                logger.debug("gRPC error timer expired")
+                self.timer.cancel()
+
+        elif new_state == ChannelConnectivity.TRANSIENT_FAILURE:
+            # this is the failed reconnect attempt so we are going into stale
+            self.emit_provider_stale(
+                ProviderEventDetails(
+                    message="gRPC sync disconnected, reconnecting",
+                )
+            )
+            self.start_time = time.time()
+            # adding a timer, so we can emit the error event after time
+            self.timer = threading.Timer(self.retry_grace_period, self.emit_error)
+
+            logger.debug("gRPC error timer started")
+            self.timer.start()
+            self.connected = False
+
+    def emit_error(self) -> None:
+        logger.debug("gRPC error emitted")
+        if self.cache:
+            self.cache.clear()
+        self.emit_provider_error(
+            ProviderEventDetails(
+                message="gRPC sync disconnected, reconnecting",
+                error_code=ErrorCode.GENERAL,
+            )
+        )
+
+    def listen(self) -> None:
+        logger.debug("gRPC starting listener thread")
+        call_args = (
+            {"timeout": self.streamline_deadline_seconds}
+            if self.streamline_deadline_seconds > 0
+            else {}
+        )
+        request = evaluation_pb2.EventStreamRequest()
+
+        # defining a never ending loop to recreate the stream
+        while self.active:
+            try:
+                logger.debug("Setting up gRPC sync flags connection")
+                for message in self.stub.EventStream(
+                    request, wait_for_ready=True, **call_args
+                ):
+                    if message.type == "provider_ready":
+                        self.emit_provider_ready(
+                            ProviderEventDetails(
+                                message="gRPC sync connection established"
+                            )
+                        )
+                        self.connected = True
+                    elif message.type == "configuration_change":
+                        data = MessageToDict(message)["data"]
+                        self.handle_changed_flags(data)
+
+                    if not self.active:
+                        logger.info("Terminating gRPC sync thread")
+                        return
+            except grpc.RpcError as e:  # noqa: PERF203
+                # although it seems like this error log is not interesting, without it, the retry is not working as expected
+                logger.debug(f"SyncFlags stream error, {e.code()=} {e.details()=}")
+            except ParseError:
+                logger.exception(
+                    f"Could not parse flag data using flagd syntax: {message=}"
+                )
+
+    def handle_changed_flags(self, data: typing.Any) -> None:
+        changed_flags = list(data["flags"].keys())
+
+        if self.cache:
+            for flag in changed_flags:
+                self.cache.pop(flag)
+
+        self.emit_provider_configuration_changed(ProviderEventDetails(changed_flags))
 
     def resolve_boolean_details(
         self,
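The constructor above now receives the provider's event emitters as plain callables instead of holding a provider reference. A minimal sketch of wiring them up, assuming only the signature shown in this hunk, with config being a Config instance like the one sketched earlier and GrpcResolver imported from this module (the real wiring lives in provider.py, whose diff is not included in this section):

    from openfeature.event import ProviderEventDetails

    def on_ready(details: ProviderEventDetails) -> None:
        print("flagd ready:", details.message)

    def on_error(details: ProviderEventDetails) -> None:
        print("flagd error:", details.message)

    resolver = GrpcResolver(
        config,
        emit_provider_ready=on_ready,
        emit_provider_error=on_error,
        emit_provider_stale=lambda details: print("flagd stale:", details.message),
        emit_provider_configuration_changed=lambda details: print("flags changed:", details),
    )
    # resolver.initialize(EvaluationContext())  # blocks until the sync stream is ready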
@@ -72,41 +266,54 @@ class GrpcResolver:
     ) -> FlagResolutionDetails[typing.Union[dict, list]]:
         return self._resolve(key, FlagType.OBJECT, default_value, evaluation_context)
 
-    def _resolve(
+    def _resolve(  # noqa: PLR0915 C901
         self,
         flag_key: str,
         flag_type: FlagType,
         default_value: T,
         evaluation_context: typing.Optional[EvaluationContext],
     ) -> FlagResolutionDetails[T]:
+        if self.cache is not None and flag_key in self.cache:
+            cached_flag: FlagResolutionDetails[T] = self.cache[flag_key]
+            cached_flag.reason = Reason.CACHED
+            return cached_flag
+
         context = self._convert_context(evaluation_context)
-        call_args = {"timeout": self.
+        call_args = {"timeout": self.deadline, "wait_for_ready": True}
         try:
+            request: Message
             if flag_type == FlagType.BOOLEAN:
-                request =
+                request = evaluation_pb2.ResolveBooleanRequest(
                     flag_key=flag_key, context=context
                 )
                 response = self.stub.ResolveBoolean(request, **call_args)
+                value = response.value
             elif flag_type == FlagType.STRING:
-                request =
+                request = evaluation_pb2.ResolveStringRequest(
                     flag_key=flag_key, context=context
                 )
                 response = self.stub.ResolveString(request, **call_args)
+                value = response.value
             elif flag_type == FlagType.OBJECT:
-                request =
+                request = evaluation_pb2.ResolveObjectRequest(
                     flag_key=flag_key, context=context
                 )
                 response = self.stub.ResolveObject(request, **call_args)
+                value = MessageToDict(response, preserving_proto_field_name=True)[
+                    "value"
+                ]
             elif flag_type == FlagType.FLOAT:
-                request =
+                request = evaluation_pb2.ResolveFloatRequest(
                     flag_key=flag_key, context=context
                 )
                 response = self.stub.ResolveFloat(request, **call_args)
+                value = response.value
             elif flag_type == FlagType.INTEGER:
-                request =
+                request = evaluation_pb2.ResolveIntRequest(
                     flag_key=flag_key, context=context
                 )
                 response = self.stub.ResolveInt(request, **call_args)
+                value = response.value
             else:
                 raise ValueError(f"Unknown flag type: {flag_type}")
 
@@ -123,12 +330,17 @@ class GrpcResolver:
             raise GeneralError(message) from e
 
         # Got a valid flag and valid type. Return it.
-
-            value=
+        result = FlagResolutionDetails(
+            value=value,
             reason=response.reason,
             variant=response.variant,
         )
 
+        if response.reason == Reason.STATIC and self.cache is not None:
+            self.cache.insert(flag_key, result)
+
+        return result
+
     def _convert_context(
         self, evaluation_context: typing.Optional[EvaluationContext]
     ) -> Struct:
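In the resolve path above, results whose reason is STATIC are inserted into the cache and served with Reason.CACHED on later lookups, until a configuration_change event evicts the affected keys or an error clears the whole cache. The snippet below only illustrates the cachebox calls the resolver relies on, detached from any gRPC traffic; the flag key and cached value are placeholders:

    from cachebox import LRUCache

    cache = LRUCache(maxsize=16)
    cache.insert("bool-flag", "<FlagResolutionDetails for bool-flag>")

    if "bool-flag" in cache:       # cache hit: stored details are returned as CACHED
        details = cache["bool-flag"]

    cache.pop("bool-flag")         # what handle_changed_flags does per changed key
    cache.clear()                  # what emit_error and shutdown do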
openfeature/contrib/provider/flagd/resolvers/in_process.py

@@ -1,44 +1,54 @@
-import time
 import typing
 
-from 
-
+from openfeature.contrib.provider.flagd.resolvers.process.connector.file_watcher import (
+    FileWatcher,
+)
 from openfeature.evaluation_context import EvaluationContext
+from openfeature.event import ProviderEventDetails
 from openfeature.exception import FlagNotFoundError, ParseError
 from openfeature.flag_evaluation import FlagResolutionDetails, Reason
-from openfeature.provider.provider import AbstractProvider
 
 from ..config import Config
-from .process.
-from .process.
+from .process.connector import FlagStateConnector
+from .process.connector.grpc_watcher import GrpcWatcher
+from .process.flags import FlagStore
+from .process.targeting import targeting
 
 T = typing.TypeVar("T")
 
 
 class InProcessResolver:
-
-
-
-
-
-
-
-
-
+    def __init__(
+        self,
+        config: Config,
+        emit_provider_ready: typing.Callable[[ProviderEventDetails, dict], None],
+        emit_provider_error: typing.Callable[[ProviderEventDetails], None],
+        emit_provider_stale: typing.Callable[[ProviderEventDetails], None],
+        emit_provider_configuration_changed: typing.Callable[
+            [ProviderEventDetails], None
+        ],
+    ):
         self.config = config
-        self.
-
-
-
+        self.flag_store = FlagStore(emit_provider_configuration_changed)
+        self.connector: FlagStateConnector = (
+            FileWatcher(
+                self.config, self.flag_store, emit_provider_ready, emit_provider_error
+            )
+            if self.config.offline_flag_source_path
+            else GrpcWatcher(
+                self.config,
+                self.flag_store,
+                emit_provider_ready,
+                emit_provider_error,
+                emit_provider_stale,
            )
-        self.flag_store = FileWatcherFlagStore(
-            self.config.offline_flag_source_path,
-            self.provider,
-            self.config.offline_poll_interval_seconds,
         )
 
+    def initialize(self, evaluation_context: EvaluationContext) -> None:
+        self.connector.initialize(evaluation_context)
+
     def shutdown(self) -> None:
-        self.
+        self.connector.shutdown()
 
     def resolve_boolean_details(
         self,
@@ -62,7 +72,10 @@ class InProcessResolver:
         default_value: float,
         evaluation_context: typing.Optional[EvaluationContext] = None,
     ) -> FlagResolutionDetails[float]:
-
+        result = self._resolve(key, default_value, evaluation_context)
+        if isinstance(result.value, int):
+            result.value = float(result.value)
+        return result
 
     def resolve_integer_details(
         self,
@@ -97,12 +110,8 @@ class InProcessResolver:
             variant, value = flag.default
             return FlagResolutionDetails(value, variant=variant, reason=Reason.STATIC)
 
-
-
-        json_logic_context["targetingKey"] = (
-            evaluation_context.targeting_key if evaluation_context else None
-        )
-        variant = jsonLogic(flag.targeting, json_logic_context, self.OPERATORS)
+        variant = targeting(flag.key, flag.targeting, evaluation_context)
+
         if variant is None:
             variant, value = flag.default
         return FlagResolutionDetails(value, variant=variant, reason=Reason.DEFAULT)
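The inline jsonLogic call is replaced by the shared targeting() helper from the new process/targeting.py (+35 lines, not shown in this section), which presumably builds the rule context, including the targeting key that the removed lines set up here. For reference, a flagd targeting rule is a JSON Logic expression that resolves to a variant name; a small illustrative rule of the kind such a helper evaluates (example data only, not taken from this package):

    # Evaluates to "on" when the context's environment equals "production",
    # otherwise to "off"; the attribute and variant names are made up.
    rule = {
        "if": [
            {"==": [{"var": "environment"}, "production"]},
            "on",
            "off",
        ]
    }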
openfeature/contrib/provider/flagd/resolvers/process/connector/__init__.py

@@ -0,0 +1,11 @@
+import typing
+
+from openfeature.evaluation_context import EvaluationContext
+
+
+class FlagStateConnector(typing.Protocol):
+    def initialize(
+        self, evaluation_context: EvaluationContext
+    ) -> None: ...  # pragma: no cover
+
+    def shutdown(self) -> None: ...  # pragma: no cover
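Because FlagStateConnector is a typing.Protocol, anything with matching initialize and shutdown methods satisfies it; FileWatcher and GrpcWatcher are the two implementations this release wires up in InProcessResolver. A hypothetical connector, shown only to illustrate the shape of the protocol (StaticConnector is not part of the package):

    from openfeature.evaluation_context import EvaluationContext


    class StaticConnector:
        # Pushes a fixed flag payload into the FlagStore once, then does nothing.
        def __init__(self, flag_store, payload: dict):
            self.flag_store = flag_store
            self.payload = payload

        def initialize(self, evaluation_context: EvaluationContext) -> None:
            # Same call FileWatcher makes after parsing the file contents.
            self.flag_store.update(self.payload)

        def shutdown(self) -> None:
            pass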
openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py

@@ -0,0 +1,107 @@
+import json
+import logging
+import os
+import threading
+import time
+import typing
+
+import yaml
+
+from openfeature.contrib.provider.flagd.config import Config
+from openfeature.contrib.provider.flagd.resolvers.process.connector import (
+    FlagStateConnector,
+)
+from openfeature.contrib.provider.flagd.resolvers.process.flags import FlagStore
+from openfeature.evaluation_context import EvaluationContext
+from openfeature.event import ProviderEventDetails
+from openfeature.exception import ParseError, ProviderNotReadyError
+
+logger = logging.getLogger("openfeature.contrib")
+
+
+class FileWatcher(FlagStateConnector):
+    def __init__(
+        self,
+        config: Config,
+        flag_store: FlagStore,
+        emit_provider_ready: typing.Callable[[ProviderEventDetails, dict], None],
+        emit_provider_error: typing.Callable[[ProviderEventDetails], None],
+    ):
+        if config.offline_flag_source_path is None:
+            raise ValueError(
+                f"`config.offline_flag_source_path` parameter invalid: {config.offline_flag_source_path}"
+            )
+        else:
+            self.file_path = config.offline_flag_source_path
+
+        self.emit_provider_ready = emit_provider_ready
+        self.emit_provider_error = emit_provider_error
+        self.deadline_seconds = config.deadline_ms * 0.001
+
+        self.last_modified = 0.0
+        self.flag_store = flag_store
+        self.should_emit_ready_on_success = False
+
+    def initialize(self, evaluation_context: EvaluationContext) -> None:
+        self.active = True
+        self.thread = threading.Thread(
+            target=self.refresh_file, daemon=True, name="FlagdFileWatcherWorkerThread"
+        )
+        self.thread.start()
+
+        # Let this throw exceptions so that provider status is set correctly
+        try:
+            self.should_emit_ready_on_success = True
+            self._load_data()
+        except Exception as err:
+            raise ProviderNotReadyError from err
+
+    def shutdown(self) -> None:
+        self.active = False
+
+    def refresh_file(self) -> None:
+        while self.active:
+            time.sleep(self.deadline_seconds)
+            logger.debug("checking for new flag store contents from file")
+            self.safe_load_data()
+
+    def safe_load_data(self) -> None:
+        try:
+            last_modified = os.path.getmtime(self.file_path)
+            if last_modified > self.last_modified:
+                self._load_data(last_modified)
+        except FileNotFoundError:
+            self.handle_error("Provided file path not valid")
+        except json.JSONDecodeError:
+            self.handle_error("Could not parse JSON flag data from file")
+        except yaml.error.YAMLError:
+            self.handle_error("Could not parse YAML flag data from file")
+        except ParseError:
+            self.handle_error("Could not parse flag data using flagd syntax")
+        except Exception:
+            self.handle_error("Could not read flags from file")
+
+    def _load_data(self, modified_time: typing.Optional[float] = None) -> None:
+        with open(self.file_path) as file:
+            if self.file_path.endswith(".yaml"):
+                data = yaml.safe_load(file)
+            else:
+                data = json.load(file)
+
+            self.flag_store.update(data)
+
+            if self.should_emit_ready_on_success:
+                self.emit_provider_ready(
+                    ProviderEventDetails(
+                        message="Reloading file contents recovered from error state"
+                    ),
+                    {},
+                )
+                self.should_emit_ready_on_success = False
+
+        self.last_modified = modified_time or os.path.getmtime(self.file_path)
+
+    def handle_error(self, error_message: str) -> None:
+        logger.exception(error_message)
+        self.should_emit_ready_on_success = True
+        self.emit_provider_error(ProviderEventDetails(message=error_message))
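FileWatcher polls the file's modification time every deadline_ms (converted to seconds) and reloads only when it advances, parsing YAML for .yaml paths and JSON otherwise; a failed load emits a provider error and the next successful load emits ready again. A minimal sketch of a flag-definition file it could watch, following flagd's flag schema (the file name and flag key are illustrative, not taken from this package):

    # Write a tiny flagd-format flag file and point the offline source path at it.
    import json

    flags = {
        "flags": {
            "welcome-banner": {
                "state": "ENABLED",
                "variants": {"on": True, "off": False},
                "defaultVariant": "off",
            }
        }
    }

    with open("demo-flags.json", "w") as f:
        json.dump(flags, f)

    # config = Config(offline_flag_source_path="demo-flags.json", ...)  # see the sketch near the top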