posthoganalytics 6.7.0__py3-none-any.whl → 7.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- posthoganalytics/__init__.py +84 -7
- posthoganalytics/ai/anthropic/__init__.py +10 -0
- posthoganalytics/ai/anthropic/anthropic.py +95 -65
- posthoganalytics/ai/anthropic/anthropic_async.py +95 -65
- posthoganalytics/ai/anthropic/anthropic_converter.py +443 -0
- posthoganalytics/ai/gemini/__init__.py +15 -1
- posthoganalytics/ai/gemini/gemini.py +66 -71
- posthoganalytics/ai/gemini/gemini_async.py +423 -0
- posthoganalytics/ai/gemini/gemini_converter.py +652 -0
- posthoganalytics/ai/langchain/callbacks.py +58 -13
- posthoganalytics/ai/openai/__init__.py +16 -1
- posthoganalytics/ai/openai/openai.py +140 -149
- posthoganalytics/ai/openai/openai_async.py +127 -82
- posthoganalytics/ai/openai/openai_converter.py +741 -0
- posthoganalytics/ai/sanitization.py +248 -0
- posthoganalytics/ai/types.py +125 -0
- posthoganalytics/ai/utils.py +339 -356
- posthoganalytics/client.py +345 -97
- posthoganalytics/contexts.py +81 -0
- posthoganalytics/exception_utils.py +250 -2
- posthoganalytics/feature_flags.py +26 -10
- posthoganalytics/flag_definition_cache.py +127 -0
- posthoganalytics/integrations/django.py +157 -19
- posthoganalytics/request.py +203 -23
- posthoganalytics/test/test_client.py +250 -22
- posthoganalytics/test/test_exception_capture.py +418 -0
- posthoganalytics/test/test_feature_flag_result.py +441 -2
- posthoganalytics/test/test_feature_flags.py +308 -104
- posthoganalytics/test/test_flag_definition_cache.py +612 -0
- posthoganalytics/test/test_module.py +0 -8
- posthoganalytics/test/test_request.py +536 -0
- posthoganalytics/test/test_utils.py +4 -1
- posthoganalytics/types.py +40 -0
- posthoganalytics/version.py +1 -1
- {posthoganalytics-6.7.0.dist-info → posthoganalytics-7.4.3.dist-info}/METADATA +12 -12
- posthoganalytics-7.4.3.dist-info/RECORD +57 -0
- posthoganalytics-6.7.0.dist-info/RECORD +0 -49
- {posthoganalytics-6.7.0.dist-info → posthoganalytics-7.4.3.dist-info}/WHEEL +0 -0
- {posthoganalytics-6.7.0.dist-info → posthoganalytics-7.4.3.dist-info}/licenses/LICENSE +0 -0
- {posthoganalytics-6.7.0.dist-info → posthoganalytics-7.4.3.dist-info}/top_level.txt +0 -0
posthoganalytics/contexts.py
CHANGED
|
@@ -22,6 +22,9 @@ class ContextScope:
|
|
|
22
22
|
self.session_id: Optional[str] = None
|
|
23
23
|
self.distinct_id: Optional[str] = None
|
|
24
24
|
self.tags: Dict[str, Any] = {}
|
|
25
|
+
self.capture_exception_code_variables: Optional[bool] = None
|
|
26
|
+
self.code_variables_mask_patterns: Optional[list] = None
|
|
27
|
+
self.code_variables_ignore_patterns: Optional[list] = None
|
|
25
28
|
|
|
26
29
|
def set_session_id(self, session_id: str):
|
|
27
30
|
self.session_id = session_id
|
|
@@ -32,6 +35,15 @@ class ContextScope:
|
|
|
32
35
|
def add_tag(self, key: str, value: Any):
|
|
33
36
|
self.tags[key] = value
|
|
34
37
|
|
|
38
|
+
def set_capture_exception_code_variables(self, enabled: bool):
    """Explicitly enable or disable code-variable capture for this scope."""
    self.capture_exception_code_variables = enabled
|
|
40
|
+
|
|
41
|
+
def set_code_variables_mask_patterns(self, mask_patterns: list):
    """Set the regex patterns used to mask variable names/values for this scope."""
    self.code_variables_mask_patterns = mask_patterns
|
|
43
|
+
|
|
44
|
+
def set_code_variables_ignore_patterns(self, ignore_patterns: list):
    """Set the regex patterns for variable names to skip entirely for this scope."""
    self.code_variables_ignore_patterns = ignore_patterns
|
|
46
|
+
|
|
35
47
|
def get_parent(self):
|
|
36
48
|
return self.parent
|
|
37
49
|
|
|
@@ -59,6 +71,27 @@ class ContextScope:
|
|
|
59
71
|
tags.update(new_tags)
|
|
60
72
|
return tags
|
|
61
73
|
|
|
74
|
+
def get_capture_exception_code_variables(self) -> Optional[bool]:
    """
    Resolve the capture-code-variables setting for this scope.

    A value set directly on this scope wins; otherwise the parent chain is
    consulted, unless this scope was created as "fresh" (isolated from its
    parent). Returns None when nothing in the chain set a value.
    """
    if self.capture_exception_code_variables is not None:
        return self.capture_exception_code_variables
    if self.parent is not None and not self.fresh:
        # Inherit from the enclosing scope when not explicitly set here.
        return self.parent.get_capture_exception_code_variables()
    return None
|
|
80
|
+
|
|
81
|
+
def get_code_variables_mask_patterns(self) -> Optional[list]:
    """
    Resolve the mask patterns for this scope, falling back to the parent
    chain unless the scope is "fresh". Returns None when unset everywhere.
    """
    if self.code_variables_mask_patterns is not None:
        return self.code_variables_mask_patterns
    if self.parent is not None and not self.fresh:
        # Inherit from the enclosing scope when not explicitly set here.
        return self.parent.get_code_variables_mask_patterns()
    return None
|
|
87
|
+
|
|
88
|
+
def get_code_variables_ignore_patterns(self) -> Optional[list]:
    """
    Resolve the ignore patterns for this scope, falling back to the parent
    chain unless the scope is "fresh". Returns None when unset everywhere.
    """
    if self.code_variables_ignore_patterns is not None:
        return self.code_variables_ignore_patterns
    if self.parent is not None and not self.fresh:
        # Inherit from the enclosing scope when not explicitly set here.
        return self.parent.get_code_variables_ignore_patterns()
    return None
|
|
94
|
+
|
|
62
95
|
|
|
63
96
|
_context_stack: contextvars.ContextVar[Optional[ContextScope]] = contextvars.ContextVar(
|
|
64
97
|
"posthog_context_stack", default=None
|
|
@@ -243,6 +276,54 @@ def get_context_distinct_id() -> Optional[str]:
|
|
|
243
276
|
return None
|
|
244
277
|
|
|
245
278
|
|
|
279
|
+
def set_capture_exception_code_variables_context(enabled: bool) -> None:
    """
    Set whether code variables are captured for the current context.

    No-op when there is no active context.
    """
    scope = _get_current_context()
    if not scope:
        return
    scope.set_capture_exception_code_variables(enabled)
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def set_code_variables_mask_patterns_context(mask_patterns: list) -> None:
    """
    Set mask patterns on the current context: variable names matching any of
    these regexes will be redacted when capturing code variables.

    No-op when there is no active context.
    """
    scope = _get_current_context()
    if not scope:
        return
    scope.set_code_variables_mask_patterns(mask_patterns)
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def set_code_variables_ignore_patterns_context(ignore_patterns: list) -> None:
    """
    Set ignore patterns on the current context: variable names matching any
    of these regexes will be omitted entirely when capturing code variables.

    No-op when there is no active context.
    """
    scope = _get_current_context()
    if not scope:
        return
    scope.set_code_variables_ignore_patterns(ignore_patterns)
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
def get_capture_exception_code_variables_context() -> Optional[bool]:
    """Return the capture-code-variables setting of the active context, if any."""
    scope = _get_current_context()
    if not scope:
        return None
    return scope.get_capture_exception_code_variables()
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def get_code_variables_mask_patterns_context() -> Optional[list]:
    """Return the mask patterns of the active context, if any."""
    scope = _get_current_context()
    if not scope:
        return None
    return scope.get_code_variables_mask_patterns()
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
def get_code_variables_ignore_patterns_context() -> Optional[list]:
    """Return the ignore patterns of the active context, if any."""
    scope = _get_current_context()
    if not scope:
        return None
    return scope.get_code_variables_ignore_patterns()
|
|
325
|
+
|
|
326
|
+
|
|
246
327
|
F = TypeVar("F", bound=Callable[..., Any])
|
|
247
328
|
|
|
248
329
|
|
|
@@ -5,6 +5,7 @@
|
|
|
5
5
|
# 💖open source (under MIT License)
|
|
6
6
|
# We want to keep payloads as similar to Sentry as possible for easy interoperability
|
|
7
7
|
|
|
8
|
+
import json
|
|
8
9
|
import linecache
|
|
9
10
|
import os
|
|
10
11
|
import re
|
|
@@ -13,22 +14,23 @@ import types
|
|
|
13
14
|
from datetime import datetime
|
|
14
15
|
from types import FrameType, TracebackType # noqa: F401
|
|
15
16
|
from typing import ( # noqa: F401
|
|
17
|
+
TYPE_CHECKING,
|
|
16
18
|
Any,
|
|
17
19
|
Dict,
|
|
18
20
|
Iterator,
|
|
19
21
|
List,
|
|
20
22
|
Literal,
|
|
21
23
|
Optional,
|
|
24
|
+
Pattern,
|
|
22
25
|
Set,
|
|
23
26
|
Tuple,
|
|
24
27
|
TypedDict,
|
|
25
28
|
TypeVar,
|
|
26
29
|
Union,
|
|
27
30
|
cast,
|
|
28
|
-
TYPE_CHECKING,
|
|
29
31
|
)
|
|
30
32
|
|
|
31
|
-
from posthoganalytics.args import
|
|
33
|
+
from posthoganalytics.args import ExceptionArg, ExcInfo # noqa: F401
|
|
32
34
|
|
|
33
35
|
try:
|
|
34
36
|
# Python 3.11
|
|
@@ -40,6 +42,46 @@ except ImportError:
|
|
|
40
42
|
|
|
41
43
|
DEFAULT_MAX_VALUE_LENGTH = 1024
|
|
42
44
|
|
|
45
|
+
# Default variable-name/value patterns that get redacted when capturing
# frame-local variables. All are case-insensitive ((?i)) and match anywhere
# in the name, covering common credential/secret naming conventions.
DEFAULT_CODE_VARIABLES_MASK_PATTERNS = [
    r"(?i).*password.*",
    r"(?i).*secret.*",
    r"(?i).*passwd.*",
    r"(?i).*pwd.*",
    r"(?i).*api_key.*",
    r"(?i).*apikey.*",
    r"(?i).*auth.*",
    r"(?i).*credentials.*",
    r"(?i).*privatekey.*",
    r"(?i).*private_key.*",
    r"(?i).*token.*",
    r"(?i).*aws_access_key_id.*",
    r"(?i).*_pass",
    r"(?i)sk_.*",
    r"(?i).*jwt.*",
]

# Variables whose names match these patterns are dropped entirely
# (by default: dunder names such as __name__, __builtins__).
DEFAULT_CODE_VARIABLES_IGNORE_PATTERNS = [r"^__.*"]

# Sentinel written in place of masked values.
CODE_VARIABLES_REDACTED_VALUE = "$$_posthog_redacted_based_on_masking_rules_$$"
|
|
66
|
+
|
|
67
|
+
# Cap on the combined serialized size of all captured variables (shared across
# every frame/exception processed by one capture pass).
DEFAULT_TOTAL_VARIABLES_SIZE_LIMIT = 20 * 1024


class VariableSizeLimiter:
    """Running budget tracker for the total size of serialized code variables."""

    def __init__(self, max_size=DEFAULT_TOTAL_VARIABLES_SIZE_LIMIT):
        self.max_size = max_size
        self.current_size = 0

    def can_add(self, size):
        """Return True when `size` more units still fit within the budget."""
        return size <= self.max_size - self.current_size

    def add(self, size):
        """Consume `size` units of the budget."""
        self.current_size += size

    def get_remaining_space(self):
        """Return how much of the budget is still unused."""
        return self.max_size - self.current_size
|
|
83
|
+
|
|
84
|
+
|
|
43
85
|
LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"]
|
|
44
86
|
|
|
45
87
|
Event = TypedDict(
|
|
@@ -884,3 +926,209 @@ def strip_string(value, max_length=None):
|
|
|
884
926
|
"rem": [["!limit", "x", max_length - 3, max_length]],
|
|
885
927
|
},
|
|
886
928
|
)
|
|
929
|
+
|
|
930
|
+
|
|
931
|
+
def _compile_patterns(patterns):
|
|
932
|
+
compiled = []
|
|
933
|
+
for pattern in patterns:
|
|
934
|
+
try:
|
|
935
|
+
compiled.append(re.compile(pattern))
|
|
936
|
+
except Exception:
|
|
937
|
+
pass
|
|
938
|
+
return compiled
|
|
939
|
+
|
|
940
|
+
|
|
941
|
+
def _pattern_matches(name, patterns):
|
|
942
|
+
for pattern in patterns:
|
|
943
|
+
if pattern.search(name):
|
|
944
|
+
return True
|
|
945
|
+
return False
|
|
946
|
+
|
|
947
|
+
|
|
948
|
+
def _mask_sensitive_data(value, compiled_mask):
    """
    Recursively redact sensitive content in `value`.

    Dict entries whose key matches a mask pattern are replaced wholesale by
    the redaction marker; other entries, and list/tuple items, are masked
    recursively. A string whose content matches a pattern is redacted.
    Anything else is returned unchanged. With no patterns, `value` is
    returned as-is.
    """
    if not compiled_mask:
        return value

    if isinstance(value, dict):
        result = {}
        for k, v in value.items():
            key_str = str(k) if not isinstance(k, str) else k
            if _pattern_matches(key_str, compiled_mask):
                result[k] = CODE_VARIABLES_REDACTED_VALUE
            else:
                result[k] = _mask_sensitive_data(v, compiled_mask)
        return result
    elif isinstance(value, (list, tuple)):
        masked_items = [_mask_sensitive_data(item, compiled_mask) for item in value]
        try:
            # list, tuple, and most subclasses accept a single iterable.
            return type(value)(masked_items)
        except TypeError:
            # Fix: namedtuple subclasses take positional fields, not an
            # iterable. Previously this raised, sending the caller to its
            # repr() fallback and leaking the UNMASKED values.
            return type(value)(*masked_items)
    elif isinstance(value, str):
        if _pattern_matches(value, compiled_mask):
            return CODE_VARIABLES_REDACTED_VALUE
        return value
    else:
        return value
|
|
970
|
+
|
|
971
|
+
|
|
972
|
+
def _serialize_variable_value(value, limiter, max_length=1024, compiled_mask=None):
    """
    Serialize a single frame-local value, charging its size to `limiter`.

    Numbers come back as-is (only their string size is charged); everything
    else becomes a string, truncated to `max_length` with a "..." suffix.
    Returns None when the remaining budget cannot fit the value — callers
    treat that as the signal to stop capturing.
    """
    try:
        if value is None:
            result = "None"
        elif isinstance(value, bool):
            # bool is checked before int/float because bool is an int subclass.
            result = str(value)
        elif isinstance(value, (int, float)):
            # Numeric values keep their native type rather than being stringified.
            result_size = len(str(value))
            if not limiter.can_add(result_size):
                return None
            limiter.add(result_size)
            return value
        elif isinstance(value, str):
            if compiled_mask and _pattern_matches(value, compiled_mask):
                result = CODE_VARIABLES_REDACTED_VALUE
            else:
                result = value
        else:
            # Containers/objects: redact nested sensitive data, then JSON-encode.
            masked_value = _mask_sensitive_data(value, compiled_mask)
            result = json.dumps(masked_value)

        if len(result) > max_length:
            result = result[: max_length - 3] + "..."

        result_size = len(result)
        if not limiter.can_add(result_size):
            return None
        limiter.add(result_size)

        return result
    except Exception:
        # Serialization failed (e.g. not JSON-encodable): fall back to repr(),
        # then to a type-name placeholder, then to a fixed marker — charging
        # the budget at whichever step succeeds.
        try:
            result = repr(value)
            if len(result) > max_length:
                result = result[: max_length - 3] + "..."

            result_size = len(result)
            if not limiter.can_add(result_size):
                return None
            limiter.add(result_size)
            return result
        except Exception:
            try:
                fallback = f"<{type(value).__name__}>"
                fallback_size = len(fallback)
                if not limiter.can_add(fallback_size):
                    return None
                limiter.add(fallback_size)
                return fallback
            except Exception:
                fallback = "<unserializable object>"
                fallback_size = len(fallback)
                if not limiter.can_add(fallback_size):
                    return None
                limiter.add(fallback_size)
                return fallback
|
|
1028
|
+
|
|
1029
|
+
|
|
1030
|
+
def _is_simple_type(value):
|
|
1031
|
+
return isinstance(value, (type(None), bool, int, float, str))
|
|
1032
|
+
|
|
1033
|
+
|
|
1034
|
+
def serialize_code_variables(
    frame, limiter, mask_patterns=None, ignore_patterns=None, max_length=1024
):
    """
    Capture and serialize the local variables of `frame`.

    Names matching `ignore_patterns` are skipped; names matching
    `mask_patterns` are replaced with the redaction marker. Simple scalars
    are emitted before complex values (each group sorted by name), so cheap
    values get first claim on the shared size budget in `limiter`. Capture
    stops at the first value the budget cannot fit. Returns a dict of
    variable name -> serialized value (possibly empty).
    """
    if mask_patterns is None:
        mask_patterns = []
    if ignore_patterns is None:
        ignore_patterns = []

    compiled_mask = _compile_patterns(mask_patterns)
    compiled_ignore = _compile_patterns(ignore_patterns)

    try:
        # Copy the mapping; f_locals access is guarded since it can fail for
        # some frame objects — presumably why this is best-effort.
        local_vars = frame.f_locals.copy()
    except Exception:
        return {}

    simple_vars = {}
    complex_vars = {}

    for name, value in local_vars.items():
        if _pattern_matches(name, compiled_ignore):
            continue

        if _is_simple_type(value):
            simple_vars[name] = value
        else:
            complex_vars[name] = value

    result = {}

    all_vars = {**simple_vars, **complex_vars}
    # Simple values first, then complex values, each sorted alphabetically.
    ordered_names = list(sorted(simple_vars.keys())) + list(sorted(complex_vars.keys()))

    for name in ordered_names:
        value = all_vars[name]

        if _pattern_matches(name, compiled_mask):
            # Masked names still consume budget for the redaction marker.
            redacted_value = CODE_VARIABLES_REDACTED_VALUE
            redacted_size = len(redacted_value)
            if not limiter.can_add(redacted_size):
                break
            limiter.add(redacted_size)
            result[name] = redacted_value
        else:
            serialized = _serialize_variable_value(
                value, limiter, max_length, compiled_mask
            )
            if serialized is None:
                # Budget exhausted — stop capturing further variables.
                break
            result[name] = serialized

    return result
|
|
1086
|
+
|
|
1087
|
+
|
|
1088
|
+
def try_attach_code_variables_to_frames(
    all_exceptions, exc_info, mask_patterns, ignore_patterns
):
    """
    Best-effort wrapper around attach_code_variables_to_frames.

    Variable capture must never break exception reporting, so any error
    raised during capture is swallowed here.
    """
    try:
        attach_code_variables_to_frames(
            all_exceptions, exc_info, mask_patterns, ignore_patterns
        )
    except Exception:
        pass
|
|
1097
|
+
|
|
1098
|
+
|
|
1099
|
+
def attach_code_variables_to_frames(
    all_exceptions, exc_info, mask_patterns, ignore_patterns
):
    """
    Attach serialized local variables to the in-app stacktrace frames of the
    serialized exceptions in `all_exceptions`.

    A single VariableSizeLimiter is shared across all exceptions, capping the
    total captured size for the whole event.
    """
    exc_type, exc_value, traceback = exc_info

    if traceback is None:
        return

    tb_frames = list(iter_stacks(traceback))

    if not tb_frames:
        return

    limiter = VariableSizeLimiter()

    for exception in all_exceptions:
        stacktrace = exception.get("stacktrace")
        if not stacktrace or "frames" not in stacktrace:
            continue

        serialized_frames = stacktrace["frames"]

        # NOTE(review): the same tb_frames list is zipped against every
        # exception's frames — assumes serialized frame order lines up with
        # this traceback's order; confirm behavior for chained exceptions.
        for serialized_frame, tb_item in zip(serialized_frames, tb_frames):
            if not serialized_frame.get("in_app"):
                # Only capture variables for application frames, not libraries.
                continue

            variables = serialize_code_variables(
                tb_item.tb_frame,
                limiter,
                mask_patterns=mask_patterns,
                ignore_patterns=ignore_patterns,
                max_length=1024,
            )

            if variables:
                serialized_frame["code_variables"] = variables
|
@@ -22,6 +22,18 @@ class InconclusiveMatchError(Exception):
|
|
|
22
22
|
pass
|
|
23
23
|
|
|
24
24
|
|
|
25
|
+
class RequiresServerEvaluation(Exception):
    """
    Signals that a feature-flag condition cannot be decided with locally
    available data (e.g. static cohorts, experience continuity) and the
    caller must fall back to the PostHog API.

    Unlike InconclusiveMatchError — which lets evaluation try the remaining
    conditions — this error propagates immediately to trigger the fallback.
    """

    pass
|
|
35
|
+
|
|
36
|
+
|
|
25
37
|
# This function takes a distinct_id and a feature flag key and returns a float between 0 and 1.
|
|
26
38
|
# Given the same distinct_id and key, it'll always return the same float. These floats are
|
|
27
39
|
# uniformly distributed between 0 and 1, so if we want to show this feature to 20% of traffic
|
|
@@ -220,14 +232,7 @@ def match_feature_flag_properties(
|
|
|
220
232
|
) or []
|
|
221
233
|
valid_variant_keys = [variant["key"] for variant in flag_variants]
|
|
222
234
|
|
|
223
|
-
|
|
224
|
-
# evaluated first, and the variant override is applied to the first matching condition.
|
|
225
|
-
sorted_flag_conditions = sorted(
|
|
226
|
-
flag_conditions,
|
|
227
|
-
key=lambda condition: 0 if condition.get("variant") else 1,
|
|
228
|
-
)
|
|
229
|
-
|
|
230
|
-
for condition in sorted_flag_conditions:
|
|
235
|
+
for condition in flag_conditions:
|
|
231
236
|
try:
|
|
232
237
|
# if any one condition resolves to True, we can shortcircuit and return
|
|
233
238
|
# the matching variant
|
|
@@ -246,7 +251,12 @@ def match_feature_flag_properties(
|
|
|
246
251
|
else:
|
|
247
252
|
variant = get_matching_variant(flag, distinct_id)
|
|
248
253
|
return variant or True
|
|
254
|
+
except RequiresServerEvaluation:
|
|
255
|
+
# Static cohort or other missing server-side data - must fallback to API
|
|
256
|
+
raise
|
|
249
257
|
except InconclusiveMatchError:
|
|
258
|
+
# Evaluation error (bad regex, invalid date, missing property, etc.)
|
|
259
|
+
# Track that we had an inconclusive match, but try other conditions
|
|
250
260
|
is_inconclusive = True
|
|
251
261
|
|
|
252
262
|
if is_inconclusive:
|
|
@@ -456,8 +466,8 @@ def match_cohort(
|
|
|
456
466
|
# }
|
|
457
467
|
cohort_id = str(property.get("value"))
|
|
458
468
|
if cohort_id not in cohort_properties:
|
|
459
|
-
raise
|
|
460
|
-
"
|
|
469
|
+
raise RequiresServerEvaluation(
|
|
470
|
+
f"cohort {cohort_id} not found in local cohorts - likely a static cohort that requires server evaluation"
|
|
461
471
|
)
|
|
462
472
|
|
|
463
473
|
property_group = cohort_properties[cohort_id]
|
|
@@ -510,6 +520,9 @@ def match_property_group(
|
|
|
510
520
|
# OR group
|
|
511
521
|
if matches:
|
|
512
522
|
return True
|
|
523
|
+
except RequiresServerEvaluation:
|
|
524
|
+
# Immediately propagate - this condition requires server-side data
|
|
525
|
+
raise
|
|
513
526
|
except InconclusiveMatchError as e:
|
|
514
527
|
log.debug(f"Failed to compute property {prop} locally: {e}")
|
|
515
528
|
error_matching_locally = True
|
|
@@ -559,6 +572,9 @@ def match_property_group(
|
|
|
559
572
|
return True
|
|
560
573
|
if not matches and negation:
|
|
561
574
|
return True
|
|
575
|
+
except RequiresServerEvaluation:
|
|
576
|
+
# Immediately propagate - this condition requires server-side data
|
|
577
|
+
raise
|
|
562
578
|
except InconclusiveMatchError as e:
|
|
563
579
|
log.debug(f"Failed to compute property {prop} locally: {e}")
|
|
564
580
|
error_matching_locally = True
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Flag Definition Cache Provider interface for multi-worker environments.
|
|
3
|
+
|
|
4
|
+
EXPERIMENTAL: This API may change in future minor version bumps.
|
|
5
|
+
|
|
6
|
+
This module provides an interface for external caching of feature flag definitions,
|
|
7
|
+
enabling multi-worker environments (Kubernetes, load-balanced servers, serverless
|
|
8
|
+
functions) to share flag definitions and reduce API calls.
|
|
9
|
+
|
|
10
|
+
Usage:
|
|
11
|
+
|
|
12
|
+
from posthoganalytics import Posthog
|
|
13
|
+
from posthoganalytics.flag_definition_cache import FlagDefinitionCacheProvider
|
|
14
|
+
|
|
15
|
+
cache = RedisFlagDefinitionCache(redis_client, "my-team")
|
|
16
|
+
posthog = Posthog(
|
|
17
|
+
"<project_api_key>",
|
|
18
|
+
personal_api_key="<personal_api_key>",
|
|
19
|
+
flag_definition_cache_provider=cache,
|
|
20
|
+
)
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
from typing import Any, Dict, List, Optional, Protocol, runtime_checkable
|
|
24
|
+
|
|
25
|
+
from typing_extensions import Required, TypedDict
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class FlagDefinitionCacheData(TypedDict):
    """
    Payload exchanged with a flag-definition cache.

    Attributes:
        flags: Feature flag definition dictionaries as returned by the API.
        group_type_mapping: Group type index -> group name mapping.
        cohorts: Cohort definitions used for local flag evaluation.
    """

    flags: Required[List[Dict[str, Any]]]
    group_type_mapping: Required[Dict[str, str]]
    cohorts: Required[Dict[str, Any]]
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@runtime_checkable
class FlagDefinitionCacheProvider(Protocol):
    """
    Interface for external caching of feature flag definitions.

    Enables multi-worker environments to share flag definitions, reducing API
    calls while ensuring all workers have consistent data.

    EXPERIMENTAL: This API may change in future minor version bumps.

    The four methods handle the complete lifecycle of flag definition caching:

    1. `should_fetch_flag_definitions()` - Called before each poll to determine
       if this worker should fetch new definitions. Use for distributed lock
       coordination to ensure only one worker fetches at a time.

    2. `get_flag_definitions()` - Called when `should_fetch_flag_definitions()`
       returns False. Returns cached definitions if available.

    3. `on_flag_definitions_received()` - Called after successfully fetching
       new definitions from the API. Store the data in your external cache
       and release any locks.

    4. `shutdown()` - Called when the PostHog client shuts down. Release any
       distributed locks and clean up resources.

    Error Handling:
        All methods are wrapped in try/except. Errors will be logged but will
        never break flag evaluation. On error:
        - `should_fetch_flag_definitions()` errors default to fetching (fail-safe)
        - `get_flag_definitions()` errors fall back to API fetch
        - `on_flag_definitions_received()` errors are logged but flags remain in memory
        - `shutdown()` errors are logged but shutdown continues
    """

    def get_flag_definitions(self) -> Optional[FlagDefinitionCacheData]:
        """
        Retrieve cached flag definitions.

        Returns:
            Cached flag definitions if available and valid, None otherwise.
            Returning None will trigger a fetch from the API if this worker
            has no flags loaded yet.
        """
        ...

    def should_fetch_flag_definitions(self) -> bool:
        """
        Determine whether this instance should fetch new flag definitions.

        Use this for distributed lock coordination. Only one worker should
        return True to avoid thundering herd problems. A typical implementation
        uses a distributed lock (e.g., Redis SETNX) that expires after the
        poll interval.

        Returns:
            True if this instance should fetch from the API, False otherwise.
            When False, the client will call `get_flag_definitions()` to
            retrieve cached data instead.
        """
        ...

    def on_flag_definitions_received(self, data: FlagDefinitionCacheData) -> None:
        """
        Called after successfully receiving new flag definitions from PostHog.

        Use this to store the data in your external cache and release any
        distributed locks acquired in `should_fetch_flag_definitions()`.

        Args:
            data: The flag definitions to cache, containing flags,
                group_type_mapping, and cohorts.
        """
        ...

    def shutdown(self) -> None:
        """
        Called when the PostHog client shuts down.

        Use this to release any distributed locks and clean up resources.
        This method is called even if `should_fetch_flag_definitions()`
        returned False, so implementations should handle the case where
        no lock was acquired.
        """
        ...