growthbook 1.4.9__tar.gz → 2.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {growthbook-1.4.9/growthbook.egg-info → growthbook-2.0.0}/PKG-INFO +1 -1
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/__init__.py +1 -1
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/growthbook.py +39 -10
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/growthbook_client.py +74 -21
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/plugins/growthbook_tracking.py +2 -2
- {growthbook-1.4.9 → growthbook-2.0.0/growthbook.egg-info}/PKG-INFO +1 -1
- {growthbook-1.4.9 → growthbook-2.0.0}/setup.cfg +1 -1
- {growthbook-1.4.9 → growthbook-2.0.0}/tests/test_growthbook.py +17 -17
- {growthbook-1.4.9 → growthbook-2.0.0}/tests/test_growthbook_client.py +8 -2
- {growthbook-1.4.9 → growthbook-2.0.0}/LICENSE +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/MANIFEST.in +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/README.md +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/common_types.py +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/core.py +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/plugins/__init__.py +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/plugins/base.py +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/plugins/request_context.py +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook/py.typed +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook.egg-info/SOURCES.txt +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook.egg-info/dependency_links.txt +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook.egg-info/requires.txt +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/growthbook.egg-info/top_level.txt +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/pyproject.toml +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/setup.py +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/tests/conftest.py +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/tests/test_etag.py +0 -0
- {growthbook-1.4.9 → growthbook-2.0.0}/tests/test_plugins.py +0 -0
growthbook/growthbook.py

@@ -409,16 +409,35 @@ class FeatureRepository(object):
            self._notify_feature_update_callbacks(res)
            return res
        return cached
+
+    @property
+    def user_agent_suffix(self) -> Optional[str]:
+        return getattr(self, "_user_agent_suffix", None)
+
+    @user_agent_suffix.setter
+    def user_agent_suffix(self, value: Optional[str]) -> None:
+        self._user_agent_suffix = value

    # Perform the GET request (separate method for easy mocking)
    def _get(self, url: str, headers: Optional[Dict[str, str]] = None):
        self.http = self.http or PoolManager()
        return self.http.request("GET", url, headers=headers or {})
+
+    def _get_headers(self, client_key: str, existing_headers: Dict[str, str] = None) -> Dict[str, str]:
+        headers = existing_headers or {}
+        headers['Accept-Encoding'] = "gzip, deflate"
+
+        # Add User-Agent with optional suffix
+        ua = "Gb-Python"
+        ua += f"-{self.user_agent_suffix}" if self.user_agent_suffix else f"-{client_key[-4:]}"
+        headers['User-Agent'] = ua
+
+        return headers

    def _fetch_and_decode(self, api_host: str, client_key: str) -> Optional[Dict]:
        url = self._get_features_url(api_host, client_key)
-        headers
-
+        headers = self._get_headers(client_key)
+        logger.debug(f"Fetching features from {url} with headers {headers}")

        # Check if we have a cached ETag for this URL
        cached_etag = None
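For context, the new `_get_headers` helper sends `Accept-Encoding: gzip, deflate` plus a `User-Agent` of the form `Gb-Python-<suffix>` when `user_agent_suffix` is set, falling back to `Gb-Python-<last four characters of the client key>`. A standalone sketch of that composition (the `build_headers` helper and the key values below are illustrative, not part of the SDK):

```python
from typing import Dict, Optional

def build_headers(client_key: str, user_agent_suffix: Optional[str] = None) -> Dict[str, str]:
    # Illustrative standalone helper mirroring the _get_headers logic in the hunk above.
    headers = {"Accept-Encoding": "gzip, deflate"}
    ua = "Gb-Python"
    # Fall back to the last four characters of the client key when no suffix is set.
    ua += f"-{user_agent_suffix}" if user_agent_suffix else f"-{client_key[-4:]}"
    headers["User-Agent"] = ua
    return headers

print(build_headers("sdk-abc123")["User-Agent"])                # Gb-Python-c123
print(build_headers("sdk-abc123", "my-service")["User-Agent"])  # Gb-Python-my-service
```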
@@ -473,13 +492,13 @@ class FeatureRepository(object):

            return decoded # type: ignore[no-any-return]
        except Exception as e:
-            logger.
+            logger.error(f"Failed to decode feature JSON from GrowthBook API: {e}")
            return None

    async def _fetch_and_decode_async(self, api_host: str, client_key: str) -> Optional[Dict]:
        url = self._get_features_url(api_host, client_key)
-        headers
-
+        headers = self._get_headers(client_key=client_key)
+        logger.debug(f"[Async] Fetching features from {url} with headers {headers}")

        # Check if we have a cached ETag for this URL
        cached_etag = None

@@ -535,7 +554,7 @@ class FeatureRepository(object):
            logger.warning(f"HTTP request failed: {e}")
            return None
        except Exception as e:
-            logger.
+            logger.error(f"Failed to decode feature JSON from GrowthBook API: {e}")
            return None

    def decrypt_response(self, data, decryption_key: str):

@@ -763,7 +782,7 @@ class GrowthBook(object):
        )

        if features:
-            self.
+            self.set_features(features)

        # Register for automatic feature updates when cache expires
        if self._client_key:

@@ -794,7 +813,7 @@ class GrowthBook(object):
            self._api_host, self._client_key, self._decryption_key, self._cache_ttl
        )
        if response is not None and "features" in response.keys():
-            self.
+            self.set_features(response["features"])

        if response is not None and "savedGroups" in response:
            self._saved_groups = response["savedGroups"]

@@ -809,7 +828,7 @@ class GrowthBook(object):

        if features is not None:
            if "features" in features:
-                self.
+                self.set_features(features["features"])
            if "savedGroups" in features:
                self._saved_groups = features["savedGroups"]
            feature_repo.save_in_cache(self._client_key, features, self._cache_ttl)

@@ -823,7 +842,7 @@ class GrowthBook(object):

        if data is not None:
            if "features" in data:
-                self.
+                self.set_features(data["features"])
            if "savedGroups" in data:
                self._saved_groups = data["savedGroups"]
            feature_repo.save_in_cache(self._client_key, features, self._cache_ttl)
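These call sites now go through the snake_case `set_features` accessor. A hypothetical usage sketch of feeding a feature payload to a client directly, the way the code above does internally (feature keys and values are illustrative):

```python
from growthbook import GrowthBook

gb = GrowthBook(attributes={"id": "user-1"})
gb.set_features({
    # Illustrative feature definitions, not from the package.
    "dark-mode": {"defaultValue": False},
    "banner-text": {"defaultValue": "Hello"},
})
print(gb.is_on("dark-mode"))                             # False
print(gb.get_feature_value("banner-text", "fallback"))   # Hello
gb.destroy()
```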
@@ -1134,6 +1153,16 @@ class GrowthBook(object):
        except Exception as e:
            logger.error(f"Failed to initialize plugin {plugin}: {e}")

+    @property
+    def user_agent_suffix(self) -> Optional[str]:
+        """Get the suffix appended to the User-Agent header"""
+        return feature_repo.user_agent_suffix
+
+    @user_agent_suffix.setter
+    def user_agent_suffix(self, value: Optional[str]) -> None:
+        """Set a suffix to be appended to the User-Agent header"""
+        feature_repo.user_agent_suffix = value
+
    def _cleanup_plugins(self) -> None:
        """Cleanup all initialized plugins."""
        for plugin in self._initialized_plugins:
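Per the property added above, `GrowthBook.user_agent_suffix` proxies the module-level `feature_repo` singleton, so setting it affects the User-Agent on all feature requests made from the process. A hedged usage sketch (host, key, and suffix values are illustrative):

```python
from growthbook import GrowthBook

gb = GrowthBook(api_host="https://cdn.growthbook.io", client_key="sdk-abc123")
gb.user_agent_suffix = "checkout-service"   # illustrative; requests now send "Gb-Python-checkout-service"
gb.load_features()
```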
growthbook/growthbook_client.py

@@ -9,7 +9,7 @@ import asyncio
 import threading
 import traceback
 from datetime import datetime
-from growthbook import FeatureRepository
+from growthbook import FeatureRepository, feature_repo
 from contextlib import asynccontextmanager

 from .core import eval_feature as core_eval_feature, run_experiment

@@ -169,40 +169,78 @@ class EnhancedFeatureRepository(FeatureRepository, metaclass=SingletonMeta):
        if callback in self._callbacks:
            self._callbacks.remove(callback)

+    """
+    _start_sse_refresh flow mimics a bridge pattern to connect a blocking, synchronous background thread
+    (the SSEClient) with your non-blocking, async main loop.
+
+    Bridge - _maintain_sse_connection - runs on the main async loop, calls `startAutoRefresh` (which in turn spawns a thread)
+    and waits indefinitely. (Awaiting a Future suspends the coroutine, costing zero CPU)
+
+    The SSEClient runs in a separate thread, makes a blocking HTTP request, and invokes `on_event` synchronously.
+
+    The Hand off - when the event arrives (we're still on the background thread), sse_handler uses `asyncio.run_coroutine_threadsafe`
+    to schedule the async processing `_handle_sse_event` onto the main event loop.
+    """
    async def _start_sse_refresh(self) -> None:
        """Start SSE-based feature refresh"""
        with self._refresh_lock:
            if self._refresh_task is not None:  # Already running
                return

-
+        # SSEClient invokes `on_event` synchronously from a background thread.
+        async def _handle_sse_event(event_data: Dict[str, Any]) -> None:
            try:
-
+                event_type = event_data.get("type")
+                if event_type == "features-updated":
                    response = await self.load_features_async(
                        self._api_host, self._client_key, self._decryption_key, self._cache_ttl
                    )
                    if response is not None:
                        await self._handle_feature_update(response)
-                elif
-                    await self._handle_feature_update(event_data
+                elif event_type == "features":
+                    await self._handle_feature_update(event_data.get("data", {}))
            except Exception:
-
+                logger.exception("Error handling SSE event")

-
-            self._refresh_task = asyncio.create_task(
-                self._maintain_sse_connection(sse_handler)
-            )
+        main_loop = asyncio.get_running_loop()

-
-
-
-
-
-
-
-
-
-
+        # We must not pass an `async def` callback here (it would never be awaited).
+        def sse_handler(event_data: Dict[str, Any]) -> None:
+            # Schedule async processing onto the main event loop.
+            try:
+                asyncio.run_coroutine_threadsafe(_handle_sse_event(event_data), main_loop)
+            except Exception:
+                logger.exception("Failed to schedule SSE event handler")
+
+        async def _maintain_sse_connection() -> None:
+            """
+            Start SSE streaming and keep the task alive until cancelled.
+            """
+            try:
+                # NOTE: `startAutoRefresh` is synchronous and starts a background thread.
+                self.startAutoRefresh(self._api_host, self._client_key, sse_handler)
+
+                # Wait indefinitely until the task is cancelled - basically saying "Keep this service 'active' until someone cancels me."
+                # reconnection logic is handled inside SSEClient's thread
+                await asyncio.Future()
+            except asyncio.CancelledError:
+                # Normal shutdown flow
+                raise
+            except Exception:
+                logger.exception("Unexpected error in SSE lifecycle task")
+            finally:
+                try:
+                    # stopAutoRefresh blocks joining a thread, so it needs to be run in executor
+                    # to avoid blocking the async event loop
+                    await main_loop.run_in_executor(
+                        None,
+                        lambda: self.stopAutoRefresh(timeout=10)
+                    )
+                except Exception:
+                    logger.exception("Failed to stop SSE auto-refresh")
+
+        # Start a task that owns the SSE lifecycle and cleanup.
+        self._refresh_task = asyncio.create_task(_maintain_sse_connection())

    async def _start_http_refresh(self, interval: int = 60) -> None:
        """Enhanced HTTP polling with backoff"""
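The docstring above describes a thread-to-event-loop bridge: a blocking SSE thread hands each event to the running loop via `asyncio.run_coroutine_threadsafe`. A minimal, self-contained illustration of that pattern (all names here are illustrative, not SDK APIs):

```python
import asyncio
import threading
import time

async def handle_event(data: dict) -> None:
    # Async processing that must run on the main event loop.
    print("handled on loop:", data)

def blocking_listener(loop: asyncio.AbstractEventLoop) -> None:
    # Stands in for SSEClient: a blocking worker thread that produces events.
    for i in range(3):
        time.sleep(0.1)
        # Hand each event off to the main loop without blocking it.
        asyncio.run_coroutine_threadsafe(handle_event({"n": i}), loop)

async def main() -> None:
    loop = asyncio.get_running_loop()
    threading.Thread(target=blocking_listener, args=(loop,), daemon=True).start()
    await asyncio.sleep(0.5)  # stand-in for `await asyncio.Future()` plus cancellation

asyncio.run(main())
```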
@@ -261,6 +299,12 @@ class EnhancedFeatureRepository(FeatureRepository, metaclass=SingletonMeta):
    async def stop_refresh(self) -> None:
        """Clean shutdown of refresh tasks"""
        self._stop_event.set()
+        # Ensure any SSE background thread is stopped as well.
+        try:
+            self.stopAutoRefresh(timeout=10)
+        except Exception:
+            # Best-effort cleanup; task cancellation below will proceed.
+            logger.exception("Error stopping SSE auto-refresh")
        if self._refresh_task:
            # Cancel the task
            self._refresh_task.cancel()

@@ -569,7 +613,6 @@ class GrowthBookClient:
            self._tracked.clear()
        with self._subscriptions_lock:
            self._subscriptions.clear()
-
        # Clear context
        async with self._context_lock:
            self._global_context = None

@@ -577,6 +620,16 @@ class GrowthBookClient:
        # Cleanup plugins
        self._cleanup_plugins()

+    @property
+    def user_agent_suffix(self) -> Optional[str]:
+        """Get the suffix appended to the User-Agent header"""
+        return feature_repo.user_agent_suffix
+
+    @user_agent_suffix.setter
+    def user_agent_suffix(self, value: Optional[str]) -> None:
+        """Set a suffix to be appended to the User-Agent header"""
+        feature_repo.user_agent_suffix = value
+
    def _initialize_plugins(self) -> None:
        """Initialize all tracking plugins with this GrowthBookClient instance."""
        for plugin in self._tracking_plugins:
growthbook/plugins/growthbook_tracking.py

@@ -132,8 +132,8 @@ class GrowthBookTrackingPlugin(GrowthBookPlugin):
        """Setup feature evaluation tracking."""
        original_eval_feature = gb_instance.eval_feature

-        def eval_feature_wrapper(key: str):
-            result = original_eval_feature(key)
+        def eval_feature_wrapper(key: str, *args, **kwargs):
+            result = original_eval_feature(key, *args, **kwargs)
            self._track_feature_evaluated(key, result, gb_instance)
            return result

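The wrapper now forwards `*args`/`**kwargs` instead of accepting only `key`, so tracking keeps working when `eval_feature` is called with extra arguments. A generic sketch of the same signature-preserving pattern (the `with_tracking` helper is illustrative, not SDK code):

```python
import functools

def with_tracking(func, on_result):
    # Illustrative helper, not part of the plugin.
    @functools.wraps(func)
    def wrapper(key, *args, **kwargs):
        result = func(key, *args, **kwargs)   # forward everything unchanged
        on_result(key, result)                # side channel for tracking
        return result
    return wrapper

evaluate = with_tracking(lambda key, default=None: default, print)
evaluate("my-feature", default=42)   # prints "my-feature 42" and returns 42
```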
tests/test_growthbook.py

@@ -117,7 +117,7 @@ def test_decrypt(decrypt_data):
 def test_feature(feature_data):
    _, ctx, key, expected = feature_data
    gb = GrowthBook(**ctx)
-    res = gb.
+    res = gb.eval_feature(key)

    if "experiment" in expected:
        expected["experiment"] = Experiment(**expected["experiment"]).to_dict()

@@ -726,7 +726,7 @@ def test_stores_assigned_variations_in_the_user():
    gb.run(Experiment(key="my-test", variations=[0, 1]))
    gb.run(Experiment(key="my-test-3", variations=[0, 1]))

-    assigned = gb.
+    assigned = gb.get_all_results()
    assignedArr = []

    for e in assigned:

@@ -748,17 +748,17 @@ def test_getters_setters():
    featuresInput = {"feature-1": feat.to_dict()}
    attributes = {"id": "123", "url": "/"}

-    gb.
-    gb.
+    gb.set_features(featuresInput)
+    gb.set_attributes(attributes)

-    featuresOutput = {k: v.to_dict() for (k, v) in gb.
+    featuresOutput = {k: v.to_dict() for (k, v) in gb.get_features().items()}

    assert featuresOutput == featuresInput
-    assert attributes == gb.
+    assert attributes == gb.get_attributes()

    newAttrs = {"url": "/hello"}
-    gb.
-    assert newAttrs == gb.
+    gb.set_attributes(newAttrs)
+    assert newAttrs == gb.get_attributes()

    gb.destroy()

@@ -780,17 +780,17 @@ def test_feature_methods():
        }
    )

-    assert gb.
-    assert gb.
-    assert gb.
+    assert gb.is_on("featureOn") is True
+    assert gb.is_off("featureOn") is False
+    assert gb.get_feature_value("featureOn", 15) == 12

-    assert gb.
-    assert gb.
-    assert gb.
+    assert gb.is_on("featureOff") is False
+    assert gb.is_off("featureOff") is True
+    assert gb.get_feature_value("featureOff", 10) == 0

-    assert gb.
-    assert gb.
-    assert gb.
+    assert gb.is_on("featureNone") is False
+    assert gb.is_off("featureNone") is True
+    assert gb.get_feature_value("featureNone", 10) == 10

    gb.destroy()

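Together, these test updates exercise the snake_case accessors used throughout 2.0.0 (`eval_feature`, `get_all_results`, `set_features`/`get_features`, `set_attributes`/`get_attributes`, `is_on`/`is_off`, `get_feature_value`). A compact sketch of the same calls outside the test suite (feature keys and values are illustrative):

```python
from growthbook import GrowthBook

gb = GrowthBook(attributes={"id": "123"})
# Illustrative in-memory feature definitions.
gb.set_features({"featureOn": {"defaultValue": 12}, "featureOff": {"defaultValue": 0}})

assert gb.is_on("featureOn") is True
assert gb.get_feature_value("featureOn", 15) == 12
assert gb.is_off("featureOff") is True

res = gb.eval_feature("featureOn")   # full evaluation result
print(res.value)                     # 12

gb.set_attributes({"url": "/hello"})
assert gb.get_attributes() == {"url": "/hello"}
gb.destroy()
```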
tests/test_growthbook_client.py

@@ -89,10 +89,16 @@ async def test_sse_connection_lifecycle(mock_options, mock_features_response):
        "refresh_strategy": FeatureRefreshStrategy.SERVER_SENT_EVENTS})
    )

-
+    # `startAutoRefresh` is synchronous and should be invoked as part of SSE start-up.
+    # `stopAutoRefresh` should be called during shutdown to stop/join the SSE thread.
+    with patch('growthbook.growthbook_client.EnhancedFeatureRepository.startAutoRefresh') as mock_start, \
+         patch('growthbook.growthbook_client.EnhancedFeatureRepository.stopAutoRefresh') as mock_stop:
        await client.initialize()
-
+        # Allow the SSE lifecycle task to start and invoke startAutoRefresh
+        await asyncio.sleep(0)
+        assert mock_start.called
        await client.close()
+        assert mock_stop.called

 @pytest.mark.asyncio
 async def test_feature_repository_load():