growthbook-2.1.2-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,103 @@
+ from abc import ABC, abstractmethod
+ from typing import Any, Dict, Optional
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+
+ class GrowthBookPlugin(ABC):
+     """
+     Base class for all GrowthBook plugins.
+
+     Plugins extend GrowthBook functionality by adding auto-attributes,
+     tracking capabilities, or other enhancements.
+
+     Lifecycle:
+     1. Plugin is instantiated with configuration options
+     2. initialize(gb_instance) is called when GrowthBook is created
+     3. Plugin enhances GrowthBook functionality
+     4. cleanup() is called when GrowthBook.destroy() is called
+     """
+
+     def __init__(self, **options):
+         """Initialize plugin with configuration options."""
+         self.options = options
+         self._initialized = False
+         self._gb_instance = None
+         self.logger = logging.getLogger(f"{self.__class__.__module__}.{self.__class__.__name__}")
+
+     @abstractmethod
+     def initialize(self, gb_instance) -> None:
+         """
+         Initialize the plugin with a GrowthBook instance.
+
+         This method is called automatically when the GrowthBook instance
+         is created. Use this to set up the plugin functionality.
+
+         Args:
+             gb_instance: The GrowthBook instance to enhance
+         """
+         pass
+
+     def cleanup(self) -> None:
+         """
+         Clean up plugin resources when the GrowthBook instance is destroyed.
+
+         Override this method if your plugin needs to:
+         - Close network connections
+         - Cancel timers/threads
+         - Flush pending data
+         - Release resources
+
+         Default implementation does nothing.
+         """
+         self.logger.debug(f"Cleaning up plugin {self.__class__.__name__}")
+         self._gb_instance = None
+
+     def is_initialized(self) -> bool:
+         """Check if plugin has been initialized."""
+         return self._initialized
+
+     def _set_initialized(self, gb_instance) -> None:
+         """Mark plugin as initialized and store GrowthBook reference."""
+         self._initialized = True
+         self._gb_instance = gb_instance
+         self.logger.debug(f"Plugin {self.__class__.__name__} initialized successfully")
+
+     def _get_option(self, key: str, default: Any = None) -> Any:
+         """Get a configuration option with optional default."""
+         return self.options.get(key, default)
+
+     def _merge_attributes(self, new_attributes: Dict[str, Any]) -> None:
+         """
+         Helper method to merge new attributes with existing ones.
+
+         Args:
+             new_attributes: Dictionary of attributes to add/update
+         """
+         if not self._gb_instance:
+             self.logger.warning("Cannot merge attributes - plugin not initialized")
+             return
+
+         current_attributes = self._gb_instance.get_attributes()
+         merged_attributes = {**new_attributes, **current_attributes}  # Existing attrs take precedence
+         self._gb_instance.set_attributes(merged_attributes)
+
+         self.logger.debug(f"Merged {len(new_attributes)} attributes: {list(new_attributes.keys())}")
+
+     def _safe_execute(self, func, *args, **kwargs):
+         """
+         Safely execute a function, logging any exceptions.
+
+         Args:
+             func: Function to execute
+             *args, **kwargs: Arguments to pass to function
+
+         Returns:
+             Function result or None if exception occurred
+         """
+         try:
+             return func(*args, **kwargs)
+         except Exception as e:
+             self.logger.error(f"Error in {func.__name__}: {e}")
+             return None
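To make the plugin lifecycle above concrete, here is a minimal sketch of a custom plugin built on this base class. The plugin name and its "attributes" option are hypothetical; the sketch only uses the hooks defined above (_get_option, _set_initialized, _merge_attributes, cleanup), and the growthbook.plugins.base import path is an assumption inferred from the relative import in the tracking module below.

from growthbook.plugins.base import GrowthBookPlugin  # assumed module path


class StaticAttributesPlugin(GrowthBookPlugin):
    """Hypothetical plugin: injects fixed attributes when GrowthBook is created."""

    def initialize(self, gb_instance) -> None:
        # Store the GrowthBook reference first so _merge_attributes() has an instance to work with
        self._set_initialized(gb_instance)
        # Options passed to __init__(**options) are read back with _get_option()
        self._merge_attributes(self._get_option("attributes", {}))

    def cleanup(self) -> None:
        # Nothing extra to release here; keep the base class bookkeeping
        super().cleanup()


# Usage sketch: per the merge order in _merge_attributes(), existing user attributes
# win over plugin-provided ones. initialize() is then invoked with the GrowthBook
# instance, as described in the lifecycle notes of the class docstring.
plugin = StaticAttributesPlugin(attributes={"app_version": "1.4.0", "platform": "backend"})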
@@ -0,0 +1,285 @@
+ import json
+ import logging
+ import threading
+ import time
+ from typing import Dict, Any, Optional, List, Callable, TYPE_CHECKING
+ from .base import GrowthBookPlugin
+
+ if TYPE_CHECKING:
+     import requests
+ else:
+     try:
+         import requests  # type: ignore
+     except ImportError:
+         requests = None
+
+ logger = logging.getLogger("growthbook.plugins.growthbook_tracking")
+
+
+ class GrowthBookTrackingPlugin(GrowthBookPlugin):
+     """
+     GrowthBook tracking plugin for Built-in Warehouse.
+
+     This plugin automatically tracks "Experiment Viewed" and "Feature Evaluated"
+     events to GrowthBook's built-in data warehouse.
+     """
+
+     def __init__(
+         self,
+         ingestor_host: str,
+         track_experiment_viewed: bool = True,
+         track_feature_evaluated: bool = True,
+         batch_size: int = 10,
+         batch_timeout: float = 10.0,
+         additional_callback: Optional[Callable] = None,
+         **options
+     ):
+         """
+         Initialize GrowthBook tracking plugin.
+
+         Args:
+             ingestor_host: The GrowthBook ingestor endpoint
+             track_experiment_viewed: Whether to track experiment viewed events
+             track_feature_evaluated: Whether to track feature evaluated events
+             batch_size: Number of events to batch before sending
+             batch_timeout: Maximum time (seconds) to wait before sending a batch
+             additional_callback: Optional additional tracking callback
+         """
+         super().__init__(**options)
+
+         if not requests:
+             raise ImportError("requests library is required for GrowthBookTrackingPlugin. Install with: pip install requests")
+
+         self.ingestor_host = ingestor_host.rstrip('/')
+         self.track_experiment_viewed = track_experiment_viewed
+         self.track_feature_evaluated = track_feature_evaluated
+         self.batch_size = batch_size
+         self.batch_timeout = batch_timeout
+         self.additional_callback = additional_callback
+
+         # batching
+         self._event_batch: List[Dict[str, Any]] = []
+         self._batch_lock = threading.Lock()
+         self._flush_timer: Optional[threading.Timer] = None
+         self._client_key: Optional[str] = None
+
+     def initialize(self, gb_instance) -> None:
+         """Initialize plugin with GrowthBook instance."""
+         try:
+             self._client_key = getattr(gb_instance, '_client_key', '')
+
+             # Hook into experiment tracking
+             if self.track_experiment_viewed:
+                 self._setup_experiment_tracking(gb_instance)
+
+             # Hook into feature evaluation
+             if self.track_feature_evaluated:
+                 self._setup_feature_tracking(gb_instance)
+
+             self._set_initialized(gb_instance)
+             self.logger.info(f"Tracking enabled for {self.ingestor_host}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to initialize tracking plugin: {e}")
+
+     def cleanup(self) -> None:
+         """Clean up plugin resources."""
+         self._flush_events()
+         if self._flush_timer:
+             self._flush_timer.cancel()
+         super().cleanup()
+
+     def _setup_experiment_tracking(self, gb_instance) -> None:
+         """Setup experiment tracking for both legacy and async clients."""
+
+         def tracking_wrapper(experiment, result, user_context=None):
+             # Track to ingestor
+             self._track_experiment_viewed(experiment, result)
+
+             # Call additional callback
+             if self.additional_callback:
+                 self._safe_execute(self.additional_callback, experiment, result, user_context)
+
+         # Check if it's the legacy GrowthBook client (has _trackingCallback)
+         if hasattr(gb_instance, '_trackingCallback'):
+             # Legacy GrowthBook client
+             original_callback = getattr(gb_instance, '_trackingCallback', None)
+
+             def legacy_wrapper(experiment, result, user_context=None):
+                 tracking_wrapper(experiment, result, user_context)
+                 # Call original callback
+                 if original_callback:
+                     self._safe_execute(original_callback, experiment, result, user_context)
+
+             gb_instance._trackingCallback = legacy_wrapper
+
+         elif hasattr(gb_instance, 'options') and hasattr(gb_instance.options, 'on_experiment_viewed'):
+             # New GrowthBookClient (async)
+             original_callback = gb_instance.options.on_experiment_viewed
+
+             def async_wrapper(experiment, result, user_context):
+                 tracking_wrapper(experiment, result, user_context)
+                 # Call original callback
+                 if original_callback:
+                     self._safe_execute(original_callback, experiment, result, user_context)
+
+             gb_instance.options.on_experiment_viewed = async_wrapper
+
+         else:
+             self.logger.warning("_trackingCallback or on_experiment_viewed properties not found - tracking may not work properly")
+
+     def _setup_feature_tracking(self, gb_instance):
+         """Setup feature evaluation tracking."""
+         original_eval_feature = gb_instance.eval_feature
+
+         def eval_feature_wrapper(key: str, *args, **kwargs):
+             result = original_eval_feature(key, *args, **kwargs)
+             self._track_feature_evaluated(key, result, gb_instance)
+             return result
+
+         gb_instance.eval_feature = eval_feature_wrapper
+
+     def _track_experiment_viewed(self, experiment, result) -> None:
+         """Track experiment viewed event."""
+         try:
+             # Build event data with all metadata
+             event_data = {
+                 'event_type': 'experiment_viewed',
+                 'timestamp': int(time.time() * 1000),
+                 'client_key': self._client_key,
+                 'sdk_language': 'python',
+                 'sdk_version': self._get_sdk_version(),
+                 # Core experiment data
+                 'experiment_id': experiment.key,
+                 'variation_id': result.variationId,
+                 'variation_key': getattr(result, 'key', str(result.variationId)),
+                 'variation_value': result.value,
+                 'in_experiment': result.inExperiment,
+                 'hash_used': result.hashUsed,
+                 'hash_attribute': result.hashAttribute,
+                 'hash_value': result.hashValue,
+             }
+
+             # Add optional metadata if available
+             if hasattr(experiment, 'name') and experiment.name:
+                 event_data['experiment_name'] = experiment.name
+             if hasattr(result, 'featureId') and result.featureId:
+                 event_data['feature_id'] = result.featureId
+
+             self._add_event_to_batch(event_data)
+
+         except Exception as e:
+             self.logger.error(f"Error tracking experiment: {e}")
+
+     def _track_feature_evaluated(self, feature_key: str, result, gb_instance) -> None:
+         """Track feature evaluated event."""
+         try:
+             # Build event data with all metadata
+             event_data = {
+                 'event_type': 'feature_evaluated',
+                 'timestamp': int(time.time() * 1000),
+                 'client_key': self._client_key,
+                 'sdk_language': 'python',
+                 'sdk_version': self._get_sdk_version(),
+                 # Core feature data
+                 'feature_key': feature_key,
+                 'feature_value': result.value,
+                 'source': result.source,
+                 'on': getattr(result, 'on', bool(result.value)),
+                 'off': getattr(result, 'off', not bool(result.value)),
+             }
+
+             # Add optional metadata if available
+             if hasattr(result, 'ruleId') and result.ruleId:
+                 event_data['rule_id'] = result.ruleId
+
+             # Add experiment info if feature came from experiment
+             if hasattr(result, 'experiment') and result.experiment:
+                 event_data['experiment_id'] = result.experiment.key
+             if hasattr(result, 'experimentResult') and result.experimentResult:
+                 event_data['variation_id'] = result.experimentResult.variationId
+                 event_data['in_experiment'] = result.experimentResult.inExperiment
+
+             self._add_event_to_batch(event_data)
+
+         except Exception as e:
+             self.logger.error(f"Error tracking feature: {e}")
+
+     def _add_event_to_batch(self, event_data: Dict[str, Any]) -> None:
+         with self._batch_lock:
+             self._event_batch.append(event_data)
+
+             # Flush if batch is full
+             if len(self._event_batch) >= self.batch_size:
+                 self._flush_batch_locked()
+             elif len(self._event_batch) == 1:
+                 # Start timer for first event
+                 self._start_flush_timer()
+
+     def _start_flush_timer(self) -> None:
+         """Start flush timer."""
+         if self._flush_timer:
+             self._flush_timer.cancel()
+
+         self._flush_timer = threading.Timer(self.batch_timeout, self._flush_events)
+         self._flush_timer.start()
+
+     def _flush_events(self) -> None:
+         """Flush events with lock."""
+         with self._batch_lock:
+             self._flush_batch_locked()
+
+     def _flush_batch_locked(self) -> None:
+         """Flush current batch (called while holding lock)."""
+         if not self._event_batch:
+             return
+
+         events_to_send = self._event_batch.copy()
+         self._event_batch.clear()
+
+         if self._flush_timer:
+             self._flush_timer.cancel()
+             self._flush_timer = None
+
+         # Send in background thread
+         threading.Thread(target=self._send_events, args=(events_to_send,), daemon=True).start()
+
+     def _send_events(self, events: List[Dict[str, Any]]) -> None:
+         """Send events using requests library."""
+         if not events:
+             return
+
+         try:
+             payload = {
+                 'events': events,
+                 'client_key': self._client_key
+             }
+
+             url = f"{self.ingestor_host}/events"
+             response = requests.post(
+                 url,
+                 json=payload,
+                 headers={'User-Agent': f'growthbook-python-sdk/{self._get_sdk_version()}'},
+                 timeout=30
+             )
+
+             if response.status_code == 200:
+                 self.logger.debug(f"Successfully sent {len(events)} events")
+             else:
+                 self.logger.warning(f"Ingestor returned status {response.status_code}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to send events: {e}")
+
+     def _get_sdk_version(self) -> str:
+         """Get SDK version."""
+         try:
+             import growthbook
+             return getattr(growthbook, '__version__', 'unknown')
+         except Exception:
+             return 'unknown'
+
+
+ def growthbook_tracking_plugin(**options) -> GrowthBookTrackingPlugin:
+     """Create a GrowthBook tracking plugin."""
+     return GrowthBookTrackingPlugin(**options)
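For orientation, a sketch of how this tracking plugin might be wired up end to end. The ingestor URL, client key, and feature name are placeholders, and the growthbook.plugins.growthbook_tracking import path is an assumption based on the logger name above; initialize() and cleanup() are called explicitly here because this diff does not show how (or whether) the GrowthBook constructor registers plugins automatically.

from growthbook import GrowthBook
from growthbook.plugins.growthbook_tracking import growthbook_tracking_plugin  # assumed module path

# Batch up to 10 events, or flush after 5 seconds, whichever comes first.
tracking = growthbook_tracking_plugin(
    ingestor_host="https://ingest.example.invalid",  # placeholder endpoint
    batch_size=10,
    batch_timeout=5.0,
    additional_callback=lambda exp, res, ctx=None: print("viewed", exp.key),
)

gb = GrowthBook(
    api_host="https://cdn.growthbook.io",
    client_key="sdk-abc123",              # placeholder; the plugin reads this via _client_key
    attributes={"id": "user-123"},
)

tracking.initialize(gb)          # wraps eval_feature and the experiment tracking callback
gb.eval_feature("my-feature")    # now also queues a 'feature_evaluated' event

# Batches are POSTed to {ingestor_host}/events from a background thread.
tracking.cleanup()               # flushes anything still pending (normally driven by GrowthBook.destroy())
gb.destroy()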