growthbook 1.0.0__py2.py3-none-any.whl → 1.1.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {growthbook-1.0.0.data → growthbook-1.1.0.data}/scripts/growthbook.py +548 -82
- {growthbook-1.0.0.dist-info → growthbook-1.1.0.dist-info}/METADATA +89 -37
- growthbook-1.1.0.dist-info/RECORD +7 -0
- {growthbook-1.0.0.dist-info → growthbook-1.1.0.dist-info}/WHEEL +1 -1
- growthbook.py +548 -82
- growthbook-1.0.0.dist-info/RECORD +0 -7
- {growthbook-1.0.0.dist-info → growthbook-1.1.0.dist-info}/LICENSE +0 -0
- {growthbook-1.0.0.dist-info → growthbook-1.1.0.dist-info}/top_level.txt +0 -0
@@ -50,20 +50,20 @@ def gbhash(seed: str, value: str, version: int) -> Optional[float]:
 
 
 def inRange(n: float, range: Tuple[float, float]) -> bool:
-    return
+    return range[0] <= n < range[1]
 
 
 def inNamespace(userId: str, namespace: Tuple[str, float, float]) -> bool:
     n = gbhash("__" + namespace[0], userId, 1)
     if n is None:
         return False
-    return
+    return namespace[1] <= n < namespace[2]
 
 
 def getEqualWeights(numVariations: int) -> List[float]:
     if numVariations < 1:
         return []
-    return [1 / numVariations for
+    return [1 / numVariations for _ in range(numVariations)]
 
 
 def getBucketRanges(
@@ -131,6 +131,35 @@ def decrypt(encrypted_str: str, key_str: str) -> str:
     return bytestring.decode("utf-8")
 
 
+def paddedVersionString(input) -> str:
+    # If input is a number, convert to a string
+    if type(input) is int or type(input) is float:
+        input = str(input)
+
+    if not input or type(input) is not str:
+        input = "0"
+
+    # Remove build info and leading `v` if any
+    input = re.sub(r"(^v|\+.*$)", "", input)
+    # Split version into parts (both core version numbers and pre-release tags)
+    # "v1.2.3-rc.1+build123" -> ["1","2","3","rc","1"]
+    parts = re.split(r"[-.]", input)
+    # If it's SemVer without a pre-release, add `~` to the end
+    # ["1","0","0"] -> ["1","0","0","~"]
+    # "~" is the largest ASCII character, so this will make "1.0.0" greater than "1.0.0-beta" for example
+    if len(parts) == 3:
+        parts.append("~")
+    # Left pad each numeric part with spaces so string comparisons will work ("9">"10", but " 9"<"10")
+    # Then, join back together into a single string
+    return "-".join([v.rjust(5, " ") if re.match(r"^[0-9]+$", v) else v for v in parts])
+
+
+def isIn(conditionValue, attributeValue) -> bool:
+    if type(attributeValue) is list:
+        return bool(set(conditionValue) & set(attributeValue))
+    return attributeValue in conditionValue
+
+
 def evalCondition(attributes: dict, condition: dict) -> bool:
     if "$or" in condition:
         return evalOr(attributes, condition["$or"])
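The new `paddedVersionString` helper turns a SemVer-ish string into a space-padded form so that plain string comparison matches version ordering. A minimal sketch of the resulting strings, assuming the module-level helper is imported directly from `growthbook`:

```python
from growthbook import paddedVersionString

# Numeric parts are left-padded with spaces to width 5; a release with no
# pre-release tag gets "~" appended.
assert paddedVersionString("1.0.0") == "    1-    0-    0-~"
assert paddedVersionString("v1.0.0-beta") == "    1-    0-    0-beta"

# "~" sorts after any pre-release tag, so a release compares greater than its pre-releases:
assert paddedVersionString("1.0.0") > paddedVersionString("1.0.0-beta")
# Space-padding keeps numeric ordering intact ("9" > "10" as raw strings, but " 9" < "10"):
assert paddedVersionString("1.10.0") > paddedVersionString("1.9.0")
```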
@@ -224,19 +253,69 @@ def elemMatch(condition, attributeValue) -> bool:
     return False
 
 
+def compare(val1, val2) -> int:
+    if (type(val1) is int or type(val1) is float) and not (type(val2) is int or type(val2) is float):
+        if (val2 is None):
+            val2 = 0
+        else:
+            val2 = float(val2)
+
+    if (type(val2) is int or type(val2) is float) and not (type(val1) is int or type(val1) is float):
+        if (val1 is None):
+            val1 = 0
+        else:
+            val1 = float(val1)
+
+    if val1 > val2:
+        return 1
+    if val1 < val2:
+        return -1
+    return 0
+
+
 def evalOperatorCondition(operator, attributeValue, conditionValue) -> bool:
     if operator == "$eq":
-
+        try:
+            return compare(attributeValue, conditionValue) == 0
+        except Exception:
+            return False
     elif operator == "$ne":
-
+        try:
+            return compare(attributeValue, conditionValue) != 0
+        except Exception:
+            return False
     elif operator == "$lt":
-
+        try:
+            return compare(attributeValue, conditionValue) < 0
+        except Exception:
+            return False
     elif operator == "$lte":
-
+        try:
+            return compare(attributeValue, conditionValue) <= 0
+        except Exception:
+            return False
     elif operator == "$gt":
-
+        try:
+            return compare(attributeValue, conditionValue) > 0
+        except Exception:
+            return False
     elif operator == "$gte":
-
+        try:
+            return compare(attributeValue, conditionValue) >= 0
+        except Exception:
+            return False
+    elif operator == "$veq":
+        return paddedVersionString(attributeValue) == paddedVersionString(conditionValue)
+    elif operator == "$vne":
+        return paddedVersionString(attributeValue) != paddedVersionString(conditionValue)
+    elif operator == "$vlt":
+        return paddedVersionString(attributeValue) < paddedVersionString(conditionValue)
+    elif operator == "$vlte":
+        return paddedVersionString(attributeValue) <= paddedVersionString(conditionValue)
+    elif operator == "$vgt":
+        return paddedVersionString(attributeValue) > paddedVersionString(conditionValue)
+    elif operator == "$vgte":
+        return paddedVersionString(attributeValue) >= paddedVersionString(conditionValue)
     elif operator == "$regex":
         try:
             r = re.compile(conditionValue)
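These operators back the new version-string conditions (`$veq`, `$vne`, `$vlt`, `$vlte`, `$vgt`, `$vgte`). An illustrative condition using them, assuming the standard GrowthBook condition syntax and the module-level `evalCondition`:

```python
from growthbook import evalCondition

# Match app versions in the range [2.5.0, 3.0.0-beta)
condition = {"appVersion": {"$vgte": "2.5.0", "$vlt": "3.0.0-beta"}}

print(evalCondition({"appVersion": "2.10.1"}, condition))  # True: 2.10.1 sorts between the bounds
print(evalCondition({"appVersion": "v2.4.9"}, condition))  # False: below the $vgte bound
```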
@@ -244,9 +323,13 @@ def evalOperatorCondition(operator, attributeValue, conditionValue) -> bool:
         except Exception:
             return False
     elif operator == "$in":
-
+        if not type(conditionValue) is list:
+            return False
+        return isIn(conditionValue, attributeValue)
     elif operator == "$nin":
-
+        if not type(conditionValue) is list:
+            return False
+        return not isIn(conditionValue, attributeValue)
     elif operator == "$elemMatch":
         return elemMatch(conditionValue, attributeValue)
     elif operator == "$size":
@@ -304,6 +387,7 @@ class Experiment(object):
         groups: list = None,
         force: int = None,
         hashAttribute: str = "id",
+        fallbackAttribute: str = None,
         hashVersion: int = None,
         ranges: List[Tuple[float, float]] = None,
         meta: List[VariationMeta] = None,
@@ -311,6 +395,10 @@ class Experiment(object):
         seed: str = None,
         name: str = None,
         phase: str = None,
+        disableStickyBucketing: bool = False,
+        bucketVersion: int = None,
+        minBucketVersion: int = None,
+        parentConditions: List[dict] = None,
     ) -> None:
         self.key = key
         self.variations = variations
@@ -328,6 +416,14 @@ class Experiment(object):
         self.seed = seed
         self.name = name
         self.phase = phase
+        self.disableStickyBucketing = disableStickyBucketing
+        self.bucketVersion = bucketVersion or 0
+        self.minBucketVersion = minBucketVersion or 0
+        self.parentConditions = parentConditions
+
+        self.fallbackAttribute = None
+        if not self.disableStickyBucketing:
+            self.fallbackAttribute = fallbackAttribute
 
         # Deprecated properties
         self.status = status
@@ -354,6 +450,18 @@ class Experiment(object):
             "name": self.name,
             "phase": self.phase,
         }
+
+        if self.fallbackAttribute:
+            obj["fallbackAttribute"] = self.fallbackAttribute
+        if self.disableStickyBucketing:
+            obj["disableStickyBucketing"] = True
+        if self.bucketVersion:
+            obj["bucketVersion"] = self.bucketVersion
+        if self.minBucketVersion:
+            obj["minBucketVersion"] = self.minBucketVersion
+        if self.parentConditions:
+            obj["parentConditions"] = self.parentConditions
+
         return obj
 
     def update(self, data: dict) -> None:
@@ -390,6 +498,7 @@ class Result(object):
         featureId: Optional[str],
         meta: VariationMeta = None,
         bucket: float = None,
+        stickyBucketUsed: bool = False,
     ) -> None:
         self.variationId = variationId
         self.inExperiment = inExperiment
@@ -399,6 +508,7 @@ class Result(object):
         self.hashValue = hashValue
         self.featureId = featureId or None
         self.bucket = bucket
+        self.stickyBucketUsed = stickyBucketUsed
 
         self.key = str(variationId)
         self.name = ""
@@ -422,6 +532,7 @@ class Result(object):
             "hashAttribute": self.hashAttribute,
             "hashValue": self.hashValue,
             "key": self.key,
+            "stickyBucketUsed": self.stickyBucketUsed,
         }
 
         if self.bucket is not None:
@@ -442,7 +553,30 @@ class Feature(object):
             if isinstance(rule, FeatureRule):
                 self.rules.append(rule)
             else:
-                self.rules.append(FeatureRule(
+                self.rules.append(FeatureRule(
+                    id=rule.get("id", None),
+                    key=rule.get("key", ""),
+                    variations=rule.get("variations", None),
+                    weights=rule.get("weights", None),
+                    coverage=rule.get("coverage", None),
+                    condition=rule.get("condition", None),
+                    namespace=rule.get("namespace", None),
+                    force=rule.get("force", None),
+                    hashAttribute=rule.get("hashAttribute", "id"),
+                    fallbackAttribute=rule.get("fallbackAttribute", None),
+                    hashVersion=rule.get("hashVersion", None),
+                    range=rule.get("range", None),
+                    ranges=rule.get("ranges", None),
+                    meta=rule.get("meta", None),
+                    filters=rule.get("filters", None),
+                    seed=rule.get("seed", None),
+                    name=rule.get("name", None),
+                    phase=rule.get("phase", None),
+                    disableStickyBucketing=rule.get("disableStickyBucketing", False),
+                    bucketVersion=rule.get("bucketVersion", None),
+                    minBucketVersion=rule.get("minBucketVersion", None),
+                    parentConditions=rule.get("parentConditions", None),
+                ))
 
     def to_dict(self) -> dict:
         return {
@@ -454,6 +588,7 @@ class Feature(object):
 class FeatureRule(object):
     def __init__(
         self,
+        id: str = None,
         key: str = "",
         variations: list = None,
         weights: List[float] = None,
@@ -462,6 +597,7 @@ class FeatureRule(object):
         namespace: Tuple[str, float, float] = None,
         force=None,
         hashAttribute: str = "id",
+        fallbackAttribute: str = None,
         hashVersion: int = None,
         range: Tuple[float, float] = None,
         ranges: List[Tuple[float, float]] = None,
@@ -470,7 +606,16 @@ class FeatureRule(object):
         seed: str = None,
         name: str = None,
         phase: str = None,
+        disableStickyBucketing: bool = False,
+        bucketVersion: int = None,
+        minBucketVersion: int = None,
+        parentConditions: List[dict] = None,
     ) -> None:
+
+        if disableStickyBucketing:
+            fallbackAttribute = None
+
+        self.id = id
         self.key = key
         self.variations = variations
         self.weights = weights
@@ -479,6 +624,7 @@ class FeatureRule(object):
         self.namespace = namespace
         self.force = force
         self.hashAttribute = hashAttribute
+        self.fallbackAttribute = fallbackAttribute
         self.hashVersion = hashVersion or 1
         self.range = range
         self.ranges = ranges
@@ -487,9 +633,15 @@ class FeatureRule(object):
         self.seed = seed
         self.name = name
         self.phase = phase
+        self.disableStickyBucketing = disableStickyBucketing
+        self.bucketVersion = bucketVersion or 0
+        self.minBucketVersion = minBucketVersion or 0
+        self.parentConditions = parentConditions
 
     def to_dict(self) -> dict:
         data: Dict[str, Any] = {}
+        if self.id:
+            data["id"] = self.id
         if self.key:
             data["key"] = self.key
         if self.variations is not None:
@@ -522,6 +674,16 @@ class FeatureRule(object):
             data["name"] = self.name
         if self.phase is not None:
             data["phase"] = self.phase
+        if self.fallbackAttribute:
+            data["fallbackAttribute"] = self.fallbackAttribute
+        if self.disableStickyBucketing:
+            data["disableStickyBucketing"] = True
+        if self.bucketVersion:
+            data["bucketVersion"] = self.bucketVersion
+        if self.minBucketVersion:
+            data["minBucketVersion"] = self.minBucketVersion
+        if self.parentConditions:
+            data["parentConditions"] = self.parentConditions
 
         return data
 
@@ -533,9 +695,11 @@ class FeatureResult(object):
         source: str,
         experiment: Experiment = None,
         experimentResult: Result = None,
+        ruleId: str = None,
     ) -> None:
         self.value = value
         self.source = source
+        self.ruleId = ruleId
         self.experiment = experiment
         self.experimentResult = experimentResult
         self.on = bool(value)
@@ -548,6 +712,8 @@ class FeatureResult(object):
             "on": self.on,
             "off": self.off,
         }
+        if self.ruleId:
+            data["ruleId"] = self.ruleId
         if self.experiment:
             data["experiment"] = self.experiment.to_dict()
         if self.experimentResult:
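`FeatureResult` now carries the `ruleId` of the rule that produced the value, which is useful when debugging which rule fired. A small sketch with an invented feature name and rule id:

```python
from growthbook import GrowthBook

gb = GrowthBook(features={
    "banner": {"defaultValue": False, "rules": [{"id": "rule-123", "force": True}]},
})
res = gb.eval_feature("banner")
print(res.value, res.ruleId)  # True rule-123
```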
@@ -600,6 +766,43 @@ class InMemoryFeatureCache(AbstractFeatureCache):
         self.cache.clear()
 
 
+class AbstractStickyBucketService(ABC):
+    @abstractmethod
+    def get_assignments(self, attributeName: str, attributeValue: str) -> Optional[Dict]:
+        pass
+
+    @abstractmethod
+    def save_assignments(self, doc: Dict) -> None:
+        pass
+
+    def get_key(self, attributeName: str, attributeValue: str) -> str:
+        return f"{attributeName}||{attributeValue}"
+
+    # By default, just loop through all attributes and call get_assignments
+    # Override this method in subclasses to perform a multi-query instead
+    def get_all_assignments(self, attributes: Dict[str, str]) -> Dict[str, Dict]:
+        docs = {}
+        for attributeName, attributeValue in attributes.items():
+            doc = self.get_assignments(attributeName, attributeValue)
+            if doc:
+                docs[self.get_key(attributeName, attributeValue)] = doc
+        return docs
+
+
+class InMemoryStickyBucketService(AbstractStickyBucketService):
+    def __init__(self) -> None:
+        self.docs: Dict[str, Dict] = {}
+
+    def get_assignments(self, attributeName: str, attributeValue: str) -> Optional[Dict]:
+        return self.docs.get(self.get_key(attributeName, attributeValue), None)
+
+    def save_assignments(self, doc: Dict) -> None:
+        self.docs[self.get_key(doc["attributeName"], doc["attributeValue"])] = doc
+
+    def destroy(self) -> None:
+        self.docs.clear()
+
+
 class FeatureRepository(object):
     def __init__(self) -> None:
         self.cache: AbstractFeatureCache = InMemoryFeatureCache()
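`AbstractStickyBucketService` is the extension point for persisting sticky bucket assignments; `InMemoryStickyBucketService` is the bundled reference implementation. A hypothetical file-backed variant, shown only to illustrate the interface (the class name and file path are invented):

```python
import json
import os
from typing import Dict, Optional

from growthbook import AbstractStickyBucketService


class JsonFileStickyBucketService(AbstractStickyBucketService):
    # Stores assignment docs in a local JSON file, keyed by "attributeName||attributeValue"
    def __init__(self, path: str = "sticky_buckets.json") -> None:
        self.path = path

    def _load(self) -> Dict[str, Dict]:
        if not os.path.exists(self.path):
            return {}
        with open(self.path) as f:
            return json.load(f)

    def get_assignments(self, attributeName: str, attributeValue: str) -> Optional[Dict]:
        return self._load().get(self.get_key(attributeName, attributeValue))

    def save_assignments(self, doc: Dict) -> None:
        docs = self._load()
        docs[self.get_key(doc["attributeName"], doc["attributeValue"])] = doc
        with open(self.path, "w") as f:
            json.dump(docs, f)
```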
@@ -670,7 +873,8 @@ class FeatureRepository(object):
             logger.warning("GrowthBook API response missing features")
             return None
 
-
+    @staticmethod
+    def _get_features_url(api_host: str, client_key: str) -> str:
         api_host = (api_host or "https://cdn.growthbook.io").rstrip("/")
         return api_host + "/api/features/" + client_key
 
@@ -693,6 +897,8 @@ class GrowthBook(object):
         decryption_key: str = "",
         cache_ttl: int = 60,
         forced_variations: dict = {},
+        sticky_bucket_service: AbstractStickyBucketService = None,
+        sticky_bucket_identifier_attributes: List[str] = None,
         # Deprecated args
         trackingCallback=None,
         qaMode: bool = False,
@@ -709,9 +915,11 @@ class GrowthBook(object):
         self._client_key = client_key
         self._decryption_key = decryption_key
         self._cache_ttl = cache_ttl
-
-
-
+        self.sticky_bucket_identifier_attributes = sticky_bucket_identifier_attributes
+        self.sticky_bucket_service = sticky_bucket_service
+        self._sticky_bucket_assignment_docs: dict = {}
+        self._using_derived_sticky_bucket_attributes = not sticky_bucket_identifier_attributes
+        self._sticky_bucket_attributes: Optional[dict] = None
 
         self._qaMode = qa_mode or qaMode
         self._trackingCallback = on_experiment_viewed or trackingCallback
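Wiring this together: the constructor accepts a sticky bucket service, and `set_attributes` triggers `refresh_sticky_buckets` so cached assignments are loaded for the current user. A minimal sketch using the bundled in-memory service:

```python
from growthbook import GrowthBook, InMemoryStickyBucketService

service = InMemoryStickyBucketService()
gb = GrowthBook(sticky_bucket_service=service)
gb.set_attributes({"id": "user-1", "deviceId": "device-42"})

# Experiment assignments made from here on are written back through
# service.save_assignments(), keyed by "attributeName||attributeValue",
# so the same user keeps their variation across evaluations.
```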
@@ -726,6 +934,9 @@ class GrowthBook(object):
         self._assigned: Dict[str, Any] = {}
         self._subscriptions: Set[Any] = set()
 
+        if features:
+            self.setFeatures(features)
+
     def load_features(self) -> None:
         if not self._client_key:
             raise ValueError("Must specify `client_key` to refresh features")
@@ -746,7 +957,11 @@ class GrowthBook(object):
             if isinstance(feature, Feature):
                 self._features[key] = feature
             else:
-                self._features[key] = Feature(
+                self._features[key] = Feature(
+                    rules=feature.get("rules", []),
+                    defaultValue=feature.get("defaultValue", None),
+                )
+        self.refresh_sticky_buckets()
 
     # @deprecated, use get_features
     def getFeatures(self) -> Dict[str, Feature]:
@@ -761,6 +976,7 @@ class GrowthBook(object):
 
     def set_attributes(self, attributes: dict) -> None:
         self._attributes = attributes
+        self.refresh_sticky_buckets()
 
     # @deprecated, use get_attributes
     def getAttributes(self) -> dict:
@@ -806,14 +1022,48 @@ class GrowthBook(object):
     def evalFeature(self, key: str) -> FeatureResult:
         return self.eval_feature(key)
 
+    def eval_prereqs(self, parentConditions: List[dict], stack: Set[str]) -> str:
+        for parentCondition in parentConditions:
+            parentRes = self._eval_feature(parentCondition.get("id", None), stack)
+
+            if parentRes.source == "cyclicPrerequisite":
+                return "cyclic"
+
+            if not evalCondition({'value': parentRes.value}, parentCondition.get("condition", None)):
+                if parentCondition.get("gate", False):
+                    return "gate"
+                return "fail"
+        return "pass"
+
     def eval_feature(self, key: str) -> FeatureResult:
+        return self._eval_feature(key, set())
+
+    def _eval_feature(self, key: str, stack: Set[str]) -> FeatureResult:
         logger.debug("Evaluating feature %s", key)
         if key not in self._features:
             logger.warning("Unknown feature %s", key)
             return FeatureResult(None, "unknownFeature")
 
+        if key in stack:
+            logger.warning("Cyclic prerequisite detected, stack: %s", stack)
+            return FeatureResult(None, "cyclicPrerequisite")
+        stack.add(key)
+
         feature = self._features[key]
         for rule in feature.rules:
+            logger.debug("Evaluating feature %s, rule %s", key, rule.to_dict())
+            if (rule.parentConditions):
+                prereq_res = self.eval_prereqs(rule.parentConditions, stack)
+                if prereq_res == "gate":
+                    logger.debug("Top-level prerequisite failed, return None, feature %s", key)
+                    return FeatureResult(None, "prerequisite")
+                if prereq_res == "cyclic":
+                    # Warning already logged in this case
+                    return FeatureResult(None, "cyclicPrerequisite")
+                if prereq_res == "fail":
+                    logger.debug("Skip rule because of failing prerequisite, feature %s", key)
+                    continue
+
             if rule.condition:
                 if not evalCondition(self._attributes, rule.condition):
                     logger.debug(
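Prerequisite evaluation (`eval_prereqs`) checks each entry in `parentConditions` against the value of the referenced feature, using `{"value": <parent value>}` as the attribute set. An illustrative gated feature (feature names invented), assuming the dict-based `features` constructor argument shown above:

```python
from growthbook import GrowthBook

gb = GrowthBook(features={
    "parent-flag": {"defaultValue": True},
    "child-flag": {
        "defaultValue": False,
        "rules": [{
            # This rule only applies while "parent-flag" evaluates to True
            "parentConditions": [{"id": "parent-flag", "condition": {"value": True}}],
            "force": True,
        }],
    },
})
print(gb.eval_feature("child-flag").value)  # True while the prerequisite passes
```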
@@ -830,6 +1080,7 @@ class GrowthBook(object):
                 if not self._isIncludedInRollout(
                     rule.seed or key,
                     rule.hashAttribute,
+                    rule.fallbackAttribute,
                     rule.range,
                     rule.coverage,
                     rule.hashVersion,
@@ -841,7 +1092,7 @@ class GrowthBook(object):
                     continue
 
                 logger.debug("Force value from rule, feature %s", key)
-                return FeatureResult(rule.force, "force")
+                return FeatureResult(rule.force, "force", ruleId=rule.id)
 
             if rule.variations is None:
                 logger.warning("Skip invalid rule, feature %s", key)
@@ -853,6 +1104,7 @@ class GrowthBook(object):
                 coverage=rule.coverage,
                 weights=rule.weights,
                 hashAttribute=rule.hashAttribute,
+                fallbackAttribute=rule.fallbackAttribute,
                 namespace=rule.namespace,
                 hashVersion=rule.hashVersion,
                 meta=rule.meta,
@@ -861,13 +1113,19 @@ class GrowthBook(object):
                 phase=rule.phase,
                 seed=rule.seed,
                 filters=rule.filters,
+                condition=rule.condition,
+                disableStickyBucketing=rule.disableStickyBucketing,
+                bucketVersion=rule.bucketVersion,
+                minBucketVersion=rule.minBucketVersion,
             )
 
             result = self._run(exp, key)
             self._fireSubscriptions(exp, result)
 
             if not result.inExperiment:
-                logger.debug(
+                logger.debug(
+                    "Skip rule because user not included in experiment, feature %s", key
+                )
                 continue
 
             if result.passthrough:
@@ -875,7 +1133,9 @@ class GrowthBook(object):
                 continue
 
             logger.debug("Assign value from experiment, feature %s", key)
-            return FeatureResult(
+            return FeatureResult(
+                result.value, "experiment", exp, result, ruleId=rule.id
+            )
 
         logger.debug("Use default value for feature %s", key)
         return FeatureResult(feature.defaultValue, "defaultValue")
@@ -887,21 +1147,36 @@ class GrowthBook(object):
     def get_all_results(self):
         return self._assigned.copy()
 
-    def _getOrigHashValue(self, attr: str = None):
+    def _getOrigHashValue(self, attr: str = None, fallbackAttr: str = None) -> Tuple[str, str]:
+
         attr = attr or "id"
+        val = ""
         if attr in self._attributes:
-
-
-
-            return ""
+            val = self._attributes[attr] or ""
+        elif attr in self._user:
+            val = self._user[attr] or ""
 
-
-
+        # If no match, try fallback
+        if (not val or val == "") and fallbackAttr and self.sticky_bucket_service:
+            if fallbackAttr in self._attributes:
+                val = self._attributes[fallbackAttr] or ""
+            elif fallbackAttr in self._user:
+                val = self._user[fallbackAttr] or ""
+
+            if not val or val != "":
+                attr = fallbackAttr
+
+        return (attr, val)
+
+    def _getHashValue(self, attr: str = None, fallbackAttr: str = None) -> Tuple[str, str]:
+        (attr, val) = self._getOrigHashValue(attr, fallbackAttr)
+        return (attr, str(val))
 
     def _isIncludedInRollout(
         self,
         seed: str,
         hashAttribute: str = None,
+        fallbackAttribute: str = None,
         range: Tuple[float, float] = None,
         coverage: float = None,
         hashVersion: int = None,
@@ -909,7 +1184,7 @@ class GrowthBook(object):
         if coverage is None and range is None:
             return True
 
-        hash_value = self._getHashValue(hashAttribute
+        (_, hash_value) = self._getHashValue(hashAttribute, fallbackAttribute)
         if hash_value == "":
             return False
 
@@ -926,7 +1201,7 @@ class GrowthBook(object):
 
     def _isFilteredOut(self, filters: List[Filter]) -> bool:
         for filter in filters:
-            hash_value = self._getHashValue(filter.get("attribute", "id"))
+            (_, hash_value) = self._getHashValue(filter.get("attribute", "id"))
             if hash_value == "":
                 return False
 
@@ -1010,9 +1285,9 @@ class GrowthBook(object):
         if experiment.status == "draft" or not experiment.active:
             logger.debug("Experiment %s is not active, skip", experiment.key)
             return self._getExperimentResult(experiment, featureId=featureId)
+
         # 6. Get the user hash attribute and value
-        hashAttribute = experiment.hashAttribute
-        hashValue = self._getHashValue(hashAttribute)
+        (hashAttribute, hashValue) = self._getHashValue(experiment.hashAttribute, experiment.fallbackAttribute)
         if not hashValue:
             logger.debug(
                 "Skip experiment %s because user's hashAttribute value is empty",
@@ -1020,55 +1295,90 @@ class GrowthBook(object):
             )
             return self._getExperimentResult(experiment, featureId=featureId)
 
-
-        if experiment.filters:
-            if self._isFilteredOut(experiment.filters):
-                logger.debug(
-                    "Skip experiment %s because of filters/namespaces", experiment.key
-                )
-                return self._getExperimentResult(experiment, featureId=featureId)
-        elif experiment.namespace and not inNamespace(hashValue, experiment.namespace):
-            logger.debug("Skip experiment %s because of namespace", experiment.key)
-            return self._getExperimentResult(experiment, featureId=featureId)
+        assigned = -1
 
-
-
-
-
+        found_sticky_bucket = False
+        sticky_bucket_version_is_blocked = False
+        if self.sticky_bucket_service and not experiment.disableStickyBucketing:
+            sticky_bucket = self._get_sticky_bucket_variation(
+                experiment.key,
+                experiment.bucketVersion,
+                experiment.minBucketVersion,
+                experiment.meta,
+                hash_attribute=experiment.hashAttribute,
+                fallback_attribute=experiment.fallbackAttribute,
+            )
+            found_sticky_bucket = sticky_bucket.get('variation', 0) >= 0
+            assigned = sticky_bucket.get('variation', 0)
+            sticky_bucket_version_is_blocked = sticky_bucket.get('versionIsBlocked', False)
+
+        if found_sticky_bucket:
+            logger.debug("Found sticky bucket for experiment %s, assigning sticky variation %s", experiment.key, assigned)
+
+        # Some checks are not needed if we already have a sticky bucket
+        if not found_sticky_bucket:
+            # 7. Filtered out / not in namespace
+            if experiment.filters:
+                if self._isFilteredOut(experiment.filters):
                     logger.debug(
-                        "Skip experiment %s because
-                        experiment.key,
+                        "Skip experiment %s because of filters/namespaces", experiment.key
                     )
                     return self._getExperimentResult(experiment, featureId=featureId)
-
-                logger.
-                    "Skip experiment %s because include() raised an Exception",
-                    experiment.key,
-                )
+            elif experiment.namespace and not inNamespace(hashValue, experiment.namespace):
+                logger.debug("Skip experiment %s because of namespace", experiment.key)
                 return self._getExperimentResult(experiment, featureId=featureId)
 
-
-
-
-
-
-
-
-
+            # 7.5. If experiment has an include property
+            if experiment.include:
+                try:
+                    if not experiment.include():
+                        logger.debug(
+                            "Skip experiment %s because include() returned false",
+                            experiment.key,
+                        )
+                        return self._getExperimentResult(experiment, featureId=featureId)
+                except Exception:
+                    logger.warning(
+                        "Skip experiment %s because include() raised an Exception",
+                        experiment.key,
+                    )
+                    return self._getExperimentResult(experiment, featureId=featureId)
 
-
-
-
-
-            for group in experiment.groups:
-                if expGroups[group]:
-                    matched = True
-            if not matched:
+            # 8. Exclude if condition is false
+            if experiment.condition and not evalCondition(
+                self._attributes, experiment.condition
+            ):
                 logger.debug(
-                    "Skip experiment %s because user
-                    experiment.key,
+                    "Skip experiment %s because user failed the condition", experiment.key
                 )
                 return self._getExperimentResult(experiment, featureId=featureId)
+
+            # 8.05 Exclude if parent conditions are not met
+            if (experiment.parentConditions):
+                prereq_res = self.eval_prereqs(experiment.parentConditions, set())
+                if prereq_res == "gate" or prereq_res == "fail":
+                    logger.debug("Skip experiment %s because of failing prerequisite", experiment.key)
+                    return self._getExperimentResult(experiment, featureId=featureId)
+                if prereq_res == "cyclic":
+                    logger.debug("Skip experiment %s because of cyclic prerequisite", experiment.key)
+                    return self._getExperimentResult(experiment, featureId=featureId)
+
+            # 8.1. Make sure user is in a matching group
+            if experiment.groups and len(experiment.groups):
+                expGroups = self._groups or {}
+                matched = False
+                for group in experiment.groups:
+                    if expGroups[group]:
+                        matched = True
+                if not matched:
+                    logger.debug(
+                        "Skip experiment %s because user not in required group",
+                        experiment.key,
+                    )
+                    return self._getExperimentResult(experiment, featureId=featureId)
+
+        # The following apply even when in a sticky bucket
+
         # 8.2. If experiment.url is set, see if it's valid
         if experiment.url:
             if not self._urlIsValid(experiment.url):
@@ -1079,10 +1389,6 @@ class GrowthBook(object):
                 return self._getExperimentResult(experiment, featureId=featureId)
 
         # 9. Get bucket ranges and choose variation
-        c = experiment.coverage
-        ranges = experiment.ranges or getBucketRanges(
-            len(experiment.variations), c if c is not None else 1, experiment.weights
-        )
         n = gbhash(
             experiment.seed or experiment.key, hashValue, experiment.hashVersion or 1
         )
@@ -1091,7 +1397,18 @@ class GrowthBook(object):
                 "Skip experiment %s because of invalid hashVersion", experiment.key
             )
             return self._getExperimentResult(experiment, featureId=featureId)
-
+
+        if not found_sticky_bucket:
+            c = experiment.coverage
+            ranges = experiment.ranges or getBucketRanges(
+                len(experiment.variations), c if c is not None else 1, experiment.weights
+            )
+            assigned = chooseVariation(n, ranges)
+
+        # Unenroll if any prior sticky buckets are blocked by version
+        if sticky_bucket_version_is_blocked:
+            logger.debug("Skip experiment %s because sticky bucket version is blocked", experiment.key)
+            return self._getExperimentResult(experiment, featureId=featureId, stickyBucketUsed=True)
 
         # 10. Return if not in experiment
         if assigned < 0:
@@ -1122,16 +1439,34 @@ class GrowthBook(object):
 
         # 13. Build the result object
         result = self._getExperimentResult(
-            experiment, assigned, True, featureId=featureId, bucket=n
+            experiment, assigned, True, featureId=featureId, bucket=n, stickyBucketUsed=found_sticky_bucket
         )
 
+        # 13.5 Persist sticky bucket
+        if self.sticky_bucket_service and not experiment.disableStickyBucketing:
+            assignment = {}
+            assignment[self._get_sticky_bucket_experiment_key(
+                experiment.key,
+                experiment.bucketVersion
+            )] = result.key
+
+            data = self._generate_sticky_bucket_assignment_doc(
+                hashAttribute,
+                hashValue,
+                assignment
+            )
+            doc = data.get("doc", None)
+            if doc and data.get('changed', False):
+                if not self._sticky_bucket_assignment_docs:
+                    self._sticky_bucket_assignment_docs = {}
+                self._sticky_bucket_assignment_docs[data.get('key')] = doc
+                self.sticky_bucket_service.save_assignments(doc)
+
         # 14. Fire the tracking callback if set
         self._track(experiment, result)
 
         # 15. Return the result
-        logger.debug(
-            "Assigned variation %d in experiment %s", assigned, experiment.key
-        )
+        logger.debug("Assigned variation %d in experiment %s", assigned, experiment.key)
         return result
 
     def _track(self, experiment: Experiment, result: Result) -> None:
@@ -1173,9 +1508,8 @@ class GrowthBook(object):
         hashUsed: bool = False,
         featureId: str = None,
         bucket: float = None,
+        stickyBucketUsed: bool = False
     ) -> Result:
-        hashAttribute = experiment.hashAttribute or "id"
-
         inExperiment = True
         if variationId < 0 or variationId > len(experiment.variations) - 1:
             variationId = 0
@@ -1185,6 +1519,8 @@ class GrowthBook(object):
         if experiment.meta:
             meta = experiment.meta[variationId]
 
+        (hashAttribute, hashValue) = self._getOrigHashValue(experiment.hashAttribute, experiment.fallbackAttribute)
+
         return Result(
             featureId=featureId,
             inExperiment=inExperiment,
@@ -1192,7 +1528,137 @@ class GrowthBook(object):
             value=experiment.variations[variationId],
             hashUsed=hashUsed,
             hashAttribute=hashAttribute,
-            hashValue=
+            hashValue=hashValue,
             meta=meta,
             bucket=bucket,
+            stickyBucketUsed=stickyBucketUsed
         )
+
+    def _derive_sticky_bucket_identifier_attributes(self) -> List[str]:
+        attributes = set()
+        for key, feature in self._features.items():
+            for rule in feature.rules:
+                if rule.variations:
+                    attributes.add(rule.hashAttribute or "id")
+                    if rule.fallbackAttribute:
+                        attributes.add(rule.fallbackAttribute)
+        return list(attributes)
+
+    def _get_sticky_bucket_attributes(self) -> dict:
+        attributes: Dict[str, str] = {}
+        if self._using_derived_sticky_bucket_attributes:
+            self.sticky_bucket_identifier_attributes = self._derive_sticky_bucket_identifier_attributes()
+
+        if not self.sticky_bucket_identifier_attributes:
+            return attributes
+
+        for attr in self.sticky_bucket_identifier_attributes:
+            _, hash_value = self._getHashValue(attr)
+            if hash_value:
+                attributes[attr] = hash_value
+        return attributes
+
+    def _get_sticky_bucket_assignments(self, attr: str = None, fallback: str = None) -> Dict[str, str]:
+        merged: Dict[str, str] = {}
+
+        _, hashValue = self._getHashValue(attr)
+        key = f"{attr}||{hashValue}"
+        if key in self._sticky_bucket_assignment_docs:
+            merged = self._sticky_bucket_assignment_docs[key].get("assignments", {})
+
+        if fallback:
+            _, hashValue = self._getHashValue(fallback)
+            key = f"{fallback}||{hashValue}"
+            if key in self._sticky_bucket_assignment_docs:
+                # Merge the fallback assignments, but don't overwrite existing ones
+                for k, v in self._sticky_bucket_assignment_docs[key].get("assignments", {}).items():
+                    if k not in merged:
+                        merged[k] = v
+
+        return merged
+
+    def _is_blocked(
+        self,
+        assignments: Dict[str, str],
+        experiment_key: str,
+        min_bucket_version: int
+    ) -> bool:
+        if min_bucket_version > 0:
+            for i in range(min_bucket_version):
+                blocked_key = self._get_sticky_bucket_experiment_key(experiment_key, i)
+                if blocked_key in assignments:
+                    return True
+        return False
+
+    def _get_sticky_bucket_variation(
+        self,
+        experiment_key: str,
+        bucket_version: int = None,
+        min_bucket_version: int = None,
+        meta: List[VariationMeta] = None,
+        hash_attribute: str = None,
+        fallback_attribute: str = None
+    ) -> dict:
+        bucket_version = bucket_version or 0
+        min_bucket_version = min_bucket_version or 0
+        meta = meta or []
+
+        id = self._get_sticky_bucket_experiment_key(experiment_key, bucket_version)
+
+        assignments = self._get_sticky_bucket_assignments(hash_attribute, fallback_attribute)
+        if self._is_blocked(assignments, experiment_key, min_bucket_version):
+            return {
+                'variation': -1,
+                'versionIsBlocked': True
+            }
+
+        variation_key = assignments.get(id, None)
+        if not variation_key:
+            return {
+                'variation': -1
+            }
+
+        # Find the key in meta
+        variation = next((i for i, v in enumerate(meta) if v.get("key") == variation_key), -1)
+        if variation < 0:
+            return {
+                'variation': -1
+            }
+
+        return {'variation': variation}
+
+    def _get_sticky_bucket_experiment_key(self, experiment_key: str, bucket_version: int = 0) -> str:
+        return experiment_key + "__" + str(bucket_version)
+
+    def refresh_sticky_buckets(self, force: bool = False) -> None:
+        if not self.sticky_bucket_service:
+            return
+
+        attributes = self._get_sticky_bucket_attributes()
+        if not force and attributes == self._sticky_bucket_attributes:
+            logger.debug("Skipping refresh of sticky bucket assignments, no changes")
+            return
+
+        self._sticky_bucket_attributes = attributes
+        self._sticky_bucket_assignment_docs = self.sticky_bucket_service.get_all_assignments(attributes)
+
+    def _generate_sticky_bucket_assignment_doc(self, attribute_name: str, attribute_value: str, assignments: dict):
+        key = attribute_name + "||" + attribute_value
+        existing_assignments = self._sticky_bucket_assignment_docs.get(key, {}).get("assignments", {})
+
+        new_assignments = {**existing_assignments, **assignments}
+
+        # Compare JSON strings to see if they have changed
+        existing_json = json.dumps(existing_assignments, sort_keys=True)
+        new_json = json.dumps(new_assignments, sort_keys=True)
+        changed = existing_json != new_json
+
+        return {
+            'key': key,
+            'doc': {
+                'attributeName': attribute_name,
+                'attributeValue': attribute_value,
+                'assignments': new_assignments
+            },
+            'changed': changed
+        }
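For reference, the assignment document that `_generate_sticky_bucket_assignment_doc` builds and `save_assignments` receives looks roughly like the sketch below. The experiment key follows the `<experimentKey>__<bucketVersion>` convention from `_get_sticky_bucket_experiment_key`; the ids and variation key here are invented:

```python
doc = {
    "attributeName": "id",
    "attributeValue": "user-1",
    "assignments": {
        # experiment key + "__" + bucket version -> stored variation key
        "my-experiment__0": "control",
    },
}
```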