service-capacity-modeling 0.3.79__py3-none-any.whl → 0.3.81__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those published versions.
- service_capacity_modeling/capacity_planner.py +11 -14
- service_capacity_modeling/models/org/netflix/__init__.py +2 -0
- service_capacity_modeling/models/org/netflix/control.py +146 -0
- service_capacity_modeling/stats.py +5 -1
- {service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/METADATA +1 -1
- {service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/RECORD +10 -9
- {service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/WHEEL +0 -0
- {service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/entry_points.txt +0 -0
- {service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/licenses/LICENSE +0 -0
- {service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/top_level.txt +0 -0
service_capacity_modeling/capacity_planner.py
CHANGED
@@ -85,7 +85,7 @@ def model_desires(
     data_shape = desires.data_shape.model_copy(deep=True)

     query_pattern_simulation = {}
-    for field in sorted(
+    for field in sorted(QueryPattern.model_fields):
         d = getattr(query_pattern, field)
         if isinstance(d, Interval):
             query_pattern_simulation[field] = simulate_interval(d, field)(num_sims)
@@ -93,7 +93,7 @@ def model_desires(
             query_pattern_simulation[field] = [d] * num_sims

     data_shape_simulation = {}
-    for field in sorted(
+    for field in sorted(DataShape.model_fields):
         d = getattr(data_shape, field)
         if isinstance(d, Interval):
             data_shape_simulation[field] = simulate_interval(d, field)(num_sims)
@@ -104,14 +104,11 @@ def model_desires(
         query_pattern = QueryPattern(
             **{
                 f: query_pattern_simulation[f][sim]
-                for f in sorted(
+                for f in sorted(QueryPattern.model_fields)
             }
         )
         data_shape = DataShape(
-            **{
-                f: data_shape_simulation[f][sim]
-                for f in sorted(data_shape.model_fields)
-            }
+            **{f: data_shape_simulation[f][sim] for f in sorted(DataShape.model_fields)}
         )

         d = desires.model_copy(update={"query_pattern": None, "data_shape": None})
@@ -129,7 +126,7 @@ def model_desires_percentiles(

     query_pattern_simulation = {}
     query_pattern_means = {}
-    for field in sorted(
+    for field in sorted(QueryPattern.model_fields):
         d = getattr(query_pattern, field)
         if isinstance(d, Interval):
             query_pattern_simulation[field] = interval_percentile(d, percentiles)
@@ -143,7 +140,7 @@ def model_desires_percentiles(

     data_shape_simulation = {}
     data_shape_means = {}
-    for field in sorted(
+    for field in sorted(DataShape.model_fields):
         d = getattr(data_shape, field)
         if isinstance(d, Interval):
             data_shape_simulation[field] = interval_percentile(d, percentiles)
@@ -161,13 +158,13 @@ def model_desires_percentiles(
             query_pattern = QueryPattern(
                 **{
                     f: query_pattern_simulation[f][i]
-                    for f in sorted(
+                    for f in sorted(QueryPattern.model_fields)
                 }
             )
         except Exception as exp:
             raise exp
         data_shape = DataShape(
-            **{f: data_shape_simulation[f][i] for f in sorted(
+            **{f: data_shape_simulation[f][i] for f in sorted(DataShape.model_fields)}
         )
         d = desires.model_copy(deep=True)
         d.query_pattern = query_pattern
@@ -175,10 +172,10 @@ def model_desires_percentiles(
         results.append(d)

     mean_qp = QueryPattern(
-        **{f: query_pattern_means[f] for f in sorted(
+        **{f: query_pattern_means[f] for f in sorted(QueryPattern.model_fields)}
     )
     mean_ds = DataShape(
-        **{f: data_shape_means[f] for f in sorted(
+        **{f: data_shape_means[f] for f in sorted(DataShape.model_fields)}
     )
     d = desires.model_copy(deep=True)
     d.query_pattern = mean_qp
@@ -298,7 +295,7 @@ def _add_requirement(

     requirements = accum[requirement.requirement_type]

-    for field in sorted(
+    for field in sorted(CapacityRequirement.model_fields):
         d = getattr(requirement, field)
         if isinstance(d, Interval):
             if field not in requirements:
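The capacity_planner.py hunks above replace the field-iteration loops (whose old form is truncated in this diff view) with iteration over the class-level Pydantic attributes QueryPattern.model_fields, DataShape.model_fields, and CapacityRequirement.model_fields. In Pydantic v2, model_fields is defined on the model class, and newer 2.x releases deprecate reading it through an instance, so the class-level form is the forward-compatible one. A minimal sketch of the pattern, using a hypothetical Sample model rather than anything from this package:

from pydantic import BaseModel


class Sample(BaseModel):
    reads_per_second: int = 0
    writes_per_second: int = 0


sample = Sample(reads_per_second=100)

# Iterate field names via the class attribute; reading `sample.model_fields`
# off the instance is deprecated in newer Pydantic 2.x releases.
for field in sorted(Sample.model_fields):
    print(field, getattr(sample, field))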
service_capacity_modeling/models/org/netflix/__init__.py
CHANGED
@@ -3,6 +3,7 @@ from typing import Dict

 from .aurora import nflx_aurora_capacity_model
 from .cassandra import nflx_cassandra_capacity_model
+from .control import nflx_control_capacity_model
 from .counter import nflx_counter_capacity_model
 from .crdb import nflx_cockroachdb_capacity_model
 from .ddb import nflx_ddb_capacity_model
@@ -38,6 +39,7 @@ def models() -> Dict[str, Any]:
         "org.netflix.elasticsearch.master": nflx_elasticsearch_master_capacity_model,
         "org.netflix.elasticsearch.search": nflx_elasticsearch_search_capacity_model,
         "org.netflix.entity": nflx_entity_capacity_model,
+        "org.netflix.control": nflx_control_capacity_model,
         "org.netflix.cockroachdb": nflx_cockroachdb_capacity_model,
         "org.netflix.aurora": nflx_aurora_capacity_model,
         "org.netflix.postgres": nflx_postgres_capacity_model,
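With this registration, the new model becomes reachable through the same name-based registry the other Netflix models use. The snippet below is illustrative and relies only on the models() helper and the description() method visible in this diff:

from service_capacity_modeling.models.org.netflix import models

registry = models()
# The new control model is keyed by its fully qualified name.
assert "org.netflix.control" in registry
print(registry["org.netflix.control"].description())  # "Netflix Control Model"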
service_capacity_modeling/models/org/netflix/control.py
ADDED
@@ -0,0 +1,146 @@
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Optional
+from typing import Tuple
+
+from .stateless_java import nflx_java_app_capacity_model
+from service_capacity_modeling.interface import AccessConsistency
+from service_capacity_modeling.interface import AccessPattern
+from service_capacity_modeling.interface import CapacityDesires
+from service_capacity_modeling.interface import CapacityPlan
+from service_capacity_modeling.interface import Consistency
+from service_capacity_modeling.interface import DataShape
+from service_capacity_modeling.interface import Drive
+from service_capacity_modeling.interface import FixedInterval
+from service_capacity_modeling.interface import GlobalConsistency
+from service_capacity_modeling.interface import Instance
+from service_capacity_modeling.interface import Interval
+from service_capacity_modeling.interface import QueryPattern
+from service_capacity_modeling.interface import RegionContext
+from service_capacity_modeling.models import CapacityModel
+from service_capacity_modeling.interface import certain_int
+
+
+class NflxControlCapacityModel(CapacityModel):
+    @staticmethod
+    def capacity_plan(
+        instance: Instance,
+        drive: Drive,
+        context: RegionContext,
+        desires: CapacityDesires,
+        extra_model_arguments: Dict[str, Any],
+    ) -> Optional[CapacityPlan]:
+        # Control wants 20GiB root volumes
+        extra_model_arguments.setdefault("root_disk_gib", 20)
+
+        # Ensure Java app has enough memory to cache the whole dataset
+        modified_desires = desires.model_copy(deep=True)
+        if modified_desires.data_shape.estimated_state_size_gib:
+            # double buffer the cache
+            additional_mem = 2 * desires.data_shape.estimated_state_size_gib.mid
+            modified_desires.data_shape.reserved_instance_app_mem_gib += additional_mem
+
+        control_app = nflx_java_app_capacity_model.capacity_plan(
+            instance=instance,
+            drive=drive,
+            context=context,
+            desires=modified_desires,
+            extra_model_arguments=extra_model_arguments,
+        )
+        if control_app is None:
+            return None
+
+        for cluster in control_app.candidate_clusters.regional:
+            cluster.cluster_type = "dgwcontrol"
+        return control_app
+
+    @staticmethod
+    def description() -> str:
+        return "Netflix Control Model"
+
+    @staticmethod
+    def extra_model_arguments_schema() -> Dict[str, Any]:
+        return nflx_java_app_capacity_model.extra_model_arguments_schema()
+
+    @staticmethod
+    def compose_with(
+        user_desires: CapacityDesires, extra_model_arguments: Dict[str, Any]
+    ) -> Tuple[Tuple[str, Callable[[CapacityDesires], CapacityDesires]], ...]:
+        def _modify_aurora_desires(
+            user_desires: CapacityDesires,
+        ) -> CapacityDesires:
+            relaxed = user_desires.model_copy(deep=True)
+
+            # Aurora doesn't support tier 0, so downgrade to tier 1
+            if relaxed.service_tier == 0:
+                relaxed.service_tier = 1
+
+            # Control caches reads in memory, only writes go to Aurora
+            # Set read QPS to minimal since Aurora only handles writes
+            if relaxed.query_pattern.estimated_read_per_second:
+                relaxed.query_pattern.estimated_read_per_second = certain_int(1)
+
+            return relaxed
+
+        return (("org.netflix.aurora", _modify_aurora_desires),)
+
+    @staticmethod
+    def default_desires(
+        user_desires: CapacityDesires, extra_model_arguments: Dict[str, Any]
+    ) -> CapacityDesires:
+        return CapacityDesires(
+            query_pattern=QueryPattern(
+                access_pattern=AccessPattern.latency,
+                access_consistency=GlobalConsistency(
+                    same_region=Consistency(
+                        target_consistency=AccessConsistency.read_your_writes,
+                    ),
+                    cross_region=Consistency(
+                        target_consistency=AccessConsistency.eventual,
+                    ),
+                ),
+                estimated_mean_read_size_bytes=Interval(
+                    low=128, mid=1024, high=65536, confidence=0.95
+                ),
+                estimated_mean_write_size_bytes=Interval(
+                    low=128, mid=1024, high=65536, confidence=0.95
+                ),
+                estimated_mean_read_latency_ms=Interval(
+                    low=0.2, mid=1, high=2, confidence=0.98
+                ),
+                estimated_mean_write_latency_ms=Interval(
+                    low=0.2, mid=1, high=2, confidence=0.98
+                ),
+                # "Single digit milliseconds SLO"
+                read_latency_slo_ms=FixedInterval(
+                    minimum_value=0.2,
+                    maximum_value=10,
+                    low=1,
+                    mid=3,
+                    high=6,
+                    confidence=0.98,
+                ),
+                write_latency_slo_ms=FixedInterval(
+                    minimum_value=0.2,
+                    maximum_value=10,
+                    low=0.4,
+                    mid=2,
+                    high=5,
+                    confidence=0.98,
+                ),
+            ),
+            # Most Control clusters are small
+            data_shape=DataShape(
+                estimated_state_size_gib=Interval(
+                    low=0.1, mid=1, high=10, confidence=0.98
+                ),
+                estimated_state_item_count=Interval(
+                    low=100000, mid=1000000, high=10000000, confidence=0.98
+                ),
+                reserved_instance_app_mem_gib=8,
+            ),
+        )
+
+
+nflx_control_capacity_model = NflxControlCapacityModel()
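For context, models registered this way are normally exercised through the library's planner. The sketch below assumes the planner entry point documented in the project README (planner.plan_certain taking model_name, region, and desires); the region, tier, and traffic figures are illustrative assumptions, and the control model's default_desires() fills in anything left unspecified:

from service_capacity_modeling.capacity_planner import planner
from service_capacity_modeling.interface import CapacityDesires
from service_capacity_modeling.interface import DataShape
from service_capacity_modeling.interface import QueryPattern
from service_capacity_modeling.interface import certain_float
from service_capacity_modeling.interface import certain_int

# Illustrative desires; values not given here come from default_desires().
desires = CapacityDesires(
    service_tier=1,
    query_pattern=QueryPattern(
        estimated_read_per_second=certain_int(10_000),
        estimated_write_per_second=certain_int(1_000),
    ),
    data_shape=DataShape(estimated_state_size_gib=certain_float(2)),
)

# Ask for a plan from the newly registered control model.
plans = planner.plan_certain(
    model_name="org.netflix.control",
    region="us-east-1",
    desires=desires,
)
if plans:
    print(plans[0].candidate_clusters.regional)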
service_capacity_modeling/stats.py
CHANGED
@@ -45,7 +45,11 @@ def _gamma_fn_from_params(

     def f(k: float) -> float:
         zero = high / low
-
+        high_x = high_p * k / mid
+        low_x = low_p * k / mid
+        result = gammaf(k, high_x) / gammaf(k, low_x) - zero
+        # Use .item() to convert NumPy scalar to Python float
+        return result.item() if hasattr(result, "item") else float(result)

     return f

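The stats.py change makes the gamma-fit objective f(k) return a plain Python float before it is returned to the caller (presumably a numeric solver): the incomplete-gamma calls yield NumPy scalars, and .item() (with a float() fallback) converts them. The same defensive conversion in isolation, as a sketch; scipy.special.gammainc stands in for whatever gammaf is aliased to in the module:

from scipy.special import gammainc  # regularized lower incomplete gamma


def to_py_float(value) -> float:
    # NumPy scalars expose .item(); anything else goes through float().
    return value.item() if hasattr(value, "item") else float(value)


ratio = gammainc(2.0, 3.0) / gammainc(2.0, 0.5)  # NumPy float64 scalar
print(type(to_py_float(ratio)))  # <class 'float'>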
{service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/RECORD
RENAMED
@@ -1,7 +1,7 @@
 service_capacity_modeling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-service_capacity_modeling/capacity_planner.py,sha256=
+service_capacity_modeling/capacity_planner.py,sha256=JRagEFlg3u_zB1N5GzGsKAN55JZLad6p4IF_PmL8kcg,32780
 service_capacity_modeling/interface.py,sha256=p-pOUQUdA6VA-moTjFSBLQAZCRvcjFaAfb2jOmxMWsY,39074
-service_capacity_modeling/stats.py,sha256=
+service_capacity_modeling/stats.py,sha256=LCNUcQPfwF5hhIZwsfAsDe4ZbnuhDnl3vQHKfpK61Xc,6142
 service_capacity_modeling/hardware/__init__.py,sha256=P5ostvoSOMUqPODtepeFYb4qfTVH0E73mMFraP49rYU,9196
 service_capacity_modeling/hardware/profiles/__init__.py,sha256=7-y3JbCBkgzaAjFla2RIymREcImdZ51HTl3yn3vzoGw,1602
 service_capacity_modeling/hardware/profiles/profiles.txt,sha256=tOfSR3B0E0uAOaXd5SLI3ioq83UYZ3yhK7UHhsK4awQ,49
@@ -50,9 +50,10 @@ service_capacity_modeling/models/common.py,sha256=Bs-G1eHrb9qmS9qEWzx0rzN9QjQ419
 service_capacity_modeling/models/headroom_strategy.py,sha256=rGo_d7nxkQDjx0_hIAXKKZAWnQDBtqZhc0eTMouVh8s,682
 service_capacity_modeling/models/utils.py,sha256=WosEEg4o1_WSbTb5mL-M1v8JuWJgvS2oWvnDS3qNz3k,2662
 service_capacity_modeling/models/org/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-service_capacity_modeling/models/org/netflix/__init__.py,sha256=
+service_capacity_modeling/models/org/netflix/__init__.py,sha256=2Ld2NPxiO3vbYtOMqHtrV4f1nEH390Hoxqo3I5NbBDI,2553
 service_capacity_modeling/models/org/netflix/aurora.py,sha256=Js33ZjxCtt34HiDPsWRT9mjKCAsnnCo9du15QArVFMo,13073
 service_capacity_modeling/models/org/netflix/cassandra.py,sha256=Fp37bHtWRJctJYYJhW78YbLeFXoX26QxREP_BMEItNE,39003
+service_capacity_modeling/models/org/netflix/control.py,sha256=wkJnqG7Nn7kQNcKEMzl9LFlA9xTAji2wYxL011VAKPI,5762
 service_capacity_modeling/models/org/netflix/counter.py,sha256=XopKlNMdvO5EbxAggn-KW_q7L3aKtXLXbry4ocl6i5Q,10494
 service_capacity_modeling/models/org/netflix/crdb.py,sha256=iW7tyG8jpXhHIdXrw3DPYSHRAknPN42MlCRLJO4o9C8,20826
 service_capacity_modeling/models/org/netflix/ddb.py,sha256=9qRiuTqWev9zbYFFzewyowU7M41uALsuLklYx20yAXw,26502
@@ -75,9 +76,9 @@ service_capacity_modeling/tools/auto_shape.py,sha256=Jx9H2ay9-H_kUDjtB141owQNxGF
 service_capacity_modeling/tools/fetch_pricing.py,sha256=Qp-XMymkY1dvtyS51RufmEpfgOHv-IQ-XyzS8wp2-qM,4021
 service_capacity_modeling/tools/generate_missing.py,sha256=F7YqvMJAV4nZc20GNrlIsnQSF8_77sLgwYZqc5k4LDg,3099
 service_capacity_modeling/tools/instance_families.py,sha256=e5RuYkCLUITvsAazDH12B6KjX_PaBsv6Ne3mj0HK_sQ,9223
-service_capacity_modeling-0.3.
-service_capacity_modeling-0.3.
-service_capacity_modeling-0.3.
-service_capacity_modeling-0.3.
-service_capacity_modeling-0.3.
-service_capacity_modeling-0.3.
+service_capacity_modeling-0.3.81.dist-info/licenses/LICENSE,sha256=nl_Lt5v9VvJ-5lWJDT4ddKAG-VZ-2IaLmbzpgYDz2hU,11343
+service_capacity_modeling-0.3.81.dist-info/METADATA,sha256=g1x4TGRElgvLbJiQZ_D_H5wN-Mhiynf4nPhOEVbkIEg,10361
+service_capacity_modeling-0.3.81.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+service_capacity_modeling-0.3.81.dist-info/entry_points.txt,sha256=ZsjzpG5SomWpT1zCE19n1uSXKH2gTI_yc33sdl0vmJg,146
+service_capacity_modeling-0.3.81.dist-info/top_level.txt,sha256=H8XjTCLgR3enHq5t3bIbxt9SeUkUT8HT_SDv2dgIT_A,26
+service_capacity_modeling-0.3.81.dist-info/RECORD,,
{service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/WHEEL
RENAMED
File without changes
{service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/entry_points.txt
RENAMED
File without changes
{service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/licenses/LICENSE
RENAMED
File without changes
{service_capacity_modeling-0.3.79.dist-info → service_capacity_modeling-0.3.81.dist-info}/top_level.txt
RENAMED
File without changes