sovereign 1.0.0a4__py3-none-any.whl → 1.0.0b148__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Files changed (65)
  1. sovereign/__init__.py +2 -2
  2. sovereign/app.py +3 -6
  3. sovereign/cache/__init__.py +12 -85
  4. sovereign/cache/backends/__init__.py +1 -1
  5. sovereign/cache/backends/s3.py +6 -24
  6. sovereign/cache/filesystem.py +5 -6
  7. sovereign/cache/types.py +0 -2
  8. sovereign/configuration.py +8 -42
  9. sovereign/context.py +9 -8
  10. sovereign/dynamic_config/__init__.py +4 -3
  11. sovereign/dynamic_config/deser.py +1 -1
  12. sovereign/dynamic_config/loaders.py +3 -3
  13. sovereign/error_info.py +3 -2
  14. sovereign/events.py +3 -3
  15. sovereign/logging/access_logger.py +1 -1
  16. sovereign/logging/application_logger.py +1 -1
  17. sovereign/logging/bootstrapper.py +1 -1
  18. sovereign/modifiers/lib.py +1 -1
  19. sovereign/rendering.py +90 -22
  20. sovereign/response_class.py +2 -2
  21. sovereign/server.py +26 -45
  22. sovereign/sources/__init__.py +3 -0
  23. sovereign/sources/file.py +21 -0
  24. sovereign/sources/inline.py +39 -0
  25. sovereign/sources/lib.py +41 -0
  26. sovereign/sources/poller.py +537 -0
  27. sovereign/statistics.py +1 -2
  28. sovereign/testing/loaders.py +0 -1
  29. sovereign/tracing.py +5 -6
  30. sovereign/types.py +10 -15
  31. sovereign/utils/auth.py +2 -3
  32. sovereign/utils/crypto/suites/disabled_cipher.py +2 -2
  33. sovereign/utils/dictupdate.py +1 -1
  34. sovereign/utils/eds.py +1 -3
  35. sovereign/utils/entry_point_loader.py +2 -2
  36. sovereign/utils/mock.py +3 -4
  37. sovereign/utils/resources.py +1 -1
  38. sovereign/utils/templates.py +2 -4
  39. sovereign/utils/timer.py +3 -5
  40. sovereign/utils/weighted_clusters.py +1 -2
  41. sovereign/views/__init__.py +3 -6
  42. sovereign/views/api.py +7 -28
  43. sovereign/views/crypto.py +1 -1
  44. sovereign/views/discovery.py +5 -20
  45. sovereign/views/healthchecks.py +27 -45
  46. sovereign/views/interface.py +10 -70
  47. sovereign/worker.py +31 -20
  48. {sovereign-1.0.0a4.dist-info → sovereign-1.0.0b148.dist-info}/METADATA +3 -4
  49. sovereign-1.0.0b148.dist-info/RECORD +77 -0
  50. {sovereign-1.0.0a4.dist-info → sovereign-1.0.0b148.dist-info}/entry_points.txt +0 -8
  51. sovereign/rendering_common.py +0 -91
  52. sovereign/v2/__init__.py +0 -0
  53. sovereign/v2/data/data_store.py +0 -621
  54. sovereign/v2/data/render_discovery_response.py +0 -24
  55. sovereign/v2/data/repositories.py +0 -90
  56. sovereign/v2/data/utils.py +0 -33
  57. sovereign/v2/data/worker_queue.py +0 -273
  58. sovereign/v2/jobs/refresh_context.py +0 -117
  59. sovereign/v2/jobs/render_discovery_job.py +0 -145
  60. sovereign/v2/logging.py +0 -81
  61. sovereign/v2/types.py +0 -41
  62. sovereign/v2/web.py +0 -101
  63. sovereign/v2/worker.py +0 -199
  64. sovereign-1.0.0a4.dist-info/RECORD +0 -85
  65. {sovereign-1.0.0a4.dist-info → sovereign-1.0.0b148.dist-info}/WHEEL +0 -0
sovereign/sources/poller.py ADDED
@@ -0,0 +1,537 @@
+ import json
+ import uuid
+ import asyncio
+ import traceback
+ from copy import deepcopy
+ from importlib.metadata import EntryPoint
+ from datetime import timedelta, datetime
+ from typing import Iterable, Any, Dict, List, Union, Type, Optional
+
+ from glom import glom, PathAccessError
+ from sovereign.statistics import StatsDProxy
+
+ from sovereign.types import Node
+ from sovereign.configuration import ConfiguredSource, SourceData, config
+ from sovereign.utils.entry_point_loader import EntryPointLoader
+ from sovereign.sources.lib import Source
+ from sovereign.modifiers.lib import Modifier, GlobalModifier
+ from sovereign.events import bus, Topic, Event
+
+ from structlog.stdlib import BoundLogger
+
+
+ def is_debug_request(v: str, debug: bool = False) -> bool:
+     return v == "" and debug
+
+
+ def is_wildcard(v: List[str]) -> bool:
+     return v in [["*"], "*", ("*",)]
+
+
+ def contains(container: Iterable[Any], item: Any) -> bool:
+     return item in container
+
+
+ Mods = Dict[str, Type[Modifier]]
+ GMods = Dict[str, Type[GlobalModifier]]
+
+
+ def _deep_diff(old, new, path="") -> list[dict[str, Any]]:
+     changes: list[dict[str, Any]] = []
+
+     # handle add/remove
+     if (old, new) == (None, None):
+         return changes
+     elif old is None:
+         changes.append({"op": "add", "path": path, "value": new})
+         return changes
+     elif new is None:
+         changes.append({"op": "remove", "path": path, "old_value": old})
+         return changes
+
+     # handle completely different types
+     if type(old) is not type(new):
+         changes.append(
+             {"op": "change", "path": path, "old_value": old, "new_value": new}
+         )
+         return changes
+
+     # handle fields recursively
+     if isinstance(old, dict) and isinstance(new, dict):
+         all_keys = set(old.keys()) | set(new.keys())
+
+         for key in sorted(all_keys):
+             old_val = old.get(key)
+             new_val = new.get(key)
+
+             current_path = f"{path}.{key}" if path else key
+
+             if key not in old:
+                 changes.append({"op": "add", "path": current_path, "value": new_val})
+             elif key not in new:
+                 changes.append(
+                     {"op": "remove", "path": current_path, "old_value": old_val}
+                 )
+             elif old_val != new_val:
+                 nested_changes = _deep_diff(old_val, new_val, current_path)
+                 changes.extend(nested_changes)
+
+     # handle items recursively
+     elif isinstance(old, list) and isinstance(new, list):
+         max_len = max(len(old), len(new))
+
+         for i in range(max_len):
+             current_path = f"{path}[{i}]" if path else f"[{i}]"
+
+             if i >= len(old):
+                 changes.append({"op": "add", "path": current_path, "value": new[i]})
+             elif i >= len(new):
+                 changes.append(
+                     {"op": "remove", "path": current_path, "old_value": old[i]}
+                 )
+             elif old[i] != new[i]:
+                 nested_changes = _deep_diff(old[i], new[i], current_path)
+                 changes.extend(nested_changes)
+
+     # handle primitives
+     else:
+         if old != new:
+             changes.append(
+                 {"op": "change", "path": path, "old_value": old, "new_value": new}
+             )
+
+     return changes
+
+
+ def per_field_diff(old, new) -> list[dict[str, Any]]:
+     changes = []
+     max_len = max(len(old), len(new))
+
+     for i in range(max_len):
+         old_inst = old[i] if i < len(old) else None
+         new_inst = new[i] if i < len(new) else None
+
+         if old_inst is None:
+             changes.append({"op": "add", "path": f"[{i}]", "value": new_inst})
+         elif new_inst is None:
+             changes.append({"op": "remove", "path": f"[{i}]", "old_value": old_inst})
+         elif old_inst != new_inst:
+             # Use the deep diff with index prefix
+             field_changes = _deep_diff(old_inst, new_inst, f"[{i}]")
+             changes.extend(field_changes)
+
+     return changes
+
+
+ def _gen_uuid(diff_summary: dict[str, Any]) -> str:
+     blob = json.dumps(diff_summary, sort_keys=True, separators=("", ""))
+     return str(uuid.uuid5(uuid.NAMESPACE_DNS, blob))
+
+
+ def source_diff_summary(prev, curr) -> dict[str, Any]:
+     if prev is None:
+         summary = {
+             "type": "initial_load",
+             "scopes": {
+                 scope: {"added": len(instances)}
+                 for scope, instances in curr.scopes.items()
+                 if instances
+             },
+         }
+     else:
+         summary = {"type": "update", "scopes": {}}
+
+         all_scopes = set(prev.scopes.keys()) | set(curr.scopes.keys())
+
+         for scope in sorted(all_scopes):
+             old = prev.scopes.get(scope, [])
+             new = curr.scopes.get(scope, [])
+
+             n_old = len(old)
+             n_new = len(new)
+
+             scope_changes: dict[str, Any] = {}
+
+             if n_old == 0 and n_new > 0:
+                 scope_changes["added"] = n_new
+             elif n_old > 0 and n_new == 0:
+                 scope_changes["removed"] = n_old
+             elif old != new:
+                 detailed_changes = per_field_diff(old, new)
+                 if detailed_changes:
+                     scope_changes["field_changes"] = detailed_changes
+                 scope_changes["count_change"] = n_new - n_old
+
+             if scope_changes:
+                 summary["scopes"][scope] = scope_changes  # type: ignore
+
+         if not summary["scopes"]:
+             summary = {"type": "no_changes"}
+
+     summary["uuid"] = _gen_uuid(summary)
+     return summary
+
+
+ class SourcePoller:
+     stats: StatsDProxy
+
+     def __init__(
+         self,
+         sources: List[ConfiguredSource],
+         matching_enabled: bool,
+         node_match_key: Optional[str],
+         source_match_key: Optional[str],
+         source_refresh_rate: int,
+         logger: BoundLogger,
+         stats: Any,
+     ):
+         self.matching_enabled = matching_enabled
+         self.node_match_key = node_match_key
+         self.source_match_key = source_match_key
+         self.source_refresh_rate = source_refresh_rate
+         self.logger = logger
+         self.stats = stats
+
+         self.entry_points = EntryPointLoader("sources", "modifiers", "global_modifiers")
+
+         self.source_classes: Dict[str, Type[Source]] = {
+             e.name: e.load() for e in self.entry_points.groups["sources"]
+         }
+         self.sources = [self.setup_source(s) for s in sources]
+         if not self.sources:
+             raise RuntimeError("No data sources available!")
+
+         # These have to be loaded later to avoid circular imports
+         self.modifiers: Mods = dict()
+         self.global_modifiers: GMods = dict()
+
+         # initially set data and modify
+         self.source_data: SourceData = SourceData()
+         self.source_data_modified: SourceData = SourceData()
+         self.last_updated = datetime.now()
+         self.instance_count = 0
+         self.initialized = False
+
+         self.cache: dict[str, dict[str, list[dict[str, Any]]]] = {}
+         self.registry: set[Any] = set()
+
+         # Retry state
+         self.retry_count = 0
+
+     @property
+     def data_is_stale(self) -> bool:
+         return self.last_updated < datetime.now() - timedelta(minutes=2)
+
+     def setup_source(self, configured_source: ConfiguredSource) -> Source:
+         source_class = self.source_classes[configured_source.type]
+         source = source_class(
+             config=configured_source.config,
+             scope=configured_source.scope,
+         )
+         source.setup()
+         return source
+
+     def lazy_load_modifiers(self, modifiers: List[str]) -> None:
+         if len(self.modifiers) == len(modifiers):
+             return
+         self.modifiers = self.load_modifier_entrypoints(
+             self.entry_points.groups["modifiers"], modifiers
+         )
+
+     def lazy_load_global_modifiers(self, global_modifiers: List[str]) -> None:
+         if len(self.global_modifiers) == len(global_modifiers):
+             return
+         self.global_modifiers = self.load_global_modifier_entrypoints(
+             self.entry_points.groups["global_modifiers"], global_modifiers
+         )
+
+     def load_modifier_entrypoints(
+         self, entry_points: Iterable[EntryPoint], configured_modifiers: List[str]
+     ) -> Dict[str, Type[Modifier]]:
+         ret = dict()
+         for entry_point in entry_points:
+             if entry_point.name in configured_modifiers:
+                 self.logger.debug(f"Loading modifier {entry_point.name}")
+                 ret[entry_point.name] = entry_point.load()
+         loaded = len(ret)
+         configured = len(configured_modifiers)
+         assert loaded == configured, (
+             f"Number of modifiers loaded ({loaded})"
+             f"differ from configured: {configured_modifiers}"
+         )
+         return ret
+
+     def load_global_modifier_entrypoints(
+         self, entry_points: Iterable[EntryPoint], configured_modifiers: List[str]
+     ) -> Dict[str, Type[GlobalModifier]]:
+         ret = dict()
+         for entry_point in entry_points:
+             if entry_point.name in configured_modifiers:
+                 self.logger.debug(f"Loading global modifier {entry_point.name}")
+                 ret[entry_point.name] = entry_point.load()
+
+         loaded = len(ret)
+         configured = len(configured_modifiers)
+         assert loaded == configured, (
+             f"Number of global modifiers loaded ({loaded})"
+             f"differ from configured: {configured_modifiers}"
+         )
+         return ret
+
+     def apply_modifications(self, data: Optional[SourceData]) -> SourceData:
+         if data is None:
+             data = self.source_data
+         if len(self.modifiers) or len(self.global_modifiers):
+             try:
+                 with self.stats.timed("modifiers.apply_ms"):
+                     data = deepcopy(data)
+                     for scope, instances in data.scopes.items():
+                         for g in self.global_modifiers.values():
+                             global_modifier = g(instances)
+                             global_modifier.apply()
+                             data.scopes[scope] = global_modifier.join()
+
+                         for instance in data.scopes[scope]:
+                             for m in self.modifiers.values():
+                                 modifier = m(instance)
+                                 if modifier.match():
+                                     # Modifies the instance in-place
+                                     modifier.apply()
+                 self.stats.increment("modifiers.apply.success")
+
+             except Exception:
+                 self.stats.increment("modifiers.apply.failure")
+                 raise
+
+         return data
+
+     def refresh(self) -> bool:
+         self.stats.increment("sources.attempt")
+
+         # Get retry config from global source config
+         max_retries = config.source_config.max_retries
+
+         try:
+             new = SourceData()
+             for source in self.sources:
+                 scope = source.scope
+                 if scope not in new.scopes:
+                     new.scopes[scope] = []
+                 new.scopes[scope].extend(source.get())
+         except Exception as e:
+             self.retry_count += 1
+             self.logger.error(
+                 event=f"Error while refreshing sources (attempt {self.retry_count}/{max_retries})",
+                 traceback=[line for line in traceback.format_exc().split("\n")],
+                 error=e.__class__.__name__,
+                 detail=getattr(e, "detail", "-"),
+                 retry_count=self.retry_count,
+             )
+             self.stats.increment("sources.error")
+
+             if self.retry_count >= max_retries:
+                 # Reset retry count for next cycle
+                 self.retry_count = 0
+                 self.stats.increment("sources.error.final")
+             return False
+
+         # Success - reset retry count
+         self.retry_count = 0
+
+         # Is the new data the same as what we currently have
+         if new == getattr(self, "source_data", None):
+             self.stats.increment("sources.unchanged")
+             self.last_updated = datetime.now()
+             return False
+         else:
+             self.stats.increment("sources.refreshed")
+             self.last_updated = datetime.now()
+             old_data = getattr(self, "source_data", None)
+             self.instance_count = len(
+                 [instance for scope in new.scopes.values() for instance in scope]
+             )
+
+             if config.logging.log_source_diffs:
+                 diff_summary = source_diff_summary(old_data, new)
+                 # printing json directly because the logger is fucking stupid
+                 print(
+                     json.dumps(
+                         dict(
+                             event="Sources refreshed with changes",
+                             level="info",
+                             diff=diff_summary,
+                             total_instances=self.instance_count,
+                         )
+                     )
+                 )
+
+             self.source_data = new
+             return True
+
+     def extract_node_key(self, node: Union[Node, Dict[Any, Any]]) -> Any:
+         if self.node_match_key is None:
+             return
+         if "." not in self.node_match_key:
+             # key is not nested, don't need glom
+             node_value = getattr(node, self.node_match_key)
+         else:
+             try:
+                 node_value = glom(node, self.node_match_key)
+             except PathAccessError:
+                 raise RuntimeError(
+                     f'Failed to find key "{self.node_match_key}" in discoveryRequest({node})'
+                 )
+         return node_value
+
+     def extract_source_key(self, source: Dict[Any, Any]) -> Any:
+         if self.source_match_key is None:
+             return
+         if "." not in self.source_match_key:
+             # key is not nested, don't need glom
+             source_value = source[self.source_match_key]
+         else:
+             try:
+                 source_value = glom(source, self.source_match_key)
+             except PathAccessError:
+                 raise RuntimeError(
+                     f'Failed to find key "{self.source_match_key}" in instance({source})'
+                 )
+         return source_value
+
+     def match_node(
+         self,
+         node_value: Any,
+         modify: bool = True,
+     ) -> SourceData:
+         """
+         Checks a node against all sources, using the node_match_key and source_match_key
+         to determine if the node should receive the source in its configuration.
+         """
+
+         if self.data_is_stale:
+             # Log/emit metric and manually refresh sources.
+             self.stats.increment("sources.stale")
+             self.logger.debug(
+                 "Sources have not been refreshed in 2 minutes",
+                 last_update=self.last_updated,
+                 instance_count=self.instance_count,
+             )
+
+         ret = SourceData()
+         if modify:
+             data = self.source_data_modified
+         else:
+             data = self.source_data
+
+         for scope, instances in data.scopes.items():
+             if self.matching_enabled is False:
+                 ret.scopes[scope] = instances
+                 continue
+
+             for instance in instances:
+                 source_value = self.extract_source_key(instance)
+
+                 # If a single expression evaluates true, the remaining are not evaluated/executed.
+                 # This saves (a small amount of) computation, which helps when the server starts
+                 # to receive thousands of requests. The list has been ordered descending by what
+                 # we think will more commonly be true.
+                 match = (
+                     contains(source_value, node_value)
+                     or node_value == source_value
+                     or is_wildcard(node_value)
+                     or is_wildcard(source_value)
+                     or is_debug_request(node_value)
+                 )
+                 if match:
+                     if scope not in ret.scopes:
+                         ret.scopes[scope] = []
+                     ret.scopes[scope].append(instance)
+         return ret
+
+     @property
+     def match_keys(self) -> List[str]:
+         """
+         Checks for all match keys present in existing sources and adds them to a list
+
+         A dict is used instead of a set because dicts cannot have duplicate keys, and
+         have ordering since python 3.6
+         """
+         ret: Dict[str, None] = dict()
+         ret["*"] = None
+         for _, instances in self.source_data.scopes.items():
+             if self.matching_enabled is False:
+                 break
+             for instance in instances:
+                 source_value = glom(instance, self.source_match_key)
+                 if isinstance(source_value, str):
+                     ret[source_value] = None
+                 elif isinstance(source_value, Iterable):
+                     for item in source_value:
+                         ret[item] = None
+                     continue
+                 ret[source_value] = None
+         return list(ret.keys())
+
+     def add_to_context(self, request, output):
+         """middleware for adding matched instances to context"""
+         node_value = self.extract_node_key(request.node)
+         self.registry.add(node_value)
+
+         if instances := self.cache.get(node_value, None):
+             output.update(instances)
+             return
+
+         result = self.get_filtered_instances(node_value)
+         output.update(result)
+
+     def get_filtered_instances(self, node_value):
+         matches = self.match_node(node_value=node_value)
+         result = {}
+         for scope, instances in matches.scopes.items():
+             if scope in ("default", None):
+                 result["instances"] = instances
+             else:
+                 result[scope] = instances
+         self.cache[node_value] = result
+         return result
+
+     async def poll(self) -> None:
+         updated = self.refresh()
+         self.source_data_modified = self.apply_modifications(self.source_data)
+         if not self.initialized:
+             await bus.publish(
+                 Topic.CONTEXT,
+                 Event(
+                     message="Sources initialized",
+                     metadata={"name": "sources"},
+                 ),
+             )
+             self.initialized = True
+         if updated:
+             self.cache.clear()
+             await bus.publish(
+                 Topic.CONTEXT,
+                 Event(
+                     message="Sources refreshed",
+                     metadata={"name": "sources"},
+                 ),
+             )
+
+     async def poll_forever(self) -> None:
+         while True:
+             try:
+                 await self.poll()
+
+                 # If we have retry count, use exponential backoff for next attempt
+                 if self.retry_count > 0:
+                     retry_delay = config.source_config.retry_delay
+                     delay = min(
+                         retry_delay * (2 ** (self.retry_count - 1)),
+                         self.source_refresh_rate,  # Cap at normal refresh rate
+                     )
+                     await asyncio.sleep(delay)
+                 else:
+                     await asyncio.sleep(self.source_refresh_rate)
+             except Exception as e:
+                 self.logger.error(f"Unexpected error in poll loop: {e}")
+                 await asyncio.sleep(self.source_refresh_rate)
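
A note for orientation, not part of the published diff: the change records emitted by _deep_diff (and rolled up by per_field_diff and source_diff_summary above) are plain dictionaries with an "op" of add, remove, or change plus a dotted/indexed "path". A minimal sketch, assuming the module is importable as sovereign.sources.poller and using made-up instance data:

    from sovereign.sources.poller import _deep_diff

    # Hypothetical before/after snapshots of a single source instance
    old = {"name": "svc-a", "endpoints": [{"address": "10.0.0.1", "port": 443}]}
    new = {"name": "svc-a", "endpoints": [{"address": "10.0.0.2", "port": 443}], "region": "us-east-1"}

    for change in _deep_diff(old, new):
        print(change)
    # {'op': 'change', 'path': 'endpoints[0].address', 'old_value': '10.0.0.1', 'new_value': '10.0.0.2'}
    # {'op': 'add', 'path': 'region', 'value': 'us-east-1'}

When config.logging.log_source_diffs is enabled, refresh() prints a JSON event whose diff field is a summary built from records of this shape.
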
sovereign/statistics.py CHANGED
@@ -1,7 +1,6 @@
  import logging
+ from typing import Optional, Any, Callable, Dict
  from functools import wraps
- from typing import Any, Callable, Dict, Optional
-
  from sovereign.configuration import config as sovereign_config

  STATSD: Dict[str, Optional["StatsDProxy"]] = {"instance": None}
sovereign/testing/loaders.py CHANGED
@@ -1,5 +1,4 @@
  from typing import Any
-
  from sovereign.dynamic_config.loaders import CustomLoader


sovereign/tracing.py CHANGED
@@ -1,14 +1,13 @@
+ from typing_extensions import NotRequired
+ from typing import TypedDict, Any
+ from sovereign.configuration import TracingConfig
  import time
  import uuid
- from contextlib import nullcontext
- from contextvars import ContextVar
- from typing import Any, TypedDict
-
  import requests
- from typing_extensions import NotRequired
+ from contextvars import ContextVar
+ from contextlib import nullcontext

  from sovereign import config
- from sovereign.configuration import TracingConfig

  _trace_id_ctx_var: ContextVar[str] = ContextVar("trace_id", default="")
  _span_id_ctx_var: ContextVar[str] = ContextVar("span_id", default="")
sovereign/types.py CHANGED
@@ -1,7 +1,8 @@
  import hashlib
  import importlib
- from functools import cached_property
  from types import ModuleType
+ from typing_extensions import Any, cast
+ from functools import cached_property

  import jmespath
  from jinja2 import Template
@@ -11,11 +12,11 @@ from pydantic import (
      Field,
      computed_field,
  )
- from typing_extensions import Any, cast

  from sovereign.dynamic_config import Loadable
  from sovereign.utils.version_info import compute_hash

+
  missing_arguments = {"missing", "positional", "arguments:"}


@@ -69,7 +70,6 @@ class Node(BaseModel):
          description="The ``--service-cluster`` configured by the Envoy client",
      )
      metadata: dict[str, Any] = Field(default_factory=dict, title="Key:value metadata")
-     # noinspection PyArgumentList
      locality: Locality = Field(Locality(), title="Locality")
      build_version: str | None = Field(
          None,  # Optional in the v3 Envoy API
@@ -135,18 +135,14 @@ class XdsTemplate(BaseModel):
              return {"resources": list(template_fn(*args, **kwargs))}
          except TypeError as e:
              if not set(str(e).split()).issuperset(missing_arguments):
-                 raise ValueError(
-                     f"Tried to render template '{self.resource_type}'. "
-                     f"Error calling function: {str(e)}"
-                 )
+                 raise e
              message_start = str(e).find(":")
              missing_args = str(e)[message_start + 2 :]
              supplied_args = list(kwargs.keys())
              raise TypeError(
-                 f"Tried to render template '{self.resource_type}' using partial arguments. "
+                 f"Tried to render a template using partial arguments. "
                  f"Missing args: {missing_args}. Supplied args: {args} "
-                 f"Supplied keyword args: {supplied_args}. "
-                 f"Add to `depends_on` to ensure required context is provided."
+                 f"Supplied keyword args: {supplied_args}"
              )
          else:
              return self.code.render(*args, **kwargs)
@@ -238,7 +234,6 @@
      def resources(self) -> Resources:
          return Resources(self.resource_names)

-     # noinspection PyShadowingBuiltins
      def cache_key(self, rules: list[str]) -> str:
          map = self.model_dump()
          hash = hashlib.sha256()
@@ -290,6 +285,10 @@
      resources: list[Any] = Field(..., title="The requested configuration resources")


+ class RegisterClientRequest(BaseModel):
+     request: DiscoveryRequest
+
+
  class ProcessedTemplate(BaseModel):
      resources: list[dict[str, Any]]
      metadata: list[str] = Field(default_factory=list, exclude=True)
@@ -298,7 +297,3 @@
      @cached_property
      def version_info(self) -> str:
          return compute_hash(self.resources)
-
-
- class RegisterClientRequest(BaseModel):
-     request: DiscoveryRequest
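
A note on the XdsTemplate hunk above: a TypeError is now re-raised unchanged unless its message matches CPython's "missing ... required positional arguments:" wording, in which case it is rewrapped with the shorter partial-arguments message. A rough standalone sketch of that detection; looks_like_partial_call and demo are illustrative names, not part of the package:

    missing_arguments = {"missing", "positional", "arguments:"}

    def looks_like_partial_call(exc: TypeError) -> bool:
        # CPython phrases these errors as, for example:
        # "demo() missing 2 required positional arguments: 'b' and 'c'"
        return set(str(exc).split()).issuperset(missing_arguments)

    def demo(a, b, c):
        return a + b + c

    try:
        demo(1)
    except TypeError as e:
        print(looks_like_partial_call(e))  # True

    try:
        len(1)
    except TypeError as e:
        print(looks_like_partial_call(e))  # False
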
sovereign/utils/auth.py CHANGED
@@ -1,10 +1,9 @@
  from cryptography.fernet import InvalidToken
  from fastapi.exceptions import HTTPException

- from sovereign import application_logger as log
- from sovereign import server_cipher_container, stats
- from sovereign.configuration import config
+ from sovereign import server_cipher_container, stats, application_logger as log
  from sovereign.types import DiscoveryRequest
+ from sovereign.configuration import config

  AUTH_ENABLED = config.authentication.enabled

sovereign/utils/crypto/suites/disabled_cipher.py CHANGED
@@ -10,10 +10,10 @@ class DisabledCipher(CipherSuite):
      def __str__(self) -> str:
          return "disabled"

-     def encrypt(self, data: str) -> str:
+     def encrypt(self, _: str) -> str:
          return "Unavailable (No Secret Key)"

-     def decrypt(self, data: str) -> str:
+     def decrypt(self, _: str) -> str:
          return "Unavailable (No Secret Key)"

      @property
sovereign/utils/dictupdate.py CHANGED
@@ -2,8 +2,8 @@
  # pylint: disable=no-name-in-module,too-many-branches
  """Stolen from the saltstack library"""

- import copy
  from collections.abc import Mapping
+ import copy


  def update(dest, upd, recursive_update=True, merge_lists=False):
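
The dictupdate hunk above only reorders imports, but for context: update() implements a saltstack-style recursive merge, in which nested mappings from upd are merged into dest instead of replacing it wholesale. A simplified sketch of the idea; recursive_update is illustrative, and the package's update() additionally handles merge_lists and a non-recursive mode:

    from collections.abc import Mapping
    import copy

    def recursive_update(dest, upd):
        # Shallow-copy the destination, then merge nested mappings key by key
        dest = copy.copy(dest)
        for key, val in upd.items():
            if isinstance(dest.get(key), Mapping) and isinstance(val, Mapping):
                dest[key] = recursive_update(dest[key], val)
            else:
                dest[key] = val
        return dest

    base = {"logging": {"level": "info", "json": True}, "port": 8080}
    override = {"logging": {"level": "debug"}}
    print(recursive_update(base, override))
    # {'logging': {'level': 'debug', 'json': True}, 'port': 8080}
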