sovereign 0.19.3__py3-none-any.whl → 1.0.0b148__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sovereign might be problematic.

Files changed (80)
  1. sovereign/__init__.py +13 -81
  2. sovereign/app.py +59 -48
  3. sovereign/cache/__init__.py +172 -0
  4. sovereign/cache/backends/__init__.py +110 -0
  5. sovereign/cache/backends/s3.py +143 -0
  6. sovereign/cache/filesystem.py +73 -0
  7. sovereign/cache/types.py +15 -0
  8. sovereign/configuration.py +573 -0
  9. sovereign/constants.py +1 -0
  10. sovereign/context.py +271 -104
  11. sovereign/dynamic_config/__init__.py +113 -0
  12. sovereign/dynamic_config/deser.py +78 -0
  13. sovereign/dynamic_config/loaders.py +120 -0
  14. sovereign/events.py +49 -0
  15. sovereign/logging/access_logger.py +85 -0
  16. sovereign/logging/application_logger.py +54 -0
  17. sovereign/logging/base_logger.py +41 -0
  18. sovereign/logging/bootstrapper.py +36 -0
  19. sovereign/logging/types.py +10 -0
  20. sovereign/middlewares.py +8 -7
  21. sovereign/modifiers/lib.py +1 -0
  22. sovereign/rendering.py +192 -0
  23. sovereign/response_class.py +18 -0
  24. sovereign/server.py +93 -35
  25. sovereign/sources/file.py +1 -1
  26. sovereign/sources/inline.py +1 -0
  27. sovereign/sources/lib.py +1 -0
  28. sovereign/sources/poller.py +296 -53
  29. sovereign/statistics.py +17 -20
  30. sovereign/templates/base.html +59 -46
  31. sovereign/templates/resources.html +203 -102
  32. sovereign/testing/loaders.py +8 -0
  33. sovereign/{modifiers/test.py → testing/modifiers.py} +0 -2
  34. sovereign/tracing.py +102 -0
  35. sovereign/types.py +299 -0
  36. sovereign/utils/auth.py +26 -13
  37. sovereign/utils/crypto/__init__.py +0 -0
  38. sovereign/utils/crypto/crypto.py +135 -0
  39. sovereign/utils/crypto/suites/__init__.py +21 -0
  40. sovereign/utils/crypto/suites/aes_gcm_cipher.py +42 -0
  41. sovereign/utils/crypto/suites/base_cipher.py +21 -0
  42. sovereign/utils/crypto/suites/disabled_cipher.py +25 -0
  43. sovereign/utils/crypto/suites/fernet_cipher.py +29 -0
  44. sovereign/utils/dictupdate.py +2 -1
  45. sovereign/utils/eds.py +37 -21
  46. sovereign/utils/mock.py +54 -16
  47. sovereign/utils/resources.py +17 -0
  48. sovereign/utils/version_info.py +8 -0
  49. sovereign/views/__init__.py +4 -0
  50. sovereign/views/api.py +61 -0
  51. sovereign/views/crypto.py +46 -15
  52. sovereign/views/discovery.py +37 -116
  53. sovereign/views/healthchecks.py +87 -18
  54. sovereign/views/interface.py +112 -112
  55. sovereign/worker.py +204 -0
  56. {sovereign-0.19.3.dist-info → sovereign-1.0.0b148.dist-info}/METADATA +79 -76
  57. sovereign-1.0.0b148.dist-info/RECORD +77 -0
  58. {sovereign-0.19.3.dist-info → sovereign-1.0.0b148.dist-info}/WHEEL +1 -1
  59. sovereign-1.0.0b148.dist-info/entry_points.txt +38 -0
  60. sovereign_files/__init__.py +0 -0
  61. sovereign_files/static/darkmode.js +51 -0
  62. sovereign_files/static/node_expression.js +42 -0
  63. sovereign_files/static/panel.js +76 -0
  64. sovereign_files/static/resources.css +246 -0
  65. sovereign_files/static/resources.js +642 -0
  66. sovereign_files/static/sass/style.scss +33 -0
  67. sovereign_files/static/style.css +16143 -0
  68. sovereign_files/static/style.css.map +1 -0
  69. sovereign/config_loader.py +0 -225
  70. sovereign/discovery.py +0 -175
  71. sovereign/logs.py +0 -131
  72. sovereign/schemas.py +0 -780
  73. sovereign/static/sass/style.scss +0 -27
  74. sovereign/static/style.css +0 -13553
  75. sovereign/templates/ul_filter.html +0 -22
  76. sovereign/utils/crypto.py +0 -103
  77. sovereign/views/admin.py +0 -120
  78. sovereign-0.19.3.dist-info/LICENSE.txt +0 -13
  79. sovereign-0.19.3.dist-info/RECORD +0 -47
  80. sovereign-0.19.3.dist-info/entry_points.txt +0 -10
sovereign/dynamic_config/loaders.py ADDED
@@ -0,0 +1,120 @@
+ import os
+ import importlib
+ from typing import Any, Protocol
+ from pathlib import Path
+ from importlib.machinery import SourceFileLoader
+
+ import requests
+
+ from sovereign.utils.resources import get_package_file_bytes
+
+ try:
+     import boto3
+
+     BOTO_IS_AVAILABLE = True
+ except ImportError:
+     BOTO_IS_AVAILABLE = False
+
+
+ class CustomLoader(Protocol):
+     """
+     Custom loaders can be added to sovereign by creating a subclass
+     and then in config:
+
+     template_context:
+       context:
+         ...:
+           protocol: <loader name>
+           serialization: ...
+           path: <path argument>
+     """
+
+     default_deser: str = "yaml"
+
+     def load(self, path: str) -> Any: ...
+
+
+ class File(CustomLoader):
+     default_deser = "passthrough"
+
+     def load(self, path: str) -> Any:
+         with open(path) as f:
+             contents = f.read()
+         try:
+             return contents
+         except FileNotFoundError:
+             raise FileNotFoundError(f"Unable to load {path}")
+
+
+ class PackageData(CustomLoader):
+     default_deser = "string"
+
+     def load(self, path: str) -> Any:
+         pkg, pkg_file = path.split(":")
+         data = get_package_file_bytes(pkg, pkg_file)
+         return data
+
+
+ class Web(CustomLoader):
+     default_deser = "json"
+
+     def load(self, path: str) -> Any:
+         response = requests.get(path)
+         response.raise_for_status()
+         data = response.text
+         return data
+
+
+ class EnvironmentVariable(CustomLoader):
+     default_deser = "raw"
+
+     def load(self, path: str) -> Any:
+         data = os.getenv(path)
+         if data is None:
+             raise AttributeError(f"Unable to read environment variable {path}")
+         return data
+
+
+ class PythonModule(CustomLoader):
+     default_deser = "passthrough"
+
+     def load(self, path: str) -> Any:
+         if ":" in path:
+             mod, fn = path.rsplit(":", maxsplit=1)
+         else:
+             mod, fn = path, ""
+         imported = importlib.import_module(mod)
+         if fn != "":
+             return getattr(imported, fn)
+         return imported
+
+
+ class S3Bucket(CustomLoader):
+     default_deser = "raw"
+
+     def load(self, path: str) -> Any:
+         if not BOTO_IS_AVAILABLE:
+             raise ImportError(
+                 "boto3 must be installed to load S3 paths. Use ``pip install sovereign[boto]``"
+             )
+         bucket, key = path.split("/", maxsplit=1)
+         s3 = boto3.client("s3")
+         response = s3.get_object(Bucket=bucket, Key=key)
+         data = "".join([chunk.decode() for chunk in response["Body"]])
+         return data
+
+
+ class PythonInlineCode(CustomLoader):
+     default_deser = "passthrough"
+
+     def load(self, path: str) -> Any:
+         p = str(Path(path).absolute())
+         loader = SourceFileLoader(p, path=p)
+         return loader.load_module(p)
+
+
+ class Inline(CustomLoader):
+     default_deser = "string"
+
+     def load(self, path: str) -> Any:
+         return path
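
The docstring above shows how a loader is wired up from `template_context` configuration. As a point of reference, a minimal sketch of a user-defined loader that satisfies the `CustomLoader` protocol follows; the `Base64` class, its decoding behaviour, and the registration name in the config comment are hypothetical and not part of this release.

# Hypothetical custom loader, sketched against the CustomLoader protocol above.
# Assumes the protocol is importable from the new module path added in this release.
import base64
from typing import Any

from sovereign.dynamic_config.loaders import CustomLoader


class Base64(CustomLoader):
    # tells sovereign how to deserialize the loaded text afterwards ("yaml" is the protocol default)
    default_deser = "yaml"

    def load(self, path: str) -> Any:
        # `path` carries the argument from config; here it is treated as a base64-encoded document
        return base64.b64decode(path).decode()


# Referenced from config roughly as the docstring describes (keys from the docstring, values hypothetical):
#
# template_context:
#   context:
#     my_context_key:
#       protocol: <name this loader is registered under>
#       serialization: yaml
#       path: <base64-encoded document>
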
sovereign/events.py ADDED
@@ -0,0 +1,49 @@
+ import pydantic
+ from enum import IntEnum
+ from asyncio import Queue, gather
+ from collections import defaultdict
+ from typing import final, Sequence
+
+
+ Primitives = str | int | float | bool | Sequence[str]
+
+
+ class Topic(IntEnum):
+     CONTEXT = 1
+
+
+ class Event(pydantic.BaseModel):
+     message: str
+     metadata: dict[str, Primitives] = pydantic.Field(default_factory=dict)
+
+
+ @final
+ class EventBus:
+     def __init__(self, maxsize: int = 0):
+         self._topics: dict[Topic, list[Queue[Event]]] = defaultdict(list)
+         self._maxsize = maxsize
+
+     def subscribe(self, topic: Topic) -> Queue[Event]:
+         q: Queue[Event] = Queue(self._maxsize)
+         self._topics[topic].append(q)
+         return q
+
+     def unsubscribe(self, topic: Topic, q: Queue[Event]) -> None:
+         qs = self._topics.get(topic)
+         if not qs:
+             return
+         try:
+             qs.remove(q)
+         except ValueError:
+             pass
+         if not qs:
+             _ = self._topics.pop(topic, None)
+
+     async def publish(self, topic: Topic, msg: Event) -> None:
+         qs = self._topics.get(topic, [])
+         if not qs:
+             return
+         _ = await gather(*(q.put(msg) for q in qs))
+
+
+ bus = EventBus()
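
A minimal usage sketch for the bus defined above; only `bus`, `Topic`, and `Event` come from the diff, the surrounding asyncio wiring is assumed.

# Illustrative only; not part of the package diff.
import asyncio

from sovereign.events import Event, Topic, bus


async def main() -> None:
    # each subscriber gets its own Queue[Event]; publish() fans out to all of them
    q = bus.subscribe(Topic.CONTEXT)
    await bus.publish(Topic.CONTEXT, Event(message="context refreshed", metadata={"instances": 3}))
    event = await q.get()
    print(event.message, event.metadata)   # context refreshed {'instances': 3}
    bus.unsubscribe(Topic.CONTEXT, q)      # stop receiving further events


asyncio.run(main())
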
sovereign/logging/access_logger.py ADDED
@@ -0,0 +1,85 @@
+ from copy import deepcopy
+ from functools import cached_property
+ from typing import Any, Dict
+
+ import structlog
+ from starlette_context import context
+ from structlog.stdlib import BoundLogger
+
+ from sovereign.logging.base_logger import BaseLogger
+ from sovereign.logging.types import EventDict, LoggingType, ProcessedMessage
+ from sovereign.configuration import SovereignConfigv2
+
+
+ class AccessLogger(BaseLogger):
+     def __init__(self, root_logger: BoundLogger, config: SovereignConfigv2):
+         self._access_logs_enabled = config.logging.access_logs.enabled
+         self._ignore_empty = config.logging.access_logs.ignore_empty_fields
+         self._user_log_fmt = config.logging.access_logs.log_fmt
+
+         self.logger: BoundLogger = structlog.wrap_logger(
+             root_logger,
+             wrapper_class=structlog.BoundLogger,
+             processors=[
+                 self.is_enabled_processor,
+                 self.merge_starlette_contextvars,
+                 self.format_access_log_fields,
+             ],
+             type=LoggingType.ACCESS,
+         )
+
+     @cached_property
+     def is_enabled(self) -> bool:
+         return self._access_logs_enabled
+
+     @cached_property
+     def _default_log_fmt(self) -> Dict[str, str]:
+         return {
+             "type": "{type}",
+             "event": "{event}",
+             "env": "{ENVIRONMENT}",
+             "site": "{HOST}",
+             "method": "{METHOD}",
+             "uri_path": "{PATH}",
+             "uri_query": "{QUERY}",
+             "src_ip": "{SOURCE_IP}",
+             "src_port": "{SOURCE_PORT}",
+             "pid": "{PID}",
+             "user_agent": "{USER_AGENT}",
+             "bytes_in": "{BYTES_RX}",
+             "bytes_out": "{BYTES_TX}",
+             "status": "{STATUS_CODE}",
+             "duration": "{DURATION}",
+             "request_id": "{REQUEST_ID}",
+             "resource_version": "{XDS_CLIENT_VERSION} -> {XDS_SERVER_VERSION}",
+             "resource_names": "{XDS_RESOURCES}",
+             "envoy_ver": "{XDS_ENVOY_VERSION}",
+             "traceback": "{TRACEBACK}",
+             "error": "{ERROR}",
+             "detail": "{ERROR_DETAIL}",
+         }
+
+     def format_access_log_fields(
+         self, logger: BoundLogger, method_name: str, event_dict: EventDict
+     ) -> ProcessedMessage:
+         formatted_dict: Dict[str, Any] = dict()
+         for k, v in self.get_configured_log_format.items():
+             try:
+                 value: str = v.format(**event_dict)
+             except KeyError:
+                 value = "-"
+             if value in (None, "-") and self._ignore_empty:
+                 continue
+             formatted_dict[k] = value
+         return formatted_dict
+
+     def merge_starlette_contextvars(
+         self, _, __, event_dict: EventDict
+     ) -> ProcessedMessage:
+         merged_context = deepcopy(event_dict)
+         for k, v in context.data.items():
+             merged_context[k] = v
+         return merged_context
+
+     def queue_log_fields(self, **kwargs: Any) -> None:
+         context.update(kwargs)
sovereign/logging/application_logger.py ADDED
@@ -0,0 +1,54 @@
+ from functools import cached_property
+ from typing import Any, Dict
+
+ import structlog
+ from structlog.stdlib import BoundLogger
+
+ from sovereign.logging.base_logger import BaseLogger
+ from sovereign.logging.types import EventDict, LoggingType, ProcessedMessage
+ from sovereign.configuration import SovereignConfigv2
+
+
+ class ApplicationLogger(BaseLogger):
+     def __init__(self, root_logger: BoundLogger, config: SovereignConfigv2):
+         self._application_logs_enabled = config.logging.application_logs.enabled
+         self._user_log_fmt = config.logging.application_logs.log_fmt
+
+         self.logger: BoundLogger = structlog.wrap_logger(
+             root_logger,
+             wrapper_class=structlog.BoundLogger,
+             processors=[
+                 self.is_enabled_processor,
+                 self.format_application_log_fields,
+             ],
+             type=LoggingType.APPLICATION,
+         )
+
+     @cached_property
+     def is_enabled(self) -> bool:
+         return self._application_logs_enabled
+
+     @cached_property
+     def _default_log_fmt(self) -> Dict[str, str]:
+         return {
+             "type": "{type}",
+             "event": "{event}",
+             "error": "{error}",
+             "traceback": "{traceback}",
+             "last_update": "{last_update}",
+             "instance_count": "{instance_count}",
+         }
+
+     def format_application_log_fields(
+         self, logger: BoundLogger, method_name: str, event_dict: EventDict
+     ) -> ProcessedMessage:
+         formatted_dict: Dict[str, Any] = {
+             "level": method_name,
+         }
+         for k, v in self.get_configured_log_format.items():
+             try:
+                 value: str = v.format(**event_dict)
+             except KeyError:
+                 continue
+             formatted_dict[k] = value
+         return formatted_dict
sovereign/logging/base_logger.py ADDED
@@ -0,0 +1,41 @@
+ import json
+ from abc import ABC, abstractmethod
+ from functools import cached_property
+ from typing import Dict, Optional
+
+ from structlog.exceptions import DropEvent
+ from structlog.stdlib import BoundLogger
+
+ from sovereign.logging.types import EventDict, ProcessedMessage
+
+
+ class BaseLogger(ABC):
+     _user_log_fmt: Optional[str]
+
+     @property
+     @abstractmethod
+     def is_enabled(self) -> bool: ...
+
+     @property
+     @abstractmethod
+     def _default_log_fmt(self) -> Dict[str, str]: ...
+
+     def is_enabled_processor(
+         self, logger: BoundLogger, method_name: str, event_dict: EventDict
+     ) -> ProcessedMessage:
+         if not self.is_enabled:
+             raise DropEvent
+         return event_dict
+
+     @cached_property
+     def get_configured_log_format(self) -> Dict[str, str]:
+         if isinstance(self._user_log_fmt, str) and self._user_log_fmt != "":
+             format = json.loads(self._user_log_fmt)
+             if not isinstance(format, dict):
+                 raise RuntimeError(
+                     f"Failed to parse log format as JSON: {self._user_log_fmt}"
+                 )
+             if "event" not in format:
+                 format["event"] = "{event}"
+             return format
+         return self._default_log_fmt
sovereign/logging/bootstrapper.py ADDED
@@ -0,0 +1,36 @@
+ import structlog
+ from structlog.exceptions import DropEvent
+ from structlog.stdlib import BoundLogger
+
+ from sovereign.logging.access_logger import AccessLogger
+ from sovereign.logging.application_logger import ApplicationLogger
+ from sovereign.logging.types import EventDict, ProcessedMessage
+ from sovereign.configuration import SovereignConfigv2
+
+
+ class LoggerBootstrapper:
+     def __init__(self, config: SovereignConfigv2) -> None:
+         self.show_debug: bool = config.debug
+
+         structlog.configure(
+             processors=[
+                 self.debug_logs_processor,
+                 structlog.processors.JSONRenderer(),
+             ]
+         )
+         root_logger: BoundLogger = structlog.get_logger()
+         self.logger = root_logger
+
+         self.access_logger = AccessLogger(root_logger=root_logger, config=config)
+         self.application_logger = ApplicationLogger(
+             root_logger=root_logger, config=config
+         )
+
+     def debug_logs_processor(
+         self, logger: BoundLogger, method_name: str, event_dict: EventDict
+     ) -> ProcessedMessage:
+         if not self.show_debug and event_dict.get("level", "").lower() == "debug":
+             raise DropEvent
+         if method_name == "debug" and self.show_debug is False:
+             raise DropEvent
+         return event_dict
sovereign/logging/types.py ADDED
@@ -0,0 +1,10 @@
+ from enum import StrEnum
+ from typing import Any, Mapping, MutableMapping, Tuple, Union
+
+ EventDict = MutableMapping[str, Any]
+ ProcessedMessage = Union[Mapping[str, Any], str, bytes, Tuple[Any, ...]]
+
+
+ class LoggingType(StrEnum):
+     ACCESS = "access"
+     APPLICATION = "application"
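
To make the formatting contract of the loggers above concrete: each configured value is a `str.format` template applied to the event dict, a user-supplied `log_fmt` must be a JSON object (with `{event}` injected if omitted), and missing fields fall back to `-` in the access logger (the application logger simply skips them). A standalone sketch of that behaviour, with illustrative field names taken from the defaults shown above:

# Standalone illustration of the log_fmt mechanism; not part of the package.
import json
from typing import Any, Dict

user_log_fmt = json.dumps({"method": "{METHOD}", "path": "{PATH}", "status": "{STATUS_CODE}"})

fmt: Dict[str, str] = json.loads(user_log_fmt)
if "event" not in fmt:                      # base_logger.py injects the event field when omitted
    fmt["event"] = "{event}"

event_dict: Dict[str, Any] = {"event": "request", "METHOD": "GET", "PATH": "/v3/discovery:clusters"}

formatted: Dict[str, Any] = {}
for key, template in fmt.items():
    try:
        value = template.format(**event_dict)
    except KeyError:                        # access logger substitutes "-" for fields it cannot fill
        value = "-"
    formatted[key] = value

print(formatted)
# {'method': 'GET', 'path': '/v3/discovery:clusters', 'status': '-', 'event': 'request'}
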
sovereign/middlewares.py CHANGED
@@ -1,10 +1,12 @@
  import os
  import time
  from uuid import uuid4
+
  from fastapi.requests import Request
  from fastapi.responses import Response
  from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
- from sovereign import config, logs, get_request_id, _request_id_ctx_var, stats
+
+ from sovereign import _request_id_ctx_var, config, get_request_id, logs, stats


  class RequestContextLogMiddleware(BaseHTTPMiddleware):
@@ -17,8 +19,8 @@ class RequestContextLogMiddleware(BaseHTTPMiddleware):
              response = await call_next(request)
          finally:
              req_id = get_request_id()
-             response.headers["X-Request-ID"] = req_id
-             logs.queue_log_fields(REQUEST_ID=req_id)
+             req_id = response.headers.setdefault("X-Request-Id", get_request_id())
+             logs.access_logger.queue_log_fields(REQUEST_ID=req_id)
              _request_id_ctx_var.reset(token)
          return response

@@ -36,8 +38,7 @@ class LoggingMiddleware(BaseHTTPMiddleware):
          source_port = addr.port
          if xff := request.headers.get("X-Forwarded-For"):
              source_ip = xff.split(",")[0]  # leftmost address
-         logs.clear_log_fields()
-         logs.queue_log_fields(
+         logs.access_logger.queue_log_fields(
              ENVIRONMENT=config.legacy_fields.environment,
              HOST=request.headers.get("host", "-"),
              METHOD=request.method,
@@ -53,7 +54,7 @@ class LoggingMiddleware(BaseHTTPMiddleware):
              response = await call_next(request)
          finally:
              duration = time.time() - start_time
-             logs.queue_log_fields(
+             logs.access_logger.queue_log_fields(
                  BYTES_TX=response.headers.get("content-length", "-"),
                  STATUS_CODE=response.status_code,
                  DURATION=duration,
@@ -72,5 +73,5 @@ class LoggingMiddleware(BaseHTTPMiddleware):
          ]
          stats.increment("discovery.rq_total", tags=tags)
          stats.timing("discovery.rq_ms", value=duration * 1000, tags=tags)
-         logs.logger.msg("request")
+         logs.access_logger.logger.info("request")
          return response
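
One behavioural nuance in the first hunk above: the old code always overwrote `X-Request-ID`, while `setdefault` keeps a request id that something downstream already placed on the response and only falls back to the freshly generated one. Roughly, with a plain dict standing in for the response header map:

# dict stand-in for response.headers, to illustrate the setdefault semantics used above
headers = {"X-Request-Id": "abc-123"}                       # already set further down the stack
req_id = headers.setdefault("X-Request-Id", "generated-id")
print(req_id, headers["X-Request-Id"])                      # abc-123 abc-123 -- existing value wins

headers.clear()
req_id = headers.setdefault("X-Request-Id", "generated-id")
print(req_id)                                               # generated-id -- fallback when absent
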
sovereign/modifiers/lib.py CHANGED
@@ -6,6 +6,7 @@ used via configuration.

  `todo entry point install guide`
  """
+
  import abc
  from typing import List, Any, Dict

sovereign/rendering.py ADDED
@@ -0,0 +1,192 @@
+ """
+ Discovery
+ ---------
+
+ Functions used to render and return discovery responses to Envoy proxies.
+
+ The templates are configurable. `todo See ref:Configuration#Templates`
+ """
+
+ import traceback
+ import importlib
+ from concurrent.futures import ThreadPoolExecutor
+ from multiprocessing import Process, Pipe, cpu_count
+ from multiprocessing.connection import Connection
+ from typing import Any
+
+ import yaml
+ import pydantic
+ from starlette.exceptions import HTTPException
+ from yaml.parser import ParserError, ScannerError  # type: ignore
+
+ from sovereign import logs, cache, stats, application_logger as log
+ from sovereign.cache.types import Entry
+ from sovereign.configuration import config
+ from sovereign.types import DiscoveryRequest, ProcessedTemplate
+
+
+ writer = cache.CacheWriter()
+ # limit render jobs to number of cores
+ POOL = ThreadPoolExecutor(max_workers=cpu_count())
+
+ type_urls = {
+     "v2": {
+         "listeners": "type.googleapis.com/envoy.api.v2.Listener",
+         "clusters": "type.googleapis.com/envoy.api.v2.Cluster",
+         "endpoints": "type.googleapis.com/envoy.api.v2.ClusterLoadAssignment",
+         "secrets": "type.googleapis.com/envoy.api.v2.auth.Secret",
+         "routes": "type.googleapis.com/envoy.api.v2.RouteConfiguration",
+         "scoped-routes": "type.googleapis.com/envoy.api.v2.ScopedRouteConfiguration",
+     },
+     "v3": {
+         "listeners": "type.googleapis.com/envoy.config.listener.v3.Listener",
+         "clusters": "type.googleapis.com/envoy.config.cluster.v3.Cluster",
+         "endpoints": "type.googleapis.com/envoy.config.endpoint.v3.ClusterLoadAssignment",
+         "secrets": "type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.Secret",
+         "routes": "type.googleapis.com/envoy.config.route.v3.RouteConfiguration",
+         "scoped-routes": "type.googleapis.com/envoy.config.route.v3.ScopedRouteConfiguration",
+         "runtime": "type.googleapis.com/envoy.service.runtime.v3.Runtime",
+     },
+ }
+
+
+ class RenderJob(pydantic.BaseModel):
+     id: str
+     request: DiscoveryRequest
+     context: dict[str, Any]
+
+     def submit(self):
+         return POOL.submit(self._run)
+
+     def _run(self):
+         rx, tx = Pipe()
+         proc = Process(target=generate, args=[self, tx])
+         proc.start()
+         log.info(
+             (
+                 f"Spawning process for id={self.id} "
+                 f"max_workers={POOL._max_workers} "
+                 f"threads={len(POOL._threads)} "
+                 f"shutdown={POOL._shutdown} "
+                 f"queue_size={POOL._work_queue.qsize()}"
+             )
+         )
+         proc.join(timeout=60)  # TODO: render timeout configurable
+         if proc.is_alive():
+             log.warning(f"Render job for {self.id} has been running longer than 60s")
+         while rx.poll(timeout=10):
+             level, message = rx.recv()
+             logger = getattr(log, level)
+             logger(message)
+
+
+ def generate(job: RenderJob, tx: Connection) -> None:
+     request = job.request
+     tags = [f"type:{request.resource_type}"]
+     try:
+         with stats.timed("template.render_ms", tags=tags):
+             content = request.template.generate(
+                 discovery_request=request,
+                 host_header=request.desired_controlplane,
+                 resource_names=request.resources,
+                 **job.context,
+             )
+         if not request.template.is_python_source:
+             assert isinstance(content, str)
+             content = deserialize_config(content)
+         assert isinstance(content, dict)
+         resources = filter_resources(content["resources"], request.resources)
+         add_type_urls(request.api_version, request.resource_type, resources)
+         response = ProcessedTemplate(resources=resources)
+         tx.send(("info", f"Completed rendering of {request} for {job.id}"))
+         cached, cache_result = writer.set(
+             job.id,
+             Entry(
+                 text=response.model_dump_json(indent=None),
+                 len=len(response.resources),
+                 version=response.version_info,
+                 node=request.node,
+             ),
+         )
+         tx.send(cache_result)
+         if cached:
+             tags.append("result:ok")
+         else:
+             tags.append("result:cache_failed")
+     except Exception as e:
+         tx.send(
+             (
+                 "error",
+                 f"Failed to render job for {job.id}: " + str(traceback.format_exc()),
+             )
+         )
+         tags.append("result:err")
+         tags.append(f"error:{e.__class__.__name__.lower()}")
+         if config.sentry_dsn.get_secret_value():
+             mod = importlib.import_module("sentry_sdk")
+             mod.capture_exception(e)
+     finally:
+         stats.increment("template.render", tags=tags)
+         tx.close()
+
+
+ def deserialize_config(content: str) -> dict[str, Any]:
+     try:
+         envoy_configuration = yaml.safe_load(content)
+     except (ParserError, ScannerError) as e:
+         logs.access_logger.queue_log_fields(
+             error=repr(e),
+             YAML_CONTEXT=e.context,
+             YAML_CONTEXT_MARK=e.context_mark,
+             YAML_NOTE=e.note,
+             YAML_PROBLEM=e.problem,
+             YAML_PROBLEM_MARK=e.problem_mark,
+         )
+
+         if config.sentry_dsn:
+             mod = importlib.import_module("sentry_sdk")
+             mod.capture_exception(e)
+
+         raise HTTPException(
+             status_code=500,
+             detail=(
+                 "Failed to load configuration, there may be "
+                 "a syntax error in the configured templates. "
+                 "Please check Sentry if you have configured Sentry DSN"
+             ),
+         )
+     if not isinstance(envoy_configuration, dict):
+         raise RuntimeError(
+             f"Deserialized configuration is of unexpected format: {envoy_configuration}"
+         )
+     return envoy_configuration
+
+
+ def filter_resources(
+     generated: list[dict[str, Any]], requested: list[str]
+ ) -> list[dict[str, Any]]:
+     """
+     If Envoy specifically requested a resource, this removes everything
+     that does not match the name of the resource.
+     If Envoy did not specifically request anything, every resource is retained.
+     """
+     if len(requested) == 0:
+         return generated
+     return [resource for resource in generated if resource_name(resource) in requested]
+
+
+ def resource_name(resource: dict[str, Any]) -> str:
+     name = resource.get("name") or resource.get("cluster_name")
+     if isinstance(name, str):
+         return name
+     raise KeyError(
+         f"Failed to determine the name or cluster_name of the following resource: {resource}"
+     )
+
+
+ def add_type_urls(api_version, resource_type, resources):
+     type_url = type_urls.get(api_version, {}).get(resource_type)
+     if type_url is not None:
+         for resource in resources:
+             if not resource.get("@type"):
+                 resource["@type"] = type_url
sovereign/response_class.py ADDED
@@ -0,0 +1,18 @@
+ from typing import Type
+ from importlib.util import find_spec
+ from fastapi.responses import JSONResponse
+
+
+ json_response_class: Type[JSONResponse] = JSONResponse
+ if find_spec("orjson"):
+     from fastapi.responses import ORJSONResponse
+
+     json_response_class = ORJSONResponse
+
+ elif find_spec("ujson"):
+     from fastapi.responses import UJSONResponse
+
+     json_response_class = UJSONResponse
+
+
+ __all__ = ["json_response_class"]