sovereign 0.19.3__py3-none-any.whl → 1.0.0b148__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sovereign has been flagged as potentially problematic; see the registry's advisory listing for details.

Files changed (80) hide show
  1. sovereign/__init__.py +13 -81
  2. sovereign/app.py +59 -48
  3. sovereign/cache/__init__.py +172 -0
  4. sovereign/cache/backends/__init__.py +110 -0
  5. sovereign/cache/backends/s3.py +143 -0
  6. sovereign/cache/filesystem.py +73 -0
  7. sovereign/cache/types.py +15 -0
  8. sovereign/configuration.py +573 -0
  9. sovereign/constants.py +1 -0
  10. sovereign/context.py +271 -104
  11. sovereign/dynamic_config/__init__.py +113 -0
  12. sovereign/dynamic_config/deser.py +78 -0
  13. sovereign/dynamic_config/loaders.py +120 -0
  14. sovereign/events.py +49 -0
  15. sovereign/logging/access_logger.py +85 -0
  16. sovereign/logging/application_logger.py +54 -0
  17. sovereign/logging/base_logger.py +41 -0
  18. sovereign/logging/bootstrapper.py +36 -0
  19. sovereign/logging/types.py +10 -0
  20. sovereign/middlewares.py +8 -7
  21. sovereign/modifiers/lib.py +1 -0
  22. sovereign/rendering.py +192 -0
  23. sovereign/response_class.py +18 -0
  24. sovereign/server.py +93 -35
  25. sovereign/sources/file.py +1 -1
  26. sovereign/sources/inline.py +1 -0
  27. sovereign/sources/lib.py +1 -0
  28. sovereign/sources/poller.py +296 -53
  29. sovereign/statistics.py +17 -20
  30. sovereign/templates/base.html +59 -46
  31. sovereign/templates/resources.html +203 -102
  32. sovereign/testing/loaders.py +8 -0
  33. sovereign/{modifiers/test.py → testing/modifiers.py} +0 -2
  34. sovereign/tracing.py +102 -0
  35. sovereign/types.py +299 -0
  36. sovereign/utils/auth.py +26 -13
  37. sovereign/utils/crypto/__init__.py +0 -0
  38. sovereign/utils/crypto/crypto.py +135 -0
  39. sovereign/utils/crypto/suites/__init__.py +21 -0
  40. sovereign/utils/crypto/suites/aes_gcm_cipher.py +42 -0
  41. sovereign/utils/crypto/suites/base_cipher.py +21 -0
  42. sovereign/utils/crypto/suites/disabled_cipher.py +25 -0
  43. sovereign/utils/crypto/suites/fernet_cipher.py +29 -0
  44. sovereign/utils/dictupdate.py +2 -1
  45. sovereign/utils/eds.py +37 -21
  46. sovereign/utils/mock.py +54 -16
  47. sovereign/utils/resources.py +17 -0
  48. sovereign/utils/version_info.py +8 -0
  49. sovereign/views/__init__.py +4 -0
  50. sovereign/views/api.py +61 -0
  51. sovereign/views/crypto.py +46 -15
  52. sovereign/views/discovery.py +37 -116
  53. sovereign/views/healthchecks.py +87 -18
  54. sovereign/views/interface.py +112 -112
  55. sovereign/worker.py +204 -0
  56. {sovereign-0.19.3.dist-info → sovereign-1.0.0b148.dist-info}/METADATA +79 -76
  57. sovereign-1.0.0b148.dist-info/RECORD +77 -0
  58. {sovereign-0.19.3.dist-info → sovereign-1.0.0b148.dist-info}/WHEEL +1 -1
  59. sovereign-1.0.0b148.dist-info/entry_points.txt +38 -0
  60. sovereign_files/__init__.py +0 -0
  61. sovereign_files/static/darkmode.js +51 -0
  62. sovereign_files/static/node_expression.js +42 -0
  63. sovereign_files/static/panel.js +76 -0
  64. sovereign_files/static/resources.css +246 -0
  65. sovereign_files/static/resources.js +642 -0
  66. sovereign_files/static/sass/style.scss +33 -0
  67. sovereign_files/static/style.css +16143 -0
  68. sovereign_files/static/style.css.map +1 -0
  69. sovereign/config_loader.py +0 -225
  70. sovereign/discovery.py +0 -175
  71. sovereign/logs.py +0 -131
  72. sovereign/schemas.py +0 -780
  73. sovereign/static/sass/style.scss +0 -27
  74. sovereign/static/style.css +0 -13553
  75. sovereign/templates/ul_filter.html +0 -22
  76. sovereign/utils/crypto.py +0 -103
  77. sovereign/views/admin.py +0 -120
  78. sovereign-0.19.3.dist-info/LICENSE.txt +0 -13
  79. sovereign-0.19.3.dist-info/RECORD +0 -47
  80. sovereign-0.19.3.dist-info/entry_points.txt +0 -10
sovereign/context.py CHANGED
@@ -1,119 +1,286 @@
1
- from typing import Dict, Any, Generator, Iterable, NoReturn, Optional
2
- from copy import deepcopy
3
- from fastapi import HTTPException
4
- from sovereign.config_loader import Loadable
5
- from sovereign.schemas import DiscoveryRequest, XdsTemplate
6
- from sovereign.sources import SourcePoller
7
- from sovereign.utils.crypto import CipherSuite, CipherContainer
8
- from sovereign.utils.timer import poll_forever, poll_forever_cron
1
+ import time
2
+ import heapq
3
+ import zlib
4
+ import datetime
5
+ import asyncio
6
+ import inspect
7
+ from enum import Enum
8
+ from typing import Any, Callable, Optional, Union
9
+ from typing_extensions import final, override
9
10
 
11
+ import pydantic
12
+ from croniter import croniter
10
13
 
14
+ from sovereign import application_logger as log
15
+ from sovereign.types import DiscoveryRequest
16
+ from sovereign.configuration import config
17
+ from sovereign.statistics import configure_statsd
18
+ from sovereign.utils.timer import wait_until
19
+ from sovereign.dynamic_config import Loadable
20
+ from sovereign.events import bus, Event, Topic
21
+
22
+
23
+ stats = configure_statsd()
24
+ DEFAULT_RETRY_INTERVAL = config.template_context.refresh_retry_interval_secs
25
+ DEFAULT_NUM_RETRIES = config.template_context.refresh_num_retries
26
+
27
+
28
@final
class ScheduledTask:
    """Heap entry pairing a ContextTask with the monotonic time it is next due.

    Instances order themselves by due date so they can live in a heapq min-heap.
    """

    def __init__(self, task: "ContextTask"):
        self.task = task
        # Freshly registered entries are due immediately.
        self.due = time.monotonic()

    def __lt__(self, other: "ScheduledTask") -> bool:
        # heapq ordering: the entry with the earlier due date sorts first.
        return self.due < other.due

    async def run(
        self, output: dict[str, "ContextResult"], tasks: list["ScheduledTask"]
    ):
        """Refresh the wrapped task into ``output``, then requeue on ``tasks``."""
        await self.task.refresh(output)
        next_due = time.monotonic() + self.task.seconds_til_next_run
        self.due = next_due
        heapq.heappush(tasks, self)

    def __str__(self) -> str:
        return "ScheduledTask({})".format(self.task.name)
47
+
48
+
49
@final
class TemplateContext:
    """
    Owns the configured context-refresh tasks: schedules them, caches each
    task's most recent result, and exposes the merged context to templates.
    """

    def __init__(
        self,
        middleware: "list[Callable[[DiscoveryRequest, dict[str, Any]], None]] | None" = None,
    ) -> None:
        # Registered tasks, keyed by task name.
        self.tasks: dict[str, "ContextTask"] = dict()
        # Most recent successful result per task.
        self.results: dict[str, "ContextResult"] = dict()
        # Fingerprint of each result, used to detect changes in update_hash().
        self.hashes: dict[str, int] = dict()
        # Min-heap of tasks ordered by due date.
        self.scheduled: list["ScheduledTask"] = list()
        # Names of tasks currently mid-refresh; prevents overlapping runs.
        self.running: set[str] = set()
        self.middleware = middleware or list()
        # Strong references to fire-and-forget refresh tasks. asyncio keeps
        # only a weak reference to tasks, so without this set the event loop
        # could garbage-collect a refresh mid-flight.
        self._background: set["asyncio.Task[None]"] = set()

    @classmethod
    def from_config(cls) -> "TemplateContext":
        """Build an instance with one task per configured context entry."""
        ret = TemplateContext()
        for name, spec in config.template_context.context.items():
            ret.register_task_from_loadable(name, spec)
        return ret

    def register_task(self, task: "ContextTask") -> None:
        """Add a task and schedule it (due immediately at first)."""
        self.tasks[task.name] = task
        self.scheduled.append(ScheduledTask(task))

    def register_task_from_loadable(self, name: str, loadable: "Loadable") -> None:
        self.register_task(ContextTask.from_loadable(name, loadable))

    async def update_hash(self, task: "ContextTask") -> None:
        """Re-fingerprint a task's result and notify subscribers on change."""
        name = task.name
        result = self.results.get(name)
        old = self.hashes.get(name)
        new = hash(result)

        if old != new:
            stats.increment("context.updated", tags=[f"context:{name}"])
            self.hashes[name] = new
            await task.notify()

    def get_context(self, req: "DiscoveryRequest") -> dict[str, Any]:
        """Merge all task results into one dict, then apply request middleware."""
        ret = {r.name: r.data for r in self.results.values()}
        for fn in self.middleware:
            fn(req, ret)
        return ret

    def get(self, key: str, default: Any = None) -> Any:
        """Return a single task's data, or ``default`` when absent."""
        if result := self.results.get(key):
            return result.data
        return default

    async def _run_task(self, task: "ContextTask") -> None:
        # Skip if this task is already refreshing.
        if task.name in self.running:
            return
        self.running.add(task.name)
        try:
            await task.refresh(self.results)
            await self.update_hash(task)
        finally:
            self.running.remove(task.name)

    async def run_once(self) -> None:
        """Run every scheduled task a single time, sequentially."""
        heapq.heapify(self.scheduled)
        for next_ in self.scheduled:
            await self._run_task(next_.task)

    async def start(self) -> None:
        """Run the scheduler loop forever, firing tasks as they come due."""
        if not self.scheduled:
            # No context jobs configured
            return
        heapq.heapify(self.scheduled)
        while True:
            # Obtain next task
            next_ = heapq.heappop(self.scheduled)
            task = next_.task
            # Wait for due date
            delay = max(0, next_.due - time.monotonic())
            await asyncio.sleep(delay)
            # reschedule immediately (at next due date)
            next_.due = time.monotonic() + task.seconds_til_next_run
            heapq.heappush(self.scheduled, next_)
            # Fire and forget: the task writes to a mutable dict reference,
            # and each task writes only its own key, so there is no data race.
            # Keep a strong reference until completion so the event loop does
            # not garbage-collect the running task (per asyncio docs).
            bg = asyncio.create_task(self._run_task(task))
            self._background.add(bg)
            bg.add_done_callback(self._background.discard)
132
+
133
+
134
class ContextStatus(Enum):
    """Lifecycle states for a context load attempt."""

    READY = "ready"      # data loaded successfully
    PENDING = "pending"  # not yet attempted
    FAILED = "failed"    # all attempts errored
138
+
139
+
140
class ContextResult(pydantic.BaseModel):
    """Outcome of one context load: the loaded data plus its final state."""

    name: str
    data: Any = None
    state: ContextStatus = ContextStatus.PENDING

    def __str__(self) -> str:
        return f"ContextResult({self.name}, {self.state.value})"

    def __hash__(self) -> int:
        # Cheap content fingerprint: adler32 over the repr of the data,
        # masked to an unsigned 32-bit value.
        payload: bytes = repr(self.data).encode()
        return zlib.adler32(payload) & 0xFFFFFFFF
151
+
152
+
153
+ class ContextTask(pydantic.BaseModel):
154
+ name: str
155
+ spec: Loadable
156
+ interval: "TaskInterval"
157
+ retry_policy: Optional["TaskRetryPolicy"] = None
158
+
159
+ async def notify(self):
160
+ await bus.publish(
161
+ Topic.CONTEXT,
162
+ Event(
163
+ message=f"Context {self.name} updated",
164
+ metadata={"name": self.name},
165
+ ),
166
+ )
167
+
168
+ async def refresh(self, output: dict[str, "ContextResult"]) -> None:
169
+ result = await self.try_load()
170
+ if result.state == ContextStatus.READY:
171
+ output[self.name] = result
172
+
173
+ async def try_load(self) -> "ContextResult":
174
+ attempts_remaining, retry_interval = TaskRetryPolicy.from_task(self)
175
+ data = None
176
+ state = ContextStatus.PENDING
177
+ while attempts_remaining > 0:
178
+ stats.increment("context.refresh.attempt", tags=[f"context:{self.name}"])
48
179
  try:
49
- if isinstance(v, Loadable):
50
- ret[k] = v.load()
51
- elif isinstance(v, str):
52
- ret[k] = Loadable.from_legacy_fmt(v).load()
53
- self.stats.increment(
54
- "context.refresh.success",
55
- tags=[f"context:{k}"],
180
+ load_fn = self.spec.load
181
+ if inspect.iscoroutinefunction(load_fn):
182
+ data = await load_fn()
183
+ else:
184
+ data = load_fn()
185
+ stats.increment(
186
+ "context.refresh.success", tags=[f"context:{self.name}"]
56
187
  )
57
- except Exception as e: # pylint: disable=broad-exception-caught
58
- self.logger(event=e)
59
- self.stats.increment(
60
- "context.refresh.error",
61
- tags=[f"context:{k}"],
188
+ state = ContextStatus.READY
189
+ break
190
+ except Exception as e:
191
+ log.error(
192
+ "Context failed to refresh",
193
+ error=[line for line in str(e).splitlines()],
62
194
  )
63
- if "crypto" not in ret:
64
- ret["crypto"] = self.crypto
65
- return ret
195
+ state = ContextStatus.FAILED
196
+ stats.increment("context.refresh.error", tags=[f"context:{self.name}"])
197
+ attempts_remaining -= 1
198
+ await asyncio.sleep(retry_interval)
199
+ return ContextResult(
200
+ name=self.name,
201
+ data=data,
202
+ state=state,
203
+ )
66
204
 
67
- def build_new_context_from_instances(self, node_value: str) -> Dict[str, Any]:
68
- matches = self.poller.match_node(node_value=node_value)
69
- ret = dict()
70
- for key, value in self.context.items():
71
- try:
72
- ret[key] = deepcopy(value)
73
- except TypeError:
74
- ret[key] = value
75
-
76
- to_add = dict()
77
- for scope, instances in matches.scopes.items():
78
- if scope in ("default", None):
79
- to_add["instances"] = instances
205
+ @property
206
+ def seconds_til_next_run(self) -> int:
207
+ match self.interval.value:
208
+ case CronInterval(cron=expression):
209
+ cron = croniter(expression)
210
+ next_date = cron.get_next(datetime.datetime)
211
+ return int(wait_until(next_date))
212
+ case SecondsInterval(seconds=seconds):
213
+ return seconds
214
+ case _:
215
+ return 1
216
+
217
+ @classmethod
218
+ def from_loadable(cls, name: str, loadable: Loadable) -> "ContextTask":
219
+ interval = loadable.interval
220
+ if interval is None:
221
+ cfg = config.template_context
222
+ if cfg.refresh_rate is not None:
223
+ interval = str(cfg.refresh_rate)
224
+ elif cfg.refresh_cron is not None:
225
+ interval = cfg.refresh_cron
80
226
  else:
81
- to_add[scope] = instances
82
- if to_add == {}:
83
- raise HTTPException(
84
- detail=(
85
- "This node does not match any instances! ",
86
- "If node matching is enabled, check that the node "
87
- "match key aligns with the source match key. "
88
- "If you don't know what any of this is, disable "
89
- "node matching via the config",
90
- ),
91
- status_code=400,
92
- )
93
- ret.update(to_add)
94
- return ret
227
+ interval = "60"
228
+ retry_policy = None
229
+ if policy := loadable.retry_policy:
230
+ retry_policy = TaskRetryPolicy(**policy)
95
231
 
96
- def get_context(
97
- self, request: DiscoveryRequest, template: XdsTemplate
98
- ) -> Dict[str, Any]:
99
- ret = self.build_new_context_from_instances(
100
- node_value=self.poller.extract_node_key(request.node),
232
+ return ContextTask(
233
+ name=name,
234
+ spec=loadable,
235
+ interval=TaskInterval.from_str(interval),
236
+ retry_policy=retry_policy,
101
237
  )
102
- if request.hide_private_keys:
103
- ret["crypto"] = self.disabled_suite
104
- if not template.is_python_source:
105
- keys_to_remove = self.unused_variables(list(ret), template.jinja_variables)
106
- for key in keys_to_remove:
107
- ret.pop(key, None)
108
- return ret
238
+
239
+ def __str__(self) -> str:
240
+ return f"ContextTask({self.name}, {self.spec})"
241
+
242
+ __repr__ = __str__
243
+
244
+
245
class TaskRetryPolicy(pydantic.BaseModel):
    """How many times to retry a context load, and the delay between tries."""

    num_retries: int
    interval: int

    @staticmethod
    def from_task(t: "ContextTask") -> tuple[int, int]:
        """Return ``(total_attempts, retry_interval_secs)`` for a task.

        Attempts include the initial try. A malformed per-task policy falls
        back to a single attempt at the default interval rather than crashing
        the scheduler; tasks with no policy get the configured default retries.
        """
        interval = DEFAULT_RETRY_INTERVAL
        attempts = 1  # the initial attempt always happens
        if policy := t.spec.retry_policy:
            try:
                retry_policy = TaskRetryPolicy(**policy)
                interval = retry_policy.interval
                attempts += retry_policy.num_retries
            except Exception as e:
                # Surface the bad policy through the application logger
                # instead of stdout so it reaches structured logs.
                log.warning(f"Failed to parse retry policy of task: {t}. Error: {e}")
        else:
            attempts += DEFAULT_NUM_RETRIES
        return attempts, interval
265
+
266
class TaskInterval(pydantic.BaseModel):
    """Tagged wrapper holding either a cron expression or a fixed delay."""

    value: "TaskIntervalKind"

    @classmethod
    def from_str(cls, s: str) -> "TaskInterval":
        """Parse an interval string: all-digits means seconds, otherwise it
        must be a valid cron expression."""
        if s.isdigit():
            return TaskInterval(value=SecondsInterval(seconds=int(s)))
        if croniter.is_valid(s):
            return TaskInterval(value=CronInterval(cron=s))
        raise ValueError(f"Invalid interval string: {s}")
276
+
277
+
278
class CronInterval(pydantic.BaseModel):
    # A crontab expression, e.g. "*/5 * * * *".
    cron: str


class SecondsInterval(pydantic.BaseModel):
    # Fixed delay between runs, in seconds.
    seconds: int


# Union consumed by TaskInterval.value; discriminated by field shape.
TaskIntervalKind = Union[CronInterval, SecondsInterval]
@@ -0,0 +1,113 @@
1
+ import inspect
2
+ from typing import Any
3
+
4
+ from pydantic import BaseModel, Field, ConfigDict
5
+
6
+ from sovereign.utils.entry_point_loader import EntryPointLoader
7
+ from sovereign.dynamic_config.loaders import CustomLoader
8
+ from sovereign.dynamic_config.deser import ConfigDeserializer
9
+
10
+
11
+ LOADERS: dict[str, CustomLoader] = {}
12
+ DESERIALIZERS: dict[str, ConfigDeserializer] = {}
13
+
14
+
15
+ class Loadable(BaseModel):
16
+ path: str = Field(alias="target")
17
+ protocol: str = Field(alias="loader")
18
+ serialization: str | None = Field(None, alias="deserialize_with")
19
+ interval: str | None = None
20
+ retry_policy: dict[str, Any] | None = None
21
+
22
+ model_config = ConfigDict(populate_by_name=True)
23
+
24
+ def load(self, default: Any = None) -> Any:
25
+ global LOADERS
26
+ if not LOADERS:
27
+ init_loaders()
28
+
29
+ global DESERIALIZERS
30
+ if not DESERIALIZERS:
31
+ init_deserializers()
32
+
33
+ if self.protocol not in LOADERS:
34
+ raise KeyError(
35
+ f"Could not find CustomLoader {self.protocol}. Available: {LOADERS}"
36
+ )
37
+ loader = LOADERS[self.protocol]
38
+
39
+ ser = self.serialization
40
+ if ser is None:
41
+ ser = loader.default_deser
42
+ elif ser not in DESERIALIZERS:
43
+ raise KeyError(
44
+ f"Could not find Deserializer {ser}. Available: {DESERIALIZERS}"
45
+ )
46
+ deserializer = DESERIALIZERS[ser]
47
+
48
+ try:
49
+ data = loader.load(self.path)
50
+ return deserializer.deserialize(data)
51
+ except Exception as original_error:
52
+ if default is not None:
53
+ return default
54
+ raise Exception(
55
+ f"Could not load value. {self.__str__()}, {original_error=}"
56
+ )
57
+
58
+ @staticmethod
59
+ def from_legacy_fmt(fmt_string: str) -> "Loadable":
60
+ if "://" not in fmt_string:
61
+ return Loadable(
62
+ loader="inline",
63
+ deserialize_with="string",
64
+ target=fmt_string,
65
+ )
66
+ try:
67
+ scheme, path = fmt_string.split("://")
68
+ except ValueError:
69
+ raise ValueError(fmt_string)
70
+ try:
71
+ proto, ser = scheme.split("+")
72
+ except ValueError:
73
+ proto, ser = scheme, "yaml"
74
+
75
+ if proto in ("python", "module"):
76
+ ser = "raw"
77
+ if proto in ("http", "https"):
78
+ path = "://".join([proto, path])
79
+
80
+ return Loadable(
81
+ loader=proto,
82
+ deserialize_with=ser,
83
+ target=path,
84
+ )
85
+
86
+ def __str__(self) -> str:
87
+ return f"Loadable({self.protocol}+{self.serialization}://{self.path})"
88
+
89
+
90
def init_loaders():
    """Populate the LOADERS registry from the 'loaders' entry-point group.

    Each entry point must resolve to a class whose instances implement a
    bound ``.load()`` method.
    """
    global LOADERS
    for entry_point in EntryPointLoader("loaders").groups["loaders"]:
        loader_cls = entry_point.load()
        instance = loader_cls()
        # Reject objects that merely carry a non-method `load` attribute.
        if not inspect.ismethod(getattr(instance, "load")):
            raise AttributeError(
                f"CustomLoader {entry_point.name} does not implement .load()"
            )
        LOADERS[entry_point.name] = instance
101
+
102
+
103
def init_deserializers():
    """Populate the DESERIALIZERS registry from the 'deserializers'
    entry-point group.

    Each entry point must resolve to a class whose instances implement a
    bound ``.deserialize()`` method.
    """
    global DESERIALIZERS
    for entry_point in EntryPointLoader("deserializers").groups["deserializers"]:
        deser_cls = entry_point.load()
        instance = deser_cls()
        # Reject objects that merely carry a non-method `deserialize` attribute.
        if not inspect.ismethod(getattr(instance, "deserialize")):
            raise AttributeError(
                f"Deserializer {entry_point.name} does not implement .deserialize()"
            )
        DESERIALIZERS[entry_point.name] = instance
@@ -0,0 +1,78 @@
1
+ import json
2
+ from typing import Any, Protocol
3
+
4
+ import yaml
5
+ import jinja2
6
+
7
# Optional fast JSON backends: record availability instead of failing at
# import time, so the module loads even when they are not installed.
try:
    import ujson

    UJSON_AVAILABLE = True
except ImportError:
    UJSON_AVAILABLE = False

try:
    import orjson

    ORJSON_AVAILABLE = True
except ImportError:
    ORJSON_AVAILABLE = False

# Shared environment for the Jinja deserializer; autoescape on for safety.
jinja_env = jinja2.Environment(autoescape=True)
22
+
23
+
24
class ConfigDeserializer(Protocol):
    """
    Deserializers can be added to sovereign by creating a subclass
    and then specified in config:

    template_context:
      context:
        ...:
          protocol: ...
          serialization: <serializer name>
          path: ...
    """

    def deserialize(self, input: Any) -> Any: ...


class YamlDeserializer(ConfigDeserializer):
    """Parse the input as YAML (safe loader only)."""

    def deserialize(self, input: Any) -> Any:
        return yaml.safe_load(input)


class JsonDeserializer(ConfigDeserializer):
    """Parse the input as JSON via the standard library."""

    def deserialize(self, input: Any) -> Any:
        return json.loads(input)


class JinjaDeserializer(ConfigDeserializer):
    """Compile the input into a Jinja2 template object."""

    def deserialize(self, input: Any) -> Any:
        return jinja_env.from_string(input)


class StringDeserializer(ConfigDeserializer):
    """Coerce the input to str."""

    def deserialize(self, input: Any) -> Any:
        return str(input)


class PassthroughDeserializer(ConfigDeserializer):
    """Return the input untouched."""

    def deserialize(self, input: Any) -> Any:
        return input


class UjsonDeserializer(ConfigDeserializer):
    """Parse JSON with ujson, when that optional dependency is installed."""

    def deserialize(self, input: Any) -> Any:
        if not UJSON_AVAILABLE:
            raise ImportError("Configured a UJSON deserializer but it's not installed")
        return ujson.loads(input)


class OrjsonDeserializer(ConfigDeserializer):
    """Parse JSON with orjson, when that optional dependency is installed."""

    def deserialize(self, input: Any) -> Any:
        if not ORJSON_AVAILABLE:
            raise ImportError(
                "Configured an ORJSON deserializer but it's not installed"
            )
        return orjson.loads(input)