sovereign 0.19.3-py3-none-any.whl → 1.0.0a4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sovereign might be problematic.
- sovereign/__init__.py +13 -81
- sovereign/app.py +62 -48
- sovereign/cache/__init__.py +245 -0
- sovereign/cache/backends/__init__.py +110 -0
- sovereign/cache/backends/s3.py +161 -0
- sovereign/cache/filesystem.py +74 -0
- sovereign/cache/types.py +17 -0
- sovereign/configuration.py +607 -0
- sovereign/constants.py +1 -0
- sovereign/context.py +270 -104
- sovereign/dynamic_config/__init__.py +112 -0
- sovereign/dynamic_config/deser.py +78 -0
- sovereign/dynamic_config/loaders.py +120 -0
- sovereign/error_info.py +2 -3
- sovereign/events.py +49 -0
- sovereign/logging/access_logger.py +85 -0
- sovereign/logging/application_logger.py +54 -0
- sovereign/logging/base_logger.py +41 -0
- sovereign/logging/bootstrapper.py +36 -0
- sovereign/logging/types.py +10 -0
- sovereign/middlewares.py +8 -7
- sovereign/modifiers/lib.py +2 -1
- sovereign/rendering.py +124 -0
- sovereign/rendering_common.py +91 -0
- sovereign/response_class.py +18 -0
- sovereign/server.py +112 -35
- sovereign/statistics.py +19 -21
- sovereign/templates/base.html +59 -46
- sovereign/templates/resources.html +203 -102
- sovereign/testing/loaders.py +9 -0
- sovereign/{modifiers/test.py → testing/modifiers.py} +0 -2
- sovereign/tracing.py +103 -0
- sovereign/types.py +304 -0
- sovereign/utils/auth.py +27 -13
- sovereign/utils/crypto/__init__.py +0 -0
- sovereign/utils/crypto/crypto.py +135 -0
- sovereign/utils/crypto/suites/__init__.py +21 -0
- sovereign/utils/crypto/suites/aes_gcm_cipher.py +42 -0
- sovereign/utils/crypto/suites/base_cipher.py +21 -0
- sovereign/utils/crypto/suites/disabled_cipher.py +25 -0
- sovereign/utils/crypto/suites/fernet_cipher.py +29 -0
- sovereign/utils/dictupdate.py +3 -2
- sovereign/utils/eds.py +40 -22
- sovereign/utils/entry_point_loader.py +2 -2
- sovereign/utils/mock.py +56 -17
- sovereign/utils/resources.py +17 -0
- sovereign/utils/templates.py +4 -2
- sovereign/utils/timer.py +5 -3
- sovereign/utils/version_info.py +8 -0
- sovereign/utils/weighted_clusters.py +2 -1
- sovereign/v2/__init__.py +0 -0
- sovereign/v2/data/data_store.py +621 -0
- sovereign/v2/data/render_discovery_response.py +24 -0
- sovereign/v2/data/repositories.py +90 -0
- sovereign/v2/data/utils.py +33 -0
- sovereign/v2/data/worker_queue.py +273 -0
- sovereign/v2/jobs/refresh_context.py +117 -0
- sovereign/v2/jobs/render_discovery_job.py +145 -0
- sovereign/v2/logging.py +81 -0
- sovereign/v2/types.py +41 -0
- sovereign/v2/web.py +101 -0
- sovereign/v2/worker.py +199 -0
- sovereign/views/__init__.py +7 -0
- sovereign/views/api.py +82 -0
- sovereign/views/crypto.py +46 -15
- sovereign/views/discovery.py +55 -119
- sovereign/views/healthchecks.py +107 -20
- sovereign/views/interface.py +171 -111
- sovereign/worker.py +193 -0
- {sovereign-0.19.3.dist-info → sovereign-1.0.0a4.dist-info}/METADATA +80 -76
- sovereign-1.0.0a4.dist-info/RECORD +85 -0
- {sovereign-0.19.3.dist-info → sovereign-1.0.0a4.dist-info}/WHEEL +1 -1
- sovereign-1.0.0a4.dist-info/entry_points.txt +46 -0
- sovereign_files/__init__.py +0 -0
- sovereign_files/static/darkmode.js +51 -0
- sovereign_files/static/node_expression.js +42 -0
- sovereign_files/static/panel.js +76 -0
- sovereign_files/static/resources.css +246 -0
- sovereign_files/static/resources.js +642 -0
- sovereign_files/static/sass/style.scss +33 -0
- sovereign_files/static/style.css +16143 -0
- sovereign_files/static/style.css.map +1 -0
- sovereign/config_loader.py +0 -225
- sovereign/discovery.py +0 -175
- sovereign/logs.py +0 -131
- sovereign/schemas.py +0 -780
- sovereign/sources/__init__.py +0 -3
- sovereign/sources/file.py +0 -21
- sovereign/sources/inline.py +0 -38
- sovereign/sources/lib.py +0 -40
- sovereign/sources/poller.py +0 -294
- sovereign/static/sass/style.scss +0 -27
- sovereign/static/style.css +0 -13553
- sovereign/templates/ul_filter.html +0 -22
- sovereign/utils/crypto.py +0 -103
- sovereign/views/admin.py +0 -120
- sovereign-0.19.3.dist-info/LICENSE.txt +0 -13
- sovereign-0.19.3.dist-info/RECORD +0 -47
- sovereign-0.19.3.dist-info/entry_points.txt +0 -10
sovereign/context.py
CHANGED
@@ -1,119 +1,285 @@
(removed 0.19.3 lines are truncated in this rendering; the new 1.0.0a4 content of the hunk follows)

import asyncio
import datetime
import heapq
import inspect
import time
import zlib
from enum import Enum
from typing import Any, Callable, Optional, Union

import pydantic
from croniter import croniter
from typing_extensions import final, override

from sovereign import application_logger as log
from sovereign.configuration import config
from sovereign.dynamic_config import Loadable
from sovereign.events import Event, Topic, bus
from sovereign.statistics import configure_statsd
from sovereign.types import DiscoveryRequest
from sovereign.utils.timer import wait_until

stats = configure_statsd()
DEFAULT_RETRY_INTERVAL = config.template_context.refresh_retry_interval_secs
DEFAULT_NUM_RETRIES = config.template_context.refresh_num_retries


@final
class ScheduledTask:
    def __init__(self, task: "ContextTask"):
        self.task = task
        self.due = time.monotonic()

    def __lt__(self, other: "ScheduledTask") -> bool:
        return self.due < other.due

    async def run(
        self, output: dict[str, "ContextResult"], tasks: list["ScheduledTask"]
    ):
        await self.task.refresh(output)
        self.due = time.monotonic() + self.task.seconds_til_next_run
        heapq.heappush(tasks, self)

    @override
    def __str__(self) -> str:
        return f"ScheduledTask({self.task.name})"


@final
class TemplateContext:
    def __init__(
        self,
        middleware: list[Callable[[DiscoveryRequest, dict[str, Any]], None]]
        | None = None,
    ) -> None:
        self.tasks: dict[str, ContextTask] = dict()
        self.results: dict[str, ContextResult] = dict()
        self.hashes: dict[str, int] = dict()
        self.scheduled: list[ScheduledTask] = list()
        self.running: set[str] = set()
        self.middleware = middleware or list()

    @classmethod
    def from_config(cls) -> "TemplateContext":
        ret = TemplateContext()
        for name, spec in config.template_context.context.items():
            ret.register_task_from_loadable(name, spec)
        return ret

    def register_task(self, task: "ContextTask") -> None:
        self.tasks[task.name] = task
        self.scheduled.append(ScheduledTask(task))

    def register_task_from_loadable(self, name: str, loadable: Loadable) -> None:
        self.register_task(ContextTask.from_loadable(name, loadable))

    async def update_hash(self, task: "ContextTask"):
        name = task.name
        result = self.results.get(name)
        old = self.hashes.get(name)
        new = hash(result)

        if old != new:
            stats.increment("context.updated", tags=[f"context:{name}"])
            self.hashes[name] = new
            await task.notify()

    def get_context(self, req: DiscoveryRequest) -> dict[str, Any]:
        ret = {r.name: r.data for r in self.results.values()}
        for fn in self.middleware:
            fn(req, ret)
        return ret

    def get(self, key: str, default: Any = None) -> Any:
        if result := self.results.get(key):
            return result.data
        return default

    async def _run_task(self, task: "ContextTask"):
        if task.name in self.running:
            return
        self.running.add(task.name)
        try:
            await task.refresh(self.results)
            await self.update_hash(task)
        finally:
            self.running.remove(task.name)

    async def run_once(self):
        heapq.heapify(self.scheduled)
        for next_ in self.scheduled:
            await self._run_task(next_.task)

    async def start(self):
        if not self.scheduled:
            # No context jobs configured
            return
        heapq.heapify(self.scheduled)
        while True:
            # Obtain next task
            next_ = heapq.heappop(self.scheduled)
            task = next_.task
            # Wait for due date
            delay = max(0, next_.due - time.monotonic())
            await asyncio.sleep(delay)
            # reschedule immediately (at next due date)
            next_.due = time.monotonic() + task.seconds_til_next_run
            heapq.heappush(self.scheduled, next_)
            # fire and forget, task writes to mutable dict reference
            # no data race because each task writes to its unique key
            asyncio.create_task(self._run_task(task))


class ContextStatus(Enum):
    READY = "ready"
    PENDING = "pending"
    FAILED = "failed"


class ContextResult(pydantic.BaseModel):
    name: str
    data: Any = None
    state: ContextStatus = ContextStatus.PENDING

    def __str__(self) -> str:
        return f"ContextResult({self.name}, {self.state.value})"

    def __hash__(self) -> int:
        data: bytes = repr(self.data).encode()
        return zlib.adler32(data) & 0xFFFFFFFF


class ContextTask(pydantic.BaseModel):
    name: str
    spec: Loadable
    interval: "TaskInterval"
    retry_policy: Optional["TaskRetryPolicy"] = None

    async def notify(self):
        await bus.publish(
            Topic.CONTEXT,
            Event(
                message=f"Context {self.name} updated",
                metadata={"name": self.name},
            ),
        )

    async def refresh(self, output: dict[str, "ContextResult"]) -> None:
        result = await self.try_load()
        if result.state == ContextStatus.READY:
            output[self.name] = result

    async def try_load(self) -> "ContextResult":
        attempts_remaining, retry_interval = TaskRetryPolicy.from_task(self)
        data = None
        state = ContextStatus.PENDING
        while attempts_remaining > 0:
            stats.increment("context.refresh.attempt", tags=[f"context:{self.name}"])
            try:
                load_fn = self.spec.load
                if inspect.iscoroutinefunction(load_fn):
                    data = await load_fn()
                else:
                    data = load_fn()
                stats.increment(
                    "context.refresh.success", tags=[f"context:{self.name}"]
                )
                state = ContextStatus.READY
                break
            except Exception as e:
                log.error(
                    "Context failed to refresh",
                    error=[line for line in str(e).splitlines()],
                )
                state = ContextStatus.FAILED
                stats.increment("context.refresh.error", tags=[f"context:{self.name}"])
                attempts_remaining -= 1
                await asyncio.sleep(retry_interval)
        return ContextResult(
            name=self.name,
            data=data,
            state=state,
        )

    @property
    def seconds_til_next_run(self) -> int:
        match self.interval.value:
            case CronInterval(cron=expression):
                cron = croniter(expression)
                next_date = cron.get_next(datetime.datetime)
                return int(wait_until(next_date))
            case SecondsInterval(seconds=seconds):
                return seconds
            case _:
                return 1

    @classmethod
    def from_loadable(cls, name: str, loadable: Loadable) -> "ContextTask":
        interval = loadable.interval
        if interval is None:
            cfg = config.template_context
            if cfg.refresh_rate is not None:
                interval = str(cfg.refresh_rate)
            elif cfg.refresh_cron is not None:
                interval = cfg.refresh_cron
            else:
                interval = "60"
        retry_policy = None
        if policy := loadable.retry_policy:
            retry_policy = TaskRetryPolicy(**policy)

        return ContextTask(
            name=name,
            spec=loadable,
            interval=TaskInterval.from_str(interval),
            retry_policy=retry_policy,
        )

    def __str__(self) -> str:
        return f"ContextTask({self.name}, {self.spec})"

    __repr__ = __str__


class TaskRetryPolicy(pydantic.BaseModel):
    num_retries: int
    interval: int

    @staticmethod
    def from_task(t: "ContextTask") -> tuple[int, int]:
        interval = DEFAULT_RETRY_INTERVAL
        attempts = 1
        if policy := t.spec.retry_policy:
            try:
                retry_policy = TaskRetryPolicy(**policy)
                interval = retry_policy.interval
                attempts += retry_policy.num_retries
            except Exception as e:
                # TODO: warning
                print(f"Failed to parse retry policy of task: {t}. Error: {e}")
        else:
            attempts += DEFAULT_NUM_RETRIES
        return attempts, interval


class TaskInterval(pydantic.BaseModel):
    value: "TaskIntervalKind"

    @classmethod
    def from_str(cls, s: str) -> "TaskInterval":
        if s.isdigit():
            return TaskInterval(value=SecondsInterval(seconds=int(s)))
        if croniter.is_valid(s):
            return TaskInterval(value=CronInterval(cron=s))
        raise ValueError(f"Invalid interval string: {s}")


class CronInterval(pydantic.BaseModel):
    cron: str


class SecondsInterval(pydantic.BaseModel):
    seconds: int


TaskIntervalKind = Union[CronInterval, SecondsInterval]
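To make the new scheduling model concrete, the following is a minimal, self-contained sketch of the loop that TemplateContext.start() runs above: due times live in a min-heap, the soonest task is popped when due, rescheduled immediately at its next due date, and then dispatched as a fire-and-forget coroutine. The task names and the fixed two-second reschedule here are illustrative only, not part of sovereign.

import asyncio
import heapq
import time


async def fake_refresh(name: str) -> None:
    # Stand-in for TemplateContext._run_task(task)
    print(f"refreshed {name}")


async def scheduler_sketch() -> None:
    # (due time, task name) pairs form a min-heap keyed on the due time
    heap = [(time.monotonic(), "clusters"), (time.monotonic() + 1, "listeners")]
    heapq.heapify(heap)
    for _ in range(3):
        due, name = heapq.heappop(heap)
        # Sleep until the task is due
        await asyncio.sleep(max(0.0, due - time.monotonic()))
        # Reschedule immediately at the next due date...
        heapq.heappush(heap, (time.monotonic() + 2, name))
        # ...then fire-and-forget the refresh itself
        asyncio.create_task(fake_refresh(name))
    # Give pending refresh tasks a chance to run before exiting
    await asyncio.sleep(0)


asyncio.run(scheduler_sketch())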
sovereign/dynamic_config/__init__.py
ADDED
@@ -0,0 +1,112 @@
import inspect
from typing import Any

from pydantic import BaseModel, ConfigDict, Field

from sovereign.dynamic_config.deser import ConfigDeserializer
from sovereign.dynamic_config.loaders import CustomLoader
from sovereign.utils.entry_point_loader import EntryPointLoader

LOADERS: dict[str, CustomLoader] = {}
DESERIALIZERS: dict[str, ConfigDeserializer] = {}


class Loadable(BaseModel):
    path: str = Field(alias="target")
    protocol: str = Field(alias="loader")
    serialization: str | None = Field(None, alias="deserialize_with")
    interval: str | None = None
    retry_policy: dict[str, Any] | None = None

    model_config = ConfigDict(populate_by_name=True)

    def load(self, default: Any = None) -> Any:
        global LOADERS
        if not LOADERS:
            init_loaders()

        global DESERIALIZERS
        if not DESERIALIZERS:
            init_deserializers()

        if self.protocol not in LOADERS:
            raise KeyError(
                f"Could not find CustomLoader {self.protocol}. Available: {LOADERS}"
            )
        loader = LOADERS[self.protocol]

        ser = self.serialization
        if ser is None:
            ser = loader.default_deser
        elif ser not in DESERIALIZERS:
            raise KeyError(
                f"Could not find Deserializer {ser}. Available: {DESERIALIZERS}"
            )
        deserializer = DESERIALIZERS[ser]

        try:
            data = loader.load(self.path)
            return deserializer.deserialize(data)
        except Exception as original_error:
            if default is not None:
                return default
            raise Exception(
                f"Could not load value. {self.__str__()}, {original_error=}"
            )

    @staticmethod
    def from_legacy_fmt(fmt_string: str) -> "Loadable":
        if "://" not in fmt_string:
            return Loadable(
                loader="inline",
                deserialize_with="string",
                target=fmt_string,
            )
        try:
            scheme, path = fmt_string.split("://")
        except ValueError:
            raise ValueError(fmt_string)
        try:
            proto, ser = scheme.split("+")
        except ValueError:
            proto, ser = scheme, "yaml"

        if proto in ("python", "module"):
            ser = "raw"
        if proto in ("http", "https"):
            path = "://".join([proto, path])

        return Loadable(
            loader=proto,
            deserialize_with=ser,
            target=path,
        )

    def __str__(self) -> str:
        return f"Loadable({self.protocol}+{self.serialization}://{self.path})"


def init_loaders():
    global LOADERS
    for entry_point in EntryPointLoader("loaders").groups["loaders"]:
        custom_loader = entry_point.load()
        func = custom_loader()
        method = getattr(func, "load")
        if not inspect.ismethod(method):
            raise AttributeError(
                f"CustomLoader {entry_point.name} does not implement .load()"
            )
        LOADERS[entry_point.name] = func


def init_deserializers():
    global DESERIALIZERS
    for entry_point in EntryPointLoader("deserializers").groups["deserializers"]:
        deserializer = entry_point.load()
        func = deserializer()
        method = getattr(func, "deserialize")
        if not inspect.ismethod(method):
            raise AttributeError(
                f"Deserializer {entry_point.name} does not implement .deserialize()"
            )
        DESERIALIZERS[entry_point.name] = func
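As a quick illustration of how from_legacy_fmt maps the old single-string configuration format onto the new Loadable fields (assuming sovereign 1.0.0a4 is installed; the URL and literal value are made-up examples):

from sovereign.dynamic_config import Loadable

# "loader+deserializer://target" splits into the three Loadable fields;
# http/https targets keep their scheme as part of the path.
ldbl = Loadable.from_legacy_fmt("https+json://example.com/context.json")
assert ldbl.protocol == "https"
assert ldbl.serialization == "json"
assert ldbl.path == "https://example.com/context.json"

# A bare string (no "://") falls back to an inline string loader.
inline = Loadable.from_legacy_fmt("some literal value")
assert inline.protocol == "inline" and inline.serialization == "string"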
sovereign/dynamic_config/deser.py
ADDED
@@ -0,0 +1,78 @@
import json
from typing import Any, Protocol

import jinja2
import yaml

try:
    import ujson

    UJSON_AVAILABLE = True
except ImportError:
    UJSON_AVAILABLE = False

try:
    import orjson

    ORJSON_AVAILABLE = True
except ImportError:
    ORJSON_AVAILABLE = False

jinja_env = jinja2.Environment(autoescape=True)


class ConfigDeserializer(Protocol):
    """
    Deserializers can be added to sovereign by creating a subclass
    and then specified in config:

    template_context:
      context:
        ...:
          protocol: ...
          serialization: <serializer name>
          path: ...
    """

    def deserialize(self, input: Any) -> Any: ...


class YamlDeserializer(ConfigDeserializer):
    def deserialize(self, input: Any) -> Any:
        return yaml.safe_load(input)


class JsonDeserializer(ConfigDeserializer):
    def deserialize(self, input: Any) -> Any:
        return json.loads(input)


class JinjaDeserializer(ConfigDeserializer):
    def deserialize(self, input: Any) -> Any:
        return jinja_env.from_string(input)


class StringDeserializer(ConfigDeserializer):
    def deserialize(self, input: Any) -> Any:
        return str(input)


class PassthroughDeserializer(ConfigDeserializer):
    def deserialize(self, input: Any) -> Any:
        return input


class UjsonDeserializer(ConfigDeserializer):
    def deserialize(self, input: Any) -> Any:
        if not UJSON_AVAILABLE:
            raise ImportError("Configured a UJSON deserializer but it's not installed")
        return ujson.loads(input)


class OrjsonDeserializer(ConfigDeserializer):
    def deserialize(self, input: Any) -> Any:
        if not ORJSON_AVAILABLE:
            raise ImportError(
                "Configured an ORJSON deserializer but it's not installed"
            )
        return orjson.loads(input)
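Because ConfigDeserializer is a Protocol and deserializers are discovered through the "deserializers" entry-point group (see init_deserializers in sovereign/dynamic_config/__init__.py above), a third-party package can plug in its own format. The class name, package path, and entry-point value below are hypothetical, shown only to illustrate the shape such a plugin would take:

import base64
from typing import Any


class Base64Deserializer:
    """Hypothetical deserializer satisfying the ConfigDeserializer protocol."""

    def deserialize(self, input: Any) -> Any:
        # Decode a base64-encoded payload into a plain string
        return base64.b64decode(input).decode()


# Exposed to sovereign via the "deserializers" entry-point group, e.g. in
# the plugin's pyproject.toml:
#   [project.entry-points.deserializers]
#   base64 = "my_package.deser:Base64Deserializer"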