sovereign 0.14.2__py3-none-any.whl → 1.0.0a4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sovereign might be problematic. Click here for more details.
- sovereign/__init__.py +17 -78
- sovereign/app.py +74 -59
- sovereign/cache/__init__.py +245 -0
- sovereign/cache/backends/__init__.py +110 -0
- sovereign/cache/backends/s3.py +161 -0
- sovereign/cache/filesystem.py +74 -0
- sovereign/cache/types.py +17 -0
- sovereign/configuration.py +607 -0
- sovereign/constants.py +1 -0
- sovereign/context.py +271 -100
- sovereign/dynamic_config/__init__.py +112 -0
- sovereign/dynamic_config/deser.py +78 -0
- sovereign/dynamic_config/loaders.py +120 -0
- sovereign/error_info.py +61 -0
- sovereign/events.py +49 -0
- sovereign/logging/access_logger.py +85 -0
- sovereign/logging/application_logger.py +54 -0
- sovereign/logging/base_logger.py +41 -0
- sovereign/logging/bootstrapper.py +36 -0
- sovereign/logging/types.py +10 -0
- sovereign/middlewares.py +8 -7
- sovereign/modifiers/lib.py +2 -1
- sovereign/rendering.py +124 -0
- sovereign/rendering_common.py +91 -0
- sovereign/response_class.py +18 -0
- sovereign/server.py +123 -28
- sovereign/statistics.py +19 -21
- sovereign/templates/base.html +59 -46
- sovereign/templates/resources.html +203 -102
- sovereign/testing/loaders.py +9 -0
- sovereign/{modifiers/test.py → testing/modifiers.py} +0 -2
- sovereign/tracing.py +103 -0
- sovereign/types.py +304 -0
- sovereign/utils/auth.py +27 -13
- sovereign/utils/crypto/__init__.py +0 -0
- sovereign/utils/crypto/crypto.py +135 -0
- sovereign/utils/crypto/suites/__init__.py +21 -0
- sovereign/utils/crypto/suites/aes_gcm_cipher.py +42 -0
- sovereign/utils/crypto/suites/base_cipher.py +21 -0
- sovereign/utils/crypto/suites/disabled_cipher.py +25 -0
- sovereign/utils/crypto/suites/fernet_cipher.py +29 -0
- sovereign/utils/dictupdate.py +3 -2
- sovereign/utils/eds.py +40 -22
- sovereign/utils/entry_point_loader.py +18 -0
- sovereign/utils/mock.py +60 -17
- sovereign/utils/resources.py +17 -0
- sovereign/utils/templates.py +4 -2
- sovereign/utils/timer.py +5 -3
- sovereign/utils/version_info.py +8 -0
- sovereign/utils/weighted_clusters.py +2 -1
- sovereign/v2/__init__.py +0 -0
- sovereign/v2/data/data_store.py +621 -0
- sovereign/v2/data/render_discovery_response.py +24 -0
- sovereign/v2/data/repositories.py +90 -0
- sovereign/v2/data/utils.py +33 -0
- sovereign/v2/data/worker_queue.py +273 -0
- sovereign/v2/jobs/refresh_context.py +117 -0
- sovereign/v2/jobs/render_discovery_job.py +145 -0
- sovereign/v2/logging.py +81 -0
- sovereign/v2/types.py +41 -0
- sovereign/v2/web.py +101 -0
- sovereign/v2/worker.py +199 -0
- sovereign/views/__init__.py +7 -0
- sovereign/views/api.py +82 -0
- sovereign/views/crypto.py +46 -15
- sovereign/views/discovery.py +52 -67
- sovereign/views/healthchecks.py +107 -20
- sovereign/views/interface.py +173 -117
- sovereign/worker.py +193 -0
- {sovereign-0.14.2.dist-info → sovereign-1.0.0a4.dist-info}/METADATA +81 -73
- sovereign-1.0.0a4.dist-info/RECORD +85 -0
- {sovereign-0.14.2.dist-info → sovereign-1.0.0a4.dist-info}/WHEEL +1 -1
- sovereign-1.0.0a4.dist-info/entry_points.txt +46 -0
- sovereign_files/__init__.py +0 -0
- sovereign_files/static/darkmode.js +51 -0
- sovereign_files/static/node_expression.js +42 -0
- sovereign_files/static/panel.js +76 -0
- sovereign_files/static/resources.css +246 -0
- sovereign_files/static/resources.js +642 -0
- sovereign_files/static/sass/style.scss +33 -0
- sovereign_files/static/style.css +16143 -0
- sovereign_files/static/style.css.map +1 -0
- sovereign/config_loader.py +0 -225
- sovereign/discovery.py +0 -175
- sovereign/logs.py +0 -131
- sovereign/schemas.py +0 -715
- sovereign/sources/__init__.py +0 -3
- sovereign/sources/file.py +0 -21
- sovereign/sources/inline.py +0 -38
- sovereign/sources/lib.py +0 -40
- sovereign/sources/poller.py +0 -298
- sovereign/static/sass/style.scss +0 -27
- sovereign/static/style.css +0 -13553
- sovereign/templates/ul_filter.html +0 -22
- sovereign/utils/crypto.py +0 -64
- sovereign/views/admin.py +0 -120
- sovereign-0.14.2.dist-info/LICENSE.txt +0 -13
- sovereign-0.14.2.dist-info/RECORD +0 -45
- sovereign-0.14.2.dist-info/entry_points.txt +0 -10
|
@@ -0,0 +1,621 @@
|
|
|
1
|
+
import logging
import pickle
import sqlite3
from enum import StrEnum
from typing import Any, Protocol

from structlog.typing import FilteringBoundLogger

from sovereign import config
from sovereign.types import DiscoveryRequest, DiscoveryResponse
from sovereign.v2.logging import get_named_logger
from sovereign.v2.types import Context, DiscoveryEntry, WorkerNode
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class ComparisonOperator(StrEnum):
    """Operators a data store can use when matching a stored property
    against a supplied value (see ``DataStoreProtocol.find_all_matching``
    and friends)."""

    EqualTo = "equal_to"
    LessThanOrEqualTo = "less_than_or_equal_to"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class DataType(StrEnum):
    """Kinds of objects a data store can hold.

    Each member selects a backing container: a per-type dict in
    ``InMemoryDataStore`` or a table in ``SqliteDataStore``.
    """

    Context = "context"
    # NOTE(review): the value is "discovery_request" while the member name
    # (and the sqlite table "discovery_entries") says "entry" — looks
    # deliberate, but worth confirming before relying on the string value.
    DiscoveryEntry = "discovery_request"
    WorkerNode = "worker_node"
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class DataStoreProtocol(Protocol):
    """Structural interface shared by the data-store backends
    (``InMemoryDataStore``, ``SqliteDataStore``).

    All methods take a ``DataType`` selecting which collection of objects
    to operate on; items are addressed either by their primary key or by
    comparing one of their properties against a value.
    """

    def delete_matching(
        self,
        data_type: DataType,
        property_name: str,
        comparison_operator: ComparisonOperator,
        property_value: Any,
    ) -> bool:
        """Delete every item whose ``property_name`` matches
        ``property_value`` under ``comparison_operator``.
        Returns True on success."""
        ...

    def find_all_matching(
        self,
        data_type: DataType,
        property_name: str,
        comparison_operator: ComparisonOperator,
        property_value: Any,
    ) -> list[Any]:
        """Return every item whose ``property_name`` matches
        ``property_value`` under ``comparison_operator``."""
        ...

    def find_all_matching_property(
        self,
        data_type: DataType,
        match_property_name: str,
        comparison_operator: ComparisonOperator,
        match_property_value: Any,
        property_name: str,
    ) -> list[Any]:
        """
        Find all items of the given data type where the 'match property' matches the given value
        according to the specified comparison operator, and return the specified property from
        each matching item.
        """
        ...

    def get(self, data_type: DataType, key: str) -> Any | None:
        """Return the item stored under ``key``, or None if absent."""
        ...

    def get_property(
        self, data_type: DataType, key: str, property_name: str
    ) -> Any | None:
        """Return one property of the item stored under ``key``,
        or None if the item is absent."""
        ...

    def min_by_property(
        self,
        data_type: DataType,
        property_name: str,
    ) -> Any | None:
        """Return the item with the smallest ``property_name`` value,
        or None when the store is empty."""
        ...

    def set(self, data_type: DataType, key: str, value: Any) -> bool:
        """Insert or replace the item stored under ``key``.
        Returns True on success."""
        ...

    def set_property(
        self, data_type: DataType, key: str, property_name: str, property_value: Any
    ) -> bool:
        """Update one property of an existing item.
        Returns False when ``key`` does not exist."""
        ...
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
class InMemoryDataStore(DataStoreProtocol):
    """Dictionary-backed implementation of ``DataStoreProtocol``.

    Holds one independent ``dict`` per ``DataType``. Nothing is persisted;
    state lives only for the lifetime of this object.
    """

    def __init__(self):
        self.logger: FilteringBoundLogger = get_named_logger(
            f"{self.__class__.__module__}.{self.__class__.__qualname__}",
            level=logging.DEBUG,
        )

        # One isolated mapping per data type; keys are the per-type primary
        # keys (context name / request hash / node id).
        self.stores: dict[DataType, dict[str, Any]] = {
            DataType.Context: dict[str, Context](),
            DataType.DiscoveryEntry: dict[str, DiscoveryEntry](),
            DataType.WorkerNode: dict[str, WorkerNode](),
        }

    @staticmethod
    def _compare(left: Any, operator: ComparisonOperator, right: Any) -> bool:
        """Apply ``operator`` to ``(left, right)``.

        Raises:
            ValueError: for an unsupported operator. The original silently
                returned False here, which diverged from
                ``SqliteDataStore._get_operator_sql`` (which raises); the two
                backends now behave consistently. Unreachable with the
                current two-member enum.
        """
        if operator == ComparisonOperator.EqualTo:
            return left == right
        elif operator == ComparisonOperator.LessThanOrEqualTo:
            return left <= right
        raise ValueError(f"Unsupported comparison operator: {operator}")

    def delete_matching(
        self,
        data_type: DataType,
        property_name: str,
        comparison_operator: ComparisonOperator,
        property_value: Any,
    ) -> bool:
        """Delete every item whose ``property_name`` matches; returns True."""
        store: dict[str, Any] = self.stores[data_type]
        # Collect first, delete after: never mutate a dict while iterating it.
        keys_to_delete = [
            key
            for key, store_item in store.items()
            if self._compare(
                getattr(store_item, property_name), comparison_operator, property_value
            )
        ]

        for key in keys_to_delete:
            self.logger.debug("Deleting item", data_type=data_type, key=key)
            del store[key]

        return True

    def find_all_matching(
        self,
        data_type: DataType,
        property_name: str,
        comparison_operator: ComparisonOperator,
        property_value: Any,
    ) -> list[Any]:
        """Return every item whose ``property_name`` matches the value."""
        store: dict[str, Any] = self.stores[data_type]
        return [
            item
            for item in store.values()
            if self._compare(
                getattr(item, property_name), comparison_operator, property_value
            )
        ]

    def find_all_matching_property(
        self,
        data_type: DataType,
        match_property_name: str,
        comparison_operator: ComparisonOperator,
        match_property_value: Any,
        property_name: str,
    ) -> list[Any]:
        """Return ``property_name`` of every item whose
        ``match_property_name`` matches the value."""
        return [
            getattr(value, property_name)
            for value in self.find_all_matching(
                data_type,
                match_property_name,
                comparison_operator,
                match_property_value,
            )
        ]

    def get(self, data_type: DataType, key: str) -> Any | None:
        """Return the item stored under ``key``, or None if absent."""
        store: dict[str, Any] = self.stores[data_type]
        return store.get(key)

    def get_property(
        self, data_type: DataType, key: str, property_name: str
    ) -> Any | None:
        """Return one attribute of the item under ``key``, or None if the
        item is absent."""
        # Fix: compare against None explicitly. The original used a walrus
        # truthiness check (`if value := ...`), which wrongly treated a
        # stored-but-falsy item as missing.
        value = self.get(data_type, key)
        if value is not None:
            return getattr(value, property_name)
        return None

    def min_by_property(
        self,
        data_type: DataType,
        property_name: str,
    ) -> Any | None:
        """Return the item with the smallest ``property_name``, or None
        when the store is empty."""
        store = self.stores[data_type]
        if not store:
            return None
        return min(store.values(), key=lambda item: getattr(item, property_name))

    def set(self, data_type: DataType, key: str, value: Any) -> bool:
        """Insert or replace the item under ``key``; always returns True."""
        store: dict[str, Any] = self.stores[data_type]
        store[key] = value
        return True

    def set_property(
        self, data_type: DataType, key: str, property_name: str, property_value: Any
    ) -> bool:
        """Mutate one attribute of an existing item in place.
        Returns False when ``key`` does not exist."""
        item = self.get(data_type, key)
        if item is None:
            return False
        setattr(item, property_name, property_value)
        return True
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
class SqliteDataStore(DataStoreProtocol):
|
|
185
|
+
def __init__(self):
|
|
186
|
+
self.logger: FilteringBoundLogger = get_named_logger(
|
|
187
|
+
f"{self.__class__.__module__}.{self.__class__.__qualname__}",
|
|
188
|
+
level=logging.DEBUG,
|
|
189
|
+
)
|
|
190
|
+
self.db_path = config.worker_v2_data_store_path
|
|
191
|
+
|
|
192
|
+
self._init_tables()
|
|
193
|
+
|
|
194
|
+
def _init_tables(self):
|
|
195
|
+
conn = self._get_connection()
|
|
196
|
+
cursor = conn.cursor()
|
|
197
|
+
|
|
198
|
+
cursor.execute("""
|
|
199
|
+
CREATE TABLE IF NOT EXISTS contexts (
|
|
200
|
+
name TEXT PRIMARY KEY,
|
|
201
|
+
data BLOB,
|
|
202
|
+
data_hash INT,
|
|
203
|
+
refresh_after INTEGER,
|
|
204
|
+
last_refreshed_at INTEGER
|
|
205
|
+
)
|
|
206
|
+
""")
|
|
207
|
+
|
|
208
|
+
cursor.execute("""
|
|
209
|
+
CREATE TABLE IF NOT EXISTS discovery_entries (
|
|
210
|
+
request_hash TEXT PRIMARY KEY,
|
|
211
|
+
template TEXT,
|
|
212
|
+
request TEXT,
|
|
213
|
+
response TEXT,
|
|
214
|
+
last_rendered_at INTEGER
|
|
215
|
+
)
|
|
216
|
+
""")
|
|
217
|
+
|
|
218
|
+
cursor.execute("""
|
|
219
|
+
CREATE TABLE IF NOT EXISTS worker_nodes (
|
|
220
|
+
node_id TEXT PRIMARY KEY,
|
|
221
|
+
last_heartbeat INTEGER
|
|
222
|
+
)
|
|
223
|
+
""")
|
|
224
|
+
|
|
225
|
+
conn.commit()
|
|
226
|
+
|
|
227
|
+
def _get_connection(self):
|
|
228
|
+
# check_same_thread=False allows SQLite connections to be shared across threads
|
|
229
|
+
# and means that we need to ensure thread safety ourselves.
|
|
230
|
+
# isolation_level=None uses autocommit mode,
|
|
231
|
+
# which prevents "cannot commit - no transaction is active" errors in multi-threaded contexts.
|
|
232
|
+
conn = sqlite3.connect(
|
|
233
|
+
self.db_path, check_same_thread=False, isolation_level=None
|
|
234
|
+
)
|
|
235
|
+
# configure the connection to return rows as sqlite3.Row objects,
|
|
236
|
+
# allowing access to columns by name as well as by index.
|
|
237
|
+
conn.row_factory = sqlite3.Row
|
|
238
|
+
return conn
|
|
239
|
+
|
|
240
|
+
@staticmethod
|
|
241
|
+
def _get_primary_key(data_type: DataType) -> str:
|
|
242
|
+
match data_type:
|
|
243
|
+
case DataType.Context:
|
|
244
|
+
return "name"
|
|
245
|
+
case DataType.DiscoveryEntry:
|
|
246
|
+
return "request_hash"
|
|
247
|
+
case DataType.WorkerNode:
|
|
248
|
+
return "node_id"
|
|
249
|
+
|
|
250
|
+
@staticmethod
|
|
251
|
+
def _get_operator_sql(operator: ComparisonOperator) -> str:
|
|
252
|
+
if operator == ComparisonOperator.EqualTo:
|
|
253
|
+
return "="
|
|
254
|
+
elif operator == ComparisonOperator.LessThanOrEqualTo:
|
|
255
|
+
return "<="
|
|
256
|
+
raise ValueError(f"Unsupported comparison operator: {operator}")
|
|
257
|
+
|
|
258
|
+
@staticmethod
|
|
259
|
+
def _get_table_name(data_type: DataType) -> str:
|
|
260
|
+
match data_type:
|
|
261
|
+
case DataType.Context:
|
|
262
|
+
return "contexts"
|
|
263
|
+
case DataType.DiscoveryEntry:
|
|
264
|
+
return "discovery_entries"
|
|
265
|
+
case DataType.WorkerNode:
|
|
266
|
+
return "worker_nodes"
|
|
267
|
+
|
|
268
|
+
@staticmethod
|
|
269
|
+
def _row_to_object(data_type: DataType, row: sqlite3.Row) -> Any:
|
|
270
|
+
match data_type:
|
|
271
|
+
case DataType.Context:
|
|
272
|
+
return Context(
|
|
273
|
+
name=row["name"],
|
|
274
|
+
data=pickle.loads(row["data"]),
|
|
275
|
+
data_hash=row["data_hash"],
|
|
276
|
+
last_refreshed_at=row["last_refreshed_at"],
|
|
277
|
+
refresh_after=row["refresh_after"],
|
|
278
|
+
)
|
|
279
|
+
case DataType.DiscoveryEntry:
|
|
280
|
+
return DiscoveryEntry(
|
|
281
|
+
request_hash=row["request_hash"],
|
|
282
|
+
template=row["template"],
|
|
283
|
+
request=DiscoveryRequest.model_validate_json(row["request"]),
|
|
284
|
+
response=DiscoveryResponse.model_validate_json(row["response"])
|
|
285
|
+
if row["response"] is not None
|
|
286
|
+
else None,
|
|
287
|
+
last_rendered_at=row["last_rendered_at"],
|
|
288
|
+
)
|
|
289
|
+
case DataType.WorkerNode:
|
|
290
|
+
return WorkerNode(
|
|
291
|
+
node_id=row["node_id"],
|
|
292
|
+
last_heartbeat=row["last_heartbeat"],
|
|
293
|
+
)
|
|
294
|
+
|
|
295
|
+
def _object_to_values(self, obj: Any) -> dict[str, Any]:
|
|
296
|
+
if isinstance(obj, Context):
|
|
297
|
+
try:
|
|
298
|
+
pickled = pickle.dumps(obj.data)
|
|
299
|
+
except TypeError as e:
|
|
300
|
+
self.logger.error("Failed to pickle context data", name=obj.name)
|
|
301
|
+
raise e
|
|
302
|
+
|
|
303
|
+
return {
|
|
304
|
+
"name": obj.name,
|
|
305
|
+
"data": pickled,
|
|
306
|
+
"data_hash": obj.data_hash,
|
|
307
|
+
"last_refreshed_at": obj.last_refreshed_at,
|
|
308
|
+
"refresh_after": obj.refresh_after,
|
|
309
|
+
}
|
|
310
|
+
elif isinstance(obj, DiscoveryEntry):
|
|
311
|
+
return {
|
|
312
|
+
"request_hash": obj.request_hash,
|
|
313
|
+
"template": obj.template,
|
|
314
|
+
"request": obj.request.model_dump_json(),
|
|
315
|
+
"response": obj.response.model_dump_json()
|
|
316
|
+
if obj.response is not None
|
|
317
|
+
else None,
|
|
318
|
+
"last_rendered_at": obj.last_rendered_at,
|
|
319
|
+
}
|
|
320
|
+
elif isinstance(obj, WorkerNode):
|
|
321
|
+
return {
|
|
322
|
+
"node_id": obj.node_id,
|
|
323
|
+
"last_heartbeat": obj.last_heartbeat,
|
|
324
|
+
}
|
|
325
|
+
raise ValueError(f"Unsupported object type: {type(obj)}")
|
|
326
|
+
|
|
327
|
+
@staticmethod
|
|
328
|
+
def _validate_column(data_type: DataType, column_name: str) -> str | None:
|
|
329
|
+
valid_columns = {
|
|
330
|
+
DataType.Context: {
|
|
331
|
+
"name",
|
|
332
|
+
"data",
|
|
333
|
+
"data_hash",
|
|
334
|
+
"last_refreshed_at",
|
|
335
|
+
"refresh_after",
|
|
336
|
+
},
|
|
337
|
+
DataType.DiscoveryEntry: {
|
|
338
|
+
"request_hash",
|
|
339
|
+
"template",
|
|
340
|
+
"request",
|
|
341
|
+
"response",
|
|
342
|
+
"last_rendered_at",
|
|
343
|
+
},
|
|
344
|
+
DataType.WorkerNode: {"node_id", "last_heartbeat"},
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
if column_name not in valid_columns[data_type]:
|
|
348
|
+
return None
|
|
349
|
+
|
|
350
|
+
return column_name
|
|
351
|
+
|
|
352
|
+
def delete_matching(
|
|
353
|
+
self,
|
|
354
|
+
data_type: DataType,
|
|
355
|
+
property_name: str,
|
|
356
|
+
comparison_operator: ComparisonOperator,
|
|
357
|
+
property_value: Any,
|
|
358
|
+
) -> bool:
|
|
359
|
+
column = self._validate_column(data_type, property_name)
|
|
360
|
+
|
|
361
|
+
if column is None:
|
|
362
|
+
self.logger.error(
|
|
363
|
+
"Cannot delete matching, invalid column name",
|
|
364
|
+
data_type=data_type,
|
|
365
|
+
column=property_name,
|
|
366
|
+
)
|
|
367
|
+
return False
|
|
368
|
+
|
|
369
|
+
operator = self._get_operator_sql(comparison_operator)
|
|
370
|
+
table = self._get_table_name(data_type)
|
|
371
|
+
sql = f"DELETE FROM {table} WHERE {column} {operator} ?"
|
|
372
|
+
|
|
373
|
+
conn = self._get_connection()
|
|
374
|
+
|
|
375
|
+
try:
|
|
376
|
+
cursor = conn.cursor()
|
|
377
|
+
cursor.execute(sql, (property_value,))
|
|
378
|
+
conn.commit()
|
|
379
|
+
return True
|
|
380
|
+
except (sqlite3.Error, ValueError):
|
|
381
|
+
self.logger.exception(
|
|
382
|
+
"Error deleting matching records",
|
|
383
|
+
data_type=data_type,
|
|
384
|
+
column=property_name,
|
|
385
|
+
operator=comparison_operator,
|
|
386
|
+
value=property_value,
|
|
387
|
+
)
|
|
388
|
+
return False
|
|
389
|
+
|
|
390
|
+
def find_all_matching(
|
|
391
|
+
self,
|
|
392
|
+
data_type: DataType,
|
|
393
|
+
property_name: str,
|
|
394
|
+
comparison_operator: ComparisonOperator,
|
|
395
|
+
property_value: Any,
|
|
396
|
+
) -> list[Any]:
|
|
397
|
+
column = self._validate_column(data_type, property_name)
|
|
398
|
+
|
|
399
|
+
if column is None:
|
|
400
|
+
self.logger.error(
|
|
401
|
+
"Cannot find all matching, invalid column name",
|
|
402
|
+
data_type=data_type,
|
|
403
|
+
column=property_name,
|
|
404
|
+
)
|
|
405
|
+
return []
|
|
406
|
+
|
|
407
|
+
operator = self._get_operator_sql(comparison_operator)
|
|
408
|
+
table = self._get_table_name(data_type)
|
|
409
|
+
sql = f"SELECT * FROM {table} WHERE {column} {operator} ?"
|
|
410
|
+
|
|
411
|
+
conn = self._get_connection()
|
|
412
|
+
|
|
413
|
+
try:
|
|
414
|
+
cursor = conn.cursor()
|
|
415
|
+
cursor.execute(sql, (property_value,))
|
|
416
|
+
return [self._row_to_object(data_type, row) for row in cursor.fetchall()]
|
|
417
|
+
except (sqlite3.Error, ValueError):
|
|
418
|
+
self.logger.exception(
|
|
419
|
+
"Error finding matching records",
|
|
420
|
+
data_type=data_type,
|
|
421
|
+
column=property_name,
|
|
422
|
+
operator=comparison_operator,
|
|
423
|
+
value=property_value,
|
|
424
|
+
)
|
|
425
|
+
return []
|
|
426
|
+
|
|
427
|
+
def find_all_matching_property(
|
|
428
|
+
self,
|
|
429
|
+
data_type: DataType,
|
|
430
|
+
match_property_name: str,
|
|
431
|
+
comparison_operator: ComparisonOperator,
|
|
432
|
+
match_property_value: Any,
|
|
433
|
+
property_name: str,
|
|
434
|
+
) -> list[Any]:
|
|
435
|
+
column = self._validate_column(data_type, property_name)
|
|
436
|
+
|
|
437
|
+
if column is None:
|
|
438
|
+
self.logger.error(
|
|
439
|
+
"Cannot find property for all matching, invalid column name",
|
|
440
|
+
data_type=data_type,
|
|
441
|
+
column=property_name,
|
|
442
|
+
)
|
|
443
|
+
return []
|
|
444
|
+
|
|
445
|
+
match_column = self._validate_column(data_type, match_property_name)
|
|
446
|
+
|
|
447
|
+
if match_column is None:
|
|
448
|
+
self.logger.error(
|
|
449
|
+
"Cannot find property for all matching, invalid column name",
|
|
450
|
+
data_type=data_type,
|
|
451
|
+
column=match_property_name,
|
|
452
|
+
)
|
|
453
|
+
return []
|
|
454
|
+
|
|
455
|
+
operator = self._get_operator_sql(comparison_operator)
|
|
456
|
+
table = self._get_table_name(data_type)
|
|
457
|
+
sql = f"SELECT {column} FROM {table} WHERE {match_column} {operator} ?"
|
|
458
|
+
|
|
459
|
+
conn = self._get_connection()
|
|
460
|
+
|
|
461
|
+
try:
|
|
462
|
+
cursor = conn.cursor()
|
|
463
|
+
cursor.execute(sql, (match_property_value,))
|
|
464
|
+
return [row[0] for row in cursor.fetchall()]
|
|
465
|
+
except (sqlite3.Error, ValueError):
|
|
466
|
+
self.logger.exception(
|
|
467
|
+
"Error finding matching records",
|
|
468
|
+
data_type=data_type,
|
|
469
|
+
column=property_name,
|
|
470
|
+
operator=comparison_operator,
|
|
471
|
+
value=match_property_value,
|
|
472
|
+
)
|
|
473
|
+
return []
|
|
474
|
+
|
|
475
|
+
def get(self, data_type: DataType, key: str) -> Any | None:
|
|
476
|
+
table = self._get_table_name(data_type)
|
|
477
|
+
primary_key_column = self._get_primary_key(data_type)
|
|
478
|
+
sql = f"SELECT * FROM {table} WHERE {primary_key_column} = ?"
|
|
479
|
+
|
|
480
|
+
conn = self._get_connection()
|
|
481
|
+
|
|
482
|
+
try:
|
|
483
|
+
cursor = conn.cursor()
|
|
484
|
+
cursor.execute(sql, (key,))
|
|
485
|
+
row = cursor.fetchone()
|
|
486
|
+
return self._row_to_object(data_type, row) if row else None
|
|
487
|
+
except (sqlite3.Error, ValueError):
|
|
488
|
+
self.logger.exception(
|
|
489
|
+
"Error getting record",
|
|
490
|
+
data_type=data_type,
|
|
491
|
+
key=key,
|
|
492
|
+
)
|
|
493
|
+
return None
|
|
494
|
+
|
|
495
|
+
def get_property(
|
|
496
|
+
self, data_type: DataType, key: str, property_name: str
|
|
497
|
+
) -> Any | None:
|
|
498
|
+
table = self._get_table_name(data_type)
|
|
499
|
+
primary_key_column = self._get_primary_key(data_type)
|
|
500
|
+
property_column = self._validate_column(data_type, property_name)
|
|
501
|
+
|
|
502
|
+
if property_column is None:
|
|
503
|
+
self.logger.error(
|
|
504
|
+
"Cannot get property, invalid column name",
|
|
505
|
+
data_type=data_type,
|
|
506
|
+
column=property_name,
|
|
507
|
+
)
|
|
508
|
+
return None
|
|
509
|
+
|
|
510
|
+
sql = f"SELECT {property_column} FROM {table} WHERE {primary_key_column} = ?"
|
|
511
|
+
|
|
512
|
+
conn = self._get_connection()
|
|
513
|
+
|
|
514
|
+
try:
|
|
515
|
+
cursor = conn.cursor()
|
|
516
|
+
cursor.execute(sql, (key,))
|
|
517
|
+
row = cursor.fetchone()
|
|
518
|
+
return row[0] if row else None
|
|
519
|
+
except (sqlite3.Error, ValueError):
|
|
520
|
+
self.logger.exception(
|
|
521
|
+
"Error getting property",
|
|
522
|
+
data_type=data_type,
|
|
523
|
+
key=key,
|
|
524
|
+
property=property_name,
|
|
525
|
+
)
|
|
526
|
+
return None
|
|
527
|
+
|
|
528
|
+
def min_by_property(
|
|
529
|
+
self,
|
|
530
|
+
data_type: DataType,
|
|
531
|
+
property_name: str,
|
|
532
|
+
) -> Any | None:
|
|
533
|
+
table = self._get_table_name(data_type)
|
|
534
|
+
column = self._validate_column(data_type, property_name)
|
|
535
|
+
|
|
536
|
+
if column is None:
|
|
537
|
+
self.logger.error(
|
|
538
|
+
"Cannot get min of property, invalid column name",
|
|
539
|
+
data_type=data_type,
|
|
540
|
+
column=property_name,
|
|
541
|
+
)
|
|
542
|
+
return None
|
|
543
|
+
|
|
544
|
+
sql = f"SELECT * FROM {table} ORDER BY {column} ASC LIMIT 1"
|
|
545
|
+
|
|
546
|
+
conn = self._get_connection()
|
|
547
|
+
|
|
548
|
+
try:
|
|
549
|
+
cursor = conn.cursor()
|
|
550
|
+
cursor.execute(sql)
|
|
551
|
+
row = cursor.fetchone()
|
|
552
|
+
return self._row_to_object(data_type, row) if row else None
|
|
553
|
+
except (sqlite3.Error, ValueError):
|
|
554
|
+
self.logger.exception(
|
|
555
|
+
"Error getting min by property",
|
|
556
|
+
data_type=data_type,
|
|
557
|
+
property=property_name,
|
|
558
|
+
)
|
|
559
|
+
return None
|
|
560
|
+
|
|
561
|
+
def set(self, data_type: DataType, key: str, value: Any) -> bool:
|
|
562
|
+
table = self._get_table_name(data_type)
|
|
563
|
+
value_dict = self._object_to_values(value)
|
|
564
|
+
|
|
565
|
+
columns = ", ".join(value_dict.keys())
|
|
566
|
+
placeholders = ", ".join("?" for _ in value_dict)
|
|
567
|
+
sql = f"INSERT OR REPLACE INTO {table} ({columns}) VALUES ({placeholders})"
|
|
568
|
+
|
|
569
|
+
conn = self._get_connection()
|
|
570
|
+
|
|
571
|
+
try:
|
|
572
|
+
cursor = conn.cursor()
|
|
573
|
+
cursor.execute(sql, tuple(value_dict.values()))
|
|
574
|
+
conn.commit()
|
|
575
|
+
if cursor.rowcount == 0:
|
|
576
|
+
return False
|
|
577
|
+
return True
|
|
578
|
+
except (sqlite3.Error, ValueError):
|
|
579
|
+
self.logger.exception(
|
|
580
|
+
"Error saving record",
|
|
581
|
+
data_type=data_type,
|
|
582
|
+
key=key,
|
|
583
|
+
values=value_dict,
|
|
584
|
+
)
|
|
585
|
+
return False
|
|
586
|
+
|
|
587
|
+
def set_property(
|
|
588
|
+
self, data_type: DataType, key: str, property_name: str, property_value: Any
|
|
589
|
+
) -> bool:
|
|
590
|
+
table = self._get_table_name(data_type)
|
|
591
|
+
primary_key_column = self._get_primary_key(data_type)
|
|
592
|
+
property_column = self._validate_column(data_type, property_name)
|
|
593
|
+
|
|
594
|
+
if property_column is None:
|
|
595
|
+
self.logger.error(
|
|
596
|
+
"Cannot set property, invalid column name",
|
|
597
|
+
data_type=data_type,
|
|
598
|
+
column=property_name,
|
|
599
|
+
)
|
|
600
|
+
return False
|
|
601
|
+
|
|
602
|
+
sql = f"UPDATE {table} SET {property_column} = ? WHERE {primary_key_column} = ?"
|
|
603
|
+
|
|
604
|
+
conn = self._get_connection()
|
|
605
|
+
|
|
606
|
+
try:
|
|
607
|
+
cursor = conn.cursor()
|
|
608
|
+
cursor.execute(sql, (property_value, key))
|
|
609
|
+
conn.commit()
|
|
610
|
+
if cursor.rowcount == 0:
|
|
611
|
+
return False
|
|
612
|
+
return True
|
|
613
|
+
except (sqlite3.Error, ValueError):
|
|
614
|
+
self.logger.exception(
|
|
615
|
+
"Error setting property",
|
|
616
|
+
data_type=data_type,
|
|
617
|
+
key=key,
|
|
618
|
+
property=property_name,
|
|
619
|
+
value=property_value,
|
|
620
|
+
)
|
|
621
|
+
return False
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
from sovereign.v2.data.repositories import ContextRepository, DiscoveryEntryRepository
|
|
2
|
+
from sovereign.v2.types import RenderDiscoveryJob
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def render_discovery_response(
    context_repository: ContextRepository,
    discovery_job_repository: DiscoveryEntryRepository,
    job: RenderDiscoveryJob,
):
    """Render a discovery response for *job*.

    TODO: not yet implemented — currently a no-op. The commented-out sketch
    below outlines the intended flow: look up the discovery entry by the
    job's request hash, resolve the xDS template for the request's envoy
    version, gather the template's dependent contexts, then build a
    RenderJob from them.
    """
    pass
    # discovery_job = discovery_job_repository.get(job.request_hash)
    # version = discovery_job.request.envoy_version
    # xds_template: XdsTemplate = XDS_TEMPLATES.get(version).get(
    #     discovery_job.request.template
    # )
    # required_context_names = xds_template.depends_on
    # context: dict[str, Any] = {"request": discovery_job.request}
    # for context_name in required_context_names:
    #     context[context_name] = context_repository.get(context_name)
    # # todo: crypto
    # # todo: hide ui
    # render_job: RenderJob = RenderJob(
    #     id=job.request_hash, request=job.request, context=context
    # )
|