sovereign 1.0.0b123-py3-none-any.whl → 1.0.0b134-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sovereign/app.py +1 -1
- sovereign/cache/__init__.py +182 -0
- sovereign/cache/backends/__init__.py +110 -0
- sovereign/cache/backends/s3.py +139 -0
- sovereign/cache/filesystem.py +42 -0
- sovereign/cache/types.py +15 -0
- sovereign/context.py +20 -18
- sovereign/events.py +49 -0
- sovereign/middlewares.py +1 -1
- sovereign/rendering.py +74 -35
- sovereign/schemas.py +112 -110
- sovereign/server.py +4 -3
- sovereign/sources/poller.py +20 -4
- sovereign/statistics.py +1 -1
- sovereign/templates/base.html +59 -46
- sovereign/templates/resources.html +40 -835
- sovereign/utils/mock.py +7 -3
- sovereign/views/healthchecks.py +1 -1
- sovereign/views/interface.py +34 -15
- sovereign/worker.py +87 -46
- {sovereign-1.0.0b123.dist-info → sovereign-1.0.0b134.dist-info}/METADATA +4 -5
- {sovereign-1.0.0b123.dist-info → sovereign-1.0.0b134.dist-info}/RECORD +33 -24
- {sovereign-1.0.0b123.dist-info → sovereign-1.0.0b134.dist-info}/WHEEL +1 -1
- {sovereign-1.0.0b123.dist-info → sovereign-1.0.0b134.dist-info}/entry_points.txt +3 -0
- sovereign_files/__init__.py +0 -0
- sovereign_files/static/darkmode.js +51 -0
- sovereign_files/static/node_expression.js +42 -0
- sovereign_files/static/resources.css +246 -0
- sovereign_files/static/resources.js +642 -0
- sovereign_files/static/sass/style.scss +33 -0
- sovereign_files/static/style.css +16143 -0
- sovereign_files/static/style.css.map +1 -0
- sovereign/cache.py +0 -133
- sovereign/static/node_expression.js +0 -16
- sovereign/static/sass/style.scss +0 -27
- sovereign/static/style.css +0 -13553
- sovereign-1.0.0b123.dist-info/LICENSE.txt +0 -13
- {sovereign → sovereign_files}/static/panel.js +0 -0
sovereign/app.py
CHANGED
@@ -101,7 +101,7 @@ def init_app() -> FastAPI:
 
     @application.get("/static/{filename}", summary="Return a static asset")
     async def static(filename: str) -> Response:
-        return FileResponse(get_package_file("
+        return FileResponse(get_package_file("sovereign_files", f"static/{filename}"))  # type: ignore[arg-type]
 
     @application.get(
         "/admin/xds_dump",
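The static route now serves assets out of the new sovereign_files package. get_package_file is sovereign's own helper and its body is not shown in this diff; judging by the call site (and the type: ignore[arg-type] it needs), it plausibly resolves a data file bundled inside a package, along these lines:

    # Sketch only: an assumed implementation of get_package_file,
    # not the package's actual code.
    from importlib.resources import files

    def get_package_file(package: str, relative_path: str):
        # files() returns a Traversable rather than a plain path, which
        # would explain the type: ignore at the FileResponse call site.
        return files(package).joinpath(relative_path)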
sovereign/cache/__init__.py
ADDED
@@ -0,0 +1,182 @@
+"""
+Sovereign Cache Module
+
+This module provides an extensible cache backend system that allows clients
+to configure their own remote cache backends through entry points.
+"""
+
+import asyncio
+import threading
+from typing import Any
+from typing_extensions import final
+
+import requests
+
+from sovereign import stats, application_logger as log
+from sovereign.schemas import config, DiscoveryRequest, RegisterClientRequest
+from sovereign.cache.types import Entry, CacheResult
+from sovereign.cache.backends import CacheBackend, get_backend
+from sovereign.cache.filesystem import FilesystemCache
+
+CLIENTS_LOCK = "sovereign_clients_lock"
+CLIENTS_KEY = "sovereign_clients"
+CACHE_READ_TIMEOUT = config.cache.read_timeout
+WORKER_URL = "http://localhost:9080/client"
+
+
+@final
+class DualCache:
+    """Cache that writes to both filesystem and remote backend, reads filesystem first with remote fallback"""
+
+    def __init__(self, fs_cache: FilesystemCache, remote_cache: CacheBackend | None):
+        self.fs_cache = fs_cache
+        self.remote_cache = remote_cache
+
+    def get(self, key: str) -> CacheResult | None:
+        # Try filesystem first
+        if value := self.fs_cache.get(key):
+            stats.increment("cache.fs.hit")
+            return CacheResult(value=value, from_remote=False)
+
+        # Fallback to remote cache if available
+        if self.remote_cache:
+            try:
+                if value := self.remote_cache.get(key):
+                    stats.increment("cache.remote.hit")
+                    # Write back to filesystem
+                    self.fs_cache.set(key, value)
+                    return CacheResult(value=value, from_remote=True)
+            except Exception as e:
+                log.warning(f"Failed to read from remote cache: {e}")
+                stats.increment("cache.remote.error")
+
+        return None
+
+    def set(self, key, value, timeout=None):
+        self.fs_cache.set(key, value, timeout)
+        if self.remote_cache:
+            try:
+                self.remote_cache.set(key, value, timeout)
+                stats.increment("cache.remote.write.success")
+            except Exception as e:
+                log.warning(f"Failed to write to remote cache: {e}")
+                stats.increment("cache.remote.write.error")
+
+    def register(self, id: str, req: DiscoveryRequest):
+        self.fs_cache.register(id, req)
+
+    def registered(self, id: str) -> bool:
+        if value := self.fs_cache.registered(id):
+            return value
+        return False
+
+    def get_registered_clients(self) -> list[tuple[str, Any]]:
+        if value := self.fs_cache.get_registered_clients():
+            return value
+        return []
+
+
+class CacheManager:
+    _instance = None
+    _lock = threading.Lock()
+
+    def __new__(cls):
+        if cls._instance is None:
+            with cls._lock:
+                if cls._instance is None:
+                    cls._instance = super().__new__(cls)
+                    cls._instance._initialized = False
+        return cls._instance
+
+    def __init__(self):
+        if self._initialized:
+            return
+
+        filesystem_cache = FilesystemCache()
+        remote_cache = get_backend()
+
+        if remote_cache is None:
+            log.info("Cache initialized with filesystem backend only")
+        else:
+            log.info("Cache initialized with filesystem and remote backends")
+        self._cache = DualCache(filesystem_cache, remote_cache)
+        self._initialized = True
+
+    def get(self, req: DiscoveryRequest) -> Entry | None:
+        id = client_id(req)
+        if result := self._cache.get(id):
+            if result.from_remote:
+                self.register(req)
+            stats.increment("cache.hit")
+            return result.value
+        stats.increment("cache.miss")
+        return None
+
+    def set(self, key: str, value: Entry, timeout: int | None = None) -> None:
+        return self._cache.set(key, value, timeout)
+
+    def register(self, req: DiscoveryRequest) -> tuple[str, DiscoveryRequest]:
+        """Register a client using the cache backend"""
+        id = client_id(req)
+        log.debug(f"Registering client {id}")
+        self._cache.register(id, req)
+        return id, req
+
+    def registered(self, req: DiscoveryRequest) -> bool:
+        """Check if a client is registered using the cache backend"""
+        id = client_id(req)
+        is_registered = self._cache.registered(id)
+        log.debug(f"Client {id} registered={is_registered}")
+        return is_registered
+
+    def get_registered_clients(self) -> list[tuple[str, Any]]:
+        """Get all registered clients using the cache backend"""
+        return self._cache.get_registered_clients()
+
+
+@stats.timed("cache.read_ms")
+async def blocking_read(
+    req: DiscoveryRequest, timeout_s=CACHE_READ_TIMEOUT, poll_interval_s=0.5
+) -> Entry | None:
+    metric = "client.registration"
+    if entry := read(req):
+        return entry
+
+    registered = False
+    registration = RegisterClientRequest(request=req)
+    start = asyncio.get_event_loop().time()
+    attempt = 1
+    while (asyncio.get_event_loop().time() - start) < timeout_s:
+        if not registered:
+            try:
+                response = requests.put(WORKER_URL, json=registration.model_dump())
+                match response.status_code:
+                    case 200 | 202:
+                        stats.increment(metric, tags=["status:registered"])
+                        registered = True
+                    case 429:
+                        stats.increment(metric, tags=["status:ratelimited"])
+                        await asyncio.sleep(min(attempt, CACHE_READ_TIMEOUT))
+                        attempt *= 2
+            except Exception as e:
+                stats.increment(metric, tags=["status:failed"])
+                log.exception(f"Tried to register client but failed: {e}")
+        if entry := read(req):
+            return entry
+        await asyncio.sleep(poll_interval_s)
+
+    return None
+
+
+def client_id(req: DiscoveryRequest) -> str:
+    return req.cache_key(config.cache.hash_rules)
+
+
+manager = CacheManager()
+
+# Old APIs
+write = manager.set
+read = manager.get
+clients = manager.get_registered_clients
+register = manager.register
+registered = manager.registered
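DualCache above is a read-through/write-back pair: reads try the local filesystem tier first, fall through to the remote backend, and copy a remote hit back to the filesystem so the next read is local. The pattern in isolation, with stub tiers standing in for the real caches (the stubs are illustrative, not part of the package):

    class StubTier(dict):
        # Minimal cache tier: get() returns None on a miss, like cachelib.
        def set(self, key, value, timeout=None):
            self[key] = value

    fs, remote = StubTier(), StubTier()
    remote.set("envoy:clusters", "rendered-config")  # only the remote has it

    value = fs.get("envoy:clusters") or remote.get("envoy:clusters")
    if value is not None and fs.get("envoy:clusters") is None:
        fs.set("envoy:clusters", value)  # write back to the local tier

    assert fs.get("envoy:clusters") == "rendered-config"  # now a local hit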
sovereign/cache/backends/__init__.py
ADDED
@@ -0,0 +1,110 @@
+"""
+Cache backends module
+
+This module provides the protocol definition for cache backends and
+the loading mechanism for extensible cache backends via entry points.
+"""
+
+from collections.abc import Sequence
+from importlib.metadata import EntryPoints
+from typing import Protocol, Any, runtime_checkable
+
+from sovereign import application_logger as log
+from sovereign.utils.entry_point_loader import EntryPointLoader
+
+
+@runtime_checkable
+class CacheBackend(Protocol):
+    def __init__(self, config: dict[str, Any]) -> None:
+        """Initialize the cache backend with generic configuration
+
+        Args:
+            config: Dictionary containing backend-specific configuration
+        """
+        ...
+
+    def get(self, key: str) -> Any | None:
+        """Get a value from the cache
+
+        Args:
+            key: The cache key
+
+        Returns:
+            The cached value or None if not found
+        """
+        ...
+
+    def set(self, key: str, value: Any, timeout: int | None = None) -> None:
+        """Set a value in the cache
+
+        Args:
+            key: The cache key
+            value: The value to cache
+            timeout: Optional timeout in seconds
+        """
+        ...
+
+
+def get_backend() -> CacheBackend | None:
+    from sovereign import config
+
+    cache_config = config.cache.remote_backend
+    if not cache_config:
+        log.info("No remote cache backend configured, using filesystem only")
+        return None
+
+    backend_type = cache_config.type
+
+    loader = EntryPointLoader("cache.backends")
+    entry_points: EntryPoints | Sequence[Any] = loader.groups.get("cache.backends", [])
+
+    backend = None
+    for ep in entry_points:
+        if ep.name == backend_type:
+            backend = ep.load()
+            break
+
+    if not backend:
+        raise KeyError(
+            (
+                f"Cache backend '{backend_type}' not found. "
+                f"Available backends: {[ep.name for ep in entry_points]}"
+            )
+        )
+
+    backend_config = _process_loadable_config(cache_config.config)
+    instance = backend(backend_config)
+
+    if not isinstance(instance, CacheBackend):
+        raise TypeError(
+            (f"Cache backend '{backend_type}' does not implement CacheBackend protocol")
+        )
+
+    log.info(f"Successfully initialized cache backend: {backend_type}")
+    return instance
+
+
+def _process_loadable_config(config: dict[str, Any]) -> dict[str, Any]:
+    from sovereign.dynamic_config import Loadable
+
+    processed = {}
+    for key, value in config.items():
+        try:
+            if isinstance(value, str):
+                loadable = Loadable.from_legacy_fmt(value)
+                processed[key] = loadable.load()
+            elif isinstance(value, dict):
+                loadable = Loadable(**value)
+                processed[key] = loadable.load()
+            else:
+                processed[key] = value
+            continue
+        except Exception as e:
+            log.warning(f"Failed to load value for {key}: {e}")
+
+        if isinstance(value, dict):
+            processed[key] = _process_loadable_config(value)
+        else:
+            processed[key] = value
+
+    return processed
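Since get_backend resolves the backend class by entry-point name, a third-party package can ship its own backend without touching sovereign. A minimal in-memory backend satisfying the protocol might look like this (the entry-point group name is inferred from EntryPointLoader("cache.backends") above and should be treated as an assumption):

    from typing import Any

    # Hypothetical registration in the plugin's pyproject.toml:
    #   [project.entry-points."cache.backends"]
    #   memory = "my_plugin.cache:MemoryBackend"

    class MemoryBackend:
        def __init__(self, config: dict[str, Any]) -> None:
            self._data: dict[str, Any] = {}

        def get(self, key: str) -> Any | None:
            return self._data.get(key)

        def set(self, key: str, value: Any, timeout: int | None = None) -> None:
            # timeout accepted for protocol compatibility; not enforced here
            self._data[key] = value

Because CacheBackend is a runtime_checkable Protocol, the isinstance check in get_backend only verifies that these methods exist, not their signatures.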
sovereign/cache/backends/s3.py
ADDED
@@ -0,0 +1,139 @@
+import pickle
+from datetime import datetime, timezone, timedelta
+from typing import Any
+from typing_extensions import override
+from urllib.parse import quote
+from importlib.util import find_spec
+
+from sovereign import application_logger as log
+from sovereign.cache.backends import CacheBackend
+
+try:
+    import boto3
+    from botocore.exceptions import ClientError
+except ImportError:
+    pass
+
+BOTO_AVAILABLE = find_spec("boto3") is not None
+
+
+class S3Client:
+    def __init__(self, role_arn: str | None, client_args: dict[str, Any]):
+        self.role_arn = role_arn
+        self.client_args = client_args
+        self._client = None
+        self._credentials_expiry = None
+        self._base_session = boto3.Session()
+        self._make_client()
+
+    def _make_client(self) -> None:
+        if self.role_arn:
+            log.debug(f"Refreshing credentials for role: {self.role_arn}")
+            sts = self._base_session.client("sts")
+            duration_seconds = 3600  # 1 hour
+            response = sts.assume_role(
+                RoleArn=self.role_arn,
+                RoleSessionName="sovereign-s3-cache",
+                DurationSeconds=duration_seconds,
+            )
+            credentials = response["Credentials"]
+            session = boto3.Session(
+                aws_access_key_id=credentials["AccessKeyId"],
+                aws_secret_access_key=credentials["SecretAccessKey"],
+                aws_session_token=credentials["SessionToken"],
+            )
+            self._credentials_expiry = credentials["Expiration"]
+        else:
+            session = self._base_session
+            self._credentials_expiry = None
+        self._client = session.client("s3", **self.client_args)
+
+    def _session_expiring_soon(self) -> bool:
+        if not self.role_arn or self._credentials_expiry is None:
+            return False
+        refresh_threshold = timedelta(minutes=30).seconds
+        time_until_expiry = (
+            self._credentials_expiry - datetime.now(timezone.utc)
+        ).total_seconds()
+        return time_until_expiry <= refresh_threshold
+
+    def __getattr__(self, name):
+        if self._session_expiring_soon():
+            self._make_client()
+        return getattr(self._client, name)
+
+
+class S3Backend(CacheBackend):
+    """S3 cache backend implementation"""
+
+    @override
+    def __init__(self, config: dict[str, Any]) -> None:  # pyright: ignore[reportMissingSuperCall]
+        """Initialize S3 backend
+
+        Args:
+            config: Configuration dictionary containing S3 connection parameters
+                Expected keys: bucket_name, key
+                Optional keys: assume_role, endpoint_url
+        """
+        if not BOTO_AVAILABLE:
+            raise ImportError("boto3 not installed")
+
+        self.bucket_name = config.get("bucket_name")
+        if not self.bucket_name:
+            raise ValueError("bucket_name is required for S3 cache backend")
+
+        self.key = config.get("key", "sovereign-cache/")
+        self.registration_prefix = config.get("registration_prefix", "registrations-")
+        self.role = config.get("assume_role")
+
+        client_args: dict[str, Any] = {}
+        if endpoint_url := config.get("endpoint_url"):
+            client_args["endpoint_url"] = endpoint_url
+
+        self.client_args = client_args
+        self.s3 = S3Client(self.role, self.client_args)
+
+        try:
+            self.s3.head_bucket(Bucket=self.bucket_name)
+            log.info(f"S3 cache backend connected to bucket '{self.bucket_name}'")
+        except Exception as e:
+            log.error(
+                f"Failed to access S3 bucket '{self.bucket_name}' with current credentials: {e}"
+            )
+            raise
+
+    def _make_key(self, key: str) -> str:
+        encoded_key = quote(key, safe="")
+        return f"{self.key}{encoded_key}"
+
+    def get(self, key: str) -> Any | None:
+        try:
+            log.debug(f"Retrieving object {key} from bucket")
+            response = self.s3.get_object(
+                Bucket=self.bucket_name, Key=self._make_key(key)
+            )
+            data = response["Body"].read()
+            return pickle.loads(data)
+        except self.s3.exceptions.NoSuchKey:
+            return None
+        except ClientError as e:
+            if e.response["Error"]["Code"] == "404":
+                return None
+            log.warning(f"Failed to get key '{key}' from S3: {e}")
+            return None
+        except Exception as e:
+            log.warning(f"Failed to get key '{key}' from S3: {e}")
+            return None
+
+    @override
+    def set(self, key: str, value: Any, timeout: int | None = None) -> None:
+        try:
+            log.debug(f"Putting new object {key} into bucket")
+            self.s3.put_object(
+                Bucket=self.bucket_name,
+                Key=self._make_key(key),
+                Body=pickle.dumps(value),
+            )
+        except Exception as e:
+            log.warning(f"Failed to set key '{key}' in S3: {e}")
+            raise
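S3Client uses __getattr__ as a lazy refresh hook: every attribute access that reaches the underlying boto3 client first checks whether the assumed-role credentials are near expiry and rebuilds the client if so. The same proxy pattern in isolation (generic sketch, not sovereign code):

    import time

    class RefreshingProxy:
        def __init__(self, factory, ttl_s: float):
            self._factory = factory
            self._ttl_s = ttl_s
            self._obj = factory()
            self._built_at = time.monotonic()

        def __getattr__(self, name):
            # Only runs for names not defined on the proxy itself, so every
            # delegated call passes through this staleness check.
            if time.monotonic() - self._built_at > self._ttl_s:
                self._obj = self._factory()
                self._built_at = time.monotonic()
            return getattr(self._obj, name)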
sovereign/cache/filesystem.py
ADDED
@@ -0,0 +1,42 @@
+from hashlib import sha256
+from cachelib import FileSystemCache
+
+from sovereign import config
+from sovereign.schemas import DiscoveryRequest
+
+
+class FilesystemCache:
+    def __init__(self, cache_path: str | None = None, default_timeout: int = 0):
+        self.cache_path = cache_path or config.cache.local_fs_path
+        self.default_timeout = default_timeout
+        self._cache = FileSystemCache(
+            cache_dir=self.cache_path,
+            default_timeout=self.default_timeout,
+            hash_method=sha256,
+        )
+
+    def get(self, key):
+        return self._cache.get(key)
+
+    def set(self, key, value, timeout=None):
+        return self._cache.set(key, value, timeout)
+
+    def delete(self, key):
+        return self._cache.delete(key)
+
+    def clear(self):
+        return self._cache.clear()
+
+    def register(self, id: str, req: DiscoveryRequest) -> None:
+        clients = self.get_registered_clients()
+        if (id, req) in clients:
+            return
+        clients.append((id, req))
+        _ = self._cache.set("_registered_clients", clients)
+
+    def registered(self, id: str) -> bool:
+        clients = self.get_registered_clients()
+        return any(cid == id for cid, _ in clients)
+
+    def get_registered_clients(self) -> list[tuple[str, DiscoveryRequest]]:
+        return self._cache.get("_registered_clients") or []
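The wrapper defers storage to cachelib's FileSystemCache; a default_timeout of 0 means entries never expire. Quick usage (the cache path is illustrative):

    cache = FilesystemCache(cache_path="/tmp/sovereign-cache")
    cache.set("clusters", {"resources": []})
    assert cache.get("clusters") == {"resources": []}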
sovereign/cache/types.py
ADDED
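The module's contents are collapsed in this view. Based on how cache/__init__.py constructs CacheResult(value=..., from_remote=...) and passes Entry around, a sketch consistent with those call sites would be (the field types here are assumptions):

    from dataclasses import dataclass
    from typing import Any

    Entry = Any  # assumed alias for the cached discovery response payload

    @dataclass
    class CacheResult:
        value: Entry
        from_remote: bool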
sovereign/context.py
CHANGED
@@ -6,6 +6,7 @@ import asyncio
 import inspect
 from enum import Enum
 from typing import Any, Callable, Optional, Union
+from typing_extensions import final, override
 
 import pydantic
 from croniter import croniter
@@ -14,14 +15,15 @@ from sovereign.schemas import DiscoveryRequest, config
 from sovereign.statistics import configure_statsd
 from sovereign.utils.timer import wait_until
 from sovereign.dynamic_config import Loadable
+from sovereign.events import bus, Event, Topic
 
 
 stats = configure_statsd()
 DEFAULT_RETRY_INTERVAL = config.template_context.refresh_retry_interval_secs
 DEFAULT_NUM_RETRIES = config.template_context.refresh_num_retries
-NEW_CONTEXT = asyncio.Event()
 
 
+@final
 class ScheduledTask:
     def __init__(self, task: "ContextTask"):
         self.task = task
@@ -37,22 +39,24 @@ class ScheduledTask:
         self.due = time.monotonic() + self.task.seconds_til_next_run
         heapq.heappush(tasks, self)
 
+    @override
     def __str__(self) -> str:
         return f"ScheduledTask({self.task.name})"
 
 
+@final
 class TemplateContext:
     def __init__(
         self,
-        middleware:
+        middleware: list[Callable[[DiscoveryRequest, dict[str, Any]], None]]
+        | None = None,
     ) -> None:
         self.tasks: dict[str, ContextTask] = dict()
         self.results: dict[str, ContextResult] = dict()
         self.hashes: dict[str, int] = dict()
         self.scheduled: list[ScheduledTask] = list()
-        self.middleware = middleware or list()
         self.running: set[str] = set()
-        self.
+        self.middleware = middleware or list()
 
     @classmethod
     def from_config(cls) -> "TemplateContext":
@@ -68,7 +72,7 @@ class TemplateContext:
     def register_task_from_loadable(self, name: str, loadable: Loadable) -> None:
         self.register_task(ContextTask.from_loadable(name, loadable))
 
-    def update_hash(self, task: "ContextTask"):
+    async def update_hash(self, task: "ContextTask"):
         name = task.name
         result = self.results.get(name)
         old = self.hashes.get(name)
@@ -77,18 +81,7 @@ class TemplateContext:
         if old != new:
             stats.increment("context.updated", tags=[f"context:{name}"])
             self.hashes[name] = new
-
-            if self.notify_consumers:
-                # cancel existing and reset timer
-                self.notify_consumers.cancel()
-                self.notify_consumers = asyncio.create_task(self.publish_event())
-
-    async def publish_event(self):
-        try:
-            await asyncio.sleep(3.0)
-            NEW_CONTEXT.set()
-        except asyncio.CancelledError:
-            pass
+            await task.notify()
 
     def get_context(self, req: DiscoveryRequest) -> dict[str, Any]:
         ret = {r.name: r.data for r in self.results.values()}
@@ -107,7 +100,7 @@ class TemplateContext:
         self.running.add(task.name)
         try:
             await task.refresh(self.results)
-            self.update_hash(task)
+            await self.update_hash(task)
         finally:
             self.running.remove(task.name)
 
@@ -161,6 +154,15 @@ class ContextTask(pydantic.BaseModel):
     interval: "TaskInterval"
     retry_policy: Optional["TaskRetryPolicy"] = None
 
+    async def notify(self):
+        await bus.publish(
+            Topic.CONTEXT,
+            Event(
+                message=f"Context {self.name} updated",
+                metadata={"name": self.name},
+            ),
+        )
+
     async def refresh(self, output: dict[str, "ContextResult"]) -> None:
         output[self.name] = await self.try_load()
 
sovereign/events.py
ADDED
@@ -0,0 +1,49 @@
+import pydantic
+from enum import IntEnum
+from asyncio import Queue, gather
+from collections import defaultdict
+from typing import final, Sequence
+
+
+Primitives = str | int | float | bool | Sequence[str]
+
+
+class Topic(IntEnum):
+    CONTEXT = 1
+
+
+class Event(pydantic.BaseModel):
+    message: str
+    metadata: dict[str, Primitives] = pydantic.Field(default_factory=dict)
+
+
+@final
+class EventBus:
+    def __init__(self, maxsize: int = 0):
+        self._topics: dict[Topic, list[Queue[Event]]] = defaultdict(list)
+        self._maxsize = maxsize
+
+    def subscribe(self, topic: Topic) -> Queue[Event]:
+        q: Queue[Event] = Queue(self._maxsize)
+        self._topics[topic].append(q)
+        return q
+
+    def unsubscribe(self, topic: Topic, q: Queue[Event]) -> None:
+        qs = self._topics.get(topic)
+        if not qs:
+            return
+        try:
+            qs.remove(q)
+        except ValueError:
+            pass
+        if not qs:
+            _ = self._topics.pop(topic, None)
+
+    async def publish(self, topic: Topic, msg: Event) -> None:
+        qs = self._topics.get(topic, [])
+        if not qs:
+            return
+        _ = await gather(*(q.put(msg) for q in qs))
+
+
+bus = EventBus()
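The bus replaces the module-level NEW_CONTEXT asyncio.Event removed from context.py: rather than sharing one flag, each consumer subscribes for its own Queue and receives every published Event. A consumer loop might look like this (a sketch; the package's actual consumers live elsewhere in this release):

    from sovereign.events import bus, Topic

    async def watch_context_updates():
        q = bus.subscribe(Topic.CONTEXT)
        try:
            while True:
                event = await q.get()
                # e.g. re-render cached responses when a context task changes
                print(event.message, event.metadata)
        finally:
            bus.unsubscribe(Topic.CONTEXT, q)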
sovereign/middlewares.py
CHANGED
@@ -19,7 +19,7 @@ class RequestContextLogMiddleware(BaseHTTPMiddleware):
             response = await call_next(request)
         finally:
             req_id = get_request_id()
-            response.headers
+            req_id = response.headers.setdefault("X-Request-Id", get_request_id())
             logs.access_logger.queue_log_fields(REQUEST_ID=req_id)
             _request_id_ctx_var.reset(token)
             return response