nlbone 0.8.7__tar.gz → 0.9.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {nlbone-0.8.7 → nlbone-0.9.1}/PKG-INFO +1 -1
- {nlbone-0.8.7 → nlbone-0.9.1}/pyproject.toml +1 -1
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/auth/auth_service.py +18 -3
- nlbone-0.9.1/src/nlbone/adapters/cache/async_redis.py +240 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/cache/redis.py +81 -39
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/engine.py +30 -19
- nlbone-0.9.1/src/nlbone/adapters/db/redis/client.py +36 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/config/settings.py +18 -5
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/dependencies/async_auth.py +6 -11
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/dependencies/auth.py +8 -11
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/exception_handlers.py +3 -3
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/middleware/authentication.py +3 -4
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/cache.py +25 -27
- nlbone-0.8.7/src/nlbone/adapters/cache/async_redis.py +0 -190
- nlbone-0.8.7/src/nlbone/adapters/db/redis/client.py +0 -19
- {nlbone-0.8.7 → nlbone-0.9.1}/.gitignore +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/LICENSE +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/README.md +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/auth/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/auth/keycloak.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/auth/token_provider.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/cache/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/cache/memory.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/cache/pubsub_listener.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/audit.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/base.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/query_builder.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/repository.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/schema.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/types.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/uow.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/redis/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/http_clients/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/http_clients/pricing/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/http_clients/pricing/pricing_service.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/http_clients/uploadchi/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/http_clients/uploadchi/uploadchi.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/http_clients/uploadchi/uploadchi_async.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/i18n/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/i18n/engine.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/i18n/loaders.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/i18n/locales/fa-IR.json +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/messaging/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/messaging/event_bus.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/messaging/rabbitmq.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/outbox/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/outbox/outbox_consumer.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/outbox/outbox_repo.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/percolation/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/percolation/connection.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/repositories/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/snowflake.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/ticketing/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/ticketing/client.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/config/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/config/logging.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/container.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/application/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/application/base_worker.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/application/bus.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/application/di.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/application/registry.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/application/services/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/application/use_case.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/domain/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/domain/base.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/domain/models.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/auth.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/cache.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/event_bus.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/files.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/outbox.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/repository.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/translation.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/core/ports/uow.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/additional_filed/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/additional_filed/assembler.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/additional_filed/default_field_rules/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/additional_filed/default_field_rules/image_field_rules.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/additional_filed/field_registry.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/additional_filed/resolver.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/dependencies/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/dependencies/client_credential.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/dependencies/db.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/dependencies/uow.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/exceptions.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/middleware/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/middleware/access_log.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/middleware/add_request_context.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/pagination/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/pagination/offset_base.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/routers.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/schema/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/schema/adaptive_schema.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/schema/base_response_model.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/api/schemas.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/cli/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/cli/crypto.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/cli/init_db.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/cli/main.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/cli/ticket.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/jobs/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/jobs/dispatch_outbox.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/interfaces/jobs/sync_tokens.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/types.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/__init__.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/cache_keys.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/cache_registry.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/context.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/crypto.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/flatten_dict.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/flatten_sqlalchemy_result.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/http.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/normalize_mobile.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/read_files.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/redactor.py +0 -0
- {nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/utils/time.py +0 -0
{nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/auth/auth_service.py

@@ -1,3 +1,5 @@
+import functools
+
 import requests
 
 from nlbone.config.settings import get_settings
@@ -15,7 +17,12 @@ class AuthService(BaseAuthService):
         self._timeout = float(s.HTTP_TIMEOUT_SECONDS)
         self._client = requests.session()
 
-    def has_access(self, token: str, permissions: list[str]) -> bool:
+    def has_access(self, token: str, permissions: list[str]) -> bool:
+        data = self.verify_token(token)
+        if not data:
+            return False
+        has_access = [self.client_id + "#" + perm in data.get("allowed_permissions", []) for perm in permissions]
+        return all(has_access)
 
     @cached(ttl=15 * 60)
     def verify_token(self, token: str) -> dict:
@@ -41,7 +48,9 @@ class AuthService(BaseAuthService):
             return result.json()
         return None
 
-    def is_client_token(self, token: str, allowed_clients: set[str] | None = None) -> bool:
+    def is_client_token(self, token: str, allowed_clients: set[str] | None = None) -> bool:
+        data = self.verify_token(token)
+        return data.get('preferred_username').startswith('service-account')
 
     def client_has_access(self, token: str, permissions: list[str], allowed_clients: set[str] | None = None) -> bool:
         data = self.verify_token(token)
@@ -50,4 +59,10 @@ class AuthService(BaseAuthService):
         has_access = [self.client_id + "#" + perm in data.get("allowed_permissions", []) for perm in permissions]
         return all(has_access)
 
-    def get_permissions(self, token: str) -> list[str]:
+    def get_permissions(self, token: str) -> list[str]:
+        data = self.verify_token(token)
+        return data.get('allowed_permissions', [])
+
+@functools.lru_cache(maxsize=1)
+def get_auth_service() -> AuthService:
+    return AuthService()
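Worth noting in this file: the new module-level get_auth_service() factory is memoized with functools.lru_cache, so every caller shares one configured AuthService, and the permission helpers check entries of the form client_id + "#" + permission against the token's allowed_permissions claim. A minimal usage sketch (the token value and permission name below are placeholders, not part of the package):

    from nlbone.adapters.auth.auth_service import get_auth_service

    auth = get_auth_service()                       # same cached instance on every call
    bearer_token = "<access-token-from-request>"    # placeholder

    if auth.has_access(bearer_token, ["orders:read"]):
        # Entries look like "<client_id>#orders:read" per the check above.
        granted = auth.get_permissions(bearer_token)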
nlbone-0.9.1/src/nlbone/adapters/cache/async_redis.py

@@ -0,0 +1,240 @@
+from __future__ import annotations
+
+import inspect
+import json
+import os
+from typing import Any, Awaitable, Callable, Iterable, Mapping, Optional, Sequence, Union
+
+from redis.asyncio import ConnectionPool, Redis
+from redis.asyncio.retry import Retry
+from redis.backoff import ExponentialBackoff
+from redis.exceptions import LockError, RedisError
+
+from nlbone.config.settings import get_settings
+from nlbone.core.ports.cache import AsyncCachePort
+
+
+def _nsver_key(ns: str) -> str:
+    return f"nsver:{ns}"
+
+
+def _tag_key(tag: str) -> str:
+    return f"tag:{tag}"
+
+
+class AsyncRedisCache(AsyncCachePort):
+    def __init__(self, url: str, *, invalidate_channel: Optional[str] = None):
+        self._pool = ConnectionPool.from_url(
+            url,
+            decode_responses=False,
+            max_connections=get_settings().REDIS_MAX_CONNECTIONS,
+            socket_timeout=get_settings().REDIS_TIMEOUT,
+            socket_connect_timeout=get_settings().REDIS_TIMEOUT,
+            health_check_interval=get_settings().REDIS_CHECK_INTERVAL,
+            retry_on_timeout=True,
+            retry=Retry(ExponentialBackoff(), 3),
+        )
+        self._r = Redis(connection_pool=self._pool)
+        self._ch = invalidate_channel or os.getenv("NLBONE_REDIS_INVALIDATE_CHANNEL", "cache:invalidate")
+
+    @property
+    def redis(self) -> Redis:
+        return self._r
+
+    async def close(self):
+        await self._r.close()
+        await self._pool.disconnect()
+
+    async def _current_ver(self, ns: str) -> int:
+        try:
+            v = await self._r.get(_nsver_key(ns))
+            return int(v) if v else 1
+        except (ValueError, TypeError):
+            return 1
+
+    async def _full_key(self, key: str) -> str:
+        try:
+            ns, rest = key.split(":", 1)
+        except ValueError:
+            ns, rest = "app", key
+
+        ver = await self._current_ver(ns)
+        return f"{ns}:{ver}:{rest}"
+
+    # -------- basic --------
+    async def get(self, key: str) -> Optional[bytes]:
+        fk = await self._full_key(key)
+        return await self._r.get(fk)
+
+    async def set(
+        self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None
+    ) -> None:
+        fk = await self._full_key(key)
+
+        async with self._r.pipeline() as pipe:
+            if ttl is None:
+                await pipe.set(fk, value)
+            else:
+                await pipe.setex(fk, ttl, value)
+
+            if tags:
+                for t in tags:
+                    await pipe.sadd(_tag_key(t), fk)
+
+            await pipe.execute()
+
+    async def delete(self, key: str) -> None:
+        fk = await self._full_key(key)
+        await self._r.delete(fk)
+
+    async def exists(self, key: str) -> bool:
+        fk = await self._full_key(key)
+        return bool(await self._r.exists(fk))
+
+    async def ttl(self, key: str) -> Optional[int]:
+        fk = await self._full_key(key)
+        t = await self._r.ttl(fk)
+        return int(t) if t >= 0 else None
+
+    # -------- multi --------
+
+    async def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
+        if not keys:
+            return []
+        # Alternatively, await asyncio.gather(*[self._full_key(k) for k in keys])
+        fks = [await self._full_key(k) for k in keys]
+        return await self._r.mget(fks)
+
+    async def mset(
+        self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None
+    ) -> None:
+        if not items:
+            return
+
+        async with self._r.pipeline() as pipe:
+            for k, v in items.items():
+                fk = await self._full_key(k)
+                if ttl is None:
+                    await pipe.set(fk, v)
+                else:
+                    await pipe.setex(fk, ttl, v)
+
+                if tags:
+                    for t in tags:
+                        await pipe.sadd(_tag_key(t), fk)
+
+            await pipe.execute()
+
+    # -------- json --------
+
+    async def get_json(self, key: str) -> Optional[Any]:
+        b = await self.get(key)
+        if b is None:
+            return None
+        try:
+            return json.loads(b)
+        except json.JSONDecodeError:
+            return None
+
+    async def set_json(
+        self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None
+    ) -> None:
+        payload = json.dumps(value).encode("utf-8")
+        await self.set(key, payload, ttl=ttl, tags=tags)
+
+    # -------- invalidation --------
+
+    async def invalidate_tags(self, tags: Iterable[str]) -> int:
+        removed = 0
+        async with self._r.pipeline() as pipe:
+            for t in tags:
+                tk = _tag_key(t)
+                members = await self._r.smembers(tk)
+                if members:
+                    await pipe.delete(*members)
+                await pipe.delete(tk)
+                removed += len(members or [])
+            await pipe.execute()
+
+        try:
+            payload = json.dumps({"tags": list(tags)}).encode("utf-8")
+            await self._r.publish(self._ch, payload)
+        except RedisError:
+            pass
+
+        return removed
+
+    async def bump_namespace(self, namespace: str) -> int:
+        v = await self._r.incr(_nsver_key(namespace))
+        try:
+            await self._r.publish(self._ch, json.dumps({"ns_bump": namespace}).encode("utf-8"))
+        except RedisError:
+            pass
+        return int(v)
+
+    async def clear_namespace(self, namespace: str) -> int:
+        cnt = 0
+        cursor = 0
+        pattern = f"{namespace}:*"
+
+        while True:
+            cursor, keys = await self._r.scan(cursor=cursor, match=pattern, count=1000)
+            if keys:
+                await self._r.delete(*keys)
+                cnt += len(keys)
+            if cursor == 0:
+                break
+
+        try:
+            await self._r.publish(self._ch, json.dumps({"ns_clear": namespace}).encode("utf-8"))
+        except RedisError:
+            pass
+
+        return cnt
+
+    # -------- dogpile-safe get_or_set --------
+
+    async def get_or_set(
+        self,
+        key: str,
+        producer: Callable[[], Union[bytes, str, Awaitable[Union[bytes, str]]]],
+        *,
+        ttl: int,
+        tags: Optional[Iterable[str]] = None,
+    ) -> bytes:
+        fk = await self._full_key(key)
+
+        val = await self._r.get(fk)
+        if val is not None:
+            return val
+
+        lock_name = f"lock:{fk}"
+
+        try:
+            async with self._r.lock(lock_name, timeout=10, blocking_timeout=5):
+                val = await self._r.get(fk)
+                if val is not None:
+                    return val
+
+                if inspect.iscoroutinefunction(producer):
+                    produced = await producer()
+                else:
+                    produced = producer()
+
+                if isinstance(produced, str):
+                    produced = produced.encode("utf-8")
+
+                await self.set(key, produced, ttl=ttl, tags=tags)
+                return produced
+
+        except LockError:
+            if inspect.iscoroutinefunction(producer):
+                produced = await producer()
+            else:
+                produced = producer()
+
+            if isinstance(produced, str):
+                produced = produced.encode("utf-8")
+
+            await self.set(key, produced, ttl=ttl, tags=tags)
+            return produced
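The new AsyncRedisCache versions keys per namespace (a key "users:42" is stored as "users:<version>:42"), tracks tag membership in Redis sets, and guards get_or_set against cache stampedes with a Redis lock; the producer callable may be sync or async. A rough usage sketch (the URL, key, and tag names are illustrative, not part of the diff):

    import asyncio

    from nlbone.adapters.cache.async_redis import AsyncRedisCache

    async def main() -> None:
        cache = AsyncRedisCache("redis://localhost:6379/0")

        async def load_profile() -> bytes:
            return b'{"id": 42}'               # stand-in for a real lookup

        # First caller computes under a lock; concurrent callers wait or fall back.
        data = await cache.get_or_set("users:42", load_profile, ttl=60, tags=["users"])

        await cache.invalidate_tags(["users"])    # delete every key tagged "users"
        await cache.bump_namespace("users")       # or invalidate the namespace lazily
        await cache.close()

    asyncio.run(main())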
{nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/cache/redis.py

@@ -2,11 +2,15 @@ from __future__ import annotations
 
 import json
 import os
-import time
 from typing import Any, Iterable, Mapping, Optional, Sequence
 
-import redis
+import redis
+from redis import RedisError
+from redis.backoff import ExponentialBackoff
+from redis.exceptions import LockError
+from redis.retry import Retry
 
+from nlbone.config.settings import get_settings
 from nlbone.core.ports.cache import CachePort
 
 
@@ -20,17 +24,32 @@ def _tag_key(tag: str) -> str:
 
 class RedisCache(CachePort):
     def __init__(self, url: str):
-        self.
+        self._pool = redis.ConnectionPool.from_url(
+            url,
+            decode_responses=False,
+            max_connections=get_settings().REDIS_MAX_CONNECTIONS,
+            socket_timeout=get_settings().REDIS_TIMEOUT,
+            socket_connect_timeout=get_settings().REDIS_TIMEOUT,
+            health_check_interval=get_settings().REDIS_CHECK_INTERVAL,
+            retry_on_timeout=True,
+            retry=Retry(ExponentialBackoff(), 3),
+        )
+
+        self.r = redis.Redis(connection_pool=self._pool)
 
     def _current_ver(self, ns: str) -> int:
-
-
+        try:
+            v = self.r.get(_nsver_key(ns))
+            return int(v) if v else 1
+        except (ValueError, TypeError):
+            return 1
 
     def _full_key(self, key: str) -> str:
         try:
             ns, rest = key.split(":", 1)
         except ValueError:
             ns, rest = "app", key
+
         ver = self._current_ver(ns)
         return f"{ns}:{ver}:{rest}"
 
@@ -40,53 +59,67 @@ class RedisCache(CachePort):
 
     def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
         fk = self._full_key(key)
+
+        pipe = self.r.pipeline()
+
         if ttl is None:
-
+            pipe.set(fk, value)
         else:
-
+            pipe.setex(fk, ttl, value)
+
         if tags:
-            pipe = self.r.pipeline()
             for t in tags:
                 pipe.sadd(_tag_key(t), fk)
-
+
+        pipe.execute()
 
     def delete(self, key: str) -> None:
         fk = self._full_key(key)
         self.r.delete(fk)
 
     def exists(self, key: str) -> bool:
-        return bool(self.
+        return bool(self.r.exists(self._full_key(key)))
 
     def ttl(self, key: str) -> Optional[int]:
         fk = self._full_key(key)
         t = self.r.ttl(fk)
-        return
+        return int(t) if t >= 0 else None
 
     def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
+        if not keys:
+            return []
         fks = [self._full_key(k) for k in keys]
         return self.r.mget(fks)
 
     def mset(
         self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None
     ) -> None:
+        if not items:
+            return
+
         pipe = self.r.pipeline()
-
-
-
-
-
-
+
+        for k, v in items.items():
+            fk = self._full_key(k)
+            if ttl is None:
+                pipe.set(fk, v)
+            else:
+                pipe.setex(fk, ttl, v)
+
+            if tags:
+                for t in tags:
+                    pipe.sadd(_tag_key(t), fk)
+
         pipe.execute()
-        if tags:
-            pipe = self.r.pipeline()
-            for t in tags:
-                for k in items.keys():
-                    pipe.sadd(_tag_key(t), self._full_key(k))
-            pipe.execute()
 
     def get_json(self, key: str) -> Optional[Any]:
         b = self.get(key)
-
+        if b is None:
+            return None
+        try:
+            return json.loads(b)
+        except json.JSONDecodeError:
+            return None
 
     def set_json(
         self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None
@@ -96,6 +129,7 @@ class RedisCache(CachePort):
     def invalidate_tags(self, tags: Iterable[str]) -> int:
         removed = 0
         pipe = self.r.pipeline()
+
        for t in tags:
             tk = _tag_key(t)
             keys = self.r.smembers(tk)
@@ -103,12 +137,15 @@ class RedisCache(CachePort):
             pipe.delete(*keys)
             pipe.delete(tk)
             removed += len(keys or [])
+
         pipe.execute()
+
         try:
             ch = os.getenv("NLBONE_REDIS_INVALIDATE_CHANNEL", "cache:invalidate")
             self.r.publish(ch, json.dumps({"tags": list(tags)}).encode("utf-8"))
-        except
+        except RedisError:
             pass
+
         return removed
 
     def bump_namespace(self, namespace: str) -> int:
@@ -119,6 +156,7 @@ class RedisCache(CachePort):
         cnt = 0
         cursor = 0
         pattern = f"{namespace}:*"
+
         while True:
             cursor, keys = self.r.scan(cursor=cursor, match=pattern, count=1000)
             if keys:
@@ -128,24 +166,28 @@ class RedisCache(CachePort):
                 break
         return cnt
 
-    def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
+    def get_or_set(self, key: str, producer, *, ttl: int, tags: Optional[Iterable[str]] = None) -> bytes:
         fk = self._full_key(key)
+
         val = self.r.get(fk)
         if val is not None:
             return val
-
-
-
-
+
+        lock_name = f"lock:{fk}"
+        try:
+            with self.r.lock(lock_name, timeout=10, blocking_timeout=5):
+                val = self.r.get(fk)
+                if val is not None:
+                    return val
+
                 produced: bytes = producer()
                 self.set(key, produced, ttl=ttl, tags=tags)
                 return produced
-
-
-
-
-
-
-
-
-            return produced
+
+        except LockError:
+            try:
+                produced = producer()
+                self.set(key, produced, ttl=ttl, tags=tags)
+                return produced
+            except Exception:
+                raise
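The synchronous RedisCache gains the same shape as the async port: a pooled client with retry and backoff, guarded JSON decoding, and a lock-protected get_or_set. The namespace-versioning scheme both classes rely on can be illustrated as follows (key names and values are made up for the sketch):

    from nlbone.adapters.cache.redis import RedisCache

    cache = RedisCache("redis://localhost:6379/0")

    cache.set("products:list", b"[]", ttl=300, tags=["products"])
    # Physically stored as "products:<ver>:list", e.g. "products:1:list".

    cache.bump_namespace("products")   # INCR "nsver:products" and publish the bump
    # New reads now build "products:2:..." keys, so the old entries are unreachable
    # and age out via their TTL; clear_namespace("products") would delete them eagerly.
    cache.get("products:list")         # -> None until repopulated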
{nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/adapters/db/postgres/engine.py

@@ -15,12 +15,16 @@ from nlbone.config.settings import get_settings
 
 _settings = get_settings()
 
-
+_dsn = _settings.POSTGRES_DB_DSN
 
-if "+asyncpg" in
-
+if "+asyncpg" in _dsn:
+    ASYNC_DSN = _dsn.replace("+asyncpg", "+psycopg")
+elif "+psycopg" not in _dsn:
+    ASYNC_DSN = _dsn.replace("postgresql://", "postgresql+psycopg://")
 else:
-
+    ASYNC_DSN = _dsn
+
+SYNC_DSN = ASYNC_DSN
 
 _async_engine: Optional[AsyncEngine] = None
 _async_session_factory: Optional[async_sessionmaker[AsyncSession]] = None
@@ -38,14 +42,19 @@ def init_async_engine(echo: Optional[bool] = None) -> AsyncEngine:
         ASYNC_DSN,
         echo=_settings.DEBUG if echo is None else echo,
         pool_pre_ping=True,
-        pool_size=
-        max_overflow=
+        pool_size=_settings.POSTGRES_POOL_SIZE,
+        max_overflow=_settings.POSTGRES_MAX_OVERFLOW,
+        pool_recycle=_settings.POSTGRES_POOL_RECYCLE,
+        pool_timeout=_settings.POSTGRES_POOL_TIMEOUT,
     )
+
     _async_session_factory = async_sessionmaker(
         bind=_async_engine,
         expire_on_commit=False,
         autoflush=False,
+        class_=AsyncSession,
     )
+
     return _async_engine
 
 
@@ -54,6 +63,7 @@ async def async_session() -> AsyncGenerator[AsyncSession, Any]:
     if _async_session_factory is None:
         init_async_engine()
     assert _async_session_factory is not None
+
     session = _async_session_factory()
     try:
         yield session
@@ -64,12 +74,6 @@ async def async_session() -> AsyncGenerator[AsyncSession, Any]:
         await session.close()
 
 
-async def async_ping() -> None:
-    eng = init_async_engine()
-    async with eng.connect() as conn:
-        await conn.execute(text("SELECT 1"))
-
-
 def init_sync_engine(echo: Optional[bool] = None) -> Engine:
     global _sync_engine, _sync_session_factory
     if _sync_engine is not None:
@@ -80,11 +84,12 @@ def init_sync_engine(echo: Optional[bool] = None) -> Engine:
         echo=_settings.DEBUG if echo is None else echo,
         pool_pre_ping=True,
         pool_size=_settings.POSTGRES_POOL_SIZE,
-        max_overflow=_settings.
-
-
+        max_overflow=_settings.POSTGRES_MAX_OVERFLOW,
+        pool_recycle=_settings.POSTGRES_POOL_RECYCLE,
+        pool_timeout=_settings.POSTGRES_POOL_TIMEOUT,
         future=True,
     )
+
     _sync_session_factory = sessionmaker(
         bind=_sync_engine,
         autocommit=False,
@@ -110,11 +115,17 @@ def sync_session() -> Generator[Session, None, None]:
         s.close()
 
 
+# --- Health Checks & Getters ---
+
+
+async def async_ping() -> None:
+    async with async_session() as session:
+        await session.execute(text("SELECT 1"))
+
+
 def sync_ping() -> None:
-
-
-    with eng.connect() as conn:
-        conn.execute(text("SELECT 1"))
+    with sync_session() as session:
+        session.execute(text("SELECT 1"))
 
 
 def get_async_session_factory() -> async_sessionmaker[AsyncSession]:
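The old connection-level ping helpers are replaced by session-based ones grouped under the new "Health Checks & Getters" block, so async_ping() and sync_ping() now issue SELECT 1 through the session factories, which are initialised on first use. A sketch of how a service might call them at startup (the wrapper function is illustrative, not part of the diff):

    import asyncio

    from nlbone.adapters.db.postgres.engine import async_ping, sync_ping

    async def check_database() -> bool:
        try:
            await async_ping()        # SELECT 1 through the async session
            return True
        except Exception:
            return False

    asyncio.run(check_database())
    sync_ping()                        # same check for synchronous code paths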
nlbone-0.9.1/src/nlbone/adapters/db/redis/client.py

@@ -0,0 +1,36 @@
+from redis import Redis
+from redis.asyncio import Redis as AsyncRedis
+
+from nlbone.config.settings import get_settings
+
+
+class RedisClient:
+    _client: Redis | None = None
+
+    @classmethod
+    def get_client(cls) -> Redis:
+        if cls._client is None:
+            cls._client = Redis.from_url(get_settings().REDIS_URL, decode_responses=True)
+        return cls._client
+
+    @classmethod
+    def close(cls):
+        if cls._client is not None:
+            cls._client.close()
+            cls._client = None
+
+
+class AsyncRedisClient:
+    _client: AsyncRedis | None = None
+
+    @classmethod
+    def get_client(cls) -> Redis:
+        if cls._client is None:
+            cls._client = AsyncRedis.from_url(get_settings().REDIS_URL, decode_responses=True)
+        return cls._client
+
+    @classmethod
+    async def close(cls):
+        if cls._client is not None:
+            await cls._client.close()
+            cls._client = None
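This new module adds lazily created, process-wide Redis clients built from settings.REDIS_URL (note they use decode_responses=True, unlike the byte-oriented cache adapters). A small sketch of the intended singleton usage (the key and value are placeholders):

    import asyncio

    from nlbone.adapters.db.redis.client import AsyncRedisClient, RedisClient

    r = RedisClient.get_client()          # built once, then reused on every call
    r.set("feature:flag", "on")
    print(r.get("feature:flag"))          # "on" (decoded to str)
    RedisClient.close()                   # closes and resets the cached client

    async def shutdown() -> None:
        client = AsyncRedisClient.get_client()
        await client.ping()
        await AsyncRedisClient.close()

    asyncio.run(shutdown())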
{nlbone-0.8.7 → nlbone-0.9.1}/src/nlbone/config/settings.py

@@ -70,24 +70,37 @@ class Settings(BaseSettings):
     # Database
     # ---------------------------
     POSTGRES_DB_DSN: str = Field(default="postgresql+asyncpg://user:pass@localhost:5432/nlbone")
+    POSTGRES_DB_ECHO: bool = Field(default=False)
     POSTGRES_POOL_SIZE: int = Field(default=5)
-
-
-
-    DB_MAX_OVERFLOW: int = Field(default=10)
+    POSTGRES_MAX_OVERFLOW: int = Field(default=10)
+    POSTGRES_POOL_TIMEOUT: int = Field(default=30)
+    POSTGRES_POOL_RECYCLE: int = Field(default=1800)
 
     # ---------------------------
     # Messaging / Cache
     # ---------------------------
     REDIS_URL: str = Field(default="redis://localhost:6379/0")
+    REDIS_MAX_CONNECTIONS: int = Field(default=5)
+    REDIS_CHECK_INTERVAL: int = Field(default=30)
+    REDIS_TIMEOUT: float = Field(default=3.0)
+
     CACHE_BACKEND: Literal["memory", "redis"] = Field(default="memory")
     CACHE_DEFAULT_TTL_S: int = Field(default=300)
 
-    #
+    # ---------------------------
+    # Event bus / Outbox
+    # ---------------------------
     EVENT_BUS_BACKEND: Literal["inmemory"] = Field(default="inmemory")
     OUTBOX_ENABLED: bool = Field(default=False)
     OUTBOX_POLL_INTERVAL_MS: int = Field(default=500)
 
+    # ---------------------------
+    # APM
+    # ---------------------------
+    APM_SERVER_URL: str = "https://apm.numberland.dev"
+    APM_SECRET_TOKEN: str = ""
+    APM_SAMPLE_RATE: float = Field(default=0.5)
+
     # ---------------------------
     # UPLOADCHI
     # ---------------------------