nlbone 0.4.3__py3-none-any.whl → 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nlbone/adapters/cache/async_redis.py +180 -0
- nlbone/adapters/cache/memory.py +104 -0
- nlbone/adapters/cache/pubsub_listener.py +42 -0
- nlbone/adapters/cache/redis.py +136 -0
- nlbone/adapters/http_clients/__init__.py +2 -0
- nlbone/adapters/http_clients/pricing/__init__.py +1 -0
- nlbone/adapters/http_clients/pricing/pricing_service.py +96 -0
- nlbone/adapters/http_clients/uploadchi/__init__.py +2 -0
- nlbone/adapters/http_clients/{uploadchi.py → uploadchi/uploadchi.py} +10 -34
- nlbone/adapters/http_clients/{uploadchi_async.py → uploadchi/uploadchi_async.py} +11 -10
- nlbone/config/settings.py +22 -12
- nlbone/container.py +19 -1
- nlbone/core/ports/cache.py +37 -0
- nlbone/utils/cache.py +196 -0
- nlbone/utils/cache_keys.py +30 -0
- nlbone/utils/cache_registry.py +23 -0
- nlbone/utils/http.py +29 -0
- {nlbone-0.4.3.dist-info → nlbone-0.6.0.dist-info}/METADATA +2 -1
- {nlbone-0.4.3.dist-info → nlbone-0.6.0.dist-info}/RECORD +23 -11
- /nlbone/adapters/{http_clients/email_gateway.py → cache/__init__.py} +0 -0
- {nlbone-0.4.3.dist-info → nlbone-0.6.0.dist-info}/WHEEL +0 -0
- {nlbone-0.4.3.dist-info → nlbone-0.6.0.dist-info}/entry_points.txt +0 -0
- {nlbone-0.4.3.dist-info → nlbone-0.6.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import json
|
|
3
|
+
import os
|
|
4
|
+
from typing import Optional, Iterable, Any, Mapping, Sequence, List
|
|
5
|
+
|
|
6
|
+
from redis.asyncio import Redis
|
|
7
|
+
from nlbone.core.ports.cache import AsyncCachePort
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _nsver_key(ns: str) -> str:
    """Redis key holding the version counter for namespace *ns*."""
    return f"nsver:{ns}"


def _tag_key(tag: str) -> str:
    """Redis set key tracking every cache key tagged with *tag*."""
    return f"tag:{tag}"


class AsyncRedisCache(AsyncCachePort):
    """Async Redis-backed cache with namespace versioning and tag invalidation.

    Logical keys look like ``"<ns>:<rest>"``; the physical key stored in
    Redis is ``"<ns>:<ver>:<rest>"`` where ``ver`` is a per-namespace
    counter.  Bumping the counter therefore invalidates a whole namespace
    in O(1): old entries become unreachable and eventually expire.
    Invalidation events are published best-effort on a pub/sub channel so
    sibling processes can react.
    """

    def __init__(self, url: str, *, invalidate_channel: str | None = None):
        # decode_responses=False: values are raw bytes by contract.
        self._r = Redis.from_url(url, decode_responses=False)
        self._ch = invalidate_channel or os.getenv("NLBONE_REDIS_INVALIDATE_CHANNEL", "cache:invalidate")

    @property
    def redis(self) -> Redis:
        """Expose the underlying client (e.g. for the pub/sub listener)."""
        return self._r

    async def _current_ver(self, ns: str) -> int:
        """Current version of namespace *ns*; defaults to 1 when unset."""
        v = await self._r.get(_nsver_key(ns))
        return int(v) if v else 1

    async def _full_key(self, key: str) -> str:
        """Map a logical key to its versioned physical key."""
        try:
            ns, rest = key.split(":", 1)
        except ValueError:
            # No namespace prefix: fall back to the "app" namespace.
            ns, rest = "app", key
        ver = await self._current_ver(ns)
        return f"{ns}:{ver}:{rest}"

    # -------- basic --------
    async def get(self, key: str) -> Optional[bytes]:
        fk = await self._full_key(key)
        return await self._r.get(fk)

    async def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        fk = await self._full_key(key)
        if ttl is None:
            await self._r.set(fk, value)
        else:
            await self._r.setex(fk, ttl, value)
        if tags:
            # Register the physical key under each tag set for later
            # tag-based invalidation.
            pipe = self._r.pipeline()
            for t in tags:
                pipe.sadd(_tag_key(t), fk)
            await pipe.execute()

    async def delete(self, key: str) -> None:
        fk = await self._full_key(key)
        await self._r.delete(fk)

    async def exists(self, key: str) -> bool:
        # Compared against None so a stored empty payload still counts.
        return (await self.get(key)) is not None

    async def ttl(self, key: str) -> Optional[int]:
        """Remaining TTL in seconds; None when missing or no expiry set."""
        fk = await self._full_key(key)
        t = await self._r.ttl(fk)
        return None if t < 0 else int(t)

    # -------- multi --------

    async def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
        fks = [await self._full_key(k) for k in keys]
        return await self._r.mget(fks)

    async def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None,
                   tags: Optional[Iterable[str]] = None) -> None:
        # Resolve each physical key exactly once (the original recomputed
        # them again for every tag).
        fks = {k: await self._full_key(k) for k in items}
        pipe = self._r.pipeline()
        for k, v in items.items():
            if ttl is None:
                pipe.set(fks[k], v)
            else:
                pipe.setex(fks[k], ttl, v)
        await pipe.execute()

        if tags:
            pipe = self._r.pipeline()
            for t in tags:
                for fk in fks.values():
                    pipe.sadd(_tag_key(t), fk)
            await pipe.execute()

    # -------- json --------

    async def get_json(self, key: str) -> Optional[Any]:
        """JSON-decoding convenience wrapper over :meth:`get`."""
        b = await self.get(key)
        return None if b is None else json.loads(b)

    async def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None,
                       tags: Optional[Iterable[str]] = None) -> None:
        """JSON-encoding convenience wrapper over :meth:`set`."""
        await self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)

    # -------- invalidation --------

    async def invalidate_tags(self, tags: Iterable[str]) -> int:
        """Delete every key associated with *tags*; returns number removed."""
        # Materialize first: *tags* may be a generator and is consumed twice
        # (deletion loop + pub/sub payload below); the original exhausted it
        # in the loop and then published an empty tag list.
        tag_list = list(tags)
        removed = 0
        pipe = self._r.pipeline()
        for t in tag_list:
            tk = _tag_key(t)
            members = await self._r.smembers(tk)
            if members:
                pipe.delete(*members)
            pipe.delete(tk)
            removed += len(members or [])
        await pipe.execute()

        # Best-effort notification for other processes; the local cache is
        # already consistent even if publishing fails.
        try:
            payload = json.dumps({"tags": tag_list}).encode("utf-8")
            await self._r.publish(self._ch, payload)
        except Exception:
            pass

        return removed

    async def bump_namespace(self, namespace: str) -> int:
        """Increment the namespace version, invalidating all its entries."""
        v = await self._r.incr(_nsver_key(namespace))
        # Optional notification (best-effort).
        try:
            await self._r.publish(self._ch, json.dumps({"ns_bump": namespace}).encode("utf-8"))
        except Exception:
            pass
        return int(v)

    async def clear_namespace(self, namespace: str) -> int:
        """Eagerly delete every key in *namespace* via SCAN; returns count."""
        cnt = 0
        cursor = 0
        pattern = f"{namespace}:*"
        while True:
            cursor, keys = await self._r.scan(cursor=cursor, match=pattern, count=1000)
            if keys:
                await self._r.delete(*keys)
                cnt += len(keys)
            if cursor == 0:
                break
        try:
            await self._r.publish(self._ch, json.dumps({"ns_clear": namespace}).encode("utf-8"))
        except Exception:
            pass
        return cnt

    # -------- dogpile-safe get_or_set --------

    async def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
        """Return the cached value or produce-and-store it.

        A short NX lock lets a single caller run *producer*; concurrent
        callers wait briefly, re-check the cache, and fall back to
        producing the value themselves (dogpile mitigation, not a hard
        guarantee of single execution).
        """
        fk = await self._full_key(key)
        val = await self._r.get(fk)
        if val is not None:
            return val

        lock_key = f"lock:{fk}"
        got = await self._r.set(lock_key, b"1", ex=10, nx=True)
        if got:
            try:
                produced = await self._produce(producer)
                await self.set(key, produced, ttl=ttl, tags=tags)
                return produced
            finally:
                # NOTE(review): if producer outlives the 10s lock TTL this
                # may delete another caller's lock -- acceptable best-effort.
                await self._r.delete(lock_key)

        await asyncio.sleep(0.05)
        val2 = await self._r.get(fk)
        if val2 is not None:
            return val2
        # Fallback: lock holder has not filled the cache yet.
        produced = await self._produce(producer)
        await self.set(key, produced, ttl=ttl, tags=tags)
        return produced

    @staticmethod
    async def _produce(producer) -> bytes:
        """Invoke *producer* (sync or async) and normalize str -> bytes.

        Checking the returned object (rather than
        ``iscoroutinefunction``) also supports partial-wrapped or bound
        async callables.
        """
        result = producer()
        if asyncio.iscoroutine(result):
            result = await result
        if isinstance(result, str):
            result = result.encode("utf-8")
        return result
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import json, threading, time
|
|
2
|
+
from typing import Optional, Iterable, Any, Mapping, Sequence, Dict, Set
|
|
3
|
+
from nlbone.core.ports.cache import CachePort
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class InMemoryCache(CachePort):
    """Thread-safe in-process cache with TTLs, tags, and namespaces.

    Entries are stored as ``key -> (value, absolute_expiry_or_None)``.
    Tag sets map a tag name to the keys carrying it; namespace versions
    are tracked only as counters (keys are matched by ``"<ns>:"`` prefix).
    All public operations hold a reentrant lock.
    """

    def __init__(self):
        # key -> (payload bytes, absolute expiry epoch seconds or None)
        self._data: Dict[str, tuple[bytes, Optional[float]]] = {}
        # tag -> set of keys carrying that tag
        self._tags: Dict[str, Set[str]] = {}
        # namespace -> version counter (informational only in-memory)
        self._ns_ver: Dict[str, int] = {}
        # RLock: public methods call each other (e.g. clear_namespace -> delete)
        self._lock = threading.RLock()

    def _expired(self, key: str) -> bool:
        """True when *key* is absent or past its expiry time."""
        v = self._data.get(key)
        if not v:
            return True
        _, exp = v
        return exp is not None and time.time() > exp

    def _gc(self, key: str) -> None:
        """Drop *key* from storage if it has expired (lazy eviction)."""
        if self._expired(key):
            self._data.pop(key, None)

    def _attach_tags(self, key: str, tags: Optional[Iterable[str]]) -> None:
        """Register *key* under each tag set."""
        if not tags:
            return
        for t in tags:
            self._tags.setdefault(t, set()).add(key)

    def get(self, key: str) -> Optional[bytes]:
        with self._lock:
            self._gc(key)
            v = self._data.get(key)
            return v[0] if v else None

    def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        with self._lock:
            exp = None if ttl is None else time.time() + ttl
            self._data[key] = (value, exp)
            self._attach_tags(key, tags)

    def delete(self, key: str) -> None:
        with self._lock:
            self._data.pop(key, None)
            # Keep tag sets consistent: drop the key everywhere.
            for s in self._tags.values():
                s.discard(key)

    def exists(self, key: str) -> bool:
        return self.get(key) is not None

    def ttl(self, key: str) -> Optional[int]:
        """Remaining TTL in seconds; None when missing or no expiry set."""
        with self._lock:
            self._gc(key)
            v = self._data.get(key)
            if not v:
                return None
            _, exp = v
            if exp is None:
                return None
            rem = int(exp - time.time())
            return rem if rem >= 0 else 0

    def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
        return [self.get(k) for k in keys]

    def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None,
             tags: Optional[Iterable[str]] = None) -> None:
        for k, v in items.items():
            self.set(k, v, ttl=ttl, tags=tags)

    def get_json(self, key: str) -> Optional[Any]:
        """JSON-decoding convenience wrapper over :meth:`get`."""
        b = self.get(key)
        return None if b is None else json.loads(b)

    def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None,
                 tags: Optional[Iterable[str]] = None) -> None:
        """JSON-encoding convenience wrapper over :meth:`set`."""
        self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)

    def invalidate_tags(self, tags: Iterable[str]) -> int:
        """Delete every key carrying any of *tags*; returns number removed.

        Fix over the original: deletions go through :meth:`delete`, which
        also removes the key from every OTHER tag set. Previously removed
        keys lingered in unrelated tag sets indefinitely (unbounded growth
        of stale references).
        """
        removed = 0
        with self._lock:
            for t in tags:
                keys = self._tags.pop(t, set())
                for k in keys:
                    if k in self._data:
                        removed += 1
                    self.delete(k)
        return removed

    def bump_namespace(self, namespace: str) -> int:
        """Increment and return the namespace version counter."""
        with self._lock:
            self._ns_ver[namespace] = self._ns_ver.get(namespace, 0) + 1
            return self._ns_ver[namespace]

    def clear_namespace(self, namespace: str) -> int:
        """Delete every key with the ``"<namespace>:"`` prefix; returns count."""
        with self._lock:
            prefix = namespace + ":"
            keys = [k for k in self._data if k.startswith(prefix)]
            for k in keys:
                self.delete(k)
            return len(keys)

    def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
        """Return the cached value, or run *producer* and cache its result.

        The producer runs while the lock is held, so concurrent callers
        are serialized (no dogpile in-process).
        """
        with self._lock:
            b = self.get(key)
            if b is not None:
                return b
            val: bytes = producer()
            self.set(key, val, ttl=ttl, tags=tags)
            return val
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
import asyncio
|
|
3
|
+
import json
|
|
4
|
+
from typing import Awaitable, Callable, Optional
|
|
5
|
+
from redis.asyncio import Redis
|
|
6
|
+
|
|
7
|
+
async def run_cache_invalidation_listener(
    redis: Redis,
    channel: str = "cache:invalidate",
    *,
    on_tags: Optional[Callable[[list[str]], Awaitable[None]]] = None,
    on_ns_bump: Optional[Callable[[str], Awaitable[None]]] = None,
    on_ns_clear: Optional[Callable[[str], Awaitable[None]]] = None,
    stop_event: Optional[asyncio.Event] = None,
) -> None:
    """Consume cache-invalidation events from *channel* and dispatch callbacks.

    Each message is a JSON object carrying one or more of the keys
    ``"tags"`` (list of invalidated tags), ``"ns_bump"`` and ``"ns_clear"``
    (namespace names); the matching callback is awaited for each key
    present. Undecodable messages are skipped. Runs until *stop_event*
    (checked between polls) is set; the subscription is always torn down
    on exit.
    """
    pubsub = redis.pubsub()
    await pubsub.subscribe(channel)
    try:
        while not (stop_event and stop_event.is_set()):
            msg = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)
            if msg is None:
                await asyncio.sleep(0.05)
                continue
            try:
                event = json.loads(msg["data"])
            except Exception:
                # Ignore malformed payloads rather than killing the listener.
                continue

            if on_tags and "tags" in event:
                await on_tags(list(event.get("tags") or []))
            if on_ns_bump and "ns_bump" in event:
                await on_ns_bump(str(event["ns_bump"]))
            if on_ns_clear and "ns_clear" in event:
                await on_ns_clear(str(event["ns_clear"]))
    finally:
        # Unsubscribe first, then close, even if unsubscribe raises.
        try:
            await pubsub.unsubscribe(channel)
        finally:
            await pubsub.close()
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
import json, os, time
|
|
3
|
+
from typing import Optional, Iterable, Any, Mapping, Sequence, List, Set
|
|
4
|
+
import redis # redis-py (sync)
|
|
5
|
+
from nlbone.core.ports.cache import CachePort
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def _nsver_key(ns: str) -> str:
    """Redis key holding the version counter for namespace *ns*."""
    return f"nsver:{ns}"


def _tag_key(tag: str) -> str:
    """Redis set key tracking every cache key tagged with *tag*."""
    return f"tag:{tag}"


class RedisCache(CachePort):
    """Synchronous Redis cache with namespace versioning and tag invalidation.

    Mirrors ``AsyncRedisCache``: logical keys ``"<ns>:<rest>"`` are stored
    as ``"<ns>:<ver>:<rest>"`` so bumping a namespace version invalidates
    all of its entries at once.
    """

    def __init__(self, url: str):
        # decode_responses=False: values are raw bytes by contract.
        self.r = redis.Redis.from_url(url, decode_responses=False)

    def _current_ver(self, ns: str) -> int:
        """Current version of namespace *ns*; defaults to 1 when unset."""
        v = self.r.get(_nsver_key(ns))
        return int(v) if v else 1

    def _full_key(self, key: str) -> str:
        """Map a logical key to its versioned physical key."""
        try:
            ns, rest = key.split(":", 1)
        except ValueError:
            # No namespace prefix: fall back to the "app" namespace.
            ns, rest = "app", key
        ver = self._current_ver(ns)
        return f"{ns}:{ver}:{rest}"

    def get(self, key: str) -> Optional[bytes]:
        return self.r.get(self._full_key(key))

    def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        fk = self._full_key(key)
        if ttl is None:
            self.r.set(fk, value)
        else:
            self.r.setex(fk, ttl, value)
        if tags:
            pipe = self.r.pipeline()
            for t in tags:
                pipe.sadd(_tag_key(t), fk)
            pipe.execute()

    def delete(self, key: str) -> None:
        self.r.delete(self._full_key(key))

    def exists(self, key: str) -> bool:
        # Fix: compare against None so a stored empty b"" still counts as
        # present; the old `bool(self.get(key))` reported it as missing
        # (the async adapter already used `is not None`).
        return self.get(key) is not None

    def ttl(self, key: str) -> Optional[int]:
        """Remaining TTL in seconds; None when missing or no expiry set."""
        t = self.r.ttl(self._full_key(key))
        return None if t < 0 else int(t)

    def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
        return self.r.mget([self._full_key(k) for k in keys])

    def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        # Resolve each physical key exactly once (the original recomputed
        # them again for every tag).
        fks = {k: self._full_key(k) for k in items}
        pipe = self.r.pipeline()
        for k, v in items.items():
            if ttl is None:
                pipe.set(fks[k], v)
            else:
                pipe.setex(fks[k], ttl, v)
        pipe.execute()
        if tags:
            pipe = self.r.pipeline()
            for t in tags:
                for fk in fks.values():
                    pipe.sadd(_tag_key(t), fk)
            pipe.execute()

    def get_json(self, key: str) -> Optional[Any]:
        """JSON-decoding convenience wrapper over :meth:`get`."""
        b = self.get(key)
        return None if b is None else json.loads(b)

    def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        """JSON-encoding convenience wrapper over :meth:`set`."""
        self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)

    def invalidate_tags(self, tags: Iterable[str]) -> int:
        """Delete every key associated with *tags*; returns number removed."""
        # Materialize first: *tags* may be a generator and is consumed twice
        # (deletion loop + pub/sub payload below); the original exhausted it
        # in the loop and then published an empty tag list.
        tag_list = list(tags)
        removed = 0
        pipe = self.r.pipeline()
        for t in tag_list:
            tk = _tag_key(t)
            keys = self.r.smembers(tk)
            if keys:
                pipe.delete(*keys)
            pipe.delete(tk)
            removed += len(keys or [])
        pipe.execute()
        # Best-effort cross-process notification.
        try:
            ch = os.getenv("NLBONE_REDIS_INVALIDATE_CHANNEL", "cache:invalidate")
            self.r.publish(ch, json.dumps({"tags": tag_list}).encode("utf-8"))
        except Exception:
            pass
        return removed

    def bump_namespace(self, namespace: str) -> int:
        """Increment the namespace version, invalidating all its entries."""
        # NOTE(review): unlike the async adapter, no pub/sub notification is
        # published here -- confirm whether cross-process listeners need one.
        return int(self.r.incr(_nsver_key(namespace)))

    def clear_namespace(self, namespace: str) -> int:
        """Eagerly delete every key in *namespace* via SCAN; returns count."""
        cnt = 0
        cursor = 0
        pattern = f"{namespace}:*"
        while True:
            cursor, keys = self.r.scan(cursor=cursor, match=pattern, count=1000)
            if keys:
                self.r.delete(*keys)
                cnt += len(keys)
            if cursor == 0:
                break
        return cnt

    def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
        """Return the cached value or produce-and-store it.

        A short NX lock lets a single caller run *producer*; concurrent
        callers sleep briefly, re-check the cache, and fall back to
        producing the value themselves (dogpile mitigation).
        """
        fk = self._full_key(key)
        val = self.r.get(fk)
        if val is not None:
            return val
        lock_key = f"lock:{fk}"
        got = self.r.set(lock_key, b"1", nx=True, ex=10)
        if got:
            try:
                produced: bytes = producer()
                self.set(key, produced, ttl=ttl, tags=tags)
                return produced
            finally:
                # NOTE(review): if producer outlives the 10s lock TTL this
                # may delete another caller's lock -- acceptable best-effort.
                self.r.delete(lock_key)
        time.sleep(0.05)
        val2 = self.r.get(fk)
        if val2 is not None:
            return val2
        # Fallback: lock holder has not filled the cache yet.
        produced = producer()
        self.set(key, produced, ttl=ttl, tags=tags)
        return produced
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .pricing_service import PricingService, CalculatePriceIn, CalculatePriceOut
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
from typing import Optional, Literal, List
|
|
3
|
+
|
|
4
|
+
import httpx
|
|
5
|
+
import requests
|
|
6
|
+
from pydantic import BaseModel, Field, NonNegativeInt, RootModel
|
|
7
|
+
|
|
8
|
+
from nlbone.adapters.auth.token_provider import ClientTokenProvider
|
|
9
|
+
from nlbone.config.settings import get_settings
|
|
10
|
+
from nlbone.utils.http import normalize_https_base, auth_headers
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class PricingError(Exception):
    """Raised when the pricing service returns a non-success response."""
    pass


class CalculatePriceIn(BaseModel):
    """A single item submitted to the pricing service for calculation."""
    # Free-form calculation parameters, forwarded verbatim to the service.
    params: dict[str, str]
    product_id: NonNegativeInt | None = None
    product_title: str | None = None


class DiscountType(str, Enum):
    """How the `discount` value of a Pricing is interpreted."""
    percent = "percent"
    amount = "amount"


class Product(BaseModel):
    """Product reference as returned inside a pricing rule."""
    id: Optional[int] = Field(None, description="Nullable product id")
    service_product_id: NonNegativeInt
    title: Optional[str] = None


class Pricing(BaseModel):
    """Resolved price for a rule, either formula-driven or static."""
    source: Literal["formula", "static"]
    price: float
    discount: Optional[float] = None
    discount_type: Optional[DiscountType] = None
    # Parameters the price was resolved against; schema defined server-side.
    params: dict


class Formula(BaseModel):
    """Metadata of a server-side pricing formula."""
    id: int
    title: str
    key: str
    status: str
    description: str


class PricingRule(BaseModel):
    """One matched pricing rule returned by the service."""
    product: Product
    segment_name: str | None
    formula: Optional[Formula] = None
    # Presumably higher specificity means a more precise rule match --
    # TODO confirm against the pricing service docs.
    specificity: int
    matched_fields: list
    pricing: Pricing


class CalculatePriceOut(RootModel[List[PricingRule]]):
    """Response payload: a bare JSON list of matched pricing rules."""
    pass
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class PricingService:
    """HTTP client for the pricing service's price-calculation endpoint."""

    def __init__(
        self,
        token_provider: ClientTokenProvider,
        base_url: Optional[str] = None,
        timeout_seconds: Optional[float] = None,
        client: httpx.Client | None = None,
    ) -> None:
        s = get_settings()
        self._base_url = normalize_https_base(base_url or str(s.PRICING_SERVICE_URL), enforce_https=False)
        self._timeout = timeout_seconds or float(s.HTTP_TIMEOUT_SECONDS)
        # NOTE(review): the annotation says httpx.Client but the default is a
        # requests session, and `calculate` passes requests-only per-request
        # kwargs (`verify`) -- an actual httpx.Client would fail. Confirm the
        # intended client type.
        self._client = client or requests.session()
        self._token_provider = token_provider

    def calculate(self, items: list[CalculatePriceIn]) -> CalculatePriceOut:
        """POST *items* to ``/priced`` and return the matched pricing rules.

        Returns an empty result for 204 / empty-body responses; raises
        PricingError(status_code, body) on any other non-200 status.
        """
        body = {"items": [i.model_dump() for i in items]}

        r = self._client.post(
            f"{self._base_url}/priced",
            headers=auth_headers(self._token_provider.get_access_token()),
            json=body,
            timeout=self._timeout,
            # SECURITY: TLS certificate verification is disabled, exposing
            # this call to man-in-the-middle attacks; restrict to trusted
            # internal networks or make it configurable.
            verify=False,
        )

        if r.status_code not in (200, 204):
            raise PricingError(r.status_code, r.text)

        if r.status_code == 204 or not r.content:
            # Fix: the original `model_validate(root=[])` raised a TypeError
            # (pydantic v2 takes the object positionally); validate an empty
            # list instead.
            return CalculatePriceOut.model_validate([])

        return CalculatePriceOut.model_validate(r.json())
|
|
@@ -1,8 +1,6 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
|
-
import json
|
|
4
3
|
from typing import Any, Optional
|
|
5
|
-
from urllib.parse import urlparse, urlunparse
|
|
6
4
|
|
|
7
5
|
import httpx
|
|
8
6
|
import requests
|
|
@@ -10,6 +8,7 @@ import requests
|
|
|
10
8
|
from nlbone.adapters.auth.token_provider import ClientTokenProvider
|
|
11
9
|
from nlbone.config.settings import get_settings
|
|
12
10
|
from nlbone.core.ports.files import FileServicePort
|
|
11
|
+
from nlbone.utils.http import auth_headers, build_list_query, normalize_https_base
|
|
13
12
|
|
|
14
13
|
|
|
15
14
|
class UploadchiError(RuntimeError):
|
|
@@ -26,21 +25,6 @@ def _resolve_token(explicit: str | None) -> str | None:
|
|
|
26
25
|
return s.UPLOADCHI_TOKEN.get_secret_value() if s.UPLOADCHI_TOKEN else None
|
|
27
26
|
|
|
28
27
|
|
|
29
|
-
def _auth_headers(token: str | None) -> dict[str, str]:
|
|
30
|
-
return {"Authorization": f"Bearer {token}"} if token else {}
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
def _build_list_query(
|
|
34
|
-
limit: int, offset: int, filters: dict[str, Any] | None, sort: list[tuple[str, str]] | None
|
|
35
|
-
) -> dict[str, Any]:
|
|
36
|
-
q: dict[str, Any] = {"limit": limit, "offset": offset}
|
|
37
|
-
if filters:
|
|
38
|
-
q["filters"] = json.dumps(filters)
|
|
39
|
-
if sort:
|
|
40
|
-
q["sort"] = ",".join([f"{f}:{o}" for f, o in sort])
|
|
41
|
-
return q
|
|
42
|
-
|
|
43
|
-
|
|
44
28
|
def _filename_from_cd(cd: str | None, fallback: str) -> str:
|
|
45
29
|
if not cd:
|
|
46
30
|
return fallback
|
|
@@ -49,14 +33,6 @@ def _filename_from_cd(cd: str | None, fallback: str) -> str:
|
|
|
49
33
|
return fallback
|
|
50
34
|
|
|
51
35
|
|
|
52
|
-
def _normalize_https_base(url: str) -> str:
|
|
53
|
-
p = urlparse(url.strip())
|
|
54
|
-
p = p._replace(scheme="https") # enforce https
|
|
55
|
-
if p.path.endswith("/"):
|
|
56
|
-
p = p._replace(path=p.path.rstrip("/"))
|
|
57
|
-
return str(urlunparse(p))
|
|
58
|
-
|
|
59
|
-
|
|
60
36
|
class UploadchiClient(FileServicePort):
|
|
61
37
|
def __init__(
|
|
62
38
|
self,
|
|
@@ -66,7 +42,7 @@ class UploadchiClient(FileServicePort):
|
|
|
66
42
|
client: httpx.Client | None = None,
|
|
67
43
|
) -> None:
|
|
68
44
|
s = get_settings()
|
|
69
|
-
self._base_url =
|
|
45
|
+
self._base_url = normalize_https_base(base_url or str(s.UPLOADCHI_BASE_URL))
|
|
70
46
|
self._timeout = timeout_seconds or float(s.HTTP_TIMEOUT_SECONDS)
|
|
71
47
|
self._client = client or requests.session()
|
|
72
48
|
self._token_provider = token_provider
|
|
@@ -80,7 +56,7 @@ class UploadchiClient(FileServicePort):
|
|
|
80
56
|
tok = _resolve_token(token)
|
|
81
57
|
files = {"file": (filename, file_bytes)}
|
|
82
58
|
data = (params or {}).copy()
|
|
83
|
-
r = self._client.post(self._base_url, files=files, data=data, headers=
|
|
59
|
+
r = self._client.post(self._base_url, files=files, data=data, headers=auth_headers(tok))
|
|
84
60
|
if r.status_code >= 400:
|
|
85
61
|
raise UploadchiError(r.status_code, r.text)
|
|
86
62
|
return r.json()
|
|
@@ -91,7 +67,7 @@ class UploadchiClient(FileServicePort):
|
|
|
91
67
|
tok = _resolve_token(token)
|
|
92
68
|
r = self._client.post(
|
|
93
69
|
f"{self._base_url}/{file_id}/commit",
|
|
94
|
-
headers=
|
|
70
|
+
headers=auth_headers(tok or self._token_provider.get_access_token()),
|
|
95
71
|
)
|
|
96
72
|
if r.status_code not in (204, 200):
|
|
97
73
|
raise UploadchiError(r.status_code, r.text)
|
|
@@ -102,7 +78,7 @@ class UploadchiClient(FileServicePort):
|
|
|
102
78
|
tok = _resolve_token(token)
|
|
103
79
|
r = self._client.post(
|
|
104
80
|
f"{self._base_url}/{file_id}/rollback",
|
|
105
|
-
headers=
|
|
81
|
+
headers=auth_headers(tok or self._token_provider.get_access_token()),
|
|
106
82
|
)
|
|
107
83
|
if r.status_code not in (204, 200):
|
|
108
84
|
raise UploadchiError(r.status_code, r.text)
|
|
@@ -116,22 +92,22 @@ class UploadchiClient(FileServicePort):
|
|
|
116
92
|
token: str | None = None,
|
|
117
93
|
) -> dict:
|
|
118
94
|
tok = _resolve_token(token)
|
|
119
|
-
q =
|
|
120
|
-
r = self._client.get(self._base_url, params=q, headers=
|
|
95
|
+
q = build_list_query(limit, offset, filters, sort)
|
|
96
|
+
r = self._client.get(self._base_url, params=q, headers=auth_headers(tok))
|
|
121
97
|
if r.status_code >= 400:
|
|
122
98
|
raise UploadchiError(r.status_code, r.text)
|
|
123
99
|
return r.json()
|
|
124
100
|
|
|
125
101
|
def get_file(self, file_id: str, token: str | None = None) -> dict:
|
|
126
102
|
tok = _resolve_token(token)
|
|
127
|
-
r = self._client.get(f"{self._base_url}/{file_id}", headers=
|
|
103
|
+
r = self._client.get(f"{self._base_url}/{file_id}", headers=auth_headers(tok))
|
|
128
104
|
if r.status_code >= 400:
|
|
129
105
|
raise UploadchiError(r.status_code, r.text)
|
|
130
106
|
return r.json()
|
|
131
107
|
|
|
132
108
|
def download_file(self, file_id: str, token: str | None = None) -> tuple[bytes, str, str]:
|
|
133
109
|
tok = _resolve_token(token)
|
|
134
|
-
r = self._client.get(f"{self._base_url}/{file_id}/download", headers=
|
|
110
|
+
r = self._client.get(f"{self._base_url}/{file_id}/download", headers=auth_headers(tok))
|
|
135
111
|
if r.status_code >= 400:
|
|
136
112
|
raise UploadchiError(r.status_code, r.text)
|
|
137
113
|
filename = _filename_from_cd(r.headers.get("content-disposition"), fallback=f"file-{file_id}")
|
|
@@ -140,6 +116,6 @@ class UploadchiClient(FileServicePort):
|
|
|
140
116
|
|
|
141
117
|
def delete_file(self, file_id: str, token: str | None = None) -> None:
|
|
142
118
|
tok = _resolve_token(token)
|
|
143
|
-
r = self._client.delete(f"{self._base_url}/{file_id}", headers=
|
|
119
|
+
r = self._client.delete(f"{self._base_url}/{file_id}", headers=auth_headers(tok))
|
|
144
120
|
if r.status_code not in (204, 200):
|
|
145
121
|
raise UploadchiError(r.status_code, r.text)
|
|
@@ -7,8 +7,9 @@ import httpx
|
|
|
7
7
|
from nlbone.config.settings import get_settings
|
|
8
8
|
from nlbone.core.ports.files import AsyncFileServicePort
|
|
9
9
|
|
|
10
|
-
from .uploadchi import UploadchiError,
|
|
11
|
-
from
|
|
10
|
+
from nlbone.adapters.http_clients.uploadchi.uploadchi import UploadchiError, _filename_from_cd, _resolve_token
|
|
11
|
+
from nlbone.adapters.auth.token_provider import ClientTokenProvider
|
|
12
|
+
from nlbone.utils.http import auth_headers, build_list_query
|
|
12
13
|
|
|
13
14
|
|
|
14
15
|
class UploadchiAsyncClient(AsyncFileServicePort):
|
|
@@ -36,7 +37,7 @@ class UploadchiAsyncClient(AsyncFileServicePort):
|
|
|
36
37
|
tok = _resolve_token(token)
|
|
37
38
|
files = {"file": (filename, file_bytes)}
|
|
38
39
|
data = (params or {}).copy()
|
|
39
|
-
r = await self._client.post("", files=files, data=data, headers=
|
|
40
|
+
r = await self._client.post("", files=files, data=data, headers=auth_headers(tok))
|
|
40
41
|
if r.status_code >= 400:
|
|
41
42
|
raise UploadchiError(r.status_code, await r.aread())
|
|
42
43
|
return r.json()
|
|
@@ -46,7 +47,7 @@ class UploadchiAsyncClient(AsyncFileServicePort):
|
|
|
46
47
|
raise UploadchiError(detail="token_provider is not provided", status=400)
|
|
47
48
|
tok = _resolve_token(token)
|
|
48
49
|
r = await self._client.post(
|
|
49
|
-
f"/{file_id}/commit", headers=
|
|
50
|
+
f"/{file_id}/commit", headers=auth_headers(tok or self._token_provider.get_access_token())
|
|
50
51
|
)
|
|
51
52
|
if r.status_code not in (204, 200):
|
|
52
53
|
raise UploadchiError(r.status_code, await r.aread())
|
|
@@ -56,7 +57,7 @@ class UploadchiAsyncClient(AsyncFileServicePort):
|
|
|
56
57
|
raise UploadchiError(detail="token_provider is not provided", status=400)
|
|
57
58
|
tok = _resolve_token(token)
|
|
58
59
|
r = await self._client.post(
|
|
59
|
-
f"/{file_id}/rollback", headers=
|
|
60
|
+
f"/{file_id}/rollback", headers=auth_headers(tok or self._token_provider.get_access_token())
|
|
60
61
|
)
|
|
61
62
|
if r.status_code not in (204, 200):
|
|
62
63
|
raise UploadchiError(r.status_code, await r.aread())
|
|
@@ -70,22 +71,22 @@ class UploadchiAsyncClient(AsyncFileServicePort):
|
|
|
70
71
|
token: str | None = None,
|
|
71
72
|
) -> dict:
|
|
72
73
|
tok = _resolve_token(token)
|
|
73
|
-
q =
|
|
74
|
-
r = await self._client.get("", params=q, headers=
|
|
74
|
+
q = build_list_query(limit, offset, filters, sort)
|
|
75
|
+
r = await self._client.get("", params=q, headers=auth_headers(tok))
|
|
75
76
|
if r.status_code >= 400:
|
|
76
77
|
raise UploadchiError(r.status_code, await r.aread())
|
|
77
78
|
return r.json()
|
|
78
79
|
|
|
79
80
|
async def get_file(self, file_id: str, token: str | None = None) -> dict:
|
|
80
81
|
tok = _resolve_token(token)
|
|
81
|
-
r = await self._client.get(f"/{file_id}", headers=
|
|
82
|
+
r = await self._client.get(f"/{file_id}", headers=auth_headers(tok))
|
|
82
83
|
if r.status_code >= 400:
|
|
83
84
|
raise UploadchiError(r.status_code, await r.aread())
|
|
84
85
|
return r.json()
|
|
85
86
|
|
|
86
87
|
async def download_file(self, file_id: str, token: str | None = None) -> tuple[AsyncIterator[bytes], str, str]:
|
|
87
88
|
tok = _resolve_token(token)
|
|
88
|
-
r = await self._client.get(f"/{file_id}/download", headers=
|
|
89
|
+
r = await self._client.get(f"/{file_id}/download", headers=auth_headers(tok), stream=True)
|
|
89
90
|
if r.status_code >= 400:
|
|
90
91
|
body = await r.aread()
|
|
91
92
|
raise UploadchiError(r.status_code, body.decode(errors="ignore"))
|
|
@@ -103,7 +104,7 @@ class UploadchiAsyncClient(AsyncFileServicePort):
|
|
|
103
104
|
|
|
104
105
|
async def delete_file(self, file_id: str, token: str | None = None) -> None:
|
|
105
106
|
tok = _resolve_token(token)
|
|
106
|
-
r = await self._client.delete(f"/{file_id}", headers=
|
|
107
|
+
r = await self._client.delete(f"/{file_id}", headers=auth_headers(tok))
|
|
107
108
|
if r.status_code not in (204, 200):
|
|
108
109
|
body = await r.aread()
|
|
109
110
|
raise UploadchiError(r.status_code, body.decode(errors="ignore"))
|
nlbone/config/settings.py
CHANGED
|
@@ -8,19 +8,22 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
|
|
|
8
8
|
|
|
9
9
|
|
|
10
10
|
def _guess_env_file() -> str | None:
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
11
|
+
try:
|
|
12
|
+
explicit = os.getenv("NLBONE_ENV_FILE")
|
|
13
|
+
if explicit:
|
|
14
|
+
return explicit
|
|
14
15
|
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
16
|
+
cwd_env = Path.cwd() / ".env"
|
|
17
|
+
if cwd_env.exists():
|
|
18
|
+
return str(cwd_env)
|
|
18
19
|
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
20
|
+
for i in range(0, 8):
|
|
21
|
+
p = Path.cwd().resolve().parents[i]
|
|
22
|
+
f = p / ".env"
|
|
23
|
+
if f.exists():
|
|
24
|
+
return str(f)
|
|
25
|
+
except Exception as e:
|
|
26
|
+
raise Exception("Failed to guess env file path!") from e
|
|
24
27
|
|
|
25
28
|
|
|
26
29
|
def is_production_env() -> bool:
|
|
@@ -65,6 +68,8 @@ class Settings(BaseSettings):
|
|
|
65
68
|
# Messaging / Cache
|
|
66
69
|
# ---------------------------
|
|
67
70
|
REDIS_URL: str = Field(default="redis://localhost:6379/0")
|
|
71
|
+
CACHE_BACKEND: Literal["memory", "redis"] = Field(default="memory")
|
|
72
|
+
CACHE_DEFAULT_TTL_S: int = Field(default=300)
|
|
68
73
|
|
|
69
74
|
# --- Event bus / Outbox ---
|
|
70
75
|
EVENT_BUS_BACKEND: Literal["inmemory"] = Field(default="inmemory")
|
|
@@ -80,10 +85,15 @@ class Settings(BaseSettings):
|
|
|
80
85
|
# ---------------------------
|
|
81
86
|
# PERCOLATE
|
|
82
87
|
# ---------------------------
|
|
83
|
-
ELASTIC_PERCOLATE_URL: str =
|
|
88
|
+
ELASTIC_PERCOLATE_URL: str = Field(default="http://localhost:9200")
|
|
84
89
|
ELASTIC_PERCOLATE_USER: str = Field(default="")
|
|
85
90
|
ELASTIC_PERCOLATE_PASS: SecretStr = Field(default="")
|
|
86
91
|
|
|
92
|
+
# ---------------------------
|
|
93
|
+
# Pricing
|
|
94
|
+
# ---------------------------
|
|
95
|
+
PRICING_SERVICE_URL: AnyHttpUrl = Field(default="https://pricing.numberland.ir/v1")
|
|
96
|
+
|
|
87
97
|
model_config = SettingsConfigDict(
|
|
88
98
|
env_prefix="",
|
|
89
99
|
env_file=None,
|
nlbone/container.py
CHANGED
|
@@ -6,12 +6,17 @@ from dependency_injector import containers, providers
|
|
|
6
6
|
|
|
7
7
|
from nlbone.adapters.auth.keycloak import KeycloakAuthService
|
|
8
8
|
from nlbone.adapters.auth.token_provider import ClientTokenProvider
|
|
9
|
+
from nlbone.adapters.cache.async_redis import AsyncRedisCache
|
|
10
|
+
from nlbone.adapters.cache.memory import InMemoryCache
|
|
11
|
+
from nlbone.adapters.cache.redis import RedisCache
|
|
9
12
|
from nlbone.adapters.db.postgres import AsyncSqlAlchemyUnitOfWork, SqlAlchemyUnitOfWork
|
|
10
13
|
from nlbone.adapters.db.postgres.engine import get_async_session_factory, get_sync_session_factory
|
|
14
|
+
from nlbone.adapters.http_clients import PricingService
|
|
11
15
|
from nlbone.adapters.http_clients.uploadchi import UploadchiClient
|
|
12
|
-
from nlbone.adapters.http_clients.uploadchi_async import UploadchiAsyncClient
|
|
16
|
+
from nlbone.adapters.http_clients.uploadchi.uploadchi_async import UploadchiAsyncClient
|
|
13
17
|
from nlbone.adapters.messaging import InMemoryEventBus
|
|
14
18
|
from nlbone.core.ports import EventBusPort
|
|
19
|
+
from nlbone.core.ports.cache import CachePort, AsyncCachePort
|
|
15
20
|
from nlbone.core.ports.files import AsyncFileServicePort, FileServicePort
|
|
16
21
|
|
|
17
22
|
|
|
@@ -35,6 +40,19 @@ class Container(containers.DeclarativeContainer):
|
|
|
35
40
|
token_provider=token_provider)
|
|
36
41
|
afiles_service: providers.Singleton[AsyncFileServicePort] = providers.Singleton(UploadchiAsyncClient,
|
|
37
42
|
token_provider=token_provider)
|
|
43
|
+
pricing_service: providers.Singleton[PricingService] = providers.Singleton(PricingService, token_provider=token_provider)
|
|
44
|
+
|
|
45
|
+
cache: providers.Singleton[CachePort] = providers.Selector(
|
|
46
|
+
config.CACHE_BACKEND,
|
|
47
|
+
memory=providers.Singleton(InMemoryCache),
|
|
48
|
+
redis=providers.Singleton(RedisCache, url=config.REDIS_URL),
|
|
49
|
+
)
|
|
50
|
+
|
|
51
|
+
async_cache: providers.Singleton[AsyncCachePort] = providers.Selector(
|
|
52
|
+
config.CACHE_BACKEND,
|
|
53
|
+
memory=providers.Singleton(InMemoryCache),
|
|
54
|
+
redis=providers.Singleton(AsyncRedisCache, url=config.REDIS_URL),
|
|
55
|
+
)
|
|
38
56
|
|
|
39
57
|
|
|
40
58
|
def create_container(settings: Optional[Any] = None) -> Container:
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
from typing import Protocol, Optional, Iterable, Any, Mapping, Sequence, Tuple, TypeVar, Callable
|
|
2
|
+
|
|
3
|
+
T = TypeVar("T")
|
|
4
|
+
|
|
5
|
+
class CachePort(Protocol):
|
|
6
|
+
def get(self, key: str) -> Optional[bytes]: ...
|
|
7
|
+
def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
|
|
8
|
+
def delete(self, key: str) -> None: ...
|
|
9
|
+
def exists(self, key: str) -> bool: ...
|
|
10
|
+
def ttl(self, key: str) -> Optional[int]: ...
|
|
11
|
+
|
|
12
|
+
def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]: ...
|
|
13
|
+
def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
|
|
14
|
+
|
|
15
|
+
def get_json(self, key: str) -> Optional[Any]: ...
|
|
16
|
+
def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
|
|
17
|
+
|
|
18
|
+
def invalidate_tags(self, tags: Iterable[str]) -> int: ...
|
|
19
|
+
def bump_namespace(self, namespace: str) -> int: ... # versioned keys
|
|
20
|
+
def clear_namespace(self, namespace: str) -> int: ...
|
|
21
|
+
|
|
22
|
+
def get_or_set(self, key: str, producer: Callable[[], bytes], *, ttl: int, tags: Optional[Iterable[str]] = None) -> bytes: ...
|
|
23
|
+
|
|
24
|
+
class AsyncCachePort(Protocol):
|
|
25
|
+
async def get(self, key: str) -> Optional[bytes]: ...
|
|
26
|
+
async def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
|
|
27
|
+
async def delete(self, key: str) -> None: ...
|
|
28
|
+
async def exists(self, key: str) -> bool: ...
|
|
29
|
+
async def ttl(self, key: str) -> Optional[int]: ...
|
|
30
|
+
async def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]: ...
|
|
31
|
+
async def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
|
|
32
|
+
async def get_json(self, key: str) -> Optional[Any]: ...
|
|
33
|
+
async def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
|
|
34
|
+
async def invalidate_tags(self, tags: Iterable[str]) -> int: ...
|
|
35
|
+
async def bump_namespace(self, namespace: str) -> int: ...
|
|
36
|
+
async def clear_namespace(self, namespace: str) -> int: ...
|
|
37
|
+
async def get_or_set(self, key: str, producer, *, ttl: int, tags: Optional[Iterable[str]] = None) -> bytes: ...
|
nlbone/utils/cache.py
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import inspect
|
|
3
|
+
import json
|
|
4
|
+
from typing import Any, Callable, Iterable, Optional
|
|
5
|
+
|
|
6
|
+
from makefun import wraps as mf_wraps
|
|
7
|
+
from nlbone.utils.cache_registry import get_cache
|
|
8
|
+
|
|
9
|
+
try:
|
|
10
|
+
from pydantic import BaseModel # v1/v2
|
|
11
|
+
except Exception: # pragma: no cover
|
|
12
|
+
class BaseModel: # minimal fallback
|
|
13
|
+
pass
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# -------- helpers --------
|
|
17
|
+
|
|
18
|
+
def _bind(func: Callable, args, kwargs):
|
|
19
|
+
sig = inspect.signature(func)
|
|
20
|
+
bound = sig.bind_partial(*args, **kwargs)
|
|
21
|
+
bound.apply_defaults()
|
|
22
|
+
return bound
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _key_from_template(
|
|
26
|
+
tpl: Optional[str],
|
|
27
|
+
func: Callable,
|
|
28
|
+
args,
|
|
29
|
+
kwargs,
|
|
30
|
+
) -> str:
|
|
31
|
+
"""Format key template with bound arguments or build a stable default."""
|
|
32
|
+
bound = _bind(func, args, kwargs)
|
|
33
|
+
if tpl:
|
|
34
|
+
return tpl.format(**bound.arguments)
|
|
35
|
+
|
|
36
|
+
# Default stable key: module:qualname:sha of args
|
|
37
|
+
payload = json.dumps(bound.arguments, sort_keys=True, default=str)
|
|
38
|
+
return f"{func.__module__}:{func.__qualname__}:{hash(payload)}"
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _format_tags(
|
|
42
|
+
tag_tpls: Optional[Iterable[str]],
|
|
43
|
+
func: Callable,
|
|
44
|
+
args,
|
|
45
|
+
kwargs,
|
|
46
|
+
) -> list[str] | None:
|
|
47
|
+
if not tag_tpls:
|
|
48
|
+
return None
|
|
49
|
+
bound = _bind(func, args, kwargs)
|
|
50
|
+
return [t.format(**bound.arguments) for t in tag_tpls]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def default_serialize(val: Any) -> bytes:
|
|
54
|
+
"""Serialize BaseModel (v2/v1) or JSON-serializable data to bytes."""
|
|
55
|
+
if isinstance(val, BaseModel):
|
|
56
|
+
if hasattr(val, "model_dump_json"): # pydantic v2
|
|
57
|
+
return val.model_dump_json().encode("utf-8")
|
|
58
|
+
if hasattr(val, "json"): # pydantic v1
|
|
59
|
+
return val.json().encode("utf-8")
|
|
60
|
+
return json.dumps(val, default=str).encode("utf-8")
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def default_deserialize(b: bytes) -> Any:
|
|
64
|
+
return json.loads(b)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def _is_async_method(obj: Any, name: str) -> bool:
|
|
68
|
+
meth = getattr(obj, name, None)
|
|
69
|
+
return asyncio.iscoroutinefunction(meth)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _run_maybe_async(func: Callable, *args, **kwargs):
|
|
73
|
+
"""Call a function that may be async from sync context."""
|
|
74
|
+
result = func(*args, **kwargs)
|
|
75
|
+
if inspect.isawaitable(result):
|
|
76
|
+
return asyncio.run(result)
|
|
77
|
+
return result
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
# -------- cache decorators --------
|
|
81
|
+
|
|
82
|
+
def cached(
|
|
83
|
+
*,
|
|
84
|
+
ttl: int,
|
|
85
|
+
key: str | None = None,
|
|
86
|
+
tags: Iterable[str] | None = None,
|
|
87
|
+
serializer: Callable[[Any], bytes] = default_serialize,
|
|
88
|
+
deserializer: Callable[[bytes], Any] = default_deserialize,
|
|
89
|
+
cache_resolver: Optional[Callable[[], Any]] = None,
|
|
90
|
+
):
|
|
91
|
+
"""
|
|
92
|
+
Framework-agnostic caching for SYNC or ASYNC callables.
|
|
93
|
+
- Preserves function signature (good for FastAPI/OpenAPI).
|
|
94
|
+
- Works with sync/async cache backends (CachePort / AsyncCachePort).
|
|
95
|
+
- `key` & `tags` are string templates, e.g. "file:{file_id}".
|
|
96
|
+
"""
|
|
97
|
+
def deco(func: Callable):
|
|
98
|
+
is_async_func = asyncio.iscoroutinefunction(func)
|
|
99
|
+
|
|
100
|
+
if is_async_func:
|
|
101
|
+
@mf_wraps(func)
|
|
102
|
+
async def aw(*args, **kwargs):
|
|
103
|
+
cache = (cache_resolver or get_cache)()
|
|
104
|
+
k = _key_from_template(key, func, args, kwargs)
|
|
105
|
+
tg = _format_tags(tags, func, args, kwargs)
|
|
106
|
+
|
|
107
|
+
# GET
|
|
108
|
+
if _is_async_method(cache, "get"):
|
|
109
|
+
cached_bytes = await cache.get(k)
|
|
110
|
+
else:
|
|
111
|
+
cached_bytes = cache.get(k)
|
|
112
|
+
|
|
113
|
+
if cached_bytes is not None:
|
|
114
|
+
return deserializer(cached_bytes)
|
|
115
|
+
|
|
116
|
+
# MISS -> compute
|
|
117
|
+
result = await func(*args, **kwargs)
|
|
118
|
+
|
|
119
|
+
# SET
|
|
120
|
+
data = serializer(result)
|
|
121
|
+
if _is_async_method(cache, "set"):
|
|
122
|
+
await cache.set(k, data, ttl=ttl, tags=tg)
|
|
123
|
+
else:
|
|
124
|
+
cache.set(k, data, ttl=ttl, tags=tg)
|
|
125
|
+
|
|
126
|
+
return result
|
|
127
|
+
|
|
128
|
+
return aw
|
|
129
|
+
|
|
130
|
+
# SYNC callable
|
|
131
|
+
@mf_wraps(func)
|
|
132
|
+
def sw(*args, **kwargs):
|
|
133
|
+
cache = (cache_resolver or get_cache)()
|
|
134
|
+
k = _key_from_template(key, func, args, kwargs)
|
|
135
|
+
tg = _format_tags(tags, func, args, kwargs)
|
|
136
|
+
|
|
137
|
+
# GET (may be async)
|
|
138
|
+
if _is_async_method(cache, "get"):
|
|
139
|
+
cached_bytes = _run_maybe_async(cache.get, k)
|
|
140
|
+
else:
|
|
141
|
+
cached_bytes = cache.get(k)
|
|
142
|
+
|
|
143
|
+
if cached_bytes is not None:
|
|
144
|
+
return deserializer(cached_bytes)
|
|
145
|
+
|
|
146
|
+
# MISS -> compute
|
|
147
|
+
result = func(*args, **kwargs)
|
|
148
|
+
|
|
149
|
+
# SET (may be async)
|
|
150
|
+
data = serializer(result)
|
|
151
|
+
if _is_async_method(cache, "set"):
|
|
152
|
+
_run_maybe_async(cache.set, k, data, ttl=ttl, tags=tg)
|
|
153
|
+
else:
|
|
154
|
+
cache.set(k, data, ttl=ttl, tags=tg)
|
|
155
|
+
|
|
156
|
+
return result
|
|
157
|
+
|
|
158
|
+
return sw
|
|
159
|
+
|
|
160
|
+
return deco
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def invalidate_by_tags(tags_builder: Callable[..., Iterable[str]]):
|
|
164
|
+
"""
|
|
165
|
+
Invalidate computed tags after function finishes.
|
|
166
|
+
Works with sync or async functions and cache backends.
|
|
167
|
+
"""
|
|
168
|
+
def deco(func: Callable):
|
|
169
|
+
is_async_func = asyncio.iscoroutinefunction(func)
|
|
170
|
+
|
|
171
|
+
if is_async_func:
|
|
172
|
+
@mf_wraps(func)
|
|
173
|
+
async def aw(*args, **kwargs):
|
|
174
|
+
out = await func(*args, **kwargs)
|
|
175
|
+
cache = get_cache()
|
|
176
|
+
tags = list(tags_builder(*args, **kwargs))
|
|
177
|
+
if _is_async_method(cache, "invalidate_tags"):
|
|
178
|
+
await cache.invalidate_tags(tags)
|
|
179
|
+
else:
|
|
180
|
+
cache.invalidate_tags(tags)
|
|
181
|
+
return out
|
|
182
|
+
return aw
|
|
183
|
+
|
|
184
|
+
@mf_wraps(func)
|
|
185
|
+
def sw(*args, **kwargs):
|
|
186
|
+
out = func(*args, **kwargs)
|
|
187
|
+
cache = get_cache()
|
|
188
|
+
tags = list(tags_builder(*args, **kwargs))
|
|
189
|
+
if _is_async_method(cache, "invalidate_tags"):
|
|
190
|
+
_run_maybe_async(cache.invalidate_tags, tags)
|
|
191
|
+
else:
|
|
192
|
+
cache.invalidate_tags(tags)
|
|
193
|
+
return out
|
|
194
|
+
return sw
|
|
195
|
+
|
|
196
|
+
return deco
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import hashlib
|
|
2
|
+
import json
|
|
3
|
+
import random
|
|
4
|
+
from typing import Any, Mapping
|
|
5
|
+
|
|
6
|
+
def _stable_params(params: Mapping[str, Any]) -> str:
|
|
7
|
+
return json.dumps(params, sort_keys=True, separators=(",", ":"))
|
|
8
|
+
|
|
9
|
+
def make_key(ns: str, *parts: str) -> str:
|
|
10
|
+
safe_parts = [p.replace(" ", "_") for p in parts if p]
|
|
11
|
+
return f"{ns}:{':'.join(safe_parts)}" if safe_parts else f"{ns}:root"
|
|
12
|
+
|
|
13
|
+
def make_param_key(ns: str, base: str, params: Mapping[str, Any]) -> str:
|
|
14
|
+
payload = _stable_params(params)
|
|
15
|
+
digest = hashlib.sha256(payload.encode("utf-8")).hexdigest()[:16]
|
|
16
|
+
return f"{ns}:{base}:{digest}"
|
|
17
|
+
|
|
18
|
+
def tag_entity(ns: str, entity_id: Any) -> str:
|
|
19
|
+
return f"{ns}:{entity_id}"
|
|
20
|
+
|
|
21
|
+
def tag_list(ns: str, **filters) -> str:
|
|
22
|
+
if not filters:
|
|
23
|
+
return f"{ns}:list"
|
|
24
|
+
payload = _stable_params(filters)
|
|
25
|
+
digest = hashlib.md5(payload.encode("utf-8")).hexdigest()[:12]
|
|
26
|
+
return f"{ns}:list:{digest}"
|
|
27
|
+
|
|
28
|
+
def ttl_with_jitter(base_ttl: int, *, jitter_ratio: float = 0.1) -> int:
|
|
29
|
+
jitter = int(base_ttl * jitter_ratio)
|
|
30
|
+
return base_ttl + random.randint(-jitter, jitter)
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
from typing import Callable, Optional, TypeVar
|
|
2
|
+
from contextvars import ContextVar
|
|
3
|
+
|
|
4
|
+
T = TypeVar("T")
|
|
5
|
+
|
|
6
|
+
_global_resolver: Optional[Callable[[], T]] = None
|
|
7
|
+
|
|
8
|
+
_ctx_resolver: ContextVar[Optional[Callable[[], T]]] = ContextVar("_ctx_resolver", default=None)
|
|
9
|
+
|
|
10
|
+
def set_cache_resolver(fn: Callable[[], T]) -> None:
|
|
11
|
+
"""Set process-wide cache resolver (e.g., lambda: container.cache())."""
|
|
12
|
+
global _global_resolver
|
|
13
|
+
_global_resolver = fn
|
|
14
|
+
|
|
15
|
+
def set_context_cache_resolver(fn: Optional[Callable[[], T]]) -> None:
|
|
16
|
+
"""Override resolver in current context (useful in tests/background tasks)."""
|
|
17
|
+
_ctx_resolver.set(fn)
|
|
18
|
+
|
|
19
|
+
def get_cache() -> T:
|
|
20
|
+
fn = _ctx_resolver.get() or _global_resolver
|
|
21
|
+
if fn is None:
|
|
22
|
+
raise RuntimeError("Cache resolver not configured. Call set_cache_resolver(...) first.")
|
|
23
|
+
return fn()
|
nlbone/utils/http.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from typing import Any
|
|
5
|
+
from urllib.parse import urlparse, urlunparse
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def auth_headers(token: str | None) -> dict[str, str]:
|
|
9
|
+
return {"Authorization": f"Bearer {token}"} if token else {}
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def build_list_query(
|
|
13
|
+
limit: int, offset: int, filters: dict[str, Any] | None, sort: list[tuple[str, str]] | None
|
|
14
|
+
) -> dict[str, Any]:
|
|
15
|
+
q: dict[str, Any] = {"limit": limit, "offset": offset}
|
|
16
|
+
if filters:
|
|
17
|
+
q["filters"] = json.dumps(filters)
|
|
18
|
+
if sort:
|
|
19
|
+
q["sort"] = ",".join([f"{f}:{o}" for f, o in sort])
|
|
20
|
+
return q
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def normalize_https_base(url: str, enforce_https: bool = True) -> str:
|
|
24
|
+
p = urlparse(url.strip())
|
|
25
|
+
if enforce_https:
|
|
26
|
+
p = p._replace(scheme="https") # enforce https
|
|
27
|
+
if p.path.endswith("/"):
|
|
28
|
+
p = p._replace(path=p.path.rstrip("/"))
|
|
29
|
+
return str(urlunparse(p))
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: nlbone
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.6.0
|
|
4
4
|
Summary: Backbone package for interfaces and infrastructure in Python projects
|
|
5
5
|
Author-email: Amir Hosein Kahkbazzadeh <a.khakbazzadeh@gmail.com>
|
|
6
6
|
License: MIT
|
|
@@ -11,6 +11,7 @@ Requires-Dist: dependency-injector>=4.48.1
|
|
|
11
11
|
Requires-Dist: elasticsearch==8.14.0
|
|
12
12
|
Requires-Dist: fastapi>=0.116
|
|
13
13
|
Requires-Dist: httpx>=0.27
|
|
14
|
+
Requires-Dist: makefun>=1.16.0
|
|
14
15
|
Requires-Dist: psycopg>=3.2.9
|
|
15
16
|
Requires-Dist: pydantic-settings>=2.0
|
|
16
17
|
Requires-Dist: pydantic>=2.0
|
|
@@ -1,10 +1,15 @@
|
|
|
1
1
|
nlbone/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
-
nlbone/container.py,sha256=
|
|
2
|
+
nlbone/container.py,sha256=hOkc8F6Zx4fP5HxRp_uBYQyxhi6QYRAIqUPFEYd18aQ,3361
|
|
3
3
|
nlbone/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
4
|
nlbone/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
5
5
|
nlbone/adapters/auth/__init__.py,sha256=hkDHvsFhw_UiOHG9ZSMqjiAhK4wumEforitveSZswVw,42
|
|
6
6
|
nlbone/adapters/auth/keycloak.py,sha256=dfAxODiARfR8y3FKoWNo9fjfb6QyWd_Qr7AbJ0E78AM,2729
|
|
7
7
|
nlbone/adapters/auth/token_provider.py,sha256=NhqjqTUsoZO4gbK-cybs0OkKydFN7CPTxAiypEw081o,1433
|
|
8
|
+
nlbone/adapters/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
9
|
+
nlbone/adapters/cache/async_redis.py,sha256=E61tpBwAElMoGgyJ8CWO5G81n7u1-Wi_kVRDoh5rJuM,6206
|
|
10
|
+
nlbone/adapters/cache/memory.py,sha256=lRJqMdD2lbojndQ_dJ7AulfhSYY_uqjgXvd4ZT_J8co,3655
|
|
11
|
+
nlbone/adapters/cache/pubsub_listener.py,sha256=2y6DbWsERXlMOkmJSJMg8hNU9MTGwR7BhwQRveivh50,1457
|
|
12
|
+
nlbone/adapters/cache/redis.py,sha256=gMNfUIk1HkeXVBmtAtVchcr59ll06E4wam9rGWAhalM,4535
|
|
8
13
|
nlbone/adapters/db/__init__.py,sha256=saW-wN4E0NZ2_ldi-nrm5AgsH7EULNSa62lYMwfy1oo,252
|
|
9
14
|
nlbone/adapters/db/postgres/__init__.py,sha256=6JYJH0xZs3aR-zuyMpRhsdzFugmqz8nprwTQLprqhZc,313
|
|
10
15
|
nlbone/adapters/db/postgres/audit.py,sha256=zFzL-pXmfjcp5YLx6vBYczprsJjEPxSYKhQNR3WjKL0,4675
|
|
@@ -16,10 +21,12 @@ nlbone/adapters/db/postgres/schema.py,sha256=NlE7Rr8uXypsw4oWkdZhZwcIBHQEPIpoHLx
|
|
|
16
21
|
nlbone/adapters/db/postgres/uow.py,sha256=nRxNpY-WoWHpym-XeZ8VHm0MYvtB9wuopOeNdV_ebk8,2088
|
|
17
22
|
nlbone/adapters/db/redis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
18
23
|
nlbone/adapters/db/redis/client.py,sha256=XAKcmU0lpPvWPMS0fChVQ3iSJfHV1g4bMOCgJaj2bCI,512
|
|
19
|
-
nlbone/adapters/http_clients/__init__.py,sha256=
|
|
20
|
-
nlbone/adapters/http_clients/
|
|
21
|
-
nlbone/adapters/http_clients/
|
|
22
|
-
nlbone/adapters/http_clients/
|
|
24
|
+
nlbone/adapters/http_clients/__init__.py,sha256=Ed75OHxkJ401XNHpfx3pz20e7Z_YFSAoKUct8o-SI9k,150
|
|
25
|
+
nlbone/adapters/http_clients/pricing/__init__.py,sha256=RLSnG9mgaw1Qwt5Qsc4SwRCjNiqWeyDpvFY79PlhkFE,81
|
|
26
|
+
nlbone/adapters/http_clients/pricing/pricing_service.py,sha256=cdYEF_ZY01QbhSXoifotELdp2n8GyzdKIC_nWHP696E,2568
|
|
27
|
+
nlbone/adapters/http_clients/uploadchi/__init__.py,sha256=VjLtgOPvNufoQvNCrwPLwlzJicdfJCZUqBK4VEXNQ3o,104
|
|
28
|
+
nlbone/adapters/http_clients/uploadchi/uploadchi.py,sha256=_D6Gv5nj4TJXcEq_YJWut92vEu69LflLTw6m2dlD7C0,4864
|
|
29
|
+
nlbone/adapters/http_clients/uploadchi/uploadchi_async.py,sha256=EGFI1ZhfMxotHkORyU1cJVGG4tsFAbnITZViIc3J7vI,4766
|
|
23
30
|
nlbone/adapters/messaging/__init__.py,sha256=UDAwu3s-JQmOZjWz2Nu0SgHhnkbeOhKDH_zLD75oWMY,40
|
|
24
31
|
nlbone/adapters/messaging/event_bus.py,sha256=w-NPwDiPMLFPU_enRQCtfQXOALsXfg31u57R8sG_-1U,781
|
|
25
32
|
nlbone/adapters/messaging/redis.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@@ -27,7 +34,7 @@ nlbone/adapters/percolation/__init__.py,sha256=viq5WZqcSLlRBF5JwuyTD_IZaNWfpKzGJ
|
|
|
27
34
|
nlbone/adapters/percolation/connection.py,sha256=xZ-OtQVbyQYH83TUizS0UWI85Iic-AhUjiuyzO0e46s,331
|
|
28
35
|
nlbone/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
29
36
|
nlbone/config/logging.py,sha256=rGQz9W5ZgUFXBK74TFmTuwx_WMJhD8zPN39zfKVxwnI,4115
|
|
30
|
-
nlbone/config/settings.py,sha256=
|
|
37
|
+
nlbone/config/settings.py,sha256=W3NHZP6yjIyyKiGWNkjlUt_RYFKkcIfMBoKih_z_0Bs,3911
|
|
31
38
|
nlbone/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
32
39
|
nlbone/core/application/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
33
40
|
nlbone/core/application/base_worker.py,sha256=uHqglsd33jXl_0kmkFlB4KQ5NdI1wArcOeQmdcifPQc,1192
|
|
@@ -41,6 +48,7 @@ nlbone/core/domain/events.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
|
|
|
41
48
|
nlbone/core/domain/models.py,sha256=e2ig7PMBBpmc8pdHLNMnXhucMXr9OUq-G7bKGTq9Qj0,1458
|
|
42
49
|
nlbone/core/ports/__init__.py,sha256=gx-Ubj7h-1vvnu56sNnRqmer7HHfW3rX2WLl-0AX5U0,214
|
|
43
50
|
nlbone/core/ports/auth.py,sha256=Gh0yQsxx2OD6pDH2_p-khsA-bVoypP1juuqMoSfjZUo,493
|
|
51
|
+
nlbone/core/ports/cache.py,sha256=C9exWYPZsppCpkrAMiGfJuf4ehHkibtFfvB1aFbWuO4,2257
|
|
44
52
|
nlbone/core/ports/event_bus.py,sha256=_Om1GOOT-F325oV6_LJXtLdx4vu5i7KrpTDD3qPJXU0,325
|
|
45
53
|
nlbone/core/ports/files.py,sha256=7Ov2ITYRpPwwDTZGCeNVISg8e3A9l08jbOgpTImgfK8,1863
|
|
46
54
|
nlbone/core/ports/messaging.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@@ -69,11 +77,15 @@ nlbone/interfaces/cli/main.py,sha256=65XXNmH0dX9Lib_yW5iQXo7wp_GRFwx9xXDYgy2LJtY
|
|
|
69
77
|
nlbone/interfaces/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
70
78
|
nlbone/interfaces/jobs/sync_tokens.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
71
79
|
nlbone/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
80
|
+
nlbone/utils/cache.py,sha256=3sKtWoGBlrEvkg74r4C-Sx8u41OTup_G4s-Q57IaOXg,5921
|
|
81
|
+
nlbone/utils/cache_keys.py,sha256=a1yRMUuyRJ2-CswjtgVkLcJAeT2QmThLQ5kQWvpOKL4,1069
|
|
82
|
+
nlbone/utils/cache_registry.py,sha256=0csax1-GmKBcsZmQYWI4Bs0X9_BMo6Jdoac-e9Zusv8,819
|
|
72
83
|
nlbone/utils/context.py,sha256=MmclJ24BG2uvSTg1IK7J-Da9BhVFDQ5ag4Ggs2FF1_w,1600
|
|
84
|
+
nlbone/utils/http.py,sha256=krNfMoD9kTFE_E5yX-0DzgH97H-UgAiKRMXmATlwgIM,876
|
|
73
85
|
nlbone/utils/redactor.py,sha256=JbbPs2Qtnz0zHN85BGPYQNWwBigXMSzmMEmmZZOTs_U,1277
|
|
74
86
|
nlbone/utils/time.py,sha256=6e0A4_hG1rYDCrWoOklEGVJstBf8j9XSSTT7VNV2K9Y,1272
|
|
75
|
-
nlbone-0.
|
|
76
|
-
nlbone-0.
|
|
77
|
-
nlbone-0.
|
|
78
|
-
nlbone-0.
|
|
79
|
-
nlbone-0.
|
|
87
|
+
nlbone-0.6.0.dist-info/METADATA,sha256=402hd-duWpvMKlnSwcgbbYLJQ0OQFgwKtWd9zlfkdB8,2194
|
|
88
|
+
nlbone-0.6.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
89
|
+
nlbone-0.6.0.dist-info/entry_points.txt,sha256=CpIL45t5nbhl1dGQPhfIIDfqqak3teK0SxPGBBr7YCk,59
|
|
90
|
+
nlbone-0.6.0.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
91
|
+
nlbone-0.6.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|