nlbone 0.4.2__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,40 @@
1
+ import threading
2
+ import time
3
+ from typing import Optional, Dict, Any
4
+
5
+ from nlbone.adapters.auth.keycloak import KeycloakAuthService
6
+
7
+
8
class ClientTokenProvider:
    """Caches a Keycloak client-credentials token and refreshes it before expiry.

    Thread-safe: refreshes use double-checked locking, so only one thread
    fetches a new token while concurrent callers wait and reuse it.
    """

    def __init__(self, auth: "KeycloakAuthService", *, skew_seconds: int = 30) -> None:
        # skew_seconds: refresh this many seconds before actual expiry so
        # in-flight requests never carry an almost-expired token.
        self._auth = auth
        self._skew = skew_seconds
        self._lock = threading.Lock()
        self._token: Optional[str] = None  # cached access_token
        self._expires_at: float = 0.0      # epoch seconds when the token expires

    def _needs_refresh(self) -> bool:
        """True when no token is cached or it is within the skew window of expiry."""
        return not self._token or time.time() >= (self._expires_at - self._skew)

    def get_access_token(self) -> str:
        """Return a valid access token, refreshing it via Keycloak if needed.

        Raises:
            RuntimeError: if Keycloak's response lacks an ``access_token``.
        """
        # Fast path: unlocked read. A stale check is benign — it only sends
        # us through the locked slow path below.
        if not self._needs_refresh():
            return self._token  # type: ignore[return-value]

        with self._lock:
            # Double-check: another thread may have refreshed while we waited.
            if not self._needs_refresh():
                return self._token  # type: ignore[return-value]

            data: Dict[str, Any] = self._auth.get_client_token()
            access_token = data.get("access_token")
            if not access_token:
                raise RuntimeError("Keycloak: missing access_token")
            # Tolerate a missing or non-numeric expires_in (fall back to 60s)
            # instead of raising on a malformed token response.
            try:
                expires_in = int(data.get("expires_in", 60))
            except (TypeError, ValueError):
                expires_in = 60
            self._token = access_token
            self._expires_at = time.time() + max(1, expires_in)
            return self._token

    def get_auth_header(self) -> str:
        """Return the token formatted as an HTTP ``Authorization`` header value."""
        return f"Bearer {self.get_access_token()}"
File without changes
@@ -0,0 +1,180 @@
1
+ import asyncio
2
+ import json
3
+ import os
4
+ from typing import Optional, Iterable, Any, Mapping, Sequence, List
5
+
6
+ from redis.asyncio import Redis
7
+ from nlbone.core.ports.cache import AsyncCachePort
8
+
9
+
10
+ def _nsver_key(ns: str) -> str: return f"nsver:{ns}"
11
+ def _tag_key(tag: str) -> str: return f"tag:{tag}"
12
+
13
class AsyncRedisCache(AsyncCachePort):
    """Async Redis-backed cache with namespace versioning and tag invalidation.

    Logical keys look like ``"<namespace>:<rest>"``; the stored (physical) key
    is ``"<namespace>:<version>:<rest>"``, so bumping a namespace version
    lazily invalidates every key in it without scanning.
    """

    def __init__(self, url: str, *, invalidate_channel: str | None = None):
        # decode_responses=False: values are raw bytes (callers handle encoding).
        self._r = Redis.from_url(url, decode_responses=False)
        self._ch = invalidate_channel or os.getenv("NLBONE_REDIS_INVALIDATE_CHANNEL", "cache:invalidate")

    @property
    def redis(self) -> Redis:
        """Expose the underlying client for advanced callers."""
        return self._r

    async def _current_ver(self, ns: str) -> int:
        """Current version counter of namespace *ns* (defaults to 1)."""
        v = await self._r.get(_nsver_key(ns))
        return int(v) if v else 1

    async def _full_key(self, key: str) -> str:
        """Map a logical key to its versioned physical key."""
        try:
            ns, rest = key.split(":", 1)
        except ValueError:
            # No namespace prefix: fall back to the "app" namespace.
            ns, rest = "app", key
        ver = await self._current_ver(ns)
        return f"{ns}:{ver}:{rest}"

    # -------- basic --------

    async def get(self, key: str) -> Optional[bytes]:
        fk = await self._full_key(key)
        return await self._r.get(fk)

    async def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        fk = await self._full_key(key)
        if ttl is None:
            await self._r.set(fk, value)
        else:
            await self._r.setex(fk, ttl, value)
        if tags:
            pipe = self._r.pipeline()
            for t in tags:
                pipe.sadd(_tag_key(t), fk)
            await pipe.execute()

    async def delete(self, key: str) -> None:
        fk = await self._full_key(key)
        await self._r.delete(fk)

    async def exists(self, key: str) -> bool:
        # "is not None" so an empty-bytes value still counts as existing.
        return (await self.get(key)) is not None

    async def ttl(self, key: str) -> Optional[int]:
        """Remaining TTL in seconds; None when the key is missing or persistent."""
        fk = await self._full_key(key)
        t = await self._r.ttl(fk)
        return None if t < 0 else int(t)

    # -------- multi --------

    async def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
        fks = [await self._full_key(k) for k in keys]
        return await self._r.mget(fks)

    async def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None,
                   tags: Optional[Iterable[str]] = None) -> None:
        pipe = self._r.pipeline()
        if ttl is None:
            for k, v in items.items():
                pipe.set(await self._full_key(k), v)
        else:
            for k, v in items.items():
                pipe.setex(await self._full_key(k), ttl, v)
        await pipe.execute()

        if tags:
            # Materialize once: *tags* may be a one-shot iterable, and it is
            # consumed once per item key below.
            tag_list = list(tags)
            pipe = self._r.pipeline()
            for k in items.keys():
                fk = await self._full_key(k)
                for t in tag_list:
                    pipe.sadd(_tag_key(t), fk)
            await pipe.execute()

    # -------- json --------

    async def get_json(self, key: str) -> Optional[Any]:
        b = await self.get(key)
        return None if b is None else json.loads(b)

    async def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None,
                       tags: Optional[Iterable[str]] = None) -> None:
        await self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)

    # -------- invalidation --------

    async def invalidate_tags(self, tags: Iterable[str]) -> int:
        """Delete every key associated with any of *tags*; return count removed.

        *tags* is materialized up front so one-shot iterables work (it is read
        again when publishing the invalidation event).
        """
        tag_list = list(tags)
        removed = 0
        pipe = self._r.pipeline()
        for t in tag_list:
            tk = _tag_key(t)
            members = await self._r.smembers(tk)
            if members:
                pipe.delete(*members)
            pipe.delete(tk)
            removed += len(members or [])
        await pipe.execute()

        # Best-effort notification for other processes.
        try:
            payload = json.dumps({"tags": tag_list}).encode("utf-8")
            await self._r.publish(self._ch, payload)
        except Exception:
            pass

        return removed

    async def bump_namespace(self, namespace: str) -> int:
        """Increment the namespace version, lazily invalidating its keys."""
        v = await self._r.incr(_nsver_key(namespace))
        # Optional cross-process notification (best effort).
        try:
            await self._r.publish(self._ch, json.dumps({"ns_bump": namespace}).encode("utf-8"))
        except Exception:
            pass
        return int(v)

    async def clear_namespace(self, namespace: str) -> int:
        """SCAN-delete every physical key under *namespace*; return the count."""
        cnt = 0
        cursor = 0
        pattern = f"{namespace}:*"
        while True:
            cursor, keys = await self._r.scan(cursor=cursor, match=pattern, count=1000)
            if keys:
                await self._r.delete(*keys)
                cnt += len(keys)
            if cursor == 0:
                break
        try:
            await self._r.publish(self._ch, json.dumps({"ns_clear": namespace}).encode("utf-8"))
        except Exception:
            pass
        return cnt

    # -------- dogpile-safe get_or_set --------

    @staticmethod
    async def _produce(producer) -> bytes:
        """Invoke *producer* (sync or async) and normalize the result to bytes."""
        result = await producer() if asyncio.iscoroutinefunction(producer) else producer()
        if asyncio.iscoroutine(result):
            # Plain callable that returned a coroutine (e.g. a lambda/partial
            # wrapping an async function) — iscoroutinefunction misses these.
            result = await result
        if isinstance(result, str):
            result = result.encode("utf-8")
        return result

    async def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
        """Return the cached value, or produce/store it under a short NX lock.

        A 10s Redis NX lock prevents a dogpile; losers wait briefly, re-check,
        and finally fall back to producing locally rather than blocking.
        """
        fk = await self._full_key(key)
        val = await self._r.get(fk)
        if val is not None:
            return val

        lock_key = f"lock:{fk}"
        got = await self._r.set(lock_key, b"1", ex=10, nx=True)
        if got:
            try:
                produced = await self._produce(producer)
                await self.set(key, produced, ttl=ttl, tags=tags)
                return produced
            finally:
                await self._r.delete(lock_key)

        await asyncio.sleep(0.05)
        val2 = await self._r.get(fk)
        if val2 is not None:
            return val2
        # fallback: produce locally rather than wait indefinitely
        produced = await self._produce(producer)
        await self.set(key, produced, ttl=ttl, tags=tags)
        return produced
@@ -0,0 +1,104 @@
1
+ import json, threading, time
2
+ from typing import Optional, Iterable, Any, Mapping, Sequence, Dict, Set
3
+ from nlbone.core.ports.cache import CachePort
4
+
5
+
6
class InMemoryCache(CachePort):
    """Thread-safe in-process cache implementing CachePort.

    Supports per-key TTLs, tag-based invalidation and namespace clearing.
    bump_namespace() increments the version counter AND drops the namespace's
    current entries, matching the observable semantics of the Redis backends
    (where a bump makes old versioned keys unreachable).
    """

    def __init__(self):
        # key -> (value, absolute-expiry-epoch or None for "no expiry")
        self._data: Dict[str, tuple[bytes, Optional[float]]] = {}
        # tag -> set of keys labelled with that tag
        self._tags: Dict[str, Set[str]] = {}
        # namespace -> version counter
        self._ns_ver: Dict[str, int] = {}
        # RLock: public methods call each other (e.g. clear_namespace -> delete).
        self._lock = threading.RLock()

    def _expired(self, key: str) -> bool:
        """True if *key* is absent or past its expiry time."""
        entry = self._data.get(key)
        if not entry:
            return True
        _, exp = entry
        return exp is not None and time.time() > exp

    def _gc(self, key: str) -> None:
        """Drop *key* (and its tag references) if it has expired."""
        if self._expired(key):
            self._data.pop(key, None)
            # Also forget the key in tag sets so they don't grow unboundedly.
            for members in self._tags.values():
                members.discard(key)

    def _attach_tags(self, key: str, tags: Optional[Iterable[str]]) -> None:
        """Record *key* under each tag in *tags* (no-op when tags is falsy)."""
        if not tags:
            return
        for tag in tags:
            self._tags.setdefault(tag, set()).add(key)

    def get(self, key: str) -> Optional[bytes]:
        with self._lock:
            self._gc(key)
            entry = self._data.get(key)
            return entry[0] if entry else None

    def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        with self._lock:
            expires = None if ttl is None else time.time() + ttl
            self._data[key] = (value, expires)
            self._attach_tags(key, tags)

    def delete(self, key: str) -> None:
        with self._lock:
            self._data.pop(key, None)
            for members in self._tags.values():
                members.discard(key)

    def exists(self, key: str) -> bool:
        return self.get(key) is not None

    def ttl(self, key: str) -> Optional[int]:
        """Remaining TTL in whole seconds; None when missing or non-expiring."""
        with self._lock:
            self._gc(key)
            entry = self._data.get(key)
            if not entry:
                return None
            _, exp = entry
            if exp is None:
                return None
            remaining = int(exp - time.time())
            return remaining if remaining >= 0 else 0

    def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
        return [self.get(k) for k in keys]

    def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None,
             tags: Optional[Iterable[str]] = None) -> None:
        # Materialize once: a one-shot tags iterable would otherwise be
        # consumed by the first item's set() and lost for the rest.
        tag_list = list(tags) if tags else None
        for key, value in items.items():
            self.set(key, value, ttl=ttl, tags=tag_list)

    def get_json(self, key: str) -> Optional[Any]:
        raw = self.get(key)
        return None if raw is None else json.loads(raw)

    def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None,
                 tags: Optional[Iterable[str]] = None) -> None:
        self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)

    def invalidate_tags(self, tags: Iterable[str]) -> int:
        """Remove every live key labelled with any of *tags*; return the count."""
        removed = 0
        with self._lock:
            for tag in tags:
                for key in self._tags.pop(tag, set()):
                    if key in self._data:
                        self._data.pop(key, None)
                        removed += 1
        return removed

    def bump_namespace(self, namespace: str) -> int:
        """Increment the namespace version and eagerly drop its entries.

        The Redis backends embed the version in the physical key, so a bump
        makes old entries unreachable; deleting them here keeps the in-memory
        backend behaviorally equivalent.
        """
        with self._lock:
            self._ns_ver[namespace] = self._ns_ver.get(namespace, 0) + 1
            self.clear_namespace(namespace)
            return self._ns_ver[namespace]

    def clear_namespace(self, namespace: str) -> int:
        """Delete every key in *namespace* and return how many were removed."""
        with self._lock:
            prefix = namespace + ":"
            doomed = [k for k in self._data if k.startswith(prefix)]
            for key in doomed:
                self.delete(key)
            return len(doomed)

    def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
        """Return the cached value, or produce, store and return it."""
        with self._lock:
            cached = self.get(key)
            if cached is not None:
                return cached
            value: bytes = producer()
            self.set(key, value, ttl=ttl, tags=tags)
            return value
@@ -0,0 +1,42 @@
1
+ from __future__ import annotations
2
+ import asyncio
3
+ import json
4
+ from typing import Awaitable, Callable, Optional
5
+ from redis.asyncio import Redis
6
+
7
async def run_cache_invalidation_listener(
    redis: Redis,
    channel: str = "cache:invalidate",
    *,
    on_tags: Optional[Callable[[list[str]], Awaitable[None]]] = None,
    on_ns_bump: Optional[Callable[[str], Awaitable[None]]] = None,
    on_ns_clear: Optional[Callable[[str], Awaitable[None]]] = None,
    stop_event: Optional[asyncio.Event] = None,
) -> None:
    """Listen on a Redis pub/sub channel and dispatch cache-invalidation events.

    Messages are JSON objects that may contain ``"tags"`` (list of tag names),
    ``"ns_bump"`` (namespace whose version was bumped) or ``"ns_clear"``
    (namespace that was cleared); the matching callback is awaited for each.
    Malformed or non-object payloads are skipped silently.

    Runs until *stop_event* is set (checked roughly once per second).
    """
    pubsub = redis.pubsub()
    await pubsub.subscribe(channel)
    try:
        while True:
            if stop_event and stop_event.is_set():
                break
            message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)
            if not message:
                await asyncio.sleep(0.05)
                continue
            try:
                data = json.loads(message["data"])
            except Exception:
                continue
            if not isinstance(data, dict):
                # Valid JSON but not an event object (e.g. a bare number or
                # list) — `"tags" in data` would raise TypeError on these.
                continue

            if "tags" in data and on_tags:
                await on_tags(list(data.get("tags") or []))
            if "ns_bump" in data and on_ns_bump:
                await on_ns_bump(str(data["ns_bump"]))
            if "ns_clear" in data and on_ns_clear:
                await on_ns_clear(str(data["ns_clear"]))
    finally:
        try:
            await pubsub.unsubscribe(channel)
        finally:
            # NOTE(review): redis-py >= 5 prefers aclose(); close() kept for
            # compatibility with the version range this package targets.
            await pubsub.close()
@@ -0,0 +1,136 @@
1
+ from __future__ import annotations
2
+ import json, os, time
3
+ from typing import Optional, Iterable, Any, Mapping, Sequence, List, Set
4
+ import redis # redis-py (sync)
5
+ from nlbone.core.ports.cache import CachePort
6
+
7
+
8
+
9
+ def _nsver_key(ns: str) -> str: return f"nsver:{ns}"
10
+ def _tag_key(tag: str) -> str: return f"tag:{tag}"
11
+
12
class RedisCache(CachePort):
    """Synchronous Redis-backed cache with namespace versioning and tags.

    Logical keys look like ``"<namespace>:<rest>"``; the physical key embeds
    the namespace's current version, so bump_namespace() lazily invalidates
    every entry without scanning.
    """

    def __init__(self, url: str):
        # decode_responses=False: values are raw bytes.
        self.r = redis.Redis.from_url(url, decode_responses=False)

    def _current_ver(self, ns: str) -> int:
        """Current version counter of namespace *ns* (defaults to 1)."""
        v = self.r.get(_nsver_key(ns))
        return int(v) if v else 1

    def _full_key(self, key: str) -> str:
        """Map a logical key to its versioned physical key."""
        try:
            ns, rest = key.split(":", 1)
        except ValueError:
            # No namespace prefix: fall back to the "app" namespace.
            ns, rest = "app", key
        return f"{ns}:{self._current_ver(ns)}:{rest}"

    def get(self, key: str) -> Optional[bytes]:
        return self.r.get(self._full_key(key))

    def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        fk = self._full_key(key)
        if ttl is None:
            self.r.set(fk, value)
        else:
            self.r.setex(fk, ttl, value)
        if tags:
            pipe = self.r.pipeline()
            for t in tags:
                pipe.sadd(_tag_key(t), fk)
            pipe.execute()

    def delete(self, key: str) -> None:
        self.r.delete(self._full_key(key))

    def exists(self, key: str) -> bool:
        # "is not None" (not bool()): an empty-bytes value still counts as
        # existing — bool(b"") is False and previously mis-reported such keys.
        return self.get(key) is not None

    def ttl(self, key: str) -> Optional[int]:
        """Remaining TTL in seconds; None when the key is missing or persistent."""
        t = self.r.ttl(self._full_key(key))
        return None if t < 0 else int(t)

    def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
        return self.r.mget([self._full_key(k) for k in keys])

    def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        pipe = self.r.pipeline()
        if ttl is None:
            for k, v in items.items():
                pipe.set(self._full_key(k), v)
        else:
            for k, v in items.items():
                pipe.setex(self._full_key(k), ttl, v)
        pipe.execute()
        if tags:
            # Materialize once: *tags* may be a one-shot iterable and is
            # re-read once per item key.
            tag_list = list(tags)
            pipe = self.r.pipeline()
            for k in items.keys():
                fk = self._full_key(k)
                for t in tag_list:
                    pipe.sadd(_tag_key(t), fk)
            pipe.execute()

    def get_json(self, key: str) -> Optional[Any]:
        raw = self.get(key)
        return None if raw is None else json.loads(raw)

    def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
        self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)

    def invalidate_tags(self, tags: Iterable[str]) -> int:
        """Delete every key labelled with any of *tags*; return count removed.

        *tags* is materialized up front so one-shot iterables work (it is read
        again below when publishing the invalidation event).
        """
        tag_list = list(tags)
        removed = 0
        pipe = self.r.pipeline()
        for t in tag_list:
            tk = _tag_key(t)
            keys = self.r.smembers(tk)
            if keys:
                pipe.delete(*keys)
            pipe.delete(tk)
            removed += len(keys or [])
        pipe.execute()
        # Best-effort cross-process notification.
        try:
            ch = os.getenv("NLBONE_REDIS_INVALIDATE_CHANNEL", "cache:invalidate")
            self.r.publish(ch, json.dumps({"tags": tag_list}).encode("utf-8"))
        except Exception:
            pass
        return removed

    def bump_namespace(self, namespace: str) -> int:
        """Increment the namespace version, lazily invalidating its keys."""
        return int(self.r.incr(_nsver_key(namespace)))

    def clear_namespace(self, namespace: str) -> int:
        """SCAN-delete every physical key under *namespace*; return the count."""
        cnt = 0
        cursor = 0
        pattern = f"{namespace}:*"
        while True:
            cursor, keys = self.r.scan(cursor=cursor, match=pattern, count=1000)
            if keys:
                self.r.delete(*keys)
                cnt += len(keys)
            if cursor == 0:
                break
        return cnt

    def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
        """Return the cached value, or produce/store it under a short NX lock.

        A 10s NX lock prevents a dogpile; losers wait briefly, re-check, and
        fall back to producing locally rather than blocking.
        """
        fk = self._full_key(key)
        val = self.r.get(fk)
        if val is not None:
            return val
        lock_key = f"lock:{fk}"
        if self.r.set(lock_key, b"1", nx=True, ex=10):
            try:
                produced: bytes = producer()
                self.set(key, produced, ttl=ttl, tags=tags)
                return produced
            finally:
                self.r.delete(lock_key)
        time.sleep(0.05)
        val2 = self.r.get(fk)
        if val2 is not None:
            return val2
        produced = producer()
        self.set(key, produced, ttl=ttl, tags=tags)
        return produced
@@ -22,6 +22,8 @@ def _get_ops_for(obj) -> set[str]:
22
22
 
23
23
 
24
24
  def _is_audit_disabled(obj) -> bool:
25
+ if not DEFAULT_ENABLED:
26
+ return True
25
27
  if getattr(obj, "__audit_disable__", False):
26
28
  return True
27
29
  if hasattr(obj, "__audit_enable__") and not getattr(obj, "__audit_enable__"):
@@ -7,6 +7,7 @@ from urllib.parse import urlparse, urlunparse
7
7
  import httpx
8
8
  import requests
9
9
 
10
+ from nlbone.adapters.auth.token_provider import ClientTokenProvider
10
11
  from nlbone.config.settings import get_settings
11
12
  from nlbone.core.ports.files import FileServicePort
12
13
 
@@ -30,7 +31,7 @@ def _auth_headers(token: str | None) -> dict[str, str]:
30
31
 
31
32
 
32
33
  def _build_list_query(
33
- limit: int, offset: int, filters: dict[str, Any] | None, sort: list[tuple[str, str]] | None
34
+ limit: int, offset: int, filters: dict[str, Any] | None, sort: list[tuple[str, str]] | None
34
35
  ) -> dict[str, Any]:
35
36
  q: dict[str, Any] = {"limit": limit, "offset": offset}
36
37
  if filters:
@@ -58,21 +59,23 @@ def _normalize_https_base(url: str) -> str:
58
59
 
59
60
  class UploadchiClient(FileServicePort):
60
61
  def __init__(
61
- self,
62
- base_url: Optional[str] = None,
63
- timeout_seconds: Optional[float] = None,
64
- client: httpx.Client | None = None,
62
+ self,
63
+ token_provider: ClientTokenProvider | None = None,
64
+ base_url: Optional[str] = None,
65
+ timeout_seconds: Optional[float] = None,
66
+ client: httpx.Client | None = None,
65
67
  ) -> None:
66
68
  s = get_settings()
67
69
  self._base_url = _normalize_https_base(base_url or str(s.UPLOADCHI_BASE_URL))
68
70
  self._timeout = timeout_seconds or float(s.HTTP_TIMEOUT_SECONDS)
69
71
  self._client = client or requests.session()
72
+ self._token_provider = token_provider
70
73
 
71
74
  def close(self) -> None:
72
75
  self._client.close()
73
76
 
74
77
  def upload_file(
75
- self, file_bytes: bytes, filename: str, params: dict[str, Any] | None = None, token: str | None = None
78
+ self, file_bytes: bytes, filename: str, params: dict[str, Any] | None = None, token: str | None = None
76
79
  ) -> dict:
77
80
  tok = _resolve_token(token)
78
81
  files = {"file": (filename, file_bytes)}
@@ -82,23 +85,35 @@ class UploadchiClient(FileServicePort):
82
85
  raise UploadchiError(r.status_code, r.text)
83
86
  return r.json()
84
87
 
85
- def commit_file(self, file_id: int, client_id: str, token: str | None = None) -> None:
88
+ def commit_file(self, file_id: str, token: str | None = None) -> None:
89
+ if not token and not self._token_provider:
90
+ raise UploadchiError(detail="token_provider is not provided", status=400)
86
91
  tok = _resolve_token(token)
87
92
  r = self._client.post(
88
93
  f"{self._base_url}/{file_id}/commit",
89
- headers=_auth_headers(tok),
90
- params={"client_id": client_id} if client_id else None,
94
+ headers=_auth_headers(tok or self._token_provider.get_access_token()),
95
+ )
96
+ if r.status_code not in (204, 200):
97
+ raise UploadchiError(r.status_code, r.text)
98
+
99
+ def rollback(self, file_id: str, token: str | None = None) -> None:
100
+ if not token and not self._token_provider:
101
+ raise UploadchiError(detail="token_provider is not provided", status=400)
102
+ tok = _resolve_token(token)
103
+ r = self._client.post(
104
+ f"{self._base_url}/{file_id}/rollback",
105
+ headers=_auth_headers(tok or self._token_provider.get_access_token()),
91
106
  )
92
107
  if r.status_code not in (204, 200):
93
108
  raise UploadchiError(r.status_code, r.text)
94
109
 
95
110
  def list_files(
96
- self,
97
- limit: int = 10,
98
- offset: int = 0,
99
- filters: dict[str, Any] | None = None,
100
- sort: list[tuple[str, str]] | None = None,
101
- token: str | None = None,
111
+ self,
112
+ limit: int = 10,
113
+ offset: int = 0,
114
+ filters: dict[str, Any] | None = None,
115
+ sort: list[tuple[str, str]] | None = None,
116
+ token: str | None = None,
102
117
  ) -> dict:
103
118
  tok = _resolve_token(token)
104
119
  q = _build_list_query(limit, offset, filters, sort)
@@ -107,14 +122,14 @@ class UploadchiClient(FileServicePort):
107
122
  raise UploadchiError(r.status_code, r.text)
108
123
  return r.json()
109
124
 
110
- def get_file(self, file_id: int, token: str | None = None) -> dict:
125
+ def get_file(self, file_id: str, token: str | None = None) -> dict:
111
126
  tok = _resolve_token(token)
112
127
  r = self._client.get(f"{self._base_url}/{file_id}", headers=_auth_headers(tok))
113
128
  if r.status_code >= 400:
114
129
  raise UploadchiError(r.status_code, r.text)
115
130
  return r.json()
116
131
 
117
- def download_file(self, file_id: int, token: str | None = None) -> tuple[bytes, str, str]:
132
+ def download_file(self, file_id: str, token: str | None = None) -> tuple[bytes, str, str]:
118
133
  tok = _resolve_token(token)
119
134
  r = self._client.get(f"{self._base_url}/{file_id}/download", headers=_auth_headers(tok))
120
135
  if r.status_code >= 400:
@@ -123,7 +138,7 @@ class UploadchiClient(FileServicePort):
123
138
  media_type = r.headers.get("content-type", "application/octet-stream")
124
139
  return r.content, filename, media_type
125
140
 
126
- def delete_file(self, file_id: int, token: str | None = None) -> None:
141
+ def delete_file(self, file_id: str, token: str | None = None) -> None:
127
142
  tok = _resolve_token(token)
128
143
  r = self._client.delete(f"{self._base_url}/{file_id}", headers=_auth_headers(tok))
129
144
  if r.status_code not in (204, 200):
@@ -8,14 +8,16 @@ from nlbone.config.settings import get_settings
8
8
  from nlbone.core.ports.files import AsyncFileServicePort
9
9
 
10
10
  from .uploadchi import UploadchiError, _auth_headers, _build_list_query, _filename_from_cd, _resolve_token
11
+ from ..auth.token_provider import ClientTokenProvider
11
12
 
12
13
 
13
14
  class UploadchiAsyncClient(AsyncFileServicePort):
14
15
  def __init__(
15
- self,
16
- base_url: Optional[str] = None,
17
- timeout_seconds: Optional[float] = None,
18
- client: httpx.AsyncClient | None = None,
16
+ self,
17
+ token_provider: ClientTokenProvider | None = None,
18
+ base_url: Optional[str] = None,
19
+ timeout_seconds: Optional[float] = None,
20
+ client: httpx.AsyncClient | None = None,
19
21
  ) -> None:
20
22
  s = get_settings()
21
23
  self._base_url = base_url or str(s.UPLOADCHI_BASE_URL)
@@ -23,12 +25,13 @@ class UploadchiAsyncClient(AsyncFileServicePort):
23
25
  self._client = client or httpx.AsyncClient(
24
26
  base_url=self._base_url, timeout=self._timeout, follow_redirects=True
25
27
  )
28
+ self._token_provider = token_provider
26
29
 
27
30
  async def aclose(self) -> None:
28
31
  await self._client.aclose()
29
32
 
30
33
  async def upload_file(
31
- self, file_bytes: bytes, filename: str, params: dict[str, Any] | None = None, token: str | None = None
34
+ self, file_bytes: bytes, filename: str, params: dict[str, Any] | None = None, token: str | None = None
32
35
  ) -> dict:
33
36
  tok = _resolve_token(token)
34
37
  files = {"file": (filename, file_bytes)}
@@ -38,21 +41,33 @@ class UploadchiAsyncClient(AsyncFileServicePort):
38
41
  raise UploadchiError(r.status_code, await r.aread())
39
42
  return r.json()
40
43
 
41
- async def commit_file(self, file_id: int, client_id: str, token: str | None = None) -> None:
44
+ async def commit_file(self, file_id: str, token: str | None = None) -> None:
45
+ if not token and not self._token_provider:
46
+ raise UploadchiError(detail="token_provider is not provided", status=400)
42
47
  tok = _resolve_token(token)
43
48
  r = await self._client.post(
44
- f"/{file_id}/commit", headers=_auth_headers(tok), params={"client_id": client_id} if client_id else None
49
+ f"/{file_id}/commit", headers=_auth_headers(tok or self._token_provider.get_access_token())
50
+ )
51
+ if r.status_code not in (204, 200):
52
+ raise UploadchiError(r.status_code, await r.aread())
53
+
54
+ async def rollback(self, file_id: str, token: str | None = None) -> None:
55
+ if not token and not self._token_provider:
56
+ raise UploadchiError(detail="token_provider is not provided", status=400)
57
+ tok = _resolve_token(token)
58
+ r = await self._client.post(
59
+ f"/{file_id}/rollback", headers=_auth_headers(tok or self._token_provider.get_access_token())
45
60
  )
46
61
  if r.status_code not in (204, 200):
47
62
  raise UploadchiError(r.status_code, await r.aread())
48
63
 
49
64
  async def list_files(
50
- self,
51
- limit: int = 10,
52
- offset: int = 0,
53
- filters: dict[str, Any] | None = None,
54
- sort: list[tuple[str, str]] | None = None,
55
- token: str | None = None,
65
+ self,
66
+ limit: int = 10,
67
+ offset: int = 0,
68
+ filters: dict[str, Any] | None = None,
69
+ sort: list[tuple[str, str]] | None = None,
70
+ token: str | None = None,
56
71
  ) -> dict:
57
72
  tok = _resolve_token(token)
58
73
  q = _build_list_query(limit, offset, filters, sort)
@@ -61,14 +76,14 @@ class UploadchiAsyncClient(AsyncFileServicePort):
61
76
  raise UploadchiError(r.status_code, await r.aread())
62
77
  return r.json()
63
78
 
64
- async def get_file(self, file_id: int, token: str | None = None) -> dict:
79
+ async def get_file(self, file_id: str, token: str | None = None) -> dict:
65
80
  tok = _resolve_token(token)
66
81
  r = await self._client.get(f"/{file_id}", headers=_auth_headers(tok))
67
82
  if r.status_code >= 400:
68
83
  raise UploadchiError(r.status_code, await r.aread())
69
84
  return r.json()
70
85
 
71
- async def download_file(self, file_id: int, token: str | None = None) -> tuple[AsyncIterator[bytes], str, str]:
86
+ async def download_file(self, file_id: str, token: str | None = None) -> tuple[AsyncIterator[bytes], str, str]:
72
87
  tok = _resolve_token(token)
73
88
  r = await self._client.get(f"/{file_id}/download", headers=_auth_headers(tok), stream=True)
74
89
  if r.status_code >= 400:
@@ -86,7 +101,7 @@ class UploadchiAsyncClient(AsyncFileServicePort):
86
101
 
87
102
  return _aiter(), filename, media_type
88
103
 
89
- async def delete_file(self, file_id: int, token: str | None = None) -> None:
104
+ async def delete_file(self, file_id: str, token: str | None = None) -> None:
90
105
  tok = _resolve_token(token)
91
106
  r = await self._client.delete(f"/{file_id}", headers=_auth_headers(tok))
92
107
  if r.status_code not in (204, 200):
nlbone/config/settings.py CHANGED
@@ -65,6 +65,8 @@ class Settings(BaseSettings):
65
65
  # Messaging / Cache
66
66
  # ---------------------------
67
67
  REDIS_URL: str = Field(default="redis://localhost:6379/0")
68
+ CACHE_BACKEND: Literal["memory", "redis"] = Field(default="memory")
69
+ CACHE_DEFAULT_TTL_S: int = Field(default=300)
68
70
 
69
71
  # --- Event bus / Outbox ---
70
72
  EVENT_BUS_BACKEND: Literal["inmemory"] = Field(default="inmemory")
nlbone/container.py CHANGED
@@ -5,12 +5,17 @@ from typing import Any, Mapping, Optional
5
5
  from dependency_injector import containers, providers
6
6
 
7
7
  from nlbone.adapters.auth.keycloak import KeycloakAuthService
8
+ from nlbone.adapters.auth.token_provider import ClientTokenProvider
9
+ from nlbone.adapters.cache.async_redis import AsyncRedisCache
10
+ from nlbone.adapters.cache.memory import InMemoryCache
11
+ from nlbone.adapters.cache.redis import RedisCache
8
12
  from nlbone.adapters.db.postgres import AsyncSqlAlchemyUnitOfWork, SqlAlchemyUnitOfWork
9
13
  from nlbone.adapters.db.postgres.engine import get_async_session_factory, get_sync_session_factory
10
14
  from nlbone.adapters.http_clients.uploadchi import UploadchiClient
11
15
  from nlbone.adapters.http_clients.uploadchi_async import UploadchiAsyncClient
12
16
  from nlbone.adapters.messaging import InMemoryEventBus
13
17
  from nlbone.core.ports import EventBusPort
18
+ from nlbone.core.ports.cache import CachePort, AsyncCachePort
14
19
  from nlbone.core.ports.files import AsyncFileServicePort, FileServicePort
15
20
 
16
21
 
@@ -29,8 +34,23 @@ class Container(containers.DeclarativeContainer):
29
34
 
30
35
  # --- Services ---
31
36
  auth: providers.Singleton[KeycloakAuthService] = providers.Singleton(KeycloakAuthService, settings=config)
32
- file_service: providers.Singleton[FileServicePort] = providers.Singleton(UploadchiClient)
33
- afiles_service: providers.Singleton[AsyncFileServicePort] = providers.Singleton(UploadchiAsyncClient)
37
+ token_provider = providers.Singleton(ClientTokenProvider, auth=auth, skew_seconds=30)
38
+ file_service: providers.Singleton[FileServicePort] = providers.Singleton(UploadchiClient,
39
+ token_provider=token_provider)
40
+ afiles_service: providers.Singleton[AsyncFileServicePort] = providers.Singleton(UploadchiAsyncClient,
41
+ token_provider=token_provider)
42
+
43
+ cache: providers.Singleton[CachePort] = providers.Selector(
44
+ config.CACHE_BACKEND,
45
+ memory=providers.Singleton(InMemoryCache),
46
+ redis=providers.Singleton(RedisCache, url=config.REDIS_URL),
47
+ )
48
+
49
+ async_cache: providers.Singleton[AsyncCachePort] = providers.Selector(
50
+ config.CACHE_BACKEND,
51
+ memory=providers.Singleton(InMemoryCache),
52
+ redis=providers.Singleton(AsyncRedisCache, url=config.REDIS_URL),
53
+ )
34
54
 
35
55
 
36
56
  def create_container(settings: Optional[Any] = None) -> Container:
@@ -0,0 +1,37 @@
1
+ from typing import Protocol, Optional, Iterable, Any, Mapping, Sequence, Tuple, TypeVar, Callable
2
+
3
+ T = TypeVar("T")
4
+
5
class CachePort(Protocol):
    """Synchronous cache interface: TTLs, tags, and namespace versioning."""

    # -- single-key operations --
    def get(self, key: str) -> Optional[bytes]: ...
    def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
    def delete(self, key: str) -> None: ...
    def exists(self, key: str) -> bool: ...
    def ttl(self, key: str) -> Optional[int]: ...

    # -- bulk operations --
    def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]: ...
    def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...

    # -- JSON convenience --
    def get_json(self, key: str) -> Optional[Any]: ...
    def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...

    # -- invalidation --
    def invalidate_tags(self, tags: Iterable[str]) -> int: ...
    def bump_namespace(self, namespace: str) -> int: ...  # versioned keys
    def clear_namespace(self, namespace: str) -> int: ...

    # -- dogpile-safe read-through --
    def get_or_set(self, key: str, producer: Callable[[], bytes], *, ttl: int, tags: Optional[Iterable[str]] = None) -> bytes: ...
23
+
24
class AsyncCachePort(Protocol):
    """Async counterpart of CachePort: same contract, awaitable methods."""

    # -- single-key operations --
    async def get(self, key: str) -> Optional[bytes]: ...
    async def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
    async def delete(self, key: str) -> None: ...
    async def exists(self, key: str) -> bool: ...
    async def ttl(self, key: str) -> Optional[int]: ...

    # -- bulk operations --
    async def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]: ...
    async def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...

    # -- JSON convenience --
    async def get_json(self, key: str) -> Optional[Any]: ...
    async def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...

    # -- invalidation --
    async def invalidate_tags(self, tags: Iterable[str]) -> int: ...
    async def bump_namespace(self, namespace: str) -> int: ...
    async def clear_namespace(self, namespace: str) -> int: ...

    # -- dogpile-safe read-through --
    async def get_or_set(self, key: str, producer, *, ttl: int, tags: Optional[Iterable[str]] = None) -> bytes: ...
@@ -8,7 +8,8 @@ class FileServicePort(Protocol):
8
8
  def upload_file(
9
9
  self, file_bytes: bytes, filename: str, params: dict[str, Any] | None = None, token: str | None = None
10
10
  ) -> dict: ...
11
- def commit_file(self, file_id: int, client_id: str, token: str | None = None) -> None: ...
11
+ def commit_file(self, file_id: str, token: str | None = None) -> None: ...
12
+ def rollback(self, file_id: str, token: str | None = None) -> None: ...
12
13
  def list_files(
13
14
  self,
14
15
  limit: int = 10,
@@ -17,9 +18,9 @@ class FileServicePort(Protocol):
17
18
  sort: list[tuple[str, str]] | None = None,
18
19
  token: str | None = None,
19
20
  ) -> dict: ...
20
- def get_file(self, file_id: int, token: str | None = None) -> dict: ...
21
- def download_file(self, file_id: int, token: str | None = None) -> tuple[bytes, str, str]: ...
22
- def delete_file(self, file_id: int, token: str | None = None) -> None: ...
21
+ def get_file(self, file_id: str, token: str | None = None) -> dict: ...
22
+ def download_file(self, file_id: str, token: str | None = None) -> tuple[bytes, str, str]: ...
23
+ def delete_file(self, file_id: str, token: str | None = None) -> None: ...
23
24
 
24
25
 
25
26
  @runtime_checkable
@@ -27,7 +28,8 @@ class AsyncFileServicePort(Protocol):
27
28
  async def upload_file(
28
29
  self, file_bytes: bytes, filename: str, params: dict[str, Any] | None = None, token: str | None = None
29
30
  ) -> dict: ...
30
- async def commit_file(self, file_id: int, client_id: str, token: str | None = None) -> None: ...
31
+ async def commit_file(self, file_id: str, token: str | None = None) -> None: ...
32
+ async def rollback(self, file_id: str, token: str | None = None) -> None: ...
31
33
  async def list_files(
32
34
  self,
33
35
  limit: int = 10,
@@ -36,6 +38,6 @@ class AsyncFileServicePort(Protocol):
36
38
  sort: list[tuple[str, str]] | None = None,
37
39
  token: str | None = None,
38
40
  ) -> dict: ...
39
- async def get_file(self, file_id: int, token: str | None = None) -> dict: ...
40
- async def download_file(self, file_id: int, token: str | None = None) -> tuple[AsyncIterator[bytes], str, str]: ...
41
- async def delete_file(self, file_id: int, token: str | None = None) -> None: ...
41
+ async def get_file(self, file_id: str, token: str | None = None) -> dict: ...
42
+ async def download_file(self, file_id: str, token: str | None = None) -> tuple[AsyncIterator[bytes], str, str]: ...
43
+ async def delete_file(self, file_id: str, token: str | None = None) -> None: ...
nlbone/utils/cache.py ADDED
@@ -0,0 +1,196 @@
1
import asyncio
import hashlib
import inspect
import json
from typing import Any, Callable, Iterable, Optional

from makefun import wraps as mf_wraps

from nlbone.utils.cache_registry import get_cache
8
+
9
# Optional pydantic support: default_serialize special-cases BaseModel
# instances. When pydantic is not installed, substitute an empty sentinel
# class so the isinstance() check simply never matches.
try:
    from pydantic import BaseModel  # v1/v2
except Exception:  # pragma: no cover
    class BaseModel:  # minimal fallback sentinel; ordinary values never match it
        pass
14
+
15
+
16
+ # -------- helpers --------
17
+
18
+ def _bind(func: Callable, args, kwargs):
19
+ sig = inspect.signature(func)
20
+ bound = sig.bind_partial(*args, **kwargs)
21
+ bound.apply_defaults()
22
+ return bound
23
+
24
+
25
+ def _key_from_template(
26
+ tpl: Optional[str],
27
+ func: Callable,
28
+ args,
29
+ kwargs,
30
+ ) -> str:
31
+ """Format key template with bound arguments or build a stable default."""
32
+ bound = _bind(func, args, kwargs)
33
+ if tpl:
34
+ return tpl.format(**bound.arguments)
35
+
36
+ # Default stable key: module:qualname:sha of args
37
+ payload = json.dumps(bound.arguments, sort_keys=True, default=str)
38
+ return f"{func.__module__}:{func.__qualname__}:{hash(payload)}"
39
+
40
+
41
+ def _format_tags(
42
+ tag_tpls: Optional[Iterable[str]],
43
+ func: Callable,
44
+ args,
45
+ kwargs,
46
+ ) -> list[str] | None:
47
+ if not tag_tpls:
48
+ return None
49
+ bound = _bind(func, args, kwargs)
50
+ return [t.format(**bound.arguments) for t in tag_tpls]
51
+
52
+
53
def default_serialize(val: Any) -> bytes:
    """Encode *val* as UTF-8 bytes: pydantic models via their own JSON dump,
    everything else via json.dumps (non-serializable values fall back to str)."""
    if isinstance(val, BaseModel):
        # Prefer the pydantic v2 API, then the v1 one.
        dump = getattr(val, "model_dump_json", None) or getattr(val, "json", None)
        if dump is not None:
            return dump().encode("utf-8")
    return json.dumps(val, default=str).encode("utf-8")
61
+
62
+
63
def default_deserialize(b: bytes) -> Any:
    """Inverse of default_serialize for plain JSON payloads."""
    decoded: Any = json.loads(b)
    return decoded
65
+
66
+
67
+ def _is_async_method(obj: Any, name: str) -> bool:
68
+ meth = getattr(obj, name, None)
69
+ return asyncio.iscoroutinefunction(meth)
70
+
71
+
72
+ def _run_maybe_async(func: Callable, *args, **kwargs):
73
+ """Call a function that may be async from sync context."""
74
+ result = func(*args, **kwargs)
75
+ if inspect.isawaitable(result):
76
+ return asyncio.run(result)
77
+ return result
78
+
79
+
80
+ # -------- cache decorators --------
81
+
82
def cached(
    *,
    ttl: int,
    key: str | None = None,
    tags: Iterable[str] | None = None,
    serializer: Callable[[Any], bytes] = default_serialize,
    deserializer: Callable[[bytes], Any] = default_deserialize,
    cache_resolver: Optional[Callable[[], Any]] = None,
):
    """
    Framework-agnostic caching for SYNC or ASYNC callables.
    - Preserves function signature (good for FastAPI/OpenAPI).
    - Works with sync/async cache backends (CachePort / AsyncCachePort).
    - `key` & `tags` are string templates, e.g. "file:{file_id}", formatted
      with the decorated function's bound call arguments.

    Args:
        ttl: Seconds to keep a computed value in the cache.
        key: Optional key template; a stable derived key is used when omitted.
        tags: Optional tag templates for group invalidation.
        serializer: Converts the function result to bytes before storing.
        deserializer: Converts stored bytes back to a value on a hit.
        cache_resolver: Overrides the registry's get_cache() lookup.

    NOTE(review): the sync wrapper bridges async backends via asyncio.run
    (see _run_maybe_async), so it must not run inside an active event loop.
    """
    def deco(func: Callable):
        is_async_func = asyncio.iscoroutinefunction(func)

        if is_async_func:
            @mf_wraps(func)
            async def aw(*args, **kwargs):
                # Resolve the backend lazily, per call, so tests can swap it.
                cache = (cache_resolver or get_cache)()
                k = _key_from_template(key, func, args, kwargs)
                tg = _format_tags(tags, func, args, kwargs)

                # GET: await only when the backend's get is a coroutine function
                if _is_async_method(cache, "get"):
                    cached_bytes = await cache.get(k)
                else:
                    cached_bytes = cache.get(k)

                if cached_bytes is not None:
                    return deserializer(cached_bytes)

                # MISS -> compute the real value
                result = await func(*args, **kwargs)

                # SET: store serialized bytes with ttl and formatted tags
                data = serializer(result)
                if _is_async_method(cache, "set"):
                    await cache.set(k, data, ttl=ttl, tags=tg)
                else:
                    cache.set(k, data, ttl=ttl, tags=tg)

                return result

            return aw

        # SYNC callable
        @mf_wraps(func)
        def sw(*args, **kwargs):
            cache = (cache_resolver or get_cache)()
            k = _key_from_template(key, func, args, kwargs)
            tg = _format_tags(tags, func, args, kwargs)

            # GET (may be async; bridged from sync code via _run_maybe_async)
            if _is_async_method(cache, "get"):
                cached_bytes = _run_maybe_async(cache.get, k)
            else:
                cached_bytes = cache.get(k)

            if cached_bytes is not None:
                return deserializer(cached_bytes)

            # MISS -> compute
            result = func(*args, **kwargs)

            # SET (may be async)
            data = serializer(result)
            if _is_async_method(cache, "set"):
                _run_maybe_async(cache.set, k, data, ttl=ttl, tags=tg)
            else:
                cache.set(k, data, ttl=ttl, tags=tg)

            return result

        return sw

    return deco
161
+
162
+
163
def invalidate_by_tags(tags_builder: Callable[..., Iterable[str]]):
    """
    Invalidate computed tags after function finishes.
    Works with sync or async functions and cache backends.

    Args:
        tags_builder: Called with the SAME (*args, **kwargs) as the decorated
            function; returns the tags to invalidate after it completes.

    Note:
        Tags are invalidated only when the wrapped call returns normally;
        an exception propagates and skips invalidation.
    """
    def deco(func: Callable):
        is_async_func = asyncio.iscoroutinefunction(func)

        if is_async_func:
            @mf_wraps(func)
            async def aw(*args, **kwargs):
                out = await func(*args, **kwargs)
                cache = get_cache()
                tags = list(tags_builder(*args, **kwargs))
                # Await only when the backend's invalidate_tags is async.
                if _is_async_method(cache, "invalidate_tags"):
                    await cache.invalidate_tags(tags)
                else:
                    cache.invalidate_tags(tags)
                return out
            return aw

        @mf_wraps(func)
        def sw(*args, **kwargs):
            out = func(*args, **kwargs)
            cache = get_cache()
            tags = list(tags_builder(*args, **kwargs))
            # Bridge an async backend from sync code (see _run_maybe_async).
            if _is_async_method(cache, "invalidate_tags"):
                _run_maybe_async(cache.invalidate_tags, tags)
            else:
                cache.invalidate_tags(tags)
            return out
        return sw

    return deco
@@ -0,0 +1,30 @@
1
+ import hashlib
2
+ import json
3
+ import random
4
+ from typing import Any, Mapping
5
+
6
+ def _stable_params(params: Mapping[str, Any]) -> str:
7
+ return json.dumps(params, sort_keys=True, separators=(",", ":"))
8
+
9
+ def make_key(ns: str, *parts: str) -> str:
10
+ safe_parts = [p.replace(" ", "_") for p in parts if p]
11
+ return f"{ns}:{':'.join(safe_parts)}" if safe_parts else f"{ns}:root"
12
+
13
+ def make_param_key(ns: str, base: str, params: Mapping[str, Any]) -> str:
14
+ payload = _stable_params(params)
15
+ digest = hashlib.sha256(payload.encode("utf-8")).hexdigest()[:16]
16
+ return f"{ns}:{base}:{digest}"
17
+
18
+ def tag_entity(ns: str, entity_id: Any) -> str:
19
+ return f"{ns}:{entity_id}"
20
+
21
+ def tag_list(ns: str, **filters) -> str:
22
+ if not filters:
23
+ return f"{ns}:list"
24
+ payload = _stable_params(filters)
25
+ digest = hashlib.md5(payload.encode("utf-8")).hexdigest()[:12]
26
+ return f"{ns}:list:{digest}"
27
+
28
+ def ttl_with_jitter(base_ttl: int, *, jitter_ratio: float = 0.1) -> int:
29
+ jitter = int(base_ttl * jitter_ratio)
30
+ return base_ttl + random.randint(-jitter, jitter)
@@ -0,0 +1,23 @@
1
+ from typing import Callable, Optional, TypeVar
2
+ from contextvars import ContextVar
3
+
4
T = TypeVar("T")  # kept for backward compatibility with prior importers

# Process-wide resolver, wired once at application startup
# (e.g. set_cache_resolver(lambda: container.cache())).
# Annotated with Any rather than an unbound TypeVar: a module-level global
# is not a generic context, so `T` carried no meaning here.
_global_resolver: Optional[Callable[[], Any]] = None

# Context-local override; takes precedence over the global resolver.
_ctx_resolver: ContextVar[Optional[Callable[[], Any]]] = ContextVar("_ctx_resolver", default=None)

def set_cache_resolver(fn: Callable[[], Any]) -> None:
    """Set process-wide cache resolver (e.g., lambda: container.cache())."""
    global _global_resolver
    _global_resolver = fn

def set_context_cache_resolver(fn: Optional[Callable[[], Any]]) -> None:
    """Override resolver in current context (useful in tests/background tasks)."""
    _ctx_resolver.set(fn)

def get_cache() -> Any:
    """Return a cache instance from the context resolver, else the global one.

    Raises:
        RuntimeError: when neither resolver has been configured.
    """
    fn = _ctx_resolver.get()
    if fn is None:
        fn = _global_resolver
    if fn is None:
        raise RuntimeError("Cache resolver not configured. Call set_cache_resolver(...) first.")
    return fn()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: nlbone
3
- Version: 0.4.2
3
+ Version: 0.5.0
4
4
  Summary: Backbone package for interfaces and infrastructure in Python projects
5
5
  Author-email: Amir Hosein Kahkbazzadeh <a.khakbazzadeh@gmail.com>
6
6
  License: MIT
@@ -11,6 +11,7 @@ Requires-Dist: dependency-injector>=4.48.1
11
11
  Requires-Dist: elasticsearch==8.14.0
12
12
  Requires-Dist: fastapi>=0.116
13
13
  Requires-Dist: httpx>=0.27
14
+ Requires-Dist: makefun>=1.16.0
14
15
  Requires-Dist: psycopg>=3.2.9
15
16
  Requires-Dist: pydantic-settings>=2.0
16
17
  Requires-Dist: pydantic>=2.0
@@ -79,4 +80,8 @@ async def main():
79
80
 
80
81
 
81
82
  anyio.run(main)
82
- ```
83
+ ```
84
+
85
+ ## 📦 Used In
86
+ - **Explore**
87
+ - **Pricing**
@@ -1,12 +1,18 @@
1
1
  nlbone/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- nlbone/container.py,sha256=Jw5XMBmmrgkd_iZwItVfGCj1vkOiedpn8eIdYHpjWsk,2100
2
+ nlbone/container.py,sha256=gbO8D23HOCHe2lpZpHCvpb5zPMSJnShK9AOna1UQFWg,3169
3
3
  nlbone/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
4
  nlbone/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
5
  nlbone/adapters/auth/__init__.py,sha256=hkDHvsFhw_UiOHG9ZSMqjiAhK4wumEforitveSZswVw,42
6
6
  nlbone/adapters/auth/keycloak.py,sha256=dfAxODiARfR8y3FKoWNo9fjfb6QyWd_Qr7AbJ0E78AM,2729
7
+ nlbone/adapters/auth/token_provider.py,sha256=NhqjqTUsoZO4gbK-cybs0OkKydFN7CPTxAiypEw081o,1433
8
+ nlbone/adapters/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
9
+ nlbone/adapters/cache/async_redis.py,sha256=E61tpBwAElMoGgyJ8CWO5G81n7u1-Wi_kVRDoh5rJuM,6206
10
+ nlbone/adapters/cache/memory.py,sha256=lRJqMdD2lbojndQ_dJ7AulfhSYY_uqjgXvd4ZT_J8co,3655
11
+ nlbone/adapters/cache/pubsub_listener.py,sha256=2y6DbWsERXlMOkmJSJMg8hNU9MTGwR7BhwQRveivh50,1457
12
+ nlbone/adapters/cache/redis.py,sha256=gMNfUIk1HkeXVBmtAtVchcr59ll06E4wam9rGWAhalM,4535
7
13
  nlbone/adapters/db/__init__.py,sha256=saW-wN4E0NZ2_ldi-nrm5AgsH7EULNSa62lYMwfy1oo,252
8
14
  nlbone/adapters/db/postgres/__init__.py,sha256=6JYJH0xZs3aR-zuyMpRhsdzFugmqz8nprwTQLprqhZc,313
9
- nlbone/adapters/db/postgres/audit.py,sha256=OYPQTDC0p27D3X8iQ-g3fu7Cno3bDm8egodLMPDcn3k,4627
15
+ nlbone/adapters/db/postgres/audit.py,sha256=zFzL-pXmfjcp5YLx6vBYczprsJjEPxSYKhQNR3WjKL0,4675
10
16
  nlbone/adapters/db/postgres/base.py,sha256=kha9xmklzhuQAK8QEkNBn-mAHq8dUKbOM-3abaBpWmQ,71
11
17
  nlbone/adapters/db/postgres/engine.py,sha256=UCegauVB1gvo42ThytYnn5VIcQBwR-5xhcXYFApRFNk,3448
12
18
  nlbone/adapters/db/postgres/query_builder.py,sha256=U5pqpCfJKuMIxIEHyodoHuPgE8jf53slC1ScKZR5xa4,8653
@@ -17,8 +23,8 @@ nlbone/adapters/db/redis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
17
23
  nlbone/adapters/db/redis/client.py,sha256=XAKcmU0lpPvWPMS0fChVQ3iSJfHV1g4bMOCgJaj2bCI,512
18
24
  nlbone/adapters/http_clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
19
25
  nlbone/adapters/http_clients/email_gateway.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
- nlbone/adapters/http_clients/uploadchi.py,sha256=BGdGC-p8EmXYn29da577Kas2CzZVDAb9XJ9xx2LJioY,4713
21
- nlbone/adapters/http_clients/uploadchi_async.py,sha256=hBx0jzYYZAX1DCkImZ98zdUob8D9PQH7jykDXIjwG9I,3866
26
+ nlbone/adapters/http_clients/uploadchi.py,sha256=-goyqa3DNnDdkiIvrJRMlk5KVCaRNy34pPYklLHV04w,5556
27
+ nlbone/adapters/http_clients/uploadchi_async.py,sha256=9QxunVxPEyf6LxcGaQ9I2JFtVB6s-JtR9f7RtND8pIk,4695
22
28
  nlbone/adapters/messaging/__init__.py,sha256=UDAwu3s-JQmOZjWz2Nu0SgHhnkbeOhKDH_zLD75oWMY,40
23
29
  nlbone/adapters/messaging/event_bus.py,sha256=w-NPwDiPMLFPU_enRQCtfQXOALsXfg31u57R8sG_-1U,781
24
30
  nlbone/adapters/messaging/redis.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -26,7 +32,7 @@ nlbone/adapters/percolation/__init__.py,sha256=viq5WZqcSLlRBF5JwuyTD_IZaNWfpKzGJ
26
32
  nlbone/adapters/percolation/connection.py,sha256=xZ-OtQVbyQYH83TUizS0UWI85Iic-AhUjiuyzO0e46s,331
27
33
  nlbone/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
28
34
  nlbone/config/logging.py,sha256=rGQz9W5ZgUFXBK74TFmTuwx_WMJhD8zPN39zfKVxwnI,4115
29
- nlbone/config/settings.py,sha256=xxdZQDQJ7wSEGODKljtQWcfITXbSHoqAXOlQ9vhNSe4,3474
35
+ nlbone/config/settings.py,sha256=-FcCbS30yDK-ZXlC_336T4yea7sVjnzNak22wSptOXI,3596
30
36
  nlbone/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
31
37
  nlbone/core/application/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
32
38
  nlbone/core/application/base_worker.py,sha256=uHqglsd33jXl_0kmkFlB4KQ5NdI1wArcOeQmdcifPQc,1192
@@ -40,8 +46,9 @@ nlbone/core/domain/events.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
40
46
  nlbone/core/domain/models.py,sha256=e2ig7PMBBpmc8pdHLNMnXhucMXr9OUq-G7bKGTq9Qj0,1458
41
47
  nlbone/core/ports/__init__.py,sha256=gx-Ubj7h-1vvnu56sNnRqmer7HHfW3rX2WLl-0AX5U0,214
42
48
  nlbone/core/ports/auth.py,sha256=Gh0yQsxx2OD6pDH2_p-khsA-bVoypP1juuqMoSfjZUo,493
49
+ nlbone/core/ports/cache.py,sha256=C9exWYPZsppCpkrAMiGfJuf4ehHkibtFfvB1aFbWuO4,2257
43
50
  nlbone/core/ports/event_bus.py,sha256=_Om1GOOT-F325oV6_LJXtLdx4vu5i7KrpTDD3qPJXU0,325
44
- nlbone/core/ports/files.py,sha256=1k-Vm0ld89EnFK2wybSXIJm5gQNpeuO92PD7d4VMh8s,1737
51
+ nlbone/core/ports/files.py,sha256=7Ov2ITYRpPwwDTZGCeNVISg8e3A9l08jbOgpTImgfK8,1863
45
52
  nlbone/core/ports/messaging.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
46
53
  nlbone/core/ports/repo.py,sha256=zOw8CTMAu5DKKy2wZpT3_6JWWjaJCDt7q4dOiJYrCOQ,651
47
54
  nlbone/core/ports/uow.py,sha256=SmBdRf0NvSdIjQ3Le1QGz8kNGBk7jgNHtNguvXRwmgs,557
@@ -68,11 +75,14 @@ nlbone/interfaces/cli/main.py,sha256=65XXNmH0dX9Lib_yW5iQXo7wp_GRFwx9xXDYgy2LJtY
68
75
  nlbone/interfaces/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
69
76
  nlbone/interfaces/jobs/sync_tokens.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
70
77
  nlbone/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
78
+ nlbone/utils/cache.py,sha256=3sKtWoGBlrEvkg74r4C-Sx8u41OTup_G4s-Q57IaOXg,5921
79
+ nlbone/utils/cache_keys.py,sha256=a1yRMUuyRJ2-CswjtgVkLcJAeT2QmThLQ5kQWvpOKL4,1069
80
+ nlbone/utils/cache_registry.py,sha256=0csax1-GmKBcsZmQYWI4Bs0X9_BMo6Jdoac-e9Zusv8,819
71
81
  nlbone/utils/context.py,sha256=MmclJ24BG2uvSTg1IK7J-Da9BhVFDQ5ag4Ggs2FF1_w,1600
72
82
  nlbone/utils/redactor.py,sha256=JbbPs2Qtnz0zHN85BGPYQNWwBigXMSzmMEmmZZOTs_U,1277
73
83
  nlbone/utils/time.py,sha256=6e0A4_hG1rYDCrWoOklEGVJstBf8j9XSSTT7VNV2K9Y,1272
74
- nlbone-0.4.2.dist-info/METADATA,sha256=nriPe98XsiI5atrlh8I13Z1dFPW-sRP_iF7B658Q0GE,2117
75
- nlbone-0.4.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
76
- nlbone-0.4.2.dist-info/entry_points.txt,sha256=CpIL45t5nbhl1dGQPhfIIDfqqak3teK0SxPGBBr7YCk,59
77
- nlbone-0.4.2.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
78
- nlbone-0.4.2.dist-info/RECORD,,
84
+ nlbone-0.5.0.dist-info/METADATA,sha256=4EzJYpbkzQZ1LsKAYpY94NrDARztMctvov_bf1vjmfI,2194
85
+ nlbone-0.5.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
86
+ nlbone-0.5.0.dist-info/entry_points.txt,sha256=CpIL45t5nbhl1dGQPhfIIDfqqak3teK0SxPGBBr7YCk,59
87
+ nlbone-0.5.0.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
88
+ nlbone-0.5.0.dist-info/RECORD,,
File without changes