nlbone 0.4.3-py3-none-any.whl → 0.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nlbone/adapters/cache/async_redis.py ADDED
@@ -0,0 +1,180 @@
+ import asyncio
+ import json
+ import os
+ from typing import Optional, Iterable, Any, Mapping, Sequence, List
+
+ from redis.asyncio import Redis
+ from nlbone.core.ports.cache import AsyncCachePort
+
+
+ def _nsver_key(ns: str) -> str: return f"nsver:{ns}"
+ def _tag_key(tag: str) -> str: return f"tag:{tag}"
+
+ class AsyncRedisCache(AsyncCachePort):
+     def __init__(self, url: str, *, invalidate_channel: str | None = None):
+         self._r = Redis.from_url(url, decode_responses=False)
+         self._ch = invalidate_channel or os.getenv("NLBONE_REDIS_INVALIDATE_CHANNEL", "cache:invalidate")
+
+     @property
+     def redis(self) -> Redis:
+         return self._r
+
+     async def _current_ver(self, ns: str) -> int:
+         v = await self._r.get(_nsver_key(ns))
+         return int(v) if v else 1
+
+     async def _full_key(self, key: str) -> str:
+         try:
+             ns, rest = key.split(":", 1)
+         except ValueError:
+             ns, rest = "app", key
+         ver = await self._current_ver(ns)
+         return f"{ns}:{ver}:{rest}"
+
+     # -------- basic --------
+     async def get(self, key: str) -> Optional[bytes]:
+         fk = await self._full_key(key)
+         return await self._r.get(fk)
+
+     async def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
+         fk = await self._full_key(key)
+         if ttl is None:
+             await self._r.set(fk, value)
+         else:
+             await self._r.setex(fk, ttl, value)
+         if tags:
+             pipe = self._r.pipeline()
+             for t in tags:
+                 pipe.sadd(_tag_key(t), fk)
+             await pipe.execute()
+
+     async def delete(self, key: str) -> None:
+         fk = await self._full_key(key)
+         await self._r.delete(fk)
+
+     async def exists(self, key: str) -> bool:
+         return (await self.get(key)) is not None
+
+     async def ttl(self, key: str) -> Optional[int]:
+         fk = await self._full_key(key)
+         t = await self._r.ttl(fk)
+         return None if t < 0 else int(t)
+
+     # -------- multi --------
+
+     async def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
+         fks = [await self._full_key(k) for k in keys]
+         return await self._r.mget(fks)
+
+     async def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None,
+                    tags: Optional[Iterable[str]] = None) -> None:
+         pipe = self._r.pipeline()
+         if ttl is None:
+             for k, v in items.items():
+                 fk = await self._full_key(k)
+                 pipe.set(fk, v)
+         else:
+             for k, v in items.items():
+                 fk = await self._full_key(k)
+                 pipe.setex(fk, ttl, v)
+         await pipe.execute()
+
+         if tags:
+             pipe = self._r.pipeline()
+             for t in tags:
+                 for k in items.keys():
+                     fk = await self._full_key(k)
+                     pipe.sadd(_tag_key(t), fk)
+             await pipe.execute()
+
+     # -------- json --------
+
+     async def get_json(self, key: str) -> Optional[Any]:
+         b = await self.get(key)
+         return None if b is None else json.loads(b)
+
+     async def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None,
+                        tags: Optional[Iterable[str]] = None) -> None:
+         await self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)
+
+     # -------- invalidation --------
+
+     async def invalidate_tags(self, tags: Iterable[str]) -> int:
+         removed = 0
+         pipe = self._r.pipeline()
+         key_sets: list[tuple[str, set[bytes]]] = []
+         for t in tags:
+             tk = _tag_key(t)
+             members = await self._r.smembers(tk)
+             if members:
+                 pipe.delete(*members)
+             pipe.delete(tk)
+             key_sets.append((tk, members))
+             removed += len(members or [])
+         await pipe.execute()
+
+         # publish notification for other processes
+         try:
+             payload = json.dumps({"tags": list(tags)}).encode("utf-8")
+             await self._r.publish(self._ch, payload)
+         except Exception:
+             pass
+
+         return removed
+
+     async def bump_namespace(self, namespace: str) -> int:
+         v = await self._r.incr(_nsver_key(namespace))
+         # optional notification
+         try:
+             await self._r.publish(self._ch, json.dumps({"ns_bump": namespace}).encode("utf-8"))
+         except Exception:
+             pass
+         return int(v)
+
+     async def clear_namespace(self, namespace: str) -> int:
+         cnt = 0
+         cursor = 0
+         pattern = f"{namespace}:*"
+         while True:
+             cursor, keys = await self._r.scan(cursor=cursor, match=pattern, count=1000)
+             if keys:
+                 await self._r.delete(*keys)
+                 cnt += len(keys)
+             if cursor == 0:
+                 break
+         try:
+             await self._r.publish(self._ch, json.dumps({"ns_clear": namespace}).encode("utf-8"))
+         except Exception:
+             pass
+         return cnt
+
+     # -------- dogpile-safe get_or_set --------
+
+     async def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
+         fk = await self._full_key(key)
+         val = await self._r.get(fk)
+         if val is not None:
+             return val
+
+         lock_key = f"lock:{fk}"
+         got = await self._r.set(lock_key, b"1", ex=10, nx=True)
+         if got:
+             try:
+                 produced = await producer() if asyncio.iscoroutinefunction(producer) else producer()
+                 if isinstance(produced, str):
+                     produced = produced.encode("utf-8")
+                 await self.set(key, produced, ttl=ttl, tags=tags)
+                 return produced
+             finally:
+                 await self._r.delete(lock_key)
+
+         await asyncio.sleep(0.05)
+         val2 = await self._r.get(fk)
+         if val2 is not None:
+             return val2
+         # fallback
+         produced = await producer() if asyncio.iscoroutinefunction(producer) else producer()
+         if isinstance(produced, str):
+             produced = produced.encode("utf-8")
+         await self.set(key, produced, ttl=ttl, tags=tags)
+         return produced
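
For orientation, a minimal usage sketch of the adapter added above (not part of the package diff; it assumes a Redis server reachable at the URL shown and follows the method signatures visible in the hunk):

```python
import asyncio

from nlbone.adapters.cache.async_redis import AsyncRedisCache


async def main() -> None:
    cache = AsyncRedisCache("redis://localhost:6379/0")

    async def load_profile() -> bytes:
        # Stand-in for an expensive lookup (DB query, HTTP call, ...).
        return b'{"id": 42, "name": "demo"}'

    # Dogpile-safe read-through: only the lock holder runs the producer.
    data = await cache.get_or_set("users:profile:42", load_profile, ttl=60, tags=["users:42"])
    print(data)

    # Drop every key tagged for this entity, then version-bump the whole namespace.
    await cache.invalidate_tags(["users:42"])
    await cache.bump_namespace("users")


asyncio.run(main())
```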
nlbone/adapters/cache/memory.py ADDED
@@ -0,0 +1,104 @@
+ import json, threading, time
+ from typing import Optional, Iterable, Any, Mapping, Sequence, Dict, Set
+ from nlbone.core.ports.cache import CachePort
+
+
+ class InMemoryCache(CachePort):
+     def __init__(self):
+         self._data: Dict[str, tuple[bytes, Optional[float]]] = {}
+         self._tags: Dict[str, Set[str]] = {}
+         self._ns_ver: Dict[str, int] = {}
+         self._lock = threading.RLock()
+
+     def _expired(self, key: str) -> bool:
+         v = self._data.get(key)
+         if not v: return True
+         _, exp = v
+         return exp is not None and time.time() > exp
+
+     def _gc(self, key: str) -> None:
+         if self._expired(key):
+             self._data.pop(key, None)
+
+     def _attach_tags(self, key: str, tags: Optional[Iterable[str]]) -> None:
+         if not tags: return
+         for t in tags:
+             self._tags.setdefault(t, set()).add(key)
+
+     def get(self, key: str) -> Optional[bytes]:
+         with self._lock:
+             self._gc(key)
+             v = self._data.get(key)
+             return v[0] if v else None
+
+     def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
+         with self._lock:
+             exp = None if ttl is None else time.time() + ttl
+             self._data[key] = (value, exp)
+             self._attach_tags(key, tags)
+
+     def delete(self, key: str) -> None:
+         with self._lock:
+             self._data.pop(key, None)
+             for s in self._tags.values():
+                 s.discard(key)
+
+     def exists(self, key: str) -> bool:
+         return self.get(key) is not None
+
+     def ttl(self, key: str) -> Optional[int]:
+         with self._lock:
+             self._gc(key)
+             v = self._data.get(key)
+             if not v: return None
+             _, exp = v
+             if exp is None: return None
+             rem = int(exp - time.time())
+             return rem if rem >= 0 else 0
+
+     def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
+         return [self.get(k) for k in keys]
+
+     def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None,
+              tags: Optional[Iterable[str]] = None) -> None:
+         for k, v in items.items():
+             self.set(k, v, ttl=ttl, tags=tags)
+
+     def get_json(self, key: str) -> Optional[Any]:
+         b = self.get(key)
+         return None if b is None else json.loads(b)
+
+     def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None,
+                  tags: Optional[Iterable[str]] = None) -> None:
+         self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)
+
+     def invalidate_tags(self, tags: Iterable[str]) -> int:
+         removed = 0
+         with self._lock:
+             for t in tags:
+                 keys = self._tags.pop(t, set())
+                 for k in keys:
+                     if k in self._data:
+                         self._data.pop(k, None)
+                         removed += 1
+         return removed
+
+     def bump_namespace(self, namespace: str) -> int:
+         with self._lock:
+             self._ns_ver[namespace] = self._ns_ver.get(namespace, 0) + 1
+             return self._ns_ver[namespace]
+
+     def clear_namespace(self, namespace: str) -> int:
+         with self._lock:
+             keys = [k for k in self._data.keys() if k.startswith(namespace + ":")]
+             for k in keys: self.delete(k)
+             return len(keys)
+
+     def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
+         with self._lock:
+             b = self.get(key)
+             if b is not None:
+                 return b
+             val: bytes = producer()
+             self.set(key, val, ttl=ttl, tags=tags)
+             return val
nlbone/adapters/cache/pubsub_listener.py ADDED
@@ -0,0 +1,42 @@
+ from __future__ import annotations
+ import asyncio
+ import json
+ from typing import Awaitable, Callable, Optional
+ from redis.asyncio import Redis
+
+ async def run_cache_invalidation_listener(
+     redis: Redis,
+     channel: str = "cache:invalidate",
+     *,
+     on_tags: Optional[Callable[[list[str]], Awaitable[None]]] = None,
+     on_ns_bump: Optional[Callable[[str], Awaitable[None]]] = None,
+     on_ns_clear: Optional[Callable[[str], Awaitable[None]]] = None,
+     stop_event: Optional[asyncio.Event] = None,
+ ) -> None:
+     pubsub = redis.pubsub()
+     await pubsub.subscribe(channel)
+     try:
+         while True:
+             if stop_event and stop_event.is_set():
+                 break
+             message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)
+             if not message:
+                 await asyncio.sleep(0.05)
+                 continue
+             try:
+                 data = json.loads(message["data"])
+             except Exception:
+                 continue
+
+             if "tags" in data and on_tags:
+                 tags = data.get("tags") or []
+                 await on_tags(list(tags))
+             if "ns_bump" in data and on_ns_bump:
+                 await on_ns_bump(str(data["ns_bump"]))
+             if "ns_clear" in data and on_ns_clear:
+                 await on_ns_clear(str(data["ns_clear"]))
+     finally:
+         try:
+             await pubsub.unsubscribe(channel)
+         finally:
+             await pubsub.close()
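
A sketch of how the listener above might be wired so that invalidations published by other processes are mirrored into a process-local InMemoryCache (illustrative only, not part of the package; module paths follow the RECORD at the end of this diff, and a reachable Redis is assumed):

```python
import asyncio

from redis.asyncio import Redis

from nlbone.adapters.cache.memory import InMemoryCache
from nlbone.adapters.cache.pubsub_listener import run_cache_invalidation_listener

local_cache = InMemoryCache()  # hypothetical per-process L1 cache


async def on_tags(tags: list[str]) -> None:
    # Mirror remote tag invalidations into the local cache.
    local_cache.invalidate_tags(tags)


async def main() -> None:
    redis = Redis.from_url("redis://localhost:6379/0")
    stop = asyncio.Event()
    listener = asyncio.create_task(
        run_cache_invalidation_listener(redis, "cache:invalidate", on_tags=on_tags, stop_event=stop)
    )
    await asyncio.sleep(30)  # ... the application does its work here ...
    stop.set()
    await listener


asyncio.run(main())
```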
nlbone/adapters/cache/redis.py ADDED
@@ -0,0 +1,136 @@
+ from __future__ import annotations
+ import json, os, time
+ from typing import Optional, Iterable, Any, Mapping, Sequence, List, Set
+ import redis  # redis-py (sync)
+ from nlbone.core.ports.cache import CachePort
+
+
+
+ def _nsver_key(ns: str) -> str: return f"nsver:{ns}"
+ def _tag_key(tag: str) -> str: return f"tag:{tag}"
+
+ class RedisCache(CachePort):
+     def __init__(self, url: str):
+         self.r = redis.Redis.from_url(url, decode_responses=False)
+
+     def _current_ver(self, ns: str) -> int:
+         v = self.r.get(_nsver_key(ns))
+         return int(v) if v else 1
+
+     def _full_key(self, key: str) -> str:
+         try:
+             ns, rest = key.split(":", 1)
+         except ValueError:
+             ns, rest = "app", key
+         ver = self._current_ver(ns)
+         return f"{ns}:{ver}:{rest}"
+
+     def get(self, key: str) -> Optional[bytes]:
+         fk = self._full_key(key)
+         return self.r.get(fk)
+
+     def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
+         fk = self._full_key(key)
+         if ttl is None:
+             self.r.set(fk, value)
+         else:
+             self.r.setex(fk, ttl, value)
+         if tags:
+             pipe = self.r.pipeline()
+             for t in tags:
+                 pipe.sadd(_tag_key(t), fk)
+             pipe.execute()
+
+     def delete(self, key: str) -> None:
+         fk = self._full_key(key)
+         self.r.delete(fk)
+
+     def exists(self, key: str) -> bool:
+         return bool(self.get(key))
+
+     def ttl(self, key: str) -> Optional[int]:
+         fk = self._full_key(key)
+         t = self.r.ttl(fk)
+         return None if t < 0 else int(t)
+
+     def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]:
+         fks = [self._full_key(k) for k in keys]
+         return self.r.mget(fks)
+
+     def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
+         pipe = self.r.pipeline()
+         if ttl is None:
+             for k, v in items.items():
+                 pipe.set(self._full_key(k), v)
+         else:
+             for k, v in items.items():
+                 pipe.setex(self._full_key(k), ttl, v)
+         pipe.execute()
+         if tags:
+             pipe = self.r.pipeline()
+             for t in tags:
+                 for k in items.keys():
+                     pipe.sadd(_tag_key(t), self._full_key(k))
+             pipe.execute()
+
+     def get_json(self, key: str) -> Optional[Any]:
+         b = self.get(key)
+         return None if b is None else json.loads(b)
+
+     def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None:
+         self.set(key, json.dumps(value).encode("utf-8"), ttl=ttl, tags=tags)
+
+     def invalidate_tags(self, tags: Iterable[str]) -> int:
+         removed = 0
+         pipe = self.r.pipeline()
+         for t in tags:
+             tk = _tag_key(t)
+             keys = self.r.smembers(tk)
+             if keys:
+                 pipe.delete(*keys)
+             pipe.delete(tk)
+             removed += len(keys or [])
+         pipe.execute()
+         try:
+             ch = os.getenv("NLBONE_REDIS_INVALIDATE_CHANNEL", "cache:invalidate")
+             self.r.publish(ch, json.dumps({"tags": list(tags)}).encode("utf-8"))
+         except Exception:
+             pass
+         return removed
+
+     def bump_namespace(self, namespace: str) -> int:
+         v = self.r.incr(_nsver_key(namespace))
+         return int(v)
+
+     def clear_namespace(self, namespace: str) -> int:
+         cnt = 0
+         cursor = 0
+         pattern = f"{namespace}:*"
+         while True:
+             cursor, keys = self.r.scan(cursor=cursor, match=pattern, count=1000)
+             if keys:
+                 self.r.delete(*keys); cnt += len(keys)
+             if cursor == 0: break
+         return cnt
+
+     def get_or_set(self, key: str, producer, *, ttl: int, tags=None) -> bytes:
+         fk = self._full_key(key)
+         val = self.r.get(fk)
+         if val is not None:
+             return val
+         lock_key = f"lock:{fk}"
+         got = self.r.set(lock_key, b"1", nx=True, ex=10)
+         if got:
+             try:
+                 produced: bytes = producer()
+                 self.set(key, produced, ttl=ttl, tags=tags)
+                 return produced
+             finally:
+                 self.r.delete(lock_key)
+         time.sleep(0.05)
+         val2 = self.r.get(fk)
+         if val2 is not None:
+             return val2
+         produced: bytes = producer()
+         self.set(key, produced, ttl=ttl, tags=tags)
+         return produced
nlbone/adapters/http_clients/uploadchi.py CHANGED
@@ -91,7 +91,7 @@ class UploadchiClient(FileServicePort):
          tok = _resolve_token(token)
          r = self._client.post(
              f"{self._base_url}/{file_id}/commit",
-             headers=_auth_headers(tok),
+             headers=_auth_headers(tok or self._token_provider.get_access_token()),
          )
          if r.status_code not in (204, 200):
              raise UploadchiError(r.status_code, r.text)
@@ -102,7 +102,7 @@ class UploadchiClient(FileServicePort):
          tok = _resolve_token(token)
          r = self._client.post(
              f"{self._base_url}/{file_id}/rollback",
-             headers=_auth_headers(tok),
+             headers=_auth_headers(tok or self._token_provider.get_access_token()),
          )
          if r.status_code not in (204, 200):
              raise UploadchiError(r.status_code, r.text)
nlbone/adapters/http_clients/uploadchi_async.py CHANGED
@@ -46,7 +46,7 @@ class UploadchiAsyncClient(AsyncFileServicePort):
              raise UploadchiError(detail="token_provider is not provided", status=400)
          tok = _resolve_token(token)
          r = await self._client.post(
-             f"/{file_id}/commit", headers=_auth_headers(tok)
+             f"/{file_id}/commit", headers=_auth_headers(tok or self._token_provider.get_access_token())
          )
          if r.status_code not in (204, 200):
              raise UploadchiError(r.status_code, await r.aread())
@@ -56,7 +56,7 @@ class UploadchiAsyncClient(AsyncFileServicePort):
              raise UploadchiError(detail="token_provider is not provided", status=400)
          tok = _resolve_token(token)
          r = await self._client.post(
-             f"/{file_id}/rollback", headers=_auth_headers(tok)
+             f"/{file_id}/rollback", headers=_auth_headers(tok or self._token_provider.get_access_token())
          )
          if r.status_code not in (204, 200):
              raise UploadchiError(r.status_code, await r.aread())
nlbone/config/settings.py CHANGED
@@ -65,6 +65,8 @@ class Settings(BaseSettings):
      # Messaging / Cache
      # ---------------------------
      REDIS_URL: str = Field(default="redis://localhost:6379/0")
+     CACHE_BACKEND: Literal["memory", "redis"] = Field(default="memory")
+     CACHE_DEFAULT_TTL_S: int = Field(default=300)

      # --- Event bus / Outbox ---
      EVENT_BUS_BACKEND: Literal["inmemory"] = Field(default="inmemory")
nlbone/container.py CHANGED
@@ -6,12 +6,16 @@ from dependency_injector import containers, providers

  from nlbone.adapters.auth.keycloak import KeycloakAuthService
  from nlbone.adapters.auth.token_provider import ClientTokenProvider
+ from nlbone.adapters.cache.async_redis import AsyncRedisCache
+ from nlbone.adapters.cache.memory import InMemoryCache
+ from nlbone.adapters.cache.redis import RedisCache
  from nlbone.adapters.db.postgres import AsyncSqlAlchemyUnitOfWork, SqlAlchemyUnitOfWork
  from nlbone.adapters.db.postgres.engine import get_async_session_factory, get_sync_session_factory
  from nlbone.adapters.http_clients.uploadchi import UploadchiClient
  from nlbone.adapters.http_clients.uploadchi_async import UploadchiAsyncClient
  from nlbone.adapters.messaging import InMemoryEventBus
  from nlbone.core.ports import EventBusPort
+ from nlbone.core.ports.cache import CachePort, AsyncCachePort
  from nlbone.core.ports.files import AsyncFileServicePort, FileServicePort


@@ -36,6 +40,18 @@ class Container(containers.DeclarativeContainer):
      afiles_service: providers.Singleton[AsyncFileServicePort] = providers.Singleton(UploadchiAsyncClient,
                                                                                      token_provider=token_provider)

+     cache: providers.Singleton[CachePort] = providers.Selector(
+         config.CACHE_BACKEND,
+         memory=providers.Singleton(InMemoryCache),
+         redis=providers.Singleton(RedisCache, url=config.REDIS_URL),
+     )
+
+     async_cache: providers.Singleton[AsyncCachePort] = providers.Selector(
+         config.CACHE_BACKEND,
+         memory=providers.Singleton(InMemoryCache),
+         redis=providers.Singleton(AsyncRedisCache, url=config.REDIS_URL),
+     )
+

  def create_container(settings: Optional[Any] = None) -> Container:
      c = Container()
nlbone/core/ports/cache.py ADDED
@@ -0,0 +1,37 @@
+ from typing import Protocol, Optional, Iterable, Any, Mapping, Sequence, Tuple, TypeVar, Callable
+
+ T = TypeVar("T")
+
+ class CachePort(Protocol):
+     def get(self, key: str) -> Optional[bytes]: ...
+     def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
+     def delete(self, key: str) -> None: ...
+     def exists(self, key: str) -> bool: ...
+     def ttl(self, key: str) -> Optional[int]: ...
+
+     def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]: ...
+     def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
+
+     def get_json(self, key: str) -> Optional[Any]: ...
+     def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
+
+     def invalidate_tags(self, tags: Iterable[str]) -> int: ...
+     def bump_namespace(self, namespace: str) -> int: ...  # versioned keys
+     def clear_namespace(self, namespace: str) -> int: ...
+
+     def get_or_set(self, key: str, producer: Callable[[], bytes], *, ttl: int, tags: Optional[Iterable[str]] = None) -> bytes: ...
+
+ class AsyncCachePort(Protocol):
+     async def get(self, key: str) -> Optional[bytes]: ...
+     async def set(self, key: str, value: bytes, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
+     async def delete(self, key: str) -> None: ...
+     async def exists(self, key: str) -> bool: ...
+     async def ttl(self, key: str) -> Optional[int]: ...
+     async def mget(self, keys: Sequence[str]) -> list[Optional[bytes]]: ...
+     async def mset(self, items: Mapping[str, bytes], *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
+     async def get_json(self, key: str) -> Optional[Any]: ...
+     async def set_json(self, key: str, value: Any, *, ttl: Optional[int] = None, tags: Optional[Iterable[str]] = None) -> None: ...
+     async def invalidate_tags(self, tags: Iterable[str]) -> int: ...
+     async def bump_namespace(self, namespace: str) -> int: ...
+     async def clear_namespace(self, namespace: str) -> int: ...
+     async def get_or_set(self, key: str, producer, *, ttl: int, tags: Optional[Iterable[str]] = None) -> bytes: ...
nlbone/utils/cache.py ADDED
@@ -0,0 +1,196 @@
+ import asyncio
+ import inspect
+ import json
+ from typing import Any, Callable, Iterable, Optional
+
+ from makefun import wraps as mf_wraps
+ from nlbone.utils.cache_registry import get_cache
+
+ try:
+     from pydantic import BaseModel  # v1/v2
+ except Exception:  # pragma: no cover
+     class BaseModel:  # minimal fallback
+         pass
+
+
+ # -------- helpers --------
+
+ def _bind(func: Callable, args, kwargs):
+     sig = inspect.signature(func)
+     bound = sig.bind_partial(*args, **kwargs)
+     bound.apply_defaults()
+     return bound
+
+
+ def _key_from_template(
+     tpl: Optional[str],
+     func: Callable,
+     args,
+     kwargs,
+ ) -> str:
+     """Format key template with bound arguments or build a stable default."""
+     bound = _bind(func, args, kwargs)
+     if tpl:
+         return tpl.format(**bound.arguments)
+
+     # Default key: module:qualname:hash of the bound arguments
+     payload = json.dumps(bound.arguments, sort_keys=True, default=str)
+     return f"{func.__module__}:{func.__qualname__}:{hash(payload)}"
+
+
+ def _format_tags(
+     tag_tpls: Optional[Iterable[str]],
+     func: Callable,
+     args,
+     kwargs,
+ ) -> list[str] | None:
+     if not tag_tpls:
+         return None
+     bound = _bind(func, args, kwargs)
+     return [t.format(**bound.arguments) for t in tag_tpls]
+
+
+ def default_serialize(val: Any) -> bytes:
+     """Serialize BaseModel (v2/v1) or JSON-serializable data to bytes."""
+     if isinstance(val, BaseModel):
+         if hasattr(val, "model_dump_json"):  # pydantic v2
+             return val.model_dump_json().encode("utf-8")
+         if hasattr(val, "json"):  # pydantic v1
+             return val.json().encode("utf-8")
+     return json.dumps(val, default=str).encode("utf-8")
+
+
+ def default_deserialize(b: bytes) -> Any:
+     return json.loads(b)
+
+
+ def _is_async_method(obj: Any, name: str) -> bool:
+     meth = getattr(obj, name, None)
+     return asyncio.iscoroutinefunction(meth)
+
+
+ def _run_maybe_async(func: Callable, *args, **kwargs):
+     """Call a function that may be async from sync context."""
+     result = func(*args, **kwargs)
+     if inspect.isawaitable(result):
+         return asyncio.run(result)
+     return result
+
+
+ # -------- cache decorators --------
+
+ def cached(
+     *,
+     ttl: int,
+     key: str | None = None,
+     tags: Iterable[str] | None = None,
+     serializer: Callable[[Any], bytes] = default_serialize,
+     deserializer: Callable[[bytes], Any] = default_deserialize,
+     cache_resolver: Optional[Callable[[], Any]] = None,
+ ):
+     """
+     Framework-agnostic caching for SYNC or ASYNC callables.
+     - Preserves function signature (good for FastAPI/OpenAPI).
+     - Works with sync/async cache backends (CachePort / AsyncCachePort).
+     - `key` & `tags` are string templates, e.g. "file:{file_id}".
+     """
+     def deco(func: Callable):
+         is_async_func = asyncio.iscoroutinefunction(func)
+
+         if is_async_func:
+             @mf_wraps(func)
+             async def aw(*args, **kwargs):
+                 cache = (cache_resolver or get_cache)()
+                 k = _key_from_template(key, func, args, kwargs)
+                 tg = _format_tags(tags, func, args, kwargs)
+
+                 # GET
+                 if _is_async_method(cache, "get"):
+                     cached_bytes = await cache.get(k)
+                 else:
+                     cached_bytes = cache.get(k)
+
+                 if cached_bytes is not None:
+                     return deserializer(cached_bytes)
+
+                 # MISS -> compute
+                 result = await func(*args, **kwargs)
+
+                 # SET
+                 data = serializer(result)
+                 if _is_async_method(cache, "set"):
+                     await cache.set(k, data, ttl=ttl, tags=tg)
+                 else:
+                     cache.set(k, data, ttl=ttl, tags=tg)
+
+                 return result
+
+             return aw
+
+         # SYNC callable
+         @mf_wraps(func)
+         def sw(*args, **kwargs):
+             cache = (cache_resolver or get_cache)()
+             k = _key_from_template(key, func, args, kwargs)
+             tg = _format_tags(tags, func, args, kwargs)
+
+             # GET (may be async)
+             if _is_async_method(cache, "get"):
+                 cached_bytes = _run_maybe_async(cache.get, k)
+             else:
+                 cached_bytes = cache.get(k)
+
+             if cached_bytes is not None:
+                 return deserializer(cached_bytes)
+
+             # MISS -> compute
+             result = func(*args, **kwargs)
+
+             # SET (may be async)
+             data = serializer(result)
+             if _is_async_method(cache, "set"):
+                 _run_maybe_async(cache.set, k, data, ttl=ttl, tags=tg)
+             else:
+                 cache.set(k, data, ttl=ttl, tags=tg)
+
+             return result
+
+         return sw
+
+     return deco
+
+
+ def invalidate_by_tags(tags_builder: Callable[..., Iterable[str]]):
+     """
+     Invalidate computed tags after the wrapped function finishes.
+     Works with sync or async functions and cache backends.
+     """
+     def deco(func: Callable):
+         is_async_func = asyncio.iscoroutinefunction(func)
+
+         if is_async_func:
+             @mf_wraps(func)
+             async def aw(*args, **kwargs):
+                 out = await func(*args, **kwargs)
+                 cache = get_cache()
+                 tags = list(tags_builder(*args, **kwargs))
+                 if _is_async_method(cache, "invalidate_tags"):
+                     await cache.invalidate_tags(tags)
+                 else:
+                     cache.invalidate_tags(tags)
+                 return out
+             return aw
+
+         @mf_wraps(func)
+         def sw(*args, **kwargs):
+             out = func(*args, **kwargs)
+             cache = get_cache()
+             tags = list(tags_builder(*args, **kwargs))
+             if _is_async_method(cache, "invalidate_tags"):
+                 _run_maybe_async(cache.invalidate_tags, tags)
+             else:
+                 cache.invalidate_tags(tags)
+             return out
+         return sw
+
+     return deco
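
A usage sketch for the decorators above (illustrative, not part of the package; the function names and key/tag templates are invented, and an InMemoryCache is registered through the cache registry shown later in this diff):

```python
from nlbone.adapters.cache.memory import InMemoryCache
from nlbone.utils.cache import cached, invalidate_by_tags
from nlbone.utils.cache_registry import set_cache_resolver

# The decorators resolve their backend through the registry.
_cache = InMemoryCache()
set_cache_resolver(lambda: _cache)


@cached(ttl=60, key="files:meta:{file_id}", tags=["files:{file_id}"])
def get_file_meta(file_id: int) -> dict:
    # Stand-in for an expensive read path (DB / HTTP).
    return {"id": file_id, "status": "ready"}


@invalidate_by_tags(lambda file_id, *_, **__: [f"files:{file_id}"])
def update_file(file_id: int, status: str) -> None:
    ...  # write path; the matching tag is invalidated afterwards


print(get_file_meta(7))     # miss: computes, serializes to JSON bytes, stores
print(get_file_meta(7))     # hit: deserialized from the cache
update_file(7, "archived")  # drops every entry tagged "files:7"
```

Note that cache hits return whatever `default_deserialize` produces (plain JSON data), so returning dicts here keeps hit and miss results identical.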
nlbone/utils/cache_keys.py ADDED
@@ -0,0 +1,30 @@
+ import hashlib
+ import json
+ import random
+ from typing import Any, Mapping
+
+ def _stable_params(params: Mapping[str, Any]) -> str:
+     return json.dumps(params, sort_keys=True, separators=(",", ":"))
+
+ def make_key(ns: str, *parts: str) -> str:
+     safe_parts = [p.replace(" ", "_") for p in parts if p]
+     return f"{ns}:{':'.join(safe_parts)}" if safe_parts else f"{ns}:root"
+
+ def make_param_key(ns: str, base: str, params: Mapping[str, Any]) -> str:
+     payload = _stable_params(params)
+     digest = hashlib.sha256(payload.encode("utf-8")).hexdigest()[:16]
+     return f"{ns}:{base}:{digest}"
+
+ def tag_entity(ns: str, entity_id: Any) -> str:
+     return f"{ns}:{entity_id}"
+
+ def tag_list(ns: str, **filters) -> str:
+     if not filters:
+         return f"{ns}:list"
+     payload = _stable_params(filters)
+     digest = hashlib.md5(payload.encode("utf-8")).hexdigest()[:12]
+     return f"{ns}:list:{digest}"
+
+ def ttl_with_jitter(base_ttl: int, *, jitter_ratio: float = 0.1) -> int:
+     jitter = int(base_ttl * jitter_ratio)
+     return base_ttl + random.randint(-jitter, jitter)
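
Illustrative values for the key helpers above (digests abbreviated; the namespaces and filters are invented for the example):

```python
from nlbone.utils.cache_keys import (
    make_key, make_param_key, tag_entity, tag_list, ttl_with_jitter,
)

make_key("files", "meta", "42")                             # 'files:meta:42'
make_param_key("files", "search", {"q": "pdf", "page": 2})  # 'files:search:<16-char sha256 prefix>'
tag_entity("files", 42)                                     # 'files:42'
tag_list("files", owner_id=7)                               # 'files:list:<12-char md5 prefix>'
ttl_with_jitter(300)                                        # somewhere in 270..330 seconds
```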
nlbone/utils/cache_registry.py ADDED
@@ -0,0 +1,23 @@
+ from typing import Callable, Optional, TypeVar
+ from contextvars import ContextVar
+
+ T = TypeVar("T")
+
+ _global_resolver: Optional[Callable[[], T]] = None
+
+ _ctx_resolver: ContextVar[Optional[Callable[[], T]]] = ContextVar("_ctx_resolver", default=None)
+
+ def set_cache_resolver(fn: Callable[[], T]) -> None:
+     """Set process-wide cache resolver (e.g., lambda: container.cache())."""
+     global _global_resolver
+     _global_resolver = fn
+
+ def set_context_cache_resolver(fn: Optional[Callable[[], T]]) -> None:
+     """Override resolver in current context (useful in tests/background tasks)."""
+     _ctx_resolver.set(fn)
+
+ def get_cache() -> T:
+     fn = _ctx_resolver.get() or _global_resolver
+     if fn is None:
+         raise RuntimeError("Cache resolver not configured. Call set_cache_resolver(...) first.")
+     return fn()
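
A wiring sketch for the registry above (illustrative only; the docstring's `lambda: container.cache()` works the same way once the container's config is loaded):

```python
from nlbone.adapters.cache.memory import InMemoryCache
from nlbone.adapters.cache.redis import RedisCache
from nlbone.utils.cache_registry import (
    get_cache, set_cache_resolver, set_context_cache_resolver,
)

# Process-wide default backend for the @cached / @invalidate_by_tags decorators.
redis_cache = RedisCache("redis://localhost:6379/0")
set_cache_resolver(lambda: redis_cache)

# Context-local override, e.g. in a test, without touching the global resolver.
fake = InMemoryCache()
set_context_cache_resolver(lambda: fake)
assert get_cache() is fake
```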
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nlbone
- Version: 0.4.3
+ Version: 0.5.0
  Summary: Backbone package for interfaces and infrastructure in Python projects
  Author-email: Amir Hosein Kahkbazzadeh <a.khakbazzadeh@gmail.com>
  License: MIT
@@ -11,6 +11,7 @@ Requires-Dist: dependency-injector>=4.48.1
  Requires-Dist: elasticsearch==8.14.0
  Requires-Dist: fastapi>=0.116
  Requires-Dist: httpx>=0.27
+ Requires-Dist: makefun>=1.16.0
  Requires-Dist: psycopg>=3.2.9
  Requires-Dist: pydantic-settings>=2.0
  Requires-Dist: pydantic>=2.0
@@ -1,10 +1,15 @@
  nlbone/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nlbone/container.py,sha256=wmamsFU0Be6DlEqhLW30J2w4zVCwNlcZDd7HxkzWil0,2481
+ nlbone/container.py,sha256=gbO8D23HOCHe2lpZpHCvpb5zPMSJnShK9AOna1UQFWg,3169
  nlbone/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nlbone/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nlbone/adapters/auth/__init__.py,sha256=hkDHvsFhw_UiOHG9ZSMqjiAhK4wumEforitveSZswVw,42
  nlbone/adapters/auth/keycloak.py,sha256=dfAxODiARfR8y3FKoWNo9fjfb6QyWd_Qr7AbJ0E78AM,2729
  nlbone/adapters/auth/token_provider.py,sha256=NhqjqTUsoZO4gbK-cybs0OkKydFN7CPTxAiypEw081o,1433
+ nlbone/adapters/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ nlbone/adapters/cache/async_redis.py,sha256=E61tpBwAElMoGgyJ8CWO5G81n7u1-Wi_kVRDoh5rJuM,6206
+ nlbone/adapters/cache/memory.py,sha256=lRJqMdD2lbojndQ_dJ7AulfhSYY_uqjgXvd4ZT_J8co,3655
+ nlbone/adapters/cache/pubsub_listener.py,sha256=2y6DbWsERXlMOkmJSJMg8hNU9MTGwR7BhwQRveivh50,1457
+ nlbone/adapters/cache/redis.py,sha256=gMNfUIk1HkeXVBmtAtVchcr59ll06E4wam9rGWAhalM,4535
  nlbone/adapters/db/__init__.py,sha256=saW-wN4E0NZ2_ldi-nrm5AgsH7EULNSa62lYMwfy1oo,252
  nlbone/adapters/db/postgres/__init__.py,sha256=6JYJH0xZs3aR-zuyMpRhsdzFugmqz8nprwTQLprqhZc,313
  nlbone/adapters/db/postgres/audit.py,sha256=zFzL-pXmfjcp5YLx6vBYczprsJjEPxSYKhQNR3WjKL0,4675
@@ -18,8 +23,8 @@ nlbone/adapters/db/redis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
  nlbone/adapters/db/redis/client.py,sha256=XAKcmU0lpPvWPMS0fChVQ3iSJfHV1g4bMOCgJaj2bCI,512
  nlbone/adapters/http_clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nlbone/adapters/http_clients/email_gateway.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nlbone/adapters/http_clients/uploadchi.py,sha256=iMLbXUXqi60gQMEI7wfaqWG0G7p9fQN7otT9c8npnLQ,5470
- nlbone/adapters/http_clients/uploadchi_async.py,sha256=wmb2XIUNkowh48GJwttNZ_STgsFQdGKKRZc_luYNCu8,4609
+ nlbone/adapters/http_clients/uploadchi.py,sha256=-goyqa3DNnDdkiIvrJRMlk5KVCaRNy34pPYklLHV04w,5556
+ nlbone/adapters/http_clients/uploadchi_async.py,sha256=9QxunVxPEyf6LxcGaQ9I2JFtVB6s-JtR9f7RtND8pIk,4695
  nlbone/adapters/messaging/__init__.py,sha256=UDAwu3s-JQmOZjWz2Nu0SgHhnkbeOhKDH_zLD75oWMY,40
  nlbone/adapters/messaging/event_bus.py,sha256=w-NPwDiPMLFPU_enRQCtfQXOALsXfg31u57R8sG_-1U,781
  nlbone/adapters/messaging/redis.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -27,7 +32,7 @@ nlbone/adapters/percolation/__init__.py,sha256=viq5WZqcSLlRBF5JwuyTD_IZaNWfpKzGJ
  nlbone/adapters/percolation/connection.py,sha256=xZ-OtQVbyQYH83TUizS0UWI85Iic-AhUjiuyzO0e46s,331
  nlbone/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nlbone/config/logging.py,sha256=rGQz9W5ZgUFXBK74TFmTuwx_WMJhD8zPN39zfKVxwnI,4115
- nlbone/config/settings.py,sha256=xxdZQDQJ7wSEGODKljtQWcfITXbSHoqAXOlQ9vhNSe4,3474
+ nlbone/config/settings.py,sha256=-FcCbS30yDK-ZXlC_336T4yea7sVjnzNak22wSptOXI,3596
  nlbone/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nlbone/core/application/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nlbone/core/application/base_worker.py,sha256=uHqglsd33jXl_0kmkFlB4KQ5NdI1wArcOeQmdcifPQc,1192
@@ -41,6 +46,7 @@ nlbone/core/domain/events.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
  nlbone/core/domain/models.py,sha256=e2ig7PMBBpmc8pdHLNMnXhucMXr9OUq-G7bKGTq9Qj0,1458
  nlbone/core/ports/__init__.py,sha256=gx-Ubj7h-1vvnu56sNnRqmer7HHfW3rX2WLl-0AX5U0,214
  nlbone/core/ports/auth.py,sha256=Gh0yQsxx2OD6pDH2_p-khsA-bVoypP1juuqMoSfjZUo,493
+ nlbone/core/ports/cache.py,sha256=C9exWYPZsppCpkrAMiGfJuf4ehHkibtFfvB1aFbWuO4,2257
  nlbone/core/ports/event_bus.py,sha256=_Om1GOOT-F325oV6_LJXtLdx4vu5i7KrpTDD3qPJXU0,325
  nlbone/core/ports/files.py,sha256=7Ov2ITYRpPwwDTZGCeNVISg8e3A9l08jbOgpTImgfK8,1863
  nlbone/core/ports/messaging.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -69,11 +75,14 @@ nlbone/interfaces/cli/main.py,sha256=65XXNmH0dX9Lib_yW5iQXo7wp_GRFwx9xXDYgy2LJtY
  nlbone/interfaces/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nlbone/interfaces/jobs/sync_tokens.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nlbone/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ nlbone/utils/cache.py,sha256=3sKtWoGBlrEvkg74r4C-Sx8u41OTup_G4s-Q57IaOXg,5921
+ nlbone/utils/cache_keys.py,sha256=a1yRMUuyRJ2-CswjtgVkLcJAeT2QmThLQ5kQWvpOKL4,1069
+ nlbone/utils/cache_registry.py,sha256=0csax1-GmKBcsZmQYWI4Bs0X9_BMo6Jdoac-e9Zusv8,819
  nlbone/utils/context.py,sha256=MmclJ24BG2uvSTg1IK7J-Da9BhVFDQ5ag4Ggs2FF1_w,1600
  nlbone/utils/redactor.py,sha256=JbbPs2Qtnz0zHN85BGPYQNWwBigXMSzmMEmmZZOTs_U,1277
  nlbone/utils/time.py,sha256=6e0A4_hG1rYDCrWoOklEGVJstBf8j9XSSTT7VNV2K9Y,1272
- nlbone-0.4.3.dist-info/METADATA,sha256=dKKof0RuBfc3ONT-R1n5qD_oJXaE4BIhqcezwx8NH8Q,2163
- nlbone-0.4.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- nlbone-0.4.3.dist-info/entry_points.txt,sha256=CpIL45t5nbhl1dGQPhfIIDfqqak3teK0SxPGBBr7YCk,59
- nlbone-0.4.3.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nlbone-0.4.3.dist-info/RECORD,,
+ nlbone-0.5.0.dist-info/METADATA,sha256=4EzJYpbkzQZ1LsKAYpY94NrDARztMctvov_bf1vjmfI,2194
+ nlbone-0.5.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ nlbone-0.5.0.dist-info/entry_points.txt,sha256=CpIL45t5nbhl1dGQPhfIIDfqqak3teK0SxPGBBr7YCk,59
+ nlbone-0.5.0.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ nlbone-0.5.0.dist-info/RECORD,,