cachu 0.2.3__tar.gz → 0.2.4__tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (48)
  1. {cachu-0.2.3 → cachu-0.2.4}/PKG-INFO +7 -9
  2. {cachu-0.2.3 → cachu-0.2.4}/README.md +6 -8
  3. {cachu-0.2.3 → cachu-0.2.4}/pyproject.toml +1 -1
  4. {cachu-0.2.3 → cachu-0.2.4}/setup.cfg +1 -1
  5. {cachu-0.2.3 → cachu-0.2.4}/src/cachu/__init__.py +6 -9
  6. cachu-0.2.4/src/cachu/backends/__init__.py +95 -0
  7. cachu-0.2.3/src/cachu/backends/async_memory.py → cachu-0.2.4/src/cachu/backends/memory.py +96 -5
  8. cachu-0.2.4/src/cachu/backends/redis.py +242 -0
  9. cachu-0.2.3/src/cachu/backends/async_sqlite.py → cachu-0.2.4/src/cachu/backends/sqlite.py +233 -4
  10. {cachu-0.2.3 → cachu-0.2.4}/src/cachu/config.py +0 -6
  11. cachu-0.2.4/src/cachu/decorator.py +537 -0
  12. {cachu-0.2.3 → cachu-0.2.4}/src/cachu/keys.py +8 -0
  13. cachu-0.2.4/src/cachu/operations.py +331 -0
  14. {cachu-0.2.3 → cachu-0.2.4}/src/cachu.egg-info/PKG-INFO +7 -9
  15. {cachu-0.2.3 → cachu-0.2.4}/src/cachu.egg-info/SOURCES.txt +0 -6
  16. {cachu-0.2.3 → cachu-0.2.4}/tests/test_async_redis.py +3 -6
  17. {cachu-0.2.3 → cachu-0.2.4}/tests/test_async_sqlite.py +4 -9
  18. {cachu-0.2.3 → cachu-0.2.4}/tests/test_clearing.py +56 -22
  19. {cachu-0.2.3 → cachu-0.2.4}/tests/test_config.py +1 -5
  20. {cachu-0.2.3 → cachu-0.2.4}/tests/test_ttl_isolation.py +24 -24
  21. cachu-0.2.3/src/cachu/async_decorator.py +0 -262
  22. cachu-0.2.3/src/cachu/async_operations.py +0 -178
  23. cachu-0.2.3/src/cachu/backends/__init__.py +0 -52
  24. cachu-0.2.3/src/cachu/backends/async_base.py +0 -50
  25. cachu-0.2.3/src/cachu/backends/async_redis.py +0 -141
  26. cachu-0.2.3/src/cachu/backends/memory.py +0 -102
  27. cachu-0.2.3/src/cachu/backends/redis.py +0 -131
  28. cachu-0.2.3/src/cachu/backends/sqlite.py +0 -240
  29. cachu-0.2.3/src/cachu/decorator.py +0 -252
  30. cachu-0.2.3/src/cachu/operations.py +0 -182
  31. {cachu-0.2.3 → cachu-0.2.4}/src/cachu/backends/file.py +0 -0
  32. {cachu-0.2.3 → cachu-0.2.4}/src/cachu/types.py +0 -0
  33. {cachu-0.2.3 → cachu-0.2.4}/src/cachu.egg-info/dependency_links.txt +0 -0
  34. {cachu-0.2.3 → cachu-0.2.4}/src/cachu.egg-info/requires.txt +0 -0
  35. {cachu-0.2.3 → cachu-0.2.4}/src/cachu.egg-info/top_level.txt +0 -0
  36. {cachu-0.2.3 → cachu-0.2.4}/tests/test_async_memory.py +0 -0
  37. {cachu-0.2.3 → cachu-0.2.4}/tests/test_defaultcache.py +0 -0
  38. {cachu-0.2.3 → cachu-0.2.4}/tests/test_delete_keys.py +0 -0
  39. {cachu-0.2.3 → cachu-0.2.4}/tests/test_disable.py +0 -0
  40. {cachu-0.2.3 → cachu-0.2.4}/tests/test_exclude_params.py +0 -0
  41. {cachu-0.2.3 → cachu-0.2.4}/tests/test_file_cache.py +0 -0
  42. {cachu-0.2.3 → cachu-0.2.4}/tests/test_integration.py +0 -0
  43. {cachu-0.2.3 → cachu-0.2.4}/tests/test_memory_cache.py +0 -0
  44. {cachu-0.2.3 → cachu-0.2.4}/tests/test_namespace.py +0 -0
  45. {cachu-0.2.3 → cachu-0.2.4}/tests/test_namespace_isolation.py +0 -0
  46. {cachu-0.2.3 → cachu-0.2.4}/tests/test_redis_cache.py +0 -0
  47. {cachu-0.2.3 → cachu-0.2.4}/tests/test_set_keys.py +0 -0
  48. {cachu-0.2.3 → cachu-0.2.4}/tests/test_sqlite_backend.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cachu
- Version: 0.2.3
+ Version: 0.2.4
  Summary: Flexible caching library with sync and async support for memory, file (SQLite), and Redis backends
  Author: bissli
  License-Expression: 0BSD
@@ -69,19 +69,17 @@ cachu.configure(
      key_prefix='v1:', # Prefix for all cache keys
      file_dir='/var/cache/app', # Directory for file cache
      redis_url='redis://localhost:6379/0', # Redis connection URL
-     redis_distributed=False, # Use distributed locks for Redis
  )
  ```

  ### Configuration Options

- | Option | Default | Description |
- | ------------------- | ---------------------------- | ------------------------------------------------- |
- | `backend` | `'memory'` | Default backend type |
- | `key_prefix` | `''` | Prefix for all cache keys (useful for versioning) |
- | `file_dir` | `'/tmp'` | Directory for file-based caches |
- | `redis_url` | `'redis://localhost:6379/0'` | Redis connection URL |
- | `redis_distributed` | `False` | Enable distributed locks for Redis |
+ | Option | Default | Description |
+ | ------------ | ---------------------------- | ------------------------------------------------- |
+ | `backend` | `'memory'` | Default backend type |
+ | `key_prefix` | `''` | Prefix for all cache keys (useful for versioning) |
+ | `file_dir` | `'/tmp'` | Directory for file-based caches |
+ | `redis_url` | `'redis://localhost:6379/0'` | Redis connection URL |

  ### Package Isolation

@@ -46,19 +46,17 @@ cachu.configure(
      key_prefix='v1:', # Prefix for all cache keys
      file_dir='/var/cache/app', # Directory for file cache
      redis_url='redis://localhost:6379/0', # Redis connection URL
-     redis_distributed=False, # Use distributed locks for Redis
  )
  ```

  ### Configuration Options

- | Option | Default | Description |
- | ------------------- | ---------------------------- | ------------------------------------------------- |
- | `backend` | `'memory'` | Default backend type |
- | `key_prefix` | `''` | Prefix for all cache keys (useful for versioning) |
- | `file_dir` | `'/tmp'` | Directory for file-based caches |
- | `redis_url` | `'redis://localhost:6379/0'` | Redis connection URL |
- | `redis_distributed` | `False` | Enable distributed locks for Redis |
+ | Option | Default | Description |
+ | ------------ | ---------------------------- | ------------------------------------------------- |
+ | `backend` | `'memory'` | Default backend type |
+ | `key_prefix` | `''` | Prefix for all cache keys (useful for versioning) |
+ | `file_dir` | `'/tmp'` | Directory for file-based caches |
+ | `redis_url` | `'redis://localhost:6379/0'` | Redis connection URL |

  ### Package Isolation

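For orientation, a complete `configure()` call that is valid after this change simply drops the removed `redis_distributed` keyword. This is a sketch built from the README example and the options table above, assuming `backend` is passed the same way as the other options:

```python
import cachu

# Option set documented for 0.2.4; redis_distributed is no longer accepted.
cachu.configure(
    backend='memory',                        # default backend type
    key_prefix='v1:',                        # prefix for all cache keys
    file_dir='/var/cache/app',               # directory for the file (SQLite) cache
    redis_url='redis://localhost:6379/0',    # Redis connection URL
)
```
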
@@ -1,6 +1,6 @@
  [project]
  name = "cachu"
- version = "0.2.3"
+ version = "0.2.4"
  description = "Flexible caching library with sync and async support for memory, file (SQLite), and Redis backends"
  readme = "README.md"
  license = "0BSD"
@@ -1,5 +1,5 @@
  [bumpversion]
- current_version = 0.2.3
+ current_version = 0.2.4
  commit = True
  tag = True

@@ -1,19 +1,16 @@
  """Flexible caching library with support for memory, file, and Redis backends.
  """
- __version__ = '0.2.3'
+ __version__ = '0.2.4'

- from .async_decorator import async_cache, clear_async_backends
- from .async_decorator import get_async_backend, get_async_cache_info
- from .async_operations import async_cache_clear, async_cache_delete
- from .async_operations import async_cache_get, async_cache_info
- from .async_operations import async_cache_set
  from .backends import AsyncBackend, Backend
  from .backends.redis import get_redis_client
  from .config import configure, disable, enable, get_all_configs, get_config
  from .config import is_disabled
- from .decorator import cache, get_backend
- from .operations import cache_clear, cache_delete, cache_get, cache_info
- from .operations import cache_set
+ from .decorator import async_cache, cache, clear_async_backends
+ from .decorator import get_async_backend, get_async_cache_info, get_backend
+ from .operations import async_cache_clear, async_cache_delete, async_cache_get
+ from .operations import async_cache_info, async_cache_set, cache_clear
+ from .operations import cache_delete, cache_get, cache_info, cache_set

  __all__ = [
      'configure',
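
The practical effect of this `__init__.py` change is that the async entry points now ship from the same modules as their sync counterparts and everything is re-exported at the package root; `cachu.async_decorator` and `cachu.async_operations` are gone. A quick sanity check of the 0.2.4 import surface (imports only — the decorator and operation signatures are not part of this diff):

```python
# Every name below is imported in cachu/__init__.py for 0.2.4.
from cachu import cache, async_cache, get_backend, get_async_backend
from cachu import cache_get, cache_set, cache_delete, cache_clear, cache_info
from cachu import async_cache_get, async_cache_set, async_cache_delete
from cachu import async_cache_clear, async_cache_info, clear_async_backends
from cachu import configure, enable, disable, is_disabled, get_config
from cachu import Backend, AsyncBackend, get_redis_client
```
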
@@ -0,0 +1,95 @@
+ """Cache backend implementations.
+ """
+ from abc import ABC, abstractmethod
+ from collections.abc import AsyncIterator, Iterator
+ from typing import Any
+
+ NO_VALUE = object()
+
+
+ class Backend(ABC):
+     """Abstract base class for cache backends.
+     """
+
+     @abstractmethod
+     def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found.
+         """
+
+     @abstractmethod
+     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+
+     @abstractmethod
+     def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+
+     @abstractmethod
+     def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+
+     @abstractmethod
+     def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+
+     @abstractmethod
+     def keys(self, pattern: str | None = None) -> Iterator[str]:
+         """Iterate over keys matching pattern.
+         """
+
+     @abstractmethod
+     def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+
+
+ class AsyncBackend(ABC):
+     """Abstract base class for async cache backends.
+     """
+
+     @abstractmethod
+     async def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found.
+         """
+
+     @abstractmethod
+     async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+
+     @abstractmethod
+     async def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+
+     @abstractmethod
+     async def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+
+     @abstractmethod
+     async def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+
+     @abstractmethod
+     async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
+         """Iterate over keys matching pattern.
+         """
+
+     @abstractmethod
+     async def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+
+     @abstractmethod
+     async def close(self) -> None:
+         """Close the backend and release resources.
+         """
+
+
+ __all__ = ['Backend', 'AsyncBackend', 'NO_VALUE']
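
The `NO_VALUE` sentinel above exists so that `None` stays a legitimate cached value; callers compare by identity rather than truthiness. A minimal cache-aside sketch against the `Backend` interface (the `fetch_user` helper and its placeholder lookup are illustrative, not part of cachu):

```python
from cachu.backends import NO_VALUE, Backend

def fetch_user(backend: Backend, user_id: int) -> dict:
    """Cache-aside lookup that works with any Backend implementation (illustrative)."""
    key = f'user:{user_id}'
    cached = backend.get(key)
    if cached is not NO_VALUE:        # identity check against the sentinel, not None
        return cached
    value = {'id': user_id}           # stand-in for the real, expensive lookup
    backend.set(key, value, ttl=300)  # TTL in seconds, per the ABC contract
    return value
```
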
@@ -1,14 +1,106 @@
- """Async memory cache backend implementation.
+ """Memory cache backend implementation.
  """
  import asyncio
  import fnmatch
  import pickle
+ import threading
  import time
- from collections.abc import AsyncIterator
+ from collections.abc import AsyncIterator, Iterator
  from typing import Any

- from . import NO_VALUE
- from .async_base import AsyncBackend
+ from . import NO_VALUE, AsyncBackend, Backend
+
+
+ class MemoryBackend(Backend):
+     """Thread-safe in-memory cache backend.
+     """
+
+     def __init__(self) -> None:
+         self._cache: dict[str, tuple[bytes, float, float]] = {}
+         self._lock = threading.RLock()
+
+     def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found or expired.
+         """
+         with self._lock:
+             entry = self._cache.get(key)
+             if entry is None:
+                 return NO_VALUE
+
+             pickled_value, created_at, expires_at = entry
+             if time.time() > expires_at:
+                 del self._cache[key]
+                 return NO_VALUE
+
+             return pickle.loads(pickled_value)
+
+     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+         with self._lock:
+             entry = self._cache.get(key)
+             if entry is None:
+                 return NO_VALUE, None
+
+             pickled_value, created_at, expires_at = entry
+             if time.time() > expires_at:
+                 del self._cache[key]
+                 return NO_VALUE, None
+
+             return pickle.loads(pickled_value), created_at
+
+     def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+         now = time.time()
+         pickled_value = pickle.dumps(value)
+         with self._lock:
+             self._cache[key] = (pickled_value, now, now + ttl)
+
+     def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+         with self._lock:
+             self._cache.pop(key, None)
+
+     def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+         with self._lock:
+             if pattern is None:
+                 count = len(self._cache)
+                 self._cache.clear()
+                 return count
+
+             keys_to_delete = [k for k in self._cache if fnmatch.fnmatch(k, pattern)]
+             for key in keys_to_delete:
+                 del self._cache[key]
+             return len(keys_to_delete)
+
+     def keys(self, pattern: str | None = None) -> Iterator[str]:
+         """Iterate over keys matching pattern.
+         """
+         now = time.time()
+         with self._lock:
+             all_keys = list(self._cache.keys())
+
+         for key in all_keys:
+             with self._lock:
+                 entry = self._cache.get(key)
+                 if entry is None:
+                     continue
+                 _, _, expires_at = entry
+                 if now > expires_at:
+                     del self._cache[key]
+                     continue
+
+             if pattern is None or fnmatch.fnmatch(key, pattern):
+                 yield key
+
+     def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+         return sum(1 for _ in self.keys(pattern))


  class AsyncMemoryBackend(AsyncBackend):
@@ -108,4 +200,3 @@ class AsyncMemoryBackend(AsyncBackend):
      async def close(self) -> None:
          """Close the backend (no-op for memory backend).
          """
-         pass
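
A short usage sketch of the new synchronous `MemoryBackend`; patterns are `fnmatch`-style globs, as in the implementation above, and misses come back as the `NO_VALUE` sentinel:

```python
from cachu.backends import NO_VALUE
from cachu.backends.memory import MemoryBackend

backend = MemoryBackend()
backend.set('greet:en', 'hello', ttl=60)
backend.set('greet:fr', 'bonjour', ttl=60)

value, created_at = backend.get_with_metadata('greet:en')  # ('hello', <epoch seconds>)
print(backend.count('greet:*'))                            # 2
print(backend.clear('greet:fr'))                           # 1 entry removed
print(backend.get('greet:fr') is NO_VALUE)                 # True
```
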
@@ -0,0 +1,242 @@
+ """Redis cache backend implementation.
+ """
+ import pickle
+ import struct
+ import time
+ from collections.abc import AsyncIterator, Iterator
+ from typing import TYPE_CHECKING, Any
+
+ from . import NO_VALUE, AsyncBackend, Backend
+
+ if TYPE_CHECKING:
+     import redis
+     import redis.asyncio as aioredis
+
+
+ _METADATA_FORMAT = 'd'
+ _METADATA_SIZE = struct.calcsize(_METADATA_FORMAT)
+
+
+ def _get_redis_module() -> Any:
+     """Import redis module, raising helpful error if not installed.
+     """
+     try:
+         import redis
+         return redis
+     except ImportError as e:
+         raise RuntimeError(
+             "Redis support requires the 'redis' package. Install with: pip install cachu[redis]"
+         ) from e
+
+
+ def _get_async_redis_module() -> Any:
+     """Import redis.asyncio module, raising helpful error if not installed.
+     """
+     try:
+         import redis.asyncio as aioredis
+         return aioredis
+     except ImportError as e:
+         raise RuntimeError(
+             "Async Redis support requires the 'redis' package (>=4.2.0). "
+             "Install with: pip install cachu[redis]"
+         ) from e
+
+
+ async def get_async_redis_client(url: str) -> 'aioredis.Redis':
+     """Create an async Redis client from URL.
+
+     Args:
+         url: Redis URL (e.g., 'redis://localhost:6379/0')
+     """
+     aioredis = _get_async_redis_module()
+     return aioredis.from_url(url)
+
+
+ def get_redis_client(url: str) -> 'redis.Redis':
+     """Create a Redis client from URL.
+
+     Args:
+         url: Redis URL (e.g., 'redis://localhost:6379/0')
+     """
+     redis_module = _get_redis_module()
+     return redis_module.from_url(url)
+
+
+ def _pack_value(value: Any, created_at: float) -> bytes:
+     """Pack value with creation timestamp.
+     """
+     metadata = struct.pack(_METADATA_FORMAT, created_at)
+     pickled = pickle.dumps(value)
+     return metadata + pickled
+
+
+ def _unpack_value(data: bytes) -> tuple[Any, float]:
+     """Unpack value and creation timestamp.
+     """
+     created_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])[0]
+     value = pickle.loads(data[_METADATA_SIZE:])
+     return value, created_at
+
+
+ class RedisBackend(Backend):
+     """Redis cache backend.
+     """
+
+     def __init__(self, url: str) -> None:
+         self._url = url
+         self._client: redis.Redis | None = None
+
+     @property
+     def client(self) -> 'redis.Redis':
+         """Lazy-load Redis client.
+         """
+         if self._client is None:
+             self._client = get_redis_client(self._url)
+         return self._client
+
+     def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found.
+         """
+         data = self.client.get(key)
+         if data is None:
+             return NO_VALUE
+         value, _ = _unpack_value(data)
+         return value
+
+     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+         data = self.client.get(key)
+         if data is None:
+             return NO_VALUE, None
+         value, created_at = _unpack_value(data)
+         return value, created_at
+
+     def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+         now = time.time()
+         packed = _pack_value(value, now)
+         self.client.setex(key, ttl, packed)
+
+     def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+         self.client.delete(key)
+
+     def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+         if pattern is None:
+             pattern = '*'
+
+         count = 0
+         for key in self.client.scan_iter(match=pattern):
+             self.client.delete(key)
+             count += 1
+         return count
+
+     def keys(self, pattern: str | None = None) -> Iterator[str]:
+         """Iterate over keys matching pattern.
+         """
+         redis_pattern = pattern or '*'
+         for key in self.client.scan_iter(match=redis_pattern):
+             yield key.decode() if isinstance(key, bytes) else key
+
+     def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+         return sum(1 for _ in self.keys(pattern))
+
+     def close(self) -> None:
+         """Close the Redis connection.
+         """
+         if self._client is not None:
+             self._client.close()
+             self._client = None
+
+
+ class AsyncRedisBackend(AsyncBackend):
+     """Async Redis cache backend using redis.asyncio.
+     """
+
+     def __init__(self, url: str) -> None:
+         self._url = url
+         self._client: aioredis.Redis | None = None
+
+     async def _get_client(self) -> 'aioredis.Redis':
+         """Lazy-load async Redis client.
+         """
+         if self._client is None:
+             self._client = await get_async_redis_client(self._url)
+         return self._client
+
+     async def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found.
+         """
+         client = await self._get_client()
+         data = await client.get(key)
+         if data is None:
+             return NO_VALUE
+         value, _ = _unpack_value(data)
+         return value
+
+     async def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+         client = await self._get_client()
+         data = await client.get(key)
+         if data is None:
+             return NO_VALUE, None
+         value, created_at = _unpack_value(data)
+         return value, created_at
+
+     async def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+         client = await self._get_client()
+         now = time.time()
+         packed = _pack_value(value, now)
+         await client.setex(key, ttl, packed)
+
+     async def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+         client = await self._get_client()
+         await client.delete(key)
+
+     async def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+         client = await self._get_client()
+         if pattern is None:
+             pattern = '*'
+
+         count = 0
+         async for key in client.scan_iter(match=pattern):
+             await client.delete(key)
+             count += 1
+         return count
+
+     async def keys(self, pattern: str | None = None) -> AsyncIterator[str]:
+         """Iterate over keys matching pattern.
+         """
+         client = await self._get_client()
+         redis_pattern = pattern or '*'
+         async for key in client.scan_iter(match=redis_pattern):
+             yield key.decode() if isinstance(key, bytes) else key
+
+     async def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+         count = 0
+         async for _ in self.keys(pattern):
+             count += 1
+         return count
+
+     async def close(self) -> None:
+         """Close the Redis connection.
+         """
+         if self._client is not None:
+             await self._client.close()
+             self._client = None
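
And the async counterpart, exercised end to end. This is a sketch, not a test: it assumes a Redis server is reachable at the example URL and that the optional `redis` extra is installed:

```python
import asyncio
from cachu.backends.redis import AsyncRedisBackend

async def main() -> None:
    backend = AsyncRedisBackend('redis://localhost:6379/0')  # assumes a local Redis
    await backend.set('report:2024', {'rows': 10}, ttl=120)  # pickled, timestamp-prefixed
    print(await backend.get('report:2024'))                  # {'rows': 10}
    print(await backend.count('report:*'))                   # key patterns go through SCAN
    await backend.clear('report:*')
    await backend.close()

asyncio.run(main())
```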