cachu-0.1.1-py3-none-any.whl

This diff represents the content of a publicly available package version released to one of the supported registries. It is provided for informational purposes only and reflects the package as it appears in its public registry.
cachu/__init__.py ADDED
@@ -0,0 +1,27 @@
+ """Flexible caching library with support for memory, file, and Redis backends.
+ """
+ __version__ = '0.1.1'
+
+ from .backends.redis import get_redis_client
+ from .config import configure, disable, enable, get_all_configs, get_config
+ from .config import is_disabled
+ from .decorator import cache, get_backend
+ from .operations import cache_clear, cache_delete, cache_get, cache_info
+ from .operations import cache_set
+
+ __all__ = [
+     'configure',
+     'get_config',
+     'get_all_configs',
+     'disable',
+     'enable',
+     'is_disabled',
+     'cache',
+     'cache_get',
+     'cache_set',
+     'cache_delete',
+     'cache_clear',
+     'cache_info',
+     'get_backend',
+     'get_redis_client',
+ ]
cachu/backends/__init__.py ADDED
@@ -0,0 +1,47 @@
+ """Cache backend implementations.
+ """
+ from abc import ABC, abstractmethod
+ from typing import Any
+ from collections.abc import Iterator
+
+ NO_VALUE = object()
+
+
+ class Backend(ABC):
+     """Abstract base class for cache backends.
+     """
+
+     @abstractmethod
+     def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found.
+         """
+
+     @abstractmethod
+     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+
+     @abstractmethod
+     def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+
+     @abstractmethod
+     def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+
+     @abstractmethod
+     def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+
+     @abstractmethod
+     def keys(self, pattern: str | None = None) -> Iterator[str]:
+         """Iterate over keys matching pattern.
+         """
+
+     @abstractmethod
+     def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
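The Backend contract above is everything a storage engine has to satisfy: seven methods plus the NO_VALUE sentinel that distinguishes a cache miss from a cached None. As a rough sketch only, a custom backend could look like the following; the DictBackend name and its plain-dict storage are invented for illustration and are not part of cachu:

    import fnmatch
    import time
    from collections.abc import Iterator
    from typing import Any

    from cachu.backends import NO_VALUE, Backend

    class DictBackend(Backend):
        """Toy backend: keeps (value, created_at, expires_at) tuples in a plain dict."""

        def __init__(self) -> None:
            self._data: dict[str, tuple[Any, float, float]] = {}

        def get(self, key: str) -> Any:
            value, _ = self.get_with_metadata(key)
            return value

        def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
            entry = self._data.get(key)
            if entry is None or time.time() > entry[2]:
                self._data.pop(key, None)          # drop missing/expired entries lazily
                return NO_VALUE, None
            return entry[0], entry[1]

        def set(self, key: str, value: Any, ttl: int) -> None:
            now = time.time()
            self._data[key] = (value, now, now + ttl)

        def delete(self, key: str) -> None:
            self._data.pop(key, None)

        def clear(self, pattern: str | None = None) -> int:
            doomed = list(self.keys(pattern))
            for k in doomed:
                del self._data[k]
            return len(doomed)

        def keys(self, pattern: str | None = None) -> Iterator[str]:
            for k in list(self._data):
                if pattern is None or fnmatch.fnmatch(k, pattern):
                    yield k

        def count(self, pattern: str | None = None) -> int:
            return sum(1 for _ in self.keys(pattern))

The shipped backends below add what this toy omits: thread safety and pickling (MemoryBackend), persistence via DBM (FileBackend), and server-side expiry (RedisBackend).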
cachu/backends/file.py ADDED
@@ -0,0 +1,158 @@
+ """File-based cache backend using DBM.
+ """
+ import dbm
+ import fnmatch
+ import pathlib
+ import pickle
+ import struct
+ import threading
+ import time
+ from collections.abc import Iterator
+ from typing import Any
+
+ from . import NO_VALUE, Backend
+
+ _METADATA_FORMAT = 'dd'
+ _METADATA_SIZE = struct.calcsize(_METADATA_FORMAT)
+
+
+ class FileBackend(Backend):
+     """DBM file-based cache backend.
+     """
+
+     def __init__(self, filepath: str) -> None:
+         self._filepath = filepath
+         self._lock = threading.RLock()
+         self._ensure_dir()
+
+     def _ensure_dir(self) -> None:
+         """Ensure the directory for the cache file exists.
+         """
+         directory = pathlib.Path(self._filepath).parent
+         if directory and not pathlib.Path(directory).exists():
+             pathlib.Path(directory).mkdir(exist_ok=True, parents=True)
+
+     def _pack_value(self, value: Any, created_at: float, expires_at: float) -> bytes:
+         """Pack value with metadata.
+         """
+         metadata = struct.pack(_METADATA_FORMAT, created_at, expires_at)
+         pickled = pickle.dumps(value)
+         return metadata + pickled
+
+     def _unpack_value(self, data: bytes) -> tuple[Any, float, float]:
+         """Unpack value and metadata.
+         """
+         created_at, expires_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])
+         value = pickle.loads(data[_METADATA_SIZE:])
+         return value, created_at, expires_at
+
+     def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found or expired.
+         """
+         with self._lock:
+             try:
+                 with dbm.open(self._filepath, 'c') as db:
+                     data = db.get(key.encode())
+                     if data is None:
+                         return NO_VALUE
+
+                     value, created_at, expires_at = self._unpack_value(data)
+                     if time.time() > expires_at:
+                         del db[key.encode()]
+                         return NO_VALUE
+
+                     return value
+             except Exception:
+                 return NO_VALUE
+
+     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+         with self._lock:
+             try:
+                 with dbm.open(self._filepath, 'c') as db:
+                     data = db.get(key.encode())
+                     if data is None:
+                         return NO_VALUE, None
+
+                     value, created_at, expires_at = self._unpack_value(data)
+                     if time.time() > expires_at:
+                         del db[key.encode()]
+                         return NO_VALUE, None
+
+                     return value, created_at
+             except Exception:
+                 return NO_VALUE, None
+
+     def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+         now = time.time()
+         packed = self._pack_value(value, now, now + ttl)
+         with self._lock, dbm.open(self._filepath, 'c') as db:
+             db[key.encode()] = packed
+
+     def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+         with self._lock:
+             try:
+                 with dbm.open(self._filepath, 'c') as db:
+                     if key.encode() in db:
+                         del db[key.encode()]
+             except Exception:
+                 pass
+
+     def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+         with self._lock:
+             try:
+                 if pattern is None:
+                     with dbm.open(self._filepath, 'n'):
+                         pass
+                     return -1
+
+                 with dbm.open(self._filepath, 'c') as db:
+                     keys_to_delete = [
+                         k for k in db
+                         if fnmatch.fnmatch(k.decode(), pattern)
+                     ]
+                     for key in keys_to_delete:
+                         del db[key]
+                     return len(keys_to_delete)
+             except Exception:
+                 return 0
+
+     def keys(self, pattern: str | None = None) -> Iterator[str]:
+         """Iterate over keys matching pattern.
+         """
+         now = time.time()
+         with self._lock:
+             try:
+                 with dbm.open(self._filepath, 'c') as db:
+                     all_keys = [k.decode() for k in db]
+             except Exception:
+                 return
+
+         for key in all_keys:
+             with self._lock:
+                 try:
+                     with dbm.open(self._filepath, 'c') as db:
+                         data = db.get(key.encode())
+                         if data is None:
+                             continue
+                         _, _, expires_at = self._unpack_value(data)
+                         if now > expires_at:
+                             del db[key.encode()]
+                             continue
+                 except Exception:
+                     continue
+
+             if pattern is None or fnmatch.fnmatch(key, pattern):
+                 yield key
+
+     def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+         return sum(1 for _ in self.keys(pattern))
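Each record written by this backend is a 16-byte struct header (created_at and expires_at as two doubles) followed by the pickled value, and every operation reopens the DBM file under an RLock. A short usage sketch against the class as defined above; the path and key are illustrative:

    from cachu.backends import NO_VALUE
    from cachu.backends.file import FileBackend

    backend = FileBackend('/tmp/cachu-demo/cache.db')   # parent directory is created if missing
    backend.set('user:42', {'name': 'Ada'}, ttl=60)     # 16-byte (created_at, expires_at) header + pickled value

    value = backend.get('user:42')
    if value is not NO_VALUE:                            # NO_VALUE distinguishes "missing/expired" from a cached None
        print(value)

    backend.delete('user:42')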
cachu/backends/memory.py ADDED
@@ -0,0 +1,102 @@
+ """Memory cache backend implementation.
+ """
+ import fnmatch
+ import pickle
+ import threading
+ import time
+ from collections.abc import Iterator
+ from typing import Any
+
+ from . import NO_VALUE, Backend
+
+
+ class MemoryBackend(Backend):
+     """Thread-safe in-memory cache backend.
+     """
+
+     def __init__(self) -> None:
+         self._cache: dict[str, tuple[bytes, float, float]] = {}
+         self._lock = threading.RLock()
+
+     def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found or expired.
+         """
+         with self._lock:
+             entry = self._cache.get(key)
+             if entry is None:
+                 return NO_VALUE
+
+             pickled_value, created_at, expires_at = entry
+             if time.time() > expires_at:
+                 del self._cache[key]
+                 return NO_VALUE
+
+             return pickle.loads(pickled_value)
+
+     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+         with self._lock:
+             entry = self._cache.get(key)
+             if entry is None:
+                 return NO_VALUE, None
+
+             pickled_value, created_at, expires_at = entry
+             if time.time() > expires_at:
+                 del self._cache[key]
+                 return NO_VALUE, None
+
+             return pickle.loads(pickled_value), created_at
+
+     def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+         now = time.time()
+         pickled_value = pickle.dumps(value)
+         with self._lock:
+             self._cache[key] = (pickled_value, now, now + ttl)
+
+     def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+         with self._lock:
+             self._cache.pop(key, None)
+
+     def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+         with self._lock:
+             if pattern is None:
+                 count = len(self._cache)
+                 self._cache.clear()
+                 return count
+
+             keys_to_delete = [k for k in self._cache if fnmatch.fnmatch(k, pattern)]
+             for key in keys_to_delete:
+                 del self._cache[key]
+             return len(keys_to_delete)
+
+     def keys(self, pattern: str | None = None) -> Iterator[str]:
+         """Iterate over keys matching pattern.
+         """
+         now = time.time()
+         with self._lock:
+             all_keys = list(self._cache.keys())
+
+         for key in all_keys:
+             with self._lock:
+                 entry = self._cache.get(key)
+                 if entry is None:
+                     continue
+                 _, _, expires_at = entry
+                 if now > expires_at:
+                     del self._cache[key]
+                     continue
+
+             if pattern is None or fnmatch.fnmatch(key, pattern):
+                 yield key
+
+     def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+         return sum(1 for _ in self.keys(pattern))
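Values are round-tripped through pickle even in memory, so get() hands back a copy rather than the stored object, and expired entries are purged lazily when they are next touched. A usage sketch; the module path cachu.backends.memory is inferred from this diff, and the keys and TTLs are illustrative:

    import time

    from cachu.backends import NO_VALUE
    from cachu.backends.memory import MemoryBackend

    backend = MemoryBackend()
    backend.set('session:alice', {'role': 'admin'}, ttl=1)
    backend.set('session:bob', {'role': 'viewer'}, ttl=300)

    assert backend.count('session:*') == 2            # fnmatch-style patterns
    time.sleep(1.1)                                   # let alice's entry expire
    assert backend.get('session:alice') is NO_VALUE   # expired entry is removed on access
    assert backend.clear('session:*') == 1            # only bob's entry was left to clear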
cachu/backends/redis.py ADDED
@@ -0,0 +1,131 @@
+ """Redis cache backend implementation.
+ """
+ import pickle
+ import struct
+ import time
+ from collections.abc import Iterator
+ from typing import TYPE_CHECKING, Any
+
+ from . import NO_VALUE, Backend
+
+ if TYPE_CHECKING:
+     import redis
+
+
+ _METADATA_FORMAT = 'd'
+ _METADATA_SIZE = struct.calcsize(_METADATA_FORMAT)
+
+
+ def _get_redis_module() -> Any:
+     """Import redis module, raising helpful error if not installed.
+     """
+     try:
+         import redis
+         return redis
+     except ImportError as e:
+         raise RuntimeError(
+             "Redis support requires the 'redis' package. Install with: pip install cachu[redis]"
+         ) from e
+
+
+ def get_redis_client(url: str) -> 'redis.Redis':
+     """Create a Redis client from URL.
+
+     Args:
+         url: Redis URL (e.g., 'redis://localhost:6379/0')
+     """
+     redis_module = _get_redis_module()
+     return redis_module.from_url(url)
+
+
+ class RedisBackend(Backend):
+     """Redis cache backend.
+     """
+
+     def __init__(self, url: str, distributed_lock: bool = False) -> None:
+         self._url = url
+         self._distributed_lock = distributed_lock
+         self._client: redis.Redis | None = None
+
+     @property
+     def client(self) -> 'redis.Redis':
+         """Lazy-load Redis client.
+         """
+         if self._client is None:
+             self._client = get_redis_client(self._url)
+         return self._client
+
+     def _pack_value(self, value: Any, created_at: float) -> bytes:
+         """Pack value with creation timestamp.
+         """
+         metadata = struct.pack(_METADATA_FORMAT, created_at)
+         pickled = pickle.dumps(value)
+         return metadata + pickled
+
+     def _unpack_value(self, data: bytes) -> tuple[Any, float]:
+         """Unpack value and creation timestamp.
+         """
+         created_at = struct.unpack(_METADATA_FORMAT, data[:_METADATA_SIZE])[0]
+         value = pickle.loads(data[_METADATA_SIZE:])
+         return value, created_at
+
+     def get(self, key: str) -> Any:
+         """Get value by key. Returns NO_VALUE if not found.
+         """
+         data = self.client.get(key)
+         if data is None:
+             return NO_VALUE
+         value, _ = self._unpack_value(data)
+         return value
+
+     def get_with_metadata(self, key: str) -> tuple[Any, float | None]:
+         """Get value and creation timestamp. Returns (NO_VALUE, None) if not found.
+         """
+         data = self.client.get(key)
+         if data is None:
+             return NO_VALUE, None
+         value, created_at = self._unpack_value(data)
+         return value, created_at
+
+     def set(self, key: str, value: Any, ttl: int) -> None:
+         """Set value with TTL in seconds.
+         """
+         now = time.time()
+         packed = self._pack_value(value, now)
+         self.client.setex(key, ttl, packed)
+
+     def delete(self, key: str) -> None:
+         """Delete value by key.
+         """
+         self.client.delete(key)
+
+     def clear(self, pattern: str | None = None) -> int:
+         """Clear entries matching pattern. Returns count of cleared entries.
+         """
+         if pattern is None:
+             pattern = '*'
+
+         count = 0
+         for key in self.client.scan_iter(match=pattern):
+             self.client.delete(key)
+             count += 1
+         return count
+
+     def keys(self, pattern: str | None = None) -> Iterator[str]:
+         """Iterate over keys matching pattern.
+         """
+         redis_pattern = pattern or '*'
+         for key in self.client.scan_iter(match=redis_pattern):
+             yield key.decode() if isinstance(key, bytes) else key
+
+     def count(self, pattern: str | None = None) -> int:
+         """Count keys matching pattern.
+         """
+         return sum(1 for _ in self.keys(pattern))
+
+     def close(self) -> None:
+         """Close the Redis connection.
+         """
+         if self._client is not None:
+             self._client.close()
+             self._client = None
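Unlike the memory and file backends, expiry here is delegated to Redis via SETEX, so reads never compare timestamps; only the creation time travels in an 8-byte header ahead of the pickled value. A usage sketch, assuming the redis extra is installed and a server is reachable at the illustrative URL:

    from cachu.backends import NO_VALUE
    from cachu.backends.redis import RedisBackend

    backend = RedisBackend('redis://localhost:6379/0')   # client is created lazily on first use

    backend.set('greeting', 'hello', ttl=30)             # SETEX greeting 30 <8-byte header + pickle>
    value, created_at = backend.get_with_metadata('greeting')
    if value is not NO_VALUE:
        print(value, created_at)

    backend.close()                                      # drops the lazily created client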