cachify 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cachify-0.1.0/LICENSE +21 -0
- cachify-0.1.0/PKG-INFO +171 -0
- cachify-0.1.0/README.md +145 -0
- cachify-0.1.0/cachify/__init__.py +22 -0
- cachify-0.1.0/cachify/cache.py +116 -0
- cachify-0.1.0/cachify/config/__init__.py +4 -0
- cachify-0.1.0/cachify/features/__init__.py +0 -0
- cachify-0.1.0/cachify/features/never_die.py +219 -0
- cachify-0.1.0/cachify/memory_cache.py +37 -0
- cachify-0.1.0/cachify/redis/__init__.py +19 -0
- cachify-0.1.0/cachify/redis/config.py +115 -0
- cachify-0.1.0/cachify/redis/lock.py +232 -0
- cachify-0.1.0/cachify/redis_cache.py +27 -0
- cachify-0.1.0/cachify/storage/__init__.py +9 -0
- cachify-0.1.0/cachify/storage/memory_storage.py +52 -0
- cachify-0.1.0/cachify/storage/redis_storage.py +138 -0
- cachify-0.1.0/cachify/types/__init__.py +95 -0
- cachify-0.1.0/cachify/utils/__init__.py +0 -0
- cachify-0.1.0/cachify/utils/arguments.py +65 -0
- cachify-0.1.0/cachify/utils/decorator_factory.py +44 -0
- cachify-0.1.0/cachify/utils/functions.py +10 -0
- cachify-0.1.0/cachify/utils/locks.py +6 -0
- cachify-0.1.0/pyproject.toml +48 -0
cachify-0.1.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Pulsar Finance
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
cachify-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: cachify
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A simple cache library with sync/async support, Memory and Redis backend
|
|
5
|
+
Home-page: https://github.com/PulsarDataSolutions/cachify
|
|
6
|
+
License: MIT
|
|
7
|
+
Keywords: cachify,cache,caching,redis,async,decorator,memoization
|
|
8
|
+
Author: dynalz
|
|
9
|
+
Author-email: git@pulsar.finance
|
|
10
|
+
Requires-Python: >=3.10,<3.15
|
|
11
|
+
Classifier: Development Status :: 4 - Beta
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
20
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
21
|
+
Classifier: Typing :: Typed
|
|
22
|
+
Requires-Dist: redis[hiredis] (>5.0.0)
|
|
23
|
+
Project-URL: Repository, https://github.com/PulsarDataSolutions/cachify
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
|
|
26
|
+
# Python Cachify Library
|
|
27
|
+
|
|
28
|
+
A simple and robust caching library for Python functions, supporting both synchronous and asynchronous code.
|
|
29
|
+
|
|
30
|
+
## Features
|
|
31
|
+
|
|
32
|
+
- Cache function results based on function ID and arguments
|
|
33
|
+
- Supports both synchronous and asynchronous functions
|
|
34
|
+
- Thread-safe locking to prevent duplicate cached function calls
|
|
35
|
+
- Configurable Time-To-Live (TTL) for cached items
|
|
36
|
+
- "Never Die" mode for functions that should keep cache refreshed automatically
|
|
37
|
+
- Skip cache functionality to force fresh function execution while updating cache
|
|
38
|
+
- Redis cache for distributed caching across multiple processes/machines
|
|
39
|
+
|
|
40
|
+
## Installation
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
# Clone the repository
|
|
44
|
+
git clone https://github.com/PulsarDefi/cachify.git
|
|
45
|
+
cd cachify
|
|
46
|
+
|
|
47
|
+
# Install the package
|
|
48
|
+
poetry install
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## Usage
|
|
52
|
+
|
|
53
|
+
### Basic Usage
|
|
54
|
+
|
|
55
|
+
```python
|
|
56
|
+
from cachify import cache
|
|
57
|
+
|
|
58
|
+
# Cache function in sync functions
|
|
59
|
+
@cache(ttl=60) # ttl in seconds
|
|
60
|
+
def expensive_calculation(a, b):
|
|
61
|
+
# Some expensive operation
|
|
62
|
+
return a + b
|
|
63
|
+
|
|
64
|
+
# And async functions
|
|
65
|
+
@cache(ttl=3600) # ttl in seconds
|
|
66
|
+
async def another_calculation(url):
|
|
67
|
+
# Some expensive IO call
|
|
68
|
+
return await httpx.get(url).json()
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
### Redis Cache
|
|
72
|
+
|
|
73
|
+
For distributed caching across multiple processes or machines, use `rcache`:
|
|
74
|
+
|
|
75
|
+
```python
|
|
76
|
+
import redis
|
|
77
|
+
from cachify import setup_redis_config, rcache
|
|
78
|
+
|
|
79
|
+
# Configure Redis (call once at startup)
|
|
80
|
+
setup_redis_config(
|
|
81
|
+
sync_client=redis.from_url("redis://localhost:6379/0"),
|
|
82
|
+
key_prefix="myapp", # default: "key_prefix", prefix searchable on redis "PREFIX:*"
|
|
83
|
+
lock_timeout=10, # default: 10, maximum lock lifetime in seconds
|
|
84
|
+
on_error="silent", # "silent" (default) or "raise" in case of redis errors
|
|
85
|
+
)
|
|
86
|
+
|
|
87
|
+
@rcache(ttl=300)
|
|
88
|
+
def get_user(user_id: int) -> dict:
|
|
89
|
+
return fetch_from_database(user_id)
|
|
90
|
+
|
|
91
|
+
# Async version
|
|
92
|
+
import redis.asyncio as aredis
|
|
93
|
+
|
|
94
|
+
setup_redis_config(async_client=aredis.from_url("redis://localhost:6379/0"))
|
|
95
|
+
|
|
96
|
+
@rcache(ttl=300)
|
|
97
|
+
async def get_user_async(user_id: int) -> dict:
|
|
98
|
+
return await fetch_from_database(user_id)
|
|
99
|
+
```
|
|
100
|
+
|
|
101
|
+
### Never Die Cache
|
|
102
|
+
|
|
103
|
+
The `never_die` feature ensures that cached values never expire by automatically refreshing them in the background:
|
|
104
|
+
|
|
105
|
+
```python
|
|
106
|
+
# Cache with never_die (automatic refresh)
|
|
107
|
+
@cache(ttl=300, never_die=True)
|
|
108
|
+
def critical_operation(data_id: str):
|
|
109
|
+
# Expensive operation that should always be available from cache
|
|
110
|
+
return fetch_data_from_database(data_id)
|
|
111
|
+
```
|
|
112
|
+
|
|
113
|
+
**How Never Die Works:**
|
|
114
|
+
|
|
115
|
+
1. When a function with `never_die=True` is first called, the result is cached
|
|
116
|
+
2. A background thread monitors all `never_die` functions
|
|
117
|
+
3. On cache expiration (TTL), the function is automatically called again
|
|
118
|
+
4. The cache is updated with the new result
|
|
119
|
+
5. If the refresh operation fails, the existing cached value is preserved
|
|
120
|
+
6. Clients always get fast response times by reading from cache
|
|
121
|
+
|
|
122
|
+
**Benefits:**
|
|
123
|
+
|
|
124
|
+
- Cache is always "warm" and ready to serve
|
|
125
|
+
- No user request ever has to wait for the expensive operation
|
|
126
|
+
- If a dependency service from the cached function goes down temporarily, the last successful result is still available
|
|
127
|
+
- Perfect for critical operations where latency must be minimized
|
|
128
|
+
|
|
129
|
+
### Skip Cache
|
|
130
|
+
|
|
131
|
+
The `skip_cache` feature allows you to bypass reading from cache while still updating it with fresh results:
|
|
132
|
+
|
|
133
|
+
```python
|
|
134
|
+
@cache(ttl=300)
|
|
135
|
+
def get_user_data(user_id):
|
|
136
|
+
# Expensive operation to fetch user data
|
|
137
|
+
return fetch_from_database(user_id)
|
|
138
|
+
|
|
139
|
+
# Normal call - uses cache if available
|
|
140
|
+
user = get_user_data(123)
|
|
141
|
+
# Force fresh execution while updating cache
|
|
142
|
+
fresh_user = get_user_data(123, skip_cache=True)
|
|
143
|
+
# Next normal call will get the updated cached value
|
|
144
|
+
updated_user = get_user_data(123)
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
**How Skip Cache Works:**
|
|
148
|
+
|
|
149
|
+
1. When `skip_cache=True` is passed, the function bypasses reading from cache
|
|
150
|
+
2. The function executes normally and returns fresh results
|
|
151
|
+
3. The fresh result is stored in the cache, updating any existing cached value
|
|
152
|
+
4. Subsequent calls without `skip_cache=True` will use the updated cached value
|
|
153
|
+
5. The TTL timer resets from when the cache was last updated
|
|
154
|
+
|
|
155
|
+
**Benefits:**
|
|
156
|
+
|
|
157
|
+
- Force refresh of potentially stale data while keeping cache warm
|
|
158
|
+
- Ensuring fresh data for critical operations while maintaining cache for other calls
|
|
159
|
+
|
|
160
|
+
## Testing
|
|
161
|
+
|
|
162
|
+
Run the test scripts
|
|
163
|
+
|
|
164
|
+
```bash
|
|
165
|
+
poetry run python -m pytest
|
|
166
|
+
```
|
|
167
|
+
|
|
168
|
+
## License
|
|
169
|
+
|
|
170
|
+
MIT
|
|
171
|
+
|
cachify-0.1.0/README.md
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
# Python Cachify Library
|
|
2
|
+
|
|
3
|
+
A simple and robust caching library for Python functions, supporting both synchronous and asynchronous code.
|
|
4
|
+
|
|
5
|
+
## Features
|
|
6
|
+
|
|
7
|
+
- Cache function results based on function ID and arguments
|
|
8
|
+
- Supports both synchronous and asynchronous functions
|
|
9
|
+
- Thread-safe locking to prevent duplicate cached function calls
|
|
10
|
+
- Configurable Time-To-Live (TTL) for cached items
|
|
11
|
+
- "Never Die" mode for functions that should keep cache refreshed automatically
|
|
12
|
+
- Skip cache functionality to force fresh function execution while updating cache
|
|
13
|
+
- Redis cache for distributed caching across multiple processes/machines
|
|
14
|
+
|
|
15
|
+
## Installation
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
# Clone the repository
|
|
19
|
+
git clone https://github.com/PulsarDefi/cachify.git
|
|
20
|
+
cd cachify
|
|
21
|
+
|
|
22
|
+
# Install the package
|
|
23
|
+
poetry install
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
## Usage
|
|
27
|
+
|
|
28
|
+
### Basic Usage
|
|
29
|
+
|
|
30
|
+
```python
|
|
31
|
+
from cachify import cache
|
|
32
|
+
|
|
33
|
+
# Cache function in sync functions
|
|
34
|
+
@cache(ttl=60) # ttl in seconds
|
|
35
|
+
def expensive_calculation(a, b):
|
|
36
|
+
# Some expensive operation
|
|
37
|
+
return a + b
|
|
38
|
+
|
|
39
|
+
# And async functions
|
|
40
|
+
@cache(ttl=3600) # ttl in seconds
|
|
41
|
+
async def another_calculation(url):
|
|
42
|
+
# Some expensive IO call
|
|
43
|
+
return await httpx.get(url).json()
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
### Redis Cache
|
|
47
|
+
|
|
48
|
+
For distributed caching across multiple processes or machines, use `rcache`:
|
|
49
|
+
|
|
50
|
+
```python
|
|
51
|
+
import redis
|
|
52
|
+
from cachify import setup_redis_config, rcache
|
|
53
|
+
|
|
54
|
+
# Configure Redis (call once at startup)
|
|
55
|
+
setup_redis_config(
|
|
56
|
+
sync_client=redis.from_url("redis://localhost:6379/0"),
|
|
57
|
+
key_prefix="myapp", # default: "key_prefix", prefix searchable on redis "PREFIX:*"
|
|
58
|
+
lock_timeout=10, # default: 10, maximum lock lifetime in seconds
|
|
59
|
+
on_error="silent", # "silent" (default) or "raise" in case of redis errors
|
|
60
|
+
)
|
|
61
|
+
|
|
62
|
+
@rcache(ttl=300)
|
|
63
|
+
def get_user(user_id: int) -> dict:
|
|
64
|
+
return fetch_from_database(user_id)
|
|
65
|
+
|
|
66
|
+
# Async version
|
|
67
|
+
import redis.asyncio as aredis
|
|
68
|
+
|
|
69
|
+
setup_redis_config(async_client=aredis.from_url("redis://localhost:6379/0"))
|
|
70
|
+
|
|
71
|
+
@rcache(ttl=300)
|
|
72
|
+
async def get_user_async(user_id: int) -> dict:
|
|
73
|
+
return await fetch_from_database(user_id)
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
### Never Die Cache
|
|
77
|
+
|
|
78
|
+
The `never_die` feature ensures that cached values never expire by automatically refreshing them in the background:
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
# Cache with never_die (automatic refresh)
|
|
82
|
+
@cache(ttl=300, never_die=True)
|
|
83
|
+
def critical_operation(data_id: str):
|
|
84
|
+
# Expensive operation that should always be available from cache
|
|
85
|
+
return fetch_data_from_database(data_id)
|
|
86
|
+
```
|
|
87
|
+
|
|
88
|
+
**How Never Die Works:**
|
|
89
|
+
|
|
90
|
+
1. When a function with `never_die=True` is first called, the result is cached
|
|
91
|
+
2. A background thread monitors all `never_die` functions
|
|
92
|
+
3. On cache expiration (TTL), the function is automatically called again
|
|
93
|
+
4. The cache is updated with the new result
|
|
94
|
+
5. If the refresh operation fails, the existing cached value is preserved
|
|
95
|
+
6. Clients always get fast response times by reading from cache
|
|
96
|
+
|
|
97
|
+
**Benefits:**
|
|
98
|
+
|
|
99
|
+
- Cache is always "warm" and ready to serve
|
|
100
|
+
- No user request ever has to wait for the expensive operation
|
|
101
|
+
- If a dependency service from the cached function goes down temporarily, the last successful result is still available
|
|
102
|
+
- Perfect for critical operations where latency must be minimized
|
|
103
|
+
|
|
104
|
+
### Skip Cache
|
|
105
|
+
|
|
106
|
+
The `skip_cache` feature allows you to bypass reading from cache while still updating it with fresh results:
|
|
107
|
+
|
|
108
|
+
```python
|
|
109
|
+
@cache(ttl=300)
|
|
110
|
+
def get_user_data(user_id):
|
|
111
|
+
# Expensive operation to fetch user data
|
|
112
|
+
return fetch_from_database(user_id)
|
|
113
|
+
|
|
114
|
+
# Normal call - uses cache if available
|
|
115
|
+
user = get_user_data(123)
|
|
116
|
+
# Force fresh execution while updating cache
|
|
117
|
+
fresh_user = get_user_data(123, skip_cache=True)
|
|
118
|
+
# Next normal call will get the updated cached value
|
|
119
|
+
updated_user = get_user_data(123)
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
**How Skip Cache Works:**
|
|
123
|
+
|
|
124
|
+
1. When `skip_cache=True` is passed, the function bypasses reading from cache
|
|
125
|
+
2. The function executes normally and returns fresh results
|
|
126
|
+
3. The fresh result is stored in the cache, updating any existing cached value
|
|
127
|
+
4. Subsequent calls without `skip_cache=True` will use the updated cached value
|
|
128
|
+
5. The TTL timer resets from when the cache was last updated
|
|
129
|
+
|
|
130
|
+
**Benefits:**
|
|
131
|
+
|
|
132
|
+
- Force refresh of potentially stale data while keeping cache warm
|
|
133
|
+
- Ensuring fresh data for critical operations while maintaining cache for other calls
|
|
134
|
+
|
|
135
|
+
## Testing
|
|
136
|
+
|
|
137
|
+
Run the test scripts
|
|
138
|
+
|
|
139
|
+
```bash
|
|
140
|
+
poetry run python -m pytest
|
|
141
|
+
```
|
|
142
|
+
|
|
143
|
+
## License
|
|
144
|
+
|
|
145
|
+
MIT
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""Public API surface for the cachify package."""

from .features.never_die import clear_never_die_registry
from .memory_cache import cache
from .redis import DEFAULT_KEY_PREFIX, get_redis_config, reset_redis_config, setup_redis_config
from .redis_cache import redis_cache
from .types import CacheKwargs

__version__ = "0.1.0"

# Short alias so callers can write @rcache(...) instead of @redis_cache(...).
rcache = redis_cache

__all__ = [
    "CacheKwargs",
    "DEFAULT_KEY_PREFIX",
    "__version__",
    "cache",
    "clear_never_die_registry",
    "get_redis_config",
    "rcache",
    "redis_cache",
    "reset_redis_config",
    "setup_redis_config",
]
|
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import functools
|
|
2
|
+
import inspect
|
|
3
|
+
from typing import Any, Callable, cast
|
|
4
|
+
|
|
5
|
+
from cachify.features.never_die import register_never_die_function
|
|
6
|
+
from cachify.types import CacheConfig, CacheKeyFunction, F, Number
|
|
7
|
+
from cachify.utils.arguments import create_cache_key
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _async_decorator(
    function: F,
    ttl: Number,
    never_die: bool,
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    config: CacheConfig,
) -> F:
    """Wrap an async *function* with read-cache / lock / compute-and-store logic.

    The wrapper pops a ``skip_cache`` keyword (default ``False``) before the
    cache key is built; when set, cached reads are bypassed but the fresh
    result is still written back to the cache.
    """

    @functools.wraps(function)
    async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
        skip_cache = kwargs.pop("skip_cache", False)
        key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)

        # Fast path: serve a hit without touching the lock at all.
        entry = await config.storage.aget(key, skip_cache)
        if entry:
            return entry.result

        async with config.async_lock(key):
            # Double-checked read: another caller may have filled the cache
            # while we were waiting for the lock.
            entry = await config.storage.aget(key, skip_cache)
            if entry:
                return entry.result

            result = await function(*args, **kwargs)
            # never_die entries are stored without an expiry; the background
            # refresher keeps them current instead of letting them lapse.
            expiry = None if never_die else ttl
            await config.storage.aset(key, result, expiry)

            if never_die:
                register_never_die_function(function, ttl, args, kwargs, cache_key_func, ignore_fields, config)

            return result

    return cast(F, async_wrapper)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _sync_decorator(
    function: F,
    ttl: Number,
    never_die: bool,
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    config: CacheConfig,
) -> F:
    """Wrap a sync *function* with read-cache / lock / compute-and-store logic.

    Mirrors :func:`_async_decorator` for plain functions: a ``skip_cache``
    keyword (default ``False``) is popped before key creation; when set the
    cached value is ignored but the fresh result still updates the cache.
    """

    @functools.wraps(function)
    def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
        skip_cache = kwargs.pop("skip_cache", False)
        key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)

        # Fast path: serve a hit without touching the lock at all.
        entry = config.storage.get(key, skip_cache)
        if entry:
            return entry.result

        with config.sync_lock(key):
            # Double-checked read: another thread may have filled the cache
            # while we were waiting for the lock.
            entry = config.storage.get(key, skip_cache)
            if entry:
                return entry.result

            result = function(*args, **kwargs)
            # never_die entries are stored without an expiry; the background
            # refresher keeps them current instead of letting them lapse.
            expiry = None if never_die else ttl
            config.storage.set(key, result, expiry)

            if never_die:
                register_never_die_function(function, ttl, args, kwargs, cache_key_func, ignore_fields, config)

            return result

    return cast(F, sync_wrapper)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def base_cache(
    ttl: Number,
    never_die: bool,
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    config: CacheConfig,
) -> Callable[[F], F]:
    """
    Base cache decorator factory shared by the memory and Redis backends.

    Args:
        ttl: Time to live for cached items in seconds
        never_die: If True, the cache will never expire and will be recalculated based on the ttl
        cache_key_func: Custom cache key function, used for more complex cache scenarios
        ignore_fields: Tuple of strings with the function params to ignore when creating the cache key
        config: Cache configuration specifying storage, locks, and never_die registration

    Features:
        - Works for both sync and async functions
        - Only allows one execution at a time per function+args
        - Makes subsequent calls wait for the first call to complete
    """
    if cache_key_func and ignore_fields:
        raise ValueError("Either cache_key_func or ignore_fields can be provided, but not both")

    def decorator(function: F) -> F:
        # Coroutine functions get the async wrapper; everything else the sync one.
        build = _async_decorator if inspect.iscoroutinefunction(function) else _sync_decorator
        return build(
            function=function,
            ttl=ttl,
            never_die=never_die,
            cache_key_func=cache_key_func,
            ignore_fields=ignore_fields,
            config=config,
        )

    return decorator
|
|
File without changes
|
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import functools
|
|
3
|
+
import inspect
|
|
4
|
+
import threading
|
|
5
|
+
import time
|
|
6
|
+
from asyncio import AbstractEventLoop
|
|
7
|
+
from concurrent.futures import Future as ConcurrentFuture
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from typing import Any, Callable
|
|
10
|
+
|
|
11
|
+
from cachify.config import logger
|
|
12
|
+
from cachify.types import CacheConfig, CacheKeyFunction, Number
|
|
13
|
+
from cachify.utils.arguments import create_cache_key
|
|
14
|
+
|
|
15
|
+
# Singleton background refresher thread (started lazily) and the lock that
# guards all of the module-level never_die state below.
_NEVER_DIE_THREAD: threading.Thread | None = None
_NEVER_DIE_LOCK: threading.Lock = threading.Lock()
# Registered never_die call sites, plus the per-cache-key worker threads
# (sync functions) and futures (async functions) currently refreshing them.
_NEVER_DIE_REGISTRY: list["NeverDieCacheEntry"] = []
_NEVER_DIE_CACHE_THREADS: dict[str, threading.Thread] = {}
_NEVER_DIE_CACHE_FUTURES: dict[str, ConcurrentFuture] = {}

# Failed refreshes retry with exponential backoff: the next refresh is
# scheduled at ttl * backoff, where backoff grows by _BACKOFF_MULTIPLIER per
# failure up to _MAX_BACKOFF (see NeverDieCacheEntry.revive).
_MAX_BACKOFF: int = 10
_BACKOFF_MULTIPLIER: float = 1.25
# How often the background thread rescans the registry, in seconds.
_REFRESH_INTERVAL_SECONDS: float = 0.1
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
@dataclass
class NeverDieCacheEntry:
    """One registered never_die call site: the function plus the exact
    arguments it was first cached with, and the expiry/backoff bookkeeping
    the background refresher needs."""

    function: Callable[..., Any]
    ttl: Number
    args: tuple
    kwargs: dict
    cache_key_func: CacheKeyFunction | None
    ignore_fields: tuple[str, ...]
    # Event loop the async function was registered from; None means sync.
    loop: AbstractEventLoop | None
    config: CacheConfig

    def __post_init__(self):
        # Backoff factor applied to ttl after failed refreshes (see revive()).
        self._backoff: float = 1
        self._expires_at: float = time.monotonic() + self.ttl

    @functools.cached_property
    def cache_key(self) -> str:
        # Computed once and cached: an entry's args/kwargs never change.
        return create_cache_key(
            self.function,
            self.cache_key_func,
            self.ignore_fields,
            self.args,
            self.kwargs,
        )

    def __eq__(self, other: Any) -> bool:
        # Entry identity is its cache key, so the registry can deduplicate
        # repeated registrations of the same call site.
        if not isinstance(other, NeverDieCacheEntry):
            return False
        return self.cache_key == other.cache_key

    def __hash__(self) -> int:
        return hash(self.cache_key)

    def is_expired(self) -> bool:
        # Monotonic clock: immune to wall-clock adjustments.
        return time.monotonic() > self._expires_at

    def reset(self):
        # Successful refresh: clear backoff and schedule the next refresh a
        # full ttl from now.
        self._backoff = 1
        self._expires_at = time.monotonic() + self.ttl

    def revive(self):
        # Failed refresh: retry later with exponential backoff, capped at
        # _MAX_BACKOFF times the ttl.
        self._backoff = min(self._backoff * _BACKOFF_MULTIPLIER, _MAX_BACKOFF)
        self._expires_at = time.monotonic() + self.ttl * self._backoff
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _run_sync_function_and_cache(entry: NeverDieCacheEntry):
    """Call the entry's sync function under its lock and store the result."""
    try:
        with entry.config.sync_lock(entry.cache_key):
            value = entry.function(*entry.args, **entry.kwargs)
            entry.config.storage.set(entry.cache_key, value, None)
            entry.reset()
    except BaseException:  # deliberately broad: a failed refresh must never escape the worker
        entry.revive()
        logger.debug(
            "Exception caching function with never_die",
            extra={"function": entry.function.__qualname__},
            exc_info=True,
        )
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
async def _run_async_function_and_cache(entry: NeverDieCacheEntry):
    """Await the entry's async function under its lock and store the result."""
    try:
        async with entry.config.async_lock(entry.cache_key):
            value = await entry.function(*entry.args, **entry.kwargs)
            await entry.config.storage.aset(entry.cache_key, value, None)
            entry.reset()
    except BaseException:  # deliberately broad: a failed refresh must never escape the worker
        entry.revive()
        logger.debug(
            "Exception caching function with never_die",
            extra={"function": entry.function.__qualname__},
            exc_info=True,
        )
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def _cache_is_being_set(entry: NeverDieCacheEntry) -> bool:
    """Return True while a refresh for this entry's cache key is in flight."""
    key = entry.cache_key
    if entry.loop:
        # Async entries are tracked as concurrent.futures.Future objects.
        future = _NEVER_DIE_CACHE_FUTURES.get(key)
        return future is not None and not future.done()
    # Sync entries are tracked as worker threads.
    worker = _NEVER_DIE_CACHE_THREADS.get(key)
    return worker is not None and worker.is_alive()
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _clear_dead_futures():
    """Drop completed futures from the async refresh registry.

    The registry maps cache keys to ``concurrent.futures.Future`` objects
    created by ``asyncio.run_coroutine_threadsafe`` — the original loop
    variable was misleadingly named ``thread``.
    """
    # Snapshot with list(...) so deletion while iterating is safe.
    for cache_key, future in list(_NEVER_DIE_CACHE_FUTURES.items()):
        if future.done():
            del _NEVER_DIE_CACHE_FUTURES[cache_key]
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def _clear_dead_threads():
    """Drop finished worker threads from the sync refresh registry."""
    # Collect first, then delete, so we never mutate while iterating.
    finished = [key for key, worker in _NEVER_DIE_CACHE_THREADS.items() if not worker.is_alive()]
    for key in finished:
        del _NEVER_DIE_CACHE_THREADS[key]
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def _refresh_never_die_caches():
    """Background loop that re-runs expired never_die entries.

    Runs forever in a daemon thread: every ``_REFRESH_INTERVAL_SECONDS`` it
    scans the registry and, for each expired entry that is not already being
    refreshed, either starts a worker thread (sync functions) or schedules a
    coroutine on the entry's original event loop (async functions).

    Fixes vs. original: the two constant log messages no longer carry a
    needless ``f`` prefix (ruff F541), and the non-exception "loop closed"
    branch no longer passes ``exc_info=True`` (which logged ``NoneType: None``
    since no exception is active there).
    """
    while True:
        try:
            # Snapshot the registry so concurrent registrations are safe.
            for entry in list(_NEVER_DIE_REGISTRY):
                if not entry.is_expired():
                    continue

                if _cache_is_being_set(entry):
                    # A previous refresh for this key is still running.
                    continue

                if not entry.loop:  # sync: refresh on a throwaway worker thread
                    thread = threading.Thread(target=_run_sync_function_and_cache, args=(entry,), daemon=True)
                    thread.start()
                    _NEVER_DIE_CACHE_THREADS[entry.cache_key] = thread
                    continue

                if entry.loop.is_closed():
                    logger.debug(
                        "Loop is closed, skipping future creation",
                        extra={"function": entry.function.__qualname__},
                    )
                    continue

                try:
                    coroutine = _run_async_function_and_cache(entry)
                    future = asyncio.run_coroutine_threadsafe(coroutine, entry.loop)
                except RuntimeError:
                    # Loop closed between the check above and scheduling;
                    # close the never-awaited coroutine to avoid a warning.
                    coroutine.close()
                    logger.debug(
                        "Loop is closed, skipping future creation",
                        extra={"function": entry.function.__qualname__},
                        exc_info=True,
                    )
                    continue

                _NEVER_DIE_CACHE_FUTURES[entry.cache_key] = future
        finally:
            time.sleep(_REFRESH_INTERVAL_SECONDS)
            _clear_dead_futures()
            _clear_dead_threads()
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def _start_never_die_thread():
    """Ensure the singleton background refresher thread is running."""
    global _NEVER_DIE_THREAD
    with _NEVER_DIE_LOCK:
        already_running = _NEVER_DIE_THREAD is not None and _NEVER_DIE_THREAD.is_alive()
        if already_running:
            return

        # Daemon thread: never blocks interpreter shutdown.
        _NEVER_DIE_THREAD = threading.Thread(target=_refresh_never_die_caches, daemon=True)
        _NEVER_DIE_THREAD.start()
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def register_never_die_function(
    function: Callable[..., Any],
    ttl: Number,
    args: tuple,
    kwargs: dict,
    cache_key_func: CacheKeyFunction | None,
    ignore_fields: tuple[str, ...],
    config: CacheConfig,
):
    """Register a call site for never_die cache refreshing.

    For coroutine functions the currently running event loop is captured so
    the background thread can schedule refreshes back onto it; sync
    functions are registered with ``loop=None``.
    """
    loop = asyncio.get_running_loop() if inspect.iscoroutinefunction(function) else None

    entry = NeverDieCacheEntry(
        function=function,
        ttl=ttl,
        args=args,
        kwargs=kwargs,
        cache_key_func=cache_key_func,
        ignore_fields=ignore_fields,
        loop=loop,
        config=config,
    )

    with _NEVER_DIE_LOCK:
        # Entry equality is by cache key, so re-registering the same call
        # site is a no-op.
        if entry not in _NEVER_DIE_REGISTRY:
            _NEVER_DIE_REGISTRY.append(entry)

    _start_never_die_thread()
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
def clear_never_die_registry():
    """
    Forget every registered never_die entry and its in-flight workers.

    Intended for test teardown, so the background refresher stops touching
    resources that have already been cleaned up.
    """
    with _NEVER_DIE_LOCK:
        for store in (_NEVER_DIE_REGISTRY, _NEVER_DIE_CACHE_THREADS, _NEVER_DIE_CACHE_FUTURES):
            store.clear()
|