cachify 0.1.0.tar.gz → 0.2.0.tar.gz

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. {cachify-0.1.0 → cachify-0.2.0}/LICENSE +21 -21
  2. {cachify-0.1.0 → cachify-0.2.0}/PKG-INFO +4 -3
  3. {cachify-0.1.0 → cachify-0.2.0}/README.md +145 -145
  4. {cachify-0.1.0 → cachify-0.2.0}/cachify/__init__.py +24 -22
  5. {cachify-0.1.0 → cachify-0.2.0}/cachify/cache.py +116 -116
  6. {cachify-0.1.0 → cachify-0.2.0}/cachify/config/__init__.py +4 -4
  7. {cachify-0.1.0 → cachify-0.2.0}/cachify/features/never_die.py +219 -219
  8. {cachify-0.1.0 → cachify-0.2.0}/cachify/memory_cache.py +37 -37
  9. {cachify-0.1.0 → cachify-0.2.0}/cachify/redis/__init__.py +19 -19
  10. {cachify-0.1.0 → cachify-0.2.0}/cachify/redis/config.py +115 -115
  11. {cachify-0.1.0 → cachify-0.2.0}/cachify/redis/lock.py +232 -232
  12. {cachify-0.1.0 → cachify-0.2.0}/cachify/redis_cache.py +27 -27
  13. {cachify-0.1.0 → cachify-0.2.0}/cachify/storage/__init__.py +9 -9
  14. {cachify-0.1.0 → cachify-0.2.0}/cachify/storage/memory_storage.py +52 -52
  15. {cachify-0.1.0 → cachify-0.2.0}/cachify/storage/redis_storage.py +138 -138
  16. {cachify-0.1.0 → cachify-0.2.0}/cachify/types/__init__.py +95 -95
  17. {cachify-0.1.0 → cachify-0.2.0}/cachify/utils/arguments.py +65 -65
  18. {cachify-0.1.0 → cachify-0.2.0}/cachify/utils/decorator_factory.py +44 -44
  19. {cachify-0.1.0 → cachify-0.2.0}/cachify/utils/functions.py +10 -10
  20. {cachify-0.1.0 → cachify-0.2.0}/cachify/utils/locks.py +6 -6
  21. {cachify-0.1.0 → cachify-0.2.0}/pyproject.toml +54 -48
  22. {cachify-0.1.0 → cachify-0.2.0}/cachify/features/__init__.py +0 -0
  23. {cachify-0.1.0 → cachify-0.2.0}/cachify/utils/__init__.py +0 -0
@@ -1,21 +1,21 @@
-MIT License
-
-Copyright (c) 2025 Pulsar Finance
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+MIT License
+
+Copyright (c) 2025 Pulsar Finance
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -1,9 +1,9 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: cachify
-Version: 0.1.0
+Version: 0.2.0
 Summary: A simple cache library with sync/async support, Memory and Redis backend
-Home-page: https://github.com/PulsarDataSolutions/cachify
 License: MIT
+License-File: LICENSE
 Keywords: cachify,cache,caching,redis,async,decorator,memoization
 Author: dynalz
 Author-email: git@pulsar.finance
@@ -20,6 +20,7 @@ Classifier: Programming Language :: Python :: 3.14
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Typing :: Typed
 Requires-Dist: redis[hiredis] (>5.0.0)
+Project-URL: Homepage, https://github.com/PulsarDataSolutions/cachify
 Project-URL: Repository, https://github.com/PulsarDataSolutions/cachify
 Description-Content-Type: text/markdown
 
@@ -1,145 +1,145 @@
-# Python Cachify Library
-
-A simple and robust caching library for Python functions, supporting both synchronous and asynchronous code.
-
-## Features
-
-- Cache function results based on function ID and arguments
-- Supports both synchronous and asynchronous functions
-- Thread-safe locking to prevent duplicate cached function calls
-- Configurable Time-To-Live (TTL) for cached items
-- "Never Die" mode for functions that should keep cache refreshed automatically
-- Skip cache functionality to force fresh function execution while updating cache
-- Redis cache for distributed caching across multiple processes/machines
-
-## Installation
-
-```bash
-# Clone the repository
-git clone https://github.com/PulsarDefi/cachify.git
-cd cachify
-
-# Install the package
-poetry install
-```
-
-## Usage
-
-### Basic Usage
-
-```python
-from cachify import cache
-
-# Cache results of sync functions
-@cache(ttl=60)  # ttl in seconds
-def expensive_calculation(a, b):
-    # Some expensive operation
-    return a + b
-
-# And async functions
-@cache(ttl=3600)  # ttl in seconds
-async def another_calculation(url):
-    # Some expensive IO call
-    return await httpx.get(url).json()
-```
-
-### Redis Cache
-
-For distributed caching across multiple processes or machines, use `rcache`:
-
-```python
-import redis
-from cachify import setup_redis_config, rcache
-
-# Configure Redis (call once at startup)
-setup_redis_config(
-    sync_client=redis.from_url("redis://localhost:6379/0"),
-    key_prefix="myapp",  # default: "key_prefix", prefix searchable on redis "PREFIX:*"
-    lock_timeout=10,  # default: 10, maximum lock lifetime in seconds
-    on_error="silent",  # "silent" (default) or "raise" in case of redis errors
-)
-
-@rcache(ttl=300)
-def get_user(user_id: int) -> dict:
-    return fetch_from_database(user_id)
-
-# Async version
-import redis.asyncio as aredis
-
-setup_redis_config(async_client=aredis.from_url("redis://localhost:6379/0"))
-
-@rcache(ttl=300)
-async def get_user_async(user_id: int) -> dict:
-    return await fetch_from_database(user_id)
-```
-
-### Never Die Cache
-
-The `never_die` feature ensures that cached values never expire by automatically refreshing them in the background:
-
-```python
-# Cache with never_die (automatic refresh)
-@cache(ttl=300, never_die=True)
-def critical_operation(data_id: str):
-    # Expensive operation that should always be available from cache
-    return fetch_data_from_database(data_id)
-```
-
-**How Never Die Works:**
-
-1. When a function with `never_die=True` is first called, the result is cached
-2. A background thread monitors all `never_die` functions
-3. On cache expiration (TTL), the function is automatically called again
-4. The cache is updated with the new result
-5. If the refresh operation fails, the existing cached value is preserved
-6. Clients always get fast response times by reading from cache
-
-**Benefits:**
-
-- Cache is always "warm" and ready to serve
-- No user request ever has to wait for the expensive operation
-- If a service the cached function depends on goes down temporarily, the last successful result is still available
-- Perfect for critical operations where latency must be minimized
-
-### Skip Cache
-
-The `skip_cache` feature allows you to bypass reading from cache while still updating it with fresh results:
-
-```python
-@cache(ttl=300)
-def get_user_data(user_id):
-    # Expensive operation to fetch user data
-    return fetch_from_database(user_id)
-
-# Normal call - uses cache if available
-user = get_user_data(123)
-# Force fresh execution while updating cache
-fresh_user = get_user_data(123, skip_cache=True)
-# Next normal call will get the updated cached value
-updated_user = get_user_data(123)
-```
-
-**How Skip Cache Works:**
-
-1. When `skip_cache=True` is passed, the function bypasses reading from cache
-2. The function executes normally and returns fresh results
-3. The fresh result is stored in the cache, updating any existing cached value
-4. Subsequent calls without `skip_cache=True` will use the updated cached value
-5. The TTL timer resets from when the cache was last updated
-
-**Benefits:**
-
-- Force refresh of potentially stale data while keeping the cache warm
-- Ensure fresh data for critical operations while keeping the cache available for other calls
-
-## Testing
-
-Run the test suite:
-
-```bash
-poetry run python -m pytest
-```
-
-## License
-
-MIT
+# Python Cachify Library
+
+A simple and robust caching library for Python functions, supporting both synchronous and asynchronous code.
+
+## Features
+
+- Cache function results based on function ID and arguments
+- Supports both synchronous and asynchronous functions
+- Thread-safe locking to prevent duplicate cached function calls
+- Configurable Time-To-Live (TTL) for cached items
+- "Never Die" mode for functions that should keep cache refreshed automatically
+- Skip cache functionality to force fresh function execution while updating cache
+- Redis cache for distributed caching across multiple processes/machines
+
+## Installation
+
+```bash
+# Clone the repository
+git clone https://github.com/PulsarDefi/cachify.git
+cd cachify
+
+# Install the package
+poetry install
+```
+
+## Usage
+
+### Basic Usage
+
+```python
+from cachify import cache
+
+# Cache results of sync functions
+@cache(ttl=60)  # ttl in seconds
+def expensive_calculation(a, b):
+    # Some expensive operation
+    return a + b
+
+# And async functions
+@cache(ttl=3600)  # ttl in seconds
+async def another_calculation(url):
+    # Some expensive IO call
+    return await httpx.get(url).json()
+```
+
+### Redis Cache
+
+For distributed caching across multiple processes or machines, use `rcache`:
+
+```python
+import redis
+from cachify import setup_redis_config, rcache
+
+# Configure Redis (call once at startup)
+setup_redis_config(
+    sync_client=redis.from_url("redis://localhost:6379/0"),
+    key_prefix="myapp",  # default: "key_prefix", prefix searchable on redis "PREFIX:*"
+    lock_timeout=10,  # default: 10, maximum lock lifetime in seconds
+    on_error="silent",  # "silent" (default) or "raise" in case of redis errors
+)
+
+@rcache(ttl=300)
+def get_user(user_id: int) -> dict:
+    return fetch_from_database(user_id)
+
+# Async version
+import redis.asyncio as aredis
+
+setup_redis_config(async_client=aredis.from_url("redis://localhost:6379/0"))
+
+@rcache(ttl=300)
+async def get_user_async(user_id: int) -> dict:
+    return await fetch_from_database(user_id)
+```
+
+### Never Die Cache
+
+The `never_die` feature ensures that cached values never expire by automatically refreshing them in the background:
+
+```python
+# Cache with never_die (automatic refresh)
+@cache(ttl=300, never_die=True)
+def critical_operation(data_id: str):
+    # Expensive operation that should always be available from cache
+    return fetch_data_from_database(data_id)
+```
+
+**How Never Die Works:**
+
+1. When a function with `never_die=True` is first called, the result is cached
+2. A background thread monitors all `never_die` functions
+3. On cache expiration (TTL), the function is automatically called again
+4. The cache is updated with the new result
+5. If the refresh operation fails, the existing cached value is preserved
+6. Clients always get fast response times by reading from cache
+
+**Benefits:**
+
+- Cache is always "warm" and ready to serve
+- No user request ever has to wait for the expensive operation
+- If a service the cached function depends on goes down temporarily, the last successful result is still available
+- Perfect for critical operations where latency must be minimized
+
+### Skip Cache
+
+The `skip_cache` feature allows you to bypass reading from cache while still updating it with fresh results:
+
+```python
+@cache(ttl=300)
+def get_user_data(user_id):
+    # Expensive operation to fetch user data
+    return fetch_from_database(user_id)
+
+# Normal call - uses cache if available
+user = get_user_data(123)
+# Force fresh execution while updating cache
+fresh_user = get_user_data(123, skip_cache=True)
+# Next normal call will get the updated cached value
+updated_user = get_user_data(123)
+```
+
+**How Skip Cache Works:**
+
+1. When `skip_cache=True` is passed, the function bypasses reading from cache
+2. The function executes normally and returns fresh results
+3. The fresh result is stored in the cache, updating any existing cached value
+4. Subsequent calls without `skip_cache=True` will use the updated cached value
+5. The TTL timer resets from when the cache was last updated
+
+**Benefits:**
+
+- Force refresh of potentially stale data while keeping the cache warm
+- Ensure fresh data for critical operations while keeping the cache available for other calls
+
+## Testing
+
+Run the test suite:
+
+```bash
+poetry run python -m pytest
+```
+
+## License
+
+MIT
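The "How Never Die Works" steps in the README diff above boil down to a background refresher: once a `never_die` function has been called, something re-runs it when its TTL elapses and keeps the last successful value if the refresh fails. A minimal, generic sketch of that idea using plain threading follows; the names (`start_refresher`, `store`) are hypothetical, and this is not cachify's actual `never_die.py`, which does not appear in this section of the diff.

```python
import threading
import time
from typing import Any, Callable


def start_refresher(fn: Callable[[], Any], ttl: float, store: dict[str, Any], key: str) -> threading.Thread:
    """Keep store[key] fresh by re-running fn every ttl seconds (illustrative only)."""

    def loop() -> None:
        while True:
            time.sleep(ttl)
            try:
                store[key] = fn()  # refresh the cached value in the background
            except Exception:
                pass  # refresh failed: keep serving the last successful result

    thread = threading.Thread(target=loop, daemon=True)
    thread.start()
    return thread
```

Callers only ever read `store[key]`, so they never pay for the expensive call themselves, which matches the latency behaviour the README describes.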
@@ -1,22 +1,24 @@
-from .features.never_die import clear_never_die_registry
-from .memory_cache import cache
-from .redis import DEFAULT_KEY_PREFIX, get_redis_config, reset_redis_config, setup_redis_config
-from .redis_cache import redis_cache
-from .types import CacheKwargs
-
-__version__ = "0.1.0"
-
-rcache = redis_cache
-
-__all__ = [
-    "__version__",
-    "cache",
-    "rcache",
-    "redis_cache",
-    "setup_redis_config",
-    "get_redis_config",
-    "reset_redis_config",
-    "DEFAULT_KEY_PREFIX",
-    "CacheKwargs",
-    "clear_never_die_registry",
-]
+from importlib.metadata import version
+
+from .features.never_die import clear_never_die_registry
+from .memory_cache import cache
+from .redis import DEFAULT_KEY_PREFIX, get_redis_config, reset_redis_config, setup_redis_config
+from .redis_cache import redis_cache
+from .types import CacheKwargs
+
+__version__ = version("cachify")
+
+rcache = redis_cache
+
+__all__ = [
+    "__version__",
+    "cache",
+    "rcache",
+    "redis_cache",
+    "setup_redis_config",
+    "get_redis_config",
+    "reset_redis_config",
+    "DEFAULT_KEY_PREFIX",
+    "CacheKwargs",
+    "clear_never_die_registry",
+]
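The `cachify/__init__.py` change above drops the hard-coded `__version__ = "0.1.0"` in favour of reading the version from the installed distribution's metadata. One side effect is that importing the package now requires that metadata to be present: `importlib.metadata.version` raises `PackageNotFoundError` when the distribution is not installed (for example, in a source checkout that has not been through `poetry install`). A hedged sketch of a guarded variant some projects use; the fallback string is hypothetical and not part of this diff.

```python
from importlib.metadata import PackageNotFoundError, version

try:
    # Resolve the version from the installed distribution's metadata so that
    # __version__ always matches what pyproject.toml / PKG-INFO declare.
    __version__ = version("cachify")
except PackageNotFoundError:
    # Source tree that has not been installed yet; fall back to a placeholder
    # (hypothetical choice, not what cachify 0.2.0 actually ships).
    __version__ = "0.0.0+unknown"
```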
@@ -1,116 +1,116 @@
-import functools
-import inspect
-from typing import Any, Callable, cast
-
-from cachify.features.never_die import register_never_die_function
-from cachify.types import CacheConfig, CacheKeyFunction, F, Number
-from cachify.utils.arguments import create_cache_key
-
-
-def _async_decorator(
-    function: F,
-    ttl: Number,
-    never_die: bool,
-    cache_key_func: CacheKeyFunction | None,
-    ignore_fields: tuple[str, ...],
-    config: CacheConfig,
-) -> F:
-    @functools.wraps(function)
-    async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
-        skip_cache = kwargs.pop("skip_cache", False)
-        cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)
-
-        if cache_entry := await config.storage.aget(cache_key, skip_cache):
-            return cache_entry.result
-
-        async with config.async_lock(cache_key):
-            if cache_entry := await config.storage.aget(cache_key, skip_cache):
-                return cache_entry.result
-
-            result = await function(*args, **kwargs)
-            await config.storage.aset(cache_key, result, None if never_die else ttl)
-
-            if never_die:
-                register_never_die_function(function, ttl, args, kwargs, cache_key_func, ignore_fields, config)
-
-            return result
-
-    return cast(F, async_wrapper)
-
-
-def _sync_decorator(
-    function: F,
-    ttl: Number,
-    never_die: bool,
-    cache_key_func: CacheKeyFunction | None,
-    ignore_fields: tuple[str, ...],
-    config: CacheConfig,
-) -> F:
-    @functools.wraps(function)
-    def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
-        skip_cache = kwargs.pop("skip_cache", False)
-        cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)
-
-        if cache_entry := config.storage.get(cache_key, skip_cache):
-            return cache_entry.result
-
-        with config.sync_lock(cache_key):
-            if cache_entry := config.storage.get(cache_key, skip_cache):
-                return cache_entry.result
-
-            result = function(*args, **kwargs)
-            config.storage.set(cache_key, result, None if never_die else ttl)
-
-            if never_die:
-                register_never_die_function(function, ttl, args, kwargs, cache_key_func, ignore_fields, config)
-
-            return result
-
-    return cast(F, sync_wrapper)
-
-
-def base_cache(
-    ttl: Number,
-    never_die: bool,
-    cache_key_func: CacheKeyFunction | None,
-    ignore_fields: tuple[str, ...],
-    config: CacheConfig,
-) -> Callable[[F], F]:
-    """
-    Base cache decorator factory used by both memory and Redis cache implementations.
-
-    Args:
-        ttl: Time to live for cached items in seconds
-        never_die: If True, the cache will never expire and will be recalculated based on the ttl
-        cache_key_func: Custom cache key function, used for more complex cache scenarios
-        ignore_fields: Tuple of strings with the function params to ignore when creating the cache key
-        config: Cache configuration specifying storage, locks, and never_die registration
-
-    Features:
-    - Works for both sync and async functions
-    - Only allows one execution at a time per function+args
-    - Makes subsequent calls wait for the first call to complete
-    """
-    if cache_key_func and ignore_fields:
-        raise ValueError("Either cache_key_func or ignore_fields can be provided, but not both")
-
-    def decorator(function: F) -> F:
-        if inspect.iscoroutinefunction(function):
-            return _async_decorator(
-                function=function,
-                ttl=ttl,
-                never_die=never_die,
-                cache_key_func=cache_key_func,
-                ignore_fields=ignore_fields,
-                config=config,
-            )
-        return _sync_decorator(
-            function=function,
-            ttl=ttl,
-            never_die=never_die,
-            cache_key_func=cache_key_func,
-            ignore_fields=ignore_fields,
-            config=config,
-        )
-
-    return decorator
+import functools
+import inspect
+from typing import Any, Callable, cast
+
+from cachify.features.never_die import register_never_die_function
+from cachify.types import CacheConfig, CacheKeyFunction, F, Number
+from cachify.utils.arguments import create_cache_key
+
+
+def _async_decorator(
+    function: F,
+    ttl: Number,
+    never_die: bool,
+    cache_key_func: CacheKeyFunction | None,
+    ignore_fields: tuple[str, ...],
+    config: CacheConfig,
+) -> F:
+    @functools.wraps(function)
+    async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+        skip_cache = kwargs.pop("skip_cache", False)
+        cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)
+
+        if cache_entry := await config.storage.aget(cache_key, skip_cache):
+            return cache_entry.result
+
+        async with config.async_lock(cache_key):
+            if cache_entry := await config.storage.aget(cache_key, skip_cache):
+                return cache_entry.result
+
+            result = await function(*args, **kwargs)
+            await config.storage.aset(cache_key, result, None if never_die else ttl)
+
+            if never_die:
+                register_never_die_function(function, ttl, args, kwargs, cache_key_func, ignore_fields, config)
+
+            return result
+
+    return cast(F, async_wrapper)
+
+
+def _sync_decorator(
+    function: F,
+    ttl: Number,
+    never_die: bool,
+    cache_key_func: CacheKeyFunction | None,
+    ignore_fields: tuple[str, ...],
+    config: CacheConfig,
+) -> F:
+    @functools.wraps(function)
+    def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+        skip_cache = kwargs.pop("skip_cache", False)
+        cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)
+
+        if cache_entry := config.storage.get(cache_key, skip_cache):
+            return cache_entry.result
+
+        with config.sync_lock(cache_key):
+            if cache_entry := config.storage.get(cache_key, skip_cache):
+                return cache_entry.result
+
+            result = function(*args, **kwargs)
+            config.storage.set(cache_key, result, None if never_die else ttl)
+
+            if never_die:
+                register_never_die_function(function, ttl, args, kwargs, cache_key_func, ignore_fields, config)
+
+            return result
+
+    return cast(F, sync_wrapper)
+
+
+def base_cache(
+    ttl: Number,
+    never_die: bool,
+    cache_key_func: CacheKeyFunction | None,
+    ignore_fields: tuple[str, ...],
+    config: CacheConfig,
+) -> Callable[[F], F]:
+    """
+    Base cache decorator factory used by both memory and Redis cache implementations.
+
+    Args:
+        ttl: Time to live for cached items in seconds
+        never_die: If True, the cache will never expire and will be recalculated based on the ttl
+        cache_key_func: Custom cache key function, used for more complex cache scenarios
+        ignore_fields: Tuple of strings with the function params to ignore when creating the cache key
+        config: Cache configuration specifying storage, locks, and never_die registration
+
+    Features:
+    - Works for both sync and async functions
+    - Only allows one execution at a time per function+args
+    - Makes subsequent calls wait for the first call to complete
+    """
+    if cache_key_func and ignore_fields:
+        raise ValueError("Either cache_key_func or ignore_fields can be provided, but not both")
+
+    def decorator(function: F) -> F:
+        if inspect.iscoroutinefunction(function):
+            return _async_decorator(
+                function=function,
+                ttl=ttl,
+                never_die=never_die,
+                cache_key_func=cache_key_func,
+                ignore_fields=ignore_fields,
+                config=config,
+            )
+        return _sync_decorator(
+            function=function,
+            ttl=ttl,
+            never_die=never_die,
+            cache_key_func=cache_key_func,
+            ignore_fields=ignore_fields,
+            config=config,
+        )
+
+    return decorator
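Both wrappers in the `cache.py` diff above follow the same double-checked pattern: read the cache, acquire a per-key lock, re-check under the lock, then compute and store, so concurrent callers with the same function and arguments trigger only one execution. A standalone sketch of that pattern with a plain dict and `threading.Lock` standing in for `config.storage` and `config.sync_lock`; all names here are illustrative and not part of cachify's API.

```python
import threading
from typing import Any, Callable

_results: dict[str, Any] = {}            # stands in for config.storage
_locks: dict[str, threading.Lock] = {}   # one lock per cache key
_locks_guard = threading.Lock()          # protects the _locks registry itself


def _lock_for(key: str) -> threading.Lock:
    with _locks_guard:
        return _locks.setdefault(key, threading.Lock())


def cached_call(key: str, compute: Callable[[], Any]) -> Any:
    if key in _results:           # first check: lock-free fast path
        return _results[key]
    with _lock_for(key):          # only one caller per key gets past here at a time
        if key in _results:       # second check: another thread may have filled
            return _results[key]  # the cache while this one waited for the lock
        _results[key] = compute() # compute once, publish for every later caller
        return _results[key]
```

An expensive `compute` runs once per key; every other concurrent caller blocks on that key's lock and then returns the value the first caller stored.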
@@ -1,4 +1,4 @@
-import logging
-
-logger = logging.getLogger("cachify")
-logger.addHandler(logging.NullHandler())
+import logging
+
+logger = logging.getLogger("cachify")
+logger.addHandler(logging.NullHandler())