cachify 0.2.0.tar.gz → 0.3.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. {cachify-0.2.0 → cachify-0.3.0}/LICENSE +1 -1
  2. {cachify-0.2.0 → cachify-0.3.0}/PKG-INFO +70 -11
  3. {cachify-0.2.0 → cachify-0.3.0}/README.md +69 -10
  4. {cachify-0.2.0 → cachify-0.3.0}/cachify/cache.py +14 -6
  5. {cachify-0.2.0 → cachify-0.3.0}/cachify/features/never_die.py +3 -3
  6. {cachify-0.2.0 → cachify-0.3.0}/cachify/memory_cache.py +11 -6
  7. {cachify-0.2.0 → cachify-0.3.0}/cachify/redis_cache.py +4 -3
  8. {cachify-0.2.0 → cachify-0.3.0}/cachify/utils/arguments.py +5 -10
  9. cachify-0.3.0/cachify/utils/errors.py +2 -0
  10. cachify-0.3.0/cachify/utils/hash.py +18 -0
  11. {cachify-0.2.0 → cachify-0.3.0}/pyproject.toml +1 -1
  12. cachify-0.2.0/cachify/utils/decorator_factory.py +0 -44
  13. cachify-0.2.0/cachify/utils/locks.py +0 -6
  14. {cachify-0.2.0 → cachify-0.3.0}/cachify/__init__.py +5 -5
  15. {cachify-0.2.0 → cachify-0.3.0}/cachify/config/__init__.py +0 -0
  16. {cachify-0.2.0 → cachify-0.3.0}/cachify/features/__init__.py +0 -0
  17. {cachify-0.2.0 → cachify-0.3.0}/cachify/redis/__init__.py +0 -0
  18. {cachify-0.2.0 → cachify-0.3.0}/cachify/redis/config.py +0 -0
  19. {cachify-0.2.0 → cachify-0.3.0}/cachify/redis/lock.py +0 -0
  20. {cachify-0.2.0 → cachify-0.3.0}/cachify/storage/__init__.py +0 -0
  21. {cachify-0.2.0 → cachify-0.3.0}/cachify/storage/memory_storage.py +0 -0
  22. {cachify-0.2.0 → cachify-0.3.0}/cachify/storage/redis_storage.py +0 -0
  23. {cachify-0.2.0 → cachify-0.3.0}/cachify/types/__init__.py +0 -0
  24. {cachify-0.2.0 → cachify-0.3.0}/cachify/utils/__init__.py +0 -0
  25. {cachify-0.2.0 → cachify-0.3.0}/cachify/utils/functions.py +0 -0

{cachify-0.2.0 → cachify-0.3.0}/LICENSE

@@ -1,6 +1,6 @@
  MIT License
 
- Copyright (c) 2025 Pulsar Finance
+ Copyright (c) 2026 Pulsar Finance
 
  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal

{cachify-0.2.0 → cachify-0.3.0}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cachify
- Version: 0.2.0
+ Version: 0.3.0
  Summary: A simple cache library with sync/async support, Memory and Redis backend
  License: MIT
  License-File: LICENSE
@@ -28,6 +28,19 @@ Description-Content-Type: text/markdown
 
  A simple and robust caching library for Python functions, supporting both synchronous and asynchronous code.
 
+ ## Table of Contents
+
+ - [Features](#features)
+ - [Installation](#installation)
+ - [Usage](#usage)
+ - [Basic Usage](#basic-usage)
+ - [Redis Cache](#redis-cache)
+ - [Never Die Cache](#never-die-cache)
+ - [Skip Cache](#skip-cache)
+ - [Testing](#testing)
+ - [Contributing](#contributing)
+ - [License](#license)
+
  ## Features
 
  - Cache function results based on function ID and arguments
@@ -41,12 +54,14 @@ A simple and robust caching library for Python functions, supporting both synchr
  ## Installation
 
  ```bash
- # Clone the repository
- git clone https://github.com/PulsarDefi/cachify.git
- cd cachify
+ # Using pip
+ pip install cachify
+
+ # Using poetry
+ poetry add cachify
 
- # Install the package
- poetry install
+ # Using uv
+ uv add cachify
  ```
 
  ## Usage
@@ -57,18 +72,58 @@ poetry install
  from cachify import cache
 
  # Cache function in sync functions
- @cache(ttl=60) # ttl in seconds
+ @cache(ttl=60) # ttl in seconds
  def expensive_calculation(a, b):
      # Some expensive operation
      return a + b
 
  # And async functions
- @cache(ttl=3600) # ttl in seconds
+ @cache(ttl=3600) # ttl in seconds
  async def another_calculation(url):
      # Some expensive IO call
      return await httpx.get(url).json()
  ```
 
+ ### Decorator Parameters
+
+ | Parameter | Type | Default | Description |
+ | ---------------- | --------------- | ------- | -------------------------------------------------------------- |
+ | `ttl` | `int \| float` | `300` | Time to live for cached items in seconds |
+ | `never_die` | `bool` | `False` | If True, cache refreshes automatically in background |
+ | `cache_key_func` | `Callable` | `None` | Custom function to generate cache keys |
+ | `ignore_fields` | `Sequence[str]` | `()` | Function parameters to exclude from cache key |
+ | `no_self` | `bool` | `False` | If True, ignores the first parameter (usually `self` or `cls`) |
+
+ ### Custom Cache Key Function
+
+ Use `cache_key_func` when you need custom control over how cache keys are generated:
+
+ ```python
+ from cachify import cache
+
+ def custom_key(args: tuple, kwargs: dict) -> str:
+     user_id = kwargs.get("user_id") or args[0]
+     return f"user:{user_id}"
+
+ @cache(ttl=60, cache_key_func=custom_key)
+ def get_user_profile(user_id: int):
+     return fetch_from_database(user_id)
+ ```
+
+ ### Ignore Fields
+
+ Use `ignore_fields` to exclude specific parameters from the cache key. Useful when some arguments don't affect the result:
+
+ ```python
+ from cachify import cache
+
+ @cache(ttl=300, ignore_fields=("logger", "request_id"))
+ def fetch_data(query: str, logger: Logger, request_id: str):
+     # Cache key only uses 'query', ignoring logger and request_id
+     logger.info(f"Fetching data for request {request_id}")
+     return database.execute(query)
+ ```
+
  ### Redis Cache
 
  For distributed caching across multiple processes or machines, use `rcache`:
@@ -80,7 +135,7 @@ from cachify import setup_redis_config, rcache
  # Configure Redis (call once at startup)
  setup_redis_config(
      sync_client=redis.from_url("redis://localhost:6379/0"),
-     key_prefix="myapp", # default: "key_prefix", prefix searchable on redis "PREFIX:*"
+     key_prefix="myapp", # default: "cachify", prefix searchable on redis "PREFIX:*"
      lock_timeout=10, # default: 10, maximum lock lifetime in seconds
      on_error="silent", # "silent" (default) or "raise" in case of redis errors
  )
@@ -95,7 +150,7 @@ import redis.asyncio as aredis
  setup_redis_config(async_client=aredis.from_url("redis://localhost:6379/0"))
 
  @rcache(ttl=300)
- def get_user_async(user_id: int) -> dict:
+ async def get_user_async(user_id: int) -> dict:
      return await fetch_from_database(user_id)
  ```
 
@@ -166,7 +221,11 @@ Run the test scripts
  poetry run python -m pytest
  ```
 
+ ## Contributing
+
+ Contributions are welcome! Feel free to open an issue or submit a pull request.
+
  ## License
 
- MIT
+ This project is licensed under the MIT License - see the [LICENSE](https://github.com/PulsarDataSolutions/cachify/blob/master/LICENSE) file for details.
 

{cachify-0.2.0 → cachify-0.3.0}/README.md

@@ -2,6 +2,19 @@
 
  A simple and robust caching library for Python functions, supporting both synchronous and asynchronous code.
 
+ ## Table of Contents
+
+ - [Features](#features)
+ - [Installation](#installation)
+ - [Usage](#usage)
+ - [Basic Usage](#basic-usage)
+ - [Redis Cache](#redis-cache)
+ - [Never Die Cache](#never-die-cache)
+ - [Skip Cache](#skip-cache)
+ - [Testing](#testing)
+ - [Contributing](#contributing)
+ - [License](#license)
+
  ## Features
 
  - Cache function results based on function ID and arguments
@@ -15,12 +28,14 @@ A simple and robust caching library for Python functions, supporting both synchr
  ## Installation
 
  ```bash
- # Clone the repository
- git clone https://github.com/PulsarDefi/cachify.git
- cd cachify
+ # Using pip
+ pip install cachify
+
+ # Using poetry
+ poetry add cachify
 
- # Install the package
- poetry install
+ # Using uv
+ uv add cachify
  ```
 
  ## Usage
@@ -31,18 +46,58 @@ poetry install
  from cachify import cache
 
  # Cache function in sync functions
- @cache(ttl=60) # ttl in seconds
+ @cache(ttl=60) # ttl in seconds
  def expensive_calculation(a, b):
      # Some expensive operation
      return a + b
 
  # And async functions
- @cache(ttl=3600) # ttl in seconds
+ @cache(ttl=3600) # ttl in seconds
  async def another_calculation(url):
      # Some expensive IO call
      return await httpx.get(url).json()
  ```
 
+ ### Decorator Parameters
+
+ | Parameter | Type | Default | Description |
+ | ---------------- | --------------- | ------- | -------------------------------------------------------------- |
+ | `ttl` | `int \| float` | `300` | Time to live for cached items in seconds |
+ | `never_die` | `bool` | `False` | If True, cache refreshes automatically in background |
+ | `cache_key_func` | `Callable` | `None` | Custom function to generate cache keys |
+ | `ignore_fields` | `Sequence[str]` | `()` | Function parameters to exclude from cache key |
+ | `no_self` | `bool` | `False` | If True, ignores the first parameter (usually `self` or `cls`) |
+
+ ### Custom Cache Key Function
+
+ Use `cache_key_func` when you need custom control over how cache keys are generated:
+
+ ```python
+ from cachify import cache
+
+ def custom_key(args: tuple, kwargs: dict) -> str:
+     user_id = kwargs.get("user_id") or args[0]
+     return f"user:{user_id}"
+
+ @cache(ttl=60, cache_key_func=custom_key)
+ def get_user_profile(user_id: int):
+     return fetch_from_database(user_id)
+ ```
+
+ ### Ignore Fields
+
+ Use `ignore_fields` to exclude specific parameters from the cache key. Useful when some arguments don't affect the result:
+
+ ```python
+ from cachify import cache
+
+ @cache(ttl=300, ignore_fields=("logger", "request_id"))
+ def fetch_data(query: str, logger: Logger, request_id: str):
+     # Cache key only uses 'query', ignoring logger and request_id
+     logger.info(f"Fetching data for request {request_id}")
+     return database.execute(query)
+ ```
+
  ### Redis Cache
 
  For distributed caching across multiple processes or machines, use `rcache`:
@@ -54,7 +109,7 @@ from cachify import setup_redis_config, rcache
  # Configure Redis (call once at startup)
  setup_redis_config(
      sync_client=redis.from_url("redis://localhost:6379/0"),
-     key_prefix="myapp", # default: "key_prefix", prefix searchable on redis "PREFIX:*"
+     key_prefix="myapp", # default: "cachify", prefix searchable on redis "PREFIX:*"
      lock_timeout=10, # default: 10, maximum lock lifetime in seconds
      on_error="silent", # "silent" (default) or "raise" in case of redis errors
  )
@@ -69,7 +124,7 @@ import redis.asyncio as aredis
  setup_redis_config(async_client=aredis.from_url("redis://localhost:6379/0"))
 
  @rcache(ttl=300)
- def get_user_async(user_id: int) -> dict:
+ async def get_user_async(user_id: int) -> dict:
      return await fetch_from_database(user_id)
  ```
 
@@ -140,6 +195,10 @@ Run the test scripts
  poetry run python -m pytest
  ```
 
+ ## Contributing
+
+ Contributions are welcome! Feel free to open an issue or submit a pull request.
+
  ## License
 
- MIT
+ This project is licensed under the MIT License - see the [LICENSE](https://github.com/PulsarDataSolutions/cachify/blob/master/LICENSE) file for details.

{cachify-0.2.0 → cachify-0.3.0}/cachify/cache.py

@@ -1,6 +1,6 @@
  import functools
  import inspect
- from typing import Any, Callable, cast
+ from typing import Any, Callable, Sequence, cast
 
  from cachify.features.never_die import register_never_die_function
  from cachify.types import CacheConfig, CacheKeyFunction, F, Number
@@ -73,7 +73,8 @@ def base_cache(
      ttl: Number,
      never_die: bool,
      cache_key_func: CacheKeyFunction | None,
-     ignore_fields: tuple[str, ...],
+     ignore_fields: Sequence[str],
+     no_self: bool,
      config: CacheConfig,
  ) -> Callable[[F], F]:
      """
@@ -83,7 +84,8 @@ def base_cache(
      ttl: Time to live for cached items in seconds
      never_die: If True, the cache will never expire and will be recalculated based on the ttl
      cache_key_func: Custom cache key function, used for more complex cache scenarios
-     ignore_fields: Tuple of strings with the function params to ignore when creating the cache key
+     ignore_fields: Sequence of strings with the function params to ignore when creating the cache key
+     no_self: if True, the first parameter (typically 'self' for methods) will be ignored when creating the cache key
      config: Cache configuration specifying storage, locks, and never_die registration
 
      Features:
@@ -91,17 +93,23 @@
      - Only allows one execution at a time per function+args
      - Makes subsequent calls wait for the first call to complete
      """
-     if cache_key_func and ignore_fields:
+
+     if cache_key_func and (ignore_fields or no_self):
          raise ValueError("Either cache_key_func or ignore_fields can be provided, but not both")
 
      def decorator(function: F) -> F:
+         ignore = tuple(ignore_fields)
+
+         if no_self:
+             ignore += function.__code__.co_varnames[:1]
+
          if inspect.iscoroutinefunction(function):
              return _async_decorator(
                  function=function,
                  ttl=ttl,
                  never_die=never_die,
                  cache_key_func=cache_key_func,
-                 ignore_fields=ignore_fields,
+                 ignore_fields=ignore,
                  config=config,
              )
          return _sync_decorator(
@@ -109,7 +117,7 @@ def base_cache(
              ttl=ttl,
              never_die=never_die,
              cache_key_func=cache_key_func,
-             ignore_fields=ignore_fields,
+             ignore_fields=ignore,
              config=config,
          )
 
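
The new `no_self` flag simply prepends the function's first parameter name (taken from `function.__code__.co_varnames`) to the ignore list, so `self` or `cls` never enters the cache key. Below is a minimal sketch of how the flag is meant to be used on an instance method, based on the parameter table added to the README above; the class and method names are illustrative, not part of the package.

```python
from cachify import cache


class PriceFeed:
    """Illustrative class; only the decorator usage is taken from the diff."""

    @cache(ttl=60, no_self=True)
    def latest_price(self, symbol: str) -> float:
        # With no_self=True the `self` argument is excluded from the cache key,
        # so every PriceFeed instance shares one cached entry per `symbol`.
        return self._expensive_lookup(symbol)

    def _expensive_lookup(self, symbol: str) -> float:
        return 42.0  # placeholder for a slow upstream call
```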

{cachify-0.2.0 → cachify-0.3.0}/cachify/features/never_die.py

@@ -140,19 +140,19 @@ def _refresh_never_die_caches():
 
          if entry.loop.is_closed():
              logger.debug(
-                 f"Loop is closed, skipping future creation",
+                 "Loop is closed, skipping future creation",
                  extra={"function": entry.function.__qualname__},
                  exc_info=True,
              )
              continue
 
+         coroutine = _run_async_function_and_cache(entry)
          try:
-             coroutine = _run_async_function_and_cache(entry)
              future = asyncio.run_coroutine_threadsafe(coroutine, entry.loop)
          except RuntimeError:
              coroutine.close()
              logger.debug(
-                 f"Loop is closed, skipping future creation",
+                 "Loop is closed, skipping future creation",
                  extra={"function": entry.function.__qualname__},
                  exc_info=True,
              )
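
The refactor above creates the coroutine object before the `try` block so that, when `asyncio.run_coroutine_threadsafe` raises `RuntimeError` on a closed loop, the already-created coroutine can still be closed in the `except` branch. A standalone sketch of that pattern using only the standard library; the `refresh` coroutine is hypothetical:

```python
import asyncio


async def refresh() -> str:
    # Hypothetical refresh job standing in for the cached function.
    return "refreshed value"


loop = asyncio.new_event_loop()
loop.close()  # simulate the target loop having shut down already

# Build the coroutine outside the try block, mirroring the change above.
coroutine = refresh()
try:
    future = asyncio.run_coroutine_threadsafe(coroutine, loop)
except RuntimeError:
    # Scheduling failed ("Event loop is closed"); close the coroutine
    # explicitly so Python does not warn that it was never awaited.
    coroutine.close()
    print("loop is closed, skipping future creation")
```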

{cachify-0.2.0 → cachify-0.3.0}/cachify/memory_cache.py

@@ -1,18 +1,22 @@
+ import asyncio
  import threading
- from typing import Callable
+ from collections import defaultdict
+ from typing import Callable, Sequence
 
  from cachify.cache import base_cache
  from cachify.storage.memory_storage import MemoryStorage
  from cachify.types import CacheConfig, CacheKeyFunction, F, Number
- from cachify.utils.locks import ASYNC_LOCKS, SYNC_LOCKS
 
  _CACHE_CLEAR_THREAD: threading.Thread | None = None
  _CACHE_CLEAR_LOCK: threading.Lock = threading.Lock()
 
+ _ASYNC_LOCKS: defaultdict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
+ _SYNC_LOCKS: defaultdict[str, threading.Lock] = defaultdict(threading.Lock)
+
  _MEMORY_CONFIG = CacheConfig(
      storage=MemoryStorage,
-     sync_lock=lambda cache_key: SYNC_LOCKS[cache_key],
-     async_lock=lambda cache_key: ASYNC_LOCKS[cache_key],
+     sync_lock=_SYNC_LOCKS.__getitem__,
+     async_lock=_ASYNC_LOCKS.__getitem__,
  )
 
 
@@ -30,8 +34,9 @@ def cache(
      ttl: Number = 300,
      never_die: bool = False,
      cache_key_func: CacheKeyFunction | None = None,
-     ignore_fields: tuple[str, ...] = (),
+     ignore_fields: Sequence[str] = (),
+     no_self: bool = False,
  ) -> Callable[[F], F]:
      """In-memory cache decorator. See `base_cache` for full documentation."""
      _start_cache_clear_thread()
-     return base_cache(ttl, never_die, cache_key_func, ignore_fields, _MEMORY_CONFIG)
+     return base_cache(ttl, never_die, cache_key_func, ignore_fields, no_self, _MEMORY_CONFIG)
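
The per-key locks previously imported from `cachify.utils.locks` now live in module-level `defaultdict`s, and the lambdas are replaced by the bound `__getitem__` methods, which behave the same way: looking up a missing key creates and stores a fresh lock. A small standard-library-only sketch of that pattern, independent of cachify internals:

```python
import threading
from collections import defaultdict

# Missing keys are populated lazily with a new Lock, so each cache key
# gets exactly one lock object shared by all callers.
_SYNC_LOCKS: defaultdict[str, threading.Lock] = defaultdict(threading.Lock)

# The bound __getitem__ is a drop-in replacement for `lambda key: _SYNC_LOCKS[key]`.
sync_lock = _SYNC_LOCKS.__getitem__

with sync_lock("user:42"):
    pass  # critical section for the "user:42" cache key

assert sync_lock("user:42") is sync_lock("user:42")  # same lock every time
```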

{cachify-0.2.0 → cachify-0.3.0}/cachify/redis_cache.py

@@ -1,4 +1,4 @@
- from typing import Callable
+ from typing import Callable, Sequence
 
  from cachify.cache import base_cache
  from cachify.redis.lock import RedisLockManager
@@ -16,7 +16,8 @@ def redis_cache(
      ttl: Number = 300,
      never_die: bool = False,
      cache_key_func: CacheKeyFunction | None = None,
-     ignore_fields: tuple[str, ...] = (),
+     ignore_fields: Sequence[str] = (),
+     no_self: bool = False,
  ) -> Callable[[F], F]:
      """
      Redis cache decorator. See `base_cache` for full documentation.
@@ -24,4 +25,4 @@ def redis_cache(
      Requires setup_redis_config() to be called before use.
      Uses Redis for distributed caching across multiple processes/machines.
      """
-     return base_cache(ttl, never_die, cache_key_func, ignore_fields, _REDIS_CONFIG)
+     return base_cache(ttl, never_die, cache_key_func, ignore_fields, no_self, _REDIS_CONFIG)
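
`redis_cache` (exported as `rcache`) gains the same `ignore_fields: Sequence[str]` and `no_self` parameters and forwards them to `base_cache`. A hedged sketch of the Redis-backed variant, combining the `setup_redis_config` call shown in the README hunks with the new flag; the repository class is illustrative:

```python
import redis

from cachify import rcache, setup_redis_config

# Configure the shared Redis client once at startup (as in the README diff).
setup_redis_config(sync_client=redis.from_url("redis://localhost:6379/0"))


class UserRepository:
    # Illustrative class; only the decorator and config calls come from the diff.
    @rcache(ttl=300, no_self=True)
    def get_user(self, user_id: int) -> dict:
        # `self` is dropped from the cache key, so instances and processes
        # sharing this Redis reuse the same entry per user_id.
        return {"id": user_id}
```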

{cachify-0.2.0 → cachify-0.3.0}/cachify/utils/arguments.py

@@ -1,17 +1,12 @@
- import hashlib
  import inspect
- import pickle
  from collections.abc import Callable, Generator
  from inspect import Signature
  from typing import Any
 
  from cachify.types import CacheKeyFunction
+ from cachify.utils.errors import CacheKeyError
  from cachify.utils.functions import get_function_id
-
-
- def _cache_key_fingerprint(value: object) -> str:
-     payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
-     return hashlib.blake2b(payload, digest_size=16).hexdigest()
+ from cachify.utils.hash import object_hash
 
 
  def _iter_arguments(
@@ -54,12 +49,12 @@ def create_cache_key(
      if not cache_key_func:
          function_signature = inspect.signature(function)
          items = tuple(_iter_arguments(function_signature, args, kwargs, ignore_fields))
-         return f"{function_id}:{_cache_key_fingerprint(items)}"
+         return f"{function_id}:{object_hash(items)}"
 
      cache_key = cache_key_func(args, kwargs)
      try:
-         return f"{function_id}:{_cache_key_fingerprint(cache_key)}"
+         return f"{function_id}:{object_hash(cache_key)}"
      except TypeError as exc:
-         raise ValueError(
+         raise CacheKeyError(
              "Cache key function must return a hashable cache key - be careful with mutable types (list, dict, set) and non built-in types"
          ) from exc

cachify-0.3.0/cachify/utils/errors.py

@@ -0,0 +1,2 @@
+ class CacheKeyError(ValueError):
+     pass
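
Because `CacheKeyError` subclasses `ValueError`, code written against 0.2.0 (where `create_cache_key` raised `ValueError` directly) keeps working unchanged. A small sketch, assuming cachify 0.3.0 is installed:

```python
from cachify.utils.errors import CacheKeyError  # new module in 0.3.0

try:
    raise CacheKeyError("cache key function returned an unhashable value")
except ValueError as exc:
    # Existing `except ValueError` handlers still catch the new error type.
    print(f"caught {type(exc).__name__}: {exc}")
```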

cachify-0.3.0/cachify/utils/hash.py

@@ -0,0 +1,18 @@
+ import hashlib
+ import pickle
+ from typing import Any
+
+ from cachify.utils.errors import CacheKeyError
+
+
+ def object_hash(value: Any) -> str:
+     try:
+         payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
+
+     except Exception as exc:
+         raise CacheKeyError(
+             "Unable to serialize object for hashing - ensure all parts of the object are pickleable. "
+             "Hint: create a custom __reduce__ method for the suspected object if necessary."
+         ) from exc
+
+     return hashlib.blake2b(payload, digest_size=16).hexdigest()
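
`object_hash` is the old `_cache_key_fingerprint` promoted to its own module: it pickles the value and returns a 16-byte BLAKE2b digest, wrapping any pickling failure in `CacheKeyError`. A standard-library sketch of the same fingerprinting technique, independent of cachify:

```python
import hashlib
import pickle


def fingerprint(value: object) -> str:
    # Pickle the value and hash the bytes with BLAKE2b (16-byte digest).
    payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
    return hashlib.blake2b(payload, digest_size=16).hexdigest()


print(fingerprint(("get_user", (42,), {"active": True})))  # 32-hex-char digest

try:
    fingerprint(lambda x: x)  # lambdas cannot be pickled
except Exception as exc:
    # This is the failure mode that object_hash converts into CacheKeyError.
    print(f"not pickleable: {exc}")
```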

{cachify-0.2.0 → cachify-0.3.0}/pyproject.toml

@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "cachify"
- version = "0.2.0"
+ version = "0.3.0"
  description = "A simple cache library with sync/async support, Memory and Redis backend"
  license = "MIT"
  readme = "README.md"
@@ -1,44 +0,0 @@
1
- import inspect
2
- from typing import Callable
3
-
4
- from cachify._async import async_decorator
5
- from cachify._sync import sync_decorator
6
- from cachify.types import CacheConfig, CacheKeyFunction, F, Number
7
-
8
-
9
- def create_cache_decorator(
10
- ttl: Number,
11
- never_die: bool,
12
- cache_key_func: CacheKeyFunction | None,
13
- ignore_fields: tuple[str, ...],
14
- config: CacheConfig,
15
- ) -> Callable[[F], F]:
16
- """
17
- Create a cache decorator with the given configuration.
18
-
19
- This is a shared factory used by both memory_cache and redis_cache
20
- to avoid code duplication.
21
- """
22
- if cache_key_func and ignore_fields:
23
- raise ValueError("Either cache_key_func or ignore_fields can be provided, but not both")
24
-
25
- def decorator(function: F) -> F:
26
- if inspect.iscoroutinefunction(function):
27
- return async_decorator(
28
- function=function,
29
- ttl=ttl,
30
- never_die=never_die,
31
- cache_key_func=cache_key_func,
32
- ignore_fields=ignore_fields,
33
- config=config,
34
- )
35
- return sync_decorator(
36
- function=function,
37
- ttl=ttl,
38
- never_die=never_die,
39
- cache_key_func=cache_key_func,
40
- ignore_fields=ignore_fields,
41
- config=config,
42
- )
43
-
44
- return decorator

cachify-0.2.0/cachify/utils/locks.py

@@ -1,6 +0,0 @@
- import asyncio
- import threading
- from collections import defaultdict
-
- ASYNC_LOCKS: defaultdict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
- SYNC_LOCKS: defaultdict[str, threading.Lock] = defaultdict(threading.Lock)

{cachify-0.2.0 → cachify-0.3.0}/cachify/__init__.py

@@ -12,13 +12,13 @@ rcache = redis_cache
 
  __all__ = [
      "__version__",
-     "cache",
      "rcache",
-     "redis_cache",
-     "setup_redis_config",
+     "clear_never_die_registry",
+     "cache",
+     "DEFAULT_KEY_PREFIX",
      "get_redis_config",
      "reset_redis_config",
-     "DEFAULT_KEY_PREFIX",
+     "setup_redis_config",
+     "redis_cache",
      "CacheKwargs",
-     "clear_never_die_registry",
  ]