wool 0.1rc20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wool/__init__.py +122 -0
- wool/_context.py +29 -0
- wool/_protobuf/worker.py +26 -0
- wool/_resource_pool.py +376 -0
- wool/_typing.py +7 -0
- wool/_undefined.py +11 -0
- wool/_work.py +554 -0
- wool/core/__init__.py +0 -0
- wool/core/discovery/__init__.py +0 -0
- wool/core/discovery/base.py +249 -0
- wool/core/discovery/lan.py +534 -0
- wool/core/discovery/local.py +822 -0
- wool/core/loadbalancer/__init__.py +0 -0
- wool/core/loadbalancer/base.py +125 -0
- wool/core/loadbalancer/roundrobin.py +101 -0
- wool/core/protobuf/__init__.py +18 -0
- wool/core/protobuf/exception.py +3 -0
- wool/core/protobuf/task.py +11 -0
- wool/core/protobuf/task_pb2.py +42 -0
- wool/core/protobuf/task_pb2.pyi +43 -0
- wool/core/protobuf/task_pb2_grpc.py +24 -0
- wool/core/protobuf/worker.py +26 -0
- wool/core/protobuf/worker_pb2.py +53 -0
- wool/core/protobuf/worker_pb2.pyi +65 -0
- wool/core/protobuf/worker_pb2_grpc.py +141 -0
- wool/core/typing.py +22 -0
- wool/core/worker/__init__.py +0 -0
- wool/core/worker/base.py +300 -0
- wool/core/worker/connection.py +250 -0
- wool/core/worker/local.py +148 -0
- wool/core/worker/pool.py +386 -0
- wool/core/worker/process.py +249 -0
- wool/core/worker/proxy.py +427 -0
- wool/core/worker/service.py +231 -0
- wool-0.1rc20.dist-info/METADATA +463 -0
- wool-0.1rc20.dist-info/RECORD +38 -0
- wool-0.1rc20.dist-info/WHEEL +4 -0
- wool-0.1rc20.dist-info/entry_points.txt +2 -0
wool/__init__.py
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
from contextvars import ContextVar
|
|
2
|
+
from importlib.metadata import PackageNotFoundError
|
|
3
|
+
from importlib.metadata import version
|
|
4
|
+
from typing import Final
|
|
5
|
+
|
|
6
|
+
from tblib import pickling_support
|
|
7
|
+
|
|
8
|
+
from wool._context import AppContext
|
|
9
|
+
from wool._resource_pool import ResourcePool
|
|
10
|
+
from wool._work import WoolTask
|
|
11
|
+
from wool._work import WoolTaskEvent
|
|
12
|
+
from wool._work import WoolTaskEventCallback
|
|
13
|
+
from wool._work import WoolTaskEventType
|
|
14
|
+
from wool._work import WoolTaskException
|
|
15
|
+
from wool._work import current_task as wool_current_task
|
|
16
|
+
from wool._work import work
|
|
17
|
+
from wool.core.discovery.base import Discovery
|
|
18
|
+
from wool.core.discovery.base import DiscoveryEvent
|
|
19
|
+
from wool.core.discovery.base import DiscoveryEventType
|
|
20
|
+
from wool.core.discovery.base import DiscoveryLike
|
|
21
|
+
from wool.core.discovery.base import DiscoveryPublisherLike
|
|
22
|
+
from wool.core.discovery.base import DiscoverySubscriberLike
|
|
23
|
+
from wool.core.discovery.base import PredicateFunction
|
|
24
|
+
from wool.core.discovery.base import WorkerInfo
|
|
25
|
+
from wool.core.discovery.lan import LanDiscovery
|
|
26
|
+
from wool.core.discovery.local import LocalDiscovery
|
|
27
|
+
from wool.core.loadbalancer.base import ConnectionResourceFactory
|
|
28
|
+
from wool.core.loadbalancer.base import LoadBalancerContext
|
|
29
|
+
from wool.core.loadbalancer.base import LoadBalancerLike
|
|
30
|
+
from wool.core.loadbalancer.base import NoWorkersAvailable
|
|
31
|
+
from wool.core.loadbalancer.roundrobin import RoundRobinLoadBalancer
|
|
32
|
+
from wool.core.typing import Factory
|
|
33
|
+
from wool.core.worker.base import Worker
|
|
34
|
+
from wool.core.worker.base import WorkerFactory
|
|
35
|
+
from wool.core.worker.base import WorkerLike
|
|
36
|
+
from wool.core.worker.connection import RpcError
|
|
37
|
+
from wool.core.worker.connection import TransientRpcError
|
|
38
|
+
from wool.core.worker.connection import UnexpectedResponse
|
|
39
|
+
from wool.core.worker.connection import WorkerConnection
|
|
40
|
+
from wool.core.worker.local import LocalWorker
|
|
41
|
+
from wool.core.worker.pool import WorkerPool
|
|
42
|
+
from wool.core.worker.proxy import WorkerProxy
|
|
43
|
+
from wool.core.worker.service import WorkerService
|
|
44
|
+
|
|
45
|
+
# Install tblib's pickle support so tracebacks attached to exceptions can be
# serialized across process boundaries (used when workers ship failures back).
pickling_support.install()

# Alias for backwards compatibility
routine = work

# Resolve the installed distribution version; fall back when running from a
# source checkout where the "wool" distribution metadata is absent.
try:
    __version__ = version("wool")
except PackageNotFoundError:
    __version__ = "unknown"

# Ambient WorkerProxy for the current async/task context, if one is active.
__proxy__: Final[ContextVar[WorkerProxy | None]] = ContextVar("__proxy__", default=None)

# Ambient pool of WorkerProxy resources for the current context, if any.
__proxy_pool__: Final[ContextVar[ResourcePool[WorkerProxy] | None]] = ContextVar(
    "__proxy_pool__", default=None
)

# Public API surface of the top-level ``wool`` package, grouped by subsystem.
__all__ = [
    # Connection
    "RpcError",
    "TransientRpcError",
    "UnexpectedResponse",
    "WorkerConnection",
    # Context
    "AppContext",
    # Load balancing
    "ConnectionResourceFactory",
    "LoadBalancerContext",
    "LoadBalancerLike",
    "NoWorkersAvailable",
    "RoundRobinLoadBalancer",
    # Work
    "WoolTask",
    "WoolTaskEvent",
    "WoolTaskEventCallback",
    "WoolTaskEventType",
    "WoolTaskException",
    "routine",
    "work",
    "wool_current_task",
    # Workers
    "LocalWorker",
    "Worker",
    "WorkerFactory",
    "WorkerLike",
    "WorkerPool",
    "WorkerProxy",
    "WorkerService",
    # Discovery
    "Discovery",
    "DiscoveryEvent",
    "DiscoveryEventType",
    "DiscoveryLike",
    "DiscoveryPublisherLike",
    "DiscoverySubscriberLike",
    "LanDiscovery",
    "LocalDiscovery",
    "PredicateFunction",
    "WorkerInfo",
    # Typing
    "Factory",
]
|
|
106
|
+
|
|
107
|
+
# Re-home every re-exported symbol so its __module__ reads "wool" instead of
# the private submodule it was defined in (affects repr(), pickling by
# qualified name, and generated API docs).
for symbol in __all__:
    attribute = globals().get(symbol)
    try:
        # Only touch objects defined somewhere inside this package; "wool" is
        # matched as a dotted-path component (e.g. wool.core.worker.base).
        if attribute and "wool" in attribute.__module__.split("."):
            # Set the module to reflect imports of the symbol
            attribute.__module__ = __name__
    except AttributeError:
        # Objects without a (writable) __module__ — e.g. aliases of plain
        # values — are skipped.
        continue

# for plugin in entry_points(group="wool_cli_plugins"):
#     try:
#         plugin.load()
#         logging.info(f"Loaded CLI plugin {plugin.name}")
#     except Exception as e:
#         logging.error(f"Failed to load CLI plugin {plugin.name}: {e}")
#         raise
|
wool/_context.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
from contextvars import ContextVar
|
|
2
|
+
from contextvars import Token
|
|
3
|
+
from typing import Final
|
|
4
|
+
|
|
5
|
+
from wool._undefined import Undefined
|
|
6
|
+
from wool._undefined import UndefinedType
|
|
7
|
+
|
|
8
|
+
# Context variable holding the ambient dispatch timeout (seconds) for the
# current execution context; ``None`` means "no timeout configured".
dispatch_timeout: Final[ContextVar[float | None]] = ContextVar(
    "_dispatch_timeout", default=None
)


# public
class AppContext:
    """Context manager scoping application-level settings to a ``with`` block.

    Currently manages only ``dispatch_timeout``: on entry the supplied value
    (if not ``Undefined``) is pushed onto the :data:`dispatch_timeout` context
    variable, and on exit the previous value is restored via the saved token.
    Passing ``Undefined`` leaves the ambient value untouched.
    """

    # Requested timeout; ``Undefined`` means "don't touch the context var".
    _dispatch_timeout: float | None | UndefinedType
    # Reset token from ContextVar.set(); ``Undefined`` when nothing was set.
    _dispatch_timeout_token: Token | UndefinedType

    def __init__(self, *, dispatch_timeout: float | None | UndefinedType = Undefined):
        """
        :param dispatch_timeout:
            Timeout in seconds, ``None`` to explicitly clear the timeout, or
            ``Undefined`` (default) to inherit the ambient value.
        """
        self._dispatch_timeout = dispatch_timeout

    def __enter__(self) -> "AppContext":
        if self._dispatch_timeout is not Undefined:
            self._dispatch_timeout_token = dispatch_timeout.set(self._dispatch_timeout)
        else:
            self._dispatch_timeout_token = Undefined
        # Fix: return self so ``with AppContext(...) as ctx:`` binds the
        # context object. The original implicitly returned None, making the
        # ``as`` target useless.
        return self

    def __exit__(self, *_):
        # Restore the previous value only if we actually set one on entry.
        if self._dispatch_timeout_token is not Undefined:
            dispatch_timeout.reset(self._dispatch_timeout_token)
|
wool/_protobuf/worker.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
# Re-export shim: surfaces the generated gRPC/protobuf worker symbols under a
# stable module path. If the generated modules cannot be imported (e.g. stale
# or missing codegen), the ImportError is wrapped in a package-specific error.
try:
    from wool._protobuf.worker_pb2 import Ack
    from wool._protobuf.worker_pb2 import Nack
    from wool._protobuf.worker_pb2 import Response
    from wool._protobuf.worker_pb2 import StopRequest
    from wool._protobuf.worker_pb2 import Void
    from wool._protobuf.worker_pb2 import WorkerInfo
    from wool._protobuf.worker_pb2_grpc import WorkerServicer
    from wool._protobuf.worker_pb2_grpc import WorkerStub
    from wool._protobuf.worker_pb2_grpc import add_WorkerServicer_to_server
except ImportError as e:
    from wool._protobuf.exception import ProtobufImportError

    # Chain the original error so the underlying import failure stays visible.
    raise ProtobufImportError(e) from e

__all__ = [
    "Ack",
    "Nack",
    "Response",
    "StopRequest",
    "Void",
    "WorkerInfo",
    "WorkerServicer",
    "WorkerStub",
    "add_WorkerServicer_to_server",
]
|
wool/_resource_pool.py
ADDED
|
@@ -0,0 +1,376 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from typing import Any
|
|
6
|
+
from typing import Awaitable
|
|
7
|
+
from typing import Callable
|
|
8
|
+
from typing import Final
|
|
9
|
+
from typing import Generic
|
|
10
|
+
from typing import TypeVar
|
|
11
|
+
from typing import cast
|
|
12
|
+
|
|
13
|
+
T = TypeVar("T")


# Sentinel distinguishing "no argument supplied" from an explicit None key.
SENTINEL: Final = object()


class Resource(Generic[T]):
    """
    A single-use async context manager for resource acquisition.

    This class can only be used once as an async context manager. After
    acquisition, it cannot be reacquired, and after release, it cannot be
    released again.

    :param pool:
        The :class:`ResourcePool` this resource belongs to.
    :param key:
        The cache key for this resource.
    """

    def __init__(self, pool: ResourcePool[T], key):
        self._pool = pool
        self._key = key
        self._resource = None
        self._acquired = False
        self._released = False

    async def __aenter__(self) -> T:
        """
        Context manager entry - acquire resource.

        :returns:
            The cached resource object.
        :raises RuntimeError:
            If called on a resource that was previously acquired.
        """
        if self._acquired:
            raise RuntimeError(
                "Cannot re-acquire a resource that has already been acquired"
            )

        self._acquired = True
        try:
            self._resource = await self._pool.acquire(self._key)
            return cast(T, self._resource)
        except Exception:
            # Acquisition failed: roll back the flag so state stays consistent.
            self._acquired = False
            raise

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """
        Context manager exit - release resource.

        :param exc_type:
            Exception type if an exception occurred, None otherwise.
        :param exc_val:
            Exception value if an exception occurred, None otherwise.
        :param exc_tb:
            Exception traceback if an exception occurred, None otherwise.
        """
        await self._release()

    async def _release(self):
        """
        Release the resource back to the pool.

        :raises RuntimeError:
            If attempting to release a resource that was not acquired or
            already released.
        """
        if not self._acquired:
            raise RuntimeError("Cannot release a resource that was not acquired")
        if self._released:
            raise RuntimeError(
                "Cannot release a resource that has already been released"
            )

        self._released = True
        # Bug fix: the original guarded this call with ``if self._resource:``.
        # That truthiness test skips the pool release whenever the factory
        # produced a falsy object (0, "", empty container), leaking the
        # pool's reference count. _release is only reachable after a
        # successful acquire (``__aexit__`` runs only when ``__aenter__``
        # returned), so release unconditionally.
        await self._pool.release(self._key)
|
|
94
|
+
|
|
95
|
+
class ResourcePool(Generic[T]):
    """
    An asynchronous reference-counted cache with TTL-based cleanup.

    Objects are created on-demand via a factory function (sync or async) and
    automatically cleaned up after all references are released and the TTL
    expires.

    :param factory:
        Function to create new objects (sync or async). Called with the
        cache key as its single argument.
    :param finalizer:
        Optional cleanup function (sync or async). Called with the cached
        object when its entry is evicted.
    :param ttl:
        Time-to-live in seconds after last reference is released. With the
        default of 0, cleanup is immediate on last release.
    """

    @dataclass
    class CacheEntry:
        """
        Internal cache entry tracking an object and its metadata.

        :param obj:
            The cached object.
        :param reference_count:
            Number of active references to this object.
        :param cleanup:
            Optional cleanup task scheduled when reference count reaches zero.
        """

        obj: Any
        reference_count: int
        cleanup: asyncio.Task | None = None

    @dataclass
    class Stats:
        """
        Statistics about the current state of the resource pool.

        :param total_entries:
            Total number of cached entries.
        :param referenced_entries:
            Number of entries currently being referenced (reference_count > 0).
        :param pending_cleanup:
            Number of cleanup tasks currently pending execution.
        """

        total_entries: int
        referenced_entries: int
        pending_cleanup: int

    def __init__(
        self,
        factory: Callable[[Any], T | Awaitable[T]],
        *,
        finalizer: Callable[[T], None | Awaitable[None]] | None = None,
        ttl: float = 0,
    ):
        self._factory = factory
        self._finalizer = finalizer
        self._ttl = ttl
        # Single lock serializes all cache mutation; helpers marked as
        # "must hold the lock" rely on this.
        self._cache: dict[Any, ResourcePool.CacheEntry] = {}
        self._lock = asyncio.Lock()

    async def __aenter__(self):
        """Async context manager entry.

        :returns:
            The ResourcePool instance itself.
        """
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit - cleanup all resources.

        :param exc_type:
            Exception type if an exception occurred, None otherwise.
        :param exc_val:
            Exception value if an exception occurred, None otherwise.
        :param exc_tb:
            Exception traceback if an exception occurred, None otherwise.
        """
        await self.clear()

    @property
    def stats(self) -> Stats:
        """
        Return cache statistics.

        .. note::
            This is synchronous for convenience, but should only be called
            when not concurrently modifying the cache.

        :returns:
            :class:`ResourcePool.Stats` containing current statistics.
        """
        # NOTE(review): pending_cleanup (the property) already filters out
        # None/done tasks, so the condition below is redundant but harmless.
        pending_cleanup = sum(
            1 for c in self.pending_cleanup.values() if c is not None and not c.done()
        )
        return self.Stats(
            total_entries=len(self._cache),
            referenced_entries=sum(
                1 for e in self._cache.values() if e.reference_count > 0
            ),
            pending_cleanup=pending_cleanup,
        )

    @property
    def pending_cleanup(self):
        """Dictionary of cache keys with pending cleanup tasks.

        :returns:
            Dictionary mapping cache keys to their not-yet-finished cleanup
            tasks.
        """
        return {
            k: v.cleanup
            for k, v in self._cache.items()
            if v.cleanup is not None and not v.cleanup.done()
        }

    def get(self, key: Any) -> Resource[T]:
        """
        Get a single-use resource handle for *key*.

        :param key:
            The cache key.
        :returns:
            :class:`Resource` to be used with ``async with``; acquisition
            happens on context entry, release on exit.
        """
        return Resource(self, key)

    async def acquire(self, key: Any) -> T:
        """
        Internal acquire method - acquires a reference to the cached object.

        Creates a new object via the factory if not cached. Increments
        reference count and cancels any pending cleanup.

        :param key:
            The cache key.
        :returns:
            The cached or newly created object.
        """
        async with self._lock:
            if key in self._cache:
                entry = self._cache[key]
                entry.reference_count += 1

                # Cancel pending cleanup task if it exists
                if entry.cleanup is not None and not entry.cleanup.done():
                    entry.cleanup.cancel()
                    try:
                        # Awaiting while holding the lock is safe: cancelling
                        # the task raises CancelledError in it even if it is
                        # blocked waiting for this same lock.
                        await entry.cleanup
                    except asyncio.CancelledError:
                        pass
                    entry.cleanup = None

                return entry.obj
            else:
                # Cache miss - create new object. Note the factory runs while
                # the lock is held, so a slow factory blocks other keys too.
                obj = await self._await(self._factory, key)
                self._cache[key] = self.CacheEntry(obj=obj, reference_count=1)
                return obj

    async def release(self, key: Any) -> None:
        """
        Release a reference to the cached object.

        Decrements reference count. If count reaches 0, schedules cleanup
        after TTL expires (if TTL > 0), otherwise cleans up immediately.

        :param key:
            The cache key.
        :raises KeyError:
            If key not in cache.
        :raises ValueError:
            If the entry's reference count is already 0.
        """
        async with self._lock:
            if key not in self._cache:
                raise KeyError(f"Key '{key}' not found in cache")
            entry = self._cache[key]

            if entry.reference_count <= 0:
                raise ValueError(f"Reference count for key '{key}' is already 0")

            entry.reference_count -= 1

            if entry.reference_count <= 0:
                if self._ttl > 0:
                    # Schedule cleanup after TTL; the task re-acquires the
                    # lock itself, so it must not be awaited here.
                    entry.cleanup = asyncio.create_task(self._schedule_cleanup(key))
                else:
                    # Immediate cleanup
                    await self._cleanup(key)

    async def clear(self, key=SENTINEL) -> None:
        """Clear cache entries and cancel pending cleanups.

        :param key:
            Specific key to clear, or SENTINEL to clear all entries.
        :raises KeyError:
            If a specific *key* is given and is not present in the cache.
        """
        async with self._lock:
            # Clean up all entries
            if key is SENTINEL:
                keys = list(self._cache.keys())
            else:
                keys = [key]
            # NOTE(review): the loop variable shadows the ``key`` parameter;
            # harmless here since the parameter is not used afterwards.
            for key in keys:
                await self._cleanup(key)

    async def _schedule_cleanup(self, key: Any) -> None:
        """
        Schedule cleanup after TTL delay.

        Only cleans up if the reference count is still 0 when TTL expires.

        :param key:
            The cache key to schedule cleanup for.
        """
        try:
            await asyncio.sleep(self._ttl)

            async with self._lock:
                # Double-check conditions - reference might have been re-acquired
                if key in self._cache:
                    entry = self._cache[key]
                    if entry.reference_count == 0:
                        await self._cleanup(key)

        except asyncio.CancelledError:
            # Cleanup was cancelled due to new reference - this is expected
            pass

    async def _cleanup(self, key: Any) -> None:
        """
        Remove entry from cache and call finalizer.

        .. warning::
            Must be called while holding the lock.

        :param key:
            The cache key to cleanup.
        :raises KeyError:
            If *key* is not present in the cache.
        """
        entry = self._cache[key]
        try:
            # Cancel cleanup task if running
            if entry.cleanup is not None and not entry.cleanup.done():
                entry.cleanup.cancel()
                try:
                    await entry.cleanup
                except asyncio.CancelledError:
                    pass
        finally:
            # Call finalizer; its errors are deliberately swallowed so
            # eviction always completes.
            if self._finalizer:
                try:
                    await self._await(self._finalizer, entry.obj)
                except Exception:
                    pass
            del self._cache[key]

    async def _await(self, func: Callable, *args) -> Any:
        """
        Call a function that might be sync or async.

        If the function is a coroutine function, await it. Otherwise, call it
        synchronously. If the result is a coroutine, await that as well.

        :param func:
            The function to call.
        :param args:
            Arguments to pass to the function.
        :returns:
            The result of the function call.
        """
        if asyncio.iscoroutinefunction(func):
            return await func(*args)
        else:
            result = func(*args)
            # Check if the result is a coroutine and await it if so
            if asyncio.iscoroutine(result):
                return await result
            return result
|
wool/_typing.py
ADDED