thds.core 1.42.20250715194419__py3-none-any.whl → 1.43.20250718170951__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of thds.core has been flagged as potentially problematic; consult the registry's advisory page for details.
- thds/core/refcount.py +99 -0
- {thds_core-1.42.20250715194419.dist-info → thds_core-1.43.20250718170951.dist-info}/METADATA +1 -1
- {thds_core-1.42.20250715194419.dist-info → thds_core-1.43.20250718170951.dist-info}/RECORD +6 -5
- {thds_core-1.42.20250715194419.dist-info → thds_core-1.43.20250718170951.dist-info}/WHEEL +0 -0
- {thds_core-1.42.20250715194419.dist-info → thds_core-1.43.20250718170951.dist-info}/entry_points.txt +0 -0
- {thds_core-1.42.20250715194419.dist-info → thds_core-1.43.20250718170951.dist-info}/top_level.txt +0 -0
thds/core/refcount.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"""There are times when what you want is a lazy-ish resource, but you don't want it to live forever - just for the duration of the current stack's usage.
|
|
2
|
+
At the same time, if another thread or stack is currently using the resource, you want to reuse it.
|
|
3
|
+
|
|
4
|
+
A good example of using this might be a ThreadPoolExecutor where multiple other threads might be doing similar work concurrently, and it would help if they could all share the same pool:
|
|
5
|
+
|
|
6
|
+
```
|
|
7
|
+
from concurrent.futures import ThreadPoolExecutor
|
|
8
|
+
from thds.core import refcount
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
shared_thread_pool = refcount.Resource(lambda: ThreadPoolExecutor())
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def do_work(thunks):
|
|
15
|
+
with shared_thread_pool.get() as thread_pool:
|
|
16
|
+
for res in parallel.yield_results(thunks, executor_cm=thread_pool):
|
|
17
|
+
print(res)
|
|
18
|
+
...
|
|
19
|
+
```
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
import threading
|
|
23
|
+
import typing as ty
|
|
24
|
+
from contextlib import contextmanager
|
|
25
|
+
|
|
26
|
+
from . import log
|
|
27
|
+
|
|
28
|
+
R = ty.TypeVar("R")
|
|
29
|
+
logger = log.getLogger(__name__)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class _ContextProxy(ty.Generic[R]):
    """Wraps a resource so that using it as a context manager becomes inert.

    All attribute access is forwarded to the wrapped resource, so callers can
    treat the proxy exactly like the real object - they just cannot tear the
    resource down via ``with``, because its lifetime is owned elsewhere.
    """

    def __init__(self, resource: R):
        self._resource = resource

    def __getattr__(self, name: str) -> ty.Any:
        # Anything we don't define ourselves is delegated to the real resource.
        return getattr(self._resource, name)

    def __enter__(self) -> R:
        # ``with proxy:`` is permitted but does nothing - it simply hands back
        # the underlying resource without re-entering its context manager.
        return self._resource

    def __exit__(self, *args: ty.Any) -> None:
        # Deliberately a no-op: the owner of the resource performs the real
        # cleanup when the last reference is released.
        return None
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class _RefCountResource(ty.Generic[R]):
    """Reference-counted, lazily-created shared resource.

    The first `get()` call builds the resource via `factory` and enters its
    context manager; concurrent or nested `get()` calls share that same live
    resource. When the last user exits, the resource's context manager is
    exited and the resource is dropped, ready to be rebuilt on next use.
    """

    def __init__(self, factory: ty.Callable[[], ty.ContextManager[R]]) -> None:
        self._factory = factory
        # The bound __exit__ of the currently-open context manager, kept so we
        # can close the resource once the ref count returns to zero.
        self._rcm__exit__: ty.Optional[ty.Callable[[ty.Any, ty.Any, ty.Any], ty.Optional[bool]]] = None
        # The live (possibly proxied) resource, or None while idle.
        self._resource: ty.Optional[R] = None
        # Number of stacks currently inside get().
        self._ref_count: int = 0
        # RLock (not Lock): allows re-entrant use from the same thread.
        self._lock = threading.RLock()

    @contextmanager
    def get(self) -> ty.Iterator[R]:
        """Yield the shared resource, creating it if this is the first user.

        Creation, ref-count bookkeeping, and teardown all happen under the
        lock; only the `yield` itself runs unlocked so users don't serialize.
        """
        with self._lock:
            assert self._ref_count >= 0, "Reference count should not be negative prior to incrementing"
            if self._ref_count == 0:
                # Idle state invariants: nothing should be left over from a
                # previous lifetime of the resource.
                assert (
                    self._rcm__exit__ is None
                ), "Resource CM __exit__ should be None when ref count is zero"
                assert self._resource is None, "Resource should be None when ref count is zero"
                resource_cm = self._factory()
                resource = resource_cm.__enter__()
                self._rcm__exit__ = resource_cm.__exit__
                if id(resource) == id(resource_cm):
                    logger.info("Patching self-managing resource to avoid double exit: %s", resource)
                    # this is one of those context managers that returns itself
                    # since we manage this resource, we need to prevent others from trying to enter or exit it.
                    resource = _ContextProxy(resource)  # type: ignore[assignment]
                self._resource = resource
            self._ref_count += 1
            assert self._resource is not None, "Resource should not be None after incrementing ref count"
            resource = self._resource
        try:
            # Yield outside the lock so multiple users can work concurrently.
            yield resource

        finally:
            with self._lock:
                self._ref_count -= 1
                assert self._ref_count >= 0, "Reference count should not be negative after decrementing"
                if self._ref_count == 0:
                    # Last user out: tear down and reset to the idle state.
                    assert (
                        self._rcm__exit__ is not None
                    ), "Resource CM __exit__ should not be None when ref count is zero"
                    assert (
                        self._resource is not None
                    ), "Resource should not be None when ref count is zero"
                    # NOTE(review): exceptions from the yield are not forwarded
                    # to the resource's __exit__ - it always sees a clean exit.
                    self._rcm__exit__(None, None, None)
                    self._resource = None
                    self._rcm__exit__ = None


Resource = _RefCountResource  # probably preferable to use this name
|
@@ -35,6 +35,7 @@ thds/core/progress.py,sha256=4YGbxliDl1i-k-88w4s86uy1E69eQ6xJySGPSkpH1QM,3358
|
|
|
35
35
|
thds/core/project_root.py,sha256=K18U3MLthZnzmdrWmKKtHLd6iu7am9b2vNAThqknpfo,891
|
|
36
36
|
thds/core/protocols.py,sha256=4na2EeWUDWfLn5-SxfMmKegDSndJ5z-vwMhDavhCpEM,409
|
|
37
37
|
thds/core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
38
|
+
thds/core/refcount.py,sha256=KWhF5vULrCUCJPoqh3X58JkbfYo8HmYpUrmCHjrUx64,4020
|
|
38
39
|
thds/core/scaling.py,sha256=f7CtdgK0sN6nroTq5hLAkG8xwbWhbCZUULstSKjoxO0,1615
|
|
39
40
|
thds/core/scope.py,sha256=9RWWCFRqsgjTyH6rzRm_WnO69N_sEBRaykarc2PAnBY,10834
|
|
40
41
|
thds/core/source_serde.py,sha256=X4c7LiT3VidejqtTel9YB6dWGB3x-ct39KF9E50Nbx4,139
|
|
@@ -71,8 +72,8 @@ thds/core/sqlite/structured.py,sha256=SvZ67KcVcVdmpR52JSd52vMTW2ALUXmlHEeD-VrzWV
|
|
|
71
72
|
thds/core/sqlite/types.py,sha256=oUkfoKRYNGDPZRk29s09rc9ha3SCk2SKr_K6WKebBFs,1308
|
|
72
73
|
thds/core/sqlite/upsert.py,sha256=BmKK6fsGVedt43iY-Lp7dnAu8aJ1e9CYlPVEQR2pMj4,5827
|
|
73
74
|
thds/core/sqlite/write.py,sha256=z0219vDkQDCnsV0WLvsj94keItr7H4j7Y_evbcoBrWU,3458
|
|
74
|
-
thds_core-1.
|
|
75
|
-
thds_core-1.
|
|
76
|
-
thds_core-1.
|
|
77
|
-
thds_core-1.
|
|
78
|
-
thds_core-1.
|
|
75
|
+
thds_core-1.43.20250718170951.dist-info/METADATA,sha256=UUrXBliBFNet_IP4KZgrALXRSYdWWCjACqMcYcpmiZo,2216
|
|
76
|
+
thds_core-1.43.20250718170951.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
77
|
+
thds_core-1.43.20250718170951.dist-info/entry_points.txt,sha256=bOCOVhKZv7azF3FvaWX6uxE6yrjK6FcjqhtxXvLiFY8,161
|
|
78
|
+
thds_core-1.43.20250718170951.dist-info/top_level.txt,sha256=LTZaE5SkWJwv9bwOlMbIhiS-JWQEEIcjVYnJrt-CriY,5
|
|
79
|
+
thds_core-1.43.20250718170951.dist-info/RECORD,,
|
|
File without changes
|
{thds_core-1.42.20250715194419.dist-info → thds_core-1.43.20250718170951.dist-info}/entry_points.txt
RENAMED
|
File without changes
|
{thds_core-1.42.20250715194419.dist-info → thds_core-1.43.20250718170951.dist-info}/top_level.txt
RENAMED
|
File without changes
|