gisolate 0.2.15.tar.gz → 0.2.16.dev0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/PKG-INFO +61 -1
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/README.md +60 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/pyproject.toml +1 -1
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/src/gisolate/__init__.py +5 -1
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/src/gisolate/_internal.py +16 -2
- gisolate-0.2.16.dev0/src/gisolate/pubsub.py +425 -0
- gisolate-0.2.16.dev0/tests/test_pubsub.py +644 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/.code-review-graph/.gitignore +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/.code-review-graph/graph.db +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/.envrc +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/.gitignore +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/CLAUDE.md +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/LICENSE +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/lefthook.yml +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/src/gisolate/_workers.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/src/gisolate/bridge.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/src/gisolate/hub.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/src/gisolate/local.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/src/gisolate/proxy.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/src/gisolate/subprocess.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/__init__.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/__init__.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/features/process_proxy.feature +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/features/serialization.feature +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/features/subprocess_run.feature +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/features/thread_local.feature +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/test_proxy.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/test_serialization.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/test_subprocess.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/bdd/test_thread_local.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/conftest.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/helpers.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/test_bridge.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/test_hub.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/test_internal.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/test_local.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/test_proxy.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/test_subprocess.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/tests/test_workers.py +0 -0
- {gisolate-0.2.15 → gisolate-0.2.16.dev0}/uv.lock +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gisolate
-Version: 0.2.15
+Version: 0.2.16.dev0
 Summary: Process isolation for gevent applications — run any object in a clean subprocess, call methods transparently via ZMQ IPC.
 Project-URL: Repository, https://github.com/wy-z/gisolate
 License-Expression: MIT
@@ -105,6 +105,47 @@ asyncio.run(main())
 server.close()
 ```
 
+### ProcessPublisher / ProcessSubscriber — one-way fan-out
+
+ZMQ PUB/SUB for one-way data streaming (snapshots, signals, heartbeats). Use this when message loss is acceptable; use `ProcessBridge` when you need request/response with delivery guarantees.
+
+```python
+# Producer (gevent side)
+from gisolate import ProcessPublisher
+
+pub = ProcessPublisher("ipc:///tmp/stream.sock").start()
+pub.publish("v1.snapshot.AAPL", {"price": 150.0})
+pub.publish("v1.heartbeat.gevent", {"ts_ns": 1234567890})
+pub.close()
+
+# Consumer (asyncio side)
+import asyncio
+from gisolate import ProcessSubscriber
+
+async def main():
+    sub = ProcessSubscriber("ipc:///tmp/stream.sock")
+
+    async def on_snapshot(topic, payload):
+        print(topic, payload)
+
+    async def on_heartbeat(topic, payload):
+        print("heartbeat", payload)
+
+    sub.subscribe("v1.snapshot.", on_snapshot)
+    sub.subscribe("v1.heartbeat.", on_heartbeat)
+    sub.start()
+    await asyncio.sleep(10)
+    await sub.close()
+
+asyncio.run(main())
+```
+
+Notes:
+- **Topic prefix matching** — `sub.subscribe("v1.snapshot.", h)` receives every topic starting with that prefix.
+- **Multiple handlers per prefix** — invoked concurrently with `asyncio.gather`. Exceptions in one handler do not kill the reader task.
+- **Lossy by design** — `publish` is non-blocking; messages are dropped when the send queue is full (slow subscriber). Set `sndhwm=` to tune.
+- **Pluggable serializer** — defaults to `SmartPickle`. Pass any object implementing the `Serializer` protocol (`dumps`/`loads`) to use msgpack, JSON, etc.
+
 ### ThreadLocalProxy — per-thread instances
 
 Thread-local proxy using unpatched `threading.local` for true isolation in `gevent.threadpool`:
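The `sndhwm=` knob mentioned in the notes above is the ZMQ send high-water mark used by `ProcessPublisher` (see the constructor reference further down; the default is 1000). A minimal sketch of raising it for a bursty producer; the address, topic, and value are illustrative:

```python
from gisolate import ProcessPublisher

# Illustrative: allow up to 10_000 queued outgoing messages per subscriber
# before ZMQ starts dropping (the package default is 1000).
pub = ProcessPublisher("ipc:///tmp/stream.sock", sndhwm=10_000).start()
pub.publish("v1.snapshot.MSFT", {"price": 410.0})
pub.close()
```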
@@ -153,6 +194,25 @@ Run a function in an isolated subprocess. Blocks with gevent-safe polling.
 - **`await bridge.call(func, *args, timeout=60, **kwargs)`** — async RPC call (client mode)
 - **`bridge.close()`** — cleanup resources
 
+### `ProcessPublisher(address, *, serializer=SmartPickle, sndhwm=1000)`
+
+- **`pub.start()`** — bind the PUB socket (idempotent, returns self)
+- **`pub.publish(topic, payload)`** — non-blocking publish; drops on slow consumers
+- **`pub.close()`** — cleanup (idempotent)
+- Supports context manager (`with` statement)
+
+### `ProcessSubscriber(address, *, serializer=SmartPickle)`
+
+- **`sub.subscribe(topic_prefix, handler)`** — register an async handler for a topic prefix
+- **`sub.unsubscribe(topic_prefix, handler=None)`** — remove a handler or all handlers for a prefix
+- **`sub.start()`** — connect and spawn the reader task (idempotent, returns self)
+- **`await sub.close()`** — cancel reader and cleanup (idempotent)
+- Supports async context manager (`async with` statement)
+
+### `Serializer` (Protocol)
+
+Anything with `dumps(obj) -> bytes` and `loads(bytes) -> obj` static methods can be used as a serializer for `ProcessPublisher` / `ProcessSubscriber`. Default is `SmartPickle` (pickle, falling back to dill).
+
 ### `ThreadLocalProxy(factory)`
 
 Transparent proxy delegating attribute access to a per-thread instance.
README.md
@@ -84,6 +84,47 @@ asyncio.run(main())
 server.close()
 ```
 
+### ProcessPublisher / ProcessSubscriber — one-way fan-out
+
+ZMQ PUB/SUB for one-way data streaming (snapshots, signals, heartbeats). Use this when message loss is acceptable; use `ProcessBridge` when you need request/response with delivery guarantees.
+
+```python
+# Producer (gevent side)
+from gisolate import ProcessPublisher
+
+pub = ProcessPublisher("ipc:///tmp/stream.sock").start()
+pub.publish("v1.snapshot.AAPL", {"price": 150.0})
+pub.publish("v1.heartbeat.gevent", {"ts_ns": 1234567890})
+pub.close()
+
+# Consumer (asyncio side)
+import asyncio
+from gisolate import ProcessSubscriber
+
+async def main():
+    sub = ProcessSubscriber("ipc:///tmp/stream.sock")
+
+    async def on_snapshot(topic, payload):
+        print(topic, payload)
+
+    async def on_heartbeat(topic, payload):
+        print("heartbeat", payload)
+
+    sub.subscribe("v1.snapshot.", on_snapshot)
+    sub.subscribe("v1.heartbeat.", on_heartbeat)
+    sub.start()
+    await asyncio.sleep(10)
+    await sub.close()
+
+asyncio.run(main())
+```
+
+Notes:
+- **Topic prefix matching** — `sub.subscribe("v1.snapshot.", h)` receives every topic starting with that prefix.
+- **Multiple handlers per prefix** — invoked concurrently with `asyncio.gather`. Exceptions in one handler do not kill the reader task.
+- **Lossy by design** — `publish` is non-blocking; messages are dropped when the send queue is full (slow subscriber). Set `sndhwm=` to tune.
+- **Pluggable serializer** — defaults to `SmartPickle`. Pass any object implementing the `Serializer` protocol (`dumps`/`loads`) to use msgpack, JSON, etc.
+
 ### ThreadLocalProxy — per-thread instances
 
 Thread-local proxy using unpatched `threading.local` for true isolation in `gevent.threadpool`:
@@ -132,6 +173,25 @@ Run a function in an isolated subprocess. Blocks with gevent-safe polling.
 - **`await bridge.call(func, *args, timeout=60, **kwargs)`** — async RPC call (client mode)
 - **`bridge.close()`** — cleanup resources
 
+### `ProcessPublisher(address, *, serializer=SmartPickle, sndhwm=1000)`
+
+- **`pub.start()`** — bind the PUB socket (idempotent, returns self)
+- **`pub.publish(topic, payload)`** — non-blocking publish; drops on slow consumers
+- **`pub.close()`** — cleanup (idempotent)
+- Supports context manager (`with` statement)
+
+### `ProcessSubscriber(address, *, serializer=SmartPickle)`
+
+- **`sub.subscribe(topic_prefix, handler)`** — register an async handler for a topic prefix
+- **`sub.unsubscribe(topic_prefix, handler=None)`** — remove a handler or all handlers for a prefix
+- **`sub.start()`** — connect and spawn the reader task (idempotent, returns self)
+- **`await sub.close()`** — cancel reader and cleanup (idempotent)
+- Supports async context manager (`async with` statement)
+
+### `Serializer` (Protocol)
+
+Anything with `dumps(obj) -> bytes` and `loads(bytes) -> obj` static methods can be used as a serializer for `ProcessPublisher` / `ProcessSubscriber`. Default is `SmartPickle` (pickle, falling back to dill).
+
 ### `ThreadLocalProxy(factory)`
 
 Transparent proxy delegating attribute access to a per-thread instance.
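Both classes also support the context-manager forms listed above, which fold `start()` and `close()` into the `with` / `async with` blocks. A minimal sketch; in practice the producer and consumer run in separate processes, and the address and topic are illustrative:

```python
import asyncio

from gisolate import ProcessPublisher, ProcessSubscriber

ADDR = "ipc:///tmp/stream.sock"  # illustrative address


def produce():
    # __enter__ binds the PUB socket; __exit__ closes it.
    with ProcessPublisher(ADDR) as pub:
        pub.publish("v1.snapshot.AAPL", {"price": 150.0})


async def consume():
    async def on_snapshot(topic, payload):
        print(topic, payload)

    # __aenter__ connects and spawns the reader task; __aexit__ awaits close().
    async with ProcessSubscriber(ADDR) as sub:
        sub.subscribe("v1.snapshot.", on_snapshot)
        await asyncio.sleep(10)


# The gevent process would call produce(); the asyncio process runs consume().
asyncio.run(consume())
```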
src/gisolate/__init__.py
@@ -4,11 +4,12 @@ Run any object in a clean subprocess, call methods transparently via ZMQ IPC.
 Isolates libraries incompatible with gevent monkey-patching.
 """
 
-from ._internal import ProcessError, RemoteError
+from ._internal import ProcessError, RemoteError, Serializer
 from .bridge import ProcessBridge
 from .hub import ensure_hub_started, shutdown as shutdown_hub, spawn_on_main_hub
 from .local import ThreadLocalProxy
 from .proxy import ProcessProxy, get_default_mp_context, set_default_mp_context
+from .pubsub import ProcessPublisher, ProcessSubscriber
 from .subprocess import run_in_subprocess
 
 # Pre-initialize threadpoolctl on main thread to cache library info.
@@ -24,7 +25,10 @@ __all__ = [
     "ProcessBridge",
     "ProcessError",
     "ProcessProxy",
+    "ProcessPublisher",
+    "ProcessSubscriber",
     "RemoteError",
+    "Serializer",
     "ThreadLocalProxy",
     "ensure_hub_started",
     "get_default_mp_context",
src/gisolate/_internal.py
@@ -4,7 +4,7 @@ import contextlib
 import io
 import logging
 import pickle
-from typing import Any
+from typing import Any, Protocol, runtime_checkable
 
 import dill
 import gevent.monkey
@@ -44,8 +44,22 @@ class RemoteError(RuntimeError):
 # ---------------------------------------------------------------------------
 
 
+@runtime_checkable
+class Serializer(Protocol):
+    """Pluggable serializer protocol: ``dumps(obj) -> bytes`` / ``loads(bytes) -> obj``."""
+
+    @staticmethod
+    def dumps(obj: Any) -> bytes: ...
+
+    @staticmethod
+    def loads(data: bytes) -> Any: ...
+
+
 class SmartPickle:
-    """Serializer preferring pickle, falling back to dill. Learns from failures."""
+    """Serializer preferring pickle, falling back to dill. Learns from failures.
+
+    Implements the :class:`Serializer` protocol.
+    """
 
     _PICKLE = b"P"
     _DILL = b"D"
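Since `Serializer` is `runtime_checkable` and only requires static `dumps`/`loads`, a custom serializer can be passed wherever `SmartPickle` is the default. A minimal sketch of a JSON-based serializer; `JsonSerializer` is hypothetical, not part of the package:

```python
import json
from typing import Any

from gisolate import ProcessPublisher, Serializer


class JsonSerializer:
    """Hypothetical serializer: JSON text encoded as UTF-8 bytes."""

    @staticmethod
    def dumps(obj: Any) -> bytes:
        return json.dumps(obj).encode("utf-8")

    @staticmethod
    def loads(data: bytes) -> Any:
        return json.loads(data.decode("utf-8"))


# runtime_checkable protocol: the isinstance check only verifies dumps/loads exist.
assert isinstance(JsonSerializer, Serializer)

pub = ProcessPublisher("ipc:///tmp/stream.sock", serializer=JsonSerializer)
```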
src/gisolate/pubsub.py (new file)
@@ -0,0 +1,425 @@
+"""ProcessPublisher / ProcessSubscriber: ZMQ PUB/SUB one-way fan-out.
+
+Use ``ProcessBridge`` when you need request/response RPC.
+Use these when you need one-way fan-out (snapshots, signals, heartbeats) where
+message loss is acceptable — PUB drops messages for slow subscribers.
+"""
+
+import asyncio
+import contextlib
+import logging
+import os
+from typing import Any, Awaitable, Callable
+
+from ._internal import Serializer, SmartPickle
+
+log = logging.getLogger(__name__)
+
+_TOPIC_ENCODING = "utf-8"
+
+# Default high-water mark for PUB send queue. ZMQ drops when exceeded.
+_DEFAULT_SNDHWM = 1000
+
+Handler = Callable[[str, Any], Awaitable[None]]
+
+
+def _encode_topic(topic: str) -> bytes:
+    return topic.encode(_TOPIC_ENCODING)
+
+
+def _decode_topic(data: bytes) -> str:
+    return data.decode(_TOPIC_ENCODING, errors="replace")
+
+
+def _safe_close(sock: Any, ctx: Any) -> None:
+    """Best-effort tear down of a ZMQ socket + context. Swallows errors."""
+    with contextlib.suppress(Exception):
+        if sock is not None:
+            sock.close(linger=0)
+    with contextlib.suppress(Exception):
+        if ctx is not None:
+            ctx.term()
+
+
+# ---------------------------------------------------------------------------
+# Publisher (gevent side)
+# ---------------------------------------------------------------------------
+
+
+class ProcessPublisher:
+    """ZMQ PUB socket for one-way fan-out from a gevent producer.
+
+    Topic-based dispatch with a pluggable serializer (default
+    :class:`SmartPickle`). ``publish`` is non-blocking — slow subscribers
+    cause messages to be dropped once the high-water mark is hit, matching
+    standard PUB semantics.
+
+    Args:
+        address: IPC/TCP address (e.g., ``"ipc:///tmp/stream.sock"``).
+        serializer: Optional serializer; defaults to :class:`SmartPickle`.
+        sndhwm: Send high-water mark. Beyond this, messages are dropped.
+
+    Example::
+
+        pub = ProcessPublisher("ipc:///tmp/stream.sock").start()
+        pub.publish("v1.snapshot.AAPL", {"price": 150.0})
+        pub.close()
+    """
+
+    def __init__(
+        self,
+        address: str,
+        *,
+        serializer: Serializer = SmartPickle,
+        sndhwm: int = _DEFAULT_SNDHWM,
+    ):
+        self._addr = address
+        self._serializer = serializer
+        self._sndhwm = sndhwm
+        self._started = False
+        self._sock: Any = None
+        self._ctx: Any = None
+        self._send_lock: Any = None
+
+    def __del__(self):
+        with contextlib.suppress(Exception):
+            if getattr(self, "_started", False):
+                self.close()
+
+    def __enter__(self) -> "ProcessPublisher":
+        return self.start()
+
+    def __exit__(self, *_) -> None:
+        self.close()
+
+    @property
+    def address(self) -> str:
+        """IPC/TCP address."""
+        return self._addr
+
+    def start(self) -> "ProcessPublisher":
+        """Bind the PUB socket. Idempotent. Returns self for chaining."""
+        if self._started:
+            return self
+
+        import gevent.lock
+        import zmq.green as zmq_mod
+
+        ctx = zmq_mod.Context()
+        sock = None
+        try:
+            sock = ctx.socket(zmq_mod.PUB)
+            sock.setsockopt(zmq_mod.LINGER, 0)
+            sock.setsockopt(zmq_mod.SNDHWM, self._sndhwm)
+            sock.bind(self._addr)
+        except Exception:
+            _safe_close(sock, ctx)
+            raise
+
+        self._ctx = ctx
+        self._sock = sock
+        self._send_lock = gevent.lock.Semaphore()
+        self._started = True
+        return self
+
+    def publish(self, topic: str, payload: Any) -> None:
+        """Publish ``payload`` under ``topic``. Non-blocking.
+
+        Drops the message silently if the send queue is full (slow subscribers).
+        Safe to call concurrently from multiple greenlets.
+        """
+        if not self._started:
+            raise RuntimeError("ProcessPublisher.publish() called before start()")
+
+        import zmq.green as zmq_mod
+
+        data = self._serializer.dumps(payload)
+        with self._send_lock:
+            # Concurrent close() may have torn the socket down between our
+            # _started check above and acquiring the lock; re-check.
+            if not self._started:
+                return
+            try:
+                self._sock.send_multipart(
+                    [_encode_topic(topic), data], flags=zmq_mod.NOBLOCK
+                )
+            except zmq_mod.Again:
+                # SNDHWM hit — slow subscribers. Drop, matching PUB semantics.
+                log.debug("publisher dropped message for topic %s (HWM hit)", topic)
+            except zmq_mod.ZMQError as exc:
+                log.warning("publisher send failed for topic %s: %s", topic, exc)
+
+    def close(self) -> None:
+        """Tear down the socket. Idempotent."""
+        if not self._started:
+            return
+        # Serialize with publish(): closing while a greenlet is mid-send is
+        # undefined. publish() re-checks _started inside the same lock.
+        with self._send_lock:
+            self._started = False
+            _safe_close(self._sock, self._ctx)
+        if self._addr.startswith("ipc://"):
+            with contextlib.suppress(OSError):
+                os.unlink(self._addr[6:])
+
+
+# ---------------------------------------------------------------------------
+# Subscriber (asyncio side)
+# ---------------------------------------------------------------------------
+
+
+class ProcessSubscriber:
+    """ZMQ SUB socket for asyncio consumers.
+
+    Register topic-prefix handlers; a single reader task dispatches incoming
+    messages. Multiple handlers may share a prefix and are invoked
+    concurrently. An exception in one handler is logged but does not kill
+    the reader.
+
+    Args:
+        address: IPC/TCP address (e.g., ``"ipc:///tmp/stream.sock"``).
+        serializer: Optional serializer; defaults to :class:`SmartPickle`.
+
+    Example::
+
+        sub = ProcessSubscriber("ipc:///tmp/stream.sock")
+
+        async def on_snapshot(topic, payload): ...
+
+        sub.subscribe("v1.snapshot.", on_snapshot)
+        sub.start()
+        ...
+        await sub.close()
+    """
+
+    def __init__(
+        self,
+        address: str,
+        *,
+        serializer: Serializer = SmartPickle,
+    ):
+        self._addr = address
+        self._serializer = serializer
+        self._started = False
+        self._sock: Any = None
+        self._ctx: Any = None
+        self._reader_task: Any = None
+        # Tasks currently running ProcessSubscriber._invoke. close() uses
+        # this to detect "called from a handler my reader is awaiting" and
+        # skip the reader-join (which would self-deadlock).
+        self._handler_tasks: set[Any] = set()
+        self._handlers: dict[str, list[Handler]] = {}
+
+    def __del__(self):
+        # Best-effort sync cleanup from finalizer. Reader task is leaked here;
+        # users should ``await close()`` explicitly for deterministic cleanup.
+        _safe_close(getattr(self, "_sock", None), getattr(self, "_ctx", None))
+
+    async def __aenter__(self) -> "ProcessSubscriber":
+        return self.start()
+
+    async def __aexit__(self, *_) -> None:
+        await self.close()
+
+    @property
+    def address(self) -> str:
+        """IPC/TCP address."""
+        return self._addr
+
+    def start(self) -> "ProcessSubscriber":
+        """Connect the SUB socket and spawn the reader task. Idempotent.
+
+        Must be called with a running asyncio loop. The reader task is bound
+        to that loop; subsequent :meth:`subscribe`, :meth:`unsubscribe`, and
+        :meth:`close` calls must run on the same loop/thread (ZMQ sockets
+        are not thread-safe).
+        """
+        if self._started:
+            return self
+
+        import zmq.asyncio
+
+        # Require a running loop *before* allocating ZMQ resources, so a
+        # caller misusing the API doesn't leave the subscriber half-built
+        # (sock/ctx allocated, _started=True, no reader task).
+        loop = asyncio.get_running_loop()
+
+        # Fresh context per subscriber (mirrors ProcessBridge), so ``close()``
+        # fully releases libzmq resources and restart is clean.
+        ctx = zmq.asyncio.Context()
+        sock = None
+        try:
+            sock = ctx.socket(zmq.SUB)
+            sock.setsockopt(zmq.LINGER, 0)
+            sock.connect(self._addr)
+            # Re-subscribe to any prefixes registered before start().
+            for prefix in self._handlers:
+                sock.setsockopt(zmq.SUBSCRIBE, _encode_topic(prefix))
+        except Exception:
+            _safe_close(sock, ctx)
+            raise
+
+        self._ctx = ctx
+        self._sock = sock
+        self._started = True
+        self._reader_task = loop.create_task(self._read_loop())
+        return self
+
+    def subscribe(self, topic_prefix: str, handler: Handler) -> None:
+        """Register ``handler`` for messages whose topic starts with ``topic_prefix``.
+
+        Multiple handlers may share a prefix; they are invoked concurrently.
+        Safe to call before or after :meth:`start`.
+
+        After :meth:`start`, call only from the same thread that owns the
+        asyncio loop (ZMQ sockets are not thread-safe).
+        """
+        new_prefix = topic_prefix not in self._handlers
+        self._handlers.setdefault(topic_prefix, []).append(handler)
+        if new_prefix and self._started:
+            import zmq
+
+            self._sock.setsockopt(zmq.SUBSCRIBE, _encode_topic(topic_prefix))
+
+    def unsubscribe(
+        self, topic_prefix: str, handler: Handler | None = None
+    ) -> None:
+        """Remove ``handler`` (or all handlers) for ``topic_prefix``.
+
+        When the last handler for a prefix is removed, the ZMQ-level
+        subscription is also dropped. After :meth:`start`, call only from
+        the same thread that owns the asyncio loop.
+        """
+        handlers = self._handlers.get(topic_prefix)
+        if not handlers:
+            return
+        if handler is None:
+            handlers.clear()
+        else:
+            with contextlib.suppress(ValueError):
+                handlers.remove(handler)
+        if not handlers:
+            self._handlers.pop(topic_prefix, None)
+            if self._started:
+                import zmq
+
+                with contextlib.suppress(Exception):
+                    self._sock.setsockopt(
+                        zmq.UNSUBSCRIBE, _encode_topic(topic_prefix)
+                    )
+
+    def _match(self, topic: str) -> list[Handler]:
+        return [
+            h
+            for prefix, handlers in self._handlers.items()
+            if topic.startswith(prefix)
+            for h in handlers
+        ]
+
+    async def _read_loop(self) -> None:
+        """Single reader task: dispatch messages to matched handlers."""
+        try:
+            while True:
+                try:
+                    parts = await self._sock.recv_multipart()
+                except Exception:
+                    # close() torpedoed the socket; exit cleanly without
+                    # logging a fake crash.
+                    if not self._started:
+                        return
+                    raise
+                if len(parts) < 2:
+                    continue
+                topic_bytes, data, *_ = parts
+                topic = _decode_topic(topic_bytes)
+                handlers = self._match(topic)
+                if not handlers:
+                    continue
+                try:
+                    payload = self._serializer.loads(data)
+                except Exception:
+                    log.exception(
+                        "subscriber failed to deserialize topic %s", topic
+                    )
+                    continue
+                # return_exceptions=True isolates the reader from a
+                # handler's CancelledError or any BaseException leaking
+                # past _invoke.
+                results = await asyncio.gather(
+                    *(self._invoke(h, topic, payload) for h in handlers),
+                    return_exceptions=True,
+                )
+                for r in results:
+                    if isinstance(r, (SystemExit, KeyboardInterrupt)):
+                        # Honor process-exit intent from a handler.
+                        raise r
+                    if isinstance(r, BaseException) and not isinstance(
+                        r, asyncio.CancelledError
+                    ):
+                        log.error(
+                            "subscriber handler raised %s for topic %s",
+                            type(r).__name__,
+                            topic,
+                            exc_info=r,
+                        )
+        except asyncio.CancelledError:
+            pass
+        except Exception:
+            log.exception("subscriber reader task crashed")
+
+    async def _invoke(self, handler: Handler, topic: str, payload: Any) -> None:
+        task = asyncio.current_task()
+        if task is not None:
+            self._handler_tasks.add(task)
+        try:
+            await handler(topic, payload)
+        except Exception:
+            log.exception("subscriber handler failed for topic %s", topic)
+        finally:
+            if task is not None:
+                self._handler_tasks.discard(task)
+
+    async def close(self) -> None:
+        """Tear down the socket and join the reader task. Idempotent.
+
+        Must be awaited from the asyncio loop that owns this subscriber
+        (the one that called :meth:`start`). Calling concurrently from
+        multiple coroutines is safe; the second caller returns immediately.
+
+        Safe to call from inside a handler: the reader task is not joined
+        in that case (joining yourself would deadlock), and sibling handlers
+        in the current dispatch are allowed to finish — we never
+        ``task.cancel()`` the reader, so ``asyncio.gather`` is not torn down.
+        """
+        if not self._started:
+            return
+        # Snapshot owned resources into locals, then null on self so a
+        # concurrent start() cannot have its fresh ctx/sock closed by us.
+        self._started = False
+        sock, ctx, task = self._sock, self._ctx, self._reader_task
+        self._sock = None
+        self._ctx = None
+        self._reader_task = None
+
+        # Close socket first. Any in-flight recv fails; the reader sees
+        # _started=False and exits cleanly. Avoid task.cancel() — it would
+        # propagate through asyncio.gather and cancel sibling handlers
+        # mid-execution when close() is called from inside a handler.
+        _safe_close(sock, ctx)
+
+        # Join the reader for deterministic cleanup. Skip when the caller
+        # IS the reader, or is a handler the reader is currently awaiting —
+        # both deadlock. In those cases the reader exits on its own once
+        # gather completes (closed socket + _started=False).
+        current = asyncio.current_task()
+        if (
+            task is not None
+            and task is not current
+            and current not in self._handler_tasks
+            and not task.done()
+        ):
+            with contextlib.suppress(BaseException):
+                await asyncio.wait({task}, timeout=2.0)
+
+
+__all__ = ["ProcessPublisher", "ProcessSubscriber"]
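The `ProcessSubscriber.close()` docstring above explicitly allows closing from inside a handler (the reader is not joined in that case, so it cannot deadlock on the dispatch that invoked the handler). A minimal sketch of a consumer that shuts itself down on a control message; the address and topic name are illustrative:

```python
import asyncio

from gisolate import ProcessSubscriber


async def main():
    sub = ProcessSubscriber("ipc:///tmp/stream.sock")
    done = asyncio.Event()

    async def on_stop(topic, payload):
        # Safe per the close() docstring: called from a handler, close()
        # skips joining the reader task and lets it exit on its own.
        await sub.close()
        done.set()

    sub.subscribe("v1.control.stop", on_stop)
    sub.start()
    await done.wait()


asyncio.run(main())
```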