onesecondtrader 0.8.0__tar.gz → 0.10.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (17) hide show
  1. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/PKG-INFO +1 -1
  2. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/pyproject.toml +1 -1
  3. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/src/onesecondtrader/core/models.py +26 -0
  4. onesecondtrader-0.10.0/src/onesecondtrader/datafeeds/base_datafeed.py +263 -0
  5. onesecondtrader-0.10.0/src/onesecondtrader/messaging/eventbus.py +490 -0
  6. onesecondtrader-0.10.0/src/onesecondtrader/py.typed +0 -0
  7. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/LICENSE +0 -0
  8. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/README.md +0 -0
  9. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/src/onesecondtrader/__init__.py +0 -0
  10. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/src/onesecondtrader/core/__init__.py +0 -0
  11. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/src/onesecondtrader/core/py.typed +0 -0
  12. {onesecondtrader-0.8.0/src/onesecondtrader/messaging → onesecondtrader-0.10.0/src/onesecondtrader/datafeeds}/__init__.py +0 -0
  13. {onesecondtrader-0.8.0/src/onesecondtrader/monitoring → onesecondtrader-0.10.0/src/onesecondtrader/messaging}/__init__.py +0 -0
  14. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/src/onesecondtrader/messaging/events.py +0 -0
  15. /onesecondtrader-0.8.0/src/onesecondtrader/monitoring/py.typed → /onesecondtrader-0.10.0/src/onesecondtrader/monitoring/__init__.py +0 -0
  16. {onesecondtrader-0.8.0 → onesecondtrader-0.10.0}/src/onesecondtrader/monitoring/console.py +0 -0
  17. {onesecondtrader-0.8.0/src/onesecondtrader → onesecondtrader-0.10.0/src/onesecondtrader/monitoring}/py.typed +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: onesecondtrader
3
- Version: 0.8.0
3
+ Version: 0.10.0
4
4
  Summary: The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place.
5
5
  Author: Nils P. Kujath
6
6
  Author-email: 63961429+NilsKujath@users.noreply.github.com
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "onesecondtrader"
3
- version = "0.8.0"
3
+ version = "0.10.0"
4
4
  description = "The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place."
5
5
  authors = [
6
6
  {name = "Nils P. Kujath",email = "63961429+NilsKujath@users.noreply.github.com"}
@@ -131,3 +131,29 @@ class OrderRejectionReason(enum.Enum):
131
131
 
132
132
  UNKNOWN = enum.auto()
133
133
  NEGATIVE_QUANTITY = enum.auto()
134
+
135
+
136
class TimeFrame(enum.Enum):
    """
    Enum for timeframes.

    **Attributes:**

    | Enum | Value | Description |
    |------|-------|-------------|
    | `SECOND` | `enum.auto()` | 1 second |
    | `MINUTE` | `enum.auto()` | 1 minute |
    | `HOUR` | `enum.auto()` | 1 hour |
    | `DAY` | `enum.auto()` | 1 day |
    | `WEEK` | `enum.auto()` | 1 week |
    | `MONTH` | `enum.auto()` | 1 month |
    | `YEAR` | `enum.auto()` | 1 year |
    """

    SECOND = enum.auto()
    MINUTE = enum.auto()
    HOUR = enum.auto()
    DAY = enum.auto()
    WEEK = enum.auto()
    MONTH = enum.auto()
    YEAR = enum.auto()
@@ -0,0 +1,263 @@
1
+ """
2
+ This module provides the base class for datafeeds.
3
+ """
4
+
5
+ import abc
6
+ import threading
7
+ from onesecondtrader.messaging import eventbus
8
+ from onesecondtrader.core import models
9
+ from onesecondtrader.monitoring import console
10
+
11
+
12
class BaseDatafeed(abc.ABC):
    """
    Base class for all datafeeds.

    Provides thread-safe connection and per-symbol streaming lifecycle
    management around a single internal lock. Concrete datafeeds implement
    the `_connect`, `_disconnect`, `_start_streaming_for_symbols`,
    `_stop_streaming_for_symbols`, and `preload_bars` hooks; the public
    `connect`/`disconnect`/`start_streaming_for_symbols`/
    `stop_streaming_for_symbols` wrappers handle locking, state tracking,
    logging, and exception containment (hooks' exceptions are caught and
    reported as `False` return values rather than propagated).
    """

    def __init__(self, event_bus: eventbus.EventBus) -> None:
        """
        Initializes the datafeed with the provided event bus.

        Args:
            event_bus (eventbus.EventBus): The event bus to publish events to.

        Attributes:
            self.event_bus (eventbus.EventBus): The event bus to publish events to.
            self._lock (threading.Lock): Lock for thread safety.
            self._is_connected (bool): Whether the datafeed is connected. `True` if
                connected, `False` otherwise.
            self._streamed_symbols (set[tuple[str, models.TimeFrame]]): Set of symbols
                and timeframes that are currently being streamed.
        """
        self.event_bus = event_bus

        # Single lock guards both _is_connected and _streamed_symbols so the
        # connection flag and the streamed-symbol set never diverge.
        self._lock = threading.Lock()
        self._is_connected = False
        self._streamed_symbols: set[tuple[str, models.TimeFrame]] = set()

    def connect(self) -> bool:
        """
        Connect to the datafeed.

        Idempotent: calling while already connected logs a warning and
        returns True without invoking `_connect` again.

        Returns:
            bool: True if connection successful, False otherwise.
        """
        with self._lock:
            if self._is_connected:
                console.logger.warning(f"{self.__class__.__name__} already connected")
                return True

            console.logger.info(f"Connecting to {self.__class__.__name__}...")
            try:
                success = self._connect()
                if success:
                    # Flip the flag only after the hook reports success.
                    self._is_connected = True
                    console.logger.info(
                        f"Successfully connected to {self.__class__.__name__}"
                    )
                    return True
                else:
                    console.logger.error(
                        f"Failed to connect to {self.__class__.__name__}"
                    )
                    return False
            except Exception as e:
                # Hook exceptions are contained: report failure, stay disconnected.
                console.logger.error(
                    f"Connection failed for {self.__class__.__name__}: {e}"
                )
                return False

    @abc.abstractmethod
    def _connect(self) -> bool:
        """
        Implement connection logic for the specific datafeed.

        Returns:
            bool: True if connection successful, False otherwise.
        """
        pass

    def disconnect(self) -> bool:
        """
        Disconnect from the datafeed.
        Clears the set of streamed symbols.

        Idempotent: calling while already disconnected logs a warning and
        returns True without invoking `_disconnect` again.

        Returns:
            bool: True if disconnection successful, False otherwise.
        """
        with self._lock:
            if not self._is_connected:
                console.logger.warning(
                    f"{self.__class__.__name__} already disconnected"
                )
                return True

            console.logger.info(f"Disconnecting from {self.__class__.__name__}...")
            try:
                success = self._disconnect()
                if success:
                    self._is_connected = False
                    self._streamed_symbols.clear()
                    console.logger.info(
                        f"Successfully disconnected from {self.__class__.__name__}"
                    )
                    return True
                else:
                    console.logger.error(
                        f"Failed to disconnect from {self.__class__.__name__}"
                    )
                    return False
            except Exception as e:
                console.logger.error(
                    f"Disconnection failed for {self.__class__.__name__}: {e}"
                )
                # NOTE: state is reset even though _disconnect raised, so a
                # subsequent connect() starts from a clean slate.
                self._is_connected = False
                self._streamed_symbols.clear()
                return False

    @abc.abstractmethod
    def _disconnect(self) -> bool:
        """
        Implement disconnection logic for the specific datafeed.

        Returns:
            bool: True if disconnection successful, False otherwise.
        """
        pass

    def start_streaming_for_symbols(
        self, symbols: list[tuple[str, models.TimeFrame]]
    ) -> bool:
        """
        Start streaming market data for the specified symbols and timeframes.

        Symbols already being streamed are skipped; only genuinely new
        (symbol, timeframe) pairs are passed to the
        `_start_streaming_for_symbols` hook. Requires a connected datafeed.

        Args:
            symbols: List of (symbol, timeframe) tuples to start streaming.

        Returns:
            bool: True if streaming started successfully, False otherwise.
        """
        if not symbols:
            # Empty request is a no-op success, not an error.
            console.logger.warning("No symbols provided for streaming")
            return True

        with self._lock:
            if not self._is_connected:
                console.logger.error("Cannot start streaming: datafeed not connected")
                return False

            # Only forward symbols that are not already streamed.
            new_symbols = set(symbols) - self._streamed_symbols
            if not new_symbols:
                console.logger.info("All requested symbols are already being streamed")
                return True

            try:
                success = self._start_streaming_for_symbols(list(new_symbols))
                if success:
                    # Track the new symbols only after the hook succeeded.
                    self._streamed_symbols.update(new_symbols)
                    console.logger.info(
                        f"Successfully started streaming for {len(new_symbols)} symbols"
                    )
                    return True
                else:
                    console.logger.error("Failed to start streaming for symbols")
                    return False
            except Exception as e:
                console.logger.error(f"Exception while starting streaming: {e}")
                return False

    @abc.abstractmethod
    def _start_streaming_for_symbols(
        self, symbols: list[tuple[str, models.TimeFrame]]
    ) -> bool:
        """
        Implement streaming startup logic for the specific datafeed.

        Args:
            symbols: List of (symbol, timeframe) tuples to start streaming.
                These are guaranteed to be new symbols not already being streamed.

        Returns:
            bool: True if streaming started successfully, False otherwise.
        """
        pass

    def stop_streaming_for_symbols(
        self, symbols: list[tuple[str, models.TimeFrame]]
    ) -> bool:
        """
        Stop streaming market data for the specified symbols and timeframes.

        Symbols that are not currently being streamed are ignored. If the
        datafeed is disconnected, the symbols are merely removed from local
        tracking (there is nothing to stop remotely) and True is returned.

        Args:
            symbols: List of (symbol, timeframe) tuples to stop streaming.

        Returns:
            bool: True if streaming stopped successfully, False otherwise.
        """
        if not symbols:
            # Empty request is a no-op success, not an error.
            console.logger.warning("No symbols provided for stopping streaming")
            return True

        with self._lock:
            if not self._is_connected:
                console.logger.warning(
                    "Datafeed not connected, but removing symbols from tracking"
                )
                self._streamed_symbols.difference_update(symbols)
                return True

            # Only forward symbols that are actually streamed right now.
            symbols_to_stop = set(symbols) & self._streamed_symbols
            if not symbols_to_stop:
                console.logger.info(
                    "None of the requested symbols are currently being streamed"
                )
                return True

            console.logger.info(
                f"Stopping streaming for {len(symbols_to_stop)} symbols"
            )
            try:
                success = self._stop_streaming_for_symbols(list(symbols_to_stop))
                if success:
                    self._streamed_symbols.difference_update(symbols_to_stop)
                    console.logger.info(
                        f"Successfully stopped streaming for {len(symbols_to_stop)} "
                        f"symbols"
                    )
                    return True
                else:
                    console.logger.error("Failed to stop streaming for symbols")
                    return False
            except Exception as e:
                console.logger.error(f"Exception while stopping streaming: {e}")
                # NOTE: tracking is cleared even though the hook raised, so
                # the local view does not claim streams that may be dead.
                self._streamed_symbols.difference_update(symbols_to_stop)
                return False

    @abc.abstractmethod
    def _stop_streaming_for_symbols(
        self, symbols: list[tuple[str, models.TimeFrame]]
    ) -> bool:
        """
        Implement streaming shutdown logic for the specific datafeed.

        Args:
            symbols: List of (symbol, timeframe) tuples to stop streaming.
                These are guaranteed to be symbols currently being streamed.

        Returns:
            bool: True if streaming stopped successfully, False otherwise.
        """
        pass

    @abc.abstractmethod
    def preload_bars(
        self, preload_list: list[tuple[str, models.TimeFrame, int]]
    ) -> None:
        """
        Preload historical bars for the specified symbols, timeframes, and counts.

        Args:
            preload_list: List of (symbol, timeframe, count) tuples specifying
                what historical data to preload.
        """
        pass
@@ -0,0 +1,490 @@
1
+ """
2
+ This module provides the event bus for managing event-driven communication between
3
+ the trading infrastructure's components via a publish-subscribe messaging pattern.
4
+ """
5
+
6
+ import collections
7
+ import inspect
8
+ import logging
9
+ import threading
10
+ from collections.abc import Callable
11
+ from onesecondtrader.messaging import events
12
+ from onesecondtrader.monitoring import console
13
+
14
+
15
class EventBus:
    # noinspection PyTypeChecker
    """
    Event bus for managing event-driven communication between the trading
    infrastructure's components via a publish-subscribe messaging pattern.
    Supports inheritance-based subscriptions where handlers subscribed to a parent event
    type will receive events of child types.
    Each subscription can include an optional filter function to receive only specific
    events of a given type (e.g. filtering `IncomingBar` events for a specific symbol).

    Examples:
        >>> # Import necessary modules
        >>> import pandas as pd
        >>> from onesecondtrader.messaging.eventbus import EventBus
        >>> from onesecondtrader.messaging import events
        >>> from onesecondtrader.core import models

        >>> # Instantiate event bus
        >>> event_bus = EventBus()

        >>> # Create a dummy handler that simply prints the symbol of the received event
        >>> def dummy_handler(incoming_bar_event: events.Market.IncomingBar):
        ...     print(f"Received: {incoming_bar_event.symbol}")

        >>> # Subscribe to IncomingBar events whose symbol is AAPL
        >>> event_bus.subscribe(
        ...     events.Market.IncomingBar,
        ...     dummy_handler,
        ...     lambda event: event.symbol == "AAPL"  # Lambda filter function
        ... )

        >>> # Create events to publish
        >>> aapl_event = events.Market.IncomingBar(
        ...     ts_event=pd.Timestamp("2023-01-01", tz="UTC"),
        ...     symbol="AAPL",
        ...     bar=models.Bar(
        ...         open=100.0, high=101.0, low=99.0,
        ...         close=100.5, volume=1000
        ...     )
        ... )
        >>> googl_event = events.Market.IncomingBar(
        ...     ts_event=pd.Timestamp("2023-01-01", tz="UTC"),
        ...     symbol="GOOGL",
        ...     bar=models.Bar(
        ...         open=2800.0, high=2801.0, low=2799.0,
        ...         close=2800.5, volume=500
        ...     )
        ... )

        >>> # Publish events - only AAPL passes filter and will be printed
        >>> event_bus.publish(aapl_event)
        Received: AAPL
        >>> event_bus.publish(googl_event)

        >>> # Unsubscribe the dummy handler
        >>> event_bus.unsubscribe(events.Market.IncomingBar, dummy_handler)

        >>> # Publish again - no handler receives it (warning will be logged)
        >>> event_bus.publish(aapl_event)  # doctest: +SKIP
        WARNING:root:Published IncomingBar but no subscribers exist - check event wiring
    """

    def __init__(self) -> None:
        """
        Initializes the event bus with optimized data structures for high-performance
        event publishing.

        Attributes:
            self._handlers (collections.defaultdict): Direct storage mapping event types
                to handler lists
            self._publish_cache (dict): Pre-computed cache for O(1) publish operations
            self._lock (threading.Lock): Single lock for all operations
                (subscribe/unsubscribe are rare)
            self._sequence_number (int): Sequence number counter for events
        """
        # Source of truth: event type -> list of (handler, filter) pairs, as
        # subscribed. Inheritance is resolved at cache-rebuild time, not here.
        self._handlers: dict[
            type[events.Base.Event],
            list[
                tuple[
                    Callable[[events.Base.Event], None],
                    Callable[[events.Base.Event], bool],
                ]
            ],
        ] = collections.defaultdict(list)

        # Derived lookup: concrete event type -> flattened handler list
        # (including handlers subscribed to parent types). publish() reads
        # this without taking the lock; _rebuild_cache() swaps it in whole.
        self._publish_cache: dict[
            type[events.Base.Event],
            list[
                tuple[
                    Callable[[events.Base.Event], None],
                    Callable[[events.Base.Event], bool],
                ]
            ],
        ] = {}

        self._lock: threading.Lock = threading.Lock()
        # -1 so the first published event gets sequence number 0.
        self._sequence_number: int = -1

        self._rebuild_cache()

    def subscribe(
        self,
        event_type: type[events.Base.Event],
        event_handler: Callable[[events.Base.Event], None],
        event_filter: Callable[[events.Base.Event], bool] | None = None,
    ) -> None:
        """
        The `subscribe` method registers an event handler for event messages of a
        specified type and all its subtypes (expressed as subclasses in the event
        dataclass hierarchy, so-called inheritance-based subscription).
        When an event of that type or any subtype is published, the handler will be
        invoked if the associated `event_filter` returns `True` for that event
        instance.
        A given handler can only be subscribed once per event type.
        If the handler is already subscribed to the given event type
        —regardless of the filter function—
        the subscription attempt is ignored and a warning is logged.

        Arguments:
            event_type (type[events.Base.Event]): Type of the event to subscribe to,
                must be a subclass of `events.Base.Event`.
            event_handler (Callable[events.Base.Event, None]): Function to call when an
                event of the given type is published.
                This callable must accept a single argument of type `events.Base.Event`
                (or its subclass).
            event_filter (Callable[[events.Base.Event], bool] | None): Function to
                determine whether to call the event handler for a given event.
                Should accept one event and return `True` to handle or `False` to
                ignore.
                Defaults to `None`, which creates a filter that always returns `True`
                (i.e. always call the event handler).
        """

        # Validation failures are logged and swallowed rather than raised, so a
        # bad subscription never takes down the caller.
        if not issubclass(event_type, events.Base.Event):
            console.logger.error(
                f"Invalid subscription attempt: event_type must be a subclass of "
                f"Event, got {type(event_type).__name__}"
            )
            return

        if not callable(event_handler):
            console.logger.error(
                f"Invalid subscription attempt: event_handler must be callable, "
                f"got {type(event_handler).__name__}"
            )
            return

        if event_filter is None:
            # Default: pass-through filter that accepts every event.
            def event_filter(event: events.Base.Event) -> bool:
                return True

        if not callable(event_filter):
            console.logger.error(
                f"Invalid subscription attempt: event_filter must be callable, "
                f"got {type(event_filter).__name__}"
            )
            return

        is_valid, error_msg = self._validate_filter_signature(event_filter)
        if not is_valid:
            console.logger.error(f"Invalid subscription attempt: {error_msg}")
            return

        with self._lock:
            # Duplicate check is by handler identity only, ignoring the filter.
            if any(
                event_handler == existing_handler
                for existing_handler, _ in self._handlers[event_type]
            ):
                console.logger.warning(
                    f"Duplicate subscription attempt: event_handler was already "
                    f"subscribed to {event_type.__name__}"
                )
                return

            self._handlers[event_type].append((event_handler, event_filter))

            # Refresh the publish fast-path while still holding the lock.
            self._rebuild_cache()

            handler_name = getattr(event_handler, "__name__", "<lambda>")
            console.logger.info(f"Subscribed {handler_name} to {event_type.__name__}.")

    def unsubscribe(
        self,
        event_type: type[events.Base.Event],
        event_handler: Callable[[events.Base.Event], None],
    ) -> None:
        """
        The `unsubscribe` method removes an event handler from the subscribers list for
        the specified event type.
        If the event handler is not subscribed to the given event type, the
        unsubscription attempt is ignored and a warning is logged.
        After removing the event handler, the event type may have an empty subscribers
        list but remains in the `subscribers` dictionary.

        Arguments:
            event_type (type[events.Base.Event]): Type of the event to unsubscribe from,
                must be a subclass of `events.Base.Event`.
            event_handler (Callable[events.Base.Event, None]): Event handler to remove
                from the subscribers list (this will also remove the associated filter
                function).
        """
        if not issubclass(event_type, events.Base.Event):
            console.logger.error(
                f"Invalid unsubscription attempt: event_type must be a subclass of "
                f"Event, got {type(event_type).__name__}"
            )
            return

        if not callable(event_handler):
            console.logger.error(
                f"Invalid unsubscription attempt: callback must be callable, "
                f"got {type(event_handler).__name__}"
            )
            return

        with self._lock:
            if event_type not in self._handlers:
                console.logger.warning(
                    f"Attempted to unsubscribe from {event_type.__name__}, "
                    f"but no subscribers exist"
                )
                return

            # Rebuild the list without the target handler instead of mutating
            # in place.
            current_handlers = self._handlers[event_type]
            new_handlers = [
                (existing_handler, existing_filter)
                for existing_handler, existing_filter in current_handlers
                if existing_handler != event_handler
            ]

            removed_count = len(current_handlers) - len(new_handlers)
            if removed_count == 0:
                handler_name = getattr(event_handler, "__name__", "<lambda>")
                console.logger.warning(
                    f"Attempted to unsubscribe {handler_name} from "
                    f"{event_type.__name__}, but it was not subscribed"
                )
                return

            if new_handlers:
                self._handlers[event_type] = new_handlers
            else:
                # Clean up empty lists
                del self._handlers[event_type]

            self._rebuild_cache()

            handler_name = getattr(event_handler, "__name__", "<lambda>")
            console.logger.info(
                f"Unsubscribed {handler_name} from "
                f"{event_type.__name__} (removed {removed_count} subscription(s))"
            )

    def publish(self, event: events.Base.Event) -> None:
        """
        The `publish` method delivers the event to all handlers subscribed to the
        event's type or any of its parent types (inheritance-based subscription).
        Handlers are only called if their filter function returns True for this event.
        Handlers are called synchronously in the order they were subscribed.

        This method uses a pre-computed handler cache for O(1) lookup performance
        and runs without locks for maximum concurrency.

        Arguments:
            event (events.Base.Event): Event to publish. Must be an instance of
                `events.Base.Event` or one of its subclasses.
        """
        if not isinstance(event, events.Base.Event):
            console.logger.error(
                f"Invalid publish attempt: event must be an instance of Event, "
                f"got {type(event).__name__}"
            )
            return

        # Stamp a bus-wide sequence number on the event. object.__setattr__ is
        # used presumably because events are frozen dataclasses — confirm in
        # the events module.
        object.__setattr__(
            event, "event_bus_sequence_number", self._set_sequence_number()
        )

        event_type: type[events.Base.Event] = type(event)

        # Lock-free read: _rebuild_cache replaces the whole dict atomically.
        handlers = self._publish_cache.get(event_type, [])

        if not handlers:
            console.logger.warning(
                f"Published {event_type.__name__} but no subscribers exist - "
                f"check event wiring"
            )
            return

        delivered_count = 0
        for event_handler, event_filter in handlers:
            try:
                should_handle = event_filter(event)

                # A filter returning a non-bool is a programming error; treat
                # it as "do not handle" rather than guessing truthiness.
                if not isinstance(should_handle, bool):
                    handler_name = getattr(event_handler, "__name__", "<lambda>")
                    console.logger.warning(
                        f"Filter for handler {handler_name} returned "
                        f"{type(should_handle).__name__}, expected bool. "
                        f"Treating as False."
                    )
                    should_handle = False

            except TypeError as type_error:
                handler_name = getattr(event_handler, "__name__", "<lambda>")
                # Heuristic: distinguish a wrong-arity filter from a TypeError
                # raised inside the filter body, based on the message text.
                if "takes" in str(type_error) and "positional argument" in str(
                    type_error
                ):
                    console.logger.error(
                        f"Filter for handler {handler_name} has wrong signature: "
                        f"{type_error}"
                    )
                else:
                    console.logger.exception(
                        f"Filter function for handler {handler_name} failed "
                        f"processing {event_type.__name__}: {type_error}"
                    )
                continue
            except Exception as filter_exception:
                handler_name = getattr(event_handler, "__name__", "<lambda>")
                console.logger.exception(
                    f"Filter function for handler {handler_name} failed "
                    f"processing {event_type.__name__}: {filter_exception}"
                )
                continue

            if should_handle:
                try:
                    event_handler(event)
                    delivered_count += 1
                except Exception as handler_exception:
                    # One failing handler must not prevent delivery to the rest.
                    handler_name = getattr(event_handler, "__name__", "<lambda>")
                    console.logger.exception(
                        f"Handler {handler_name} failed processing "
                        f"{event_type.__name__}: {handler_exception}"
                    )

        if delivered_count == 0:
            console.logger.warning(
                f"Published {event_type.__name__} but no handlers received it - "
                f"all {len(handlers)} handler(s) filtered out the event"
            )
        else:
            # Conditional debug logging to avoid string formatting overhead
            if console.logger.isEnabledFor(logging.DEBUG):
                console.logger.debug(
                    f"Published {event_type.__name__} to {delivered_count} handler(s)"
                )

    @staticmethod
    def _validate_filter_signature(
        event_filter: Callable[[events.Base.Event], bool],
    ) -> tuple[bool, str | None]:
        """
        Validate that filter function has the correct signature.

        A valid filter function must:
        - Accept exactly 1 parameter (the event)
        - Not use *args or **kwargs
        - Optionally return bool (if type annotated)

        Arguments:
            event_filter (Callable): The filter function to validate

        Returns:
            tuple[bool, str | None]: (is_valid, error_message)
                is_valid: True if signature is valid, False otherwise
                error_message: Description of the issue if invalid, None if valid
        """
        try:
            sig = inspect.signature(event_filter)
            params = list(sig.parameters.values())

            if len(params) != 1:
                return (
                    False,
                    f"Filter must accept exactly 1 parameter, got {len(params)}",
                )

            param = params[0]
            if param.kind == inspect.Parameter.VAR_POSITIONAL:
                return (
                    False,
                    "Filter cannot use *args - must accept exactly 1 event parameter",
                )
            if param.kind == inspect.Parameter.VAR_KEYWORD:
                return (
                    False,
                    "Filter cannot use **kwargs - must accept exactly 1 event "
                    "parameter",
                )

            # Return annotation is only checked when present; unannotated
            # filters are accepted.
            if sig.return_annotation is not inspect.Parameter.empty:
                if sig.return_annotation is not bool:
                    return (
                        False,
                        f"Filter return type should be bool, got "
                        f"{sig.return_annotation}",
                    )

            return True, None

        except Exception as e:
            # Some callables (e.g. certain builtins) cannot be introspected.
            return False, f"Could not inspect filter signature: {e}"

    def _set_sequence_number(self) -> int:
        """
        Increment and return the event bus sequence number in a thread-safe manner.
        """
        with self._lock:
            self._sequence_number += 1
            return self._sequence_number

    @staticmethod
    def _get_all_concrete_event_types() -> list[type[events.Base.Event]]:
        """
        Dynamically discover all concrete event types from the events module.
        Automatically adapts to namespace changes without code modifications.

        Returns:
            list[type[events.Base.Event]]: List of concrete event classes that can be
                instantiated and published.
        """
        concrete_types = []

        # Event classes are nested inside namespace classes (e.g.
        # events.Market.IncomingBar), so scan one level of nesting.
        for attr_name in dir(events):
            if attr_name.startswith("_"):
                continue

            attr = getattr(events, attr_name)

            if not inspect.isclass(attr) or attr_name == "Base":
                continue

            for member_name, member_obj in inspect.getmembers(attr, inspect.isclass):
                if (
                    issubclass(member_obj, events.Base.Event)
                    and member_obj != events.Base.Event
                    and not inspect.isabstract(member_obj)
                ):
                    concrete_types.append(member_obj)

        return concrete_types

    def _rebuild_cache(self) -> None:
        """
        Rebuild the pre-computed publish cache for all concrete event types.
        This method should be called whenever subscriptions change.
        """
        new_cache = {}
        concrete_event_types = self._get_all_concrete_event_types()

        for concrete_event_type in concrete_event_types:
            handlers = []
            # Deduplicate by handler identity: a handler subscribed to both a
            # parent and a child type is delivered the event only once.
            seen_handler_ids = set()

            for handler_type, handler_list in self._handlers.items():
                # Inheritance-based subscription: a handler registered for a
                # parent type also receives all its concrete subtypes.
                if issubclass(concrete_event_type, handler_type):
                    for handler, filter_func in handler_list:
                        handler_id = id(handler)
                        if handler_id not in seen_handler_ids:
                            handlers.append((handler, filter_func))
                            seen_handler_ids.add(handler_id)

            if handlers:
                new_cache[concrete_event_type] = handlers

        # Atomic reference swap; publish() may keep reading the old dict.
        self._publish_cache = new_cache

        if console.logger.isEnabledFor(logging.DEBUG):
            console.logger.debug(
                f"Publish cache rebuilt: {len(new_cache)} event types cached, "
                f"total handlers: "
                f"{sum(len(handlers) for handlers in new_cache.values())}"
            )
File without changes