onesecondtrader 0.10.1__tar.gz → 0.11.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21)
  1. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/PKG-INFO +2 -1
  2. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/pyproject.toml +2 -1
  3. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/core/models.py +28 -20
  4. onesecondtrader-0.11.0/src/onesecondtrader/datafeeds/__init__.py +2 -0
  5. onesecondtrader-0.11.0/src/onesecondtrader/datafeeds/base_datafeed.py +58 -0
  6. onesecondtrader-0.11.0/src/onesecondtrader/datafeeds/csv_datafeed.py +297 -0
  7. onesecondtrader-0.11.0/src/onesecondtrader/messaging/__init__.py +8 -0
  8. onesecondtrader-0.10.1/src/onesecondtrader/datafeeds/base_datafeed.py +0 -255
  9. onesecondtrader-0.10.1/src/onesecondtrader/messaging/__init__.py +0 -0
  10. onesecondtrader-0.10.1/src/onesecondtrader/monitoring/__init__.py +0 -0
  11. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/LICENSE +0 -0
  12. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/README.md +0 -0
  13. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/__init__.py +0 -0
  14. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/core/__init__.py +0 -0
  15. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/core/py.typed +0 -0
  16. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/messaging/eventbus.py +0 -0
  17. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/messaging/events.py +0 -0
  18. {onesecondtrader-0.10.1/src/onesecondtrader/datafeeds → onesecondtrader-0.11.0/src/onesecondtrader/monitoring}/__init__.py +0 -0
  19. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/monitoring/console.py +0 -0
  20. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/monitoring/py.typed +0 -0
  21. {onesecondtrader-0.10.1 → onesecondtrader-0.11.0}/src/onesecondtrader/py.typed +0 -0
--- onesecondtrader-0.10.1/PKG-INFO
+++ onesecondtrader-0.11.0/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: onesecondtrader
- Version: 0.10.1
+ Version: 0.11.0
  Summary: The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place.
  Author: Nils P. Kujath
  Author-email: 63961429+NilsKujath@users.noreply.github.com
@@ -10,6 +10,7 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Requires-Dist: pandas (>=2.3.1,<3.0.0)
+ Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
  Description-Content-Type: text/markdown

  # OneSecondTrader
--- onesecondtrader-0.10.1/pyproject.toml
+++ onesecondtrader-0.11.0/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "onesecondtrader"
- version = "0.10.1"
+ version = "0.11.0"
  description = "The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place."
  authors = [
      {name = "Nils P. Kujath",email = "63961429+NilsKujath@users.noreply.github.com"}
@@ -9,6 +9,7 @@ readme = "README.md"
  requires-python = ">=3.11"
  dependencies = [
      "pandas (>=2.3.1,<3.0.0)",
+     "python-dotenv (>=1.0.0,<2.0.0)",
  ]

  [tool.poetry]
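The new python-dotenv dependency backs the environment-driven configuration of the CSV datafeed introduced further down. A minimal sketch of the intended setup; the variable names CSV_PATH and CSV_STREAMING_DELAY come from csv_datafeed.py below, while the sample values are purely illustrative:

```python
# Illustrative .env contents (values are made up):
#   CSV_PATH=/data/ohlcv_1s.csv
#   CSV_STREAMING_DELAY=0.5
import os

from dotenv import load_dotenv

load_dotenv()  # reads key=value pairs from a local .env file into the environment
csv_path = os.getenv("CSV_PATH")
delay = float(os.getenv("CSV_STREAMING_DELAY", "0.0"))
```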
--- onesecondtrader-0.10.1/src/onesecondtrader/core/models.py
+++ onesecondtrader-0.11.0/src/onesecondtrader/core/models.py
@@ -32,14 +32,14 @@ class Bar:
          high (float): High price
          low (float): Low price
          close (float): Close price
-         volume (float): Volume
+         volume (int | None): Volume
      """

      open: float
      high: float
      low: float
      close: float
-     volume: float | None = None
+     volume: int | None = None


  class Side(enum.Enum):
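A small sketch of the tightened Bar model, assuming Bar is a plain dataclass constructed by keyword; the prices and volume are illustrative:

```python
from onesecondtrader.core import models

# volume is now an int (or None when unknown) rather than a float
bar = models.Bar(open=101.25, high=101.50, low=100.75, close=101.10, volume=4821)
quote_only = models.Bar(open=1.1002, high=1.1004, low=1.1001, close=1.1003)  # volume defaults to None
```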
@@ -133,27 +133,35 @@ class OrderRejectionReason(enum.Enum):
      NEGATIVE_QUANTITY = enum.auto()


- class TimeFrame(enum.Enum):
+ class RecordType(enum.Enum):
      """
-     Enum for timeframes.
+     Enum for Databento record types.

      **Attributes:**

      | Enum | Value | Description |
      |------|-------|-------------|
-     | `SECOND` | `enum.auto()` | 1 second |
-     | `MINUTE` | `enum.auto()` | 1 minute |
-     | `HOUR` | `enum.auto()` | 1 hour |
-     | `DAY` | `enum.auto()` | 1 day |
-     | `WEEK` | `enum.auto()` | 1 week |
-     | `MONTH` | `enum.auto()` | 1 month |
-     | `YEAR` | `enum.auto()` | 1 year |
-     """
-
-     SECOND = enum.auto()
-     MINUTE = enum.auto()
-     HOUR = enum.auto()
-     DAY = enum.auto()
-     WEEK = enum.auto()
-     MONTH = enum.auto()
-     YEAR = enum.auto()
+     | `OHLCV_1S` | `32` | 1-second bars |
+     | `OHLCV_1M` | `33` | 1-minute bars |
+     | `OHLCV_1H` | `34` | 1-hour bars |
+     | `OHLCV_1D` | `35` | 1-day bars |
+     """
+
+     OHLCV_1S = 32
+     OHLCV_1M = 33
+     OHLCV_1H = 34
+     OHLCV_1D = 35
+
+     @classmethod
+     def to_string(cls, rtype: int) -> str:
+         match rtype:
+             case cls.OHLCV_1S.value:
+                 return "1-second bars"
+             case cls.OHLCV_1M.value:
+                 return "1-minute bars"
+             case cls.OHLCV_1H.value:
+                 return "1-hour bars"
+             case cls.OHLCV_1D.value:
+                 return "daily bars"
+             case _:
+                 return f"unknown ({rtype})"
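A minimal sketch of the new enum in use; the raw rtype values 32 and 99 are examples checked against the mapping shown above:

```python
from onesecondtrader.core import models

record = models.RecordType(32)              # RecordType.OHLCV_1S
print(models.RecordType.to_string(32))      # "1-second bars"
print(models.RecordType.to_string(99))      # "unknown (99)"
```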
--- /dev/null
+++ onesecondtrader-0.11.0/src/onesecondtrader/datafeeds/__init__.py
@@ -0,0 +1,2 @@
+ from .base_datafeed import BaseDatafeed as BaseDatafeed
+ from .csv_datafeed import CSVDatafeed as CSVDatafeed
--- /dev/null
+++ onesecondtrader-0.11.0/src/onesecondtrader/datafeeds/base_datafeed.py
@@ -0,0 +1,58 @@
+ """
+ This module provides the base class for all datafeeds.
+ """
+
+ import abc
+ from onesecondtrader import messaging
+ from onesecondtrader.core import models
+
+
+ class BaseDatafeed(abc.ABC):
+     """
+     Base class for all datafeeds.
+     """
+
+     def __init__(self, event_bus: messaging.EventBus):
+         """
+         Initialize the datafeed with an event bus.
+
+         Args:
+             event_bus (messaging.EventBus): Event bus to publish market data events to.
+         """
+         self.event_bus: messaging.EventBus = event_bus
+
+     @abc.abstractmethod
+     def connect(self):
+         """
+         Connect to the datafeed.
+         """
+         pass
+
+     @abc.abstractmethod
+     def watch(self, symbols: list[tuple[str, models.RecordType]]):
+         """
+         Start watching symbols.
+
+         Args:
+             symbols (list[tuple[str, models.RecordType]]): List of symbols to watch with
+                 their respective record types.
+         """
+         pass
+
+     @abc.abstractmethod
+     def unwatch(self, symbols: list[tuple[str, models.RecordType]]):
+         """
+         Stop watching symbols.
+
+         Args:
+             symbols (list[tuple[str, models.RecordType]]): List of symbols to stop
+                 watching with their respective record types.
+         """
+         pass
+
+     @abc.abstractmethod
+     def disconnect(self):
+         """
+         Disconnect from the datafeed.
+         """
+         pass
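With the base class reduced to a plain abstract interface (connection state, locking, and logging now live in the concrete feeds), a subclass only has to implement the four hooks. A minimal sketch; ReplayDatafeed and its internals are hypothetical, while the BaseDatafeed API and the (symbol, RecordType) pairs come from the diff above:

```python
from onesecondtrader import messaging
from onesecondtrader.core import models
from onesecondtrader.datafeeds import BaseDatafeed


class ReplayDatafeed(BaseDatafeed):
    """Hypothetical feed illustrating the new abstract interface."""

    def __init__(self, event_bus: messaging.EventBus):
        super().__init__(event_bus)
        self._watched: set[tuple[str, models.RecordType]] = set()

    def connect(self):
        # open whatever resource backs the feed (file, socket, ...)
        pass

    def watch(self, symbols: list[tuple[str, models.RecordType]]):
        self._watched.update(symbols)

    def unwatch(self, symbols: list[tuple[str, models.RecordType]]):
        self._watched.difference_update(symbols)

    def disconnect(self):
        self._watched.clear()
```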
--- /dev/null
+++ onesecondtrader-0.11.0/src/onesecondtrader/datafeeds/csv_datafeed.py
@@ -0,0 +1,297 @@
+ """
+ This module provides a CSV-based simulated live datafeed.
+ """
+
+ import os
+ import pandas as pd
+ import threading
+ import time
+ from pathlib import Path
+ from dotenv import load_dotenv
+ from onesecondtrader.messaging import events, eventbus
+ from onesecondtrader.core import models
+ from onesecondtrader.monitoring import console
+ from onesecondtrader.datafeeds import base_datafeed
+ from pandas.io.parsers.readers import TextFileReader
+
+
+ class CSVDatafeed(base_datafeed.BaseDatafeed):
+     """
+     CSV-based simulated live datafeed.
+
+     Only one instance of any BaseDatafeed subclass can exist at a time.
+     """
+
+     _instance = None
+
+     def __init__(
+         self,
+         event_bus: eventbus.EventBus,
+         csv_path: str | Path | None = None,
+         streaming_delay: float | None = None,
+     ):
+         """
+         Initialize CSV datafeed.
+
+         Args:
+             event_bus: Event bus used to publish market data events.
+             csv_path: Optional path to CSV file. Overrides CSV_PATH env var.
+             streaming_delay: Optional delay in seconds between processing rows.
+                 Overrides CSV_STREAMING_DELAY env var.
+
+         Attributes:
+             self.csv_path (Path | None): Path to CSV file.
+             self.data_iterator (TextFileReader | None): Iterator for reading CSV.
+             self._watched_symbols (set[tuple[str, models.RecordType]]): Set of
+                 symbols and record types currently being watched.
+             self._streaming_thread (threading.Thread | None): Background thread
+                 for streaming data.
+             self._symbols_lock (threading.Lock): Lock to protect _watched_symbols
+                 from concurrent access.
+             self._streaming_delay (float): Delay in seconds between processing
+                 CSV rows (from CSV_STREAMING_DELAY env var, set in connect()).
+             self._init_csv_path (str | Path | None): CSV path provided during
+                 initialization.
+             self._init_streaming_delay (float | None): Streaming delay provided
+                 during initialization.
+         """
+         if CSVDatafeed._instance is not None:
+             console.logger.warning(
+                 f"Only one BaseDatafeed instance allowed. "
+                 f"Current: {type(CSVDatafeed._instance).__name__}. "
+                 f"Initialization failed."
+             )
+             return
+
+         super().__init__(event_bus)
+         CSVDatafeed._instance = self
+
+         self.csv_path: Path | None = None
+         self.data_iterator: TextFileReader | None = None
+         self._watched_symbols: set[tuple[str, models.RecordType]] = set()
+         self._stop_event = threading.Event()
+         self._streaming_thread: threading.Thread | None = None
+         self._symbols_lock: threading.Lock = threading.Lock()
+         self._streaming_delay: float = 0.0
+
+         self._init_csv_path: str | Path | None = csv_path
+         self._init_streaming_delay: float | None = streaming_delay
+
+     def connect(self):
+         """
+         Connect to CSV file specified in .env file (CSV_PATH variable) and
+         create data iterator.
+         """
+         load_dotenv()
+
+         if self._init_csv_path is not None:
+             csv_path_str = str(self._init_csv_path)
+             console.logger.info(f"Using CSV path from initialization: {csv_path_str}")
+         else:
+             csv_path_str = os.getenv("CSV_PATH")
+             if not csv_path_str:
+                 console.logger.error(
+                     "CSV_PATH not found in environment variables and not "
+                     "provided in __init__. Either set CSV_PATH in .env file "
+                     "or pass csv_path to CSVDatafeed()"
+                 )
+                 return False
+
+         if self._init_streaming_delay is not None:
+             self._streaming_delay = self._init_streaming_delay
+             if self._streaming_delay < 0:
+                 console.logger.warning(
+                     f"Streaming delay cannot be negative "
+                     f"({self._streaming_delay}), using default 0.0"
+                 )
+                 self._streaming_delay = 0.0
+             else:
+                 console.logger.info(
+                     f"CSV streaming delay set from initialization: "
+                     f"{self._streaming_delay} seconds"
+                 )
+         else:
+             streaming_delay_str = os.getenv("CSV_STREAMING_DELAY", "0.0")
+             try:
+                 self._streaming_delay = float(streaming_delay_str)
+                 if self._streaming_delay < 0:
+                     console.logger.warning(
+                         f"CSV_STREAMING_DELAY cannot be negative "
+                         f"({self._streaming_delay}), using default 0.0"
+                     )
+                     self._streaming_delay = 0.0
+                 else:
+                     console.logger.info(
+                         f"CSV streaming delay set from environment: "
+                         f"{self._streaming_delay} seconds"
+                     )
+             except ValueError:
+                 console.logger.error(
+                     f"Invalid CSV_STREAMING_DELAY value "
+                     f"'{streaming_delay_str}', must be a number. "
+                     f"Using default 0.0"
+                 )
+                 self._streaming_delay = 0.0
+
+         self.csv_path = Path(csv_path_str)
+
+         try:
+             self.data_iterator = pd.read_csv(
+                 self.csv_path,
+                 usecols=[
+                     "ts_event",
+                     "rtype",
+                     "open",
+                     "high",
+                     "low",
+                     "close",
+                     "volume",
+                     "symbol",
+                 ],
+                 dtype={
+                     "ts_event": int,
+                     "rtype": int,
+                     "open": int,
+                     "high": int,
+                     "low": int,
+                     "close": int,
+                     "volume": int,
+                     "symbol": str,
+                 },
+                 iterator=True,
+                 chunksize=1,
+             )
+             console.logger.info(f"CSV datafeed connected to: {self.csv_path}")
+             self._stop_event.clear()
+             return True
+
+         except Exception as e:
+             console.logger.error(f"Failed to connect to CSV file {self.csv_path}: {e}")
+             return False
+
+     def watch(self, symbols):
+         """
+         Start streaming data for specified symbols.
+         Can be called multiple times to add more symbols.
+
+         Args:
+             symbols (list[tuple[str, models.RecordType]]): List of symbols to
+                 watch with their respective record types.
+         """
+         if not self.data_iterator:
+             console.logger.error("Not connected. Call connect() first.")
+             return
+
+         with self._symbols_lock:
+             new_symbols = set(symbols) - self._watched_symbols
+             already_watched = set(symbols) & self._watched_symbols
+
+             self._watched_symbols.update(new_symbols)
+
+             if new_symbols:
+                 console.logger.info(f"Added new symbols: {new_symbols}")
+             if already_watched:
+                 console.logger.info(f"Already watching: {already_watched}")
+             console.logger.info(
+                 f"Currently watching: {len(self._watched_symbols)} symbols"
+             )
+
+         if self._streaming_thread is None or not self._streaming_thread.is_alive():
+             self._streaming_thread = threading.Thread(
+                 target=self._stream, name="CSVDatafeedStreaming", daemon=True
+             )
+             self._streaming_thread.start()
+             console.logger.info("Started CSV streaming thread")
+
+     def _stream(self):
+         """Internal method that runs in background thread to stream CSV data."""
+         console.logger.info("CSV streaming thread started")
+
+         should_delay = self._streaming_delay > 0
+         delay_time = self._streaming_delay
+
+         while not self._stop_event.is_set():
+             try:
+                 chunk = next(self.data_iterator)
+                 row = chunk.iloc[0]
+
+                 symbol = row["symbol"]
+                 rtype = row["rtype"]
+
+                 with self._symbols_lock:
+                     symbol_key = (symbol, models.RecordType(rtype))
+                     if symbol_key not in self._watched_symbols:
+                         continue
+
+                 bar_event = events.Market.IncomingBar(
+                     ts_event=pd.Timestamp(row["ts_event"], unit="ns", tz="UTC"),
+                     symbol=symbol,
+                     bar=models.Bar(
+                         open=row["open"] / 1e9,
+                         high=row["high"] / 1e9,
+                         low=row["low"] / 1e9,
+                         close=row["close"] / 1e9,
+                         volume=int(row["volume"]),
+                     ),
+                 )
+
+                 self.event_bus.publish(bar_event)
+
+                 if should_delay:
+                     time.sleep(delay_time)
+
+             except StopIteration:
+                 console.logger.info("CSV datafeed reached end of file")
+                 break
+             except ValueError as e:
+                 console.logger.warning(f"Invalid rtype {row['rtype']} in CSV data: {e}")
+                 continue
+             except Exception as e:
+                 console.logger.error(f"CSV datafeed error reading data: {e}")
+                 break
+
+         console.logger.info("CSV streaming thread stopped")
+
+     def unwatch(self, symbols):
+         """
+         Stop watching specific symbols.
+
+         Args:
+             symbols (list[tuple[str, models.RecordType]]): List of symbols to
+                 stop watching.
+         """
+         with self._symbols_lock:
+             for symbol in symbols:
+                 self._watched_symbols.discard(symbol)
+
+             console.logger.info(f"Stopped watching symbols: {symbols}")
+             console.logger.info(f"Still watching: {self._watched_symbols}")
+
+     def disconnect(self):
+         """
+         Disconnect from CSV datafeed.
+         """
+         self._stop_event.set()
+
+         if self._streaming_thread and self._streaming_thread.is_alive():
+             console.logger.info("Waiting for streaming thread to stop...")
+             self._streaming_thread.join(timeout=5.0)
+             if self._streaming_thread.is_alive():
+                 console.logger.warning("Streaming thread did not stop within timeout")
+
+         with self._symbols_lock:
+             self._watched_symbols.clear()
+
+         if self.data_iterator is not None:
+             try:
+                 self.data_iterator.close()
+                 console.logger.info("CSV iterator closed successfully")
+             except Exception as e:
+                 console.logger.warning(f"Error closing CSV iterator: {e}")
+             finally:
+                 self.data_iterator = None
+
+         self.csv_path = None
+         self._streaming_thread = None
+
+         CSVDatafeed._instance = None
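A usage sketch of the new feed. The CSV path, the symbol, and the no-argument EventBus() construction are assumptions made for illustration; the class names, method names, and RecordType values come from the diff above:

```python
import time

from onesecondtrader.core import models
from onesecondtrader.datafeeds import CSVDatafeed
from onesecondtrader.messaging import EventBus

bus = EventBus()  # assumed no-argument constructor; subscribers omitted here
feed = CSVDatafeed(bus, csv_path="data/ohlcv_1s.csv", streaming_delay=0.5)

if feed.connect():  # without csv_path, CSV_PATH is read from .env via python-dotenv
    feed.watch([("AAPL", models.RecordType.OHLCV_1S)])    # starts the background streaming thread
    time.sleep(5)                                         # let a few bars reach the bus
    feed.unwatch([("AAPL", models.RecordType.OHLCV_1S)])
    feed.disconnect()                                     # stops the thread and closes the iterator
```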
--- /dev/null
+++ onesecondtrader-0.11.0/src/onesecondtrader/messaging/__init__.py
@@ -0,0 +1,8 @@
+ from .eventbus import EventBus as EventBus
+ from .events import (
+     Base as Base,
+     Market as Market,
+     Request as Request,
+     Response as Response,
+     System as System,
+ )
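The flattened messaging namespace lets consumers type-check the events the CSV datafeed publishes. A small sketch; the handler and how it gets attached to the bus are hypothetical, and attribute access on IncomingBar assumes the event is a plain data object:

```python
from onesecondtrader.messaging import Market


def on_event(event) -> None:
    # CSVDatafeed above publishes Market.IncomingBar events onto the bus
    if isinstance(event, Market.IncomingBar):
        print(event.symbol, event.bar.close)
```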
--- onesecondtrader-0.10.1/src/onesecondtrader/datafeeds/base_datafeed.py
+++ /dev/null
@@ -1,255 +0,0 @@
- """
- This module provides the base class for all datafeeds.
- """
-
- import abc
- import threading
- from onesecondtrader.messaging import eventbus
- from onesecondtrader.core import models
- from onesecondtrader.monitoring import console
-
-
- class BaseDatafeed(abc.ABC):
-     """
-     Base class for all datafeeds.
-     """
-
-     def __init__(self, event_bus: eventbus.EventBus) -> None:
-         """
-         Initialize the datafeed with the provided event bus.
-
-         Args:
-             event_bus: The event bus to publish events to.
-
-         Attributes:
-             self.event_bus: The event bus to publish events to.
-             self._lock: Lock for thread safety.
-             self._is_connected: Flag indicating if the datafeed is connected.
-             self._watched_symbols: Set of symbols currently being watched.
-         """
-         self.event_bus = event_bus
-         self._lock = threading.Lock()
-         self._is_connected = False
-         self._watched_symbols: set[tuple[str, models.TimeFrame]] = set()
-
-     def connect(self) -> bool:
-         """
-         Connect to the datafeed.
-
-         Returns:
-             bool: True if connection successful, False otherwise.
-         """
-         with self._lock:
-             if self._is_connected:
-                 console.logger.warning(f"{self.__class__.__name__} already connected")
-                 return True
-
-             console.logger.info(f"Connecting to {self.__class__.__name__}...")
-             try:
-                 success = self._connect()
-                 if success:
-                     self._is_connected = True
-                     console.logger.info(
-                         f"Successfully connected to {self.__class__.__name__}"
-                     )
-                     return True
-                 else:
-                     console.logger.error(
-                         f"Failed to connect to {self.__class__.__name__}"
-                     )
-                     return False
-             except Exception as e:
-                 console.logger.error(
-                     f"Connection failed for {self.__class__.__name__}: {e}"
-                 )
-                 return False
-
-     @abc.abstractmethod
-     def _connect(self) -> bool:
-         """
-         Implement connection logic for the specific datafeed.
-
-         Returns:
-             bool: True if connection successful, False otherwise.
-         """
-         pass
-
-     def disconnect(self) -> bool:
-         """
-         Disconnect from the datafeed.
-
-         Returns:
-             bool: True if disconnection successful, False otherwise.
-         """
-         with self._lock:
-             if not self._is_connected:
-                 console.logger.warning(
-                     f"{self.__class__.__name__} already disconnected"
-                 )
-                 return True
-
-             console.logger.info(f"Disconnecting from {self.__class__.__name__}...")
-             try:
-                 success = self._disconnect()
-                 if success:
-                     self._is_connected = False
-                     self._watched_symbols.clear()
-                     console.logger.info(
-                         f"Successfully disconnected from {self.__class__.__name__}"
-                     )
-                     return True
-                 else:
-                     console.logger.error(
-                         f"Failed to disconnect from {self.__class__.__name__}"
-                     )
-                     return False
-             except Exception as e:
-                 console.logger.error(
-                     f"Disconnection failed for {self.__class__.__name__}: {e}"
-                 )
-                 self._is_connected = False
-                 self._watched_symbols.clear()
-                 return False
-
-     @abc.abstractmethod
-     def _disconnect(self) -> bool:
-         """
-         Implement disconnection logic for the specific datafeed.
-
-         Returns:
-             bool: True if disconnection successful, False otherwise.
-         """
-         pass
-
-     def watch(self, symbols: list[tuple[str, models.TimeFrame]]) -> bool:
-         """
-         Start watching market data for the specified symbols and timeframes.
-
-         Args:
-             symbols: List of (symbol, timeframe) tuples to start watching.
-
-         Returns:
-             bool: True if watching started successfully, False otherwise.
-         """
-         if not symbols:
-             console.logger.warning("No symbols provided for watching")
-             return True
-
-         with self._lock:
-             if not self._is_connected:
-                 console.logger.error("Cannot start watching: datafeed not connected")
-                 return False
-
-             new_symbols = set(symbols) - self._watched_symbols
-             if not new_symbols:
-                 console.logger.info("All requested symbols are already being watched")
-                 return True
-
-             try:
-                 success = self._watch(list(new_symbols))
-                 if success:
-                     self._watched_symbols.update(new_symbols)
-                     console.logger.info(
-                         f"Successfully started watching {len(new_symbols)} symbols"
-                     )
-                     return True
-                 else:
-                     console.logger.error("Failed to start watching symbols")
-                     return False
-             except Exception as e:
-                 console.logger.error(f"Exception while starting watching: {e}")
-                 return False
-
-     @abc.abstractmethod
-     def _watch(self, symbols: list[tuple[str, models.TimeFrame]]) -> bool:
-         """
-         Implement watching startup logic for the specific datafeed.
-
-         Args:
-             symbols: List of (symbol, timeframe) tuples to start watching.
-                 These are guaranteed to be new symbols not already being watched.
-
-         Returns:
-             bool: True if watching started successfully, False otherwise.
-         """
-         pass
-
-     def unwatch(self, symbols: list[tuple[str, models.TimeFrame]]) -> bool:
-         """
-         Stop watching market data for the specified symbols and timeframes.
-
-         Args:
-             symbols: List of (symbol, timeframe) tuples to stop watching.
-
-         Returns:
-             bool: True if unwatching stopped successfully, False otherwise.
-         """
-         if not symbols:
-             console.logger.warning("No symbols provided for unwatching")
-             return True
-
-         with self._lock:
-             if not self._is_connected:
-                 console.logger.warning(
-                     "Datafeed not connected, but removing symbols from tracking"
-                 )
-                 self._watched_symbols.difference_update(symbols)
-                 return True
-
-             symbols_to_stop = set(symbols) & self._watched_symbols
-             if not symbols_to_stop:
-                 console.logger.info(
-                     "None of the requested symbols are currently being watched"
-                 )
-                 return True
-
-             console.logger.info(f"Unwatching {len(symbols_to_stop)} symbols")
-             try:
-                 success = self._unwatch(list(symbols_to_stop))
-                 if success:
-                     self._watched_symbols.difference_update(symbols_to_stop)
-                     console.logger.info(
-                         f"Successfully unwatched {len(symbols_to_stop)} symbols"
-                     )
-                     return True
-                 else:
-                     console.logger.error("Failed to unwatch symbols")
-                     return False
-             except Exception as e:
-                 console.logger.error(f"Exception while unwatching: {e}")
-                 self._watched_symbols.difference_update(symbols_to_stop)
-                 return False
-
-     @abc.abstractmethod
-     def _unwatch(self, symbols: list[tuple[str, models.TimeFrame]]) -> bool:
-         """
-         Implement unwatching logic for the specific datafeed.
-
-         Args:
-             symbols: List of (symbol, timeframe) tuples to stop watching.
-                 These are guaranteed to be symbols currently being watched.
-
-         Returns:
-             bool: True if unwatching stopped successfully, False otherwise.
-         """
-         pass
-
-     def is_connected(self) -> bool:
-         """
-         Check if the datafeed is currently connected.
-
-         Returns:
-             bool: True if connected, False otherwise.
-         """
-         with self._lock:
-             return self._is_connected
-
-     def get_watched_symbols(self) -> set[tuple[str, models.TimeFrame]]:
-         """
-         Get the set of currently watched symbols and timeframes.
-
-         Returns:
-             set: Set of (symbol, timeframe) tuples currently being watched.
-         """
-         with self._lock:
-             return self._watched_symbols.copy()