onesecondtrader 0.19.0__tar.gz → 0.20.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: onesecondtrader
-Version: 0.19.0
+Version: 0.20.0
 Summary: The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place.
 License-File: LICENSE
 Author: Nils P. Kujath
@@ -1,6 +1,6 @@
 [project]
 name = "onesecondtrader"
-version = "0.19.0"
+version = "0.20.0"
 description = "The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place."
 authors = [
     {name = "Nils P. Kujath",email = "63961429+NilsKujath@users.noreply.github.com"}
@@ -5,6 +5,7 @@ Core module containing the backbone of OneSecondTrader's event-driven architecture
 import abc
 import dataclasses
 import enum
+import logging
 import pandas as pd
 import queue
 import threading
@@ -13,6 +14,13 @@ import uuid
 from collections import defaultdict
 
 
+logging.basicConfig(
+    level=logging.DEBUG,
+    format="%(asctime)s - %(levelname)s - %(threadName)s - %(message)s",
+)
+logger = logging.getLogger("onesecondtrader")
+
+
 class Models:
     """
     Namespace for all models.
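
The core module now configures logging once at import time and exposes a shared `logger` that the rest of the package imports. A minimal sketch of how another module might reuse it, assuming only what this hunk shows (the `record_fill` function is illustrative, not part of the package):

    from onesecondtrader.core import logger

    def record_fill(symbol: str, qty: int) -> None:
        # Illustrative only: any module that imports the shared logger
        # inherits the format configured above, including %(threadName)s.
        logger.info(f"filled {qty} x {symbol}")
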
@@ -185,7 +193,9 @@ class BaseConsumer(abc.ABC):
 
     def __init__(self) -> None:
         self._queue: queue.Queue[Events.BaseEvent] = queue.Queue()
-        self._thread = threading.Thread(target=self._consume, daemon=True)
+        self._thread = threading.Thread(
+            target=self._consume, name=self.__class__.__name__, daemon=True
+        )
         self._thread.start()
 
     @abc.abstractmethod
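
Passing `name=self.__class__.__name__` means each consumer's worker thread reports its subclass name through the `%(threadName)s` field configured above, rather than a generic `Thread-1`. A standalone sketch of the effect; the `DemoConsumer` name and `_consume` body are illustrative:

    import logging
    import threading

    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s - %(levelname)s - %(threadName)s - %(message)s",
    )

    def _consume() -> None:
        logging.getLogger("onesecondtrader").debug("event consumed")

    # The thread name surfaces in every log record emitted from the thread.
    thread = threading.Thread(target=_consume, name="DemoConsumer", daemon=True)
    thread.start()
    thread.join()
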
@@ -0,0 +1,172 @@
+import abc
+import pandas as pd
+import threading
+
+from pathlib import Path
+from onesecondtrader.core import Events, Models, event_bus, logger
+
+
+class DatafeedBase(abc.ABC):
+    """
+    Base class for all datafeeds.
+    """
+
+    def __init__(self) -> None:
+        self._is_connected: bool = False
+        self._watched_symbols: set[tuple[str, Models.RecordType]] = set()
+        self._lock: threading.Lock = threading.Lock()
+
+    @abc.abstractmethod
+    def watch(self, symbols: list[tuple[str, Models.RecordType]]) -> bool:
+        pass
+
+    @abc.abstractmethod
+    def unwatch(self, symbols: list[str]) -> None:
+        pass
+
+
+class SimulatedDatafeedCSV(DatafeedBase):
+    """
+    CSV-based simulated datafeed for backtesting.
+    """
+
+    csv_path: str | Path = ""
+    artificial_delay: float = 0.0
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._stop_event = threading.Event()
+        self._streaming_thread: threading.Thread | None = None
+        self._data_iterator: pd.io.parsers.readers.TextFileReader | None = None
+        self._connected_path: str | Path = ""
+
+    def watch(self, symbols: list[tuple[str, Models.RecordType]]) -> bool:
+        with self._lock:
+            if not self._is_connected:
+                try:
+                    self._data_iterator = pd.read_csv(
+                        Path(self.csv_path),
+                        usecols=[
+                            "ts_event",
+                            "rtype",
+                            "open",
+                            "high",
+                            "low",
+                            "close",
+                            "volume",
+                            "symbol",
+                        ],
+                        dtype={
+                            "ts_event": int,
+                            "rtype": int,
+                            "open": int,
+                            "high": int,
+                            "low": int,
+                            "close": int,
+                            "volume": int,
+                            "symbol": str,
+                        },
+                        chunksize=1,
+                    )
+                    self._is_connected = True
+                    self._connected_path = self.csv_path
+                    logger.info(
+                        f"{self.__class__.__name__} connected to {self.csv_path}"
+                    )
+                except Exception as e:
+                    logger.error(f"{self.__class__.__name__} failed to connect: {e}")
+                    self._data_iterator = None
+                    self._is_connected = False
+                    return False
+            elif self._connected_path != self.csv_path:
+                logger.warning(
+                    "csv_path changed while connected; unwatch all symbols first"
+                )
+
+            self._watched_symbols.update(symbols)
+            formatted = ", ".join(f"{s} ({r.name})" for s, r in symbols)
+            logger.info(f"{self.__class__.__name__} watching {formatted}")
+
+            if not self._streaming_thread or not self._streaming_thread.is_alive():
+                self._stop_event.clear()
+                self._streaming_thread = threading.Thread(
+                    target=self._stream, name="CSVDatafeedStreaming", daemon=False
+                )
+                self._streaming_thread.start()
+
+            return True
+
+    def unwatch(self, symbols: list[str]) -> None:
+        thread_to_join = None
+        with self._lock:
+            symbols_set = set(symbols)
+            self._watched_symbols.difference_update(
+                {
+                    (symbol, rtype)
+                    for (symbol, rtype) in self._watched_symbols
+                    if symbol in symbols_set
+                }
+            )
+            logger.info(f"{self.__class__.__name__} unwatched {', '.join(symbols)}")
+            if not self._watched_symbols:
+                self._stop_event.set()
+                thread_to_join = self._streaming_thread
+                self._streaming_thread = None
+
+        if thread_to_join and thread_to_join.is_alive():
+            thread_to_join.join(timeout=5.0)
+            if thread_to_join.is_alive():
+                logger.warning("Streaming thread did not terminate within timeout")
+            else:
+                logger.info(f"{self.__class__.__name__} disconnected")
+
+    def _stream(self) -> None:
+        if self._data_iterator is None:
+            logger.error("_stream called with no data iterator")
+            return
+        should_delay = self.artificial_delay > 0
+        delay_time = self.artificial_delay
+        while not self._stop_event.is_set():
+            try:
+                chunk = next(self._data_iterator)
+                row = chunk.iloc[0]
+
+                symbol = row["symbol"]
+                record_type = Models.RecordType(row["rtype"])
+                symbol_key = (symbol, record_type)
+
+                with self._lock:
+                    if symbol_key not in self._watched_symbols:
+                        continue
+
+                bar_event = Events.IncomingBar(
+                    ts_event=pd.Timestamp(row["ts_event"], unit="ns", tz="UTC"),
+                    symbol=symbol,
+                    record_type=record_type,
+                    open=row["open"] / 1e9,
+                    high=row["high"] / 1e9,
+                    low=row["low"] / 1e9,
+                    close=row["close"] / 1e9,
+                    volume=row["volume"],
+                )
+
+                event_bus.publish(bar_event)
+
+                if should_delay and self._stop_event.wait(delay_time):
+                    break
+            except StopIteration:
+                logger.info("CSV datafeed reached end of file")
+                break
+            except Exception as e:
+                logger.error(f"CSV datafeed error reading data: {e}")
+                break
+
+        with self._lock:
+            self._data_iterator = None
+            self._is_connected = False
+
+
+simulated_datafeed_csv = SimulatedDatafeedCSV()
+"""
+Global instance of SimulatedDatafeedCSV.
+"""