onesecondtrader 0.10.1__py3-none-any.whl → 0.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- onesecondtrader/core/models.py +28 -20
- onesecondtrader/datafeeds/base_datafeed.py +18 -215
- onesecondtrader/datafeeds/csv_datafeed.py +297 -0
- onesecondtrader/indicators/__init__.py +0 -0
- onesecondtrader/indicators/base_indicator.py +137 -0
- onesecondtrader/messaging/__init__.py +8 -0
- {onesecondtrader-0.10.1.dist-info → onesecondtrader-0.12.0.dist-info}/METADATA +5 -2
- onesecondtrader-0.12.0.dist-info/RECORD +20 -0
- {onesecondtrader-0.10.1.dist-info → onesecondtrader-0.12.0.dist-info}/WHEEL +1 -1
- onesecondtrader-0.10.1.dist-info/RECORD +0 -17
- {onesecondtrader-0.10.1.dist-info → onesecondtrader-0.12.0.dist-info/licenses}/LICENSE +0 -0
onesecondtrader/core/models.py
CHANGED
@@ -32,14 +32,14 @@ class Bar:
         high (float): High price
         low (float): Low price
         close (float): Close price
-        volume (
+        volume (int | None): Volume
     """

     open: float
     high: float
     low: float
     close: float
-    volume:
+    volume: int | None = None


 class Side(enum.Enum):
@@ -133,27 +133,35 @@ class OrderRejectionReason(enum.Enum):
     NEGATIVE_QUANTITY = enum.auto()


-class
+class RecordType(enum.Enum):
     """
-    Enum for
+    Enum for Databento record types.

     **Attributes:**

     | Enum | Value | Description |
     |------|-------|-------------|
-    | `
-    | `
-    | `
-    | `
-
-
-
-
-
-
-
-
-
-
-
-
+    | `OHLCV_1S` | `32` | 1-second bars |
+    | `OHLCV_1M` | `33` | 1-minute bars |
+    | `OHLCV_1H` | `34` | 1-hour bars |
+    | `OHLCV_1D` | `35` | 1-day bars |
+    """
+
+    OHLCV_1S = 32
+    OHLCV_1M = 33
+    OHLCV_1H = 34
+    OHLCV_1D = 35
+
+    @classmethod
+    def to_string(cls, rtype: int) -> str:
+        match rtype:
+            case cls.OHLCV_1S.value:
+                return "1-second bars"
+            case cls.OHLCV_1M.value:
+                return "1-minute bars"
+            case cls.OHLCV_1H.value:
+                return "1-hour bars"
+            case cls.OHLCV_1D.value:
+                return "daily bars"
+            case _:
+                return f"unknown ({rtype})"
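A minimal usage sketch of the new RecordType enum and the now-optional Bar.volume, based only on the hunks above (member values and the to_string() strings are taken from this diff):

# Assumes onesecondtrader 0.12.0 with models.Bar and models.RecordType as shown above.
from onesecondtrader.core import models

# volume is now optional and defaults to None
bar = models.Bar(open=100.0, high=101.0, low=99.0, close=100.5)
print(bar.volume)  # None

# RecordType mirrors the Databento OHLCV rtype codes
print(models.RecordType.OHLCV_1S.value)   # 32
print(models.RecordType.to_string(32))    # "1-second bars"
print(models.RecordType.to_string(99))    # "unknown (99)"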
onesecondtrader/datafeeds/base_datafeed.py
CHANGED
@@ -3,10 +3,8 @@ This module provides the base class for all datafeeds.
 """

 import abc
-import
-from onesecondtrader.messaging import eventbus
+from onesecondtrader import messaging
 from onesecondtrader.core import models
-from onesecondtrader.monitoring import console


 class BaseDatafeed(abc.ABC):
@@ -14,242 +12,47 @@ class BaseDatafeed(abc.ABC):
     Base class for all datafeeds.
     """

-    def __init__(self, event_bus:
+    def __init__(self, event_bus: messaging.EventBus):
         """
-        Initialize the datafeed with
+        Initialize the datafeed with an event bus.

         Args:
-            event_bus:
-
-        Attributes:
-            self.event_bus: The event bus to publish events to.
-            self._lock: Lock for thread safety.
-            self._is_connected: Flag indicating if the datafeed is connected.
-            self._watched_symbols: Set of symbols currently being watched.
-        """
-        self.event_bus = event_bus
-        self._lock = threading.Lock()
-        self._is_connected = False
-        self._watched_symbols: set[tuple[str, models.TimeFrame]] = set()
-
-    def connect(self) -> bool:
-        """
-        Connect to the datafeed.
-
-        Returns:
-            bool: True if connection successful, False otherwise.
+            event_bus (messaging.EventBus): Event bus to publish market data events to.
         """
-
-        if self._is_connected:
-            console.logger.warning(f"{self.__class__.__name__} already connected")
-            return True
-
-        console.logger.info(f"Connecting to {self.__class__.__name__}...")
-        try:
-            success = self._connect()
-            if success:
-                self._is_connected = True
-                console.logger.info(
-                    f"Successfully connected to {self.__class__.__name__}"
-                )
-                return True
-            else:
-                console.logger.error(
-                    f"Failed to connect to {self.__class__.__name__}"
-                )
-                return False
-        except Exception as e:
-            console.logger.error(
-                f"Connection failed for {self.__class__.__name__}: {e}"
-            )
-            return False
+        self.event_bus: messaging.EventBus = event_bus

     @abc.abstractmethod
-    def
+    def connect(self):
         """
-
-
-        Returns:
-            bool: True if connection successful, False otherwise.
+        Connect to the datafeed.
         """
         pass

-    def disconnect(self) -> bool:
-        """
-        Disconnect from the datafeed.
-
-        Returns:
-            bool: True if disconnection successful, False otherwise.
-        """
-        with self._lock:
-            if not self._is_connected:
-                console.logger.warning(
-                    f"{self.__class__.__name__} already disconnected"
-                )
-                return True
-
-            console.logger.info(f"Disconnecting from {self.__class__.__name__}...")
-            try:
-                success = self._disconnect()
-                if success:
-                    self._is_connected = False
-                    self._watched_symbols.clear()
-                    console.logger.info(
-                        f"Successfully disconnected from {self.__class__.__name__}"
-                    )
-                    return True
-                else:
-                    console.logger.error(
-                        f"Failed to disconnect from {self.__class__.__name__}"
-                    )
-                    return False
-            except Exception as e:
-                console.logger.error(
-                    f"Disconnection failed for {self.__class__.__name__}: {e}"
-                )
-                self._is_connected = False
-                self._watched_symbols.clear()
-                return False
-
     @abc.abstractmethod
-    def
-        """
-        Implement disconnection logic for the specific datafeed.
-
-        Returns:
-            bool: True if disconnection successful, False otherwise.
-        """
-        pass
-
-    def watch(self, symbols: list[tuple[str, models.TimeFrame]]) -> bool:
+    def watch(self, symbols: list[tuple[str, models.RecordType]]):
         """
-        Start watching
+        Start watching symbols.

         Args:
-            symbols: List of
-
-        Returns:
-            bool: True if watching started successfully, False otherwise.
+            symbols (list[tuple[str, models.TimeFrame]]): List of symbols to watch with
+                their respective timeframes.
         """
-
-            console.logger.warning("No symbols provided for watching")
-            return True
-
-        with self._lock:
-            if not self._is_connected:
-                console.logger.error("Cannot start watching: datafeed not connected")
-                return False
-
-            new_symbols = set(symbols) - self._watched_symbols
-            if not new_symbols:
-                console.logger.info("All requested symbols are already being watched")
-                return True
-
-            try:
-                success = self._watch(list(new_symbols))
-                if success:
-                    self._watched_symbols.update(new_symbols)
-                    console.logger.info(
-                        f"Successfully started watching {len(new_symbols)} symbols"
-                    )
-                    return True
-                else:
-                    console.logger.error("Failed to start watching symbols")
-                    return False
-            except Exception as e:
-                console.logger.error(f"Exception while starting watching: {e}")
-                return False
+        pass

     @abc.abstractmethod
-    def
+    def unwatch(self, symbols: list[tuple[str, models.RecordType]]):
         """
-
+        Stop watching symbols.

         Args:
-            symbols: List of
-
-
-        Returns:
-            bool: True if watching started successfully, False otherwise.
+            symbols (list[tuple[str, models.TimeFrame]]): List of symbols to stop
+                watching with their respective timeframes.
         """
         pass

-    def unwatch(self, symbols: list[tuple[str, models.TimeFrame]]) -> bool:
-        """
-        Stop watching market data for the specified symbols and timeframes.
-
-        Args:
-            symbols: List of (symbol, timeframe) tuples to stop watching.
-
-        Returns:
-            bool: True if unwatching stopped successfully, False otherwise.
-        """
-        if not symbols:
-            console.logger.warning("No symbols provided for unwatching")
-            return True
-
-        with self._lock:
-            if not self._is_connected:
-                console.logger.warning(
-                    "Datafeed not connected, but removing symbols from tracking"
-                )
-                self._watched_symbols.difference_update(symbols)
-                return True
-
-            symbols_to_stop = set(symbols) & self._watched_symbols
-            if not symbols_to_stop:
-                console.logger.info(
-                    "None of the requested symbols are currently being watched"
-                )
-                return True
-
-            console.logger.info(f"Unwatching {len(symbols_to_stop)} symbols")
-            try:
-                success = self._unwatch(list(symbols_to_stop))
-                if success:
-                    self._watched_symbols.difference_update(symbols_to_stop)
-                    console.logger.info(
-                        f"Successfully unwatched {len(symbols_to_stop)} symbols"
-                    )
-                    return True
-                else:
-                    console.logger.error("Failed to unwatch symbols")
-                    return False
-            except Exception as e:
-                console.logger.error(f"Exception while unwatching: {e}")
-                self._watched_symbols.difference_update(symbols_to_stop)
-                return False
-
     @abc.abstractmethod
-    def
+    def disconnect(self):
         """
-
-
-        Args:
-            symbols: List of (symbol, timeframe) tuples to stop watching.
-                These are guaranteed to be symbols currently being watched.
-
-        Returns:
-            bool: True if unwatching stopped successfully, False otherwise.
+        Disconnect from the datafeed.
         """
         pass
-
-    def is_connected(self) -> bool:
-        """
-        Check if the datafeed is currently connected.
-
-        Returns:
-            bool: True if connected, False otherwise.
-        """
-        with self._lock:
-            return self._is_connected
-
-    def get_watched_symbols(self) -> set[tuple[str, models.TimeFrame]]:
-        """
-        Get the set of currently watched symbols and timeframes.
-
-        Returns:
-            set: Set of (symbol, timeframe) tuples currently being watched.
-        """
-        with self._lock:
-            return self._watched_symbols.copy()
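The 0.12.0 base class now leaves connection handling and symbol bookkeeping entirely to subclasses. A hypothetical minimal subclass (InMemoryDatafeed is an illustrative name, not part of the package) only has to fill in the four abstract methods shown in the hunk above:

from onesecondtrader import messaging
from onesecondtrader.core import models
from onesecondtrader.datafeeds import base_datafeed


class InMemoryDatafeed(base_datafeed.BaseDatafeed):
    def __init__(self, event_bus: messaging.EventBus):
        super().__init__(event_bus)
        self._watched: set[tuple[str, models.RecordType]] = set()

    def connect(self):
        # acquire resources (files, sockets, ...) here
        return True

    def watch(self, symbols: list[tuple[str, models.RecordType]]):
        self._watched.update(symbols)

    def unwatch(self, symbols: list[tuple[str, models.RecordType]]):
        self._watched.difference_update(symbols)

    def disconnect(self):
        self._watched.clear()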
onesecondtrader/datafeeds/csv_datafeed.py
@@ -0,0 +1,297 @@
+"""
+This module provides a CSV-based simulated live datafeed.
+"""
+
+import os
+import pandas as pd
+import threading
+import time
+from pathlib import Path
+from dotenv import load_dotenv
+from onesecondtrader.messaging import events, eventbus
+from onesecondtrader.core import models
+from onesecondtrader.monitoring import console
+from onesecondtrader.datafeeds import base_datafeed
+from pandas.io.parsers.readers import TextFileReader
+
+
+class CSVDatafeed(base_datafeed.BaseDatafeed):
+    """
+    CSV-based simulated live datafeed.
+
+    Only one instance of any BaseDatafeed subclass can exist at a time.
+    """
+
+    _instance = None
+
+    def __init__(
+        self,
+        event_bus: eventbus.EventBus,
+        csv_path: str | Path | None = None,
+        streaming_delay: float | None = None,
+    ):
+        """
+        Initialize CSV datafeed.
+
+        Args:
+            event_bus: Event bus used to publish market data events.
+            csv_path: Optional path to CSV file. Overrides CSV_PATH env var.
+            streaming_delay: Optional delay in seconds between processing rows.
+                Overrides CSV_STREAMING_DELAY env var.
+
+        Attributes:
+            self.csv_path (Path | None): Path to CSV file.
+            self.data_iterator (TextFileReader | None): Iterator for reading CSV.
+            self._watched_symbols (set[tuple[str, models.RecordType]]): Set of
+                symbols and record types currently being watched.
+            self._streaming_thread (threading.Thread | None): Background thread
+                for streaming data.
+            self._symbols_lock (threading.Lock): Lock to protect _watched_symbols
+                from concurrent access.
+            self._streaming_delay (float): Delay in seconds between processing
+                CSV rows (from CSV_STREAMING_DELAY env var, set in connect()).
+            self._init_csv_path (str | Path | None): CSV path provided during
+                initialization.
+            self._init_streaming_delay (float | None): Streaming delay provided
+                during initialization.
+        """
+        if CSVDatafeed._instance is not None:
+            console.logger.warning(
+                f"Only one BaseDatafeed instance allowed. "
+                f"Current: {type(CSVDatafeed._instance).__name__}. "
+                f"Initialization failed."
+            )
+            return
+
+        super().__init__(event_bus)
+        CSVDatafeed._instance = self
+
+        self.csv_path: Path | None = None
+        self.data_iterator: TextFileReader | None = None
+        self._watched_symbols: set[tuple[str, models.RecordType]] = set()
+        self._stop_event = threading.Event()
+        self._streaming_thread: threading.Thread | None = None
+        self._symbols_lock: threading.Lock = threading.Lock()
+        self._streaming_delay: float = 0.0
+
+        self._init_csv_path: str | Path | None = csv_path
+        self._init_streaming_delay: float | None = streaming_delay
+
+    def connect(self):
+        """
+        Connect to CSV file specified in .env file (CSV_PATH variable) and
+        create data iterator.
+        """
+        load_dotenv()
+
+        if self._init_csv_path is not None:
+            csv_path_str = str(self._init_csv_path)
+            console.logger.info(f"Using CSV path from initialization: {csv_path_str}")
+        else:
+            csv_path_str = os.getenv("CSV_PATH")
+            if not csv_path_str:
+                console.logger.error(
+                    "CSV_PATH not found in environment variables and not "
+                    "provided in __init__. Either set CSV_PATH in .env file "
+                    "or pass csv_path to CSVDatafeed()"
+                )
+                return False
+
+        if self._init_streaming_delay is not None:
+            self._streaming_delay = self._init_streaming_delay
+            if self._streaming_delay < 0:
+                console.logger.warning(
+                    f"Streaming delay cannot be negative "
+                    f"({self._streaming_delay}), using default 0.0"
+                )
+                self._streaming_delay = 0.0
+            else:
+                console.logger.info(
+                    f"CSV streaming delay set from initialization: "
+                    f"{self._streaming_delay} seconds"
+                )
+        else:
+            streaming_delay_str = os.getenv("CSV_STREAMING_DELAY", "0.0")
+            try:
+                self._streaming_delay = float(streaming_delay_str)
+                if self._streaming_delay < 0:
+                    console.logger.warning(
+                        f"CSV_STREAMING_DELAY cannot be negative "
+                        f"({self._streaming_delay}), using default 0.0"
+                    )
+                    self._streaming_delay = 0.0
+                else:
+                    console.logger.info(
+                        f"CSV streaming delay set from environment: "
+                        f"{self._streaming_delay} seconds"
+                    )
+            except ValueError:
+                console.logger.error(
+                    f"Invalid CSV_STREAMING_DELAY value "
+                    f"'{streaming_delay_str}', must be a number. "
+                    f"Using default 0.0"
+                )
+                self._streaming_delay = 0.0
+
+        self.csv_path = Path(csv_path_str)
+
+        try:
+            self.data_iterator = pd.read_csv(
+                self.csv_path,
+                usecols=[
+                    "ts_event",
+                    "rtype",
+                    "open",
+                    "high",
+                    "low",
+                    "close",
+                    "volume",
+                    "symbol",
+                ],
+                dtype={
+                    "ts_event": int,
+                    "rtype": int,
+                    "open": int,
+                    "high": int,
+                    "low": int,
+                    "close": int,
+                    "volume": int,
+                    "symbol": str,
+                },
+                iterator=True,
+                chunksize=1,
+            )
+            console.logger.info(f"CSV datafeed connected to: {self.csv_path}")
+            self._stop_event.clear()
+            return True
+
+        except Exception as e:
+            console.logger.error(f"Failed to connect to CSV file {self.csv_path}: {e}")
+            return False
+
+    def watch(self, symbols):
+        """
+        Start streaming data for specified symbols.
+        Can be called multiple times to add more symbols.
+
+        Args:
+            symbols (list[tuple[str, models.RecordType]]): List of symbols to
+                watch with their respective record types.
+        """
+        if not self.data_iterator:
+            console.logger.error("Not connected. Call connect() first.")
+            return
+
+        with self._symbols_lock:
+            new_symbols = set(symbols) - self._watched_symbols
+            already_watched = set(symbols) & self._watched_symbols
+
+            self._watched_symbols.update(new_symbols)
+
+            if new_symbols:
+                console.logger.info(f"Added new symbols: {new_symbols}")
+            if already_watched:
+                console.logger.info(f"Already watching: {already_watched}")
+            console.logger.info(
+                f"Currently watching: {len(self._watched_symbols)} symbols"
+            )
+
+        if self._streaming_thread is None or not self._streaming_thread.is_alive():
+            self._streaming_thread = threading.Thread(
+                target=self._stream, name="CSVDatafeedStreaming", daemon=True
+            )
+            self._streaming_thread.start()
+            console.logger.info("Started CSV streaming thread")
+
+    def _stream(self):
+        """Internal method that runs in background thread to stream CSV data."""
+        console.logger.info("CSV streaming thread started")
+
+        should_delay = self._streaming_delay > 0
+        delay_time = self._streaming_delay
+
+        while not self._stop_event.is_set():
+            try:
+                chunk = next(self.data_iterator)
+                row = chunk.iloc[0]
+
+                symbol = row["symbol"]
+                rtype = row["rtype"]
+
+                with self._symbols_lock:
+                    symbol_key = (symbol, models.RecordType(rtype))
+                    if symbol_key not in self._watched_symbols:
+                        continue
+
+                bar_event = events.Market.IncomingBar(
+                    ts_event=pd.Timestamp(row["ts_event"], unit="ns", tz="UTC"),
+                    symbol=symbol,
+                    bar=models.Bar(
+                        open=row["open"] / 1e9,
+                        high=row["high"] / 1e9,
+                        low=row["low"] / 1e9,
+                        close=row["close"] / 1e9,
+                        volume=int(row["volume"]),
+                    ),
+                )
+
+                self.event_bus.publish(bar_event)
+
+                if should_delay:
+                    time.sleep(delay_time)
+
+            except StopIteration:
+                console.logger.info("CSV datafeed reached end of file")
+                break
+            except ValueError as e:
+                console.logger.warning(f"Invalid rtype {row['rtype']} in CSV data: {e}")
+                continue
+            except Exception as e:
+                console.logger.error(f"CSV datafeed error reading data: {e}")
+                break
+
+        console.logger.info("CSV streaming thread stopped")
+
+    def unwatch(self, symbols):
+        """
+        Stop watching specific symbols.
+
+        Args:
+            symbols (list[tuple[str, models.RecordType]]): List of symbols to
+                stop watching.
+        """
+        with self._symbols_lock:
+            for symbol in symbols:
+                self._watched_symbols.discard(symbol)
+
+        console.logger.info(f"Stopped watching symbols: {symbols}")
+        console.logger.info(f"Still watching: {self._watched_symbols}")
+
+    def disconnect(self):
+        """
+        Disconnect from CSV datafeed.
+        """
+        self._stop_event.set()
+
+        if self._streaming_thread and self._streaming_thread.is_alive():
+            console.logger.info("Waiting for streaming thread to stop...")
+            self._streaming_thread.join(timeout=5.0)
+            if self._streaming_thread.is_alive():
+                console.logger.warning("Streaming thread did not stop within timeout")
+
+        with self._symbols_lock:
+            self._watched_symbols.clear()
+
+        if self.data_iterator is not None:
+            try:
+                self.data_iterator.close()
+                console.logger.info("CSV iterator closed successfully")
+            except Exception as e:
+                console.logger.warning(f"Error closing CSV iterator: {e}")
+            finally:
+                self.data_iterator = None
+
+        self.csv_path = None
+        self._streaming_thread = None
+
+        CSVDatafeed._instance = None
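A hedged end-to-end sketch of driving the new CSVDatafeed. The column names, integer dtypes, and 1e-9 fixed-point price scaling follow the pd.read_csv() call and the _stream() conversion above; the no-argument EventBus() constructor, the /tmp/ohlcv.csv path, and the ESZ4 symbol are assumptions for illustration only.

import time
import pandas as pd

from onesecondtrader.core import models
from onesecondtrader.messaging import eventbus
from onesecondtrader.datafeeds import csv_datafeed

# Build a CSV with the columns the datafeed expects; prices are integers
# scaled by 1e9 (the feed divides by 1e9 before publishing bars).
pd.DataFrame(
    {
        "ts_event": [1_700_000_000_000_000_000],          # nanoseconds since epoch
        "rtype": [models.RecordType.OHLCV_1S.value],      # 32 -> 1-second bars
        "open": [100_000_000_000],
        "high": [101_000_000_000],
        "low": [99_000_000_000],
        "close": [100_500_000_000],
        "volume": [10_000],
        "symbol": ["ESZ4"],                               # sample symbol
    }
).to_csv("/tmp/ohlcv.csv", index=False)

bus = eventbus.EventBus()  # assumed no-arg constructor; not shown in this diff
feed = csv_datafeed.CSVDatafeed(bus, csv_path="/tmp/ohlcv.csv", streaming_delay=0.0)
if feed.connect():
    feed.watch([("ESZ4", models.RecordType.OHLCV_1S)])  # bars are published to the bus
    time.sleep(1)  # let the background streaming thread publish the row
    feed.disconnect()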
onesecondtrader/indicators/__init__.py
File without changes
onesecondtrader/indicators/base_indicator.py
@@ -0,0 +1,137 @@
+"""
+This module provides the base class for all indicators.
+"""
+
+import abc
+import collections
+import threading
+
+import numpy as np
+from onesecondtrader.core import models
+from onesecondtrader.monitoring import console
+
+
+class BaseIndicator(abc.ABC):
+    """
+    Base class for all indicators.
+
+    If new market data is received, the indicator is updated by calling the
+    `<indicator_instance>.update(incoming_bar)` method.
+
+    When programming a new indicator, only the `name` property and the
+    `_compute_indicator()` method need to be implemented.
+
+    Examples:
+        >>> from onesecondtrader.indicators import base_indicator
+        >>> from onesecondtrader.core import models
+        >>> class DummyCloseIndicator(base_indicator.BaseIndicator):
+        ...     @property
+        ...     def name(self) -> str:
+        ...         return "dummy_close_indicator"
+        ...     def _compute_indicator(self, incoming_bar: models.Bar):
+        ...         return incoming_bar.close
+        ...
+        >>> dummy_close_indicator = DummyCloseIndicator(max_history=10)
+        >>> incoming_bar = models.Bar(
+        ...     open=100.0, high=101.0, low=99.0, close=100.5, volume=10000
+        ... )
+        >>> dummy_close_indicator.update(incoming_bar)
+        >>> dummy_close_indicator[0]
+        100.5
+        >>> dummy_close_indicator[-1]
+        nan
+        >>> next_incoming_bar = models.Bar(
+        ...     open=100.0, high=101.0, low=99.0, close=101.0, volume=10000
+        ... )
+        >>> dummy_close_indicator.update(next_incoming_bar)
+        >>> dummy_close_indicator[0]
+        101.0
+        >>> dummy_close_indicator[-1]
+        100.5
+    """
+
+    def __init__(self, max_history: int = 100) -> None:
+        """
+        Initialize the indicator with a maximum lookback history length.
+
+        Args:
+            max_history (int): Maximum lookback history length as number of periods.
+                Defaults to 100.
+
+        Attributes:
+            self._lock (threading.Lock): Lock to protect concurrent access to the
+                indicator's state.
+            self._history (collections.deque): Deque to store the lookback history.
+        """
+        if max_history < 1:
+            console.logger.warning(
+                f"max_history must be >= 1, got {max_history}; defaulting to 1"
+            )
+            max_history = 1
+        self._lock: threading.Lock = threading.Lock()
+
+        self._history: collections.deque[float] = collections.deque(maxlen=max_history)
+
+    @property
+    @abc.abstractmethod
+    def name(self) -> str:
+        """
+        Name of the indicator.
+        This property must be implemented by subclasses.
+
+        Returns:
+            str: Name of the indicator.
+        """
+        pass
+
+    def update(self, incoming_bar: models.Bar) -> None:
+        """
+        Updates the indicator based on an incoming closed bar by calling
+        `self._compute_indicator()`.
+        """
+        new_value = self._compute_indicator(incoming_bar)
+        with self._lock:
+            self._history.append(new_value)
+
+    def __getitem__(self, index: int) -> float:
+        """
+        Return the indicator value at the given index with tolerant indexing.
+
+        Indexing rules:
+
+        - `0` returns the current (most recent) value
+        - `-1` returns the previous value, `-2` two periods back, and so on
+        - For convenience, a positive `k` behaves like `-k` (e.g., `1 == -1`,
+          `2 == -2`)
+        - Out-of-range indices return `np.nan` instead of raising an `IndexError`.
+        """
+        normalized: int
+        if index == 0:
+            normalized = -1
+        elif index > 0:
+            normalized = -(index + 1)
+        else:
+            normalized = index - 1
+
+        with self._lock:
+            try:
+                return self._history[normalized]
+            except IndexError:
+                return np.nan
+
+    @property
+    def latest(self) -> float:
+        """
+        The latest (most recent) indicator value.
+
+        Equivalent to self[0]. Returns numpy.nan when no value is available yet.
+        """
+        return self[0]
+
+    @abc.abstractmethod
+    def _compute_indicator(self, incoming_bar: models.Bar) -> float:
+        """
+        Computes the new indicator value based on an incoming closed bar.
+        This method must be implemented by subclasses.
+        """
+        pass
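Building on the DummyCloseIndicator doctest in the module docstring above, a concrete indicator only needs `name` and `_compute_indicator()`. The simple moving average below is an illustrative sketch, not part of the package; the extra window buffer is an addition for the example.

import collections

from onesecondtrader.core import models
from onesecondtrader.indicators import base_indicator


class SimpleMovingAverage(base_indicator.BaseIndicator):
    def __init__(self, period: int = 20, max_history: int = 100) -> None:
        super().__init__(max_history=max_history)
        # rolling window of close prices used to compute each new value
        self._closes: collections.deque[float] = collections.deque(maxlen=period)

    @property
    def name(self) -> str:
        return "sma"

    def _compute_indicator(self, incoming_bar: models.Bar) -> float:
        self._closes.append(incoming_bar.close)
        return sum(self._closes) / len(self._closes)


sma = SimpleMovingAverage(period=2, max_history=10)
sma.update(models.Bar(open=100.0, high=101.0, low=99.0, close=100.0, volume=1))
sma.update(models.Bar(open=100.0, high=102.0, low=99.5, close=102.0, volume=1))
print(sma.latest)  # 101.0 (mean of the two closes)
print(sma[-1])     # 100.0 (previous value)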
{onesecondtrader-0.10.1.dist-info → onesecondtrader-0.12.0.dist-info}/METADATA
CHANGED
@@ -1,7 +1,8 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: onesecondtrader
-Version: 0.
+Version: 0.12.0
 Summary: The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place.
+License-File: LICENSE
 Author: Nils P. Kujath
 Author-email: 63961429+NilsKujath@users.noreply.github.com
 Requires-Python: >=3.11
@@ -9,7 +10,9 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
 Requires-Dist: pandas (>=2.3.1,<3.0.0)
+Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
 Description-Content-Type: text/markdown

 # OneSecondTrader
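The new python-dotenv requirement backs the .env-based configuration that CSVDatafeed.connect() performs via load_dotenv(). A sketch of the two variables it reads (variable names come from csv_datafeed.py above; the path is a placeholder):

# Contents of a .env file in the working directory (placeholder path):
#
#   CSV_PATH=/data/ohlcv_1s.csv
#   CSV_STREAMING_DELAY=0.5
#
# Equivalent in-process configuration; connect() ultimately falls back to
# os.getenv(), so exporting the variables before calling connect() also works.
import os

os.environ["CSV_PATH"] = "/data/ohlcv_1s.csv"     # placeholder path
os.environ["CSV_STREAMING_DELAY"] = "0.5"         # seconds between streamed rows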
onesecondtrader-0.12.0.dist-info/RECORD
@@ -0,0 +1,20 @@
+onesecondtrader/__init__.py,sha256=TNqlT20sH46-J7F6giBxwWYG1-wFZZt7toDbZeQK6KQ,210
+onesecondtrader/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+onesecondtrader/core/models.py,sha256=I2NgWfZcvlyBPGUSsocrFrhP3KnLFJ-CFt8Cg4GOd0g,4185
+onesecondtrader/core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+onesecondtrader/datafeeds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+onesecondtrader/datafeeds/base_datafeed.py,sha256=QGXtkVUVN7PsDYeIH8h7Hi_kiDDFcqRyW8K8LzZkRTk,1394
+onesecondtrader/datafeeds/csv_datafeed.py,sha256=WMoZpoian_93CdAzo36hJoF15T0ywRADfuFQcfsPQNc,10957
+onesecondtrader/indicators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+onesecondtrader/indicators/base_indicator.py,sha256=ipN2uLavRAGvl7Z3z9Zxdzb2MydaZOuJXX86ksalnck,4381
+onesecondtrader/messaging/__init__.py,sha256=9GBHlh6pXweknEacXVBvzpdoTJgVyb3ROLqe1Fhz2ww,179
+onesecondtrader/messaging/eventbus.py,sha256=R2K85INeYVwJ1tMOybC3WpRraK0ZKVe8WehCbAzzznU,19359
+onesecondtrader/messaging/events.py,sha256=eaWXQQIUnRNOR-9n5-6lyLbZ6bUtzjD4GI567U_vh4g,23625
+onesecondtrader/monitoring/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+onesecondtrader/monitoring/console.py,sha256=1mrojXkyL4ro7ebkvDMGNQiCL-93WEylRuwnfmEKzVs,299
+onesecondtrader/monitoring/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+onesecondtrader/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+onesecondtrader-0.12.0.dist-info/METADATA,sha256=H7yohZt3jchO0dUtQuOoEWKJ6GKSM-wCv06qlkYgeNM,9638
+onesecondtrader-0.12.0.dist-info/WHEEL,sha256=M5asmiAlL6HEcOq52Yi5mmk9KmTVjY2RDPtO4p9DMrc,88
+onesecondtrader-0.12.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+onesecondtrader-0.12.0.dist-info/RECORD,,
onesecondtrader-0.10.1.dist-info/RECORD
@@ -1,17 +0,0 @@
-onesecondtrader/__init__.py,sha256=TNqlT20sH46-J7F6giBxwWYG1-wFZZt7toDbZeQK6KQ,210
-onesecondtrader/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onesecondtrader/core/models.py,sha256=gvspEZp7pISZgpHeaKNQfvNJgwZY7DnS_kWpoGwXC-c,3940
-onesecondtrader/core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onesecondtrader/datafeeds/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onesecondtrader/datafeeds/base_datafeed.py,sha256=cHt8bdA9zON6CJLjz7blgNcs4xu4-iWxM15s3Gu3FO0,8774
-onesecondtrader/messaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onesecondtrader/messaging/eventbus.py,sha256=R2K85INeYVwJ1tMOybC3WpRraK0ZKVe8WehCbAzzznU,19359
-onesecondtrader/messaging/events.py,sha256=eaWXQQIUnRNOR-9n5-6lyLbZ6bUtzjD4GI567U_vh4g,23625
-onesecondtrader/monitoring/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onesecondtrader/monitoring/console.py,sha256=1mrojXkyL4ro7ebkvDMGNQiCL-93WEylRuwnfmEKzVs,299
-onesecondtrader/monitoring/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onesecondtrader/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onesecondtrader-0.10.1.dist-info/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-onesecondtrader-0.10.1.dist-info/METADATA,sha256=tGtxjkcRkaC39mi4glsawyMVYHmuBDzpsZQwR4-k3nY,9519
-onesecondtrader-0.10.1.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-onesecondtrader-0.10.1.dist-info/RECORD,,
{onesecondtrader-0.10.1.dist-info → onesecondtrader-0.12.0.dist-info/licenses}/LICENSE
File without changes