Qubx 0.2.82__cp311-cp311-manylinux_2_35_x86_64.whl → 0.4.0__cp311-cp311-manylinux_2_35_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qubx/backtester/ome.py +1 -1
- qubx/backtester/queue.py +26 -166
- qubx/backtester/simulator.py +120 -118
- qubx/connectors/ccxt/__init__.py +0 -0
- qubx/connectors/ccxt/ccxt_connector.py +326 -0
- qubx/{impl → connectors/ccxt}/ccxt_customizations.py +9 -11
- qubx/connectors/ccxt/ccxt_exceptions.py +5 -0
- qubx/{impl → connectors/ccxt}/ccxt_trading.py +15 -17
- qubx/{impl → connectors/ccxt}/ccxt_utils.py +63 -5
- qubx/core/account.py +39 -28
- qubx/core/basics.py +17 -5
- qubx/core/context.py +211 -851
- qubx/core/helpers.py +100 -54
- qubx/core/interfaces.py +742 -0
- qubx/core/loggers.py +17 -17
- qubx/core/mixins/__init__.py +5 -0
- qubx/core/mixins/market.py +49 -0
- qubx/core/mixins/processing.py +359 -0
- qubx/core/mixins/subscription.py +64 -0
- qubx/core/mixins/trading.py +73 -0
- qubx/core/mixins/universe.py +143 -0
- qubx/core/series.cpython-311-x86_64-linux-gnu.so +0 -0
- qubx/core/series.pxd +12 -0
- qubx/core/series.pyi +18 -3
- qubx/core/series.pyx +22 -1
- qubx/core/utils.cpython-311-x86_64-linux-gnu.so +0 -0
- qubx/data/readers.py +2 -2
- qubx/gathering/simplest.py +8 -7
- qubx/ta/indicators.cpython-311-x86_64-linux-gnu.so +0 -0
- qubx/trackers/__init__.py +1 -1
- qubx/trackers/composite.py +23 -21
- qubx/trackers/rebalancers.py +18 -16
- qubx/trackers/riskctrl.py +38 -31
- qubx/trackers/sizers.py +68 -30
- qubx/utils/collections.py +53 -0
- qubx/utils/misc.py +36 -1
- qubx/utils/orderbook.py +497 -0
- qubx/utils/runner.py +41 -3
- {qubx-0.2.82.dist-info → qubx-0.4.0.dist-info}/METADATA +2 -1
- qubx-0.4.0.dist-info/RECORD +69 -0
- qubx/core/strategy.py +0 -434
- qubx/impl/ccxt_connector.py +0 -311
- qubx-0.2.82.dist-info/RECORD +0 -59
- {qubx-0.2.82.dist-info → qubx-0.4.0.dist-info}/WHEEL +0 -0
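The rename entries above show the CCXT integration moving from qubx.impl to the new qubx.connectors.ccxt package. A hedged sketch of the corresponding import-path change for downstream code (module paths are taken from the file list above; nothing about the exported names is assumed):

# Import-path migration implied by the rename entries above.
# Old (0.2.82) module locations:
#   qubx.impl.ccxt_trading
#   qubx.impl.ccxt_utils
#   qubx.impl.ccxt_customizations
# New (0.4.0) module locations:
#   qubx.connectors.ccxt.ccxt_trading
#   qubx.connectors.ccxt.ccxt_utils
#   qubx.connectors.ccxt.ccxt_customizations
# A module-level import that tolerates either layout:
try:
    from qubx.connectors.ccxt import ccxt_trading   # 0.4.0 layout
except ImportError:
    from qubx.impl import ccxt_trading              # 0.2.82 layout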
qubx/backtester/ome.py
CHANGED
qubx/backtester/queue.py
CHANGED
@@ -65,10 +65,6 @@ class DataLoader:
     def instrument(self) -> Instrument:
         return self._instrument
 
-    @property
-    def symbol(self) -> str:
-        return self._instrument.symbol
-
     @property
     def data_type(self) -> str:
         if self._output_type:
@@ -76,16 +72,16 @@ class DataLoader:
         return self._TYPE_MAPPERS.get(self._data_type, self._data_type)
 
     def __hash__(self) -> int:
-        return hash((self._instrument
+        return hash((self._instrument, self._data_type))
 
     def __eq__(self, other: Any) -> bool:
         if not isinstance(other, DataLoader):
             return False
-        return self._instrument
+        return self._instrument == other._instrument and self._data_type == other._data_type
 
 
 class SimulatedDataQueue:
-    _loaders: dict[
+    _loaders: dict[Instrument, list[DataLoader]]
 
     def __init__(self):
         self._loaders = defaultdict(list)
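The hunk above re-keys DataLoader identity and the queue's loader map on the Instrument object rather than a symbol string. A minimal, self-contained sketch of what an (instrument, data_type) identity allows (the dataclasses below are stand-ins for illustration, not Qubx types):

# Sketch of the (instrument, data_type) identity contract implied by the new
# __hash__/__eq__. Stand-in dataclasses only; the real Instrument lives in qubx.core.basics.
from dataclasses import dataclass

@dataclass(frozen=True)
class Instrument:            # stand-in for qubx.core.basics.Instrument
    symbol: str
    exchange: str

@dataclass(frozen=True)
class LoaderKey:             # mirrors what DataLoader now hashes and compares on
    instrument: Instrument
    data_type: str

a = LoaderKey(Instrument("BTCUSDT", "BINANCE"), "ohlc")
b = LoaderKey(Instrument("BTCUSDT", "BINANCE.UM"), "ohlc")
assert a != b                # same symbol on different exchanges stays distinct;
                             # a plain symbol-string key would collapse the two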
@@ -104,7 +100,7 @@ class SimulatedDataQueue:
     def __add__(self, loader: DataLoader) -> "SimulatedDataQueue":
         self._latest_loader_index += 1
         new_loader_index = self._latest_loader_index
-        self._loaders[loader.
+        self._loaders[loader.instrument].append(loader)
         self._index_to_loader[new_loader_index] = loader
         self._loader_to_index[loader] = new_loader_index
         if self.is_running:
@@ -113,7 +109,7 @@ class SimulatedDataQueue:
 
     def __sub__(self, loader: DataLoader) -> "SimulatedDataQueue":
         loader_index = self._loader_to_index[loader]
-        self._loaders[loader.
+        self._loaders[loader.instrument].remove(loader)
         del self._index_to_loader[loader_index]
         del self._loader_to_index[loader]
         del self._index_to_chunk_size[loader_index]
@@ -121,12 +117,12 @@ class SimulatedDataQueue:
         self._removed_loader_indices.add(loader_index)
         return self
 
-    def get_loader(self,
-        loaders = self._loaders[
+    def get_loader(self, instrument: Instrument, data_type: str) -> DataLoader:
+        loaders = self._loaders[instrument]
         for loader in loaders:
             if loader.data_type == data_type:
                 return loader
-        raise ValueError(f"Loader for {
+        raise ValueError(f"Loader for {instrument} and {data_type} not found")
 
     def create_iterable(self, start: str | pd.Timestamp, stop: str | pd.Timestamp) -> Iterator:
         self._start = start
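With the signatures above, loaders are now grouped and looked up per Instrument instead of per symbol string. A toy sketch of that registry and lookup, using stand-in classes rather than the real qubx DataLoader/Instrument:

# Toy version of the instrument-keyed registry that get_loader() searches.
from collections import defaultdict
from dataclasses import dataclass

@dataclass(frozen=True)
class Instrument:
    symbol: str
    exchange: str

@dataclass(frozen=True)
class Loader:                 # stand-in for DataLoader
    instrument: Instrument
    data_type: str

loaders: dict[Instrument, list[Loader]] = defaultdict(list)
btc = Instrument("BTCUSDT", "BINANCE")
loaders[btc].append(Loader(btc, "ohlc"))
loaders[btc].append(Loader(btc, "trade"))

def get_loader(instrument: Instrument, data_type: str) -> Loader:
    # same shape as the lookup above: linear scan of the per-instrument list
    for loader in loaders[instrument]:
        if loader.data_type == data_type:
            return loader
    raise ValueError(f"Loader for {instrument} and {data_type} not found")

assert get_loader(btc, "trade").data_type == "trade"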
@@ -146,7 +142,7 @@ class SimulatedDataQueue:
         return self
 
     @_SW.watch("DataQueue")
-    def __next__(self) -> tuple[
+    def __next__(self) -> tuple[Instrument, str, Any]:
         if not self._event_heap:
             raise StopIteration
 
@@ -162,6 +158,7 @@ class SimulatedDataQueue:
 
         loader = self._index_to_loader[loader_index]
         data_type = loader.data_type
+        # TODO: return an additional flag to indicate if the event is historical
         if dt < self._current_time: # type: ignore
             data_type = f"hist_{data_type}"
         else:
@@ -172,7 +169,7 @@ class SimulatedDataQueue:
         if chunk_index + 1 == chunk_size:
             self._add_chunk_to_heap(loader_index)
 
-        return loader.
+        return loader.instrument, data_type, event
 
     @_SW.watch("DataQueue")
     def _add_chunk_to_heap(self, loader_index: int):
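__next__ now yields (Instrument, data_type, event) tuples and prefixes the data type with "hist_" for events dated before the current simulation time. A toy sketch of that heap-ordered merge and labelling rule (integer timestamps and the clock-advance step are simplifications, since the body of the else branch is not shown in this diff):

# Toy sketch of the time-ordered merge behind __next__: a heap keyed by
# timestamp interleaves per-loader events, and events older than the current
# simulation time are relabelled with a "hist_" prefix.
import heapq

heap: list[tuple[int, int, str, str]] = []   # (time, loader_index, data_type, payload)
for t, idx, dtype, payload in [
    (5, 0, "ohlc", "bar@5"),
    (3, 1, "trade", "trade@3"),              # older than the current time -> historical
    (9, 0, "ohlc", "bar@9"),
]:
    heapq.heappush(heap, (t, idx, dtype, payload))

current_time = 4
while heap:
    t, idx, dtype, payload = heapq.heappop(heap)
    if t < current_time:
        dtype = f"hist_{dtype}"              # same labelling convention as the diff
    else:
        current_time = t                     # advance the toy clock (assumption for this sketch)
    print(t, dtype, payload)
# -> 3 hist_trade trade@3 / 5 ohlc bar@5 / 9 ohlc bar@9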
@@ -193,143 +190,6 @@ class SimulatedDataQueue:
             return []
 
 
-class SimulatedDataQueueWithThreads(SimulatedDataQueue):
-    _loaders: dict[str, list[DataLoader]]
-
-    def __init__(self, workers: int = 4, prefetch_chunk_count: int = 1):
-        self._loaders = defaultdict(list)
-        self._start = None
-        self._stop = None
-        self._current_time = None
-        self._index_to_loader: dict[int, DataLoader] = {}
-        self._index_to_prefetch: dict[int, list[Future]] = defaultdict(list)
-        self._index_to_done: dict[int, bool] = defaultdict(bool)
-        self._loader_to_index = {}
-        self._index_to_chunk_size = {}
-        self._index_to_iterator = {}
-        self._latest_loader_index = -1
-        self._removed_loader_indices = set()
-        # TODO: potentially use ProcessPoolExecutor for better performance
-        self._pool = ThreadPoolExecutor(max_workers=workers)
-        self._prefetch_chunk_count = prefetch_chunk_count
-
-    @property
-    def is_running(self) -> bool:
-        return self._current_time is not None
-
-    def __add__(self, loader: DataLoader) -> "SimulatedDataQueueWithThreads":
-        self._latest_loader_index += 1
-        new_loader_index = self._latest_loader_index
-        self._loaders[loader.symbol].append(loader)
-        self._index_to_loader[new_loader_index] = loader
-        self._loader_to_index[loader] = new_loader_index
-        if self.is_running:
-            self._submit_chunk(new_loader_index)
-            self._add_chunk_to_heap(new_loader_index)
-        return self
-
-    def __sub__(self, loader: DataLoader) -> "SimulatedDataQueueWithThreads":
-        loader_index = self._loader_to_index[loader]
-        self._loaders[loader.symbol].remove(loader)
-        del self._index_to_loader[loader_index]
-        del self._loader_to_index[loader]
-        del self._index_to_chunk_size[loader_index]
-        del self._index_to_iterator[loader_index]
-        del self._index_to_done[loader_index]
-        for future in self._index_to_prefetch[loader_index]:
-            future.cancel()
-        del self._index_to_prefetch[loader_index]
-        self._removed_loader_indices.add(loader_index)
-        return self
-
-    def get_loader(self, symbol: str, data_type: str) -> DataLoader:
-        loaders = self._loaders[symbol]
-        for loader in loaders:
-            if loader.data_type == data_type:
-                return loader
-        raise ValueError(f"Loader for {symbol} and {data_type} not found")
-
-    def create_iterable(self, start: str | pd.Timestamp, stop: str | pd.Timestamp) -> Iterator:
-        self._start = start
-        self._stop = stop
-        self._current_time = None
-        return self
-
-    def __iter__(self) -> Iterator:
-        logger.debug("Initializing chunks for each loader")
-        self._current_time = self._start
-        self._index_to_chunk_size = {}
-        self._index_to_iterator = {}
-        self._event_heap = []
-        self._submit_chunk_prefetchers()
-        for loader_index in self._index_to_loader.keys():
-            self._add_chunk_to_heap(loader_index)
-        return self
-
-    @_SW.watch("DataQueue")
-    def __next__(self) -> tuple[str, str, Any]:
-        self._submit_chunk_prefetchers()
-
-        if not self._event_heap:
-            raise StopIteration
-
-        loader_index = None
-
-        # get the next event from the heap
-        # if the loader_index is in the removed_loader_indices, skip it (optimization to avoid unnecessary heap operations)
-        while self._event_heap and (loader_index is None or loader_index in self._removed_loader_indices):
-            dt, loader_index, chunk_index, event = heapq.heappop(self._event_heap)
-
-        if loader_index is None or loader_index in self._removed_loader_indices:
-            raise StopIteration
-
-        self._current_time = dt
-        chunk_size = self._index_to_chunk_size[loader_index]
-        if chunk_index + 1 == chunk_size:
-            self._add_chunk_to_heap(loader_index)
-
-        loader = self._index_to_loader[loader_index]
-        return loader.symbol, loader.data_type, event
-
-    @_SW.watch("DataQueue")
-    def _add_chunk_to_heap(self, loader_index: int):
-        futures = self._index_to_prefetch[loader_index]
-        if not futures and not self._index_to_done[loader_index]:
-            loader = self._index_to_loader[loader_index]
-            logger.error(f"Error state: No submitted tasks for loader {loader.symbol} {loader.data_type}")
-            raise SimulatorError("No submitted tasks for loader")
-        elif self._index_to_done[loader_index]:
-            return
-
-        # wait for future to finish if needed
-        chunk = futures.pop(0).result()
-        self._index_to_chunk_size[loader_index] = len(chunk)
-        for chunk_index, event in enumerate(chunk):
-            dt = event.time # type: ignore
-            heapq.heappush(self._event_heap, (dt, loader_index, chunk_index, event))
-
-    def _next_chunk(self, index: int) -> list[Any]:
-        if index not in self._index_to_iterator:
-            self._index_to_iterator[index] = self._index_to_loader[index].load(self._current_time, self._stop) # type: ignore
-        iterator = self._index_to_iterator[index]
-        try:
-            return next(iterator)
-        except StopIteration:
-            return []
-
-    def _submit_chunk_prefetchers(self):
-        for index in self._index_to_loader.keys():
-            if len(self._index_to_prefetch[index]) < self._prefetch_chunk_count:
-                self._submit_chunk(index)
-
-    def _submit_chunk(self, loader_index: int) -> None:
-        future = self._pool.submit(self._next_chunk, loader_index)
-        self._index_to_prefetch[loader_index].append(future)
-
-    def __del__(self):
-        self._pool.shutdown()
-
-
 class EventBatcher:
     _BATCH_SETTINGS = {
         "trade": "1Sec",
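The removed SimulatedDataQueueWithThreads prefetched upcoming chunks with a ThreadPoolExecutor and consumed them via futures. A minimal sketch of that prefetch pattern in isolation (the toy load_chunk function stands in for DataLoader.load):

# Sketch of the chunk-prefetch pattern the removed class used: keep a small
# queue of futures per loader so the next chunk is (ideally) already loaded
# when the consumer asks for it.
from concurrent.futures import ThreadPoolExecutor, Future

def load_chunk(start: int) -> list[int]:
    # stand-in for DataLoader.load(...): produce the next chunk of events
    return list(range(start, start + 3))

pool = ThreadPoolExecutor(max_workers=2)
prefetch: list[Future] = []
PREFETCH_COUNT = 2

next_start = 0
for _ in range(4):                        # consume four chunks
    while len(prefetch) < PREFETCH_COUNT:
        prefetch.append(pool.submit(load_chunk, next_start))
        next_start += 3
    chunk = prefetch.pop(0).result()      # blocks only if the chunk is not ready yet
    print(chunk)
pool.shutdown()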
@@ -342,48 +202,48 @@ class EventBatcher:
         self._batch_settings = {**self._BATCH_SETTINGS, **kwargs}
         self._batch_settings = {k: pd.Timedelta(v) for k, v in self._batch_settings.items()}
 
-    def __iter__(self) -> Iterator[tuple[
+    def __iter__(self) -> Iterator[tuple[Instrument, str, Any]]:
         if self._passthrough:
             _iter = iter(self.source_iterator) if isinstance(self.source_iterator, Iterable) else self.source_iterator
             yield from _iter
             return
 
-
+        last_instrument: Instrument = None # type: ignore
         last_data_type: str = None # type: ignore
         buffer = []
-        for
+        for instrument, data_type, event in self.source_iterator:
             time: dt_64 = event.time # type: ignore
 
             if data_type not in self._batch_settings:
                 if buffer:
-                    yield
+                    yield last_instrument, last_data_type, self._batch_event(buffer)
                     buffer = []
-                yield
-
+                yield instrument, data_type, event
+                last_instrument, last_data_type = instrument, data_type
                 continue
 
-            if
+            if instrument != last_instrument:
                 if buffer:
-                    yield
-
+                    yield last_instrument, last_data_type, self._batch_event(buffer)
+                last_instrument, last_data_type = instrument, data_type
                 buffer = [event]
                 continue
 
             if buffer and data_type != last_data_type:
-                yield
+                yield instrument, last_data_type, buffer
                 buffer = [event]
-
+                last_instrument, last_data_type = instrument, data_type
                 continue
 
-
+            last_instrument, last_data_type = instrument, data_type
             buffer.append(event)
             if pd.Timedelta(time - buffer[0].time) >= self._batch_settings[data_type]:
-                yield
+                yield instrument, data_type, self._batch_event(buffer)
                 buffer = []
-
+                last_instrument, last_data_type = None, None # type: ignore
 
         if buffer:
-            yield
+            yield last_instrument, last_data_type, self._batch_event(buffer)
 
     @staticmethod
     def _batch_event(buffer: list[Any]) -> Any: