Qubx 0.3.0__tar.gz → 0.4.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of Qubx might be problematic.
- {qubx-0.3.0 → qubx-0.4.2}/PKG-INFO +1 -1
- {qubx-0.3.0 → qubx-0.4.2}/pyproject.toml +7 -1
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/backtester/ome.py +1 -1
- qubx-0.4.2/src/qubx/backtester/queue.py +250 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/backtester/simulator.py +174 -154
- qubx-0.4.2/src/qubx/connectors/ccxt/ccxt_connector.py +676 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/connectors/ccxt/ccxt_exceptions.py +4 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/connectors/ccxt/ccxt_trading.py +22 -29
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/connectors/ccxt/ccxt_utils.py +5 -5
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/account.py +39 -28
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/basics.py +59 -9
- qubx-0.4.2/src/qubx/core/context.py +289 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/exceptions.py +4 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/helpers.py +82 -52
- qubx-0.4.2/src/qubx/core/interfaces.py +755 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/loggers.py +17 -17
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/metrics.py +99 -43
- qubx-0.4.2/src/qubx/core/mixins/__init__.py +5 -0
- qubx-0.4.2/src/qubx/core/mixins/market.py +77 -0
- qubx-0.4.2/src/qubx/core/mixins/processing.py +389 -0
- qubx-0.4.2/src/qubx/core/mixins/subscription.py +78 -0
- qubx-0.4.2/src/qubx/core/mixins/trading.py +73 -0
- qubx-0.4.2/src/qubx/core/mixins/universe.py +140 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/series.pyi +1 -1
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/data/helpers.py +27 -18
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/data/readers.py +5 -4
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/gathering/simplest.py +8 -7
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/pandaz/utils.py +12 -8
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/trackers/composite.py +23 -21
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/trackers/rebalancers.py +9 -9
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/trackers/riskctrl.py +38 -31
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/trackers/sizers.py +18 -14
- qubx-0.4.2/src/qubx/utils/collections.py +53 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/misc.py +36 -1
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/ntp.py +8 -3
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/runner.py +77 -15
- qubx-0.4.2/src/qubx/utils/threading.py +14 -0
- qubx-0.3.0/src/qubx/backtester/queue.py +0 -390
- qubx-0.3.0/src/qubx/connectors/ccxt/ccxt_connector.py +0 -319
- qubx-0.3.0/src/qubx/core/context.py +0 -934
- qubx-0.3.0/src/qubx/core/strategy.py +0 -436
- {qubx-0.3.0 → qubx-0.4.2}/README.md +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/build.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/_nb_magic.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/backtester/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/backtester/optimization.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/connectors/ccxt/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/connectors/ccxt/ccxt_customizations.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/lookups.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/series.pxd +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/series.pyx +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/utils.pyi +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/core/utils.pyx +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/data/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/math/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/math/stats.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/pandaz/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/pandaz/ta.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/ta/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/ta/indicators.pxd +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/ta/indicators.pyi +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/ta/indicators.pyx +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/trackers/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/__init__.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/_pyxreloader.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/charting/lookinglass.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/charting/mpl_helpers.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/marketdata/binance.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/orderbook.py +0 -0
- {qubx-0.3.0 → qubx-0.4.2}/src/qubx/utils/time.py +0 -0
{qubx-0.3.0 → qubx-0.4.2}/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "Qubx"
-version = "0.3.0"
+version = "0.4.2"
 description = "Qubx - quantitative trading framework"
 authors = ["Dmitry Marienko <dmitry@gmail.com>", "Yuriy Arabskyy <yuriy.arabskyy@gmail.com>"]
 readme = "README.md"
@@ -62,9 +62,15 @@ build-backend = "poetry.core.masonry.api"
 script = "build.py"
 generate-setup-file = false
 
+[tool.poetry.scripts]
+qubx = 'qubx.utils.runner:run'
+
 [tool.poetry.group.test.dependencies]
 pytest = "^7.1.3"
 pytest-mock = "*"
 
 [tool.pytest.ini_options]
 pythonpath = ["src"]
+filterwarnings = [
+    "ignore:.*Jupyter is migrating.*:DeprecationWarning",
+]
qubx-0.4.2/src/qubx/backtester/queue.py (new file)
@@ -0,0 +1,250 @@
+import pandas as pd
+import heapq
+
+from dataclasses import dataclass
+from collections import defaultdict
+from typing import Any, Iterator, Iterable
+from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, Future
+
+from qubx import logger
+from qubx.core.basics import Instrument, dt_64, BatchEvent
+from qubx.data.readers import DataReader, DataTransformer
+from qubx.utils.misc import Stopwatch
+from qubx.core.exceptions import SimulatorError
+
+
+_SW = Stopwatch()
+
+
+class DataLoader:
+    _TYPE_MAPPERS = {"agg_trade": "trade", "bar": "ohlc", "ohlcv": "ohlc"}
+
+    def __init__(
+        self,
+        transformer: DataTransformer,
+        reader: DataReader,
+        instrument: Instrument,
+        timeframe: str | None,
+        warmup_period: str | None = None,
+        data_type: str = "ohlc",
+        output_type: str | None = None,  # transformer can sometimes map to a different output type
+        chunksize: int = 5_000,
+    ) -> None:
+        self._instrument = instrument
+        self._spec = f"{instrument.exchange}:{instrument.symbol}"
+        self._reader = reader
+        self._transformer = transformer
+        self._warmup_period = warmup_period
+        self._timeframe = timeframe
+        self._data_type = data_type
+        self._output_type = output_type
+        self._first_load = True
+        self._chunksize = chunksize
+
+    def load(self, start: str | pd.Timestamp, end: str | pd.Timestamp) -> Iterator:
+        if self._first_load:
+            if self._warmup_period:
+                start = pd.Timestamp(start) - pd.Timedelta(self._warmup_period)
+            self._first_load = False
+
+        args = dict(
+            data_id=self._spec,
+            start=start,
+            stop=end,
+            transform=self._transformer,
+            data_type=self._data_type,
+            chunksize=self._chunksize,
+        )
+
+        if self._timeframe:
+            args["timeframe"] = self._timeframe
+
+        return self._reader.read(**args)  # type: ignore
+
+    @property
+    def instrument(self) -> Instrument:
+        return self._instrument
+
+    @property
+    def data_type(self) -> str:
+        if self._output_type:
+            return self._output_type
+        return self._TYPE_MAPPERS.get(self._data_type, self._data_type)
+
+    def __hash__(self) -> int:
+        return hash((self._instrument, self._data_type))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, DataLoader):
+            return False
+        return self._instrument == other._instrument and self._data_type == other._data_type
+
+
+class SimulatedDataQueue:
+    _loaders: dict[Instrument, list[DataLoader]]
+
+    def __init__(self):
+        self._loaders = defaultdict(list)
+        self._start = None
+        self._stop = None
+        self._current_time = None
+        self._index_to_loader: dict[int, DataLoader] = {}
+        self._loader_to_index = {}
+        self._latest_loader_index = -1
+        self._removed_loader_indices = set()
+
+    @property
+    def is_running(self) -> bool:
+        return self._current_time is not None
+
+    def __add__(self, loader: DataLoader) -> "SimulatedDataQueue":
+        self._latest_loader_index += 1
+        new_loader_index = self._latest_loader_index
+        self._loaders[loader.instrument].append(loader)
+        self._index_to_loader[new_loader_index] = loader
+        self._loader_to_index[loader] = new_loader_index
+        if self.is_running:
+            self._add_chunk_to_heap(new_loader_index)
+        return self
+
+    def __sub__(self, loader: DataLoader) -> "SimulatedDataQueue":
+        loader_index = self._loader_to_index[loader]
+        self._loaders[loader.instrument].remove(loader)
+        del self._index_to_loader[loader_index]
+        del self._loader_to_index[loader]
+        del self._index_to_chunk_size[loader_index]
+        del self._index_to_iterator[loader_index]
+        self._removed_loader_indices.add(loader_index)
+        return self
+
+    def get_loader(self, instrument: Instrument, data_type: str) -> DataLoader:
+        loaders = self._loaders[instrument]
+        for loader in loaders:
+            if loader.data_type == data_type:
+                return loader
+        raise ValueError(f"Loader for {instrument} and {data_type} not found")
+
+    def create_iterable(self, start: str | pd.Timestamp, stop: str | pd.Timestamp) -> Iterator:
+        self._start = start
+        self._stop = stop
+        self._current_time = None
+        return self
+
+    def __iter__(self) -> Iterator:
+        logger.debug("Initializing chunks for each loader")
+        assert self._start is not None
+        self._current_time = int(pd.Timestamp(self._start).timestamp() * 1e9)
+        self._index_to_chunk_size = {}
+        self._index_to_iterator = {}
+        self._event_heap = []
+        for loader_index in self._index_to_loader.keys():
+            self._add_chunk_to_heap(loader_index)
+        return self
+
+    @_SW.watch("DataQueue")
+    def __next__(self) -> tuple[Instrument, str, Any]:
+        if not self._event_heap:
+            raise StopIteration
+
+        loader_index = None
+
+        # get the next event from the heap
+        # if the loader_index is in the removed_loader_indices, skip it (optimization to avoid unnecessary heap operations)
+        while self._event_heap and (loader_index is None or loader_index in self._removed_loader_indices):
+            dt, loader_index, chunk_index, event = heapq.heappop(self._event_heap)
+
+        if loader_index is None or loader_index in self._removed_loader_indices:
+            raise StopIteration
+
+        loader = self._index_to_loader[loader_index]
+        data_type = loader.data_type
+        # TODO: return an additional flag to indicate if the event is historical
+        if dt < self._current_time:  # type: ignore
+            data_type = f"hist_{data_type}"
+        else:
+            # only update the current time if the event is not historical
+            self._current_time = dt
+
+        chunk_size = self._index_to_chunk_size[loader_index]
+        if chunk_index + 1 == chunk_size:
+            self._add_chunk_to_heap(loader_index)
+
+        return loader.instrument, data_type, event
+
+    @_SW.watch("DataQueue")
+    def _add_chunk_to_heap(self, loader_index: int):
+        chunk = self._next_chunk(loader_index)
+        self._index_to_chunk_size[loader_index] = len(chunk)
+        for chunk_index, event in enumerate(chunk):
+            dt = event.time  # type: ignore
+            heapq.heappush(self._event_heap, (dt, loader_index, chunk_index, event))
+
+    @_SW.watch("DataQueue")
+    def _next_chunk(self, index: int) -> list[Any]:
+        if index not in self._index_to_iterator:
+            self._index_to_iterator[index] = self._index_to_loader[index].load(pd.Timestamp(self._current_time, unit="ns"), self._stop)  # type: ignore
+        iterator = self._index_to_iterator[index]
+        try:
+            return next(iterator)
+        except StopIteration:
+            return []
+
+
+class EventBatcher:
+    _BATCH_SETTINGS = {
+        "trade": "1Sec",
+        "orderbook": "1Sec",
+    }
+
+    def __init__(self, source_iterator: Iterator | Iterable, passthrough: bool = False, **kwargs):
+        self.source_iterator = source_iterator
+        self._passthrough = passthrough
+        self._batch_settings = {**self._BATCH_SETTINGS, **kwargs}
+        self._batch_settings = {k: pd.Timedelta(v) for k, v in self._batch_settings.items()}
+
+    def __iter__(self) -> Iterator[tuple[Instrument, str, Any]]:
+        if self._passthrough:
+            _iter = iter(self.source_iterator) if isinstance(self.source_iterator, Iterable) else self.source_iterator
+            yield from _iter
+            return
+
+        last_instrument: Instrument = None  # type: ignore
+        last_data_type: str = None  # type: ignore
+        buffer = []
+        for instrument, data_type, event in self.source_iterator:
+            time: dt_64 = event.time  # type: ignore
+
+            if data_type not in self._batch_settings:
+                if buffer:
+                    yield last_instrument, last_data_type, self._batch_event(buffer)
+                    buffer = []
+                yield instrument, data_type, event
+                last_instrument, last_data_type = instrument, data_type
+                continue
+
+            if instrument != last_instrument:
+                if buffer:
+                    yield last_instrument, last_data_type, self._batch_event(buffer)
+                last_instrument, last_data_type = instrument, data_type
+                buffer = [event]
+                continue
+
+            if buffer and data_type != last_data_type:
+                yield instrument, last_data_type, buffer
+                buffer = [event]
+                last_instrument, last_data_type = instrument, data_type
+                continue
+
+            last_instrument, last_data_type = instrument, data_type
+            buffer.append(event)
+            if pd.Timedelta(time - buffer[0].time) >= self._batch_settings[data_type]:
+                yield instrument, data_type, self._batch_event(buffer)
+                buffer = []
+                last_instrument, last_data_type = None, None  # type: ignore
+
+        if buffer:
+            yield last_instrument, last_data_type, self._batch_event(buffer)
+
+    @staticmethod
+    def _batch_event(buffer: list[Any]) -> Any:
+        return BatchEvent(buffer[-1].time, buffer) if len(buffer) > 1 else buffer[0]