Qubx 0.5.7__cp312-cp312-manylinux_2_39_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of Qubx might be problematic. Click here for more details.
- qubx/__init__.py +207 -0
- qubx/_nb_magic.py +100 -0
- qubx/backtester/__init__.py +5 -0
- qubx/backtester/account.py +145 -0
- qubx/backtester/broker.py +87 -0
- qubx/backtester/data.py +296 -0
- qubx/backtester/management.py +378 -0
- qubx/backtester/ome.py +296 -0
- qubx/backtester/optimization.py +201 -0
- qubx/backtester/simulated_data.py +558 -0
- qubx/backtester/simulator.py +362 -0
- qubx/backtester/utils.py +780 -0
- qubx/cli/__init__.py +0 -0
- qubx/cli/commands.py +67 -0
- qubx/connectors/ccxt/__init__.py +0 -0
- qubx/connectors/ccxt/account.py +495 -0
- qubx/connectors/ccxt/broker.py +132 -0
- qubx/connectors/ccxt/customizations.py +193 -0
- qubx/connectors/ccxt/data.py +612 -0
- qubx/connectors/ccxt/exceptions.py +17 -0
- qubx/connectors/ccxt/factory.py +93 -0
- qubx/connectors/ccxt/utils.py +307 -0
- qubx/core/__init__.py +0 -0
- qubx/core/account.py +251 -0
- qubx/core/basics.py +850 -0
- qubx/core/context.py +420 -0
- qubx/core/exceptions.py +38 -0
- qubx/core/helpers.py +480 -0
- qubx/core/interfaces.py +1150 -0
- qubx/core/loggers.py +514 -0
- qubx/core/lookups.py +475 -0
- qubx/core/metrics.py +1512 -0
- qubx/core/mixins/__init__.py +13 -0
- qubx/core/mixins/market.py +94 -0
- qubx/core/mixins/processing.py +428 -0
- qubx/core/mixins/subscription.py +203 -0
- qubx/core/mixins/trading.py +88 -0
- qubx/core/mixins/universe.py +270 -0
- qubx/core/series.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/core/series.pxd +125 -0
- qubx/core/series.pyi +118 -0
- qubx/core/series.pyx +988 -0
- qubx/core/utils.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/core/utils.pyi +6 -0
- qubx/core/utils.pyx +62 -0
- qubx/data/__init__.py +25 -0
- qubx/data/helpers.py +416 -0
- qubx/data/readers.py +1562 -0
- qubx/data/tardis.py +100 -0
- qubx/gathering/simplest.py +88 -0
- qubx/math/__init__.py +3 -0
- qubx/math/stats.py +129 -0
- qubx/pandaz/__init__.py +23 -0
- qubx/pandaz/ta.py +2757 -0
- qubx/pandaz/utils.py +638 -0
- qubx/resources/instruments/symbols-binance.cm.json +1 -0
- qubx/resources/instruments/symbols-binance.json +1 -0
- qubx/resources/instruments/symbols-binance.um.json +1 -0
- qubx/resources/instruments/symbols-bitfinex.f.json +1 -0
- qubx/resources/instruments/symbols-bitfinex.json +1 -0
- qubx/resources/instruments/symbols-kraken.f.json +1 -0
- qubx/resources/instruments/symbols-kraken.json +1 -0
- qubx/ta/__init__.py +0 -0
- qubx/ta/indicators.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/ta/indicators.pxd +149 -0
- qubx/ta/indicators.pyi +41 -0
- qubx/ta/indicators.pyx +787 -0
- qubx/trackers/__init__.py +3 -0
- qubx/trackers/abvanced.py +236 -0
- qubx/trackers/composite.py +146 -0
- qubx/trackers/rebalancers.py +129 -0
- qubx/trackers/riskctrl.py +641 -0
- qubx/trackers/sizers.py +235 -0
- qubx/utils/__init__.py +5 -0
- qubx/utils/_pyxreloader.py +281 -0
- qubx/utils/charting/lookinglass.py +1057 -0
- qubx/utils/charting/mpl_helpers.py +1183 -0
- qubx/utils/marketdata/binance.py +284 -0
- qubx/utils/marketdata/ccxt.py +90 -0
- qubx/utils/marketdata/dukas.py +130 -0
- qubx/utils/misc.py +541 -0
- qubx/utils/ntp.py +63 -0
- qubx/utils/numbers_utils.py +7 -0
- qubx/utils/orderbook.py +491 -0
- qubx/utils/plotting/__init__.py +0 -0
- qubx/utils/plotting/dashboard.py +150 -0
- qubx/utils/plotting/data.py +137 -0
- qubx/utils/plotting/interfaces.py +25 -0
- qubx/utils/plotting/renderers/__init__.py +0 -0
- qubx/utils/plotting/renderers/plotly.py +0 -0
- qubx/utils/runner/__init__.py +1 -0
- qubx/utils/runner/_jupyter_runner.pyt +60 -0
- qubx/utils/runner/accounts.py +88 -0
- qubx/utils/runner/configs.py +65 -0
- qubx/utils/runner/runner.py +470 -0
- qubx/utils/time.py +312 -0
- qubx-0.5.7.dist-info/METADATA +105 -0
- qubx-0.5.7.dist-info/RECORD +100 -0
- qubx-0.5.7.dist-info/WHEEL +4 -0
- qubx-0.5.7.dist-info/entry_points.txt +3 -0
qubx/core/helpers.py
ADDED
|
@@ -0,0 +1,480 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import sched
|
|
3
|
+
import sys
|
|
4
|
+
import time
|
|
5
|
+
from collections import defaultdict, deque
|
|
6
|
+
from inspect import isbuiltin, isclass, isfunction, ismethod, ismethoddescriptor
|
|
7
|
+
from threading import Thread
|
|
8
|
+
from typing import Any, Callable, Dict, List
|
|
9
|
+
|
|
10
|
+
import numpy as np
|
|
11
|
+
import pandas as pd
|
|
12
|
+
from croniter import croniter
|
|
13
|
+
|
|
14
|
+
from qubx import logger
|
|
15
|
+
from qubx.core.basics import SW, CtrlChannel, DataType, Instrument, Timestamped
|
|
16
|
+
from qubx.core.series import OHLCV, Bar, OrderBook, Quote, Trade
|
|
17
|
+
from qubx.utils.time import convert_seconds_to_str, convert_tf_str_td64, interval_to_cron
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class CachedMarketDataHolder:
    """
    Collected cached data updates from StrategyContext.

    Keeps, per instrument:
      - OHLCV series per timeframe (resampled on the fly from incoming updates),
      - the last raw update object seen,
      - bounded deques of non-OHLC events (quotes, trades, ...) per subscription type.
    """

    # - minimal (base) timeframe used for new instruments and as resampling basis
    default_timeframe: np.timedelta64
    # - last bar applied per instrument (None until the first bar arrives)
    _last_bar: dict[Instrument, Bar | None]
    # - per-instrument map of timeframe -> OHLCV series
    _ohlcvs: dict[Instrument, dict[np.timedelta64, OHLCV]]
    # - last raw update (Bar/Quote/Trade/...) per instrument
    _updates: dict[Instrument, Any]

    # - bounded per-instrument, per-subscription-type event buffers
    _instr_to_sub_to_buffer: Dict[Instrument, Dict[str, deque]]

    def __init__(self, default_timeframe: str | None = None, max_buffer_size: int = 10_000) -> None:
        """
        Args:
            default_timeframe: base timeframe string (e.g. "1m"); may be set later
                via update_default_timeframe.
            max_buffer_size: maximum number of non-OHLC events retained per
                (instrument, subscription type) buffer.
        """
        self._ohlcvs = dict()
        self._last_bar = defaultdict(lambda: None)
        self._updates = dict()
        # - nested defaultdicts: any (instrument, event type) access creates a bounded deque
        self._instr_to_sub_to_buffer = defaultdict(lambda: defaultdict(lambda: deque(maxlen=max_buffer_size)))
        if default_timeframe:
            self.update_default_timeframe(default_timeframe)

    def update_default_timeframe(self, default_timeframe: str):
        # - parse timeframe string into np.timedelta64 once, up front
        self.default_timeframe = convert_tf_str_td64(default_timeframe)

    def init_ohlcv(self, instrument: Instrument, max_size=np.inf):
        """Create (or reset) the instrument's series map with a single default-timeframe OHLCV."""
        self._ohlcvs[instrument] = {self.default_timeframe: OHLCV(instrument.symbol, self.default_timeframe, max_size)}

    def remove(self, instrument: Instrument) -> None:
        """Drop all cached state for the instrument (no-op for unknown instruments)."""
        self._ohlcvs.pop(instrument, None)
        self._last_bar.pop(instrument, None)
        self._updates.pop(instrument, None)
        self._instr_to_sub_to_buffer.pop(instrument, None)

    def is_data_ready(self) -> bool:
        """
        Check if at least one symbol had an update.
        """
        # - keys of _ohlcvs are instruments; ready once any of them appears in _updates
        for v in self._ohlcvs.keys():
            if v in self._updates:
                return True
        return False

    @SW.watch("CachedMarketDataHolder")
    def get_ohlcv(self, instrument: Instrument, timeframe: str | None = None, max_size: float | int = np.inf) -> OHLCV:
        """
        Return the OHLCV series for (instrument, timeframe), creating it lazily.

        A newly created series for a timeframe >= default is backfilled by
        resampling the default-timeframe series if one exists.
        """
        tf = convert_tf_str_td64(timeframe) if timeframe else self.default_timeframe

        if instrument not in self._ohlcvs:
            self._ohlcvs[instrument] = {}

        if tf not in self._ohlcvs[instrument]:
            # - check requested timeframe
            new_ohlc = OHLCV(instrument.symbol, tf, max_size)
            if tf < self.default_timeframe:
                # - can't resample downward from the base series; series stays empty
                logger.warning(
                    f"[{instrument.symbol}] Request for timeframe {timeframe} that is smaller then minimal {self.default_timeframe}"
                )
            else:
                # - first try to resample from smaller frame
                if basis := self._ohlcvs[instrument].get(self.default_timeframe):
                    # - replay base bars oldest-first into the new series
                    for b in basis[::-1]:
                        new_ohlc.update_by_bar(b.time, b.open, b.high, b.low, b.close, b.volume, b.bought_volume)

            self._ohlcvs[instrument][tf] = new_ohlc

        return self._ohlcvs[instrument][tf]

    def get_data(self, instrument: Instrument, event_type: str) -> List[Any]:
        """Return a snapshot (copy) of the buffered events for the given subscription type."""
        return list(self._instr_to_sub_to_buffer[instrument][event_type])

    def update(self, instrument: Instrument, event_type: str, data: Any, update_ohlc: bool = False) -> None:
        """
        Record an incoming update; optionally fold it into the OHLCV series.

        Non-OHLC events are always appended to the per-type buffer; the OHLCV
        series are touched only when update_ohlc is True.
        """
        # - store data in buffer if it's not OHLC
        if event_type != DataType.OHLC:
            self._instr_to_sub_to_buffer[instrument][event_type].append(data)

        if not update_ohlc:
            return

        match event_type:
            case DataType.OHLC:
                self.update_by_bar(instrument, data)
            case DataType.QUOTE:
                self.update_by_quote(instrument, data)
            case DataType.TRADE:
                self.update_by_trade(instrument, data)
            case DataType.ORDERBOOK:
                assert isinstance(data, OrderBook)
                # - order books contribute via their top-of-book quote
                self.update_by_quote(instrument, data.to_quote())
            case _:
                # - unknown event types are buffered above but don't affect OHLCV
                pass

    @SW.watch("CachedMarketDataHolder")
    def update_by_bars(self, instrument: Instrument, timeframe: str | np.timedelta64, bars: List[Bar]) -> OHLCV:
        """
        Substitute or create new series based on provided historical bars
        """
        if instrument not in self._ohlcvs:
            self._ohlcvs[instrument] = {}

        tf = convert_tf_str_td64(timeframe) if isinstance(timeframe, str) else timeframe
        new_ohlc = OHLCV(instrument.symbol, tf)
        for b in bars:
            new_ohlc.update_by_bar(b.time, b.open, b.high, b.low, b.close, b.volume, b.bought_volume)
            # - after the loop this holds the most recent bar
            self._updates[instrument] = b

        self._ohlcvs[instrument][tf] = new_ohlc
        return new_ohlc

    @SW.watch("CachedMarketDataHolder")
    def update_by_bar(self, instrument: Instrument, bar: Bar):
        """
        Apply a (possibly partial/repeated) bar, feeding volume *increments*
        into every cached timeframe series for the instrument.
        """
        self._updates[instrument] = bar

        _last_bar = self._last_bar[instrument]
        v_tot_inc = bar.volume
        v_buy_inc = bar.bought_volume

        if _last_bar is not None:
            if _last_bar.time == bar.time:  # just current bar updated
                # - same bar re-sent: pass only the delta since last snapshot
                v_tot_inc -= _last_bar.volume
                v_buy_inc -= _last_bar.bought_volume

            if _last_bar.time > bar.time:  # update is too late - skip it
                return

        if instrument in self._ohlcvs:
            self._last_bar[instrument] = bar
            for ser in self._ohlcvs[instrument].values():
                ser.update_by_bar(bar.time, bar.open, bar.high, bar.low, bar.close, v_tot_inc, v_buy_inc)

    @SW.watch("CachedMarketDataHolder")
    def update_by_quote(self, instrument: Instrument, quote: Quote):
        """Fold a quote's mid price (zero volume) into all cached series."""
        self._updates[instrument] = quote
        series = self._ohlcvs.get(instrument)
        if series:
            for ser in series.values():
                ser.update(quote.time, quote.mid_price(), 0)

    @SW.watch("CachedMarketDataHolder")
    def update_by_trade(self, instrument: Instrument, trade: Trade):
        """Fold a trade (price + volume, buyer-taker volume split) into all cached series."""
        self._updates[instrument] = trade
        series = self._ohlcvs.get(instrument)
        if series:
            total_vol = trade.size
            # - taker >= 1 marks a buyer-initiated trade; presumably an int/flag field - TODO confirm
            bought_vol = total_vol if trade.taker >= 1 else 0.0
            for ser in series.values():
                # - skip series whose newest element is already ahead of this trade
                if len(ser) > 0 and ser[0].time > trade.time:
                    continue
                ser.update(trade.time, trade.price, total_vol, bought_vol)
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
# - schedule specification parser. Splits a spec string into named groups:
#     type      - optional leading keyword before ":" (e.g. "cron", "time")
#     timeframe - optional suffix attached to the type after "." (e.g. "1h")
#     spec      - the whole remainder of the specification, which may contain:
#       time    - one or more comma-separated "H:M[:S]" times of day
#       by      - names listed after "@" (e.g. "Mon,Fri")
#       months/weeks/days/hours/minutes/seconds - signed interval components
#                 such as "-5Min" or "1d" (case-insensitive unit suffixes)
SPEC_REGEX = re.compile(
    r"((?P<type>[A-Za-z]+)(\.?(?P<timeframe>[0-9A-Za-z]+))?\ *:)?"
    r"\ *"
    r"((?P<spec>"
    r"(?P<time>((\d+:\d+(:\d+)?)\ *,?\ *)+)?"
    r"((\ *@\ *)(?P<by>([A-Za-z0-9-,\ ]+)))?"
    r"(("
    r"((?P<months>[-+]?\d+)(months|month|bm|mo))?"
    r"((?P<weeks>[-+]?\d+)(weeks|week|w))?"
    r"((?P<days>[-+]?\d+)(days|day|d))?"
    r"((?P<hours>[-+]?\d+)(hours|hour|h))?"
    r"((?P<minutes>[-+]?\d+)(mins|min|m))?"
    r"((?P<seconds>[-+]?\d+)(sec|s))?"
    r")(\ *)?)*"
    r".*"
    r"))?",
    re.IGNORECASE,
)
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def _mk_cron(time: str, by: list | None) -> str:
|
|
189
|
+
HMS = lambda s: list(map(int, s.split(":") if s.count(":") == 2 else [*s.split(":"), 0])) # noqa: E731
|
|
190
|
+
|
|
191
|
+
h, m, s = HMS(time)
|
|
192
|
+
assert h < 24, f"Wrong value for hour {h}"
|
|
193
|
+
assert m < 60, f"Wrong value for minute {m}"
|
|
194
|
+
assert s < 60, f"Wrong value for seconds {s}"
|
|
195
|
+
b = ",".join(by) if by else "*"
|
|
196
|
+
c = f"{m} {h} * * {b}"
|
|
197
|
+
return c if s == 0 else c + f" {s}"
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def _make_shift(_b, _w, _d, _h, _m, _s):
|
|
201
|
+
D0 = pd.Timedelta(0)
|
|
202
|
+
AS_TD = lambda d: pd.Timedelta(d) # noqa: E731
|
|
203
|
+
P, N = D0, D0
|
|
204
|
+
|
|
205
|
+
# return AS_TD(f'{_b*4}W') + AS_TD(f'{_w}W') + AS_TD(f'{_d}D') + AS_TD(f'{_h}h') + AS_TD(f'{_m}Min') + AS_TD(f'{_s}Sec')
|
|
206
|
+
for t in [
|
|
207
|
+
AS_TD(f"{_b * 4}W"),
|
|
208
|
+
AS_TD(f"{_w}W"),
|
|
209
|
+
AS_TD(f"{_d}D"),
|
|
210
|
+
AS_TD(f"{_h}h"),
|
|
211
|
+
AS_TD(f"{_m}Min"),
|
|
212
|
+
AS_TD(f"{_s}Sec"),
|
|
213
|
+
]:
|
|
214
|
+
if t > D0:
|
|
215
|
+
P += t
|
|
216
|
+
else:
|
|
217
|
+
N += t
|
|
218
|
+
return P, N
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def _parse_schedule_spec(schedule: str) -> dict[str, str]:
    """
    Match the schedule string against SPEC_REGEX and return only the
    named groups that actually captured something.
    """
    match = SPEC_REGEX.match(schedule)
    if match is None:
        return {}
    return {name: value for name, value in match.groupdict().items() if value}
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def process_schedule_spec(spec_str: str | None) -> dict[str, Any]:
    """
    Parse a schedule specification string into a scheduling config dict.

    Depending on the spec, the result is one of:
      - {"type": "cron", "schedule": <cron expr>, "spec": ...}
      - {"type": "bar", "schedule": None, "timeframe": ..., "delay": ..., "spec": ...}
      - {"type": <custom>, "schedule": None, "timeframe": ..., "delay": ..., "spec": ...}
      - {} for an empty/unrecognized spec.
    """
    AS_INT = lambda d, k: int(d.get(k, 0))  # noqa: E731
    # - split on commas/spaces, dropping empty tokens
    S = lambda s: [x for x in re.split(r"[, ]", s) if x]  # noqa: E731
    config = {}

    if not spec_str:
        return config

    # - parse schedule spec
    spec = _parse_schedule_spec(spec_str)

    # - check how to run it
    _T, _S = spec.get("type"), spec.get("spec")
    _F = spec.get("timeframe")
    _t, _by = S(spec.get("time", "")), S(spec.get("by", ""))
    _b, _w, _d = AS_INT(spec, "months"), AS_INT(spec, "weeks"), AS_INT(spec, "days")
    _h, _m, _s = AS_INT(spec, "hours"), AS_INT(spec, "minutes"), AS_INT(spec, "seconds")
    _has_intervals = (_b != 0) or (_w != 0) or (_d != 0) or (_h != 0) or (_m != 0) or (_s != 0)
    # - positive and negative interval parts (negative acts as a delay/shift back)
    _s_pos, _s_neg = _make_shift(_b, _w, _d, _h, _m, _s)
    _shift = _s_pos + _s_neg

    match _T:
        case "cron":
            if not _S:
                raise ValueError(f"Empty specification for cron: {spec_str}")

            # - allow interval shorthand (e.g. "1h") to be converted to cron
            if not croniter.is_valid(_S):
                _S = interval_to_cron(_S)

            if not croniter.is_valid(_S):
                raise ValueError(f"Wrong specification for cron: {spec_str}")

            config = dict(type="cron", schedule=_S, spec=_S)

        case "time":
            # NOTE(review): when several times are listed, each iteration overwrites
            # config, so only the LAST time survives - confirm this is intended
            for t in _t:
                config = dict(type="cron", schedule=_mk_cron(t, _by), spec=_S)

        case None:
            if _t:  # - if time specified
                # NOTE(review): same last-one-wins behavior as the "time" case above
                for t in _t:
                    config = dict(type="cron", schedule=_mk_cron(t, _by), spec=_S)
            else:
                # - check if it's valid cron
                if _S:
                    if croniter.is_valid(_S):
                        config = dict(type="cron", schedule=_S, spec=_S)
                    else:
                        # - try convert to cron
                        _S = interval_to_cron(_S)
                        if croniter.is_valid(_S):
                            config = dict(type="cron", schedule=_S, spec=_S)
                        else:
                            if _has_intervals:
                                # - pure interval spec: schedule on bar close of the
                                #   equivalent timeframe, delayed by the negative part
                                _F = (
                                    convert_seconds_to_str(
                                        int(_s_pos.as_unit("s").to_timedelta64().item().total_seconds())
                                    )
                                    if not _F
                                    else _F
                                )
                                config = dict(type="bar", schedule=None, timeframe=_F, delay=_s_neg, spec=_S)
        case _:
            # - custom event type: pass through with timeframe and total shift as delay
            config = dict(type=_T, schedule=None, timeframe=_F, delay=_shift, spec=_S)

    return config
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
class BasicScheduler:
    """
    Basic scheduler functionality. It helps to create scheduled event task

    Cron-style events are armed on a sched.scheduler driven by an external
    nanosecond time provider; fired events are pushed into a CtrlChannel and
    re-armed for their next occurrence.
    """

    # - channel that receives (None, event_name, (prev_time, trig_time), False) tuples
    _chan: CtrlChannel
    # - underlying stdlib scheduler using self.time_sec as its clock
    _scdlr: sched.scheduler
    # - external time source returning nanoseconds since epoch
    _ns_time_fun: Callable[[], float]
    # - cron iterator per event name
    _crons: dict[str, croniter]
    # - True while the watcher thread is running
    _is_started: bool
    # - earliest next fire time across all armed events
    _next_nearest_time: np.datetime64
    # - next fire time (epoch seconds) per event name
    _next_times: dict[str, float]

    def __init__(self, channel: CtrlChannel, time_provider_ns: Callable[[], float]):
        self._chan = channel
        self._ns_time_fun = time_provider_ns
        self._scdlr = sched.scheduler(self.time_sec)
        self._crons = dict()
        self._is_started = False
        # - sentinel "far future" until the first event is armed
        self._next_nearest_time = np.datetime64(sys.maxsize, "ns")
        self._next_times = dict()

    def time_sec(self) -> float:
        """Current time in epoch seconds (from the nanosecond provider)."""
        return self._ns_time_fun() / 1000000000.0

    def schedule_event(self, cron_schedule: str, event_name: str):
        """
        Register (or replace) an event with the given cron schedule.

        Raises:
            ValueError: if cron_schedule is not a valid cron expression.
        """
        if not croniter.is_valid(cron_schedule):
            raise ValueError(f"Specified schedule {cron_schedule} for {event_name} doesn't have valid cron format !")
        self._crons[event_name] = croniter(cron_schedule, self.time_sec())

        # - if the watcher is already running, arm the new event immediately
        if self._is_started:
            self._arm_schedule(event_name, self.time_sec())

    def next_expected_event_time(self) -> np.datetime64:
        """
        Returns the next scheduled event time
        """
        return self._next_nearest_time

    def get_schedule_for_event(self, event_name: str) -> str | None:
        """Return the cron expression registered for the event, or None."""
        if event_name in self._crons:
            return " ".join(self._crons[event_name].expressions)
        return None

    def get_event_last_time(self, event_name: str) -> pd.Timestamp | None:
        """Return the previous occurrence time of the event, or None if unknown."""
        if event_name in self._crons:
            _iter = self._crons[event_name]
            # - peeking backwards moves the iterator; save and restore its position
            _c = _iter.get_current()
            _t = pd.Timestamp(_iter.get_prev(), unit="s")
            _iter.set_current(_c, force=True)
            return _t
        return None

    def get_event_next_time(self, event_name: str) -> pd.Timestamp | None:
        """Return the next occurrence time of the event (from now), or None if unknown."""
        if event_name in self._crons:
            _iter = self._crons[event_name]
            _t = pd.Timestamp(_iter.get_next(start_time=self.time_sec()), unit="s")
            return _t
        return None

    def _arm_schedule(self, event: str, start_time: float) -> bool:
        """
        Queue the event's next occurrence on the scheduler.

        Returns:
            True if the event was armed, False otherwise.
        """
        iter = self._crons[event]
        prev_time = iter.get_prev()
        next_time = iter.get_next(start_time=start_time)
        if next_time:
            self._scdlr.enterabs(next_time, 1, self._trigger, (event, prev_time, next_time))

            # - update next nearest time
            self._next_times[event] = next_time
            self._next_nearest_time = np.datetime64(int(min(self._next_times.values()) * 1000000000), "ns")
            # logger.debug(f" >>> ({event}) task is scheduled at {self._next_nearest_time}")

            return True
        logger.debug(f"({event}) task is not scheduled")
        return False

    def _trigger(self, event: str, prev_time_sec: float, trig_time: float):
        """Scheduler callback: publish the event and re-arm its next occurrence."""
        now = self.time_sec()

        # - send notification to channel
        self._chan.send((None, event, (prev_time_sec, trig_time), False))

        # - try to arm this event again
        self._arm_schedule(event, now)

    def check_and_run_tasks(self) -> float | None:
        """Run due tasks without blocking; returns delay until the next task, if any."""
        return self._scdlr.run(blocking=False)

    def run(self):
        """
        Arm all registered events and start the background watcher thread.
        No-op (with a warning) if already running.
        """
        if self._is_started:
            logger.warning("Scheduler is already running")
            return

        _has_tasks = False
        for k in self._crons.keys():
            _has_tasks |= self._arm_schedule(k, self.time_sec())

        def _watcher():
            # - poll loop: runs until the queue drains or the channel is closed;
            #   sleeps a fraction of the time-to-next-task, clamped to [0.1, 5] sec
            while r := self.check_and_run_tasks():
                if not self._chan.control.is_set():
                    break
                _delay = max(min(r / 5, 5), 0.1)
                time.sleep(_delay)
            logger.debug("Scheduler is stopped ")
            self._is_started = False

        if _has_tasks:
            Thread(target=_watcher).start()
            self._is_started = True
|
|
403
|
+
|
|
404
|
+
|
|
405
|
+
def extract_parameters_from_object(strategy: Any) -> dict[str, Any]:
    """
    Extract default parameters (as defined in class) and their values from object.

    Walks the class MRO from base to derived and finally the instance itself,
    so more specific definitions override inherited ones.
    """
    from qubx.core.interfaces import IStrategyContext

    def _is_parameter(name: str, value: Any) -> bool:
        # - exclude private names, the strategy context, and callable/class machinery
        if name.startswith("_"):
            return False
        return not (
            isinstance(value, IStrategyContext)
            or isfunction(value)
            or ismethod(value)
            or isbuiltin(value)
            or isclass(value)
            or ismethoddescriptor(value)
        )

    params: dict[str, Any] = {}
    for owner in [*strategy.__class__.mro()[::-1], strategy]:
        if not hasattr(owner, "__dict__"):  # only objects have __dict__ attribute
            continue
        for name, value in owner.__dict__.items():
            if _is_parameter(name, value):
                params[name] = getattr(owner, name, value)
    return params
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
def set_parameters_to_object(strategy: Any, **kwargs):
    """
    Set given parameters values to object.
    Parameter can be set only if it's declared as attribute of object and it's not starting with underscore (_).

    Raises:
        ValueError: if any parameter name starts with an underscore.
    """
    applied = ""
    for name, value in kwargs.items():
        if name.startswith("_"):
            raise ValueError("Internal variable can't be set from external parameter !")
        if not hasattr(strategy, name):
            continue
        # - write straight into __dict__ (bypasses properties/descriptors)
        strategy.__dict__[name] = value
        shown = str(value).replace(">", "").replace("<", "")
        applied += f"\n\tset <green>{name}</green> <- <red>{shown}</red>"

    if applied:
        logger.debug(f"<yellow>{strategy.__class__.__name__}</yellow> new parameters:" + applied)
|
|
444
|
+
|
|
445
|
+
|
|
446
|
+
def extract_price(update: float | Timestamped) -> float:
|
|
447
|
+
"""Extract the price from various types of market data updates.
|
|
448
|
+
|
|
449
|
+
Args:
|
|
450
|
+
update: The market data update, which can be a float, Quote, Trade, Bar or OrderBook.
|
|
451
|
+
|
|
452
|
+
Returns:
|
|
453
|
+
float: The extracted price.
|
|
454
|
+
|
|
455
|
+
Raises:
|
|
456
|
+
ValueError: If the update type is unknown.
|
|
457
|
+
"""
|
|
458
|
+
if isinstance(update, float):
|
|
459
|
+
return update
|
|
460
|
+
elif isinstance(update, Quote) or isinstance(update, OrderBook):
|
|
461
|
+
return update.mid_price()
|
|
462
|
+
elif isinstance(update, Trade):
|
|
463
|
+
return update.price
|
|
464
|
+
elif isinstance(update, Bar):
|
|
465
|
+
return update.close
|
|
466
|
+
elif isinstance(update, OrderBook):
|
|
467
|
+
return update.mid_price()
|
|
468
|
+
else:
|
|
469
|
+
raise ValueError(f"Unknown update type: {type(update)}")
|
|
470
|
+
|
|
471
|
+
|
|
472
|
+
def full_qualified_class_name(obj: object):
    """
    Returns full qualified class name of object.

    Classes from builtins or the __main__ script are returned without a module
    prefix; everything else is "module.ClassName".
    """
    klass = obj.__class__
    module = klass.__module__
    # - fix: the original checked "__builtin__" (the Python 2 name), so built-in
    #   types rendered as "builtins.str" in Python 3; check the modern name too
    if module in ["builtins", "__builtin__", "__main__"]:
        return klass.__qualname__  # avoid outputs like 'builtins.str'
    return module + "." + klass.__name__
|