Qubx 0.4.3__cp311-cp311-manylinux_2_35_x86_64.whl → 0.5.0__cp311-cp311-manylinux_2_35_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of Qubx has been flagged as potentially problematic.
- qubx/__init__.py +9 -6
- qubx/_nb_magic.py +1 -2
- qubx/backtester/account.py +134 -0
- qubx/backtester/broker.py +82 -0
- qubx/backtester/data.py +269 -0
- qubx/backtester/simulated_data.py +517 -0
- qubx/backtester/simulator.py +177 -904
- qubx/backtester/utils.py +691 -0
- qubx/connectors/ccxt/account.py +496 -0
- qubx/connectors/ccxt/broker.py +124 -0
- qubx/connectors/ccxt/ccxt_connector.py +0 -676
- qubx/connectors/ccxt/{ccxt_customizations.py → customizations.py} +48 -1
- qubx/connectors/ccxt/data.py +601 -0
- qubx/connectors/ccxt/{ccxt_exceptions.py → exceptions.py} +8 -0
- qubx/connectors/ccxt/factory.py +94 -0
- qubx/connectors/ccxt/{ccxt_utils.py → utils.py} +152 -14
- qubx/core/account.py +169 -160
- qubx/core/basics.py +351 -136
- qubx/core/context.py +234 -118
- qubx/core/exceptions.py +8 -0
- qubx/core/helpers.py +73 -20
- qubx/core/interfaces.py +463 -193
- qubx/core/loggers.py +44 -16
- qubx/core/lookups.py +82 -140
- qubx/core/metrics.py +9 -11
- qubx/core/mixins/__init__.py +1 -1
- qubx/core/mixins/market.py +34 -24
- qubx/core/mixins/processing.py +168 -135
- qubx/core/mixins/subscription.py +190 -68
- qubx/core/mixins/trading.py +33 -22
- qubx/core/mixins/universe.py +72 -54
- qubx/core/series.cpython-311-x86_64-linux-gnu.so +0 -0
- qubx/core/utils.cpython-311-x86_64-linux-gnu.so +0 -0
- qubx/core/utils.pyx +1 -1
- qubx/data/helpers.py +32 -30
- qubx/data/readers.py +486 -184
- qubx/data/tardis.py +100 -0
- qubx/pandaz/utils.py +24 -6
- qubx/plotting/__init__.py +0 -0
- qubx/plotting/dashboard.py +151 -0
- qubx/plotting/data.py +137 -0
- qubx/plotting/interfaces.py +25 -0
- qubx/plotting/renderers/__init__.py +0 -0
- qubx/plotting/renderers/plotly.py +0 -0
- qubx/ta/indicators.cpython-311-x86_64-linux-gnu.so +0 -0
- qubx/utils/marketdata/ccxt.py +88 -0
- qubx/utils/marketdata/dukas.py +130 -0
- qubx/utils/misc.py +83 -5
- qubx/utils/numbers_utils.py +7 -0
- qubx/utils/orderbook.py +15 -21
- qubx/utils/runner.py +200 -99
- qubx/utils/time.py +62 -15
- {qubx-0.4.3.dist-info → qubx-0.5.0.dist-info}/METADATA +29 -2
- qubx-0.5.0.dist-info/RECORD +86 -0
- qubx/backtester/queue.py +0 -250
- qubx/connectors/ccxt/ccxt_trading.py +0 -242
- qubx/utils/collections.py +0 -53
- qubx-0.4.3.dist-info/RECORD +0 -71
- qubx-0.4.3.dist-info/entry_points.txt +0 -3
- /qubx/utils/{threading.py → helpers.py} +0 -0
- {qubx-0.4.3.dist-info → qubx-0.5.0.dist-info}/WHEEL +0 -0
qubx/__init__.py
CHANGED
@@ -15,7 +15,10 @@ def formatter(record):
     if record["level"].name in {"WARNING", "SNAKY"}:
         fmt = "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - %s" % fmt
 
-    prefix =
+    prefix = (
+        "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> [ <level>%s</level> ] <cyan>({module})</cyan> "
+        % record["level"].icon
+    )
 
     if record["exception"] is not None:
         # stackprinter.set_excepthook(style='darkbg2')
@@ -29,7 +32,6 @@ def formatter(record):
 
 
 class QubxLogConfig:
-
     @staticmethod
     def get_log_level():
         return os.getenv("QUBX_LOG_LEVEL", "DEBUG")
@@ -107,14 +109,15 @@ if runtime_env() in ["notebook", "shell"]:
         if line:
             if "dark" in line.lower():
                 set_mpl_theme("dark")
+                # - temporary workaround for vscode - dark theme not applying to ipywidgets in notebook
+                # - see https://github.com/microsoft/vscode-jupyter/issues/7161
+                if runtime_env() == "notebook":
+                    _vscode_clr_trick = """from IPython.display import display, HTML; display(HTML("<style> .cell-output-ipywidget-background { background-color: transparent !important; } :root { --jp-widgets-color: var(--vscode-editor-foreground); --jp-widgets-font-size: var(--vscode-editor-font-size); } </style>"))"""
+                    exec(_vscode_clr_trick, self.shell.user_ns)
 
             elif "light" in line.lower():
                 set_mpl_theme("light")
 
-        # install additional plotly helpers
-        # from qube.charting.plot_helpers import install_plotly_helpers
-        # install_plotly_helpers()
-
     def _get_manager(self):
         if self.__manager is None:
             import multiprocessing as m
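As context for the change above: the formatter is a callable that loguru invokes per record, so the prefix can interpolate the record's level icon at call time. A minimal standalone sketch of that pattern follows (the `logger.add(..., format=callable)` form is loguru's documented API; the simplified `fmt` body and the trailing newline are assumptions for illustration, not the exact qubx code):

import sys

from loguru import logger


def formatter(record) -> str:
    # simplified message body; qubx builds a richer one
    fmt = "<level>{message}</level>"
    # the prefix is rebuilt per record so the level icon can be interpolated
    prefix = (
        "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> [ <level>%s</level> ] <cyan>({module})</cyan> "
        % record["level"].icon
    )
    return prefix + fmt + "\n"


logger.remove()
logger.add(sys.stderr, format=formatter, colorize=True)
logger.info("formatter sketch")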
qubx/_nb_magic.py
CHANGED
@@ -1,4 +1,4 @@
-""""
+""" "
 Here stuff we want to have in every Jupyter notebook after calling %qubx magic
 """
 
@@ -27,7 +27,6 @@ def np_fmt_reset():
 
 
 if runtime_env() in ["notebook", "shell"]:
-
     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     # -- all imports below will appear in notebook after calling %%qubx magic ---
     # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
qubx/backtester/account.py
ADDED
@@ -0,0 +1,134 @@
+from qubx import logger
+from qubx.backtester.ome import OrdersManagementEngine
+from qubx.core.account import BasicAccountProcessor
+from qubx.core.basics import (
+    ZERO_COSTS,
+    CtrlChannel,
+    Instrument,
+    Order,
+    Position,
+    TransactionCostsCalculator,
+    dt_64,
+)
+from qubx.core.interfaces import ITimeProvider
+from qubx.core.series import Bar, Quote, Trade
+
+
+class SimulatedAccountProcessor(BasicAccountProcessor):
+    ome: dict[Instrument, OrdersManagementEngine]
+    order_to_instrument: dict[str, Instrument]
+
+    _channel: CtrlChannel
+    _fill_stop_order_at_price: bool
+    _half_tick_size: dict[Instrument, float]
+
+    def __init__(
+        self,
+        account_id: str,
+        channel: CtrlChannel,
+        base_currency: str,
+        initial_capital: float,
+        time_provider: ITimeProvider,
+        tcc: TransactionCostsCalculator = ZERO_COSTS,
+        accurate_stop_orders_execution: bool = False,
+    ) -> None:
+        super().__init__(
+            account_id=account_id,
+            time_provider=time_provider,
+            base_currency=base_currency,
+            tcc=tcc,
+            initial_capital=initial_capital,
+        )
+        self.ome = {}
+        self.order_to_instrument = {}
+        self._channel = channel
+        self._half_tick_size = {}
+        self._fill_stop_order_at_price = accurate_stop_orders_execution
+        if self._fill_stop_order_at_price:
+            logger.info(f"{self.__class__.__name__} emulates stop orders executions at exact price")
+
+    def get_orders(self, instrument: Instrument | None = None) -> list[Order]:
+        if instrument is not None:
+            ome = self.ome.get(instrument)
+            if ome is None:
+                raise ValueError(f"ExchangeService:get_orders :: No OME configured for '{instrument}'!")
+            return ome.get_open_orders()
+
+        return [o for ome in self.ome.values() for o in ome.get_open_orders()]
+
+    def get_position(self, instrument: Instrument) -> Position:
+        if instrument in self.positions:
+            return self.positions[instrument]
+
+        # - initiolize OME for this instrument
+        self.ome[instrument] = OrdersManagementEngine(
+            instrument=instrument,
+            time_provider=self.time_provider,
+            tcc=self._tcc,  # type: ignore
+            fill_stop_order_at_price=self._fill_stop_order_at_price,
+        )
+
+        # - initiolize empty position
+        position = Position(instrument)  # type: ignore
+        self._half_tick_size[instrument] = instrument.tick_size / 2  # type: ignore
+        self.attach_positions(position)
+        return self.positions[instrument]
+
+    def update_position_price(self, time: dt_64, instrument: Instrument, price: float) -> None:
+        super().update_position_price(time, instrument, price)
+
+        # - first we need to update OME with new quote.
+        # - if update is not a quote we need 'emulate' it.
+        # - actually if SimulatedExchangeService is used in backtesting mode it will recieve only quotes
+        # - case when we need that - SimulatedExchangeService is used for paper trading and data provider configured to listen to OHLC or TAS.
+        # - probably we need to subscribe to quotes in real data provider in any case and then this emulation won't be needed.
+        quote = price if isinstance(price, Quote) else self.emulate_quote_from_data(instrument, time, price)
+        if quote is None:
+            return
+
+        # - process new quote
+        self._process_new_quote(instrument, quote)
+
+    def process_order(self, order: Order, update_locked_value: bool = True) -> None:
+        _new = order.status == "NEW"
+        _open = order.status == "OPEN"
+        _cancel = order.status == "CANCELED"
+        _closed = order.status == "CLOSED"
+        if _new or _open:
+            self.order_to_instrument[order.id] = order.instrument
+        if (_cancel or _closed) and order.id in self.order_to_instrument:
+            self.order_to_instrument.pop(order.id)
+        return super().process_order(order, update_locked_value)
+
+    def emulate_quote_from_data(
+        self, instrument: Instrument, timestamp: dt_64, data: float | Trade | Bar
+    ) -> Quote | None:
+        if instrument not in self._half_tick_size:
+            _ = self.get_position(instrument)
+
+        _ts2 = self._half_tick_size[instrument]
+        if isinstance(data, Quote):
+            return data
+        elif isinstance(data, Trade):
+            if data.taker:  # type: ignore
+                return Quote(timestamp, data.price - _ts2 * 2, data.price, 0, 0)  # type: ignore
+            else:
+                return Quote(timestamp, data.price, data.price + _ts2 * 2, 0, 0)  # type: ignore
+        elif isinstance(data, Bar):
+            return Quote(timestamp, data.close - _ts2, data.close + _ts2, 0, 0)  # type: ignore
+        elif isinstance(data, float):
+            return Quote(timestamp, data - _ts2, data + _ts2, 0, 0)
+        else:
+            return None
+
+    def _process_new_quote(self, instrument: Instrument, data: Quote) -> None:
+        ome = self.ome.get(instrument)
+        if ome is None:
+            logger.warning("ExchangeService:update :: No OME configured for '{symbol}' yet !")
+            return
+        for r in ome.update_bbo(data):
+            if r.exec is not None:
+                self.order_to_instrument.pop(r.order.id)
+                # - process methods will be called from stg context
+                self._channel.send((instrument, "order", r.order, False))
+                self._channel.send((instrument, "deals", [r.exec], False))
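The emulate_quote_from_data method above synthesizes a Quote around a single price: bars and plain floats are bracketed by half a tick on each side, while for trades the traded price becomes one side of the book and the other side is offset by a full tick. A self-contained sketch of the half-tick rule, using a stand-in dataclass rather than qubx.core.series.Quote (names and values are illustrative only):

from dataclasses import dataclass


@dataclass
class Quote:
    time: int
    bid: float
    ask: float
    bid_size: float
    ask_size: float


def quote_from_price(timestamp: int, price: float, tick_size: float) -> Quote:
    # bracket the price by half a tick on each side, with zero sizes
    half_tick = tick_size / 2
    return Quote(timestamp, price - half_tick, price + half_tick, 0.0, 0.0)


print(quote_from_price(0, 100.0, 0.5))  # bid=99.75, ask=100.25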
qubx/backtester/broker.py
ADDED
@@ -0,0 +1,82 @@
+from qubx.backtester.ome import OmeReport
+from qubx.core.basics import (
+    CtrlChannel,
+    Instrument,
+    Order,
+)
+from qubx.core.interfaces import IBroker
+
+from .account import SimulatedAccountProcessor
+
+
+class SimulatedBroker(IBroker):
+    channel: CtrlChannel
+
+    _account: SimulatedAccountProcessor
+
+    def __init__(
+        self,
+        channel: CtrlChannel,
+        account: SimulatedAccountProcessor,
+    ) -> None:
+        self.channel = channel
+        self._account = account
+
+    @property
+    def is_simulated_trading(self) -> bool:
+        return True
+
+    def send_order(
+        self,
+        instrument: Instrument,
+        order_side: str,
+        order_type: str,
+        amount: float,
+        price: float | None = None,
+        client_id: str | None = None,
+        time_in_force: str = "gtc",
+        **options,
+    ) -> Order:
+        ome = self._account.ome.get(instrument)
+        if ome is None:
+            raise ValueError(f"ExchangeService:send_order :: No OME configured for '{instrument.symbol}'!")
+
+        # - try to place order in OME
+        report = ome.place_order(
+            order_side.upper(),  # type: ignore
+            order_type.upper(),  # type: ignore
+            amount,
+            price,
+            client_id,
+            time_in_force,
+            **options,
+        )
+
+        self._send_exec_report(instrument, report)
+        return report.order
+
+    def cancel_order(self, order_id: str) -> Order | None:
+        instrument = self._account.order_to_instrument.get(order_id)
+        if instrument is None:
+            raise ValueError(f"ExchangeService:cancel_order :: can't find order with id = '{order_id}'!")
+
+        ome = self._account.ome.get(instrument)
+        if ome is None:
+            raise ValueError(f"ExchangeService:send_order :: No OME configured for '{instrument}'!")
+
+        # - cancel order in OME and remove from the map to free memory
+        order_update = ome.cancel_order(order_id)
+        self._send_exec_report(instrument, order_update)
+
+        return order_update.order
+
+    def cancel_orders(self, instrument: Instrument) -> None:
+        raise NotImplementedError("Not implemented yet")
+
+    def update_order(self, order_id: str, price: float | None = None, amount: float | None = None) -> Order:
+        raise NotImplementedError("Not implemented yet")
+
+    def _send_exec_report(self, instrument: Instrument, report: OmeReport):
+        self.channel.send((instrument, "order", report.order, False))
+        if report.exec is not None:
+            self.channel.send((instrument, "deals", [report.exec], False))
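Both SimulatedAccountProcessor and SimulatedBroker above publish (instrument, kind, payload, is_historical) tuples on the control channel, with kind "order" for order updates and "deals" for a list of executions. A sketch of that message shape, using a plain queue and dict payloads as stand-ins for CtrlChannel and the qubx Order/Deal types (both stand-ins are assumptions for illustration):

from queue import Queue

channel: Queue = Queue()


def send_exec_report(instrument: str, order: dict, deal: dict | None) -> None:
    # the order update is always sent; an execution follows as a list under "deals"
    channel.put((instrument, "order", order, False))
    if deal is not None:
        channel.put((instrument, "deals", [deal], False))


send_exec_report("BINANCE:BTCUSDT", {"id": "1", "status": "CLOSED"}, {"price": 64000.0, "amount": 0.1})
while not channel.empty():
    instrument, kind, payload, is_hist = channel.get()
    print(instrument, kind, payload, is_hist)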
qubx/backtester/data.py
ADDED
@@ -0,0 +1,269 @@
+from collections import defaultdict
+from typing import Any, Dict, Optional
+
+import numpy as np
+import pandas as pd
+from tqdm.auto import tqdm
+
+from qubx import logger
+from qubx.backtester.simulated_data import EventBatcher, IterableSimulationData
+from qubx.core.basics import (
+    CtrlChannel,
+    DataType,
+    Instrument,
+    TimestampedDict,
+)
+from qubx.core.helpers import BasicScheduler
+from qubx.core.interfaces import IDataProvider
+from qubx.core.series import Bar, Quote, time_as_nsec
+from qubx.data.readers import AsDict, DataReader
+from qubx.utils.time import infer_series_frequency
+
+from .account import SimulatedAccountProcessor
+from .utils import SimulatedTimeProvider
+
+
+class SimulatedDataProvider(IDataProvider):
+    time_provider: SimulatedTimeProvider
+    channel: CtrlChannel
+
+    _scheduler: BasicScheduler
+    _account: SimulatedAccountProcessor
+    _last_quotes: Dict[Instrument, Optional[Quote]]
+    _readers: dict[str, DataReader]
+    _scheduler: BasicScheduler
+    _pregenerated_signals: dict[Instrument, pd.Series | pd.DataFrame]
+    _to_process: dict[Instrument, list]
+    _data_source: IterableSimulationData
+    _open_close_time_indent_ns: int
+
+    def __init__(
+        self,
+        exchange_id: str,
+        channel: CtrlChannel,
+        scheduler: BasicScheduler,
+        time_provider: SimulatedTimeProvider,
+        account: SimulatedAccountProcessor,
+        readers: dict[str, DataReader],
+        open_close_time_indent_secs=1,
+    ):
+        self.channel = channel
+        self.time_provider = time_provider
+        self._exchange_id = exchange_id
+        self._scheduler = scheduler
+        self._account = account
+        self._readers = readers
+
+        # - create exchange's instance
+        self._last_quotes = defaultdict(lambda: None)
+
+        # - pregenerated signals storage
+        self._pregenerated_signals = dict()
+        self._to_process = {}
+
+        # - simulation data source
+        self._data_source = IterableSimulationData(
+            self._readers, open_close_time_indent_secs=open_close_time_indent_secs
+        )
+        self._open_close_time_indent_ns = open_close_time_indent_secs * 1_000_000_000  # convert seconds to nanoseconds
+
+        logger.info(f"{self.__class__.__name__}.{exchange_id} is initialized")
+
+    def run(
+        self,
+        start: str | pd.Timestamp,
+        end: str | pd.Timestamp,
+        silent: bool = False,
+        enable_event_batching: bool = True,
+    ) -> None:
+        logger.info(f"{self.__class__.__name__} ::: Simulation started at {start} :::")
+
+        if self._pregenerated_signals:
+            self._prepare_generated_signals(start, end)
+            _run = self._run_generated_signals
+            enable_event_batching = False  # no batching for pre-generated signals
+        else:
+            _run = self._run_as_strategy
+
+        qiter = EventBatcher(self._data_source.create_iterable(start, end), passthrough=not enable_event_batching)
+        start, end = pd.Timestamp(start), pd.Timestamp(end)
+        total_duration = end - start
+        update_delta = total_duration / 100
+        prev_dt = pd.Timestamp(start)
+
+        if silent:
+            for instrument, data_type, event, is_hist in qiter:
+                if not _run(instrument, data_type, event, is_hist):
+                    break
+        else:
+            _p = 0
+            with tqdm(total=100, desc="Simulating", unit="%", leave=False) as pbar:
+                for instrument, data_type, event, is_hist in qiter:
+                    if not _run(instrument, data_type, event, is_hist):
+                        break
+                    dt = pd.Timestamp(event.time)
+                    # update only if date has changed
+                    if dt - prev_dt > update_delta:
+                        _p += 1
+                        pbar.n = _p
+                        pbar.refresh()
+                        prev_dt = dt
+                pbar.n = 100
+                pbar.refresh()
+
+        logger.info(f"{self.__class__.__name__} ::: Simulation finished at {end} :::")
+
+    def set_generated_signals(self, signals: pd.Series | pd.DataFrame):
+        logger.debug(f"Using pre-generated signals:\n {str(signals.count()).strip('ndtype: int64')}")
+        # - sanity check
+        signals.index = pd.DatetimeIndex(signals.index)
+
+        if isinstance(signals, pd.Series):
+            self._pregenerated_signals[str(signals.name)] = signals  # type: ignore
+
+        elif isinstance(signals, pd.DataFrame):
+            for col in signals.columns:
+                self._pregenerated_signals[col] = signals[col]  # type: ignore
+        else:
+            raise ValueError("Invalid signals or strategy configuration")
+
+    @property
+    def is_simulation(self) -> bool:
+        return True
+
+    def subscribe(self, subscription_type: str, instruments: set[Instrument], reset: bool) -> None:
+        logger.debug(f" | subscribe: {subscription_type} -> {instruments}")
+        self._data_source.add_instruments_for_subscription(subscription_type, list(instruments))
+
+    def unsubscribe(self, subscription_type: str, instruments: set[Instrument] | Instrument | None = None) -> None:
+        logger.debug(f" | unsubscribe: {subscription_type} -> {instruments}")
+        if instruments is not None:
+            self._data_source.remove_instruments_from_subscription(
+                subscription_type, [instruments] if isinstance(instruments, Instrument) else list(instruments)
+            )
+
+    def has_subscription(self, instrument: Instrument, subscription_type: str) -> bool:
+        return self._data_source.has_subscription(instrument, subscription_type)
+
+    def get_subscriptions(self, instrument: Instrument) -> list[str]:
+        _s_lst = self._data_source.get_subscriptions_for_instrument(instrument)
+        logger.debug(f" | get_subscriptions {instrument} -> {_s_lst}")
+        return _s_lst
+
+    def get_subscribed_instruments(self, subscription_type: str | None = None) -> list[Instrument]:
+        _in_lst = self._data_source.get_instruments_for_subscription(subscription_type or DataType.ALL)
+        logger.debug(f" | get_subscribed_instruments {subscription_type} -> {_in_lst}")
+        return _in_lst
+
+    def warmup(self, configs: dict[tuple[str, Instrument], str]) -> None:
+        for si, warm_period in configs.items():
+            logger.debug(f" | Warming up {si} -> {warm_period}")
+            self._data_source.set_warmup_period(si[0], warm_period)
+
+    def get_ohlc(self, instrument: Instrument, timeframe: str, nbarsback: int) -> list[Bar]:
+        _reader = self._readers.get(DataType.OHLC)
+        if _reader is None:
+            logger.error(f"Reader for {DataType.OHLC} data not configured")
+            return []
+
+        start = pd.Timestamp(self.time_provider.time())
+        end = start - nbarsback * (_timeframe := pd.Timedelta(timeframe))
+        _spec = f"{instrument.exchange}:{instrument.symbol}"
+        return self._convert_records_to_bars(
+            _reader.read(data_id=_spec, start=start, stop=end, transform=AsDict()),  # type: ignore
+            time_as_nsec(self.time_provider.time()),
+            _timeframe.asm8.item(),
+        )
+
+    def get_quote(self, instrument: Instrument) -> Quote | None:
+        return self._last_quotes[instrument]
+
+    def close(self):
+        pass
+
+    def _prepare_generated_signals(self, start: str | pd.Timestamp, end: str | pd.Timestamp):
+        for s, v in self._pregenerated_signals.items():
+            _s_inst = None
+
+            for i in self.get_subscribed_instruments():
+                # - we can process series with variable id's if we can find some similar instrument
+                if s == i.symbol or s == str(i) or s == f"{i.exchange}:{i.symbol}" or str(s) == str(i):
+                    _start, _end = pd.Timestamp(start), pd.Timestamp(end)
+                    _start_idx, _end_idx = v.index.get_indexer([_start, _end], method="ffill")
+                    sel = v.iloc[max(_start_idx, 0) : _end_idx + 1]  # sel = v[pd.Timestamp(start) : pd.Timestamp(end)]
+
+                    self._to_process[i] = list(zip(sel.index, sel.values))
+                    _s_inst = i
+                    break
+
+            if _s_inst is None:
+                logger.error(f"Can't find instrument for pregenerated signals with id '{s}'")
+                raise ValueError(f"Can't find instrument for pregenerated signals with id '{s}'")
+
+    def _convert_records_to_bars(
+        self, records: list[TimestampedDict], cut_time_ns: int, timeframe_ns: int
+    ) -> list[Bar]:
+        """
+        Convert records to bars and we need to cut last bar up to the cut_time_ns
+        """
+        bars = []
+
+        _data_tf = infer_series_frequency([r.time for r in records[:50]])
+        timeframe_ns = _data_tf.item()
+
+        if records is not None:
+            for r in records:
+                # _b_ts_0 = np.datetime64(r.time, "ns").item()
+                _b_ts_0 = r.time
+                _b_ts_1 = _b_ts_0 + timeframe_ns - self._open_close_time_indent_ns
+
+                if _b_ts_0 <= cut_time_ns and cut_time_ns < _b_ts_1:
+                    break
+
+                bars.append(
+                    Bar(
+                        _b_ts_0, r.data["open"], r.data["high"], r.data["low"], r.data["close"], r.data.get("volume", 0)
+                    )
+                )
+
+        return bars
+
+    def _run_generated_signals(self, instrument: Instrument, data_type: str, data: Any, is_hist) -> bool:
+        if is_hist:
+            raise ValueError("Historical data is not supported for pre-generated signals !")
+
+        t = data.time  # type: ignore
+        self.time_provider.set_time(np.datetime64(t, "ns"))
+
+        q = self._account.emulate_quote_from_data(instrument, np.datetime64(t, "ns"), data)
+        self._last_quotes[instrument] = q
+        cc = self.channel
+
+        # - we need to send quotes for invoking portfolio logging etc
+        cc.send((instrument, data_type, data, is_hist))
+        sigs = self._to_process[instrument]
+        _current_time = self.time_provider.time()
+        while sigs and sigs[0][0].as_unit("ns").asm8 <= _current_time:
+            cc.send((instrument, "event", {"order": sigs[0][1]}, is_hist))
+            sigs.pop(0)
+
+        return cc.control.is_set()
+
+    def _run_as_strategy(self, instrument: Instrument, data_type: str, data: Any, is_hist: bool) -> bool:
+        t = data.time  # type: ignore
+        self.time_provider.set_time(np.datetime64(t, "ns"))
+
+        q = self._account.emulate_quote_from_data(instrument, np.datetime64(t, "ns"), data)
+        cc = self.channel
+
+        if not is_hist and q is not None:
+            self._last_quotes[instrument] = q
+
+        # we have to schedule possible crons before sending the data event itself
+        if self._scheduler.check_and_run_tasks():
+            # - push nothing - it will force to process last event
+            cc.send((None, "service_time", None, False))
+
+        cc.send((instrument, data_type, data, is_hist))
+
+        return cc.control.is_set()
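_convert_records_to_bars above stops at the bar whose interval contains the current simulation time, so the strategy never receives the still-forming bar. A standalone sketch of that cut rule with made-up one-minute bars and the default 1-second open/close indent (illustration only, not the qubx implementation):

ONE_MIN_NS = 60_000_000_000
INDENT_NS = 1_000_000_000  # open_close_time_indent_secs = 1


def completed_bar_opens(open_times_ns: list[int], now_ns: int) -> list[int]:
    out = []
    for t0 in open_times_ns:
        t1 = t0 + ONE_MIN_NS - INDENT_NS
        if t0 <= now_ns < t1:  # this bar is still forming -> cut here
            break
        out.append(t0)
    return out


opens = [0, ONE_MIN_NS, 2 * ONE_MIN_NS]
print(completed_bar_opens(opens, now_ns=2 * ONE_MIN_NS + 5))  # -> [0, 60000000000]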