Qubx 0.4.3__cp311-cp311-manylinux_2_35_x86_64.whl → 0.5.1__cp311-cp311-manylinux_2_35_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of Qubx might be problematic; see the registry's advisory page for details.

Files changed (69)
  1. qubx/__init__.py +19 -12
  2. qubx/_nb_magic.py +24 -25
  3. qubx/backtester/account.py +146 -0
  4. qubx/backtester/broker.py +87 -0
  5. qubx/backtester/data.py +272 -0
  6. qubx/backtester/ome.py +5 -8
  7. qubx/backtester/optimization.py +5 -5
  8. qubx/backtester/simulated_data.py +516 -0
  9. qubx/backtester/simulator.py +180 -905
  10. qubx/backtester/utils.py +691 -0
  11. qubx/connectors/ccxt/account.py +496 -0
  12. qubx/connectors/ccxt/broker.py +130 -0
  13. qubx/connectors/ccxt/ccxt_connector.py +0 -676
  14. qubx/connectors/ccxt/{ccxt_customizations.py → customizations.py} +48 -1
  15. qubx/connectors/ccxt/data.py +612 -0
  16. qubx/connectors/ccxt/{ccxt_exceptions.py → exceptions.py} +8 -0
  17. qubx/connectors/ccxt/factory.py +94 -0
  18. qubx/connectors/ccxt/{ccxt_utils.py → utils.py} +152 -14
  19. qubx/core/account.py +169 -160
  20. qubx/core/basics.py +317 -179
  21. qubx/core/context.py +241 -118
  22. qubx/core/exceptions.py +8 -0
  23. qubx/core/helpers.py +75 -20
  24. qubx/core/interfaces.py +492 -189
  25. qubx/core/loggers.py +45 -16
  26. qubx/core/lookups.py +82 -140
  27. qubx/core/metrics.py +256 -23
  28. qubx/core/mixins/__init__.py +1 -1
  29. qubx/core/mixins/market.py +34 -24
  30. qubx/core/mixins/processing.py +190 -165
  31. qubx/core/mixins/subscription.py +189 -64
  32. qubx/core/mixins/trading.py +36 -22
  33. qubx/core/mixins/universe.py +72 -54
  34. qubx/core/series.cpython-311-x86_64-linux-gnu.so +0 -0
  35. qubx/core/utils.cpython-311-x86_64-linux-gnu.so +0 -0
  36. qubx/core/utils.pyx +1 -1
  37. qubx/data/helpers.py +33 -31
  38. qubx/data/readers.py +490 -184
  39. qubx/data/tardis.py +100 -0
  40. qubx/pandaz/utils.py +27 -6
  41. qubx/plotting/__init__.py +0 -0
  42. qubx/plotting/dashboard.py +151 -0
  43. qubx/plotting/data.py +137 -0
  44. qubx/plotting/interfaces.py +25 -0
  45. qubx/plotting/renderers/__init__.py +0 -0
  46. qubx/plotting/renderers/plotly.py +0 -0
  47. qubx/ta/indicators.cpython-311-x86_64-linux-gnu.so +0 -0
  48. qubx/trackers/composite.py +5 -5
  49. qubx/trackers/rebalancers.py +13 -27
  50. qubx/trackers/riskctrl.py +10 -9
  51. qubx/trackers/sizers.py +4 -7
  52. qubx/utils/_jupyter_runner.pyt +59 -0
  53. qubx/utils/marketdata/ccxt.py +88 -0
  54. qubx/utils/marketdata/dukas.py +130 -0
  55. qubx/utils/misc.py +91 -5
  56. qubx/utils/numbers_utils.py +7 -0
  57. qubx/utils/orderbook.py +15 -21
  58. qubx/utils/runner.py +340 -181
  59. qubx/utils/time.py +62 -15
  60. {qubx-0.4.3.dist-info → qubx-0.5.1.dist-info}/METADATA +31 -2
  61. qubx-0.5.1.dist-info/RECORD +87 -0
  62. qubx-0.5.1.dist-info/entry_points.txt +3 -0
  63. qubx/backtester/queue.py +0 -250
  64. qubx/connectors/ccxt/ccxt_trading.py +0 -242
  65. qubx/utils/collections.py +0 -53
  66. qubx/utils/threading.py +0 -14
  67. qubx-0.4.3.dist-info/RECORD +0 -71
  68. qubx-0.4.3.dist-info/entry_points.txt +0 -3
  69. {qubx-0.4.3.dist-info → qubx-0.5.1.dist-info}/WHEEL +0 -0
qubx/__init__.py CHANGED
@@ -1,10 +1,13 @@
1
+ import os
2
+ import sys
1
3
  from typing import Callable
2
- from qubx.utils import set_mpl_theme, runtime_env
3
- from qubx.utils.misc import install_pyx_recompiler_for_dev
4
4
 
5
+ import stackprinter
5
6
  from loguru import logger
6
- import os, sys, stackprinter
7
+
7
8
  from qubx.core.lookups import FeesLookup, GlobalLookup, InstrumentsLookup
9
+ from qubx.utils import runtime_env, set_mpl_theme
10
+ from qubx.utils.misc import install_pyx_recompiler_for_dev
8
11
 
9
12
  # - TODO: import some main methods from packages
10
13
 
@@ -15,7 +18,10 @@ def formatter(record):
15
18
  if record["level"].name in {"WARNING", "SNAKY"}:
16
19
  fmt = "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - %s" % fmt
17
20
 
18
- prefix = "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> [ <level>%s</level> ] " % record["level"].icon
21
+ prefix = (
22
+ "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> [ <level>%s</level> ] <cyan>({module})</cyan> "
23
+ % record["level"].icon
24
+ )
19
25
 
20
26
  if record["exception"] is not None:
21
27
  # stackprinter.set_excepthook(style='darkbg2')
@@ -29,7 +35,6 @@ def formatter(record):
29
35
 
30
36
 
31
37
  class QubxLogConfig:
32
-
33
38
  @staticmethod
34
39
  def get_log_level():
35
40
  return os.getenv("QUBX_LOG_LEVEL", "DEBUG")
@@ -64,8 +69,8 @@ lookup = GlobalLookup(InstrumentsLookup(), FeesLookup())
64
69
 
65
70
  # registering magic for jupyter notebook
66
71
  if runtime_env() in ["notebook", "shell"]:
67
- from IPython.core.magic import Magics, magics_class, line_magic, line_cell_magic
68
72
  from IPython.core.getipython import get_ipython
73
+ from IPython.core.magic import Magics, line_cell_magic, line_magic, magics_class
69
74
 
70
75
  @magics_class
71
76
  class QubxMagics(Magics):
@@ -107,14 +112,15 @@ if runtime_env() in ["notebook", "shell"]:
107
112
  if line:
108
113
  if "dark" in line.lower():
109
114
  set_mpl_theme("dark")
115
+ # - temporary workaround for vscode - dark theme not applying to ipywidgets in notebook
116
+ # - see https://github.com/microsoft/vscode-jupyter/issues/7161
117
+ if runtime_env() == "notebook":
118
+ _vscode_clr_trick = """from IPython.display import display, HTML; display(HTML("<style> .cell-output-ipywidget-background { background-color: transparent !important; } :root { --jp-widgets-color: var(--vscode-editor-foreground); --jp-widgets-font-size: var(--vscode-editor-font-size); } </style>"))"""
119
+ exec(_vscode_clr_trick, self.shell.user_ns)
110
120
 
111
121
  elif "light" in line.lower():
112
122
  set_mpl_theme("light")
113
123
 
114
- # install additional plotly helpers
115
- # from qube.charting.plot_helpers import install_plotly_helpers
116
- # install_plotly_helpers()
117
-
118
124
  def _get_manager(self):
119
125
  if self.__manager is None:
120
126
  import multiprocessing as m
@@ -133,7 +139,8 @@ if runtime_env() in ["notebook", "shell"]:
133
139
 
134
140
  """
135
141
  import multiprocessing as m
136
- import time, re
142
+ import re
143
+ import time
137
144
 
138
145
  # create ext args
139
146
  name = None
@@ -148,7 +155,7 @@ if runtime_env() in ["notebook", "shell"]:
148
155
  return
149
156
 
150
157
  ipy = get_ipython()
151
- for a in [x for x in re.split("[\ ,;]", line.strip()) if x]:
158
+ for a in [x for x in re.split(r"[\ ,;]", line.strip()) if x]:
152
159
  ipy.push({a: self._get_manager().Value(None, None)})
153
160
 
154
161
  # code to run
qubx/_nb_magic.py CHANGED
@@ -1,4 +1,4 @@
1
- """"
1
+ """ "
2
2
  Here stuff we want to have in every Jupyter notebook after calling %qubx magic
3
3
  """
4
4
 
@@ -27,66 +27,65 @@ def np_fmt_reset():
27
27
 
28
28
 
29
29
  if runtime_env() in ["notebook", "shell"]:
30
-
31
30
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
32
31
  # -- all imports below will appear in notebook after calling %%qubx magic ---
33
32
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
34
33
 
35
34
  # - - - - Common stuff - - - -
35
+ from datetime import time, timedelta
36
+
36
37
  import numpy as np
37
38
  import pandas as pd
38
- from datetime import time, timedelta
39
+
40
+ # - - - - Charting stuff - - - -
41
+ from matplotlib import pyplot as plt
39
42
  from tqdm.auto import tqdm
40
43
 
41
44
  # - - - - TA stuff and indicators - - - -
42
45
  import qubx.pandaz.ta as pta
43
46
  import qubx.ta.indicators as ta
47
+ from qubx.backtester.optimization import variate
48
+
49
+ # - - - - Simulator stuff - - - -
50
+ from qubx.backtester.simulator import simulate
44
51
 
45
52
  # - - - - Portfolio analysis - - - -
46
53
  from qubx.core.metrics import (
47
- tearsheet,
48
54
  chart_signals,
49
- get_symbol_pnls,
50
- get_equity,
51
- portfolio_metrics,
52
- pnl,
53
55
  drop_symbols,
56
+ get_symbol_pnls,
54
57
  pick_symbols,
58
+ pnl,
59
+ portfolio_metrics,
60
+ tearsheet,
55
61
  )
62
+ from qubx.data.helpers import loader
56
63
 
57
64
  # - - - - Data reading - - - -
58
65
  from qubx.data.readers import (
59
- CsvStorageDataReader,
60
- MultiQdbConnector,
61
- QuestDBConnector,
62
66
  AsOhlcvSeries,
63
67
  AsPandasFrame,
64
68
  AsQuotes,
65
69
  AsTimestampedRecords,
70
+ CsvStorageDataReader,
71
+ MultiQdbConnector,
72
+ QuestDBConnector,
66
73
  RestoreTicksFromOHLC,
67
74
  )
68
- from qubx.data.helpers import loader
69
-
70
- # - - - - Simulator stuff - - - -
71
- from qubx.backtester.simulator import simulate
72
- from qubx.backtester.optimization import variate
73
-
74
- # - - - - Charting stuff - - - -
75
- from matplotlib import pyplot as plt
76
- from qubx.utils.charting.mpl_helpers import fig, subplot, sbp, plot_trends, ohlc_plot
77
- from qubx.utils.charting.lookinglass import LookingGlass
78
75
 
79
76
  # - - - - Utils - - - -
80
77
  from qubx.pandaz.utils import (
81
- scols,
82
- srows,
83
- ohlc_resample,
84
78
  continuous_periods,
85
- generate_equal_date_ranges,
86
79
  drop_duplicated_indexes,
80
+ generate_equal_date_ranges,
81
+ ohlc_resample,
87
82
  retain_columns_and_join,
88
83
  rolling_forward_test_split,
84
+ scols,
85
+ srows,
89
86
  )
87
+ from qubx.utils.charting.lookinglass import LookingGlass
88
+ from qubx.utils.charting.mpl_helpers import fig, ohlc_plot, plot_trends, sbp, subplot
90
89
 
91
90
  # - setup short numpy output format
92
91
  np_fmt_short()
@@ -0,0 +1,146 @@
1
+ from qubx import logger
2
+ from qubx.backtester.ome import OrdersManagementEngine
3
+ from qubx.core.account import BasicAccountProcessor
4
+ from qubx.core.basics import (
5
+ ZERO_COSTS,
6
+ BatchEvent,
7
+ CtrlChannel,
8
+ Instrument,
9
+ Order,
10
+ Position,
11
+ Timestamped,
12
+ TransactionCostsCalculator,
13
+ dt_64,
14
+ )
15
+ from qubx.core.interfaces import ITimeProvider
16
+ from qubx.core.series import Bar, OrderBook, Quote, Trade
17
+
18
+
19
+ class SimulatedAccountProcessor(BasicAccountProcessor):
20
+ ome: dict[Instrument, OrdersManagementEngine]
21
+ order_to_instrument: dict[str, Instrument]
22
+
23
+ _channel: CtrlChannel
24
+ _fill_stop_order_at_price: bool
25
+ _half_tick_size: dict[Instrument, float]
26
+
27
+ def __init__(
28
+ self,
29
+ account_id: str,
30
+ channel: CtrlChannel,
31
+ base_currency: str,
32
+ initial_capital: float,
33
+ time_provider: ITimeProvider,
34
+ tcc: TransactionCostsCalculator = ZERO_COSTS,
35
+ accurate_stop_orders_execution: bool = False,
36
+ ) -> None:
37
+ super().__init__(
38
+ account_id=account_id,
39
+ time_provider=time_provider,
40
+ base_currency=base_currency,
41
+ tcc=tcc,
42
+ initial_capital=initial_capital,
43
+ )
44
+ self.ome = {}
45
+ self.order_to_instrument = {}
46
+ self._channel = channel
47
+ self._half_tick_size = {}
48
+ self._fill_stop_order_at_price = accurate_stop_orders_execution
49
+ if self._fill_stop_order_at_price:
50
+ logger.info(f"{self.__class__.__name__} emulates stop orders executions at exact price")
51
+
52
+ def get_orders(self, instrument: Instrument | None = None) -> list[Order]:
53
+ if instrument is not None:
54
+ ome = self.ome.get(instrument)
55
+ if ome is None:
56
+ raise ValueError(f"ExchangeService:get_orders :: No OME configured for '{instrument}'!")
57
+ return ome.get_open_orders()
58
+
59
+ return [o for ome in self.ome.values() for o in ome.get_open_orders()]
60
+
61
+ def get_position(self, instrument: Instrument) -> Position:
62
+ if instrument in self.positions:
63
+ return self.positions[instrument]
64
+
65
+ # - initiolize OME for this instrument
66
+ self.ome[instrument] = OrdersManagementEngine(
67
+ instrument=instrument,
68
+ time_provider=self.time_provider,
69
+ tcc=self._tcc, # type: ignore
70
+ fill_stop_order_at_price=self._fill_stop_order_at_price,
71
+ )
72
+
73
+ # - initiolize empty position
74
+ position = Position(instrument) # type: ignore
75
+ self._half_tick_size[instrument] = instrument.tick_size / 2 # type: ignore
76
+ self.attach_positions(position)
77
+ return self.positions[instrument]
78
+
79
+ def update_position_price(self, time: dt_64, instrument: Instrument, price: float) -> None:
80
+ super().update_position_price(time, instrument, price)
81
+
82
+ # - first we need to update OME with new quote.
83
+ # - if update is not a quote we need 'emulate' it.
84
+ # - actually if SimulatedExchangeService is used in backtesting mode it will recieve only quotes
85
+ # - case when we need that - SimulatedExchangeService is used for paper trading and data provider configured to listen to OHLC or TAS.
86
+ # - probably we need to subscribe to quotes in real data provider in any case and then this emulation won't be needed.
87
+ quote = price if isinstance(price, Quote) else self.emulate_quote_from_data(instrument, time, price)
88
+ if quote is None:
89
+ return
90
+
91
+ # - process new quote
92
+ self._process_new_quote(instrument, quote)
93
+
94
+ def process_order(self, order: Order, update_locked_value: bool = True) -> None:
95
+ _new = order.status == "NEW"
96
+ _open = order.status == "OPEN"
97
+ _cancel = order.status == "CANCELED"
98
+ _closed = order.status == "CLOSED"
99
+ if _new or _open:
100
+ self.order_to_instrument[order.id] = order.instrument
101
+ if (_cancel or _closed) and order.id in self.order_to_instrument:
102
+ self.order_to_instrument.pop(order.id)
103
+ return super().process_order(order, update_locked_value)
104
+
105
+ def emulate_quote_from_data(
106
+ self, instrument: Instrument, timestamp: dt_64, data: float | Timestamped | BatchEvent
107
+ ) -> Quote | None:
108
+ if instrument not in self._half_tick_size:
109
+ _ = self.get_position(instrument)
110
+
111
+ _ts2 = self._half_tick_size[instrument]
112
+ if isinstance(data, Quote):
113
+ return data
114
+
115
+ elif isinstance(data, Trade):
116
+ if data.taker: # type: ignore
117
+ return Quote(timestamp, data.price - _ts2 * 2, data.price, 0, 0) # type: ignore
118
+ else:
119
+ return Quote(timestamp, data.price, data.price + _ts2 * 2, 0, 0) # type: ignore
120
+
121
+ elif isinstance(data, Bar):
122
+ return Quote(timestamp, data.close - _ts2, data.close + _ts2, 0, 0) # type: ignore
123
+
124
+ elif isinstance(data, OrderBook):
125
+ return data.to_quote()
126
+
127
+ elif isinstance(data, BatchEvent):
128
+ return self.emulate_quote_from_data(instrument, timestamp, data.data[-1])
129
+
130
+ elif isinstance(data, float):
131
+ return Quote(timestamp, data - _ts2, data + _ts2, 0, 0)
132
+
133
+ else:
134
+ return None
135
+
136
+ def _process_new_quote(self, instrument: Instrument, data: Quote) -> None:
137
+ ome = self.ome.get(instrument)
138
+ if ome is None:
139
+ logger.warning("ExchangeService:update :: No OME configured for '{symbol}' yet !")
140
+ return
141
+ for r in ome.update_bbo(data):
142
+ if r.exec is not None:
143
+ self.order_to_instrument.pop(r.order.id)
144
+ # - process methods will be called from stg context
145
+ self._channel.send((instrument, "order", r.order, False))
146
+ self._channel.send((instrument, "deals", [r.exec], False))
@@ -0,0 +1,87 @@
1
+ from qubx.backtester.ome import OmeReport
2
+ from qubx.core.basics import (
3
+ CtrlChannel,
4
+ Instrument,
5
+ Order,
6
+ )
7
+ from qubx.core.interfaces import IBroker
8
+
9
+ from .account import SimulatedAccountProcessor
10
+
11
+
12
+ class SimulatedBroker(IBroker):
13
+ channel: CtrlChannel
14
+
15
+ _account: SimulatedAccountProcessor
16
+
17
+ def __init__(
18
+ self,
19
+ channel: CtrlChannel,
20
+ account: SimulatedAccountProcessor,
21
+ exchange_id: str = "simulated",
22
+ ) -> None:
23
+ self.channel = channel
24
+ self._account = account
25
+ self._exchange_id = exchange_id
26
+
27
+ @property
28
+ def is_simulated_trading(self) -> bool:
29
+ return True
30
+
31
+ def send_order(
32
+ self,
33
+ instrument: Instrument,
34
+ order_side: str,
35
+ order_type: str,
36
+ amount: float,
37
+ price: float | None = None,
38
+ client_id: str | None = None,
39
+ time_in_force: str = "gtc",
40
+ **options,
41
+ ) -> Order:
42
+ ome = self._account.ome.get(instrument)
43
+ if ome is None:
44
+ raise ValueError(f"ExchangeService:send_order :: No OME configured for '{instrument.symbol}'!")
45
+
46
+ # - try to place order in OME
47
+ report = ome.place_order(
48
+ order_side.upper(), # type: ignore
49
+ order_type.upper(), # type: ignore
50
+ amount,
51
+ price,
52
+ client_id,
53
+ time_in_force,
54
+ **options,
55
+ )
56
+
57
+ self._send_exec_report(instrument, report)
58
+ return report.order
59
+
60
+ def cancel_order(self, order_id: str) -> Order | None:
61
+ instrument = self._account.order_to_instrument.get(order_id)
62
+ if instrument is None:
63
+ raise ValueError(f"ExchangeService:cancel_order :: can't find order with id = '{order_id}'!")
64
+
65
+ ome = self._account.ome.get(instrument)
66
+ if ome is None:
67
+ raise ValueError(f"ExchangeService:send_order :: No OME configured for '{instrument}'!")
68
+
69
+ # - cancel order in OME and remove from the map to free memory
70
+ order_update = ome.cancel_order(order_id)
71
+ self._send_exec_report(instrument, order_update)
72
+
73
+ return order_update.order
74
+
75
+ def cancel_orders(self, instrument: Instrument) -> None:
76
+ raise NotImplementedError("Not implemented yet")
77
+
78
+ def update_order(self, order_id: str, price: float | None = None, amount: float | None = None) -> Order:
79
+ raise NotImplementedError("Not implemented yet")
80
+
81
+ def _send_exec_report(self, instrument: Instrument, report: OmeReport):
82
+ self.channel.send((instrument, "order", report.order, False))
83
+ if report.exec is not None:
84
+ self.channel.send((instrument, "deals", [report.exec], False))
85
+
86
+ def exchange(self) -> str:
87
+ return self._exchange_id.upper()
@@ -0,0 +1,272 @@
1
+ from collections import defaultdict
2
+ from typing import Any, Dict, Optional
3
+
4
+ import numpy as np
5
+ import pandas as pd
6
+ from tqdm.auto import tqdm
7
+
8
+ from qubx import logger
9
+ from qubx.backtester.simulated_data import EventBatcher, IterableSimulationData
10
+ from qubx.core.basics import (
11
+ CtrlChannel,
12
+ DataType,
13
+ Instrument,
14
+ TimestampedDict,
15
+ )
16
+ from qubx.core.helpers import BasicScheduler
17
+ from qubx.core.interfaces import IDataProvider
18
+ from qubx.core.series import Bar, Quote, time_as_nsec
19
+ from qubx.data.readers import AsDict, DataReader
20
+ from qubx.utils.time import infer_series_frequency
21
+
22
+ from .account import SimulatedAccountProcessor
23
+ from .utils import SimulatedTimeProvider
24
+
25
+
26
+ class SimulatedDataProvider(IDataProvider):
27
+ time_provider: SimulatedTimeProvider
28
+ channel: CtrlChannel
29
+
30
+ _scheduler: BasicScheduler
31
+ _account: SimulatedAccountProcessor
32
+ _last_quotes: Dict[Instrument, Optional[Quote]]
33
+ _readers: dict[str, DataReader]
34
+ _scheduler: BasicScheduler
35
+ _pregenerated_signals: dict[Instrument, pd.Series | pd.DataFrame]
36
+ _to_process: dict[Instrument, list]
37
+ _data_source: IterableSimulationData
38
+ _open_close_time_indent_ns: int
39
+
40
+ def __init__(
41
+ self,
42
+ exchange_id: str,
43
+ channel: CtrlChannel,
44
+ scheduler: BasicScheduler,
45
+ time_provider: SimulatedTimeProvider,
46
+ account: SimulatedAccountProcessor,
47
+ readers: dict[str, DataReader],
48
+ open_close_time_indent_secs=1,
49
+ ):
50
+ self.channel = channel
51
+ self.time_provider = time_provider
52
+ self._exchange_id = exchange_id
53
+ self._scheduler = scheduler
54
+ self._account = account
55
+ self._readers = readers
56
+
57
+ # - create exchange's instance
58
+ self._last_quotes = defaultdict(lambda: None)
59
+
60
+ # - pregenerated signals storage
61
+ self._pregenerated_signals = dict()
62
+ self._to_process = {}
63
+
64
+ # - simulation data source
65
+ self._data_source = IterableSimulationData(
66
+ self._readers, open_close_time_indent_secs=open_close_time_indent_secs
67
+ )
68
+ self._open_close_time_indent_ns = open_close_time_indent_secs * 1_000_000_000 # convert seconds to nanoseconds
69
+
70
+ logger.info(f"{self.__class__.__name__}.{exchange_id} is initialized")
71
+
72
+ def run(
73
+ self,
74
+ start: str | pd.Timestamp,
75
+ end: str | pd.Timestamp,
76
+ silent: bool = False,
77
+ enable_event_batching: bool = True,
78
+ ) -> None:
79
+ logger.info(f"{self.__class__.__name__} ::: Simulation started at {start} :::")
80
+
81
+ if self._pregenerated_signals:
82
+ self._prepare_generated_signals(start, end)
83
+ _run = self._run_generated_signals
84
+ enable_event_batching = False # no batching for pre-generated signals
85
+ else:
86
+ _run = self._run_as_strategy
87
+
88
+ qiter = EventBatcher(self._data_source.create_iterable(start, end), passthrough=not enable_event_batching)
89
+ start, end = pd.Timestamp(start), pd.Timestamp(end)
90
+ total_duration = end - start
91
+ update_delta = total_duration / 100
92
+ prev_dt = pd.Timestamp(start)
93
+
94
+ if silent:
95
+ for instrument, data_type, event, is_hist in qiter:
96
+ if not _run(instrument, data_type, event, is_hist):
97
+ break
98
+ else:
99
+ _p = 0
100
+ with tqdm(total=100, desc="Simulating", unit="%", leave=False) as pbar:
101
+ for instrument, data_type, event, is_hist in qiter:
102
+ if not _run(instrument, data_type, event, is_hist):
103
+ break
104
+ dt = pd.Timestamp(event.time)
105
+ # update only if date has changed
106
+ if dt - prev_dt > update_delta:
107
+ _p += 1
108
+ pbar.n = _p
109
+ pbar.refresh()
110
+ prev_dt = dt
111
+ pbar.n = 100
112
+ pbar.refresh()
113
+
114
+ logger.info(f"{self.__class__.__name__} ::: Simulation finished at {end} :::")
115
+
116
+ def set_generated_signals(self, signals: pd.Series | pd.DataFrame):
117
+ logger.debug(f"Using pre-generated signals:\n {str(signals.count()).strip('ndtype: int64')}")
118
+ # - sanity check
119
+ signals.index = pd.DatetimeIndex(signals.index)
120
+
121
+ if isinstance(signals, pd.Series):
122
+ self._pregenerated_signals[str(signals.name)] = signals # type: ignore
123
+
124
+ elif isinstance(signals, pd.DataFrame):
125
+ for col in signals.columns:
126
+ self._pregenerated_signals[col] = signals[col] # type: ignore
127
+ else:
128
+ raise ValueError("Invalid signals or strategy configuration")
129
+
130
+ @property
131
+ def is_simulation(self) -> bool:
132
+ return True
133
+
134
+ def subscribe(self, subscription_type: str, instruments: set[Instrument], reset: bool) -> None:
135
+ logger.debug(f" | subscribe: {subscription_type} -> {instruments}")
136
+ self._data_source.add_instruments_for_subscription(subscription_type, list(instruments))
137
+
138
+ def unsubscribe(self, subscription_type: str, instruments: set[Instrument] | Instrument | None = None) -> None:
139
+ logger.debug(f" | unsubscribe: {subscription_type} -> {instruments}")
140
+ if instruments is not None:
141
+ self._data_source.remove_instruments_from_subscription(
142
+ subscription_type, [instruments] if isinstance(instruments, Instrument) else list(instruments)
143
+ )
144
+
145
+ def has_subscription(self, instrument: Instrument, subscription_type: str) -> bool:
146
+ return self._data_source.has_subscription(instrument, subscription_type)
147
+
148
+ def get_subscriptions(self, instrument: Instrument) -> list[str]:
149
+ _s_lst = self._data_source.get_subscriptions_for_instrument(instrument)
150
+ logger.debug(f" | get_subscriptions {instrument} -> {_s_lst}")
151
+ return _s_lst
152
+
153
+ def get_subscribed_instruments(self, subscription_type: str | None = None) -> list[Instrument]:
154
+ _in_lst = self._data_source.get_instruments_for_subscription(subscription_type or DataType.ALL)
155
+ logger.debug(f" | get_subscribed_instruments {subscription_type} -> {_in_lst}")
156
+ return _in_lst
157
+
158
+ def warmup(self, configs: dict[tuple[str, Instrument], str]) -> None:
159
+ for si, warm_period in configs.items():
160
+ logger.debug(f" | Warming up {si} -> {warm_period}")
161
+ self._data_source.set_warmup_period(si[0], warm_period)
162
+
163
+ def get_ohlc(self, instrument: Instrument, timeframe: str, nbarsback: int) -> list[Bar]:
164
+ _reader = self._readers.get(DataType.OHLC)
165
+ if _reader is None:
166
+ logger.error(f"Reader for {DataType.OHLC} data not configured")
167
+ return []
168
+
169
+ start = pd.Timestamp(self.time_provider.time())
170
+ end = start - nbarsback * (_timeframe := pd.Timedelta(timeframe))
171
+ _spec = f"{instrument.exchange}:{instrument.symbol}"
172
+ return self._convert_records_to_bars(
173
+ _reader.read(data_id=_spec, start=start, stop=end, transform=AsDict()), # type: ignore
174
+ time_as_nsec(self.time_provider.time()),
175
+ _timeframe.asm8.item(),
176
+ )
177
+
178
+ def get_quote(self, instrument: Instrument) -> Quote | None:
179
+ return self._last_quotes[instrument]
180
+
181
+ def close(self):
182
+ pass
183
+
184
+ def _prepare_generated_signals(self, start: str | pd.Timestamp, end: str | pd.Timestamp):
185
+ for s, v in self._pregenerated_signals.items():
186
+ _s_inst = None
187
+
188
+ for i in self.get_subscribed_instruments():
189
+ # - we can process series with variable id's if we can find some similar instrument
190
+ if s == i.symbol or s == str(i) or s == f"{i.exchange}:{i.symbol}" or str(s) == str(i):
191
+ _start, _end = pd.Timestamp(start), pd.Timestamp(end)
192
+ _start_idx, _end_idx = v.index.get_indexer([_start, _end], method="ffill")
193
+ sel = v.iloc[max(_start_idx, 0) : _end_idx + 1] # sel = v[pd.Timestamp(start) : pd.Timestamp(end)]
194
+
195
+ self._to_process[i] = list(zip(sel.index, sel.values))
196
+ _s_inst = i
197
+ break
198
+
199
+ if _s_inst is None:
200
+ logger.error(f"Can't find instrument for pregenerated signals with id '{s}'")
201
+ raise ValueError(f"Can't find instrument for pregenerated signals with id '{s}'")
202
+
203
+ def _convert_records_to_bars(
204
+ self, records: list[TimestampedDict], cut_time_ns: int, timeframe_ns: int
205
+ ) -> list[Bar]:
206
+ """
207
+ Convert records to bars and we need to cut last bar up to the cut_time_ns
208
+ """
209
+ bars = []
210
+
211
+ _data_tf = infer_series_frequency([r.time for r in records[:50]])
212
+ timeframe_ns = _data_tf.item()
213
+
214
+ if records is not None:
215
+ for r in records:
216
+ # _b_ts_0 = np.datetime64(r.time, "ns").item()
217
+ _b_ts_0 = r.time
218
+ _b_ts_1 = _b_ts_0 + timeframe_ns - self._open_close_time_indent_ns
219
+
220
+ if _b_ts_0 <= cut_time_ns and cut_time_ns < _b_ts_1:
221
+ break
222
+
223
+ bars.append(
224
+ Bar(
225
+ _b_ts_0, r.data["open"], r.data["high"], r.data["low"], r.data["close"], r.data.get("volume", 0)
226
+ )
227
+ )
228
+
229
+ return bars
230
+
231
+ def _run_generated_signals(self, instrument: Instrument, data_type: str, data: Any, is_hist) -> bool:
232
+ if is_hist:
233
+ raise ValueError("Historical data is not supported for pre-generated signals !")
234
+
235
+ t = data.time # type: ignore
236
+ self.time_provider.set_time(np.datetime64(t, "ns"))
237
+
238
+ q = self._account.emulate_quote_from_data(instrument, np.datetime64(t, "ns"), data)
239
+ self._last_quotes[instrument] = q
240
+ cc = self.channel
241
+
242
+ # - we need to send quotes for invoking portfolio logging etc
243
+ cc.send((instrument, data_type, data, is_hist))
244
+ sigs = self._to_process[instrument]
245
+ _current_time = self.time_provider.time()
246
+ while sigs and sigs[0][0].as_unit("ns").asm8 <= _current_time:
247
+ cc.send((instrument, "event", {"order": sigs[0][1]}, is_hist))
248
+ sigs.pop(0)
249
+
250
+ return cc.control.is_set()
251
+
252
+ def _run_as_strategy(self, instrument: Instrument, data_type: str, data: Any, is_hist: bool) -> bool:
253
+ t = data.time # type: ignore
254
+ self.time_provider.set_time(np.datetime64(t, "ns"))
255
+
256
+ q = self._account.emulate_quote_from_data(instrument, np.datetime64(t, "ns"), data)
257
+ cc = self.channel
258
+
259
+ if not is_hist and q is not None:
260
+ self._last_quotes[instrument] = q
261
+
262
+ # we have to schedule possible crons before sending the data event itself
263
+ if self._scheduler.check_and_run_tasks():
264
+ # - push nothing - it will force to process last event
265
+ cc.send((None, "service_time", None, False))
266
+
267
+ cc.send((instrument, data_type, data, is_hist))
268
+
269
+ return cc.control.is_set()
270
+
271
+ def exchange(self) -> str:
272
+ return self._exchange_id.upper()