Qubx 0.5.0__cp311-cp311-manylinux_2_35_x86_64.whl → 0.5.3__cp311-cp311-manylinux_2_35_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of Qubx might be problematic.

Files changed (79)
  1. qubx/__init__.py +12 -8
  2. qubx/_nb_magic.py +24 -23
  3. qubx/backtester/__init__.py +2 -1
  4. qubx/backtester/account.py +14 -4
  5. qubx/backtester/broker.py +5 -0
  6. qubx/backtester/data.py +61 -38
  7. qubx/backtester/management.py +119 -0
  8. qubx/backtester/ome.py +12 -18
  9. qubx/backtester/optimization.py +23 -16
  10. qubx/backtester/simulated_data.py +126 -85
  11. qubx/backtester/simulator.py +137 -127
  12. qubx/backtester/utils.py +105 -16
  13. qubx/cli/commands.py +67 -0
  14. qubx/connectors/ccxt/broker.py +10 -4
  15. qubx/connectors/ccxt/customizations.py +1 -3
  16. qubx/connectors/ccxt/data.py +11 -0
  17. qubx/core/account.py +6 -4
  18. qubx/core/basics.py +55 -124
  19. qubx/core/context.py +31 -20
  20. qubx/core/exceptions.py +2 -2
  21. qubx/core/helpers.py +43 -20
  22. qubx/core/interfaces.py +77 -10
  23. qubx/core/loggers.py +2 -0
  24. qubx/core/lookups.py +98 -17
  25. qubx/core/metrics.py +283 -27
  26. qubx/core/mixins/market.py +11 -4
  27. qubx/core/mixins/processing.py +58 -68
  28. qubx/core/mixins/subscription.py +21 -18
  29. qubx/core/mixins/trading.py +6 -2
  30. qubx/core/mixins/universe.py +2 -5
  31. qubx/core/series.cpython-311-x86_64-linux-gnu.so +0 -0
  32. qubx/core/series.pyi +1 -0
  33. qubx/core/series.pyx +13 -0
  34. qubx/core/utils.cpython-311-x86_64-linux-gnu.so +0 -0
  35. qubx/data/helpers.py +1 -1
  36. qubx/data/readers.py +8 -4
  37. qubx/gathering/simplest.py +5 -6
  38. qubx/math/stats.py +29 -6
  39. qubx/pandaz/ta.py +6 -9
  40. qubx/pandaz/utils.py +3 -0
  41. qubx/resources/instruments/symbols-binance.cm.json +1 -0
  42. qubx/resources/instruments/symbols-binance.json +1 -0
  43. qubx/resources/instruments/symbols-binance.um.json +1 -0
  44. qubx/resources/instruments/symbols-bitfinex.f.json +1 -0
  45. qubx/resources/instruments/symbols-bitfinex.json +1 -0
  46. qubx/resources/instruments/symbols-kraken.f.json +1 -0
  47. qubx/resources/instruments/symbols-kraken.json +1 -0
  48. qubx/ta/indicators.cpython-311-x86_64-linux-gnu.so +0 -0
  49. qubx/trackers/abvanced.py +236 -0
  50. qubx/trackers/composite.py +6 -6
  51. qubx/trackers/rebalancers.py +13 -27
  52. qubx/trackers/riskctrl.py +135 -88
  53. qubx/trackers/sizers.py +20 -33
  54. qubx/utils/__init__.py +5 -4
  55. qubx/utils/charting/lookinglass.py +36 -75
  56. qubx/utils/charting/mpl_helpers.py +26 -12
  57. qubx/utils/marketdata/ccxt.py +3 -1
  58. qubx/utils/misc.py +85 -15
  59. qubx/utils/orderbook.py +9 -9
  60. qubx/{plotting → utils/plotting}/dashboard.py +1 -2
  61. qubx/utils/runner/__init__.py +1 -0
  62. qubx/utils/runner/_jupyter_runner.pyt +60 -0
  63. qubx/utils/runner/accounts.py +88 -0
  64. qubx/utils/runner/configs.py +63 -0
  65. qubx/utils/runner/runner.py +421 -0
  66. qubx/utils/time.py +15 -11
  67. {qubx-0.5.0.dist-info → qubx-0.5.3.dist-info}/METADATA +4 -1
  68. qubx-0.5.3.dist-info/RECORD +100 -0
  69. qubx-0.5.3.dist-info/entry_points.txt +3 -0
  70. qubx/utils/helpers.py +0 -14
  71. qubx/utils/runner.py +0 -485
  72. qubx-0.5.0.dist-info/RECORD +0 -86
  73. /qubx/{plotting → cli}/__init__.py +0 -0
  74. /qubx/{plotting/renderers → utils/plotting}/__init__.py +0 -0
  75. /qubx/{plotting → utils/plotting}/data.py +0 -0
  76. /qubx/{plotting → utils/plotting}/interfaces.py +0 -0
  77. /qubx/{connectors/ccxt/ccxt_connector.py → utils/plotting/renderers/__init__.py} +0 -0
  78. /qubx/{plotting → utils/plotting}/renderers/plotly.py +0 -0
  79. {qubx-0.5.0.dist-info → qubx-0.5.3.dist-info}/WHEEL +0 -0
qubx/__init__.py CHANGED
@@ -1,10 +1,13 @@
+ import os
+ import sys
  from typing import Callable
- from qubx.utils import set_mpl_theme, runtime_env
- from qubx.utils.misc import install_pyx_recompiler_for_dev

+ import stackprinter
  from loguru import logger
- import os, sys, stackprinter
+
  from qubx.core.lookups import FeesLookup, GlobalLookup, InstrumentsLookup
+ from qubx.utils import runtime_env, set_mpl_theme
+ from qubx.utils.misc import install_pyx_recompiler_for_dev

  # - TODO: import some main methods from packages

@@ -34,7 +37,7 @@ def formatter(record):
  class QubxLogConfig:
  @staticmethod
  def get_log_level():
- return os.getenv("QUBX_LOG_LEVEL", "DEBUG")
+ return os.getenv("QUBX_LOG_LEVEL", "WARNING")

  @staticmethod
  def set_log_level(level: str):
@@ -66,8 +69,8 @@ lookup = GlobalLookup(InstrumentsLookup(), FeesLookup())

  # registering magic for jupyter notebook
  if runtime_env() in ["notebook", "shell"]:
- from IPython.core.magic import Magics, magics_class, line_magic, line_cell_magic
  from IPython.core.getipython import get_ipython
+ from IPython.core.magic import Magics, line_cell_magic, line_magic, magics_class

  @magics_class
  class QubxMagics(Magics):
@@ -112,7 +115,7 @@ if runtime_env() in ["notebook", "shell"]:
  # - temporary workaround for vscode - dark theme not applying to ipywidgets in notebook
  # - see https://github.com/microsoft/vscode-jupyter/issues/7161
  if runtime_env() == "notebook":
- _vscode_clr_trick = """from IPython.display import display, HTML; display(HTML("<style> .cell-output-ipywidget-background { background-color: transparent !important; } :root { --jp-widgets-color: var(--vscode-editor-foreground); --jp-widgets-font-size: var(--vscode-editor-font-size); } </style>"))"""
+ _vscode_clr_trick = """from IPython.display import display, HTML; display(HTML("<style> .cell-output-ipywidget-background { background-color: transparent !important; } :root { --jp-widgets-color: var(--vscode-editor-foreground); --jp-widgets-font-size: var(--vscode-editor-font-size); } .widget-hprogress, .jupyter-widget-hprogress { height: 16px; align-self: center; kj} table.dataframe, .dataframe td, .dataframe tr { border: 1px solid #55554a85; border-collapse: collapse; color: #859548d9 !important; } .dataframe th { border: 1px solid #55554a85; border-collapse: collapse; background-color: #010101 !important; color: #177 !important; } </style>"))"""
  exec(_vscode_clr_trick, self.shell.user_ns)

  elif "light" in line.lower():
@@ -136,7 +139,8 @@ if runtime_env() in ["notebook", "shell"]:

  """
  import multiprocessing as m
- import time, re
+ import re
+ import time

  # create ext args
  name = None
@@ -151,7 +155,7 @@ if runtime_env() in ["notebook", "shell"]:
  return

  ipy = get_ipython()
- for a in [x for x in re.split("[\ ,;]", line.strip()) if x]:
+ for a in [x for x in re.split(r"[\ ,;]", line.strip()) if x]:
  ipy.push({a: self._get_manager().Value(None, None)})

  # code to run
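
Note: this release changes the default returned by QubxLogConfig.get_log_level from DEBUG to WARNING. A minimal sketch of restoring the previous verbosity, assuming only the QUBX_LOG_LEVEL variable and the set_log_level helper visible in the hunk above:

import os

# option 1: set the environment variable before qubx is imported
os.environ["QUBX_LOG_LEVEL"] = "DEBUG"

# option 2: switch at runtime via the static helper defined in qubx/__init__.py
from qubx import QubxLogConfig

QubxLogConfig.set_log_level("DEBUG")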
qubx/_nb_magic.py CHANGED
@@ -32,60 +32,61 @@ if runtime_env() in ["notebook", "shell"]:
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # - - - - Common stuff - - - -
+ from datetime import time, timedelta
+
  import numpy as np
  import pandas as pd
- from datetime import time, timedelta
+
+ # - - - - Charting stuff - - - -
+ from matplotlib import pyplot as plt
  from tqdm.auto import tqdm

  # - - - - TA stuff and indicators - - - -
  import qubx.pandaz.ta as pta
  import qubx.ta.indicators as ta
+ from qubx.backtester.optimization import variate
+
+ # - - - - Simulator stuff - - - -
+ from qubx.backtester.simulator import simulate

  # - - - - Portfolio analysis - - - -
  from qubx.core.metrics import (
- tearsheet,
  chart_signals,
- get_symbol_pnls,
- get_equity,
- portfolio_metrics,
- pnl,
  drop_symbols,
+ get_symbol_pnls,
  pick_symbols,
+ pnl,
+ portfolio_metrics,
+ tearsheet,
  )
+ from qubx.data.helpers import loader

  # - - - - Data reading - - - -
  from qubx.data.readers import (
- CsvStorageDataReader,
- MultiQdbConnector,
- QuestDBConnector,
  AsOhlcvSeries,
  AsPandasFrame,
  AsQuotes,
  AsTimestampedRecords,
+ CsvStorageDataReader,
+ MultiQdbConnector,
+ QuestDBConnector,
  RestoreTicksFromOHLC,
  )
- from qubx.data.helpers import loader
-
- # - - - - Simulator stuff - - - -
- from qubx.backtester.simulator import simulate
- from qubx.backtester.optimization import variate
-
- # - - - - Charting stuff - - - -
- from matplotlib import pyplot as plt
- from qubx.utils.charting.mpl_helpers import fig, subplot, sbp, plot_trends, ohlc_plot
- from qubx.utils.charting.lookinglass import LookingGlass

  # - - - - Utils - - - -
  from qubx.pandaz.utils import (
- scols,
- srows,
- ohlc_resample,
  continuous_periods,
- generate_equal_date_ranges,
  drop_duplicated_indexes,
+ generate_equal_date_ranges,
+ ohlc_resample,
  retain_columns_and_join,
  rolling_forward_test_split,
+ scols,
+ srows,
  )
+ from qubx.utils.charting.lookinglass import LookingGlass
+ from qubx.utils.charting.mpl_helpers import fig, ohlc_plot, plot_trends, sbp, subplot
+ from qubx.utils.misc import this_project_root

  # - setup short numpy output format
  np_fmt_short()
qubx/backtester/__init__.py CHANGED
@@ -1,2 +1,3 @@
- from .simulator import simulate
+ from .management import BacktestsResultsManager
  from .optimization import variate
+ from .simulator import simulate
qubx/backtester/account.py CHANGED
@@ -7,11 +7,12 @@ from qubx.core.basics import (
  Instrument,
  Order,
  Position,
+ Timestamped,
  TransactionCostsCalculator,
  dt_64,
  )
  from qubx.core.interfaces import ITimeProvider
- from qubx.core.series import Bar, Quote, Trade
+ from qubx.core.series import Bar, OrderBook, Quote, Trade


  class SimulatedAccountProcessor(BasicAccountProcessor):
@@ -45,7 +46,7 @@ class SimulatedAccountProcessor(BasicAccountProcessor):
  self._half_tick_size = {}
  self._fill_stop_order_at_price = accurate_stop_orders_execution
  if self._fill_stop_order_at_price:
- logger.info(f"{self.__class__.__name__} emulates stop orders executions at exact price")
+ logger.info(f"[<y>{self.__class__.__name__}</y>] :: emulates stop orders executions at exact price")

  def get_orders(self, instrument: Instrument | None = None) -> list[Order]:
  if instrument is not None:
@@ -101,23 +102,32 @@ class SimulatedAccountProcessor(BasicAccountProcessor):
  return super().process_order(order, update_locked_value)

  def emulate_quote_from_data(
- self, instrument: Instrument, timestamp: dt_64, data: float | Trade | Bar
+ self, instrument: Instrument, timestamp: dt_64, data: float | Timestamped
  ) -> Quote | None:
  if instrument not in self._half_tick_size:
  _ = self.get_position(instrument)

- _ts2 = self._half_tick_size[instrument]
  if isinstance(data, Quote):
  return data
+
  elif isinstance(data, Trade):
+ _ts2 = self._half_tick_size[instrument]
  if data.taker: # type: ignore
  return Quote(timestamp, data.price - _ts2 * 2, data.price, 0, 0) # type: ignore
  else:
  return Quote(timestamp, data.price, data.price + _ts2 * 2, 0, 0) # type: ignore
+
  elif isinstance(data, Bar):
+ _ts2 = self._half_tick_size[instrument]
  return Quote(timestamp, data.close - _ts2, data.close + _ts2, 0, 0) # type: ignore
+
+ elif isinstance(data, OrderBook):
+ return data.to_quote()
+
  elif isinstance(data, float):
+ _ts2 = self._half_tick_size[instrument]
  return Quote(timestamp, data - _ts2, data + _ts2, 0, 0)
+
  else:
  return None

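For reference, emulate_quote_from_data above synthesizes a Quote around each incoming event using half of the instrument's tick size. A minimal standalone sketch of the same dispatch, with a hypothetical half_tick value standing in for self._half_tick_size[instrument]:

import numpy as np

from qubx.core.series import Bar, OrderBook, Quote, Trade

half_tick = 0.05  # hypothetical: half of the instrument's tick size


def emulate_quote(timestamp: np.datetime64, data) -> Quote | None:
    if isinstance(data, Quote):
        return data  # quotes pass through unchanged
    if isinstance(data, Trade):
        # a taker trade hits the ask, so the trade price becomes the ask side
        if data.taker:
            return Quote(timestamp, data.price - half_tick * 2, data.price, 0, 0)
        return Quote(timestamp, data.price, data.price + half_tick * 2, 0, 0)
    if isinstance(data, Bar):
        # bars are centered on the close price
        return Quote(timestamp, data.close - half_tick, data.close + half_tick, 0, 0)
    if isinstance(data, OrderBook):
        return data.to_quote()  # order books already carry their best bid/ask
    if isinstance(data, float):
        # plain floats are treated as a mid price
        return Quote(timestamp, data - half_tick, data + half_tick, 0, 0)
    return None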
qubx/backtester/broker.py CHANGED
@@ -18,9 +18,11 @@ class SimulatedBroker(IBroker):
  self,
  channel: CtrlChannel,
  account: SimulatedAccountProcessor,
+ exchange_id: str = "simulated",
  ) -> None:
  self.channel = channel
  self._account = account
+ self._exchange_id = exchange_id

  @property
  def is_simulated_trading(self) -> bool:
@@ -80,3 +82,6 @@ class SimulatedBroker(IBroker):
  self.channel.send((instrument, "order", report.order, False))
  if report.exec is not None:
  self.channel.send((instrument, "deals", [report.exec], False))
+
+ def exchange(self) -> str:
+ return self._exchange_id.upper()
qubx/backtester/data.py CHANGED
@@ -6,13 +6,14 @@ import pandas as pd
  from tqdm.auto import tqdm

  from qubx import logger
- from qubx.backtester.simulated_data import EventBatcher, IterableSimulationData
+ from qubx.backtester.simulated_data import IterableSimulationData
  from qubx.core.basics import (
  CtrlChannel,
  DataType,
  Instrument,
  TimestampedDict,
  )
+ from qubx.core.exceptions import SimulationError
  from qubx.core.helpers import BasicScheduler
  from qubx.core.interfaces import IDataProvider
  from qubx.core.series import Bar, Quote, time_as_nsec
@@ -74,23 +75,22 @@ class SimulatedDataProvider(IDataProvider):
  start: str | pd.Timestamp,
  end: str | pd.Timestamp,
  silent: bool = False,
- enable_event_batching: bool = True,
  ) -> None:
  logger.info(f"{self.__class__.__name__} ::: Simulation started at {start} :::")

  if self._pregenerated_signals:
  self._prepare_generated_signals(start, end)
- _run = self._run_generated_signals
- enable_event_batching = False # no batching for pre-generated signals
+ _run = self._process_generated_signals
  else:
- _run = self._run_as_strategy
+ _run = self._process_strategy

- qiter = EventBatcher(self._data_source.create_iterable(start, end), passthrough=not enable_event_batching)
  start, end = pd.Timestamp(start), pd.Timestamp(end)
  total_duration = end - start
  update_delta = total_duration / 100
  prev_dt = pd.Timestamp(start)

+ # - date iteration
+ qiter = self._data_source.create_iterable(start, end)
  if silent:
  for instrument, data_type, event, is_hist in qiter:
  if not _run(instrument, data_type, event, is_hist):
@@ -114,7 +114,9 @@
  logger.info(f"{self.__class__.__name__} ::: Simulation finished at {end} :::")

  def set_generated_signals(self, signals: pd.Series | pd.DataFrame):
- logger.debug(f"Using pre-generated signals:\n {str(signals.count()).strip('ndtype: int64')}")
+ logger.debug(
+ f"[<y>{self.__class__.__name__}</y>] :: Using pre-generated signals:\n {str(signals.count()).strip('ndtype: int64')}"
+ )
  # - sanity check
  signals.index = pd.DatetimeIndex(signals.index)

@@ -132,11 +134,29 @@
  return True

  def subscribe(self, subscription_type: str, instruments: set[Instrument], reset: bool) -> None:
- logger.debug(f" | subscribe: {subscription_type} -> {instruments}")
+ _new_instr = [i for i in instruments if not self.has_subscription(i, subscription_type)]
  self._data_source.add_instruments_for_subscription(subscription_type, list(instruments))

+ # - provide historical data and last quote for subscribed instruments
+ for i in _new_instr:
+ h_data = self._data_source.peek_historical_data(i, subscription_type)
+ if h_data:
+ # _s_type = DataType.from_str(subscription_type)[0]
+ last_update = h_data[-1]
+ if last_quote := self._account.emulate_quote_from_data(i, last_update.time, last_update): # type: ignore
+ # - send historical data to the channel
+ self.channel.send((i, subscription_type, h_data, True))
+
+ # - set last quote
+ self._last_quotes[i] = last_quote
+
+ # - also need to pass this quote to OME !
+ self._account._process_new_quote(i, last_quote)
+
+ logger.debug(f" | subscribed {subscription_type} {i} -> {last_quote}")
+
  def unsubscribe(self, subscription_type: str, instruments: set[Instrument] | Instrument | None = None) -> None:
- logger.debug(f" | unsubscribe: {subscription_type} -> {instruments}")
+ # logger.debug(f" | unsubscribe: {subscription_type} -> {instruments}")
  if instruments is not None:
  self._data_source.remove_instruments_from_subscription(
  subscription_type, [instruments] if isinstance(instruments, Instrument) else list(instruments)
@@ -147,12 +167,12 @@

  def get_subscriptions(self, instrument: Instrument) -> list[str]:
  _s_lst = self._data_source.get_subscriptions_for_instrument(instrument)
- logger.debug(f" | get_subscriptions {instrument} -> {_s_lst}")
+ # logger.debug(f" | get_subscriptions {instrument} -> {_s_lst}")
  return _s_lst

  def get_subscribed_instruments(self, subscription_type: str | None = None) -> list[Instrument]:
  _in_lst = self._data_source.get_instruments_for_subscription(subscription_type or DataType.ALL)
- logger.debug(f" | get_subscribed_instruments {subscription_type} -> {_in_lst}")
+ # logger.debug(f" | get_subscribed_instruments {subscription_type} -> {_in_lst}")
  return _in_lst

  def warmup(self, configs: dict[tuple[str, Instrument], str]) -> None:
@@ -190,15 +210,16 @@
  if s == i.symbol or s == str(i) or s == f"{i.exchange}:{i.symbol}" or str(s) == str(i):
  _start, _end = pd.Timestamp(start), pd.Timestamp(end)
  _start_idx, _end_idx = v.index.get_indexer([_start, _end], method="ffill")
- sel = v.iloc[max(_start_idx, 0) : _end_idx + 1] # sel = v[pd.Timestamp(start) : pd.Timestamp(end)]
+ sel = v.iloc[max(_start_idx, 0) : _end_idx + 1]

+ # TODO: check if data has exec_price - it means we have deals
  self._to_process[i] = list(zip(sel.index, sel.values))
  _s_inst = i
  break

  if _s_inst is None:
  logger.error(f"Can't find instrument for pregenerated signals with id '{s}'")
- raise ValueError(f"Can't find instrument for pregenerated signals with id '{s}'")
+ raise SimulationError(f"Can't find instrument for pregenerated signals with id '{s}'")

  def _convert_records_to_bars(
  self, records: list[TimestampedDict], cut_time_ns: int, timeframe_ns: int
@@ -228,42 +249,44 @@

  return bars

- def _run_generated_signals(self, instrument: Instrument, data_type: str, data: Any, is_hist) -> bool:
- if is_hist:
- raise ValueError("Historical data is not supported for pre-generated signals !")
+ def _process_generated_signals(self, instrument: Instrument, data_type: str, data: Any, is_hist: bool) -> bool:
+ cc = self.channel
+ t = np.datetime64(data.time, "ns")

- t = data.time # type: ignore
- self.time_provider.set_time(np.datetime64(t, "ns"))
+ if not is_hist:
+ # - signals for this instrument
+ sigs = self._to_process[instrument]

- q = self._account.emulate_quote_from_data(instrument, np.datetime64(t, "ns"), data)
- self._last_quotes[instrument] = q
- cc = self.channel
+ while sigs and t >= (_signal_time := sigs[0][0].as_unit("ns").asm8):
+ self.time_provider.set_time(_signal_time)
+ cc.send((instrument, "event", {"order": sigs[0][1]}, False))
+ sigs.pop(0)
+
+ if q := self._account.emulate_quote_from_data(instrument, t, data):
+ self._last_quotes[instrument] = q

- # - we need to send quotes for invoking portfolio logging etc
+ self.time_provider.set_time(t)
  cc.send((instrument, data_type, data, is_hist))
- sigs = self._to_process[instrument]
- _current_time = self.time_provider.time()
- while sigs and sigs[0][0].as_unit("ns").asm8 <= _current_time:
- cc.send((instrument, "event", {"order": sigs[0][1]}, is_hist))
- sigs.pop(0)

  return cc.control.is_set()

- def _run_as_strategy(self, instrument: Instrument, data_type: str, data: Any, is_hist: bool) -> bool:
- t = data.time # type: ignore
- self.time_provider.set_time(np.datetime64(t, "ns"))
-
- q = self._account.emulate_quote_from_data(instrument, np.datetime64(t, "ns"), data)
+ def _process_strategy(self, instrument: Instrument, data_type: str, data: Any, is_hist: bool) -> bool:
  cc = self.channel
+ t = np.datetime64(data.time, "ns")

- if not is_hist and q is not None:
- self._last_quotes[instrument] = q
+ if not is_hist:
+ if t >= (_next_exp_time := self._scheduler.next_expected_event_time()):
+ # - we use exact event's time
+ self.time_provider.set_time(_next_exp_time)
+ self._scheduler.check_and_run_tasks()

- # we have to schedule possible crons before sending the data event itself
- if self._scheduler.check_and_run_tasks():
- # - push nothing - it will force to process last event
- cc.send((None, "service_time", None, False))
+ if q := self._account.emulate_quote_from_data(instrument, t, data):
+ self._last_quotes[instrument] = q

+ self.time_provider.set_time(t)
  cc.send((instrument, data_type, data, is_hist))

  return cc.control.is_set()
+
+ def exchange(self) -> str:
+ return self._exchange_id.upper()
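
The reworked _process_strategy above runs due scheduled tasks at their exact expected time before the triggering data event is forwarded. A minimal sketch of that gating idea, using only the calls visible in the hunk (quote emulation omitted, surrounding provider wiring assumed):

import numpy as np


def process_event(time_provider, scheduler, channel, instrument, data_type, data, is_hist: bool) -> bool:
    t = np.datetime64(data.time, "ns")  # event time in nanoseconds

    if not is_hist:
        # if a scheduled task is due at or before this event, fire it first at its exact time
        next_time = scheduler.next_expected_event_time()
        if t >= next_time:
            time_provider.set_time(next_time)
            scheduler.check_and_run_tasks()

    # then advance the simulated clock to the event time and forward the event
    time_provider.set_time(t)
    channel.send((instrument, data_type, data, is_hist))
    return channel.control.is_set()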
qubx/backtester/management.py ADDED
@@ -0,0 +1,119 @@
+ import re
+ import zipfile
+ from collections import defaultdict
+ from pathlib import Path
+
+ import pandas as pd
+ import yaml
+
+ from qubx.core.metrics import TradingSessionResult, _pfl_metrics_prepare
+ from qubx.utils.misc import blue, cyan, green, magenta, red, yellow
+
+
+ class BacktestsResultsManager:
+ """
+ Manager class for handling backtesting results.
+
+ This class provides functionality to load, list and manage backtesting results stored in zip files.
+ Each result contains trading session information and metrics that can be loaded and analyzed.
+
+ Parameters
+ ----------
+ path : str
+ Path to directory containing backtesting result zip files
+
+ Methods
+ -------
+ reload()
+ Reloads all backtesting results from the specified path
+ list(regex="", with_metrics=False)
+ Lists all backtesting results, optionally filtered by regex and including metrics
+ load(name)
+ Loads a specific backtesting result by name
+ """
+
+ def __init__(self, path: str):
+ self.path = path
+ self.reload()
+
+ def reload(self) -> "BacktestsResultsManager":
+ self.results = {}
+ names = defaultdict(lambda: 0)
+ for p in Path(self.path).glob("**/*.zip"):
+ with zipfile.ZipFile(p, "r") as zip_ref:
+ try:
+ info = yaml.safe_load(zip_ref.read("info.yml"))
+ info["path"] = str(p)
+ n = info.get("name", "")
+ _new_name = n if names[n] == 0 else f"{n}.{names[n]}"
+ names[n] += 1
+ info["name"] = _new_name
+ self.results[_new_name] = info
+ except Exception:
+ pass
+
+ # - reindex
+ _idx = 1
+ for n in sorted(self.results.keys()):
+ self.results[n]["idx"] = _idx
+ _idx += 1
+
+ return self
+
+ def load(self, name: str | int | list[int] | list[str]) -> TradingSessionResult | list[TradingSessionResult]:
+ for info in self.results.values():
+ match name:
+ case int():
+ if info.get("idx", -1) == name:
+ return TradingSessionResult.from_file(info["path"])
+ case str():
+ if info.get("name", "") == name:
+ return TradingSessionResult.from_file(info["path"])
+ case list():
+ return [self.load(i) for i in name]
+
+ raise ValueError(f"No result found for {name}")
+
+ def list(self, regex: str = "", with_metrics=False, params=False):
+ for n in sorted(self.results.keys()):
+ info = self.results[n]
+ s_cls = info.get("strategy_class", "").split(".")[-1]
+
+ if regex:
+ if not re.match(regex, n, re.IGNORECASE):
+ if not re.match(regex, s_cls, re.IGNORECASE):
+ continue
+
+ name = info.get("name", "")
+ smbs = ", ".join(info.get("symbols", list()))
+ start = pd.Timestamp(info.get("start", "")).round("1s")
+ stop = pd.Timestamp(info.get("stop", "")).round("1s")
+ dscr = info.get("description", "")
+ _s = f"{yellow(str(info.get('idx')))} - {red(name)} ::: {magenta(pd.Timestamp(info.get('creation_time', '')).round('1s'))} by {cyan(info.get('author', ''))}"
+ if dscr:
+ _s += f"\n\t{magenta(dscr)}"
+ _s += f"\n\tstrategy: {green(s_cls)}"
+ _s += f"\n\tinterval: {blue(start)} - {blue(stop)}"
+ _s += f"\n\tcapital: {blue(info.get('capital', ''))} {info.get('base_currency', '')} ({info.get('commissions', '')})"
+ _s += f"\n\tinstruments: {blue(smbs)}"
+ if params:
+ formats = ["{" + f":<{i}" + "}" for i in [50]]
+ _p = pd.DataFrame.from_dict(info.get("parameters", {}), orient="index")
+ for i in _p.to_string(
+ max_colwidth=30,
+ header=False,
+ formatters=[(lambda x: cyan(fmt.format(str(x)))) for fmt in formats],
+ justify="left",
+ ).split("\n"):
+ _s += f"\n\t | {yellow(i)}"
+ print(_s)
+
+ if with_metrics:
+ r = TradingSessionResult.from_file(info["path"])
+ metric = _pfl_metrics_prepare(r, True, 365)
+ _m_repr = str(metric[0][["Gain", "Cagr", "Sharpe", "Max dd pct", "Qr", "Fees"]].round(3)).split("\n")[
+ :-1
+ ]
+ for i in _m_repr:
+ print("\t " + cyan(i))
+ print()
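
The new BacktestsResultsManager is also re-exported from qubx.backtester (see the __init__.py hunk above). A usage sketch based on the docstring, assuming a hypothetical directory of result archives, each containing an info.yml:

from qubx.backtester import BacktestsResultsManager

# "backtests/" is a hypothetical folder with the zip files produced by simulation runs
mgr = BacktestsResultsManager("backtests/")

# print results whose name or strategy class matches a regex, together with key metrics
mgr.list(regex="momentum", with_metrics=True)

# load a single result by its listed index or name, or several results at once
session = mgr.load(1)
sessions = mgr.load([1, 2])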
qubx/backtester/ome.py CHANGED
@@ -1,4 +1,3 @@
- from typing import List, Dict
  from dataclasses import dataclass
  from operator import neg

@@ -7,23 +6,21 @@ from sortedcontainers import SortedDict

  from qubx import logger
  from qubx.core.basics import (
+ OPTION_FILL_AT_SIGNAL_PRICE,
  Deal,
  Instrument,
+ ITimeProvider,
  Order,
  OrderSide,
  OrderType,
- Position,
- Signal,
  TransactionCostsCalculator,
  dt_64,
- ITimeProvider,
- OPTION_FILL_AT_SIGNAL_PRICE,
  )
- from qubx.core.series import Quote, Trade
  from qubx.core.exceptions import (
  ExchangeError,
  InvalidOrder,
  )
+ from qubx.core.series import Quote, Trade


  @dataclass
@@ -36,10 +33,10 @@ class OmeReport:

  class OrdersManagementEngine:
  instrument: Instrument
  time_service: ITimeProvider
- active_orders: Dict[str, Order]
- stop_orders: Dict[str, Order]
- asks: SortedDict[float, List[str]]
- bids: SortedDict[float, List[str]]
+ active_orders: dict[str, Order]
+ stop_orders: dict[str, Order]
+ asks: SortedDict[float, list[str]]
+ bids: SortedDict[float, list[str]]
  bbo: Quote | None # current best bid/ask order book (simplest impl)
  __order_id: int
  __trade_id: int
@@ -78,10 +75,10 @@ class OrdersManagementEngine:
  def get_quote(self) -> Quote:
  return self.bbo

- def get_open_orders(self) -> List[Order]:
+ def get_open_orders(self) -> list[Order]:
  return list(self.active_orders.values()) + list(self.stop_orders.values())

- def update_bbo(self, quote: Quote) -> List[OmeReport]:
+ def update_bbo(self, quote: Quote) -> list[OmeReport]:
  timestamp = self.time_service.time()
  rep = []

@@ -127,11 +124,8 @@ class OrdersManagementEngine:
  time_in_force: str = "gtc",
  **options,
  ) -> OmeReport:
-
  if self.bbo is None:
- raise ExchangeError(
- f"Simulator is not ready for order management - no any quote for {self.instrument.symbol}"
- )
+ raise ExchangeError(f"Simulator is not ready for order management - no quote for {self.instrument.symbol}")

  # - validate order parameters
  self._validate_order(order_side, order_type, amount, price, time_in_force)
@@ -154,7 +148,7 @@
  return self._process_order(timestamp, order)

  def _dbg(self, message, **kwargs) -> None:
- logger.debug(f"[OMS] {self.instrument.symbol} - {message}", **kwargs)
+ logger.debug(f" [<y>OME</y>(<g>{self.instrument}</g>)] :: {message}", **kwargs)

  def _process_order(self, timestamp: dt_64, order: Order) -> OmeReport:
  if order.status in ["CLOSED", "CANCELED"]:
@@ -182,7 +176,7 @@
  self.stop_orders[order.id] = order

  elif order.type == "STOP_LIMIT":
- # TODO: check trigger conditions in options etc
+ # TODO: (OME) check trigger conditions in options etc
  raise NotImplementedError("'STOP_LIMIT' order is not supported in Qubx simulator yet !")

  # - if order must be "executed" immediately