Qubx 0.6.49__tar.gz → 0.6.52__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of Qubx might be problematic.

Files changed (166)
  1. {qubx-0.6.49 → qubx-0.6.52}/PKG-INFO +1 -1
  2. {qubx-0.6.49 → qubx-0.6.52}/pyproject.toml +1 -1
  3. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/data.py +22 -17
  4. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/simulator.py +103 -60
  5. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/utils.py +65 -23
  6. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/loggers.py +4 -6
  7. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/mixins/processing.py +53 -3
  8. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/mixins/universe.py +4 -3
  9. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/data/composite.py +11 -2
  10. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/data/readers.py +5 -0
  11. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/emitters/base.py +1 -1
  12. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/loggers/csv.py +2 -2
  13. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/loggers/factory.py +5 -6
  14. qubx-0.6.52/src/qubx/loggers/inmemory.py +74 -0
  15. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/loggers/mongo.py +22 -27
  16. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/notifications/slack.py +14 -8
  17. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restarts/state_resolvers.py +2 -2
  18. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restorers/signal.py +2 -23
  19. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/runner/configs.py +1 -0
  20. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/runner/factory.py +12 -5
  21. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/runner/runner.py +34 -6
  22. qubx-0.6.49/src/qubx/loggers/inmemory.py +0 -68
  23. {qubx-0.6.49 → qubx-0.6.52}/LICENSE +0 -0
  24. {qubx-0.6.49 → qubx-0.6.52}/README.md +0 -0
  25. {qubx-0.6.49 → qubx-0.6.52}/build.py +0 -0
  26. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/__init__.py +0 -0
  27. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/_nb_magic.py +0 -0
  28. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/__init__.py +0 -0
  29. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/account.py +0 -0
  30. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/broker.py +0 -0
  31. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/management.py +0 -0
  32. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/ome.py +0 -0
  33. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/optimization.py +0 -0
  34. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/runner.py +0 -0
  35. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/simulated_data.py +0 -0
  36. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/backtester/simulated_exchange.py +0 -0
  37. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/cli/__init__.py +0 -0
  38. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/cli/commands.py +0 -0
  39. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/cli/deploy.py +0 -0
  40. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/cli/misc.py +0 -0
  41. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/cli/release.py +0 -0
  42. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/cli/tui.py +0 -0
  43. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/__init__.py +0 -0
  44. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/account.py +0 -0
  45. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/broker.py +0 -0
  46. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/data.py +0 -0
  47. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/exceptions.py +0 -0
  48. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/exchanges/__init__.py +0 -0
  49. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/exchanges/binance/broker.py +0 -0
  50. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/exchanges/binance/exchange.py +0 -0
  51. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/exchanges/bitfinex/bitfinex.py +0 -0
  52. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/exchanges/bitfinex/bitfinex_account.py +0 -0
  53. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/exchanges/kraken/kraken.py +0 -0
  54. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/factory.py +0 -0
  55. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/reader.py +0 -0
  56. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/ccxt/utils.py +0 -0
  57. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/tardis/data.py +0 -0
  58. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/connectors/tardis/utils.py +0 -0
  59. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/__init__.py +0 -0
  60. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/account.py +0 -0
  61. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/basics.py +0 -0
  62. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/context.py +0 -0
  63. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/deque.py +0 -0
  64. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/errors.py +0 -0
  65. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/exceptions.py +0 -0
  66. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/helpers.py +0 -0
  67. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/initializer.py +0 -0
  68. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/interfaces.py +0 -0
  69. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/lookups.py +0 -0
  70. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/metrics.py +0 -0
  71. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/mixins/__init__.py +0 -0
  72. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/mixins/market.py +0 -0
  73. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/mixins/subscription.py +0 -0
  74. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/mixins/trading.py +0 -0
  75. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/series.pxd +0 -0
  76. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/series.pyi +0 -0
  77. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/series.pyx +0 -0
  78. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/utils.pyi +0 -0
  79. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/core/utils.pyx +0 -0
  80. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/data/__init__.py +0 -0
  81. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/data/helpers.py +0 -0
  82. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/data/hft.py +0 -0
  83. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/data/registry.py +0 -0
  84. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/data/tardis.py +0 -0
  85. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/emitters/__init__.py +0 -0
  86. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/emitters/composite.py +0 -0
  87. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/emitters/csv.py +0 -0
  88. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/emitters/prometheus.py +0 -0
  89. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/emitters/questdb.py +0 -0
  90. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/exporters/__init__.py +0 -0
  91. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/exporters/composite.py +0 -0
  92. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/exporters/formatters/__init__.py +0 -0
  93. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/exporters/formatters/base.py +0 -0
  94. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/exporters/formatters/incremental.py +0 -0
  95. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/exporters/formatters/slack.py +0 -0
  96. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/exporters/redis_streams.py +0 -0
  97. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/exporters/slack.py +0 -0
  98. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/features/__init__.py +0 -0
  99. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/features/core.py +0 -0
  100. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/features/orderbook.py +0 -0
  101. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/features/price.py +0 -0
  102. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/features/trades.py +0 -0
  103. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/features/utils.py +0 -0
  104. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/gathering/simplest.py +0 -0
  105. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/health/__init__.py +0 -0
  106. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/health/base.py +0 -0
  107. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/loggers/__init__.py +1 -1
  108. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/math/__init__.py +0 -0
  109. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/math/stats.py +0 -0
  110. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/notifications/__init__.py +0 -0
  111. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/notifications/composite.py +0 -0
  112. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/notifications/throttler.py +0 -0
  113. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/pandaz/__init__.py +0 -0
  114. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/pandaz/ta.py +0 -0
  115. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/pandaz/utils.py +0 -0
  116. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/resources/_build.py +0 -0
  117. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/resources/instruments/symbols-binance.cm.json +0 -0
  118. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/resources/instruments/symbols-binance.json +0 -0
  119. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/resources/instruments/symbols-binance.um.json +0 -0
  120. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/resources/instruments/symbols-bitfinex.f.json +0 -0
  121. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/resources/instruments/symbols-bitfinex.json +0 -0
  122. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/resources/instruments/symbols-kraken.f.json +0 -0
  123. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/resources/instruments/symbols-kraken.json +0 -0
  124. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restarts/__init__.py +0 -0
  125. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restarts/time_finders.py +0 -0
  126. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restorers/__init__.py +0 -0
  127. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restorers/balance.py +0 -0
  128. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restorers/factory.py +0 -0
  129. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restorers/interfaces.py +0 -0
  130. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restorers/position.py +0 -0
  131. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restorers/state.py +0 -0
  132. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/restorers/utils.py +0 -0
  133. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/ta/__init__.py +0 -0
  134. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/ta/indicators.pxd +0 -0
  135. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/ta/indicators.pyi +0 -0
  136. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/ta/indicators.pyx +0 -0
  137. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/trackers/__init__.py +0 -0
  138. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/trackers/advanced.py +0 -0
  139. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/trackers/composite.py +0 -0
  140. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/trackers/rebalancers.py +0 -0
  141. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/trackers/riskctrl.py +0 -0
  142. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/trackers/sizers.py +0 -0
  143. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/__init__.py +0 -0
  144. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/_pyxreloader.py +0 -0
  145. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/charting/lookinglass.py +0 -0
  146. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/charting/mpl_helpers.py +0 -0
  147. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/collections.py +0 -0
  148. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/marketdata/binance.py +0 -0
  149. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/marketdata/ccxt.py +0 -0
  150. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/marketdata/dukas.py +0 -0
  151. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/misc.py +0 -0
  152. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/ntp.py +0 -0
  153. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/numbers_utils.py +0 -0
  154. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/orderbook.py +0 -0
  155. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/plotting/__init__.py +0 -0
  156. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/plotting/dashboard.py +0 -0
  157. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/plotting/data.py +0 -0
  158. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/plotting/interfaces.py +0 -0
  159. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/plotting/renderers/__init__.py +0 -0
  160. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/plotting/renderers/plotly.py +0 -0
  161. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/questdb.py +0 -0
  162. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/runner/__init__.py +0 -0
  163. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/runner/_jupyter_runner.pyt +0 -0
  164. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/runner/accounts.py +0 -0
  165. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/time.py +0 -0
  166. {qubx-0.6.49 → qubx-0.6.52}/src/qubx/utils/version.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: Qubx
- Version: 0.6.49
+ Version: 0.6.52
  Summary: Qubx - Quantitative Trading Framework
  Author: Dmitry Marienko
  Author-email: dmitry.marienko@xlydian.com
pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

  [tool.poetry]
  name = "Qubx"
- version = "0.6.49"
+ version = "0.6.52"
  description = "Qubx - Quantitative Trading Framework"
  authors = [ "Dmitry Marienko <dmitry.marienko@xlydian.com>", "Yuriy Arabskyy <yuriy.arabskyy@xlydian.com>",]
  readme = "README.md"
src/qubx/backtester/data.py
@@ -139,26 +139,31 @@ class SimulatedDataProvider(IDataProvider):
  bars = []

  # - if no records, return empty list to avoid exception from infer_series_frequency
- if not records:
+ if not records or records is None:
  return bars

- _data_tf = infer_series_frequency([r.time for r in records[:50]])
- timeframe_ns = _data_tf.item()
-
- if records is not None:
- for r in records:
- # _b_ts_0 = np.datetime64(r.time, "ns").item()
- _b_ts_0 = r.time
- _b_ts_1 = _b_ts_0 + timeframe_ns - self._open_close_time_indent_ns
-
- if _b_ts_0 <= cut_time_ns and cut_time_ns < _b_ts_1:
- break
-
- bars.append(
- Bar(
- _b_ts_0, r.data["open"], r.data["high"], r.data["low"], r.data["close"], r.data.get("volume", 0)
- )
+ if len(records) > 1:
+ _data_tf = infer_series_frequency([r.time for r in records[:50]])
+ timeframe_ns = _data_tf.item()
+
+ for r in records:
+ _b_ts_0 = r.time
+ _b_ts_1 = _b_ts_0 + timeframe_ns - self._open_close_time_indent_ns
+
+ if _b_ts_0 <= cut_time_ns and cut_time_ns < _b_ts_1:
+ break
+
+ bars.append(
+ Bar(
+ _b_ts_0,
+ r.data["open"],
+ r.data["high"],
+ r.data["low"],
+ r.data["close"],
+ r.data.get("volume", 0),
+ r.data.get("bought_volume", 0),
  )
+ )

  return bars

src/qubx/backtester/simulator.py
@@ -48,6 +48,7 @@ def simulate(
  portfolio_log_freq: str = "5Min",
  parallel_backend: Literal["loky", "multiprocessing"] = "multiprocessing",
  emission: EmissionConfig | None = None,
+ run_separate_instruments: bool = False,
  ) -> list[TradingSessionResult]:
  """
  Backtest utility for trading strategies or signals using historical data.
@@ -73,6 +74,7 @@ def simulate(
  - portfolio_log_freq (str): Frequency for portfolio logging, default is "5Min".
  - parallel_backend (Literal["loky", "multiprocessing"]): Backend for parallel processing, default is "multiprocessing".
  - emission (EmissionConfig | None): Configuration for metric emitters, default is None.
+ - run_separate_instruments (bool): If True, creates separate simulation setups for each instrument, default is False.

  Returns:
  - list[TradingSessionResult]: A list of TradingSessionResult objects containing the results of each simulation setup.
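In practice, the new run_separate_instruments flag makes a single simulate() call fan out into one SimulationSetup, and therefore one TradingSessionResult, per instrument. A minimal sketch of how such a call might look; only portfolio_log_freq, parallel_backend, emission and run_separate_instruments are confirmed by this diff, so the strategy, data source and remaining arguments are illustrative assumptions:

    from qubx.backtester.simulator import simulate

    results = simulate(
        {"my_strategy": MyStrategy()},        # hypothetical strategy configuration
        data=reader,                          # hypothetical historical data source
        capital=100_000,
        instruments=["BINANCE.UM:BTCUSDT", "BINANCE.UM:ETHUSDT"],
        start="2024-01-01",
        stop="2024-06-01",
        run_separate_instruments=True,        # one setup (and one result) per instrument
    )

    for r in results:
        print(r.name)                         # e.g. "my_strategy/BTCUSDT", per the setup naming in utils.py below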
@@ -109,6 +111,7 @@ def simulate(
  commissions=commissions,
  signal_timeframe=signal_timeframe,
  accurate_stop_orders_execution=accurate_stop_orders_execution,
+ run_separate_instruments=run_separate_instruments,
  )
  if not simulation_setups:
  logger.error(
@@ -117,6 +120,10 @@ def simulate(
  )
  raise SimulationError(_msg)

+ # - inform about separate instruments mode
+ if run_separate_instruments and len(simulation_setups) > 1:
+ logger.info(f"Running separate simulations for each instrument. Total simulations: {len(simulation_setups)}")
+
  # - preprocess start and stop and convert to datetime if necessary
  if stop is None:
  # - check stop time : here we try to backtest till now (may be we need to get max available time from data reader ?)
@@ -160,24 +167,50 @@ def _run_setups(
  _main_loop_silent = len(strategies_setups) == 1
  n_jobs = 1 if _main_loop_silent else n_jobs

- reports = ProgressParallel(
- n_jobs=n_jobs, total=len(strategies_setups), silent=_main_loop_silent, backend=parallel_backend
- )(
- delayed(_run_setup)(
- id,
- f"Simulated-{id}",
- setup,
- data_setup,
- start,
- stop,
- silent,
- show_latency_report,
- portfolio_log_freq,
- emission,
+ if n_jobs == 1:
+ reports = [
+ _run_setup(
+ id,
+ f"Simulated-{id}",
+ setup,
+ data_setup,
+ start,
+ stop,
+ silent,
+ show_latency_report,
+ portfolio_log_freq,
+ emission,
+ )
+ for id, setup in enumerate(strategies_setups)
+ ]
+ else:
+ reports = ProgressParallel(
+ n_jobs=n_jobs, total=len(strategies_setups), silent=_main_loop_silent, backend=parallel_backend
+ )(
+ delayed(_run_setup)(
+ id,
+ f"Simulated-{id}",
+ setup,
+ data_setup,
+ start,
+ stop,
+ silent,
+ show_latency_report,
+ portfolio_log_freq,
+ emission,
+ )
+ for id, setup in enumerate(strategies_setups)
  )
- for id, setup in enumerate(strategies_setups)
- )
- return reports  # type: ignore
+
+ # Filter out None results and log warnings for failed simulations
+ successful_reports = []
+ for i, report in enumerate(reports):
+ if report is None:
+ logger.warning(f"Simulation setup {i} failed - skipping from results")
+ else:
+ successful_reports.append(report)
+
+ return successful_reports


  def _run_setup(
@@ -191,48 +224,58 @@ def _run_setup(
  show_latency_report: bool,
  portfolio_log_freq: str,
  emission: EmissionConfig | None = None,
- ) -> TradingSessionResult:
- # Create metric emitter if configured
- emitter = None
- if emission is not None:
- emitter = create_metric_emitters(emission, setup.name)
-
- runner = SimulationRunner(
- setup=setup,
- data_config=data_setup,
- start=start,
- stop=stop,
- account_id=account_id,
- portfolio_log_freq=portfolio_log_freq,
- emitter=emitter,
- )
+ ) -> TradingSessionResult | None:
+ try:
+ # Create metric emitter if configured
+ emitter = None
+ if emission is not None:
+ emitter = create_metric_emitters(emission, setup.name)

- # - we want to see simulate time in log messages
- QubxLogConfig.setup_logger(
- level=QubxLogConfig.get_log_level(), custom_formatter=SimulatedLogFormatter(runner.ctx).formatter
- )
+ runner = SimulationRunner(
+ setup=setup,
+ data_config=data_setup,
+ start=start,
+ stop=stop,
+ account_id=account_id,
+ portfolio_log_freq=portfolio_log_freq,
+ emitter=emitter,
+ )

- runner.run(silent=silent)
-
- # - service latency report
- if show_latency_report:
- runner.print_latency_report()
-
- return TradingSessionResult(
- setup_id,
- setup.name,
- start,
- stop,
- setup.exchanges,
- setup.instruments,
- setup.capital,
- setup.base_currency,
- setup.commissions,
- runner.logs_writer.get_portfolio(as_plain_dataframe=True),
- runner.logs_writer.get_executions(),
- runner.logs_writer.get_signals(),
- strategy_class=runner.strategy_class,
- parameters=runner.strategy_params,
- is_simulation=True,
- author=get_current_user(),
- )
+ # - we want to see simulate time in log messages
+ QubxLogConfig.setup_logger(
+ level=QubxLogConfig.get_log_level(), custom_formatter=SimulatedLogFormatter(runner.ctx).formatter
+ )
+
+ runner.run(silent=silent)
+
+ # - service latency report
+ if show_latency_report:
+ runner.print_latency_report()
+
+ # Convert commissions to the expected type for TradingSessionResult
+ commissions_for_result = setup.commissions
+ if isinstance(commissions_for_result, dict):
+ # Filter out None values to match TradingSessionResult expected type
+ commissions_for_result = {k: v for k, v in commissions_for_result.items() if v is not None}
+
+ return TradingSessionResult(
+ setup_id,
+ setup.name,
+ start,
+ stop,
+ setup.exchanges,
+ setup.instruments,
+ setup.capital,
+ setup.base_currency,
+ commissions_for_result,
+ runner.logs_writer.get_portfolio(as_plain_dataframe=True),
+ runner.logs_writer.get_executions(),
+ runner.logs_writer.get_signals(),
+ strategy_class=runner.strategy_class,
+ parameters=runner.strategy_params,
+ is_simulation=True,
+ author=get_current_user(),
+ )
+ except Exception as e:
+ logger.error(f"Simulation setup {setup_id} failed with error: {e}")
+ return None
src/qubx/backtester/utils.py
@@ -419,6 +419,7 @@ def recognize_simulation_configuration(
  commissions: str | dict[str, str | None] | None,
  signal_timeframe: str,
  accurate_stop_orders_execution: bool,
+ run_separate_instruments: bool = False,
  ) -> list[SimulationSetup]:
  """
  Recognize and create setups based on the provided simulation configuration.
@@ -438,6 +439,7 @@ def recognize_simulation_configuration(
  - commissions (str): The commission structure to be applied.
  - signal_timeframe (str): Timeframe for generated signals.
  - accurate_stop_orders_execution (bool): If True, enables more accurate stop order execution simulation.
+ - run_separate_instruments (bool): If True, creates separate setups for each instrument.

  Returns:
  - list[SimulationSetup]: A list of SimulationSetup objects, each representing a
@@ -458,7 +460,7 @@ def recognize_simulation_configuration(
  r.extend(
  recognize_simulation_configuration(
  _n + n, v, instruments, exchanges, capital, basic_currency, commissions,
- signal_timeframe, accurate_stop_orders_execution
+ signal_timeframe, accurate_stop_orders_execution, run_separate_instruments
  )
  )

@@ -474,45 +476,85 @@ def recognize_simulation_configuration(
  _t = SetupTypes.STRATEGY_AND_TRACKER

  # - extract actual symbols that have signals
- r.append(
- SimulationSetup(
- _t, name, _s, c1, # type: ignore
- _sniffer._pick_instruments(instruments, _s) if _sniffer._is_signal(c0) else instruments,
- exchanges, capital, basic_currency, commissions,
- signal_timeframe, accurate_stop_orders_execution
+ setup_instruments = _sniffer._pick_instruments(instruments, _s) if _sniffer._is_signal(c0) else instruments
+
+ if run_separate_instruments:
+ # Create separate setups for each instrument
+ for instrument in setup_instruments:
+ r.append(
+ SimulationSetup(
+ _t, f"{name}/{instrument.symbol}", _s, c1, # type: ignore
+ [instrument],
+ exchanges, capital, basic_currency, commissions,
+ signal_timeframe, accurate_stop_orders_execution
+ )
+ )
+ else:
+ r.append(
+ SimulationSetup(
+ _t, name, _s, c1, # type: ignore
+ setup_instruments,
+ exchanges, capital, basic_currency, commissions,
+ signal_timeframe, accurate_stop_orders_execution
+ )
  )
- )
  else:
  for j, s in enumerate(configs):
  r.extend(
  recognize_simulation_configuration(
  # name + "/" + str(j), s, instruments, exchange, capital, basic_currency, commissions
  name, s, instruments, exchanges, capital, basic_currency, commissions, # type: ignore
- signal_timeframe, accurate_stop_orders_execution
+ signal_timeframe, accurate_stop_orders_execution, run_separate_instruments
  )
  )

  elif _sniffer._is_strategy(configs):
- r.append(
- SimulationSetup(
- SetupTypes.STRATEGY,
- name, configs, None, instruments,
- exchanges, capital, basic_currency, commissions,
- signal_timeframe, accurate_stop_orders_execution
+ if run_separate_instruments:
+ # Create separate setups for each instrument
+ for instrument in instruments:
+ r.append(
+ SimulationSetup(
+ SetupTypes.STRATEGY,
+ f"{name}/{instrument.symbol}", configs, None, [instrument],
+ exchanges, capital, basic_currency, commissions,
+ signal_timeframe, accurate_stop_orders_execution
+ )
+ )
+ else:
+ r.append(
+ SimulationSetup(
+ SetupTypes.STRATEGY,
+ name, configs, None, instruments,
+ exchanges, capital, basic_currency, commissions,
+ signal_timeframe, accurate_stop_orders_execution
+ )
  )
- )

  elif _sniffer._is_signal(configs):
  # - check structure of signals
  c1 = _sniffer._check_signals_structure(instruments, configs) # type: ignore
- r.append(
- SimulationSetup(
- SetupTypes.SIGNAL,
- name, c1, None, _sniffer._pick_instruments(instruments, c1),
- exchanges, capital, basic_currency, commissions,
- signal_timeframe, accurate_stop_orders_execution
+ setup_instruments = _sniffer._pick_instruments(instruments, c1)
+
+ if run_separate_instruments:
+ # Create separate setups for each instrument
+ for instrument in setup_instruments:
+ r.append(
+ SimulationSetup(
+ SetupTypes.SIGNAL,
+ f"{name}/{instrument.symbol}", c1, None, [instrument],
+ exchanges, capital, basic_currency, commissions,
+ signal_timeframe, accurate_stop_orders_execution
+ )
+ )
+ else:
+ r.append(
+ SimulationSetup(
+ SetupTypes.SIGNAL,
+ name, c1, None, setup_instruments,
+ exchanges, capital, basic_currency, commissions,
+ signal_timeframe, accurate_stop_orders_execution
+ )
  )
- )

  # fmt: on
  return r
src/qubx/core/loggers.py
@@ -10,10 +10,8 @@ from qubx.core.basics import (
  Position,
  TargetPosition,
  )
-
  from qubx.core.series import time_as_nsec
  from qubx.core.utils import recognize_timeframe
-
  from qubx.utils.misc import Stopwatch
  from qubx.utils.time import convert_tf_str_td64, floor_t64

@@ -21,14 +19,14 @@ _SW = Stopwatch()


  class LogsWriter:
- account_id: str
- strategy_id: str
- run_id: str
-
  """
  Log writer interface with default implementation
  """

+ account_id: str
+ strategy_id: str
+ run_id: str
+
  def __init__(self, account_id: str, strategy_id: str, run_id: str) -> None:
  self.account_id = account_id
  self.strategy_id = strategy_id
src/qubx/core/mixins/processing.py
@@ -39,7 +39,8 @@ from qubx.core.series import Bar, OrderBook, Quote, Trade


  class ProcessingManager(IProcessingManager):
- MAX_NUMBER_OF_STRATEGY_FAILURES = 10
+ MAX_NUMBER_OF_STRATEGY_FAILURES: int = 10
+ DATA_READY_TIMEOUT_SECONDS: int = 60

  _context: IStrategyContext
  _strategy: IStrategy
@@ -67,6 +68,7 @@ class ProcessingManager(IProcessingManager):
  _trig_bar_freq_nsec: int | None = None
  _cur_sim_step: int | None = None
  _updated_instruments: set[Instrument] = set()
+ _data_ready_start_time: dt_64 | None = None

  def __init__(
  self,
@@ -111,6 +113,7 @@ class ProcessingManager(IProcessingManager):
  self._strategy_name = strategy.__class__.__name__
  self._trig_bar_freq_nsec = None
  self._updated_instruments = set()
+ self._data_ready_start_time = None

  def set_fit_schedule(self, schedule: str) -> None:
  rule = process_schedule_spec(schedule)
@@ -344,9 +347,56 @@ class ProcessingManager(IProcessingManager):

  def _is_data_ready(self) -> bool:
  """
- Check if at least one update was received for all instruments in the context.
+ Check if strategy can start based on data availability with timeout logic.
+
+ Two-phase approach:
+ - Phase 1 (0-DATA_READY_TIMEOUT_SECONDS): Wait for ALL instruments to have data
+ - Phase 2 (after timeout): Wait for at least 1 instrument to have data
+
+ Returns:
+ bool: True if strategy can start, False if still waiting
  """
- return all(instrument in self._updated_instruments for instrument in self._context.instruments)
+ total_instruments = len(self._context.instruments)
+
+ # Handle edge case: no instruments
+ if total_instruments == 0:
+ return True
+
+ ready_instruments = len(self._updated_instruments)
+
+ # Record start time on first call
+ if self._data_ready_start_time is None:
+ self._data_ready_start_time = self._time_provider.time()
+
+ # Phase 1: Try to get all instruments ready within timeout
+ elapsed_time_seconds = (self._time_provider.time() - self._data_ready_start_time) / 1e9
+
+ if elapsed_time_seconds <= self.DATA_READY_TIMEOUT_SECONDS:
+ # Within timeout period - wait for ALL instruments
+ if ready_instruments == total_instruments:
+ logger.info(f"All {total_instruments} instruments have data - strategy ready to start")
+ return True
+ else:
+ # Log periodic status during Phase 1
+ if int(elapsed_time_seconds) % 10 == 0 and elapsed_time_seconds > 0:  # Log every 10 seconds
+ missing_instruments = set(self._context.instruments) - self._updated_instruments
+ missing_symbols = [inst.symbol for inst in missing_instruments]
+ logger.info(
+ f"Phase 1: Waiting for all instruments ({ready_instruments}/{total_instruments} ready). "
+ f"Missing: {missing_symbols}. Timeout in {self.DATA_READY_TIMEOUT_SECONDS - elapsed_time_seconds}s"
+ )
+ return False
+ else:
+ # Phase 2: After timeout - need at least 1 instrument
+ if ready_instruments >= 1:
+ missing_instruments = set(self._context.instruments) - self._updated_instruments
+ missing_symbols = [inst.symbol for inst in missing_instruments]
+ logger.info(
+ f"Starting strategy with {ready_instruments}/{total_instruments} instruments ready. Missing: {missing_symbols}"
+ )
+ return True
+ else:
+ return False

  def __update_base_data(
  self, instrument: Instrument, event_type: str, data: Timestamped, is_historical: bool = False
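The readiness policy is simple enough to state outside the manager. A self-contained sketch of the same two-phase rule; the 60-second default mirrors DATA_READY_TIMEOUT_SECONDS above, while the standalone helper itself is illustrative and not part of the package:

    def can_start(ready: int, total: int, elapsed_s: float, timeout_s: int = 60) -> bool:
        # Two-phase rule mirroring ProcessingManager._is_data_ready
        if total == 0:
            return True               # nothing to wait for
        if elapsed_s <= timeout_s:
            return ready == total     # Phase 1: require data for every instrument
        return ready >= 1             # Phase 2: after the timeout, one instrument is enough

    # 3 of 5 instruments ready: still waiting at t=30s, allowed to start at t=90s
    assert can_start(3, 5, 30.0) is False
    assert can_start(3, 5, 90.0) is True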
src/qubx/core/mixins/universe.py
@@ -114,11 +114,12 @@ class UniverseManager(IUniverseManager):
  self._removal_queue.pop(instr)

  def add_instruments(self, instruments: list[Instrument]):
- self.__do_add_instruments(instruments)
+ to_add = list(set([instr for instr in instruments if instr not in self._instruments]))
+ self.__do_add_instruments(to_add)
  self.__cleanup_removal_queue(instruments)
- self._strategy.on_universe_change(self._context, instruments, [])
+ self._strategy.on_universe_change(self._context, to_add, [])
  self._subscription_manager.commit()
- self._instruments.update(instruments)
+ self._instruments.update(to_add)

  def remove_instruments(
  self,
src/qubx/data/composite.py
@@ -71,12 +71,15 @@ class IteratedDataStreamsSlicer(Iterator[SlicerOutData]):
  return self

  def _build_initial_iteration_seq(self):
- _init_seq = {k: self._time_func(self._buffers[k][-1]) for k in self._keys}
+ _init_seq = {k: self._time_func(self._buffers[k][-1]) for k in self._keys if self._buffers[k]}
  _init_seq = dict(sorted(_init_seq.items(), key=lambda item: item[1]))
  self._keys = deque(_init_seq.keys())

  def _load_next_chunk_to_buffer(self, index: str) -> list[Timestamped]:
- return list(reversed(next(self._iterators[index])))
+ try:
+ return list(reversed(next(self._iterators[index])))
+ except StopIteration:
+ return []

  def _remove_iterator(self, key: str):
  self._buffers.pop(key)
@@ -95,6 +98,9 @@ class IteratedDataStreamsSlicer(Iterator[SlicerOutData]):
  Returns:
  Timestamped: The most recent timestamped data element from the buffer.
  """
+ if not self._buffers[k]:
+ raise StopIteration
+
  v = (data := self._buffers[k]).pop()
  if not data:
  try:
@@ -154,6 +160,9 @@ class IteratedDataStreamsSlicer(Iterator[SlicerOutData]):
  _min_t = math.inf
  _min_k = self._keys[0]
  for i in self._keys:
+ if not self._buffers[i]:
+ continue
+
  _x = self._buffers[i][-1]
  if self._time_func(_x) < _min_t:
  _min_t = self._time_func(_x)
src/qubx/data/readers.py
@@ -1469,6 +1469,11 @@ class QuestDBConnector(DataReader):
  # Use efficient chunking with multiple smaller queries
  def _iter_efficient_chunks():
  time_windows = _calculate_time_windows_for_chunking(start, end, effective_timeframe, chunksize)
+ if self._connection is None:
+ self._connect()
+ if self._connection is None:
+ raise ConnectionError("Failed to connect to QuestDB")
+
  _cursor = self._connection.cursor()  # type: ignore

  try:
src/qubx/emitters/base.py
@@ -207,6 +207,6 @@ class BaseMetricEmitter(IMetricEmitter):
  elapsed = current_time - self._last_emission_time

  if elapsed >= self._stats_interval:
- logger.debug(f"[{self.__class__.__name__}] Emitting metrics at {current_time}")
+ # logger.debug(f"[{self.__class__.__name__}] Emitting metrics at {current_time}")
  self.emit_strategy_stats(context)
  self._last_emission_time = current_time
src/qubx/loggers/csv.py
@@ -6,7 +6,8 @@ from multiprocessing.pool import ThreadPool

  from qubx import logger
  from qubx.core.loggers import LogsWriter
- from qubx.utils.misc import makedirs
+ from qubx.utils.misc import makedirs
+

  class CsvFileLogsWriter(LogsWriter):
  """
@@ -97,4 +98,3 @@ class CsvFileLogsWriter(LogsWriter):
  self._sig_file_.close()
  self.pool.close()
  self.pool.join()
-
src/qubx/loggers/factory.py
@@ -1,19 +1,19 @@
  import inspect
-
  from typing import Type

  from qubx.core.loggers import LogsWriter
  from qubx.loggers.csv import CsvFileLogsWriter
- from qubx.loggers.mongo import MongoDBLogsWriter
  from qubx.loggers.inmemory import InMemoryLogsWriter
+ from qubx.loggers.mongo import MongoDBLogsWriter

  # Registry of logs writer types
  LOGS_WRITER_REGISTRY: dict[str, Type[LogsWriter]] = {
  "CsvFileLogsWriter": CsvFileLogsWriter,
  "MongoDBLogsWriter": MongoDBLogsWriter,
- "InMemoryLogsWriter": InMemoryLogsWriter
+ "InMemoryLogsWriter": InMemoryLogsWriter,
  }

+
  def create_logs_writer(log_writer_type: str, parameters: dict | None = None) -> LogsWriter:
  """
  Create a logs writer based on configuration.
@@ -30,8 +30,7 @@ def create_logs_writer(log_writer_type: str, parameters: dict | None = None) ->
  """
  if log_writer_type not in LOGS_WRITER_REGISTRY:
  raise ValueError(
- f"Unknown logs writer type: {log_writer_type}. "
- f"Available types: {', '.join(LOGS_WRITER_REGISTRY.keys())}"
+ f"Unknown logs writer type: {log_writer_type}. Available types: {', '.join(LOGS_WRITER_REGISTRY.keys())}"
  )

  logs_writer_class = LOGS_WRITER_REGISTRY[log_writer_type]
@@ -52,4 +51,4 @@ def register_logs_writer(log_writer_type: str, logs_witer_class: Type[LogsWriter
  log_writer_type: The name of the logs writer type.
  logs_witer_class: The logs writer class to register.
  """
- LOGS_WRITER_REGISTRY[log_writer_type] = logs_witer_class
+ LOGS_WRITER_REGISTRY[log_writer_type] = logs_witer_class
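The factory keeps the same public surface after this cleanup: writers are looked up by name in LOGS_WRITER_REGISTRY, and new writers can still be registered at runtime. A hedged usage sketch; the type names come from the registry above, the constructor keys follow the base LogsWriter(account_id, strategy_id, run_id) signature shown in core/loggers.py, and MyLogsWriter is a hypothetical custom class:

    from qubx.core.loggers import LogsWriter
    from qubx.loggers.factory import create_logs_writer, register_logs_writer

    # Create a built-in writer by name; the parameters dict is passed to its constructor.
    writer = create_logs_writer(
        "InMemoryLogsWriter",
        {"account_id": "sim-account", "strategy_id": "demo", "run_id": "run-001"},
    )

    # Register a custom writer under a new name so configurations can refer to it.
    class MyLogsWriter(LogsWriter):
        ...

    register_logs_writer("MyLogsWriter", MyLogsWriter)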