Qubx 0.6.14__cp312-cp312-manylinux_2_39_x86_64.whl → 0.6.17__cp312-cp312-manylinux_2_39_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of Qubx might be problematic.

Files changed (39)
  1. qubx/backtester/broker.py +13 -0
  2. qubx/backtester/runner.py +3 -0
  3. qubx/backtester/simulator.py +25 -1
  4. qubx/cli/commands.py +9 -17
  5. qubx/cli/release.py +24 -33
  6. qubx/connectors/ccxt/account.py +64 -16
  7. qubx/connectors/ccxt/broker.py +296 -47
  8. qubx/connectors/ccxt/data.py +9 -6
  9. qubx/connectors/ccxt/exchanges/__init__.py +37 -0
  10. qubx/connectors/ccxt/exchanges/binance/broker.py +56 -0
  11. qubx/connectors/ccxt/exchanges/binance/exchange.py +500 -0
  12. qubx/connectors/ccxt/factory.py +20 -18
  13. qubx/connectors/ccxt/utils.py +2 -2
  14. qubx/core/account.py +10 -2
  15. qubx/core/context.py +12 -0
  16. qubx/core/errors.py +32 -0
  17. qubx/core/exceptions.py +4 -0
  18. qubx/core/interfaces.py +89 -5
  19. qubx/core/mixins/processing.py +5 -1
  20. qubx/core/mixins/trading.py +65 -15
  21. qubx/core/mixins/universe.py +15 -0
  22. qubx/core/series.cpython-312-x86_64-linux-gnu.so +0 -0
  23. qubx/core/utils.cpython-312-x86_64-linux-gnu.so +0 -0
  24. qubx/emitters/__init__.py +2 -1
  25. qubx/emitters/base.py +2 -0
  26. qubx/emitters/csv.py +83 -0
  27. qubx/emitters/questdb.py +44 -10
  28. qubx/exporters/redis_streams.py +10 -10
  29. qubx/resources/_build.py +2 -2
  30. qubx/restarts/state_resolvers.py +16 -0
  31. qubx/ta/indicators.cpython-312-x86_64-linux-gnu.so +0 -0
  32. qubx/utils/runner/configs.py +4 -1
  33. qubx/utils/runner/factory.py +305 -0
  34. qubx/utils/runner/runner.py +43 -320
  35. {qubx-0.6.14.dist-info → qubx-0.6.17.dist-info}/METADATA +1 -1
  36. {qubx-0.6.14.dist-info → qubx-0.6.17.dist-info}/RECORD +38 -33
  37. qubx/connectors/ccxt/customizations.py +0 -193
  38. {qubx-0.6.14.dist-info → qubx-0.6.17.dist-info}/WHEEL +0 -0
  39. {qubx-0.6.14.dist-info → qubx-0.6.17.dist-info}/entry_points.txt +0 -0
qubx/backtester/broker.py CHANGED
@@ -57,6 +57,19 @@ class SimulatedBroker(IBroker):
         self._send_exec_report(instrument, report)
         return report.order
 
+    def send_order_async(
+        self,
+        instrument: Instrument,
+        order_side: str,
+        order_type: str,
+        amount: float,
+        price: float | None = None,
+        client_id: str | None = None,
+        time_in_force: str = "gtc",
+        **optional,
+    ) -> None:
+        self.send_order(instrument, order_side, order_type, amount, price, client_id, time_in_force, **optional)
+
     def cancel_order(self, order_id: str) -> Order | None:
         instrument = self._account.order_to_instrument.get(order_id)
         if instrument is None:
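Note: in the simulator the new send_order_async is a fire-and-forget wrapper that simply delegates to the synchronous send_order and discards the returned order. A minimal sketch of the difference from a caller's perspective (the broker and instrument objects are assumed to already exist):

    # Synchronous path: returns the Order built from the simulated execution report.
    order = broker.send_order(instrument, "buy", "limit", amount=0.1, price=50_000.0)

    # Asynchronous path added in this release: same arguments, nothing returned;
    # the resulting order arrives later through the execution-report channel.
    broker.send_order_async(instrument, "buy", "limit", amount=0.1, price=50_000.0)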
qubx/backtester/runner.py CHANGED
@@ -248,6 +248,9 @@ class SimulationRunner:
             initializer=self.initializer,
         )
 
+        if self.emitter is not None:
+            self.emitter.set_time_provider(simulated_clock)
+
         # - setup base subscription from spec
         if ctx.get_base_subscription() == DataType.NONE:
             logger.debug(
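Note: the runner now hands its simulated clock to the metric emitter, so emitted metrics carry simulation timestamps rather than wall-clock time. A rough sketch of an emitter honouring an injected time provider; only set_time_provider comes from this diff, the rest (including the time() call) is an assumption:

    class PrintEmitter:
        """Illustrative emitter that timestamps metrics with an injected time provider."""

        def __init__(self) -> None:
            self._time_provider = None

        def set_time_provider(self, time_provider) -> None:
            self._time_provider = time_provider

        def emit(self, name: str, value: float) -> None:
            # Assumes the provider exposes time(); falls back to None if not set.
            ts = self._time_provider.time() if self._time_provider is not None else None
            print(f"{ts} {name}={value}")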
qubx/backtester/simulator.py CHANGED
@@ -8,6 +8,8 @@ from qubx.core.exceptions import SimulationError
 from qubx.core.metrics import TradingSessionResult
 from qubx.data.readers import DataReader
 from qubx.utils.misc import ProgressParallel, Stopwatch, get_current_user
+from qubx.utils.runner.configs import EmissionConfig
+from qubx.utils.runner.factory import create_metric_emitters
 from qubx.utils.time import handle_start_stop
 
 from .runner import SimulationRunner
@@ -45,6 +47,7 @@ def simulate(
     show_latency_report: bool = False,
     portfolio_log_freq: str = "5Min",
     parallel_backend: Literal["loky", "multiprocessing"] = "multiprocessing",
+    emission: EmissionConfig | None = None,
 ) -> list[TradingSessionResult]:
     """
     Backtest utility for trading strategies or signals using historical data.
@@ -67,6 +70,9 @@ def simulate(
         - open_close_time_indent_secs (int): Time indent in seconds for open/close times, default is 1.
         - debug (Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] | None): Logging level for debugging.
         - show_latency_report: If True, shows simulator's latency report.
+        - portfolio_log_freq (str): Frequency for portfolio logging, default is "5Min".
+        - parallel_backend (Literal["loky", "multiprocessing"]): Backend for parallel processing, default is "multiprocessing".
+        - emission (EmissionConfig | None): Configuration for metric emitters, default is None.
 
     Returns:
         - list[TradingSessionResult]: A list of TradingSessionResult objects containing the results of each simulation setup.
@@ -139,6 +145,7 @@ def simulate(
         show_latency_report=show_latency_report,
         portfolio_log_freq=portfolio_log_freq,
         parallel_backend=parallel_backend,
+        emission=emission,
     )
 
 
@@ -152,6 +159,7 @@ def _run_setups(
     show_latency_report: bool = False,
     portfolio_log_freq: str = "5Min",
     parallel_backend: Literal["loky", "multiprocessing"] = "multiprocessing",
+    emission: EmissionConfig | None = None,
 ) -> list[TradingSessionResult]:
     # loggers don't work well with joblib and multiprocessing in general because they contain
     # open file handlers that cannot be pickled. I found a solution which requires the usage of enqueue=True
@@ -165,7 +173,16 @@ def _run_setups(
         n_jobs=n_jobs, total=len(strategies_setups), silent=_main_loop_silent, backend=parallel_backend
     )(
         delayed(_run_setup)(
-            id, f"Simulated-{id}", setup, data_setup, start, stop, silent, show_latency_report, portfolio_log_freq
+            id,
+            f"Simulated-{id}",
+            setup,
+            data_setup,
+            start,
+            stop,
+            silent,
+            show_latency_report,
+            portfolio_log_freq,
+            emission,
         )
         for id, setup in enumerate(strategies_setups)
     )
@@ -182,7 +199,13 @@ def _run_setup(
     silent: bool,
     show_latency_report: bool,
     portfolio_log_freq: str,
+    emission: EmissionConfig | None = None,
 ) -> TradingSessionResult:
+    # Create metric emitter if configured
+    emitter = None
+    if emission is not None:
+        emitter = create_metric_emitters(emission, setup.name)
+
     runner = SimulationRunner(
         setup=setup,
         data_config=data_setup,
@@ -190,6 +213,7 @@ def _run_setup(
         stop=stop,
         account_id=account_id,
         portfolio_log_freq=portfolio_log_freq,
+        emitter=emitter,
     )
 
     # - we want to see simulate time in log messages
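Note: simulate() now forwards the emission config to every setup, where create_metric_emitters(emission, setup.name) builds a named emitter that is handed to the SimulationRunner. A rough sketch of the call site; apart from the emission keyword, the names below (setups, data, start, stop) are placeholders for whatever the caller already passes to simulate():

    from qubx.utils.runner.configs import EmissionConfig

    # Normally parsed from the strategy YAML; its fields are not shown in this diff,
    # so no concrete emitter settings are spelled out here.
    emission_cfg: EmissionConfig | None = None

    results = simulate(
        setups,                 # placeholder: strategy setups, unchanged from 0.6.14
        data,                   # placeholder: historical data source, unchanged from 0.6.14
        start="2024-01-01",
        stop="2024-02-01",
        emission=emission_cfg,  # new in 0.6.17: one metric emitter is created per setup
    )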
qubx/cli/commands.py CHANGED
@@ -140,16 +140,16 @@ def ls(directory: str):
     callback=lambda ctx, param, value: os.path.abspath(os.path.expanduser(value)),
 )
 @click.option(
-    "--strategy",
-    "-s",
-    type=click.STRING,
-    help="Strategy name to release (should match the strategy class name) or path to a config YAML file",
+    "--config",
+    "-c",
+    type=click.Path(exists=True, resolve_path=True),
+    help="Path to a config YAML file",
     required=True,
 )
 @click.option(
     "--output-dir",
     "-o",
-    type=click.STRING,
+    type=click.Path(exists=False),
     help="Output directory to put zip file.",
     default=".releases",
     show_default=True,
@@ -172,7 +172,6 @@ def ls(directory: str):
 )
 @click.option(
     "--commit",
-    "-c",
     is_flag=True,
     default=False,
     help="Commit changes and create tag in repo (default: False)",
@@ -180,7 +179,7 @@ def ls(directory: str):
 )
 def release(
     directory: str,
-    strategy: str,
+    config: str,
     tag: str | None,
     message: str | None,
     commit: bool,
@@ -189,16 +188,9 @@ def release(
     """
     Releases the strategy to a zip file.
 
-    The strategy can be specified in two ways:
-    1. As a strategy name (class name) - strategies are scanned in the given directory (NOT SUPPORTED ANYMORE !)
-    2. As a path to a config YAML file containing the strategy configuration in StrategyConfig format
-
-    If a strategy name is provided, a default configuration will be generated with:
-    - The strategy parameters from the strategy class
-    - Default exchange, connector, and instruments from the command options
-    - Standard logging configuration
+    The strategy is specified by a path to a config YAML file containing the strategy configuration in StrategyConfig format.
 
-    If a config file is provided, it must follow the StrategyConfig structure with:
+    The config file must follow the StrategyConfig structure with:
     - strategy: The strategy name or path
     - parameters: Dictionary of strategy parameters
     - exchanges: Dictionary of exchange configurations
@@ -211,7 +203,7 @@ def release(
 
     release_strategy(
         directory=directory,
-        strategy_name=strategy,
+        config_file=config,
         tag=tag,
         message=message,
         commit=commit,
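Note: the release command is now always driven by a config file, and the -c shorthand has moved from --commit to --config. Assuming the installed console script is qubx and a config at config/strategy.yml (both names illustrative), an invocation would look like: qubx release -c config/strategy.yml -o .releases --commit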
qubx/cli/release.py CHANGED
@@ -16,7 +16,6 @@ from qubx import logger
 from qubx.utils.misc import (
     cyan,
     generate_name,
-    get_local_qubx_folder,
     green,
     load_qubx_resources_as_text,
     magenta,
@@ -265,7 +264,7 @@ def load_strategy_from_config(config_path: Path, directory: str) -> StrategyInfo
 
 def release_strategy(
     directory: str,
-    strategy_name: str,
+    config_file: str,
     tag: str | None,
     message: str | None,
     commit: bool,
@@ -276,7 +275,7 @@ def release_strategy(
 
     Args:
         directory: str - directory to scan for strategies
-        strategy_name: str - strategy name to release or path to config file
+        config_file: str - path to config file
         tag: str - additional tag for this release
         message: str - release message
         commit: bool - commit changes and create tag in repo
@@ -288,29 +287,12 @@ def release_strategy(
 
     try:
         # - determine if strategy_name is a config file or a strategy name
-        if is_config_file(strategy_name):
-            # - load strategy from config file
-            logger.info(f"Loading strategy from config file: {strategy_name}")
-            stg_info = load_strategy_from_config(Path(strategy_name), directory)
-        else:
-            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-            # TODO: generate default config from strategy class ? Do we really need it at all ?
-            # - find strategy by name
-            # logger.info(f"Looking for '{strategy_name}' strategy")
-
-            # strat_name = "_".join([x.split(".")[-1] for x in strategy_class_names])
-            # stg_info = StrategyInfo(name=strategy_name, classes=[find_class_by_name(directory, strategy_name)])
-
-            # stg_info = find_class_by_name(directory, strategy_name)
+        if not is_config_file(config_file):
+            raise ValueError("Try using yaml config file path")
 
-            # - generate default config
-            # strategy_config = generate_default_config(
-            #     stg_info, default_exchange, default_connector, default_instruments
-            # )
-            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-            raise ValueError(
-                "!!! Release of strategy by name is not supported anymore ! Try to use config file instead !!!"
-            )
+        # - load strategy from config file
+        logger.info(f"Loading strategy from config file: {config_file}")
+        stg_info = load_strategy_from_config(Path(config_file), directory)
 
         # - process git repo and pyproject.toml for each strategy component
         repos_paths = set()
@@ -413,9 +395,8 @@ def _save_strategy_config(stg_name: str, strategy_config: StrategyConfig, releas
 
 def _copy_strategy_file(strategy_path: str, pyproject_root: str, release_dir: str) -> None:
     """Copy the strategy file to the release directory."""
-    src_dir = os.path.basename(pyproject_root)
     rel_path = os.path.relpath(strategy_path, pyproject_root)
-    dest_file_path = os.path.join(release_dir, src_dir, rel_path)
+    dest_file_path = os.path.join(release_dir, rel_path)
 
     # Ensure the destination directory exists
     os.makedirs(os.path.dirname(dest_file_path), exist_ok=True)
@@ -428,11 +409,9 @@ def _copy_strategy_file(strategy_path: str, pyproject_root: str, release_dir: st
 def _try_copy_file(src_file: str, dest_dir: str, pyproject_root: str) -> None:
     """Try to copy the file to the release directory."""
     if os.path.exists(src_file):
-        _src_dir = os.path.basename(pyproject_root)
-
         # Get the relative path from pyproject_root
         _rel_import_path = os.path.relpath(src_file, pyproject_root)
-        _dest_import_path = os.path.join(dest_dir, _src_dir, _rel_import_path)
+        _dest_import_path = os.path.join(dest_dir, _rel_import_path)
 
         # Ensure the destination directory exists
        os.makedirs(os.path.dirname(_dest_import_path), exist_ok=True)
@@ -446,10 +425,22 @@ def _copy_dependencies(strategy_path: str, pyproject_root: str, release_dir: str
     """Copy all dependencies required by the strategy."""
     _src_dir = os.path.basename(pyproject_root)
     _imports = _get_imports(strategy_path, pyproject_root, [_src_dir])
+    # find inside of the pyproject_root a folder with the same name as the _src_dir
+    # for instance it could be like macd_crossover/src/macd_crossover
+    # or macd_crossover/macd_crossover
+    # and assign this folder to _src_root
+    _src_root = None
+    for root, dirs, files in os.walk(pyproject_root):
+        if _src_dir in dirs:
+            _src_root = os.path.join(root, _src_dir)
+            break
+
+    if _src_root is None:
+        raise ValueError(f"Could not find the source root for {_src_dir} in {pyproject_root}")
 
     for _imp in _imports:
         # Construct source path
-        _base = os.path.join(pyproject_root, *[s for s in _imp.module if s != _src_dir])
+        _base = os.path.join(_src_root, *[s for s in _imp.module if s != _src_dir])
 
         # - try to copy all available files for satisfying the import
         if os.path.isdir(_base):
@@ -521,7 +512,7 @@ def _modify_pyproject_toml(pyproject_path: str, package_name: str) -> None:
             deps[d] = f">={version(d)}"
 
     # Replace the packages section with the new one
-    pyproject_data["tool"]["poetry"]["packages"] = [{"include": package_name}]
+    # pyproject_data["tool"]["poetry"]["packages"] = [{"include": package_name}]
 
     # Check if build section exists
     if "build" not in pyproject_data["tool"]["poetry"]:
@@ -632,7 +623,7 @@ def _handle_project_files(pyproject_root: str, release_dir: str) -> None:
     # Copy build.py if it exists
     build_src = os.path.join(pyproject_root, "build.py")
     if not os.path.exists(build_src):
-        logger.warning(f"build.py not found in {pyproject_root} using default one")
+        logger.info(f"build.py not found in {pyproject_root} using default one")
         build_src = load_qubx_resources_as_text("_build.py")
 
     # - setup project's name in default build.py
qubx/connectors/ccxt/account.py CHANGED
@@ -77,6 +77,8 @@ class CcxtAccountProcessor(BasicAccountProcessor):
         balance_interval: str = "30Sec",
         position_interval: str = "30Sec",
         subscription_interval: str = "10Sec",
+        open_order_interval: str = "1Min",
+        open_order_backoff: str = "1Min",
         max_position_restore_days: int = 30,
         max_retries: int = 10,
     ):
@@ -93,6 +95,8 @@ class CcxtAccountProcessor(BasicAccountProcessor):
         self.balance_interval = balance_interval
         self.position_interval = position_interval
         self.subscription_interval = subscription_interval
+        self.open_order_interval = open_order_interval
+        self.open_order_backoff = open_order_backoff
         self.max_position_restore_days = max_position_restore_days
         self._loop = AsyncThreadLoop(exchange.asyncio_loop)
         self._is_running = False
@@ -140,11 +144,17 @@ class CcxtAccountProcessor(BasicAccountProcessor):
         logger.info("Account polling tasks have been initialized")
 
         # - start subscription polling task
-        self._polling_tasks["subscription"] = self._loop.submit(
-            self._poller("subscription", self._update_subscriptions, self.subscription_interval)
-        )
+        # self._polling_tasks["subscription"] = self._loop.submit(
+        #     self._poller("subscription", self._update_subscriptions, self.subscription_interval)
+        # )
         # - subscribe to order executions
         self._polling_tasks["executions"] = self._loop.submit(self._subscribe_executions("executions", channel))
+        # - sync open orders
+        self._polling_tasks["open_orders"] = self._loop.submit(
+            self._poller(
+                "open_orders", self._sync_open_orders, self.open_order_interval, backoff=self.open_order_backoff
+            )
+        )
 
     def stop(self):
         """Stop all polling tasks"""
@@ -188,10 +198,15 @@ class CcxtAccountProcessor(BasicAccountProcessor):
         name: str,
         coroutine: Callable[[], Awaitable],
         interval: str,
+        backoff: str | None = None,
     ):
         sleep_time = pd.Timedelta(interval).total_seconds()
         retries = 0
 
+        if backoff is not None:
+            sleep_time = pd.Timedelta(backoff).total_seconds()
+            await asyncio.sleep(sleep_time)
+
         while self.channel.control.is_set():
             try:
                 await coroutine()
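Note: the optional backoff introduces an initial delay before the first poll, which is how the new open-order sync avoids racing the initial balance/position snapshot (with both defaults at "1Min" the effective cadence is unchanged). A simplified, self-contained sketch of the delayed-polling pattern; the retry and channel-shutdown handling of the real _poller is omitted:

    import asyncio

    async def poll(task, interval_sec: float, backoff_sec: float | None = None) -> None:
        # Optional initial delay before the first poll.
        if backoff_sec is not None:
            await asyncio.sleep(backoff_sec)
        while True:
            await task()
            await asyncio.sleep(interval_sec)

    async def sync_open_orders() -> None:
        print("syncing open orders ...")

    # First sync after 60s, then every 60s (commented out: runs forever).
    # asyncio.run(poll(sync_open_orders, interval_sec=60, backoff_sec=60))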
@@ -276,7 +291,7 @@ class CcxtAccountProcessor(BasicAccountProcessor):
     async def _update_positions(self) -> None:
         # fetch and update positions from exchange
         ccxt_positions = await self.exchange.fetch_positions()
-        positions = ccxt_convert_positions(ccxt_positions, self.exchange.name, self.exchange.markets)
+        positions = ccxt_convert_positions(ccxt_positions, self.exchange.name, self.exchange.markets)  # type: ignore
         # update required instruments that we need to subscribe to
         self._required_instruments.update([p.instrument for p in positions])
         # update positions
@@ -388,7 +403,10 @@ class CcxtAccountProcessor(BasicAccountProcessor):
     async def _init_open_orders(self) -> None:
         # wait for balances and positions to be initialized
         await self._wait(lambda: all([self._polling_to_init[task] for task in ["balance", "position"]]))
-        logger.debug("Fetching open orders ...")
+        await self._sync_open_orders(initial_call=True)
+
+    async def _sync_open_orders(self, initial_call: bool = False) -> None:
+        logger.debug("[SYNC] Fetching open orders ...")
 
         # in order to minimize order requests we only fetch open orders for instruments that we have positions in
         _nonzero_balances = {
@@ -405,20 +423,50 @@ class CcxtAccountProcessor(BasicAccountProcessor):
                 _orders = await self._fetch_orders(instrument, is_open=True)
                 _open_orders.update(_orders)
             except Exception as e:
-                logger.warning(f"Error fetching open orders for {instrument}: {e}")
+                logger.warning(f"[SYNC] Error fetching open orders for {instrument}: {e}")
 
         await asyncio.gather(*[_add_open_orders(i) for i in _instruments])
 
-        self.add_active_orders(_open_orders)
-
-        logger.debug(f"Found {len(_open_orders)} open orders ->")
-        _instr_to_open_orders: dict[Instrument, list[Order]] = defaultdict(list)
-        for od in _open_orders.values():
-            _instr_to_open_orders[od.instrument].append(od)
-        for instr, orders in _instr_to_open_orders.items():
-            logger.debug(f"  :: {instr} ->")
-            for order in orders:
-                logger.debug(f"    :: {order.side} {order.quantity} @ {order.price} ({order.status})")
+        if initial_call:
+            # - when it's the initial call, we add the open orders to the account
+            self.add_active_orders(_open_orders)
+            logger.debug(f"[SYNC] Found {len(_open_orders)} open orders ->")
+            _instr_to_open_orders: dict[Instrument, list[Order]] = defaultdict(list)
+            for od in _open_orders.values():
+                _instr_to_open_orders[od.instrument].append(od)
+            for instr, orders in _instr_to_open_orders.items():
+                logger.debug(f"  :: [SYNC] {instr} ->")
+                for order in orders:
+                    logger.debug(f"    :: [SYNC] {order.side} {order.quantity} @ {order.price} ({order.status})")
+        else:
+            # TODO: think if this should actually be here
+            # - we need to cancel the unexpected orders
+            await self._cancel_unexpected_orders(_open_orders)
+
+    async def _cancel_unexpected_orders(self, open_orders: dict[str, Order]) -> None:
+        _expected_orders = set(self._active_orders.keys())
+        _unexpected_orders = set(open_orders.keys()) - _expected_orders
+        if _unexpected_orders:
+            logger.info(f"[SYNC] Canceling {len(_unexpected_orders)} unexpected open orders ...")
+            _instr_to_orders = defaultdict(list)
+            for _id in _unexpected_orders:
+                _order = open_orders[_id]
+                _instr_to_orders[_order.instrument].append(_order)
+
+            async def _cancel_order(order: Order) -> None:
+                try:
+                    await self.exchange.cancel_order(order.id, symbol=instrument_to_ccxt_symbol(order.instrument))
+                    logger.debug(
+                        f"  :: [SYNC] Canceled {order.id} {order.instrument.symbol} {order.side} {order.quantity} @ {order.price} ({order.status})"
+                    )
+                except Exception as e:
+                    logger.warning(f"[SYNC] Error canceling order {order.id}: {e}")
+
+            for instr, orders in _instr_to_orders.items():
+                logger.debug(
+                    f"[SYNC] Canceling {len(orders)} (out of {len(open_orders)}) unexpected open orders for {instr}"
+                )
+                await asyncio.gather(*[_cancel_order(order) for order in orders])
 
     async def _fetch_orders(
         self, instrument: Instrument, days_before: int = 30, limit: int | None = None, is_open: bool = False
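Note: the periodic sync treats the exchange as the source of truth and cancels any open order the account processor does not recognize. The reconciliation itself reduces to a set difference over order ids; a minimal standalone sketch:

    # Order ids the connector believes are active vs. ids reported open by the exchange.
    expected = {"o-1", "o-2"}
    reported_open = {"o-1", "o-2", "o-9"}

    # Anything open on the exchange but unknown locally is "unexpected" and gets cancelled.
    unexpected = reported_open - expected
    for order_id in sorted(unexpected):
        print(f"cancel {order_id}")  # real code: await exchange.cancel_order(order_id, symbol=...)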