Qubx 0.5.7__cp312-cp312-manylinux_2_39_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of Qubx might be problematic. Click here for more details.

Files changed (100) hide show
  1. qubx/__init__.py +207 -0
  2. qubx/_nb_magic.py +100 -0
  3. qubx/backtester/__init__.py +5 -0
  4. qubx/backtester/account.py +145 -0
  5. qubx/backtester/broker.py +87 -0
  6. qubx/backtester/data.py +296 -0
  7. qubx/backtester/management.py +378 -0
  8. qubx/backtester/ome.py +296 -0
  9. qubx/backtester/optimization.py +201 -0
  10. qubx/backtester/simulated_data.py +558 -0
  11. qubx/backtester/simulator.py +362 -0
  12. qubx/backtester/utils.py +780 -0
  13. qubx/cli/__init__.py +0 -0
  14. qubx/cli/commands.py +67 -0
  15. qubx/connectors/ccxt/__init__.py +0 -0
  16. qubx/connectors/ccxt/account.py +495 -0
  17. qubx/connectors/ccxt/broker.py +132 -0
  18. qubx/connectors/ccxt/customizations.py +193 -0
  19. qubx/connectors/ccxt/data.py +612 -0
  20. qubx/connectors/ccxt/exceptions.py +17 -0
  21. qubx/connectors/ccxt/factory.py +93 -0
  22. qubx/connectors/ccxt/utils.py +307 -0
  23. qubx/core/__init__.py +0 -0
  24. qubx/core/account.py +251 -0
  25. qubx/core/basics.py +850 -0
  26. qubx/core/context.py +420 -0
  27. qubx/core/exceptions.py +38 -0
  28. qubx/core/helpers.py +480 -0
  29. qubx/core/interfaces.py +1150 -0
  30. qubx/core/loggers.py +514 -0
  31. qubx/core/lookups.py +475 -0
  32. qubx/core/metrics.py +1512 -0
  33. qubx/core/mixins/__init__.py +13 -0
  34. qubx/core/mixins/market.py +94 -0
  35. qubx/core/mixins/processing.py +428 -0
  36. qubx/core/mixins/subscription.py +203 -0
  37. qubx/core/mixins/trading.py +88 -0
  38. qubx/core/mixins/universe.py +270 -0
  39. qubx/core/series.cpython-312-x86_64-linux-gnu.so +0 -0
  40. qubx/core/series.pxd +125 -0
  41. qubx/core/series.pyi +118 -0
  42. qubx/core/series.pyx +988 -0
  43. qubx/core/utils.cpython-312-x86_64-linux-gnu.so +0 -0
  44. qubx/core/utils.pyi +6 -0
  45. qubx/core/utils.pyx +62 -0
  46. qubx/data/__init__.py +25 -0
  47. qubx/data/helpers.py +416 -0
  48. qubx/data/readers.py +1562 -0
  49. qubx/data/tardis.py +100 -0
  50. qubx/gathering/simplest.py +88 -0
  51. qubx/math/__init__.py +3 -0
  52. qubx/math/stats.py +129 -0
  53. qubx/pandaz/__init__.py +23 -0
  54. qubx/pandaz/ta.py +2757 -0
  55. qubx/pandaz/utils.py +638 -0
  56. qubx/resources/instruments/symbols-binance.cm.json +1 -0
  57. qubx/resources/instruments/symbols-binance.json +1 -0
  58. qubx/resources/instruments/symbols-binance.um.json +1 -0
  59. qubx/resources/instruments/symbols-bitfinex.f.json +1 -0
  60. qubx/resources/instruments/symbols-bitfinex.json +1 -0
  61. qubx/resources/instruments/symbols-kraken.f.json +1 -0
  62. qubx/resources/instruments/symbols-kraken.json +1 -0
  63. qubx/ta/__init__.py +0 -0
  64. qubx/ta/indicators.cpython-312-x86_64-linux-gnu.so +0 -0
  65. qubx/ta/indicators.pxd +149 -0
  66. qubx/ta/indicators.pyi +41 -0
  67. qubx/ta/indicators.pyx +787 -0
  68. qubx/trackers/__init__.py +3 -0
  69. qubx/trackers/abvanced.py +236 -0
  70. qubx/trackers/composite.py +146 -0
  71. qubx/trackers/rebalancers.py +129 -0
  72. qubx/trackers/riskctrl.py +641 -0
  73. qubx/trackers/sizers.py +235 -0
  74. qubx/utils/__init__.py +5 -0
  75. qubx/utils/_pyxreloader.py +281 -0
  76. qubx/utils/charting/lookinglass.py +1057 -0
  77. qubx/utils/charting/mpl_helpers.py +1183 -0
  78. qubx/utils/marketdata/binance.py +284 -0
  79. qubx/utils/marketdata/ccxt.py +90 -0
  80. qubx/utils/marketdata/dukas.py +130 -0
  81. qubx/utils/misc.py +541 -0
  82. qubx/utils/ntp.py +63 -0
  83. qubx/utils/numbers_utils.py +7 -0
  84. qubx/utils/orderbook.py +491 -0
  85. qubx/utils/plotting/__init__.py +0 -0
  86. qubx/utils/plotting/dashboard.py +150 -0
  87. qubx/utils/plotting/data.py +137 -0
  88. qubx/utils/plotting/interfaces.py +25 -0
  89. qubx/utils/plotting/renderers/__init__.py +0 -0
  90. qubx/utils/plotting/renderers/plotly.py +0 -0
  91. qubx/utils/runner/__init__.py +1 -0
  92. qubx/utils/runner/_jupyter_runner.pyt +60 -0
  93. qubx/utils/runner/accounts.py +88 -0
  94. qubx/utils/runner/configs.py +65 -0
  95. qubx/utils/runner/runner.py +470 -0
  96. qubx/utils/time.py +312 -0
  97. qubx-0.5.7.dist-info/METADATA +105 -0
  98. qubx-0.5.7.dist-info/RECORD +100 -0
  99. qubx-0.5.7.dist-info/WHEEL +4 -0
  100. qubx-0.5.7.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,491 @@
1
+ import gzip
2
+ import os
3
+ import traceback
4
+ from collections import defaultdict
5
+ from datetime import datetime
6
+ from os.path import exists, join
7
+ from pathlib import Path
8
+ from typing import Any
9
+
10
+ import msgspec
11
+ import numpy as np
12
+ import pandas as pd
13
+ from numba import njit, types
14
+ from numba.typed import Dict
15
+ from tqdm.auto import tqdm
16
+
17
+ from qubx import QubxLogConfig, logger, lookup
18
+ from qubx.core.basics import Instrument
19
+ from qubx.pandaz.utils import scols, srows
20
+ from qubx.utils.numbers_utils import count_decimal_places
21
+
22
+
23
@njit
def prec_floor(a: float, precision: int) -> float:
    # Truncate |a| at `precision` decimal places, preserving the sign of `a`
    # (i.e. rounds towards zero for both positive and negative inputs).
    # The inner round() absorbs float noise (e.g. 2.675*100 -> 267.49999...)
    # before flooring, so values sitting exactly on a step are kept.
    return np.sign(a) * np.true_divide(np.floor(round(abs(a) * 10**precision, precision)), 10**precision)
26
+
27
+
28
@njit
def prec_ceil(a: float, precision: int) -> float:
    # Round |a| up at `precision` decimal places, preserving the sign of `a`
    # (i.e. rounds away from zero). Mirror of prec_floor; the inner round()
    # guards against floating point artifacts before applying ceil.
    return np.sign(a) * np.true_divide(np.ceil(round(abs(a) * 10**precision, precision)), 10**precision)
31
+
32
+
33
@njit
def get_tick(price: float, is_bid: bool, tick_size: float) -> int:
    # Convert a price into an integer tick index on a grid of `tick_size`.
    # Bids are floored and asks are ceiled so that binning never moves a
    # level across the mid price. round(..., 1) absorbs tiny float noise
    # in the division before floor/ceil is applied.
    if is_bid:
        return int(np.floor(round(price / tick_size, 1)))
    else:
        return int(np.ceil(round(price / tick_size, 1)))
39
+
40
+
41
@njit
def tick_to_price(tick: int, tick_size: float, decimals: int) -> float:
    # Inverse of get_tick: map an integer tick index back to a price,
    # rounded to the instrument's price precision (`decimals`).
    return round(tick * tick_size, decimals)
44
+
45
+
46
@njit
def get_tick_price(price: float, is_bid: bool, tick_size: float, decimals: int) -> float:
    # Snap a raw price onto the tick grid: floors bids / ceils asks to the
    # nearest tick (see get_tick) and converts back to a rounded price.
    return tick_to_price(get_tick(price, is_bid, tick_size), tick_size, decimals)
49
+
50
+
51
@njit
def _interpolate_levels(
    levels: list[tuple[float, float]],
    is_bid: bool,
    tick_count: int,
    tick_size: float,
    decimals: int,
    size_decimals: int,
    sizes_in_quoted: bool,
):
    """
    Re-bin raw (price, size) levels of one book side onto a uniform tick grid.

    Returns exactly `tick_count` (offset, size) pairs, where offset 0 is the
    best level of this side and offsets grow away from the mid price; grid
    ticks with no liquidity get size 0.0. The second return value is the
    price of the best (top) level after binning.
    """
    # TODO: asks are not interpolated correctly
    prices = []
    for price, size in levels:
        prices.append(price)

    # - anchor the grid at the best price of this side: highest price for
    #   bids, lowest for asks; the grid then extends `tick_count` ticks away
    if is_bid:
        max_tick = get_tick(max(prices), is_bid, tick_size)
        min_tick = max_tick - tick_count + 1
        start_tick = max_tick
    else:
        min_tick = get_tick(min(prices), is_bid, tick_size)
        max_tick = min_tick + tick_count - 1
        start_tick = min_tick

    # Initialize a dictionary to hold the aggregated sizes
    interp_levels = Dict.empty(key_type=types.float64, value_type=types.float64)

    # Iterate through each bid and aggregate the sizes based on the tick size
    for price, size in levels:
        tick = get_tick(price, is_bid, tick_size)
        if tick >= min_tick and tick <= max_tick:
            # - optionally express liquidity in quoted currency (price * size)
            _size = (price * size) if sizes_in_quoted else size
            if tick in interp_levels:
                interp_levels[tick] += _size
            else:
                interp_levels[tick] = _size

    # Create the final list including zero sizes where necessary
    result = []
    for tick in range(min_tick, max_tick + 1):
        size = round(interp_levels[tick], size_decimals) if tick in interp_levels else 0.0
        idx = tick - start_tick
        # - offsets are non-negative on both sides and increase away from
        #   the mid (bids: negated since ticks decrease from start_tick)
        result.append((-idx if is_bid else idx, size))

    return result, tick_to_price(max_tick if is_bid else min_tick, tick_size, decimals)
96
+
97
+
98
@njit
def __build_orderbook_snapshots(
    dates: np.ndarray,
    prices: np.ndarray,
    sizes: np.ndarray,
    is_bids: np.ndarray,
    levels: int,
    tick_size_fraction: float,
    price_decimals: int,
    size_decimals: int,
    sizes_in_quoted: bool,
    init_bid_ticks: np.ndarray,
    init_bid_sizes: np.ndarray,
    init_ask_ticks: np.ndarray,
    init_ask_sizes: np.ndarray,
    init_top_bid: float,
    init_top_ask: float,
    init_tick_size: float,
) -> list[tuple[np.datetime64, list[tuple[float, float]], list[tuple[float, float]], float, float, float]]:
    """
    Build order book snapshots from given market data.

    Replays depth updates in order, maintaining the current book state in
    two typed dicts (price -> size, price -> is_bid); whenever the timestamp
    advances, the accumulated state is emitted as an interpolated snapshot
    stamped with the PREVIOUS timestamp (the state before the new batch).

    Parameters:
        dates (np.ndarray): Array of datetime64 timestamps.
        prices (np.ndarray): Array of price points.
        sizes (np.ndarray): Array of sizes corresponding to the prices.
        is_bids (np.ndarray): Array indicating if the price is a bid (True) or ask (False).
        levels (int): Number of levels to interpolate for bids and asks.
        tick_size_fraction (float): Fraction to determine the tick size dynamically based on mid-price.
        price_decimals (int): Number of decimal places for price rounding.
        size_decimals (int): Number of decimal places for size rounding.
        sizes_in_quoted (bool): Flag indicating if sizes are in quoted currency.
        init_bid_ticks (np.ndarray): Initial bid ticks (offsets from init_top_bid).
        init_bid_sizes (np.ndarray): Initial bid sizes.
        init_ask_ticks (np.ndarray): Initial ask ticks (offsets from init_top_ask).
        init_ask_sizes (np.ndarray): Initial ask sizes.
        init_top_bid (float): Initial top bid price.
        init_top_ask (float): Initial top ask price.
        init_tick_size (float): Initial tick size.

    Returns:
        list[tuple[np.datetime64, list[tuple[float, float]], list[tuple[float, float]], float, float, float]]:
            A list of tuples where each tuple contains:
            - Timestamp of the snapshot.
            - List of interpolated bid levels (price, size).
            - List of interpolated ask levels (price, size).
            - Top bid price.
            - Top ask price.
            - Tick size.
    """
    # - current book state: price -> size and price -> side flag
    price_to_size = Dict.empty(key_type=types.float64, value_type=types.float64)
    price_to_bid_ask = Dict.empty(key_type=types.float64, value_type=types.boolean)

    # - seed the book from the carry-over snapshot: stored levels are tick
    #   offsets relative to the top prices, converted back to absolute prices
    for i in range(init_bid_ticks.shape[0]):
        bp = init_top_bid - init_tick_size * init_bid_ticks[i]
        price_to_size[bp] = init_bid_sizes[i]
        price_to_bid_ask[bp] = True

    for i in range(init_ask_ticks.shape[0]):
        ap = init_top_ask + init_tick_size * init_ask_ticks[i]
        price_to_size[ap] = init_ask_sizes[i]
        price_to_bid_ask[ap] = False

    snapshots = []
    prev_timestamp = dates[0]
    for i in range(dates.shape[0]):
        date = dates[i]
        if date > prev_timestamp:
            # emit snapshot
            bids, asks = [], []
            top_a, top_b = np.inf, 0
            for price, size in price_to_size.items():
                if price_to_bid_ask[price]:
                    bids.append((price, size))
                    top_b = max(top_b, price)
                else:
                    asks.append((price, size))
                    top_a = min(top_a, price)

            if len(bids) > 0 and len(asks) > 0:
                # - find tick_size dynamically based on mid_price
                tick_size = prec_ceil(0.5 * (top_b + top_a) * tick_size_fraction, price_decimals)
                interp_bids, top_bid_price = _interpolate_levels(
                    bids,
                    True,
                    levels,
                    tick_size,
                    price_decimals,
                    size_decimals,
                    sizes_in_quoted,
                )
                interp_asks, top_ask_price = _interpolate_levels(
                    asks,
                    False,
                    levels,
                    tick_size,
                    price_decimals,
                    size_decimals,
                    sizes_in_quoted,
                )
                # - only emit when both sides have the full requested depth
                if len(interp_bids) >= levels and len(interp_asks) >= levels:
                    if top_bid_price <= top_ask_price:
                        snapshots.append(
                            (
                                prev_timestamp,
                                interp_bids[-levels:],
                                interp_asks[:levels],
                                # - also store top bid, ask prices and tick_size
                                top_b,
                                top_a,
                                tick_size,
                            )
                        )
                    else:
                        # something went wrong, bids can't be above asks
                        # clean up the local state and hope for the best
                        price_to_size.clear()
                        price_to_bid_ask.clear()

        # - apply the current update: size 0 removes the level, otherwise
        #   the level is set/overwritten with the new size and side
        price = prices[i]
        size = sizes[i]
        is_bid = is_bids[i]
        if size == 0:
            if price in price_to_size:
                del price_to_size[price]
            if price in price_to_bid_ask:
                del price_to_bid_ask[price]
        else:
            price_to_size[price] = size
            price_to_bid_ask[price] = is_bid

        prev_timestamp = date

    return snapshots
232
+
233
+
234
def build_orderbook_snapshots(
    updates: list[tuple[np.datetime64, float, float, bool]],
    levels: int,
    tick_size_pct: float,
    min_tick_size: float,
    min_size_step: float,
    sizes_in_quoted: bool = False,
    initial_snapshot: (
        tuple[
            np.datetime64,  # timestamp [0]
            list[tuple[float, float]],  # bids levels [1]
            list[tuple[float, float]],  # asks levels [2]
            float,
            float,
            float,  # top bid, top ask prices, tick_size [3, 4, 5]
        ]
        | None
    ) = None,
):
    """
    Build interpolated orderbook snapshots from a stream of depth updates.

    Thin wrapper around the numba-compiled __build_orderbook_snapshots: it
    converts the update tuples to numpy arrays, derives price/size rounding
    precision from the instrument's tick/lot steps and unpacks an optional
    carry-over snapshot (typically the last snapshot of the previous day)
    into the initial book state.

    Args:
        updates: Chronological (timestamp, price, size, is_bid) updates;
            size == 0 means removal of the level.
        levels: Number of interpolated levels per side in each snapshot.
        tick_size_pct: Price bin size as a percent of the mid price.
        min_tick_size: Instrument tick size (defines price decimals).
        min_size_step: Instrument lot step (defines size decimals).
        sizes_in_quoted: If True, sizes are aggregated in quoted currency.
        initial_snapshot: Optional previous snapshot used to seed the book;
            only used when it predates the first update.

    Returns:
        List of (timestamp, bid levels, ask levels, top bid, top ask, tick size).
    """
    dates, prices, sizes, is_bids = zip(*updates)
    dates = np.array(dates, dtype=np.datetime64)
    prices = np.array(prices)
    sizes = np.array(sizes)
    is_bids = np.array(is_bids)

    # - at least one decimal place so rounding never degenerates to integers
    price_decimals = max(count_decimal_places(min_tick_size), 1)
    size_decimals = max(count_decimal_places(min_size_step), 1)

    # - seed from the carry-over snapshot only if it predates the new updates
    if initial_snapshot is not None and dates[0] > initial_snapshot[0]:
        # - stored levels are (tick offset, size) pairs relative to top prices
        init_bid_ticks, init_bid_sizes = zip(*initial_snapshot[1])
        init_ask_ticks, init_ask_sizes = zip(*initial_snapshot[2])
        init_bid_ticks = np.array(init_bid_ticks, dtype=np.float64)
        init_bid_sizes = np.array(init_bid_sizes, dtype=np.float64)
        init_ask_ticks = np.array(init_ask_ticks, dtype=np.float64)
        init_ask_sizes = np.array(init_ask_sizes, dtype=np.float64)
        init_top_bid = initial_snapshot[3]
        init_top_ask = initial_snapshot[4]
        init_tick_size = initial_snapshot[5]
    else:
        # - empty initial state: numba still needs correctly typed arrays
        init_bid_ticks = np.array([], dtype=np.float64)
        init_bid_sizes = np.array([], dtype=np.float64)
        init_ask_ticks = np.array([], dtype=np.float64)
        init_ask_sizes = np.array([], dtype=np.float64)
        init_top_bid, init_top_ask, init_tick_size = 0, 0, 0

    snapshots = __build_orderbook_snapshots(
        dates,
        prices,
        sizes,
        is_bids,
        levels,
        tick_size_pct / 100,
        price_decimals,
        size_decimals,
        sizes_in_quoted,
        init_bid_ticks,
        init_bid_sizes,
        init_ask_ticks,
        init_ask_sizes,
        init_top_bid,
        init_top_ask,
        init_tick_size,
    )
    return snapshots
298
+
299
+
300
def snapshots_to_frame(snaps: list) -> pd.DataFrame:
    """
    Convert snapshots to dataframe

    Each snapshot tuple (timestamp, bids, asks, top_bid, top_ask, tick_size)
    becomes one row indexed by its timestamp; bid levels get 'b'-prefixed
    columns, ask levels 'a'-prefixed ones.
    """

    def _prefixed(prefix: str, levels) -> dict:
        # - turn [(offset, size), ...] into {"<prefix><offset>": size, ...}
        return {f"{prefix}{offset}": size for offset, size in dict(levels).items()}

    rows = {}
    for timestamp, bids, asks, top_bid, top_ask, tick_size in snaps:
        row = _prefixed("b", bids)
        row.update(_prefixed("a", asks))
        row.update({"top_bid": top_bid, "top_ask": top_ask, "tick_size": tick_size})
        rows[timestamp] = row
    return pd.DataFrame.from_dict(rows).T
314
+
315
+
316
def read_and_process_orderbook_updates(
    exchange: str,
    path: str,
    price_bin_pct: float,
    n_levels: int,
    sizes_in_quoted=False,
    symbols: list[str] | None = None,
    dates: slice | None = None,
    path_to_store: str | None = None,
    collect_snapshots: bool = True,
) -> dict[str, dict[datetime, pd.DataFrame]]:
    """
    Read raw gzipped depth-update dumps and convert them into daily
    interpolated orderbook snapshot frames.

    Expected layout under `path`: <symbol>/raw/<date>/*.txt.gz, each line a
    JSON record carrying a "@depth" stream payload with "depthUpdate" events.
    The last snapshot of each day seeds the next day's book state.

    Args:
        exchange: Exchange name used for the instrument lookup.
        path: Root directory with per-symbol raw update dumps.
        price_bin_pct: Price bin size as percent of mid price.
        n_levels: Number of interpolated levels per side.
        sizes_in_quoted: If True, aggregate sizes in quoted currency.
        symbols: Optional whitelist of symbols to process.
        dates: Optional slice limiting the processed date range.
        path_to_store: If set, daily frames are written to HDF5 under it.
        collect_snapshots: If True, also accumulate frames in the result.

    Returns:
        {symbol: {date-key: snapshots DataFrame}} (inner dicts stay empty
        when collect_snapshots is False).
    """
    QubxLogConfig.set_log_level("INFO")

    # - preprocess ranges
    dates_start = pd.Timestamp(dates.start if dates and dates.start else "1970-01-01")
    dates_stop = pd.Timestamp(dates.stop if dates and dates.stop else "2170-01-01")
    dates_start, dates_stop = min(dates_start, dates_stop), max(dates_start, dates_stop)

    def __process_updates_record(line: str):
        # - parse one JSON line, yield (timestamp, price, size, is_bid) tuples
        data = msgspec.json.decode(line)
        # - we need only full depth here !
        if (s_d := data.get("stream")) is not None and s_d[-6:] == "@depth":
            update = data["data"]
            if update.get("e") == "depthUpdate":
                # NOTE(review): fromtimestamp() converts to the LOCAL timezone —
                # confirm downstream consumers expect local time rather than UTC
                ts = datetime.fromtimestamp(update["E"] / 1000)
                for is_bid, key in [(True, "b"), (False, "a")]:
                    for price, size in update[key]:
                        yield (ts, float(price), float(size), is_bid)

    symb_snapshots = defaultdict(dict)
    for s in Path(path).glob("*"):
        symbol = s.name.upper()

        # - skip if list is defined but symbol not in it
        if symbols and symbol not in symbols:
            continue

        instr = lookup.find_symbol(exchange.upper(), symbol)
        if not isinstance(instr, Instrument):
            logger.error(f"Instrument not found for {symbol} !")
            continue

        _latest_snapshot = None
        for d in sorted(s.glob("raw/*")):
            _d_ts = pd.Timestamp(d.name)
            if _d_ts < dates_start or _d_ts > dates_stop:
                continue

            if path_to_store and exists(_f := get_path_to_snapshots_file(path_to_store, symbol, _d_ts)):
                logger.info(f"File {_f} already exists, skipping.")
                continue

            day_updates = []
            logger.info(f"Loading {symbol} : {d.name} ... ")
            for file in sorted(d.glob("*.txt.gz")):
                try:
                    with gzip.open(file, "rt") as f:
                        try:
                            while line := f.readline():
                                for upd in __process_updates_record(line):
                                    day_updates.append(upd)
                        except Exception as exc:
                            # - best effort: a malformed record must not kill the day
                            logger.warning(f">>> Exception in processing {file.name} : {exc}")
                except EOFError as exc:
                    # - truncated gzip archive
                    logger.error(f">>> Exception in reading {exc}")
                    logger.opt(colors=False).error(traceback.format_exc())

            if len(day_updates) == 0:
                logger.info(f"No data for {symbol} at {d.name}")
                continue

            logger.info(f"loaded {len(day_updates)} updates")

            snaps = build_orderbook_snapshots(
                day_updates,
                n_levels,
                price_bin_pct,
                instr.tick_size,
                instr.lot_size,
                sizes_in_quoted=sizes_in_quoted,
                initial_snapshot=_latest_snapshot,
            )
            # FIX: guard against an empty snapshot list — previously snaps[-1]
            # raised IndexError when a day produced updates but no valid snapshots
            if not snaps:
                logger.info(f"No snapshots built for {symbol} at {d.name}")
                continue
            _latest_snapshot = snaps[-1]

            processed_snap = snapshots_to_frame(snaps)
            t_key = pd.Timestamp(d.name).strftime("%Y-%m-%d")

            # - collect snapshots
            if collect_snapshots:
                symb_snapshots[symbol][t_key] = processed_snap

            # - save data
            if path_to_store:
                store_snapshots_to_h5(path_to_store, {symbol: {t_key: processed_snap}}, price_bin_pct, n_levels)

    return symb_snapshots
413
+
414
+
415
def get_combined_cumulative_snapshot(data: dict[str, dict[datetime, pd.DataFrame]], max_levs=1000000) -> pd.DataFrame:
    """
    Combine per-day snapshot frames into one cumulative-depth profile frame.

    For each symbol and day, the snapshots are averaged over time and the
    ask ('a*') and bid ('b*') columns are accumulated away from the mid,
    giving a cumulative depth curve per day. Results for all symbols are
    concatenated column-wise, keyed by symbol.

    Args:
        data: {symbol: {date: snapshots frame}} as produced by
            read_and_process_orderbook_updates / load_snapshots_from_h5.
        max_levs: Cap on the number of levels taken per side.
    """
    frms = []
    for s, dv in data.items():
        _f = {}
        for d, v in dv.items():
            # - time-average each level, then cumulate depth away from the mid
            ca = v.mean(axis=0).filter(regex="^a.*")[:max_levs].cumsum(axis=0)
            cb = v.mean(axis=0).filter(regex="^b.*")[::-1][:max_levs].cumsum(axis=0)
            # - asks reversed back so rows read from far-ask down to far-bid
            _f[pd.Timestamp(d)] = srows(ca[::-1], cb, sort=False).to_dict()
        frms.append(pd.DataFrame.from_dict(_f, orient="index"))
    return scols(*frms, keys=data.keys())
425
+
426
+
427
def get_path_to_snapshots_file(path: str, symbol: str, date: str) -> str:
    """
    Build the path to the daily snapshots HDF5 file for a symbol, creating
    the per-symbol directory if it does not exist yet.

    Args:
        path: Root storage directory.
        symbol: Symbol name (upper-cased for the directory name).
        date: Anything pd.Timestamp accepts; formatted as YYYY-MM-DD.

    Returns:
        "<path>/<SYMBOL>/<YYYY-MM-DD>.h5"
    """
    _s_path = join(path, symbol.upper())
    # exist_ok=True avoids the race between a separate exists() check and
    # makedirs() when several workers touch the same symbol concurrently
    os.makedirs(_s_path, exist_ok=True)
    return join(_s_path, pd.Timestamp(date).strftime("%Y-%m-%d")) + ".h5"
432
+
433
+
434
def store_snapshots_to_h5(path: str, data: dict[str, dict[str, pd.DataFrame]], p, nl):
    """
    Store orderbook data to HDF5 files

    Writes each daily frame to its per-symbol file (see
    get_path_to_snapshots_file) under a key encoding the price bin percent
    `p` and level count `nl`, with maximum compression.
    """
    # - HDF key is identical for every frame, so build it once
    hdf_key = f"orderbook_{str(p).replace('.', '_')}_{nl}"
    for symbol, per_day in data.items():
        for day, frame in per_day.items():
            logger.info(f"Storing {symbol} : {day}")
            destination = get_path_to_snapshots_file(path, symbol, day)
            frame.to_hdf(destination, key=hdf_key, complevel=9)
444
+
445
+
446
def load_snapshots_from_h5(path: str, symbol: str, dates: slice | str, p: float, nl: int) -> dict[str, pd.DataFrame]:
    """
    Load previously stored daily orderbook snapshot frames for one symbol.

    Scans <path>/<SYMBOL>/*.h* files, keeps those whose date (taken from the
    file name) falls inside `dates`, and reads each under the HDF key built
    from the price bin percent `p` and level count `nl`.

    Returns:
        {SYMBOL: {date Timestamp: DataFrame}}
    """
    symbol = symbol.upper()

    # - normalize the requested range into an ordered [lo, hi] pair
    if isinstance(dates, slice):
        lo = pd.Timestamp(dates.start if dates and dates.start else "1970-01-01")
        hi = pd.Timestamp(dates.stop if dates and dates.stop else "2170-01-01")
    else:
        lo = pd.Timestamp(dates)
        hi = pd.Timestamp(dates)
    lo, hi = min(lo, hi), max(lo, hi)

    loaded = {symbol: {}}
    hdf_key = f"orderbook_{str(p).replace('.', '_')}_{nl}"
    for day_file in tqdm(sorted((Path(path) / symbol).glob("*.h*"))):
        # - file name stem is the date: "<YYYY-MM-DD>.h5"
        day = pd.Timestamp(day_file.name.split(".")[0])
        if not (lo <= day <= hi):
            continue
        loaded[symbol][day] = pd.read_hdf(day_file, hdf_key)
    return loaded
462
+
463
+
464
def aggregate_symbol(path: str, symbol: str, p: float, nl: int, reload=False) -> pd.DataFrame | None:
    """
    Aggregate orderbook data for a symbol on a daily basis and save to HDF5 file

    For every stored daily snapshot file, computes the time-averaged
    cumulative depth profile (asks accumulated away from the mid and
    reversed, bids accumulated downward) and stores the per-day frame under
    the symbol's key in <path>/aggregated.h5.

    Args:
        path: Directory holding per-symbol snapshot files and aggregated.h5.
        symbol: Symbol to aggregate (case-insensitive).
        p: Price bin percent used when snapshots were stored (part of HDF key).
        nl: Level count used when snapshots were stored (part of HDF key).
        reload: If True, recompute even when the symbol already exists in store.

    Returns:
        The aggregated frame, or None when the symbol was already aggregated
        and reload is False.
    """
    symbol = symbol.upper()
    result = None
    with pd.HDFStore(f"{path}/aggregated.h5", "a", complevel=9) as store:
        if reload or (f"/{symbol}" not in store.keys()):
            _f = {}
            for d in tqdm(sorted((Path(path) / symbol).glob("*.h*")), leave=False, desc=symbol):
                # - file name stem is the date the file covers
                date = d.name.split(".")[0]
                rs = pd.read_hdf(d, f"orderbook_{str(p).replace('.', '_')}_{nl}")
                # - keep only rows actually belonging to that date
                rs = rs.loc[date]
                if not rs.empty:
                    # - mean over time, then cumulative depth away from the mid
                    ca = rs.mean(axis=0).filter(regex="^a.*").cumsum(axis=0)
                    cb = rs.mean(axis=0).filter(regex="^b.*")[::-1].cumsum(axis=0)
                    _f[pd.Timestamp(date)] = srows(ca[::-1], cb, sort=False).to_dict()
            result = pd.DataFrame.from_dict(_f, orient="index")
            store.put(symbol, result)
    return result
484
+
485
+
486
def aggregate_symbols_from_list(path: str, symbols: list[str] | dict[str, Any], p: float, nl: int, reload=False):
    """
    Aggregate orderbook data for a list of symbols on a daily basis and save to HDF5 file

    Simply runs aggregate_symbol(...) for every entry of `symbols` with a
    progress bar; a dict input iterates over its keys.
    """
    for sym in tqdm(symbols):
        aggregate_symbol(path, sym, p, nl, reload)
File without changes
@@ -0,0 +1,150 @@
1
+ import threading
2
+ import time
3
+ from pathlib import Path
4
+ from typing import Any
5
+
6
+ import dash
7
+ import dash_bootstrap_components as dbc
8
+ import pandas as pd
9
+ import plotly.graph_objs as go
10
+ import plotly.io as pio
11
+ from dash import Dash, ctx, dcc, html
12
+ from dash._jupyter import JupyterDisplayMode
13
+ from dash.dependencies import Input, Output
14
+ from IPython.display import clear_output
15
+ from plotly.subplots import make_subplots
16
+
17
+ from qubx import QubxLogConfig, logger, lookup
18
+ from qubx.backtester.simulator import SimulatedBroker
19
+ from qubx.connectors.ccxt.broker import CcxtBroker
20
+ from qubx.connectors.ccxt.data import CcxtDataProvider
21
+ from qubx.core.basics import Instrument
22
+ from qubx.core.interfaces import IStrategy, IStrategyContext
23
+ from qubx.core.series import OrderBook, TimeSeries
24
+ from qubx.pandaz import scols
25
+ from qubx.utils.charting.lookinglass import LookingGlass
26
+ from qubx.utils.runner import get_account_config
27
+
28
+ pio.templates.default = "plotly_dark"
29
+
30
+ TIMEFRAMES = ["1s", "1m", "5m", "15m", "1h", "4h", "1d"]
31
+
32
+
33
class TradingDashboard:
    """
    Interactive Dash web dashboard for monitoring a running strategy context.

    Renders an OHLC chart (via LookingGlass) with the strategy's per-symbol
    indicator overlays for a selected instrument and timeframe, refreshing
    every 5 seconds with a pause/resume toggle.
    """

    ctx: IStrategyContext  # strategy context supplying market data and indicators
    max_history: int  # maximum number of bars pulled into the chart

    def __init__(self, ctx: IStrategyContext, max_history: int = 10_000):
        self.app = Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
        self.ctx = ctx
        self.max_history = max_history
        self._symbol_to_instrument = {instr.symbol: instr for instr in ctx.instruments}

        # Setup layout with dark theme
        self.app.layout = html.Div(
            [
                html.H2("Trading Dashboard"),
                dbc.Row(
                    [
                        dbc.Col(
                            [
                                html.Label("Symbol:"),
                                dcc.Dropdown(
                                    id="symbol-dropdown",
                                    options=[
                                        {"label": instr.symbol, "value": instr.symbol} for instr in self.ctx.instruments
                                    ],
                                    value=self.ctx.instruments[0].symbol,
                                ),
                            ],
                            width=2,
                        ),
                        dbc.Col(
                            [
                                html.Label("Timeframe:"),
                                dcc.Dropdown(
                                    id="timeframe",
                                    options=[{"label": tf, "value": tf} for tf in TIMEFRAMES],
                                    value="1s",
                                ),
                            ],
                            width=1,
                        ),
                        dbc.Col(
                            [
                                dbc.Button(
                                    "Pause", id="play-pause-button", color="primary", className="ms-2", n_clicks=0
                                ),
                            ],
                            width=1,
                        ),
                    ],
                ),
                html.Div(
                    [
                        dcc.Graph(
                            id="live-graph",
                        ),
                    ],
                ),
                # - refresh trigger: fires every 5 seconds unless paused
                dcc.Interval(id="interval-component", interval=5 * 1000, n_intervals=0, disabled=False),
            ],
            className="dash-bootstrap",
        )

        # Toggle auto-refresh on button clicks and relabel the button.
        # NOTE(review): "interval-component.disabled" is both an Input and an
        # Output of this callback — verify the installed Dash version accepts
        # this self-referencing dependency.
        @self.app.callback(
            Output("interval-component", "disabled"),
            Output("play-pause-button", "children"),
            Input("play-pause-button", "n_clicks"),
            Input("interval-component", "disabled"),
        )
        def toggle_updates(n_clicks, disabled):
            if n_clicks > 0:
                disabled = not disabled
            return disabled, "Resume" if disabled else "Pause"

        # Rebuild the chart on each interval tick or selector change.
        @self.app.callback(
            Output("live-graph", "figure"),
            [
                Input("interval-component", "n_intervals"),
                Input("symbol-dropdown", "value"),
                Input("timeframe", "value"),
            ],
        )
        def update_graph(n: int, symbol: str, timeframe: str):
            # - show nothing until the strategy is running and fitted
            if not self.ctx.is_running() or not self.ctx.is_fitted():
                logger.info(f"Strategy running: {self.ctx.is_running()}, Strategy fitted: {self.ctx.is_fitted()}")
                return {}

            # NOTE(review): exchange is hard-coded to BINANCE.UM here —
            # confirm this dashboard is only meant for that venue
            instrument = self.ctx.query_instrument(symbol, "BINANCE.UM")
            if instrument is None:
                logger.error(f"Could not find instrument for symbol: {symbol}")
                return {}

            # - limit the chart to the most recent max_history bars
            ohlc = self.ctx.ohlc(instrument, timeframe).loc[-self.max_history :]
            # assumes the strategy object is subscriptable by symbol and yields
            # a {name: indicator} mapping — TODO confirm against the strategy API
            key_to_ind = self.ctx.strategy[symbol]  # type: ignore
            indicators = {key: ind.pd() for key, ind in key_to_ind.items()}
            fig = (
                LookingGlass(
                    ohlc,
                    indicators,
                    master_plot_height=800,
                    study_plot_height=100,
                )
                .look(title="")
                .hover(h=900)
            )
            return fig

    def run(
        self,
        host: str = "0.0.0.0",
        port: int = 8050,
        mode: JupyterDisplayMode = "external",
        debug: bool = False,
        use_reloader: bool = False,
        **kwargs,
    ):
        """Start the Dash server (blocking); `mode` controls Jupyter display."""
        self.app.run(
            debug=debug, host=host, port=str(port), jupyter_mode=mode, dev_tools_hot_reload=use_reloader, **kwargs
        )