Qubx 0.5.7 (cp312-cp312-manylinux_2_39_x86_64.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of Qubx might be problematic.

Files changed (100)
  1. qubx/__init__.py +207 -0
  2. qubx/_nb_magic.py +100 -0
  3. qubx/backtester/__init__.py +5 -0
  4. qubx/backtester/account.py +145 -0
  5. qubx/backtester/broker.py +87 -0
  6. qubx/backtester/data.py +296 -0
  7. qubx/backtester/management.py +378 -0
  8. qubx/backtester/ome.py +296 -0
  9. qubx/backtester/optimization.py +201 -0
  10. qubx/backtester/simulated_data.py +558 -0
  11. qubx/backtester/simulator.py +362 -0
  12. qubx/backtester/utils.py +780 -0
  13. qubx/cli/__init__.py +0 -0
  14. qubx/cli/commands.py +67 -0
  15. qubx/connectors/ccxt/__init__.py +0 -0
  16. qubx/connectors/ccxt/account.py +495 -0
  17. qubx/connectors/ccxt/broker.py +132 -0
  18. qubx/connectors/ccxt/customizations.py +193 -0
  19. qubx/connectors/ccxt/data.py +612 -0
  20. qubx/connectors/ccxt/exceptions.py +17 -0
  21. qubx/connectors/ccxt/factory.py +93 -0
  22. qubx/connectors/ccxt/utils.py +307 -0
  23. qubx/core/__init__.py +0 -0
  24. qubx/core/account.py +251 -0
  25. qubx/core/basics.py +850 -0
  26. qubx/core/context.py +420 -0
  27. qubx/core/exceptions.py +38 -0
  28. qubx/core/helpers.py +480 -0
  29. qubx/core/interfaces.py +1150 -0
  30. qubx/core/loggers.py +514 -0
  31. qubx/core/lookups.py +475 -0
  32. qubx/core/metrics.py +1512 -0
  33. qubx/core/mixins/__init__.py +13 -0
  34. qubx/core/mixins/market.py +94 -0
  35. qubx/core/mixins/processing.py +428 -0
  36. qubx/core/mixins/subscription.py +203 -0
  37. qubx/core/mixins/trading.py +88 -0
  38. qubx/core/mixins/universe.py +270 -0
  39. qubx/core/series.cpython-312-x86_64-linux-gnu.so +0 -0
  40. qubx/core/series.pxd +125 -0
  41. qubx/core/series.pyi +118 -0
  42. qubx/core/series.pyx +988 -0
  43. qubx/core/utils.cpython-312-x86_64-linux-gnu.so +0 -0
  44. qubx/core/utils.pyi +6 -0
  45. qubx/core/utils.pyx +62 -0
  46. qubx/data/__init__.py +25 -0
  47. qubx/data/helpers.py +416 -0
  48. qubx/data/readers.py +1562 -0
  49. qubx/data/tardis.py +100 -0
  50. qubx/gathering/simplest.py +88 -0
  51. qubx/math/__init__.py +3 -0
  52. qubx/math/stats.py +129 -0
  53. qubx/pandaz/__init__.py +23 -0
  54. qubx/pandaz/ta.py +2757 -0
  55. qubx/pandaz/utils.py +638 -0
  56. qubx/resources/instruments/symbols-binance.cm.json +1 -0
  57. qubx/resources/instruments/symbols-binance.json +1 -0
  58. qubx/resources/instruments/symbols-binance.um.json +1 -0
  59. qubx/resources/instruments/symbols-bitfinex.f.json +1 -0
  60. qubx/resources/instruments/symbols-bitfinex.json +1 -0
  61. qubx/resources/instruments/symbols-kraken.f.json +1 -0
  62. qubx/resources/instruments/symbols-kraken.json +1 -0
  63. qubx/ta/__init__.py +0 -0
  64. qubx/ta/indicators.cpython-312-x86_64-linux-gnu.so +0 -0
  65. qubx/ta/indicators.pxd +149 -0
  66. qubx/ta/indicators.pyi +41 -0
  67. qubx/ta/indicators.pyx +787 -0
  68. qubx/trackers/__init__.py +3 -0
  69. qubx/trackers/abvanced.py +236 -0
  70. qubx/trackers/composite.py +146 -0
  71. qubx/trackers/rebalancers.py +129 -0
  72. qubx/trackers/riskctrl.py +641 -0
  73. qubx/trackers/sizers.py +235 -0
  74. qubx/utils/__init__.py +5 -0
  75. qubx/utils/_pyxreloader.py +281 -0
  76. qubx/utils/charting/lookinglass.py +1057 -0
  77. qubx/utils/charting/mpl_helpers.py +1183 -0
  78. qubx/utils/marketdata/binance.py +284 -0
  79. qubx/utils/marketdata/ccxt.py +90 -0
  80. qubx/utils/marketdata/dukas.py +130 -0
  81. qubx/utils/misc.py +541 -0
  82. qubx/utils/ntp.py +63 -0
  83. qubx/utils/numbers_utils.py +7 -0
  84. qubx/utils/orderbook.py +491 -0
  85. qubx/utils/plotting/__init__.py +0 -0
  86. qubx/utils/plotting/dashboard.py +150 -0
  87. qubx/utils/plotting/data.py +137 -0
  88. qubx/utils/plotting/interfaces.py +25 -0
  89. qubx/utils/plotting/renderers/__init__.py +0 -0
  90. qubx/utils/plotting/renderers/plotly.py +0 -0
  91. qubx/utils/runner/__init__.py +1 -0
  92. qubx/utils/runner/_jupyter_runner.pyt +60 -0
  93. qubx/utils/runner/accounts.py +88 -0
  94. qubx/utils/runner/configs.py +65 -0
  95. qubx/utils/runner/runner.py +470 -0
  96. qubx/utils/time.py +312 -0
  97. qubx-0.5.7.dist-info/METADATA +105 -0
  98. qubx-0.5.7.dist-info/RECORD +100 -0
  99. qubx-0.5.7.dist-info/WHEEL +4 -0
  100. qubx-0.5.7.dist-info/entry_points.txt +3 -0
qubx/data/tardis.py ADDED
@@ -0,0 +1,100 @@
+ from dataclasses import field
+ from os.path import exists, expanduser
+ from pathlib import Path
+ from typing import Any, Iterable
+
+ import pandas as pd
+ from pyarrow import csv
+
+ from qubx.utils.time import handle_start_stop, infer_series_frequency
+
+ from .readers import CsvStorageDataReader, DataReader, DataTransformer, _recognize_t
+
+ TARDIS_EXCHANGE_MAPPERS = {
+     "bitfinex.f": "bitfinex-derivatives",
+     "binance.um": "binance-futures",
+ }
+
+
+ class TardisCsvDataReader(DataReader):
+     def __init__(self, path: str | Path) -> None:
+         _path = expanduser(path)
+         if not exists(_path):
+             raise ValueError(f"Folder is not found at {path}")
+         self.path = Path(_path)
+
+     def get_names(self, exchange: str | None = None, data_type: str | None = None) -> list[str]:
+         symbols = []
+         exchanges = [exchange] if exchange else self.get_exchanges()
+         for exchange in exchanges:
+             exchange_path = Path(self.path) / exchange
+             if not exists(exchange_path):
+                 raise ValueError(f"Exchange is not found at {exchange_path}")
+             data_types = [data_type] if data_type else self.get_data_types(exchange)
+             for data_type in data_types:
+                 data_type_path = exchange_path / data_type
+                 if not exists(data_type_path):
+                     return []
+                 symbols += self._get_symbols(data_type_path)
+         return symbols
+
+     def read(
+         self,
+         data_id: str,
+         start: str | None = None,
+         stop: str | None = None,
+         transform: DataTransformer = DataTransformer(),
+         chunksize=0,
+         timeframe=None,
+         data_type="trades",
+     ) -> Iterable | Any:
+         if chunksize > 0:
+             raise NotImplementedError("Chunksize is not supported for TardisCsvDataReader")
+         exchange, symbol = data_id.split(":")
+         _exchange = exchange.lower()
+         _exchange = TARDIS_EXCHANGE_MAPPERS.get(_exchange, _exchange)
+         t_0, t_1 = handle_start_stop(start, stop, lambda x: pd.Timestamp(x).date().isoformat())
+         _path = self.path / _exchange / data_type
+         if not _path.exists():
+             raise ValueError(f"Data type is not found at {_path}")
+         _files = sorted(_path.glob(f"*_{symbol}.csv.gz"))
+         if not _files:
+             return None
+         _dates = [file.stem.split("_")[0] for file in _files]
+         if t_0 is None:
+             t_0 = _dates[0]
+         if t_1 is None:
+             t_1 = _dates[-1]
+         _filt_files = [file for file in _files if t_0 <= file.stem.split("_")[0] <= t_1]
+
+         tables = []
+         fieldnames = None
+         for f_path in _filt_files:
+             table = csv.read_csv(
+                 f_path,
+                 parse_options=csv.ParseOptions(ignore_empty_lines=True),
+             )
+             if not fieldnames:
+                 fieldnames = table.column_names
+             tables.append(table.to_pandas())
+
+         transform.start_transform(data_id, fieldnames, start=start, stop=stop)
+         raw_data = pd.concat(tables).to_numpy()
+         transform.process_data(raw_data)
+
+         return transform.collect()
+
+     def get_exchanges(self) -> list[str]:
+         return [exchange.name for exchange in self.path.iterdir() if exchange.is_dir()]
+
+     def get_data_types(self, exchange: str) -> list[str]:
+         exchange_path = Path(self.path) / exchange
+         return [data_type.name for data_type in exchange_path.iterdir() if data_type.is_dir()]
+
+     def _get_symbols(self, data_type_path: Path) -> list[str]:
+         symbols = set()
+         for file in data_type_path.glob("*.gz"):
+             parts = file.stem.replace(".csv", "").split("_")
+             if len(parts) == 2:
+                 symbols.add(parts[1])
+         return list(symbols)
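
For context, a minimal usage sketch of the reader added above (not part of the package): it assumes Tardis CSV dumps have already been downloaded into a local folder laid out as <root>/<exchange>/<data_type>/<date>_<SYMBOL>.csv.gz, which is the layout the glob pattern in read() expects; the root path and symbol below are hypothetical.

from qubx.data.tardis import TardisCsvDataReader

# hypothetical local root folder holding the downloaded Tardis dumps
reader = TardisCsvDataReader("~/tardis-data")

print(reader.get_exchanges())      # sub-folders found under the root, e.g. ["binance-futures"]

# "BINANCE.UM" is lower-cased and remapped to "binance-futures" via TARDIS_EXCHANGE_MAPPERS
trades = reader.read(
    "BINANCE.UM:BTCUSDT",
    start="2024-01-01",
    stop="2024-01-07",
    data_type="trades",
)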
qubx/gathering/simplest.py ADDED
@@ -0,0 +1,88 @@
+ from qubx import logger
+ from qubx.core.basics import Deal, Instrument, TargetPosition
+ from qubx.core.interfaces import IPositionGathering, IStrategyContext
+
+
+ class SimplePositionGatherer(IPositionGathering):
+     """
+     Default implementation of positions gathering by single orders through strategy context
+     """
+
+     entry_order_id: str | None = None
+
+     def _cncl_order(self, ctx: IStrategyContext, instrument: Instrument) -> None:
+         if self.entry_order_id:
+             logger.debug(
+                 f" [<y>{self.__class__.__name__}</y>(<g>{instrument}</g>)] :: Cancelling previous entry order <red>{self.entry_order_id}</red>"
+             )
+             try:
+                 ctx.cancel_order(self.entry_order_id)
+             except Exception as e:
+                 logger.error(f"Cancelling entry order failed: {str(e)}")
+             self.entry_order_id = None
+
+     def alter_position_size(self, ctx: IStrategyContext, target: TargetPosition) -> float:
+         # Here is the default implementation:
+         # just trade it through the strategy context by using market (or limit) orders,
+         # but in general it may have complex logic for position adjustment
+         instrument, new_size, at_price = target.instrument, target.target_position_size, target.price
+         current_position = ctx.positions[instrument].quantity
+         to_trade = new_size - current_position
+
+         # - first cancel previous entry order if it exists
+         self._cncl_order(ctx, instrument)
+
+         if abs(to_trade) < instrument.min_size:
+             if current_position != 0:
+                 logger.debug(
+                     f" [<y>{self.__class__.__name__}</y>(<g>{instrument}</g>)] :: Unable to change position from {current_position} to {new_size} : too small difference"
+                 )
+         else:
+             # - check how it should be traded: market, limit or stop order
+             opts = {}
+             _is_stop_or_limit = False
+             if at_price:
+                 # - we already have a position but it's requested to change it at a specific price
+                 if abs(current_position) > instrument.min_size:
+                     logger.debug(
+                         f" [<y>{self.__class__.__name__}</y>(<g>{instrument}</g>)] :: Attempt to change current position {current_position} to {new_size} at {at_price} !"
+                     )
+
+                 quote = ctx.quote(instrument)
+                 assert quote is not None
+                 if (to_trade > 0 and at_price > quote.ask) or (to_trade < 0 and at_price < quote.bid):
+                     opts["stop_type"] = "market"
+                     _is_stop_or_limit = True
+
+                 if (to_trade > 0 and at_price <= quote.bid) or (to_trade < 0 and at_price >= quote.ask):
+                     _is_stop_or_limit = True
+
+             r = ctx.trade(instrument, to_trade, at_price, **opts)
+             if _is_stop_or_limit:
+                 self.entry_order_id = r.id
+                 logger.debug(
+                     f" [<y>{self.__class__.__name__}</y>(<g>{instrument}</g>)] :: Position may be adjusted from {current_position} to {new_size} at {at_price} : {r}"
+                 )
+             else:
+                 self.entry_order_id = None
+                 logger.debug(
+                     f" [<y>{self.__class__.__name__}</y>(<g>{instrument}</g>)] :: Position is adjusted from {current_position} to {new_size} : {r}"
+                 )
+
+             current_position = new_size
+             # - TODO: need to check how fast position is being updated on live
+             # current_position = ctx.positions[instrument].quantity
+
+         return current_position
+
+     def on_execution_report(self, ctx: IStrategyContext, instrument: Instrument, deal: Deal):
+         if deal.order_id == self.entry_order_id:
+             self.entry_order_id = None
+
+
+ class SplittedOrdersPositionGatherer(IPositionGathering):
+     """
+     Gather position by splitting order into smaller parts randomly
+     """
+
+     pass
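
To make the branching in alter_position_size easier to follow, here is a standalone sketch of the same decision rule (the helper name is hypothetical and only restates the conditions above): depending on where the requested price sits relative to the current quote, the order becomes a stop-market, a resting limit whose id is remembered in entry_order_id, or an order that is sent but not tracked.

def classify_entry(to_trade: float, at_price: float | None, bid: float, ask: float) -> str:
    # No target price: plain market order, nothing to remember.
    if at_price is None:
        return "market"
    # Price beyond the far touch: sent as a stop-market order; its id is tracked.
    if (to_trade > 0 and at_price > ask) or (to_trade < 0 and at_price < bid):
        return "stop-market"
    # Passive price on the near side: resting limit order; its id is tracked.
    if (to_trade > 0 and at_price <= bid) or (to_trade < 0 and at_price >= ask):
        return "limit"
    # Price inside the spread: the order is sent but not remembered as a pending entry.
    return "untracked"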
qubx/math/__init__.py ADDED
@@ -0,0 +1,3 @@
+ __all__ = ["compare_to_norm", "percentile_rank", "kde"]
+
+ from .stats import compare_to_norm, kde, percentile_rank
qubx/math/stats.py ADDED
@@ -0,0 +1,129 @@
+ import numpy as np
+ import pandas as pd
+ import statsmodels.api as sm
+ from statsmodels.tsa.stattools import coint
+
+ from qubx.utils import sbp
+
+
+ def percentile_rank(x: np.ndarray, v, pctls=np.arange(1, 101)):
+     """
+     Find percentile rank of value v
+     :param x: values array
+     :param v: value to be ranked
+     :param pctls: percentiles
+     :return: rank
+
+     >>> percentile_rank(np.random.randn(1000), 1.69)
+     95
+     >>> percentile_rank(np.random.randn(1000), 1.69, [10,50,100])
+     2
+     """
+     return np.argmax(np.sign(np.append(np.percentile(x, pctls), np.inf) - v))
+
+
+ def compare_to_norm(xs, xranges=None):
+     """
+     Compare distribution from xs against normal using estimated mean and std
+     """
+     import matplotlib.pyplot as plt
+     import scipy.stats as stats
+     import seaborn as sns
+
+     _m, _s = np.mean(xs), np.std(xs)
+     fit = stats.norm.pdf(sorted(xs), _m, _s)
+
+     sbp(12, 1)
+     plt.plot(sorted(xs), fit, "r--", lw=2, label="N(%.2f, %.2f)" % (_m, _s))
+     plt.legend(loc="upper right")
+
+     sns.kdeplot(xs, color="g", label="Data", fill=True)
+     if xranges is not None and len(xranges) > 1:
+         plt.xlim(xranges)
+     plt.legend(loc="upper right")
+
+     sbp(12, 2)
+     stats.probplot(xs, dist="norm", sparams=(_m, _s), plot=plt)
+
+
+ def kde(array, cut_down=True, bw_method="scott"):
+     """
+     Kernel density estimation
+     """
+     from scipy.stats import gaussian_kde
+
+     if cut_down:
+         bins, counts = np.unique(array, return_counts=True)
+         f_mean = counts.mean()
+         f_above_mean = bins[counts > f_mean]
+         if len(f_above_mean) > 0:
+             bounds = [f_above_mean.min(), f_above_mean.max()]
+             array = array[np.bitwise_and(bounds[0] < array, array < bounds[1])]
+
+     return gaussian_kde(array, bw_method=bw_method)
+
+
+ def hurst(series: np.ndarray, max_lag: int = 20) -> float:
+     """
+     Calculate the Hurst exponent to determine the long-term memory of a time series.
+
+     The Hurst exponent (H) is a measure that helps identify:
+     - Random Walk (H ≈ 0.5): Each step is independent of past values
+     - Trending/Persistent (H > 0.5): Positive values tend to be followed by positive values
+     - Mean Reverting/Anti-persistent (H < 0.5): Positive values tend to be followed by negative values
+
+     The calculation uses the relationship between the range of the data and the time lag,
+     specifically examining how the variance of price differences scales with increasing lags.
+
+     Parameters
+     ----------
+     series : np.ndarray
+         Input time series data (typically price or returns)
+     max_lag : int, optional
+         Maximum lag to consider in calculation, by default 20
+
+     Returns
+     -------
+     float
+         Hurst exponent value between 0 and 1
+
+     Notes
+     -----
+     - Values very close to 0 or 1 may indicate issues with the data
+     - Requires sufficient data points for reliable estimation
+     - Implementation uses variance scaling method
+     """
+     tau, lagvec = [], []
+
+     # Step through the different lags
+     for lag in range(2, max_lag):
+         # Produce the price difference for this lag
+         pp = np.subtract(series[lag:], series[:-lag])
+
+         # Write the different lags into a vector
+         lagvec.append(lag)
+
+         # Calculate the variance of the difference
+         tau.append(np.sqrt(np.std(pp)))
+
+     # Linear fit to a double-log graph to get the power
+     m = np.polyfit(np.log10(lagvec), np.log10(tau), 1)
+
+     # Calculate the Hurst exponent
+     return m[0] * 2
+
+
+ def half_life(price: pd.Series) -> int:
+     """
+     Half-life is the period of time it takes for the price to revert back to the mean.
+     """
+     xs_lag = price.shift(1).bfill()
+     xs_ret = price.diff().bfill()
+     res = sm.OLS(xs_ret, sm.add_constant(xs_lag)).fit()
+     return int(-np.log(2) / res.params.iloc[1])
+
+
+ def cointegration_test(p1: pd.Series, p2: pd.Series, alpha: float = 0.05) -> tuple[bool, float]:
+     p1, p2 = p1.dropna().align(p2.dropna(), join="inner")
+     _, pvalue, _ = coint(p1, p2)
+     return bool(pvalue < alpha), float(pvalue)
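
A quick sanity check for the statistics above (illustrative only, synthetic data): a pure random walk should produce a Hurst exponent near 0.5, two series built from the same walk should test as cointegrated, and an AR(1) process with coefficient 0.9 should have a half-life of roughly -ln(2)/ln(0.9) ≈ 7 bars.

import numpy as np
import pandas as pd

from qubx.math.stats import cointegration_test, half_life, hurst

rng = np.random.default_rng(0)
walk = np.cumsum(rng.normal(size=5_000))

print(round(hurst(walk), 2))             # expected to be close to 0.5 for a random walk

p1 = pd.Series(100.0 + walk)
p2 = pd.Series(50.0 + 0.5 * walk + rng.normal(scale=0.1, size=walk.size))
is_coint, pvalue = cointegration_test(p1, p2)
print(is_coint, round(pvalue, 4))        # the spread is stationary, so this should be True

ou = np.zeros(5_000)                     # mean-reverting AR(1) series for half_life
for i in range(1, ou.size):
    ou[i] = 0.9 * ou[i - 1] + rng.normal()
print(half_life(pd.Series(ou)))          # roughly 7 bars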
qubx/pandaz/__init__.py ADDED
@@ -0,0 +1,23 @@
+ __all__ = [
+     "srows",
+     "scols",
+     "continuous_periods",
+     "ohlc_resample",
+     "retain_columns_and_join",
+     "select_column_and_join",
+     "dict_to_frame",
+     "drop_duplicated_indexes",
+     "process_duplicated_indexes",
+ ]
+
+ from .utils import (
+     continuous_periods,
+     dict_to_frame,
+     drop_duplicated_indexes,
+     ohlc_resample,
+     process_duplicated_indexes,
+     retain_columns_and_join,
+     scols,
+     select_column_and_join,
+     srows,
+ )