Qubx 0.5.7 (cp312-cp312-manylinux_2_39_x86_64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of Qubx might be problematic.
- qubx/__init__.py +207 -0
- qubx/_nb_magic.py +100 -0
- qubx/backtester/__init__.py +5 -0
- qubx/backtester/account.py +145 -0
- qubx/backtester/broker.py +87 -0
- qubx/backtester/data.py +296 -0
- qubx/backtester/management.py +378 -0
- qubx/backtester/ome.py +296 -0
- qubx/backtester/optimization.py +201 -0
- qubx/backtester/simulated_data.py +558 -0
- qubx/backtester/simulator.py +362 -0
- qubx/backtester/utils.py +780 -0
- qubx/cli/__init__.py +0 -0
- qubx/cli/commands.py +67 -0
- qubx/connectors/ccxt/__init__.py +0 -0
- qubx/connectors/ccxt/account.py +495 -0
- qubx/connectors/ccxt/broker.py +132 -0
- qubx/connectors/ccxt/customizations.py +193 -0
- qubx/connectors/ccxt/data.py +612 -0
- qubx/connectors/ccxt/exceptions.py +17 -0
- qubx/connectors/ccxt/factory.py +93 -0
- qubx/connectors/ccxt/utils.py +307 -0
- qubx/core/__init__.py +0 -0
- qubx/core/account.py +251 -0
- qubx/core/basics.py +850 -0
- qubx/core/context.py +420 -0
- qubx/core/exceptions.py +38 -0
- qubx/core/helpers.py +480 -0
- qubx/core/interfaces.py +1150 -0
- qubx/core/loggers.py +514 -0
- qubx/core/lookups.py +475 -0
- qubx/core/metrics.py +1512 -0
- qubx/core/mixins/__init__.py +13 -0
- qubx/core/mixins/market.py +94 -0
- qubx/core/mixins/processing.py +428 -0
- qubx/core/mixins/subscription.py +203 -0
- qubx/core/mixins/trading.py +88 -0
- qubx/core/mixins/universe.py +270 -0
- qubx/core/series.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/core/series.pxd +125 -0
- qubx/core/series.pyi +118 -0
- qubx/core/series.pyx +988 -0
- qubx/core/utils.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/core/utils.pyi +6 -0
- qubx/core/utils.pyx +62 -0
- qubx/data/__init__.py +25 -0
- qubx/data/helpers.py +416 -0
- qubx/data/readers.py +1562 -0
- qubx/data/tardis.py +100 -0
- qubx/gathering/simplest.py +88 -0
- qubx/math/__init__.py +3 -0
- qubx/math/stats.py +129 -0
- qubx/pandaz/__init__.py +23 -0
- qubx/pandaz/ta.py +2757 -0
- qubx/pandaz/utils.py +638 -0
- qubx/resources/instruments/symbols-binance.cm.json +1 -0
- qubx/resources/instruments/symbols-binance.json +1 -0
- qubx/resources/instruments/symbols-binance.um.json +1 -0
- qubx/resources/instruments/symbols-bitfinex.f.json +1 -0
- qubx/resources/instruments/symbols-bitfinex.json +1 -0
- qubx/resources/instruments/symbols-kraken.f.json +1 -0
- qubx/resources/instruments/symbols-kraken.json +1 -0
- qubx/ta/__init__.py +0 -0
- qubx/ta/indicators.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/ta/indicators.pxd +149 -0
- qubx/ta/indicators.pyi +41 -0
- qubx/ta/indicators.pyx +787 -0
- qubx/trackers/__init__.py +3 -0
- qubx/trackers/abvanced.py +236 -0
- qubx/trackers/composite.py +146 -0
- qubx/trackers/rebalancers.py +129 -0
- qubx/trackers/riskctrl.py +641 -0
- qubx/trackers/sizers.py +235 -0
- qubx/utils/__init__.py +5 -0
- qubx/utils/_pyxreloader.py +281 -0
- qubx/utils/charting/lookinglass.py +1057 -0
- qubx/utils/charting/mpl_helpers.py +1183 -0
- qubx/utils/marketdata/binance.py +284 -0
- qubx/utils/marketdata/ccxt.py +90 -0
- qubx/utils/marketdata/dukas.py +130 -0
- qubx/utils/misc.py +541 -0
- qubx/utils/ntp.py +63 -0
- qubx/utils/numbers_utils.py +7 -0
- qubx/utils/orderbook.py +491 -0
- qubx/utils/plotting/__init__.py +0 -0
- qubx/utils/plotting/dashboard.py +150 -0
- qubx/utils/plotting/data.py +137 -0
- qubx/utils/plotting/interfaces.py +25 -0
- qubx/utils/plotting/renderers/__init__.py +0 -0
- qubx/utils/plotting/renderers/plotly.py +0 -0
- qubx/utils/runner/__init__.py +1 -0
- qubx/utils/runner/_jupyter_runner.pyt +60 -0
- qubx/utils/runner/accounts.py +88 -0
- qubx/utils/runner/configs.py +65 -0
- qubx/utils/runner/runner.py +470 -0
- qubx/utils/time.py +312 -0
- qubx-0.5.7.dist-info/METADATA +105 -0
- qubx-0.5.7.dist-info/RECORD +100 -0
- qubx-0.5.7.dist-info/WHEEL +4 -0
- qubx-0.5.7.dist-info/entry_points.txt +3 -0
qubx/utils/misc.py
ADDED
@@ -0,0 +1,541 @@
import asyncio
import concurrent.futures
import getpass
import hashlib
import os
import re
import string
import sys
import time
from collections import OrderedDict, defaultdict, deque, namedtuple
from collections.abc import Callable
from functools import wraps
from os.path import abspath, exists, expanduser, relpath
from pathlib import Path
from threading import Lock
from typing import Any, Awaitable, Union

import joblib
import numpy as np
import pandas as pd
from tqdm.auto import tqdm


def version() -> str:
    # - check current version
    version = "Dev"
    try:
        import importlib_metadata

        version = importlib_metadata.version("qubx")
    except:  # noqa: E722
        pass

    return version


def install_pyx_recompiler_for_dev():
    from ._pyxreloader import pyx_install_loader

    # if version().lower() == 'dev':
    print(f" > [{green('dev')}] {red('installing cython rebuilding hook')}")
    pyx_install_loader(["qubx.core", "qubx.ta", "qubx.data", "qubx.strategies"])


def runtime_env():
    """
    Check what environment this script is being run under
    :return: environment name, possible values:
             - 'notebook' jupyter notebook
             - 'shell' any interactive shell (ipython, PyCharm's console etc)
             - 'python' standard python interpreter
             - 'unknown' can't recognize environment
    """
    try:
        from IPython.core.getipython import get_ipython

        shell = get_ipython().__class__.__name__

        if shell == "ZMQInteractiveShell":  # Jupyter notebook or qtconsole
            return "notebook"
        elif shell.endswith("TerminalInteractiveShell"):  # Terminal running IPython
            return "shell"
        else:
            return "unknown"  # Other type (?)
    except (NameError, ImportError):
        return "python"  # Probably standard Python interpreter


_QUBX_FLDR = None


def get_local_qubx_folder() -> str:
    global _QUBX_FLDR

    if _QUBX_FLDR is None:
        _QUBX_FLDR = makedirs(os.getenv("QUBXSTORAGE", os.path.expanduser("~/.qubx")))

    return _QUBX_FLDR


def get_current_user() -> str:
    """
    Get current user's username.
    """
    return getpass.getuser()


def this_project_root(path: str = ".") -> Path | None:
    """
    Tries to find the current research project root.
    This is convenient when relative paths are needed in notebooks inside a research project.
    """
    _toml = Path("pyproject.toml")
    _x = Path(abspath(expanduser(path)))
    _terminator = str(_x.root)
    while str(_x) != _terminator:
        if (_x / _toml).exists():
            return _x
        _x = _x.parent
    return None


def add_project_to_system_path(project_folder: str = "~/projects"):
    """
    Add the projects folder to the system Python path so that any project module can be imported:
    from test.Models.handy_utils import some_module
    """
    # we want to track folders with these files as separate paths
    toml = Path("pyproject.toml")
    src = Path("src")

    try:
        prj = Path(relpath(expanduser(project_folder)))
    except ValueError as e:
        # This error can occur on Windows if user folder and python file are on different drives
        print(f"Qubx> Error during get path to projects folder:\n{e}")
    else:
        insert_path_iff = lambda p: (sys.path.insert(0, p.as_posix()) if p.as_posix() not in sys.path else None)  # noqa: E731
        if prj.exists():
            insert_path_iff(prj)

            for di in prj.iterdir():
                _src = di / src
                if (di / toml).exists():
                    # when we have src/
                    if _src.exists() and _src.is_dir():
                        insert_path_iff(_src)
                    else:
                        insert_path_iff(di)
        else:
            print(f"Qubx> Cant find {project_folder} folder for adding to python path !")


def class_import(name: str):
    """
    Import class by its name.

    For example:
    >>> class_import("qubx.core.data.DataProvider")
    <class 'qubx.core.data.DataProvider'>
    """
    components = name.split(".")
    clz = components[-1]
    mod = __import__(".".join(components[:-1]), fromlist=[clz])
    mod = getattr(mod, clz)
    return mod


def is_localhost(host):
    return host.lower() == "localhost" or host == "127.0.0.1"


def __wrap_with_color(code):
    def inner(text, bold=False):
        c = code
        if bold:
            c = "1;%s" % c
        return "\033[%sm%s\033[0m" % (c, text)

    return inner


red, green, yellow, blue, magenta, cyan, white = (
    __wrap_with_color("31"),
    __wrap_with_color("32"),
    __wrap_with_color("33"),
    __wrap_with_color("34"),
    __wrap_with_color("35"),
    __wrap_with_color("36"),
    __wrap_with_color("37"),
)


def logo():
    """
    Some fancy Qubx logo
    """
    print(
        f"""
⠀⠀⡰⡖⠒⠒⢒⢦⠀⠀
⠀⢠⠃⠈⢆⣀⣎⣀⣱⡀ {red("QUBX")} | {cyan("Quantitative Backtesting Environment")}
⠀⢳⠒⠒⡞⠚⡄⠀⡰⠁ (c) 2025, ver. {magenta(version().rstrip())}
⠀⠀⠱⣜⣀⣀⣈⣦⠃⠀⠀⠀
    """
    )


class Struct:
    """
    Dynamic structure (similar to Matlab's struct, it allows adding new properties dynamically)

    >>> a = Struct(x=1, y=2)
    >>> a.z = 'Hello'
    >>> print(a)

    Struct(x=1, y=2, z='Hello')

    >>> Struct(a=234, b=Struct(c=222)).to_dict()

    {'a': 234, 'b': {'c': 222}}

    >>> Struct({'a': 555}, a=123, b=Struct(c=222)).to_dict()

    {'a': 123, 'b': {'c': 222}}
    """

    def __init__(self, *args, **kwargs):
        _odw = OrderedDict(**kwargs)
        if args:
            if isinstance(args[0], dict):
                _odw = OrderedDict(Struct.dict2struct(args[0]).to_dict()) | _odw
            elif isinstance(args[0], Struct):
                _odw = args[0].to_dict() | _odw
        self.__initialize(_odw.keys(), _odw.values())

    def __initialize(self, fields, values):
        self._fields = list(fields)
        self._meta = namedtuple("Struct", " ".join(fields))
        self._inst = self._meta(*values)

    def fields(self) -> list:
        return self._fields

    def __getitem__(self, idx: int):
        return getattr(self._inst, self._fields[idx])

    def __getattr__(self, k):
        return getattr(self._inst, k)

    def __or__(self, other: Union[dict, "Struct"]):
        if isinstance(other, dict):
            other = Struct.dict2struct(other)
        elif not isinstance(other, Struct):
            raise ValueError(f"Can't union with object of {type(other)} type ")
        for f in other.fields():
            self.__setattr__(f, other.__getattr__(f))
        return self

    def __dir__(self):
        return self._fields

    def __repr__(self):
        return self._inst.__repr__()

    def __setattr__(self, k, v):
        if k not in ["_inst", "_meta", "_fields"]:
            new_vals = {**self._inst._asdict(), **{k: v}}
            self.__initialize(new_vals.keys(), new_vals.values())
        else:
            super().__setattr__(k, v)

    def __getstate__(self):
        return self._inst._asdict()

    def __setstate__(self, state):
        self.__init__(**state)

    def __ms2d(self, m) -> dict:
        r = {}
        for f in m._fields:
            v = m.__getattr__(f)
            r[f] = self.__ms2d(v) if isinstance(v, Struct) else v
        return r

    def to_dict(self) -> dict:
        """
        Return this structure as dictionary
        """
        return self.__ms2d(self)

    def copy(self) -> "Struct":
        """
        Returns copy of this structure
        """
        return Struct(self.to_dict())

    @staticmethod
    def dict2struct(d: dict) -> "Struct":
        """
        Convert dictionary to structure
        >>> s = dict2struct({'f_1_0': 1, 'z': {'x': 1, 'y': 2}})
        >>> print(s.z.x)
        1
        """
        m = Struct()
        for k, v in d.items():
            # skip if key is not valid identifier
            if not k.isidentifier():
                print(f"Struct> {k} doesn't look like as identifier - skip it")
                continue
            if isinstance(v, dict):
                v = Struct.dict2struct(v)
            m.__setattr__(k, v)
        return m


def makedirs(path: str, *args) -> str:
    path = os.path.expanduser(os.path.join(*[path, *args]))
    if not exists(path):
        os.makedirs(path, exist_ok=True)
    return path


class Stopwatch:
    """
    Stopwatch timer for performance
    """

    starts: dict[str | None, int] = {}
    counts: dict[str | None, int] = defaultdict(lambda: 0)
    latencies: dict[str | None, int] = {}
    _current_scope: str | None = None

    def __new__(cls):
        if not hasattr(cls, "instance"):
            cls.instance = super(Stopwatch, cls).__new__(cls)
        return cls.instance

    def start(self, scope: str | None):
        self.starts[scope] = time.perf_counter_ns()
        self.counts[scope] += 1

    def stop(self, scope: str | None = None) -> int | None:
        t = time.perf_counter_ns()
        s = self.starts.get(scope, None)
        lat = None
        if s:
            lat = t - s
            n = self.counts[scope]
            self.latencies[scope] = (self.latencies.get(scope, lat) * (n - 1) + lat) // n
            del self.starts[scope]
        return lat

    def latency_sec(self, scope: str | None) -> float:
        return self.latencies.get(scope, 0) / 1e9

    def watch(self, scope="global"):
        def _decorator(func):
            info = scope + "." + func.__name__

            def wrapper(*args, **kwargs):
                self.start(info)
                output = func(*args, **kwargs)
                self.stop(info)
                return output

            return wrapper

        return _decorator

    def reset(self):
        self.starts.clear()
        self.counts.clear()
        self.latencies.clear()

    def __str__(self) -> str:
        r = ""
        for l in self.latencies.keys():
            r += f"\n\t<w>{l}</w> took <r>{self.latency_sec(l):.7f}</r> secs"
        return r

    def __enter__(self):
        self.start(self._current_scope)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.stop(self._current_scope)

    def __call__(self, scope: str | None = "global"):
        self._current_scope = scope
        return self

    @classmethod
    def latency_report(cls) -> pd.DataFrame | None:
        if not hasattr(cls, "instance"):
            return None
        sw = cls.instance
        scope_to_latency_sec = {scope: sw.latency_sec(scope) for scope in sw.latencies.keys()}
        scope_to_count = {l: sw.counts[l] for l in scope_to_latency_sec.keys()}
        scope_to_total_time = {scope: scope_to_count[scope] * lat for scope, lat in scope_to_latency_sec.items()}
        # create pandas dataframe from dictionaries
        lats = pd.DataFrame(
            {
                "scope": list(scope_to_latency_sec.keys()),
                "latency": list(scope_to_latency_sec.values()),
                "count": list(scope_to_count.values()),
                "total_time": list(scope_to_total_time.values()),
            }
        )
        lats["latency"] = lats["latency"].apply(lambda x: f"{x:.4f}")
        lats["total_time (min)"] = lats["total_time"].apply(lambda x: f"{x / 60:.4f}")
        lats.drop(columns=["total_time"], inplace=True)
        return lats


def quotify(sx: str | list[str], quote="USDT"):
    """
    Make XXX<quote> from anything if that anything doesn't end with <quote>
    """
    if isinstance(sx, str):
        return (sx if sx.endswith(quote) else sx + quote).upper()
    elif isinstance(sx, (list, set, tuple)):
        return [quotify(s, quote) for s in sx]
    raise ValueError("Can't process input data !")


def dequotify(sx: str | list[str], quote="USDT"):
    """
    Turns XXX<quote> to XXX (reverse of quotify)
    """
    if isinstance(sx, str):
        quote = quote.upper()
        if (s := sx.upper()).endswith(quote):
            s = s.split(":")[1] if ":" in s else s  # remove exch: prefix if present
            return s.split(quote)[0]
    elif isinstance(sx, (list, set, tuple)):
        return [dequotify(s, quote) for s in sx]

    raise ValueError("Can't process input data !")


class ProgressParallel(joblib.Parallel):
    def __init__(self, *args, **kwargs):
        self.total = kwargs.pop("total", None)
        self.silent = kwargs.pop("silent", False)
        super().__init__(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        if self.silent:
            return joblib.Parallel.__call__(self, *args, **kwargs)
        with tqdm(total=self.total) as self._pbar:
            return joblib.Parallel.__call__(self, *args, **kwargs)

    def print_progress(self):
        if self.silent:
            return
        self._pbar.n = self.n_completed_tasks
        self._pbar.refresh()


class AsyncThreadLoop:
    """
    Helper class to submit coroutines to an asyncio loop from a separate thread.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop):
        self.loop = loop

    def submit(self, coro: Awaitable) -> concurrent.futures.Future:
        return asyncio.run_coroutine_threadsafe(coro, self.loop)


def synchronized(func: Callable):
    """Decorator that ensures only one thread can execute the decorated function at a time."""
    lock = Lock()

    @wraps(func)
    def wrapper(*args, **kwargs):
        with lock:
            return func(*args, **kwargs)

    return wrapper


class TimeLimitedDeque(deque):
    """
    A deque that removes elements older than a given time limit.
    Assumes that elements are inserted in increasing order of time.
    """

    def __init__(self, time_limit: str, time_key=lambda x: x[0], unit="ns", *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.time_limit = pd.Timedelta(time_limit).to_timedelta64()
        self.unit = unit
        self.time_key = lambda x: self._to_datetime64(time_key(x))

    def append(self, item):
        super().append(item)
        self._remove_old_elements()

    def __getitem__(self, idx) -> list[Any]:
        if isinstance(idx, slice) and (isinstance(idx.start, str) or isinstance(idx.stop, str)):
            start_loc, end_loc = 0, len(self)
            if idx.start is not None:
                start = self._to_datetime64(idx.start)
                while start_loc < len(self) and self.time_key(self[start_loc]) < start:
                    start_loc += 1
            if idx.stop is not None:
                stop = self._to_datetime64(idx.stop)
                while end_loc > 0 and self.time_key(self[end_loc - 1]) > stop:
                    end_loc -= 1
            return list(self)[start_loc:end_loc]
        else:
            return super().__getitem__(idx)

    def appendleft(self, item):
        raise NotImplementedError("appendleft is not supported for TimeLimitedDeque")

    def extendleft(self, items):
        raise NotImplementedError("extendleft is not supported for TimeLimitedDeque")

    def _remove_old_elements(self):
        if not self:
            return
        current_time = self.time_key(self[-1])
        while self and (current_time - self.time_key(self[0])) > self.time_limit:
            self.popleft()

    def _to_datetime64(self, time):
        return np.datetime64(time, self.unit)


__VOWS = "aeiou"
__CONS = "".join(sorted(set(string.ascii_lowercase) - set(__VOWS)))


def generate_name(content: Any, n1, ns=0) -> str:
    """
    Generates short unique name for given content.

    >>> print(generate_name("Qubix Trading Platform, (c) 2025", 8))
    >>> 'Pojituke'
    """
    __NV, __NC = len(__VOWS), len(__CONS)
    hdg = hashlib.sha256(str(content).encode("utf-8")).hexdigest().upper()
    w = ""
    for i, x in enumerate(hdg[ns : n1 + ns]):
        if i % 2 == 0:
            w += __CONS[int(x, 16) % __NC]
        else:
            w += __VOWS[int(x, 16) % __NV]
    return w[0].upper() + w[1:]


def string_shortener(s: str) -> str:
    """
    Removes all vowels and squeezes repeating symbols
    >>> print(string_shortener("QubxAssetManager"))
    >>> 'QbxAstMngr'
    """
    return re.sub(r"(.)\1+", r"\1", re.sub(r"[aeiou]", "", s))
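For orientation only (not part of the published diff), here is a minimal usage sketch of a few helpers from this module; it assumes the module is importable as qubx.utils.misc, and the expected outputs simply mirror the doctests above.

from qubx.utils.misc import Stopwatch, Struct, dequotify, generate_name, quotify

# Dynamic struct: fields can be added after construction
a = Struct(x=1, y=2)
a.z = "Hello"
print(a.to_dict())                    # {'x': 1, 'y': 2, 'z': 'Hello'}

# Symbol helpers (the endswith() check is case-sensitive, so pass upper-case symbols)
print(quotify("BTC"))                 # 'BTCUSDT'
print(dequotify("BINANCE:BTCUSDT"))   # 'BTC'

# Singleton stopwatch: time a named scope via the context-manager protocol
sw = Stopwatch()
with sw("demo.load_data"):
    sum(range(1_000_000))
print(sw.latency_sec("demo.load_data"))   # average latency of that scope, in seconds

# Deterministic short name derived from a SHA-256 of the content (per the docstring above)
print(generate_name("Qubix Trading Platform, (c) 2025", 8))   # 'Pojituke'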
qubx/utils/ntp.py
ADDED
@@ -0,0 +1,63 @@
import threading
import time
from datetime import datetime, timedelta
from time import sleep

import ntplib
import numpy as np

from qubx import logger

NTP_SERVERS_LIST = ["time.windows.com", "pool.ntp.org", "europe.pool.ntp.org", "time.google.com"]

__CORRECT_INTERVAL = timedelta(seconds=30)
__SLEEP_CORRECT_THREAD = 10

_offset = None  # never use it explicitly but for tests! Always use get_offset()
_controlling_thread = None


def __correct_offset():
    global _offset
    ntp_client = ntplib.NTPClient()
    for ntp_url in NTP_SERVERS_LIST:
        try:
            response = ntp_client.request(ntp_url)
            _offset = response.offset
            return
        except Exception as e:
            logger.warning(f"{ntp_url} NTP server request exception: {e}")
    logger.error(f"Unable to get ntp offset from neither of NTP servers list {NTP_SERVERS_LIST}")


def __correct_offset_runnable():
    logger.debug("NTP offset controller thread is started")
    last_corrected_dt = None
    while True:
        # do correction every specified interval
        if last_corrected_dt is None or datetime.now() - last_corrected_dt > __CORRECT_INTERVAL:
            __correct_offset()
            last_corrected_dt = datetime.now()
        sleep(__SLEEP_CORRECT_THREAD)


def start_ntp_thread():
    global _controlling_thread
    if _controlling_thread is not None:
        return
    _controlling_thread = threading.Thread(target=__correct_offset_runnable, daemon=True)
    _controlling_thread.start()


def time_now() -> np.datetime64:
    return np.datetime64(int((time.time() + get_offset()) * 1_000_000_000), "ns")


def get_offset():
    global _offset
    if _offset is None:
        __correct_offset()
        if _offset is None:  # if something really went wrong
            logger.warning("Unable to get ntp offset value. Very unexpected!")
            _offset = 0.0
    return _offset
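Likewise, a brief sketch of how these NTP helpers appear intended to be used (an illustration, not part of the diff); it assumes the module is importable as qubx.utils.ntp and that at least one server in NTP_SERVERS_LIST is reachable.

from qubx.utils.ntp import get_offset, start_ntp_thread, time_now

# Spawn the daemon thread that refreshes the clock offset roughly every 30 seconds
start_ntp_thread()

# Offset-corrected wall-clock time as a nanosecond-resolution numpy datetime64
print(time_now())

# Raw offset in seconds; falls back to 0.0 if no NTP server could be reached
print(get_offset())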