Qubx 0.1.83__tar.gz → 0.1.84__tar.gz

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of Qubx might be problematic.

Files changed (37)
  1. {qubx-0.1.83 → qubx-0.1.84}/PKG-INFO +1 -1
  2. {qubx-0.1.83 → qubx-0.1.84}/pyproject.toml +1 -1
  3. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/__init__.py +62 -32
  4. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/data/readers.py +71 -60
  5. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/utils/misc.py +70 -60
  6. {qubx-0.1.83 → qubx-0.1.84}/README.md +0 -0
  7. {qubx-0.1.83 → qubx-0.1.84}/build.py +0 -0
  8. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/_nb_magic.py +0 -0
  9. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/__init__.py +0 -0
  10. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/account.py +0 -0
  11. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/basics.py +0 -0
  12. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/helpers.py +0 -0
  13. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/loggers.py +0 -0
  14. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/lookups.py +0 -0
  15. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/series.pxd +0 -0
  16. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/series.pyx +0 -0
  17. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/strategy.py +0 -0
  18. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/core/utils.pyx +0 -0
  19. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/impl/ccxt_connector.py +0 -0
  20. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/impl/ccxt_customizations.py +0 -0
  21. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/impl/ccxt_trading.py +0 -0
  22. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/impl/ccxt_utils.py +0 -0
  23. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/math/__init__.py +0 -0
  24. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/math/stats.py +0 -0
  25. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/pandaz/__init__.py +0 -0
  26. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/pandaz/ta.py +0 -0
  27. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/pandaz/utils.py +0 -0
  28. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/ta/__init__.py +0 -0
  29. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/ta/indicators.pyx +0 -0
  30. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/trackers/__init__.py +0 -0
  31. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/trackers/rebalancers.py +0 -0
  32. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/utils/__init__.py +0 -0
  33. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/utils/_pyxreloader.py +0 -0
  34. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/utils/charting/mpl_helpers.py +0 -0
  35. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/utils/marketdata/binance.py +0 -0
  36. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/utils/runner.py +0 -0
  37. {qubx-0.1.83 → qubx-0.1.84}/src/qubx/utils/time.py +0 -0

{qubx-0.1.83 → qubx-0.1.84}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: Qubx
-Version: 0.1.83
+Version: 0.1.84
 Summary: Qubx - quantitative trading framework
 Home-page: https://github.com/dmarienko/Qubx
 Author: Dmitry Marienko

{qubx-0.1.83 → qubx-0.1.84}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "Qubx"
-version = "0.1.83"
+version = "0.1.84"
 description = "Qubx - quantitative trading framework"
 authors = ["Dmitry Marienko <dmitry@gmail.com>"]
 readme = "README.md"

{qubx-0.1.83 → qubx-0.1.84}/src/qubx/__init__.py

@@ -10,13 +10,20 @@ def formatter(record):
     end = record["extra"].get("end", "\n")
     fmt = "<lvl>{message}</lvl>%s" % end
     if record["level"].name in {"WARNING", "SNAKY"}:
-        fmt = "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - %s" % fmt
+        fmt = (
+            "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - %s" % fmt
+        )
 
-    prefix = "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> [ <level>%s</level> ] " % record["level"].icon
+    prefix = (
+        "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> [ <level>%s</level> ] "
+        % record["level"].icon
+    )
 
     if record["exception"] is not None:
         # stackprinter.set_excepthook(style='darkbg2')
-        record["extra"]["stack"] = stackprinter.format(record["exception"], style="darkbg")
+        record["extra"]["stack"] = stackprinter.format(
+            record["exception"], style="darkbg"
+        )
         fmt += "\n{extra[stack]}\n"
 
     if record["level"].name in {"TEXT"}:
@@ -25,24 +32,43 @@ def formatter(record):
     return prefix + fmt
 
 
-config = {
-    "handlers": [ {"sink": sys.stdout, "format": "{time} - {message}"}, ],
-    "extra": {"user": "someone"},
-}
+class QubxLogConfig:
 
+    @staticmethod
+    def get_log_level():
+        return os.getenv("QUBX_LOG_LEVEL", "DEBUG")
+
+    @staticmethod
+    def set_log_level(level: str):
+        os.environ["QUBX_LOG_LEVEL"] = level
+        QubxLogConfig.setup_logger(level)
+
+    @staticmethod
+    def setup_logger(level: str | None = None):
+        global logger
+        config = {
+            "handlers": [
+                {"sink": sys.stdout, "format": "{time} - {message}"},
+            ],
+            "extra": {"user": "someone"},
+        }
+        logger.configure(**config)
+        logger.remove(None)
+        level = level or QubxLogConfig.get_log_level()
+        logger.add(sys.stdout, format=formatter, colorize=True, level=level)
+        logger = logger.opt(colors=True)
+
+
+QubxLogConfig.setup_logger()
 
-logger.configure(**config)
-logger.remove(None)
-logger.add(sys.stdout, format=formatter, colorize=True)
-logger = logger.opt(colors=True)
 
 # - global lookup helper
 lookup = GlobalLookup(InstrumentsLookup(), FeesLookup())
 
 
 # registering magic for jupyter notebook
-if runtime_env() in ['notebook', 'shell']:
-    from IPython.core.magic import (Magics, magics_class, line_magic, line_cell_magic)
+if runtime_env() in ["notebook", "shell"]:
+    from IPython.core.magic import Magics, magics_class, line_magic, line_cell_magic
     from IPython import get_ipython
 
     @magics_class
@@ -52,11 +78,11 @@ if runtime_env() in ['notebook', 'shell']:
 
         @line_magic
         def qubxd(self, line: str):
-            self.qubx_setup('dark' + ' ' + line)
+            self.qubx_setup("dark" + " " + line)
 
         @line_magic
         def qubxl(self, line: str):
-            self.qubx_setup('light' + ' ' + line)
+            self.qubx_setup("light" + " " + line)
 
         @line_magic
         def qubx_setup(self, line: str):
@@ -64,25 +90,26 @@ if runtime_env() in ['notebook', 'shell']:
             QUBX framework initialization
             """
             import os
-            args = [x.strip() for x in line.split(' ')]
-
+
+            args = [x.strip() for x in line.split(" ")]
+
             # setup cython dev hooks - only if 'dev' is passed as argument
-            if line and 'dev' in args:
+            if line and "dev" in args:
                 install_pyx_recompiler_for_dev()
 
             tpl_path = os.path.join(os.path.dirname(__file__), "_nb_magic.py")
-            with open(tpl_path, 'r', encoding="utf8") as myfile:
+            with open(tpl_path, "r", encoding="utf8") as myfile:
                 s = myfile.read()
 
             exec(s, self.shell.user_ns)
 
             # setup more funcy mpl theme instead of ugly default
             if line:
-                if 'dark' in line.lower():
-                    set_mpl_theme('dark')
+                if "dark" in line.lower():
+                    set_mpl_theme("dark")
 
-                elif 'light' in line.lower():
-                    set_mpl_theme('light')
+                elif "light" in line.lower():
+                    set_mpl_theme("light")
 
             # install additional plotly helpers
             # from qube.charting.plot_helpers import install_plotly_helpers
@@ -91,6 +118,7 @@ if runtime_env() in ['notebook', 'shell']:
         def _get_manager(self):
             if self.__manager is None:
                 import multiprocessing as m
+
                 self.__manager = m.Manager()
             return self.__manager
 
@@ -102,7 +130,7 @@ if runtime_env() in ['notebook', 'shell']:
             >>> %%proc x, y as MyProc1
             >>> x.set('Hello')
             >>> y.set([1,2,3,4])
-
+
             """
             import multiprocessing as m
             import time, re
@@ -111,8 +139,8 @@ if runtime_env() in ['notebook', 'shell']:
             name = None
             if line:
                 # check if custom process name was provided
-                if ' as ' in line:
-                    line, name = line.split('as')
+                if " as " in line:
+                    line, name = line.split("as")
                     if not name.isspace():
                         name = name.strip()
                     else:
@@ -120,11 +148,11 @@ if runtime_env() in ['notebook', 'shell']:
                         return
 
             ipy = get_ipython()
-            for a in [x for x in re.split('[\ ,;]', line.strip()) if x]:
+            for a in [x for x in re.split("[\ ,;]", line.strip()) if x]:
                 ipy.push({a: self._get_manager().Value(None, None)})
 
             # code to run
-            lines = '\n'.join([' %s' % x for x in cell.split('\n')])
+            lines = "\n".join([" %s" % x for x in cell.split("\n")])
 
             def fn():
                 result = get_ipython().run_cell(lines)
@@ -136,17 +164,18 @@ if runtime_env() in ['notebook', 'shell']:
                 if result.error_in_exec:
                     raise result.error_in_exec
 
-            t_start = str(time.time()).replace('.', '_')
-            f_id = f'proc_{t_start}' if name is None else name
+            t_start = str(time.time()).replace(".", "_")
+            f_id = f"proc_{t_start}" if name is None else name
             if self._is_task_name_already_used(f_id):
                 f_id = f"{f_id}_{t_start}"
 
             task = m.Process(target=fn, name=f_id)
             task.start()
-            print(' -> Task %s is started' % f_id)
+            print(" -> Task %s is started" % f_id)
 
         def _is_task_name_already_used(self, name):
             import multiprocessing as m
+
             for p in m.active_children():
                 if p.name == name:
                     return True
@@ -155,16 +184,17 @@ if runtime_env() in ['notebook', 'shell']:
         @line_magic
         def list_proc(self, line):
            import multiprocessing as m
+
            for p in m.active_children():
                print(p.name)
 
         @line_magic
         def kill_proc(self, line):
            import multiprocessing as m
+
            for p in m.active_children():
                if line and p.name.startswith(line):
                    p.terminate()
 
-
    # - registering magic here
    get_ipython().register_magics(QubxMagics)
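
The QubxLogConfig class introduced above replaces the one-shot module-level loguru setup with a re-runnable configuration keyed to the QUBX_LOG_LEVEL environment variable (defaulting to DEBUG). A minimal usage sketch, relying only on names this diff itself introduces:

    from qubx import QubxLogConfig, logger

    print(QubxLogConfig.get_log_level())  # "DEBUG" unless QUBX_LOG_LEVEL is set

    # store the new level in the environment and re-install the stdout sink
    QubxLogConfig.set_log_level("INFO")
    logger.info("log level switched to INFO")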

{qubx-0.1.83 → qubx-0.1.84}/src/qubx/data/readers.py

@@ -11,6 +11,7 @@ from functools import wraps
 from qubx import logger
 from qubx.core.series import TimeSeries, OHLCV, time_as_nsec, Quote, Trade
 from qubx.utils.time import infer_series_frequency, handle_start_stop
+from psycopg.types.datetime import TimestampLoader
 
 _DT = lambda x: pd.Timedelta(x).to_numpy().item()
 D1, H1 = _DT("1D"), _DT("1h")
@@ -20,6 +21,12 @@ STOCK_DAILY_SESSION = (_DT("9:30:00.100"), _DT("15:59:59.900"))
 CME_FUTURES_DAILY_SESSION = (_DT("8:30:00.100"), _DT("15:14:59.900"))
 
 
+class NpTimestampLoader(TimestampLoader):
+    def load(self, data) -> np.datetime64:
+        dt = super().load(data)
+        return np.datetime64(dt)
+
+
 def _recognize_t(t: Union[int, str], defaultvalue, timeunit) -> int:
     if isinstance(t, (str, pd.Timestamp)):
         try:
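
NpTimestampLoader, added just above, wraps psycopg's standard timestamp loader so query results come back as numpy.datetime64 values. Where it gets registered is not part of this diff; with psycopg 3 a loader of this kind is typically attached to a connection roughly as follows (the connection string is a placeholder, not taken from the package):

    import psycopg
    from qubx.data.readers import NpTimestampLoader

    conn = psycopg.connect("host=localhost port=8812 user=admin password=quest")
    # route the "timestamp" column type through the numpy-aware loader
    conn.adapters.register_loader("timestamp", NpTimestampLoader)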
@@ -46,7 +53,7 @@ def _find_column_index_in_list(xs, *args):
 
 
 _FIND_TIME_COL_IDX = lambda column_names: _find_column_index_in_list(
-    column_names, "time", "timestamp", "datetime", "date", "open_time"
+    column_names, "time", "timestamp", "datetime", "date", "open_time", "ts"
 )
 
 
@@ -56,7 +63,13 @@ class DataTransformer:
         self.buffer = []
         self._column_names = []
 
-    def start_transform(self, name: str, column_names: List[str]):
+    def start_transform(
+        self,
+        name: str,
+        column_names: List[str],
+        start: str | None = None,
+        stop: str | None = None,
+    ):
         self._column_names = column_names
         self.buffer = []
 
@@ -181,7 +194,9 @@ class CsvStorageDataReader(DataReader):
 
             def _iter_chunks():
                 for n in range(0, length // chunksize + 1):
-                    transform.start_transform(data_id, fieldnames)
+                    transform.start_transform(
+                        data_id, fieldnames, start=start, stop=stop
+                    )
                     raw_data = (
                         selected_table[n * chunksize : min((n + 1) * chunksize, length)]
                         .to_pandas()
@@ -192,7 +207,7 @@ class CsvStorageDataReader(DataReader):
 
             return _iter_chunks()
 
-        transform.start_transform(data_id, fieldnames)
+        transform.start_transform(data_id, fieldnames, start=start, stop=stop)
         raw_data = selected_table.to_pandas().to_numpy()
         transform.process_data(raw_data)
         return transform.collect()
@@ -213,7 +228,7 @@ class AsPandasFrame(DataTransformer):
     def __init__(self, timestamp_units=None) -> None:
         self.timestamp_units = timestamp_units
 
-    def start_transform(self, name: str, column_names: List[str]):
+    def start_transform(self, name: str, column_names: List[str], **kwargs):
         self._time_idx = _FIND_TIME_COL_IDX(column_names)
         self._column_names = column_names
         self._frame = pd.DataFrame()
@@ -256,7 +271,7 @@ class AsOhlcvSeries(DataTransformer):
         self._data_type = None
         self.timestamp_units = timestamp_units
 
-    def start_transform(self, name: str, column_names: List[str]):
+    def start_transform(self, name: str, column_names: List[str], **kwargs):
         self._time_idx = _FIND_TIME_COL_IDX(column_names)
         self._volume_idx = None
         self._b_volume_idx = None
@@ -376,7 +391,7 @@ class AsQuotes(DataTransformer):
     Data must have appropriate structure: bid, ask, bidsize, asksize and time
     """
 
-    def start_transform(self, name: str, column_names: List[str]):
+    def start_transform(self, name: str, column_names: List[str], **kwargs):
         self.buffer = list()
         self._time_idx = _FIND_TIME_COL_IDX(column_names)
         self._bid_idx = _find_column_index_in_list(column_names, "bid")
@@ -422,7 +437,7 @@ class AsTimestampedRecords(DataTransformer):
     def __init__(self, timestamp_units: str | None = None) -> None:
         self.timestamp_units = timestamp_units
 
-    def start_transform(self, name: str, column_names: List[str]):
+    def start_transform(self, name: str, column_names: List[str], **kwargs):
         self.buffer = list()
         self._time_idx = _FIND_TIME_COL_IDX(column_names)
         self._column_names = column_names
@@ -465,7 +480,7 @@ class RestoreTicksFromOHLC(DataTransformer):
         self._d_session_start = daily_session_start_end[0]
         self._d_session_end = daily_session_start_end[1]
 
-    def start_transform(self, name: str, column_names: List[str]):
+    def start_transform(self, name: str, column_names: List[str], **kwargs):
         self.buffer = []
         # - it will fail if receive data doesn't look as ohlcv
         self._time_idx = _FIND_TIME_COL_IDX(column_names)
@@ -606,10 +621,8 @@ def _retry(fn):
                 # print(x, cls._reconnect_tries)
                 try:
                     return fn(*args, **kw)
-                except (pg.InterfaceError, pg.OperationalError) as e:
-                    logger.warning(
-                        "Database Connection [InterfaceError or OperationalError]"
-                    )
+                except (pg.InterfaceError, pg.OperationalError, AttributeError) as e:
+                    logger.debug("Database Connection [InterfaceError or OperationalError]")
                     # print ("Idle for %s seconds" % (cls._reconnect_idle))
                     # time.sleep(cls._reconnect_idle)
                     cls._connect()
@@ -700,7 +713,7 @@ class QuestDBSqlCandlesBuilder(QuestDBSqlBuilder):
                 resample
             )
             if resample
-            else resample
+            else "1m"  # if resample is empty let's use 1 minute timeframe
         )
         _rsmpl = f"SAMPLE by {resample}" if resample else ""
 
@@ -749,6 +762,16 @@ class QuestDBConnector(DataReader):
         self._builder = builder
         self._connect()
 
+    def __getstate__(self):
+        if self._connection:
+            self._connection.close()
+            self._connection = None
+        if self._cursor:
+            self._cursor.close()
+            self._cursor = None
+        state = self.__dict__.copy()
+        return state
+
     def _connect(self):
         self._connection = pg.connect(self.connection_url, autocommit=True)
         self._cursor = self._connection.cursor()
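
The new __getstate__ closes and clears the live psycopg connection and cursor before the object's state is captured, which makes a QuestDBConnector picklable (for example, to hand it to a multiprocessing worker). Read together with the AttributeError added to the _retry handler earlier in this file, the unpickled copy appears intended to reconnect lazily on its next read. A hedged sketch of that round trip (the connector's constructor arguments are not shown in this diff):

    import pickle
    from qubx.data.readers import QuestDBConnector

    def ship_to_worker(reader: QuestDBConnector) -> bytes:
        # __getstate__ closes the connection/cursor and stores them as None
        return pickle.dumps(reader)

    # pickle.loads(...) then yields a connector without a live connection; the
    # next read raises AttributeError, which _retry catches before calling
    # _connect() to re-establish the session.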
@@ -761,7 +784,7 @@
         stop: str | None = None,
         transform: DataTransformer = DataTransformer(),
         chunksize=0,  # TODO: use self._cursor.fetchmany in this case !!!!
-        timeframe: str = "1m",
+        timeframe: str | None = "1m",
         data_type="candles_1m",
     ) -> Any:
         return self._read(
@@ -786,7 +809,7 @@
         stop: str | None,
         transform: DataTransformer,
         chunksize: int,  # TODO: use self._cursor.fetchmany in this case !!!!
-        timeframe: str,
+        timeframe: str | None,
         data_type: str,
         builder: QuestDBSqlBuilder,
     ) -> Any:
@@ -795,9 +818,11 @@
 
         self._cursor.execute(_req)  # type: ignore
         records = self._cursor.fetchall()  # TODO: for chunksize > 0 use fetchmany etc
+        if not records:
+            return None
 
         names = [d.name for d in self._cursor.description]  # type: ignore
-        transform.start_transform(data_id, names)
+        transform.start_transform(data_id, names, start=start, stop=stop)
 
         transform.process_data(records)
         return transform.collect()
@@ -811,54 +836,20 @@
     def __del__(self):
         for c in (self._cursor, self._connection):
             try:
-                logger.info("Closing connection")
+                logger.debug("Closing connection")
                 c.close()
             except:
                 pass
 
 
-class SnapshotsBuilder(DataTransformer):
-    """
-    Snapshots assembler from OB updates
-    """
-
-    def __init__(
-        self,
-        levels: int = -1,  # how many levels restore, 1 - TOB, -1 - all
-        as_frame=False,  # result is dataframe
-    ):
-        self.buffer = []
-        self.levels = levels
-        self.as_frame = as_frame
-
-    def start_transform(self, name: str, column_names: List[str]):
-        # initialize buffer / series etc
-        # let's keep restored snapshots into some buffer etc
-        self.buffer = []
-
-        # do additional init stuff here
-
-    def process_data(self, rows_data: List[List]) -> Any:
-        for r in rows_data:
-            # restore snapshots and put into buffer or series
-            pass
-
-    def collect(self) -> Any:
-        # - may be convert it to pandas DataFrame ?
-        if self.as_frame:
-            return pd.DataFrame.from_records(self.buffer)  # or custom transform
-
-        # - or just returns as plain list
-        return self.buffer
-
-
-class QuestDBSqlOrderBookBilder(QuestDBSqlBuilder):
+class QuestDBSqlOrderBookBuilder(QuestDBSqlCandlesBuilder):
     """
     Sql builder for snapshot data
     """
 
-    def get_table_name(self, data_id: str, sfx: str = "") -> str:
-        return ""
+    MAX_TIME_DELTA = pd.Timedelta("5h")
+    SNAPSHOT_DELTA = pd.Timedelta("1h")
+    MIN_DELTA = pd.Timedelta("1s")
 
     def prepare_data_sql(
         self,
@@ -868,7 +859,23 @@ class QuestDBSqlOrderBookBilder(QuestDBSqlBuilder):
         resample: str,
         data_type: str,
     ) -> str:
-        return ""
+        if not start or not end:
+            raise ValueError("Start and end dates must be provided for orderbook data!")
+        start_dt, end_dt = pd.Timestamp(start), pd.Timestamp(end)
+        delta = end_dt - start_dt
+        if delta > self.MAX_TIME_DELTA:
+            raise ValueError(
+                f"Time range is too big for orderbook data: {delta}, max allowed: {self.MAX_TIME_DELTA}"
+            )
+
+        raw_start_dt = start_dt.floor(self.SNAPSHOT_DELTA) - self.MIN_DELTA
+
+        table_name = self.get_table_name(data_id, data_type)
+        query = f"""
+        SELECT * FROM {table_name}
+        WHERE timestamp BETWEEN '{raw_start_dt}' AND '{end_dt}'
+        """
+        return query
 
 
 class TradeSql(QuestDBSqlCandlesBuilder):
@@ -931,7 +938,8 @@ class MultiQdbConnector(QuestDBConnector):
     _TYPE_TO_BUILDER = {
         "candles_1m": QuestDBSqlCandlesBuilder(),
         "trade": TradeSql(),
-        "orderbook": QuestDBSqlOrderBookBilder(),
+        "agg_trade": TradeSql(),
+        "orderbook": QuestDBSqlOrderBookBuilder(),
     }
 
     _TYPE_MAPPINGS = {
@@ -940,6 +948,9 @@
         "ob": "orderbook",
         "trd": "trade",
         "td": "trade",
+        "aggTrade": "agg_trade",
+        "agg_trades": "agg_trade",
+        "aggTrades": "agg_trade",
     }
 
     def __init__(
@@ -974,9 +985,9 @@
         start: str | None = None,
         stop: str | None = None,
         transform: DataTransformer = DataTransformer(),
-        chunksize=0,  # TODO: use self._cursor.fetchmany in this case !!!!
+        chunksize: int = 0,  # TODO: use self._cursor.fetchmany in this case !!!!
         timeframe: str | None = None,
-        data_type="candles",
+        data_type: str = "candles",
     ) -> Any:
         _mapped_data_type = self._TYPE_MAPPINGS.get(data_type, data_type)
         return self._read(
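
With this release the readers forward the requested start/stop bounds into start_transform, and the built-in transformers simply absorb them through **kwargs. A custom transformer can opt in to those bounds; the sketch below is illustrative only and assumes that AsPandasFrame.collect() returns the assembled, time-indexed DataFrame (that method is not shown in this diff):

    from qubx.data.readers import AsPandasFrame

    class ClippedFrame(AsPandasFrame):
        # hypothetical subclass: trims the collected frame to the requested window
        def start_transform(self, name, column_names, start=None, stop=None, **kwargs):
            super().start_transform(name, column_names)
            self._start, self._stop = start, stop  # keep the requested bounds

        def collect(self):
            frame = super().collect()
            if self._start or self._stop:
                frame = frame.loc[self._start : self._stop]
            return frame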

{qubx-0.1.83 → qubx-0.1.84}/src/qubx/utils/misc.py

@@ -8,10 +8,11 @@ from pathlib import Path
 
 def version() -> str:
     # - check current version
-    version = 'Dev'
-    try:
+    version = "Dev"
+    try:
         import importlib_metadata
-        version = importlib_metadata.version('qubx')
+
+        version = importlib_metadata.version("qubx")
     except:
         pass
 
@@ -23,12 +24,7 @@ def install_pyx_recompiler_for_dev():
 
     # if version().lower() == 'dev':
     print(f" > [{green('dev')}] {red('installing cython rebuilding hook')}")
-    pyx_install_loader([
-        'qubx.core',
-        'qubx.ta',
-        'qubx.data',
-        'qubx.strategies'
-    ])
+    pyx_install_loader(["qubx.core", "qubx.ta", "qubx.data", "qubx.strategies"])
 
 
 def runtime_env():
@@ -42,29 +38,32 @@ def runtime_env():
     """
     try:
         from IPython.core.getipython import get_ipython
+
         shell = get_ipython().__class__.__name__
 
-        if shell == 'ZMQInteractiveShell': # Jupyter notebook or qtconsole
-            return 'notebook'
-        elif shell.endswith('TerminalInteractiveShell'): # Terminal running IPython
-            return 'shell'
+        if shell == "ZMQInteractiveShell":  # Jupyter notebook or qtconsole
+            return "notebook"
+        elif shell.endswith("TerminalInteractiveShell"):  # Terminal running IPython
+            return "shell"
         else:
-            return 'unknown' # Other type (?)
+            return "unknown"  # Other type (?)
     except (NameError, ImportError):
-        return 'python' # Probably standard Python interpreter
+        return "python"  # Probably standard Python interpreter
+
 
 _QUBX_FLDR = None
 
+
 def get_local_qubx_folder() -> str:
     global _QUBX_FLDR
 
     if _QUBX_FLDR is None:
-        _QUBX_FLDR = makedirs(os.getenv('QUBXSTORAGE', os.path.expanduser('~/.qubx')))
+        _QUBX_FLDR = makedirs(os.getenv("QUBXSTORAGE", os.path.expanduser("~/.qubx")))
 
     return _QUBX_FLDR
 
 
-def add_project_to_system_path(project_folder:str = '~/projects'):
+def add_project_to_system_path(project_folder: str = "~/projects"):
     """
     Add path to projects folder to system python path to be able importing any modules from project
     from test.Models.handy_utils import some_module
@@ -72,21 +71,23 @@ def add_project_to_system_path(project_folder:str = '~/projects'):
     import sys
     from os.path import expanduser, relpath
     from pathlib import Path
-
+
     # we want to track folders with these files as separate paths
-    toml = Path('pyproject.toml')
-    src = Path('src')
-
+    toml = Path("pyproject.toml")
+    src = Path("src")
+
     try:
         prj = Path(relpath(expanduser(project_folder)))
     except ValueError as e:
         # This error can occur on Windows if user folder and python file are on different drives
         print(f"Qube> Error during get path to projects folder:\n{e}")
     else:
-        insert_path_iff = lambda p: sys.path.insert(0, p.as_posix()) if p.as_posix() not in sys.path else None
+        insert_path_iff = lambda p: (
+            sys.path.insert(0, p.as_posix()) if p.as_posix() not in sys.path else None
+        )
        if prj.exists():
            insert_path_iff(prj)
-
+
            for di in prj.iterdir():
                _src = di / src
                if (di / toml).exists():
@@ -96,11 +97,13 @@ def add_project_to_system_path(project_folder:str = '~/projects'):
                    else:
                        insert_path_iff(di)
        else:
-            print(f'Qube> Cant find {project_folder} folder for adding to python path !')
+            print(
+                f"Qube> Cant find {project_folder} folder for adding to python path !"
+            )
 
 
 def is_localhost(host):
-    return host.lower() == 'localhost' or host == '127.0.0.1'
+    return host.lower() == "localhost" or host == "127.0.0.1"
 
 
 def __wrap_with_color(code):
@@ -114,13 +117,13 @@ def __wrap_with_color(code):
 
 
 red, green, yellow, blue, magenta, cyan, white = (
-    __wrap_with_color('31'),
-    __wrap_with_color('32'),
-    __wrap_with_color('33'),
-    __wrap_with_color('34'),
-    __wrap_with_color('35'),
-    __wrap_with_color('36'),
-    __wrap_with_color('37'),
+    __wrap_with_color("31"),
+    __wrap_with_color("32"),
+    __wrap_with_color("33"),
+    __wrap_with_color("34"),
+    __wrap_with_color("35"),
+    __wrap_with_color("36"),
+    __wrap_with_color("37"),
 )
 
 
@@ -128,12 +131,13 @@ def logo():
     """
     Some fancy Qubx logo
     """
-    print(f"""
+    print(
+        f"""
   ⠀⠀⡰⡖⠒⠒⢒⢦⠀⠀
   ⠀⢠⠃⠈⢆⣀⣎⣀⣱⡀ {red("QUBX")} | {cyan("Quantitative Backtesting Environment")}
   ⠀⢳⠒⠒⡞⠚⡄⠀⡰⠁ (c) 2024, ver. {magenta(version().rstrip())}
   ⠀⠀⠱⣜⣀⣀⣈⣦⠃⠀⠀⠀
-    """
+    """
     )
 
 
@@ -146,9 +150,9 @@ class Struct:
     >>> print(a)
 
     Struct(x=1, y=2, z='Hello')
-
+
     >>> Struct(a=234, b=Struct(c=222)).to_dict()
-
+
     {'a': 234, 'b': {'c': 222}}
 
     >>> Struct({'a': 555}, a=123, b=Struct(c=222)).to_dict()
@@ -167,7 +171,7 @@ class Struct:
 
     def __initialize(self, fields, values):
         self._fields = list(fields)
-        self._meta = namedtuple('Struct', ' '.join(fields))
+        self._meta = namedtuple("Struct", " ".join(fields))
         self._inst = self._meta(*values)
 
     def fields(self) -> list:
@@ -179,7 +183,7 @@ class Struct:
     def __getattr__(self, k):
         return getattr(self._inst, k)
 
-    def __or__(self, other: Union[dict, 'Struct']):
+    def __or__(self, other: Union[dict, "Struct"]):
         if isinstance(other, dict):
             other = Struct.dict2struct(other)
         elif not isinstance(other, Struct):
@@ -195,7 +199,7 @@ class Struct:
         return self._inst.__repr__()
 
     def __setattr__(self, k, v):
-        if k not in ['_inst', '_meta', '_fields']:
+        if k not in ["_inst", "_meta", "_fields"]:
             new_vals = {**self._inst._asdict(), **{k: v}}
             self.__initialize(new_vals.keys(), new_vals.values())
         else:
@@ -220,14 +224,14 @@ class Struct:
         """
         return self.__ms2d(self)
 
-    def copy(self) -> 'Struct':
+    def copy(self) -> "Struct":
         """
         Returns copy of this structure
         """
         return Struct(self.to_dict())
 
     @staticmethod
-    def dict2struct(d: dict) -> 'Struct':
+    def dict2struct(d: dict) -> "Struct":
         """
         Convert dictionary to structure
         >>> s = dict2struct({'f_1_0': 1, 'z': {'x': 1, 'y': 2}})
@@ -249,52 +253,58 @@ class Struct:
 def makedirs(path: str, *args) -> str:
     path = os.path.expanduser(os.path.join(*[path, *args]))
     if not exists(path):
-        os.makedirs(path)
+        os.makedirs(path, exist_ok=True)
     return path
 
 
 class Stopwatch:
     """
-    Stopwatch timer for performance
+    Stopwatch timer for performance
     """
-    starts: Dict[str|None, int] = {}
-    counts: Dict[str|None, int] = defaultdict(lambda: 0)
-    latencies: Dict[str|None, int] = {}
-
+
+    starts: Dict[str | None, int] = {}
+    counts: Dict[str | None, int] = defaultdict(lambda: 0)
+    latencies: Dict[str | None, int] = {}
+
     def __new__(cls):
-        if not hasattr(cls, 'instance'):
+        if not hasattr(cls, "instance"):
             cls.instance = super(Stopwatch, cls).__new__(cls)
         return cls.instance
-
+
     def start(self, scope: str | None):
         self.starts[scope] = time.perf_counter_ns()
         self.counts[scope] += 1
-
-    def stop(self, scope: str|None=None) -> int | None:
+
+    def stop(self, scope: str | None = None) -> int | None:
         t = time.perf_counter_ns()
         s = self.starts.get(scope, None)
         lat = None
         if s:
             lat = t - s
             n = self.counts[scope]
-            self.latencies[scope] = (lat * (n - 1) + self.latencies.get(scope, lat)) // n
+            self.latencies[scope] = (
+                lat * (n - 1) + self.latencies.get(scope, lat)
+            ) // n
             del self.starts[scope]
         return lat
 
     def latency_sec(self, scope: str | None) -> float:
         return self.latencies.get(scope, 0) / 1e9
 
-    def watch(self, scope='global'):
+    def watch(self, scope="global"):
         def _decorator(func):
-            info = scope + '.' + func.__name__
+            info = scope + "." + func.__name__
+
             def wrapper(*args, **kwargs):
                 self.start(info)
                 output = func(*args, **kwargs)
                 self.stop(info)
                 return output
+
             return wrapper
+
         return _decorator
-
+
     def reset(self):
         self.starts.clear()
         self.counts.clear()
@@ -307,7 +317,7 @@ class Stopwatch:
         return r
 
 
-def quotify(sx: Union[str, List[str]], quote='USDT'):
+def quotify(sx: Union[str, List[str]], quote="USDT"):
     """
     Make XXX<quote> from anything if that anything doesn't end with <quote>
     """
@@ -318,16 +328,16 @@ def quotify(sx: Union[str, List[str]], quote='USDT'):
     raise ValueError("Can't process input data !")
 
 
-def dequotify(sx: Union[str, List[str]], quote='USDT'):
+def dequotify(sx: Union[str, List[str]], quote="USDT"):
     """
     Turns XXX<quote> to XXX (reverse of quotify)
     """
     if isinstance(sx, str):
         quote = quote.upper()
-        if (s:=sx.upper()).endswith(quote):
-            s = s.split(':')[1] if ':' in s else s  # remove exch: if presented
+        if (s := sx.upper()).endswith(quote):
+            s = s.split(":")[1] if ":" in s else s  # remove exch: if presented
             return s.split(quote)[0]
     elif isinstance(sx, (list, set, tuple)):
         return [dequotify(s, quote) for s in sx]
 
-    raise ValueError("Can't process input data !")
+    raise ValueError("Can't process input data !")
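
For reference, the reformatted Stopwatch above is a process-wide singleton: watch() times every call of the wrapped function under the key "<scope>.<function name>", and latency_sec() converts the tracked value to seconds. A minimal sketch using only names from the diff:

    from qubx.utils.misc import Stopwatch

    sw = Stopwatch()

    @sw.watch("data")
    def load_candles():
        pass  # placeholder for real work

    load_candles()
    print(sw.latency_sec("data.load_candles"))  # tracked latency of the wrapped call, in seconds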