Qubx 0.1.6__cp311-cp311-manylinux_2_35_x86_64.whl → 0.1.8__cp311-cp311-manylinux_2_35_x86_64.whl

This diff compares the contents of the two publicly released package versions as they appear in the registry.

Note: this release of Qubx has been flagged as potentially problematic.

qubx/data/readers.py CHANGED
@@ -29,6 +29,13 @@ def _recognize_t(t: Union[int, str], defaultvalue, timeunit) -> int:
     return defaultvalue
 
 
+def _time(t, timestamp_units: str) -> int:
+    t = int(t) if isinstance(t, float) else t
+    if timestamp_units == 'ns':
+        return np.datetime64(t, 'ns').item()
+    return np.datetime64(t, timestamp_units).astype('datetime64[ns]').item()
+
+
 def _find_column_index_in_list(xs, *args):
     xs = [x.lower() for x in xs]
     for a in args:
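
For reference, a minimal self-contained sketch of what the new module-level `_time` helper returns (the body is copied from the hunk above; the sample values are illustrative):

```python
import numpy as np

def _time(t, timestamp_units: str) -> int:
    # floats (e.g. epoch milliseconds read from storage) are truncated to int first
    t = int(t) if isinstance(t, float) else t
    if timestamp_units == 'ns':
        return np.datetime64(t, 'ns').item()   # nanosecond epochs pass through as plain ints
    return np.datetime64(t, timestamp_units).astype('datetime64[ns]').item()

# epoch milliseconds -> nanoseconds since epoch, as a plain Python int
assert _time(1711944240000.0, 'ms') == 1711944240000000000
assert _time(1711944240000000000, 'ns') == 1711944240000000000
```

This replaces the former `AsOhlcvSeries._time` method (removed further down) so every transformer can share it.
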
@@ -38,6 +45,9 @@ def _find_column_index_in_list(xs, *args):
     raise IndexError(f"Can't find any from {args} in list: {xs}")
 
 
+_FIND_TIME_COL_IDX = lambda column_names: _find_column_index_in_list(column_names, 'time', 'timestamp', 'datetime', 'date', 'open_time')
+
+
 class DataTransformer:
 
     def __init__(self) -> None:
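
Every transformer now resolves its time column through this single lambda, which also recognizes Binance-style `open_time` columns. A small sketch of the intended lookup; the body of `_find_column_index_in_list` is not shown in this diff, so the reconstruction below is an assumption that only matches its visible signature and error message:

```python
def _find_column_index_in_list(xs, *args):
    # assumed reconstruction: case-insensitive, first matching candidate wins
    xs = [x.lower() for x in xs]
    for a in args:
        if a in xs:
            return xs.index(a)
    raise IndexError(f"Can't find any from {args} in list: {xs}")

_FIND_TIME_COL_IDX = lambda column_names: _find_column_index_in_list(
    column_names, 'time', 'timestamp', 'datetime', 'date', 'open_time')

assert _FIND_TIME_COL_IDX(['open_time', 'open', 'high', 'low', 'close', 'volume']) == 0
```
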
@@ -121,9 +131,9 @@ class CsvStorageDataReader(DataReader):
         # - try to find range to load
         start_idx, stop_idx = 0, table.num_rows
         try:
-            _time_field_idx = _find_column_index_in_list(fieldnames, 'time', 'timestamp', 'datetime', 'date')
+            _time_field_idx = _FIND_TIME_COL_IDX(fieldnames)
             _time_type = table.field(_time_field_idx).type
-            _time_unit = _time_type.unit if hasattr(_time_type, 'unit') else 's'
+            _time_unit = _time_type.unit if hasattr(_time_type, 'unit') else 'ms'
             _time_data = table[_time_field_idx]
 
             # - check if need convert time to primitive types (i.e. Date32 -> timestamp[x])
@@ -181,9 +191,11 @@ class AsPandasFrame(DataTransformer):
     """
     List of records to pandas dataframe transformer
     """
+    def __init__(self, timestamp_units=None) -> None:
+        self.timestamp_units = timestamp_units
 
     def start_transform(self, name: str, column_names: List[str]):
-        self._time_idx = _find_column_index_in_list(column_names, 'time', 'timestamp', 'datetime', 'date')
+        self._time_idx = _FIND_TIME_COL_IDX(column_names)
         self._column_names = column_names
         self._frame = pd.DataFrame()
 
@@ -191,6 +203,8 @@ class AsPandasFrame(DataTransformer):
         self._frame
         p = pd.DataFrame.from_records(rows_data, columns=self._column_names)
         p.set_index(self._column_names[self._time_idx], drop=True, inplace=True)
+        p.index = pd.to_datetime(p.index, unit=self.timestamp_units) if self.timestamp_units else p.index
+        p.index.rename('timestamp', inplace=True)
         p.sort_index(inplace=True)
         self._frame = pd.concat((self._frame, p), axis=0, sort=True)
         return p
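
The two added lines normalize the frame index: when `timestamp_units` is given, raw epoch values are parsed into a `DatetimeIndex`, and the index is always renamed to `timestamp`. A pandas-only sketch with illustrative values:

```python
import pandas as pd

p = pd.DataFrame.from_records(
    [[1711944240000, 203.219], [1711944300000, 203.175]],
    columns=['open_time', 'close'],
)
p.set_index('open_time', drop=True, inplace=True)

# equivalent of AsPandasFrame(timestamp_units='ms')
p.index = pd.to_datetime(p.index, unit='ms')
p.index.rename('timestamp', inplace=True)

print(p.index[0])   # 2024-04-01 04:04:00
```
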
@@ -200,6 +214,17 @@
 
 
 class AsOhlcvSeries(DataTransformer):
+    """
+    Convert incoming data into OHLCV series.
+
+    Incoming data may have one of the following structures:
+
+    ```
+    ohlcv: time,open,high,low,close,volume|quote_volume,(buy_volume)
+    quotes: time,bid,ask,bidsize,asksize
+    trades (TAS): time,price,size,(is_taker)
+    ```
+    """
 
     def __init__(self, timeframe: str | None = None, timestamp_units='ns') -> None:
         super().__init__()
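
A hypothetical end-to-end use of the transformer protocol with the trades (TAS) layout from the docstring above; the column names and values are illustrative, and the `'1Min'` timeframe string is an assumption:

```python
from qubx.data.readers import AsOhlcvSeries

tr = AsOhlcvSeries(timeframe='1Min', timestamp_units='ms')
tr.start_transform('BINANCE.UM:BTCUSDT', ['time', 'price', 'size'])   # trades layout
tr.process_data([
    [1711944240000, 203.21, 1.5],
    [1711944242500, 203.25, 0.3],
])
bars = tr.collect()   # OHLCV series aggregated into 1-minute bars
```
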
@@ -209,7 +234,7 @@ class AsOhlcvSeries(DataTransformer):
         self.timestamp_units = timestamp_units
 
     def start_transform(self, name: str, column_names: List[str]):
-        self._time_idx = _find_column_index_in_list(column_names, 'time', 'timestamp', 'datetime', 'date')
+        self._time_idx = _FIND_TIME_COL_IDX(column_names)
         self._volume_idx = None
         self._b_volume_idx = None
         try:
@@ -251,15 +276,10 @@ class AsOhlcvSeries(DataTransformer):
         if self.timeframe:
             self._series = OHLCV(self._name, self.timeframe)
 
-    def _time(self, t) -> int:
-        if self.timestamp_units == 'ns':
-            return np.datetime64(t, 'ns').item()
-        return np.datetime64(t, self.timestamp_units).astype('datetime64[ns]').item()
-
     def _proc_ohlc(self, rows_data: List[List]):
         for d in rows_data:
             self._series.update_by_bar(
-                self._time(d[self._time_idx]),
+                _time(d[self._time_idx], self.timestamp_units),
                 d[self._open_idx], d[self._high_idx], d[self._low_idx], d[self._close_idx],
                 d[self._volume_idx] if self._volume_idx else 0,
                 d[self._b_volume_idx] if self._b_volume_idx else 0
@@ -268,7 +288,7 @@ class AsOhlcvSeries(DataTransformer):
     def _proc_quotes(self, rows_data: List[List]):
         for d in rows_data:
             self._series.update(
-                self._time(d[self._time_idx]),
+                _time(d[self._time_idx], self.timestamp_units),
                 (d[self._ask_idx] + d[self._bid_idx])/2
             )
 
@@ -277,7 +297,7 @@ class AsOhlcvSeries(DataTransformer):
             a = d[self._taker_idx] if self._taker_idx else 0
             s = d[self._size_idx]
             b = s if a else 0
-            self._series.update(self._time(d[self._time_idx]), d[self._price_idx], s, b)
+            self._series.update(_time(d[self._time_idx], self.timestamp_units), d[self._price_idx], s, b)
 
     def process_data(self, rows_data: List[List]) -> Any:
         if self._series is None:
@@ -302,10 +322,14 @@ class AsOhlcvSeries(DataTransformer):
 
 
 class AsQuotes(DataTransformer):
+    """
+    Tries to convert incoming data to list of Quote's
+    Data must have appropriate structure: bid, ask, bidsize, asksize and time
+    """
 
     def start_transform(self, name: str, column_names: List[str]):
         self.buffer = list()
-        self._time_idx = _find_column_index_in_list(column_names, 'time', 'timestamp', 'datetime')
+        self._time_idx = _FIND_TIME_COL_IDX(column_names)
         self._bid_idx = _find_column_index_in_list(column_names, 'bid')
         self._ask_idx = _find_column_index_in_list(column_names, 'ask')
         self._bidvol_idx = _find_column_index_in_list(column_names, 'bidvol', 'bid_vol', 'bidsize', 'bid_size')
@@ -320,6 +344,48 @@ class AsQuotes(DataTransformer):
             bv = d[self._bidvol_idx]
             av = d[self._askvol_idx]
             self.buffer.append(Quote(t.as_unit('ns').asm8.item(), b, a, bv, av))
+
+
+class AsTimestampedRecords(DataTransformer):
+    """
+    Convert incoming data to list or dictionaries with preprocessed timestamps ('timestamp_ns' and 'timestamp')
+    ```
+    [
+        {
+            'open_time': 1711944240000.0,
+            'open': 203.219,
+            'high': 203.33,
+            'low': 203.134,
+            'close': 203.175,
+            'volume': 10060.0,
+            ....
+            'timestamp_ns': 1711944240000000000,
+            'timestamp': Timestamp('2024-04-01 04:04:00')
+        },
+        ...
+    ] ```
+    """
+
+    def __init__(self, timestamp_units: str | None=None) -> None:
+        self.timestamp_units = timestamp_units
+
+    def start_transform(self, name: str, column_names: List[str]):
+        self.buffer = list()
+        self._time_idx = _FIND_TIME_COL_IDX(column_names)
+        self._column_names = column_names
+
+    def process_data(self, rows_data: Iterable) -> Any:
+        self.buffer.extend(rows_data)
+
+    def collect(self) -> Any:
+        res = []
+        for r in self.buffer:
+            t = r[self._time_idx]
+            if self.timestamp_units:
+                t = _time(t, self.timestamp_units)
+            di = dict(zip(self._column_names, r)) | { 'timestamp_ns': t, 'timestamp': pd.Timestamp(t) }
+            res.append(di)
+        return res
 
 
 class RestoreTicksFromOHLC(DataTransformer):
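
A hypothetical round trip through the new transformer; the column names and values mirror the docstring example, and the rows are fed in directly rather than through a reader:

```python
import pandas as pd
from qubx.data.readers import AsTimestampedRecords

tr = AsTimestampedRecords(timestamp_units='ms')
tr.start_transform('BINANCE.UM:ETHUSDT', ['open_time', 'open', 'close', 'volume'])
tr.process_data([[1711944240000.0, 203.219, 203.175, 10060.0]])
records = tr.collect()
# records[0] == {'open_time': 1711944240000.0, 'open': 203.219, 'close': 203.175, 'volume': 10060.0,
#                'timestamp_ns': 1711944240000000000,
#                'timestamp': pd.Timestamp('2024-04-01 04:04:00')}
```
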
@@ -344,7 +410,7 @@ class RestoreTicksFromOHLC(DataTransformer):
     def start_transform(self, name: str, column_names: List[str]):
         self.buffer = []
         # - it will fail if receive data doesn't look as ohlcv
-        self._time_idx = _find_column_index_in_list(column_names, 'time', 'timestamp', 'datetime', 'date')
+        self._time_idx = _FIND_TIME_COL_IDX(column_names)
         self._open_idx = _find_column_index_in_list(column_names, 'open')
         self._high_idx = _find_column_index_in_list(column_names, 'high')
         self._low_idx = _find_column_index_in_list(column_names, 'low')
@@ -441,23 +507,25 @@ class QuestDBConnector(DataReader):
     """
     Very first version of QuestDB connector
 
-    # Connect to an existing QuestDB instance
-    >>> db = QuestDBConnector('user=admin password=quest host=localhost port=8812', OhlcvPandasDataProcessor())
-    >>> db.read('BINANCEF.ETHUSDT', '2024-01-01')
+    ### Connect to an existing QuestDB instance
+    >>> db = QuestDBConnector()
+    >>> db.read('BINANCE.UM:ETHUSDT', '2024-01-01', transform=AsPandasFrame())
     """
     _reconnect_tries = 5
     _reconnect_idle = 0.1 # wait seconds before retying
 
-    def __init__(self, connection_url: str) -> None:
+    def __init__(self, host='localhost', user='admin', password='quest', port=8812) -> None:
         self._connection = None
         self._cursor = None
-        self.connection_url = connection_url
+        self._host = host
+        self._port = port
+        self.connection_url = f'user={user} password={password} host={host} port={port}'
         self._connect()
 
     def _connect(self):
-        logger.info("Connecting to QuestDB ...")
         self._connection = pg.connect(self.connection_url, autocommit=True)
         self._cursor = self._connection.cursor()
+        logger.debug(f"Connected to QuestDB at {self._host}:{self._port}")
 
     @_retry
     def read(self, data_id: str, start: str|None=None, stop: str|None=None,
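
The connector now assembles its libpq-style connection string internally from keyword arguments instead of taking a pre-built URL. A brief sketch of overriding the defaults (the host value is an example; `__init__` connects eagerly, so a reachable QuestDB instance is required):

```python
from qubx.data.readers import QuestDBConnector, AsPandasFrame

# defaults: host='localhost', user='admin', password='quest', port=8812
db = QuestDBConnector(host='10.0.1.5')
frame = db.read('BINANCE.UM:ETHUSDT', '2024-01-01', transform=AsPandasFrame())
```
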
@@ -476,14 +544,49 @@ class QuestDBConnector(DataReader):
         transform.process_data(records)
         return transform.collect()
 
-    def _prepare_data_sql(self, data_id: str, start: str|None, end: str|None, resample: str) -> str:
-        # just a temp hack - actually we need to discuss symbology etc
-        symbol = data_id#.split('.')[-1]
+    @staticmethod
+    def _convert_time_delta_to_qdb_resample_format(c_tf: str):
+        if c_tf:
+            _t = re.match(r'(\d+)(\w+)', c_tf)
+            if _t and len(_t.groups()) > 1:
+                c_tf = f"{_t[1]}{_t[2][0].lower()}"
+        return c_tf
+
+    def _get_table_name(self, data_id: str, sfx: str='') -> str:
+        """
+        Get table name for data_id
+        data_id can have format <exchange>.<type>:<symbol>
+        for example:
+            BINANCE.UM:BTCUSDT or BINANCE:BTCUSDT for spot
+        """
+        _aliases = {'um': 'umfutures', 'cm': 'cmfutures', 'f': 'futures'}
+        table_name = data_id
+        _ss = data_id.split(':')
+        if len(_ss) > 1:
+            _exch, symb = _ss
+            _mktype = 'spot'
+            _ss = _exch.split('.')
+            if len(_ss) > 1:
+                _exch = _ss[0]
+                _mktype = _ss[1]
+            _mktype = _mktype.lower()
+            table_name = '.'.join(filter(lambda x: x, [_exch.lower(), _aliases.get(_mktype, _mktype), symb.lower(), sfx]))
+        return table_name
 
+    def _prepare_data_sql(self, data_id: str, start: str|None, end: str|None, resample: str) -> str:
+        where = ''
         w0 = f"timestamp >= '{start}'" if start else ''
         w1 = f"timestamp <= '{end}'" if end else ''
-        where = f'where {w0} and {w1}' if (w0 and w1) else f"where {(w0 or w1)}"
 
+        # - fix: when no data ranges are provided we must skip empy where keyword
+        if w0 or w1:
+            where = f'where {w0} and {w1}' if (w0 and w1) else f"where {(w0 or w1)}"
+
+        # - check resample format
+        resample = QuestDBConnector._convert_time_delta_to_qdb_resample_format(resample) if resample else resample
+        _rsmpl = f"SAMPLE by {resample}" if resample else ''
+
+        table_name = self._get_table_name(data_id, 'candles_1m')
         return f"""
         select timestamp,
             first(open) as open,
@@ -495,8 +598,7 @@ class QuestDBConnector(DataReader):
            sum(count) as count,
            sum(taker_buy_volume) as taker_buy_volume,
            sum(taker_buy_quote_volume) as taker_buy_quote_volume
-        from "{symbol.upper()}" {where}
-        SAMPLE by {resample};
+        from "{table_name}" {where} {_rsmpl};
        """
 
     def _prepare_names_sql(self) -> str:
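
Worked examples of the two new helpers; the expected values follow directly from the code above, and the host is an example (instantiating the connector requires a reachable QuestDB):

```python
from qubx.data.readers import QuestDBConnector

db = QuestDBConnector(host='questdb.local')

# data_id -> QuestDB table holding the 1-minute candles
db._get_table_name('BINANCE.UM:BTCUSDT', 'candles_1m')   # 'binance.umfutures.btcusdt.candles_1m'
db._get_table_name('BINANCE:ETHUSDT', 'candles_1m')      # 'binance.spot.ethusdt.candles_1m'

# timeframe strings such as '15Min' are shortened to QuestDB SAMPLE BY units
QuestDBConnector._convert_time_delta_to_qdb_resample_format('15Min')   # '15m'
QuestDBConnector._convert_time_delta_to_qdb_resample_format('1h')      # '1h'
```
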

qubx-0.1.6.dist-info/METADATA → qubx-0.1.8.dist-info/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: Qubx
-Version: 0.1.6
+Version: 0.1.8
 Summary: Qubx - quantitative trading framework
 Home-page: https://github.com/dmarienko/Qubx
 Author: Dmitry Marienko

qubx-0.1.6.dist-info/RECORD → qubx-0.1.8.dist-info/RECORD CHANGED

@@ -6,13 +6,13 @@ qubx/core/basics.py,sha256=2u7WV5KX-RbTmzoKfi1yT4HNLDPfQcFMCUZ1pVsM_VE,14777
 qubx/core/helpers.py,sha256=gPE78dO718NBY0-JbfqNGCzIvr4BVatFntNIy2RUrEY,11559
 qubx/core/loggers.py,sha256=HpgavBZegoDv9ssihtqX0pitXKULVAPHUpoE_volJw0,11910
 qubx/core/lookups.py,sha256=4aEC7b2AyEXFqHHGDenex3Z1FZGrpDSb8IwzBZrSqIA,13688
-qubx/core/series.cpython-311-x86_64-linux-gnu.so,sha256=48xXevH8u0RzK6uWFve0vgd6cCFBQcUqyXWRzOww198,698952
+qubx/core/series.cpython-311-x86_64-linux-gnu.so,sha256=ltVxtoC4f0B4skXk6yRtoryDzon6gwLX_3NuX7gb5io,698952
 qubx/core/series.pxd,sha256=IS89NQ5FYp3T0YIHe1lELKZIAKrNvX8K6WlLyac44I4,2847
 qubx/core/series.pyx,sha256=WEAjn4j3zn540Cxx68X5gRXilvwa7NGdbki6myzZbIM,28108
 qubx/core/strategy.py,sha256=Fs4fFyHaEGYuz7mBeQHBWFu3Ipg0yFzcxXhskgsPxJE,30330
-qubx/core/utils.cpython-311-x86_64-linux-gnu.so,sha256=bNoo-8OPy-YeF0AgPNPhBkcWjHuMWSIke-WiAKlA6wU,74216
+qubx/core/utils.cpython-311-x86_64-linux-gnu.so,sha256=5ddzSg68VpiWMCEtase_w0XhyEb4DUjP7xWxL55XQ4k,74216
 qubx/core/utils.pyx,sha256=6dQ8R02bl8V3f-W3Wk9-e86D9OvDz-5-4NA_dlF_xwc,1368
-qubx/data/readers.py,sha256=i_QbRaCXc5Lf4G22XIsl86YZBS0OrFSJq4wDuo3k-5w,21310
+qubx/data/readers.py,sha256=EuYLt7mmf4TPtVzlbbxGvgw-luR7oDPwdLJQe37iU6c,24786
 qubx/impl/ccxt_connector.py,sha256=NqF-tgxfTATnmVqKUonNXCAzECrDU8YrgqM3Nq06fw8,9150
 qubx/impl/ccxt_customizations.py,sha256=kK_4KmOyKvDVgd4MTkVg4CyqdjE-6r41siZIvLj-A-Q,3488
 qubx/impl/ccxt_trading.py,sha256=cmg4P-zd78w-V8j3-IGS2LFxikGhxFPgmCvz3sr065Q,9097
@@ -23,7 +23,7 @@ qubx/pandaz/__init__.py,sha256=Iw5uzicYGSC3FEKZ-W1O5-7cXq_P0kH11-EcXV0zZhs,175
 qubx/pandaz/ta.py,sha256=TUvjrvmk4EQvDcXoRp6Os08-HUap-ZvpSDGawhViOgg,85271
 qubx/pandaz/utils.py,sha256=FyLKQy8spkqxhBij_nPFC_ZzI_L3-IgB9O53MqWKmq0,19109
 qubx/ta/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-qubx/ta/indicators.cpython-311-x86_64-linux-gnu.so,sha256=ktYA1gKh1Lo-rNWs7LkeVcBu_6r4duK_Ju9hyzlKgQ8,284552
+qubx/ta/indicators.cpython-311-x86_64-linux-gnu.so,sha256=JDkaQwaNS6-J9ICrxJsATcSxaZKvURf0jtQ-09vhUGg,284552
 qubx/ta/indicators.pyx,sha256=P-GEYUks2lSHo6hbtUFAB7TWE1AunjLR4jIjwqPHrwU,7708
 qubx/trackers/__init__.py,sha256=1y_yvIy0OQwBqfhAW_EY33NxFzFSWvI0qNAPU6zchYc,60
 qubx/trackers/rebalancers.py,sha256=QCzANCooZBi2VMCBjjCPMq_Dt1h1zrBelATnfmVve74,5522
@@ -34,6 +34,6 @@ qubx/utils/marketdata/binance.py,sha256=36dl4rxOAGTeY3uoONmiPanj8BkP0oBdDiH-URJJ
 qubx/utils/misc.py,sha256=z5rdz5hbRu9-F2QgF47OCkMvhfIkRKs-PHR8L5DWkBM,9831
 qubx/utils/runner.py,sha256=OY7SoRfxHwzn0rKTGB_lbg5zNASEL_49hQXWqs-LiMk,9306
 qubx/utils/time.py,sha256=_DjCdQditzZwMy_8rvPdWyw5tjw__2p24LMPgXdZ8i0,4911
-qubx-0.1.6.dist-info/METADATA,sha256=y_DcaF5WFk44CtZCJVQWfsON1wEAa1iSvz5cZjshGA8,2490
-qubx-0.1.6.dist-info/WHEEL,sha256=MLOa6LysROdjgj4FVxsHitAnIh8Be2D_c9ZSBHKrz2M,110
-qubx-0.1.6.dist-info/RECORD,,
+qubx-0.1.8.dist-info/METADATA,sha256=W62rM-gCLkzeF0GQ087NFox6vZ6wQBouO_aSfRaDoV8,2490
+qubx-0.1.8.dist-info/WHEEL,sha256=MLOa6LysROdjgj4FVxsHitAnIh8Be2D_c9ZSBHKrz2M,110
+qubx-0.1.8.dist-info/RECORD,,