Qubx 0.5.7__cp312-cp312-manylinux_2_39_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of Qubx might be problematic. Click here for more details.

Files changed (100) hide show
  1. qubx/__init__.py +207 -0
  2. qubx/_nb_magic.py +100 -0
  3. qubx/backtester/__init__.py +5 -0
  4. qubx/backtester/account.py +145 -0
  5. qubx/backtester/broker.py +87 -0
  6. qubx/backtester/data.py +296 -0
  7. qubx/backtester/management.py +378 -0
  8. qubx/backtester/ome.py +296 -0
  9. qubx/backtester/optimization.py +201 -0
  10. qubx/backtester/simulated_data.py +558 -0
  11. qubx/backtester/simulator.py +362 -0
  12. qubx/backtester/utils.py +780 -0
  13. qubx/cli/__init__.py +0 -0
  14. qubx/cli/commands.py +67 -0
  15. qubx/connectors/ccxt/__init__.py +0 -0
  16. qubx/connectors/ccxt/account.py +495 -0
  17. qubx/connectors/ccxt/broker.py +132 -0
  18. qubx/connectors/ccxt/customizations.py +193 -0
  19. qubx/connectors/ccxt/data.py +612 -0
  20. qubx/connectors/ccxt/exceptions.py +17 -0
  21. qubx/connectors/ccxt/factory.py +93 -0
  22. qubx/connectors/ccxt/utils.py +307 -0
  23. qubx/core/__init__.py +0 -0
  24. qubx/core/account.py +251 -0
  25. qubx/core/basics.py +850 -0
  26. qubx/core/context.py +420 -0
  27. qubx/core/exceptions.py +38 -0
  28. qubx/core/helpers.py +480 -0
  29. qubx/core/interfaces.py +1150 -0
  30. qubx/core/loggers.py +514 -0
  31. qubx/core/lookups.py +475 -0
  32. qubx/core/metrics.py +1512 -0
  33. qubx/core/mixins/__init__.py +13 -0
  34. qubx/core/mixins/market.py +94 -0
  35. qubx/core/mixins/processing.py +428 -0
  36. qubx/core/mixins/subscription.py +203 -0
  37. qubx/core/mixins/trading.py +88 -0
  38. qubx/core/mixins/universe.py +270 -0
  39. qubx/core/series.cpython-312-x86_64-linux-gnu.so +0 -0
  40. qubx/core/series.pxd +125 -0
  41. qubx/core/series.pyi +118 -0
  42. qubx/core/series.pyx +988 -0
  43. qubx/core/utils.cpython-312-x86_64-linux-gnu.so +0 -0
  44. qubx/core/utils.pyi +6 -0
  45. qubx/core/utils.pyx +62 -0
  46. qubx/data/__init__.py +25 -0
  47. qubx/data/helpers.py +416 -0
  48. qubx/data/readers.py +1562 -0
  49. qubx/data/tardis.py +100 -0
  50. qubx/gathering/simplest.py +88 -0
  51. qubx/math/__init__.py +3 -0
  52. qubx/math/stats.py +129 -0
  53. qubx/pandaz/__init__.py +23 -0
  54. qubx/pandaz/ta.py +2757 -0
  55. qubx/pandaz/utils.py +638 -0
  56. qubx/resources/instruments/symbols-binance.cm.json +1 -0
  57. qubx/resources/instruments/symbols-binance.json +1 -0
  58. qubx/resources/instruments/symbols-binance.um.json +1 -0
  59. qubx/resources/instruments/symbols-bitfinex.f.json +1 -0
  60. qubx/resources/instruments/symbols-bitfinex.json +1 -0
  61. qubx/resources/instruments/symbols-kraken.f.json +1 -0
  62. qubx/resources/instruments/symbols-kraken.json +1 -0
  63. qubx/ta/__init__.py +0 -0
  64. qubx/ta/indicators.cpython-312-x86_64-linux-gnu.so +0 -0
  65. qubx/ta/indicators.pxd +149 -0
  66. qubx/ta/indicators.pyi +41 -0
  67. qubx/ta/indicators.pyx +787 -0
  68. qubx/trackers/__init__.py +3 -0
  69. qubx/trackers/abvanced.py +236 -0
  70. qubx/trackers/composite.py +146 -0
  71. qubx/trackers/rebalancers.py +129 -0
  72. qubx/trackers/riskctrl.py +641 -0
  73. qubx/trackers/sizers.py +235 -0
  74. qubx/utils/__init__.py +5 -0
  75. qubx/utils/_pyxreloader.py +281 -0
  76. qubx/utils/charting/lookinglass.py +1057 -0
  77. qubx/utils/charting/mpl_helpers.py +1183 -0
  78. qubx/utils/marketdata/binance.py +284 -0
  79. qubx/utils/marketdata/ccxt.py +90 -0
  80. qubx/utils/marketdata/dukas.py +130 -0
  81. qubx/utils/misc.py +541 -0
  82. qubx/utils/ntp.py +63 -0
  83. qubx/utils/numbers_utils.py +7 -0
  84. qubx/utils/orderbook.py +491 -0
  85. qubx/utils/plotting/__init__.py +0 -0
  86. qubx/utils/plotting/dashboard.py +150 -0
  87. qubx/utils/plotting/data.py +137 -0
  88. qubx/utils/plotting/interfaces.py +25 -0
  89. qubx/utils/plotting/renderers/__init__.py +0 -0
  90. qubx/utils/plotting/renderers/plotly.py +0 -0
  91. qubx/utils/runner/__init__.py +1 -0
  92. qubx/utils/runner/_jupyter_runner.pyt +60 -0
  93. qubx/utils/runner/accounts.py +88 -0
  94. qubx/utils/runner/configs.py +65 -0
  95. qubx/utils/runner/runner.py +470 -0
  96. qubx/utils/time.py +312 -0
  97. qubx-0.5.7.dist-info/METADATA +105 -0
  98. qubx-0.5.7.dist-info/RECORD +100 -0
  99. qubx-0.5.7.dist-info/WHEEL +4 -0
  100. qubx-0.5.7.dist-info/entry_points.txt +3 -0
qubx/core/series.pyx ADDED
@@ -0,0 +1,988 @@
1
+ import pandas as pd
2
+ import numpy as np
3
+ cimport numpy as np
4
+ from cython cimport abs
5
+ from typing import Union
6
+ from qubx.core.utils import time_to_str, time_delta_to_str, recognize_timeframe
7
+ from qubx.utils.time import infer_series_frequency
8
+
9
+
10
+ cdef extern from "math.h":
11
+ float INFINITY
12
+
13
+
14
cdef np.ndarray nans(int dims):
    """
    Return a 1-D array of length `dims` filled with NaN.

    :param dims: array size
    :return: float array of NaNs
    """
    return np.full(dims, np.nan)
22
+
23
+
24
cdef inline long long floor_t64(long long time, long long dt):
    """
    Round `time` down to the nearest multiple of `dt` (both in nanoseconds).
    """
    cdef long long remainder = time % dt
    return time - remainder
29
+
30
+
31
cpdef long long time_as_nsec(time):
    """
    Try to recognize the input time representation and convert it to
    integer nanoseconds since epoch.

    Accepts numpy.datetime64, pandas.Timestamp, ISO-format string or a raw int.

    :param time: time in any of the supported representations
    :return: time as integer nanoseconds
    """
    if isinstance(time, np.datetime64):
        return time.astype('<M8[ns]').item()
    elif isinstance(time, pd.Timestamp):
        # - fix: .asm8 is a datetime64 *object*, not an integer as the
        #   declared long long return type requires; .value is ns since epoch
        return time.value
    elif isinstance(time, str):
        return np.datetime64(time).astype('<M8[ns]').item()
    return time
42
+
43
+
44
cdef class RollingSum:
    """
    Fast rolling summator over a fixed-size window.
    """

    def __init__(self, int period):
        # - ring buffer holding the last `period` values
        self.period = period
        self.__s = np.zeros(period)
        self.__i = 0
        self.rsum = 0.0
        self.is_init_stage = 1

    cpdef double update(self, double value, short new_item_started):
        """
        Push a value into the window and return the updated rolling sum.
        NaN input is ignored and NaN is returned.
        """
        if np.isnan(value):
            return np.nan
        evicted = self.__s[self.__i]
        if new_item_started:
            self.__i += 1
            if self.__i >= self.period:
                # - buffer wrapped around: warm-up phase is over
                self.__i = 0
                self.is_init_stage = 0
            evicted = self.__s[self.__i]
        self.__s[self.__i] = value
        self.rsum -= evicted
        self.rsum += value
        return self.rsum

    def __str__(self):
        return f"rs[{self.period}] = {self.__s} @ {self.__i} -> {self.is_init_stage}"
73
+
74
+
75
cdef class Indexed:
    """
    Reverse-indexed container: index 0 addresses the most recently added
    element, 1 the previous one, etc. Negative indexes address the oldest
    items (-1 is the very first one added).
    """

    def __init__(self, max_series_length=INFINITY):
        self.max_series_length = max_series_length
        self.values = list()
        self._is_empty = 1

    def __len__(self) -> int:
        return len(self.values)

    def empty(self) -> bool:
        return self._is_empty

    def __getitem__(self, idx):
        if isinstance(idx, slice):
            return [self.values[self._get_index(i)] for i in range(*idx.indices(len(self.values)))]
        return self.values[self._get_index(idx)]

    def _get_index(self, idx: int) -> int:
        """Translate external (reversed) index into internal list position."""
        n_len = len(self)
        if n_len == 0 or (idx > 0 and idx > (n_len - 1)) or (idx < 0 and abs(idx) > n_len):
            raise IndexError(f"Can't find record at index {idx}")
        return (n_len - idx - 1) if idx >= 0 else abs(1 + idx)

    def add(self, v):
        """Append a new element, evicting the oldest one when capacity is reached."""
        self.values.append(v)
        self._is_empty = 0
        if len(self.values) >= self.max_series_length:
            self.values.pop(0)

    def update_last(self, v):
        """Replace the most recent element (or add it if the container is empty)."""
        if self.values:
            self.values[-1] = v
        else:
            # - fix: there is no `append` method on Indexed, must use add()
            self.add(v)
        self._is_empty = 0

    def set_values(self, new_values: list):
        # - fix: keep the emptiness flag consistent with the actual content
        self._is_empty = 0 if new_values else 1
        self.values = new_values

    def clear(self):
        self.values.clear()
        self._is_empty = 1

    def lookup_idx(self, value, str method) -> int:
        """
        Find value's index in series using specified method (ffill: previous index, bfill: next index)
        """
        cdef int i0
        if method == 'ffill':
            i0 = int(np.searchsorted(self.values, value, side='right'))
            return max(-1, i0 - 1)
        elif method == 'bfill':
            i0 = int(np.searchsorted(self.values, value, side='left'))
            return -1 if i0 >= len(self.values) else i0
        else:
            raise ValueError(f"Unsupported method {method}")
133
+
134
+
135
+ global _plot_func
136
+
137
+
138
cdef class Locator:
    """
    Locator service class for TimeSeries: time- and position-based slicing
    and single timestamp lookups.
    """

    def __init__(self, TimeSeries series):
        self._series = series

    def __getitem__(self, idx):
        cdef int _nlen = len(self._series)
        cdef int _ix

        if isinstance(idx, slice):
            # - slice bounds must be of the same type (or None)
            if not ((type(idx.start) == type(idx.stop)) or idx.start is None or idx.stop is None):
                raise TypeError(f"Cannot do slice indexing with indexers of different types: [{idx.start} : {idx.stop}]")

            start_idx = 0 if idx.start is None else idx.start

            if isinstance(idx.start, str):
                # - even if start is not found we still want to start from first record
                start_idx = max(self._series.times.lookup_idx(np.datetime64(start_idx, 'ns').item(), 'ffill'), 0)

            if idx.stop is None:
                stop_idx = _nlen
            else:
                if isinstance(idx.stop, str):
                    _ix = self._series.times.lookup_idx(np.datetime64(idx.stop, 'ns').item(), 'ffill')
                    if _ix < 0 or _ix < start_idx:
                        raise IndexError(f"Stop index {idx.stop} is not found or before start index {idx.start}")
                    stop_idx = min(max(_ix, 0) + 1, _nlen)
                else:
                    stop_idx = min(idx.stop, _nlen)

            return self._series.copy(start_idx, stop_idx)

        elif isinstance(idx, str):
            # - single timestamp string: return the record at/just before it
            return self.find(idx)

        return self._series.values[idx]

    def find(self, t: str):
        """Return (timestamp, value) of the record at or right before time `t`."""
        ix = self._series.times.lookup_idx(np.datetime64(t, 'ns').item(), 'ffill')
        return np.datetime64(self._series.times.values[ix], 'ns'), self._series.values.values[ix]
187
+
188
+
189
cdef class TimeSeries:
    """
    Time-indexed series of values aligned to a fixed timeframe.

    New observations are pushed through update(); attached indicators are
    recalculated either on every update (default) or only when a bar closes.
    """

    def __init__(
        self, str name, timeframe, max_series_length=INFINITY,
        process_every_update=True,  # - recalculate indicators on every update (tick) by default
    ) -> None:
        self.name = name
        self.max_series_length = max_series_length
        self.timeframe = recognize_timeframe(timeframe)
        self.times = Indexed(max_series_length)
        self.values = Indexed(max_series_length)
        self.indicators = dict()
        self.calculation_order = []
        # - locator service
        self.loc = Locator(self)

        # - bar-close processing state (used when process_every_update is False)
        self._process_every_update = process_every_update
        self._last_bar_update_value = np.nan
        self._last_bar_update_time = -1

    def __len__(self) -> int:
        return len(self.times)

    def _clone_empty(self, str name, long long timeframe, float max_series_length):
        """
        Make empty TimeSeries instance (no data and indicators)
        """
        return TimeSeries(name, timeframe, max_series_length)

    def copy(self, int start, int stop):
        """Copy records in [start, stop) into a fresh series (indicators not copied)."""
        duplicate = self._clone_empty(self.name, self.timeframe, self.max_series_length)
        for i in range(start, stop):
            duplicate._add_new_item(self.times.values[i], self.values.values[i])
        return duplicate

    def clone(self):
        """
        Clone TimeSeries instance with data without indicators attached
        """
        return self.loc[:]

    def _on_attach_indicator(self, indicator: Indicator, indicator_input: TimeSeries):
        # - remember (input id, indicator, indicator id) so updates propagate in order
        self.calculation_order.append((
            id(indicator_input), indicator, id(indicator)
        ))

    def __getitem__(self, idx):
        return self.values[idx]

    def _add_new_item(self, long long time, double value):
        self.times.add(time)
        self.values.add(value)
        self._is_new_item = True

    def _update_last_item(self, long long time, double value):
        self.times.update_last(time)
        self.values.update_last(value)
        self._is_new_item = False

    def update(self, long long time, double value) -> bool:
        """
        Push a new observation; returns True when a new bar has been opened.

        :raises ValueError: on an attempt to update data in the past
        """
        item_start_time = floor_t64(time, self.timeframe)

        if not self.times:
            self._add_new_item(item_start_time, value)
            # - disable first notification because first item may be incomplete
            self._is_new_item = False
        elif (_dt := time - self.times[0]) >= self.timeframe:
            # - a new bar has started
            self._add_new_item(item_start_time, value)

            if self._process_every_update:
                # - indicators follow every tick
                self._update_indicators(item_start_time, value, True)
            else:
                # - indicators only see the final value of the just-closed bar
                self._update_indicators(self._last_bar_update_time, self._last_bar_update_value, True)

            # - store last data
            self._last_bar_update_time = item_start_time
            self._last_bar_update_value = value
            return self._is_new_item
        else:
            if _dt < 0:
                raise ValueError(f"Attempt to update past data at {time_to_str(time)} !")
            self._update_last_item(item_start_time, value)

            # - update indicators by new data
            if self._process_every_update:
                self._update_indicators(item_start_time, value, False)

        # - store last data
        self._last_bar_update_time = item_start_time
        self._last_bar_update_value = value
        return self._is_new_item

    cdef _update_indicators(self, long long time, value, short new_item_started):
        # - cache of values produced during this single propagation pass
        mem = dict()
        mem[id(self)] = value  # - seed: the fresh data from this series itself
        for src_id, indicator, ind_id in self.calculation_order:
            if src_id not in mem:
                raise ValueError("> No input data - something wrong in calculation order !")
            mem[ind_id] = indicator.update(time, mem[src_id], new_item_started)

    def shift(self, int period):
        """
        Returns shifted series by period
        """
        if period < 0:
            raise ValueError("Only positive shift (from past) period is allowed !")
        return lag(self, period)

    def __add__(self, other: Union[TimeSeries, float, int]):
        return plus(self, other)

    def __sub__(self, other: Union[TimeSeries, float, int]):
        return minus(self, other)

    def __mul__(self, other: Union[TimeSeries, float, int]):
        return mult(self, other)

    def __truediv__(self, other: Union[TimeSeries, float, int]):
        return divide(self, other)

    def __lt__(self, other: Union[TimeSeries, float, int]):
        return lt(self, other)

    def __le__(self, other: Union[TimeSeries, float, int]):
        return le(self, other)

    def __gt__(self, other: Union[TimeSeries, float, int]):
        return gt(self, other)

    def __ge__(self, other: Union[TimeSeries, float, int]):
        return ge(self, other)

    def __eq__(self, other: Union[TimeSeries, float, int]):
        return eq(self, other)

    def __ne__(self, other: Union[TimeSeries, float, int]):
        return ne(self, other)

    def __neg__(self):
        return neg(self)

    def __abs__(self):
        return series_abs(self)

    def to_records(self) -> dict:
        """Return data as {np.datetime64: value} in chronological order."""
        ts = [np.datetime64(t, 'ns') for t in self.times[::-1]]
        return dict(zip(ts, self.values[::-1]))

    def to_series(self):
        """Convert to a pandas Series indexed by timestamps."""
        return pd.Series(self.values.values, index=pd.DatetimeIndex(self.times.values), name=self.name, dtype=float)

    def pd(self):
        return self.to_series()

    def get_indicators(self) -> dict:
        return self.indicators

    def plot(self, *args, **kwargs):
        # - delegates to the globally registered plotting function
        _timeseries_plot_func(self, *args, **kwargs)

    def __str__(self):
        nl = len(self)
        r = f"{self.name}[{time_delta_to_str(self.timeframe)}] | {nl} records\n"
        hd, tl = 3, 3
        if nl <= hd + tl:
            hd, tl = nl, 0

        for n in range(hd):
            r += f" {time_to_str(self.times[n], 'ns')} {str(self[n])}\n"

        if tl > 0:
            r += " .......... \n"
            for n in range(-tl, 0):
                r += f" {time_to_str(self.times[n], 'ns')} {str(self[n])}\n"

        return r

    def __repr__(self):
        return repr(self.pd())
377
+
378
+
379
def _wrap_indicator(series: TimeSeries, clz, *args, **kwargs):
    """
    Return a cached indicator instance attached to `series`, creating it if
    absent. The cache key is built from the class name and the arguments.
    """
    arg_repr = ','.join([a.name if isinstance(a, TimeSeries) else str(a) for a in args])
    if kwargs:
        arg_repr += ',' + ','.join([f"{k}={str(v)}" for k, v in kwargs.items()])
    key = clz.__name__.lower() + "(" + arg_repr + ")"
    existing = series.get_indicators()
    if key in existing:
        return existing[key]
    return clz(key, series, *args, **kwargs)
388
+
389
+
390
cdef class Indicator(TimeSeries):
    """
    Basic class for indicator that can be attached to TimeSeries
    """

    def __init__(self, str name, TimeSeries series):
        if not name:
            raise ValueError(f" > Name must not be empty for {self.__class__.__name__}!")
        super().__init__(name, series.timeframe, series.max_series_length)
        series.indicators[name] = self
        self.name = name

        # - we need to make an empty copy of the input and keep filling it
        self.series = self._clone_empty(series.name, series.timeframe, series.max_series_length)
        self.parent = series

        # - notify the parent series that indicator has been attached
        self._on_attach_indicator(self, series)

        # - recalculate indicator on historical data as if it was streamed
        self._initial_data_recalculate(series)

    def _on_attach_indicator(self, indicator: Indicator, indicator_input: TimeSeries):
        # - delegate registration up to the root parent series
        self.parent._on_attach_indicator(indicator, indicator_input)

    def _initial_data_recalculate(self, TimeSeries series):
        # - replay stored data from oldest to newest
        for t, v in zip(series.times[::-1], series.values[::-1]):
            self.update(t, v, True)

    def update(self, long long time, value, short new_item_started) -> object:
        """Feed a new input value, recompute and store the indicator value."""
        if new_item_started or len(self) == 0:
            self.series._add_new_item(time, value)
            result = self.calculate(time, value, new_item_started)
            self._add_new_item(time, result)
        else:
            self.series._update_last_item(time, value)
            result = self.calculate(time, value, new_item_started)
            self._update_last_item(time, result)
        return result

    def calculate(self, long long time, value, short new_item_started) -> object:
        raise ValueError("Indicator must implement calculate() method")

    @classmethod
    def wrap(clz, series: TimeSeries, *args, **kwargs):
        """Alternate constructor with per-series caching by indicator name."""
        return _wrap_indicator(series, clz, *args, **kwargs)
437
+
438
+
439
cdef class IndicatorOHLC(Indicator):
    """
    Extension of indicator class to be used for OHLCV series
    """

    def _clone_empty(self, str name, long long timeframe, float max_series_length):
        # - internal series must be OHLCV, not a plain TimeSeries
        return OHLCV(name, timeframe, max_series_length)

    def _copy_internal_series(self, int start, int stop, *origins):
        """
        Helper method to copy internal series data
        """
        t0, t1 = self.times.values[start], self.times.values[stop - 1]
        return [
            o.loc[o.times.lookup_idx(t0, 'bfill'): o.times.lookup_idx(t1, 'ffill') + 1]
            for o in origins
        ]

    def calculate(self, long long time, Bar value, short new_item_started) -> object:
        raise ValueError("Indicator must implement calculate() method")
459
+
460
+
461
cdef class Lag(Indicator):
    """Indicator returning the input series value `period` bars back."""
    cdef int period

    def __init__(self, str name, TimeSeries series, int period):
        self.period = period
        super().__init__(name, series)

    cpdef double calculate(self, long long time, double value, short new_item_started):
        # - not enough history collected yet
        if len(self.series) <= self.period:
            return np.nan
        return self.series[self.period]
472
+
473
+
474
def lag(series: TimeSeries, period: int):
    """Create (or reuse) a lag indicator shifting `series` back by `period` bars."""
    return Lag.wrap(series, period)
476
+
477
+
478
cdef class Abs(Indicator):
    """Indicator yielding the absolute value of its input series."""

    def __init__(self, str name, TimeSeries series):
        super().__init__(name, series)

    cpdef double calculate(self, long long time, double value, short new_item_started):
        return abs(self.series[0])
485
+
486
+
487
def series_abs(series: TimeSeries):
    """Create (or reuse) an absolute-value indicator on `series`."""
    return Abs.wrap(series)
489
+
490
+
491
cdef class Compare(Indicator):
    """
    Pairwise comparison indicator: yields +1 / -1 / 0 when the original series
    is above / below / equal to the comparable series (or scalar). Subclasses
    override _operation() to implement arithmetic and relational operators.
    """
    cdef TimeSeries to_compare
    cdef double comparable_scalar
    cdef short _cmp_to_series

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        if isinstance(comparable, TimeSeries):
            if comparable.timeframe != original.timeframe:
                raise ValueError("Series must be of the same timeframe for performing operation !")
            self.to_compare = comparable
            self._cmp_to_series = 1
        else:
            self.comparable_scalar = comparable
            self._cmp_to_series = 0
        super().__init__(name, original)

    cdef double _operation(self, double a, double b):
        if np.isnan(a) or np.isnan(b):
            return np.nan
        return +1 if a > b else -1 if a < b else 0

    def _initial_data_recalculate(self, TimeSeries series):
        if self._cmp_to_series:
            # - align both series on a common index before replaying history
            joined = pd.concat((series.to_series(), self.to_compare.to_series()), axis=1)
            for t, (a, b) in zip(joined.index, joined.values):
                self.series._add_new_item(t.asm8, a)
                self._add_new_item(t.asm8, self._operation(a, b))
        else:
            src = series.to_series()
            for t, a in zip(src.index, src.values):
                self.series._add_new_item(t.asm8, a)
                self._add_new_item(t.asm8, self._operation(a, self.comparable_scalar))

    cpdef double calculate(self, long long time, double value, short new_item_started):
        if self._cmp_to_series:
            # - NaN until the compared series has a value for the same bar time
            if len(self.to_compare) == 0 or len(self.series) == 0 or time != self.to_compare.times[0]:
                return np.nan
            return self._operation(value, self.to_compare[0])
        else:
            if len(self.series) == 0:
                return np.nan
            return self._operation(value, self.comparable_scalar)
533
+
534
+
535
def compare(series0: TimeSeries, series1: TimeSeries):
    """Create (or reuse) a +1/-1/0 comparison indicator of two series."""
    return Compare.wrap(series0, series1)
537
+
538
+
539
cdef class Plus(Compare):
    """Element-wise addition: a + b."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a + b
546
+
547
+
548
cdef class Minus(Compare):
    """Element-wise subtraction: a - b."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a - b
555
+
556
+
557
cdef class Mult(Compare):
    """Element-wise multiplication: a * b."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a * b
564
+
565
+
566
cdef class Divide(Compare):
    """Element-wise division: a / b."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a / b
573
+
574
+
575
cdef class EqualTo(Compare):
    """Element-wise equality test: a == b (1.0 or 0.0)."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a == b
582
+
583
+
584
cdef class NotEqualTo(Compare):
    """Element-wise inequality test: a != b (1.0 or 0.0)."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a != b
591
+
592
+
593
cdef class LessThan(Compare):
    """Element-wise comparison: a < b (1.0 or 0.0)."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a < b
600
+
601
+
602
cdef class LessEqualThan(Compare):
    """Element-wise comparison: a <= b (1.0 or 0.0)."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a <= b
609
+
610
+
611
cdef class GreaterThan(Compare):
    """Element-wise comparison: a > b (1.0 or 0.0)."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a > b
618
+
619
+
620
cdef class GreaterEqualThan(Compare):
    """Element-wise comparison: a >= b (1.0 or 0.0)."""

    def __init__(self, name: str, original: TimeSeries, comparable: Union[TimeSeries, float, int]):
        super().__init__(name, original, comparable)

    cdef double _operation(self, double a, double b):
        return a >= b
627
+
628
+
629
cdef class Neg(Indicator):
    """Indicator yielding the negated value of its input series."""

    def __init__(self, name: str, series: TimeSeries):
        super().__init__(name, series)

    cpdef double calculate(self, long long time, double value, short new_item_started):
        return -value
636
+
637
+
638
def plus(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) an addition indicator: series0 + series1."""
    return Plus.wrap(series0, series1)
640
+
641
+
642
def minus(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) a subtraction indicator: series0 - series1."""
    return Minus.wrap(series0, series1)
644
+
645
+
646
def mult(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) a multiplication indicator: series0 * series1."""
    return Mult.wrap(series0, series1)
648
+
649
+
650
def divide(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) a division indicator: series0 / series1."""
    return Divide.wrap(series0, series1)
652
+
653
+
654
def eq(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) an equality indicator: series0 == series1."""
    return EqualTo.wrap(series0, series1)
656
+
657
+
658
def ne(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) an inequality indicator: series0 != series1."""
    return NotEqualTo.wrap(series0, series1)
660
+
661
+
662
def lt(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) a less-than indicator: series0 < series1."""
    return LessThan.wrap(series0, series1)
664
+
665
+
666
def le(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) a less-or-equal indicator: series0 <= series1."""
    return LessEqualThan.wrap(series0, series1)
668
+
669
+
670
def gt(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) a greater-than indicator: series0 > series1."""
    return GreaterThan.wrap(series0, series1)
672
+
673
+
674
def ge(series0: TimeSeries, series1: Union[TimeSeries, float, int]):
    """Create (or reuse) a greater-or-equal indicator: series0 >= series1."""
    return GreaterEqualThan.wrap(series0, series1)
676
+
677
+
678
def neg(series: TimeSeries):
    """Create (or reuse) a negation indicator: -series."""
    return Neg.wrap(series)
680
+
681
+
682
cdef class Trade:
    """
    Single trade (tick): time, price, size, aggressor flag and optional id.
    """

    def __init__(self, time, double price, double size, short taker=-1, long long trade_id=0):
        self.time = time_as_nsec(time)
        self.price = price
        self.size = size
        self.taker = taker        # - 1: taker (aggressive), 0: maker, -1: unknown
        self.trade_id = trade_id

    def __repr__(self):
        return "[%s]\t%.5f (%.2f) %s %s" % (
            time_to_str(self.time, 'ns'), self.price, self.size,
            'take' if self.taker == 1 else 'make' if self.taker == 0 else '???',
            str(self.trade_id) if self.trade_id > 0 else ''
        )
696
+
697
+
698
cdef class Quote:
    """
    Top-of-book quote: best bid/ask prices and sizes at a point in time.
    """

    def __init__(self, time, double bid, double ask, double bid_size, double ask_size):
        self.time = time_as_nsec(time)
        self.bid = bid
        self.ask = ask
        self.bid_size = bid_size
        self.ask_size = ask_size

    cpdef double mid_price(self):
        """Midpoint between best bid and best ask."""
        return 0.5 * (self.ask + self.bid)

    def __repr__(self):
        return "[%s]\t%.5f (%.1f) | %.5f (%.1f)" % (
            time_to_str(self.time, 'ns'), self.bid, self.bid_size, self.ask, self.ask_size
        )
713
+
714
+
715
cdef class Bar:
    """
    OHLCV bar with optional taker-bought volume.
    """

    def __init__(self, long long time, double open, double high, double low, double close, double volume, double bought_volume=0) -> None:
        self.time = time
        self.open = open
        self.high = high
        self.low = low
        self.close = close
        self.volume = volume
        self.bought_volume = bought_volume

    cpdef Bar update(self, double price, double volume, double bought_volume=0):
        """Apply a new tick to this bar, extending high/low and accumulating volumes."""
        self.close = price
        self.high = max(price, self.high)
        self.low = min(price, self.low)
        self.volume += volume
        self.bought_volume += bought_volume
        return self

    cpdef dict to_dict(self, unsigned short skip_time=0):
        """Serialize bar fields to a dict, optionally without the timestamp."""
        if skip_time:
            return {
                'open': self.open, 'high': self.high, 'low': self.low, 'close': self.close,
                'volume': self.volume, 'bought_volume': self.bought_volume,
            }
        return {
            'timestamp': np.datetime64(self.time, 'ns'),
            'open': self.open, 'high': self.high, 'low': self.low, 'close': self.close,
            'volume': self.volume,
            'bought_volume': self.bought_volume,
        }

    def __repr__(self):
        return "{o:%f | h:%f | l:%f | c:%f | v:%f}" % (self.open, self.high, self.low, self.close, self.volume)
749
+
750
+
751
cdef class OrderBook:
    """
    Order book snapshot: best bid/ask plus per-level size arrays.
    """

    def __init__(self, long long time, top_bid: float, top_ask: float, tick_size: float, bids: np.ndarray, asks: np.ndarray):
        self.time = time
        self.top_bid = top_bid
        self.top_ask = top_ask
        self.tick_size = tick_size
        self.bids = bids
        self.asks = asks

    def __repr__(self):
        return f"[{time_to_str(self.time, 'ns')}] {self.top_bid} ({self.bids[0]}) | {self.top_ask} ({self.asks[0]})"

    cpdef Quote to_quote(self):
        """Collapse the book to a top-of-book Quote (sizes taken from level 0)."""
        return Quote(self.time, self.top_bid, self.top_ask, self.bids[0], self.asks[0])

    cpdef double mid_price(self):
        """Midpoint between best bid and best ask."""
        return 0.5 * (self.top_ask + self.top_bid)
769
+
770
+
771
+
772
cdef class OHLCV(TimeSeries):
    """
    OHLCV (open / high / low / close / volume) bar series.

    Bars are stored as Bar objects in the base TimeSeries, and every bar
    field is mirrored into a dedicated component TimeSeries (open, high,
    low, close, volume, bvolume) so indicators can be attached to any
    single component as well as to the bar series itself.
    """

    def __init__(self, str name, timeframe, max_series_length=INFINITY) -> None:
        super().__init__(name, timeframe, max_series_length)
        # - component series share timeframe and capacity with the parent
        self.open = TimeSeries('open', timeframe, max_series_length)
        self.high = TimeSeries('high', timeframe, max_series_length)
        self.low = TimeSeries('low', timeframe, max_series_length)
        self.close = TimeSeries('close', timeframe, max_series_length)
        self.volume = TimeSeries('volume', timeframe, max_series_length)
        self.bvolume = TimeSeries('bvolume', timeframe, max_series_length)

    cpdef object append_data(self,
                             np.ndarray times,
                             np.ndarray opens,
                             np.ndarray highs,
                             np.ndarray lows,
                             np.ndarray closes,
                             np.ndarray volumes,
                             np.ndarray bvolumes
                             ):
        """
        Bulk-append historical bars.

        times may hold int64 nanosecond timestamps or any numpy datetime
        values (converted to 'datetime64[ns]' per element). Empty volumes /
        bvolumes arrays mean "no volume data"; zeros are stored instead.
        Returns self for chaining.
        """
        cdef long long t
        cdef short _conv
        cdef short _upd_inds, _has_vol
        # FIX: _has_bvol was previously undeclared (implicit Python object);
        # declare it a C short like its sibling _has_vol
        cdef short _has_bvol
        cdef Bar b

        # FIX: guard empty input - times[0] below would raise IndexError
        if len(times) == 0:
            return self

        # - check if volume data presented
        _has_vol = len(volumes) > 0
        _has_bvol = len(bvolumes) > 0

        # - check if need to convert time to nanosec
        _conv = 0
        if not isinstance(times[0].item(), long):
            _conv = 1

        # - check if need to update any indicators
        _upd_inds = 0
        if (
            len(self.indicators) > 0 or
            len(self.open.indicators) > 0 or
            len(self.high.indicators) > 0 or
            len(self.low.indicators) > 0 or
            len(self.close.indicators) > 0 or
            len(self.volume.indicators) > 0 or
            # FIX: bvolume was omitted - its indicators never triggered updates
            len(self.bvolume.indicators) > 0
        ):
            _upd_inds = 1

        for i in range(len(times)):
            if _conv:
                t = times[i].astype('datetime64[ns]').item()
            else:
                t = times[i].item()

            b = Bar(t, opens[i], highs[i], lows[i], closes[i],
                    volumes[i] if _has_vol else 0,
                    bvolumes[i] if _has_bvol else 0)
            self._add_new_item(t, b)

            if _upd_inds:
                self._update_indicators(t, b, True)

        return self

    def _clone_empty(self, str name, long long timeframe, float max_series_length):
        # Produce an empty series with identical configuration (no data copied)
        return OHLCV(name, timeframe, max_series_length)

    def _add_new_item(self, long long time, Bar value):
        """Append a new bar and mirror its fields into the component series."""
        self.times.add(time)
        self.values.add(value)
        self.open._add_new_item(time, value.open)
        self.high._add_new_item(time, value.high)
        self.low._add_new_item(time, value.low)
        self.close._add_new_item(time, value.close)
        self.volume._add_new_item(time, value.volume)
        self.bvolume._add_new_item(time, value.bought_volume)
        self._is_new_item = True

    def _update_last_item(self, long long time, Bar value):
        """Replace the most recent bar in-place (same bar period)."""
        self.times.update_last(time)
        self.values.update_last(value)
        self.open._update_last_item(time, value.open)
        self.high._update_last_item(time, value.high)
        self.low._update_last_item(time, value.low)
        self.close._update_last_item(time, value.close)
        self.volume._update_last_item(time, value.volume)
        self.bvolume._update_last_item(time, value.bought_volume)
        self._is_new_item = False

    cpdef short update(self, long long time, double price, double volume=0.0, double bvolume=0.0):
        """
        Update the series with a single tick / trade.

        Starts a new bar when time crosses into the next timeframe period,
        otherwise folds the tick into the current bar. Raises ValueError on
        timestamps older than the current bar. Returns True (1) when this
        update opened a new bar.
        """
        cdef Bar b
        bar_start_time = floor_t64(time, self.timeframe)

        if not self.times:
            self._add_new_item(bar_start_time, Bar(bar_start_time, price, price, price, price, volume, bvolume))

            # Here we disable first notification because first item may be incomplete
            self._is_new_item = False

        elif (_dt := time - self.times[0]) >= self.timeframe:
            b = Bar(bar_start_time, price, price, price, price, volume, bvolume)

            # - add new item
            self._add_new_item(bar_start_time, b)

            # - update indicators
            self._update_indicators(bar_start_time, b, True)

            return self._is_new_item
        else:
            if _dt < 0:
                raise ValueError(f"Attempt to update past data at {time_to_str(time)} !")

            self._update_last_item(bar_start_time, self[0].update(price, volume, bvolume))

            # - update indicators by new data
            self._update_indicators(bar_start_time, self[0], False)

            return self._is_new_item

    cpdef short update_by_bar(self, long long time, double open, double high, double low, double close, double vol_incr=0.0, double b_vol_incr=0.0):
        """
        Update the series with a (possibly partial) bar.

        vol_incr / b_vol_incr are treated as volume increments when merging
        into the current bar. Returns True (1) when this update opened a
        new bar.
        """
        cdef Bar b
        cdef Bar l_bar
        bar_start_time = floor_t64(time, self.timeframe)

        if not self.times:
            self._add_new_item(bar_start_time, Bar(bar_start_time, open, high, low, close, vol_incr, b_vol_incr))

            # Here we disable first notification because first item may be incomplete
            self._is_new_item = False

        elif time - self.times[0] >= self.timeframe:
            b = Bar(bar_start_time, open, high, low, close, vol_incr, b_vol_incr)

            # - add new item
            self._add_new_item(bar_start_time, b)

            # - update indicators
            self._update_indicators(bar_start_time, b, True)

            return self._is_new_item
        else:
            # - merge incoming data into the current (latest) bar
            l_bar = self[0]
            l_bar.high = max(high, l_bar.high)
            l_bar.low = min(low, l_bar.low)
            l_bar.close = close
            l_bar.volume += vol_incr
            l_bar.bought_volume += b_vol_incr
            self._update_last_item(bar_start_time, l_bar)

            # - update indicators by new data
            self._update_indicators(bar_start_time, self[0], False)

            return self._is_new_item

    # - TODO: need to check if it's safe to drop value series (series of Bar) to avoid duplicating data

    cpdef _update_indicators(self, long long time, value, short new_item_started):
        """Propagate an update to indicators on this series and, on new bars, on every component series."""
        TimeSeries._update_indicators(self, time, value, new_item_started)
        if new_item_started:
            self.open._update_indicators(time, value.open, new_item_started)
            self.close._update_indicators(time, value.close, new_item_started)
            self.high._update_indicators(time, value.high, new_item_started)
            self.low._update_indicators(time, value.low, new_item_started)
            self.volume._update_indicators(time, value.volume, new_item_started)
            # FIX: bvolume indicators were never updated, unlike all sibling series
            self.bvolume._update_indicators(time, value.bought_volume, new_item_started)

    def to_series(self) -> pd.DataFrame:
        """Export the series as a pandas DataFrame indexed by timestamp."""
        df = pd.DataFrame({
            'open': self.open.to_series(),
            'high': self.high.to_series(),
            'low': self.low.to_series(),
            'close': self.close.to_series(),
            'volume': self.volume.to_series(),          # total volume
            'bought_volume': self.bvolume.to_series(),  # bought volume
        })
        df.index.name = 'timestamp'
        return df

    @staticmethod
    def from_dataframe(object df_p, str name="ohlc"):
        """
        Build an OHLCV series from a pandas DataFrame with open/high/low/close
        columns (volume and taker_buy_volume optional). The timeframe is
        inferred from the index frequency. Raises ValueError for non-DataFrame input.
        """
        if not isinstance(df_p, pd.DataFrame):
            # FIX: exception was constructed but never raised
            raise ValueError(f"Input must be a pandas DataFrame, got {type(df_p).__name__}")

        _ohlc = OHLCV(name, infer_series_frequency(df_p).item())
        for t in df_p.itertuples():
            _ohlc.update_by_bar(
                t.Index.asm8, t.open, t.high, t.low, t.close, getattr(t, "volume", 0.0), getattr(t, "taker_buy_volume", 0.0)
            )
        return _ohlc

    def to_records(self) -> dict:
        """Return {np.datetime64: bar-dict} in chronological order."""
        ts = [np.datetime64(t, 'ns') for t in self.times[::-1]]
        bs = [v.to_dict(skip_time=True) for v in self.values[::-1]]
        return dict(zip(ts, bs))
971
+
972
+
973
+ # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
974
+ # - this should be done in separate module -
975
def _plot_mpl(series: TimeSeries, *args, **kwargs):
    """Render a TimeSeries with matplotlib, optionally overlaying its indicators."""
    import matplotlib.pyplot as plt

    show_indicators = kwargs.pop('with_indicators', False)
    hide_labels = kwargs.pop('no_labels', False)

    plt.plot(series.pd(), *args, **kwargs, label=series.name)

    if show_indicators:
        for ind_name, ind in series.get_indicators().items():
            plt.plot(ind.pd(), label=ind_name)

    if not hide_labels:
        plt.legend(loc=2)
986
+
987
+ _timeseries_plot_func = _plot_mpl
988
+ # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -