Qubx 0.1.0__cp311-cp311-manylinux_2_35_x86_64.whl → 0.1.4__cp311-cp311-manylinux_2_35_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of Qubx might be problematic; consult the package registry's advisory page for more details.

qubx/core/basics.py CHANGED
@@ -338,12 +338,12 @@ class CtrlChannel:
338
338
  name: str
339
339
  lock: Lock
340
340
 
341
- def __init__(self, name: str, sent=(None, None)):
341
+ def __init__(self, name: str, sentinel=(None, None, None)):
342
342
  self.name = name
343
343
  self.control = Event()
344
344
  self.queue = Queue()
345
345
  self.lock = Lock()
346
- self.sent = sent
346
+ self.sent = sentinel
347
347
  self.start()
348
348
 
349
349
  def stop(self):
qubx/core/helpers.py ADDED
@@ -0,0 +1,322 @@
1
+ from collections import defaultdict
2
+ import re, sched, time
3
+ from typing import Any, Callable, Dict, List, Optional, Tuple
4
+ from croniter import croniter
5
+ import numpy as np
6
+ import pandas as pd
7
+ from threading import Thread
8
+
9
+ from qubx import logger
10
+ from qubx.core.basics import CtrlChannel
11
+ from qubx.utils.misc import Stopwatch
12
+ from qubx.utils.time import convert_tf_str_td64, convert_seconds_to_str
13
+ from qubx.core.series import TimeSeries, Trade, Quote, Bar, OHLCV
14
+
15
+
16
+ _SW = Stopwatch()
17
+
18
class CachedMarketDataHolder:
    """
    Collected cached data updates from StrategyContext

    Per-symbol cache of OHLCV series (one series per requested timeframe) fed
    by incoming bars, quotes and trades. Also remembers the last raw update
    object per symbol so overall data readiness can be checked.
    """
    # - base (smallest) timeframe; coarser series are resampled from it
    default_timeframe: np.timedelta64
    # - last bar received per symbol (None until first bar); used to turn
    #   cumulative intra-bar volumes into increments in update_by_bar
    _last_bar: Dict[str, Bar | None]
    # - symbol -> {timeframe -> OHLCV series}
    _ohlcvs: Dict[str, Dict[np.timedelta64, OHLCV]]
    # - symbol -> most recent raw update (Bar | Quote | Trade)
    _updates: Dict[str, Any]

    def __init__(self, default_timeframe: str) -> None:
        self.default_timeframe = convert_tf_str_td64(default_timeframe)
        self._ohlcvs = dict()
        self._last_bar = defaultdict(lambda: None)
        self._updates = dict()

    def init_ohlcv(self, symbol: str, max_size=np.inf):
        # - (re)initialize the symbol's cache with a single series at the base timeframe
        self._ohlcvs[symbol] = {self.default_timeframe: OHLCV(symbol, self.default_timeframe, max_size)}

    def is_data_ready(self) -> bool:
        """
        Check if all symbols in this cache have at least one update
        """
        for v in self._ohlcvs.keys():
            if v not in self._updates:
                return False
        return True

    @_SW.watch('CachedMarketDataHolder')
    def get_ohlcv(self, symbol: str, timeframe: str, max_size=np.inf) -> OHLCV:
        """
        Return the OHLCV series for `symbol` at `timeframe`, creating it on
        first request. Newly created coarser series are back-filled from the
        base (default timeframe) series when one exists.
        """
        tf = convert_tf_str_td64(timeframe)

        if symbol not in self._ohlcvs:
            self._ohlcvs[symbol] = {}

        if tf not in self._ohlcvs[symbol]:
            # - check requested timeframe
            new_ohlc = OHLCV(symbol, tf, max_size)
            if tf < self.default_timeframe:
                # - can't downsample below base timeframe: series stays empty until live updates arrive
                logger.warning(f"[{symbol}] Request for timeframe {timeframe} that is smaller then minimal {self.default_timeframe}")
            else:
                # - first try to resample from smaller frame
                # NOTE(review): assumes basis[::-1] yields bars oldest-first — confirm OHLCV indexing order in qubx.core.series
                if (basis := self._ohlcvs[symbol].get(self.default_timeframe)):
                    for b in basis[::-1]:
                        new_ohlc.update_by_bar(b.time, b.open, b.high, b.low, b.close, b.volume, b.bought_volume)

            self._ohlcvs[symbol][tf] = new_ohlc

        return self._ohlcvs[symbol][tf]

    @_SW.watch('CachedMarketDataHolder')
    def update_by_bars(self, symbol: str, timeframe: str, bars: List[Bar]) -> OHLCV:
        """
        Substitute or create new series based on provided historical bars
        """
        if symbol not in self._ohlcvs:
            self._ohlcvs[symbol] = {}

        tf = convert_tf_str_td64(timeframe)
        new_ohlc = OHLCV(symbol, tf)
        for b in bars:
            new_ohlc.update_by_bar(b.time, b.open, b.high, b.low, b.close, b.volume, b.bought_volume)
            # - each iteration records the bar; after the loop the latest bar is the symbol's last update
            self._updates[symbol] = b

        self._ohlcvs[symbol][tf] = new_ohlc
        return new_ohlc

    @_SW.watch('CachedMarketDataHolder')
    def update_by_bar(self, symbol: str, bar: Bar):
        """
        Apply a live bar update to every cached series of `symbol`.

        Incoming bar volumes appear to be cumulative for the current bar, so a
        repeated update for the same bar time applies only the volume delta.
        """
        self._updates[symbol] = bar

        _last_bar = self._last_bar[symbol]
        v_tot_inc = bar.volume
        v_buy_inc = bar.bought_volume

        if _last_bar is not None:
            if _last_bar.time == bar.time: # just current bar updated
                v_tot_inc -= _last_bar.volume
                v_buy_inc -= _last_bar.bought_volume

            if _last_bar.time > bar.time: # update is too late - skip it
                return

        if symbol in self._ohlcvs:
            self._last_bar[symbol] = bar
            for ser in self._ohlcvs[symbol].values():
                ser.update_by_bar(bar.time, bar.open, bar.high, bar.low, bar.close, v_tot_inc, v_buy_inc)

    @_SW.watch('CachedMarketDataHolder')
    def update_by_quote(self, symbol: str, quote: Quote):
        """
        Apply a quote update: push the mid-price (with zero volume) into every
        cached series of `symbol`.
        """
        self._updates[symbol] = quote

        series = self._ohlcvs.get(symbol)
        if series:
            for ser in series.values():
                ser.update(quote.time, quote.mid_price(), 0)

    @_SW.watch('CachedMarketDataHolder')
    def update_by_trade(self, symbol: str, trade: Trade):
        """
        Apply a trade update: price plus traded volume for every cached series
        of `symbol`.
        """
        self._updates[symbol] = trade
        series = self._ohlcvs.get(symbol)
        if series:
            total_vol = trade.size
            # NOTE(review): taker >= 1 presumably marks a buyer-initiated trade — confirm Trade.taker semantics
            bought_vol = total_vol if trade.taker >= 1 else 0.0
            for ser in series.values():
                ser.update(trade.time, trade.price, total_vol, bought_vol)
123
+
124
+
125
# Regex for schedule specifications of the general form
#   "[type[.timeframe]:] [HH:MM[:SS], ...] [@ day-list] [<n>d<n>h<n>m<n>s ...]"
# Named groups: type, timeframe, spec (whole right-hand side), time (one or
# more HH:MM[:SS] entries), by (day-of-week list after '@'), and the signed
# interval components months/weeks/days/hours/minutes/seconds.
SPEC_REGEX = re.compile(
    r"((?P<type>[A-Za-z]+)(\.?(?P<timeframe>[0-9A-Za-z]+))?\ *:)?"
    r"\ *"
    r"((?P<spec>"
    r"(?P<time>((\d+:\d+(:\d+)?)\ *,?\ *)+)?"
    r"((\ *@\ *)(?P<by>([A-Za-z0-9-,\ ]+)))?"
    r"(("
    r'((?P<months>[-+]?\d+)(months|month|bm|mo))?'
    r'((?P<weeks>[-+]?\d+)(weeks|week|w))?'
    r'((?P<days>[-+]?\d+)(days|day|d))?'
    r'((?P<hours>[-+]?\d+)(hours|hour|h))?'
    r'((?P<minutes>[-+]?\d+)(mins|min|m))?'
    r'((?P<seconds>[-+]?\d+)(sec|s))?'
    r")(\ *)?)*"
    r".*"
    r"))?", re.IGNORECASE
)
142
+
143
+
144
+ def _mk_cron(time: str, by: list | None) -> str:
145
+ HMS = lambda s: list(map(int, s.split(':') if s.count(':') == 2 else [*s.split(':'), 0]))
146
+
147
+ h,m,s = HMS(time)
148
+ assert h < 24, f'Wrong value for hour {h}'
149
+ assert m < 60, f'Wrong value for minute {m}'
150
+ assert s < 60, f'Wrong value for seconds {s}'
151
+ b = ','.join(by) if by else '*'
152
+ c = f'{m} {h} * * {b}'
153
+ return c if s == 0 else c + f' {s}'
154
+
155
+
156
+ def _make_shift(_b, _w, _d, _h, _m, _s):
157
+ D0 = pd.Timedelta(0)
158
+ AS_TD = lambda d: pd.Timedelta(d)
159
+ P, N = D0, D0
160
+
161
+ # return AS_TD(f'{_b*4}W') + AS_TD(f'{_w}W') + AS_TD(f'{_d}D') + AS_TD(f'{_h}h') + AS_TD(f'{_m}Min') + AS_TD(f'{_s}Sec')
162
+ for t in [
163
+ AS_TD(f'{_b*4}W'), AS_TD(f'{_w}W'), AS_TD(f'{_d}D'),
164
+ AS_TD(f'{_h}h'), AS_TD(f'{_m}Min'), AS_TD(f'{_s}Sec')]:
165
+ if t > D0:
166
+ P += t
167
+ else:
168
+ N += t
169
+ return P, N
170
+
171
+
172
def _parse_schedule_spec(schedule: str) -> Dict[str, str]:
    """
    Match `schedule` against SPEC_REGEX and return only the named groups that
    captured something; empty dict when the string does not match at all.
    """
    match = SPEC_REGEX.match(schedule)
    if not match:
        return {}
    return {name: value for name, value in match.groupdict().items() if value}
175
+
176
+
177
def process_schedule_spec(spec_str: str | None) -> Dict[str, Any]:
    """
    Parse a schedule specification string into a scheduling config dict.

    Depending on the spec the result is one of:
      - {'type': 'cron', 'schedule': <cron string>, 'spec': ...}
      - {'type': 'bar',  'schedule': None, 'timeframe': ..., 'delay': ..., 'spec': ...}
      - {'type': <custom type>, 'schedule': None, 'timeframe': ..., 'delay': ..., 'spec': ...}
      - {} when spec_str is empty or nothing could be derived.

    Raises ValueError when an explicit 'cron' type carries an invalid expression.
    """
    AS_INT = lambda d, k: int(d.get(k, 0))                 # named group -> int, 0 when absent
    S = lambda s: [x for x in re.split(r"[, ]", s) if x]   # split on commas/spaces, drop empties
    config = {}

    if not spec_str:
        return config

    # - parse schedule spec
    spec = _parse_schedule_spec(spec_str)

    # - check how to run it
    _T, _S = spec.get('type'), spec.get('spec')
    _F = spec.get('timeframe')
    _t, _by = S(spec.get('time', '')), S(spec.get('by', ''))
    _b, _w, _d = AS_INT(spec, 'months'), AS_INT(spec, 'weeks'), AS_INT(spec, 'days')
    _h, _m, _s = AS_INT(spec, 'hours'), AS_INT(spec, 'minutes'), AS_INT(spec, 'seconds')
    _has_intervals = (_b != 0) or (_w != 0) or (_d != 0) or (_h != 0) or (_m != 0) or (_s != 0)
    # - positive and negative parts of the combined interval shift
    _s_pos, _s_neg = _make_shift(_b, _w, _d, _h, _m, _s)
    _shift = _s_pos + _s_neg

    match _T:
        case 'cron':
            if not _S or croniter.is_valid(_S):
                config = dict(type='cron', schedule=_S, spec=_S)
            else:
                raise ValueError(f"Wrong specification for cron type: {_S}")

        case 'time':
            # NOTE(review): each iteration overwrites `config` — only the last
            # time entry survives; confirm this is intended for multi-time specs.
            for t in _t:
                config = dict(type='cron', schedule=_mk_cron(t, _by), spec=_S)

        case None:
            if _t: # - if time specified
                # NOTE(review): same last-one-wins behavior as the 'time' case above.
                for t in _t:
                    config = dict(type='cron', schedule=_mk_cron(t, _by), spec=_S)
            else:
                # - check if it's valid cron
                if _S:
                    if croniter.is_valid(_S):
                        config = dict(type='cron', schedule=_S, spec=_S)
                    else:
                        if _has_intervals:
                            # - derive timeframe from the positive shift when not given explicitly
                            _F = convert_seconds_to_str(int(_s_pos.as_unit('s').to_timedelta64().item().total_seconds())) if not _F else _F
                            config = dict(type='bar', schedule=None, timeframe=_F, delay=_s_neg, spec=_S)
        case _:
            # - any other explicit type passes through with the combined shift as delay
            config = dict(type=_T, schedule=None, timeframe=_F, delay=_shift, spec=_S)

    return config
226
+
227
+
228
+ _SEC2TS = lambda t: pd.Timestamp(t, unit='s')
229
+
230
class BasicScheduler:
    """
    Basic scheduler functionality. It helps to create scheduled event task

    Events are registered as cron schedules; once run() is started, a
    background thread drives a stdlib sched.scheduler bound to the supplied
    time source and pushes (None, event, (prev_time, fire_time)) tuples into
    the control channel's queue on every fire.
    """
    # - control channel events are published to
    _chan: CtrlChannel
    # - stdlib scheduler driven by time_sec (delay function unused: enterabs only)
    _scdlr: sched.scheduler
    # - time source returning current time in nanoseconds
    _ns_time_fun: Callable[[], float]
    # - event name -> cron iterator producing fire times (epoch seconds)
    _crons: Dict[str, croniter]
    # - True while the watcher thread is running
    _is_started: bool

    def __init__(self, channel: CtrlChannel, time_provider_ns: Callable[[], float]):
        self._chan = channel
        self._ns_time_fun = time_provider_ns
        self._scdlr = sched.scheduler(self.time_sec)
        self._crons = dict()
        self._is_started = False

    def time_sec(self) -> float:
        # - current time as fractional epoch seconds
        return self._ns_time_fun() / 1000000000.0

    def schedule_event(self, cron_schedule: str, event_name: str):
        """
        Register `event_name` on the given cron schedule; arms it immediately
        when the scheduler is already running.

        Raises ValueError for an invalid cron expression.
        """
        if not croniter.is_valid(cron_schedule):
            raise ValueError(f"Specified schedule {cron_schedule} for {event_name} doesn't have valid cron format !")
        self._crons[event_name] = croniter(cron_schedule, self.time_sec())

        if self._is_started:
            self._arm_schedule(event_name, self.time_sec())

    def get_event_last_time(self, event_name: str) -> pd.Timestamp | None:
        """
        Previous fire time of the event, or None for an unknown event.
        The cron iterator position is saved and restored, so this is side-effect free.
        """
        if event_name in self._crons:
            _iter = self._crons[event_name]
            _c = _iter.get_current()
            _t = pd.Timestamp(_iter.get_prev(), unit='s')
            _iter.set_current(_c, force=True)
            return _t
        return None

    def get_event_next_time(self, event_name: str) -> pd.Timestamp | None:
        # - next fire time after "now", or None for an unknown event
        if event_name in self._crons:
            _iter = self._crons[event_name]
            _t = pd.Timestamp(_iter.get_next(start_time=self.time_sec()), unit='s')
            return _t
        return None

    def _arm_schedule(self, event: str, start_time: float) -> bool:
        """
        Queue the next occurrence of `event` on the sched scheduler.
        Returns True when a task was scheduled, False otherwise.
        """
        iter = self._crons[event]  # NOTE(review): local name shadows builtin iter
        prev_time = iter.get_prev()
        next_time = iter.get_next(start_time=start_time)
        if next_time:
            self._scdlr.enterabs(
                next_time, 1, self._trigger, (event, prev_time, next_time)
            )
            logger.debug(f"Next ({event}) event scheduled at <red>{_SEC2TS(next_time)}</red>")
            return True
        logger.debug(f"({event}) task is not scheduled")
        return False

    def _trigger(self, event: str, prev_time_sec: float, trig_time: float):
        # - callback invoked by the sched scheduler when an event fires
        now = self.time_sec()

        # - send notification to channel
        if self._chan.control.is_set():
            self._chan.queue.put((None, event, (prev_time_sec, trig_time)))

        # - try to arm this event again
        self._arm_schedule(event, now)

    def check_and_run_tasks(self) -> float | None:
        # - run due tasks; returns seconds until the next task, or None when the queue is empty
        return self._scdlr.run(blocking=False)

    def run(self):
        """
        Arm all registered events and start a background watcher thread that
        drives the scheduler until the task queue empties or the control
        channel is cleared.
        """
        if self._is_started:
            logger.warning("Scheduler is already running")
            return

        _has_tasks = False
        for k in self._crons.keys():
            _has_tasks |= self._arm_schedule(k, self.time_sec())

        def _watcher():
            # - poll loop: sleep a fraction of the time until the next task, clamped to [0.1s, 5s]
            while (r := self.check_and_run_tasks()):
                if not self._chan.control.is_set():
                    break
                _delay = max(min(r/5, 5), 0.1)
                time.sleep(_delay)
            logger.debug("Scheduler is stopped ")
            self._is_started = False

        if _has_tasks:
            # NOTE(review): non-daemon thread — may keep the process alive at shutdown; confirm intended
            Thread(target=_watcher).start()
            self._is_started = True
321
+
322
+
qubx/core/loggers.py CHANGED
@@ -94,7 +94,7 @@ class CsvFileLogsWriter(LogsWriter):
94
94
 
95
95
  def write_data(self, log_type: str, data: List[Dict[str, Any]]):
96
96
  if len(data) > 0:
97
- self.pool.apply(self._do_write, (log_type, data))
97
+ self.pool.apply_async(self._do_write, (log_type, data))
98
98
 
99
99
  def flush_data(self):
100
100
  try:
@@ -261,7 +261,7 @@ class BalanceLogger(_BaseIntervalDumper):
261
261
  'total': d[0],
262
262
  'locked': d[1],
263
263
  })
264
- self._writer.write_data('balance', data)
264
+ self._writer.write_data('balance', data)
265
265
 
266
266
  def store(self, timestamp: np.datetime64):
267
267
  pass
@@ -340,4 +340,4 @@ class StrategyLogging:
340
340
 
341
341
  def save_deals(self, symbol: str, deals: List[Deal]):
342
342
  if self.executions_logger:
343
- self.executions_logger.record_deals(symbol, deals)
343
+ self.executions_logger.record_deals(symbol, deals)