Qubx 0.5.7__cp312-cp312-manylinux_2_39_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of Qubx might be problematic. Click here for more details.
- qubx/__init__.py +207 -0
- qubx/_nb_magic.py +100 -0
- qubx/backtester/__init__.py +5 -0
- qubx/backtester/account.py +145 -0
- qubx/backtester/broker.py +87 -0
- qubx/backtester/data.py +296 -0
- qubx/backtester/management.py +378 -0
- qubx/backtester/ome.py +296 -0
- qubx/backtester/optimization.py +201 -0
- qubx/backtester/simulated_data.py +558 -0
- qubx/backtester/simulator.py +362 -0
- qubx/backtester/utils.py +780 -0
- qubx/cli/__init__.py +0 -0
- qubx/cli/commands.py +67 -0
- qubx/connectors/ccxt/__init__.py +0 -0
- qubx/connectors/ccxt/account.py +495 -0
- qubx/connectors/ccxt/broker.py +132 -0
- qubx/connectors/ccxt/customizations.py +193 -0
- qubx/connectors/ccxt/data.py +612 -0
- qubx/connectors/ccxt/exceptions.py +17 -0
- qubx/connectors/ccxt/factory.py +93 -0
- qubx/connectors/ccxt/utils.py +307 -0
- qubx/core/__init__.py +0 -0
- qubx/core/account.py +251 -0
- qubx/core/basics.py +850 -0
- qubx/core/context.py +420 -0
- qubx/core/exceptions.py +38 -0
- qubx/core/helpers.py +480 -0
- qubx/core/interfaces.py +1150 -0
- qubx/core/loggers.py +514 -0
- qubx/core/lookups.py +475 -0
- qubx/core/metrics.py +1512 -0
- qubx/core/mixins/__init__.py +13 -0
- qubx/core/mixins/market.py +94 -0
- qubx/core/mixins/processing.py +428 -0
- qubx/core/mixins/subscription.py +203 -0
- qubx/core/mixins/trading.py +88 -0
- qubx/core/mixins/universe.py +270 -0
- qubx/core/series.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/core/series.pxd +125 -0
- qubx/core/series.pyi +118 -0
- qubx/core/series.pyx +988 -0
- qubx/core/utils.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/core/utils.pyi +6 -0
- qubx/core/utils.pyx +62 -0
- qubx/data/__init__.py +25 -0
- qubx/data/helpers.py +416 -0
- qubx/data/readers.py +1562 -0
- qubx/data/tardis.py +100 -0
- qubx/gathering/simplest.py +88 -0
- qubx/math/__init__.py +3 -0
- qubx/math/stats.py +129 -0
- qubx/pandaz/__init__.py +23 -0
- qubx/pandaz/ta.py +2757 -0
- qubx/pandaz/utils.py +638 -0
- qubx/resources/instruments/symbols-binance.cm.json +1 -0
- qubx/resources/instruments/symbols-binance.json +1 -0
- qubx/resources/instruments/symbols-binance.um.json +1 -0
- qubx/resources/instruments/symbols-bitfinex.f.json +1 -0
- qubx/resources/instruments/symbols-bitfinex.json +1 -0
- qubx/resources/instruments/symbols-kraken.f.json +1 -0
- qubx/resources/instruments/symbols-kraken.json +1 -0
- qubx/ta/__init__.py +0 -0
- qubx/ta/indicators.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/ta/indicators.pxd +149 -0
- qubx/ta/indicators.pyi +41 -0
- qubx/ta/indicators.pyx +787 -0
- qubx/trackers/__init__.py +3 -0
- qubx/trackers/abvanced.py +236 -0
- qubx/trackers/composite.py +146 -0
- qubx/trackers/rebalancers.py +129 -0
- qubx/trackers/riskctrl.py +641 -0
- qubx/trackers/sizers.py +235 -0
- qubx/utils/__init__.py +5 -0
- qubx/utils/_pyxreloader.py +281 -0
- qubx/utils/charting/lookinglass.py +1057 -0
- qubx/utils/charting/mpl_helpers.py +1183 -0
- qubx/utils/marketdata/binance.py +284 -0
- qubx/utils/marketdata/ccxt.py +90 -0
- qubx/utils/marketdata/dukas.py +130 -0
- qubx/utils/misc.py +541 -0
- qubx/utils/ntp.py +63 -0
- qubx/utils/numbers_utils.py +7 -0
- qubx/utils/orderbook.py +491 -0
- qubx/utils/plotting/__init__.py +0 -0
- qubx/utils/plotting/dashboard.py +150 -0
- qubx/utils/plotting/data.py +137 -0
- qubx/utils/plotting/interfaces.py +25 -0
- qubx/utils/plotting/renderers/__init__.py +0 -0
- qubx/utils/plotting/renderers/plotly.py +0 -0
- qubx/utils/runner/__init__.py +1 -0
- qubx/utils/runner/_jupyter_runner.pyt +60 -0
- qubx/utils/runner/accounts.py +88 -0
- qubx/utils/runner/configs.py +65 -0
- qubx/utils/runner/runner.py +470 -0
- qubx/utils/time.py +312 -0
- qubx-0.5.7.dist-info/METADATA +105 -0
- qubx-0.5.7.dist-info/RECORD +100 -0
- qubx-0.5.7.dist-info/WHEEL +4 -0
- qubx-0.5.7.dist-info/entry_points.txt +3 -0
qubx/core/loggers.py
ADDED
|
@@ -0,0 +1,514 @@
|
|
|
1
|
+
import csv
|
|
2
|
+
import os
|
|
3
|
+
from multiprocessing.pool import ThreadPool
|
|
4
|
+
from typing import Any, Dict, List, Tuple
|
|
5
|
+
|
|
6
|
+
import numpy as np
|
|
7
|
+
import pandas as pd
|
|
8
|
+
|
|
9
|
+
from qubx import logger
|
|
10
|
+
from qubx.core.basics import (
|
|
11
|
+
AssetBalance,
|
|
12
|
+
Deal,
|
|
13
|
+
Instrument,
|
|
14
|
+
Position,
|
|
15
|
+
TargetPosition,
|
|
16
|
+
)
|
|
17
|
+
from qubx.core.metrics import split_cumulative_pnl
|
|
18
|
+
from qubx.core.series import time_as_nsec
|
|
19
|
+
from qubx.core.utils import recognize_timeframe
|
|
20
|
+
from qubx.pandaz.utils import scols
|
|
21
|
+
from qubx.utils.misc import Stopwatch, makedirs
|
|
22
|
+
from qubx.utils.time import convert_tf_str_td64, floor_t64
|
|
23
|
+
|
|
24
|
+
# - module-level stopwatch; @_SW.watch("loggers") below times StrategyLogging.notify calls
_SW = Stopwatch()
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class LogsWriter:
    """
    Log writer interface with default implementation.

    Concrete writers persist strategy activity records (portfolio, executions,
    signals, positions, balance) tagged with the account / strategy / run ids.

    NOTE: the docstring was originally placed after the attribute annotations,
    making it a no-op string expression (``__doc__`` was None); it is now the
    first statement so it becomes the real class docstring.
    """

    account_id: str
    strategy_id: str
    run_id: str

    def __init__(self, account_id: str, strategy_id: str, run_id: str) -> None:
        self.account_id = account_id
        self.strategy_id = strategy_id
        self.run_id = run_id

    def write_data(self, log_type: str, data: List[Dict[str, Any]]):
        """Persist a batch of records of the given log type. Default: no-op."""
        pass

    def flush_data(self):
        """Flush any buffered records to underlying storage. Default: no-op."""
        pass
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class InMemoryLogsWriter(LogsWriter):
    """
    Keeps all log records in memory and exposes them as pandas DataFrames
    (useful for simulations where no external storage is needed).
    """

    _portfolio: List   # raw portfolio record dicts
    _execs: List       # raw execution record dicts
    _signals: List     # raw signal record dicts

    def __init__(self, account_id: str, strategy_id: str, run_id: str) -> None:
        super().__init__(account_id, strategy_id, run_id)
        self._portfolio = []
        self._execs = []
        self._signals = []

    def write_data(self, log_type: str, data: List[Dict[str, Any]]):
        # - route records into the matching buffer; other log types are ignored
        if len(data) > 0:
            if log_type == "portfolio":
                self._portfolio.extend(data)
            elif log_type == "executions":
                self._execs.extend(data)
            elif log_type == "signals":
                self._signals.extend(data)

    def get_portfolio(self, as_plain_dataframe=True) -> pd.DataFrame:
        """
        Return collected portfolio records indexed by timestamp.

        When as_plain_dataframe is True the frame is converted to the Qube
        presentation: one renamed column group per instrument with cumulative
        PnL split into per-period values.
        """
        # - fix: from_records(index="timestamp") raises KeyError on an empty
        #   record list; return an empty frame instead (consistent with
        #   get_executions / get_signals)
        if not self._portfolio:
            return pd.DataFrame()
        pfl = pd.DataFrame.from_records(self._portfolio, index="timestamp")
        pfl.index = pd.DatetimeIndex(pfl.index)
        if as_plain_dataframe:
            # - convert to Qube presentation (TODO: temporary)
            pis = []
            for s in set(pfl["instrument_id"]):
                pi = pfl[pfl["instrument_id"] == s]
                pi = pi.drop(columns=["instrument_id", "realized_pnl_quoted", "current_price", "exchange_time"])
                pi = pi.rename(
                    {
                        "pnl_quoted": "PnL",
                        "quantity": "Pos",
                        "avg_position_price": "Price",
                        "market_value_quoted": "Value",
                        "commissions_quoted": "Commissions",
                    },
                    axis=1,
                )
                pis.append(pi.rename(lambda x: s + "_" + x, axis=1))
            return split_cumulative_pnl(scols(*pis))
        return pfl

    def get_executions(self) -> pd.DataFrame:
        """Return collected execution records indexed by timestamp (empty frame if none)."""
        p = pd.DataFrame()
        if self._execs:
            p = pd.DataFrame.from_records(self._execs, index="timestamp")
            p.index = pd.DatetimeIndex(p.index)
        return p

    def get_signals(self) -> pd.DataFrame:
        """Return collected signal records indexed by timestamp (empty frame if none)."""
        p = pd.DataFrame()
        if self._signals:
            p = pd.DataFrame.from_records(self._signals, index="timestamp")
            p.index = pd.DatetimeIndex(p.index)
        return p
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
class CsvFileLogsWriter(LogsWriter):
|
|
107
|
+
"""
|
|
108
|
+
Simple CSV strategy log data writer. It does data writing in separate thread.
|
|
109
|
+
"""
|
|
110
|
+
|
|
111
|
+
def __init__(self, account_id: str, strategy_id: str, run_id: str, log_folder="logs") -> None:
|
|
112
|
+
super().__init__(account_id, strategy_id, run_id)
|
|
113
|
+
|
|
114
|
+
path = makedirs(log_folder)
|
|
115
|
+
# - it rewrites positions every time
|
|
116
|
+
self._pos_file_path = f"{path}/{self.strategy_id}_{self.account_id}_positions.csv"
|
|
117
|
+
self._balance_file_path = f"{path}/{self.strategy_id}_{self.account_id}_balance.csv"
|
|
118
|
+
_pfl_path = f"{path}/{strategy_id}_{account_id}_portfolio.csv"
|
|
119
|
+
_exe_path = f"{path}/{strategy_id}_{account_id}_executions.csv"
|
|
120
|
+
self._hdr_pfl = not os.path.exists(_pfl_path)
|
|
121
|
+
self._hdr_exe = not os.path.exists(_exe_path)
|
|
122
|
+
|
|
123
|
+
self._pfl_file_ = open(_pfl_path, "+a", newline="")
|
|
124
|
+
self._execs_file_ = open(_exe_path, "+a", newline="")
|
|
125
|
+
self._pfl_writer = csv.writer(self._pfl_file_)
|
|
126
|
+
self._exe_writer = csv.writer(self._execs_file_)
|
|
127
|
+
self.pool = ThreadPool(3)
|
|
128
|
+
|
|
129
|
+
@staticmethod
|
|
130
|
+
def _header(d: dict) -> List[str]:
|
|
131
|
+
return list(d.keys()) + ["run_id"]
|
|
132
|
+
|
|
133
|
+
def _values(self, data: List[Dict[str, Any]]) -> List[List[str]]:
|
|
134
|
+
# - attach run_id (last column)
|
|
135
|
+
return [list((d | {"run_id": self.run_id}).values()) for d in data]
|
|
136
|
+
|
|
137
|
+
def _do_write(self, log_type, data):
|
|
138
|
+
match log_type:
|
|
139
|
+
case "positions":
|
|
140
|
+
with open(self._pos_file_path, "w", newline="") as f:
|
|
141
|
+
w = csv.writer(f)
|
|
142
|
+
w.writerow(self._header(data[0]))
|
|
143
|
+
w.writerows(self._values(data))
|
|
144
|
+
|
|
145
|
+
case "portfolio":
|
|
146
|
+
if self._hdr_pfl:
|
|
147
|
+
self._pfl_writer.writerow(self._header(data[0]))
|
|
148
|
+
self._hdr_pfl = False
|
|
149
|
+
self._pfl_writer.writerows(self._values(data))
|
|
150
|
+
self._pfl_file_.flush()
|
|
151
|
+
|
|
152
|
+
case "executions":
|
|
153
|
+
if self._hdr_exe:
|
|
154
|
+
self._exe_writer.writerow(self._header(data[0]))
|
|
155
|
+
self._hdr_exe = False
|
|
156
|
+
self._exe_writer.writerows(self._values(data))
|
|
157
|
+
self._execs_file_.flush()
|
|
158
|
+
|
|
159
|
+
case "balance":
|
|
160
|
+
with open(self._balance_file_path, "w", newline="") as f:
|
|
161
|
+
w = csv.writer(f)
|
|
162
|
+
w.writerow(self._header(data[0]))
|
|
163
|
+
w.writerows(self._values(data))
|
|
164
|
+
|
|
165
|
+
def write_data(self, log_type: str, data: List[Dict[str, Any]]):
|
|
166
|
+
if len(data) > 0:
|
|
167
|
+
self.pool.apply_async(self._do_write, (log_type, data))
|
|
168
|
+
|
|
169
|
+
def flush_data(self):
|
|
170
|
+
try:
|
|
171
|
+
self._pfl_file_.flush()
|
|
172
|
+
self._execs_file_.flush()
|
|
173
|
+
except Exception as e:
|
|
174
|
+
logger.warning(f"Error flushing log writer: {str(e)}")
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
class _BaseIntervalDumper:
    """
    Basic functionality for all interval based dumpers
    """

    # - start of the last dumped interval, nanoseconds since epoch (0 = never dumped)
    _last_log_time_ns: int
    # - dump interval; None means dump on every store() call
    _freq: np.timedelta64 | None

    def __init__(self, frequency: str | None) -> None:
        # - recognize_timeframe parses e.g. "1Min"; its result is used directly in
        #   integer ns arithmetic below, so it presumably behaves as a ns count —
        #   TODO confirm against qubx.core.utils
        self._freq: np.timedelta64 | None = recognize_timeframe(frequency) if frequency else None
        self._last_log_time_ns = 0

    def store(self, timestamp: np.datetime64):
        """
        Register a new point in time; calls dump() at most once per configured
        interval, or on every call when no frequency is set.
        """
        _t_ns = time_as_nsec(timestamp)
        if self._freq:
            # - floor the timestamp down to the start of its interval
            _interval_start_time = int(_t_ns - _t_ns % self._freq)
            if _t_ns - self._last_log_time_ns >= self._freq:
                self.dump(np.datetime64(_interval_start_time, "ns"), timestamp)
                self._last_log_time_ns = _interval_start_time
        else:
            # - no interval configured: dump on every stored timestamp
            self.dump(timestamp, timestamp)

    def dump(self, interval_start_time: np.datetime64, actual_timestamp: np.datetime64):
        # - subclasses must implement the actual persistence of their records
        raise NotImplementedError(
            f"dump(np.datetime64, np.datetime64) must be implemented in {self.__class__.__name__}"
        )
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
class PositionsDumper(_BaseIntervalDumper):
    """
    Positions dumper is designed to dump positions once per given interval to storage
    so we could check current situation.
    """

    positions: Dict[Instrument, Position]
    _writer: LogsWriter

    def __init__(
        self,
        writer: LogsWriter,
        interval: str,
    ) -> None:
        super().__init__(interval)
        self.positions = dict()
        self._writer = writer

    def attach_positions(self, *positions: Position) -> "PositionsDumper":
        """Register positions to be dumped; returns self for chaining."""
        for pos in positions:
            self.positions[pos.instrument] = pos
        return self

    def dump(self, interval_start_time: np.datetime64, actual_timestamp: np.datetime64):
        # - one snapshot record per attached position, stamped with the actual time
        records = [
            {
                "timestamp": str(actual_timestamp),
                "instrument_id": instr.symbol,
                "pnl_quoted": pos.total_pnl(),
                "quantity": pos.quantity,
                "notional": pos.notional_value,
                "realized_pnl_quoted": pos.r_pnl,
                "avg_position_price": pos.position_avg_price if pos.quantity != 0.0 else 0.0,
                "current_price": pos.last_update_price,
                "market_value_quoted": pos.market_value_funds,
            }
            for instr, pos in self.positions.items()
        ]
        self._writer.write_data("positions", records)
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
class PortfolioLogger(PositionsDumper):
    """
    Portfolio logger - save portfolio records into storage
    """

    def __init__(self, writer: LogsWriter, interval: str) -> None:
        super().__init__(writer, interval)

    def dump(self, interval_start_time: np.datetime64, actual_timestamp: np.datetime64):
        # - one record per attached position, stamped with the interval start time
        #   (the actual time goes into the "exchange_time" column)
        records = [
            {
                "timestamp": str(interval_start_time),
                "instrument_id": instr.symbol,
                "pnl_quoted": pos.total_pnl(),
                "quantity": pos.quantity,
                "realized_pnl_quoted": pos.r_pnl,
                "avg_position_price": pos.position_avg_price if pos.quantity != 0.0 else 0.0,
                "current_price": pos.last_update_price,
                "market_value_quoted": pos.market_value_funds,
                "exchange_time": str(actual_timestamp),
                "commissions_quoted": pos.commissions,
            }
            for instr, pos in self.positions.items()
        ]
        self._writer.write_data("portfolio", records)

    def close(self):
        """Flush pending portfolio records to storage."""
        self._writer.flush_data()
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
class ExecutionsLogger(_BaseIntervalDumper):
    """
    Executions logger - save strategy executions into storage
    """

    _writer: LogsWriter
    _deals: List[Tuple[Instrument, Deal]]

    def __init__(self, writer: LogsWriter, max_records=10) -> None:
        super().__init__(None)  # no intervals
        self._writer = writer
        self._max_records = max_records
        self._deals: List[Tuple[Instrument, Deal]] = []

    def record_deals(self, instrument: Instrument, deals: List[Deal]):
        """Buffer executed deals; dump once the buffer reaches max_records."""
        for d in deals:
            self._deals.append((instrument, d))

        if len(self._deals) >= self._max_records:
            # - fix: take the flush time from the last buffered deal; the original
            #   read a loop variable that was unbound when this call added no deals
            #   but the buffer was already full (NameError)
            l_time = self._deals[-1][1].time
            self.dump(l_time, l_time)

    def dump(self, interval_start_time: np.datetime64, actual_timestamp: np.datetime64):
        """Write all buffered deals to the writer and clear the buffer."""
        data = []
        for i, d in self._deals:
            data.append(
                {
                    "timestamp": d.time,
                    "instrument_id": i.symbol,
                    "exchange_id": i.exchange,
                    "side": "buy" if d.amount > 0 else "sell",
                    "filled_qty": d.amount,
                    "price": d.price,
                    "commissions": d.fee_amount,
                    # NOTE(review): the fee *currency* is logged under the
                    # "commissions_quoted" column — confirm this is intended
                    "commissions_quoted": d.fee_currency,
                    "order_id": d.order_id,
                }
            )
        self._deals.clear()
        self._writer.write_data("executions", data)

    def store(self, timestamp: np.datetime64):
        # - flushing is driven by record_deals/close, not by time intervals
        pass

    def close(self):
        """Flush any remaining buffered deals and the underlying writer."""
        if self._deals:
            t = self._deals[-1][1].time
            self.dump(t, t)
        self._writer.flush_data()
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
class SignalsLogger(_BaseIntervalDumper):
    """
    Signals logger - save signals generated by strategy
    """

    _writer: LogsWriter
    _targets: List[TargetPosition]

    def __init__(self, writer: LogsWriter, max_records=10) -> None:
        super().__init__(None)
        self._writer = writer
        self._max_records = max_records
        self._targets = []

    def record_signals(self, signals: List[TargetPosition]):
        """Buffer target positions; dump once the buffer reaches max_records."""
        self._targets.extend(signals)

        if len(self._targets) >= self._max_records:
            self.dump(None, None)

    def dump(self, interval_start_time: np.datetime64 | None, actual_timestamp: np.datetime64 | None):
        # - timestamps are unused here: each record carries its own signal time
        records = [
            {
                "timestamp": tgt.time,
                "instrument_id": tgt.instrument.symbol,
                "exchange_id": tgt.instrument.exchange,
                "signal": tgt.signal.signal,
                "target_position": tgt.target_position_size,
                "reference_price": tgt.signal.reference_price,
                "price": tgt.price,
                "take": tgt.take,
                "stop": tgt.stop,
                "group": tgt.signal.group,
                "comment": tgt.signal.comment,
                "service": tgt.is_service,
            }
            for tgt in self._targets
        ]
        self._targets.clear()
        self._writer.write_data("signals", records)

    def store(self, timestamp: np.datetime64):
        # - flushing is driven by record_signals/close, not by time intervals
        pass

    def close(self):
        """Flush any remaining buffered signals and the underlying writer."""
        if self._targets:
            self.dump(None, None)
        self._writer.flush_data()
|
|
378
|
+
|
|
379
|
+
|
|
380
|
+
class BalanceLogger(_BaseIntervalDumper):
    """
    Balance logger - send balance on strategy start
    """

    _writer: LogsWriter

    def __init__(self, writer: LogsWriter) -> None:
        super().__init__(None)  # no intervals
        self._writer = writer

    def record_balance(self, timestamp: np.datetime64, balance: Dict[str, AssetBalance]):
        """Write a snapshot of all currency balances (no-op when balance is empty)."""
        if balance:
            records = [
                {
                    "timestamp": timestamp,
                    "instrument_id": currency,
                    "total": bal.total,
                    "locked": bal.locked,
                }
                for currency, bal in balance.items()
            ]
            self._writer.write_data("balance", records)

    def store(self, timestamp: np.datetime64):
        # - balance is recorded explicitly, not on a time interval
        pass

    def close(self):
        """Flush the underlying writer."""
        self._writer.flush_data()
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
class StrategyLogging:
    """
    Just combined loggers functionality
    """

    positions_dumper: PositionsDumper | None = None
    portfolio_logger: PortfolioLogger | None = None
    executions_logger: ExecutionsLogger | None = None
    balance_logger: BalanceLogger | None = None
    signals_logger: SignalsLogger | None = None
    heartbeat_freq: np.timedelta64 | None = None

    _last_heartbeat_ts: np.datetime64 | None = None

    def __init__(
        self,
        logs_writer: LogsWriter | None = None,
        positions_log_freq: str = "1Min",
        portfolio_log_freq: str = "5Min",
        num_exec_records_to_write=1,  # in live let's write every execution
        num_signals_records_to_write=1,
        heartbeat_freq: str | None = None,
    ) -> None:
        # - instantiate the individual loggers only when a writer is available
        if logs_writer:
            if positions_log_freq:
                # - periodic snapshot of current positions
                self.positions_dumper = PositionsDumper(logs_writer, positions_log_freq)
            if portfolio_log_freq:
                # - periodic portfolio log records
                self.portfolio_logger = PortfolioLogger(logs_writer, portfolio_log_freq)
            if num_exec_records_to_write >= 1:
                # - execution records, batched by count
                self.executions_logger = ExecutionsLogger(logs_writer, num_exec_records_to_write)
            if num_signals_records_to_write >= 1:
                # - signal records, batched by count
                self.signals_logger = SignalsLogger(logs_writer, num_signals_records_to_write)
            self.balance_logger = BalanceLogger(logs_writer)
        else:
            logger.warning("Log writer is not defined - strategy activity will not be saved !")

        self.heartbeat_freq = convert_tf_str_td64(heartbeat_freq) if heartbeat_freq else None

    def initialize(
        self,
        timestamp: np.datetime64,
        positions: dict[Instrument, Position],
        balances: dict[str, AssetBalance],
    ) -> None:
        """Attach positions to the dumpers and record the starting balance."""
        attached = list(positions.values())
        if self.positions_dumper:
            self.positions_dumper.attach_positions(*attached)
        if self.portfolio_logger:
            self.portfolio_logger.attach_positions(*attached)
        # - send balance on start
        if self.balance_logger:
            self.balance_logger.record_balance(timestamp, balances)

    def close(self):
        """Close every active logger, flushing pending records."""
        for active in (self.portfolio_logger, self.executions_logger, self.signals_logger):
            if active:
                active.close()

    @_SW.watch("loggers")
    def notify(self, timestamp: np.datetime64):
        """Propagate a time tick to the interval-based dumpers and emit a heartbeat."""
        if self.positions_dumper:
            self.positions_dumper.store(timestamp)
        if self.portfolio_logger:
            self.portfolio_logger.store(timestamp)
        self._log_heartbeat(timestamp)

    def save_deals(self, instrument: Instrument, deals: List[Deal]):
        """Forward executed deals to the executions logger (if configured)."""
        if self.executions_logger:
            self.executions_logger.record_deals(instrument, deals)

    def save_signals_targets(self, targets: List[TargetPosition]):
        """Forward generated target positions to the signals logger (if configured)."""
        if self.signals_logger and targets:
            self.signals_logger.record_signals(targets)

    def _log_heartbeat(self, timestamp: np.datetime64):
        # - emit at most one heartbeat per configured frequency window
        if not self.heartbeat_freq:
            return
        _floored_ts = floor_t64(timestamp, self.heartbeat_freq)
        if not self._last_heartbeat_ts or _floored_ts - self._last_heartbeat_ts >= self.heartbeat_freq:
            self._last_heartbeat_ts = _floored_ts
            logger.info(f"Heartbeat at {_floored_ts.astype('datetime64[s]')}")
|