Qubx 0.5.7 (qubx-0.5.7-cp312-cp312-manylinux_2_39_x86_64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of Qubx might be problematic.
- qubx/__init__.py +207 -0
- qubx/_nb_magic.py +100 -0
- qubx/backtester/__init__.py +5 -0
- qubx/backtester/account.py +145 -0
- qubx/backtester/broker.py +87 -0
- qubx/backtester/data.py +296 -0
- qubx/backtester/management.py +378 -0
- qubx/backtester/ome.py +296 -0
- qubx/backtester/optimization.py +201 -0
- qubx/backtester/simulated_data.py +558 -0
- qubx/backtester/simulator.py +362 -0
- qubx/backtester/utils.py +780 -0
- qubx/cli/__init__.py +0 -0
- qubx/cli/commands.py +67 -0
- qubx/connectors/ccxt/__init__.py +0 -0
- qubx/connectors/ccxt/account.py +495 -0
- qubx/connectors/ccxt/broker.py +132 -0
- qubx/connectors/ccxt/customizations.py +193 -0
- qubx/connectors/ccxt/data.py +612 -0
- qubx/connectors/ccxt/exceptions.py +17 -0
- qubx/connectors/ccxt/factory.py +93 -0
- qubx/connectors/ccxt/utils.py +307 -0
- qubx/core/__init__.py +0 -0
- qubx/core/account.py +251 -0
- qubx/core/basics.py +850 -0
- qubx/core/context.py +420 -0
- qubx/core/exceptions.py +38 -0
- qubx/core/helpers.py +480 -0
- qubx/core/interfaces.py +1150 -0
- qubx/core/loggers.py +514 -0
- qubx/core/lookups.py +475 -0
- qubx/core/metrics.py +1512 -0
- qubx/core/mixins/__init__.py +13 -0
- qubx/core/mixins/market.py +94 -0
- qubx/core/mixins/processing.py +428 -0
- qubx/core/mixins/subscription.py +203 -0
- qubx/core/mixins/trading.py +88 -0
- qubx/core/mixins/universe.py +270 -0
- qubx/core/series.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/core/series.pxd +125 -0
- qubx/core/series.pyi +118 -0
- qubx/core/series.pyx +988 -0
- qubx/core/utils.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/core/utils.pyi +6 -0
- qubx/core/utils.pyx +62 -0
- qubx/data/__init__.py +25 -0
- qubx/data/helpers.py +416 -0
- qubx/data/readers.py +1562 -0
- qubx/data/tardis.py +100 -0
- qubx/gathering/simplest.py +88 -0
- qubx/math/__init__.py +3 -0
- qubx/math/stats.py +129 -0
- qubx/pandaz/__init__.py +23 -0
- qubx/pandaz/ta.py +2757 -0
- qubx/pandaz/utils.py +638 -0
- qubx/resources/instruments/symbols-binance.cm.json +1 -0
- qubx/resources/instruments/symbols-binance.json +1 -0
- qubx/resources/instruments/symbols-binance.um.json +1 -0
- qubx/resources/instruments/symbols-bitfinex.f.json +1 -0
- qubx/resources/instruments/symbols-bitfinex.json +1 -0
- qubx/resources/instruments/symbols-kraken.f.json +1 -0
- qubx/resources/instruments/symbols-kraken.json +1 -0
- qubx/ta/__init__.py +0 -0
- qubx/ta/indicators.cpython-312-x86_64-linux-gnu.so +0 -0
- qubx/ta/indicators.pxd +149 -0
- qubx/ta/indicators.pyi +41 -0
- qubx/ta/indicators.pyx +787 -0
- qubx/trackers/__init__.py +3 -0
- qubx/trackers/abvanced.py +236 -0
- qubx/trackers/composite.py +146 -0
- qubx/trackers/rebalancers.py +129 -0
- qubx/trackers/riskctrl.py +641 -0
- qubx/trackers/sizers.py +235 -0
- qubx/utils/__init__.py +5 -0
- qubx/utils/_pyxreloader.py +281 -0
- qubx/utils/charting/lookinglass.py +1057 -0
- qubx/utils/charting/mpl_helpers.py +1183 -0
- qubx/utils/marketdata/binance.py +284 -0
- qubx/utils/marketdata/ccxt.py +90 -0
- qubx/utils/marketdata/dukas.py +130 -0
- qubx/utils/misc.py +541 -0
- qubx/utils/ntp.py +63 -0
- qubx/utils/numbers_utils.py +7 -0
- qubx/utils/orderbook.py +491 -0
- qubx/utils/plotting/__init__.py +0 -0
- qubx/utils/plotting/dashboard.py +150 -0
- qubx/utils/plotting/data.py +137 -0
- qubx/utils/plotting/interfaces.py +25 -0
- qubx/utils/plotting/renderers/__init__.py +0 -0
- qubx/utils/plotting/renderers/plotly.py +0 -0
- qubx/utils/runner/__init__.py +1 -0
- qubx/utils/runner/_jupyter_runner.pyt +60 -0
- qubx/utils/runner/accounts.py +88 -0
- qubx/utils/runner/configs.py +65 -0
- qubx/utils/runner/runner.py +470 -0
- qubx/utils/time.py +312 -0
- qubx-0.5.7.dist-info/METADATA +105 -0
- qubx-0.5.7.dist-info/RECORD +100 -0
- qubx-0.5.7.dist-info/WHEEL +4 -0
- qubx-0.5.7.dist-info/entry_points.txt +3 -0
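A wheel is a plain zip archive, so the file listing above can be reproduced locally without installing the package. A minimal sketch; the wheel filename below follows the standard wheel naming convention and is an assumption:

# Hypothetical: list the qubx/core/mixins files straight from the wheel archive.
import zipfile

WHEEL = "qubx-0.5.7-cp312-cp312-manylinux_2_39_x86_64.whl"  # assumed standard name

with zipfile.ZipFile(WHEEL) as whl:
    for name in whl.namelist():
        if name.startswith("qubx/core/mixins/"):
            print(name)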
qubx/core/mixins/__init__.py
@@ -0,0 +1,13 @@
+__all__ = [
+    "MarketManager",
+    "ProcessingManager",
+    "SubscriptionManager",
+    "TradingManager",
+    "UniverseManager",
+]
+
+from .market import MarketManager
+from .processing import ProcessingManager
+from .subscription import SubscriptionManager
+from .trading import TradingManager
+from .universe import UniverseManager
qubx/core/mixins/market.py
@@ -0,0 +1,94 @@
+from typing import Any
+
+import pandas as pd
+
+from qubx import lookup
+from qubx.core.basics import Instrument, ITimeProvider, dt_64
+from qubx.core.helpers import CachedMarketDataHolder
+from qubx.core.interfaces import (
+    IDataProvider,
+    IMarketManager,
+    IUniverseManager,
+)
+from qubx.core.series import OHLCV, Quote
+from qubx.data.readers import DataReader
+from qubx.utils import convert_seconds_to_str
+
+
+class MarketManager(IMarketManager):
+    _time_provider: ITimeProvider
+    _cache: CachedMarketDataHolder
+    _data_provider: IDataProvider
+    _universe_manager: IUniverseManager
+    _aux_data_provider: DataReader | None
+
+    def __init__(
+        self,
+        time_provider: ITimeProvider,
+        cache: CachedMarketDataHolder,
+        data_provider: IDataProvider,
+        universe_manager: IUniverseManager,
+        aux_data_provider: DataReader | None = None,
+    ):
+        self._time_provider = time_provider
+        self._cache = cache
+        self._data_provider = data_provider
+        self._universe_manager = universe_manager
+        self._aux_data_provider = aux_data_provider
+
+    def time(self) -> dt_64:
+        return self._time_provider.time()
+
+    def ohlc(
+        self,
+        instrument: Instrument,
+        timeframe: str | None = None,
+        length: int | None = None,
+    ) -> OHLCV:
+        timeframe = timeframe or convert_seconds_to_str(
+            int(pd.Timedelta(self._cache.default_timeframe).total_seconds())
+        )
+        rc = self._cache.get_ohlcv(instrument, timeframe)
+
+        # - check if we need to fetch more data
+        _need_history_request = False
+        if (_l_rc := len(rc)) > 0:
+            _last_bar_time = rc[0].time
+            _timeframe_ns = pd.Timedelta(timeframe).asm8.item()
+
+            # - check if we need to fetch more data
+            if (_last_bar_time + _timeframe_ns < self._data_provider.time_provider.time().item()) or (
+                length and _l_rc < length
+            ):
+                _need_history_request = True
+
+        else:
+            _need_history_request = True
+
+        # - send request for historical data
+        if _need_history_request and length is not None:
+            bars = self._data_provider.get_ohlc(instrument, timeframe, length)
+            rc = self._cache.update_by_bars(instrument, timeframe, bars)
+        return rc
+
+    def quote(self, instrument: Instrument) -> Quote | None:
+        return self._data_provider.get_quote(instrument)
+
+    def get_data(self, instrument: Instrument, sub_type: str) -> list[Any]:
+        return self._cache.get_data(instrument, sub_type)
+
+    def get_aux_data(self, data_id: str, **parameters) -> pd.DataFrame | None:
+        return self._aux_data_provider.get_aux_data(data_id, **parameters) if self._aux_data_provider else None
+
+    def get_instruments(self) -> list[Instrument]:
+        return self._universe_manager.instruments
+
+    def query_instrument(self, symbol: str, exchange: str) -> Instrument | None:
+        return lookup.find_symbol(exchange, symbol)
+
+    def exchanges(self) -> list[str]:
+        """
+        What exchanges are supported by the market manager.
+        Theoretically it can manage multiple exchanges.
+        """
+        return [self._data_provider.exchange()]
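MarketManager.ohlc() backfills lazily: history is requested from the data provider only when the cached series is empty, its last bar is older than one full timeframe, or it holds fewer bars than requested. The same decision in isolation, as a self-contained sketch (function and parameter names here are illustrative, not Qubx API):

# Illustrative re-statement of the backfill check in MarketManager.ohlc().
import pandas as pd

def needs_history(last_bar_time_ns: int | None, now_ns: int,
                  timeframe: str, cached_len: int, length: int | None) -> bool:
    if last_bar_time_ns is None or cached_len == 0:
        return True                                   # nothing cached yet
    timeframe_ns = pd.Timedelta(timeframe).value      # bar length in nanoseconds
    stale = last_bar_time_ns + timeframe_ns < now_ns  # last bar older than one full bar
    too_short = bool(length) and cached_len < length  # fewer bars than requested
    return stale or too_short

# A 1h series whose newest bar started two hours ago is stale.
now = pd.Timestamp("2024-01-01 12:00").value
last = pd.Timestamp("2024-01-01 10:00").value
print(needs_history(last, now, "1h", cached_len=5, length=10))  # True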
qubx/core/mixins/processing.py
@@ -0,0 +1,428 @@
+import traceback
+from multiprocessing.pool import ThreadPool
+from types import FunctionType
+from typing import Any, Callable, List, Tuple
+
+from qubx import logger
+from qubx.core.basics import (
+    SW,
+    DataType,
+    Deal,
+    Instrument,
+    MarketEvent,
+    Order,
+    Signal,
+    TargetPosition,
+    Timestamped,
+    TriggerEvent,
+    dt_64,
+)
+from qubx.core.exceptions import StrategyExceededMaxNumberOfRuntimeFailuresError
+from qubx.core.helpers import BasicScheduler, CachedMarketDataHolder, extract_price, process_schedule_spec
+from qubx.core.interfaces import (
+    IAccountProcessor,
+    IMarketManager,
+    IPositionGathering,
+    IProcessingManager,
+    IStrategy,
+    IStrategyContext,
+    ISubscriptionManager,
+    ITimeProvider,
+    IUniverseManager,
+    PositionsTracker,
+)
+from qubx.core.loggers import StrategyLogging
+from qubx.core.series import Bar, OrderBook, Quote, Trade
+
+
+class ProcessingManager(IProcessingManager):
+    MAX_NUMBER_OF_STRATEGY_FAILURES = 10
+
+    _context: IStrategyContext
+    _strategy: IStrategy
+    _logging: StrategyLogging
+    _market_data: IMarketManager
+    _subscription_manager: ISubscriptionManager
+    _time_provider: ITimeProvider
+    _account: IAccountProcessor
+    _position_tracker: PositionsTracker
+    _position_gathering: IPositionGathering
+    _cache: CachedMarketDataHolder
+    _scheduler: BasicScheduler
+    _universe_manager: IUniverseManager
+
+    _handlers: dict[str, Callable[["ProcessingManager", Instrument, str, Any], TriggerEvent | None]]
+    _strategy_name: str
+
+    _trigger_on_time_event: bool = False
+    _fit_is_running: bool = False
+    _init_fit_was_called: bool = False
+    _fails_counter: int = 0
+    _is_simulation: bool
+    _pool: ThreadPool | None
+    _trig_bar_freq_nsec: int | None = None
+    _cur_sim_step: int | None = None
+
+    def __init__(
+        self,
+        context: IStrategyContext,
+        strategy: IStrategy,
+        logging: StrategyLogging,
+        market_data: IMarketManager,
+        subscription_manager: ISubscriptionManager,
+        time_provider: ITimeProvider,
+        account: IAccountProcessor,
+        position_tracker: PositionsTracker,
+        position_gathering: IPositionGathering,
+        universe_manager: IUniverseManager,
+        cache: CachedMarketDataHolder,
+        scheduler: BasicScheduler,
+        is_simulation: bool,
+    ):
+        self._context = context
+        self._strategy = strategy
+        self._logging = logging
+        self._market_data = market_data
+        self._subscription_manager = subscription_manager
+        self._time_provider = time_provider
+        self._account = account
+        self._is_simulation = is_simulation
+        self._position_gathering = position_gathering
+        self._position_tracker = position_tracker
+        self._universe_manager = universe_manager
+        self._cache = cache
+        self._scheduler = scheduler
+
+        self._pool = ThreadPool(2) if not self._is_simulation else None
+        self._handlers = {
+            n.split("_handle_")[1]: f
+            for n, f in self.__class__.__dict__.items()
+            if type(f) is FunctionType and n.startswith("_handle_")
+        }
+        self._strategy_name = strategy.__class__.__name__
+        self._trig_bar_freq_nsec = None
+
+    def set_fit_schedule(self, schedule: str) -> None:
+        rule = process_schedule_spec(schedule)
+        if rule.get("type") != "cron":
+            raise ValueError("Only cron type is supported for fit schedule")
+        self._scheduler.schedule_event(rule["schedule"], "fit")
+
+    def set_event_schedule(self, schedule: str) -> None:
+        rule = process_schedule_spec(schedule)
+        if not rule or "type" not in rule:
+            raise ValueError(f"Can't recognize schedule format: '{schedule}'")
+
+        if rule["type"] != "cron":
+            raise ValueError("Only cron type is supported for event schedule")
+
+        self._scheduler.schedule_event(rule["schedule"], "time")
+        self._trigger_on_time_event = True
+
+    def get_event_schedule(self, event_id: str) -> str | None:
+        return self._scheduler.get_schedule_for_event(event_id)
+
+    def process_data(self, instrument: Instrument, d_type: str, data: Any, is_historical: bool) -> bool:
+        self._logging.notify(self._time_provider.time())
+
+        handler = self._handlers.get(d_type)
+        with SW("StrategyContext.handler"):
+            if not d_type:
+                event = None
+            elif is_historical:
+                event = self._process_hist_event(instrument, d_type, data)
+            elif handler:
+                event = handler(self, instrument, d_type, data)
+            else:
+                event = self._process_custom_event(instrument, d_type, data)
+
+        # - check if on_fit() has not been called for the first time yet
+        if not self._init_fit_was_called and not self._fit_is_running:
+            self._handle_fit(None, "fit", (None, self._time_provider.time()))
+            return False
+
+        if not event:
+            return False
+
+        # - if fit was not called - skip on_event call
+        if not self._init_fit_was_called:
+            # logger.debug(
+            #     f"Skipping {self._strategy_name}::on_event({instrument}, {d_type}, [...], {is_historical}) fitting was not called yet (orders and deals processed)!"
+            # )
+            return False
+
+        # - if strategy is still fitting - skip on_event call
+        if self._fit_is_running:
+            logger.warning(
+                f"Skipping {self._strategy_name}::on_event({instrument}, {d_type}, [...], {is_historical}) fitting in progress (orders and deals processed)!"
+            )
+            return False
+
+        signals: list[Signal] | Signal = []
+        with SW("StrategyContext.on_event"):
+            try:
+                if isinstance(event, MarketEvent):
+                    signals = self._wrap_signal_list(self._strategy.on_market_data(self._context, event))
+
+                if isinstance(event, TriggerEvent) or (isinstance(event, MarketEvent) and event.is_trigger):
+                    _trigger_event = event.to_trigger() if isinstance(event, MarketEvent) else event
+                    _signals = self._wrap_signal_list(self._strategy.on_event(self._context, _trigger_event))
+                    signals.extend(_signals)
+
+                # - reset the failures counter when on_event is processed successfully
+                self._fails_counter = 0
+
+                if isinstance(event, Order):
+                    _signals = self._wrap_signal_list(self._strategy.on_order_update(self._context, event))
+                    signals.extend(_signals)
+
+                self._subscription_manager.commit()  # apply pending operations
+
+            except Exception as strat_error:
+                # - probably we need some cooldown interval after an exception to prevent flooding
+                logger.error(f"Strategy {self._strategy_name} raised an exception: {strat_error}")
+                logger.opt(colors=False).error(traceback.format_exc())
+
+                # - we stop execution after the maximal number of errors in a row
+                self._fails_counter += 1
+                if self._fails_counter >= self.MAX_NUMBER_OF_STRATEGY_FAILURES:
+                    logger.error(
+                        f"STRATEGY FAILED {self.MAX_NUMBER_OF_STRATEGY_FAILURES} TIMES IN A ROW - STOPPING ..."
+                    )
+                    raise StrategyExceededMaxNumberOfRuntimeFailuresError()
+
+        # - process and execute signals if they are provided
+        if signals:
+            # fmt: off
+            positions_from_strategy = self.__process_and_log_target_positions(
+                self._position_tracker.process_signals(
+                    self._context,
+                    self.__process_signals(signals)
+                )
+            )
+            self._position_gathering.alter_positions(self._context, positions_from_strategy)
+            # fmt: on
+
+        # - notify position and portfolio loggers
+        self._logging.notify(self._time_provider.time())
+
+        return False
+
+    def is_fitted(self) -> bool:
+        return self._init_fit_was_called
+
+    @SW.watch("StrategyContext.on_fit")
+    def __invoke_on_fit(self) -> None:
+        try:
+            logger.debug(f"[<y>{self.__class__.__name__}</y>] :: Invoking <g>{self._strategy_name}</g> on_fit")
+            self._strategy.on_fit(self._context)
+            self._subscription_manager.commit()  # apply pending operations
+            logger.debug(f"[<y>{self.__class__.__name__}</y>] :: <g>{self._strategy_name}</g> is fitted")
+        except Exception as strat_error:
+            logger.error(
+                f"[{self.__class__.__name__}] :: Strategy {self._strategy_name} on_fit raised an exception: {strat_error}"
+            )
+            logger.opt(colors=False).error(traceback.format_exc())
+        finally:
+            self._fit_is_running = False
+            self._init_fit_was_called = True
+
+    def __process_and_log_target_positions(
+        self, target_positions: List[TargetPosition] | TargetPosition | None
+    ) -> list[TargetPosition]:
+        if target_positions is None:
+            return []
+
+        if isinstance(target_positions, TargetPosition):
+            target_positions = [target_positions]
+
+        # - check if trading is allowed for each target position
+        target_positions = [t for t in target_positions if self._universe_manager.is_trading_allowed(t.instrument)]
+
+        self._logging.save_signals_targets(target_positions)
+        return target_positions
+
+    def __process_signals_from_target_positions(
+        self, target_positions: list[TargetPosition] | TargetPosition | None
+    ) -> None:
+        if target_positions is None:
+            return
+        if isinstance(target_positions, TargetPosition):
+            target_positions = [target_positions]
+        signals = [pos.signal for pos in target_positions]
+        self.__process_signals(signals)
+
+    def __process_signals(self, signals: list[Signal] | Signal | None) -> List[Signal]:
+        if isinstance(signals, Signal):
+            signals = [signals]
+        elif signals is None:
+            return []
+
+        for signal in signals:
+            # set strategy group name if not set
+            if not signal.group:
+                signal.group = self._strategy_name
+
+            # set reference prices for signals
+            if signal.reference_price is None:
+                q = self._market_data.quote(signal.instrument)
+                if q is None:
+                    continue
+                signal.reference_price = q.mid_price()
+
+        return signals
+
+    def _run_in_thread_pool(self, func: Callable, args=()):
+        # For simulation we don't need to run the function in a thread
+        if self._is_simulation:
+            func(*args)
+        else:
+            assert self._pool
+            self._pool.apply_async(func, args)
+
+    def _wrap_signal_list(self, signals: List[Signal] | Signal | None) -> List[Signal]:
+        if signals is None:
+            signals = []
+        elif isinstance(signals, Signal):
+            signals = [signals]
+        return signals
+
+    __SUBSCR_TO_DATA_MATCH_TABLE = {
+        DataType.OHLC: [Bar],
+        DataType.OHLC_QUOTES: [Quote, OrderBook],
+        DataType.OHLC_TRADES: [Trade],
+        DataType.QUOTE: [Quote],
+        DataType.TRADE: [Trade],
+        DataType.ORDERBOOK: [OrderBook],
+    }
+
+    def _is_base_data(self, data: Timestamped) -> tuple[bool, Timestamped]:
+        _base_ss = DataType.from_str(self._subscription_manager.get_base_subscription())[0]
+        _d_probe = data
+        return (
+            type(_d_probe) in _rule if (_rule := self.__SUBSCR_TO_DATA_MATCH_TABLE.get(_base_ss)) else False,
+            _d_probe,
+        )
+
+    def __update_base_data(
+        self, instrument: Instrument, event_type: str, data: Timestamped, is_historical: bool = False
+    ) -> bool:
+        """
+        Updates the base data cache with the provided data.
+
+        Returns:
+            bool: True if the data is base data and the strategy should be triggered, False otherwise.
+        """
+        is_base_data, _update = self._is_base_data(data)
+        # logger.info(f"{_update} {is_base_data and not self._trigger_on_time_event}")
+
+        # update cached ohlc if this is the base subscription
+        _update_ohlc = is_base_data
+        self._cache.update(instrument, event_type, _update, update_ohlc=_update_ohlc)
+
+        # update trackers, gatherers on base data
+        if not is_historical and is_base_data:
+            self._account.update_position_price(self._time_provider.time(), instrument, extract_price(_update))
+            target_positions = self.__process_and_log_target_positions(
+                self._position_tracker.update(self._context, instrument, _update)
+            )
+            self.__process_signals_from_target_positions(target_positions)
+            self._position_gathering.alter_positions(self._context, target_positions)
+
+        return is_base_data and not self._trigger_on_time_event
+
+    ###########################################################################
+    # - Handlers for different types of incoming data
+    ###########################################################################
+
+    # it's important that these are prefixed with _process so they are not included in the handlers map
+    def _process_custom_event(
+        self, instrument: Instrument | None, event_type: str, event_data: Any
+    ) -> MarketEvent | None:
+        if instrument is not None:
+            self.__update_base_data(instrument, event_type, event_data)
+
+        elif instrument is None and isinstance(event_data, dict):
+            for _instrument, data in event_data.items():
+                if isinstance(_instrument, Instrument):
+                    self.__update_base_data(_instrument, event_type, data)
+
+        return MarketEvent(self._time_provider.time(), event_type, instrument, event_data)
+
+    def _process_hist_event(self, instrument: Instrument, event_type: str, event_data: Any) -> None:
+        if not isinstance(event_data, list):
+            event_data = [event_data]
+        if DataType.OHLC == event_type:
+            # - update ohlc using the list directly; this allows updating
+            # multiple timeframes with different data (1h can have more bars than 1m)
+            _, sub_params = DataType.from_str(event_type)
+            timeframe = sub_params.get("timeframe", self._cache.default_timeframe)
+            self._cache.update_by_bars(instrument, timeframe, event_data)
+        else:
+            for data in event_data:
+                self.__update_base_data(instrument, event_type, data, is_historical=True)
+
+    def _handle_event(self, instrument: Instrument, event_type: str, event_data: Any) -> TriggerEvent:
+        return TriggerEvent(self._time_provider.time(), event_type, instrument, event_data)
+
+    def _handle_time(self, instrument: Instrument, event_type: str, data: dt_64) -> TriggerEvent:
+        return TriggerEvent(self._time_provider.time(), event_type, instrument, data)
+
+    def _handle_service_time(self, instrument: Instrument, event_type: str, data: dt_64) -> TriggerEvent | None:
+        """It is used by simulation as a dummy to trigger actual time events."""
+        pass
+
+    def _handle_fit(self, instrument: Instrument | None, event_type: str, data: Tuple[dt_64 | None, dt_64]) -> None:
+        """
+        When a scheduled fit event happens we need to invoke the strategy's on_fit method
+        """
+        if not self._cache.is_data_ready():
+            return
+        self._fit_is_running = True
+        self._run_in_thread_pool(self.__invoke_on_fit)
+
+    def _handle_ohlc(self, instrument: Instrument, event_type: str, bar: Bar) -> MarketEvent:
+        base_update = self.__update_base_data(instrument, event_type, bar)
+        return MarketEvent(self._time_provider.time(), event_type, instrument, bar, is_trigger=base_update)
+
+    def _handle_trade(self, instrument: Instrument, event_type: str, trade: Trade) -> MarketEvent:
+        base_update = self.__update_base_data(instrument, event_type, trade)
+        return MarketEvent(self._time_provider.time(), event_type, instrument, trade, is_trigger=base_update)
+
+    def _handle_orderbook(self, instrument: Instrument, event_type: str, orderbook: OrderBook) -> MarketEvent:
+        base_update = self.__update_base_data(instrument, event_type, orderbook)
+        return MarketEvent(self._time_provider.time(), event_type, instrument, orderbook, is_trigger=base_update)
+
+    def _handle_quote(self, instrument: Instrument, event_type: str, quote: Quote) -> MarketEvent:
+        base_update = self.__update_base_data(instrument, event_type, quote)
+        return MarketEvent(self._time_provider.time(), event_type, instrument, quote, is_trigger=base_update)
+
+    @SW.watch("StrategyContext.order")
+    def _handle_order(self, instrument: Instrument, event_type: str, order: Order) -> Order:
+        self._account.process_order(order)
+        return order
+
+    @SW.watch("StrategyContext")
+    def _handle_deals(self, instrument: Instrument | None, event_type: str, deals: list[Deal]) -> TriggerEvent | None:
+        if instrument is None:
+            logger.debug(
+                f"[<y>{self.__class__.__name__}</y>] :: Execution report for unknown instrument <r>{instrument}</r>"
+            )
+            return None
+
+        # - process deals only for subscribed instruments
+        self._account.process_deals(instrument, deals)
+        self._logging.save_deals(instrument, deals)
+
+        for d in deals:
+            # - notify position gatherer and tracker
+            self._position_gathering.on_execution_report(self._context, instrument, d)
+            self._position_tracker.on_execution_report(self._context, instrument, d)
+            logger.debug(
+                f"[<y>{self.__class__.__name__}</y>(<g>{instrument}</g>)] :: executed <r>{d.order_id}</r> | {d.amount} @ {d.price}"
+            )
+
+        # - notify universe manager about position change
+        self._universe_manager.on_alter_position(instrument)
+
+        return None