lumibot 4.1.2__py3-none-any.whl → 4.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of lumibot might be problematic.
- lumibot/backtesting/__init__.py +19 -5
- lumibot/backtesting/backtesting_broker.py +98 -18
- lumibot/backtesting/databento_backtesting.py +5 -686
- lumibot/backtesting/databento_backtesting_pandas.py +738 -0
- lumibot/backtesting/databento_backtesting_polars.py +860 -546
- lumibot/backtesting/fix_debug.py +37 -0
- lumibot/backtesting/thetadata_backtesting.py +9 -355
- lumibot/backtesting/thetadata_backtesting_pandas.py +1178 -0
- lumibot/brokers/alpaca.py +8 -1
- lumibot/brokers/schwab.py +12 -2
- lumibot/credentials.py +13 -0
- lumibot/data_sources/__init__.py +5 -8
- lumibot/data_sources/data_source.py +6 -2
- lumibot/data_sources/data_source_backtesting.py +30 -0
- lumibot/data_sources/databento_data.py +5 -390
- lumibot/data_sources/databento_data_pandas.py +440 -0
- lumibot/data_sources/databento_data_polars.py +15 -9
- lumibot/data_sources/pandas_data.py +30 -17
- lumibot/data_sources/polars_data.py +986 -0
- lumibot/data_sources/polars_mixin.py +472 -96
- lumibot/data_sources/polygon_data_polars.py +5 -0
- lumibot/data_sources/yahoo_data.py +9 -2
- lumibot/data_sources/yahoo_data_polars.py +5 -0
- lumibot/entities/__init__.py +15 -0
- lumibot/entities/asset.py +5 -28
- lumibot/entities/bars.py +89 -20
- lumibot/entities/data.py +29 -6
- lumibot/entities/data_polars.py +668 -0
- lumibot/entities/position.py +38 -4
- lumibot/strategies/_strategy.py +31 -9
- lumibot/strategies/strategy.py +61 -49
- lumibot/tools/backtest_cache.py +284 -0
- lumibot/tools/databento_helper.py +65 -42
- lumibot/tools/databento_helper_polars.py +748 -778
- lumibot/tools/futures_roll.py +251 -0
- lumibot/tools/indicators.py +135 -104
- lumibot/tools/polars_utils.py +142 -0
- lumibot/tools/thetadata_helper.py +1068 -134
- {lumibot-4.1.2.dist-info → lumibot-4.2.0.dist-info}/METADATA +9 -1
- {lumibot-4.1.2.dist-info → lumibot-4.2.0.dist-info}/RECORD +72 -148
- tests/backtest/test_databento.py +37 -6
- tests/backtest/test_databento_comprehensive_trading.py +70 -87
- tests/backtest/test_databento_parity.py +31 -7
- tests/backtest/test_debug_avg_fill_price.py +1 -1
- tests/backtest/test_example_strategies.py +11 -1
- tests/backtest/test_futures_edge_cases.py +96 -63
- tests/backtest/test_futures_single_trade.py +2 -2
- tests/backtest/test_futures_ultra_simple.py +2 -2
- tests/backtest/test_polars_lru_eviction.py +470 -0
- tests/backtest/test_yahoo.py +42 -0
- tests/test_asset.py +4 -4
- tests/test_backtest_cache_manager.py +149 -0
- tests/test_backtesting_data_source_env.py +50 -10
- tests/test_continuous_futures_resolution.py +60 -48
- tests/test_data_polars_parity.py +160 -0
- tests/test_databento_asset_validation.py +23 -5
- tests/test_databento_backtesting.py +1 -1
- tests/test_databento_backtesting_polars.py +312 -192
- tests/test_databento_data.py +220 -463
- tests/test_databento_helper.py +6 -1
- tests/test_databento_live.py +10 -10
- tests/test_futures_roll.py +38 -0
- tests/test_indicator_subplots.py +101 -0
- tests/test_market_infinite_loop_bug.py +77 -3
- tests/test_polars_resample.py +67 -0
- tests/test_polygon_helper.py +46 -0
- tests/test_thetadata_backwards_compat.py +97 -0
- tests/test_thetadata_helper.py +222 -23
- tests/test_thetadata_pandas_verification.py +186 -0
- lumibot/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/__pycache__/constants.cpython-312.pyc +0 -0
- lumibot/__pycache__/credentials.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/alpaca_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/alpha_vantage_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/backtesting_broker.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/ccxt_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/databento_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/interactive_brokers_rest_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/pandas_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/polygon_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/thetadata_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/yahoo_backtesting.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/alpaca.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/bitunix.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/broker.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/ccxt.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/example_broker.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/interactive_brokers.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/interactive_brokers_rest.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/projectx.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/schwab.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/tradier.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/tradovate.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/alpaca_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/alpha_vantage_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/bitunix_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/ccxt_backtesting_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/ccxt_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/data_source.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/data_source_backtesting.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/databento_data_polars_backtesting.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/databento_data_polars_live.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/example_broker_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/exceptions.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/interactive_brokers_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/interactive_brokers_rest_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/pandas_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/polars_mixin.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/polygon_data_polars.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/projectx_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/schwab_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/tradier_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/tradovate_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/yahoo_data_polars.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/asset.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/bar.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/bars.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/chains.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/data.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/dataline.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/order.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/position.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/quote.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/trading_fee.cpython-312.pyc +0 -0
- lumibot/example_strategies/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/example_strategies/__pycache__/test_broker_functions.cpython-312-pytest-8.4.1.pyc +0 -0
- lumibot/strategies/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/strategies/__pycache__/_strategy.cpython-312.pyc +0 -0
- lumibot/strategies/__pycache__/strategy.cpython-312.pyc +0 -0
- lumibot/strategies/__pycache__/strategy_executor.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/alpaca_helpers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/bitunix_helpers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/black_scholes.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/ccxt_data_store.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/databento_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/databento_helper_polars.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/debugers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/decorators.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/helpers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/indicators.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/lumibot_logger.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/pandas.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/polygon_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/polygon_helper_async.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/polygon_helper_polars_optimized.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/projectx_helpers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/schwab_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/thetadata_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/types.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/yahoo_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/yahoo_helper_polars_optimized.cpython-312.pyc +0 -0
- lumibot/traders/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/traders/__pycache__/trader.cpython-312.pyc +0 -0
- lumibot/trading_builtins/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/trading_builtins/__pycache__/custom_stream.cpython-312.pyc +0 -0
- lumibot/trading_builtins/__pycache__/safe_list.cpython-312.pyc +0 -0
- {lumibot-4.1.2.dist-info → lumibot-4.2.0.dist-info}/WHEEL +0 -0
- {lumibot-4.1.2.dist-info → lumibot-4.2.0.dist-info}/licenses/LICENSE +0 -0
- {lumibot-4.1.2.dist-info → lumibot-4.2.0.dist-info}/top_level.txt +0 -0
lumibot/tools/futures_roll.py
ADDED
@@ -0,0 +1,251 @@
+"""Centralised futures roll logic shared by assets, data sources, and brokers."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from datetime import datetime, timedelta
+from typing import Dict, List, Optional, Tuple
+
+import pytz
+
+from lumibot.constants import LUMIBOT_DEFAULT_PYTZ
+
+_FUTURES_MONTH_CODES: Dict[int, str] = {
+    1: "F",
+    2: "G",
+    3: "H",
+    4: "J",
+    5: "K",
+    6: "M",
+    7: "N",
+    8: "Q",
+    9: "U",
+    10: "V",
+    11: "X",
+    12: "Z",
+}
+
+
+@dataclass(frozen=True)
+class RollRule:
+    offset_business_days: int
+    anchor: str
+
+
+ROLL_RULES: Dict[str, RollRule] = {
+    symbol: RollRule(offset_business_days=8, anchor="third_friday")
+    for symbol in {"ES", "MES", "NQ", "MNQ", "YM", "MYM"}
+}
+
+YearMonth = Tuple[int, int]
+
+
+def _to_timezone(dt: datetime, tz=pytz.timezone("America/New_York")) -> datetime:
+    if dt.tzinfo is None:
+        return tz.localize(dt)
+    return dt.astimezone(tz)
+
+
+def _normalize_reference_date(reference_date: Optional[datetime]) -> datetime:
+    if reference_date is None:
+        reference_date = datetime.utcnow()
+    return _to_timezone(reference_date, LUMIBOT_DEFAULT_PYTZ)
+
+
+def _third_friday(year: int, month: int) -> datetime:
+    first = datetime(year, month, 1)
+    first = _to_timezone(first)
+    weekday = first.weekday()
+    days_until_friday = (4 - weekday) % 7
+    first_friday = first + timedelta(days=days_until_friday)
+    third_friday = first_friday + timedelta(weeks=2)
+    return third_friday.replace(hour=0, minute=0, second=0, microsecond=0)
+
+
+def _subtract_business_days(dt: datetime, days: int) -> datetime:
+    result = dt
+    remaining = days
+    while remaining > 0:
+        result -= timedelta(days=1)
+        if result.weekday() < 5:
+            remaining -= 1
+    return result
+
+
+def _calculate_roll_trigger(year: int, month: int, rule: RollRule) -> datetime:
+    if rule.anchor == "third_friday":
+        anchor = _third_friday(year, month)
+    else:
+        anchor = _to_timezone(datetime(year, month, 15))
+    if rule.offset_business_days <= 0:
+        return anchor
+    return _subtract_business_days(anchor, rule.offset_business_days)
+
+
+def _advance_quarter(current_month: int, current_year: int) -> YearMonth:
+    quarter_months = [3, 6, 9, 12]
+    idx = quarter_months.index(current_month)
+    next_idx = (idx + 1) % len(quarter_months)
+    next_month = quarter_months[next_idx]
+    next_year = current_year + (1 if next_idx == 0 else 0)
+    return next_year, next_month
+
+
+def _legacy_mid_month(reference_date: datetime) -> YearMonth:
+    quarter_months = [3, 6, 9, 12]
+    year = reference_date.year
+    month = reference_date.month
+    day = reference_date.day
+
+    if month == 12 and day >= 15:
+        return year + 1, 3
+    if month >= 10:
+        return year, 12
+    if month == 9 and day >= 15:
+        return year, 12
+    if month >= 7:
+        return year, 9
+    if month == 6 and day >= 15:
+        return year, 9
+    if month >= 4:
+        return year, 6
+    if month == 3 and day >= 15:
+        return year, 6
+    return year, 3
+
+
+def determine_contract_year_month(symbol: str, reference_date: Optional[datetime] = None) -> YearMonth:
+    ref = _normalize_reference_date(reference_date)
+    symbol_upper = symbol.upper()
+    rule = ROLL_RULES.get(symbol_upper)
+
+    quarter_months = [3, 6, 9, 12]
+    year = ref.year
+    month = ref.month
+
+    if rule is None:
+        return _legacy_mid_month(ref)
+
+    if month in quarter_months:
+        target_year, target_month = year, month
+        roll_point = _calculate_roll_trigger(target_year, target_month, rule)
+        if ref >= roll_point:
+            target_year, target_month = _advance_quarter(target_month, target_year)
+    else:
+        candidates = [m for m in quarter_months if m > month]
+        if candidates:
+            target_month = candidates[0]
+            target_year = year
+        else:
+            target_month = quarter_months[0]
+            target_year = year + 1
+
+    return target_year, target_month
+
+
+def build_contract_symbol(root: str, year: int, month: int, year_digits: int = 2) -> str:
+    month_code = _FUTURES_MONTH_CODES.get(month)
+    if month_code is None:
+        raise ValueError(f"Unsupported futures month: {month}")
+    if year_digits == 1:
+        return f"{root}{month_code}{year % 10}"
+    if year_digits == 4:
+        return f"{root}{month_code}{year}"
+    return f"{root}{month_code}{year % 100:02d}"
+
+
+def resolve_symbol_for_datetime(asset, dt: datetime, year_digits: int = 2) -> str:
+    year, month = determine_contract_year_month(asset.symbol, dt)
+    return build_contract_symbol(asset.symbol, year, month, year_digits=year_digits)
+
+
+def resolve_symbols_for_range(asset, start: datetime, end: datetime, year_digits: int = 2) -> List[str]:
+    if start is None or end is None:
+        return []
+
+    start = _normalize_reference_date(start)
+    end = _normalize_reference_date(end)
+    if start > end:
+        start, end = end, start
+
+    symbols: List[str] = []
+    seen: set[str] = set()
+    cursor = start
+    step = timedelta(days=30)
+
+    while cursor <= end + timedelta(days=45):
+        symbol = resolve_symbol_for_datetime(asset, cursor, year_digits=year_digits)
+        if symbol not in seen:
+            seen.add(symbol)
+            symbols.append(symbol)
+        cursor += step
+
+    final_symbol = resolve_symbol_for_datetime(asset, end, year_digits=year_digits)
+    if final_symbol not in seen:
+        symbols.append(final_symbol)
+
+    if final_symbol in symbols:
+        final_index = symbols.index(final_symbol)
+        symbols = symbols[: final_index + 1]
+
+    return symbols
+
+def build_roll_schedule(asset, start: datetime, end: datetime, year_digits: int = 2):
+    if start is None or end is None:
+        return []
+
+    start = _normalize_reference_date(start)
+    end = _normalize_reference_date(end)
+    if start > end:
+        start, end = end, start
+
+    symbol_upper = asset.symbol.upper()
+    rule = ROLL_RULES.get(symbol_upper)
+
+    schedule = []
+    cursor = start
+    previous_start = start
+
+    while cursor <= end + timedelta(days=90):
+        year, month = determine_contract_year_month(symbol_upper, cursor)
+        symbol = build_contract_symbol(symbol_upper, year, month, year_digits=year_digits)
+
+        if rule:
+            roll_dt = _calculate_roll_trigger(year, month, rule)
+        else:
+            roll_dt = _to_timezone(datetime(year, month, 15))
+
+        schedule.append((symbol, previous_start, roll_dt))
+
+        cursor = roll_dt + timedelta(minutes=1)
+        previous_start = cursor
+        if roll_dt >= end:
+            break
+
+    clipped = []
+    for symbol, s, e in schedule:
+        start_clip = max(s, start)
+        end_clip = min(e, end)
+        if end_clip <= start_clip:
+            continue
+        clipped.append((symbol, start_clip, end_clip))
+
+    if not clipped:
+        return [(
+            symbol,
+            s.astimezone(pytz.UTC),
+            e.astimezone(pytz.UTC),
+        ) for symbol, s, e in schedule]
+
+    last_symbol, s, e = clipped[-1]
+    if e < end:
+        clipped[-1] = (last_symbol, s, end)
+
+    return [
+        (
+            symbol,
+            start_clip.astimezone(pytz.UTC),
+            end_clip.astimezone(pytz.UTC),
+        )
+        for symbol, start_clip, end_clip in clipped
+    ]
lumibot/tools/indicators.py
CHANGED
@@ -18,8 +18,142 @@ from plotly.subplots import make_subplots
 from .yahoo_helper import YahooHelper as yh
 
 from lumibot.tools.lumibot_logger import get_logger
+
 logger = get_logger(__name__)
 
+TERMINAL_TRADE_STATUSES_FOR_MARKERS = {
+    "fill",
+    "filled",
+    "partial_fill",
+    "cash_settled",
+    "assigned",
+    "assignment",
+    "exercise",
+    "exercised",
+    "expired",
+    "expire",
+}
+
+
+def _build_trade_marker_tooltip(row: pd.Series):
+    """Return tooltip text for a trade marker; None when the row lacks required data."""
+    status_value = row.get("status")
+    if pd.isna(status_value) or str(status_value).strip() == "":
+        return None
+
+    status_text = str(status_value)
+    if status_text.lower() not in TERMINAL_TRADE_STATUSES_FOR_MARKERS:
+        return None
+
+    for key in ("filled_quantity", "price"):
+        value = row.get(key)
+        if pd.isna(value):
+            return None
+
+    try:
+        filled_quantity_dec = Decimal(str(row["filled_quantity"]))
+        price_dec = Decimal(str(row["price"]))
+    except (InvalidOperation, TypeError, ValueError):
+        return None
+
+    multiplier_value = row.get("asset.multiplier")
+    if pd.isna(multiplier_value) or multiplier_value == "":
+        return None
+    try:
+        multiplier_dec = Decimal(str(multiplier_value))
+    except (InvalidOperation, TypeError, ValueError):
+        return None
+
+    try:
+        amount_transacted_dec = price_dec * filled_quantity_dec * multiplier_dec
+    except (InvalidOperation, TypeError, ValueError):
+        return None
+
+    trade_cost_value = row.get("trade_cost")
+    trade_cost_dec = None
+    if not (pd.isna(trade_cost_value) or trade_cost_value == ""):
+        try:
+            trade_cost_dec = Decimal(str(trade_cost_value))
+        except (InvalidOperation, TypeError, ValueError):
+            trade_cost_dec = None
+
+    if trade_cost_dec is None:
+        trade_cost_dec = amount_transacted_dec
+
+    if row.get("asset.asset_type") == "option":
+        try:
+            return (
+                status_text
+                + "<br>"
+                + str(filled_quantity_dec.quantize(Decimal("0.01")).__format__(",f"))
+                + " "
+                + str(row.get("symbol"))
+                + " "
+                + str(row.get("asset.right"))
+                + " Option"
+                + "<br>"
+                + "Strike: "
+                + str(row.get("asset.strike"))
+                + "<br>"
+                + "Expiration: "
+                + str(row.get("asset.expiration"))
+                + "<br>"
+                + "Price: "
+                + str(price_dec.quantize(Decimal("0.0001")).__format__(",f"))
+                + "<br>"
+                + "Order Type: "
+                + str(row.get("type"))
+                + "<br>"
+                + "Amount Transacted: "
+                + str(
+                    (
+                        price_dec
+                        * filled_quantity_dec
+                        * (multiplier_dec if multiplier_dec != Decimal("0") else Decimal("1"))
+                    )
+                    .quantize(Decimal("0.01"))
+                    .__format__(",f")
+                )
+                + "<br>"
+                + "Trade Cost: "
+                + str(trade_cost_dec.quantize(Decimal("0.01")).__format__(",f"))
+                + "<br>"
+            )
+        except (InvalidOperation, TypeError, ValueError):
+            return None
+
+    if multiplier_dec == Decimal("0"):
+        return None
+
+    try:
+        amount_transacted = amount_transacted_dec.quantize(Decimal("0.01")).__format__(",f")
+        price_text = str(price_dec.quantize(Decimal("0.0001")).__format__(",f"))
+        filled_qty_text = str(filled_quantity_dec.quantize(Decimal("0.01")).__format__(",f"))
+        trade_cost_text = str(trade_cost_dec.quantize(Decimal("0.01")).__format__(",f"))
+    except (InvalidOperation, TypeError, ValueError):
+        return None
+
+    return (
+        status_text
+        + "<br>"
+        + filled_qty_text
+        + " "
+        + str(row.get("symbol"))
+        + "<br>"
+        + "Price: "
+        + price_text
+        + "<br>"
+        + "Order Type: "
+        + str(row.get("type"))
+        + "<br>"
+        + "Amount Transacted: "
+        + amount_transacted
+        + "<br>"
+        + "Trade Cost: "
+        + trade_cost_text
+        + "<br>"
+    )
+
 
 def total_return(_df):
     """Calculate the cumulative return in a dataframe
@@ -669,110 +803,7 @@ def plot_returns(
     buys = buys.loc[df_final["side"].isin(["buy", "buy_to_open", "buy_to_cover", "buy_to_close"])]
 
     def generate_buysell_plotly_text(row):
-
-        return None
-
-        for key in ("filled_quantity", "price"):
-            value = row.get(key)
-            if pd.isna(value):
-                return None
-
-        try:
-            filled_quantity_dec = Decimal(str(row["filled_quantity"]))
-            price_dec = Decimal(str(row["price"]))
-        except (InvalidOperation, TypeError, ValueError):
-            return None
-
-        multiplier_value = row.get("asset.multiplier")
-        if pd.isna(multiplier_value) or multiplier_value == "":
-            return None
-        try:
-            multiplier_dec = Decimal(str(multiplier_value))
-        except (InvalidOperation, TypeError, ValueError):
-            return None
-
-        trade_cost_value = row.get("trade_cost")
-        if pd.isna(trade_cost_value) or trade_cost_value == "":
-            return None
-        try:
-            trade_cost_dec = Decimal(str(trade_cost_value))
-        except (InvalidOperation, TypeError, ValueError):
-            return None
-
-        if row["asset.asset_type"] == "option":
-            try:
-                return (
-                    row["status"]
-                    + "<br>"
-                    + str(filled_quantity_dec.quantize(Decimal("0.01")).__format__(",f"))
-                    + " "
-                    + row["symbol"]
-                    + " "
-                    + row["asset.right"]
-                    + " Option"
-                    + "<br>"
-                    + "Strike: "
-                    + str(row["asset.strike"])
-                    + "<br>"
-                    + "Expiration: "
-                    + str(row["asset.expiration"])
-                    + "<br>"
-                    + "Price: "
-                    + str(price_dec.quantize(Decimal("0.0001")).__format__(",f"))
-                    + "<br>"
-                    + "Order Type: "
-                    + row["type"]
-                    + "<br>"
-                    + "Amount Transacted: "
-                    + str(
-                        (
-                            price_dec
-                            * filled_quantity_dec
-                            * (multiplier_dec if multiplier_dec != Decimal("0") else Decimal("1"))
-                        )
-                        .quantize(Decimal("0.01"))
-                        .__format__(",f")
-                    )
-                    + "<br>"
-                    + "Trade Cost: "
-                    + str(trade_cost_dec.quantize(Decimal("0.01")).__format__(",f"))
-                    + "<br>"
-                )
-            except (InvalidOperation, TypeError, ValueError):
-                return None
-
-        if multiplier_dec == Decimal("0"):
-            return None
-        try:
-            amount_transacted = (
-                price_dec * filled_quantity_dec * multiplier_dec
-            ).quantize(Decimal("0.01")).__format__(",f")
-            price_text = str(price_dec.quantize(Decimal("0.0001")).__format__(",f"))
-            filled_qty_text = str(filled_quantity_dec.quantize(Decimal("0.01")).__format__(",f"))
-            trade_cost_text = str(trade_cost_dec.quantize(Decimal("0.01")).__format__(",f"))
-        except (InvalidOperation, TypeError, ValueError):
-            return None
-
-        return (
-            row["status"]
-            + "<br>"
-            + filled_qty_text
-            + " "
-            + row["symbol"]
-            + "<br>"
-            + "Price: "
-            + price_text
-            + "<br>"
-            + "Order Type: "
-            + row["type"]
-            + "<br>"
-            + "Amount Transacted: "
-            + amount_transacted
-            + "<br>"
-            + "Trade Cost: "
-            + trade_cost_text
-            + "<br>"
-        )
+        return _build_trade_marker_tooltip(row)
 
     buy_ticks_df = buys.apply(generate_buysell_plotly_text, axis=1)
 
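
A minimal sketch (not part of the diff) of how the new tooltip helper behaves; `_build_trade_marker_tooltip` is an internal helper, and the row values below are made up to mimic the keys it reads.

import pandas as pd

from lumibot.tools.indicators import _build_trade_marker_tooltip

# Hypothetical filled stock order expressed with the column names the helper expects.
row = pd.Series(
    {
        "status": "fill",
        "symbol": "SPY",
        "type": "market",
        "filled_quantity": 10,
        "price": 450.25,
        "asset.multiplier": 1,
        "asset.asset_type": "stock",
        "trade_cost": 4502.50,
    }
)

# Terminal statuses such as "fill" produce an HTML tooltip string; non-terminal
# or incomplete rows return None, so those markers are simply skipped.
print(_build_trade_marker_tooltip(row))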
lumibot/tools/polars_utils.py
ADDED
@@ -0,0 +1,142 @@
+"""Utility helpers for operating on Polars DataFrames within Lumibot."""
+
+from __future__ import annotations
+
+from typing import Iterable, Optional, Sequence, Set
+
+import polars as pl
+
+
+class PolarsResampleError(Exception):
+    """Raised when a Polars resample operation cannot be completed."""
+
+
+def _ensure_datetime_column(df: pl.DataFrame) -> str:
+    """Return the datetime-like column name used for grouping."""
+    if "datetime" in df.columns:
+        return "datetime"
+
+    for candidate in ("timestamp", "date", "time"):
+        if candidate in df.columns:
+            return candidate
+
+    raise PolarsResampleError("Polars DataFrame lacks a datetime-like column required for resampling.")
+
+
+def _aggregate_expressions(existing_cols: Sequence[str]) -> list[pl.Expr]:
+    """Build aggregation expressions for OHLC-style resampling."""
+    exprs: list[pl.Expr] = []
+    handled: Set[str] = {"datetime", "timestamp", "date", "time"}
+
+    if "open" in existing_cols:
+        exprs.append(pl.col("open").first().alias("open"))
+        handled.add("open")
+
+    if "high" in existing_cols:
+        exprs.append(pl.col("high").max().alias("high"))
+        handled.add("high")
+
+    if "low" in existing_cols:
+        exprs.append(pl.col("low").min().alias("low"))
+        handled.add("low")
+
+    if "close" in existing_cols:
+        exprs.append(pl.col("close").last().alias("close"))
+        handled.add("close")
+
+    if "volume" in existing_cols:
+        exprs.append(pl.col("volume").sum().alias("volume"))
+        handled.add("volume")
+
+    if "dividend" in existing_cols:
+        exprs.append(pl.col("dividend").sum().alias("dividend"))
+        handled.add("dividend")
+
+    # Preserve any remaining columns by taking the last observation
+    for column in existing_cols:
+        if column not in handled:
+            exprs.append(pl.col(column).last().alias(column))
+
+    return exprs
+
+
+def resample_polars_ohlc(
+    df: pl.DataFrame,
+    multiplier: int,
+    base_unit: str,
+    length: Optional[int] = None,
+    label_offset: Optional[str] = None,
+) -> pl.DataFrame:
+    """Resample a Polars DataFrame containing OHLC-like data.
+
+    Parameters
+    ----------
+    df:
+        Input DataFrame containing at least ``datetime`` plus OHLCV columns.
+    multiplier:
+        Number of base units to roll up. e.g. multiplier=5, base_unit="minute" -> 5-minute bars.
+    base_unit:
+        Currently supports "minute" or "day".
+    length:
+        Optional maximum number of rows to retain (tail). If ``None`` retains the full frame.
+    label_offset:
+        Optional duration string understood by Polars to offset labels. Useful for aligning session boundaries.
+
+    Returns
+    -------
+    pl.DataFrame
+        Resampled dataset sorted by datetime.
+    """
+
+    if df.is_empty():
+        return df
+
+    if multiplier <= 0:
+        raise PolarsResampleError("Multiplier must be positive for resampling.")
+
+    unit_map = {"minute": "m", "day": "d"}
+    try:
+        every_suffix = unit_map[base_unit]
+    except KeyError as exc:
+        raise PolarsResampleError(f"Unsupported base unit '{base_unit}' for polars resampling.") from exc
+
+    every = f"{multiplier}{every_suffix}"
+
+    datetime_column = _ensure_datetime_column(df)
+    sorted_df = df.sort(datetime_column)
+
+    agg_exprs = _aggregate_expressions(sorted_df.columns)
+
+    group_kwargs = {
+        "every": every,
+        "period": every,
+        "closed": "left",
+        "label": "left",
+    }
+    if label_offset:
+        group_kwargs["offset"] = label_offset
+
+    lazy_frame = sorted_df.lazy()
+    if hasattr(lazy_frame, "group_by_dynamic"):
+        lazy_grouped = lazy_frame.group_by_dynamic(datetime_column, **group_kwargs)
+    else:  # pragma: no cover - backward compatibility
+        lazy_grouped = lazy_frame.groupby_dynamic(datetime_column, **group_kwargs)
+    resampled = (
+        lazy_grouped
+        .agg(agg_exprs)
+        .sort(datetime_column)
+        .collect()
+    )
+
+    required_cols: Iterable[str] = [c for c in ("open", "high", "low", "close") if c in resampled.columns]
+    if required_cols:
+        condition = None
+        for col in required_cols:
+            expr = pl.col(col).is_not_null()
+            condition = expr if condition is None else condition & expr
+        resampled = resampled.filter(condition)
+
+    if length is not None and length > 0 and resampled.height > length:
+        resampled = resampled.tail(length)
+
+    return resampled