lumibot 4.1.3__py3-none-any.whl → 4.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lumibot might be problematic.
- lumibot/backtesting/__init__.py +19 -5
- lumibot/backtesting/backtesting_broker.py +98 -18
- lumibot/backtesting/databento_backtesting.py +5 -686
- lumibot/backtesting/databento_backtesting_pandas.py +738 -0
- lumibot/backtesting/databento_backtesting_polars.py +860 -546
- lumibot/backtesting/fix_debug.py +37 -0
- lumibot/backtesting/thetadata_backtesting.py +9 -355
- lumibot/backtesting/thetadata_backtesting_pandas.py +1167 -0
- lumibot/brokers/alpaca.py +8 -1
- lumibot/brokers/schwab.py +12 -2
- lumibot/credentials.py +13 -0
- lumibot/data_sources/__init__.py +5 -8
- lumibot/data_sources/data_source.py +6 -2
- lumibot/data_sources/data_source_backtesting.py +30 -0
- lumibot/data_sources/databento_data.py +5 -390
- lumibot/data_sources/databento_data_pandas.py +440 -0
- lumibot/data_sources/databento_data_polars.py +15 -9
- lumibot/data_sources/pandas_data.py +30 -17
- lumibot/data_sources/polars_data.py +986 -0
- lumibot/data_sources/polars_mixin.py +472 -96
- lumibot/data_sources/polygon_data_polars.py +5 -0
- lumibot/data_sources/yahoo_data.py +9 -2
- lumibot/data_sources/yahoo_data_polars.py +5 -0
- lumibot/entities/__init__.py +15 -0
- lumibot/entities/asset.py +5 -28
- lumibot/entities/bars.py +89 -20
- lumibot/entities/data.py +29 -6
- lumibot/entities/data_polars.py +668 -0
- lumibot/entities/position.py +38 -4
- lumibot/strategies/_strategy.py +2 -1
- lumibot/strategies/strategy.py +61 -49
- lumibot/tools/backtest_cache.py +284 -0
- lumibot/tools/databento_helper.py +35 -35
- lumibot/tools/databento_helper_polars.py +738 -775
- lumibot/tools/futures_roll.py +251 -0
- lumibot/tools/indicators.py +135 -104
- lumibot/tools/polars_utils.py +142 -0
- lumibot/tools/thetadata_helper.py +1068 -134
- {lumibot-4.1.3.dist-info → lumibot-4.2.1.dist-info}/METADATA +9 -1
- {lumibot-4.1.3.dist-info → lumibot-4.2.1.dist-info}/RECORD +71 -147
- tests/backtest/test_databento.py +37 -6
- tests/backtest/test_databento_comprehensive_trading.py +8 -4
- tests/backtest/test_databento_parity.py +4 -2
- tests/backtest/test_debug_avg_fill_price.py +1 -1
- tests/backtest/test_example_strategies.py +11 -1
- tests/backtest/test_futures_edge_cases.py +3 -3
- tests/backtest/test_futures_single_trade.py +2 -2
- tests/backtest/test_futures_ultra_simple.py +2 -2
- tests/backtest/test_polars_lru_eviction.py +470 -0
- tests/backtest/test_yahoo.py +42 -0
- tests/test_asset.py +4 -4
- tests/test_backtest_cache_manager.py +149 -0
- tests/test_backtesting_data_source_env.py +6 -0
- tests/test_continuous_futures_resolution.py +60 -48
- tests/test_data_polars_parity.py +160 -0
- tests/test_databento_asset_validation.py +23 -5
- tests/test_databento_backtesting.py +1 -1
- tests/test_databento_backtesting_polars.py +312 -192
- tests/test_databento_data.py +220 -463
- tests/test_databento_live.py +10 -10
- tests/test_futures_roll.py +38 -0
- tests/test_indicator_subplots.py +101 -0
- tests/test_market_infinite_loop_bug.py +77 -3
- tests/test_polars_resample.py +67 -0
- tests/test_polygon_helper.py +46 -0
- tests/test_thetadata_backwards_compat.py +97 -0
- tests/test_thetadata_helper.py +222 -23
- tests/test_thetadata_pandas_verification.py +186 -0
- lumibot/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/__pycache__/constants.cpython-312.pyc +0 -0
- lumibot/__pycache__/credentials.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/alpaca_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/alpha_vantage_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/backtesting_broker.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/ccxt_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/databento_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/interactive_brokers_rest_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/pandas_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/polygon_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/thetadata_backtesting.cpython-312.pyc +0 -0
- lumibot/backtesting/__pycache__/yahoo_backtesting.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/alpaca.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/bitunix.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/broker.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/ccxt.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/example_broker.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/interactive_brokers.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/interactive_brokers_rest.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/projectx.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/schwab.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/tradier.cpython-312.pyc +0 -0
- lumibot/brokers/__pycache__/tradovate.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/alpaca_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/alpha_vantage_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/bitunix_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/ccxt_backtesting_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/ccxt_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/data_source.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/data_source_backtesting.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/databento_data_polars_backtesting.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/databento_data_polars_live.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/example_broker_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/exceptions.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/interactive_brokers_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/interactive_brokers_rest_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/pandas_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/polars_mixin.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/polygon_data_polars.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/projectx_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/schwab_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/tradier_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/tradovate_data.cpython-312.pyc +0 -0
- lumibot/data_sources/__pycache__/yahoo_data_polars.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/asset.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/bar.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/bars.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/chains.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/data.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/dataline.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/order.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/position.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/quote.cpython-312.pyc +0 -0
- lumibot/entities/__pycache__/trading_fee.cpython-312.pyc +0 -0
- lumibot/example_strategies/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/example_strategies/__pycache__/test_broker_functions.cpython-312-pytest-8.4.1.pyc +0 -0
- lumibot/strategies/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/strategies/__pycache__/_strategy.cpython-312.pyc +0 -0
- lumibot/strategies/__pycache__/strategy.cpython-312.pyc +0 -0
- lumibot/strategies/__pycache__/strategy_executor.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/alpaca_helpers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/bitunix_helpers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/black_scholes.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/ccxt_data_store.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/databento_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/databento_helper_polars.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/debugers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/decorators.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/helpers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/indicators.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/lumibot_logger.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/pandas.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/polygon_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/polygon_helper_async.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/polygon_helper_polars_optimized.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/projectx_helpers.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/schwab_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/thetadata_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/types.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/yahoo_helper.cpython-312.pyc +0 -0
- lumibot/tools/__pycache__/yahoo_helper_polars_optimized.cpython-312.pyc +0 -0
- lumibot/traders/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/traders/__pycache__/trader.cpython-312.pyc +0 -0
- lumibot/trading_builtins/__pycache__/__init__.cpython-312.pyc +0 -0
- lumibot/trading_builtins/__pycache__/custom_stream.cpython-312.pyc +0 -0
- lumibot/trading_builtins/__pycache__/safe_list.cpython-312.pyc +0 -0
- {lumibot-4.1.3.dist-info → lumibot-4.2.1.dist-info}/WHEEL +0 -0
- {lumibot-4.1.3.dist-info → lumibot-4.2.1.dist-info}/licenses/LICENSE +0 -0
- {lumibot-4.1.3.dist-info → lumibot-4.2.1.dist-info}/top_level.txt +0 -0
lumibot/backtesting/fix_debug.py (new file)
@@ -0,0 +1,37 @@
+import re
+
+with open('thetadata_backtesting_polars.py', 'r') as f:
+    lines = f.readlines()
+
+output = []
+skip_next_dedent = False
+dedent_count = 0
+
+for i, line in enumerate(lines):
+    # Check if this is the debug flag declaration
+    if '_THETA_PARITY_DEBUG = os.getenv' in line:
+        continue  # Skip this line entirely
+
+    # Check if this is a conditional debug check
+    if 'if _THETA_PARITY_DEBUG:' in line:
+        skip_next_dedent = True
+        dedent_count = len(line) - len(line.lstrip())
+        continue  # Skip the if line
+
+    # If we're in a block that needs dedenting
+    if skip_next_dedent:
+        current_indent = len(line) - len(line.lstrip())
+        # If this line is indented more than the if statement, dedent it
+        if current_indent > dedent_count and line.strip():
+            line = line[4:]  # Remove 4 spaces
+        # Check if we've exited the block (line at same or less indent than if)
+        elif line.strip() and current_indent <= dedent_count:
+            skip_next_dedent = False
+            dedent_count = 0
+
+    output.append(line)
+
+with open('thetadata_backtesting_polars.py', 'w') as f:
+    f.writelines(output)
+
+print("Fixed indentation")
lumibot/backtesting/thetadata_backtesting.py
@@ -1,358 +1,12 @@
-
-from typing import Union
+"""ThetaData backtesting entry point (pandas-only)."""
 
-import
-import pandas as pd
-import subprocess
-from datetime import date, timedelta
+from .thetadata_backtesting_pandas import ThetaDataBacktestingPandas, START_BUFFER
 
-
-
-from lumibot.tools import thetadata_helper
+# Maintain legacy import name for backwards compatibility
+ThetaDataBacktesting = ThetaDataBacktestingPandas
 
-
-
-
-START_BUFFER
-
-
-class ThetaDataBacktesting(PandasData):
-    """
-    Backtesting implementation of ThetaData
-    """
-
-    # Enable fallback to last_price when bid/ask quotes are unavailable for options
-    option_quote_fallback_allowed = True
-
-    def __init__(
-        self,
-        datetime_start,
-        datetime_end,
-        pandas_data=None,
-        username=None,
-        password=None,
-        use_quote_data=True,
-        **kwargs,
-    ):
-        # Pass allow_option_quote_fallback to parent to enable fallback mechanism
-        super().__init__(datetime_start=datetime_start, datetime_end=datetime_end, pandas_data=pandas_data,
-                         allow_option_quote_fallback=True, **kwargs)
-
-        self._username = username
-        self._password = password
-        self._use_quote_data = use_quote_data
-
-        self.kill_processes_by_name("ThetaTerminal.jar")
-
-    def is_weekend(self, date):
-        """
-        Check if the given date is a weekend.
-
-        :param date: datetime.date object
-        :return: Boolean, True if weekend, False otherwise
-        """
-        return date.weekday() >= 5  # 5 = Saturday, 6 = Sunday
-
-    def kill_processes_by_name(self, keyword):
-        try:
-            # Find all processes related to the keyword
-            result = subprocess.run(['pgrep', '-f', keyword], capture_output=True, text=True)
-            pids = result.stdout.strip().split('\n')
-
-            if pids:
-                for pid in pids:
-                    if pid:  # Ensure the PID is not empty
-                        logger.info(f"Killing process with PID: {pid}")
-                        subprocess.run(['kill', '-9', pid])
-                logger.info(f"All processes related to '{keyword}' have been killed.")
-            else:
-                logger.info(f"No processes found related to '{keyword}'.")
-
-        except Exception as e:
-            print(f"An error occurred during kill process: {e}")
-
-    def _update_pandas_data(self, asset, quote, length, timestep, start_dt=None):
-        """
-        Get asset data and update the self.pandas_data dictionary.
-
-        Parameters
-        ----------
-        asset : Asset
-            The asset to get data for.
-        quote : Asset
-            The quote asset to use. For example, if asset is "SPY" and quote is "USD", the data will be for "SPY/USD".
-        length : int
-            The number of data points to get.
-        timestep : str
-            The timestep to use. For example, "1minute" or "1hour" or "1day".
-
-        Returns
-        -------
-        dict
-            A dictionary with the keys being the asset and the values being the PandasData objects.
-        """
-        # DEBUG: Log when strike 157 is requested
-        if hasattr(asset, 'strike') and asset.strike == 157:
-            import traceback
-            logger.info(f"\n[DEBUG STRIKE 157] _update_pandas_data called for asset: {asset}")
-            logger.info(f"[DEBUG STRIKE 157] Traceback:\n{''.join(traceback.format_stack())}")
-
-        search_asset = asset
-        asset_separated = asset
-        quote_asset = quote if quote is not None else Asset("USD", "forex")
-
-        if isinstance(search_asset, tuple):
-            asset_separated, quote_asset = search_asset
-        else:
-            search_asset = (search_asset, quote_asset)
-
-        if asset_separated.asset_type == "option":
-            expiry = asset_separated.expiration
-            if self.is_weekend(expiry):
-                logger.info(f"\nSKIP: Expiry {expiry} date is a weekend, no contract exists: {asset_separated}")
-                return None
-
-        # Get the start datetime and timestep unit
-        start_datetime, ts_unit = self.get_start_datetime_and_ts_unit(
-            length, timestep, start_dt, start_buffer=START_BUFFER
-        )
-
-        # Check if we have data for this asset
-        if search_asset in self.pandas_data:
-            asset_data = self.pandas_data[search_asset]
-            asset_data_df = asset_data.df
-            data_start_datetime = asset_data_df.index[0]
-
-            # Get the timestep of the data
-            data_timestep = asset_data.timestep
-
-            # If the timestep is the same, we don't need to update the data
-            if data_timestep == ts_unit:
-                # Check if we have enough data (5 days is the buffer we subtracted from the start datetime)
-                if (data_start_datetime - start_datetime) < START_BUFFER:
-                    return None
-
-            # Always try to get the lowest timestep possible because we can always resample
-            # If day is requested then make sure we at least have data that's less than a day
-            if ts_unit == "day":
-                if data_timestep == "minute":
-                    # Check if we have enough data (5 days is the buffer we subtracted from the start datetime)
-                    if (data_start_datetime - start_datetime) < START_BUFFER:
-                        return None
-                    else:
-                        # We don't have enough data, so we need to get more (but in minutes)
-                        ts_unit = "minute"
-                elif data_timestep == "hour":
-                    # Check if we have enough data (5 days is the buffer we subtracted from the start datetime)
-                    if (data_start_datetime - start_datetime) < START_BUFFER:
-                        return None
-                    else:
-                        # We don't have enough data, so we need to get more (but in hours)
-                        ts_unit = "hour"
-
-            # If hour is requested then make sure we at least have data that's less than an hour
-            if ts_unit == "hour":
-                if data_timestep == "minute":
-                    # Check if we have enough data (5 days is the buffer we subtracted from the start datetime)
-                    if (data_start_datetime - start_datetime) < START_BUFFER:
-                        return None
-                    else:
-                        # We don't have enough data, so we need to get more (but in minutes)
-                        ts_unit = "minute"
-
-        # Download data from ThetaData
-        try:
-            # Get ohlc data from ThetaData
-            date_time_now = self.get_datetime()
-            df_ohlc = None
-            df_ohlc = thetadata_helper.get_price_data(
-                self._username,
-                self._password,
-                asset_separated,
-                start_datetime,
-                self.datetime_end,
-                timespan=ts_unit,
-                quote_asset=quote_asset,
-                dt=date_time_now,
-                datastyle="ohlc",
-                include_after_hours=True  # Default to True for extended hours data
-            )
-            if df_ohlc is None:
-                logger.info(f"\nSKIP: No OHLC data found for {asset_separated} from ThetaData")
-                return None
-
-            # Quote data (bid/ask) is only available for intraday data (minute, hour, second)
-            # For daily+ data, only use OHLC
-            if self._use_quote_data and ts_unit in ["minute", "hour", "second"]:
-                # Get quote data from ThetaData
-                df_quote = thetadata_helper.get_price_data(
-                    self._username,
-                    self._password,
-                    asset_separated,
-                    start_datetime,
-                    self.datetime_end,
-                    timespan=ts_unit,
-                    quote_asset=quote_asset,
-                    dt=date_time_now,
-                    datastyle="quote",
-                    include_after_hours=True  # Default to True for extended hours data
-                )
-
-                # Check if we have data
-                if df_quote is None:
-                    logger.info(f"\nSKIP: No QUOTE data found for {quote_asset} from ThetaData")
-                    return None
-
-                # Combine the ohlc and quote data using outer join to preserve all data
-                # Use forward fill for missing quote values (ThetaData's recommended approach)
-                df = pd.concat([df_ohlc, df_quote], axis=1, join='outer')
-
-                # Forward fill missing quote values
-                quote_columns = ['bid', 'ask', 'bid_size', 'ask_size', 'bid_condition', 'ask_condition', 'bid_exchange', 'ask_exchange']
-                existing_quote_cols = [col for col in quote_columns if col in df.columns]
-                if existing_quote_cols:
-                    df[existing_quote_cols] = df[existing_quote_cols].fillna(method='ffill')
-
-                    # Log how much forward filling occurred
-                    if 'bid' in df.columns and 'ask' in df.columns:
-                        remaining_nulls = df[['bid', 'ask']].isna().sum().sum()
-                        if remaining_nulls > 0:
-                            logger.info(f"Forward-filled missing quote values for {asset_separated}. {remaining_nulls} nulls remain at start of data.")
-            else:
-                df = df_ohlc
-
-        except Exception as e:
-            raise Exception("Error getting data from ThetaData") from e
-
-        if df is None or df.empty:
-            return None
-
-        data = Data(asset_separated, df, timestep=ts_unit, quote=quote_asset)
-        pandas_data_update = self._set_pandas_data_keys([data])
-        if pandas_data_update is not None:
-            # Add the keys to the self.pandas_data dictionary
-            self.pandas_data.update(pandas_data_update)
-            self._data_store.update(pandas_data_update)
-
-
-    def _pull_source_symbol_bars(
-        self,
-        asset,
-        length,
-        timestep=None,
-        timeshift=None,
-        quote=None,
-        exchange=None,
-        include_after_hours=True,
-    ):
-        try:
-            dt = self.get_datetime()
-            self._update_pandas_data(asset, quote, 1, timestep, dt)
-        except Exception as e:
-            logger.error(f"\nERROR: _pull_source_symbol_bars from ThetaData: {e}, {dt}, asset:{asset}")
-
-        return super()._pull_source_symbol_bars(
-            asset, length, timestep, timeshift, quote, exchange, include_after_hours
-        )
-
-    # Get pricing data for an asset for the entire backtesting period
-    def get_historical_prices_between_dates(
-        self,
-        asset,
-        timestep="minute",
-        quote=None,
-        exchange=None,
-        include_after_hours=True,
-        start_date=None,
-        end_date=None,
-    ):
-        self._update_pandas_data(asset, quote, 1, timestep)
-
-        response = super()._pull_source_symbol_bars_between_dates(
-            asset, timestep, quote, exchange, include_after_hours, start_date, end_date
-        )
-
-        if response is None:
-            return None
-
-        bars = self._parse_source_symbol_bars(response, asset, quote=quote)
-        return bars
-
-    def get_last_price(self, asset, timestep="minute", quote=None, exchange=None, **kwargs) -> Union[float, Decimal, None]:
-        try:
-            dt = self.get_datetime()
-            self._update_pandas_data(asset, quote, 1, timestep, dt)
-        except Exception as e:
-            logger.error(f"\nERROR: get_last_price from ThetaData: {e}, {dt}, asset:{asset}")
-
-        return super().get_last_price(asset=asset, quote=quote, exchange=exchange)
-
-    def get_quote(self, asset, timestep="minute", quote=None, exchange=None, **kwargs):
-        """
-        Get quote data for an asset during backtesting.
-
-        Parameters
-        ----------
-        asset : Asset object
-            The asset for which the quote is needed.
-        timestep : str, optional
-            The timestep to use for the data.
-        quote : Asset object, optional
-            The quote asset for cryptocurrency pairs.
-        exchange : str, optional
-            The exchange to get the quote from.
-        **kwargs : dict
-            Additional keyword arguments.
-
-        Returns
-        -------
-        Quote
-            A Quote object with the quote information.
-        """
-        try:
-            dt = self.get_datetime()
-            self._update_pandas_data(asset, quote, 1, timestep, dt)
-        except Exception as e:
-            logger.error(f"\nnERROR: get_quote from ThetaData: {e}, {dt}, asset:{asset}")
-
-        return super().get_quote(asset=asset, quote=quote, exchange=exchange)
-
-    def get_chains(self, asset):
-        """
-        Get option chains using cached implementation (matches Polygon pattern).
-
-        Parameters
-        ----------
-        asset : Asset
-            The asset to get data for.
-
-        Returns
-        -------
-        Chains:
-            A Chains entity object (dict subclass) with the structure:
-            {
-                "Multiplier": 100,
-                "Exchange": "SMART",
-                "Chains": {
-                    "CALL": {
-                        "2023-07-31": [100.0, 101.0, ...],
-                        ...
-                    },
-                    "PUT": {
-                        "2023-07-31": [100.0, 101.0, ...],
-                        ...
-                    }
-                }
-            }
-        """
-        from lumibot.entities import Chains
-
-        chains_dict = thetadata_helper.get_chains_cached(
-            username=self._username,
-            password=self._password,
-            asset=asset,
-            current_date=self.get_datetime().date()
-        )
-
-        # Wrap in Chains entity for modern API
-        return Chains(chains_dict)
+__all__ = [
+    "ThetaDataBacktesting",
+    "ThetaDataBacktestingPandas",
+    "START_BUFFER",
+]
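
Note: the rewritten lumibot/backtesting/thetadata_backtesting.py is now only a compatibility shim around the new pandas implementation. A minimal sketch of what the alias in this diff implies for downstream code (assuming the module layout shown in the file list above stays importable; the snippet itself is illustrative, not part of the release):

    from lumibot.backtesting.thetadata_backtesting import (
        ThetaDataBacktesting,
        ThetaDataBacktestingPandas,
    )

    # Per the diff, the legacy class name is just an alias for the pandas-based
    # backtester, so existing imports of ThetaDataBacktesting should keep working.
    assert ThetaDataBacktesting is ThetaDataBacktestingPandas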