lumibot 4.2.0__py3-none-any.whl → 4.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of lumibot was flagged as possibly problematic by the registry.
- lumibot/backtesting/thetadata_backtesting_pandas.py +43 -54
- lumibot/tools/thetadata_helper.py +54 -54
- {lumibot-4.2.0.dist-info → lumibot-4.2.1.dist-info}/METADATA +1 -1
- {lumibot-4.2.0.dist-info → lumibot-4.2.1.dist-info}/RECORD +7 -7
- {lumibot-4.2.0.dist-info → lumibot-4.2.1.dist-info}/WHEEL +0 -0
- {lumibot-4.2.0.dist-info → lumibot-4.2.1.dist-info}/licenses/LICENSE +0 -0
- {lumibot-4.2.0.dist-info → lumibot-4.2.1.dist-info}/top_level.txt +0 -0
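The substantive change in this release is a logging cleanup: ThetaData debug instrumentation that 4.2.0 emitted at WARNING level (and duplicated to stdout via print) is demoted to logger.debug, and the eagerly %-formatted message strings are replaced with lazy logging arguments. In outline the pattern looks like the sketch below; the values "SPY" and 1234 are illustrative placeholders, not taken from the package.

import logging

logger = logging.getLogger(__name__)

# 4.2.0 style (removed): the message is formatted eagerly with %, emitted at
# WARNING level, and printed a second time to stdout.
message = (
    "[THETA][DEBUG][CACHE][HIT] asset=%s rows=%s"
) % ("SPY", 1234)
logger.warning(message)
print(message)

# 4.2.1 style (added): arguments are passed lazily; logging interpolates the
# string only if DEBUG is actually enabled for this logger.
logger.debug("[THETA][DEBUG][CACHE][HIT] asset=%s rows=%s", "SPY", 1234)

Besides quieting WARNING output during backtests, the lazy form skips the string formatting entirely when debug logging is disabled.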
lumibot/backtesting/thetadata_backtesting_pandas.py +43 -54

@@ -184,7 +184,7 @@ class ThetaDataBacktestingPandas(PandasData):
         asset: Optional[Asset] = None,  # DEBUG-LOG: Added for logging
     ) -> Optional[pd.DataFrame]:
         # DEBUG-LOG: Method entry with full parameter context
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][ENTRY] asset=%s current_dt=%s requested_length=%s timeshift=%s input_shape=%s input_columns=%s input_index_type=%s input_has_tz=%s input_index_sample=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             current_dt.isoformat() if hasattr(current_dt, 'isoformat') else current_dt,
@@ -199,7 +199,7 @@ class ThetaDataBacktestingPandas(PandasData):

         if pandas_df is None or pandas_df.empty:
             # DEBUG-LOG: Early return for empty input
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][PANDAS][FINALIZE][EMPTY_INPUT] asset=%s returning_none_or_empty=True",
                 getattr(asset, 'symbol', asset) if asset else 'UNKNOWN'
             )
@@ -212,7 +212,7 @@ class ThetaDataBacktestingPandas(PandasData):
         frame.index = pd.to_datetime(frame.index)

         # DEBUG-LOG: Timezone state before localization
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][TZ_CHECK] asset=%s frame_index_tz=%s target_tz=%s needs_localization=%s frame_shape=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             frame.index.tz,
@@ -227,7 +227,7 @@ class ThetaDataBacktestingPandas(PandasData):
         normalized_for_cutoff = localized_index.normalize()

         # DEBUG-LOG: After localization
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][LOCALIZED] asset=%s localized_index_tz=%s localized_sample=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             localized_index.tz,
@@ -238,7 +238,7 @@ class ThetaDataBacktestingPandas(PandasData):
         cutoff_mask = normalized_for_cutoff <= cutoff

         # DEBUG-LOG: Cutoff filtering state
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][CUTOFF] asset=%s cutoff=%s cutoff_mask_true=%s cutoff_mask_false=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             cutoff,
@@ -249,7 +249,7 @@ class ThetaDataBacktestingPandas(PandasData):
         if timeshift and not isinstance(timeshift, int):
             cutoff_mask &= normalized_for_cutoff <= (cutoff - timeshift)
             # DEBUG-LOG: After timeshift adjustment
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][PANDAS][FINALIZE][TIMESHIFT_ADJUSTED] asset=%s timeshift=%s new_cutoff=%s cutoff_mask_true=%s",
                 getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
                 timeshift,
@@ -262,7 +262,7 @@ class ThetaDataBacktestingPandas(PandasData):
         normalized_for_cutoff = normalized_for_cutoff[cutoff_mask]

         # DEBUG-LOG: After cutoff filtering
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][AFTER_CUTOFF] asset=%s shape=%s index_range=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             frame.shape,
@@ -280,7 +280,7 @@ class ThetaDataBacktestingPandas(PandasData):
         raw_frame = frame.copy()

         # DEBUG-LOG: After normalization
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][NORMALIZED_INDEX] asset=%s shape=%s index_sample=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             frame.shape,
@@ -291,7 +291,7 @@ class ThetaDataBacktestingPandas(PandasData):
         target_index = pd.date_range(end=expected_last_dt, periods=requested_length, freq="D", tz=self.tzinfo)

         # DEBUG-LOG: Target index details
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][TARGET_INDEX] asset=%s target_length=%s target_range=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             len(target_index),
@@ -304,7 +304,7 @@ class ThetaDataBacktestingPandas(PandasData):
         frame = frame.reindex(target_index)

         # DEBUG-LOG: After reindex
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][AFTER_REINDEX] asset=%s shape=%s columns=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             frame.shape,
@@ -318,7 +318,7 @@ class ThetaDataBacktestingPandas(PandasData):
         placeholder_mask = frame.isna().all(axis=1)

         # DEBUG-LOG: Placeholder mask computation
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][PLACEHOLDER_MASK] asset=%s placeholder_true=%s placeholder_false=%s value_columns=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             int(placeholder_mask.sum()) if hasattr(placeholder_mask, 'sum') else 'N/A',
@@ -359,7 +359,7 @@ class ThetaDataBacktestingPandas(PandasData):
         # DEBUG-LOG: Final missing flag state
         try:
             missing_count = int(frame["missing"].sum())
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][PANDAS][FINALIZE][MISSING_FINAL] asset=%s missing_true=%s missing_false=%s total_rows=%s",
                 getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
                 missing_count,
@@ -367,14 +367,14 @@ class ThetaDataBacktestingPandas(PandasData):
                 len(frame)
             )
         except Exception as e:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][PANDAS][FINALIZE][MISSING_FINAL] asset=%s error=%s",
                 getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
                 str(e)
             )

         # DEBUG-LOG: Return value
-        logger.
+        logger.debug(
             "[THETA][DEBUG][PANDAS][FINALIZE][RETURN] asset=%s shape=%s columns=%s index_range=%s",
             getattr(asset, 'symbol', asset) if asset else 'UNKNOWN',
             frame.shape,
@@ -451,7 +451,7 @@ class ThetaDataBacktestingPandas(PandasData):
         existing_end = existing_meta.get("end")

         # DEBUG-LOG: Cache validation entry
-        logger.
+        logger.debug(
             "[DEBUG][BACKTEST][THETA][DEBUG][PANDAS][CACHE_VALIDATION][ENTRY] asset=%s timestep=%s | "
             "REQUESTED: start=%s start_threshold=%s end_requirement=%s length=%d | "
             "EXISTING: start=%s end=%s rows=%d",
@@ -472,7 +472,7 @@ class ThetaDataBacktestingPandas(PandasData):
         )

         # DEBUG-LOG: Start validation result
-        logger.
+        logger.debug(
             "[DEBUG][BACKTEST][THETA][DEBUG][PANDAS][START_VALIDATION] asset=%s | "
             "start_ok=%s | "
             "existing_start=%s start_threshold=%s | "
@@ -489,7 +489,7 @@ class ThetaDataBacktestingPandas(PandasData):
         end_ok = True

         # DEBUG-LOG: End validation entry
-        logger.
+        logger.debug(
             "[DEBUG][BACKTEST][THETA][DEBUG][PANDAS][END_VALIDATION][ENTRY] asset=%s | "
             "end_requirement=%s existing_end=%s tail_placeholder=%s",
             asset_separated.symbol if hasattr(asset_separated, 'symbol') else str(asset_separated),
@@ -501,7 +501,7 @@ class ThetaDataBacktestingPandas(PandasData):
         if end_requirement is not None:
             if existing_end is None:
                 end_ok = False
-                logger.
+                logger.debug(
                     "[DEBUG][BACKTEST][THETA][DEBUG][PANDAS][END_VALIDATION][RESULT] asset=%s | "
                     "end_ok=FALSE | reason=existing_end_is_None",
                     asset_separated.symbol if hasattr(asset_separated, 'symbol') else str(asset_separated)
@@ -520,7 +520,7 @@ class ThetaDataBacktestingPandas(PandasData):

                 if existing_end_cmp > end_requirement_cmp:
                     end_ok = True
-                    logger.
+                    logger.debug(
                         "[DEBUG][BACKTEST][THETA][DEBUG][PANDAS][END_VALIDATION][RESULT] asset=%s | "
                         "end_ok=TRUE | reason=existing_end_exceeds_requirement | "
                         "existing_end=%s end_requirement=%s ts_unit=%s",
@@ -535,7 +535,7 @@ class ThetaDataBacktestingPandas(PandasData):
                     placeholder_empty_fetch = tail_placeholder and existing_meta.get("empty_fetch")
                     end_ok = (not tail_placeholder) or placeholder_on_weekend or placeholder_empty_fetch

-                    logger.
+                    logger.debug(
                         "[DEBUG][BACKTEST][THETA][DEBUG][PANDAS][END_VALIDATION][EXACT_MATCH] asset=%s | "
                         "existing_end == end_requirement | "
                         "weekday=%s placeholder_on_weekend=%s placeholder_empty_fetch=%s | "
@@ -549,7 +549,7 @@ class ThetaDataBacktestingPandas(PandasData):
                     )
                 else:
                     end_ok = False
-                    logger.
+                    logger.debug(
                         "[DEBUG][BACKTEST][THETA][DEBUG][PANDAS][END_VALIDATION][RESULT] asset=%s | "
                         "end_ok=FALSE | reason=existing_end_less_than_requirement | "
                         "existing_end=%s end_requirement=%s ts_unit=%s",
@@ -566,7 +566,7 @@ class ThetaDataBacktestingPandas(PandasData):
         )

         # DEBUG-LOG: Final cache decision
-        logger.
+        logger.debug(
             "[DEBUG][BACKTEST][THETA][DEBUG][PANDAS][CACHE_DECISION] asset=%s | "
             "cache_covers=%s | "
             "start_ok=%s rows_ok=%s (existing=%d >= requested=%d) end_ok=%s",
@@ -586,7 +586,7 @@ class ThetaDataBacktestingPandas(PandasData):
             and expiration_dt == end_requirement
             and not existing_meta.get("expiration_notice")
         ):
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][THETADATA-PANDAS] Reusing cached data for %s/%s through option expiry %s.",
                 asset_separated,
                 quote_asset,
@@ -702,7 +702,7 @@ class ThetaDataBacktestingPandas(PandasData):
                 and expiration_dt == end_requirement
             )
             if expired_reason:
-                logger.
+                logger.debug(
                     "[THETA][DEBUG][THETADATA-PANDAS] No new OHLC rows for %s/%s (%s); option expired on %s. Keeping cached data.",
                     asset_separated,
                     quote_asset,
@@ -851,9 +851,8 @@ class ThetaDataBacktestingPandas(PandasData):
         bars = self._parse_source_symbol_bars(response, asset, quote=quote)
         final_df = getattr(bars, "df", None)
         final_rows = len(final_df) if final_df is not None else 0
-        message = (
+        logger.debug(
-            "[THETA][DEBUG][FETCH][THETA][DEBUG][PANDAS][FINAL] asset=%s quote=%s length=%s timestep=%s timeshift=%s current_dt=%s rows=%s"
-        ) % (
+            "[THETA][DEBUG][FETCH][THETA][DEBUG][PANDAS][FINAL] asset=%s quote=%s length=%s timestep=%s timeshift=%s current_dt=%s rows=%s",
             getattr(asset, "symbol", asset) if not isinstance(asset, str) else asset,
             getattr(quote, "symbol", quote),
             length,
@@ -862,8 +861,6 @@ class ThetaDataBacktestingPandas(PandasData):
             current_dt,
             final_rows,
         )
-        logger.warning(message)
-        print(message)
         return bars

     def get_last_price(self, asset, timestep="minute", quote=None, exchange=None, **kwargs) -> Union[float, Decimal, None]:
@@ -893,7 +890,7 @@ class ThetaDataBacktestingPandas(PandasData):
             return super().get_last_price(asset=asset, quote=quote, exchange=exchange)
         closes = close_series.dropna()
         if closes.empty:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][THETADATA-PANDAS] get_last_price found no valid closes for %s/%s; returning None (likely expired).",
                 asset,
                 quote or Asset("USD", "forex"),
@@ -957,10 +954,9 @@ class ThetaDataBacktestingPandas(PandasData):
             return_polars=False,
         )
         if bars is None or getattr(bars, "df", None) is None or bars.df.empty:
-            message = (
+            logger.debug(
                 "[THETA][DEBUG][FETCH][THETA][DEBUG][PANDAS] asset=%s quote=%s length=%s timestep=%s timeshift=%s current_dt=%s "
-                "rows=0 first_ts=None last_ts=None columns=None"
-            ) % (
+                "rows=0 first_ts=None last_ts=None columns=None",
                 getattr(asset, "symbol", asset) if not isinstance(asset, str) else asset,
                 getattr(quote, "symbol", quote),
                 length,
@@ -968,8 +964,6 @@ class ThetaDataBacktestingPandas(PandasData):
                 timeshift,
                 current_dt,
             )
-            logger.warning(message)
-            print(message)
             return bars

         df = bars.df
@@ -981,10 +975,10 @@ class ThetaDataBacktestingPandas(PandasData):
         else:
             first_ts = df.index[0]
             last_ts = df.index[-1]
-        message = (
+
+        logger.debug(
             "[THETA][DEBUG][FETCH][THETA][DEBUG][PANDAS] asset=%s quote=%s length=%s timestep=%s timeshift=%s current_dt=%s rows=%s "
-            "first_ts=%s last_ts=%s columns=%s"
-        ) % (
+            "first_ts=%s last_ts=%s columns=%s",
             getattr(asset, "symbol", asset) if not isinstance(asset, str) else asset,
             getattr(quote, "symbol", quote),
             length,
@@ -996,8 +990,6 @@ class ThetaDataBacktestingPandas(PandasData):
             last_ts,
             columns,
         )
-        logger.warning(message)
-        print(message)
         return bars

     def get_quote(self, asset, timestep="minute", quote=None, exchange=None, **kwargs):
@@ -1026,7 +1018,7 @@ class ThetaDataBacktestingPandas(PandasData):

         # [INSTRUMENTATION] Log full asset details for options
         if hasattr(asset, 'asset_type') and asset.asset_type == Asset.AssetType.OPTION:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][OPTION_REQUEST] symbol=%s expiration=%s strike=%s right=%s current_dt=%s timestep=%s",
                 asset.symbol,
                 asset.expiration,
@@ -1036,7 +1028,7 @@ class ThetaDataBacktestingPandas(PandasData):
                 timestep
             )
         else:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][REQUEST] asset=%s current_dt=%s timestep=%s",
                 getattr(asset, "symbol", asset) if not isinstance(asset, str) else asset,
                 dt.isoformat() if hasattr(dt, 'isoformat') else dt,
@@ -1066,7 +1058,7 @@ class ThetaDataBacktestingPandas(PandasData):
             if isinstance(df.index, pd.DatetimeIndex) and df.index.tz is not None:
                 tz_info = str(df.index.tz)

-            logger.
+            logger.debug(
                 "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][DATAFRAME_STATE] asset=%s | total_rows=%d | timestep=%s | index_type=%s | timezone=%s",
                 getattr(asset, "symbol", asset),
                 len(df),
@@ -1079,7 +1071,7 @@ class ThetaDataBacktestingPandas(PandasData):
             if isinstance(df.index, pd.DatetimeIndex):
                 first_dt_str = df.index[0].isoformat() if hasattr(df.index[0], 'isoformat') else str(df.index[0])
                 last_dt_str = df.index[-1].isoformat() if hasattr(df.index[-1], 'isoformat') else str(df.index[-1])
-                logger.
+                logger.debug(
                     "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][DATETIME_RANGE] asset=%s | first_dt=%s | last_dt=%s | tz=%s",
                     getattr(asset, "symbol", asset),
                     first_dt_str,
@@ -1089,12 +1081,12 @@ class ThetaDataBacktestingPandas(PandasData):

             # CRITICAL: Show tail with explicit datetime index to catch time-travel bug
             if debug_enabled and len(available_cols) > 0:
-                logger.
+                logger.debug(
                     "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][DATAFRAME_HEAD] asset=%s | first_5_rows (with datetime index):\n%s",
                     getattr(asset, "symbol", asset),
                     head_df[available_cols].to_string()
                 )
-                logger.
+                logger.debug(
                     "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][DATAFRAME_TAIL] asset=%s | last_5_rows (with datetime index):\n%s",
                     getattr(asset, "symbol", asset),
                     tail_df[available_cols].to_string()
@@ -1102,18 +1094,18 @@ class ThetaDataBacktestingPandas(PandasData):

                 # Show tail datetime values explicitly
                 tail_datetimes = [dt.isoformat() if hasattr(dt, 'isoformat') else str(dt) for dt in tail_df.index]
-                logger.
+                logger.debug(
                     "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][TAIL_DATETIMES] asset=%s | tail_index=%s",
                     getattr(asset, "symbol", asset),
                     tail_datetimes
                 )
             else:
-                logger.
+                logger.debug(
                     "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][DATAFRAME_STATE] asset=%s | EMPTY_DATAFRAME",
                     getattr(asset, "symbol", asset)
                 )
         else:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][DATAFRAME_STATE] asset=%s | NO_DATA_FOUND_IN_STORE",
                 getattr(asset, "symbol", asset)
             )
@@ -1121,9 +1113,8 @@ class ThetaDataBacktestingPandas(PandasData):
         quote_obj = super().get_quote(asset=asset, quote=quote, exchange=exchange)

         # [INSTRUMENTATION] Final quote result with all details
-        message = (
+        logger.debug(
-            "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][RESULT] asset=%s quote=%s current_dt=%s bid=%s ask=%s mid=%s last=%s source=%s"
-        ) % (
+            "[THETA][DEBUG][QUOTE][THETA][DEBUG][PANDAS][RESULT] asset=%s quote=%s current_dt=%s bid=%s ask=%s mid=%s last=%s source=%s",
             getattr(asset, "symbol", asset) if not isinstance(asset, str) else asset,
             getattr(quote, "symbol", quote),
             dt,
@@ -1133,8 +1124,6 @@ class ThetaDataBacktestingPandas(PandasData):
             getattr(quote_obj, "last_price", None) if quote_obj else None,
             getattr(quote_obj, "source", None) if quote_obj else None,
         )
-        logger.warning(message)
-        print(message)
         return quote_obj

     def get_chains(self, asset):
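The same demotion runs through lumibot/tools/thetadata_helper.py below. To surface the demoted [THETA][DEBUG][...] lines in a backtest, the standard logging API is enough — this assumes lumibot's modules obtain their loggers via logging.getLogger(__name__) (common practice, not shown in this diff):

import logging

# Route records to the default stderr handler and lower the package logger's
# threshold so the debug instrumentation becomes visible again when needed.
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("lumibot").setLevel(logging.DEBUG)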
lumibot/tools/thetadata_helper.py +54 -54

@@ -98,7 +98,7 @@ def append_missing_markers(
     CONNECTION_DIAGNOSTICS["placeholder_writes"] = CONNECTION_DIAGNOSTICS.get("placeholder_writes", 0) + len(rows)

     # DEBUG-LOG: Placeholder injection
-    logger.
+    logger.debug(
         "[THETA][DEBUG][PLACEHOLDER][INJECT] count=%d dates=%s",
         len(rows),
         ", ".join(sorted({d.isoformat() for d in missing_dates}))
@@ -114,7 +114,7 @@ def append_missing_markers(
     else:
         df_all = pd.concat([df_all, placeholder_df]).sort_index()
         df_all = df_all[~df_all.index.duplicated(keep="last")]
-    logger.
+    logger.debug(
         "[THETA][DEBUG][THETADATA-CACHE] recorded %d placeholder day(s): %s",
         len(rows),
         ", ".join(sorted({d.isoformat() for d in missing_dates})),
@@ -140,7 +140,7 @@ def remove_missing_markers(
     if mask.any():
         removed_dates = sorted({ts.date().isoformat() for ts in df_all.index[mask]})
         df_all = df_all.loc[~mask]
-        logger.
+        logger.debug(
             "[THETA][DEBUG][THETADATA-CACHE] cleared %d placeholder row(s) for dates: %s",
             mask.sum(),
             ", ".join(removed_dates),
@@ -274,7 +274,7 @@ def get_price_data(
     try:
         fetched_remote = cache_manager.ensure_local_file(cache_file, payload=remote_payload)
         if fetched_remote:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][CACHE][REMOTE_DOWNLOAD] asset=%s timespan=%s datastyle=%s cache_file=%s",
                 asset,
                 timespan,
@@ -282,7 +282,7 @@ def get_price_data(
                 cache_file,
             )
     except Exception as exc:
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][REMOTE_DOWNLOAD_ERROR] asset=%s cache_file=%s error=%s",
             asset,
             cache_file,
@@ -290,7 +290,7 @@ def get_price_data(
         )

     # DEBUG-LOG: Cache file check
-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][CHECK] asset=%s timespan=%s datastyle=%s cache_file=%s exists=%s",
         asset,
         timespan,
@@ -311,7 +311,7 @@ def get_price_data(
         placeholder_rows = int(df_all["missing"].sum())

         # DEBUG-LOG: Cache load result
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][LOADED] asset=%s cached_rows=%d placeholder_rows=%d real_rows=%d",
             asset,
             cached_rows,
@@ -329,7 +329,7 @@ def get_price_data(
         )

         # Check if we need to get more data
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][DECISION_START] asset=%s | "
             "calling get_missing_dates(start=%s, end=%s)",
             asset.symbol if hasattr(asset, 'symbol') else str(asset),
@@ -339,7 +339,7 @@ def get_price_data(

     missing_dates = get_missing_dates(df_all, asset, start, end)

-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][DECISION_RESULT] asset=%s | "
         "missing_dates=%d | "
         "decision=%s",
@@ -363,7 +363,7 @@ def get_price_data(
     if df_all is not None and not df_all.empty:
         logger.info("ThetaData cache HIT for %s %s %s (%d rows).", asset, timespan, datastyle, len(df_all))
         # DEBUG-LOG: Cache hit
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][HIT] asset=%s timespan=%s datastyle=%s rows=%d start=%s end=%s",
             asset,
             timespan,
@@ -390,7 +390,7 @@ def get_price_data(
         # DEBUG-LOG: Entry to intraday filter
         rows_before_any_filter = len(df_all)
         max_ts_before_any_filter = df_all.index.max() if len(df_all) > 0 else None
-        logger.
+        logger.debug(
             "[THETA][DEBUG][FILTER][INTRADAY_ENTRY] asset=%s | "
             "rows_before=%d max_ts_before=%s | "
             "start_param=%s end_param=%s dt_param=%s dt_type=%s",
@@ -406,13 +406,13 @@ def get_price_data(
         # Convert date to datetime if needed
         if isinstance(start, datetime_module.date) and not isinstance(start, datetime_module.datetime):
             start = datetime_module.datetime.combine(start, datetime_module.time.min)
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][FILTER][DATE_CONVERSION] converted start from date to datetime: %s",
                 start.isoformat()
             )
         if isinstance(end, datetime_module.date) and not isinstance(end, datetime_module.datetime):
             end = datetime_module.datetime.combine(end, datetime_module.time.max)
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][FILTER][DATE_CONVERSION] converted end from date to datetime: %s",
                 end.isoformat()
             )
@@ -421,20 +421,20 @@ def get_price_data(
         if isinstance(end, datetime_module.datetime) and end.time() == datetime_module.time.min:
             # Convert end-of-period midnight to end-of-day
             end = datetime_module.datetime.combine(end.date(), datetime_module.time.max)
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][FILTER][MIDNIGHT_FIX] converted end from midnight to end-of-day: %s",
                 end.isoformat()
             )

         if start.tzinfo is None:
             start = LUMIBOT_DEFAULT_PYTZ.localize(start).astimezone(pytz.UTC)
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][FILTER][TZ_LOCALIZE] localized start to UTC: %s",
                 start.isoformat()
             )
         if end.tzinfo is None:
             end = LUMIBOT_DEFAULT_PYTZ.localize(end).astimezone(pytz.UTC)
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][FILTER][TZ_LOCALIZE] localized end to UTC: %s",
                 end.isoformat()
             )
@@ -445,7 +445,7 @@ def get_price_data(
         #
         # NEW APPROACH: Always return full [start, end] range from cache
         # Let Data/DataPolars.get_bars() handle look-ahead bias protection
-        logger.
+        logger.debug(
             "[THETA][DEBUG][FILTER][NO_DT_FILTER] asset=%s | "
             "using end=%s for upper bound (dt parameter ignored for cache retrieval)",
             asset.symbol if hasattr(asset, 'symbol') else str(asset),
@@ -455,7 +455,7 @@ def get_price_data(

     # DEBUG-LOG: After date range filtering, before missing removal
     if df_all is not None and not df_all.empty:
-        logger.
+        logger.debug(
             "[THETA][DEBUG][FILTER][AFTER] asset=%s rows=%d first_ts=%s last_ts=%s dt_filter=%s",
             asset,
             len(df_all),
@@ -470,7 +470,7 @@ def get_price_data(

     # DEBUG-LOG: Before pandas return
     if df_all is not None and not df_all.empty:
-        logger.
+        logger.debug(
             "[THETA][DEBUG][RETURN][PANDAS] asset=%s rows=%d first_ts=%s last_ts=%s",
             asset,
             len(df_all),
@@ -482,7 +482,7 @@ def get_price_data(
     logger.info("ThetaData cache MISS for %s %s %s; fetching %d interval(s) from ThetaTerminal.", asset, timespan, datastyle, len(missing_dates))

     # DEBUG-LOG: Cache miss
-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][MISS] asset=%s timespan=%s datastyle=%s missing_intervals=%d first=%s last=%s",
         asset,
         timespan,
@@ -542,7 +542,7 @@ def get_price_data(
             and all(day > asset.expiration for day in requested_dates)
         )
         if expired_range:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][THETADATA-EOD] Option %s expired on %s; cache reuse for range %s -> %s.",
                 asset,
                 asset.expiration,
@@ -550,7 +550,7 @@ def get_price_data(
                 fetch_end,
             )
         else:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][THETADATA-EOD] No rows returned for %s between %s and %s; recording placeholders.",
                 asset,
                 fetch_start,
@@ -680,7 +680,7 @@ def get_price_data(
             and chunk_end.date() >= asset.expiration
         )
         if expired_chunk:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][THETADATA] Option %s considered expired on %s; reusing cached data between %s and %s.",
                 asset,
                 asset.expiration,
@@ -848,7 +848,7 @@ def get_missing_dates(df_all, asset, start, end):
         A list of dates that we need to get data for
     """
     # DEBUG-LOG: Entry to get_missing_dates
-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][MISSING_DATES_CHECK] asset=%s | "
         "start=%s end=%s | "
         "cache_rows=%d",
@@ -860,7 +860,7 @@ def get_missing_dates(df_all, asset, start, end):

     trading_dates = get_trading_dates(asset, start, end)

-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][TRADING_DATES] asset=%s | "
         "trading_dates_count=%d first=%s last=%s",
         asset.symbol if hasattr(asset, 'symbol') else str(asset),
@@ -870,7 +870,7 @@ def get_missing_dates(df_all, asset, start, end):
     )

     if df_all is None or not len(df_all):
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][EMPTY] asset=%s | "
             "cache is EMPTY -> all %d trading days are missing",
             asset.symbol if hasattr(asset, 'symbol') else str(asset),
@@ -886,7 +886,7 @@ def get_missing_dates(df_all, asset, start, end):
     cached_first = min(dates) if len(dates) > 0 else None
     cached_last = max(dates) if len(dates) > 0 else None

-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][CACHED_DATES] asset=%s | "
         "cached_dates_count=%d first=%s last=%s",
         asset.symbol if hasattr(asset, 'symbol') else str(asset),
@@ -904,7 +904,7 @@ def get_missing_dates(df_all, asset, start, end):
     after_expiry_filter = len(missing_dates)

     if before_expiry_filter != after_expiry_filter:
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][OPTION_EXPIRY_FILTER] asset=%s | "
             "filtered %d dates after expiration=%s | "
             "missing_dates: %d -> %d",
@@ -915,7 +915,7 @@ def get_missing_dates(df_all, asset, start, end):
             after_expiry_filter
         )

-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][MISSING_RESULT] asset=%s | "
         "missing_dates_count=%d | "
         "first_missing=%s last_missing=%s",
@@ -931,7 +931,7 @@ def get_missing_dates(df_all, asset, start, end):
 def load_cache(cache_file):
     """Load the data from the cache file and return a DataFrame with a DateTimeIndex"""
     # DEBUG-LOG: Start loading cache
-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][LOAD_START] cache_file=%s | "
         "exists=%s size_bytes=%d",
         cache_file.name,
@@ -940,7 +940,7 @@ def load_cache(cache_file):
     )

     if not cache_file.exists():
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][LOAD_MISSING] cache_file=%s | returning=None",
             cache_file.name,
         )
@@ -949,7 +949,7 @@ def load_cache(cache_file):
     df = pd.read_parquet(cache_file, engine='pyarrow')

     rows_after_read = len(df)
-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][LOAD_READ] cache_file=%s | "
         "rows_read=%d columns=%s",
         cache_file.name,
@@ -969,7 +969,7 @@ def load_cache(cache_file):
     if df.index.tzinfo is None:
         # Set the timezone to UTC
         df.index = df.index.tz_localize("UTC")
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][LOAD_TZ] cache_file=%s | "
             "localized index to UTC",
             cache_file.name
@@ -981,7 +981,7 @@ def load_cache(cache_file):
     max_ts = df.index.max() if len(df) > 0 else None
     placeholder_count = int(df["missing"].sum()) if "missing" in df.columns else 0

-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][LOAD_SUCCESS] cache_file=%s | "
         "total_rows=%d real_rows=%d placeholders=%d | "
         "min_ts=%s max_ts=%s",
@@ -999,7 +999,7 @@ def load_cache(cache_file):
 def update_cache(cache_file, df_all, df_cached, missing_dates=None, remote_payload=None):
     """Update the cache file with the new data and optional placeholder markers."""
     # DEBUG-LOG: Entry to update_cache
-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][UPDATE_ENTRY] cache_file=%s | "
         "df_all_rows=%d df_cached_rows=%d missing_dates=%d",
         cache_file.name,
@@ -1010,13 +1010,13 @@ def update_cache(cache_file, df_all, df_cached, missing_dates=None, remote_paylo

     if df_all is None or len(df_all) == 0:
         if not missing_dates:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][CACHE][UPDATE_SKIP] cache_file=%s | "
                 "df_all is empty and no missing_dates, skipping cache update",
                 cache_file.name
             )
             return
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][UPDATE_PLACEHOLDERS_ONLY] cache_file=%s | "
             "df_all is empty, writing %d placeholders",
             cache_file.name,
@@ -1026,7 +1026,7 @@ def update_cache(cache_file, df_all, df_cached, missing_dates=None, remote_paylo
     else:
         df_working = ensure_missing_column(df_all.copy())
         if missing_dates:
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][CACHE][UPDATE_APPEND_PLACEHOLDERS] cache_file=%s | "
                 "appending %d placeholders to %d existing rows",
                 cache_file.name,
@@ -1036,7 +1036,7 @@ def update_cache(cache_file, df_all, df_cached, missing_dates=None, remote_paylo
             df_working = append_missing_markers(df_working, missing_dates)

     if df_working is None or len(df_working) == 0:
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][UPDATE_SKIP_EMPTY] cache_file=%s | "
             "df_working is empty after processing, skipping write",
             cache_file.name
@@ -1048,7 +1048,7 @@ def update_cache(cache_file, df_all, df_cached, missing_dates=None, remote_paylo
         df_cached_cmp = ensure_missing_column(df_cached.copy())

         if df_cached_cmp is not None and df_working.equals(df_cached_cmp):
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][CACHE][UPDATE_NO_CHANGES] cache_file=%s | "
                 "df_working equals df_cached (rows=%d), skipping write",
                 cache_file.name,
@@ -1069,7 +1069,7 @@ def update_cache(cache_file, df_all, df_cached, missing_dates=None, remote_paylo
             return None
         return value.isoformat() if hasattr(value, "isoformat") else value

-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][UPDATE_WRITE] cache_file=%s | "
         "total_rows=%d real_rows=%d placeholders=%d | "
         "min_ts=%s max_ts=%s",
@@ -1083,7 +1083,7 @@ def update_cache(cache_file, df_all, df_cached, missing_dates=None, remote_paylo

     df_to_save.to_parquet(cache_file, engine="pyarrow", compression="snappy")

-    logger.
+    logger.debug(
         "[THETA][DEBUG][CACHE][UPDATE_SUCCESS] cache_file=%s written successfully",
         cache_file.name
     )
@@ -1093,7 +1093,7 @@ def update_cache(cache_file, df_all, df_cached, missing_dates=None, remote_paylo
     try:
         cache_manager.on_local_update(cache_file, payload=remote_payload)
     except Exception as exc:
-        logger.
+        logger.debug(
             "[THETA][DEBUG][CACHE][REMOTE_UPLOAD_ERROR] cache_file=%s error=%s",
             cache_file,
             exc,
@@ -1469,7 +1469,7 @@ def get_request(url: str, headers: dict, querystring: dict, username: str, passw
     CONNECTION_DIAGNOSTICS["network_requests"] += 1

     # DEBUG-LOG: API request
-    logger.
+    logger.debug(
         "[THETA][DEBUG][API][REQUEST] url=%s params=%s",
         request_url if next_page_url else url,
         request_params if request_params else querystring
@@ -1480,7 +1480,7 @@ def get_request(url: str, headers: dict, querystring: dict, username: str, passw
             if response.status_code == 472:
                 logger.warning(f"No data available for request: {response.text[:200]}")
                 # DEBUG-LOG: API response - no data
-                logger.
+                logger.debug(
                     "[THETA][DEBUG][API][RESPONSE] status=472 result=NO_DATA"
                 )
                 return None
@@ -1488,7 +1488,7 @@ def get_request(url: str, headers: dict, querystring: dict, username: str, passw
             elif response.status_code != 200:
                 logger.warning(f"Non-200 status code {response.status_code}: {response.text[:200]}")
                 # DEBUG-LOG: API response - error
-                logger.
+                logger.debug(
                     "[THETA][DEBUG][API][RESPONSE] status=%d result=ERROR",
                     response.status_code
                 )
@@ -1498,7 +1498,7 @@ def get_request(url: str, headers: dict, querystring: dict, username: str, passw

             # DEBUG-LOG: API response - success
             response_rows = len(json_resp.get("response", [])) if isinstance(json_resp.get("response"), list) else 0
-            logger.
+            logger.debug(
                 "[THETA][DEBUG][API][RESPONSE] status=200 rows=%d has_next_page=%s",
                 response_rows,
                 bool(json_resp.get("header", {}).get("next_page"))
@@ -1524,7 +1524,7 @@ def get_request(url: str, headers: dict, querystring: dict, username: str, passw
             logger.warning(f"Exception during request (attempt {counter + 1}): {e}")
             check_connection(username=username, password=password, wait_for_connection=True)
             if counter == 0:
-                logger.
+                logger.debug("[THETA][DEBUG][API][WAIT] Allowing ThetaTerminal to initialize for 5s before retry.")
                 time.sleep(5)

             counter += 1
@@ -1609,7 +1609,7 @@ def get_historical_eod_data(asset: Asset, start_dt: datetime, end_dt: datetime,
     headers = {"Accept": "application/json"}

     # DEBUG-LOG: EOD data request
-    logger.
+    logger.debug(
         "[THETA][DEBUG][EOD][REQUEST] asset=%s start=%s end=%s datastyle=%s",
         asset,
         start_date,
@@ -1622,7 +1622,7 @@ def get_historical_eod_data(asset: Asset, start_dt: datetime, end_dt: datetime,
                             username=username, password=password)
     if json_resp is None:
         # DEBUG-LOG: EOD data response - no data
-        logger.
+        logger.debug(
             "[THETA][DEBUG][EOD][RESPONSE] asset=%s result=NO_DATA",
             asset
         )
@@ -1630,7 +1630,7 @@ def get_historical_eod_data(asset: Asset, start_dt: datetime, end_dt: datetime,

     # DEBUG-LOG: EOD data response - success
     response_rows = len(json_resp.get("response", [])) if isinstance(json_resp.get("response"), list) else 0
-    logger.
+    logger.debug(
         "[THETA][DEBUG][EOD][RESPONSE] asset=%s rows=%d",
         asset,
         response_rows
@@ -1786,7 +1786,7 @@ def get_historical_data(asset: Asset, start_dt: datetime, end_dt: datetime, ivl:
     headers = {"Accept": "application/json"}

     # DEBUG-LOG: Intraday data request
-    logger.
+    logger.debug(
         "[THETA][DEBUG][INTRADAY][REQUEST] asset=%s start=%s end=%s ivl=%d datastyle=%s include_after_hours=%s",
         asset,
         start_date,
@@ -1802,7 +1802,7 @@ def get_historical_data(asset: Asset, start_dt: datetime, end_dt: datetime, ivl:
                             username=username, password=password)
     if json_resp is None:
         # DEBUG-LOG: Intraday data response - no data
-        logger.
+        logger.debug(
             "[THETA][DEBUG][INTRADAY][RESPONSE] asset=%s result=NO_DATA",
             asset
         )
@@ -1810,7 +1810,7 @@ def get_historical_data(asset: Asset, start_dt: datetime, end_dt: datetime, ivl:

     # DEBUG-LOG: Intraday data response - success
     response_rows = len(json_resp.get("response", [])) if isinstance(json_resp.get("response"), list) else 0
-    logger.
+    logger.debug(
         "[THETA][DEBUG][INTRADAY][RESPONSE] asset=%s rows=%d",
         asset,
         response_rows
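The RECORD hunks below update the sha256/size entries for the two rewritten modules. Wheel RECORD hashes are the urlsafe-base64, unpadded SHA-256 digest of each file (PEP 376/427), so the new values can be checked with a sketch like this (the path is the 4.2.1 file as unpacked from the wheel):

import base64
import hashlib

def record_hash(path: str) -> str:
    # RECORD stores "sha256=" + urlsafe base64 of the digest, '=' padding stripped
    with open(path, "rb") as fh:
        digest = hashlib.sha256(fh.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# record_hash("lumibot/tools/thetadata_helper.py") should match the RECORD entry
# below: sha256=z9x26RXazzVmne7a1AcAdxKW2IWTpOLcJ7auEVZikcE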
{lumibot-4.2.0.dist-info → lumibot-4.2.1.dist-info}/RECORD +7 -7

@@ -14,7 +14,7 @@ lumibot/backtesting/interactive_brokers_rest_backtesting.py,sha256=5HJ_sPX0uOUg-
 lumibot/backtesting/pandas_backtesting.py,sha256=m-NvT4o-wFQjaZft6TXULzeZBrskO_7Z-jfy9AIkyAY,388
 lumibot/backtesting/polygon_backtesting.py,sha256=u9kif_2_7k0P4-KDvbHhaMfSoBVejUUX7fh9H3PCVE0,12350
 lumibot/backtesting/thetadata_backtesting.py,sha256=Xcz5f-4zTkKgWWcktNzItH2vrr8CysIMQWKKqLwugbA,345
-lumibot/backtesting/thetadata_backtesting_pandas.py,sha256=
+lumibot/backtesting/thetadata_backtesting_pandas.py,sha256=14XMsbQCa3uE_iS2nvTlGUXkn9kvI0cDSE8mqdKnDEg,51750
 lumibot/backtesting/yahoo_backtesting.py,sha256=LT2524mGlrUSq1YSRnUqGW4-Xcq4USgRv2EhnV_zfs4,502
 lumibot/brokers/__init__.py,sha256=MGWKHeH3mqseYRL7u-KX1Jp2x9EaFO4Ol8sfNSxzu1M,404
 lumibot/brokers/alpaca.py,sha256=VQ17idfqiEFb2JCqqdMGmbvF789L7_PpsCbudiFRzmg,61595
@@ -132,7 +132,7 @@ lumibot/tools/polygon_helper_async.py,sha256=YHDXa9kmkkn8jh7hToY6GP5etyXS9Tj-uky
 lumibot/tools/polygon_helper_polars_optimized.py,sha256=NaIZ-5Av-G2McPEKHyJ-x65W72W_Agnz4lRgvXfQp8c,30415
 lumibot/tools/projectx_helpers.py,sha256=EIemLfbG923T_RBV_i6s6A9xgs7dt0et0oCnhFwdWfA,58299
 lumibot/tools/schwab_helper.py,sha256=CXnYhgsXOIb5MgmIYOp86aLxsBF9oeVrMGrjwl_GEv0,11768
-lumibot/tools/thetadata_helper.py,sha256=
+lumibot/tools/thetadata_helper.py,sha256=z9x26RXazzVmne7a1AcAdxKW2IWTpOLcJ7auEVZikcE,79407
 lumibot/tools/types.py,sha256=x-aQBeC6ZTN2-pUyxyo69Q0j5e0c_swdfe06kfrWSVc,1978
 lumibot/tools/yahoo_helper.py,sha256=htcKKkuktatIckVKfLc_ms0X75mXColysQhrZW244z8,19497
 lumibot/tools/yahoo_helper_polars_optimized.py,sha256=g9xBN-ReHSW4Aj9EMU_OncBXVS1HpfL8LTHit9ZxFY4,7417
@@ -142,7 +142,7 @@ lumibot/traders/trader.py,sha256=KMif3WoZtnSxA0BzoK3kvkTITNELrDFIortx1BYBv8s,967
 lumibot/trading_builtins/__init__.py,sha256=vH2QL5zLjL3slfEV1YW-BvQHtEYLCFkIWTZDfh3y8LE,87
 lumibot/trading_builtins/custom_stream.py,sha256=8_XiPT0JzyXrgnXCXoovGGUrWEfnG4ohIYMPfB_Nook,5264
 lumibot/trading_builtins/safe_list.py,sha256=IIjZOHSiZYK25A4WBts0oJaZNOJDsjZL65MOSHhE3Ig,1975
-lumibot-4.2.
+lumibot-4.2.1.dist-info/licenses/LICENSE,sha256=fYhGIyxjyNXACgpNQS3xxpxDOaVOWRVxZMCRbsDv8k0,35130
 tests/__init__.py,sha256=3-VoT-nAuqMfwufd4ceN6fXaHl_zCfDCSXJOTp1ywYQ,393
 tests/conftest.py,sha256=UBw_2fx7r6TZPKus2b1Qxrzmd4bg8EEBnX1vCHUuSVA,3311
 tests/fixtures.py,sha256=wOHQsh1SGHnXe_PGi6kDWI30CS_Righi7Ig7vwSEKT4,9082
@@ -280,7 +280,7 @@ tests/backtest/test_thetadata.py,sha256=xWYfC9C4EhbMDb29qyZWHO3sSWaLIPzzvcMbHCt5
 tests/backtest/test_thetadata_comprehensive.py,sha256=-gN3xLJcJtlB-k4vlaK82DCZDGDmr0LNZZDzn-aN3l4,26120
 tests/backtest/test_thetadata_vs_polygon.py,sha256=dZqsrOx3u3cz-1onIO6o5BDRjI1ey7U9vIkZupfXoig,22831
 tests/backtest/test_yahoo.py,sha256=2FguUTUMC9_A20eqxnZ17rN3tT9n6hyvJHaL98QKpqY,3443
-lumibot-4.2.
-lumibot-4.2.
-lumibot-4.2.
-lumibot-4.2.
+lumibot-4.2.1.dist-info/METADATA,sha256=PiXutqcizL9FWqUSokKsUMfqKV3vMRzh4jjBviIV_oI,12092
+lumibot-4.2.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+lumibot-4.2.1.dist-info/top_level.txt,sha256=otUnUjDFVASauEDiTiAzNgMyqQ1B6jjS3QqqP-WSx38,14
+lumibot-4.2.1.dist-info/RECORD,,