@heylemon/lemonade 0.2.2 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/build-info.json +3 -3
- package/dist/canvas-host/a2ui/.bundle.hash +1 -1
- package/package.json +1 -1
- package/skills/brave-search/SKILL.md +57 -0
- package/skills/brave-search/content.js +86 -0
- package/skills/brave-search/package.json +14 -0
- package/skills/brave-search/search.js +179 -0
- package/skills/caldav-calendar/SKILL.md +104 -0
- package/skills/frontend-design/SKILL.md +39 -0
- package/skills/self-improving-agent/SKILL.md +128 -0
- package/skills/stock-analysis/SKILL.md +131 -0
- package/skills/stock-analysis/scripts/analyze_stock.py +2532 -0
- package/skills/stock-analysis/scripts/dividends.py +365 -0
- package/skills/stock-analysis/scripts/hot_scanner.py +565 -0
- package/skills/stock-analysis/scripts/portfolio.py +528 -0
- package/skills/stock-analysis/scripts/rumor_scanner.py +330 -0
- package/skills/stock-analysis/scripts/watchlist.py +318 -0
- package/skills/tavily-search/SKILL.md +38 -0
- package/skills/tavily-search/scripts/extract.mjs +59 -0
- package/skills/tavily-search/scripts/search.mjs +101 -0
- package/skills/youtube-watcher/SKILL.md +46 -0
- package/skills/youtube-watcher/scripts/get_transcript.py +81 -0
|
@@ -0,0 +1,2532 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# /// script
|
|
3
|
+
# requires-python = ">=3.10"
|
|
4
|
+
# dependencies = [
|
|
5
|
+
# "yfinance>=0.2.40",
|
|
6
|
+
# "pandas>=2.0.0",
|
|
7
|
+
# "fear-and-greed>=0.4",
|
|
8
|
+
# "edgartools>=2.0.0",
|
|
9
|
+
# "feedparser>=6.0.0",
|
|
10
|
+
# ]
|
|
11
|
+
# ///
|
|
12
|
+
"""
|
|
13
|
+
Stock analysis using Yahoo Finance data.
|
|
14
|
+
|
|
15
|
+
Usage:
|
|
16
|
+
uv run analyze_stock.py TICKER [TICKER2 ...] [--output text|json] [--verbose]
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
import argparse
|
|
20
|
+
import asyncio
|
|
21
|
+
import json
|
|
22
|
+
import sys
|
|
23
|
+
import time
|
|
24
|
+
from dataclasses import dataclass, asdict
|
|
25
|
+
from datetime import datetime
|
|
26
|
+
from typing import Literal
|
|
27
|
+
|
|
28
|
+
import pandas as pd
|
|
29
|
+
import yfinance as yf
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# Top 20 supported cryptocurrencies
# (Yahoo Finance "-USD" pair tickers; used to validate crypto tickers)
SUPPORTED_CRYPTOS = {
    "BTC-USD", "ETH-USD", "BNB-USD", "SOL-USD", "XRP-USD",
    "ADA-USD", "DOGE-USD", "AVAX-USD", "DOT-USD", "MATIC-USD",
    "LINK-USD", "ATOM-USD", "UNI-USD", "LTC-USD", "BCH-USD",
    "XLM-USD", "ALGO-USD", "VET-USD", "FIL-USD", "NEAR-USD",
}

# Crypto category mapping for sector-like analysis
# (consumed by analyze_crypto_fundamentals; unknown tickers fall back to
# "Unknown" at the lookup site)
CRYPTO_CATEGORIES = {
    "BTC-USD": "Store of Value",
    "ETH-USD": "Smart Contract L1",
    "BNB-USD": "Exchange Token",
    "SOL-USD": "Smart Contract L1",
    "XRP-USD": "Payment",
    "ADA-USD": "Smart Contract L1",
    "DOGE-USD": "Meme",
    "AVAX-USD": "Smart Contract L1",
    "DOT-USD": "Interoperability",
    "MATIC-USD": "Layer 2",
    "LINK-USD": "Oracle",
    "ATOM-USD": "Interoperability",
    "UNI-USD": "DeFi",
    "LTC-USD": "Payment",
    "BCH-USD": "Payment",
    "XLM-USD": "Payment",
    "ALGO-USD": "Smart Contract L1",
    "VET-USD": "Enterprise",
    "FIL-USD": "Storage",
    "NEAR-USD": "Smart Contract L1",
}
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def detect_asset_type(ticker: str) -> Literal["stock", "crypto"]:
    """Classify a ticker as "crypto" or "stock" from its shape.

    Anything of the form ``<LETTERS>-USD`` (e.g. ``BTC-USD``) is treated
    as a crypto pair; every other symbol is assumed to be a stock.
    """
    normalized = ticker.upper()
    symbol, dash, quote = normalized.rpartition("-")
    # Only a pure-alphabetic base paired against USD counts as crypto.
    if dash and quote == "USD" and symbol.isalpha():
        return "crypto"
    return "stock"
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
@dataclass
class StockData:
    """Raw per-ticker payload fetched from Yahoo Finance.

    Produced by fetch_stock_data() and consumed by the analyze_* functions.
    """
    ticker: str
    info: dict  # yfinance Ticker.info mapping (price, margins, targets, ...)
    earnings_history: pd.DataFrame | None  # Ticker.earnings_dates, if available
    analyst_info: dict | None  # {"recommendations", "analyst_price_targets"} or None
    price_history: pd.DataFrame | None  # ~1y daily OHLCV history, if available
    asset_type: Literal["stock", "crypto"] = "stock"  # via detect_asset_type()
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
@dataclass
class CryptoFundamentals:
    """Crypto-specific fundamentals (replaces P/E, margins for crypto)."""
    market_cap: float | None
    market_cap_rank: str  # "large", "mid", "small"
    volume_24h: float | None
    circulating_supply: float | None
    category: str | None  # "Smart Contract L1", "DeFi", etc.
    btc_correlation: float | None  # 30-day correlation to BTC
    score: float  # composite score, clamped to [-1.0, 1.0] by the producer
    explanation: str  # human-readable summary of the sub-scores
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
@dataclass
class EarningsSurprise:
    """Most recent quarter's EPS beat/miss, scored on [-1.0, 1.0]."""
    score: float
    explanation: str  # e.g. "Beat by 5.2%"
    actual_eps: float | None = None
    expected_eps: float | None = None
    surprise_pct: float | None = None  # (actual - expected) / |expected| * 100
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
@dataclass
class Fundamentals:
    """Fundamental-metric assessment for a stock, scored on [-1.0, 1.0]."""
    score: float
    key_metrics: dict  # e.g. pe_ratio, operating_margin, revenue_growth_yoy
    explanation: str  # "; "-joined notes on notable metrics
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
@dataclass
class AnalystSentiment:
    """Wall Street consensus rating plus price-target upside."""
    score: float | None  # None when no analyst coverage / analysis error
    summary: str
    consensus_rating: str | None = None  # e.g. "Strong Buy"
    price_target: float | None = None  # mean analyst target
    current_price: float | None = None
    upside_pct: float | None = None  # (target - current) / current * 100
    num_analysts: int | None = None
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
@dataclass
class HistoricalPatterns:
    """Track record over recent quarters: beat rate and average price reaction."""
    score: float
    pattern_desc: str  # e.g. "3/4 quarters beat expectations, avg reaction +1.2%"
    beats_last_4q: int | None = None
    avg_reaction_pct: float | None = None  # mean earnings-day open-to-close move
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
@dataclass
class MarketContext:
    """Broad market backdrop: VIX, index trends, and safe-haven flows."""
    vix_level: float
    vix_status: str  # "calm", "elevated", "fear"
    spy_trend_10d: float  # SPY % change over ~10 trading days
    qqq_trend_10d: float  # QQQ % change over ~10 trading days
    market_regime: str  # "bull", "bear", "choppy"
    score: float
    explanation: str
    # Safe-haven indicators (v4.0.0)
    gld_change_5d: float | None = None  # Gold ETF % change
    tlt_change_5d: float | None = None  # Treasury ETF % change
    uup_change_5d: float | None = None  # USD Index ETF % change
    risk_off_detected: bool = False  # True if flight to safety detected
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
@dataclass
class SectorComparison:
    """Stock performance relative to its sector ETF."""
    sector_name: str
    industry_name: str
    stock_return_1m: float
    sector_return_1m: float
    relative_strength: float  # stock return minus sector return — TODO confirm at producer
    sector_trend: str  # "strong uptrend", "downtrend", etc.
    score: float
    explanation: str
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
@dataclass
class EarningsTiming:
    """Position of "now" relative to the earnings calendar, with risk caveats."""
    days_until_earnings: int | None
    days_since_earnings: int | None
    next_earnings_date: str | None
    last_earnings_date: str | None
    timing_flag: str  # "pre_earnings", "post_earnings", "safe"
    price_change_5d: float | None
    confidence_adjustment: float  # multiplier/offset applied by the caller — TODO confirm
    caveats: list[str]
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
@dataclass
class MomentumAnalysis:
    """Technical momentum snapshot: RSI, 52-week range position, volume."""
    rsi_14d: float | None
    rsi_status: str  # "overbought", "oversold", "neutral"
    price_vs_52w_low: float | None  # % above the 52-week low — assumed; verify at producer
    price_vs_52w_high: float | None  # % below the 52-week high — assumed; verify at producer
    near_52w_high: bool
    near_52w_low: bool
    volume_ratio: float | None  # recent vs average volume — assumed; verify at producer
    relative_strength_vs_sector: float | None
    score: float
    explanation: str
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
@dataclass
class SentimentAnalysis:
    """Composite sentiment built from up to five market/stock indicators."""
    score: float  # Overall -1.0 to 1.0
    explanation: str  # Human-readable summary

    # Sub-indicator scores
    fear_greed_score: float | None = None
    short_interest_score: float | None = None
    vix_structure_score: float | None = None
    insider_activity_score: float | None = None
    put_call_score: float | None = None

    # Raw data
    fear_greed_value: int | None = None  # 0-100
    fear_greed_status: str | None = None  # "Extreme Fear", etc.
    short_interest_pct: float | None = None
    days_to_cover: float | None = None
    vix_structure: str | None = None  # "contango", "backwardation", "flat"
    vix_slope: float | None = None
    insider_net_shares: int | None = None
    insider_net_value: float | None = None  # Millions USD
    put_call_ratio: float | None = None
    put_volume: int | None = None
    call_volume: int | None = None

    # Metadata
    indicators_available: int = 0  # how many of the five sub-indicators resolved
    data_freshness_warnings: list[str] | None = None
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
@dataclass
class Signal:
    """Final per-ticker output: recommendation plus supporting evidence."""
    ticker: str
    company_name: str
    recommendation: Literal["BUY", "HOLD", "SELL"]
    confidence: float
    final_score: float
    supporting_points: list[str]
    caveats: list[str]
    timestamp: str
    components: dict  # per-analyzer sub-results keyed by component name — TODO confirm schema
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def fetch_stock_data(ticker: str, verbose: bool = False) -> StockData | None:
    """Fetch stock data from Yahoo Finance with retry logic.

    Tries up to 3 times with exponential backoff (1s, 2s) on exceptions.
    Returns None immediately (no retry) when the ticker is unknown, i.e.
    the info payload lacks "regularMarketPrice". Earnings history, analyst
    info and price history are each best-effort: a failure there degrades
    the corresponding field to None rather than failing the whole fetch.
    """
    max_retries = 3
    for attempt in range(max_retries):
        try:
            if verbose:
                print(f"Fetching data for {ticker}... (attempt {attempt + 1}/{max_retries})", file=sys.stderr)

            stock = yf.Ticker(ticker)
            info = stock.info

            # Validate ticker: a missing market price means Yahoo does not
            # recognize the symbol — not a transient error, so no retry.
            if not info or "regularMarketPrice" not in info:
                return None

            # Fetch earnings history (best-effort)
            try:
                earnings_history = stock.earnings_dates
            except Exception:
                earnings_history = None

            # Fetch analyst info (best-effort)
            try:
                analyst_info = {
                    "recommendations": stock.recommendations,
                    "analyst_price_targets": stock.analyst_price_targets,
                }
            except Exception:
                analyst_info = None

            # Fetch price history (1 year for historical patterns; best-effort)
            try:
                price_history = stock.history(period="1y")
            except Exception:
                price_history = None

            return StockData(
                ticker=ticker,
                info=info,
                earnings_history=earnings_history,
                analyst_info=analyst_info,
                price_history=price_history,
                asset_type=detect_asset_type(ticker),
            )

        except Exception as e:
            if attempt < max_retries - 1:
                wait_time = 2 ** attempt  # Exponential backoff
                if verbose:
                    print(f"Error fetching {ticker}: {e}. Retrying in {wait_time}s...", file=sys.stderr)
                time.sleep(wait_time)
            else:
                if verbose:
                    print(f"Failed to fetch {ticker} after {max_retries} attempts", file=sys.stderr)
                return None

    # Unreachable in practice (every loop path returns), kept as a guard.
    return None
|
|
287
|
+
|
|
288
|
+
|
|
289
|
+
def analyze_earnings_surprise(data: StockData) -> EarningsSurprise | None:
    """Score the most recent reported EPS beat/miss on [-1.0, 1.0].

    Walks the earnings history newest-first and returns a result for the
    first quarter that has both a reported EPS and a nonzero estimate;
    returns None when no such quarter exists or on any analysis error.
    """
    history = data.earnings_history
    if history is None or history.empty:
        return None

    try:
        # Newest rows first; ten rows is plenty to find the latest quarter
        # with complete data.
        for _, row in history.sort_index(ascending=False).head(10).iterrows():
            reported = row.get("Reported EPS")
            estimate = row.get("EPS Estimate")
            if pd.isna(reported) or pd.isna(estimate):
                continue

            actual = float(reported)
            expected = float(estimate)
            if expected == 0:
                # Percentage surprise is undefined against a zero estimate.
                continue

            surprise_pct = ((actual - expected) / abs(expected)) * 100

            # Map the surprise onto a discrete score ladder.
            for floor, ladder_score in (
                (10, 1.0),
                (5, 0.7),
                (0, 0.3),
                (-5, -0.3),
                (-10, -0.7),
            ):
                if surprise_pct > floor:
                    score = ladder_score
                    break
            else:
                score = -1.0

            verdict = "Beat" if surprise_pct > 0 else "Missed"
            return EarningsSurprise(
                score=score,
                explanation=f"{verdict} by {abs(surprise_pct):.1f}%",
                actual_eps=actual,
                expected_eps=expected,
                surprise_pct=surprise_pct,
            )

        return None

    except Exception:
        return None
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
def analyze_fundamentals(data: StockData) -> Fundamentals | None:
    """Analyze fundamental metrics (P/E, margins, growth, leverage).

    Each available metric contributes a sub-score; the result is the
    clamped average on [-1.0, 1.0]. Returns None when no metric is
    available or the info payload is malformed.
    """
    info = data.info
    scores = []
    metrics = {}
    explanations = []

    try:
        # P/E Ratio (lower is better, but consider growth)
        pe_ratio = info.get("trailingPE") or info.get("forwardPE")
        if pe_ratio and pe_ratio > 0:
            metrics["pe_ratio"] = round(pe_ratio, 2)
            if pe_ratio < 15:
                scores.append(0.5)
                explanations.append(f"Attractive P/E: {pe_ratio:.1f}x")
            elif pe_ratio > 30:
                scores.append(-0.3)
                explanations.append(f"Elevated P/E: {pe_ratio:.1f}x")
            else:
                scores.append(0.1)

        # Operating Margin
        # BUG FIX: `if op_margin:` skipped a legitimate 0.0 margin (falsy
        # float); compare against None so zero margins are scored as weak.
        op_margin = info.get("operatingMargins")
        if op_margin is not None:
            metrics["operating_margin"] = round(op_margin, 3)
            if op_margin > 0.15:
                scores.append(0.5)
                explanations.append(f"Strong margin: {op_margin*100:.1f}%")
            elif op_margin < 0.05:
                scores.append(-0.5)
                explanations.append(f"Weak margin: {op_margin*100:.1f}%")

        # Revenue Growth
        # BUG FIX: same falsy-zero issue — 0.0 YoY growth is meaningful
        # (flat revenue) and should be scored as slow growth, not skipped.
        rev_growth = info.get("revenueGrowth")
        if rev_growth is not None:
            metrics["revenue_growth_yoy"] = round(rev_growth, 3)
            if rev_growth > 0.20:
                scores.append(0.5)
                explanations.append(f"Strong growth: {rev_growth*100:.1f}% YoY")
            elif rev_growth < 0.05:
                scores.append(-0.3)
                explanations.append(f"Slow growth: {rev_growth*100:.1f}% YoY")
            else:
                scores.append(0.2)

        # Debt to Equity (Yahoo reports it as a percentage, hence /100)
        debt_equity = info.get("debtToEquity")
        if debt_equity is not None:
            metrics["debt_to_equity"] = round(debt_equity / 100, 2)
            if debt_equity < 50:
                scores.append(0.3)
            elif debt_equity > 200:
                scores.append(-0.5)
                explanations.append(f"High debt: D/E {debt_equity/100:.1f}x")

        if not scores:
            return None

        # Average and clamp to the canonical [-1.0, 1.0] range.
        avg_score = sum(scores) / len(scores)
        normalized_score = max(-1.0, min(1.0, avg_score))

        explanation = "; ".join(explanations) if explanations else "Mixed fundamentals"

        return Fundamentals(
            score=normalized_score,
            key_metrics=metrics,
            explanation=explanation,
        )

    except Exception:
        return None
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
def analyze_crypto_fundamentals(data: StockData, verbose: bool = False) -> CryptoFundamentals | None:
    """Analyze crypto-specific fundamentals (market cap, supply, category).

    Scores market-cap tier, liquidity (24h volume relative to cap) and
    30-day BTC correlation. Returns None for non-crypto assets or when
    the market cap is unavailable.
    """
    if data.asset_type != "crypto":
        return None

    info = data.info
    ticker = data.ticker.upper()

    try:
        # Market cap analysis
        market_cap = info.get("marketCap")
        if not market_cap:
            return None

        # Categorize by market cap
        if market_cap >= 10_000_000_000:  # $10B+
            market_cap_rank = "large"
            cap_score = 0.3  # Large caps are more stable
        elif market_cap >= 1_000_000_000:  # $1B-$10B
            market_cap_rank = "mid"
            cap_score = 0.1
        else:
            market_cap_rank = "small"
            cap_score = -0.2  # Small caps are riskier

        # Volume analysis (daily turnover as a fraction of market cap)
        volume_24h = info.get("volume") or info.get("volume24Hr")
        volume_score = 0.0
        if volume_24h and market_cap:
            volume_to_cap = volume_24h / market_cap
            if volume_to_cap > 0.05:  # >5% daily turnover
                volume_score = 0.2  # High liquidity
            elif volume_to_cap < 0.01:
                volume_score = -0.2  # Low liquidity

        # Circulating supply
        circulating_supply = info.get("circulatingSupply")

        # Get crypto category
        category = CRYPTO_CATEGORIES.get(ticker, "Unknown")

        # Calculate BTC correlation (30 days; best-effort, leaves None on failure)
        btc_correlation = None
        try:
            if ticker != "BTC-USD" and data.price_history is not None:
                btc = yf.Ticker("BTC-USD")
                btc_hist = btc.history(period="1mo")
                if not btc_hist.empty and len(data.price_history) > 5:
                    # Align dates and calculate correlation
                    crypto_returns = data.price_history["Close"].pct_change().dropna()
                    btc_returns = btc_hist["Close"].pct_change().dropna()
                    # Simple correlation on overlapping dates
                    common_dates = crypto_returns.index.intersection(btc_returns.index)
                    if len(common_dates) > 10:
                        btc_correlation = crypto_returns.loc[common_dates].corr(btc_returns.loc[common_dates])
        except Exception:
            pass

        # BTC correlation scoring (high correlation = less diversification benefit)
        corr_score = 0.0
        if btc_correlation is not None:
            if btc_correlation > 0.8:
                corr_score = -0.1  # Very correlated to BTC
            elif btc_correlation < 0.3:
                corr_score = 0.1  # Good diversification

        # Total score
        total_score = cap_score + volume_score + corr_score

        # Build explanation
        explanations = []
        explanations.append(f"Market cap: ${market_cap/1e9:.1f}B ({market_cap_rank})")
        if category != "Unknown":
            explanations.append(f"Category: {category}")
        if btc_correlation is not None:
            explanations.append(f"BTC corr: {btc_correlation:.2f}")

        return CryptoFundamentals(
            market_cap=market_cap,
            market_cap_rank=market_cap_rank,
            volume_24h=volume_24h,
            circulating_supply=circulating_supply,
            category=category,
            # BUG FIX: `if btc_correlation` turned a legitimate 0.0
            # correlation into None (falsy float); test against None.
            btc_correlation=round(btc_correlation, 2) if btc_correlation is not None else None,
            score=max(-1.0, min(1.0, total_score)),
            explanation="; ".join(explanations),
        )

    except Exception as e:
        if verbose:
            print(f"Error analyzing crypto fundamentals: {e}", file=sys.stderr)
        return None
|
|
504
|
+
|
|
505
|
+
|
|
506
|
+
def analyze_analyst_sentiment(data: StockData) -> AnalystSentiment | None:
    """Score Wall Street consensus and price-target upside for a ticker.

    Returns None when no current price is available, a score-None result
    when there is no analyst coverage, and a populated sentiment otherwise.
    """
    info = data.info

    try:
        # A spot price is required to compute upside.
        current_price = info.get("regularMarketPrice") or info.get("currentPrice")
        if not current_price:
            return None

        target_price = info.get("targetMeanPrice")
        num_analysts = info.get("numberOfAnalystOpinions")
        recommendation = info.get("recommendationKey")

        if not target_price or not recommendation:
            return AnalystSentiment(
                score=None,
                summary="No analyst coverage available",
            )

        # Implied move to the mean analyst target, in percent.
        upside_pct = ((target_price - current_price) / current_price) * 100

        # Base score from the consensus rating keyword (unknown keys -> 0.0).
        base_score = {
            "strong_buy": 1.0,
            "buy": 0.7,
            "hold": 0.0,
            "sell": -0.7,
            "strong_sell": -1.0,
        }.get(recommendation, 0.0)

        # Nudge the base score toward the target-implied upside/downside,
        # clamped to [-1.0, 1.0].
        if upside_pct > 20:
            score = min(1.0, base_score + 0.3)
        elif upside_pct > 10:
            score = min(1.0, base_score + 0.15)
        elif upside_pct < -10:
            score = max(-1.0, base_score - 0.3)
        else:
            score = base_score

        rec_display = recommendation.replace("_", " ").title()
        direction = "upside" if upside_pct > 0 else "downside"
        summary = f"{rec_display} with {abs(upside_pct):.1f}% {direction}"
        if num_analysts:
            summary += f" ({num_analysts} analysts)"

        return AnalystSentiment(
            score=score,
            summary=summary,
            consensus_rating=rec_display,
            price_target=target_price,
            current_price=current_price,
            upside_pct=upside_pct,
            num_analysts=num_analysts,
        )

    except Exception:
        return AnalystSentiment(
            score=None,
            summary="Error analyzing analyst sentiment",
        )
|
|
577
|
+
|
|
578
|
+
|
|
579
|
+
def analyze_historical_patterns(data: StockData) -> HistoricalPatterns | None:
    """Analyze historical earnings patterns.

    Looks at up to the last 4 earnings rows: counts EPS beats and, where
    price history covers the earnings day, collects the open-to-close
    reaction. Returns None when either history is missing/empty.
    """
    if data.earnings_history is None or data.price_history is None:
        return None

    if data.earnings_history.empty or data.price_history.empty:
        return None

    try:
        # Get last 4 quarters earnings dates
        earnings_dates = data.earnings_history.sort_index(ascending=False).head(4)

        beats = 0
        reactions = []

        for earnings_date, row in earnings_dates.iterrows():
            if pd.notna(row.get("Reported EPS")) and pd.notna(row.get("EPS Estimate")):
                actual = float(row["Reported EPS"])
                expected = float(row["EPS Estimate"])

                # Strict >: an exact in-line result does not count as a beat.
                if actual > expected:
                    beats += 1

                # Try to get price reaction (day of earnings)
                try:
                    earnings_day = pd.Timestamp(earnings_date).date()

                    # Find closest trading day
                    price_data = data.price_history[data.price_history.index.date == earnings_day]

                    if not price_data.empty:
                        day_change = ((price_data["Close"].iloc[0] - price_data["Open"].iloc[0]) / price_data["Open"].iloc[0]) * 100
                        reactions.append(day_change)
                except Exception:
                    continue

        total_quarters = len(earnings_dates)
        if total_quarters == 0:
            return None

        # Score based on beat rate
        # NOTE(review): the denominator counts all fetched rows, including
        # quarters with no reported EPS — the beat rate can be understated
        # when data is sparse. Confirm this is intended.
        beat_rate = beats / total_quarters

        if beat_rate == 1.0:
            score = 0.8
        elif beat_rate >= 0.75:
            score = 0.5
        elif beat_rate >= 0.5:
            score = 0.0
        elif beat_rate >= 0.25:
            score = -0.5
        else:
            score = -0.8

        # Pattern description
        pattern_desc = f"{beats}/{total_quarters} quarters beat expectations"

        if reactions:
            avg_reaction = sum(reactions) / len(reactions)
            pattern_desc += f", avg reaction {avg_reaction:+.1f}%"
        else:
            avg_reaction = None

        return HistoricalPatterns(
            score=score,
            pattern_desc=pattern_desc,
            beats_last_4q=beats,
            avg_reaction_pct=avg_reaction,
        )

    except Exception:
        return None
|
|
651
|
+
|
|
652
|
+
|
|
653
|
+
def analyze_market_context(verbose: bool = False) -> MarketContext | None:
    """Analyze overall market conditions using VIX, SPY, QQQ, and safe-havens with 1h cache.

    Combines a VIX-level score with a SPY/QQQ 10-day regime score, then
    subtracts a risk-off penalty when gold, treasuries and the dollar all
    rise together. Results go through the module cache helpers
    (_get_cached/_set_cache — defined elsewhere in this file; presumably
    a 1-hour TTL per the docstring, verify at their definition).
    """
    # Check cache first
    cached = _get_cached("market_context")
    if cached is not None:
        if verbose:
            print("Using cached market context (< 1h old)", file=sys.stderr)
        return cached

    try:
        if verbose:
            print("Fetching market indicators (VIX, SPY, QQQ)...", file=sys.stderr)

        # Fetch market indicators
        vix = yf.Ticker("^VIX")
        spy = yf.Ticker("SPY")
        qqq = yf.Ticker("QQQ")

        # Get current VIX level
        vix_info = vix.info
        vix_level = vix_info.get("regularMarketPrice") or vix_info.get("currentPrice")

        if not vix_level:
            return None

        # Determine VIX status (thresholds: <20 calm, 20-30 elevated, >=30 fear)
        if vix_level < 20:
            vix_status = "calm"
            vix_score = 0.2
        elif vix_level < 30:
            vix_status = "elevated"
            vix_score = 0.0
        else:
            vix_status = "fear"
            vix_score = -0.5

        # Get SPY and QQQ 10-day trends
        spy_hist = spy.history(period="1mo")
        qqq_hist = qqq.history(period="1mo")

        if spy_hist.empty or qqq_hist.empty:
            return None

        # Calculate 10-day price changes (min() guards short histories)
        spy_10d_ago = spy_hist["Close"].iloc[-min(10, len(spy_hist))]
        spy_current = spy_hist["Close"].iloc[-1]
        spy_trend_10d = ((spy_current - spy_10d_ago) / spy_10d_ago) * 100

        qqq_10d_ago = qqq_hist["Close"].iloc[-min(10, len(qqq_hist))]
        qqq_current = qqq_hist["Close"].iloc[-1]
        qqq_trend_10d = ((qqq_current - qqq_10d_ago) / qqq_10d_ago) * 100

        # Determine market regime from the mean of the two index trends
        avg_trend = (spy_trend_10d + qqq_trend_10d) / 2

        if avg_trend > 3:
            market_regime = "bull"
            regime_score = 0.3
        elif avg_trend < -3:
            market_regime = "bear"
            regime_score = -0.4
        else:
            market_regime = "choppy"
            regime_score = -0.1

        # Calculate overall score
        overall_score = (vix_score + regime_score) / 2

        # NEW v4.0.0: Fetch safe-haven indicators (GLD, TLT, UUP)
        gld_change_5d = None
        tlt_change_5d = None
        uup_change_5d = None
        risk_off_detected = False

        try:
            if verbose:
                print("Fetching safe-haven indicators (GLD, TLT, UUP)...", file=sys.stderr)

            # Fetch safe-haven ETFs
            gld = yf.Ticker("GLD")  # Gold
            tlt = yf.Ticker("TLT")  # 20+ Year Treasury
            uup = yf.Ticker("UUP")  # USD Index

            gld_hist = gld.history(period="10d")
            tlt_hist = tlt.history(period="10d")
            uup_hist = uup.history(period="10d")

            # Calculate 5-day changes
            if not gld_hist.empty and len(gld_hist) >= 5:
                gld_5d_ago = gld_hist["Close"].iloc[-min(5, len(gld_hist))]
                gld_current = gld_hist["Close"].iloc[-1]
                gld_change_5d = ((gld_current - gld_5d_ago) / gld_5d_ago) * 100

            if not tlt_hist.empty and len(tlt_hist) >= 5:
                tlt_5d_ago = tlt_hist["Close"].iloc[-min(5, len(tlt_hist))]
                tlt_current = tlt_hist["Close"].iloc[-1]
                tlt_change_5d = ((tlt_current - tlt_5d_ago) / tlt_5d_ago) * 100

            if not uup_hist.empty and len(uup_hist) >= 5:
                uup_5d_ago = uup_hist["Close"].iloc[-min(5, len(uup_hist))]
                uup_current = uup_hist["Close"].iloc[-1]
                uup_change_5d = ((uup_current - uup_5d_ago) / uup_5d_ago) * 100

            # Risk-off detection: All three safe-havens rising together
            if (gld_change_5d is not None and gld_change_5d >= 2.0 and
                tlt_change_5d is not None and tlt_change_5d >= 1.0 and
                uup_change_5d is not None and uup_change_5d >= 1.0):
                risk_off_detected = True
                overall_score -= 0.5  # Reduce score significantly
                if verbose:
                    print(f" 🛡️ RISK-OFF DETECTED: GLD {gld_change_5d:+.1f}%, TLT {tlt_change_5d:+.1f}%, UUP {uup_change_5d:+.1f}%", file=sys.stderr)

        except Exception as e:
            # Safe-haven data is optional; keep the core context on failure.
            if verbose:
                print(f" Safe-haven indicators unavailable: {e}", file=sys.stderr)

        # Build explanation
        explanation = f"VIX {vix_level:.1f} ({vix_status}), Market {market_regime} (SPY {spy_trend_10d:+.1f}%, QQQ {qqq_trend_10d:+.1f}% 10d)"
        if risk_off_detected:
            explanation += " ⚠️ RISK-OFF MODE"

        result = MarketContext(
            vix_level=vix_level,
            vix_status=vix_status,
            spy_trend_10d=spy_trend_10d,
            qqq_trend_10d=qqq_trend_10d,
            market_regime=market_regime,
            score=overall_score,
            explanation=explanation,
            gld_change_5d=gld_change_5d,
            tlt_change_5d=tlt_change_5d,
            uup_change_5d=uup_change_5d,
            risk_off_detected=risk_off_detected,
        )

        # Cache the result for 1 hour
        _set_cache("market_context", result)
        return result

    except Exception as e:
        if verbose:
            print(f"Error analyzing market context: {e}", file=sys.stderr)
        return None
|
|
796
|
+
|
|
797
|
+
|
|
798
|
+
def get_sector_etf_ticker(sector: str) -> str | None:
|
|
799
|
+
"""Map sector name to corresponding sector ETF ticker."""
|
|
800
|
+
sector_map = {
|
|
801
|
+
"Financial Services": "XLF",
|
|
802
|
+
"Financials": "XLF",
|
|
803
|
+
"Technology": "XLK",
|
|
804
|
+
"Healthcare": "XLV",
|
|
805
|
+
"Consumer Cyclical": "XLY",
|
|
806
|
+
"Consumer Defensive": "XLP",
|
|
807
|
+
"Utilities": "XLU",
|
|
808
|
+
"Basic Materials": "XLB",
|
|
809
|
+
"Real Estate": "XLRE",
|
|
810
|
+
"Communication Services": "XLC",
|
|
811
|
+
"Industrials": "XLI",
|
|
812
|
+
"Energy": "XLE",
|
|
813
|
+
}
|
|
814
|
+
|
|
815
|
+
return sector_map.get(sector)
|
|
816
|
+
|
|
817
|
+
|
|
818
|
+
# ============================================================================
|
|
819
|
+
# Breaking News Check (v4.0.0)
|
|
820
|
+
# ============================================================================
|
|
821
|
+
|
|
822
|
+
# Crisis keywords by category
|
|
823
|
+
# Crisis keywords by category. Matched as lowercase substrings against
# Google News headlines/summaries in check_breaking_news(); any hit raises
# an alert (capped at 3 alerts per run).
CRISIS_KEYWORDS = {
    "war": ["war", "invasion", "military strike", "attack", "conflict", "combat"],
    "economic": ["recession", "crisis", "collapse", "default", "bankruptcy", "crash"],
    "regulatory": ["sanctions", "embargo", "ban", "investigation", "fraud", "probe"],
    "disaster": ["earthquake", "hurricane", "pandemic", "outbreak", "disaster", "catastrophe"],
    "financial": ["emergency rate", "fed emergency", "bailout", "circuit breaker", "trading halt"],
}

# Geopolitical event → sector mapping (v4.0.0).
# Consumed by check_sector_geopolitical_risk(): when an event's keywords
# appear in breaking news, tickers in "affected_tickers" take a 0.3
# confidence penalty; other stocks whose yfinance sector is in "sectors"
# take a 0.15 penalty.
#
# BUG FIX: "sectors" entries are compared against yfinance's `sector` field,
# which uses the labels "Basic Materials" and "Financial Services" (see
# get_sector_etf_ticker in this file). The previous values "Materials" and
# "Financials" alone could never match, so the sector-level penalty for the
# russia_ukraine and banking_crisis events was dead code. The canonical
# labels are added; the old aliases are kept for backward compatibility.
GEOPOLITICAL_RISK_MAP = {
    "taiwan": {
        "keywords": ["taiwan", "tsmc", "strait"],
        "sectors": ["Technology", "Communication Services"],
        "sector_etfs": ["XLK", "XLC"],
        "impact": "Semiconductor supply chain disruption",
        "affected_tickers": ["NVDA", "AMD", "TSM", "INTC", "QCOM", "AVGO", "MU"],
    },
    "china": {
        "keywords": ["china", "beijing", "tariff", "trade war"],
        "sectors": ["Technology", "Consumer Cyclical", "Consumer Defensive"],
        "sector_etfs": ["XLK", "XLY", "XLP"],
        "impact": "Tech supply chain and consumer market exposure",
        "affected_tickers": ["AAPL", "QCOM", "NKE", "SBUX", "MCD", "YUM", "TGT", "WMT"],
    },
    "russia_ukraine": {
        "keywords": ["russia", "ukraine", "putin", "kyiv", "moscow"],
        "sectors": ["Energy", "Basic Materials", "Materials"],
        "sector_etfs": ["XLE", "XLB"],
        "impact": "Energy and commodity price volatility",
        "affected_tickers": ["XOM", "CVX", "COP", "SLB", "MOS", "CF", "NTR", "ADM"],
    },
    "middle_east": {
        "keywords": ["iran", "israel", "gaza", "saudi", "middle east", "gulf"],
        "sectors": ["Energy", "Industrials"],
        "sector_etfs": ["XLE", "XLI"],
        "impact": "Oil price volatility and defense spending",
        "affected_tickers": ["XOM", "CVX", "COP", "LMT", "RTX", "NOC", "GD", "BA"],
    },
    "banking_crisis": {
        "keywords": ["bank failure", "credit crisis", "liquidity crisis", "bank run"],
        "sectors": ["Financial Services", "Financials"],
        "sector_etfs": ["XLF"],
        "impact": "Financial sector contagion risk",
        "affected_tickers": ["JPM", "BAC", "WFC", "C", "GS", "MS", "USB", "PNC"],
    },
}
|
|
869
|
+
|
|
870
|
+
|
|
871
|
+
def check_breaking_news(verbose: bool = False) -> list[str] | None:
    """
    Check Google News RSS for breaking market/economic crisis events (last 24h).

    Headlines and summaries are matched (lowercase substring) against
    CRISIS_KEYWORDS; at most 3 deduplicated alerts are collected.

    Args:
        verbose: Print progress and matched alerts to stderr.

    Returns list of alert strings or None.
    Uses 1h cache to avoid excessive API calls.
    """
    # Check cache first (note: a cached empty result is stored as None, which
    # is indistinguishable from "not cached" here — an empty result is
    # therefore re-fetched each call).
    cached = _get_cached("breaking_news")
    if cached is not None:
        return cached

    alerts = []

    try:
        # Imported lazily so the script still runs if feedparser is absent.
        import feedparser
        from datetime import datetime, timezone, timedelta

        if verbose:
            print("Checking breaking news (Google News RSS)...", file=sys.stderr)

        # Google News RSS feeds for finance/business; "when:24h" restricts the
        # query server-side, the cutoff below re-checks client-side.
        rss_urls = [
            "https://news.google.com/rss/search?q=stock+market+when:24h&hl=en-US&gl=US&ceid=US:en",
            "https://news.google.com/rss/search?q=economy+crisis+when:24h&hl=en-US&gl=US&ceid=US:en",
        ]

        now = datetime.now(timezone.utc)
        cutoff_time = now - timedelta(hours=24)

        for url in rss_urls:
            try:
                feed = feedparser.parse(url)

                for entry in feed.entries[:20]:  # Check top 20 headlines
                    # Parse publication date (feedparser gives a struct_time;
                    # treated as UTC).
                    pub_date = None
                    if hasattr(entry, "published_parsed") and entry.published_parsed:
                        pub_date = datetime(*entry.published_parsed[:6], tzinfo=timezone.utc)

                    # Skip if older than 24h (entries without a date are kept)
                    if pub_date and pub_date < cutoff_time:
                        continue

                    title = entry.get("title", "").lower()
                    summary = entry.get("summary", "").lower()
                    text = f"{title} {summary}"

                    # Check for crisis keywords; at most one alert per entry
                    # per category (break exits the keyword loop on first hit).
                    for category, keywords in CRISIS_KEYWORDS.items():
                        for keyword in keywords:
                            if keyword in text:
                                alert_text = entry.get("title", "Unknown alert")
                                hours_ago = int((now - pub_date).total_seconds() / 3600) if pub_date else None
                                time_str = f"{hours_ago}h ago" if hours_ago is not None else "recent"

                                alert = f"{alert_text} ({time_str})"
                                if alert not in alerts:  # Deduplicate
                                    alerts.append(alert)
                                    if verbose:
                                        print(f" ⚠️ Alert: {alert}", file=sys.stderr)
                                break
                        if len(alerts) >= 3:  # Limit to 3 alerts
                            break

                    if len(alerts) >= 3:
                        break

            except Exception as e:
                # Best-effort: a failing feed should not abort the other feeds.
                if verbose:
                    print(f" Failed to fetch {url}: {e}", file=sys.stderr)
                continue

        # Cache results (even if empty) for 1 hour
        result = alerts if alerts else None
        _set_cache("breaking_news", result)
        return result

    except Exception as e:
        if verbose:
            print(f" Breaking news check failed: {e}", file=sys.stderr)
        return None
|
|
952
|
+
|
|
953
|
+
|
|
954
|
+
def check_sector_geopolitical_risk(
|
|
955
|
+
ticker: str,
|
|
956
|
+
sector: str | None,
|
|
957
|
+
breaking_news: list[str] | None,
|
|
958
|
+
verbose: bool = False
|
|
959
|
+
) -> tuple[str | None, float]:
|
|
960
|
+
"""
|
|
961
|
+
Check if ticker is exposed to geopolitical risks based on breaking news.
|
|
962
|
+
Returns (warning_message, confidence_penalty).
|
|
963
|
+
|
|
964
|
+
Args:
|
|
965
|
+
ticker: Stock ticker symbol
|
|
966
|
+
sector: Stock sector (from yfinance)
|
|
967
|
+
breaking_news: List of breaking news alerts
|
|
968
|
+
verbose: Print debug info
|
|
969
|
+
|
|
970
|
+
Returns:
|
|
971
|
+
(warning_message, confidence_penalty) where:
|
|
972
|
+
- warning_message: None or string like "⚠️ SECTOR RISK: Taiwan tensions affect semiconductors"
|
|
973
|
+
- confidence_penalty: 0.0 (no risk) to 0.5 (high risk)
|
|
974
|
+
"""
|
|
975
|
+
if not breaking_news:
|
|
976
|
+
return None, 0.0
|
|
977
|
+
|
|
978
|
+
# Combine all breaking news into single text for keyword matching
|
|
979
|
+
news_text = " ".join(breaking_news).lower()
|
|
980
|
+
|
|
981
|
+
# Check each geopolitical event
|
|
982
|
+
for event_name, event_data in GEOPOLITICAL_RISK_MAP.items():
|
|
983
|
+
# Check if any keywords from this event appear in breaking news
|
|
984
|
+
keywords_found = []
|
|
985
|
+
for keyword in event_data["keywords"]:
|
|
986
|
+
if keyword in news_text:
|
|
987
|
+
keywords_found.append(keyword)
|
|
988
|
+
|
|
989
|
+
if not keywords_found:
|
|
990
|
+
continue
|
|
991
|
+
|
|
992
|
+
# Check if ticker is in affected list
|
|
993
|
+
if ticker in event_data["affected_tickers"]:
|
|
994
|
+
# Direct ticker exposure
|
|
995
|
+
warning = f"⚠️ SECTOR RISK: {event_data['impact']} (detected: {', '.join(keywords_found)})"
|
|
996
|
+
penalty = 0.3 # Reduce BUY confidence by 30%
|
|
997
|
+
|
|
998
|
+
if verbose:
|
|
999
|
+
print(f" Geopolitical risk detected: {event_name} affects {ticker}", file=sys.stderr)
|
|
1000
|
+
|
|
1001
|
+
return warning, penalty
|
|
1002
|
+
|
|
1003
|
+
# Check if sector is affected (even if ticker not in list)
|
|
1004
|
+
if sector and sector in event_data["sectors"]:
|
|
1005
|
+
# Sector exposure (weaker signal)
|
|
1006
|
+
warning = f"⚠️ SECTOR RISK: {sector} sector exposed to {event_data['impact']}"
|
|
1007
|
+
penalty = 0.15 # Reduce BUY confidence by 15%
|
|
1008
|
+
|
|
1009
|
+
if verbose:
|
|
1010
|
+
print(f" Sector risk detected: {event_name} affects {sector} sector", file=sys.stderr)
|
|
1011
|
+
|
|
1012
|
+
return warning, penalty
|
|
1013
|
+
|
|
1014
|
+
return None, 0.0
|
|
1015
|
+
|
|
1016
|
+
|
|
1017
|
+
def analyze_sector_performance(data: StockData, verbose: bool = False) -> SectorComparison | None:
    """Compare stock performance to its sector.

    Fetches 3 months of the matching SPDR sector ETF, computes 1-month
    returns for stock and ETF, a relative-strength ratio, and a 10-day
    sector trend label. Score ranges roughly -0.5..+0.5.

    Returns None when the sector is unknown/unmapped, price data is
    missing, or any step fails.
    """
    try:
        sector = data.info.get("sector")
        industry = data.info.get("industry")

        if not sector:
            return None

        sector_etf_ticker = get_sector_etf_ticker(sector)

        if not sector_etf_ticker:
            if verbose:
                print(f"No sector ETF mapping for {sector}", file=sys.stderr)
            return None

        if verbose:
            print(f"Comparing to sector ETF: {sector_etf_ticker}", file=sys.stderr)

        # Fetch sector ETF data (network call via yfinance)
        sector_etf = yf.Ticker(sector_etf_ticker)
        sector_hist = sector_etf.history(period="3mo")

        if sector_hist.empty or data.price_history is None or data.price_history.empty:
            return None

        # Calculate 1-month returns (~22 trading days; min() guards short
        # histories, falling back to the whole available range)
        stock_1m_ago = data.price_history["Close"].iloc[-min(22, len(data.price_history))]
        stock_current = data.price_history["Close"].iloc[-1]
        stock_return_1m = ((stock_current - stock_1m_ago) / stock_1m_ago) * 100

        sector_1m_ago = sector_hist["Close"].iloc[-min(22, len(sector_hist))]
        sector_current = sector_hist["Close"].iloc[-1]
        sector_return_1m = ((sector_current - sector_1m_ago) / sector_1m_ago) * 100

        # Calculate relative strength (stock return / sector return).
        # NOTE(review): as a plain ratio this flips sign when the sector
        # return is negative — interpret with care; verify intended semantics.
        relative_strength = stock_return_1m / sector_return_1m if sector_return_1m != 0 else 1.0

        # Sector 10-day trend
        sector_10d_ago = sector_hist["Close"].iloc[-min(10, len(sector_hist))]
        sector_trend_10d = ((sector_current - sector_10d_ago) / sector_10d_ago) * 100

        if sector_trend_10d > 5:
            sector_trend = "strong uptrend"
        elif sector_trend_10d > 2:
            sector_trend = "uptrend"
        elif sector_trend_10d < -5:
            sector_trend = "downtrend"
        elif sector_trend_10d < -2:
            sector_trend = "weak"
        else:
            sector_trend = "neutral"

        # Calculate score
        score = 0.0

        # Relative performance score
        if relative_strength > 1.05:  # Outperforming by >5%
            score += 0.3
        elif relative_strength < 0.95:  # Underperforming by >5%
            score -= 0.3

        # Sector trend score
        if sector_trend_10d > 5:
            score += 0.2
        elif sector_trend_10d < -5:
            score -= 0.2

        explanation = f"{sector} sector {sector_trend} ({sector_return_1m:+.1f}% 1m), stock {stock_return_1m:+.1f}% vs sector"

        return SectorComparison(
            sector_name=sector,
            industry_name=industry or "Unknown",
            stock_return_1m=stock_return_1m,
            sector_return_1m=sector_return_1m,
            relative_strength=relative_strength,
            sector_trend=sector_trend,
            score=score,
            explanation=explanation,
        )

    except Exception as e:
        if verbose:
            print(f"Error analyzing sector performance: {e}", file=sys.stderr)
        return None
|
|
1102
|
+
|
|
1103
|
+
|
|
1104
|
+
def analyze_earnings_timing(data: StockData) -> EarningsTiming | None:
|
|
1105
|
+
"""Check earnings timing and flag pre/post-earnings periods."""
|
|
1106
|
+
try:
|
|
1107
|
+
from datetime import datetime, timedelta
|
|
1108
|
+
|
|
1109
|
+
if data.earnings_history is None or data.earnings_history.empty:
|
|
1110
|
+
return None
|
|
1111
|
+
|
|
1112
|
+
current_date = datetime.now()
|
|
1113
|
+
earnings_dates = data.earnings_history.sort_index(ascending=False)
|
|
1114
|
+
|
|
1115
|
+
# Find next and last earnings dates
|
|
1116
|
+
next_earnings_date = None
|
|
1117
|
+
last_earnings_date = None
|
|
1118
|
+
|
|
1119
|
+
for earnings_date in earnings_dates.index:
|
|
1120
|
+
earnings_dt = pd.Timestamp(earnings_date).to_pydatetime()
|
|
1121
|
+
|
|
1122
|
+
if earnings_dt > current_date and next_earnings_date is None:
|
|
1123
|
+
next_earnings_date = earnings_dt
|
|
1124
|
+
elif earnings_dt <= current_date and last_earnings_date is None:
|
|
1125
|
+
last_earnings_date = earnings_dt
|
|
1126
|
+
break
|
|
1127
|
+
|
|
1128
|
+
# Calculate days until/since earnings
|
|
1129
|
+
days_until_earnings = None
|
|
1130
|
+
days_since_earnings = None
|
|
1131
|
+
|
|
1132
|
+
if next_earnings_date:
|
|
1133
|
+
days_until_earnings = (next_earnings_date - current_date).days
|
|
1134
|
+
|
|
1135
|
+
if last_earnings_date:
|
|
1136
|
+
days_since_earnings = (current_date - last_earnings_date).days
|
|
1137
|
+
|
|
1138
|
+
# Determine timing flag
|
|
1139
|
+
timing_flag = "safe"
|
|
1140
|
+
confidence_adjustment = 0.0
|
|
1141
|
+
caveats = []
|
|
1142
|
+
|
|
1143
|
+
# Pre-earnings check (< 14 days)
|
|
1144
|
+
if days_until_earnings is not None and days_until_earnings <= 14:
|
|
1145
|
+
timing_flag = "pre_earnings"
|
|
1146
|
+
confidence_adjustment = -0.3
|
|
1147
|
+
caveats.append(f"Earnings in {days_until_earnings} days - high volatility expected")
|
|
1148
|
+
|
|
1149
|
+
# Post-earnings check (< 5 days)
|
|
1150
|
+
price_change_5d = None
|
|
1151
|
+
if days_since_earnings is not None and days_since_earnings <= 5:
|
|
1152
|
+
# Calculate 5-day price change
|
|
1153
|
+
if data.price_history is not None and len(data.price_history) >= 5:
|
|
1154
|
+
price_5d_ago = data.price_history["Close"].iloc[-5]
|
|
1155
|
+
price_current = data.price_history["Close"].iloc[-1]
|
|
1156
|
+
price_change_5d = ((price_current - price_5d_ago) / price_5d_ago) * 100
|
|
1157
|
+
|
|
1158
|
+
if price_change_5d > 15:
|
|
1159
|
+
timing_flag = "post_earnings"
|
|
1160
|
+
confidence_adjustment = -0.2
|
|
1161
|
+
caveats.append(f"Up {price_change_5d:.1f}% in 5 days - gains may be priced in")
|
|
1162
|
+
|
|
1163
|
+
return EarningsTiming(
|
|
1164
|
+
days_until_earnings=days_until_earnings,
|
|
1165
|
+
days_since_earnings=days_since_earnings,
|
|
1166
|
+
next_earnings_date=next_earnings_date.strftime("%Y-%m-%d") if next_earnings_date else None,
|
|
1167
|
+
last_earnings_date=last_earnings_date.strftime("%Y-%m-%d") if last_earnings_date else None,
|
|
1168
|
+
timing_flag=timing_flag,
|
|
1169
|
+
price_change_5d=price_change_5d,
|
|
1170
|
+
confidence_adjustment=confidence_adjustment,
|
|
1171
|
+
caveats=caveats,
|
|
1172
|
+
)
|
|
1173
|
+
|
|
1174
|
+
except Exception:
|
|
1175
|
+
return None
|
|
1176
|
+
|
|
1177
|
+
|
|
1178
|
+
def calculate_rsi(prices: pd.Series, period: int = 14) -> float | None:
|
|
1179
|
+
"""Calculate RSI (Relative Strength Index)."""
|
|
1180
|
+
try:
|
|
1181
|
+
if len(prices) < period + 1:
|
|
1182
|
+
return None
|
|
1183
|
+
|
|
1184
|
+
# Calculate price changes
|
|
1185
|
+
delta = prices.diff()
|
|
1186
|
+
|
|
1187
|
+
# Separate gains and losses
|
|
1188
|
+
gains = delta.where(delta > 0, 0)
|
|
1189
|
+
losses = -delta.where(delta < 0, 0)
|
|
1190
|
+
|
|
1191
|
+
# Calculate average gains and losses
|
|
1192
|
+
avg_gain = gains.rolling(window=period).mean()
|
|
1193
|
+
avg_loss = losses.rolling(window=period).mean()
|
|
1194
|
+
|
|
1195
|
+
# Calculate RS
|
|
1196
|
+
rs = avg_gain / avg_loss
|
|
1197
|
+
|
|
1198
|
+
# Calculate RSI
|
|
1199
|
+
rsi = 100 - (100 / (1 + rs))
|
|
1200
|
+
|
|
1201
|
+
return float(rsi.iloc[-1])
|
|
1202
|
+
|
|
1203
|
+
except Exception:
|
|
1204
|
+
return None
|
|
1205
|
+
|
|
1206
|
+
|
|
1207
|
+
def analyze_momentum(data: StockData) -> MomentumAnalysis | None:
|
|
1208
|
+
"""Analyze momentum indicators (RSI, 52w range, volume, relative strength)."""
|
|
1209
|
+
try:
|
|
1210
|
+
if data.price_history is None or data.price_history.empty:
|
|
1211
|
+
return None
|
|
1212
|
+
|
|
1213
|
+
# Calculate RSI
|
|
1214
|
+
rsi_14d = calculate_rsi(data.price_history["Close"], period=14)
|
|
1215
|
+
|
|
1216
|
+
if rsi_14d:
|
|
1217
|
+
if rsi_14d > 70:
|
|
1218
|
+
rsi_status = "overbought"
|
|
1219
|
+
elif rsi_14d < 30:
|
|
1220
|
+
rsi_status = "oversold"
|
|
1221
|
+
else:
|
|
1222
|
+
rsi_status = "neutral"
|
|
1223
|
+
else:
|
|
1224
|
+
rsi_status = "unknown"
|
|
1225
|
+
|
|
1226
|
+
# Get 52-week high/low
|
|
1227
|
+
high_52w = data.info.get("fiftyTwoWeekHigh")
|
|
1228
|
+
low_52w = data.info.get("fiftyTwoWeekLow")
|
|
1229
|
+
current_price = data.info.get("regularMarketPrice") or data.info.get("currentPrice")
|
|
1230
|
+
|
|
1231
|
+
price_vs_52w_low = None
|
|
1232
|
+
price_vs_52w_high = None
|
|
1233
|
+
near_52w_high = False
|
|
1234
|
+
near_52w_low = False
|
|
1235
|
+
|
|
1236
|
+
if high_52w and low_52w and current_price:
|
|
1237
|
+
price_range = high_52w - low_52w
|
|
1238
|
+
if price_range > 0:
|
|
1239
|
+
price_vs_52w_low = ((current_price - low_52w) / price_range) * 100
|
|
1240
|
+
price_vs_52w_high = ((high_52w - current_price) / price_range) * 100
|
|
1241
|
+
|
|
1242
|
+
near_52w_high = price_vs_52w_low > 90
|
|
1243
|
+
near_52w_low = price_vs_52w_low < 10
|
|
1244
|
+
|
|
1245
|
+
# Volume analysis
|
|
1246
|
+
volume_ratio = None
|
|
1247
|
+
if "Volume" in data.price_history.columns and len(data.price_history) >= 60:
|
|
1248
|
+
recent_vol = data.price_history["Volume"].iloc[-5:].mean()
|
|
1249
|
+
avg_vol = data.price_history["Volume"].iloc[-60:].mean()
|
|
1250
|
+
volume_ratio = recent_vol / avg_vol if avg_vol > 0 else None
|
|
1251
|
+
|
|
1252
|
+
# Calculate score
|
|
1253
|
+
score = 0.0
|
|
1254
|
+
explanations = []
|
|
1255
|
+
|
|
1256
|
+
if rsi_14d:
|
|
1257
|
+
if rsi_14d > 70:
|
|
1258
|
+
score -= 0.5
|
|
1259
|
+
explanations.append(f"RSI {rsi_14d:.0f} (overbought)")
|
|
1260
|
+
elif rsi_14d < 30:
|
|
1261
|
+
score += 0.5
|
|
1262
|
+
explanations.append(f"RSI {rsi_14d:.0f} (oversold)")
|
|
1263
|
+
|
|
1264
|
+
if near_52w_high:
|
|
1265
|
+
score -= 0.3
|
|
1266
|
+
explanations.append("Near 52w high")
|
|
1267
|
+
elif near_52w_low:
|
|
1268
|
+
score += 0.3
|
|
1269
|
+
explanations.append("Near 52w low")
|
|
1270
|
+
|
|
1271
|
+
if volume_ratio and volume_ratio > 1.5:
|
|
1272
|
+
explanations.append(f"Volume {volume_ratio:.1f}x average")
|
|
1273
|
+
|
|
1274
|
+
explanation = "; ".join(explanations) if explanations else "Momentum indicators neutral"
|
|
1275
|
+
|
|
1276
|
+
return MomentumAnalysis(
|
|
1277
|
+
rsi_14d=rsi_14d,
|
|
1278
|
+
rsi_status=rsi_status,
|
|
1279
|
+
price_vs_52w_low=price_vs_52w_low,
|
|
1280
|
+
price_vs_52w_high=price_vs_52w_high,
|
|
1281
|
+
near_52w_high=near_52w_high,
|
|
1282
|
+
near_52w_low=near_52w_low,
|
|
1283
|
+
volume_ratio=volume_ratio,
|
|
1284
|
+
relative_strength_vs_sector=None, # Could be enhanced with sector comparison
|
|
1285
|
+
score=score,
|
|
1286
|
+
explanation=explanation,
|
|
1287
|
+
)
|
|
1288
|
+
|
|
1289
|
+
except Exception:
|
|
1290
|
+
return None
|
|
1291
|
+
|
|
1292
|
+
|
|
1293
|
+
# ============================================================================
|
|
1294
|
+
# Sentiment Analysis Helper Functions
|
|
1295
|
+
# ============================================================================
|
|
1296
|
+
|
|
1297
|
+
# Simple cache for shared indicators (Fear & Greed, VIX)
|
|
1298
|
+
# Format: {key: (value, timestamp)}
|
|
1299
|
+
_SENTIMENT_CACHE = {}
|
|
1300
|
+
_CACHE_TTL_SECONDS = 3600 # 1 hour
|
|
1301
|
+
|
|
1302
|
+
|
|
1303
|
+
def _get_cached(key: str):
|
|
1304
|
+
"""Get cached value if still valid (within TTL)."""
|
|
1305
|
+
if key in _SENTIMENT_CACHE:
|
|
1306
|
+
value, timestamp = _SENTIMENT_CACHE[key]
|
|
1307
|
+
if time.time() - timestamp < _CACHE_TTL_SECONDS:
|
|
1308
|
+
return value
|
|
1309
|
+
return None
|
|
1310
|
+
|
|
1311
|
+
|
|
1312
|
+
def _set_cache(key: str, value):
|
|
1313
|
+
"""Set cached value with current timestamp."""
|
|
1314
|
+
_SENTIMENT_CACHE[key] = (value, time.time())
|
|
1315
|
+
|
|
1316
|
+
|
|
1317
|
+
async def get_fear_greed_index() -> tuple[float, int | None, str | None] | None:
    """
    Fetch CNN Fear & Greed Index (contrarian indicator) with 1h cache.

    Contrarian mapping: extreme fear (<=25) scores +0.5 (buy signal),
    extreme greed (>75) scores -0.5 (warning).

    Returns: (score, value, status) or None on failure.
    """
    # Check cache first
    cached = _get_cached("fear_greed")
    if cached is not None:
        return cached

    def _fetch():
        # Runs in a worker thread; the import is lazy so a missing
        # fear-and-greed package degrades gracefully to None.
        try:
            from fear_and_greed import get as get_fear_greed
            result = get_fear_greed()
            return result
        except Exception:
            return None

    try:
        result = await asyncio.to_thread(_fetch)
        if result is None:
            return None

        value = result.value  # 0-100
        status = result.description  # "Extreme Fear", "Fear", etc.

        # Contrarian scoring
        if value <= 25:
            score = 0.5  # Extreme fear = buy opportunity
        elif value <= 45:
            score = 0.2  # Fear = mild buy signal
        elif value <= 55:
            score = 0.0  # Neutral
        elif value <= 75:
            score = -0.2  # Greed = caution
        else:
            score = -0.5  # Extreme greed = warning

        result_tuple = (score, value, status)
        _set_cache("fear_greed", result_tuple)
        return result_tuple
    except Exception:
        return None
|
|
1360
|
+
|
|
1361
|
+
|
|
1362
|
+
async def get_short_interest(data: StockData) -> tuple[float, float | None, float | None] | None:
|
|
1363
|
+
"""
|
|
1364
|
+
Analyze short interest (from yfinance).
|
|
1365
|
+
Returns: (score, short_interest_pct, days_to_cover) or None.
|
|
1366
|
+
"""
|
|
1367
|
+
# This is already synchronous data access (no API call), but make it async for consistency
|
|
1368
|
+
try:
|
|
1369
|
+
short_pct = data.info.get("shortPercentOfFloat")
|
|
1370
|
+
if short_pct is None:
|
|
1371
|
+
return None
|
|
1372
|
+
|
|
1373
|
+
short_pct_float = float(short_pct) * 100 # Convert to percentage
|
|
1374
|
+
|
|
1375
|
+
# Estimate days to cover (simplified - actual calculation needs volume data)
|
|
1376
|
+
short_ratio = data.info.get("shortRatio") # Days to cover
|
|
1377
|
+
days_to_cover = float(short_ratio) if short_ratio else None
|
|
1378
|
+
|
|
1379
|
+
# Scoring logic
|
|
1380
|
+
if short_pct_float > 20:
|
|
1381
|
+
if days_to_cover and days_to_cover > 10:
|
|
1382
|
+
score = 0.4 # High short interest + high days to cover = squeeze potential
|
|
1383
|
+
else:
|
|
1384
|
+
score = -0.3 # High short interest but justified
|
|
1385
|
+
elif short_pct_float < 5:
|
|
1386
|
+
score = 0.2 # Low short interest = bullish sentiment
|
|
1387
|
+
else:
|
|
1388
|
+
score = 0.0 # Normal range
|
|
1389
|
+
|
|
1390
|
+
return (score, short_pct_float, days_to_cover)
|
|
1391
|
+
except Exception:
|
|
1392
|
+
return None
|
|
1393
|
+
|
|
1394
|
+
|
|
1395
|
+
async def get_vix_term_structure() -> tuple[float, str | None, float | None] | None:
    """
    Analyze VIX futures term structure (contango vs backwardation) with 1h cache.

    NOTE: only the spot ^VIX level is actually fetched; the "structure" and
    "slope" are heuristic labels derived from spot thresholds (see the
    "Simplified" comment below), not real futures-curve data.

    Returns: (score, structure, slope) or None.
    """
    # Check cache first
    cached = _get_cached("vix_structure")
    if cached is not None:
        return cached

    def _fetch():
        # Runs in a worker thread; returns the latest ^VIX close or None.
        try:
            import yfinance as yf
            vix = yf.Ticker("^VIX")
            vix_data = vix.history(period="5d")
            if vix_data.empty:
                return None
            return vix_data["Close"].iloc[-1]
        except Exception:
            return None

    try:
        vix_spot = await asyncio.to_thread(_fetch)
        if vix_spot is None:
            return None

        # Simplified: assume normal contango when VIX < 20, backwardation when VIX > 30
        if vix_spot < 15:
            structure = "contango"
            slope = 10.0  # Steep contango
            score = 0.3  # Complacency/bullish
        elif vix_spot < 20:
            structure = "contango"
            slope = 5.0
            score = 0.1
        elif vix_spot > 30:
            structure = "backwardation"
            slope = -5.0
            score = -0.3  # Stress/bearish
        else:
            structure = "flat"
            slope = 0.0
            score = 0.0

        result_tuple = (score, structure, slope)
        _set_cache("vix_structure", result_tuple)
        return result_tuple
    except Exception:
        return None
|
|
1444
|
+
|
|
1445
|
+
|
|
1446
|
+
async def get_insider_activity(ticker: str, period_days: int = 90) -> tuple[float, int | None, float | None] | None:
    """
    Analyze insider trading from SEC Form 4 filings using edgartools.
    Returns: (score, net_shares, net_value_millions) or None.

    Scoring logic:
    - Strong buying (>100K shares or >$1M): +0.8
    - Moderate buying (>10K shares or >$0.1M): +0.4
    - Neutral: 0
    - Moderate selling: -0.4
    - Strong selling: -0.8

    Note: SEC EDGAR API requires User-Agent with email.
    """
    def _fetch():
        # Runs in a worker thread; all EDGAR access is inside so network or
        # parsing failures degrade to None rather than raising.
        try:
            from edgar import Company, set_identity
            from datetime import datetime, timedelta

            # Set SEC-required identity
            set_identity("stock-analysis@clawd.bot")

            # Get company and Form 4 filings
            company = Company(ticker)
            filings = company.get_filings(form="4")

            if filings is None or len(filings) == 0:
                return None

            # Calculate cutoff date
            cutoff_date = datetime.now() - timedelta(days=period_days)

            # Aggregate transactions
            total_bought_shares = 0
            total_sold_shares = 0
            total_bought_value = 0.0
            total_sold_value = 0.0

            # Process recent filings (iterate, don't slice due to pyarrow compatibility);
            # capped at 50 filings to bound EDGAR requests.
            count = 0
            for filing in filings:
                if count >= 50:
                    break
                count += 1

                try:
                    # Check filing date; edgartools may return a Timestamp,
                    # a date, or a string depending on version.
                    filing_date = filing.filing_date
                    if hasattr(filing_date, 'to_pydatetime'):
                        filing_date = filing_date.to_pydatetime()
                    elif isinstance(filing_date, str):
                        filing_date = datetime.strptime(filing_date, "%Y-%m-%d")

                    # Convert date object to datetime for comparison
                    # (a plain date has .year but no .hour)
                    if hasattr(filing_date, 'year') and not hasattr(filing_date, 'hour'):
                        filing_date = datetime.combine(filing_date, datetime.min.time())

                    if filing_date < cutoff_date:
                        continue

                    # Get Form 4 object
                    form4 = filing.obj()
                    if form4 is None:
                        continue

                    # Process purchases (edgartools returns DataFrames)
                    if hasattr(form4, 'common_stock_purchases'):
                        purchases = form4.common_stock_purchases
                        if isinstance(purchases, pd.DataFrame) and not purchases.empty:
                            if 'Shares' in purchases.columns:
                                total_bought_shares += int(purchases['Shares'].sum())
                            if 'Price' in purchases.columns and 'Shares' in purchases.columns:
                                total_bought_value += float((purchases['Shares'] * purchases['Price']).sum())

                    # Process sales
                    if hasattr(form4, 'common_stock_sales'):
                        sales = form4.common_stock_sales
                        if isinstance(sales, pd.DataFrame) and not sales.empty:
                            if 'Shares' in sales.columns:
                                total_sold_shares += int(sales['Shares'].sum())
                            if 'Price' in sales.columns and 'Shares' in sales.columns:
                                total_sold_value += float((sales['Shares'] * sales['Price']).sum())

                except Exception:
                    # Skip individual filings that fail to parse.
                    continue

            # Calculate net values
            net_shares = total_bought_shares - total_sold_shares
            net_value = (total_bought_value - total_sold_value) / 1_000_000  # Millions

            # Apply scoring logic
            if net_shares > 100_000 or net_value > 1.0:
                score = 0.8  # Strong buying
            elif net_shares > 10_000 or net_value > 0.1:
                score = 0.4  # Moderate buying
            elif net_shares < -100_000 or net_value < -1.0:
                score = -0.8  # Strong selling
            elif net_shares < -10_000 or net_value < -0.1:
                score = -0.4  # Moderate selling
            else:
                score = 0.0  # Neutral

            return (score, net_shares, net_value)

        except ImportError:
            # edgartools not installed
            return None
        except Exception:
            return None

    try:
        result = await asyncio.to_thread(_fetch)
        return result
    except Exception:
        return None
|
|
1561
|
+
|
|
1562
|
+
|
|
1563
|
+
async def get_put_call_ratio(data: StockData) -> tuple[float, float | None, int | None, int | None] | None:
    """
    Calculate put/call ratio from options chain (contrarian indicator).

    Uses only the nearest expiration's total volumes. Contrarian: heavy put
    buying (ratio > 1.5) is read as excess fear (+0.3); very light put
    buying (ratio <= 0.7) as complacency (-0.3).

    Returns: (score, ratio, put_volume, call_volume) or None.
    """
    def _fetch():
        # Runs in a worker thread; yfinance option-chain calls hit the network.
        try:
            if data.ticker_obj is None:
                return None

            # Get options chain for nearest expiration
            expirations = data.ticker_obj.options
            if not expirations or len(expirations) == 0:
                return None

            nearest_exp = expirations[0]
            opt_chain = data.ticker_obj.option_chain(nearest_exp)

            # Calculate total put and call volume
            put_volume = opt_chain.puts["volume"].sum() if "volume" in opt_chain.puts.columns else 0
            call_volume = opt_chain.calls["volume"].sum() if "volume" in opt_chain.calls.columns else 0

            # Either side at zero makes the ratio meaningless (or undefined).
            if call_volume == 0 or put_volume == 0:
                return None

            ratio = put_volume / call_volume
            return (ratio, int(put_volume), int(call_volume))
        except Exception:
            return None

    try:
        result = await asyncio.to_thread(_fetch)
        if result is None:
            return None

        ratio, put_volume, call_volume = result

        # Contrarian scoring
        if ratio > 1.5:
            score = 0.3  # Excessive fear = bullish
        elif ratio > 1.0:
            score = 0.1  # Mild fear
        elif ratio > 0.7:
            score = -0.1  # Normal
        else:
            score = -0.3  # Complacency = bearish

        return (score, ratio, put_volume, call_volume)
    except Exception:
        return None
|
|
1613
|
+
|
|
1614
|
+
|
|
1615
|
+
async def analyze_sentiment(data: StockData, verbose: bool = False, skip_insider: bool = False) -> SentimentAnalysis | None:
    """
    Analyze market sentiment using 5 sub-indicators in parallel.
    Requires at least 2 of 5 indicators for valid sentiment.
    Returns overall sentiment score (-1.0 to +1.0) with sub-metrics.

    Args:
        data: Pre-fetched stock data bundle for the ticker under analysis.
        verbose: When True, per-indicator progress/failures are printed to stderr.
        skip_insider: When True, the (slow) SEC EDGAR insider lookup is replaced
            by a no-op placeholder so only 4 indicators are attempted.

    Returns:
        A SentimentAnalysis with the averaged score plus the raw sub-metrics,
        or None if fewer than 2 indicators succeeded (or the gather failed).
    """
    scores = []        # sub-scores that succeeded; their mean is the overall score
    explanations = []  # one human-readable fragment per successful indicator
    warnings = []      # data-freshness caveats surfaced to the caller

    # Initialize all raw data fields so the SentimentAnalysis constructor can be
    # called uniformly regardless of which subset of indicators succeeded.
    fear_greed_score = None
    fear_greed_value = None
    fear_greed_status = None

    short_interest_score = None
    short_interest_pct = None
    days_to_cover = None

    vix_structure_score = None
    vix_structure = None
    vix_slope = None

    insider_activity_score = None
    insider_net_shares = None
    insider_net_value = None

    put_call_score = None
    put_call_ratio = None
    put_volume = None
    call_volume = None

    # Fetch all 5 indicators in parallel with 10s timeout per indicator
    # (or 4 if skip_insider=True for faster analysis).
    # NOTE: the positional order of `tasks` is significant — results[0..4]
    # are unpacked by index below; keep the two lists in sync.
    try:
        tasks = [
            asyncio.wait_for(get_fear_greed_index(), timeout=10),
            asyncio.wait_for(get_short_interest(data), timeout=10),
            asyncio.wait_for(get_vix_term_structure(), timeout=10),
        ]

        if skip_insider:
            # asyncio.sleep(0) keeps results[3] aligned; it yields None, which
            # fails the isinstance(..., tuple) check below and is ignored.
            tasks.append(asyncio.sleep(0))  # Placeholder - returns None
            if verbose:
                print(" Skipping insider trading analysis (--no-insider)", file=sys.stderr)
        else:
            tasks.append(asyncio.wait_for(get_insider_activity(data.ticker, period_days=90), timeout=10))

        tasks.append(asyncio.wait_for(get_put_call_ratio(data), timeout=10))

        # return_exceptions=True: a failed/timed-out indicator yields an
        # Exception object in its slot instead of aborting the whole gather.
        results = await asyncio.gather(*tasks, return_exceptions=True)

        # Process Fear & Greed Index (results[0]); a tuple means success.
        fear_greed_result = results[0]
        if isinstance(fear_greed_result, tuple) and fear_greed_result is not None:
            fear_greed_score, fear_greed_value, fear_greed_status = fear_greed_result
            scores.append(fear_greed_score)
            explanations.append(f"{fear_greed_status} ({fear_greed_value})")
            if verbose:
                print(f" Fear & Greed: {fear_greed_status} ({fear_greed_value}) → score {fear_greed_score:+.2f}", file=sys.stderr)
        elif verbose and isinstance(fear_greed_result, Exception):
            print(f" Fear & Greed: Failed ({fear_greed_result})", file=sys.stderr)

        # Process Short Interest (results[1])
        short_interest_result = results[1]
        if isinstance(short_interest_result, tuple) and short_interest_result is not None:
            short_interest_score, short_interest_pct, days_to_cover = short_interest_result
            scores.append(short_interest_score)
            if days_to_cover:
                explanations.append(f"Short interest {short_interest_pct:.1f}% (days to cover: {days_to_cover:.1f})")
            else:
                explanations.append(f"Short interest {short_interest_pct:.1f}%")
            warnings.append("Short interest data typically ~2 weeks old (FINRA lag)")
            if verbose:
                print(f" Short Interest: {short_interest_pct:.1f}% → score {short_interest_score:+.2f}", file=sys.stderr)
        elif verbose and isinstance(short_interest_result, Exception):
            print(f" Short Interest: Failed ({short_interest_result})", file=sys.stderr)

        # Process VIX Term Structure (results[2])
        vix_result = results[2]
        if isinstance(vix_result, tuple) and vix_result is not None:
            vix_structure_score, vix_structure, vix_slope = vix_result
            scores.append(vix_structure_score)
            explanations.append(f"VIX {vix_structure}")
            if verbose:
                print(f" VIX Structure: {vix_structure} (slope {vix_slope:.1f}%) → score {vix_structure_score:+.2f}", file=sys.stderr)
        elif verbose and isinstance(vix_result, Exception):
            print(f" VIX Structure: Failed ({vix_result})", file=sys.stderr)

        # Process Insider Activity (results[3]; None here when skip_insider)
        insider_result = results[3]
        if isinstance(insider_result, tuple) and insider_result is not None:
            insider_activity_score, insider_net_shares, insider_net_value = insider_result
            scores.append(insider_activity_score)
            if insider_net_value:
                explanations.append(f"Insider net: ${insider_net_value:.1f}M")
            warnings.append("Insider trades may lag filing by 2-3 days")
            if verbose:
                print(f" Insider Activity: Net ${insider_net_value:.1f}M → score {insider_activity_score:+.2f}", file=sys.stderr)
        elif verbose and isinstance(insider_result, Exception):
            print(f" Insider Activity: Failed ({insider_result})", file=sys.stderr)

        # Process Put/Call Ratio (results[4])
        put_call_result = results[4]
        if isinstance(put_call_result, tuple) and put_call_result is not None:
            put_call_score, put_call_ratio, put_volume, call_volume = put_call_result
            scores.append(put_call_score)
            explanations.append(f"Put/call ratio {put_call_ratio:.2f}")
            if verbose:
                print(f" Put/Call Ratio: {put_call_ratio:.2f} → score {put_call_score:+.2f}", file=sys.stderr)
        elif verbose and isinstance(put_call_result, Exception):
            print(f" Put/Call Ratio: Failed ({put_call_result})", file=sys.stderr)

    except Exception as e:
        if verbose:
            print(f" Sentiment analysis error: {e}", file=sys.stderr)
        return None

    # Require at least 2 of 5 indicators for valid sentiment
    indicators_available = len(scores)
    if indicators_available < 2:
        if verbose:
            print(f" Sentiment: Insufficient data ({indicators_available}/5 indicators)", file=sys.stderr)
        return None

    # Calculate overall score as simple average (each indicator weighted equally)
    overall_score = sum(scores) / len(scores)
    explanation = "; ".join(explanations)

    return SentimentAnalysis(
        score=overall_score,
        explanation=explanation,
        fear_greed_score=fear_greed_score,
        short_interest_score=short_interest_score,
        vix_structure_score=vix_structure_score,
        insider_activity_score=insider_activity_score,
        put_call_score=put_call_score,
        fear_greed_value=fear_greed_value,
        fear_greed_status=fear_greed_status,
        short_interest_pct=short_interest_pct,
        days_to_cover=days_to_cover,
        vix_structure=vix_structure,
        vix_slope=vix_slope,
        insider_net_shares=insider_net_shares,
        insider_net_value=insider_net_value,
        put_call_ratio=put_call_ratio,
        put_volume=put_volume,
        call_volume=call_volume,
        indicators_available=indicators_available,
        data_freshness_warnings=warnings if warnings else None,
    )
|
|
1766
|
+
|
|
1767
|
+
|
|
1768
|
+
def synthesize_signal(
    ticker: str,
    company_name: str,
    earnings: EarningsSurprise | None,
    fundamentals: Fundamentals | None,
    analysts: AnalystSentiment | None,
    historical: HistoricalPatterns | None,
    market_context: MarketContext | None,
    sector: SectorComparison | None,
    earnings_timing: EarningsTiming | None,
    momentum: MomentumAnalysis | None,
    sentiment: SentimentAnalysis | None,
    breaking_news: list[str] | None = None,  # NEW v4.0.0
    geopolitical_risk_warning: str | None = None,  # NEW v4.0.0
    geopolitical_risk_penalty: float = 0.0,  # NEW v4.0.0
) -> Signal:
    """Synthesize all components into a final signal.

    Each available component contributes its score with a fixed weight; the
    weights of the components actually present are re-normalized to sum to 1.
    The weighted score maps to BUY (> 0.33), SELL (< -0.33), or HOLD, with
    confidence = |score|, then a series of overrides (earnings timing,
    overbought momentum, risk-off regime, geopolitical penalty) may downgrade
    a BUY or shave confidence. Fewer than 2 components yields a neutral HOLD.

    Returns:
        A Signal carrying recommendation, confidence, supporting points,
        caveats (max 5 each), and a per-component details dict.
    """

    # Collect available components with weights (parallel lists; kept in sync)
    components = []
    weights = []

    if earnings:
        components.append(("earnings", earnings.score))
        weights.append(0.30)  # reduced from 0.35
    if fundamentals:
        components.append(("fundamentals", fundamentals.score))
        weights.append(0.20)  # reduced from 0.25
    if analysts and analysts.score is not None:
        components.append(("analysts", analysts.score))
        weights.append(0.20)  # reduced from 0.25
    if historical:
        components.append(("historical", historical.score))
        weights.append(0.10)  # reduced from 0.15

    # NEW COMPONENTS
    if market_context:
        components.append(("market", market_context.score))
        weights.append(0.10)
    if sector:
        components.append(("sector", sector.score))
        weights.append(0.15)
    if momentum:
        components.append(("momentum", momentum.score))
        weights.append(0.15)
    if sentiment:
        components.append(("sentiment", sentiment.score))
        weights.append(0.10)

    # Require at least 2 components; otherwise return a neutral HOLD signal
    if len(components) < 2:
        return Signal(
            ticker=ticker,
            company_name=company_name,
            recommendation="HOLD",
            confidence=0.0,
            final_score=0.0,
            supporting_points=["Insufficient data for analysis"],
            caveats=["Limited data available"],
            timestamp=datetime.now().isoformat(),
            components={},
        )

    # Normalize weights so the present components' weights sum to 1.0
    total_weight = sum(weights)
    normalized_weights = [w / total_weight for w in weights]

    # Calculate weighted score
    final_score = sum(score * weight for (_, score), weight in zip(components, normalized_weights))

    # Determine recommendation from the score thresholds (±0.33)
    if final_score > 0.33:
        recommendation = "BUY"
    elif final_score < -0.33:
        recommendation = "SELL"
    else:
        recommendation = "HOLD"

    confidence = abs(final_score)

    # Apply earnings timing adjustments and overrides
    # NOTE(review): a positive confidence_adjustment can push confidence above
    # 1.0 — confirm downstream display handles >100% as intended.
    if earnings_timing:
        confidence *= (1.0 + earnings_timing.confidence_adjustment)

        # Override recommendation if needed: avoid fresh BUYs right before
        # earnings, or right after a >15% 5-day post-earnings pop.
        if earnings_timing.timing_flag == "pre_earnings":
            if recommendation == "BUY":
                recommendation = "HOLD"
        elif earnings_timing.timing_flag == "post_earnings":
            if earnings_timing.price_change_5d and earnings_timing.price_change_5d > 15:
                if recommendation == "BUY":
                    recommendation = "HOLD"

    # Check overbought + near 52w high: downgrade BUY and shave confidence
    if momentum and momentum.rsi_14d and momentum.rsi_14d > 70 and momentum.near_52w_high:
        if recommendation == "BUY":
            recommendation = "HOLD"
            confidence *= 0.7

    # NEW v4.0.0: Risk-off confidence penalty
    if market_context and market_context.risk_off_detected:
        if recommendation == "BUY":
            confidence *= 0.7  # Reduce BUY confidence by 30%

    # NEW v4.0.0: Geopolitical sector risk penalty
    if geopolitical_risk_penalty > 0:
        if recommendation == "BUY":
            confidence *= (1.0 - geopolitical_risk_penalty)  # Apply penalty

    # Generate supporting points (one line per component that has something to say)
    supporting_points = []

    if earnings and earnings.actual_eps is not None:
        supporting_points.append(
            f"{earnings.explanation} - EPS ${earnings.actual_eps:.2f} vs ${earnings.expected_eps:.2f} expected"
        )
    if fundamentals and fundamentals.explanation:
        supporting_points.append(fundamentals.explanation)
    if analysts and analysts.summary:
        supporting_points.append(f"Analyst consensus: {analysts.summary}")
    if historical and historical.pattern_desc:
        supporting_points.append(f"Historical pattern: {historical.pattern_desc}")
    if market_context and market_context.explanation:
        supporting_points.append(f"Market: {market_context.explanation}")
    if sector and sector.explanation:
        supporting_points.append(f"Sector: {sector.explanation}")
    if momentum and momentum.explanation:
        supporting_points.append(f"Momentum: {momentum.explanation}")
    if sentiment and sentiment.explanation:
        supporting_points.append(f"Sentiment: {sentiment.explanation}")

    # Generate caveats (ordered by importance; truncated to 5 below)
    caveats = []

    # Add earnings timing caveats first (most important)
    if earnings_timing and earnings_timing.caveats:
        caveats.extend(earnings_timing.caveats)

    # Add sentiment warnings
    if sentiment and sentiment.data_freshness_warnings:
        caveats.extend(sentiment.data_freshness_warnings)

    # Add momentum warnings
    if momentum and momentum.rsi_14d:
        if momentum.rsi_14d > 70 and momentum.near_52w_high:
            caveats.append("Overbought conditions - high risk entry")

    # Add sector warnings
    if sector and sector.score < -0.2:
        caveats.append(f"Sector {sector.sector_name} is weak despite stock fundamentals")

    # Add market warnings
    if market_context and market_context.vix_status == "fear":
        caveats.append(f"High market volatility (VIX {market_context.vix_level:.0f})")

    # NEW v4.0.0: Risk-off warnings
    if market_context and market_context.risk_off_detected:
        caveats.append(f"🛡️ RISK-OFF MODE: Flight to safety detected (GLD {market_context.gld_change_5d:+.1f}%, TLT {market_context.tlt_change_5d:+.1f}%, UUP {market_context.uup_change_5d:+.1f}%)")

    # NEW v4.0.0: Breaking news alerts
    if breaking_news:
        for alert in breaking_news[:2]:  # Limit to 2 alerts to avoid overwhelming
            caveats.append(f"⚠️ BREAKING NEWS: {alert}")

    # NEW v4.0.0: Geopolitical sector risk warnings
    if geopolitical_risk_warning:
        caveats.append(geopolitical_risk_warning)

    # Original caveats
    if not analysts or analysts.score is None:
        caveats.append("Limited or no analyst coverage")
    if not earnings:
        caveats.append("No recent earnings data available")
    if len(components) < 4:
        caveats.append("Analysis based on limited data components")
    if not caveats:
        caveats.append("Market conditions can change rapidly")

    # Limit to 5 caveats
    caveats = caveats[:5]

    # Build components dict for output (raw per-component detail for JSON mode)
    components_dict = {}
    if earnings:
        components_dict["earnings_surprise"] = {
            "score": earnings.score,
            "actual_eps": earnings.actual_eps,
            "expected_eps": earnings.expected_eps,
            "surprise_pct": earnings.surprise_pct,
            "explanation": earnings.explanation,
        }

    if fundamentals:
        components_dict["fundamentals"] = {
            "score": fundamentals.score,
            **fundamentals.key_metrics,
        }

    if analysts:
        components_dict["analyst_sentiment"] = {
            "score": analysts.score,
            "consensus_rating": analysts.consensus_rating,
            "price_target": analysts.price_target,
            "current_price": analysts.current_price,
            "upside_pct": analysts.upside_pct,
            "num_analysts": analysts.num_analysts,
        }

    if historical:
        components_dict["historical_patterns"] = {
            "score": historical.score,
            "beats_last_4q": historical.beats_last_4q,
            "avg_reaction_pct": historical.avg_reaction_pct,
        }

    if market_context:
        components_dict["market_context"] = {
            "score": market_context.score,
            "vix_level": market_context.vix_level,
            "vix_status": market_context.vix_status,
            "spy_trend_10d": market_context.spy_trend_10d,
            "qqq_trend_10d": market_context.qqq_trend_10d,
            "market_regime": market_context.market_regime,
            "gld_change_5d": market_context.gld_change_5d,
            "tlt_change_5d": market_context.tlt_change_5d,
            "uup_change_5d": market_context.uup_change_5d,
            "risk_off_detected": market_context.risk_off_detected,
        }

    if sector:
        components_dict["sector_performance"] = {
            "score": sector.score,
            "sector_name": sector.sector_name,
            "stock_return_1m": sector.stock_return_1m,
            "sector_return_1m": sector.sector_return_1m,
            "relative_strength": sector.relative_strength,
            "sector_trend": sector.sector_trend,
        }

    if earnings_timing:
        components_dict["earnings_timing"] = {
            "days_until_earnings": earnings_timing.days_until_earnings,
            "days_since_earnings": earnings_timing.days_since_earnings,
            "timing_flag": earnings_timing.timing_flag,
            "price_change_5d": earnings_timing.price_change_5d,
            "confidence_adjustment": earnings_timing.confidence_adjustment,
        }

    if momentum:
        components_dict["momentum"] = {
            "score": momentum.score,
            "rsi_14d": momentum.rsi_14d,
            "rsi_status": momentum.rsi_status,
            "near_52w_high": momentum.near_52w_high,
            "near_52w_low": momentum.near_52w_low,
            "volume_ratio": momentum.volume_ratio,
        }

    if sentiment:
        components_dict["sentiment_analysis"] = {
            "score": sentiment.score,
            "indicators_available": sentiment.indicators_available,
            "fear_greed_value": sentiment.fear_greed_value,
            "fear_greed_status": sentiment.fear_greed_status,
            "short_interest_pct": sentiment.short_interest_pct,
            "days_to_cover": sentiment.days_to_cover,
            "vix_structure": sentiment.vix_structure,
            "vix_slope": sentiment.vix_slope,
            "insider_net_value": sentiment.insider_net_value,
            "put_call_ratio": sentiment.put_call_ratio,
            "data_freshness_warnings": sentiment.data_freshness_warnings,
        }

    return Signal(
        ticker=ticker,
        company_name=company_name,
        recommendation=recommendation,
        confidence=confidence,
        final_score=final_score,
        supporting_points=supporting_points[:5],  # Limit to 5
        caveats=caveats,  # Already limited to 5 earlier
        timestamp=datetime.now().isoformat(),
        components=components_dict,
    )
|
|
2069
|
+
|
|
2070
|
+
|
|
2071
|
+
def format_output_text(signal: Signal) -> str:
    """Render *signal* as a human-readable plain-text report.

    Layout: banner header, recommendation with percent confidence, bulleted
    supporting points and caveats, then a fixed legal disclaimer footer.
    """
    rule = "=" * 77

    out = [
        rule,
        f"STOCK ANALYSIS: {signal.ticker} ({signal.company_name})",
        f"Generated: {signal.timestamp}",
        rule,
        "",
        f"RECOMMENDATION: {signal.recommendation} (Confidence: {signal.confidence*100:.0f}%)",
        "",
        "SUPPORTING POINTS:",
    ]

    out.extend(f"• {point}" for point in signal.supporting_points)

    out.append("")
    out.append("CAVEATS:")
    out.extend(f"• {caveat}" for caveat in signal.caveats)

    # Fixed disclaimer footer, framed by the same banner rule.
    out.extend([
        "",
        rule,
        "DISCLAIMER: This analysis is for informational purposes only and does NOT",
        "constitute financial advice. Consult a licensed financial advisor before",
        "making investment decisions. Data provided by Yahoo Finance.",
        rule,
    ])

    return "\n".join(out)
|
|
2105
|
+
|
|
2106
|
+
|
|
2107
|
+
def format_output_json(signal: Signal) -> str:
    """Serialize *signal* to pretty-printed JSON.

    The dataclass is flattened via asdict() and a fixed "disclaimer" field is
    appended before dumping with 2-space indentation.
    """
    payload = asdict(signal)
    payload["disclaimer"] = "NOT FINANCIAL ADVICE. For informational purposes only."
    return json.dumps(payload, indent=2)
|
|
2114
|
+
|
|
2115
|
+
|
|
2116
|
+
def main() -> None:
    """CLI entry point.

    Parses arguments, optionally expands a named portfolio into tickers,
    checks market-wide breaking news once, analyzes each ticker (with a
    crypto-specific reduced pipeline when applicable), synthesizes a Signal
    per ticker, and prints text or JSON output. Exits 1 on usage/portfolio
    errors and 2 on an invalid ticker.
    """
    parser = argparse.ArgumentParser(
        description="Analyze stocks using Yahoo Finance data"
    )
    parser.add_argument(
        "tickers",
        nargs="*",
        help="Stock/crypto ticker(s) to analyze"
    )
    parser.add_argument(
        "--output",
        choices=["text", "json"],
        default="text",
        help="Output format (default: text)"
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        help="Verbose output to stderr"
    )
    parser.add_argument(
        "--portfolio", "-p",
        type=str,
        help="Analyze all assets in a portfolio"
    )
    parser.add_argument(
        "--period",
        choices=["daily", "weekly", "monthly", "quarterly", "yearly"],
        help="Period for portfolio performance analysis"
    )
    parser.add_argument(
        "--no-insider",
        action="store_true",
        help="Skip insider trading analysis (faster, SEC EDGAR is slow)"
    )
    parser.add_argument(
        "--fast",
        action="store_true",
        help="Fast mode: skip slow analyses (insider, breaking news)"
    )

    args = parser.parse_args()

    # Fast mode shortcuts: --fast implies --no-insider
    if args.fast:
        args.no_insider = True

    # Handle portfolio mode: expand the named portfolio into args.tickers
    portfolio_assets = []
    portfolio_name = None
    if args.portfolio:
        try:
            # Lazy import so the script works without portfolio.py present
            from portfolio import PortfolioStore
            store = PortfolioStore()
            portfolio = store.get_portfolio(args.portfolio)
            if not portfolio:
                # Try to find default portfolio if name not found
                default_name = store.get_default_portfolio_name()
                if default_name and args.portfolio.lower() == "default":
                    portfolio = store.get_portfolio(default_name)
                    portfolio_name = default_name
                else:
                    print(f"Error: Portfolio '{args.portfolio}' not found", file=sys.stderr)
                    sys.exit(1)
            else:
                portfolio_name = portfolio.name

            if not portfolio.assets:
                print(f"Portfolio '{portfolio_name}' has no assets", file=sys.stderr)
                sys.exit(1)

            # Keep (ticker, quantity, cost_basis, type) for the summary step
            portfolio_assets = [(a.ticker, a.quantity, a.cost_basis, a.type) for a in portfolio.assets]
            args.tickers = [a.ticker for a in portfolio.assets]

            if args.verbose:
                print(f"Analyzing portfolio: {portfolio_name} ({len(portfolio_assets)} assets)", file=sys.stderr)

        except ImportError:
            print("Error: portfolio.py not found", file=sys.stderr)
            sys.exit(1)
        except Exception as e:
            print(f"Error loading portfolio: {e}", file=sys.stderr)
            sys.exit(1)

    if not args.tickers:
        parser.print_help()
        sys.exit(1)

    # NEW v4.0.0: Check for breaking news (market-wide, check once before analyzing tickers)
    # Check breaking news (skip in fast mode)
    breaking_news = None
    if not args.fast:
        if args.verbose:
            print(f"Checking breaking news (last 24h)...", file=sys.stderr)
        breaking_news = check_breaking_news(verbose=args.verbose)
    elif args.verbose:
        print(f"Skipping breaking news check (--fast mode)", file=sys.stderr)
    if breaking_news and args.verbose:
        print(f" Found {len(breaking_news)} breaking news alert(s)\n", file=sys.stderr)

    results = []

    for ticker in args.tickers:
        ticker = ticker.upper()

        if args.verbose:
            print(f"\n=== Analyzing {ticker} ===\n", file=sys.stderr)

        # Fetch data; an unknown ticker aborts the whole run with exit code 2
        data = fetch_stock_data(ticker, verbose=args.verbose)

        if data is None:
            print(f"Error: Invalid ticker '{ticker}' or data unavailable", file=sys.stderr)
            sys.exit(2)

        # Get company name (fall back to the raw ticker symbol)
        company_name = data.info.get("longName") or data.info.get("shortName") or ticker

        # Detect asset type (crypto vs stock)
        is_crypto = data.asset_type == "crypto"

        if args.verbose and is_crypto:
            print(f" Asset type: CRYPTO (using crypto-specific analysis)", file=sys.stderr)

        # Analyze components (different for crypto vs stock)
        if is_crypto:
            # Crypto: Skip stock-specific analyses
            earnings = None
            fundamentals = None
            analysts = None
            historical = None
            earnings_timing = None
            sector = None

            # Crypto fundamentals (market cap, category, BTC correlation)
            if args.verbose:
                print(f"Analyzing crypto fundamentals...", file=sys.stderr)
            crypto_fundamentals = analyze_crypto_fundamentals(data, verbose=args.verbose)

            # Convert crypto fundamentals to regular Fundamentals for synthesize_signal
            if crypto_fundamentals:
                fundamentals = Fundamentals(
                    score=crypto_fundamentals.score,
                    key_metrics={
                        "market_cap": crypto_fundamentals.market_cap,
                        "market_cap_rank": crypto_fundamentals.market_cap_rank,
                        "category": crypto_fundamentals.category,
                        "btc_correlation": crypto_fundamentals.btc_correlation,
                    },
                    explanation=crypto_fundamentals.explanation,
                )
        else:
            # Stock: Full analysis
            earnings = analyze_earnings_surprise(data)
            fundamentals = analyze_fundamentals(data)
            analysts = analyze_analyst_sentiment(data)
            historical = analyze_historical_patterns(data)

            # Analyze earnings timing (stocks only)
            if args.verbose:
                print(f"Checking earnings timing...", file=sys.stderr)
            earnings_timing = analyze_earnings_timing(data)

            # Analyze sector performance (stocks only)
            if args.verbose:
                print(f"Analyzing sector performance...", file=sys.stderr)
            sector = analyze_sector_performance(data, verbose=args.verbose)

        # Market context (both crypto and stock)
        if args.verbose:
            print(f"Analyzing market context...", file=sys.stderr)
        market_context = analyze_market_context(verbose=args.verbose)

        # Momentum (both crypto and stock)
        if args.verbose:
            print(f"Analyzing momentum...", file=sys.stderr)
        momentum = analyze_momentum(data)

        # Sentiment (stocks get full sentiment, crypto gets limited)
        if args.verbose:
            print(f"Analyzing market sentiment...", file=sys.stderr)
        if is_crypto:
            # Skip insider trading and put/call for crypto
            sentiment = None
        else:
            sentiment = asyncio.run(analyze_sentiment(data, verbose=args.verbose, skip_insider=args.no_insider))

        # Geopolitical risks (stocks only)
        if is_crypto:
            geopolitical_risk_warning = None
            geopolitical_risk_penalty = 0.0
        else:
            sector_name = data.info.get("sector")
            geopolitical_risk_warning, geopolitical_risk_penalty = check_sector_geopolitical_risk(
                ticker=ticker,
                sector=sector_name,
                breaking_news=breaking_news,
                verbose=args.verbose
            )

        if args.verbose:
            # Availability checklist for each analysis component
            print(f"Components analyzed:", file=sys.stderr)
            if is_crypto:
                print(f" Crypto Fundamentals: {'✓' if fundamentals else '✗'}", file=sys.stderr)
                print(f" Market Context: {'✓' if market_context else '✗'}", file=sys.stderr)
                print(f" Momentum: {'✓' if momentum else '✗'}", file=sys.stderr)
                print(f" (Earnings, Sector, Sentiment: N/A for crypto)\n", file=sys.stderr)
            else:
                print(f" Earnings: {'✓' if earnings else '✗'}", file=sys.stderr)
                print(f" Fundamentals: {'✓' if fundamentals else '✗'}", file=sys.stderr)
                print(f" Analysts: {'✓' if analysts and analysts.score else '✗'}", file=sys.stderr)
                print(f" Historical: {'✓' if historical else '✗'}", file=sys.stderr)
                print(f" Market Context: {'✓' if market_context else '✗'}", file=sys.stderr)
                print(f" Sector: {'✓' if sector else '✗'}", file=sys.stderr)
                print(f" Earnings Timing: {'✓' if earnings_timing else '✗'}", file=sys.stderr)
                print(f" Momentum: {'✓' if momentum else '✗'}", file=sys.stderr)
                print(f" Sentiment: {'✓' if sentiment else '✗'}\n", file=sys.stderr)

        # Synthesize signal from all collected components
        signal = synthesize_signal(
            ticker=ticker,
            company_name=company_name,
            earnings=earnings,
            fundamentals=fundamentals,
            analysts=analysts,
            historical=historical,
            market_context=market_context,  # NEW
            sector=sector,  # NEW
            earnings_timing=earnings_timing,  # NEW
            momentum=momentum,  # NEW
            sentiment=sentiment,  # NEW
            breaking_news=breaking_news,  # NEW v4.0.0
            geopolitical_risk_warning=geopolitical_risk_warning,  # NEW v4.0.0
            geopolitical_risk_penalty=geopolitical_risk_penalty,  # NEW v4.0.0
        )

        results.append(signal)

    # Output results
    if args.output == "json":
        if len(results) == 1:
            print(format_output_json(results[0]))
        else:
            output_data = [asdict(r) for r in results]
            # Add portfolio summary if in portfolio mode
            if portfolio_assets:
                portfolio_summary = generate_portfolio_summary(
                    results, portfolio_assets, portfolio_name, args.period
                )
                output_data = {
                    "portfolio": portfolio_name,
                    "assets": output_data,
                    "summary": portfolio_summary,
                }
            print(json.dumps(output_data, indent=2))
    else:
        for i, signal in enumerate(results):
            if i > 0:
                print("\n")
            print(format_output_text(signal))

        # Print portfolio summary if in portfolio mode
        if portfolio_assets:
            print_portfolio_summary(results, portfolio_assets, portfolio_name, args.period)
|
|
2380
|
+
|
|
2381
|
+
|
|
2382
|
+
def generate_portfolio_summary(
    results: list,
    portfolio_assets: list[tuple[str, float, float, str]],
    portfolio_name: str,
    period: str | None = None,
) -> dict:
    """Build a summary dict for a portfolio: cost, value, P&L, concentration.

    Args:
        results: Analysis signal objects. Kept for interface compatibility;
            not consumed here (the text printer tallies recommendations itself).
        portfolio_assets: Tuples of (ticker, quantity, cost_basis, asset_type).
        portfolio_name: Display name of the portfolio.
        period: Optional period keyword ("daily", "weekly", "monthly",
            "quarterly", "yearly") selecting a trailing-return window.

    Returns:
        Dict with total cost/value, P&L (absolute and percent), asset count,
        optional concentration warnings, and — when computable — the period
        return percentage under "period_return_pct".
    """
    total_cost = 0.0
    total_value = 0.0
    asset_values: list[tuple[str, float, float, str]] = []

    for ticker, quantity, cost_basis, asset_type in portfolio_assets:
        cost_total = quantity * cost_basis
        total_cost += cost_total

        # Fetch the live price; on any failure record the asset with a zero
        # current value so its cost basis still counts toward the totals.
        try:
            stock = yf.Ticker(ticker)
            current_price = stock.info.get("regularMarketPrice", 0) or 0
            current_value = quantity * current_price
            total_value += current_value
            asset_values.append((ticker, current_value, cost_total, asset_type))
        except Exception:
            asset_values.append((ticker, 0, cost_total, asset_type))

    # Trailing return over the requested period (needs at least one priced asset).
    period_return = None
    if period and total_value > 0:
        period_days = {
            "daily": 1,
            "weekly": 7,
            "monthly": 30,
            "quarterly": 90,
            "yearly": 365,
        }.get(period, 30)

        period_return = calculate_portfolio_period_return(portfolio_assets, period_days)

    # Concentration analysis: flag any position exceeding 30% of total value.
    concentrations = []
    if total_value > 0:
        for ticker, value, _, _ in asset_values:
            if value > 0:
                pct = value / total_value * 100
                if pct > 30:
                    concentrations.append(f"{ticker}: {pct:.1f}%")

    # Build summary (guard against division by zero on an all-zero cost basis).
    total_pnl = total_value - total_cost
    total_pnl_pct = (total_pnl / total_cost * 100) if total_cost > 0 else 0

    summary = {
        "portfolio_name": portfolio_name,
        "total_cost": total_cost,
        "total_value": total_value,
        "total_pnl": total_pnl,
        "total_pnl_pct": total_pnl_pct,
        "asset_count": len(portfolio_assets),
        "concentration_warnings": concentrations if concentrations else None,
    }

    if period_return is not None:
        summary["period"] = period
        summary["period_return_pct"] = period_return

    return summary
|
|
2452
|
+
|
|
2453
|
+
|
|
2454
|
+
def calculate_portfolio_period_return(
    portfolio_assets: list[tuple[str, float, float, str]],
    period_days: int,
) -> float | None:
    """Compute the portfolio's percent return over roughly ``period_days``.

    Sums each holding's value at the start and end of the history window
    (a few extra days are requested to cover non-trading days) and returns
    the aggregate percentage change, or ``None`` when no usable history is
    available or any lookup fails.
    """
    start_total = 0.0
    end_total = 0.0
    try:
        for ticker, quantity, _cost, _kind in portfolio_assets:
            history = yf.Ticker(ticker).history(period=f"{period_days + 5}d")

            # Need at least two data points to measure a change.
            if history.empty or len(history) < 2:
                continue

            closes = history["Close"]
            end_total += quantity * closes.iloc[-1]
            start_total += quantity * closes.iloc[0]

        if start_total > 0:
            return (end_total - start_total) / start_total * 100
    except Exception:
        # Best-effort: any data failure means "no period return available".
        pass

    return None
|
|
2484
|
+
|
|
2485
|
+
|
|
2486
|
+
def print_portfolio_summary(
    results: list,
    portfolio_assets: list[tuple[str, float, float, str]],
    portfolio_name: str,
    period: str | None = None,
) -> None:
    """Render the portfolio summary (totals, P&L, warnings) to stdout."""
    summary = generate_portfolio_summary(results, portfolio_assets, portfolio_name, period)
    rule = "=" * 77

    print("\n" + rule)
    print(f"PORTFOLIO SUMMARY: {portfolio_name}")
    print(rule)

    # Value overview.
    cost = summary["total_cost"]
    value = summary["total_value"]
    pnl = summary["total_pnl"]
    pnl_pct = summary["total_pnl_pct"]
    sign = "+" if pnl >= 0 else ""

    print(f"\nTotal Cost: ${cost:,.2f}")
    print(f"Current Value: ${value:,.2f}")
    print(f"Total P&L: {sign}${pnl:,.2f} ({sign}{pnl_pct:.1f}%)")

    # Trailing period return, when it was computed.
    if "period_return_pct" in summary:
        ret = summary["period_return_pct"]
        ret_sign = "+" if ret >= 0 else ""
        print(f"{summary['period'].capitalize()} Return: {ret_sign}{ret:.1f}%")

    # Concentration warnings (positions over 30% of portfolio value).
    if summary.get("concentration_warnings"):
        print("\n⚠️ CONCENTRATION WARNINGS:")
        for warning in summary["concentration_warnings"]:
            print(f"  • {warning} (>30% of portfolio)")

    # Tally recommendations across all analyzed assets.
    counts = {"BUY": 0, "HOLD": 0, "SELL": 0}
    for item in results:
        counts[item.recommendation] = counts.get(item.recommendation, 0) + 1

    print(f"\nRECOMMENDATIONS: {counts['BUY']} BUY | {counts['HOLD']} HOLD | {counts['SELL']} SELL")
    print(rule)
|
|
2529
|
+
|
|
2530
|
+
|
|
2531
|
+
# Script entry point: run the CLI analysis when executed directly.
if __name__ == "__main__":
    main()
|