quantalogic 0.33.4__py3-none-any.whl → 0.40.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quantalogic/__init__.py +0 -4
- quantalogic/agent.py +603 -362
- quantalogic/agent_config.py +260 -28
- quantalogic/agent_factory.py +43 -17
- quantalogic/coding_agent.py +20 -12
- quantalogic/config.py +7 -4
- quantalogic/console_print_events.py +4 -8
- quantalogic/console_print_token.py +2 -2
- quantalogic/docs_cli.py +15 -10
- quantalogic/event_emitter.py +258 -83
- quantalogic/flow/__init__.py +23 -0
- quantalogic/flow/flow.py +595 -0
- quantalogic/flow/flow_extractor.py +672 -0
- quantalogic/flow/flow_generator.py +89 -0
- quantalogic/flow/flow_manager.py +407 -0
- quantalogic/flow/flow_manager_schema.py +169 -0
- quantalogic/flow/flow_yaml.md +419 -0
- quantalogic/generative_model.py +109 -77
- quantalogic/get_model_info.py +6 -6
- quantalogic/interactive_text_editor.py +100 -73
- quantalogic/main.py +36 -23
- quantalogic/model_info_list.py +12 -0
- quantalogic/model_info_litellm.py +14 -14
- quantalogic/prompts.py +2 -1
- quantalogic/{llm.py → quantlitellm.py} +29 -39
- quantalogic/search_agent.py +4 -4
- quantalogic/server/models.py +4 -1
- quantalogic/task_file_reader.py +5 -5
- quantalogic/task_runner.py +21 -20
- quantalogic/tool_manager.py +10 -21
- quantalogic/tools/__init__.py +98 -68
- quantalogic/tools/composio/composio.py +416 -0
- quantalogic/tools/{generate_database_report_tool.py → database/generate_database_report_tool.py} +4 -9
- quantalogic/tools/database/sql_query_tool_advanced.py +261 -0
- quantalogic/tools/document_tools/markdown_to_docx_tool.py +620 -0
- quantalogic/tools/document_tools/markdown_to_epub_tool.py +438 -0
- quantalogic/tools/document_tools/markdown_to_html_tool.py +362 -0
- quantalogic/tools/document_tools/markdown_to_ipynb_tool.py +319 -0
- quantalogic/tools/document_tools/markdown_to_latex_tool.py +420 -0
- quantalogic/tools/document_tools/markdown_to_pdf_tool.py +623 -0
- quantalogic/tools/document_tools/markdown_to_pptx_tool.py +319 -0
- quantalogic/tools/duckduckgo_search_tool.py +2 -4
- quantalogic/tools/finance/alpha_vantage_tool.py +440 -0
- quantalogic/tools/finance/ccxt_tool.py +373 -0
- quantalogic/tools/finance/finance_llm_tool.py +387 -0
- quantalogic/tools/finance/google_finance.py +192 -0
- quantalogic/tools/finance/market_intelligence_tool.py +520 -0
- quantalogic/tools/finance/technical_analysis_tool.py +491 -0
- quantalogic/tools/finance/tradingview_tool.py +336 -0
- quantalogic/tools/finance/yahoo_finance.py +236 -0
- quantalogic/tools/git/bitbucket_clone_repo_tool.py +181 -0
- quantalogic/tools/git/bitbucket_operations_tool.py +326 -0
- quantalogic/tools/git/clone_repo_tool.py +189 -0
- quantalogic/tools/git/git_operations_tool.py +532 -0
- quantalogic/tools/google_packages/google_news_tool.py +480 -0
- quantalogic/tools/grep_app_tool.py +123 -186
- quantalogic/tools/{dalle_e.py → image_generation/dalle_e.py} +37 -27
- quantalogic/tools/jinja_tool.py +6 -10
- quantalogic/tools/language_handlers/__init__.py +22 -9
- quantalogic/tools/list_directory_tool.py +131 -42
- quantalogic/tools/llm_tool.py +45 -15
- quantalogic/tools/llm_vision_tool.py +59 -7
- quantalogic/tools/markitdown_tool.py +17 -5
- quantalogic/tools/nasa_packages/models.py +47 -0
- quantalogic/tools/nasa_packages/nasa_apod_tool.py +232 -0
- quantalogic/tools/nasa_packages/nasa_neows_tool.py +147 -0
- quantalogic/tools/nasa_packages/services.py +82 -0
- quantalogic/tools/presentation_tools/presentation_llm_tool.py +396 -0
- quantalogic/tools/product_hunt/product_hunt_tool.py +258 -0
- quantalogic/tools/product_hunt/services.py +63 -0
- quantalogic/tools/rag_tool/__init__.py +48 -0
- quantalogic/tools/rag_tool/document_metadata.py +15 -0
- quantalogic/tools/rag_tool/query_response.py +20 -0
- quantalogic/tools/rag_tool/rag_tool.py +566 -0
- quantalogic/tools/rag_tool/rag_tool_beta.py +264 -0
- quantalogic/tools/read_html_tool.py +24 -38
- quantalogic/tools/replace_in_file_tool.py +10 -10
- quantalogic/tools/safe_python_interpreter_tool.py +10 -24
- quantalogic/tools/search_definition_names.py +2 -2
- quantalogic/tools/sequence_tool.py +14 -23
- quantalogic/tools/sql_query_tool.py +17 -19
- quantalogic/tools/tool.py +39 -15
- quantalogic/tools/unified_diff_tool.py +1 -1
- quantalogic/tools/utilities/csv_processor_tool.py +234 -0
- quantalogic/tools/utilities/download_file_tool.py +179 -0
- quantalogic/tools/utilities/mermaid_validator_tool.py +661 -0
- quantalogic/tools/utils/__init__.py +1 -4
- quantalogic/tools/utils/create_sample_database.py +24 -38
- quantalogic/tools/utils/generate_database_report.py +74 -82
- quantalogic/tools/wikipedia_search_tool.py +17 -21
- quantalogic/utils/ask_user_validation.py +1 -1
- quantalogic/utils/async_utils.py +35 -0
- quantalogic/utils/check_version.py +3 -5
- quantalogic/utils/get_all_models.py +2 -1
- quantalogic/utils/git_ls.py +21 -7
- quantalogic/utils/lm_studio_model_info.py +9 -7
- quantalogic/utils/python_interpreter.py +113 -43
- quantalogic/utils/xml_utility.py +178 -0
- quantalogic/version_check.py +1 -1
- quantalogic/welcome_message.py +7 -7
- quantalogic/xml_parser.py +0 -1
- {quantalogic-0.33.4.dist-info → quantalogic-0.40.0.dist-info}/METADATA +44 -1
- quantalogic-0.40.0.dist-info/RECORD +148 -0
- quantalogic-0.33.4.dist-info/RECORD +0 -102
- {quantalogic-0.33.4.dist-info → quantalogic-0.40.0.dist-info}/LICENSE +0 -0
- {quantalogic-0.33.4.dist-info → quantalogic-0.40.0.dist-info}/WHEEL +0 -0
- {quantalogic-0.33.4.dist-info → quantalogic-0.40.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,336 @@
|
|
1
|
+
import asyncio
|
2
|
+
import json
|
3
|
+
from concurrent.futures import ThreadPoolExecutor
|
4
|
+
from dataclasses import dataclass
|
5
|
+
from pathlib import Path
|
6
|
+
from typing import ClassVar, Dict, Optional
|
7
|
+
|
8
|
+
import pandas as pd
|
9
|
+
import ta
|
10
|
+
from loguru import logger
|
11
|
+
from pydantic import model_validator
|
12
|
+
from tvDatafeed import Interval, TvDatafeed
|
13
|
+
|
14
|
+
from quantalogic.tools import Tool, ToolArgument
|
15
|
+
|
16
|
+
|
17
|
+
@dataclass
class MarketData:
    """Container for market data and the analysis artifacts derived from it.

    Attributes:
        symbol: Ticker symbol (e.g. "AAPL").
        exchange: Exchange the symbol trades on (e.g. "NASDAQ").
        timeframe: Bar interval the data was fetched at (tvDatafeed interval value).
        data: OHLCV price history as a DataFrame.
        indicators: Technical-indicator series keyed by name; filled in lazily.
        patterns: Candlestick-pattern series keyed by name; filled in lazily.
        support_resistance: Support/resistance levels keyed by name; filled in lazily.
    """

    symbol: str
    exchange: str
    timeframe: str
    data: pd.DataFrame
    # Optional[...] (not bare Dict) because these stay None until the
    # corresponding analysis step runs and populates them; the previous
    # `Dict[...] = None` annotations were implicit-Optional, which PEP 484
    # disallows.
    indicators: Optional[Dict[str, pd.Series]] = None
    patterns: Optional[Dict[str, pd.Series]] = None
    support_resistance: Optional[Dict[str, float]] = None
|
27
|
+
|
28
|
+
class TradingViewTool(Tool):
    """Advanced TradingView data retrieval and analysis tool with real-time capabilities.

    Fetches OHLCV history for one or more symbol/exchange pairs concurrently
    via tvDatafeed, then optionally enriches each dataset with technical
    indicators, candlestick patterns, and support/resistance levels.
    """

    name: ClassVar[str] = "tradingview_tool"
    description: ClassVar[str] = "Enhanced financial data and analysis tool using TradingView"

    # Mapping of user-facing interval strings to tvDatafeed Interval values.
    INTERVAL_MAPPING: ClassVar[Dict[str, Interval]] = {
        '1m': Interval.in_1_minute,
        '3m': Interval.in_3_minute,
        '5m': Interval.in_5_minute,
        '15m': Interval.in_15_minute,
        '30m': Interval.in_30_minute,
        '45m': Interval.in_45_minute,
        '1h': Interval.in_1_hour,
        '2h': Interval.in_2_hour,
        '3h': Interval.in_3_hour,
        '4h': Interval.in_4_hour,
        '1d': Interval.in_daily,
        '1w': Interval.in_weekly,
        '1M': Interval.in_monthly
    }

    arguments: ClassVar[list[ToolArgument]] = [
        ToolArgument(
            name="symbols",
            arg_type="string",
            description="Comma-separated list of symbols (e.g., 'AAPL,MSFT,GOOGL')",
            required=True
        ),
        ToolArgument(
            name="exchanges",
            arg_type="string",
            description="Comma-separated list of exchanges (e.g., 'NASDAQ,NYSE')",
            required=True
        ),
        ToolArgument(
            name="interval",
            arg_type="string",
            description="Time interval (1m/3m/5m/15m/30m/1h/2h/4h/1d/1w/1M)",
            required=False,
            default="1h"
        ),
        ToolArgument(
            name="lookback_periods",
            arg_type="string",
            description="Number of periods to look back",
            required=False,
            default="500"
        ),
        ToolArgument(
            name="analysis_types",
            arg_type="string",
            description="Comma-separated analysis types (technical,patterns,volume,all)",
            required=False,
            default="all"
        ),
        ToolArgument(
            name="credentials_path",
            arg_type="string",
            description="Path to credentials file",
            required=False,
            default="config/tradingview_credentials.json"
        )
    ]

    @model_validator(mode='before')
    @classmethod
    def _check_input_fields(cls, values):
        """Pre-validate the raw input fields before model construction.

        Renamed from ``validate_arguments``: the instance method of the same
        name defined at the bottom of this class shadowed this validator in
        the class namespace, silently disabling it. ``@classmethod`` added as
        required for pydantic v2 'before' validators.

        Raises:
            ValueError: If the interval is unknown or the symbol/exchange
                lists contain empty entries.
        """
        try:
            # Interval must be one of the supported mapping keys.
            if 'interval' in values and values['interval'] not in cls.INTERVAL_MAPPING:
                raise ValueError(f"Invalid interval: {values['interval']}")

            # Every comma-separated symbol token must be non-empty.
            if 'symbols' in values:
                symbols = values['symbols'].split(',')
                if not all(symbol.strip() for symbol in symbols):
                    raise ValueError("Invalid symbols format")

            # Every comma-separated exchange token must be non-empty.
            if 'exchanges' in values:
                exchanges = values['exchanges'].split(',')
                if not all(exchange.strip() for exchange in exchanges):
                    raise ValueError("Invalid exchanges format")

            return values
        except Exception as e:
            logger.error(f"Error validating arguments: {e}")
            raise

    def __init__(self, **kwargs):
        """Initialize with a lazily-created TradingView client and a worker pool."""
        super().__init__(**kwargs)
        self.tv: Optional[TvDatafeed] = None  # created on first _initialize_client call
        self.cache = {}  # reserved for response caching (not yet populated)
        # tvDatafeed calls are blocking; execute() offloads them to this pool.
        self.executor = ThreadPoolExecutor(max_workers=4)

    def _initialize_client(self, credentials_path: str) -> None:
        """Create the TvDatafeed client once, authenticated when credentials exist.

        Args:
            credentials_path: Path to a JSON file expected to hold ``username``
                and ``password`` keys; anonymous access is used when missing.
        """
        try:
            if self.tv is None:
                creds_path = Path(credentials_path)
                if creds_path.exists():
                    with open(creds_path) as f:
                        creds = json.load(f)
                    self.tv = TvDatafeed(
                        username=creds.get('username'),
                        password=creds.get('password')
                    )
                else:
                    logger.warning("No credentials found, using anonymous access")
                    self.tv = TvDatafeed()
        except Exception as e:
            logger.error(f"Error initializing TradingView client: {e}")
            raise

    async def _fetch_market_data(
        self,
        symbol: str,
        exchange: str,
        interval: Interval,
        n_bars: int
    ) -> MarketData:
        """Fetch OHLCV history for one symbol without blocking the event loop.

        Args:
            symbol: Ticker symbol.
            exchange: Exchange code.
            interval: tvDatafeed interval.
            n_bars: Number of bars to request.

        Returns:
            A MarketData wrapper around the fetched DataFrame.

        Raises:
            ValueError: If TradingView returns no data.
        """
        try:
            # tv.get_hist is a blocking call; run it in the thread pool.
            df = await asyncio.get_event_loop().run_in_executor(
                self.executor,
                self.tv.get_hist,
                symbol,
                exchange,
                interval,
                n_bars
            )

            if df is None or df.empty:
                raise ValueError(f"No data returned for {symbol} on {exchange}")

            return MarketData(
                symbol=symbol,
                exchange=exchange,
                timeframe=interval.value,
                data=df
            )
        except Exception as e:
            logger.error(f"Error fetching data for {symbol} on {exchange}: {e}")
            raise

    def _calculate_technical_indicators(self, market_data: MarketData) -> None:
        """Populate ``market_data.indicators`` using the ``ta`` library.

        Covers trend (SMA/EMA/MACD/ADX), momentum (RSI/stochastic/Williams %R),
        volatility (Bollinger bands/ATR), and volume (OBV/MFI) indicators.
        """
        df = market_data.data

        try:
            indicators = {}

            # Trend indicators
            indicators['sma_20'] = ta.trend.sma_indicator(df['close'], 20)
            indicators['sma_50'] = ta.trend.sma_indicator(df['close'], 50)
            indicators['sma_200'] = ta.trend.sma_indicator(df['close'], 200)
            indicators['ema_12'] = ta.trend.ema_indicator(df['close'], 12)
            indicators['ema_26'] = ta.trend.ema_indicator(df['close'], 26)
            indicators['macd'] = ta.trend.macd(df['close'])
            indicators['macd_signal'] = ta.trend.macd_signal(df['close'])
            indicators['adx'] = ta.trend.adx(df['high'], df['low'], df['close'])

            # Momentum indicators
            indicators['rsi'] = ta.momentum.rsi(df['close'])
            indicators['stoch'] = ta.momentum.stoch(df['high'], df['low'], df['close'])
            indicators['stoch_signal'] = ta.momentum.stoch_signal(df['high'], df['low'], df['close'])
            indicators['williams_r'] = ta.momentum.williams_r(df['high'], df['low'], df['close'])

            # Volatility indicators
            indicators['bbands_upper'] = ta.volatility.bollinger_hband(df['close'])
            indicators['bbands_lower'] = ta.volatility.bollinger_lband(df['close'])
            indicators['atr'] = ta.volatility.average_true_range(df['high'], df['low'], df['close'])

            # Volume indicators
            indicators['obv'] = ta.volume.on_balance_volume(df['close'], df['volume'])
            indicators['mfi'] = ta.volume.money_flow_index(df['high'], df['low'], df['close'], df['volume'])

            market_data.indicators = indicators

        except Exception as e:
            logger.error(f"Error calculating technical indicators: {e}")
            raise

    def _identify_patterns(self, market_data: MarketData) -> None:
        """Populate ``market_data.patterns`` with candlestick-pattern series.

        NOTE(review): ``ta.candlestick`` is not part of the documented API of
        the bukosabino ``ta`` package — confirm the installed ``ta`` actually
        provides this module; otherwise every call below raises AttributeError.
        """
        df = market_data.data
        patterns = {}

        try:
            # Candlestick patterns
            patterns['doji'] = ta.candlestick.doji(df['open'], df['high'], df['low'], df['close'])
            patterns['hammer'] = ta.candlestick.hammer(df['open'], df['high'], df['low'], df['close'])
            patterns['shooting_star'] = ta.candlestick.shooting_star(df['open'], df['high'], df['low'], df['close'])
            patterns['morning_star'] = ta.candlestick.morning_star(df['open'], df['high'], df['low'], df['close'])
            patterns['evening_star'] = ta.candlestick.evening_star(df['open'], df['high'], df['low'], df['close'])

            market_data.patterns = patterns

        except Exception as e:
            logger.error(f"Error identifying patterns: {e}")
            raise

    def _calculate_support_resistance(self, market_data: MarketData) -> None:
        """Populate ``market_data.support_resistance`` with key price levels.

        Combines rolling 20-bar price extremes with a 50-bin volume profile.
        """
        df = market_data.data
        levels = {}

        try:
            # Rolling 20-bar extremes as primary support/resistance.
            highs = df['high'].rolling(window=20).max()
            lows = df['low'].rolling(window=20).min()

            levels['major_support'] = lows.iloc[-1]
            levels['major_resistance'] = highs.iloc[-1]

            # Volume profile: the three price bins with the most traded volume.
            volume_profile = df.groupby(pd.cut(df['close'], bins=50))['volume'].sum()
            high_volume_prices = volume_profile.nlargest(3).index

            levels['volume_levels'] = [level.mid for level in high_volume_prices]

            market_data.support_resistance = levels

        except Exception as e:
            logger.error(f"Error calculating support/resistance: {e}")
            raise

    async def execute(self, **kwargs) -> Dict:
        """Fetch and analyze data for every requested symbol/exchange pair.

        Returns:
            Mapping of "EXCHANGE:SYMBOL" keys to the raw price records plus
            any requested analyses (indicators, patterns, support/resistance).

        Raises:
            ValueError: On invalid arguments or mismatched symbol/exchange lists.
        """
        try:
            # Validate arguments via the Tool base-class hook.
            if not self.validate_arguments(**kwargs):
                raise ValueError("Invalid arguments")

            # Initialize the TradingView client (lazy, idempotent).
            self._initialize_client(kwargs.get('credentials_path', 'config/tradingview_credentials.json'))

            symbols = [symbol.strip() for symbol in kwargs['symbols'].split(',')]
            exchanges = [exchange.strip() for exchange in kwargs['exchanges'].split(',')]

            # A length mismatch previously caused zip() to silently drop
            # trailing entries; fail loudly instead.
            if len(symbols) != len(exchanges):
                raise ValueError(
                    f"symbols ({len(symbols)}) and exchanges ({len(exchanges)}) "
                    "must have the same number of entries"
                )

            interval = self.INTERVAL_MAPPING[kwargs.get('interval', '1h')]
            lookback_periods = int(kwargs.get('lookback_periods', '500'))  # Convert to int
            analysis_types = [analysis_type.strip() for analysis_type in kwargs.get('analysis_types', 'all').split(',')]

            # Calculate number of bars based on interval and lookback.
            n_bars = self._calculate_n_bars(interval, lookback_periods)

            # Fetch data for all symbol-exchange pairs concurrently.
            tasks = [
                self._fetch_market_data(symbol, exchange, interval, n_bars)
                for symbol, exchange in zip(symbols, exchanges)
            ]
            market_data_list = await asyncio.gather(*tasks)

            # Run the requested analyses and shape the response per pair.
            results = {}
            for market_data in market_data_list:
                if 'all' in analysis_types or 'technical' in analysis_types:
                    self._calculate_technical_indicators(market_data)

                if 'all' in analysis_types or 'patterns' in analysis_types:
                    self._identify_patterns(market_data)

                if 'all' in analysis_types or 'support_resistance' in analysis_types:
                    self._calculate_support_resistance(market_data)

                symbol_key = f"{market_data.exchange}:{market_data.symbol}"
                results[symbol_key] = {
                    'data': market_data.data.to_dict(orient='records'),
                    'indicators': {k: v.to_dict() for k, v in market_data.indicators.items()} if market_data.indicators else None,
                    'patterns': {k: v.to_dict() for k, v in market_data.patterns.items()} if market_data.patterns else None,
                    'support_resistance': market_data.support_resistance
                }

            return results

        except Exception as e:
            logger.error(f"Error executing TradingView tool: {e}")
            raise

    def _calculate_n_bars(self, interval: Interval, lookback_periods: int) -> int:
        """Return the number of bars to request for the given lookback.

        One lookback period corresponds to exactly one bar at the requested
        interval, so the count is simply ``lookback_periods``. (The previous
        implementation multiplied by the interval's minute length and then
        floor-divided by the same value — a round-trip no-op.)

        Args:
            interval: Requested bar interval. Unused; kept for signature
                compatibility with existing callers.
            lookback_periods: Number of periods requested by the caller.
        """
        return lookback_periods

    def validate_arguments(self, **kwargs) -> bool:
        """Validate the provided execute() arguments via the Tool base class."""
        return super().validate_arguments(**kwargs)
|
@@ -0,0 +1,236 @@
|
|
1
|
+
import json
|
2
|
+
from datetime import datetime, timedelta
|
3
|
+
from typing import ClassVar, Dict, List, Optional, Union
|
4
|
+
|
5
|
+
import numpy as np
|
6
|
+
import pandas as pd
|
7
|
+
import yfinance as yf
|
8
|
+
from loguru import logger
|
9
|
+
|
10
|
+
from quantalogic.tools import Tool, ToolArgument
|
11
|
+
|
12
|
+
|
13
|
+
class YFinanceTool(Tool):
    """Enhanced Yahoo Finance data retrieval and analysis tool.

    Pulls historical OHLCV data for a ticker, optionally computes technical
    indicators and fundamental metrics, and returns everything as a JSON
    string. Results are cached in-process per (ticker, dates, interval,
    analysis) combination.
    """

    name: str = "yfinance_tool"
    description: str = "Advanced financial data and analysis tool using Yahoo Finance"
    arguments: list[ToolArgument] = [
        ToolArgument(name="ticker", arg_type="string", description="Stock symbol", required=True),
        ToolArgument(name="start_date", arg_type="string", description="Start date (YYYY-MM-DD)", required=True),
        ToolArgument(name="end_date", arg_type="string", description="End date (YYYY-MM-DD)", required=True),
        ToolArgument(
            name="interval",
            arg_type="string",
            description="Data interval (1m/2m/5m/15m/30m/60m/90m/1h/1d/5d/1wk/1mo/3mo)",
            required=False,
            default="4h"
        ),
        ToolArgument(
            name="analysis_type",
            arg_type="string",
            description="Type of analysis to perform (technical/fundamental/all)",
            required=False,
            default="all"
        )
    ]

    # Maximum lookback Yahoo Finance serves for each interval.
    INTERVAL_LIMITS: ClassVar[Dict[str, str]] = {
        '1m': '7d',    # 1 minute data available for last 7 days
        '2m': '60d',   # 2 minutes
        '5m': '60d',   # 5 minutes
        '15m': '60d',  # 15 minutes
        '30m': '60d',  # 30 minutes
        '60m': '730d', # 60 minutes
        '90m': '60d',  # 90 minutes
        '1h': '730d',  # 1 hour
        '4h': '2y',    # 4 hours (not natively served; see _YF_SUBSTITUTES)
        '1d': 'max',   # 1 day
        '5d': 'max',   # 5 days
        '1wk': 'max',  # 1 week
        '1mo': 'max',  # 1 month
        '3mo': 'max'   # 3 months
    }

    # Intervals kept in INTERVAL_LIMITS (and in the public default) that the
    # yfinance API does not accept; each maps to the nearest supported one.
    # yfinance's valid intervals are 1m/2m/5m/15m/30m/60m/90m/1h/1d/5d/1wk/1mo/3mo,
    # so the previous code failed at stock.history() whenever '4h' was used.
    _YF_SUBSTITUTES: ClassVar[Dict[str, str]] = {'4h': '1h'}

    def __init__(self, **kwargs):
        """Initialize the tool with an empty in-process result cache."""
        super().__init__(**kwargs)
        self.cache = {}  # cache_key -> JSON result string; unbounded

    def _validate_interval(self, interval: str, start_date: datetime) -> str:
        """Return a yfinance-compatible interval for the requested date range.

        Falls back to the default for unknown intervals, substitutes intervals
        the yfinance API does not accept (e.g. '4h' -> '1h'), and widens the
        interval when the range exceeds Yahoo's per-interval history limits.

        Args:
            interval: Requested interval string.
            start_date: Start of the requested range, used for limit checks.
        """
        if interval not in self.INTERVAL_LIMITS:
            logger.warning(f"Invalid interval: {interval}. Using default: 4h")
            interval = "4h"

        # Swap in the closest interval the yfinance API actually supports.
        if interval in self._YF_SUBSTITUTES:
            substitute = self._YF_SUBSTITUTES[interval]
            logger.warning(f"Interval {interval} is not supported by yfinance. Using {substitute} instead.")
            interval = substitute

        limit = self.INTERVAL_LIMITS[interval]
        if limit != 'max':
            # Limits are either 'Nd' (days) or 'Ny' (years).
            limit_days = int(''.join(filter(str.isdigit, limit)))
            if 'y' in limit:
                limit_days *= 365

            date_diff = (datetime.now() - start_date).days
            if date_diff > limit_days:
                logger.warning(f"Interval {interval} only supports {limit} of historical data. Adjusting interval...")
                return self._get_appropriate_interval(date_diff)

        return interval

    def _get_appropriate_interval(self, days: int) -> str:
        """Return the finest interval Yahoo serves for a range of `days` days."""
        if days <= 7:
            return '1m'
        elif days <= 60:
            return '5m'
        elif days <= 730:
            return '1h'
        else:
            return '1d'

    def _calculate_technical_indicators(self, df: pd.DataFrame) -> pd.DataFrame:
        """Add standard technical-indicator columns to the OHLCV frame.

        Computes moving averages, MACD, RSI, Bollinger bands, volume ratios,
        returns, and annualized volatility. Mutates and returns `df`.
        """
        # Moving averages
        df['SMA_20'] = df['Close'].rolling(window=20).mean()
        df['SMA_50'] = df['Close'].rolling(window=50).mean()
        df['EMA_12'] = df['Close'].ewm(span=12, adjust=False).mean()
        df['EMA_26'] = df['Close'].ewm(span=26, adjust=False).mean()

        # MACD: fast EMA minus slow EMA, with a 9-period signal line.
        df['MACD'] = df['EMA_12'] - df['EMA_26']
        df['MACD_Signal'] = df['MACD'].ewm(span=9, adjust=False).mean()

        # RSI (14-period, simple-average variant). When the rolling loss is 0,
        # rs becomes inf and RSI correctly evaluates to 100.
        delta = df['Close'].diff()
        gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
        loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
        rs = gain / loss
        df['RSI'] = 100 - (100 / (1 + rs))

        # Bollinger bands (20-period, 2 standard deviations).
        df['BB_Middle'] = df['Close'].rolling(window=20).mean()
        bb_std = df['Close'].rolling(window=20).std()
        df['BB_Upper'] = df['BB_Middle'] + (bb_std * 2)
        df['BB_Lower'] = df['BB_Middle'] - (bb_std * 2)

        # Volume analysis: current volume relative to its 20-period average.
        df['Volume_MA'] = df['Volume'].rolling(window=20).mean()
        df['Volume_Ratio'] = df['Volume'] / df['Volume_MA']

        # Returns and annualized volatility (252 trading days).
        df['Returns'] = df['Close'].pct_change()
        df['Volatility'] = df['Returns'].rolling(window=20).std() * np.sqrt(252)

        return df

    def _get_fundamental_data(self, ticker: str) -> Dict:
        """Fetch company, financial, and dividend metrics from yfinance.

        Returns an empty dict on any failure (best-effort; errors are logged).
        """
        try:
            stock = yf.Ticker(ticker)
            info = stock.info

            return {
                'company_info': {
                    'name': info.get('longName'),
                    'sector': info.get('sector'),
                    'industry': info.get('industry'),
                    'country': info.get('country'),
                    'website': info.get('website'),
                    'description': info.get('longBusinessSummary')
                },
                'financial_metrics': {
                    'market_cap': info.get('marketCap'),
                    'forward_pe': info.get('forwardPE'),
                    'trailing_pe': info.get('trailingPE'),
                    'price_to_book': info.get('priceToBook'),
                    'enterprise_value': info.get('enterpriseValue'),
                    'profit_margins': info.get('profitMargins'),
                    'operating_margins': info.get('operatingMargins'),
                    'roa': info.get('returnOnAssets'),
                    'roe': info.get('returnOnEquity'),
                    'revenue_growth': info.get('revenueGrowth'),
                    'debt_to_equity': info.get('debtToEquity'),
                    'current_ratio': info.get('currentRatio'),
                    'beta': info.get('beta')
                },
                'dividend_info': {
                    'dividend_rate': info.get('dividendRate'),
                    'dividend_yield': info.get('dividendYield'),
                    'payout_ratio': info.get('payoutRatio'),
                    'ex_dividend_date': info.get('exDividendDate')
                }
            }
        except Exception as e:
            logger.error(f"Error fetching fundamental data for {ticker}: {str(e)}")
            return {}

    def execute(self, ticker: str, start_date: str, end_date: str, interval: str = "4h", analysis_type: str = "all") -> str:
        """Retrieve and analyze Yahoo Finance data for one ticker.

        Args:
            ticker: Stock symbol.
            start_date: Range start, "YYYY-MM-DD".
            end_date: Range end, "YYYY-MM-DD".
            interval: Requested data interval; adjusted to what yfinance
                supports for the range (see _validate_interval).
            analysis_type: "technical", "fundamental", or "all".

        Returns:
            JSON string with metadata, price data, and the requested analyses,
            or a JSON object with an "error" key on failure.
        """
        try:
            # Convert dates to datetime for range/limit validation.
            start = datetime.strptime(start_date, "%Y-%m-%d")
            end = datetime.strptime(end_date, "%Y-%m-%d")

            # Adjust the interval to something yfinance will serve.
            validated_interval = self._validate_interval(interval, start)

            # Serve from cache when this exact request was seen before.
            cache_key = f"{ticker}_{start_date}_{end_date}_{validated_interval}_{analysis_type}"
            if cache_key in self.cache:
                logger.info("Using cached data...")
                return self.cache[cache_key]

            logger.info(f"Fetching {ticker} data with {validated_interval} interval...")
            stock = yf.Ticker(ticker)

            # Get historical OHLCV data.
            hist = stock.history(
                start=start_date,
                end=end_date,
                interval=validated_interval
            )

            if hist.empty:
                logger.warning(f"No data available for {ticker}")
                return json.dumps({"error": "No data available"})

            df = hist.reset_index()

            result = {
                "metadata": {
                    "ticker": ticker,
                    "start_date": start_date,
                    "end_date": end_date,
                    "interval": validated_interval,
                    "analysis_type": analysis_type
                },
                "price_data": {}
            }

            # Technical analysis: indicator columns plus a latest-bar summary.
            if analysis_type in ["technical", "all"]:
                df = self._calculate_technical_indicators(df)
                result["price_data"] = json.loads(df.to_json(orient='records', date_format='iso'))

                result["technical_summary"] = {
                    "current_price": float(df['Close'].iloc[-1]),
                    "price_change": float(df['Returns'].iloc[-1]),
                    "volume": float(df['Volume'].iloc[-1]),
                    "volume_ratio": float(df['Volume_Ratio'].iloc[-1]),
                    "volatility": float(df['Volatility'].iloc[-1]),
                    "rsi": float(df['RSI'].iloc[-1]),
                    "macd": float(df['MACD'].iloc[-1]),
                    "trading_days": len(df)
                }

            # Fundamental analysis (best-effort; empty dict on failure).
            if analysis_type in ["fundamental", "all"]:
                result["fundamental_data"] = self._get_fundamental_data(ticker)

            # Cache the serialized result and return it.
            self.cache[cache_key] = json.dumps(result)
            return self.cache[cache_key]

        except Exception as e:
            error_msg = f"Error processing {ticker}: {str(e)}"
            logger.error(error_msg)
            return json.dumps({"error": error_msg})
|