quantalogic 0.35.0__py3-none-any.whl → 0.50.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quantalogic/__init__.py +0 -4
- quantalogic/agent.py +603 -363
- quantalogic/agent_config.py +233 -46
- quantalogic/agent_factory.py +34 -22
- quantalogic/coding_agent.py +16 -14
- quantalogic/config.py +2 -1
- quantalogic/console_print_events.py +4 -8
- quantalogic/console_print_token.py +2 -2
- quantalogic/docs_cli.py +15 -10
- quantalogic/event_emitter.py +258 -83
- quantalogic/flow/__init__.py +23 -0
- quantalogic/flow/flow.py +595 -0
- quantalogic/flow/flow_extractor.py +672 -0
- quantalogic/flow/flow_generator.py +89 -0
- quantalogic/flow/flow_manager.py +407 -0
- quantalogic/flow/flow_manager_schema.py +169 -0
- quantalogic/flow/flow_yaml.md +419 -0
- quantalogic/generative_model.py +109 -77
- quantalogic/get_model_info.py +5 -5
- quantalogic/interactive_text_editor.py +100 -73
- quantalogic/main.py +17 -21
- quantalogic/model_info_list.py +3 -3
- quantalogic/model_info_litellm.py +14 -14
- quantalogic/prompts.py +2 -1
- quantalogic/{llm.py → quantlitellm.py} +29 -39
- quantalogic/search_agent.py +4 -4
- quantalogic/server/models.py +4 -1
- quantalogic/task_file_reader.py +5 -5
- quantalogic/task_runner.py +20 -20
- quantalogic/tool_manager.py +10 -21
- quantalogic/tools/__init__.py +98 -68
- quantalogic/tools/composio/composio.py +416 -0
- quantalogic/tools/{generate_database_report_tool.py → database/generate_database_report_tool.py} +4 -9
- quantalogic/tools/database/sql_query_tool_advanced.py +261 -0
- quantalogic/tools/document_tools/markdown_to_docx_tool.py +620 -0
- quantalogic/tools/document_tools/markdown_to_epub_tool.py +438 -0
- quantalogic/tools/document_tools/markdown_to_html_tool.py +362 -0
- quantalogic/tools/document_tools/markdown_to_ipynb_tool.py +319 -0
- quantalogic/tools/document_tools/markdown_to_latex_tool.py +420 -0
- quantalogic/tools/document_tools/markdown_to_pdf_tool.py +623 -0
- quantalogic/tools/document_tools/markdown_to_pptx_tool.py +319 -0
- quantalogic/tools/duckduckgo_search_tool.py +2 -4
- quantalogic/tools/finance/alpha_vantage_tool.py +440 -0
- quantalogic/tools/finance/ccxt_tool.py +373 -0
- quantalogic/tools/finance/finance_llm_tool.py +387 -0
- quantalogic/tools/finance/google_finance.py +192 -0
- quantalogic/tools/finance/market_intelligence_tool.py +520 -0
- quantalogic/tools/finance/technical_analysis_tool.py +491 -0
- quantalogic/tools/finance/tradingview_tool.py +336 -0
- quantalogic/tools/finance/yahoo_finance.py +236 -0
- quantalogic/tools/git/bitbucket_clone_repo_tool.py +181 -0
- quantalogic/tools/git/bitbucket_operations_tool.py +326 -0
- quantalogic/tools/git/clone_repo_tool.py +189 -0
- quantalogic/tools/git/git_operations_tool.py +532 -0
- quantalogic/tools/google_packages/google_news_tool.py +480 -0
- quantalogic/tools/grep_app_tool.py +123 -186
- quantalogic/tools/{dalle_e.py → image_generation/dalle_e.py} +37 -27
- quantalogic/tools/jinja_tool.py +6 -10
- quantalogic/tools/language_handlers/__init__.py +22 -9
- quantalogic/tools/list_directory_tool.py +131 -42
- quantalogic/tools/llm_tool.py +45 -15
- quantalogic/tools/llm_vision_tool.py +59 -7
- quantalogic/tools/markitdown_tool.py +17 -5
- quantalogic/tools/nasa_packages/models.py +47 -0
- quantalogic/tools/nasa_packages/nasa_apod_tool.py +232 -0
- quantalogic/tools/nasa_packages/nasa_neows_tool.py +147 -0
- quantalogic/tools/nasa_packages/services.py +82 -0
- quantalogic/tools/presentation_tools/presentation_llm_tool.py +396 -0
- quantalogic/tools/product_hunt/product_hunt_tool.py +258 -0
- quantalogic/tools/product_hunt/services.py +63 -0
- quantalogic/tools/rag_tool/__init__.py +48 -0
- quantalogic/tools/rag_tool/document_metadata.py +15 -0
- quantalogic/tools/rag_tool/query_response.py +20 -0
- quantalogic/tools/rag_tool/rag_tool.py +566 -0
- quantalogic/tools/rag_tool/rag_tool_beta.py +264 -0
- quantalogic/tools/read_html_tool.py +24 -38
- quantalogic/tools/replace_in_file_tool.py +10 -10
- quantalogic/tools/safe_python_interpreter_tool.py +10 -24
- quantalogic/tools/search_definition_names.py +2 -2
- quantalogic/tools/sequence_tool.py +14 -23
- quantalogic/tools/sql_query_tool.py +17 -19
- quantalogic/tools/tool.py +39 -15
- quantalogic/tools/unified_diff_tool.py +1 -1
- quantalogic/tools/utilities/csv_processor_tool.py +234 -0
- quantalogic/tools/utilities/download_file_tool.py +179 -0
- quantalogic/tools/utilities/mermaid_validator_tool.py +661 -0
- quantalogic/tools/utils/__init__.py +1 -4
- quantalogic/tools/utils/create_sample_database.py +24 -38
- quantalogic/tools/utils/generate_database_report.py +74 -82
- quantalogic/tools/wikipedia_search_tool.py +17 -21
- quantalogic/utils/ask_user_validation.py +1 -1
- quantalogic/utils/async_utils.py +35 -0
- quantalogic/utils/check_version.py +3 -5
- quantalogic/utils/get_all_models.py +2 -1
- quantalogic/utils/git_ls.py +21 -7
- quantalogic/utils/lm_studio_model_info.py +9 -7
- quantalogic/utils/python_interpreter.py +113 -43
- quantalogic/utils/xml_utility.py +178 -0
- quantalogic/version_check.py +1 -1
- quantalogic/welcome_message.py +7 -7
- quantalogic/xml_parser.py +0 -1
- {quantalogic-0.35.0.dist-info → quantalogic-0.50.0.dist-info}/METADATA +40 -1
- quantalogic-0.50.0.dist-info/RECORD +148 -0
- quantalogic-0.35.0.dist-info/RECORD +0 -102
- {quantalogic-0.35.0.dist-info → quantalogic-0.50.0.dist-info}/LICENSE +0 -0
- {quantalogic-0.35.0.dist-info → quantalogic-0.50.0.dist-info}/WHEEL +0 -0
- {quantalogic-0.35.0.dist-info → quantalogic-0.50.0.dist-info}/entry_points.txt +0 -0
quantalogic/tools/finance/technical_analysis_tool.py (new file)
@@ -0,0 +1,491 @@
+"""Advanced Technical Analysis Tool for comprehensive market analysis."""
+
+import json
+import warnings
+from dataclasses import dataclass
+from datetime import datetime
+from enum import Enum
+from typing import Any, ClassVar, Dict, List
+
+import numpy as np
+import pandas as pd
+import ta
+from loguru import logger
+from scipy.signal import argrelextrema
+
+from quantalogic.tools import Tool, ToolArgument
+
+warnings.filterwarnings('ignore')
+
+class SignalType(str, Enum):
+    STRONG_BUY = "strong_buy"
+    BUY = "buy"
+    NEUTRAL = "neutral"
+    SELL = "sell"
+    STRONG_SELL = "strong_sell"
+
+class PatternType(str, Enum):
+    DOUBLE_TOP = "double_top"
+    DOUBLE_BOTTOM = "double_bottom"
+    HEAD_SHOULDERS = "head_shoulders"
+    INV_HEAD_SHOULDERS = "inv_head_shoulders"
+    ASCENDING_TRIANGLE = "ascending_triangle"
+    DESCENDING_TRIANGLE = "descending_triangle"
+    BULL_FLAG = "bull_flag"
+    BEAR_FLAG = "bear_flag"
+    CHANNEL_UP = "channel_up"
+    CHANNEL_DOWN = "channel_down"
+
+@dataclass
+class TechnicalSignal:
+    """Container for technical analysis signals."""
+    indicator_type: str
+    signal: SignalType
+    value: float
+    threshold: float
+    timestamp: datetime
+    confidence: float
+    metadata: Dict[str, Any] = None
+
+@dataclass
+class PatternSignal:
+    """Container for pattern recognition signals."""
+    pattern_type: PatternType
+    start_idx: int
+    end_idx: int
+    confidence: float
+    target_price: float
+    stop_loss: float
+    risk_reward_ratio: float
+    volume_confirmation: bool
+    metadata: Dict[str, Any] = None
+
+@dataclass
+class TechnicalAnalysis:
+    """Container for comprehensive technical analysis results."""
+    symbol: str
+    timeframe: str
+    analysis_timestamp: datetime
+    current_price: float
+    trend_signals: Dict[str, TechnicalSignal]
+    momentum_signals: Dict[str, TechnicalSignal]
+    volatility_signals: Dict[str, TechnicalSignal]
+    volume_signals: Dict[str, TechnicalSignal]
+    pattern_signals: List[PatternSignal]
+    support_resistance: Dict[str, float]
+    pivot_points: Dict[str, float]
+    fibonacci_levels: Dict[str, float]
+    divergences: Dict[str, Dict[str, Any]]
+    market_strength: Dict[str, float]
+
+class TechnicalAnalysisTool(Tool):
+    """Advanced technical analysis tool with comprehensive market analysis capabilities."""
+
+    name: ClassVar[str] = "technical_analysis_tool"
+    description: ClassVar[str] = "Advanced technical analysis tool for comprehensive market analysis"
+
+    INDICATORS: ClassVar[Dict[str, str]] = {
+        'SMA': 'Simple Moving Average',
+        'EMA': 'Exponential Moving Average',
+        'RSI': 'Relative Strength Index',
+        'MACD': 'Moving Average Convergence Divergence',
+        'BB': 'Bollinger Bands',
+        'STOCH': 'Stochastic Oscillator',
+        'ATR': 'Average True Range',
+        'OBV': 'On-Balance Volume',
+        'ADX': 'Average Directional Index',
+        'CCI': 'Commodity Channel Index'
+    }
+
+    PATTERNS: ClassVar[Dict[str, str]] = {
+        'double_top': 'Double Top',
+        'double_bottom': 'Double Bottom',
+        'head_shoulders': 'Head and Shoulders',
+        'inverse_head_shoulders': 'Inverse Head and Shoulders',
+        'triangle': 'Triangle',
+        'wedge': 'Wedge',
+        'channel': 'Channel',
+        'flag': 'Flag',
+        'pennant': 'Pennant'
+    }
+
+    arguments: ClassVar[list[ToolArgument]] = [
+        ToolArgument(
+            name="data",
+            arg_type="string",
+            description="JSON string of OHLCV data in format: [{timestamp, open, high, low, close, volume}, ...]",
+            required=True
+        ),
+        ToolArgument(
+            name="indicators",
+            arg_type="string",
+            description="Comma-separated list of indicators (e.g., 'RSI,MACD,BB')",
+            required=False,
+            default="all"
+        ),
+        ToolArgument(
+            name="timeframe",
+            arg_type="string",
+            description="Timeframe of the data (e.g., '1m', '5m', '1h', '1d')",
+            required=False,
+            default="1h"
+        ),
+        ToolArgument(
+            name="lookback_periods",
+            arg_type="string",
+            description="Number of periods to analyze",
+            required=False,
+            default="500"
+        ),
+        ToolArgument(
+            name="pattern_types",
+            arg_type="string",
+            description="Comma-separated list of patterns to detect (e.g., 'double_top,head_shoulders')",
+            required=False,
+            default="all"
+        )
+    ]
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.cache = {}
+
+    def validate_arguments(self, **kwargs) -> bool:
+        """Validate tool arguments."""
+        try:
+            # Validate required arguments
+            required_args = [arg.name for arg in self.arguments if arg.required]
+            for arg in required_args:
+                if arg not in kwargs:
+                    raise ValueError(f"Missing required argument: {arg}")
+
+            # Validate data structure
+            if 'data' in kwargs:
+                try:
+                    data = pd.DataFrame(json.loads(kwargs['data']))
+                    required_columns = ['open', 'high', 'low', 'close', 'volume']
+                    missing_columns = set(required_columns) - set(data.columns)
+                    if missing_columns:
+                        raise ValueError(f"Missing required columns: {missing_columns}")
+                except json.JSONDecodeError:
+                    raise ValueError("Invalid JSON data format")
+
+            # Validate indicators
+            if 'indicators' in kwargs and kwargs['indicators'] != 'all':
+                indicators = kwargs['indicators'].split(',')
+                invalid_indicators = [ind for ind in indicators if ind not in self.INDICATORS]
+                if invalid_indicators:
+                    raise ValueError(f"Invalid indicators: {invalid_indicators}")
+
+            # Validate pattern types
+            if 'pattern_types' in kwargs and kwargs['pattern_types'] != 'all':
+                patterns = kwargs['pattern_types'].split(',')
+                invalid_patterns = [pat for pat in patterns if pat not in self.PATTERNS]
+                if invalid_patterns:
+                    raise ValueError(f"Invalid pattern types: {invalid_patterns}")
+
+            return True
+        except Exception as e:
+            logger.error(f"Error validating arguments: {e}")
+            raise
+
+    def _calculate_trend_indicators(self, df: pd.DataFrame) -> Dict[str, TechnicalSignal]:
+        """Calculate comprehensive trend indicators."""
+        signals = {}
+
+        try:
+            # Moving Averages
+            df['sma_20'] = ta.trend.sma_indicator(df['close'], 20)
+            df['sma_50'] = ta.trend.sma_indicator(df['close'], 50)
+            df['sma_200'] = ta.trend.sma_indicator(df['close'], 200)
+            df['ema_12'] = ta.trend.ema_indicator(df['close'], 12)
+            df['ema_26'] = ta.trend.ema_indicator(df['close'], 26)
+
+            # MACD
+            df['macd'] = ta.trend.macd(df['close'])
+            df['macd_signal'] = ta.trend.macd_signal(df['close'])
+            df['macd_diff'] = ta.trend.macd_diff(df['close'])
+
+            # ADX
+            df['adx'] = ta.trend.adx(df['high'], df['low'], df['close'])
+            df['di_plus'] = ta.trend.adx_pos(df['high'], df['low'], df['close'])
+            df['di_minus'] = ta.trend.adx_neg(df['high'], df['low'], df['close'])
+
+            # Ichimoku Cloud
+            df['ichimoku_a'] = ta.trend.ichimoku_a(df['high'], df['low'])
+            df['ichimoku_b'] = ta.trend.ichimoku_b(df['high'], df['low'])
+
+            # Trend Signals
+            signals['ma_cross'] = self._analyze_ma_crossover(df)
+            signals['macd'] = self._analyze_macd(df)
+            signals['adx'] = self._analyze_adx(df)
+            signals['ichimoku'] = self._analyze_ichimoku(df)
+
+            return signals
+
+        except Exception as e:
+            logger.error(f"Error calculating trend indicators: {e}")
+            raise
+
+    def _calculate_momentum_indicators(self, df: pd.DataFrame) -> Dict[str, TechnicalSignal]:
+        """Calculate comprehensive momentum indicators."""
+        signals = {}
+
+        try:
+            # RSI
+            df['rsi'] = ta.momentum.rsi(df['close'])
+
+            # Stochastic
+            df['stoch_k'] = ta.momentum.stoch(df['high'], df['low'], df['close'])
+            df['stoch_d'] = ta.momentum.stoch_signal(df['high'], df['low'], df['close'])
+
+            # ROC
+            df['roc'] = ta.momentum.roc(df['close'])
+
+            # Ultimate Oscillator
+            df['uo'] = ta.momentum.ultimate_oscillator(df['high'], df['low'], df['close'])
+
+            # TSI
+            df['tsi'] = ta.momentum.tsi(df['close'])
+
+            # Momentum Signals
+            signals['rsi'] = self._analyze_rsi(df)
+            signals['stochastic'] = self._analyze_stochastic(df)
+            signals['ultimate_oscillator'] = self._analyze_ultimate_oscillator(df)
+            signals['tsi'] = self._analyze_tsi(df)
+
+            return signals
+
+        except Exception as e:
+            logger.error(f"Error calculating momentum indicators: {e}")
+            raise
+
+    def _calculate_volatility_indicators(self, df: pd.DataFrame) -> Dict[str, TechnicalSignal]:
+        """Calculate comprehensive volatility indicators."""
+        signals = {}
+
+        try:
+            # Bollinger Bands
+            df['bb_upper'] = ta.volatility.bollinger_hband(df['close'])
+            df['bb_middle'] = ta.volatility.bollinger_mavg(df['close'])
+            df['bb_lower'] = ta.volatility.bollinger_lband(df['close'])
+
+            # ATR
+            df['atr'] = ta.volatility.average_true_range(df['high'], df['low'], df['close'])
+
+            # Keltner Channel
+            df['kc_upper'] = ta.volatility.keltner_channel_hband(df['high'], df['low'], df['close'])
+            df['kc_lower'] = ta.volatility.keltner_channel_lband(df['high'], df['low'], df['close'])
+
+            # Donchian Channel
+            df['dc_upper'] = df['high'].rolling(20).max()
+            df['dc_lower'] = df['low'].rolling(20).min()
+
+            # Volatility Signals
+            signals['bollinger'] = self._analyze_bollinger_bands(df)
+            signals['atr'] = self._analyze_atr(df)
+            signals['keltner'] = self._analyze_keltner_channel(df)
+            signals['donchian'] = self._analyze_donchian_channel(df)
+
+            return signals
+
+        except Exception as e:
+            logger.error(f"Error calculating volatility indicators: {e}")
+            raise
+
+    def _calculate_volume_indicators(self, df: pd.DataFrame) -> Dict[str, TechnicalSignal]:
+        """Calculate comprehensive volume indicators."""
+        signals = {}
+
+        try:
+            # On-Balance Volume
+            df['obv'] = ta.volume.on_balance_volume(df['close'], df['volume'])
+
+            # Money Flow Index
+            df['mfi'] = ta.volume.money_flow_index(df['high'], df['low'], df['close'], df['volume'])
+
+            # Volume Price Trend
+            df['vpt'] = ta.volume.volume_price_trend(df['close'], df['volume'])
+
+            # Ease of Movement
+            df['eom'] = ta.volume.ease_of_movement(df['high'], df['low'], df['volume'])
+
+            # Volume-Weighted Average Price
+            df['vwap'] = self._calculate_vwap(df)
+
+            # Volume Signals
+            signals['obv'] = self._analyze_obv(df)
+            signals['mfi'] = self._analyze_mfi(df)
+            signals['vpt'] = self._analyze_vpt(df)
+            signals['vwap'] = self._analyze_vwap(df)
+
+            return signals
+
+        except Exception as e:
+            logger.error(f"Error calculating volume indicators: {e}")
+            raise
+
+    def _identify_chart_patterns(self, df: pd.DataFrame) -> List[PatternSignal]:
+        """Identify chart patterns using advanced pattern recognition."""
+        patterns = []
+
+        try:
+            # Find local maxima and minima
+            max_idx = argrelextrema(df['high'].values, np.greater, order=5)[0]
+            min_idx = argrelextrema(df['low'].values, np.less, order=5)[0]
+
+            # Double Top/Bottom
+            patterns.extend(self._find_double_patterns(df, max_idx, min_idx))
+
+            # Head and Shoulders
+            patterns.extend(self._find_head_shoulders(df, max_idx, min_idx))
+
+            # Triangle Patterns
+            patterns.extend(self._find_triangle_patterns(df, max_idx, min_idx))
+
+            # Flag Patterns
+            patterns.extend(self._find_flag_patterns(df))
+
+            # Channels
+            patterns.extend(self._find_channels(df))
+
+            return patterns
+
+        except Exception as e:
+            logger.error(f"Error identifying chart patterns: {e}")
+            raise
+
+    def _calculate_support_resistance(self, df: pd.DataFrame) -> Dict[str, float]:
+        """Calculate support and resistance levels using multiple methods."""
+        levels = {}
+
+        try:
+            # Price Action Based
+            levels.update(self._find_price_levels(df))
+
+            # Volume Profile Based
+            levels.update(self._find_volume_levels(df))
+
+            # Fibonacci Based
+            levels.update(self._calculate_fibonacci_levels(df))
+
+            return levels
+
+        except Exception as e:
+            logger.error(f"Error calculating support/resistance: {e}")
+            raise
+
+    def _find_divergences(self, df: pd.DataFrame) -> Dict[str, Dict[str, Any]]:
+        """Identify regular and hidden divergences."""
+        divergences = {}
+
+        try:
+            # RSI Divergences
+            divergences['rsi'] = self._find_indicator_divergences(df, df['rsi'], 'RSI')
+
+            # MACD Divergences
+            divergences['macd'] = self._find_indicator_divergences(df, df['macd'], 'MACD')
+
+            # OBV Divergences
+            divergences['obv'] = self._find_indicator_divergences(df, df['obv'], 'OBV')
+
+            return divergences
+
+        except Exception as e:
+            logger.error(f"Error finding divergences: {e}")
+            raise
+
+    def _calculate_market_strength(self, df: pd.DataFrame) -> Dict[str, float]:
+        """Calculate overall market strength using multiple metrics."""
+        strength = {}
+
+        try:
+            # Trend Strength
+            strength['trend'] = self._calculate_trend_strength(df)
+
+            # Momentum Strength
+            strength['momentum'] = self._calculate_momentum_strength(df)
+
+            # Volume Strength
+            strength['volume'] = self._calculate_volume_strength(df)
+
+            # Overall Strength
+            strength['overall'] = np.mean([
+                strength['trend'],
+                strength['momentum'],
+                strength['volume']
+            ])
+
+            return strength
+
+        except Exception as e:
+            logger.error(f"Error calculating market strength: {e}")
+            raise
+
+    def execute(self, **kwargs) -> TechnicalAnalysis:
+        """Execute comprehensive technical analysis."""
+        try:
+            # Validate arguments
+            if not self.validate_arguments(**kwargs):
+                raise ValueError("Invalid arguments")
+
+            # Parse JSON string into DataFrame
+            data = pd.DataFrame(json.loads(kwargs['data']))
+
+            # Convert parameters
+            indicators = kwargs.get('indicators', 'all').split(',')
+            timeframe = kwargs.get('timeframe', '1h')
+            lookback_periods = int(kwargs.get('lookback_periods', '500'))
+            pattern_types = kwargs.get('pattern_types', 'all').split(',')
+
+            # Initialize containers
+            trend_signals = {}
+            momentum_signals = {}
+            volatility_signals = {}
+            volume_signals = {}
+            pattern_signals = []
+
+            # Perform requested analyses
+            if 'all' in indicators or 'trend' in indicators:
+                trend_signals = self._calculate_trend_indicators(data)
+
+            if 'all' in indicators or 'momentum' in indicators:
+                momentum_signals = self._calculate_momentum_indicators(data)
+
+            if 'all' in indicators or 'volatility' in indicators:
+                volatility_signals = self._calculate_volatility_indicators(data)
+
+            if 'all' in indicators or 'volume' in indicators:
+                volume_signals = self._calculate_volume_indicators(data)
+
+            if 'all' in pattern_types or 'patterns' in pattern_types:
+                pattern_signals = self._identify_chart_patterns(data)
+
+            # Calculate additional analyses
+            support_resistance = self._calculate_support_resistance(data)
+            pivot_points = self._calculate_pivot_points(data)
+            fibonacci_levels = self._calculate_fibonacci_levels(data)
+            divergences = self._find_divergences(data)
+            market_strength = self._calculate_market_strength(data)
+
+            # Combine all analyses
+            return TechnicalAnalysis(
+                symbol=kwargs['symbol'],
+                timeframe=timeframe,
+                analysis_timestamp=datetime.now(),
+                current_price=data['close'].iloc[-1],
+                trend_signals=trend_signals,
+                momentum_signals=momentum_signals,
+                volatility_signals=volatility_signals,
+                volume_signals=volume_signals,
+                pattern_signals=pattern_signals,
+                support_resistance=support_resistance,
+                pivot_points=pivot_points,
+                fibonacci_levels=fibonacci_levels,
+                divergences=divergences,
+                market_strength=market_strength
+            )
+
+        except Exception as e:
+            logger.error(f"Error executing technical analysis: {e}")
+            raise