quantalogic 0.35.0__py3-none-any.whl → 0.50.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quantalogic/__init__.py +0 -4
- quantalogic/agent.py +603 -363
- quantalogic/agent_config.py +233 -46
- quantalogic/agent_factory.py +34 -22
- quantalogic/coding_agent.py +16 -14
- quantalogic/config.py +2 -1
- quantalogic/console_print_events.py +4 -8
- quantalogic/console_print_token.py +2 -2
- quantalogic/docs_cli.py +15 -10
- quantalogic/event_emitter.py +258 -83
- quantalogic/flow/__init__.py +23 -0
- quantalogic/flow/flow.py +595 -0
- quantalogic/flow/flow_extractor.py +672 -0
- quantalogic/flow/flow_generator.py +89 -0
- quantalogic/flow/flow_manager.py +407 -0
- quantalogic/flow/flow_manager_schema.py +169 -0
- quantalogic/flow/flow_yaml.md +419 -0
- quantalogic/generative_model.py +109 -77
- quantalogic/get_model_info.py +5 -5
- quantalogic/interactive_text_editor.py +100 -73
- quantalogic/main.py +17 -21
- quantalogic/model_info_list.py +3 -3
- quantalogic/model_info_litellm.py +14 -14
- quantalogic/prompts.py +2 -1
- quantalogic/{llm.py → quantlitellm.py} +29 -39
- quantalogic/search_agent.py +4 -4
- quantalogic/server/models.py +4 -1
- quantalogic/task_file_reader.py +5 -5
- quantalogic/task_runner.py +20 -20
- quantalogic/tool_manager.py +10 -21
- quantalogic/tools/__init__.py +98 -68
- quantalogic/tools/composio/composio.py +416 -0
- quantalogic/tools/{generate_database_report_tool.py → database/generate_database_report_tool.py} +4 -9
- quantalogic/tools/database/sql_query_tool_advanced.py +261 -0
- quantalogic/tools/document_tools/markdown_to_docx_tool.py +620 -0
- quantalogic/tools/document_tools/markdown_to_epub_tool.py +438 -0
- quantalogic/tools/document_tools/markdown_to_html_tool.py +362 -0
- quantalogic/tools/document_tools/markdown_to_ipynb_tool.py +319 -0
- quantalogic/tools/document_tools/markdown_to_latex_tool.py +420 -0
- quantalogic/tools/document_tools/markdown_to_pdf_tool.py +623 -0
- quantalogic/tools/document_tools/markdown_to_pptx_tool.py +319 -0
- quantalogic/tools/duckduckgo_search_tool.py +2 -4
- quantalogic/tools/finance/alpha_vantage_tool.py +440 -0
- quantalogic/tools/finance/ccxt_tool.py +373 -0
- quantalogic/tools/finance/finance_llm_tool.py +387 -0
- quantalogic/tools/finance/google_finance.py +192 -0
- quantalogic/tools/finance/market_intelligence_tool.py +520 -0
- quantalogic/tools/finance/technical_analysis_tool.py +491 -0
- quantalogic/tools/finance/tradingview_tool.py +336 -0
- quantalogic/tools/finance/yahoo_finance.py +236 -0
- quantalogic/tools/git/bitbucket_clone_repo_tool.py +181 -0
- quantalogic/tools/git/bitbucket_operations_tool.py +326 -0
- quantalogic/tools/git/clone_repo_tool.py +189 -0
- quantalogic/tools/git/git_operations_tool.py +532 -0
- quantalogic/tools/google_packages/google_news_tool.py +480 -0
- quantalogic/tools/grep_app_tool.py +123 -186
- quantalogic/tools/{dalle_e.py → image_generation/dalle_e.py} +37 -27
- quantalogic/tools/jinja_tool.py +6 -10
- quantalogic/tools/language_handlers/__init__.py +22 -9
- quantalogic/tools/list_directory_tool.py +131 -42
- quantalogic/tools/llm_tool.py +45 -15
- quantalogic/tools/llm_vision_tool.py +59 -7
- quantalogic/tools/markitdown_tool.py +17 -5
- quantalogic/tools/nasa_packages/models.py +47 -0
- quantalogic/tools/nasa_packages/nasa_apod_tool.py +232 -0
- quantalogic/tools/nasa_packages/nasa_neows_tool.py +147 -0
- quantalogic/tools/nasa_packages/services.py +82 -0
- quantalogic/tools/presentation_tools/presentation_llm_tool.py +396 -0
- quantalogic/tools/product_hunt/product_hunt_tool.py +258 -0
- quantalogic/tools/product_hunt/services.py +63 -0
- quantalogic/tools/rag_tool/__init__.py +48 -0
- quantalogic/tools/rag_tool/document_metadata.py +15 -0
- quantalogic/tools/rag_tool/query_response.py +20 -0
- quantalogic/tools/rag_tool/rag_tool.py +566 -0
- quantalogic/tools/rag_tool/rag_tool_beta.py +264 -0
- quantalogic/tools/read_html_tool.py +24 -38
- quantalogic/tools/replace_in_file_tool.py +10 -10
- quantalogic/tools/safe_python_interpreter_tool.py +10 -24
- quantalogic/tools/search_definition_names.py +2 -2
- quantalogic/tools/sequence_tool.py +14 -23
- quantalogic/tools/sql_query_tool.py +17 -19
- quantalogic/tools/tool.py +39 -15
- quantalogic/tools/unified_diff_tool.py +1 -1
- quantalogic/tools/utilities/csv_processor_tool.py +234 -0
- quantalogic/tools/utilities/download_file_tool.py +179 -0
- quantalogic/tools/utilities/mermaid_validator_tool.py +661 -0
- quantalogic/tools/utils/__init__.py +1 -4
- quantalogic/tools/utils/create_sample_database.py +24 -38
- quantalogic/tools/utils/generate_database_report.py +74 -82
- quantalogic/tools/wikipedia_search_tool.py +17 -21
- quantalogic/utils/ask_user_validation.py +1 -1
- quantalogic/utils/async_utils.py +35 -0
- quantalogic/utils/check_version.py +3 -5
- quantalogic/utils/get_all_models.py +2 -1
- quantalogic/utils/git_ls.py +21 -7
- quantalogic/utils/lm_studio_model_info.py +9 -7
- quantalogic/utils/python_interpreter.py +113 -43
- quantalogic/utils/xml_utility.py +178 -0
- quantalogic/version_check.py +1 -1
- quantalogic/welcome_message.py +7 -7
- quantalogic/xml_parser.py +0 -1
- {quantalogic-0.35.0.dist-info → quantalogic-0.50.0.dist-info}/METADATA +40 -1
- quantalogic-0.50.0.dist-info/RECORD +148 -0
- quantalogic-0.35.0.dist-info/RECORD +0 -102
- {quantalogic-0.35.0.dist-info → quantalogic-0.50.0.dist-info}/LICENSE +0 -0
- {quantalogic-0.35.0.dist-info → quantalogic-0.50.0.dist-info}/WHEEL +0 -0
- {quantalogic-0.35.0.dist-info → quantalogic-0.50.0.dist-info}/entry_points.txt +0 -0
quantalogic/tools/finance/ccxt_tool.py
@@ -0,0 +1,373 @@
import json
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass
from pathlib import Path
from typing import Any, ClassVar, Dict, List

import ccxt.async_support as ccxt
import pandas as pd
import ta
from loguru import logger

from quantalogic.tools import Tool, ToolArgument


@dataclass
class MarketData:
    """Container for market data and analysis."""
    symbol: str
    exchange: str
    timeframe: str
    data: pd.DataFrame
    indicators: Dict[str, pd.Series] = None
    patterns: Dict[str, pd.Series] = None
    levels: Dict[str, float] = None
    volume_profile: Dict[str, float] = None

class CCXTTool(Tool):
    """Advanced cryptocurrency trading and analysis tool using CCXT."""

    name: ClassVar[str] = "ccxt_tool"
    description: ClassVar[str] = "Enhanced cryptocurrency trading and analysis tool using CCXT"

    TIMEFRAMES: ClassVar[List[str]] = [
        '1m', '3m', '5m', '15m', '30m', '1h', '2h', '4h', '6h', '8h', '12h', '1d', '3d', '1w', '1M'
    ]

    arguments: ClassVar[list[ToolArgument]] = [
        ToolArgument(
            name="exchange_ids",
            arg_type="string",
            description="Comma-separated exchange IDs (e.g., 'binance,kucoin')",
            required=True
        ),
        ToolArgument(
            name="symbols",
            arg_type="string",
            description="Comma-separated trading pairs (e.g., 'BTC/USDT,ETH/USDT')",
            required=True
        ),
        ToolArgument(
            name="timeframe",
            arg_type="string",
            description="Time interval for data",
            required=False,
            default="1h"
        ),
        ToolArgument(
            name="lookback_periods",
            arg_type="string",
            description="Number of periods to analyze",
            required=False,
            default="500"
        ),
        ToolArgument(
            name="analysis_types",
            arg_type="string",
            description="Comma-separated analysis types (technical,patterns,volume,all)",
            required=False,
            default="all"
        ),
        ToolArgument(
            name="credentials_path",
            arg_type="string",
            description="Path to exchange credentials file",
            required=False,
            default="config/exchange_credentials.json"
        )
    ]

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.exchanges: Dict[str, ccxt.Exchange] = {}
        self.cache = {}
        self.executor = ThreadPoolExecutor(max_workers=4)

    def validate_arguments(self, **kwargs) -> bool:
        """Validate tool arguments."""
        try:
            # Validate required arguments
            required_args = [arg.name for arg in self.arguments if arg.required]
            for arg in required_args:
                if arg not in kwargs:
                    raise ValueError(f"Missing required argument: {arg}")

            # Validate timeframe
            if 'timeframe' in kwargs and kwargs['timeframe'] not in self.TIMEFRAMES:
                raise ValueError(f"Invalid timeframe: {kwargs['timeframe']}")

            # Validate exchange IDs format
            if 'exchange_ids' in kwargs:
                exchange_ids = kwargs['exchange_ids'].split(',')
                if not all(exchange_id.strip() for exchange_id in exchange_ids):
                    raise ValueError("Invalid exchange IDs format")

            # Validate trading pairs format
            if 'symbols' in kwargs:
                symbols = kwargs['symbols'].split(',')
                if not all('/' in symbol for symbol in symbols):
                    raise ValueError("Invalid trading pair format. Must be in format BASE/QUOTE")

            return True
        except Exception as e:
            logger.error(f"Error validating arguments: {e}")
            raise

    async def _initialize_exchanges(self, exchange_ids: List[str], credentials_path: str) -> None:
        """Initialize exchange instances with credentials if available."""
        try:
            creds = {}
            creds_path = Path(credentials_path)
            if creds_path.exists():
                with open(creds_path) as f:
                    creds = json.load(f)

            for exchange_id in exchange_ids:
                exchange_class = getattr(ccxt, exchange_id)
                exchange_creds = creds.get(exchange_id, {})

                self.exchanges[exchange_id] = exchange_class({
                    'apiKey': exchange_creds.get('api_key'),
                    'secret': exchange_creds.get('secret'),
                    'password': exchange_creds.get('password'),
                    'enableRateLimit': True,
                    'options': {'defaultType': 'spot'}
                })

                # Load markets for symbol validation
                await self.exchanges[exchange_id].load_markets()

        except Exception as e:
            logger.error(f"Error initializing exchanges: {e}")
            raise

    async def _fetch_ohlcv(
        self,
        exchange_id: str,
        symbol: str,
        timeframe: str,
        limit: int
    ) -> MarketData:
        """Fetch OHLCV data from exchange."""
        try:
            exchange = self.exchanges[exchange_id]

            # Fetch OHLCV data
            ohlcv = await exchange.fetch_ohlcv(symbol, timeframe, limit=limit)

            # Convert to DataFrame
            df = pd.DataFrame(
                ohlcv,
                columns=['timestamp', 'open', 'high', 'low', 'close', 'volume']
            )
            df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms')
            df.set_index('timestamp', inplace=True)

            return MarketData(
                symbol=symbol,
                exchange=exchange_id,
                timeframe=timeframe,
                data=df
            )

        except Exception as e:
            logger.error(f"Error fetching OHLCV data for {symbol} on {exchange_id}: {e}")
            raise

    async def _fetch_order_book(self, exchange_id: str, symbol: str, limit: int = 100) -> Dict:
        """Fetch order book data."""
        try:
            exchange = self.exchanges[exchange_id]
            order_book = await exchange.fetch_order_book(symbol, limit)

            return {
                'bids': order_book['bids'],
                'asks': order_book['asks'],
                'timestamp': order_book['timestamp'],
                'datetime': order_book['datetime'],
                'nonce': order_book.get('nonce')
            }
        except Exception as e:
            logger.error(f"Error fetching order book for {symbol} on {exchange_id}: {e}")
            raise

    def _calculate_technical_indicators(self, market_data: MarketData) -> None:
        """Calculate comprehensive technical indicators."""
        df = market_data.data
        indicators = {}

        try:
            # Trend Indicators
            indicators['sma_20'] = ta.trend.sma_indicator(df['close'], 20)
            indicators['sma_50'] = ta.trend.sma_indicator(df['close'], 50)
            indicators['sma_200'] = ta.trend.sma_indicator(df['close'], 200)
            indicators['ema_12'] = ta.trend.ema_indicator(df['close'], 12)
            indicators['ema_26'] = ta.trend.ema_indicator(df['close'], 26)
            indicators['macd'] = ta.trend.macd(df['close'])
            indicators['macd_signal'] = ta.trend.macd_signal(df['close'])
            indicators['macd_diff'] = ta.trend.macd_diff(df['close'])
            indicators['adx'] = ta.trend.adx(df['high'], df['low'], df['close'])

            # Momentum Indicators
            indicators['rsi'] = ta.momentum.rsi(df['close'])
            indicators['stoch'] = ta.momentum.stoch(df['high'], df['low'], df['close'])
            indicators['stoch_signal'] = ta.momentum.stoch_signal(df['high'], df['low'], df['close'])
            indicators['williams_r'] = ta.momentum.williams_r(df['high'], df['low'], df['close'])

            # Volatility Indicators
            indicators['bbands_upper'] = ta.volatility.bollinger_hband(df['close'])
            indicators['bbands_lower'] = ta.volatility.bollinger_lband(df['close'])
            indicators['bbands_middle'] = ta.volatility.bollinger_mavg(df['close'])
            indicators['atr'] = ta.volatility.average_true_range(df['high'], df['low'], df['close'])

            # Volume Indicators
            indicators['obv'] = ta.volume.on_balance_volume(df['close'], df['volume'])
            indicators['mfi'] = ta.volume.money_flow_index(df['high'], df['low'], df['close'], df['volume'])
            indicators['vwap'] = self._calculate_vwap(df)

            market_data.indicators = indicators

        except Exception as e:
            logger.error(f"Error calculating technical indicators: {e}")
            raise

    def _calculate_vwap(self, df: pd.DataFrame) -> pd.Series:
        """Calculate Volume Weighted Average Price."""
        v = df['volume']
        tp = (df['high'] + df['low'] + df['close']) / 3
        return (tp * v).cumsum() / v.cumsum()

    def _identify_patterns(self, market_data: MarketData) -> None:
        """Identify chart patterns and candlestick patterns."""
        df = market_data.data
        patterns = {}

        try:
            # Candlestick Patterns
            patterns['doji'] = ta.candlestick.doji(df['open'], df['high'], df['low'], df['close'])
            patterns['hammer'] = ta.candlestick.hammer(df['open'], df['high'], df['low'], df['close'])
            patterns['shooting_star'] = ta.candlestick.shooting_star(df['open'], df['high'], df['low'], df['close'])
            patterns['morning_star'] = ta.candlestick.morning_star(df['open'], df['high'], df['low'], df['close'])
            patterns['evening_star'] = ta.candlestick.evening_star(df['open'], df['high'], df['low'], df['close'])

            # Custom Pattern Detection
            patterns['double_top'] = self._detect_double_top(df)
            patterns['double_bottom'] = self._detect_double_bottom(df)
            patterns['head_shoulders'] = self._detect_head_shoulders(df)

            market_data.patterns = patterns

        except Exception as e:
            logger.error(f"Error identifying patterns: {e}")
            raise

    def _detect_double_top(self, df: pd.DataFrame) -> pd.Series:
        """Detect double top pattern."""
        window = 20
        peaks = df['high'].rolling(window, center=True).apply(
            lambda x: 1 if x.iloc[window//2] == max(x) else 0
        )
        return peaks

    def _detect_double_bottom(self, df: pd.DataFrame) -> pd.Series:
        """Detect double bottom pattern."""
        window = 20
        troughs = df['low'].rolling(window, center=True).apply(
            lambda x: 1 if x.iloc[window//2] == min(x) else 0
        )
        return troughs

    def _detect_head_shoulders(self, df: pd.DataFrame) -> pd.Series:
        """Detect head and shoulders pattern."""
        window = 30
        result = pd.Series(0, index=df.index)

        for i in range(window, len(df)-window):
            left = df['high'].iloc[i-window:i].max()
            head = df['high'].iloc[i]
            right = df['high'].iloc[i:i+window].max()

            if head > left and head > right and abs(left - right) < 0.1 * head:
                result.iloc[i] = 1

        return result

    def _analyze_volume_profile(self, market_data: MarketData) -> None:
        """Analyze volume profile and identify key price levels."""
        df = market_data.data

        try:
            # Calculate volume profile
            price_bins = pd.qcut(df['close'], q=50, duplicates='drop')
            volume_profile = df.groupby(price_bins)['volume'].sum()

            # Identify high volume nodes
            high_volume_levels = volume_profile.nlargest(5)

            market_data.volume_profile = {
                'volume_by_price': volume_profile.to_dict(),
                'high_volume_levels': high_volume_levels.to_dict()
            }

        except Exception as e:
            logger.error(f"Error analyzing volume profile: {e}")
            raise

    async def execute(self, **kwargs) -> Dict[str, Any]:
        """Execute the CCXT tool with comprehensive analysis."""
        try:
            self.validate_arguments(**kwargs)

            exchange_ids = kwargs['exchange_ids'].split(',')  # Split comma-separated string
            symbols = kwargs['symbols'].split(',')  # Split comma-separated string
            timeframe = kwargs.get('timeframe', '1h')
            lookback_periods = int(kwargs.get('lookback_periods', '500'))  # Convert to int
            analysis_types = kwargs.get('analysis_types', 'all').split(',')  # Split comma-separated string
            credentials_path = kwargs.get('credentials_path', 'config/exchange_credentials.json')

            # Initialize exchanges
            await self._initialize_exchanges(exchange_ids, credentials_path)

            # Fetch data and analyze for each symbol on each exchange
            results = {}
            for exchange_id in exchange_ids:
                for symbol in symbols:
                    # Fetch OHLCV data
                    market_data = await self._fetch_ohlcv(
                        exchange_id, symbol, timeframe, lookback_periods
                    )

                    # Perform requested analyses
                    if 'all' in analysis_types or 'technical' in analysis_types:
                        self._calculate_technical_indicators(market_data)

                    if 'all' in analysis_types or 'patterns' in analysis_types:
                        self._identify_patterns(market_data)

                    if 'all' in analysis_types or 'volume' in analysis_types:
                        self._analyze_volume_profile(market_data)

                    if 'all' in analysis_types or 'orderbook' in analysis_types:
                        order_book = await self._fetch_order_book(exchange_id, symbol)
                    else:
                        order_book = None

                    # Format results
                    key = f"{exchange_id}:{symbol}"
                    results[key] = {
                        'market_data': market_data.data.to_dict(orient='records'),
                        'indicators': {k: v.to_dict() for k, v in market_data.indicators.items()} if market_data.indicators else None,
                        'patterns': {k: v.to_dict() for k, v in market_data.patterns.items()} if market_data.patterns else None,
                        'volume_profile': market_data.volume_profile,
                        'order_book': order_book
                    }

            # Close exchange connections
            for exchange in self.exchanges.values():
                await exchange.close()

            return results

        except Exception as e:
            logger.error(f"Error executing CCXT tool: {e}")
            raise
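For orientation, here is a minimal usage sketch of the CCXTTool added by this file. It assumes the import path implied by the file list above (quantalogic/tools/finance/ccxt_tool.py) and uses illustrative exchange and symbol values; because only public OHLCV endpoints are touched here, the default credentials_path can be left as-is even if no credentials file exists.

import asyncio

from quantalogic.tools.finance.ccxt_tool import CCXTTool


async def main() -> None:
    tool = CCXTTool()
    # All arguments are strings, mirroring the ToolArgument declarations above;
    # execute() splits the comma-separated values and converts lookback_periods to int.
    results = await tool.execute(
        exchange_ids="binance",
        symbols="BTC/USDT",
        timeframe="1h",
        lookback_periods="200",
        analysis_types="technical,volume",
    )
    summary = results["binance:BTC/USDT"]  # results are keyed as "<exchange_id>:<symbol>"
    print(sorted(summary.keys()))  # indicators, market_data, order_book, patterns, volume_profile


if __name__ == "__main__":
    asyncio.run(main())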