ambivo-agents 1.3.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ambivo_agents/__init__.py +89 -0
- ambivo_agents/agents/__init__.py +19 -0
- ambivo_agents/agents/assistant.py +79 -0
- ambivo_agents/agents/code_executor.py +133 -0
- ambivo_agents/agents/knowledge_base.py +595 -0
- ambivo_agents/agents/media_editor.py +777 -0
- ambivo_agents/agents/simple_web_search.py +404 -0
- ambivo_agents/agents/web_scraper.py +682 -0
- ambivo_agents/agents/web_search.py +660 -0
- ambivo_agents/agents/youtube_download.py +553 -0
- ambivo_agents/cli.py +1871 -0
- ambivo_agents/config/__init__.py +4 -0
- ambivo_agents/config/loader.py +301 -0
- ambivo_agents/core/__init__.py +33 -0
- ambivo_agents/core/base.py +880 -0
- ambivo_agents/core/llm.py +333 -0
- ambivo_agents/core/memory.py +640 -0
- ambivo_agents/executors/__init__.py +8 -0
- ambivo_agents/executors/docker_executor.py +108 -0
- ambivo_agents/executors/media_executor.py +237 -0
- ambivo_agents/executors/youtube_executor.py +404 -0
- ambivo_agents/services/__init__.py +6 -0
- ambivo_agents/services/agent_service.py +590 -0
- ambivo_agents/services/factory.py +366 -0
- ambivo_agents-1.3.3.dist-info/METADATA +773 -0
- ambivo_agents-1.3.3.dist-info/RECORD +30 -0
- ambivo_agents-1.3.3.dist-info/WHEEL +5 -0
- ambivo_agents-1.3.3.dist-info/entry_points.txt +3 -0
- ambivo_agents-1.3.3.dist-info/licenses/LICENSE +21 -0
- ambivo_agents-1.3.3.dist-info/top_level.txt +1 -0
@@ -0,0 +1,660 @@
|
|
1
|
+
# ambivo_agents/agents/web_search.py
|
2
|
+
"""
|
3
|
+
Web Search Agent with Multiple Search Provider Support
|
4
|
+
"""
|
5
|
+
|
6
|
+
import asyncio
|
7
|
+
import json
|
8
|
+
import uuid
|
9
|
+
import time
|
10
|
+
import requests
|
11
|
+
from typing import Dict, List, Any, Optional
|
12
|
+
from datetime import datetime
|
13
|
+
from dataclasses import dataclass
|
14
|
+
|
15
|
+
from ..core.base import BaseAgent, AgentRole, AgentMessage, MessageType, ExecutionContext, AgentTool
|
16
|
+
from ..config.loader import load_config, get_config_section
|
17
|
+
|
18
|
+
|
19
|
+
@dataclass
class SearchResult:
    """Single search result data structure"""
    # Result headline as returned by the provider.
    title: str
    # Absolute URL of the result page.
    url: str
    # Short description/summary text supplied by the provider.
    snippet: str
    # Provider that produced this result ("brave" or "aves" in this module).
    source: str = ""
    # 1-based position of the result within the provider's ranking.
    rank: int = 0
    # Relevance score; when the provider returns none, a synthetic
    # rank-derived value (1.0 - rank*0.1) is used by the adapter.
    score: float = 0.0
    # Local time when the result object was created (search time,
    # not the page's publication time).
    timestamp: Optional[datetime] = None
|
29
|
+
|
30
|
+
|
31
|
+
@dataclass
class SearchResponse:
    """Search response containing multiple results"""
    # The query string that was executed (may include keyword augmentation
    # added by search_news/search_academic).
    query: str
    # Parsed results, at most the requested maximum.
    results: List[SearchResult]
    # Provider-reported total when available, otherwise len(results).
    total_results: int
    # Wall-clock duration of the search call, in seconds.
    search_time: float
    # Provider that served the request ("brave", "aves", or "none" when
    # no provider was available).
    provider: str
    # "success" or "error".
    status: str = "success"
    # Human-readable error description when status == "error".
    error: Optional[str] = None
|
41
|
+
|
42
|
+
|
43
|
+
class WebSearchServiceAdapter:
    """Web Search Service Adapter supporting multiple search providers.

    Providers are read from the ``web_search`` section of the YAML config.
    Each provider entry carries a priority (lower number wins), an
    availability flag and a per-request rate-limit delay.  When a provider
    hits a rate limit it is placed on a temporary cooldown and the
    next-best provider is used instead; once the cooldown expires the
    provider becomes eligible again.
    """

    # Seconds a rate-limited provider stays unavailable before it is retried.
    RATE_LIMIT_COOLDOWN = 300.0

    def __init__(self):
        """Load the web_search config and select the best provider.

        Raises:
            ValueError: if no provider API key is configured.
        """
        # Load configuration from YAML
        config = load_config()
        self.search_config = get_config_section('web_search', config)

        self.providers = {}
        self.current_provider = None

        # Initialize available providers
        self._initialize_providers()

        # Set default provider (lowest priority number)
        self.current_provider = self._get_best_provider()

    def _initialize_providers(self):
        """Initialize available search providers from the config section."""

        # Brave Search API
        if self.search_config.get('brave_api_key'):
            self.providers['brave'] = {
                'name': 'brave',
                'api_key': self.search_config['brave_api_key'],
                'base_url': 'https://api.search.brave.com/res/v1/web/search',
                'priority': 2,
                'available': True,
                'rate_limit_delay': 2.0
            }

        # AVES API
        if self.search_config.get('avesapi_api_key'):
            self.providers['aves'] = {
                'name': 'aves',
                'api_key': self.search_config['avesapi_api_key'],
                'base_url': 'https://api.avesapi.com/search',
                'priority': 1,
                'available': True,
                'rate_limit_delay': 1.5
            }

        if not self.providers:
            raise ValueError("No search providers configured in web_search section")

    def _refresh_cooldowns(self):
        """Re-enable providers whose rate-limit cooldown has expired.

        Fix: search_web sets ``cooldown_until`` when a provider is
        rate-limited, but previously nothing ever read it, so a single 429
        disabled a provider permanently for the adapter's lifetime.
        """
        now = time.time()
        for provider_config in self.providers.values():
            if (not provider_config.get('available', False)
                    and now >= provider_config.get('cooldown_until', 0.0)):
                provider_config['available'] = True
                provider_config.pop('cooldown_until', None)

    def _get_best_provider(self) -> Optional[str]:
        """Get the best available provider (lowest priority number), or None."""
        available_providers = [
            (name, config) for name, config in self.providers.items()
            if config.get('available', False)
        ]

        if not available_providers:
            return None

        available_providers.sort(key=lambda x: x[1]['priority'])
        return available_providers[0][0]

    async def search_web(self,
                         query: str,
                         max_results: int = 10,
                         country: str = "US",
                         language: str = "en") -> "SearchResponse":
        """Perform web search using the current provider with rate limiting.

        Falls back to a lower-priority provider on rate-limit errors.
        Never raises: failures are reported through the returned response's
        ``status``/``error`` fields.
        """
        start_time = time.time()

        # Give previously rate-limited providers another chance once their
        # cooldown has elapsed, and re-select if the current provider is
        # missing or flagged unavailable.
        self._refresh_cooldowns()
        if (self.current_provider is None
                or not self.providers.get(self.current_provider, {}).get('available', False)):
            self.current_provider = self._get_best_provider()

        if not self.current_provider:
            return SearchResponse(
                query=query,
                results=[],
                total_results=0,
                search_time=0.0,
                provider="none",
                status="error",
                error="No search provider available"
            )

        # Rate limiting: respect the provider's minimum spacing between calls.
        provider_config = self.providers[self.current_provider]
        if 'last_request_time' in provider_config:
            elapsed = time.time() - provider_config['last_request_time']
            delay = provider_config.get('rate_limit_delay', 1.0)
            if elapsed < delay:
                await asyncio.sleep(delay - elapsed)

        provider_config['last_request_time'] = time.time()

        try:
            if self.current_provider == 'brave':
                return await self._search_brave(query, max_results, country)
            elif self.current_provider == 'aves':
                return await self._search_aves(query, max_results)
            else:
                raise ValueError(f"Unknown provider: {self.current_provider}")

        except Exception as e:
            search_time = time.time() - start_time

            # Mark provider as temporarily unavailable on rate-limit errors;
            # _refresh_cooldowns() re-enables it after the cooldown window.
            error_str = str(e).lower()
            if any(keyword in error_str for keyword in ['429', 'rate limit', 'quota exceeded']):
                self.providers[self.current_provider]['available'] = False
                self.providers[self.current_provider]['cooldown_until'] = (
                    time.time() + self.RATE_LIMIT_COOLDOWN
                )

            # Try fallback provider and retry the whole search with it.
            fallback = self._try_fallback_provider()
            if fallback:
                return await self.search_web(query, max_results, country, language)

            return SearchResponse(
                query=query,
                results=[],
                total_results=0,
                search_time=search_time,
                provider=self.current_provider,
                status="error",
                error=str(e)
            )

    async def _search_brave(self, query: str, max_results: int, country: str) -> "SearchResponse":
        """Search using the Brave Search API.

        Raises:
            Exception: wrapping any HTTP/parsing failure; the message keeps
                enough detail for search_web's rate-limit detection.
        """
        start_time = time.time()

        provider_config = self.providers['brave']

        headers = {
            'Accept': 'application/json',
            'Accept-Encoding': 'gzip',
            'X-Subscription-Token': provider_config['api_key']
        }

        params = {
            'q': query,
            'count': min(max_results, 20),  # Brave caps count at 20
            'country': country,
            'search_lang': 'en',
            'ui_lang': 'en-US',
            # NOTE(review): 'pd' restricts results to the past day — confirm
            # this is intended for general (non-news) queries.
            'freshness': 'pd'
        }

        try:
            # NOTE(review): requests is a blocking call inside a coroutine;
            # consider run_in_executor or an async HTTP client if this is
            # observed blocking the event loop.
            response = requests.get(
                provider_config['base_url'],
                headers=headers,
                params=params,
                timeout=15
            )

            # Translate the common failure codes into descriptive errors.
            if response.status_code == 429:
                retry_after = response.headers.get('Retry-After', '300')
                raise Exception(f"Rate limit exceeded. Retry after {retry_after} seconds")
            elif response.status_code == 401:
                raise Exception(f"Authentication failed - check Brave API key")
            elif response.status_code == 403:
                raise Exception(f"Brave API access forbidden - check subscription")

            response.raise_for_status()

            data = response.json()
            search_time = time.time() - start_time

            results = []
            web_results = data.get('web', {}).get('results', [])

            for i, result in enumerate(web_results[:max_results]):
                results.append(SearchResult(
                    title=result.get('title', ''),
                    url=result.get('url', ''),
                    snippet=result.get('description', ''),
                    source='brave',
                    rank=i + 1,
                    score=1.0 - (i * 0.1),  # synthetic rank-based score
                    timestamp=datetime.now()
                ))

            return SearchResponse(
                query=query,
                results=results,
                total_results=len(results),
                search_time=search_time,
                provider='brave',
                status='success'
            )

        except Exception as e:
            search_time = time.time() - start_time
            # Chain the original exception so the traceback is preserved.
            raise Exception(f"Brave Search API error: {e}") from e

    async def _search_aves(self, query: str, max_results: int) -> "SearchResponse":
        """Search using the AVES API.

        Raises:
            Exception: wrapping any HTTP/parsing failure; the message keeps
                enough detail for search_web's rate-limit detection.
        """
        start_time = time.time()

        provider_config = self.providers['aves']

        headers = {
            'User-Agent': 'AmbivoAgentSystem/1.0'
        }

        params = {
            'apikey': provider_config['api_key'],
            'type': 'web',
            'query': query,
            'device': 'desktop',
            'output': 'json',
            'num': min(max_results, 10)  # AVES caps results at 10
        }

        try:
            # NOTE(review): blocking call inside a coroutine (see _search_brave).
            response = requests.get(
                provider_config['base_url'],
                headers=headers,
                params=params,
                timeout=15
            )

            if response.status_code == 403:
                raise Exception(f"AVES API access forbidden - check API key or quota")
            elif response.status_code == 401:
                raise Exception(f"AVES API authentication failed - invalid API key")
            elif response.status_code == 429:
                raise Exception(f"AVES API rate limit exceeded")

            response.raise_for_status()

            data = response.json()
            search_time = time.time() - start_time

            results = []

            # The response schema has varied; try the documented shape first,
            # then fall back through the alternative top-level keys observed.
            result_section = data.get('result', {})
            search_results = result_section.get('organic_results', [])

            if not search_results:
                search_results = data.get('organic_results',
                                          data.get('results', data.get('items', data.get('data', []))))

            for i, result in enumerate(search_results[:max_results]):
                title = result.get('title', 'No Title')
                url = result.get('url', result.get('link', result.get('href', '')))
                snippet = result.get('description', result.get('snippet', result.get('summary', '')))
                position = result.get('position', i + 1)

                results.append(SearchResult(
                    title=title,
                    url=url,
                    snippet=snippet,
                    source='aves',
                    rank=position,
                    score=result.get('score', 1.0 - (i * 0.1)),
                    timestamp=datetime.now()
                ))

            total_results_count = result_section.get('total_results', len(results))

            return SearchResponse(
                query=query,
                results=results,
                total_results=total_results_count,
                search_time=search_time,
                provider='aves',
                status='success'
            )

        except Exception as e:
            search_time = time.time() - start_time
            # Chain the original exception so the traceback is preserved.
            raise Exception(f"AVES Search API error: {e}") from e

    def _try_fallback_provider(self) -> bool:
        """Switch to the next-best available provider, if any.

        Returns:
            True when current_provider was switched, False otherwise.
        """
        current_priority = self.providers[self.current_provider]['priority']

        fallback_providers = [
            (name, config) for name, config in self.providers.items()
            if config['priority'] > current_priority and config.get('available', False)
        ]

        if fallback_providers:
            fallback_providers.sort(key=lambda x: x[1]['priority'])
            self.current_provider = fallback_providers[0][0]
            return True

        return False

    async def search_news(self, query: str, max_results: int = 10, days_back: int = 7) -> "SearchResponse":
        """Search for news articles.

        NOTE(review): days_back is currently not used for filtering; the
        query is only augmented with news-related keywords.
        """
        news_query = f"{query} news latest recent"
        return await self.search_web(news_query, max_results)

    async def search_academic(self, query: str, max_results: int = 10) -> "SearchResponse":
        """Search for academic content by augmenting the query with keywords."""
        academic_query = f"{query} research paper study academic"
        return await self.search_web(academic_query, max_results)
|
335
|
+
|
336
|
+
|
337
|
+
class WebSearchAgent(BaseAgent):
    """Web Search Agent that provides web search capabilities.

    Routes user messages to web/news/academic searches backed by
    WebSearchServiceAdapter and renders the results as markdown text.
    """

    def __init__(self, agent_id: str = None, memory_manager=None, llm_service=None, **kwargs):
        """Create the agent, its search service adapter, and its tools.

        Raises:
            RuntimeError: if the search service cannot be initialized
                (e.g. no provider configured in the web_search config).
        """
        if agent_id is None:
            agent_id = f"search_{str(uuid.uuid4())[:8]}"
        super().__init__(
            agent_id=agent_id,
            role=AgentRole.RESEARCHER,
            memory_manager=memory_manager,
            llm_service=llm_service,
            name="Web Search Agent",
            description="Agent for web search operations and information retrieval",
            **kwargs
        )

        # Initialize search service; fail fast if no provider is configured.
        try:
            self.search_service = WebSearchServiceAdapter()
        except Exception as e:
            raise RuntimeError(f"Failed to initialize Web Search Service: {e}") from e

        # Add web search tools
        self._add_search_tools()

    def _add_search_tools(self):
        """Register the web/news/academic search tools on this agent."""

        # General web search tool
        self.add_tool(AgentTool(
            name="search_web",
            description="Search the web for information",
            function=self._search_web,
            parameters_schema={
                "type": "object",
                "properties": {
                    "query": {"type": "string", "description": "Search query"},
                    "max_results": {"type": "integer", "default": 10, "description": "Maximum number of results"},
                    "country": {"type": "string", "default": "US", "description": "Country for search results"},
                    "language": {"type": "string", "default": "en", "description": "Language for search results"}
                },
                "required": ["query"]
            }
        ))

        # News search tool
        self.add_tool(AgentTool(
            name="search_news",
            description="Search for recent news articles",
            function=self._search_news,
            parameters_schema={
                "type": "object",
                "properties": {
                    "query": {"type": "string", "description": "News search query"},
                    "max_results": {"type": "integer", "default": 10, "description": "Maximum number of results"},
                    "days_back": {"type": "integer", "default": 7, "description": "How many days back to search"}
                },
                "required": ["query"]
            }
        ))

        # Academic search tool
        self.add_tool(AgentTool(
            name="search_academic",
            description="Search for academic papers and research",
            function=self._search_academic,
            parameters_schema={
                "type": "object",
                "properties": {
                    "query": {"type": "string", "description": "Academic search query"},
                    "max_results": {"type": "integer", "default": 10, "description": "Maximum number of results"}
                },
                "required": ["query"]
            }
        ))

    async def _search_web(self, query: str, max_results: int = 10, country: str = "US", language: str = "en") -> Dict[
        str, Any]:
        """Perform web search and return a serializable result dict."""
        try:
            search_response = await self.search_service.search_web(
                query=query,
                max_results=max_results,
                country=country,
                language=language
            )

            # Delegate to the shared formatter instead of duplicating its
            # logic inline (previous defect). The extra "search_type" and
            # per-result "source" keys are additive and backward compatible.
            return await self._format_search_response(search_response, "web")

        except Exception as e:
            return {"success": False, "error": str(e)}

    async def _search_news(self, query: str, max_results: int = 10, days_back: int = 7) -> Dict[str, Any]:
        """Search for news articles and return a serializable result dict."""
        try:
            search_response = await self.search_service.search_news(
                query=query,
                max_results=max_results,
                days_back=days_back
            )

            return await self._format_search_response(search_response, "news")

        except Exception as e:
            return {"success": False, "error": str(e)}

    async def _search_academic(self, query: str, max_results: int = 10) -> Dict[str, Any]:
        """Search for academic content and return a serializable result dict."""
        try:
            search_response = await self.search_service.search_academic(
                query=query,
                max_results=max_results
            )

            return await self._format_search_response(search_response, "academic")

        except Exception as e:
            return {"success": False, "error": str(e)}

    async def _format_search_response(self, search_response: SearchResponse, search_type: str) -> Dict[str, Any]:
        """Format a SearchResponse into a consistent, JSON-friendly dict.

        Kept async for interface compatibility even though it does not await.
        """
        if search_response.status == "success":
            results_data = []
            for result in search_response.results:
                results_data.append({
                    "title": result.title,
                    "url": result.url,
                    "snippet": result.snippet,
                    "rank": result.rank,
                    "score": result.score,
                    "source": result.source
                })

            return {
                "success": True,
                "search_type": search_type,
                "query": search_response.query,
                "results": results_data,
                "total_results": search_response.total_results,
                "search_time": search_response.search_time,
                "provider": search_response.provider
            }
        else:
            return {
                "success": False,
                "search_type": search_type,
                "error": search_response.error,
                "provider": search_response.provider
            }

    @staticmethod
    def _render_result_list(header: str, results: List[Dict[str, Any]]) -> str:
        """Render up to three result dicts as markdown under the given header.

        Extracted from the three request handlers, which previously
        duplicated this loop verbatim.
        """
        response = header
        for i, result in enumerate(results[:3], 1):
            response += f"**{i}. {result['title']}**\n"
            response += f"🔗 {result['url']}\n"
            response += f"📝 {result['snippet'][:200]}...\n\n"
        return response

    async def process_message(self, message: AgentMessage, context: ExecutionContext) -> AgentMessage:
        """Process incoming message and route to appropriate search operations"""
        self.memory.store_message(message)

        try:
            content = message.content.lower()
            user_message = message.content

            # Determine the appropriate action based on message content.
            # Order matters: a message matching both "search" and "news"
            # keywords is treated as a general search.
            if any(keyword in content for keyword in ['search', 'find', 'look up', 'what is', 'who is']):
                response_content = await self._handle_search_request(user_message, context)
            elif any(keyword in content for keyword in ['news', 'latest', 'recent', 'current']):
                response_content = await self._handle_news_search_request(user_message, context)
            elif any(keyword in content for keyword in ['research', 'academic', 'paper', 'study']):
                response_content = await self._handle_academic_search_request(user_message, context)
            else:
                response_content = await self._handle_general_search_request(user_message, context)

            response = self.create_response(
                content=response_content,
                recipient_id=message.sender_id,
                session_id=message.session_id,
                conversation_id=message.conversation_id
            )

            self.memory.store_message(response)
            return response

        except Exception as e:
            # Surface the failure to the caller as an ERROR-typed message.
            error_response = self.create_response(
                content=f"Web Search Agent error: {str(e)}",
                recipient_id=message.sender_id,
                message_type=MessageType.ERROR,
                session_id=message.session_id,
                conversation_id=message.conversation_id
            )
            return error_response

    async def _handle_search_request(self, user_message: str, context: ExecutionContext) -> str:
        """Handle general search requests"""
        # Extract search query from message by stripping trigger phrases.
        query = user_message.replace("search for", "").replace("find", "").replace("look up", "").strip()

        if len(query) < 3:
            return "Please provide a more specific search query. What would you like me to search for?"

        # Perform search
        search_result = await self._search_web(query, max_results=5)

        if search_result['success']:
            results = search_result['results']
            if results:
                response = self._render_result_list(f"🔍 **Search Results for: {query}**\n\n", results)
                response += f"Found {search_result['total_results']} results in {search_result['search_time']:.2f}s using {search_result['provider']}"
                return response
            else:
                return f"No results found for '{query}'. Try rephrasing your search query."
        else:
            return f"Search failed: {search_result['error']}"

    async def _handle_news_search_request(self, user_message: str, context: ExecutionContext) -> str:
        """Handle news search requests"""
        query = user_message.replace("news", "").replace("latest", "").replace("recent", "").strip()

        if len(query) < 3:
            return "What news topic would you like me to search for?"

        search_result = await self._search_news(query, max_results=5)

        if search_result['success']:
            results = search_result['results']
            if results:
                return self._render_result_list(f"📰 **Latest News for: {query}**\n\n", results)
            else:
                return f"No recent news found for '{query}'."
        else:
            return f"News search failed: {search_result['error']}"

    async def _handle_academic_search_request(self, user_message: str, context: ExecutionContext) -> str:
        """Handle academic search requests"""
        query = user_message.replace("research", "").replace("academic", "").replace("paper", "").strip()

        if len(query) < 3:
            return "What academic topic would you like me to research?"

        search_result = await self._search_academic(query, max_results=5)

        if search_result['success']:
            results = search_result['results']
            if results:
                return self._render_result_list(f"🎓 **Academic Results for: {query}**\n\n", results)
            else:
                return f"No academic results found for '{query}'."
        else:
            return f"Academic search failed: {search_result['error']}"

    async def _handle_general_search_request(self, user_message: str, context: ExecutionContext) -> str:
        """Handle general search assistance"""
        if self.llm_service:
            prompt = f"""
            You are a Web Search Agent that helps users find information on the internet.

            Your capabilities include:
            - General web search across multiple search engines
            - News search for current events and recent articles
            - Academic search for research papers and scholarly content
            - Search result summarization and analysis

            User message: {user_message}

            Provide a helpful response about how you can assist with their search needs.
            """

            response = await self.llm_service.generate_response(prompt, context.metadata)
            return response
        else:
            # Static capability summary used when no LLM service is attached.
            return ("I'm your Web Search Agent! I can help you with:\n\n"
                    "🔍 **Web Search**\n"
                    "- Search the internet for information\n"
                    "- Find websites, articles, and resources\n"
                    "- Multiple search engine support\n\n"
                    "📰 **News Search**\n"
                    "- Find latest news and current events\n"
                    "- Search across news sources\n"
                    "- Filter by recency\n\n"
                    "🎓 **Academic Search**\n"
                    "- Find research papers and studies\n"
                    "- Search academic databases\n"
                    "- Scholarly content retrieval\n\n"
                    "What would you like me to search for?")
|