agent0-sdk 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1016 @@
1
+ """
2
+ Agent indexer for discovery and search functionality.
3
+
4
+ ARCHITECTURAL PURPOSE:
5
+ ======================
6
+
7
+ The indexer serves as the unified entry point for all discovery and search operations
8
+ (agents AND feedback), not merely a thin wrapper around SubgraphClient. While currently
9
+ it delegates most queries to the subgraph, it is designed to be the foundation for:
10
+
11
+ 1. SEMANTIC/VECTOR SEARCH: Future integration with embeddings and vector databases
12
+ for semantic search across agent descriptions, feedback text, and capabilities.
13
+
14
+ 2. HYBRID SEARCH: Combining subgraph queries (structured data) with vector similarity
15
+ (semantic understanding) for richer discovery experiences.
16
+
17
+ 3. LOCAL INDEXING: Optional local caching and indexing for offline-capable applications
18
+ or performance optimization.
19
+
20
+ 4. SEARCH OPTIMIZATION: Advanced filtering, ranking, and relevance scoring that goes
21
+ beyond simple subgraph queries.
22
+
23
+ 5. MULTI-SOURCE AGGREGATION: Combining data from subgraph, blockchain direct queries,
24
+ and IPFS to provide complete agent/feedback information.
25
+
26
+ """
27
+
28
+ from __future__ import annotations
29
+
30
+ import asyncio
31
+ import json
32
+ import logging
33
+ import time
34
+ import aiohttp
35
+ from typing import Any, Dict, List, Optional, Union
36
+ from datetime import datetime
37
+
38
+ from .models import (
39
+ AgentId, ChainId, Address, URI, Timestamp,
40
+ AgentSummary, Feedback, SearchParams, SearchFeedbackParams
41
+ )
42
+ from .web3_client import Web3Client
43
+
44
+ logger = logging.getLogger(__name__)
45
+
46
+
47
class AgentIndexer:
    """Indexer for agent discovery and search."""

    def __init__(
        self,
        web3_client: Web3Client,
        store: Optional[Any] = None,
        embeddings: Optional[Any] = None,
        subgraph_client: Optional[Any] = None,
        identity_registry: Optional[Any] = None,
    ):
        """Initialize indexer.

        Args:
            web3_client: Client used for chain-id lookups and contract calls.
            store: Backing store for indexed agents/feedback; defaults to an
                in-memory dict (see _create_default_store).
            embeddings: Embedding model for future semantic search; defaults to
                a SentenceTransformer when the optional dependency is installed,
                otherwise None.
            subgraph_client: Optional subgraph client; when set it is the
                preferred backend for all queries.
            identity_registry: Optional identity-registry contract handle used
                by refresh_agent for tokenURI lookups.
        """
        self.web3_client = web3_client
        self.store = store or self._create_default_store()
        self.embeddings = embeddings or self._create_default_embeddings()
        self.subgraph_client = subgraph_client
        self.identity_registry = identity_registry
        self._agent_cache = {}  # Cache for agent data (currently unused by the visible code paths)
        self._cache_timestamp = 0
        self._cache_ttl = 7 * 24 * 60 * 60  # 1 week cache TTL (604800 seconds)
        self._http_cache = {}  # url -> (parsed JSON, fetch timestamp); see _fetch_http_content
        self._http_cache_ttl = 60 * 60  # 1 hour cache TTL for HTTP content
69
+
70
+ def _create_default_store(self) -> Dict[str, Any]:
71
+ """Create default in-memory store."""
72
+ return {
73
+ "agents": {},
74
+ "feedback": {},
75
+ "embeddings": {},
76
+ }
77
+
78
+ def _create_default_embeddings(self):
79
+ """Create default embeddings model."""
80
+ try:
81
+ from sentence_transformers import SentenceTransformer
82
+ return SentenceTransformer('all-MiniLM-L6-v2')
83
+ except ImportError:
84
+ # Return None if sentence-transformers is not available
85
+ return None
86
+
87
    async def _fetch_http_content(self, url: str) -> Optional[Dict[str, Any]]:
        """Fetch JSON content from an HTTP/HTTPS URL, with a TTL cache.

        Args:
            url: Absolute HTTP(S) URL expected to return a JSON body.

        Returns:
            The parsed JSON content, or None on any failure (non-200 status,
            network error, or body that fails JSON parsing); failures are
            logged, never raised.
        """
        # Serve from the in-memory cache while the entry is still fresh.
        current_time = time.time()
        if url in self._http_cache:
            cached_data, timestamp = self._http_cache[url]
            if current_time - timestamp < self._http_cache_ttl:
                return cached_data

        try:
            # NOTE(review): a fresh session per call is simple but pays
            # connection-setup cost each time; a shared session would amortize it.
            async with aiohttp.ClientSession() as session:
                async with session.get(url, timeout=aiohttp.ClientTimeout(total=10)) as response:
                    if response.status == 200:
                        content = await response.json()
                        # Cache the result keyed by URL with the fetch time.
                        self._http_cache[url] = (content, current_time)
                        return content
                    else:
                        logger.warning(f"Failed to fetch {url}: HTTP {response.status}")
                        return None
        except Exception as e:
            # Broad catch by design: timeouts, DNS errors and bad JSON are all
            # treated as a cache-less miss rather than propagated.
            logger.warning(f"Error fetching HTTPS content from {url}: {e}")
            return None
110
+
111
+ def _detect_uri_type(self, uri: str) -> str:
112
+ """Detect URI type (ipfs, https, http, unknown)."""
113
+ if uri.startswith("ipfs://"):
114
+ return "ipfs"
115
+ elif uri.startswith("https://"):
116
+ return "https"
117
+ elif uri.startswith("http://"):
118
+ return "http"
119
+ elif self._is_ipfs_cid(uri):
120
+ return "ipfs"
121
+ else:
122
+ return "unknown"
123
+
124
+ def _is_ipfs_cid(self, uri: str) -> bool:
125
+ """Check if string is an IPFS CID (without ipfs:// prefix)."""
126
+ # Basic IPFS CID patterns
127
+ # Qm... (CIDv0, 46 characters)
128
+ # bafy... (CIDv1, starts with bafy)
129
+ # bafk... (CIDv1, starts with bafk)
130
+ # bafg... (CIDv1, starts with bafg)
131
+ # bafh... (CIDv1, starts with bafh)
132
+ # bafq... (CIDv1, starts with bafq)
133
+ # bafr... (CIDv1, starts with bafr)
134
+ # bafs... (CIDv1, starts with bafs)
135
+ # baft... (CIDv1, starts with baft)
136
+ # bafu... (CIDv1, starts with bafu)
137
+ # bafv... (CIDv1, starts with bafv)
138
+ # bafw... (CIDv1, starts with bafw)
139
+ # bafx... (CIDv1, starts with bafx)
140
+ # bafy... (CIDv1, starts with bafy)
141
+ # bafz... (CIDv1, starts with bafz)
142
+
143
+ if not uri:
144
+ return False
145
+
146
+ # Check for CIDv0 (Qm...)
147
+ if uri.startswith("Qm") and len(uri) == 46:
148
+ return True
149
+
150
+ # Check for CIDv1 (baf...)
151
+ # CIDv1 has variable length but typically 50+ characters
152
+ # We'll be more lenient for shorter CIDs that start with baf
153
+ if uri.startswith("baf") and len(uri) >= 8:
154
+ return True
155
+
156
+ return False
157
+
158
+ def _is_ipfs_gateway_url(self, url: str) -> bool:
159
+ """Check if URL is an IPFS gateway URL."""
160
+ ipfs_gateways = [
161
+ "ipfs.io",
162
+ "gateway.pinata.cloud",
163
+ "cloudflare-ipfs.com",
164
+ "dweb.link",
165
+ "ipfs.fleek.co"
166
+ ]
167
+ return any(gateway in url for gateway in ipfs_gateways)
168
+
169
+ def _convert_gateway_to_ipfs(self, url: str) -> Optional[str]:
170
+ """Convert IPFS gateway URL to ipfs:// format."""
171
+ if "/ipfs/" in url:
172
+ # Extract hash from gateway URL
173
+ parts = url.split("/ipfs/")
174
+ if len(parts) == 2:
175
+ hash_part = parts[1].split("/")[0] # Remove any path after hash
176
+ return f"ipfs://{hash_part}"
177
+ return None
178
+
179
    async def _fetch_registration_file(self, uri: str) -> Optional[Dict[str, Any]]:
        """Fetch a registration file from IPFS or HTTP(S).

        Known IPFS gateway URLs are normalized to ipfs:// form and retried
        through the IPFS branch first.

        Returns:
            The parsed JSON dict for plain HTTP(S) URIs, or None for IPFS
            URIs (fetching is currently delegated to the subgraph) and for
            unsupported URI types.
        """
        uri_type = self._detect_uri_type(uri)

        if uri_type == "ipfs":
            # Normalize bare CID to ipfs:// format
            if not uri.startswith("ipfs://"):
                uri = f"ipfs://{uri}"

            # Use existing IPFS client (if available)
            # For now, return None as IPFS fetching is handled by subgraph
            return None
        elif uri_type in ["https", "http"]:
            # Check if it's an IPFS gateway URL
            if self._is_ipfs_gateway_url(uri):
                ipfs_uri = self._convert_gateway_to_ipfs(uri)
                if ipfs_uri:
                    # Recurse with the canonical ipfs:// URI; note that branch
                    # currently returns None (see above), so gateway URLs are
                    # effectively not fetched over HTTP here.
                    return await self._fetch_registration_file(ipfs_uri)

            # Fetch directly from HTTPS
            return await self._fetch_http_content(uri)
        else:
            logger.warning(f"Unsupported URI type: {uri}")
            return None
204
+
205
    async def _fetch_feedback_file(self, uri: str) -> Optional[Dict[str, Any]]:
        """Fetch a feedback file from IPFS or HTTP(S).

        Mirrors _fetch_registration_file: gateway URLs are normalized to
        ipfs:// and retried, IPFS URIs currently return None (handled by the
        subgraph), and plain HTTP(S) URIs are fetched directly.
        """
        uri_type = self._detect_uri_type(uri)

        if uri_type == "ipfs":
            # Normalize bare CID to ipfs:// format
            if not uri.startswith("ipfs://"):
                uri = f"ipfs://{uri}"

            # Use existing IPFS client (if available)
            # For now, return None as IPFS fetching is handled by subgraph
            return None
        elif uri_type in ["https", "http"]:
            # Check if it's an IPFS gateway URL
            if self._is_ipfs_gateway_url(uri):
                ipfs_uri = self._convert_gateway_to_ipfs(uri)
                if ipfs_uri:
                    # Recurse with the canonical ipfs:// URI; that branch
                    # currently returns None (see above).
                    return await self._fetch_feedback_file(ipfs_uri)

            # Fetch directly from HTTPS
            return await self._fetch_http_content(uri)
        else:
            logger.warning(f"Unsupported URI type: {uri}")
            return None
230
+
231
    async def refresh_agent(self, agent_id: AgentId, deep: bool = False) -> AgentSummary:
        """Refresh the index entry for a single agent.

        Args:
            agent_id: Either "chainId:tokenId" or a bare token id; in the
                latter case the web3 client's chain id is assumed.
            deep: When True, also run the (currently placeholder) capability
                crawl via _deep_refresh_agent.

        Returns:
            The freshly built AgentSummary; it is also written to
            self.store["agents"].

        Raises:
            ValueError: If no identity registry is configured, the tokenURI
                contract call fails, or the registration file cannot be loaded.
        """
        # Parse agent ID
        if ":" in agent_id:
            chain_id, token_id = agent_id.split(":", 1)
        else:
            chain_id = self.web3_client.chain_id
            token_id = agent_id

        # Get basic agent data from contract
        try:
            if self.identity_registry:
                token_uri = self.web3_client.call_contract(
                    self.identity_registry,
                    "tokenURI",
                    int(token_id)
                )
            else:
                raise ValueError("Identity registry not available")
        except Exception as e:
            # NOTE(review): this also re-wraps the "registry not available"
            # ValueError raised just above, nesting its message.
            raise ValueError(f"Failed to get agent data: {e}")

        # Load registration file (raises ValueError when the fetch fails)
        registration_data = await self._load_registration_data(token_uri)

        # Create agent summary
        summary = self._create_agent_summary(
            chain_id=int(chain_id),
            agent_id=agent_id,
            registration_data=registration_data
        )

        # Store in index
        self.store["agents"][agent_id] = summary

        # Deep refresh if requested
        if deep:
            await self._deep_refresh_agent(summary)

        return summary
271
+
272
+ async def refresh_agents(
273
+ self,
274
+ agent_ids: Optional[List[AgentId]] = None,
275
+ concurrency: int = 8,
276
+ ) -> List[AgentSummary]:
277
+ """Refresh index for multiple agents."""
278
+ if agent_ids is None:
279
+ # Get all known agents (this would need to be implemented)
280
+ agent_ids = list(self.store["agents"].keys())
281
+
282
+ # Use semaphore to limit concurrency
283
+ semaphore = asyncio.Semaphore(concurrency)
284
+
285
+ async def refresh_single(agent_id: AgentId) -> AgentSummary:
286
+ async with semaphore:
287
+ return await self.refresh_agent(agent_id)
288
+
289
+ # Execute all refreshes concurrently
290
+ tasks = [refresh_single(agent_id) for agent_id in agent_ids]
291
+ results = await asyncio.gather(*tasks, return_exceptions=True)
292
+
293
+ # Filter out exceptions
294
+ summaries = []
295
+ for result in results:
296
+ if isinstance(result, Exception):
297
+ logger.warning(f"Error refreshing agent: {result}")
298
+ else:
299
+ summaries.append(result)
300
+
301
+ return summaries
302
+
303
+ async def _load_registration_data(self, uri: str) -> Dict[str, Any]:
304
+ """Load registration data from URI."""
305
+ registration_file = await self._fetch_registration_file(uri)
306
+ if registration_file is None:
307
+ raise ValueError(f"Failed to load registration data from: {uri}")
308
+ return registration_file
309
+
310
    def _create_agent_summary(
        self,
        chain_id: int,
        agent_id: AgentId,
        registration_data: Dict[str, Any]
    ) -> AgentSummary:
        """Build an AgentSummary from a parsed registration file.

        Endpoint entries named "MCP"/"A2A" set the corresponding protocol
        flags; "ENS"/"DID" entries supply those identifiers. Capability lists
        stay empty until endpoint crawling is implemented.
        """
        # Extract endpoints
        endpoints = registration_data.get("endpoints", [])
        mcp = any(ep.get("name") == "MCP" for ep in endpoints)
        a2a = any(ep.get("name") == "A2A" for ep in endpoints)

        ens = None
        did = None
        for ep in endpoints:
            if ep.get("name") == "ENS":
                ens = ep.get("endpoint")
            elif ep.get("name") == "DID":
                did = ep.get("endpoint")

        # Extract capabilities (would need MCP/A2A crawling)
        a2a_skills = []
        mcp_tools = []
        mcp_prompts = []
        mcp_resources = []

        return AgentSummary(
            chainId=chain_id,
            agentId=agent_id,
            name=registration_data.get("name", ""),
            image=registration_data.get("image"),
            description=registration_data.get("description", ""),
            owners=[],  # Would be populated from contract
            operators=[],  # Would be populated from contract
            mcp=mcp,
            a2a=a2a,
            ens=ens,
            did=did,
            walletAddress=registration_data.get("walletAddress"),
            # NOTE(review): reads "supportedTrust" here while the subgraph
            # paths read "supportedTrusts" — confirm the registration-file
            # field name is singular.
            supportedTrusts=registration_data.get("supportedTrust", []),
            a2aSkills=a2a_skills,
            mcpTools=mcp_tools,
            mcpPrompts=mcp_prompts,
            mcpResources=mcp_resources,
            active=registration_data.get("active", True),
            extras={}
        )
357
+
358
+ async def _deep_refresh_agent(self, summary: AgentSummary):
359
+ """Perform deep refresh of agent capabilities."""
360
+ # This would crawl MCP/A2A endpoints to extract capabilities
361
+ # For now, it's a placeholder
362
+ pass
363
+
364
+ def get_agent(self, agent_id: AgentId) -> AgentSummary:
365
+ """Get agent summary from index."""
366
+ # Use subgraph if available (preferred)
367
+ if self.subgraph_client:
368
+ return self._get_agent_from_subgraph(agent_id)
369
+
370
+ # Fallback to local cache
371
+ if agent_id not in self.store["agents"]:
372
+ raise ValueError(f"Agent {agent_id} not found in index")
373
+ return self.store["agents"][agent_id]
374
+
375
    def _get_agent_from_subgraph(self, agent_id: AgentId) -> AgentSummary:
        """Fetch one agent from the subgraph and map it to an AgentSummary.

        Missing or malformed registrationFile data degrades to defaults
        (empty strings/lists, active=True) rather than failing.

        Raises:
            ValueError: If the agent is absent or the subgraph query fails.
        """
        try:
            agent_data = self.subgraph_client.get_agent_by_id(agent_id)

            if agent_data is None:
                # NOTE(review): this ValueError is re-wrapped by the outer
                # except below, nesting its message.
                raise ValueError(f"Agent {agent_id} not found in subgraph")

            # Guard against a null or non-dict registrationFile payload.
            reg_file = agent_data.get('registrationFile') or {}
            if not isinstance(reg_file, dict):
                reg_file = {}

            return AgentSummary(
                chainId=int(agent_data.get('chainId', 0)),
                agentId=agent_data.get('id', agent_id),
                name=reg_file.get('name', f"Agent {agent_id}"),
                image=reg_file.get('image'),
                description=reg_file.get('description', ''),
                owners=[agent_data.get('owner', '')],
                operators=agent_data.get('operators', []),
                # Protocol flags are derived from endpoint presence.
                mcp=reg_file.get('mcpEndpoint') is not None,
                a2a=reg_file.get('a2aEndpoint') is not None,
                ens=reg_file.get('ens'),
                did=reg_file.get('did'),
                walletAddress=reg_file.get('agentWallet'),
                supportedTrusts=reg_file.get('supportedTrusts', []),
                a2aSkills=reg_file.get('a2aSkills', []),
                mcpTools=reg_file.get('mcpTools', []),
                mcpPrompts=reg_file.get('mcpPrompts', []),
                mcpResources=reg_file.get('mcpResources', []),
                active=reg_file.get('active', True),
                x402support=reg_file.get('x402support', False),
                extras={}
            )

        except Exception as e:
            raise ValueError(f"Failed to get agent from subgraph: {e}")
412
+
413
+ def search_agents(
414
+ self,
415
+ params: SearchParams,
416
+ sort: List[str],
417
+ page_size: int,
418
+ cursor: Optional[str] = None,
419
+ ) -> Dict[str, Any]:
420
+ """Search for agents by querying the subgraph or blockchain."""
421
+ # Use subgraph if available (preferred)
422
+ if self.subgraph_client:
423
+ return self._search_agents_via_subgraph(params, sort, page_size, cursor)
424
+
425
+ # Fallback to blockchain queries
426
+ return self._search_agents_via_blockchain(params, sort, page_size, cursor)
427
+
428
    def _search_agents_via_subgraph(
        self,
        params: SearchParams,
        sort: List[str],
        page_size: int,
        cursor: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Search for agents using the subgraph.

        Filterable fields are translated into a subgraph `where` clause on the
        nested registrationFile entity; chains/supportedTrust are filtered
        client-side after the query. The cursor is an integer skip offset
        encoded as a string.

        Returns:
            {"items": [agent dicts], "nextCursor": str or None}. Failures are
            logged and yield an empty page.
        """
        # Build subgraph query filters
        where_clause = {}
        reg_file_where = {}

        if params.name is not None:
            reg_file_where["name_contains"] = params.name
        if params.active is not None:
            reg_file_where["active"] = params.active
        if params.x402support is not None:
            reg_file_where["x402support"] = params.x402support
        if params.mcp is not None:
            # mcp=True means "has an MCP endpoint"; False means none set.
            if params.mcp:
                reg_file_where["mcpEndpoint_not"] = None
            else:
                reg_file_where["mcpEndpoint"] = None
        if params.a2a is not None:
            if params.a2a:
                reg_file_where["a2aEndpoint_not"] = None
            else:
                reg_file_where["a2aEndpoint"] = None
        if params.ens is not None:
            reg_file_where["ens"] = params.ens
        if params.did is not None:
            reg_file_where["did"] = params.did
        if params.walletAddress is not None:
            reg_file_where["agentWallet"] = params.walletAddress

        if reg_file_where:
            where_clause["registrationFile_"] = reg_file_where

        # Calculate pagination: cursor is an opaque stringified skip offset.
        skip = 0
        if cursor:
            try:
                skip = int(cursor)
            except ValueError:
                skip = 0

        # Determine sort: first entry only, "field" or "field:direction".
        order_by = "createdAt"
        order_direction = "desc"
        if sort and len(sort) > 0:
            sort_field = sort[0].split(":")
            if len(sort_field) >= 1:
                order_by = sort_field[0]
            if len(sort_field) >= 2:
                order_direction = sort_field[1]

        try:
            agents = self.subgraph_client.get_agents(
                where=where_clause if where_clause else None,
                first=page_size,
                skip=skip,
                order_by=order_by,
                order_direction=order_direction
            )

            results = []
            for agent in agents:
                reg_file = agent.get('registrationFile') or {}
                # Ensure reg_file is a dict
                if not isinstance(reg_file, dict):
                    reg_file = {}

                agent_data = {
                    "agentId": agent.get('id'),
                    "chainId": agent.get('chainId'),
                    # NOTE(review): fallback reads agent['agentId'] but the id
                    # is stored under 'id' above — the fallback label is
                    # likely "Agent None"; confirm the subgraph schema.
                    "name": reg_file.get('name', f"Agent {agent.get('agentId')}"),
                    "description": reg_file.get('description', ''),
                    "image": reg_file.get('image'),
                    "owner": agent.get('owner'),
                    "operators": agent.get('operators', []),
                    "mcp": reg_file.get('mcpEndpoint') is not None,
                    "a2a": reg_file.get('a2aEndpoint') is not None,
                    "ens": reg_file.get('ens'),
                    "did": reg_file.get('did'),
                    "walletAddress": reg_file.get('agentWallet'),
                    "supportedTrusts": reg_file.get('supportedTrusts', []),
                    "a2aSkills": reg_file.get('a2aSkills', []),
                    "mcpTools": reg_file.get('mcpTools', []),
                    "mcpPrompts": reg_file.get('mcpPrompts', []),
                    "mcpResources": reg_file.get('mcpResources', []),
                    "active": reg_file.get('active', True),
                    "x402support": reg_file.get('x402support', False),
                    "totalFeedback": agent.get('totalFeedback', 0),
                    "lastActivity": agent.get('lastActivity'),
                    "updatedAt": agent.get('updatedAt'),
                    "extras": {}
                }

                # Client-side filters not expressible in the subgraph query.
                # NOTE(review): dropping items here shrinks the page below
                # page_size, which both suppresses nextCursor and makes the
                # skip-based cursor undercount — pagination can skip or
                # truncate results when these filters are active.
                if params.chains is not None:
                    if agent_data["chainId"] not in params.chains:
                        continue
                if params.supportedTrust is not None:
                    if not any(trust in agent_data["supportedTrusts"] for trust in params.supportedTrust):
                        continue

                results.append(agent_data)

            # A full page implies there may be more; otherwise stop paging.
            next_cursor = str(skip + len(results)) if len(results) == page_size else None
            return {"items": results, "nextCursor": next_cursor}

        except Exception as e:
            logger.warning(f"Subgraph search failed: {e}")
            return {"items": [], "nextCursor": None}
541
+
542
+ def _search_agents_via_blockchain(
543
+ self,
544
+ params: SearchParams,
545
+ sort: List[str],
546
+ page_size: int,
547
+ cursor: Optional[str] = None,
548
+ ) -> Dict[str, Any]:
549
+ """Search for agents by querying the blockchain (fallback)."""
550
+ return {"items": [], "nextCursor": None}
551
+
552
+ def _apply_filters(self, agents: List[Dict[str, Any]], params: SearchParams) -> List[Dict[str, Any]]:
553
+ """Apply search filters to agents."""
554
+ filtered = agents
555
+
556
+ if params.chains is not None:
557
+ filtered = [a for a in filtered if a.get("chainId") in params.chains]
558
+
559
+ if params.name is not None:
560
+ filtered = [a for a in filtered if params.name.lower() in a.get("name", "").lower()]
561
+
562
+ if params.description is not None:
563
+ # This would use semantic search with embeddings
564
+ filtered = [a for a in filtered if params.description.lower() in a.get("description", "").lower()]
565
+
566
+ if params.owners is not None:
567
+ filtered = [a for a in filtered if any(owner in params.owners for owner in a.get("owners", []))]
568
+
569
+ if params.operators is not None:
570
+ filtered = [a for a in filtered if any(op in params.operators for op in a.get("operators", []))]
571
+
572
+ if params.mcp is not None:
573
+ filtered = [a for a in filtered if a.get("mcp") == params.mcp]
574
+
575
+ if params.a2a is not None:
576
+ filtered = [a for a in filtered if a.get("a2a") == params.a2a]
577
+
578
+ if params.ens is not None:
579
+ filtered = [a for a in filtered if a.get("ens") and params.ens.lower() in a.get("ens", "").lower()]
580
+
581
+ if params.did is not None:
582
+ filtered = [a for a in filtered if a.get("did") == params.did]
583
+
584
+ if params.walletAddress is not None:
585
+ filtered = [a for a in filtered if a.get("walletAddress") == params.walletAddress]
586
+
587
+ if params.supportedTrust is not None:
588
+ filtered = [a for a in filtered if any(trust in params.supportedTrust for trust in a.get("supportedTrusts", []))]
589
+
590
+ if params.a2aSkills is not None:
591
+ filtered = [a for a in filtered if any(skill in params.a2aSkills for skill in a.get("a2aSkills", []))]
592
+
593
+ if params.mcpTools is not None:
594
+ filtered = [a for a in filtered if any(tool in params.mcpTools for tool in a.get("mcpTools", []))]
595
+
596
+ if params.mcpPrompts is not None:
597
+ filtered = [a for a in filtered if any(prompt in params.mcpPrompts for prompt in a.get("mcpPrompts", []))]
598
+
599
+ if params.mcpResources is not None:
600
+ filtered = [a for a in filtered if any(resource in params.mcpResources for resource in a.get("mcpResources", []))]
601
+
602
+ if params.active is not None:
603
+ filtered = [a for a in filtered if a.get("active") == params.active]
604
+
605
+ if params.x402support is not None:
606
+ filtered = [a for a in filtered if a.get("x402support") == params.x402support]
607
+
608
+ return filtered
609
+
610
+ def _apply_sorting(self, agents: List[AgentSummary], sort: List[str]) -> List[AgentSummary]:
611
+ """Apply sorting to agents."""
612
+ def sort_key(agent):
613
+ key_values = []
614
+ for sort_field in sort:
615
+ field, direction = sort_field.split(":", 1)
616
+ if hasattr(agent, field):
617
+ value = getattr(agent, field)
618
+ if direction == "desc":
619
+ value = -value if isinstance(value, (int, float)) else value
620
+ key_values.append(value)
621
+ return key_values
622
+
623
+ return sorted(agents, key=sort_key)
624
+
625
+ def get_feedback(
626
+ self,
627
+ agentId: AgentId,
628
+ clientAddress: Address,
629
+ feedbackIndex: int,
630
+ ) -> Feedback:
631
+ """Get single feedback by agent ID, client address, and index."""
632
+ # Use subgraph if available (preferred)
633
+ if self.subgraph_client:
634
+ return self._get_feedback_from_subgraph(agentId, clientAddress, feedbackIndex)
635
+
636
+ # Fallback to local store (if populated in future)
637
+ # For now, raise error if subgraph unavailable
638
+ feedback_id = Feedback.create_id(agentId, clientAddress, feedbackIndex)
639
+ if feedback_id not in self.store["feedback"]:
640
+ raise ValueError(f"Feedback {feedback_id} not found (subgraph required)")
641
+ return self.store["feedback"][feedback_id]
642
+
643
    def _get_feedback_from_subgraph(
        self,
        agentId: AgentId,
        clientAddress: Address,
        feedbackIndex: int,
    ) -> Feedback:
        """Fetch one feedback entry from the subgraph and map it to a model.

        The subgraph feedback id is "chainId:agentId:clientAddress:index";
        when *agentId* already contains a chain prefix it is used as-is,
        otherwise the client's chain id is prepended.

        Raises:
            ValueError: If the feedback is absent or the query fails.
        """
        # Normalize addresses to lowercase for consistent storage
        normalized_client_address = self.web3_client.normalize_address(clientAddress)

        # Build feedback ID in format: chainId:agentId:clientAddress:feedbackIndex
        if ":" in agentId:
            feedback_id = f"{agentId}:{normalized_client_address}:{feedbackIndex}"
        else:
            chain_id = str(self.web3_client.chain_id)
            feedback_id = f"{chain_id}:{agentId}:{normalized_client_address}:{feedbackIndex}"

        try:
            feedback_data = self.subgraph_client.get_feedback_by_id(feedback_id)

            if feedback_data is None:
                # NOTE(review): this ValueError is re-wrapped by the outer
                # except below, nesting its message.
                raise ValueError(f"Feedback {feedback_id} not found in subgraph")

            return self._map_subgraph_feedback_to_model(feedback_data, agentId, clientAddress, feedbackIndex)

        except Exception as e:
            raise ValueError(f"Failed to get feedback from subgraph: {e}")
670
+
671
    def _map_subgraph_feedback_to_model(
        self,
        feedback_data: Dict[str, Any],
        agentId: AgentId,
        clientAddress: Address,
        feedbackIndex: int,
    ) -> Feedback:
        """Map raw subgraph feedback data onto the Feedback model.

        Handles a null/non-dict feedbackFile, maps response entries, and
        decodes tags that may arrive either as plain strings or as on-chain
        hex bytes32 values.
        """
        # Guard against a null or non-dict feedbackFile payload.
        feedback_file = feedback_data.get('feedbackFile') or {}
        if not isinstance(feedback_file, dict):
            feedback_file = {}

        # Map responses
        responses_data = feedback_data.get('responses', [])
        answers = []
        for resp in responses_data:
            answers.append({
                'responder': resp.get('responder'),
                'responseUri': resp.get('responseUri'),
                'responseHash': resp.get('responseHash'),
                'createdAt': resp.get('createdAt')
            })

        # Map tags - check if they're hex bytes32 or plain strings.
        # Top-level values win over feedbackFile values.
        tags = []
        tag1 = feedback_data.get('tag1') or feedback_file.get('tag1')
        tag2 = feedback_data.get('tag2') or feedback_file.get('tag2')

        # Convert hex bytes32 to readable tags
        if tag1 or tag2:
            tags = self._hexBytes32ToTags(
                tag1 if isinstance(tag1, str) else "",
                tag2 if isinstance(tag2, str) else ""
            )

            # If conversion failed, try as plain strings
            # NOTE(review): tag1/tag2 may be non-str here (the isinstance
            # guard above only applies to the helper call) — .startswith
            # would raise for a truthy non-str value; confirm subgraph types.
            if not tags:
                if tag1 and not tag1.startswith("0x"):
                    tags.append(tag1)
                if tag2 and not tag2.startswith("0x"):
                    tags.append(tag2)

        return Feedback(
            id=Feedback.create_id(agentId, clientAddress, feedbackIndex),
            agentId=agentId,
            reviewer=self.web3_client.normalize_address(clientAddress),
            score=feedback_data.get('score'),
            tags=tags,
            text=feedback_file.get('text'),
            capability=feedback_file.get('capability'),
            context=feedback_file.get('context'),
            # Proof-of-payment is only attached when the from-address exists.
            proof_of_payment={
                'fromAddress': feedback_file.get('proofOfPaymentFromAddress'),
                'toAddress': feedback_file.get('proofOfPaymentToAddress'),
                'chainId': feedback_file.get('proofOfPaymentChainId'),
                'txHash': feedback_file.get('proofOfPaymentTxHash'),
            } if feedback_file.get('proofOfPaymentFromAddress') else None,
            fileURI=feedback_data.get('feedbackUri'),
            createdAt=feedback_data.get('createdAt', int(time.time())),
            answers=answers,
            isRevoked=feedback_data.get('isRevoked', False),
            name=feedback_file.get('name'),
            skill=feedback_file.get('skill'),
            task=feedback_file.get('task'),
        )
736
+
737
+ def search_feedback(
738
+ self,
739
+ agentId: AgentId,
740
+ clientAddresses: Optional[List[Address]] = None,
741
+ tags: Optional[List[str]] = None,
742
+ capabilities: Optional[List[str]] = None,
743
+ skills: Optional[List[str]] = None,
744
+ tasks: Optional[List[str]] = None,
745
+ names: Optional[List[str]] = None,
746
+ minScore: Optional[int] = None,
747
+ maxScore: Optional[int] = None,
748
+ include_revoked: bool = False,
749
+ first: int = 100,
750
+ skip: int = 0,
751
+ ) -> List[Feedback]:
752
+ """Search feedback for an agent - uses subgraph if available."""
753
+ # Use subgraph if available (preferred)
754
+ if self.subgraph_client:
755
+ return self._search_feedback_subgraph(
756
+ agentId, clientAddresses, tags, capabilities, skills, tasks, names,
757
+ minScore, maxScore, include_revoked, first, skip
758
+ )
759
+
760
+ # Fallback not implemented (would require blockchain queries)
761
+ # For now, return empty if subgraph unavailable
762
+ return []
763
+
764
    def _search_feedback_subgraph(
        self,
        agentId: AgentId,
        clientAddresses: Optional[List[Address]],
        tags: Optional[List[str]],
        capabilities: Optional[List[str]],
        skills: Optional[List[str]],
        tasks: Optional[List[str]],
        names: Optional[List[str]],
        minScore: Optional[int],
        maxScore: Optional[int],
        include_revoked: bool,
        first: int,
        skip: int,
    ) -> List[Feedback]:
        """Search feedback via the subgraph and map results to Feedback models.

        Feedback ids are expected in "chainId:agentId[:client[:index]]" form;
        missing client/index components fall back to "" and 1 respectively.
        """
        # Create SearchFeedbackParams
        params = SearchFeedbackParams(
            agents=[agentId],
            reviewers=clientAddresses,
            tags=tags,
            capabilities=capabilities,
            skills=skills,
            tasks=tasks,
            names=names,
            minScore=minScore,
            maxScore=maxScore,
            includeRevoked=include_revoked
        )

        # Query subgraph (newest first)
        feedbacks_data = self.subgraph_client.search_feedback(
            params=params,
            first=first,
            skip=skip,
            order_by="createdAt",
            order_direction="desc"
        )

        # Map to Feedback objects
        feedbacks = []
        for fb_data in feedbacks_data:
            # Parse agentId from feedback ID
            feedback_id = fb_data['id']
            parts = feedback_id.split(':')
            if len(parts) >= 2:
                # First two components form the "chainId:tokenId" agent id.
                agent_id_str = f"{parts[0]}:{parts[1]}"
                client_addr = parts[2] if len(parts) > 2 else ""
                # NOTE(review): index defaults to 1 when absent — confirm
                # feedback indices are 1-based in the subgraph.
                feedback_idx = int(parts[3]) if len(parts) > 3 else 1
            else:
                agent_id_str = feedback_id
                client_addr = ""
                feedback_idx = 1

            feedback = self._map_subgraph_feedback_to_model(
                fb_data, agent_id_str, client_addr, feedback_idx
            )
            feedbacks.append(feedback)

        return feedbacks
824
+
825
+ def _hexBytes32ToTags(self, tag1: str, tag2: str) -> List[str]:
826
+ """Convert hex bytes32 tags back to strings, or return plain strings as-is.
827
+
828
+ The subgraph now stores tags as human-readable strings (not hex),
829
+ so this method handles both formats for backwards compatibility.
830
+ """
831
+ tags = []
832
+
833
+ if tag1 and tag1 != "0x" + "00" * 32:
834
+ # If it's already a plain string (from subgraph), use it directly
835
+ if not tag1.startswith("0x"):
836
+ if tag1:
837
+ tags.append(tag1)
838
+ else:
839
+ # Try to convert from hex bytes32 (on-chain format)
840
+ try:
841
+ hex_bytes = bytes.fromhex(tag1[2:])
842
+ tag1_str = hex_bytes.rstrip(b'\x00').decode('utf-8', errors='ignore')
843
+ if tag1_str:
844
+ tags.append(tag1_str)
845
+ except Exception:
846
+ pass # Ignore invalid hex strings
847
+
848
+ if tag2 and tag2 != "0x" + "00" * 32:
849
+ # If it's already a plain string (from subgraph), use it directly
850
+ if not tag2.startswith("0x"):
851
+ if tag2:
852
+ tags.append(tag2)
853
+ else:
854
+ # Try to convert from hex bytes32 (on-chain format)
855
+ try:
856
+ if tag2.startswith("0x"):
857
+ hex_bytes = bytes.fromhex(tag2[2:])
858
+ else:
859
+ hex_bytes = bytes.fromhex(tag2)
860
+ tag2_str = hex_bytes.rstrip(b'\x00').decode('utf-8', errors='ignore')
861
+ if tag2_str:
862
+ tags.append(tag2_str)
863
+ except Exception:
864
+ pass # Ignore invalid hex strings
865
+
866
+ return tags
867
+
868
    def get_reputation_summary(
        self,
        agent_id: AgentId,
        group_by: List[str],
        reviewers: Optional[List[Address]] = None,
        since: Optional[Timestamp] = None,
        until: Optional[Timestamp] = None,
        sort: Optional[List[str]] = None,
        page_size: int = 100,
        cursor: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Get reputation summary for an agent.

        Placeholder: feedback aggregation is not implemented yet, so every
        argument is currently ignored and the result is always an empty,
        cursor-less page.

        Returns:
            {"groups": [], "nextCursor": None}.
        """
        # This would aggregate feedback data
        # For now, return empty result
        return {
            "groups": [],
            "nextCursor": None
        }
886
+
887
+ def get_reputation_map(
888
+ self,
889
+ agents: List[Union[AgentSummary, AgentId]],
890
+ filters: Dict[str, Any],
891
+ sort: List[str],
892
+ reviewers: Optional[List[Address]] = None,
893
+ ) -> List[Dict[str, Any]]:
894
+ """Get reputation map for multiple agents."""
895
+ # This would calculate reputation metrics for each agent
896
+ # For now, return empty result
897
+ return []
898
+
899
    def _get_agent_from_blockchain(self, token_id: int, sdk) -> Optional[Dict[str, Any]]:
        """Build an agent dict directly from on-chain data plus IPFS metadata.

        Args:
            token_id: The identity-registry token id.
            sdk: An SDK-like object exposing identity_registry, chain_id and
                (optionally) ipfs_client.

        Returns:
            An agent dict; registration-file fields come from IPFS when
            loadable, otherwise generic fallbacks are used. Returns None and
            logs when the contract calls themselves fail.
        """
        try:
            # Get token URI from contract
            token_uri = self.web3_client.call_contract(
                sdk.identity_registry,
                "tokenURI",
                token_id
            )

            # Get owner
            owner = self.web3_client.call_contract(
                sdk.identity_registry,
                "ownerOf",
                token_id
            )

            # Create agent ID in "chainId:tokenId" form
            agent_id = f"{sdk.chain_id}:{token_id}"

            # Try to load registration data from IPFS
            registration_data = self._load_registration_from_ipfs(token_uri, sdk)

            if registration_data:
                # Use data from IPFS
                return {
                    "agentId": agent_id,
                    "name": registration_data.get("name", f"Agent {token_id}"),
                    "description": registration_data.get("description", f"Agent registered with token ID {token_id}"),
                    "owner": owner,
                    "tokenId": token_id,
                    "tokenURI": token_uri,
                    "x402support": registration_data.get("x402support", False),
                    "trustModels": registration_data.get("trustModels", ["reputation"]),
                    "active": registration_data.get("active", True),
                    "endpoints": registration_data.get("endpoints", []),
                    "image": registration_data.get("image"),
                    "walletAddress": registration_data.get("walletAddress"),
                    "metadata": registration_data.get("metadata", {})
                }
            else:
                # Fallback to basic data derived from the chain alone
                return {
                    "agentId": agent_id,
                    "name": f"Agent {token_id}",
                    "description": f"Agent registered with token ID {token_id}",
                    "owner": owner,
                    "tokenId": token_id,
                    "tokenURI": token_uri,
                    "x402support": False,
                    "trustModels": ["reputation"],
                    "active": True,
                    "endpoints": [],
                    "image": None,
                    "walletAddress": None,
                    "metadata": {}
                }
        except Exception as e:
            logger.error(f"Error loading agent {token_id}: {e}")
            return None
959
+
960
    def _load_registration_from_ipfs(self, token_uri: str, sdk) -> Optional[Dict[str, Any]]:
        """Load agent registration data from IPFS or an HTTP gateway.

        Resolution order: a local IPFS client on *sdk* (when present), then a
        list of public IPFS HTTP gateways; plain HTTPS URIs are fetched
        directly. All failures are logged and collapsed to None.

        Args:
            token_uri: ipfs:// URI, IPFS-gateway URL, or plain HTTPS URL.
            sdk: Object that may expose an ipfs_client with a .get(hash) method.

        Returns:
            The parsed JSON dict, or None when nothing could be loaded.
        """
        try:
            import json
            import requests

            # Extract IPFS hash from token URI
            if token_uri.startswith("ipfs://"):
                ipfs_hash = token_uri[7:]  # Remove "ipfs://" prefix
            elif token_uri.startswith("https://") and "ipfs" in token_uri:
                # Extract hash from IPFS gateway URL (last non-empty path part)
                parts = token_uri.split("/")
                ipfs_hash = parts[-1] if parts[-1] else parts[-2]
            elif token_uri.startswith("https://"):
                # Direct HTTP URL - try to fetch directly (synchronous)
                try:
                    response = requests.get(token_uri, timeout=10)
                    response.raise_for_status()
                    return response.json()
                except Exception as e:
                    logger.warning(f"Could not load HTTP data from {token_uri}: {e}")
                    return None
            else:
                # Unrecognized scheme: nothing to load.
                return None

            # Try local IPFS client first (if available)
            if hasattr(sdk, 'ipfs_client') and sdk.ipfs_client is not None:
                try:
                    data = sdk.ipfs_client.get(ipfs_hash)
                    if data:
                        return json.loads(data)
                except Exception as e:
                    logger.warning(f"Could not load from local IPFS for {ipfs_hash}: {e}")

            # Fallback to public IPFS HTTP gateways, tried in order
            gateways = [
                f"https://ipfs.io/ipfs/{ipfs_hash}",
                f"https://gateway.pinata.cloud/ipfs/{ipfs_hash}",
                f"https://cloudflare-ipfs.com/ipfs/{ipfs_hash}",
                f"https://dweb.link/ipfs/{ipfs_hash}"
            ]

            for gateway_url in gateways:
                try:
                    response = requests.get(gateway_url, timeout=10)
                    response.raise_for_status()
                    return response.json()
                except Exception as e:
                    # Gateway miss is expected; keep trying the next one.
                    logger.debug(f"Could not load from {gateway_url}: {e}")
                    continue

            logger.warning(f"Could not load data for {ipfs_hash} from any source")
            return None

        except Exception as e:
            # Catch-all so a malformed URI can never crash the indexer.
            logger.warning(f"Could not parse token URI {token_uri}: {e}")
            return None