agent0-sdk 0.2.2__py3-none-any.whl → 0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -54,19 +54,28 @@ class AgentIndexer:
         embeddings: Optional[Any] = None,
         subgraph_client: Optional[Any] = None,
         identity_registry: Optional[Any] = None,
+        subgraph_url_overrides: Optional[Dict[int, str]] = None,
     ):
-        """Initialize indexer."""
+        """Initialize indexer with optional subgraph URL overrides for multiple chains."""
         self.web3_client = web3_client
         self.store = store or self._create_default_store()
         self.embeddings = embeddings or self._create_default_embeddings()
         self.subgraph_client = subgraph_client
         self.identity_registry = identity_registry
+        self.subgraph_url_overrides = subgraph_url_overrides or {}
         self._agent_cache = {}  # Cache for agent data
         self._cache_timestamp = 0
         self._cache_ttl = 7 * 24 * 60 * 60  # 1 week cache TTL (604800 seconds)
         self._http_cache = {}  # Cache for HTTP content
         self._http_cache_ttl = 60 * 60  # 1 hour cache TTL for HTTP content
 
+        # Cache for subgraph clients (one per chain)
+        self._subgraph_client_cache: Dict[int, Any] = {}
+
+        # If default subgraph_client provided, cache it for current chain
+        if self.subgraph_client:
+            self._subgraph_client_cache[self.web3_client.chain_id] = self.subgraph_client
+
     def _create_default_store(self) -> Dict[str, Any]:
         """Create default in-memory store."""
         return {
@@ -240,9 +249,9 @@ class AgentIndexer:
         # Get basic agent data from contract
         try:
             if self.identity_registry:
-                token_uri = self.web3_client.call_contract(
+                agent_uri = self.web3_client.call_contract(
                     self.identity_registry,
-                    "tokenURI",
+                    "tokenURI",  # ERC-721 standard function name, but represents agentURI
                     int(token_id)
                 )
             else:
@@ -363,19 +372,40 @@ class AgentIndexer:
 
     def get_agent(self, agent_id: AgentId) -> AgentSummary:
         """Get agent summary from index."""
+        # Parse chainId from agentId
+        chain_id, token_id = self._parse_agent_id(agent_id)
+
+        # Get subgraph client for the chain
+        subgraph_client = None
+        full_agent_id = agent_id
+
+        if chain_id is not None:
+            subgraph_client = self._get_subgraph_client_for_chain(chain_id)
+        else:
+            # No chainId in agentId, use SDK's default
+            # Construct full agentId format for subgraph query
+            default_chain_id = self.web3_client.chain_id
+            full_agent_id = f"{default_chain_id}:{token_id}"
+            subgraph_client = self.subgraph_client
+
         # Use subgraph if available (preferred)
-        if self.subgraph_client:
-            return self._get_agent_from_subgraph(agent_id)
+        if subgraph_client:
+            return self._get_agent_from_subgraph(full_agent_id, subgraph_client)
 
         # Fallback to local cache
         if agent_id not in self.store["agents"]:
             raise ValueError(f"Agent {agent_id} not found in index")
         return self.store["agents"][agent_id]
 
-    def _get_agent_from_subgraph(self, agent_id: AgentId) -> AgentSummary:
+    def _get_agent_from_subgraph(self, agent_id: AgentId, subgraph_client: Optional[Any] = None) -> AgentSummary:
         """Get agent summary from subgraph."""
+        # Use provided client or default
+        client = subgraph_client or self.subgraph_client
+        if not client:
+            raise ValueError("No subgraph client available")
+
         try:
-            agent_data = self.subgraph_client.get_agent_by_id(agent_id)
+            agent_data = client.get_agent_by_id(agent_id)
 
             if agent_data is None:
                 raise ValueError(f"Agent {agent_id} not found in subgraph")
@@ -418,13 +448,322 @@ class AgentIndexer:
         cursor: Optional[str] = None,
     ) -> Dict[str, Any]:
         """Search for agents by querying the subgraph or blockchain."""
+        # Handle "all" chains shorthand
+        if params.chains == "all":
+            params.chains = self._get_all_configured_chains()
+            logger.info(f"Expanding 'all' to configured chains: {params.chains}")
+
+        # If chains are explicitly specified (even a single chain), use multi-chain path
+        # This ensures the correct subgraph client is used for the requested chain(s)
+        if params.chains and len(params.chains) > 0:
+            # Validate chains are configured
+            available_chains = set(self._get_all_configured_chains())
+            requested_chains = set(params.chains)
+            invalid_chains = requested_chains - available_chains
+
+            if invalid_chains:
+                logger.warning(
+                    f"Requested chains not configured: {invalid_chains}. "
+                    f"Available chains: {available_chains}"
+                )
+                # Filter to valid chains only
+                valid_chains = list(requested_chains & available_chains)
+                if not valid_chains:
+                    return {
+                        "items": [],
+                        "nextCursor": None,
+                        "meta": {
+                            "chains": list(requested_chains),
+                            "successfulChains": [],
+                            "failedChains": list(requested_chains),
+                            "error": f"No valid chains configured. Available: {list(available_chains)}"
+                        }
+                    }
+                params.chains = valid_chains
+
+            return asyncio.run(
+                self._search_agents_across_chains(params, sort, page_size, cursor)
+            )
+
         # Use subgraph if available (preferred)
         if self.subgraph_client:
             return self._search_agents_via_subgraph(params, sort, page_size, cursor)
-
+
         # Fallback to blockchain queries
         return self._search_agents_via_blockchain(params, sort, page_size, cursor)
-
+
+    async def _search_agents_across_chains(
+        self,
+        params: SearchParams,
+        sort: List[str],
+        page_size: int,
+        cursor: Optional[str] = None,
+        timeout: float = 30.0,
+    ) -> Dict[str, Any]:
+        """
+        Search agents across multiple chains in parallel.
+
+        This method is called when params.chains contains 2+ chain IDs.
+        It executes one subgraph query per chain, all in parallel using asyncio.
+
+        Args:
+            params: Search parameters
+            sort: Sort specification
+            page_size: Number of results per page
+            cursor: Pagination cursor
+            timeout: Maximum time in seconds for all chain queries (default: 30.0)
+
+        Returns:
+            {
+                "items": [agent_dict, ...],
+                "nextCursor": str or None,
+                "meta": {
+                    "chains": [chainId, ...],
+                    "successfulChains": [chainId, ...],
+                    "failedChains": [chainId, ...],
+                    "totalResults": int,
+                    "timing": {"totalMs": int}
+                }
+            }
+        """
+        import time
+        start_time = time.time()
+        # Step 1: Determine which chains to query
+        chains_to_query = params.chains if params.chains else self._get_all_configured_chains()
+
+        if not chains_to_query or len(chains_to_query) == 0:
+            logger.warning("No chains specified or configured for multi-chain query")
+            return {"items": [], "nextCursor": None, "meta": {"chains": [], "successfulChains": [], "failedChains": []}}
+
+        # Step 2: Parse pagination cursor (if any)
+        chain_cursors = self._parse_multi_chain_cursor(cursor)
+        global_offset = chain_cursors.get("_global_offset", 0)
+
+        # Step 3: Define async function for querying a single chain
+        async def query_single_chain(chain_id: int) -> Dict[str, Any]:
+            """Query one chain and return its results with metadata."""
+            try:
+                # Get subgraph client for this chain
+                subgraph_client = self._get_subgraph_client_for_chain(chain_id)
+
+                if subgraph_client is None:
+                    logger.warning(f"No subgraph client available for chain {chain_id}")
+                    return {
+                        "chainId": chain_id,
+                        "status": "unavailable",
+                        "agents": [],
+                        "error": f"No subgraph configured for chain {chain_id}"
+                    }
+
+                # Build WHERE clause for this chain's query
+                # (reuse existing logic from _search_agents_via_subgraph)
+                where_clause = {}
+                reg_file_where = {}
+
+                if params.name is not None:
+                    reg_file_where["name_contains"] = params.name
+                if params.active is not None:
+                    reg_file_where["active"] = params.active
+                if params.x402support is not None:
+                    reg_file_where["x402support"] = params.x402support
+                if params.mcp is not None:
+                    if params.mcp:
+                        reg_file_where["mcpEndpoint_not"] = None
+                    else:
+                        reg_file_where["mcpEndpoint"] = None
+                if params.a2a is not None:
+                    if params.a2a:
+                        reg_file_where["a2aEndpoint_not"] = None
+                    else:
+                        reg_file_where["a2aEndpoint"] = None
+                if params.ens is not None:
+                    reg_file_where["ens"] = params.ens
+                if params.did is not None:
+                    reg_file_where["did"] = params.did
+                if params.walletAddress is not None:
+                    reg_file_where["agentWallet"] = params.walletAddress
+
+                if reg_file_where:
+                    where_clause["registrationFile_"] = reg_file_where
+
+                # Owner filtering
+                if params.owners is not None and len(params.owners) > 0:
+                    normalized_owners = [owner.lower() for owner in params.owners]
+                    if len(normalized_owners) == 1:
+                        where_clause["owner"] = normalized_owners[0]
+                    else:
+                        where_clause["owner_in"] = normalized_owners
+
+                # Operator filtering
+                if params.operators is not None and len(params.operators) > 0:
+                    normalized_operators = [op.lower() for op in params.operators]
+                    where_clause["operators_contains"] = normalized_operators
+
+                # Get pagination offset for this chain (not used in multi-chain, fetch all)
+                skip = 0
+
+                # Execute subgraph query
+                agents = subgraph_client.get_agents(
+                    where=where_clause if where_clause else None,
+                    first=page_size * 3,  # Fetch extra to allow for filtering/sorting
+                    skip=skip,
+                    order_by=self._extract_order_by(sort),
+                    order_direction=self._extract_order_direction(sort)
+                )
+
+                logger.info(f"Chain {chain_id}: fetched {len(agents)} agents")
+
+                return {
+                    "chainId": chain_id,
+                    "status": "success",
+                    "agents": agents,
+                    "count": len(agents),
+                }
+
+            except Exception as e:
+                logger.error(f"Error querying chain {chain_id}: {e}", exc_info=True)
+                return {
+                    "chainId": chain_id,
+                    "status": "error",
+                    "agents": [],
+                    "error": str(e)
+                }
+
+        # Step 4: Execute all chain queries in parallel with timeout
+        logger.info(f"Querying {len(chains_to_query)} chains in parallel: {chains_to_query}")
+        tasks = [query_single_chain(chain_id) for chain_id in chains_to_query]
+
+        try:
+            chain_results = await asyncio.wait_for(
+                asyncio.gather(*tasks),
+                timeout=timeout
+            )
+        except asyncio.TimeoutError:
+            logger.error(f"Multi-chain query timed out after {timeout}s")
+            # Collect results from completed tasks
+            chain_results = []
+            for task in tasks:
+                if task.done():
+                    try:
+                        chain_results.append(task.result())
+                    except Exception as e:
+                        logger.warning(f"Task failed: {e}")
+                else:
+                    # Task didn't complete - mark as timeout
+                    chain_results.append({
+                        "chainId": None,
+                        "status": "timeout",
+                        "agents": [],
+                        "error": f"Query timed out after {timeout}s"
+                    })
+
+        # Step 5: Extract successful results and track failures
+        all_agents = []
+        successful_chains = []
+        failed_chains = []
+
+        for result in chain_results:
+            chain_id = result["chainId"]
+
+            if result["status"] == "success":
+                successful_chains.append(chain_id)
+                all_agents.extend(result["agents"])
+            else:
+                failed_chains.append(chain_id)
+                logger.warning(
+                    f"Chain {chain_id} query failed: {result.get('error', 'Unknown error')}"
+                )
+
+        logger.info(f"Multi-chain query: {len(successful_chains)} successful, {len(failed_chains)} failed, {len(all_agents)} total agents")
+
+        # If ALL chains failed, raise error
+        if len(successful_chains) == 0:
+            raise ConnectionError(
+                f"All chains failed: {', '.join(str(c) for c in failed_chains)}"
+            )
+
+        # Step 6: Apply cross-chain filtering (for fields not supported by subgraph WHERE clause)
+        filtered_agents = self._apply_cross_chain_filters(all_agents, params)
+        logger.info(f"After cross-chain filters: {len(filtered_agents)} agents")
+
+        # Step 7: Deduplicate if requested
+        deduplicated_agents = self._deduplicate_agents_cross_chain(filtered_agents, params)
+        logger.info(f"After deduplication: {len(deduplicated_agents)} agents")
+
+        # Step 8: Sort across chains
+        sorted_agents = self._sort_agents_cross_chain(deduplicated_agents, sort)
+        logger.info(f"After sorting: {len(sorted_agents)} agents")
+
+        # Step 9: Apply pagination
+        start_idx = global_offset
+        paginated_agents = sorted_agents[start_idx:start_idx + page_size]
+
+        # Step 10: Convert to result format (keep as dicts, SDK will convert to AgentSummary)
+        results = []
+        for agent_data in paginated_agents:
+            reg_file = agent_data.get('registrationFile') or {}
+            if not isinstance(reg_file, dict):
+                reg_file = {}
+
+            result_agent = {
+                "agentId": agent_data.get('id'),
+                "chainId": agent_data.get('chainId'),
+                "name": reg_file.get('name', f"Agent {agent_data.get('agentId')}"),
+                "description": reg_file.get('description', ''),
+                "image": reg_file.get('image'),
+                "owner": agent_data.get('owner'),
+                "operators": agent_data.get('operators', []),
+                "mcp": reg_file.get('mcpEndpoint') is not None,
+                "a2a": reg_file.get('a2aEndpoint') is not None,
+                "ens": reg_file.get('ens'),
+                "did": reg_file.get('did'),
+                "walletAddress": reg_file.get('agentWallet'),
+                "supportedTrusts": reg_file.get('supportedTrusts', []),
+                "a2aSkills": reg_file.get('a2aSkills', []),
+                "mcpTools": reg_file.get('mcpTools', []),
+                "mcpPrompts": reg_file.get('mcpPrompts', []),
+                "mcpResources": reg_file.get('mcpResources', []),
+                "active": reg_file.get('active', True),
+                "x402support": reg_file.get('x402support', False),
+                "totalFeedback": agent_data.get('totalFeedback', 0),
+                "lastActivity": agent_data.get('lastActivity'),
+                "updatedAt": agent_data.get('updatedAt'),
+                "extras": {}
+            }
+
+            # Add deployedOn if deduplication was used
+            if 'deployedOn' in agent_data:
+                result_agent['extras']['deployedOn'] = agent_data['deployedOn']
+
+            results.append(result_agent)
+
+        # Step 11: Calculate next cursor
+        next_cursor = None
+        if len(sorted_agents) > start_idx + page_size:
+            # More results available
+            next_cursor = self._create_multi_chain_cursor(
+                global_offset=start_idx + page_size
+            )
+
+        # Step 12: Build response with metadata
+        query_time = time.time() - start_time
+
+        return {
+            "items": results,
+            "nextCursor": next_cursor,
+            "meta": {
+                "chains": chains_to_query,
+                "successfulChains": successful_chains,
+                "failedChains": failed_chains,
+                "totalResults": len(sorted_agents),
+                "pageResults": len(results),
+                "timing": {
+                    "totalMs": int(query_time * 1000),
+                    "averagePerChainMs": int(query_time * 1000 / len(chains_to_query)) if chains_to_query else 0,
+                }
+            }
+        }
+
     def _search_agents_via_subgraph(
         self,
         params: SearchParams,
@@ -702,29 +1041,42 @@ class AgentIndexer:
             for resp in responses_data:
                 answers.append({
                     'responder': resp.get('responder'),
-                    'responseUri': resp.get('responseUri'),
+                    'responseURI': resp.get('responseURI') or resp.get('responseUri'),  # Handle both old and new field names
                     'responseHash': resp.get('responseHash'),
                     'createdAt': resp.get('createdAt')
                 })
 
-            # Map tags - check if they're hex bytes32 or plain strings
+            # Map tags - tags are now strings (not bytes32)
             tags = []
             tag1 = feedback_data.get('tag1') or feedback_file.get('tag1')
             tag2 = feedback_data.get('tag2') or feedback_file.get('tag2')
 
-            # Convert hex bytes32 to readable tags
-            if tag1 or tag2:
-                tags = self._hexBytes32ToTags(
-                    tag1 if isinstance(tag1, str) else "",
-                    tag2 if isinstance(tag2, str) else ""
-                )
-
-                # If conversion failed, try as plain strings
-                if not tags:
-                    if tag1 and not tag1.startswith("0x"):
+            # Tags are now plain strings, but handle backward compatibility with hex bytes32
+            if tag1:
+                if isinstance(tag1, str) and not tag1.startswith("0x"):
                     tags.append(tag1)
-                    if tag2 and not tag2.startswith("0x"):
+                elif isinstance(tag1, str) and tag1.startswith("0x"):
+                    # Try to convert from hex bytes32 (old format)
+                    try:
+                        hex_bytes = bytes.fromhex(tag1[2:])
+                        tag1_str = hex_bytes.rstrip(b'\x00').decode('utf-8', errors='ignore')
+                        if tag1_str:
+                            tags.append(tag1_str)
+                    except Exception:
+                        pass  # Ignore invalid hex strings
+
+            if tag2:
+                if isinstance(tag2, str) and not tag2.startswith("0x"):
                     tags.append(tag2)
+                elif isinstance(tag2, str) and tag2.startswith("0x"):
+                    # Try to convert from hex bytes32 (old format)
+                    try:
+                        hex_bytes = bytes.fromhex(tag2[2:])
+                        tag2_str = hex_bytes.rstrip(b'\x00').decode('utf-8', errors='ignore')
+                        if tag2_str:
+                            tags.append(tag2_str)
+                    except Exception:
+                        pass  # Ignore invalid hex strings
 
             return Feedback(
                 id=Feedback.create_id(agentId, clientAddress, feedbackIndex),
@@ -741,7 +1093,8 @@ class AgentIndexer:
                     'chainId': feedback_file.get('proofOfPaymentChainId'),
                     'txHash': feedback_file.get('proofOfPaymentTxHash'),
                 } if feedback_file.get('proofOfPaymentFromAddress') else None,
-                fileURI=feedback_data.get('feedbackUri'),
+                fileURI=feedback_data.get('feedbackURI') or feedback_data.get('feedbackUri'),  # Handle both old and new field names
+                endpoint=feedback_data.get('endpoint'),
                 createdAt=feedback_data.get('createdAt', int(time.time())),
                 answers=answers,
                 isRevoked=feedback_data.get('isRevoked', False),
@@ -766,11 +1119,27 @@ class AgentIndexer:
         skip: int = 0,
     ) -> List[Feedback]:
         """Search feedback for an agent - uses subgraph if available."""
+        # Parse chainId from agentId
+        chain_id, token_id = self._parse_agent_id(agentId)
+
+        # Get subgraph client for the chain
+        subgraph_client = None
+        full_agent_id = agentId
+
+        if chain_id is not None:
+            subgraph_client = self._get_subgraph_client_for_chain(chain_id)
+        else:
+            # No chainId in agentId, use SDK's default
+            # Construct full agentId format for subgraph query
+            default_chain_id = self.web3_client.chain_id
+            full_agent_id = f"{default_chain_id}:{token_id}"
+            subgraph_client = self.subgraph_client
+
         # Use subgraph if available (preferred)
-        if self.subgraph_client:
+        if subgraph_client:
             return self._search_feedback_subgraph(
-                agentId, clientAddresses, tags, capabilities, skills, tasks, names,
-                minScore, maxScore, include_revoked, first, skip
+                full_agent_id, clientAddresses, tags, capabilities, skills, tasks, names,
+                minScore, maxScore, include_revoked, first, skip, subgraph_client
             )
 
         # Fallback not implemented (would require blockchain queries)
@@ -791,8 +1160,14 @@ class AgentIndexer:
         include_revoked: bool,
         first: int,
         skip: int,
+        subgraph_client: Optional[Any] = None,
     ) -> List[Feedback]:
         """Search feedback using subgraph."""
+        # Use provided client or default
+        client = subgraph_client or self.subgraph_client
+        if not client:
+            return []
+
         # Create SearchFeedbackParams
         params = SearchFeedbackParams(
             agents=[agentId],
@@ -808,7 +1183,7 @@ class AgentIndexer:
         )
 
         # Query subgraph
-        feedbacks_data = self.subgraph_client.search_feedback(
+        feedbacks_data = client.search_feedback(
             params=params,
             first=first,
             skip=skip,
@@ -915,10 +1290,10 @@ class AgentIndexer:
     def _get_agent_from_blockchain(self, token_id: int, sdk) -> Optional[Dict[str, Any]]:
         """Get agent data from blockchain."""
         try:
-            # Get token URI from contract
-            token_uri = self.web3_client.call_contract(
+            # Get agent URI from contract (using ERC-721 tokenURI function)
+            agent_uri = self.web3_client.call_contract(
                 sdk.identity_registry,
-                "tokenURI",
+                "tokenURI",  # ERC-721 standard function name, but represents agentURI
                 token_id
             )
 
@@ -929,27 +1304,41 @@ class AgentIndexer:
                 token_id
             )
 
+            # Get agentWallet using new dedicated function
+            wallet_address = None
+            try:
+                wallet_address = self.web3_client.call_contract(
+                    sdk.identity_registry,
+                    "getAgentWallet",
+                    token_id
+                )
+                if wallet_address == "0x0000000000000000000000000000000000000000":
+                    wallet_address = None
+            except Exception:
+                # Fallback to registration file if getAgentWallet not available
+                pass
+
             # Create agent ID
             agent_id = f"{sdk.chain_id}:{token_id}"
 
             # Try to load registration data from IPFS
-            registration_data = self._load_registration_from_ipfs(token_uri, sdk)
+            registration_data = self._load_registration_from_ipfs(agent_uri, sdk)
 
             if registration_data:
-                # Use data from IPFS
+                # Use data from IPFS, but prefer on-chain wallet if available
                 return {
                     "agentId": agent_id,
                     "name": registration_data.get("name", f"Agent {token_id}"),
                     "description": registration_data.get("description", f"Agent registered with token ID {token_id}"),
                     "owner": owner,
                     "tokenId": token_id,
-                    "tokenURI": token_uri,
-                    "x402support": registration_data.get("x402support", False),
+                    "agentURI": agent_uri,  # Updated field name
+                    "x402support": registration_data.get("x402Support", registration_data.get("x402support", False)),
                     "trustModels": registration_data.get("trustModels", ["reputation"]),
                     "active": registration_data.get("active", True),
                     "endpoints": registration_data.get("endpoints", []),
                     "image": registration_data.get("image"),
-                    "walletAddress": registration_data.get("walletAddress"),
+                    "walletAddress": wallet_address or registration_data.get("walletAddress"),  # Prefer on-chain wallet
                     "metadata": registration_data.get("metadata", {})
                 }
             else:
@@ -960,13 +1349,13 @@ class AgentIndexer:
                     "description": f"Agent registered with token ID {token_id}",
                     "owner": owner,
                     "tokenId": token_id,
-                    "tokenURI": token_uri,
+                    "agentURI": agent_uri,  # Updated field name
                     "x402support": False,
                     "trustModels": ["reputation"],
                     "active": True,
                     "endpoints": [],
                     "image": None,
-                    "walletAddress": None,
+                    "walletAddress": wallet_address,
                     "metadata": {}
                 }
         except Exception as e:
@@ -1030,3 +1419,364 @@ class AgentIndexer:
         except Exception as e:
             logger.warning(f"Could not parse token URI {token_uri}: {e}")
             return None
+
+    def _get_subgraph_client_for_chain(self, chain_id: int):
+        """
+        Get or create SubgraphClient for a specific chain.
+
+        Checks (in order):
+        1. Client cache (already created)
+        2. Subgraph URL overrides (from constructor)
+        3. DEFAULT_SUBGRAPH_URLS (from contracts.py)
+        4. Environment variables (SUBGRAPH_URL_<chainId>)
+
+        Returns None if no subgraph URL is available for this chain.
+        """
+        # Check cache first
+        if chain_id in self._subgraph_client_cache:
+            return self._subgraph_client_cache[chain_id]
+
+        # Get subgraph URL for this chain
+        subgraph_url = self._get_subgraph_url_for_chain(chain_id)
+
+        if subgraph_url is None:
+            logger.warning(f"No subgraph URL configured for chain {chain_id}")
+            return None
+
+        # Create new SubgraphClient
+        from .subgraph_client import SubgraphClient
+        client = SubgraphClient(subgraph_url)
+
+        # Cache for future use
+        self._subgraph_client_cache[chain_id] = client
+
+        logger.info(f"Created subgraph client for chain {chain_id}: {subgraph_url}")
+
+        return client
+
+    def _get_subgraph_url_for_chain(self, chain_id: int) -> Optional[str]:
+        """
+        Get subgraph URL for a specific chain.
+
+        Priority order:
+        1. Constructor-provided overrides (self.subgraph_url_overrides)
+        2. DEFAULT_SUBGRAPH_URLS from contracts.py
+        3. Environment variable SUBGRAPH_URL_<chainId>
+        4. None (not configured)
+        """
+        import os
+
+        # 1. Check constructor overrides
+        if chain_id in self.subgraph_url_overrides:
+            return self.subgraph_url_overrides[chain_id]
+
+        # 2. Check DEFAULT_SUBGRAPH_URLS
+        from .contracts import DEFAULT_SUBGRAPH_URLS
+        if chain_id in DEFAULT_SUBGRAPH_URLS:
+            return DEFAULT_SUBGRAPH_URLS[chain_id]
+
+        # 3. Check environment variable
+        env_key = f"SUBGRAPH_URL_{chain_id}"
+        env_url = os.environ.get(env_key)
+        if env_url:
+            logger.info(f"Using subgraph URL from environment: {env_key}={env_url}")
+            return env_url
+
+        # 4. Not found
+        return None
+
+    def _parse_agent_id(self, agent_id: AgentId) -> tuple[Optional[int], str]:
+        """
+        Parse agentId to extract chainId and tokenId.
+
+        Returns:
+            (chain_id, token_id_str) where:
+            - chain_id: int if "chainId:tokenId" format, None if just "tokenId"
+            - token_id_str: the tokenId part (always present)
+        """
+        if ":" in agent_id:
+            parts = agent_id.split(":", 1)
+            try:
+                chain_id = int(parts[0])
+                token_id = parts[1]
+                return (chain_id, token_id)
+            except ValueError:
+                # Invalid chainId, treat as tokenId only
+                return (None, agent_id)
+        return (None, agent_id)
+
+    def _get_all_configured_chains(self) -> List[int]:
+        """
+        Get list of all chains that have subgraphs configured.
+
+        This is used when params.chains is None (query all available chains).
+        """
+        import os
+        from .contracts import DEFAULT_SUBGRAPH_URLS
+
+        chains = set()
+
+        # Add chains from DEFAULT_SUBGRAPH_URLS
+        chains.update(DEFAULT_SUBGRAPH_URLS.keys())
+
+        # Add chains from constructor overrides
+        chains.update(self.subgraph_url_overrides.keys())
+
+        # Add chains from environment variables
+        for key, value in os.environ.items():
+            if key.startswith("SUBGRAPH_URL_") and value:
+                try:
+                    chain_id = int(key.replace("SUBGRAPH_URL_", ""))
+                    chains.add(chain_id)
+                except ValueError:
+                    pass
+
+        return sorted(list(chains))
+
+    def _apply_cross_chain_filters(
+        self,
+        agents: List[Dict[str, Any]],
+        params: SearchParams
+    ) -> List[Dict[str, Any]]:
+        """
+        Apply filters that couldn't be expressed in subgraph WHERE clause.
+
+        Most filters are already applied by the subgraph query, but some
+        (like supportedTrust, mcpTools, etc.) need post-processing.
+        """
+        filtered = agents
+
+        # Filter by supportedTrust (if specified)
+        if params.supportedTrust is not None:
+            filtered = [
+                agent for agent in filtered
+                if any(
+                    trust in agent.get('registrationFile', {}).get('supportedTrusts', [])
+                    for trust in params.supportedTrust
+                )
+            ]
+
+        # Filter by mcpTools (if specified)
+        if params.mcpTools is not None:
+            filtered = [
+                agent for agent in filtered
+                if any(
+                    tool in agent.get('registrationFile', {}).get('mcpTools', [])
+                    for tool in params.mcpTools
+                )
+            ]
+
+        # Filter by a2aSkills (if specified)
+        if params.a2aSkills is not None:
+            filtered = [
+                agent for agent in filtered
+                if any(
+                    skill in agent.get('registrationFile', {}).get('a2aSkills', [])
+                    for skill in params.a2aSkills
+                )
+            ]
+
+        # Filter by mcpPrompts (if specified)
+        if params.mcpPrompts is not None:
+            filtered = [
+                agent for agent in filtered
+                if any(
+                    prompt in agent.get('registrationFile', {}).get('mcpPrompts', [])
+                    for prompt in params.mcpPrompts
+                )
+            ]
+
+        # Filter by mcpResources (if specified)
+        if params.mcpResources is not None:
+            filtered = [
+                agent for agent in filtered
+                if any(
+                    resource in agent.get('registrationFile', {}).get('mcpResources', [])
+                    for resource in params.mcpResources
+                )
+            ]
+
+        return filtered
+
+    def _deduplicate_agents_cross_chain(
+        self,
+        agents: List[Dict[str, Any]],
+        params: SearchParams
+    ) -> List[Dict[str, Any]]:
+        """
+        Deduplicate agents across chains (if requested).
+
+        Strategy:
+        - By default, DON'T deduplicate (agents on different chains are different entities)
+        - If params.deduplicate_cross_chain=True, deduplicate by (owner, registration_hash)
+
+        When deduplicating:
+        - Keep the first instance encountered
+        - Add 'deployedOn' array with all chain IDs where this agent exists
+        """
+        # Check if deduplication requested
+        if not params.deduplicate_cross_chain:
+            return agents
+
+        # Group agents by identity key
+        seen = {}
+        deduplicated = []
+
+        for agent in agents:
+            # Create identity key: (owner, name, description)
+            # This identifies "the same agent" across chains
+            owner = agent.get('owner', '').lower()
+            reg_file = agent.get('registrationFile', {})
+            name = reg_file.get('name', '')
+            description = reg_file.get('description', '')
+
+            identity_key = (owner, name, description)
+
+            if identity_key not in seen:
+                # First time seeing this agent
+                seen[identity_key] = agent
+
+                # Add deployedOn array
+                agent['deployedOn'] = [agent['chainId']]
+
+                deduplicated.append(agent)
+            else:
+                # Already seen this agent on another chain
+                # Add this chain to deployedOn array
+                seen[identity_key]['deployedOn'].append(agent['chainId'])
+
+        logger.info(
+            f"Deduplication: {len(agents)} agents → {len(deduplicated)} unique agents"
+        )
+
+        return deduplicated
+
+    def _sort_agents_cross_chain(
+        self,
+        agents: List[Dict[str, Any]],
+        sort: List[str]
+    ) -> List[Dict[str, Any]]:
+        """
+        Sort agents from multiple chains.
+
+        Supports sorting by:
+        - createdAt (timestamp)
+        - updatedAt (timestamp)
+        - totalFeedback (count)
+        - name (alphabetical)
+        - averageScore (reputation, if available)
+        """
+        if not sort or len(sort) == 0:
+            # Default: sort by createdAt descending (newest first)
+            return sorted(
+                agents,
+                key=lambda a: a.get('createdAt', 0),
+                reverse=True
+            )
+
+        # Parse first sort specification
+        sort_spec = sort[0]
+        if ':' in sort_spec:
+            field, direction = sort_spec.split(':', 1)
+        else:
+            field = sort_spec
+            direction = 'desc'
+
+        reverse = (direction.lower() == 'desc')
+
+        # Define sort key function
+        def get_sort_key(agent: Dict[str, Any]):
+            if field == 'createdAt':
+                return agent.get('createdAt', 0)
+
+            elif field == 'updatedAt':
+                return agent.get('updatedAt', 0)
+
+            elif field == 'totalFeedback':
+                return agent.get('totalFeedback', 0)
+
+            elif field == 'name':
+                reg_file = agent.get('registrationFile', {})
+                return reg_file.get('name', '').lower()
+
+            elif field == 'averageScore':
+                # If reputation search was done, averageScore may be available
+                return agent.get('averageScore', 0)
+
+            else:
+                logger.warning(f"Unknown sort field: {field}, defaulting to createdAt")
+                return agent.get('createdAt', 0)
+
+        return sorted(agents, key=get_sort_key, reverse=reverse)
+
+    def _parse_multi_chain_cursor(self, cursor: Optional[str]) -> Dict[int, int]:
+        """
+        Parse multi-chain cursor into per-chain offsets.
+
+        Cursor format (JSON):
+            {
+                "11155111": 50,        # Ethereum Sepolia offset
+                "84532": 30,           # Base Sepolia offset
+                "_global_offset": 100  # Total items returned so far
+            }
+
+        Returns:
+            Dict mapping chainId → offset (default 0)
+        """
+        if not cursor:
+            return {}
+
+        try:
+            cursor_data = json.loads(cursor)
+
+            # Validate format
+            if not isinstance(cursor_data, dict):
+                logger.warning(f"Invalid cursor format: {cursor}, using empty")
+                return {}
+
+            return cursor_data
+
+        except json.JSONDecodeError as e:
+            logger.warning(f"Failed to parse cursor: {e}, using empty")
+            return {}
+
+    def _create_multi_chain_cursor(
+        self,
+        global_offset: int,
+    ) -> str:
+        """
+        Create multi-chain cursor for next page.
+
+        Args:
+            global_offset: Total items returned so far
+
+        Returns:
+            JSON string cursor
+        """
+        cursor_data = {
+            "_global_offset": global_offset
+        }
+
+        return json.dumps(cursor_data)
+
+    def _extract_order_by(self, sort: List[str]) -> str:
+        """Extract order_by field from sort specification."""
+        if not sort or len(sort) == 0:
+            return "createdAt"
+
+        sort_spec = sort[0]
+        if ':' in sort_spec:
+            field, _ = sort_spec.split(':', 1)
+            return field
+        return sort_spec
+
+    def _extract_order_direction(self, sort: List[str]) -> str:
+        """Extract order direction from sort specification."""
+        if not sort or len(sort) == 0:
+            return "desc"
+
+        sort_spec = sort[0]
+        if ':' in sort_spec:
+            _, direction = sort_spec.split(':', 1)
+            return direction
+        return "desc"