agent0-sdk 1.4.2__py3-none-any.whl → 1.5.0b1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -37,12 +37,14 @@ from datetime import datetime
 
 from .models import (
     AgentId, ChainId, Address, URI, Timestamp,
-    AgentSummary, Feedback, SearchParams, SearchFeedbackParams
+    AgentSummary, Feedback, SearchFilters, SearchOptions, SearchFeedbackParams
 )
 from .web3_client import Web3Client
 
 logger = logging.getLogger(__name__)
 
+from .semantic_search_client import SemanticSearchClient
+
 
 class AgentIndexer:
     """Indexer for agent discovery and search."""
@@ -323,10 +325,25 @@ class AgentIndexer:
         registration_data: Dict[str, Any]
     ) -> AgentSummary:
         """Create agent summary from registration data."""
-        # Extract endpoints
+        # Extract endpoints (legacy/non-subgraph path)
         endpoints = registration_data.get("endpoints", [])
-        mcp = any(ep.get("name") == "MCP" for ep in endpoints)
-        a2a = any(ep.get("name") == "A2A" for ep in endpoints)
+        mcp: Optional[str] = None
+        a2a: Optional[str] = None
+        web: Optional[str] = None
+        email: Optional[str] = None
+        for ep in endpoints:
+            name = (ep.get("name") or "").upper()
+            value = ep.get("endpoint")
+            if not isinstance(value, str):
+                continue
+            if name == "MCP":
+                mcp = value
+            elif name == "A2A":
+                a2a = value
+            elif name == "WEB":
+                web = value
+            elif name == "EMAIL":
+                email = value
 
         ens = None
         did = None
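The mcp/a2a fields change meaning in this release: in 1.4.2 they were booleans derived from the endpoint list, in 1.5.0b1 they hold the endpoint values themselves, and web/email are new. A minimal sketch of the new extraction, run against a made-up registration payload (the payload shape is inferred from the loop above, not from SDK documentation):

from typing import Optional

registration_data = {
    "endpoints": [
        {"name": "mcp", "endpoint": "https://agent.example/mcp"},   # name matching is case-insensitive
        {"name": "A2A", "endpoint": "https://agent.example/a2a"},
        {"name": "WEB", "endpoint": "https://agent.example"},
        {"name": "EMAIL", "endpoint": None},                        # non-string values are skipped
    ]
}

mcp: Optional[str] = None
a2a: Optional[str] = None
web: Optional[str] = None
email: Optional[str] = None
for ep in registration_data.get("endpoints", []):
    name = (ep.get("name") or "").upper()
    value = ep.get("endpoint")
    if not isinstance(value, str):
        continue
    if name == "MCP":
        mcp = value
    elif name == "A2A":
        a2a = value
    elif name == "WEB":
        web = value
    elif name == "EMAIL":
        email = value

print(mcp, a2a, web, email)  # https://agent.example/mcp https://agent.example/a2a https://agent.example None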
@@ -352,6 +369,8 @@ class AgentIndexer:
             operators=[], # Would be populated from contract
             mcp=mcp,
             a2a=a2a,
+            web=web,
+            email=email,
             ens=ens,
             did=did,
             walletAddress=registration_data.get("walletAddress"),
@@ -360,6 +379,8 @@ class AgentIndexer:
             mcpTools=mcp_tools,
             mcpPrompts=mcp_prompts,
             mcpResources=mcp_resources,
+            oasfSkills=[],
+            oasfDomains=[],
             active=registration_data.get("active", True),
             extras={}
         )
@@ -422,18 +443,28 @@ class AgentIndexer:
             description=reg_file.get('description', ''),
             owners=[agent_data.get('owner', '')],
             operators=agent_data.get('operators', []),
-            mcp=reg_file.get('mcpEndpoint') is not None,
-            a2a=reg_file.get('a2aEndpoint') is not None,
+            mcp=reg_file.get('mcpEndpoint') or None,
+            a2a=reg_file.get('a2aEndpoint') or None,
+            web=reg_file.get('webEndpoint') or None,
+            email=reg_file.get('emailEndpoint') or None,
             ens=reg_file.get('ens'),
             did=reg_file.get('did'),
-            walletAddress=reg_file.get('agentWallet'),
+            walletAddress=agent_data.get('agentWallet'),
             supportedTrusts=reg_file.get('supportedTrusts', []),
             a2aSkills=reg_file.get('a2aSkills', []),
             mcpTools=reg_file.get('mcpTools', []),
             mcpPrompts=reg_file.get('mcpPrompts', []),
             mcpResources=reg_file.get('mcpResources', []),
+            oasfSkills=reg_file.get('oasfSkills', []) or [],
+            oasfDomains=reg_file.get('oasfDomains', []) or [],
             active=reg_file.get('active', True),
             x402support=reg_file.get('x402Support', reg_file.get('x402support', False)),
+            createdAt=agent_data.get('createdAt'),
+            updatedAt=agent_data.get('updatedAt'),
+            lastActivity=agent_data.get('lastActivity'),
+            agentURI=agent_data.get('agentURI'),
+            agentURIType=agent_data.get('agentURIType'),
+            feedbackCount=agent_data.get('totalFeedback'),
             extras={}
         )
 
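For consumers of AgentSummary the same shift applies on this subgraph-backed path: mcp/a2a (and the new web/email) are endpoint strings or None rather than booleans, and createdAt/updatedAt/lastActivity/agentURI/feedbackCount are now populated. A hypothetical caller-side sketch, assuming only that these attributes exist on the returned summaries:

def describe(summary) -> str:
    # Truthiness checks written against the 1.4.x booleans still behave the same,
    # but in 1.5.0b1 the value is the endpoint itself and can be used directly.
    if summary.mcp:
        return f"MCP endpoint: {summary.mcp}"
    if summary.a2a:
        return f"A2A endpoint: {summary.a2a}"
    return "no MCP/A2A endpoint registered"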
@@ -442,59 +473,714 @@ class AgentIndexer:
 
     def search_agents(
         self,
-        params: SearchParams,
-        sort: List[str],
-        page_size: int,
-        cursor: Optional[str] = None,
+        filters: SearchFilters,
+        options: SearchOptions,
     ) -> Dict[str, Any]:
-        """Search for agents by querying the subgraph or blockchain."""
-        # Handle "all" chains shorthand
-        if params.chains == "all":
-            params.chains = self._get_all_configured_chains()
-            logger.info(f"Expanding 'all' to configured chains: {params.chains}")
-
-        # If chains are explicitly specified (even a single chain), use multi-chain path
-        # This ensures the correct subgraph client is used for the requested chain(s)
-        if params.chains and len(params.chains) > 0:
-            # Validate chains are configured
-            available_chains = set(self._get_all_configured_chains())
-            requested_chains = set(params.chains)
-            invalid_chains = requested_chains - available_chains
-
-            if invalid_chains:
-                logger.warning(
-                    f"Requested chains not configured: {invalid_chains}. "
-                    f"Available chains: {available_chains}"
-                )
-                # Filter to valid chains only
-                valid_chains = list(requested_chains & available_chains)
-                if not valid_chains:
-                    return {
-                        "items": [],
-                        "nextCursor": None,
-                        "meta": {
-                            "chains": list(requested_chains),
-                            "successfulChains": [],
-                            "failedChains": list(requested_chains),
-                            "error": f"No valid chains configured. Available: {list(available_chains)}"
-                        }
+        """Unified search entry point (replaces all legacy search variants)."""
+        start_ms = int(time.time() * 1000)
+
+        if filters.keyword and str(filters.keyword).strip():
+            out = self._search_unified_with_keyword(filters, options)
+        else:
+            out = self._search_unified_no_keyword(filters, options)
+
+        meta = out.get("meta") or {}
+        timing = meta.get("timing") or {}
+        timing["totalMs"] = int(time.time() * 1000) - start_ms
+        meta["timing"] = timing
+        out["meta"] = meta
+        return out
+
+    # -------------------------------------------------------------------------
+    # Unified search (v2)
+    # -------------------------------------------------------------------------
+
+    def _parse_sort(self, sort: Optional[List[str]], keyword_present: bool) -> tuple[str, str]:
+        default = "semanticScore:desc" if keyword_present else "updatedAt:desc"
+        spec = (sort[0] if sort and len(sort) > 0 else default) or default
+        parts = spec.split(":", 1)
+        field = parts[0] if parts and parts[0] else ("semanticScore" if keyword_present else "updatedAt")
+        direction = (parts[1] if len(parts) > 1 else "desc").lower()
+        if direction not in ("asc", "desc"):
+            direction = "desc"
+        return field, direction
+
+    def _resolve_chains(self, filters: SearchFilters, keyword_present: bool) -> List[int]:
+        if filters.chains == "all":
+            return self._get_all_configured_chains()
+        if isinstance(filters.chains, list) and len(filters.chains) > 0:
+            return filters.chains
+        if keyword_present:
+            return self._get_all_configured_chains()
+        return [self.web3_client.chain_id]
+
+    def _parse_cursor_offset(self, cursor: Optional[str]) -> int:
+        if not cursor:
+            return 0
+        try:
+            n = int(cursor)
+            return n if n >= 0 else 0
+        except Exception:
+            return 0
+
+    def _parse_per_chain_cursor(self, chains: List[int], cursor: Optional[str]) -> Dict[int, int]:
+        out: Dict[int, int] = {c: 0 for c in chains}
+        if not cursor:
+            return out
+        try:
+            data = json.loads(cursor)
+            if isinstance(data, dict):
+                for c in chains:
+                    v = data.get(str(c))
+                    if isinstance(v, int) and v >= 0:
+                        out[c] = v
+                return out
+        except Exception:
+            pass
+        if len(chains) == 1:
+            try:
+                out[chains[0]] = max(0, int(cursor))
+            except Exception:
+                pass
+        return out
+
+    def _encode_per_chain_cursor(self, skips: Dict[int, int]) -> str:
+        return json.dumps({str(k): int(v) for k, v in sorted(skips.items(), key=lambda kv: kv[0])})
+
+    def _to_unix_seconds(self, dt: Any) -> int:
+        if isinstance(dt, int):
+            return dt
+        if isinstance(dt, datetime):
+            return int(dt.timestamp())
+        s = str(dt).strip()
+        if not s:
+            raise ValueError("Empty date")
+        # If no timezone, treat as UTC by appending 'Z'
+        if not ("Z" in s or "z" in s or "+" in s or "-" in s[-6:]):
+            s = f"{s}Z"
+        return int(datetime.fromisoformat(s.replace("Z", "+00:00")).timestamp())
+
+    def _normalize_agent_ids(self, filters: SearchFilters, chains: List[int]) -> Optional[Dict[int, List[str]]]:
+        if not filters.agentIds:
+            return None
+        by_chain: Dict[int, List[str]] = {}
+        for aid in filters.agentIds:
+            s = str(aid)
+            if ":" in s:
+                chain_str = s.split(":", 1)[0]
+                try:
+                    chain_id = int(chain_str)
+                except Exception:
+                    continue
+                by_chain.setdefault(chain_id, []).append(s)
+            else:
+                if len(chains) != 1:
+                    raise ValueError("agentIds without chain prefix are only allowed when searching exactly one chain.")
+                by_chain.setdefault(chains[0], []).append(f"{chains[0]}:{s}")
+        return by_chain
+
+    def _build_where_v2(self, filters: SearchFilters, ids_for_chain: Optional[List[str]] = None) -> Dict[str, Any]:
+        base: Dict[str, Any] = {}
+        and_conditions: List[Dict[str, Any]] = []
+
+        # Default: only agents with registration files
+        if filters.hasRegistrationFile is False:
+            base["registrationFile"] = None
+        else:
+            base["registrationFile_not"] = None
+
+        if ids_for_chain:
+            base["id_in"] = ids_for_chain
+
+        if filters.walletAddress:
+            base["agentWallet"] = str(filters.walletAddress).lower()
+
+        # Feedback existence filters can be pushed down via Agent.totalFeedback when they are the ONLY feedback constraint.
+        fb = filters.feedback
+        if fb and (getattr(fb, "hasFeedback", False) or getattr(fb, "hasNoFeedback", False)):
+            has_threshold = any(
+                x is not None
+                for x in [
+                    getattr(fb, "minCount", None),
+                    getattr(fb, "maxCount", None),
+                    getattr(fb, "minValue", None),
+                    getattr(fb, "maxValue", None),
+                ]
+            )
+            has_any_constraint = any(
+                [
+                    bool(getattr(fb, "hasResponse", False)),
+                    bool(getattr(fb, "fromReviewers", None)),
+                    bool(getattr(fb, "endpoint", None)),
+                    bool(getattr(fb, "tag", None)),
+                    bool(getattr(fb, "tag1", None)),
+                    bool(getattr(fb, "tag2", None)),
+                ]
+            )
+            if not has_threshold and not has_any_constraint:
+                if getattr(fb, "hasFeedback", False):
+                    base["totalFeedback_gt"] = "0"
+                if getattr(fb, "hasNoFeedback", False):
+                    base["totalFeedback"] = "0"
+
+        if filters.owners:
+            base["owner_in"] = [str(o).lower() for o in filters.owners]
+
+        if filters.operators:
+            ops = [str(o).lower() for o in filters.operators]
+            and_conditions.append({"or": [{"operators_contains": [op]} for op in ops]})
+
+        if filters.registeredAtFrom is not None:
+            base["createdAt_gte"] = self._to_unix_seconds(filters.registeredAtFrom)
+        if filters.registeredAtTo is not None:
+            base["createdAt_lte"] = self._to_unix_seconds(filters.registeredAtTo)
+        if filters.updatedAtFrom is not None:
+            base["updatedAt_gte"] = self._to_unix_seconds(filters.updatedAtFrom)
+        if filters.updatedAtTo is not None:
+            base["updatedAt_lte"] = self._to_unix_seconds(filters.updatedAtTo)
+
+        rf: Dict[str, Any] = {}
+        if filters.name:
+            rf["name_contains_nocase"] = filters.name
+        if filters.description:
+            rf["description_contains_nocase"] = filters.description
+        if filters.ensContains:
+            rf["ens_contains_nocase"] = filters.ensContains
+        if filters.didContains:
+            rf["did_contains_nocase"] = filters.didContains
+        if filters.active is not None:
+            rf["active"] = filters.active
+        if filters.x402support is not None:
+            rf["x402Support"] = filters.x402support
+
+        if filters.hasMCP is not None:
+            rf["mcpEndpoint_not" if filters.hasMCP else "mcpEndpoint"] = None
+        if filters.hasA2A is not None:
+            rf["a2aEndpoint_not" if filters.hasA2A else "a2aEndpoint"] = None
+        if filters.hasWeb is not None:
+            rf["webEndpoint_not" if filters.hasWeb else "webEndpoint"] = None
+        if filters.hasOASF is not None:
+            # Exact semantics: true iff (oasfSkills OR oasfDomains) is non-empty (via subgraph derived field).
+            rf["hasOASF"] = bool(filters.hasOASF)
+
+        if filters.mcpContains:
+            rf["mcpEndpoint_contains_nocase"] = filters.mcpContains
+        if filters.a2aContains:
+            rf["a2aEndpoint_contains_nocase"] = filters.a2aContains
+        if filters.webContains:
+            rf["webEndpoint_contains_nocase"] = filters.webContains
+
+        if rf:
+            base["registrationFile_"] = rf
+
+        def any_of_list(field: str, values: Optional[List[str]]):
+            if not values:
+                return
+            and_conditions.append({"or": [{"registrationFile_": {f"{field}_contains": [v]}} for v in values]})
+
+        any_of_list("supportedTrusts", filters.supportedTrust)
+        any_of_list("a2aSkills", filters.a2aSkills)
+        any_of_list("mcpTools", filters.mcpTools)
+        any_of_list("mcpPrompts", filters.mcpPrompts)
+        any_of_list("mcpResources", filters.mcpResources)
+        any_of_list("oasfSkills", filters.oasfSkills)
+        any_of_list("oasfDomains", filters.oasfDomains)
+
+        if filters.hasEndpoints is not None:
+            if filters.hasEndpoints:
+                and_conditions.append(
+                    {
+                        "or": [
+                            {"registrationFile_": {"webEndpoint_not": None}},
+                            {"registrationFile_": {"mcpEndpoint_not": None}},
+                            {"registrationFile_": {"a2aEndpoint_not": None}},
+                        ]
                     }
-                params.chains = valid_chains
+                )
+            else:
+                and_conditions.append({"registrationFile_": {"webEndpoint": None, "mcpEndpoint": None, "a2aEndpoint": None}})
+
+        if not and_conditions:
+            return base
+        return {"and": [base, *and_conditions]}
+
+    def _intersect_ids(self, a: Optional[List[str]], b: Optional[List[str]]) -> Optional[List[str]]:
+        if a is None and b is None:
+            return None
+        if a is None:
+            return b or []
+        if b is None:
+            return a or []
+        bset = set(b)
+        return [x for x in a if x in bset]
+
+    def _utf8_to_hex(self, s: str) -> str:
+        return "0x" + s.encode("utf-8").hex()
+
+    def _prefilter_by_metadata(self, filters: SearchFilters, chains: List[int]) -> Optional[Dict[int, List[str]]]:
+        key = filters.hasMetadataKey or (filters.metadataValue.get("key") if isinstance(filters.metadataValue, dict) else None)
+        if not key:
+            return None
+        value_str = None
+        if isinstance(filters.metadataValue, dict):
+            value_str = filters.metadataValue.get("value")
+        value_hex = self._utf8_to_hex(str(value_str)) if value_str is not None else None
+
+        first = 1000
+        max_rows = 5000
+        out: Dict[int, List[str]] = {}
+
+        for chain_id in chains:
+            sub = self._get_subgraph_client_for_chain(chain_id)
+            if sub is None:
+                out[chain_id] = []
+                continue
+            ids: List[str] = []
+            for skip in range(0, max_rows, first):
+                where: Dict[str, Any] = {"key": key}
+                if value_hex is not None:
+                    where["value"] = value_hex
+                rows = sub.query_agent_metadatas(where=where, first=first, skip=skip)
+                for r in rows:
+                    agent = r.get("agent") or {}
+                    aid = agent.get("id")
+                    if aid:
+                        ids.append(str(aid))
+                if len(rows) < first:
+                    break
+            out[chain_id] = sorted(list(set(ids)))
+        return out
+
+    def _prefilter_by_feedback(
+        self,
+        filters: SearchFilters,
+        chains: List[int],
+        candidate_ids_by_chain: Optional[Dict[int, List[str]]] = None,
+    ) -> tuple[Optional[Dict[int, List[str]]], Dict[str, Dict[str, float]]]:
+        fb = filters.feedback
+        if fb is None:
+            return None, {}
+
+        include_revoked = bool(getattr(fb, "includeRevoked", False))
+        has_threshold = any(
+            x is not None
+            for x in [
+                getattr(fb, "minCount", None),
+                getattr(fb, "maxCount", None),
+                getattr(fb, "minValue", None),
+                getattr(fb, "maxValue", None),
+            ]
+        )
+        has_any_constraint = any(
+            [
+                bool(getattr(fb, "hasResponse", False)),
+                bool(getattr(fb, "fromReviewers", None)),
+                bool(getattr(fb, "endpoint", None)),
+                bool(getattr(fb, "tag", None)),
+                bool(getattr(fb, "tag1", None)),
+                bool(getattr(fb, "tag2", None)),
+            ]
+        )
+
+        # If hasNoFeedback/hasFeedback are the ONLY feedback constraint, we push them down via Agent.totalFeedback in _build_where_v2.
+        if getattr(fb, "hasNoFeedback", False) and not has_threshold and not has_any_constraint:
+            return None, {}
+        if getattr(fb, "hasFeedback", False) and not has_threshold and not has_any_constraint:
+            return None, {}
+
+        # Otherwise, hasNoFeedback requires an explicit candidate set to subtract from.
+        if getattr(fb, "hasNoFeedback", False):
+            if not candidate_ids_by_chain or not any(candidate_ids_by_chain.get(c) for c in chains):
+                raise ValueError("feedback.hasNoFeedback requires a pre-filtered candidate set (e.g. agentIds or keyword).")
+
+        first = 1000
+        max_rows = 5000
+
+        sums: Dict[str, float] = {}
+        counts: Dict[str, int] = {}
+        matched_by_chain: Dict[int, set[str]] = {}
+
+        for chain_id in chains:
+            sub = self._get_subgraph_client_for_chain(chain_id)
+            if sub is None:
+                continue
+            candidates = (candidate_ids_by_chain or {}).get(chain_id)
+
+            base: Dict[str, Any] = {}
+            and_conditions: List[Dict[str, Any]] = []
+
+            if not include_revoked:
+                base["isRevoked"] = False
+            from_reviewers = getattr(fb, "fromReviewers", None)
+            if from_reviewers:
+                base["clientAddress_in"] = [str(a).lower() for a in from_reviewers]
+            endpoint = getattr(fb, "endpoint", None)
+            if endpoint:
+                base["endpoint_contains_nocase"] = endpoint
+            if candidates:
+                base["agent_in"] = candidates
+
+            tag1 = getattr(fb, "tag1", None)
+            tag2 = getattr(fb, "tag2", None)
+            tag = getattr(fb, "tag", None)
+            if tag1:
+                base["tag1"] = tag1
+            if tag2:
+                base["tag2"] = tag2
+            if tag:
+                and_conditions.append({"or": [{"tag1": tag}, {"tag2": tag}]})
+
+            where: Dict[str, Any] = {"and": [base, *and_conditions]} if and_conditions else base
+
+            for skip in range(0, max_rows, first):
+                rows = sub.query_feedbacks_minimal(where=where, first=first, skip=skip, order_by="createdAt", order_direction="desc")
+                for r in rows:
+                    agent = r.get("agent") or {}
+                    aid = agent.get("id")
+                    if not aid:
+                        continue
+                    if getattr(fb, "hasResponse", False):
+                        responses = r.get("responses") or []
+                        if not isinstance(responses, list) or len(responses) == 0:
+                            continue
+                    try:
+                        v = float(r.get("value"))
+                    except Exception:
+                        continue
+                    aid_s = str(aid)
+                    sums[aid_s] = sums.get(aid_s, 0.0) + v
+                    counts[aid_s] = counts.get(aid_s, 0) + 1
+                    matched_by_chain.setdefault(chain_id, set()).add(aid_s)
+                if len(rows) < first:
+                    break
+
+        stats: Dict[str, Dict[str, float]] = {}
+        for aid, cnt in counts.items():
+            avg = (sums.get(aid, 0.0) / cnt) if cnt > 0 else 0.0
+            stats[aid] = {"count": float(cnt), "avg": float(avg)}
+
+        def passes(aid: str) -> bool:
+            st = stats.get(aid, {"count": 0.0, "avg": 0.0})
+            cnt = st["count"]
+            avg = st["avg"]
+            min_count = getattr(fb, "minCount", None)
+            max_count = getattr(fb, "maxCount", None)
+            min_val = getattr(fb, "minValue", None)
+            max_val = getattr(fb, "maxValue", None)
+            if min_count is not None and cnt < float(min_count):
+                return False
+            if max_count is not None and cnt > float(max_count):
+                return False
+            if min_val is not None and avg < float(min_val):
+                return False
+            if max_val is not None and avg > float(max_val):
+                return False
+            return True
+
+        allow: Dict[int, List[str]] = {}
+        for chain_id in chains:
+            matched = matched_by_chain.get(chain_id, set())
+            candidates = (candidate_ids_by_chain or {}).get(chain_id)
+
+            if getattr(fb, "hasNoFeedback", False):
+                base_list = candidates or []
+                allow[chain_id] = [x for x in base_list if x not in matched]
+                continue
+
+            ids = list(matched)
+            if has_threshold:
+                ids = [x for x in ids if passes(x)]
+            elif has_any_constraint or getattr(fb, "hasFeedback", False):
+                ids = [x for x in ids if counts.get(x, 0) > 0]
+
+            if candidates:
+                cset = set(candidates)
+                ids = [x for x in ids if x in cset]
+
+            allow[chain_id] = ids
+
+        return allow, stats
+
+    def _search_unified_no_keyword(self, filters: SearchFilters, options: SearchOptions) -> Dict[str, Any]:
+        if not self.subgraph_client:
+            raise ValueError("Subgraph client required for searchAgents")
+
+        field, direction = self._parse_sort(options.sort, False)
+        chains = self._resolve_chains(filters, False)
+        page_size = options.pageSize or 50
+        per_chain_skip = self._parse_per_chain_cursor(chains, options.cursor)
+        ids_by_chain = self._normalize_agent_ids(filters, chains)
+        metadata_ids_by_chain = self._prefilter_by_metadata(filters, chains)
+        candidate_for_feedback: Dict[int, List[str]] = {}
+        for c in chains:
+            ids0 = self._intersect_ids((ids_by_chain or {}).get(c), (metadata_ids_by_chain or {}).get(c))
+            if ids0:
+                candidate_for_feedback[c] = ids0
+        feedback_ids_by_chain, feedback_stats_by_id = self._prefilter_by_feedback(
+            filters, chains, candidate_for_feedback if candidate_for_feedback else None
+        )
 
-            return asyncio.run(
-                self._search_agents_across_chains(params, sort, page_size, cursor)
+        order_by = field if field in ("createdAt", "updatedAt", "name", "chainId", "lastActivity", "totalFeedback") else "updatedAt"
+        if field == "feedbackCount":
+            order_by = "totalFeedback"
+
+        # Fetch one page per chain (page_size + 1) and merge client-side.
+        chain_results: List[Dict[str, Any]] = []
+        successful: List[int] = []
+        failed: List[int] = []
+
+        for chain_id in chains:
+            client = self._get_subgraph_client_for_chain(chain_id)
+            if client is None:
+                failed.append(chain_id)
+                chain_results.append({"chainId": chain_id, "items": []})
+                continue
+            try:
+                ids0 = self._intersect_ids((ids_by_chain or {}).get(chain_id), (metadata_ids_by_chain or {}).get(chain_id))
+                ids = self._intersect_ids(ids0, (feedback_ids_by_chain or {}).get(chain_id))
+                if ids is not None and len(ids) == 0:
+                    successful.append(chain_id)
+                    chain_results.append({"chainId": chain_id, "items": []})
+                    continue
+                where = self._build_where_v2(filters, ids)
+                agents = client.get_agents_v2(where=where, first=page_size + 1, skip=per_chain_skip.get(chain_id, 0), order_by=order_by, order_direction=direction)
+                successful.append(chain_id)
+                chain_results.append({"chainId": chain_id, "items": agents})
+            except Exception:
+                failed.append(chain_id)
+                chain_results.append({"chainId": chain_id, "items": []})
+
+        # Convert to AgentSummary objects and k-way merge using the same sort field.
+        def to_summary(agent_data: Dict[str, Any]) -> AgentSummary:
+            reg_file = agent_data.get("registrationFile") or {}
+            if not isinstance(reg_file, dict):
+                reg_file = {}
+            aid = str(agent_data.get("id", ""))
+            st = feedback_stats_by_id.get(aid) or {}
+            return AgentSummary(
+                chainId=int(agent_data.get("chainId", 0)),
+                agentId=aid,
+                name=reg_file.get("name") or aid,
+                image=reg_file.get("image"),
+                description=reg_file.get("description", "") or "",
+                owners=[agent_data.get("owner", "")] if agent_data.get("owner") else [],
+                operators=agent_data.get("operators", []) or [],
+                mcp=reg_file.get("mcpEndpoint") or None,
+                a2a=reg_file.get("a2aEndpoint") or None,
+                web=reg_file.get("webEndpoint") or None,
+                email=reg_file.get("emailEndpoint") or None,
+                ens=reg_file.get("ens"),
+                did=reg_file.get("did"),
+                walletAddress=agent_data.get("agentWallet"),
+                supportedTrusts=reg_file.get("supportedTrusts", []) or [],
+                a2aSkills=reg_file.get("a2aSkills", []) or [],
+                mcpTools=reg_file.get("mcpTools", []) or [],
+                mcpPrompts=reg_file.get("mcpPrompts", []) or [],
+                mcpResources=reg_file.get("mcpResources", []) or [],
+                oasfSkills=reg_file.get("oasfSkills", []) or [],
+                oasfDomains=reg_file.get("oasfDomains", []) or [],
+                active=bool(reg_file.get("active", False)),
+                x402support=bool(reg_file.get("x402Support", reg_file.get("x402support", False))),
+                createdAt=agent_data.get("createdAt"),
+                updatedAt=agent_data.get("updatedAt"),
+                lastActivity=agent_data.get("lastActivity"),
+                agentURI=agent_data.get("agentURI"),
+                agentURIType=agent_data.get("agentURIType"),
+                feedbackCount=agent_data.get("totalFeedback"),
+                averageValue=float(st.get("avg")) if st.get("avg") is not None else None,
+                extras={},
             )
 
-        # Use subgraph if available (preferred)
-        if self.subgraph_client:
-            return self._search_agents_via_subgraph(params, sort, page_size, cursor)
+        per_chain_lists: Dict[int, List[AgentSummary]] = {r["chainId"]: [to_summary(a) for a in r["items"]] for r in chain_results}
+        indices: Dict[int, int] = {c: 0 for c in chains}
+        consumed: Dict[int, int] = {c: 0 for c in chains}
+
+        def key(agent: AgentSummary):
+            v = getattr(agent, field, None)
+            if v is None:
+                return 0
+            return v
+
+        def compare(a: AgentSummary, b: AgentSummary) -> bool:
+            # return True if a should come before b
+            if field == "name":
+                av = (a.name or "").lower()
+                bv = (b.name or "").lower()
+                return av < bv if direction == "asc" else av > bv
+            try:
+                av = float(key(a))
+                bv = float(key(b))
+            except Exception:
+                av = 0.0
+                bv = 0.0
+            return av < bv if direction == "asc" else av > bv
+
+        merged: List[AgentSummary] = []
+        while len(merged) < page_size:
+            best_chain: Optional[int] = None
+            best_item: Optional[AgentSummary] = None
+            for c in chains:
+                idx = indices[c]
+                arr = per_chain_lists.get(c, [])
+                if idx >= len(arr):
+                    continue
+                cand = arr[idx]
+                if best_item is None or compare(cand, best_item):
+                    best_item = cand
+                    best_chain = c
+            if best_item is None or best_chain is None:
+                break
+            merged.append(best_item)
+            indices[best_chain] += 1
+            consumed[best_chain] += 1
+
+        has_more = any(
+            (indices[c] < len(per_chain_lists.get(c, []))) or (len(per_chain_lists.get(c, [])) > page_size)
+            for c in chains
+        )
+        next_cursor = None
+        if has_more:
+            next_skips = {c: per_chain_skip.get(c, 0) + consumed.get(c, 0) for c in chains}
+            next_cursor = self._encode_per_chain_cursor(next_skips)
 
-        # Fallback to blockchain queries
-        return self._search_agents_via_blockchain(params, sort, page_size, cursor)
+        return {
+            "items": merged,
+            "nextCursor": next_cursor,
+            "meta": {
+                "chains": chains,
+                "successfulChains": successful,
+                "failedChains": failed,
+                "totalResults": 0,
+            },
+        }
+
+    def _search_unified_with_keyword(self, filters: SearchFilters, options: SearchOptions) -> Dict[str, Any]:
+        field, direction = self._parse_sort(options.sort, True)
+        page_size = options.pageSize or 50
+        offset = self._parse_cursor_offset(options.cursor)
+        chains = self._resolve_chains(filters, True)
+
+        client = SemanticSearchClient()
+        semantic_results = client.search(
+            str(filters.keyword),
+            min_score=options.semanticMinScore,
+            top_k=options.semanticTopK,
+        )
+
+        allowed = set(chains)
+        semantic_results = [r for r in semantic_results if r.chainId in allowed]
+        ids_by_chain: Dict[int, List[str]] = {}
+        score_by_id: Dict[str, float] = {}
+        for r in semantic_results:
+            ids_by_chain.setdefault(r.chainId, []).append(r.agentId)
+            score_by_id[r.agentId] = r.score
+
+        fetched: List[AgentSummary] = []
+        successful: List[int] = []
+        failed: List[int] = []
+
+        metadata_ids_by_chain = self._prefilter_by_metadata(filters, chains)
+        feedback_ids_by_chain, feedback_stats_by_id = self._prefilter_by_feedback(filters, chains, ids_by_chain)
+
+        # Query agents by id_in chunks and apply remaining filters via where.
+        chunk_size = 500
+        for chain_id in chains:
+            sub = self._get_subgraph_client_for_chain(chain_id)
+            ids = ids_by_chain.get(chain_id, [])
+            if sub is None:
+                if ids:
+                    failed.append(chain_id)
+                continue
+            try:
+                successful.append(chain_id)
+                for i in range(0, len(ids), chunk_size):
+                    chunk = ids[i : i + chunk_size]
+                    ids2 = self._intersect_ids(chunk, (metadata_ids_by_chain or {}).get(chain_id))
+                    ids3 = self._intersect_ids(ids2, (feedback_ids_by_chain or {}).get(chain_id))
+                    if ids3 is not None and len(ids3) == 0:
+                        continue
+                    if ids3 is not None and len(ids3) == 0:
+                        continue
+                    where = self._build_where_v2(filters, ids3)
+                    agents = sub.get_agents_v2(where=where, first=len(ids3 or []), skip=0, order_by="updatedAt", order_direction="desc")
+                    for a in agents:
+                        reg_file = a.get("registrationFile") or {}
+                        if not isinstance(reg_file, dict):
+                            reg_file = {}
+                        aid = str(a.get("id", ""))
+                        st = feedback_stats_by_id.get(aid) or {}
+                        fetched.append(
+                            AgentSummary(
+                                chainId=int(a.get("chainId", 0)),
+                                agentId=aid,
+                                name=reg_file.get("name") or aid,
+                                image=reg_file.get("image"),
+                                description=reg_file.get("description", "") or "",
+                                owners=[a.get("owner", "")] if a.get("owner") else [],
+                                operators=a.get("operators", []) or [],
+                                mcp=reg_file.get("mcpEndpoint") or None,
+                                a2a=reg_file.get("a2aEndpoint") or None,
+                                web=reg_file.get("webEndpoint") or None,
+                                email=reg_file.get("emailEndpoint") or None,
+                                ens=reg_file.get("ens"),
+                                did=reg_file.get("did"),
+                                walletAddress=a.get("agentWallet"),
+                                supportedTrusts=reg_file.get("supportedTrusts", []) or [],
+                                a2aSkills=reg_file.get("a2aSkills", []) or [],
+                                mcpTools=reg_file.get("mcpTools", []) or [],
+                                mcpPrompts=reg_file.get("mcpPrompts", []) or [],
+                                mcpResources=reg_file.get("mcpResources", []) or [],
+                                oasfSkills=reg_file.get("oasfSkills", []) or [],
+                                oasfDomains=reg_file.get("oasfDomains", []) or [],
+                                active=bool(reg_file.get("active", False)),
+                                x402support=bool(reg_file.get("x402Support", reg_file.get("x402support", False))),
+                                createdAt=a.get("createdAt"),
+                                updatedAt=a.get("updatedAt"),
+                                lastActivity=a.get("lastActivity"),
+                                agentURI=a.get("agentURI"),
+                                agentURIType=a.get("agentURIType"),
+                                feedbackCount=a.get("totalFeedback"),
+                                semanticScore=float(score_by_id.get(aid, 0.0)),
+                                averageValue=float(st.get("avg")) if st.get("avg") is not None else None,
+                                extras={},
+                            )
+                        )
+            except Exception:
+                failed.append(chain_id)
+
+        # Default keyword sorting: semanticScore desc, unless overridden.
+        sort_field = field if options.sort and len(options.sort) > 0 else "semanticScore"
+        sort_dir = direction if options.sort and len(options.sort) > 0 else "desc"
+
+        def sort_key(agent: AgentSummary):
+            v = getattr(agent, sort_field, None)
+            if v is None:
+                return 0
+            if sort_field == "name":
+                return (agent.name or "").lower()
+            try:
+                return float(v)
+            except Exception:
+                return 0
+
+        fetched.sort(key=sort_key, reverse=(sort_dir == "desc"))
+        page = fetched[offset : offset + page_size]
+        next_cursor = str(offset + page_size) if len(fetched) > offset + page_size else None
+
+        return {
+            "items": page,
+            "nextCursor": next_cursor,
+            "meta": {
+                "chains": chains,
+                "successfulChains": successful,
+                "failedChains": failed,
+                "totalResults": len(fetched),
+            }
+        }
 
     async def _search_agents_across_chains(
         self,
-        params: SearchParams,
+        params: SearchFilters,
         sort: List[str],
         page_size: int,
         cursor: Optional[str] = None,
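The replacement API takes a SearchFilters plus a SearchOptions instead of the old (params, sort, page_size, cursor) signature, and returns a dict with items, nextCursor and meta (per-chain success/failure plus timing). A rough usage sketch; the SearchFilters/SearchOptions constructors live in .models and are not part of this diff, so the field names below are only those this module reads (filters.chains, filters.hasMCP, options.sort, options.pageSize, options.cursor), and the chain id in the cursor example is invented:

# `indexer` is assumed to be an already-constructed AgentIndexer.
filters = SearchFilters(chains="all", hasMCP=True)                       # no keyword -> subgraph-only path
options = SearchOptions(sort=["updatedAt:desc"], pageSize=25, cursor=None)

page = indexer.search_agents(filters, options)
for agent in page["items"]:
    print(agent.chainId, agent.agentId, agent.mcp)

# Without a keyword the cursor is a JSON object of per-chain skip offsets
# (see _encode_per_chain_cursor), e.g. '{"1": 25, "8453": 25}'; pass it back verbatim.
if page["nextCursor"]:
    options = SearchOptions(sort=["updatedAt:desc"], pageSize=25, cursor=page["nextCursor"])
    more = indexer.search_agents(filters, options)

# With a keyword, results are ranked by SemanticSearchClient scores and the
# cursor degrades to a plain integer offset (see _parse_cursor_offset).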
@@ -566,20 +1252,20 @@ class AgentIndexer:
             reg_file_where["active"] = params.active
         if params.x402support is not None:
             reg_file_where["x402support"] = params.x402support
-        if params.mcp is not None:
-            if params.mcp:
+        if params.hasMCP is not None:
+            if params.hasMCP:
                 reg_file_where["mcpEndpoint_not"] = None
             else:
                 reg_file_where["mcpEndpoint"] = None
-        if params.a2a is not None:
-            if params.a2a:
+        if params.hasA2A is not None:
+            if params.hasA2A:
                 reg_file_where["a2aEndpoint_not"] = None
             else:
                 reg_file_where["a2aEndpoint"] = None
-        if params.ens is not None:
-            reg_file_where["ens"] = params.ens
-        if params.did is not None:
-            reg_file_where["did"] = params.did
+        if params.ensContains is not None:
+            reg_file_where["ens_contains_nocase"] = params.ensContains
+        if params.didContains is not None:
+            reg_file_where["did_contains_nocase"] = params.didContains
         if params.walletAddress is not None:
             reg_file_where["agentWallet"] = params.walletAddress
 
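Note the semantics change folded into the rename: ens/did filtering moves from exact equality to case-insensitive substring matching. The where fragment this block builds changes accordingly (keys exactly as in the diff; the values are illustrative only):

# 1.4.2 — params.ens / params.did produced exact-match conditions:
reg_file_where = {"ens": "alice.eth", "did": "did:web:alice.example"}

# 1.5.0b1 — params.ensContains / params.didContains produce case-insensitive substring conditions:
reg_file_where = {"ens_contains_nocase": "alice", "did_contains_nocase": "did:web"}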
@@ -766,7 +1452,7 @@ class AgentIndexer:
 
     def _search_agents_via_subgraph(
         self,
-        params: SearchParams,
+        params: SearchFilters,
         sort: List[str],
         page_size: int,
         cursor: Optional[str] = None,
@@ -782,20 +1468,20 @@ class AgentIndexer:
             reg_file_where["active"] = params.active
         if params.x402support is not None:
             reg_file_where["x402support"] = params.x402support
-        if params.mcp is not None:
-            if params.mcp:
+        if params.hasMCP is not None:
+            if params.hasMCP:
                 reg_file_where["mcpEndpoint_not"] = None
             else:
                 reg_file_where["mcpEndpoint"] = None
-        if params.a2a is not None:
-            if params.a2a:
+        if params.hasA2A is not None:
+            if params.hasA2A:
                 reg_file_where["a2aEndpoint_not"] = None
             else:
                 reg_file_where["a2aEndpoint"] = None
-        if params.ens is not None:
-            reg_file_where["ens"] = params.ens
-        if params.did is not None:
-            reg_file_where["did"] = params.did
+        if params.ensContains is not None:
+            reg_file_where["ens_contains_nocase"] = params.ensContains
+        if params.didContains is not None:
+            reg_file_where["did_contains_nocase"] = params.didContains
         if params.walletAddress is not None:
             reg_file_where["agentWallet"] = params.walletAddress
 
@@ -896,7 +1582,7 @@ class AgentIndexer:
 
     def _search_agents_via_blockchain(
        self,
-        params: SearchParams,
+        params: SearchFilters,
        sort: List[str],
        page_size: int,
        cursor: Optional[str] = None,
@@ -904,7 +1590,7 @@ class AgentIndexer:
         """Search for agents by querying the blockchain (fallback)."""
         return {"items": [], "nextCursor": None}
 
-    def _apply_filters(self, agents: List[Dict[str, Any]], params: SearchParams) -> List[Dict[str, Any]]:
+    def _apply_filters(self, agents: List[Dict[str, Any]], params: SearchFilters) -> List[Dict[str, Any]]:
         """Apply search filters to agents."""
         filtered = agents
 
@@ -924,17 +1610,21 @@ class AgentIndexer:
         if params.operators is not None:
             filtered = [a for a in filtered if any(op in params.operators for op in a.get("operators", []))]
 
-        if params.mcp is not None:
-            filtered = [a for a in filtered if a.get("mcp") == params.mcp]
+        if getattr(params, "hasMCP", None) is not None:
+            has = params.hasMCP
+            filtered = [a for a in filtered if bool(a.get("mcp")) == bool(has)]
 
-        if params.a2a is not None:
-            filtered = [a for a in filtered if a.get("a2a") == params.a2a]
+        if getattr(params, "hasA2A", None) is not None:
+            has = params.hasA2A
+            filtered = [a for a in filtered if bool(a.get("a2a")) == bool(has)]
 
-        if params.ens is not None:
-            filtered = [a for a in filtered if a.get("ens") and params.ens.lower() in a.get("ens", "").lower()]
+        if getattr(params, "ensContains", None) is not None:
+            needle = (params.ensContains or "").lower()
+            filtered = [a for a in filtered if needle in (a.get("ens") or "").lower()]
 
-        if params.did is not None:
-            filtered = [a for a in filtered if a.get("did") == params.did]
+        if getattr(params, "didContains", None) is not None:
+            needle = (params.didContains or "").lower()
+            filtered = [a for a in filtered if needle in (a.get("did") or "").lower()]
 
         if params.walletAddress is not None:
             filtered = [a for a in filtered if a.get("walletAddress") == params.walletAddress]
@@ -1591,7 +2281,7 @@ class AgentIndexer:
     def _apply_cross_chain_filters(
         self,
         agents: List[Dict[str, Any]],
-        params: SearchParams
+        params: SearchFilters
     ) -> List[Dict[str, Any]]:
         """
         Apply filters that couldn't be expressed in subgraph WHERE clause.
@@ -1656,7 +2346,7 @@ class AgentIndexer:
     def _deduplicate_agents_cross_chain(
         self,
         agents: List[Dict[str, Any]],
-        params: SearchParams
+        params: SearchFilters
    ) -> List[Dict[str, Any]]:
         """
         Deduplicate agents across chains (if requested).
@@ -1669,42 +2359,8 @@ class AgentIndexer:
         - Keep the first instance encountered
         - Add 'deployedOn' array with all chain IDs where this agent exists
         """
-        # Check if deduplication requested
-        if not params.deduplicate_cross_chain:
-            return agents
-
-        # Group agents by identity key
-        seen = {}
-        deduplicated = []
-
-        for agent in agents:
-            # Create identity key: (owner, name, description)
-            # This identifies "the same agent" across chains
-            owner = agent.get('owner', '').lower()
-            reg_file = agent.get('registrationFile', {})
-            name = reg_file.get('name', '')
-            description = reg_file.get('description', '')
-
-            identity_key = (owner, name, description)
-
-            if identity_key not in seen:
-                # First time seeing this agent
-                seen[identity_key] = agent
-
-                # Add deployedOn array
-                agent['deployedOn'] = [agent['chainId']]
-
-                deduplicated.append(agent)
-            else:
-                # Already seen this agent on another chain
-                # Add this chain to deployedOn array
-                seen[identity_key]['deployedOn'].append(agent['chainId'])
-
-        logger.info(
-            f"Deduplication: {len(agents)} agents → {len(deduplicated)} unique agents"
-        )
-
-        return deduplicated
+        # Deduplication across chains was part of an older API surface; the unified search does not deduplicate.
+        return agents
 
     def _sort_agents_cross_chain(
         self,
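_deduplicate_agents_cross_chain is now a pass-through, so params.deduplicate_cross_chain is no longer honored. Callers that depended on it can reproduce the removed behaviour on the raw agent dicts themselves; a sketch that mirrors the deleted (owner, name, description) identity-key logic, not a supported SDK API:

def dedupe_cross_chain(agents: list) -> list:
    # Group by the same identity key the removed code used.
    seen = {}
    deduplicated = []
    for agent in agents:
        reg_file = agent.get('registrationFile', {}) or {}
        identity_key = (
            (agent.get('owner', '') or '').lower(),
            reg_file.get('name', ''),
            reg_file.get('description', ''),
        )
        if identity_key not in seen:
            agent['deployedOn'] = [agent.get('chainId')]
            seen[identity_key] = agent
            deduplicated.append(agent)
        else:
            seen[identity_key]['deployedOn'].append(agent.get('chainId'))
    return deduplicated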