exa-py 1.8.3__tar.gz → 1.8.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of exa-py might be problematic. See the registry's advisory page for more details.

exa_py-1.8.4/PKG-INFO ADDED
@@ -0,0 +1,104 @@
1
+ Metadata-Version: 2.1
2
+ Name: exa_py
3
+ Version: 1.8.4
4
+ Summary: Python SDK for Exa API.
5
+ Home-page: https://github.com/exa-labs/exa-py
6
+ Author: Exa
7
+ Author-email: hello@exa.ai
8
+ Classifier: Development Status :: 5 - Production/Stable
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Typing :: Typed
12
+ Classifier: Programming Language :: Python :: 3.8
13
+ Classifier: Programming Language :: Python :: 3.9
14
+ Classifier: Programming Language :: Python :: 3.10
15
+ Classifier: Programming Language :: Python :: 3.11
16
+ Classifier: Programming Language :: Python :: 3.12
17
+ Description-Content-Type: text/markdown
18
+
19
+ # Exa
20
+
21
+ Exa (formerly Metaphor) API in Python
22
+
23
+ Note: This API is basically the same as `metaphor-python` but reflects new
24
+ features associated with Metaphor's rename to Exa. New site is https://exa.ai
25
+
26
+ ## Installation
27
+
28
+ ```bash
29
+ pip install exa_py
30
+ ```
31
+
32
+ ## Usage
33
+
34
+ Import the package and initialize the Exa client with your API key:
35
+
36
+ ```python
37
+ from exa_py import Exa
38
+
39
+ exa = Exa(api_key="your-api-key")
40
+ ```
41
+
42
+ ## Common requests
43
+ ```python
44
+
45
+ # basic search
46
+ results = exa.search("This is a Exa query:")
47
+
48
+ # autoprompted search
49
+ results = exa.search("autopromptable query", use_autoprompt=True)
50
+
51
+ # keyword search (non-neural)
52
+ results = exa.search("Google-style query", type="keyword")
53
+
54
+ # search with date filters
55
+ results = exa.search("This is a Exa query:", start_published_date="2019-01-01", end_published_date="2019-01-31")
56
+
57
+ # search with domain filters
58
+ results = exa.search("This is a Exa query:", include_domains=["www.cnn.com", "www.nytimes.com"])
59
+
60
+ # search and get text contents
61
+ results = exa.search_and_contents("This is a Exa query:")
62
+
63
+ # search and get highlights
64
+ results = exa.search_and_contents("This is a Exa query:", highlights=True)
65
+
66
+ # search and get contents with contents options
67
+ results = exa.search_and_contents("This is a Exa query:",
68
+ text={"include_html_tags": True, "max_characters": 1000},
69
+ highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
70
+
71
+ # find similar documents
72
+ results = exa.find_similar("https://example.com")
73
+
74
+ # find similar excluding source domain
75
+ results = exa.find_similar("https://example.com", exclude_source_domain=True)
76
+
77
+ # find similar with contents
78
+ results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)
79
+
80
+ # get text contents
81
+ results = exa.get_contents(["urls"])
82
+
83
+ # get highlights
84
+ results = exa.get_contents(["urls"], highlights=True)
85
+
86
+ # get contents with contents options
87
+ results = exa.get_contents(["urls"],
88
+ text={"include_html_tags": True, "max_characters": 1000},
89
+ highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
90
+
91
+ # basic answer
92
+ response = exa.answer("This is a query to answer a question")
93
+
94
+ # answer with expanded queries and full text
95
+ response = exa.answer("This is a query to answer a question", expanded_queries_limit=3, include_text=True)
96
+
97
+ # answer with streaming
98
+ response = exa.answer("This is a query to answer with streaming:", stream=True)
99
+
100
+ # Print each chunk as it arrives when answer streaming is enabled
101
+ for chunk in response:
102
+ print(chunk)
103
+ ```
104
+
@@ -58,11 +58,13 @@ def to_camel_case(data: dict) -> dict:
58
58
  Returns:
59
59
  dict: The dictionary with keys converted to camelCase format.
60
60
  """
61
- return {
62
- snake_to_camel(k): to_camel_case(v) if isinstance(v, dict) else v
63
- for k, v in data.items()
64
- if v is not None
65
- }
61
+ if isinstance(data, dict):
62
+ return {
63
+ snake_to_camel(k): to_camel_case(v) if isinstance(v, dict) else v
64
+ for k, v in data.items()
65
+ if v is not None
66
+ }
67
+ return data
66
68
 
67
69
 
68
70
  def camel_to_snake(camel_str: str) -> str:
@@ -88,10 +90,12 @@ def to_snake_case(data: dict) -> dict:
88
90
  Returns:
89
91
  dict: The dictionary with keys converted to snake_case format.
90
92
  """
91
- return {
92
- camel_to_snake(k): to_snake_case(v) if isinstance(v, dict) else v
93
- for k, v in data.items()
94
- }
93
+ if isinstance(data, dict):
94
+ return {
95
+ camel_to_snake(k): to_snake_case(v) if isinstance(v, dict) else v
96
+ for k, v in data.items()
97
+ }
98
+ return data
95
99
 
96
100
 
97
101
  SEARCH_OPTIONS_TYPES = {
@@ -106,9 +110,10 @@ SEARCH_OPTIONS_TYPES = {
106
110
  "include_text": [list], # Must be present in webpage text. (One string, up to 5 words)
107
111
  "exclude_text": [list], # Must not be present in webpage text. (One string, up to 5 words)
108
112
  "use_autoprompt": [bool], # Convert query to Exa. (Default: false)
109
- "type": [str], # 'keyword', 'neural', or 'auto' (Default: auto).'neural' uses embeddings search, 'keyword' is SERP and 'auto' decides the best search type based on your query
113
+ "type": [str], # 'keyword', 'neural', or 'auto' (Default: auto)
110
114
  "category": [str], # A data category to focus on: 'company', 'research paper', 'news', 'pdf', 'github', 'tweet', 'personal site', 'linkedin profile', 'financial report'
111
- "flags": [list], # Experimental flags array for Exa usage.
115
+ "flags": [list], # Experimental flags array for Exa usage.
116
+ "moderation": [bool], # If true, moderate search results for safety.
112
117
  }
113
118
 
114
119
  FIND_SIMILAR_OPTIONS_TYPES = {
@@ -124,7 +129,7 @@ FIND_SIMILAR_OPTIONS_TYPES = {
124
129
  "exclude_text": [list],
125
130
  "exclude_source_domain": [bool],
126
131
  "category": [str],
127
- "flags": [list], # Experimental flags array for Exa usage.
132
+ "flags": [list], # Experimental flags array for Exa usage.
128
133
  }
129
134
 
130
135
  # the livecrawl options
@@ -198,7 +203,7 @@ class HighlightsContentsOptions(TypedDict, total=False):
198
203
  """A class representing the options that you can specify when requesting highlights
199
204
 
200
205
  Attributes:
201
- query (str): The query string for the highlights.
206
+ query (str): The query string for the highlights.
202
207
  num_sentences (int): Size of highlights to return, in sentences. Default: 5
203
208
  highlights_per_url (int): Number of highlights to return per URL. Default: 1
204
209
  """
@@ -217,6 +222,7 @@ class SummaryContentsOptions(TypedDict, total=False):
217
222
 
218
223
  query: str
219
224
 
225
+
220
226
  class ExtrasOptions(TypedDict, total=False):
221
227
  """A class representing additional extraction fields (e.g. links, images)"""
222
228
 
@@ -495,6 +501,7 @@ class ResultWithTextAndHighlightsAndSummary(_Result):
495
501
  f"Summary: {self.summary}\n"
496
502
  )
497
503
 
504
+
498
505
  @dataclass
499
506
  class AnswerResult:
500
507
  """A class representing a source result for an answer.
@@ -514,11 +521,11 @@ class AnswerResult:
514
521
  author: Optional[str] = None
515
522
 
516
523
  def __init__(self, **kwargs):
517
- self.url = kwargs['url']
518
- self.id = kwargs['id']
519
- self.title = kwargs.get('title')
520
- self.published_date = kwargs.get('published_date')
521
- self.author = kwargs.get('author')
524
+ self.url = kwargs["url"]
525
+ self.id = kwargs["id"]
526
+ self.title = kwargs.get("title")
527
+ self.published_date = kwargs.get("published_date")
528
+ self.author = kwargs.get("author")
522
529
 
523
530
  def __str__(self):
524
531
  return (
@@ -529,6 +536,7 @@ class AnswerResult:
529
536
  f"Author: {self.author}\n"
530
537
  )
531
538
 
539
+
532
540
  @dataclass
533
541
  class AnswerResponse:
534
542
  """A class representing the response for an answer operation.
@@ -546,6 +554,7 @@ class AnswerResponse:
546
554
  output += "\n\n".join(str(source) for source in self.sources)
547
555
  return output
548
556
 
557
+
549
558
  T = TypeVar("T")
550
559
 
551
560
 
@@ -599,7 +608,7 @@ class Exa:
599
608
  self,
600
609
  api_key: Optional[str],
601
610
  base_url: str = "https://api.exa.ai",
602
- user_agent: str = "exa-py 1.8.3",
611
+ user_agent: str = "exa-py 1.8.4",
603
612
  ):
604
613
  """Initialize the Exa client with the provided API key and optional base URL and user agent.
605
614
 
@@ -619,12 +628,25 @@ class Exa:
619
628
  self.headers = {"x-api-key": api_key, "User-Agent": user_agent}
620
629
 
621
630
  def request(self, endpoint: str, data):
631
+ """Send a POST request to the Exa API, optionally streaming if data['stream'] is True.
632
+
633
+ Args:
634
+ endpoint (str): The API endpoint (path).
635
+ data (dict): The JSON payload to send.
636
+
637
+ Returns:
638
+ Union[dict, Iterator[str]]: If streaming, returns an iterator of strings (line-by-line).
639
+ Otherwise, returns the JSON-decoded response as a dict.
640
+
641
+ Raises:
642
+ ValueError: If the request fails (non-200 status code).
643
+ """
622
644
  if data.get("stream"):
623
645
  res = requests.post(self.base_url + endpoint, json=data, headers=self.headers, stream=True)
624
646
  if res.status_code != 200:
625
647
  raise ValueError(f"Request failed with status code {res.status_code}: {res.text}")
626
- return (line.decode('utf-8') for line in res.iter_lines() if line)
627
-
648
+ return (line.decode("utf-8") for line in res.iter_lines() if line)
649
+
628
650
  res = requests.post(self.base_url + endpoint, json=data, headers=self.headers)
629
651
  if res.status_code != 200:
630
652
  raise ValueError(f"Request failed with status code {res.status_code}: {res.text}")
@@ -647,6 +669,7 @@ class Exa:
647
669
  type: Optional[str] = None,
648
670
  category: Optional[str] = None,
649
671
  flags: Optional[List[str]] = None,
672
+ moderation: Optional[bool] = None,
650
673
  ) -> SearchResponse[_Result]:
651
674
  """Perform a search with a prompt-engineered query to retrieve relevant results.
652
675
 
@@ -665,6 +688,7 @@ class Exa:
665
688
  type (str, optional): 'keyword' or 'neural' (default 'neural').
666
689
  category (str, optional): e.g. 'company'
667
690
  flags (List[str], optional): Experimental flags for Exa usage.
691
+ moderation (bool, optional): If True, the search results will be moderated for safety.
668
692
 
669
693
  Returns:
670
694
  SearchResponse: The response containing search results, etc.
@@ -697,12 +721,13 @@ class Exa:
697
721
  use_autoprompt: Optional[bool] = None,
698
722
  type: Optional[str] = None,
699
723
  category: Optional[str] = None,
724
+ flags: Optional[List[str]] = None,
725
+ moderation: Optional[bool] = None,
700
726
  livecrawl_timeout: Optional[int] = None,
701
727
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
702
728
  filter_empty_results: Optional[bool] = None,
703
729
  subpages: Optional[int] = None,
704
730
  extras: Optional[ExtrasOptions] = None,
705
- flags: Optional[List[str]] = None,
706
731
  ) -> SearchResponse[ResultWithText]:
707
732
  ...
708
733
 
@@ -724,12 +749,13 @@ class Exa:
724
749
  use_autoprompt: Optional[bool] = None,
725
750
  type: Optional[str] = None,
726
751
  category: Optional[str] = None,
752
+ flags: Optional[List[str]] = None,
753
+ moderation: Optional[bool] = None,
727
754
  subpages: Optional[int] = None,
728
755
  livecrawl_timeout: Optional[int] = None,
729
756
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
730
757
  filter_empty_results: Optional[bool] = None,
731
758
  extras: Optional[ExtrasOptions] = None,
732
- flags: Optional[List[str]] = None,
733
759
  ) -> SearchResponse[ResultWithText]:
734
760
  ...
735
761
 
@@ -753,11 +779,12 @@ class Exa:
753
779
  category: Optional[str] = None,
754
780
  subpages: Optional[int] = None,
755
781
  subpage_target: Optional[Union[str, List[str]]] = None,
782
+ flags: Optional[List[str]] = None,
783
+ moderation: Optional[bool] = None,
756
784
  livecrawl_timeout: Optional[int] = None,
757
785
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
758
786
  filter_empty_results: Optional[bool] = None,
759
787
  extras: Optional[ExtrasOptions] = None,
760
- flags: Optional[List[str]] = None,
761
788
  ) -> SearchResponse[ResultWithHighlights]:
762
789
  ...
763
790
 
@@ -780,13 +807,14 @@ class Exa:
780
807
  use_autoprompt: Optional[bool] = None,
781
808
  type: Optional[str] = None,
782
809
  category: Optional[str] = None,
783
- livecrawl_timeout: Optional[int] = None,
784
- livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
785
810
  subpages: Optional[int] = None,
786
811
  subpage_target: Optional[Union[str, List[str]]] = None,
812
+ flags: Optional[List[str]] = None,
813
+ moderation: Optional[bool] = None,
814
+ livecrawl_timeout: Optional[int] = None,
815
+ livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
787
816
  filter_empty_results: Optional[bool] = None,
788
817
  extras: Optional[ExtrasOptions] = None,
789
- flags: Optional[List[str]] = None,
790
818
  ) -> SearchResponse[ResultWithTextAndHighlights]:
791
819
  ...
792
820
 
@@ -810,11 +838,12 @@ class Exa:
810
838
  category: Optional[str] = None,
811
839
  subpages: Optional[int] = None,
812
840
  subpage_target: Optional[Union[str, List[str]]] = None,
841
+ flags: Optional[List[str]] = None,
842
+ moderation: Optional[bool] = None,
813
843
  livecrawl_timeout: Optional[int] = None,
814
844
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
815
845
  filter_empty_results: Optional[bool] = None,
816
846
  extras: Optional[ExtrasOptions] = None,
817
- flags: Optional[List[str]] = None,
818
847
  ) -> SearchResponse[ResultWithSummary]:
819
848
  ...
820
849
 
@@ -839,11 +868,12 @@ class Exa:
839
868
  category: Optional[str] = None,
840
869
  subpages: Optional[int] = None,
841
870
  subpage_target: Optional[Union[str, List[str]]] = None,
871
+ flags: Optional[List[str]] = None,
872
+ moderation: Optional[bool] = None,
842
873
  livecrawl_timeout: Optional[int] = None,
843
874
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
844
875
  filter_empty_results: Optional[bool] = None,
845
876
  extras: Optional[ExtrasOptions] = None,
846
- flags: Optional[List[str]] = None,
847
877
  ) -> SearchResponse[ResultWithTextAndSummary]:
848
878
  ...
849
879
 
@@ -868,11 +898,12 @@ class Exa:
868
898
  category: Optional[str] = None,
869
899
  subpages: Optional[int] = None,
870
900
  subpage_target: Optional[Union[str, List[str]]] = None,
901
+ flags: Optional[List[str]] = None,
902
+ moderation: Optional[bool] = None,
871
903
  livecrawl_timeout: Optional[int] = None,
872
904
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
873
905
  filter_empty_results: Optional[bool] = None,
874
906
  extras: Optional[ExtrasOptions] = None,
875
- flags: Optional[List[str]] = None,
876
907
  ) -> SearchResponse[ResultWithHighlightsAndSummary]:
877
908
  ...
878
909
 
@@ -896,13 +927,14 @@ class Exa:
896
927
  use_autoprompt: Optional[bool] = None,
897
928
  type: Optional[str] = None,
898
929
  category: Optional[str] = None,
930
+ flags: Optional[List[str]] = None,
931
+ moderation: Optional[bool] = None,
899
932
  livecrawl_timeout: Optional[int] = None,
900
933
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
901
934
  subpages: Optional[int] = None,
902
935
  subpage_target: Optional[Union[str, List[str]]] = None,
903
936
  filter_empty_results: Optional[bool] = None,
904
937
  extras: Optional[ExtrasOptions] = None,
905
- flags: Optional[List[str]] = None,
906
938
  ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
907
939
  ...
908
940
 
@@ -919,7 +951,11 @@ class Exa:
919
951
 
920
952
  validate_search_options(
921
953
  options,
922
- {**SEARCH_OPTIONS_TYPES, **CONTENTS_OPTIONS_TYPES, **CONTENTS_ENDPOINT_OPTIONS_TYPES},
954
+ {
955
+ **SEARCH_OPTIONS_TYPES,
956
+ **CONTENTS_OPTIONS_TYPES,
957
+ **CONTENTS_ENDPOINT_OPTIONS_TYPES,
958
+ },
923
959
  )
924
960
 
925
961
  # Nest the appropriate fields under "contents"
@@ -1076,13 +1112,19 @@ class Exa:
1076
1112
  flags: Optional[List[str]] = None,
1077
1113
  ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
1078
1114
  ...
1115
+
1079
1116
  def get_contents(self, urls: Union[str, List[str], List[_Result]], **kwargs):
1080
1117
  options = {
1081
1118
  k: v
1082
1119
  for k, v in {"urls": urls, **kwargs}.items()
1083
1120
  if k != "self" and v is not None
1084
1121
  }
1085
- if "text" not in options and "highlights" not in options and "summary" not in options and "extras" not in options:
1122
+ if (
1123
+ "text" not in options
1124
+ and "highlights" not in options
1125
+ and "summary" not in options
1126
+ and "extras" not in options
1127
+ ):
1086
1128
  options["text"] = True
1087
1129
 
1088
1130
  validate_search_options(
@@ -1093,9 +1135,9 @@ class Exa:
1093
1135
  data = self.request("/contents", options)
1094
1136
  return SearchResponse(
1095
1137
  [Result(**to_snake_case(result)) for result in data["results"]],
1096
- data["autopromptString"] if "autopromptString" in data else None,
1097
- data["resolvedSearchType"] if "resolvedSearchType" in data else None,
1098
- data["autoDate"] if "autoDate" in data else None,
1138
+ data.get("autopromptString"),
1139
+ data.get("resolvedSearchType"),
1140
+ data.get("autoDate"),
1099
1141
  )
1100
1142
 
1101
1143
  def find_similar(
@@ -1115,15 +1157,35 @@ class Exa:
1115
1157
  category: Optional[str] = None,
1116
1158
  flags: Optional[List[str]] = None,
1117
1159
  ) -> SearchResponse[_Result]:
1160
+ """Finds similar pages to a given URL, potentially with domain filters and date filters.
1161
+
1162
+ Args:
1163
+ url (str): The URL to find similar pages for.
1164
+ num_results (int, optional): Number of results to return. Default is None (server default).
1165
+ include_domains (List[str], optional): Domains to include in the search.
1166
+ exclude_domains (List[str], optional): Domains to exclude from the search.
1167
+ start_crawl_date (str, optional): Only links crawled after this date.
1168
+ end_crawl_date (str, optional): Only links crawled before this date.
1169
+ start_published_date (str, optional): Only links published after this date.
1170
+ end_published_date (str, optional): Only links published before this date.
1171
+ include_text (List[str], optional): Strings that must appear in the page text.
1172
+ exclude_text (List[str], optional): Strings that must not appear in the page text.
1173
+ exclude_source_domain (bool, optional): Whether to exclude the source domain.
1174
+ category (str, optional): A data category to focus on.
1175
+ flags (List[str], optional): Experimental flags.
1176
+
1177
+ Returns:
1178
+ SearchResponse[_Result]
1179
+ """
1118
1180
  options = {k: v for k, v in locals().items() if k != "self" and v is not None}
1119
1181
  validate_search_options(options, FIND_SIMILAR_OPTIONS_TYPES)
1120
1182
  options = to_camel_case(options)
1121
1183
  data = self.request("/findSimilar", options)
1122
1184
  return SearchResponse(
1123
1185
  [Result(**to_snake_case(result)) for result in data["results"]],
1124
- data["autopromptString"] if "autopromptString" in data else None,
1125
- data["resolvedSearchType"] if "resolvedSearchType" in data else None,
1126
- data["autoDate"] if "autoDate" in data else None,
1186
+ data.get("autopromptString"),
1187
+ data.get("resolvedSearchType"),
1188
+ data.get("autoDate"),
1127
1189
  )
1128
1190
 
1129
1191
  @overload
@@ -1142,13 +1204,13 @@ class Exa:
1142
1204
  exclude_text: Optional[List[str]] = None,
1143
1205
  exclude_source_domain: Optional[bool] = None,
1144
1206
  category: Optional[str] = None,
1207
+ flags: Optional[List[str]] = None,
1145
1208
  livecrawl_timeout: Optional[int] = None,
1146
1209
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1210
+ filter_empty_results: Optional[bool] = None,
1147
1211
  subpages: Optional[int] = None,
1148
1212
  subpage_target: Optional[Union[str, List[str]]] = None,
1149
- filter_empty_results: Optional[bool] = None,
1150
1213
  extras: Optional[ExtrasOptions] = None,
1151
- flags: Optional[List[str]] = None,
1152
1214
  ) -> SearchResponse[ResultWithText]:
1153
1215
  ...
1154
1216
 
@@ -1169,13 +1231,13 @@ class Exa:
1169
1231
  exclude_text: Optional[List[str]] = None,
1170
1232
  exclude_source_domain: Optional[bool] = None,
1171
1233
  category: Optional[str] = None,
1234
+ flags: Optional[List[str]] = None,
1172
1235
  livecrawl_timeout: Optional[int] = None,
1173
1236
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1237
+ filter_empty_results: Optional[bool] = None,
1174
1238
  subpages: Optional[int] = None,
1175
1239
  subpage_target: Optional[Union[str, List[str]]] = None,
1176
- filter_empty_results: Optional[bool] = None,
1177
1240
  extras: Optional[ExtrasOptions] = None,
1178
- flags: Optional[List[str]] = None,
1179
1241
  ) -> SearchResponse[ResultWithText]:
1180
1242
  ...
1181
1243
 
@@ -1196,13 +1258,13 @@ class Exa:
1196
1258
  exclude_text: Optional[List[str]] = None,
1197
1259
  exclude_source_domain: Optional[bool] = None,
1198
1260
  category: Optional[str] = None,
1261
+ flags: Optional[List[str]] = None,
1199
1262
  subpages: Optional[int] = None,
1200
1263
  subpage_target: Optional[Union[str, List[str]]] = None,
1201
1264
  livecrawl_timeout: Optional[int] = None,
1202
1265
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1203
1266
  filter_empty_results: Optional[bool] = None,
1204
1267
  extras: Optional[ExtrasOptions] = None,
1205
- flags: Optional[List[str]] = None,
1206
1268
  ) -> SearchResponse[ResultWithHighlights]:
1207
1269
  ...
1208
1270
 
@@ -1224,13 +1286,13 @@ class Exa:
1224
1286
  exclude_text: Optional[List[str]] = None,
1225
1287
  exclude_source_domain: Optional[bool] = None,
1226
1288
  category: Optional[str] = None,
1289
+ flags: Optional[List[str]] = None,
1227
1290
  livecrawl_timeout: Optional[int] = None,
1228
1291
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1292
+ filter_empty_results: Optional[bool] = None,
1229
1293
  subpages: Optional[int] = None,
1230
1294
  subpage_target: Optional[Union[str, List[str]]] = None,
1231
- filter_empty_results: Optional[bool] = None,
1232
1295
  extras: Optional[ExtrasOptions] = None,
1233
- flags: Optional[List[str]] = None,
1234
1296
  ) -> SearchResponse[ResultWithTextAndHighlights]:
1235
1297
  ...
1236
1298
 
@@ -1251,13 +1313,13 @@ class Exa:
1251
1313
  exclude_text: Optional[List[str]] = None,
1252
1314
  exclude_source_domain: Optional[bool] = None,
1253
1315
  category: Optional[str] = None,
1316
+ flags: Optional[List[str]] = None,
1254
1317
  livecrawl_timeout: Optional[int] = None,
1255
1318
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1319
+ filter_empty_results: Optional[bool] = None,
1256
1320
  subpages: Optional[int] = None,
1257
1321
  subpage_target: Optional[Union[str, List[str]]] = None,
1258
- filter_empty_results: Optional[bool] = None,
1259
1322
  extras: Optional[ExtrasOptions] = None,
1260
- flags: Optional[List[str]] = None,
1261
1323
  ) -> SearchResponse[ResultWithSummary]:
1262
1324
  ...
1263
1325
 
@@ -1279,13 +1341,13 @@ class Exa:
1279
1341
  exclude_text: Optional[List[str]] = None,
1280
1342
  exclude_source_domain: Optional[bool] = None,
1281
1343
  category: Optional[str] = None,
1344
+ flags: Optional[List[str]] = None,
1282
1345
  livecrawl_timeout: Optional[int] = None,
1283
1346
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1347
+ filter_empty_results: Optional[bool] = None,
1284
1348
  subpages: Optional[int] = None,
1285
1349
  subpage_target: Optional[Union[str, List[str]]] = None,
1286
- filter_empty_results: Optional[bool] = None,
1287
1350
  extras: Optional[ExtrasOptions] = None,
1288
- flags: Optional[List[str]] = None,
1289
1351
  ) -> SearchResponse[ResultWithTextAndSummary]:
1290
1352
  ...
1291
1353
 
@@ -1307,13 +1369,13 @@ class Exa:
1307
1369
  exclude_text: Optional[List[str]] = None,
1308
1370
  exclude_source_domain: Optional[bool] = None,
1309
1371
  category: Optional[str] = None,
1310
- livecrawl_timeout: Optional[int] = None,
1372
+ flags: Optional[List[str]] = None,
1311
1373
  subpages: Optional[int] = None,
1312
1374
  subpage_target: Optional[Union[str, List[str]]] = None,
1375
+ livecrawl_timeout: Optional[int] = None,
1313
1376
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1314
1377
  filter_empty_results: Optional[bool] = None,
1315
1378
  extras: Optional[ExtrasOptions] = None,
1316
- flags: Optional[List[str]] = None,
1317
1379
  ) -> SearchResponse[ResultWithHighlightsAndSummary]:
1318
1380
  ...
1319
1381
 
@@ -1336,20 +1398,24 @@ class Exa:
1336
1398
  exclude_text: Optional[List[str]] = None,
1337
1399
  exclude_source_domain: Optional[bool] = None,
1338
1400
  category: Optional[str] = None,
1401
+ flags: Optional[List[str]] = None,
1339
1402
  livecrawl_timeout: Optional[int] = None,
1403
+ livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1404
+ filter_empty_results: Optional[bool] = None,
1340
1405
  subpages: Optional[int] = None,
1341
1406
  subpage_target: Optional[Union[str, List[str]]] = None,
1342
- filter_empty_results: Optional[bool] = None,
1343
- livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
1344
1407
  extras: Optional[ExtrasOptions] = None,
1345
- flags: Optional[List[str]] = None,
1346
1408
  ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
1347
1409
  ...
1348
1410
 
1349
1411
  def find_similar_and_contents(self, url: str, **kwargs):
1350
1412
  options = {k: v for k, v in {"url": url, **kwargs}.items() if v is not None}
1351
1413
  # Default to text if none specified
1352
- if "text" not in options and "highlights" not in options and "summary" not in options:
1414
+ if (
1415
+ "text" not in options
1416
+ and "highlights" not in options
1417
+ and "summary" not in options
1418
+ ):
1353
1419
  options["text"] = True
1354
1420
 
1355
1421
  validate_search_options(
@@ -1379,9 +1445,9 @@ class Exa:
1379
1445
  data = self.request("/findSimilar", options)
1380
1446
  return SearchResponse(
1381
1447
  [Result(**to_snake_case(result)) for result in data["results"]],
1382
- data["autopromptString"] if "autopromptString" in data else None,
1383
- data["resolvedSearchType"] if "resolvedSearchType" in data else None,
1384
- data["autoDate"] if "autoDate" in data else None,
1448
+ data.get("autopromptString"),
1449
+ data.get("resolvedSearchType"),
1450
+ data.get("autoDate"),
1385
1451
  )
1386
1452
 
1387
1453
  def wrap(self, client: OpenAI):
@@ -1455,7 +1521,7 @@ class Exa:
1455
1521
  exa_kwargs=exa_kwargs,
1456
1522
  )
1457
1523
 
1458
- print("Wrapping OpenAI client with Exa functionality.", type(create_with_rag))
1524
+ print("Wrapping OpenAI client with Exa functionality.")
1459
1525
  client.chat.completions.create = create_with_rag # type: ignore
1460
1526
 
1461
1527
  return client
@@ -1499,6 +1565,7 @@ class Exa:
1499
1565
  completion=completion, exa_result=None
1500
1566
  )
1501
1567
 
1568
+ # We do a search_and_contents automatically
1502
1569
  exa_result = self.search_and_contents(query, **exa_kwargs)
1503
1570
  exa_str = format_exa_result(exa_result, max_len=max_len)
1504
1571
  new_messages = add_message_to_messages(completion, messages, exa_str)
@@ -1537,8 +1604,9 @@ class Exa:
1537
1604
  include_text (bool, optional): Whether to include full text in the results. Defaults to False.
1538
1605
 
1539
1606
  Returns:
1540
- Union[AnswerResponse, Iterator[Union[str, List[AnswerResult]]]]: Either an AnswerResponse object containing the answer and sources,
1541
- or an iterator that yields either answer chunks or sources when streaming is enabled.
1607
+ Union[AnswerResponse, Iterator[Union[str, List[AnswerResult]]]]:
1608
+ - If stream=False, returns an AnswerResponse object containing the answer and sources.
1609
+ - If stream=True, returns an iterator that yields either answer chunks or sources.
1542
1610
  """
1543
1611
  options = {
1544
1612
  k: v
@@ -1547,10 +1615,10 @@ class Exa:
1547
1615
  }
1548
1616
  options = to_camel_case(options)
1549
1617
  response = self.request("/answer", options)
1550
-
1618
+
1551
1619
  if stream:
1552
1620
  return response
1553
-
1621
+
1554
1622
  return AnswerResponse(
1555
1623
  response["answer"],
1556
1624
  [AnswerResult(**to_snake_case(result)) for result in response["sources"]]
@@ -1,19 +1,19 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.1
2
2
  Name: exa-py
3
- Version: 1.8.3
3
+ Version: 1.8.4
4
4
  Summary: Python SDK for Exa API.
5
- Author: Exa AI
5
+ Home-page: https://github.com/exa-labs/exa-py
6
+ Author: Exa
6
7
  Author-email: hello@exa.ai
7
- Requires-Python: >=3.9,<4.0
8
- Classifier: Programming Language :: Python :: 3
8
+ Classifier: Development Status :: 5 - Production/Stable
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Typing :: Typed
12
+ Classifier: Programming Language :: Python :: 3.8
9
13
  Classifier: Programming Language :: Python :: 3.9
10
14
  Classifier: Programming Language :: Python :: 3.10
11
15
  Classifier: Programming Language :: Python :: 3.11
12
16
  Classifier: Programming Language :: Python :: 3.12
13
- Classifier: Programming Language :: Python :: 3.13
14
- Requires-Dist: openai (>=1.48,<2.0)
15
- Requires-Dist: requests (>=2.32.3,<3.0.0)
16
- Requires-Dist: typing-extensions (>=4.12.2,<5.0.0)
17
17
  Description-Content-Type: text/markdown
18
18
 
19
19
  # Exa
@@ -102,4 +102,3 @@ exa = Exa(api_key="your-api-key")
102
102
  print(chunk)
103
103
  ```
104
104
 
105
-
@@ -0,0 +1,12 @@
1
+ README.md
2
+ pyproject.toml
3
+ setup.py
4
+ exa_py/__init__.py
5
+ exa_py/api.py
6
+ exa_py/py.typed
7
+ exa_py/utils.py
8
+ exa_py.egg-info/PKG-INFO
9
+ exa_py.egg-info/SOURCES.txt
10
+ exa_py.egg-info/dependency_links.txt
11
+ exa_py.egg-info/requires.txt
12
+ exa_py.egg-info/top_level.txt
@@ -0,0 +1,3 @@
1
+ requests
2
+ typing-extensions
3
+ openai>=1.10.0
@@ -0,0 +1 @@
1
+ exa_py
exa_py-1.8.4/setup.cfg ADDED
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
exa_py-1.8.4/setup.py ADDED
@@ -0,0 +1,30 @@
1
+ from setuptools import setup, find_packages
2
+
3
+ setup(
4
+ name="exa_py",
5
+ version="1.8.4",
6
+ description="Python SDK for Exa API.",
7
+ long_description_content_type="text/markdown",
8
+ long_description=open("README.md").read(),
9
+ author="Exa",
10
+ author_email="hello@exa.ai",
11
+ package_data={"exa_py": ["py.typed"]},
12
+ url="https://github.com/exa-labs/exa-py",
13
+ packages=find_packages(),
14
+ install_requires=[
15
+ "requests",
16
+ "typing-extensions",
17
+ "openai>=1.10.0"
18
+ ],
19
+ classifiers=[
20
+ "Development Status :: 5 - Production/Stable",
21
+ "Intended Audience :: Developers",
22
+ "License :: OSI Approved :: MIT License",
23
+ "Typing :: Typed",
24
+ "Programming Language :: Python :: 3.8",
25
+ "Programming Language :: Python :: 3.9",
26
+ "Programming Language :: Python :: 3.10",
27
+ "Programming Language :: Python :: 3.11",
28
+ "Programming Language :: Python :: 3.12",
29
+ ],
30
+ )
File without changes
File without changes
File without changes
File without changes
File without changes