exa-py 1.8.7__py3-none-any.whl → 1.8.9__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries, as they appear in those registries. It is provided for informational purposes only.

Potentially problematic release.


This version of exa-py might be problematic.

exa_py/api.py CHANGED
@@ -1,37 +1,39 @@
  from __future__ import annotations
- from dataclasses import dataclass
+
  import dataclasses
- from functools import wraps
+ import json
+ import os
  import re
- import requests
+ from dataclasses import dataclass
+ from functools import wraps
  from typing import (
  Callable,
+ Dict,
+ Generic,
  Iterable,
+ Iterator,
  List,
+ Literal,
  Optional,
- Dict,
- Generic,
  TypeVar,
- overload,
  Union,
- Literal,
- get_origin,
  get_args,
- Iterator,
+ get_origin,
+ overload,
  )
- from typing_extensions import TypedDict
- import json

+ import requests
  from openai import OpenAI
  from openai.types.chat.chat_completion_message_param import ChatCompletionMessageParam
  from openai.types.chat_model import ChatModel
+ from typing_extensions import TypedDict
+
  from exa_py.utils import (
  ExaOpenAICompletion,
  add_message_to_messages,
  format_exa_result,
  maybe_get_query,
  )
- import os

  is_beta = os.getenv("IS_BETA") == "True"

@@ -102,17 +104,29 @@ def to_snake_case(data: dict) -> dict:
  SEARCH_OPTIONS_TYPES = {
  "query": [str], # The query string.
  "num_results": [int], # Number of results (Default: 10, Max for basic: 10).
- "include_domains": [list], # Domains to search from; exclusive with 'exclude_domains'.
+ "include_domains": [
+ list
+ ], # Domains to search from; exclusive with 'exclude_domains'.
  "exclude_domains": [list], # Domains to omit; exclusive with 'include_domains'.
  "start_crawl_date": [str], # Results after this crawl date. ISO 8601 format.
  "end_crawl_date": [str], # Results before this crawl date. ISO 8601 format.
- "start_published_date": [str], # Results after this publish date; excludes links with no date. ISO 8601 format.
- "end_published_date": [str], # Results before this publish date; excludes links with no date. ISO 8601 format.
- "include_text": [list], # Must be present in webpage text. (One string, up to 5 words)
- "exclude_text": [list], # Must not be present in webpage text. (One string, up to 5 words)
+ "start_published_date": [
+ str
+ ], # Results after this publish date; excludes links with no date. ISO 8601 format.
+ "end_published_date": [
+ str
+ ], # Results before this publish date; excludes links with no date. ISO 8601 format.
+ "include_text": [
+ list
+ ], # Must be present in webpage text. (One string, up to 5 words)
+ "exclude_text": [
+ list
+ ], # Must not be present in webpage text. (One string, up to 5 words)
  "use_autoprompt": [bool], # Convert query to Exa. (Default: false)
  "type": [str], # 'keyword', 'neural', or 'auto' (Default: auto)
- "category": [str], # A data category to focus on: 'company', 'research paper', 'news', 'pdf', 'github', 'tweet', 'personal site', 'linkedin profile', 'financial report'
+ "category": [
+ str
+ ], # A data category to focus on: 'company', 'research paper', 'news', 'pdf', 'github', 'tweet', 'personal site', 'linkedin profile', 'financial report'
  "flags": [list], # Experimental flags array for Exa usage.
  "moderation": [bool], # If true, moderate search results for safety.
  }
@@ -188,6 +202,25 @@ def is_valid_type(value, expected_type):
  return False # For any other case


+ def parse_cost_dollars(raw: dict) -> Optional[CostDollars]:
+ """
+ Parse the costDollars JSON into a CostDollars object, or return None if missing/invalid.
+ """
+ if not raw:
+ return None
+
+ total = raw.get("total")
+ if total is None:
+ # If there's no total, treat as absent
+ return None
+
+ # search and contents can be dictionaries or None
+ search_part = raw.get("search")
+ contents_part = raw.get("contents")
+
+ return CostDollars(total=total, search=search_part, contents=contents_part)
+
+
  class TextContentsOptions(TypedDict, total=False):
  """A class representing the options that you can specify when requesting text

@@ -231,6 +264,30 @@ class ExtrasOptions(TypedDict, total=False):
  image_links: int


+ class CostDollarsSearch(TypedDict, total=False):
+ """Represents the cost breakdown for search."""
+
+ neural: float
+ keyword: float
+
+
+ class CostDollarsContents(TypedDict, total=False):
+ """Represents the cost breakdown for contents."""
+
+ text: float
+ highlights: float
+ summary: float
+
+
+ @dataclass
+ class CostDollars:
+ """Represents costDollars field in the API response."""
+
+ total: float
+ search: CostDollarsSearch = None
+ contents: CostDollarsContents = None
+
+
  @dataclass
  class _Result:
  """A class representing the base fields of a search result.
@@ -515,7 +572,8 @@ class AnswerResult:
  author (str, optional): If available, the author of the content.
  text (str, optional): The full page text from each search result.
  """
- id: str
+
+ id: str
  url: str
  title: Optional[str] = None
  published_date: Optional[str] = None
@@ -523,12 +581,12 @@ class AnswerResult:
  text: Optional[str] = None

  def __init__(self, **kwargs):
- self.id = kwargs['id']
- self.url = kwargs['url']
- self.title = kwargs.get('title')
- self.published_date = kwargs.get('published_date')
- self.author = kwargs.get('author')
- self.text = kwargs.get('text')
+ self.id = kwargs["id"]
+ self.url = kwargs["url"]
+ self.title = kwargs.get("title")
+ self.published_date = kwargs.get("published_date")
+ self.author = kwargs.get("author")
+ self.text = kwargs.get("text")

  def __str__(self):
  return (
@@ -539,18 +597,20 @@ class AnswerResult:
  f"Author: {self.author}\n"
  f"Text: {self.text}\n\n"
  )
-
+
+
  @dataclass
  class StreamChunk:
  """A class representing a single chunk of streaming data.
-
+
  Attributes:
  content (Optional[str]): The partial text content of the answer
  citations (Optional[List[AnswerResult]]): List of citations if provided in this chunk
  """
+
  content: Optional[str] = None
  citations: Optional[List[AnswerResult]] = None
-
+
  def has_data(self) -> bool:
  """Check if this chunk contains any data."""
  return self.content is not None or self.citations is not None
@@ -594,6 +654,7 @@ class AnswerResponse:

  class StreamAnswerResponse:
  """A class representing a streaming answer response."""
+
  def __init__(self, raw_response: requests.Response):
  self._raw_response = raw_response
  self._ensure_ok_status()
@@ -621,8 +682,14 @@ class StreamAnswerResponse:
  if "delta" in chunk["choices"][0]:
  content = chunk["choices"][0]["delta"].get("content")

- if "citations" in chunk and chunk["citations"] and chunk["citations"] != "null":
- citations = [AnswerResult(**to_snake_case(s)) for s in chunk["citations"]]
+ if (
+ "citations" in chunk
+ and chunk["citations"]
+ and chunk["citations"] != "null"
+ ):
+ citations = [
+ AnswerResult(**to_snake_case(s)) for s in chunk["citations"]
+ ]

  stream_chunk = StreamChunk(content=content, citations=citations)
  if stream_chunk.has_data():
@@ -651,6 +718,7 @@ class SearchResponse(Generic[T]):
  autoprompt_string: Optional[str]
  resolved_search_type: Optional[str]
  auto_date: Optional[str]
+ cost_dollars: Optional[CostDollars] = None

  def __str__(self):
  output = "\n\n".join(str(result) for result in self.results)
@@ -658,7 +726,12 @@ class SearchResponse(Generic[T]):
  output += f"\n\nAutoprompt String: {self.autoprompt_string}"
  if self.resolved_search_type:
  output += f"\nResolved Search Type: {self.resolved_search_type}"
-
+ if self.cost_dollars:
+ output += f"\nCostDollars: total={self.cost_dollars.total}"
+ if self.cost_dollars.search:
+ output += f"\n - search: {self.cost_dollars.search}"
+ if self.cost_dollars.contents:
+ output += f"\n - contents: {self.cost_dollars.contents}"
  return output


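Downstream, every SearchResponse built by the search, contents, and find-similar endpoints now carries this cost_dollars field. A minimal sketch of reading it, assuming exa-py 1.8.9 and an EXA_API_KEY environment variable (the query is illustrative):

```
import os

from exa_py import Exa

exa = Exa(api_key=os.environ["EXA_API_KEY"])
response = exa.search("hottest AI startups", num_results=5)

# cost_dollars is None when the API response has no costDollars (or no total).
if response.cost_dollars is not None:
    print("total:", response.cost_dollars.total)
    print("search breakdown:", response.cost_dollars.search)
    print("contents breakdown:", response.cost_dollars.contents)

print(response)  # __str__ appends the CostDollars lines added above
```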
@@ -686,7 +759,7 @@ class Exa:
  self,
  api_key: Optional[str],
  base_url: str = "https://api.exa.ai",
- user_agent: str = "exa-py 1.8.7",
+ user_agent: str = "exa-py 1.8.9",
  ):
  """Initialize the Exa client with the provided API key and optional base URL and user agent.

@@ -720,12 +793,16 @@ class Exa:
  ValueError: If the request fails (non-200 status code).
  """
  if data.get("stream"):
- res = requests.post(self.base_url + endpoint, json=data, headers=self.headers, stream=True)
+ res = requests.post(
+ self.base_url + endpoint, json=data, headers=self.headers, stream=True
+ )
  return res

  res = requests.post(self.base_url + endpoint, json=data, headers=self.headers)
  if res.status_code != 200:
- raise ValueError(f"Request failed with status code {res.status_code}: {res.text}")
+ raise ValueError(
+ f"Request failed with status code {res.status_code}: {res.text}"
+ )
  return res.json()

  def search(
@@ -773,11 +850,13 @@ class Exa:
  validate_search_options(options, SEARCH_OPTIONS_TYPES)
  options = to_camel_case(options)
  data = self.request("/search", options)
+ cost_dollars = parse_cost_dollars(data.get("costDollars"))
  return SearchResponse(
  [Result(**to_snake_case(result)) for result in data["results"]],
  data["autopromptString"] if "autopromptString" in data else None,
  data["resolvedSearchType"] if "resolvedSearchType" in data else None,
  data["autoDate"] if "autoDate" in data else None,
+ cost_dollars=cost_dollars,
  )

  @overload
@@ -805,8 +884,7 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithText]:
- ...
+ ) -> SearchResponse[ResultWithText]: ...

  @overload
  def search_and_contents(
@@ -826,14 +904,15 @@ class Exa:
  use_autoprompt: Optional[bool] = None,
  type: Optional[str] = None,
  category: Optional[str] = None,
+ flags: Optional[List[str]] = None,
+ moderation: Optional[bool] = None,
  subpages: Optional[int] = None,
  livecrawl_timeout: Optional[int] = None,
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithText]:
- ...
+ ) -> SearchResponse[ResultWithText]: ...

  @overload
  def search_and_contents(
@@ -861,8 +940,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithHighlights]:
- ...
+ ) -> SearchResponse[ResultWithHighlights]: ...

  @overload
  def search_and_contents(
@@ -891,8 +969,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithTextAndHighlights]:
- ...
+ ) -> SearchResponse[ResultWithTextAndHighlights]: ...

  @overload
  def search_and_contents(
@@ -920,8 +997,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithSummary]:
- ...
+ ) -> SearchResponse[ResultWithSummary]: ...

  @overload
  def search_and_contents(
@@ -950,8 +1026,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithTextAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithTextAndSummary]: ...

  @overload
  def search_and_contents(
@@ -974,12 +1049,13 @@ class Exa:
  category: Optional[str] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
+ flags: Optional[List[str]] = None,
+ moderation: Optional[bool] = None,
  livecrawl_timeout: Optional[int] = None,
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithHighlightsAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithHighlightsAndSummary]: ...

  @overload
  def search_and_contents(
@@ -1002,14 +1078,14 @@ class Exa:
  type: Optional[str] = None,
  category: Optional[str] = None,
  flags: Optional[List[str]] = None,
+ moderation: Optional[bool] = None,
  livecrawl_timeout: Optional[int] = None,
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
- filter_empty_results: Optional[bool] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
+ filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]: ...

  def search_and_contents(self, query: str, **kwargs):
  options = {k: v for k, v in {"query": query, **kwargs}.items() if v is not None}
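The overloads above add flags and moderation to search_and_contents. A minimal usage sketch, assuming exa-py 1.8.9, an EXA_API_KEY environment variable, and an illustrative query (text=True follows the SDK's existing contents pattern and is not shown in this hunk):

```
import os

from exa_py import Exa

exa = Exa(api_key=os.environ["EXA_API_KEY"])

response = exa.search_and_contents(
    "open problems in distributed consensus",
    num_results=3,
    text=True,        # include page text, as in the existing contents options
    moderation=True,  # newly accepted here: moderate results for safety
)
for result in response.results:
    print(result.url)
```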
@@ -1048,11 +1124,13 @@ class Exa:
  )
  options = to_camel_case(options)
  data = self.request("/search", options)
+ cost_dollars = parse_cost_dollars(data.get("costDollars"))
  return SearchResponse(
  [Result(**to_snake_case(result)) for result in data["results"]],
  data["autopromptString"] if "autopromptString" in data else None,
  data["resolvedSearchType"] if "resolvedSearchType" in data else None,
  data["autoDate"] if "autoDate" in data else None,
+ cost_dollars=cost_dollars,
  )

  @overload
@@ -1066,8 +1144,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
  flags: Optional[List[str]] = None,
- ) -> SearchResponse[ResultWithText]:
- ...
+ ) -> SearchResponse[ResultWithText]: ...

  @overload
  def get_contents(
@@ -1082,8 +1159,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
  flags: Optional[List[str]] = None,
- ) -> SearchResponse[ResultWithText]:
- ...
+ ) -> SearchResponse[ResultWithText]: ...

  @overload
  def get_contents(
@@ -1098,8 +1174,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
  flags: Optional[List[str]] = None,
- ) -> SearchResponse[ResultWithHighlights]:
- ...
+ ) -> SearchResponse[ResultWithHighlights]: ...

  @overload
  def get_contents(
@@ -1115,8 +1190,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
  flags: Optional[List[str]] = None,
- ) -> SearchResponse[ResultWithTextAndHighlights]:
- ...
+ ) -> SearchResponse[ResultWithTextAndHighlights]: ...

  @overload
  def get_contents(
@@ -1131,8 +1205,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
  flags: Optional[List[str]] = None,
- ) -> SearchResponse[ResultWithSummary]:
- ...
+ ) -> SearchResponse[ResultWithSummary]: ...

  @overload
  def get_contents(
@@ -1148,8 +1221,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
  flags: Optional[List[str]] = None,
- ) -> SearchResponse[ResultWithTextAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithTextAndSummary]: ...

  @overload
  def get_contents(
@@ -1165,8 +1237,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
  flags: Optional[List[str]] = None,
- ) -> SearchResponse[ResultWithHighlightsAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithHighlightsAndSummary]: ...

  @overload
  def get_contents(
@@ -1183,8 +1254,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
  flags: Optional[List[str]] = None,
- ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]: ...

  def get_contents(self, urls: Union[str, List[str], List[_Result]], **kwargs):
  options = {
@@ -1206,11 +1276,13 @@ class Exa:
  )
  options = to_camel_case(options)
  data = self.request("/contents", options)
+ cost_dollars = parse_cost_dollars(data.get("costDollars"))
  return SearchResponse(
  [Result(**to_snake_case(result)) for result in data["results"]],
  data.get("autopromptString"),
  data.get("resolvedSearchType"),
  data.get("autoDate"),
+ cost_dollars=cost_dollars,
  )

  def find_similar(
@@ -1254,11 +1326,13 @@ class Exa:
  validate_search_options(options, FIND_SIMILAR_OPTIONS_TYPES)
  options = to_camel_case(options)
  data = self.request("/findSimilar", options)
+ cost_dollars = parse_cost_dollars(data.get("costDollars"))
  return SearchResponse(
  [Result(**to_snake_case(result)) for result in data["results"]],
  data.get("autopromptString"),
  data.get("resolvedSearchType"),
  data.get("autoDate"),
+ cost_dollars=cost_dollars,
  )

  @overload
@@ -1284,8 +1358,7 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithText]:
- ...
+ ) -> SearchResponse[ResultWithText]: ...

  @overload
  def find_similar_and_contents(
@@ -1311,8 +1384,7 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithText]:
- ...
+ ) -> SearchResponse[ResultWithText]: ...

  @overload
  def find_similar_and_contents(
@@ -1338,8 +1410,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithHighlights]:
- ...
+ ) -> SearchResponse[ResultWithHighlights]: ...

  @overload
  def find_similar_and_contents(
@@ -1366,8 +1437,7 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithTextAndHighlights]:
- ...
+ ) -> SearchResponse[ResultWithTextAndHighlights]: ...

  @overload
  def find_similar_and_contents(
@@ -1393,8 +1463,7 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithSummary]:
- ...
+ ) -> SearchResponse[ResultWithSummary]: ...

  @overload
  def find_similar_and_contents(
@@ -1421,8 +1490,7 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithTextAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithTextAndSummary]: ...

  @overload
  def find_similar_and_contents(
@@ -1449,8 +1517,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithHighlightsAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithHighlightsAndSummary]: ...

  @overload
  def find_similar_and_contents(
@@ -1478,8 +1545,7 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
- ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
- ...
+ ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]: ...

  def find_similar_and_contents(self, url: str, **kwargs):
  options = {k: v for k, v in {"url": url, **kwargs}.items() if v is not None}
@@ -1516,18 +1582,20 @@ class Exa:
  )
  options = to_camel_case(options)
  data = self.request("/findSimilar", options)
+ cost_dollars = parse_cost_dollars(data.get("costDollars"))
  return SearchResponse(
  [Result(**to_snake_case(result)) for result in data["results"]],
  data.get("autopromptString"),
  data.get("resolvedSearchType"),
  data.get("autoDate"),
+ cost_dollars=cost_dollars,
  )

  def wrap(self, client: OpenAI):
  """Wrap an OpenAI client with Exa functionality.

- After wrapping, any call to `client.chat.completions.create` will be intercepted
- and enhanced with Exa RAG functionality. To disable Exa for a specific call,
+ After wrapping, any call to `client.chat.completions.create` will be intercepted
+ and enhanced with Exa RAG functionality. To disable Exa for a specific call,
  set `use_exa="none"` in the `create` method.

  Args:
@@ -1656,8 +1724,8 @@ class Exa:
  *,
  stream: Optional[bool] = False,
  text: Optional[bool] = False,
- ) -> Union[AnswerResponse, StreamAnswerResponse]:
- ...
+ model: Optional[Literal["exa", "exa-pro"]] = None,
+ ) -> Union[AnswerResponse, StreamAnswerResponse]: ...

  def answer(
  self,
@@ -1665,12 +1733,14 @@ class Exa:
  *,
  stream: Optional[bool] = False,
  text: Optional[bool] = False,
+ model: Optional[Literal["exa", "exa-pro"]] = None,
  ) -> Union[AnswerResponse, StreamAnswerResponse]:
  """Generate an answer to a query using Exa's search and LLM capabilities.

  Args:
  query (str): The query to answer.
  text (bool, optional): Whether to include full text in the results. Defaults to False.
+ model (str, optional): The model to use for answering. Either "exa" or "exa-pro". Defaults to None.

  Returns:
  AnswerResponse: An object containing the answer and citations.
@@ -1684,17 +1754,13 @@ class Exa:
  "Please use `stream_answer(...)` for streaming."
  )

- options = {
- k: v
- for k, v in locals().items()
- if k != "self" and v is not None
- }
+ options = {k: v for k, v in locals().items() if k != "self" and v is not None}
  options = to_camel_case(options)
  response = self.request("/answer", options)

  return AnswerResponse(
  response["answer"],
- [AnswerResult(**to_snake_case(result)) for result in response["citations"]]
+ [AnswerResult(**to_snake_case(result)) for result in response["citations"]],
  )

  def stream_answer(
@@ -1702,24 +1768,21 @@ class Exa:
  query: str,
  *,
  text: bool = False,
+ model: Optional[Literal["exa", "exa-pro"]] = None,
  ) -> StreamAnswerResponse:
  """Generate a streaming answer response.

  Args:
  query (str): The query to answer.
  text (bool): Whether to include full text in the results. Defaults to False.
+ model (str, optional): The model to use for answering. Either "exa" or "exa-pro". Defaults to None.

  Returns:
  StreamAnswerResponse: An object that can be iterated over to retrieve (partial text, partial citations).
  Each iteration yields a tuple of (Optional[str], Optional[List[AnswerResult]]).
  """
- options = {
- k: v
- for k, v in locals().items()
- if k != "self" and v is not None
- }
+ options = {k: v for k, v in locals().items() if k != "self" and v is not None}
  options = to_camel_case(options)
  options["stream"] = True
  raw_response = self.request("/answer", options)
  return StreamAnswerResponse(raw_response)
-
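These hunks thread the new model selector through answer() and stream_answer(). A minimal usage sketch, assuming exa-py 1.8.9, an EXA_API_KEY environment variable, and illustrative queries (the streaming loop mirrors the example.py removed later in this diff):

```
import os

from exa_py import Exa

exa = Exa(api_key=os.environ["EXA_API_KEY"])

# One-shot answer routed to the "exa-pro" model, with full result text included.
response = exa.answer("What is the tallest mountain on Earth?", text=True, model="exa-pro")
print(response)

# Streaming variant: iterate the StreamAnswerResponse and print partial chunks.
for chunk in exa.stream_answer("Recent milestones in fusion energy", model="exa"):
    print(chunk, end="", flush=True)
```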
@@ -1,20 +1,23 @@
- Metadata-Version: 2.3
- Name: exa-py
- Version: 1.8.7
+ Metadata-Version: 2.1
+ Name: exa_py
+ Version: 1.8.9
  Summary: Python SDK for Exa API.
- Author: Exa AI
+ Home-page: https://github.com/exa-labs/exa-py
+ Author: Exa
  Author-email: hello@exa.ai
- Requires-Python: >=3.9,<4.0
- Classifier: Programming Language :: Python :: 3
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Typing :: Typed
+ Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Classifier: Programming Language :: Python :: 3.13
- Requires-Dist: openai (>=1.48,<2.0)
- Requires-Dist: requests (>=2.32.3,<3.0.0)
- Requires-Dist: typing-extensions (>=4.12.2,<5.0.0)
  Description-Content-Type: text/markdown
+ Requires-Dist: requests
+ Requires-Dist: typing-extensions
+ Requires-Dist: openai >=1.10.0

  # Exa

@@ -91,8 +94,8 @@ exa = Exa(api_key="your-api-key")
  # basic answer
  response = exa.answer("This is a query to answer a question")

- # answer with full text
- response = exa.answer("This is a query to answer a question", text=True)
+ # answer with full text, using the exa-pro model (sends 2 expanded queries to exa search)
+ response = exa.answer("This is a query to answer a question", text=True, model="exa-pro")

  # answer with streaming
  response = exa.stream_answer("This is a query to answer:")
@@ -103,4 +106,3 @@ exa = Exa(api_key="your-api-key")

  ```

-
@@ -0,0 +1,8 @@
+ exa_py/__init__.py,sha256=1selemczpRm1y8V9cWNm90LARnU1jbtyp-Qpx3c7cTw,28
+ exa_py/api.py,sha256=BSZ-uSRYfCyHWBMUpV4SV-2yaWustPS7xc63gArbJSE,65156
+ exa_py/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ exa_py/utils.py,sha256=Rc1FJjoR9LQ7L_OJM91Sd1GNkbHjcLyEvJENhRix6gc,2405
+ exa_py-1.8.9.dist-info/METADATA,sha256=O_ivBX4PUV2yWQPL69nSmXlqP6S0BTlTYiVMpz4nLAM,3522
+ exa_py-1.8.9.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ exa_py-1.8.9.dist-info/top_level.txt,sha256=Mfkmscdw9HWR1PtVhU1gAiVo6DHu_tyiVdb89gfZBVI,7
+ exa_py-1.8.9.dist-info/RECORD,,
@@ -1,4 +1,5 @@
  Wheel-Version: 1.0
- Generator: poetry-core 2.0.1
+ Generator: bdist_wheel (0.43.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
+
@@ -0,0 +1 @@
+ exa_py
exa_py/example.py DELETED
@@ -1,11 +0,0 @@
- from exa_py import Exa
-
- exa = Exa(api_key = "995da727-7b91-42d0-b282-326daa290b39")
-
- response = exa.stream_answer(
- "How close are we to meeting aliens?",
- text=True
- )
-
- for chunk in response:
- print(chunk, end='', flush=True)
@@ -1,8 +0,0 @@
- exa_py/__init__.py,sha256=1selemczpRm1y8V9cWNm90LARnU1jbtyp-Qpx3c7cTw,28
- exa_py/api.py,sha256=5uuKVZpLtXQvxEi_X7W_x8JYqTQlsO8R2Ln0IRAQCrk,62663
- exa_py/example.py,sha256=V2uZvFTQFLVr61lVQ_HbZz8G8TFT6Ic44-TTE5ixzBk,235
- exa_py/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- exa_py/utils.py,sha256=Rc1FJjoR9LQ7L_OJM91Sd1GNkbHjcLyEvJENhRix6gc,2405
- exa_py-1.8.7.dist-info/METADATA,sha256=xbby0b9LVLEzzBMu1mGivh0ZO0gDm_xjvPoF219riLk,3337
- exa_py-1.8.7.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
- exa_py-1.8.7.dist-info/RECORD,,