exa-py 1.8.8__tar.gz → 1.9.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of exa-py might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: exa-py
-Version: 1.8.8
+Version: 1.9.0
 Summary: Python SDK for Exa API.
 Author: Exa AI
 Author-email: hello@exa.ai
@@ -1,37 +1,39 @@
 from __future__ import annotations
-from dataclasses import dataclass
+
 import dataclasses
-from functools import wraps
+import json
+import os
 import re
-import requests
+from dataclasses import dataclass
+from functools import wraps
 from typing import (
     Callable,
+    Dict,
+    Generic,
     Iterable,
+    Iterator,
     List,
+    Literal,
     Optional,
-    Dict,
-    Generic,
     TypeVar,
-    overload,
     Union,
-    Literal,
-    get_origin,
     get_args,
-    Iterator,
+    get_origin,
+    overload,
 )
-from typing_extensions import TypedDict
-import json

+import requests
 from openai import OpenAI
 from openai.types.chat.chat_completion_message_param import ChatCompletionMessageParam
 from openai.types.chat_model import ChatModel
+from typing_extensions import TypedDict
+
 from exa_py.utils import (
     ExaOpenAICompletion,
     add_message_to_messages,
     format_exa_result,
     maybe_get_query,
 )
-import os

 is_beta = os.getenv("IS_BETA") == "True"

@@ -45,6 +47,12 @@ def snake_to_camel(snake_str: str) -> str:
     Returns:
         str: The string converted to camelCase format.
     """
+    # Handle special cases where the field should start with non-alphanumeric characters
+    if snake_str == "schema_":
+        return "$schema"
+    if snake_str == "not_":
+        return "not"
+
     components = snake_str.split("_")
     return components[0] + "".join(x.title() for x in components[1:])

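For reference, the new special cases change the camelCase mapping like this (a minimal sketch; the import path exa_py.api is an assumption about where snake_to_camel is defined):

    from exa_py.api import snake_to_camel  # assumed module path

    print(snake_to_camel("schema_"))       # -> "$schema"
    print(snake_to_camel("not_"))          # -> "not"
    print(snake_to_camel("include_text"))  # -> "includeText" (unchanged behavior)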
@@ -102,17 +110,29 @@ def to_snake_case(data: dict) -> dict:
 SEARCH_OPTIONS_TYPES = {
     "query": [str],  # The query string.
     "num_results": [int],  # Number of results (Default: 10, Max for basic: 10).
-    "include_domains": [list],  # Domains to search from; exclusive with 'exclude_domains'.
+    "include_domains": [
+        list
+    ],  # Domains to search from; exclusive with 'exclude_domains'.
     "exclude_domains": [list],  # Domains to omit; exclusive with 'include_domains'.
     "start_crawl_date": [str],  # Results after this crawl date. ISO 8601 format.
     "end_crawl_date": [str],  # Results before this crawl date. ISO 8601 format.
-    "start_published_date": [str],  # Results after this publish date; excludes links with no date. ISO 8601 format.
-    "end_published_date": [str],  # Results before this publish date; excludes links with no date. ISO 8601 format.
-    "include_text": [list],  # Must be present in webpage text. (One string, up to 5 words)
-    "exclude_text": [list],  # Must not be present in webpage text. (One string, up to 5 words)
+    "start_published_date": [
+        str
+    ],  # Results after this publish date; excludes links with no date. ISO 8601 format.
+    "end_published_date": [
+        str
+    ],  # Results before this publish date; excludes links with no date. ISO 8601 format.
+    "include_text": [
+        list
+    ],  # Must be present in webpage text. (One string, up to 5 words)
+    "exclude_text": [
+        list
+    ],  # Must not be present in webpage text. (One string, up to 5 words)
     "use_autoprompt": [bool],  # Convert query to Exa. (Default: false)
     "type": [str],  # 'keyword', 'neural', or 'auto' (Default: auto)
-    "category": [str],  # A data category to focus on: 'company', 'research paper', 'news', 'pdf', 'github', 'tweet', 'personal site', 'linkedin profile', 'financial report'
+    "category": [
+        str
+    ],  # A data category to focus on: 'company', 'research paper', 'news', 'pdf', 'github', 'tweet', 'personal site', 'linkedin profile', 'financial report'
     "flags": [list],  # Experimental flags array for Exa usage.
     "moderation": [bool],  # If true, moderate search results for safety.
 }
@@ -188,6 +208,25 @@ def is_valid_type(value, expected_type):
     return False  # For any other case


+def parse_cost_dollars(raw: dict) -> Optional[CostDollars]:
+    """
+    Parse the costDollars JSON into a CostDollars object, or return None if missing/invalid.
+    """
+    if not raw:
+        return None
+
+    total = raw.get("total")
+    if total is None:
+        # If there's no total, treat as absent
+        return None
+
+    # search and contents can be dictionaries or None
+    search_part = raw.get("search")
+    contents_part = raw.get("contents")
+
+    return CostDollars(total=total, search=search_part, contents=contents_part)
+
+
 class TextContentsOptions(TypedDict, total=False):
     """A class representing the options that you can specify when requesting text

@@ -214,15 +253,38 @@ class HighlightsContentsOptions(TypedDict, total=False):
     highlights_per_url: int


+class JSONSchema(TypedDict, total=False):
+    """Represents a JSON Schema definition used for structured summary output.
+    To learn more visit https://json-schema.org/overview/what-is-jsonschema.
+    """
+    schema_: str  # This will be converted to "$schema" in JSON
+    title: str
+    description: str
+    type: Literal["object", "array", "string", "number", "boolean", "null", "integer"]
+    properties: Dict[str, JSONSchema]
+    items: Union[JSONSchema, List[JSONSchema]]
+    required: List[str]
+    enum: List
+    additionalProperties: Union[bool, JSONSchema]
+    definitions: Dict[str, JSONSchema]
+    patternProperties: Dict[str, JSONSchema]
+    allOf: List[JSONSchema]
+    anyOf: List[JSONSchema]
+    oneOf: List[JSONSchema]
+    not_: JSONSchema  # This will be converted to "not" in JSON
+
+
 class SummaryContentsOptions(TypedDict, total=False):
     """A class representing the options that you can specify when requesting summary

     Attributes:
         query (str): The query string for the summary. Summary will bias towards answering the query.
+        schema (JSONSchema): JSON schema for structured output from summary.
     """

     query: str
-
+    schema: JSONSchema
+


 class ExtrasOptions(TypedDict, total=False):
@@ -231,6 +293,30 @@ class ExtrasOptions(TypedDict, total=False):
     image_links: int


+class CostDollarsSearch(TypedDict, total=False):
+    """Represents the cost breakdown for search."""
+
+    neural: float
+    keyword: float
+
+
+class CostDollarsContents(TypedDict, total=False):
+    """Represents the cost breakdown for contents."""
+
+    text: float
+    highlights: float
+    summary: float
+
+
+@dataclass
+class CostDollars:
+    """Represents costDollars field in the API response."""
+
+    total: float
+    search: CostDollarsSearch = None
+    contents: CostDollarsContents = None
+
+
 @dataclass
 class _Result:
     """A class representing the base fields of a search result.
@@ -515,7 +601,8 @@ class AnswerResult:
         author (str, optional): If available, the author of the content.
         text (str, optional): The full page text from each search result.
     """
-    id: str
+
+    id: str
     url: str
     title: Optional[str] = None
     published_date: Optional[str] = None
@@ -523,12 +610,12 @@ class AnswerResult:
     text: Optional[str] = None

     def __init__(self, **kwargs):
-        self.id = kwargs['id']
-        self.url = kwargs['url']
-        self.title = kwargs.get('title')
-        self.published_date = kwargs.get('published_date')
-        self.author = kwargs.get('author')
-        self.text = kwargs.get('text')
+        self.id = kwargs["id"]
+        self.url = kwargs["url"]
+        self.title = kwargs.get("title")
+        self.published_date = kwargs.get("published_date")
+        self.author = kwargs.get("author")
+        self.text = kwargs.get("text")

     def __str__(self):
         return (
@@ -539,18 +626,20 @@ class AnswerResult:
             f"Author: {self.author}\n"
             f"Text: {self.text}\n\n"
         )
-
+
+
 @dataclass
 class StreamChunk:
     """A class representing a single chunk of streaming data.
-
+
     Attributes:
         content (Optional[str]): The partial text content of the answer
         citations (Optional[List[AnswerResult]]): List of citations if provided in this chunk
     """
+
     content: Optional[str] = None
     citations: Optional[List[AnswerResult]] = None
-
+
     def has_data(self) -> bool:
         """Check if this chunk contains any data."""
         return self.content is not None or self.citations is not None
@@ -594,6 +683,7 @@ class AnswerResponse:

 class StreamAnswerResponse:
     """A class representing a streaming answer response."""
+
     def __init__(self, raw_response: requests.Response):
         self._raw_response = raw_response
         self._ensure_ok_status()
@@ -621,8 +711,14 @@ class StreamAnswerResponse:
                 if "delta" in chunk["choices"][0]:
                     content = chunk["choices"][0]["delta"].get("content")

-            if "citations" in chunk and chunk["citations"] and chunk["citations"] != "null":
-                citations = [AnswerResult(**to_snake_case(s)) for s in chunk["citations"]]
+            if (
+                "citations" in chunk
+                and chunk["citations"]
+                and chunk["citations"] != "null"
+            ):
+                citations = [
+                    AnswerResult(**to_snake_case(s)) for s in chunk["citations"]
+                ]

             stream_chunk = StreamChunk(content=content, citations=citations)
             if stream_chunk.has_data():
@@ -651,6 +747,7 @@ class SearchResponse(Generic[T]):
     autoprompt_string: Optional[str]
     resolved_search_type: Optional[str]
     auto_date: Optional[str]
+    cost_dollars: Optional[CostDollars] = None

     def __str__(self):
         output = "\n\n".join(str(result) for result in self.results)
@@ -658,7 +755,12 @@ class SearchResponse(Generic[T]):
             output += f"\n\nAutoprompt String: {self.autoprompt_string}"
         if self.resolved_search_type:
             output += f"\nResolved Search Type: {self.resolved_search_type}"
-
+        if self.cost_dollars:
+            output += f"\nCostDollars: total={self.cost_dollars.total}"
+            if self.cost_dollars.search:
+                output += f"\n - search: {self.cost_dollars.search}"
+            if self.cost_dollars.contents:
+                output += f"\n - contents: {self.cost_dollars.contents}"
         return output

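With cost_dollars now carried on SearchResponse, callers can inspect per-request cost roughly like this (a sketch; the query and the printed fields are illustrative):

    from exa_py import Exa

    exa = Exa(api_key="YOUR_API_KEY")  # placeholder key

    response = exa.search("hottest AI startups", num_results=5)
    if response.cost_dollars:
        print("total:", response.cost_dollars.total)
        print("search:", response.cost_dollars.search)      # e.g. {"neural": ...}
        print("contents:", response.cost_dollars.contents)  # e.g. {"text": ...}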
@@ -686,7 +788,7 @@ class Exa:
         self,
         api_key: Optional[str],
         base_url: str = "https://api.exa.ai",
-        user_agent: str = "exa-py 1.8.8",
+        user_agent: str = "exa-py 1.9.0",
     ):
         """Initialize the Exa client with the provided API key and optional base URL and user agent.

@@ -720,12 +822,16 @@ class Exa:
             ValueError: If the request fails (non-200 status code).
         """
         if data.get("stream"):
-            res = requests.post(self.base_url + endpoint, json=data, headers=self.headers, stream=True)
+            res = requests.post(
+                self.base_url + endpoint, json=data, headers=self.headers, stream=True
+            )
             return res

         res = requests.post(self.base_url + endpoint, json=data, headers=self.headers)
         if res.status_code != 200:
-            raise ValueError(f"Request failed with status code {res.status_code}: {res.text}")
+            raise ValueError(
+                f"Request failed with status code {res.status_code}: {res.text}"
+            )
         return res.json()

     def search(
@@ -773,11 +879,13 @@ class Exa:
         validate_search_options(options, SEARCH_OPTIONS_TYPES)
         options = to_camel_case(options)
         data = self.request("/search", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
         return SearchResponse(
             [Result(**to_snake_case(result)) for result in data["results"]],
             data["autopromptString"] if "autopromptString" in data else None,
             data["resolvedSearchType"] if "resolvedSearchType" in data else None,
             data["autoDate"] if "autoDate" in data else None,
+            cost_dollars=cost_dollars,
         )

     @overload
@@ -805,8 +913,7 @@ class Exa:
         subpages: Optional[int] = None,
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithText]:
-        ...
+    ) -> SearchResponse[ResultWithText]: ...

     @overload
     def search_and_contents(
@@ -834,8 +941,7 @@ class Exa:
         filter_empty_results: Optional[bool] = None,
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithText]:
-        ...
+    ) -> SearchResponse[ResultWithText]: ...

     @overload
     def search_and_contents(
@@ -863,8 +969,7 @@ class Exa:
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
         filter_empty_results: Optional[bool] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithHighlights]:
-        ...
+    ) -> SearchResponse[ResultWithHighlights]: ...

     @overload
     def search_and_contents(
@@ -893,8 +998,7 @@ class Exa:
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
         filter_empty_results: Optional[bool] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithTextAndHighlights]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndHighlights]: ...

     @overload
     def search_and_contents(
@@ -922,8 +1026,7 @@ class Exa:
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
         filter_empty_results: Optional[bool] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithSummary]:
-        ...
+    ) -> SearchResponse[ResultWithSummary]: ...

     @overload
     def search_and_contents(
@@ -952,8 +1055,7 @@ class Exa:
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
         filter_empty_results: Optional[bool] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithTextAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndSummary]: ...

     @overload
     def search_and_contents(
@@ -982,8 +1084,7 @@ class Exa:
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
         filter_empty_results: Optional[bool] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithHighlightsAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithHighlightsAndSummary]: ...

     @overload
     def search_and_contents(
@@ -1013,8 +1114,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         filter_empty_results: Optional[bool] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]: ...

     def search_and_contents(self, query: str, **kwargs):
         options = {k: v for k, v in {"query": query, **kwargs}.items() if v is not None}
@@ -1053,11 +1153,13 @@ class Exa:
         )
         options = to_camel_case(options)
         data = self.request("/search", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
         return SearchResponse(
             [Result(**to_snake_case(result)) for result in data["results"]],
             data["autopromptString"] if "autopromptString" in data else None,
             data["resolvedSearchType"] if "resolvedSearchType" in data else None,
             data["autoDate"] if "autoDate" in data else None,
+            cost_dollars=cost_dollars,
         )

     @overload
@@ -1071,8 +1173,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
         flags: Optional[List[str]] = None,
-    ) -> SearchResponse[ResultWithText]:
-        ...
+    ) -> SearchResponse[ResultWithText]: ...

     @overload
     def get_contents(
@@ -1087,8 +1188,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
         flags: Optional[List[str]] = None,
-    ) -> SearchResponse[ResultWithText]:
-        ...
+    ) -> SearchResponse[ResultWithText]: ...

     @overload
     def get_contents(
@@ -1103,8 +1203,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
         flags: Optional[List[str]] = None,
-    ) -> SearchResponse[ResultWithHighlights]:
-        ...
+    ) -> SearchResponse[ResultWithHighlights]: ...

     @overload
     def get_contents(
@@ -1120,8 +1219,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
         flags: Optional[List[str]] = None,
-    ) -> SearchResponse[ResultWithTextAndHighlights]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndHighlights]: ...

     @overload
     def get_contents(
@@ -1136,8 +1234,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
         flags: Optional[List[str]] = None,
-    ) -> SearchResponse[ResultWithSummary]:
-        ...
+    ) -> SearchResponse[ResultWithSummary]: ...

     @overload
     def get_contents(
@@ -1153,8 +1250,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
         flags: Optional[List[str]] = None,
-    ) -> SearchResponse[ResultWithTextAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndSummary]: ...

     @overload
     def get_contents(
@@ -1170,8 +1266,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
         flags: Optional[List[str]] = None,
-    ) -> SearchResponse[ResultWithHighlightsAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithHighlightsAndSummary]: ...

     @overload
     def get_contents(
@@ -1188,8 +1283,7 @@ class Exa:
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
         flags: Optional[List[str]] = None,
-    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]: ...

     def get_contents(self, urls: Union[str, List[str], List[_Result]], **kwargs):
         options = {
@@ -1211,11 +1305,13 @@ class Exa:
         )
         options = to_camel_case(options)
         data = self.request("/contents", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
         return SearchResponse(
             [Result(**to_snake_case(result)) for result in data["results"]],
             data.get("autopromptString"),
             data.get("resolvedSearchType"),
             data.get("autoDate"),
+            cost_dollars=cost_dollars,
         )

     def find_similar(
@@ -1259,11 +1355,13 @@ class Exa:
         validate_search_options(options, FIND_SIMILAR_OPTIONS_TYPES)
         options = to_camel_case(options)
         data = self.request("/findSimilar", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
         return SearchResponse(
             [Result(**to_snake_case(result)) for result in data["results"]],
             data.get("autopromptString"),
             data.get("resolvedSearchType"),
             data.get("autoDate"),
+            cost_dollars=cost_dollars,
         )

     @overload
@@ -1289,8 +1387,7 @@ class Exa:
         subpages: Optional[int] = None,
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithText]:
-        ...
+    ) -> SearchResponse[ResultWithText]: ...

     @overload
     def find_similar_and_contents(
@@ -1316,8 +1413,7 @@ class Exa:
         subpages: Optional[int] = None,
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithText]:
-        ...
+    ) -> SearchResponse[ResultWithText]: ...

     @overload
     def find_similar_and_contents(
@@ -1343,8 +1439,7 @@ class Exa:
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
         filter_empty_results: Optional[bool] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithHighlights]:
-        ...
+    ) -> SearchResponse[ResultWithHighlights]: ...

     @overload
     def find_similar_and_contents(
@@ -1371,8 +1466,7 @@ class Exa:
         subpages: Optional[int] = None,
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithTextAndHighlights]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndHighlights]: ...

     @overload
     def find_similar_and_contents(
@@ -1398,8 +1492,7 @@ class Exa:
         subpages: Optional[int] = None,
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithSummary]:
-        ...
+    ) -> SearchResponse[ResultWithSummary]: ...

     @overload
     def find_similar_and_contents(
@@ -1426,8 +1519,7 @@ class Exa:
         subpages: Optional[int] = None,
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithTextAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndSummary]: ...

     @overload
     def find_similar_and_contents(
@@ -1454,8 +1546,7 @@ class Exa:
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
         filter_empty_results: Optional[bool] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithHighlightsAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithHighlightsAndSummary]: ...

     @overload
     def find_similar_and_contents(
@@ -1483,8 +1574,7 @@ class Exa:
         subpages: Optional[int] = None,
         subpage_target: Optional[Union[str, List[str]]] = None,
         extras: Optional[ExtrasOptions] = None,
-    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
-        ...
+    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]: ...

     def find_similar_and_contents(self, url: str, **kwargs):
         options = {k: v for k, v in {"url": url, **kwargs}.items() if v is not None}
@@ -1521,18 +1611,20 @@ class Exa:
         )
         options = to_camel_case(options)
         data = self.request("/findSimilar", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
         return SearchResponse(
             [Result(**to_snake_case(result)) for result in data["results"]],
             data.get("autopromptString"),
             data.get("resolvedSearchType"),
             data.get("autoDate"),
+            cost_dollars=cost_dollars,
         )

     def wrap(self, client: OpenAI):
         """Wrap an OpenAI client with Exa functionality.

-        After wrapping, any call to `client.chat.completions.create` will be intercepted
-        and enhanced with Exa RAG functionality. To disable Exa for a specific call,
+        After wrapping, any call to `client.chat.completions.create` will be intercepted
+        and enhanced with Exa RAG functionality. To disable Exa for a specific call,
         set `use_exa="none"` in the `create` method.

         Args:
@@ -1662,8 +1754,7 @@ class Exa:
         stream: Optional[bool] = False,
         text: Optional[bool] = False,
         model: Optional[Literal["exa", "exa-pro"]] = None,
-    ) -> Union[AnswerResponse, StreamAnswerResponse]:
-        ...
+    ) -> Union[AnswerResponse, StreamAnswerResponse]: ...

     def answer(
         self,
@@ -1692,17 +1783,13 @@ class Exa:
                 "Please use `stream_answer(...)` for streaming."
             )

-        options = {
-            k: v
-            for k, v in locals().items()
-            if k != "self" and v is not None
-        }
+        options = {k: v for k, v in locals().items() if k != "self" and v is not None}
         options = to_camel_case(options)
         response = self.request("/answer", options)

         return AnswerResponse(
             response["answer"],
-            [AnswerResult(**to_snake_case(result)) for result in response["citations"]]
+            [AnswerResult(**to_snake_case(result)) for result in response["citations"]],
         )

     def stream_answer(
@@ -1723,13 +1810,8 @@ class Exa:
             StreamAnswerResponse: An object that can be iterated over to retrieve (partial text, partial citations).
             Each iteration yields a tuple of (Optional[str], Optional[List[AnswerResult]]).
         """
-        options = {
-            k: v
-            for k, v in locals().items()
-            if k != "self" and v is not None
-        }
+        options = {k: v for k, v in locals().items() if k != "self" and v is not None}
         options = to_camel_case(options)
         options["stream"] = True
         raw_response = self.request("/answer", options)
         return StreamAnswerResponse(raw_response)
-
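For reference, consuming a streaming answer with the classes shown in this diff would look roughly like this (a sketch; the question is illustrative, and it assumes iteration yields StreamChunk objects as the __iter__ changes above suggest):

    from exa_py import Exa

    exa = Exa(api_key="YOUR_API_KEY")  # placeholder key

    stream = exa.stream_answer("What is the capital of France?")
    for chunk in stream:  # partial content and citations arrive incrementally
        if chunk.content:
            print(chunk.content, end="", flush=True)
        if chunk.citations:
            for citation in chunk.citations:
                print("\nSource:", citation.url)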
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "exa-py"
-version = "1.8.8"
+version = "1.9.0"
 description = "Python SDK for Exa API."
 authors = ["Exa AI <hello@exa.ai>"]
 readme = "README.md"
4 files without changes