exa-py 1.7.1.tar.gz → 1.7.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of exa-py might be problematic.

@@ -1,23 +1,20 @@
- Metadata-Version: 2.1
- Name: exa_py
- Version: 1.7.1
+ Metadata-Version: 2.3
+ Name: exa-py
+ Version: 1.7.3
  Summary: Python SDK for Exa API.
- Home-page: https://github.com/exa-labs/exa-py
- Author: Exa
+ Author: Exa AI
  Author-email: hello@exa.ai
- Classifier: Development Status :: 5 - Production/Stable
- Classifier: Intended Audience :: Developers
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Typing :: Typed
- Classifier: Programming Language :: Python :: 3.8
+ Requires-Python: >=3.9,<4.0
+ Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Dist: openai (>=1.48,<2.0)
+ Requires-Dist: requests (>=2.32.3,<3.0.0)
+ Requires-Dist: typing-extensions (>=4.12.2,<5.0.0)
  Description-Content-Type: text/markdown
- Requires-Dist: requests
- Requires-Dist: typing-extensions
- Requires-Dist: openai>=1.10.0

  # Exa

@@ -81,14 +78,15 @@ exa = Exa(api_key="your-api-key")
  results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)

  # get text contents
- results = exa.get_contents(["ids"])
+ results = exa.get_contents(["urls"])

  # get highlights
- results = exa.get_contents(["ids"], highlights=True)
+ results = exa.get_contents(["urls"], highlights=True)

  # get contents with contents options
- results = exa.get_contents(["ids"],
+ results = exa.get_contents(["urls"],
  text={"include_html_tags": True, "max_characters": 1000},
  highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
  ```

+
@@ -60,13 +60,13 @@ exa = Exa(api_key="your-api-key")
  results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)

  # get text contents
- results = exa.get_contents(["ids"])
+ results = exa.get_contents(["urls"])

  # get highlights
- results = exa.get_contents(["ids"], highlights=True)
+ results = exa.get_contents(["urls"], highlights=True)

  # get contents with contents options
- results = exa.get_contents(["ids"],
+ results = exa.get_contents(["urls"],
  text={"include_html_tags": True, "max_characters": 1000},
  highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
  ```
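The breaking change in this release is visible in the README hunks above: `get_contents` is now keyed by URLs instead of the temporary result IDs used through 1.7.1. A minimal before/after sketch (the API key and URL are placeholders):

```python
from exa_py import Exa

exa = Exa(api_key="your-api-key")

# 1.7.1 and earlier: pass result IDs from a previous search
# contents = exa.get_contents([result.id for result in exa.search("exa.ai").results])

# 1.7.3: pass the URLs you want contents for
contents = exa.get_contents(["https://exa.ai"], text=True)
```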
@@ -16,7 +16,7 @@ from typing import (
  Union,
  Literal,
  get_origin,
- get_args
+ get_args,
  )
  from typing_extensions import TypedDict

@@ -46,6 +46,7 @@ def snake_to_camel(snake_str: str) -> str:
  components = snake_str.split("_")
  return components[0] + "".join(x.title() for x in components[1:])

+
  def to_camel_case(data: dict) -> dict:
  """
  Convert keys in a dictionary from snake_case to camelCase recursively.
@@ -62,6 +63,7 @@ def to_camel_case(data: dict) -> dict:
  if v is not None
  }

+
  def camel_to_snake(camel_str: str) -> str:
  """Convert camelCase string to snake_case.

@@ -74,6 +76,7 @@ def camel_to_snake(camel_str: str) -> str:
  snake_str = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", camel_str)
  return re.sub("([a-z0-9])([A-Z])", r"\1_\2", snake_str).lower()

+
  def to_snake_case(data: dict) -> dict:
  """
  Convert keys in a dictionary from camelCase to snake_case recursively.
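As context for the helper hunks above: these functions translate option names between the SDK's snake_case and the Exa API's camelCase. A quick round-trip illustration, using the function bodies exactly as they appear in this diff:

```python
import re

def snake_to_camel(snake_str: str) -> str:
    components = snake_str.split("_")
    return components[0] + "".join(x.title() for x in components[1:])

def camel_to_snake(camel_str: str) -> str:
    snake_str = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", camel_str)
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", snake_str).lower()

print(snake_to_camel("start_published_date"))  # startPublishedDate
print(camel_to_snake("startPublishedDate"))    # start_published_date
```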
@@ -89,32 +92,22 @@ def to_snake_case(data: dict) -> dict:
  for k, v in data.items()
  }

+
  SEARCH_OPTIONS_TYPES = {
  "query": [str], # The query string.
  "num_results": [int], # Number of results (Default: 10, Max for basic: 10).
- "include_domains": [
- list
- ], # Domains to search from; exclusive with 'exclude_domains'.
+ "include_domains": [list], # Domains to search from; exclusive with 'exclude_domains'.
  "exclude_domains": [list], # Domains to omit; exclusive with 'include_domains'.
  "start_crawl_date": [str], # Results after this crawl date. ISO 8601 format.
  "end_crawl_date": [str], # Results before this crawl date. ISO 8601 format.
- "start_published_date": [
- str
- ], # Results after this publish date; excludes links with no date. ISO 8601 format.
- "end_published_date": [
- str
- ], # Results before this publish date; excludes links with no date. ISO 8601 format.
- "include_text": [
- list
- ], # list of strings that must be present in webpage text of results. Currently, only one string is supported, up to 5 words.
- "exclude_text": [list], # list of strings that must not be present in webpage text of result. Currently, only one string is supported, up to 5 words.
- "use_autoprompt": [bool], # Convert query to Exa (Higher latency, Default: false).
- "type": [
- str
- ], # 'keyword' or 'neural' (Default: neural). Choose 'neural' for high-quality, semantically relevant content in popular domains. 'Keyword' is for specific, local, or obscure queries.
- "category": [
- str
- ], # A data category to focus on, with higher comprehensivity and data cleanliness. Currently, the only category is company.
+ "start_published_date": [str], # Results after this publish date; excludes links with no date. ISO 8601 format.
+ "end_published_date": [str], # Results before this publish date; excludes links with no date. ISO 8601 format.
+ "include_text": [list], # Must be present in webpage text. (One string, up to 5 words)
+ "exclude_text": [list], # Must not be present in webpage text. (One string, up to 5 words)
+ "use_autoprompt": [bool], # Convert query to Exa. (Default: false)
+ "type": [str], # 'keyword', 'neural', or 'auto' (Default: auto).'neural' uses embeddings search, 'keyword' is SERP and 'auto' decides the best search type based on your query
+ "category": [str], # A data category to focus on: 'company', 'research paper', 'news', 'pdf', 'github', 'tweet', 'personal site', 'linkedin profile', 'financial report'
+ "flags": [list], # Experimental flags array for Exa usage.
  }

  FIND_SIMILAR_OPTIONS_TYPES = {
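The option table above captures the headline API changes in 1.7.3: search type now defaults to 'auto', the single 'company' category grows into a full list, and an experimental flags option appears. A hedged usage sketch (the query and category values are illustrative):

```python
from exa_py import Exa

exa = Exa(api_key="your-api-key")

# 'auto' (the new default) chooses between neural and keyword search per query
results = exa.search(
    "hottest AI startups",
    type="auto",
    category="company",
    num_results=10,
)
```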
@@ -130,13 +123,14 @@ FIND_SIMILAR_OPTIONS_TYPES = {
  "exclude_text": [list],
  "exclude_source_domain": [bool],
  "category": [str],
+ "flags": [list], # Experimental flags array for Exa usage.
  }

  # the livecrawl options
  LIVECRAWL_OPTIONS = Literal["always", "fallback", "never", "auto"]

  CONTENTS_OPTIONS_TYPES = {
- "ids": [list],
+ "urls": [list],
  "text": [dict, bool],
  "highlights": [dict, bool],
  "summary": [dict, bool],
@@ -144,17 +138,16 @@ CONTENTS_OPTIONS_TYPES = {
  "livecrawl_timeout": [int],
  "livecrawl": [LIVECRAWL_OPTIONS],
  "filter_empty_results": [bool],
+ "flags": [list], # We allow flags to be passed here too
  }

  CONTENTS_ENDPOINT_OPTIONS_TYPES = {
  "subpages": [int],
  "subpage_target": [str, list],
  "extras": [dict],
+ "flags": [list], # We allow flags to be passed here too
  }

- # FOR BETA OPTIONS
- # if is_beta:
-

  def validate_search_options(
  options: Dict[str, Optional[object]], expected: dict
@@ -179,6 +172,7 @@ def validate_search_options(
  f"Invalid value for option '{key}': {value}. Expected one of {expected_types}"
  )

+
  def is_valid_type(value, expected_type):
  if get_origin(expected_type) is Literal:
  return value in get_args(expected_type)
@@ -186,6 +180,7 @@ def is_valid_type(value, expected_type):
  return isinstance(value, expected_type)
  return False # For any other case

+
  class TextContentsOptions(TypedDict, total=False):
  """A class representing the options that you can specify when requesting text

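For orientation: every public method funnels its options through validate_search_options, which checks each key against the type tables above and raises ValueError on a mismatch. A small sketch of the failure mode, assuming the module-level names can be imported from exa_py.api:

```python
from exa_py.api import SEARCH_OPTIONS_TYPES, validate_search_options

try:
    # num_results must be an int, so a string is rejected
    validate_search_options({"num_results": "ten"}, SEARCH_OPTIONS_TYPES)
except ValueError as err:
    print(err)  # Invalid value for option 'num_results': ten. Expected one of ...
```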
@@ -202,15 +197,16 @@ class HighlightsContentsOptions(TypedDict, total=False):
  """A class representing the options that you can specify when requesting highlights

  Attributes:
- query (str): The query string for the highlights. if not specified, defaults to a generic summarization query.
+ query (str): The query string for the highlights.
  num_sentences (int): Size of highlights to return, in sentences. Default: 5
- highlights_per_url (int): The number of highlights to return per URL. Default: 1
+ highlights_per_url (int): Number of highlights to return per URL. Default: 1
  """

  query: str
  num_sentences: int
  highlights_per_url: int

+
  class SummaryContentsOptions(TypedDict, total=False):
  """A class representing the options that you can specify when requesting summary

@@ -221,15 +217,12 @@ class SummaryContentsOptions(TypedDict, total=False):
  query: str

  class ExtrasOptions(TypedDict, total=False):
- """A class representing the options that you can specify when requesting summary
-
- Attributes:
- query (str): The query string for the summary. Summary will bias towards answering the query.
- """
+ """A class representing additional extraction fields (e.g. links, images)"""

  links: int
  image_links: int

+
  @dataclass
  class _Result:
  """A class representing the base fields of a search result.
@@ -238,12 +231,13 @@ class _Result:
  title (str): The title of the search result.
  url (str): The URL of the search result.
  id (str): The temporary ID for the document.
- score (float, optional): A number from 0 to 1 representing similarity between the query/url and the result.
+ score (float, optional): A number from 0 to 1 representing similarity.
  published_date (str, optional): An estimate of the creation date, from parsing HTML content.
- author (str, optional): If available, the author of the content.
- image (str, optional): If available, a URL to an image associated with the content.
- subpages (List[_Result], optional): If available, a list of Exa contents results for a page's subpages (e.g. tesla.com --subpage--> shop.tesla.com)
- extras (Dict, optional): Additional metadata associated with the result; currently supports returning links and image links extracted from the text content
+ author (str, optional): The author of the content (if available).
+ image (str, optional): A URL to an image associated with the content (if available).
+ favicon (str, optional): A URL to the favicon (if available).
+ subpages (List[_Result], optional): Subpages of main page
+ extras (Dict, optional): Additional metadata; e.g. links, images.
  """

  url: str
@@ -258,15 +252,15 @@ class _Result:
  extras: Optional[Dict] = None

  def __init__(self, **kwargs):
- self.url = kwargs['url']
- self.id = kwargs['id']
- self.title = kwargs.get('title')
- self.score = kwargs.get('score')
- self.published_date = kwargs.get('published_date')
- self.author = kwargs.get('author')
- self.image = kwargs.get('image')
- self.favicon = kwargs.get('favicon')
- self.subpages = kwargs.get('subpages')
+ self.url = kwargs["url"]
+ self.id = kwargs["id"]
+ self.title = kwargs.get("title")
+ self.score = kwargs.get("score")
+ self.published_date = kwargs.get("published_date")
+ self.author = kwargs.get("author")
+ self.image = kwargs.get("image")
+ self.favicon = kwargs.get("favicon")
+ self.subpages = kwargs.get("subpages")
  self.extras = kwargs.get("extras")

  def __str__(self):
@@ -278,7 +272,8 @@ class _Result:
  f"Published Date: {self.published_date}\n"
  f"Author: {self.author}\n"
  f"Image: {self.image}\n"
- f"Extras {self.extras}\n"
+ f"Favicon: {self.favicon}\n"
+ f"Extras: {self.extras}\n"
  f"Subpages: {self.subpages}\n"
  )

@@ -286,13 +281,13 @@
  @dataclass
  class Result(_Result):
  """
- A class representing a search result with optional text and highlights.
+ A class representing a search result with optional text, highlights, summary.

  Attributes:
- text (str, optional): The text of the search result page.
- highlights (List[str], optional): The highlights of the search result.
- highlight_scores (List[float], optional): The scores of the highlights of the search result.
- summary (str, optional): The summary of the search result.
+ text (str, optional)
+ highlights (List[str], optional)
+ highlight_scores (List[float], optional)
+ summary (str, optional)
  """

  text: Optional[str] = None
@@ -302,10 +297,10 @@ class Result(_Result):

  def __init__(self, **kwargs):
  super().__init__(**kwargs)
- self.text = kwargs.get('text')
- self.highlights = kwargs.get('highlights')
- self.highlight_scores = kwargs.get('highlight_scores')
- self.summary = kwargs.get('summary')
+ self.text = kwargs.get("text")
+ self.highlights = kwargs.get("highlights")
+ self.highlight_scores = kwargs.get("highlight_scores")
+ self.summary = kwargs.get("summary")

  def __str__(self):
  base_str = super().__str__()
@@ -330,7 +325,7 @@ class ResultWithText(_Result):

  def __init__(self, **kwargs):
  super().__init__(**kwargs)
- self.text = kwargs['text']
+ self.text = kwargs["text"]

  def __str__(self):
  base_str = super().__str__()
@@ -343,8 +338,8 @@
  A class representing a search result with highlights present.

  Attributes:
- highlights (List[str]): The highlights of the search result.
- highlight_scores (List[float]): The scores of the highlights of the search result.
+ highlights (List[str])
+ highlight_scores (List[float])
  """

  highlights: List[str] = dataclasses.field(default_factory=list)
@@ -352,9 +347,8 @@ class ResultWithHighlights(_Result):

  def __init__(self, **kwargs):
  super().__init__(**kwargs)
- self.highlights = kwargs['highlights']
- self.highlight_scores = kwargs['highlight_scores']
-
+ self.highlights = kwargs["highlights"]
+ self.highlight_scores = kwargs["highlight_scores"]

  def __str__(self):
  base_str = super().__str__()
@@ -370,9 +364,9 @@ class ResultWithTextAndHighlights(_Result):
  A class representing a search result with text and highlights present.

  Attributes:
- text (str): The text of the search result page.
- highlights (List[str): The highlights of the search result.
- highlight_scores (List[float]): The scores of the highlights of the search result.
+ text (str)
+ highlights (List[str])
+ highlight_scores (List[float])
  """

  text: str = dataclasses.field(default_factory=str)
@@ -380,10 +374,10 @@ class ResultWithTextAndHighlights(_Result):
  highlight_scores: List[float] = dataclasses.field(default_factory=list)

  def __init__(self, **kwargs):
- super.__init__(**kwargs)
- self.text = kwargs['text']
- self.highlights = kwargs['highlights']
- self.highlight_scores = kwargs['highlight_scores']
+ super().__init__(**kwargs)
+ self.text = kwargs["text"]
+ self.highlights = kwargs["highlights"]
+ self.highlight_scores = kwargs["highlight_scores"]

  def __str__(self):
  base_str = super().__str__()
@@ -393,33 +387,35 @@ class ResultWithTextAndHighlights(_Result):
  f"Highlight Scores: {self.highlight_scores}\n"
  )

+
  @dataclass
  class ResultWithSummary(_Result):
  """
  A class representing a search result with summary present.

  Attributes:
- summary (str): The summary of the search result.
+ summary (str)
  """

  summary: str = dataclasses.field(default_factory=str)

  def __init__(self, **kwargs):
  super().__init__(**kwargs)
- self.summary = kwargs['summary']
+ self.summary = kwargs["summary"]

  def __str__(self):
  base_str = super().__str__()
  return base_str + f"Summary: {self.summary}\n"

+
  @dataclass
  class ResultWithTextAndSummary(_Result):
  """
  A class representing a search result with text and summary present.

  Attributes:
- text (str): The text of the search result page.
- summary (str): The summary of the search result.
+ text (str)
+ summary (str)
  """

  text: str = dataclasses.field(default_factory=str)
@@ -427,22 +423,23 @@ class ResultWithTextAndSummary(_Result):

  def __init__(self, **kwargs):
  super().__init__(**kwargs)
- self.text = kwargs['text']
- self.summary = kwargs['summary']
+ self.text = kwargs["text"]
+ self.summary = kwargs["summary"]

  def __str__(self):
  base_str = super().__str__()
  return base_str + f"Text: {self.text}\n" + f"Summary: {self.summary}\n"

+
  @dataclass
  class ResultWithHighlightsAndSummary(_Result):
  """
  A class representing a search result with highlights and summary present.

  Attributes:
- highlights (List[str]): The highlights of the search result.
- highlight_scores (List[float]): The scores of the highlights of the search result.
- summary (str): The summary of the search result.
+ highlights (List[str])
+ highlight_scores (List[float])
+ summary (str)
  """

  highlights: List[str] = dataclasses.field(default_factory=list)
@@ -451,9 +448,9 @@ class ResultWithHighlightsAndSummary(_Result):

  def __init__(self, **kwargs):
  super().__init__(**kwargs)
- self.highlights = kwargs['highlights']
- self.highlight_scores = kwargs['highlight_scores']
- self.summary = kwargs['summary']
+ self.highlights = kwargs["highlights"]
+ self.highlight_scores = kwargs["highlight_scores"]
+ self.summary = kwargs["summary"]

  def __str__(self):
  base_str = super().__str__()
@@ -463,16 +460,17 @@ class ResultWithHighlightsAndSummary(_Result):
  f"Summary: {self.summary}\n"
  )

+
  @dataclass
  class ResultWithTextAndHighlightsAndSummary(_Result):
  """
  A class representing a search result with text, highlights, and summary present.

  Attributes:
- text (str): The text of the search result page.
- highlights (List[str]): The highlights of the search result.
- highlight_scores (List[float]): The scores of the highlights of the search result.
- summary (str): The summary of the search result.
+ text (str)
+ highlights (List[str])
+ highlight_scores (List[float])
+ summary (str)
  """

  text: str = dataclasses.field(default_factory=str)
@@ -482,10 +480,10 @@ class ResultWithTextAndHighlightsAndSummary(_Result):

  def __init__(self, **kwargs):
  super().__init__(**kwargs)
- self.text = kwargs['text']
- self.highlights = kwargs['highlights']
- self.highlight_scores = kwargs['highlight_scores']
- self.summary = kwargs['summary']
+ self.text = kwargs["text"]
+ self.highlights = kwargs["highlights"]
+ self.highlight_scores = kwargs["highlight_scores"]
+ self.summary = kwargs["summary"]

  def __str__(self):
  base_str = super().__str__()
@@ -496,6 +494,7 @@ class ResultWithTextAndHighlightsAndSummary(_Result):
  f"Summary: {self.summary}\n"
  )

+
  T = TypeVar("T")


@@ -505,9 +504,9 @@ class SearchResponse(Generic[T]):

  Attributes:
  results (List[Result]): A list of search results.
- autoprompt_string (str, optional): The Exa query created by the autoprompt functionality.
- auto_date (str, optional): The date the autoprompt determines for filtering results to the ones you want.
- resolved_search_type (str, optional): What "auto" search resolved to. "neural" or "keyword".
+ autoprompt_string (str, optional): The Exa query created by autoprompt.
+ resolved_search_type (str, optional): 'neural' or 'keyword' if auto.
+ auto_date (str, optional): A date for filtering if autoprompt found one.
  """

  results: List[T]
@@ -549,7 +548,7 @@ class Exa:
  self,
  api_key: Optional[str],
  base_url: str = "https://api.exa.ai",
- user_agent: str = "exa-py 1.7.1",
+ user_agent: str = "exa-py 1.7.3",
  ):
  """Initialize the Exa client with the provided API key and optional base URL and user agent.

@@ -563,7 +562,7 @@ class Exa:
  api_key = os.environ.get("EXA_API_KEY")
  if api_key is None:
  raise ValueError(
- "API key must be provided as argument or in EXA_API_KEY environment variable"
+ "API key must be provided as an argument or in EXA_API_KEY environment variable"
  )
  self.base_url = base_url
  self.headers = {"x-api-key": api_key, "User-Agent": user_agent}
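The constructor hunks confirm the key-resolution order: an explicit api_key argument wins, then the EXA_API_KEY environment variable, and finally a ValueError. A sketch:

```python
import os
from exa_py import Exa

os.environ["EXA_API_KEY"] = "your-api-key"  # placeholder value

exa = Exa(api_key=None)  # falls back to EXA_API_KEY
# With the variable unset, Exa(api_key=None) raises:
# ValueError: API key must be provided as an argument or in EXA_API_KEY environment variable
```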
@@ -592,25 +591,28 @@ class Exa:
  use_autoprompt: Optional[bool] = None,
  type: Optional[str] = None,
  category: Optional[str] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[_Result]:
- """Perform a search with a Exa prompt-engineered query and retrieve a list of relevant results.
+ """Perform a search with a prompt-engineered query to retrieve relevant results.

  Args:
  query (str): The query string.
- num_results (int, optional): Number of search results to return. Defaults to 10.
- include_domains (List[str], optional): List of domains to include in the search.
- exclude_domains (List[str], optional): List of domains to exclude in the search.
- start_crawl_date (str, optional): Results will only include links crawled after this date.
- end_crawl_date (str, optional): Results will only include links crawled before this date.
- start_published_date (str, optional): Results will only include links with a published date after this date.
- end_published_date (str, optional): Results will only include links with a published date before this date.
- include_text (List[str], optional): List of strings that must be present in the webpage text of results. Currently, only one string is supported, up to 5 words.
- exclude_text (List[str], optional): List of strings that must not be present in the webpage text of results. Currently, only one string is supported, up to 5 words.
- use_autoprompt (bool, optional): If true, convert query to a Exa query. Defaults to False.
- type (str, optional): The type of search, 'keyword' or 'neural'. Defaults to "neural".
- category (str, optional): A data category to focus on, with higher comprehensivity and data cleanliness. Currently, the only category is company.
+ num_results (int, optional): Number of search results to return (default 10).
+ include_domains (List[str], optional): Domains to include in the search.
+ exclude_domains (List[str], optional): Domains to exclude from the search.
+ start_crawl_date (str, optional): Only links crawled after this date.
+ end_crawl_date (str, optional): Only links crawled before this date.
+ start_published_date (str, optional): Only links published after this date.
+ end_published_date (str, optional): Only links published before this date.
+ include_text (List[str], optional): Strings that must appear in the page text.
+ exclude_text (List[str], optional): Strings that must not appear in the page text.
+ use_autoprompt (bool, optional): Convert query to Exa (default False).
+ type (str, optional): 'keyword' or 'neural' (default 'neural').
+ category (str, optional): e.g. 'company'
+ flags (List[str], optional): Experimental flags for Exa usage.
+
  Returns:
- SearchResponse: The response containing search results and optional autoprompt string.
+ SearchResponse: The response containing search results, etc.
  """
  options = {k: v for k, v in locals().items() if k != "self" and v is not None}
  validate_search_options(options, SEARCH_OPTIONS_TYPES)
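Note the new flags parameter threaded through search and, per the later hunks, every other endpoint in this release. The diff labels it experimental and documents no flag values, so the one below is purely hypothetical:

```python
from exa_py import Exa

exa = Exa(api_key="your-api-key")

# "some-experimental-flag" is a made-up placeholder; real flag names are not
# documented anywhere in this diff
results = exa.search("climbing shoes", flags=["some-experimental-flag"])
```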
@@ -645,6 +647,7 @@ class Exa:
  filter_empty_results: Optional[bool] = None,
  subpages: Optional[int] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithText]:
  ...

@@ -671,6 +674,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithText]:
  ...

@@ -692,12 +696,13 @@ class Exa:
  use_autoprompt: Optional[bool] = None,
  type: Optional[str] = None,
  category: Optional[str] = None,
- livecrawl_timeout: Optional[int] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
+ livecrawl_timeout: Optional[int] = None,
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithHighlights]:
  ...

@@ -721,11 +726,12 @@ class Exa:
  type: Optional[str] = None,
  category: Optional[str] = None,
  livecrawl_timeout: Optional[int] = None,
+ livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
- livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndHighlights]:
  ...

@@ -747,12 +753,13 @@ class Exa:
  use_autoprompt: Optional[bool] = None,
  type: Optional[str] = None,
  category: Optional[str] = None,
- livecrawl_timeout: Optional[int] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
+ livecrawl_timeout: Optional[int] = None,
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithSummary]:
  ...

@@ -781,6 +788,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndSummary]:
  ...

@@ -803,12 +811,13 @@ class Exa:
  use_autoprompt: Optional[bool] = None,
  type: Optional[str] = None,
  category: Optional[str] = None,
- livecrawl_timeout: Optional[int] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
+ livecrawl_timeout: Optional[int] = None,
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithHighlightsAndSummary]:
  ...
@@ -838,21 +847,41 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
  ...

  def search_and_contents(self, query: str, **kwargs):
- options = {
- k: v
- for k, v in {"query": query, **kwargs}.items()
- if k != "self" and v is not None
- }
- if "text" not in options and "highlights" not in options and "summary" not in options and "extras" not in options:
+ options = {k: v for k, v in {"query": query, **kwargs}.items() if v is not None}
+ # If user didn't ask for any particular content, default to text
+ if (
+ "text" not in options
+ and "highlights" not in options
+ and "summary" not in options
+ and "extras" not in options
+ ):
  options["text"] = True
+
  validate_search_options(
- options, {**SEARCH_OPTIONS_TYPES, **CONTENTS_OPTIONS_TYPES, **CONTENTS_ENDPOINT_OPTIONS_TYPES}
+ options,
+ {**SEARCH_OPTIONS_TYPES, **CONTENTS_OPTIONS_TYPES, **CONTENTS_ENDPOINT_OPTIONS_TYPES},
+ )
+
+ # Nest the appropriate fields under "contents"
+ options = nest_fields(
+ options,
+ [
+ "text",
+ "highlights",
+ "summary",
+ "subpages",
+ "subpage_target",
+ "livecrawl",
+ "livecrawl_timeout",
+ "extras",
+ ],
+ "contents",
  )
- options = nest_fields(options, ["text", "highlights", "summary", "subpages", "subpage_target", "livecrawl", "livecrawl_timeout", "extras"], "contents")
  options = to_camel_case(options)
  data = self.request("/search", options)
  return SearchResponse(
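The rewritten body above leans on nest_fields to move content-related options under a single "contents" key before the request is sent. The helper itself is not shown in this diff, so here is a minimal sketch of the transformation it appears to perform, under the assumption that it simply relocates the named keys:

```python
def nest_fields_sketch(data: dict, fields: list, new_key: str) -> dict:
    """Move any of `fields` present in `data` under `data[new_key]` (assumed behavior)."""
    nested = {k: data.pop(k) for k in fields if k in data}
    if nested:
        data[new_key] = nested
    return data

options = {"query": "latest AI papers", "text": True, "livecrawl": "fallback"}
print(nest_fields_sketch(options, ["text", "livecrawl"], "contents"))
# {'query': 'latest AI papers', 'contents': {'text': True, 'livecrawl': 'fallback'}}
```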
@@ -865,20 +894,21 @@ class Exa:
  @overload
  def get_contents(
  self,
- ids: Union[str, List[str], List[_Result]],
+ urls: Union[str, List[str], List[_Result]],
  livecrawl_timeout: Optional[int] = None,
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithText]:
  ...

  @overload
  def get_contents(
  self,
- ids: Union[str, List[str], List[_Result]],
+ urls: Union[str, List[str], List[_Result]],
  *,
  text: Union[TextContentsOptions, Literal[True]],
  livecrawl_timeout: Optional[int] = None,
@@ -887,13 +917,14 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithText]:
  ...

  @overload
  def get_contents(
  self,
- ids: Union[str, List[str], List[_Result]],
+ urls: Union[str, List[str], List[_Result]],
  *,
  highlights: Union[HighlightsContentsOptions, Literal[True]],
  livecrawl_timeout: Optional[int] = None,
@@ -902,13 +933,14 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithHighlights]:
  ...

  @overload
  def get_contents(
  self,
- ids: Union[str, List[str], List[_Result]],
+ urls: Union[str, List[str], List[_Result]],
  *,
  text: Union[TextContentsOptions, Literal[True]],
  highlights: Union[HighlightsContentsOptions, Literal[True]],
@@ -918,13 +950,14 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndHighlights]:
  ...

  @overload
  def get_contents(
  self,
- ids: Union[str, List[str], List[_Result]],
+ urls: Union[str, List[str], List[_Result]],
  *,
  summary: Union[SummaryContentsOptions, Literal[True]],
  livecrawl_timeout: Optional[int] = None,
@@ -933,13 +966,14 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithSummary]:
  ...

  @overload
  def get_contents(
  self,
- ids: Union[str, List[str], List[_Result]],
+ urls: Union[str, List[str], List[_Result]],
  *,
  text: Union[TextContentsOptions, Literal[True]],
  summary: Union[SummaryContentsOptions, Literal[True]],
@@ -949,13 +983,14 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndSummary]:
  ...

  @overload
  def get_contents(
  self,
- ids: Union[str, List[str], List[_Result]],
+ urls: Union[str, List[str], List[_Result]],
  *,
  highlights: Union[HighlightsContentsOptions, Literal[True]],
  summary: Union[SummaryContentsOptions, Literal[True]],
@@ -965,13 +1000,14 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithHighlightsAndSummary]:
  ...

  @overload
  def get_contents(
  self,
- ids: Union[str, List[str], List[_Result]],
+ urls: Union[str, List[str], List[_Result]],
  *,
  text: Union[TextContentsOptions, Literal[True]],
  highlights: Union[HighlightsContentsOptions, Literal[True]],
@@ -982,18 +1018,22 @@ class Exa:
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
  ...
-
- def get_contents(self, ids: Union[str, List[str], List[_Result]], **kwargs):
+ def get_contents(self, urls: Union[str, List[str], List[_Result]], **kwargs):
  options = {
  k: v
- for k, v in {"ids": ids, **kwargs}.items()
+ for k, v in {"urls": urls, **kwargs}.items()
  if k != "self" and v is not None
  }
  if "text" not in options and "highlights" not in options and "summary" not in options and "extras" not in options:
  options["text"] = True
- validate_search_options(options, {**CONTENTS_OPTIONS_TYPES, **CONTENTS_ENDPOINT_OPTIONS_TYPES})
+
+ validate_search_options(
+ options,
+ {**CONTENTS_OPTIONS_TYPES, **CONTENTS_ENDPOINT_OPTIONS_TYPES},
+ )
  options = to_camel_case(options)
  data = self.request("/contents", options)
  return SearchResponse(
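Combined with the new urls argument, the contents endpoint options added above (flags, plus the existing subpages, subpage_target, extras, and livecrawl) can all be passed in one call. A sketch using only parameters confirmed by this diff; the meanings in the comments are inferred from the names and types:

```python
from exa_py import Exa

exa = Exa(api_key="your-api-key")

results = exa.get_contents(
    ["https://example.com"],
    text=True,
    subpages=2,                             # also fetch subpages of each URL
    subpage_target="about",                 # steer subpage selection (inferred)
    extras={"links": 5, "image_links": 3},  # extracted links / image links
    livecrawl="fallback",                   # one of "always", "fallback", "never", "auto"
)
```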
@@ -1018,6 +1058,7 @@ class Exa:
  exclude_text: Optional[List[str]] = None,
  exclude_source_domain: Optional[bool] = None,
  category: Optional[str] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[_Result]:
  options = {k: v for k, v in locals().items() if k != "self" and v is not None}
  validate_search_options(options, FIND_SIMILAR_OPTIONS_TYPES)
@@ -1047,11 +1088,12 @@ class Exa:
  exclude_source_domain: Optional[bool] = None,
  category: Optional[str] = None,
  livecrawl_timeout: Optional[int] = None,
+ livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
- livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithText]:
  ...

@@ -1073,11 +1115,12 @@ class Exa:
  exclude_source_domain: Optional[bool] = None,
  category: Optional[str] = None,
  livecrawl_timeout: Optional[int] = None,
+ livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
- livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithText]:
  ...

@@ -1104,6 +1147,7 @@ class Exa:
  livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithHighlights]:
  ...

@@ -1126,11 +1170,12 @@ class Exa:
  exclude_source_domain: Optional[bool] = None,
  category: Optional[str] = None,
  livecrawl_timeout: Optional[int] = None,
+ livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
- livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndHighlights]:
  ...

@@ -1157,6 +1202,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithSummary]:
  ...

@@ -1184,6 +1230,7 @@ class Exa:
  subpage_target: Optional[Union[str, List[str]]] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndSummary]:
  ...

@@ -1206,11 +1253,12 @@ class Exa:
  exclude_source_domain: Optional[bool] = None,
  category: Optional[str] = None,
  livecrawl_timeout: Optional[int] = None,
- livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
+ livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  filter_empty_results: Optional[bool] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithHighlightsAndSummary]:
  ...

@@ -1234,27 +1282,45 @@ class Exa:
  exclude_source_domain: Optional[bool] = None,
  category: Optional[str] = None,
  livecrawl_timeout: Optional[int] = None,
- livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  subpages: Optional[int] = None,
  subpage_target: Optional[Union[str, List[str]]] = None,
  filter_empty_results: Optional[bool] = None,
+ livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
  extras: Optional[ExtrasOptions] = None,
+ flags: Optional[List[str]] = None,
  ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
  ...

  def find_similar_and_contents(self, url: str, **kwargs):
- options = {
- k: v
- for k, v in {"url": url, **kwargs}.items()
- if k != "self" and v is not None
- }
+ options = {k: v for k, v in {"url": url, **kwargs}.items() if v is not None}
+ # Default to text if none specified
  if "text" not in options and "highlights" not in options and "summary" not in options:
  options["text"] = True
+
  validate_search_options(
- options, {**FIND_SIMILAR_OPTIONS_TYPES, **CONTENTS_OPTIONS_TYPES, **CONTENTS_ENDPOINT_OPTIONS_TYPES}
+ options,
+ {
+ **FIND_SIMILAR_OPTIONS_TYPES,
+ **CONTENTS_OPTIONS_TYPES,
+ **CONTENTS_ENDPOINT_OPTIONS_TYPES,
+ },
+ )
+ # We nest the content fields
+ options = nest_fields(
+ options,
+ [
+ "text",
+ "highlights",
+ "summary",
+ "subpages",
+ "subpage_target",
+ "livecrawl",
+ "livecrawl_timeout",
+ "extras",
+ ],
+ "contents",
  )
  options = to_camel_case(options)
- options = nest_fields(options, ["text", "highlights", "summary", "subpages", "subpage_target", "livecrawl", "livecrawl_timeout", "extras"], "contents")
  data = self.request("/findSimilar", options)
  return SearchResponse(
  [Result(**to_snake_case(result)) for result in data["results"]],
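One behavioral note from the hunk above: when no content type is requested, find_similar_and_contents (like search_and_contents) silently defaults to text=True. Reusing the exa client from the earlier sketches, these two calls are therefore equivalent in 1.7.3:

```python
results = exa.find_similar_and_contents("https://example.com")
results = exa.find_similar_and_contents("https://example.com", text=True)
```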
@@ -1266,9 +1332,9 @@ class Exa:
  def wrap(self, client: OpenAI):
  """Wrap an OpenAI client with Exa functionality.

- After wrapping, any call to `client.chat.completions.create` will be intercepted and enhanced with Exa functionality.
-
- To disable Exa functionality for a specific call, set `use_exa="none"` in the call to `client.chat.completions.create`.
+ After wrapping, any call to `client.chat.completions.create` will be intercepted
+ and enhanced with Exa RAG functionality. To disable Exa for a specific call,
+ set `use_exa="none"` in the `create` method.

  Args:
  client (OpenAI): The OpenAI client to wrap.
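For reference, here is how the wrapped client is used end to end, per the docstring above. The model name is a placeholder, and use_exa="none" opts a single call out of Exa RAG:

```python
from openai import OpenAI
from exa_py import Exa

exa = Exa(api_key="your-exa-api-key")
client = exa.wrap(OpenAI(api_key="your-openai-api-key"))

# Exa search results are injected into the completion automatically
completion = client.chat.completions.create(
    model="gpt-4o",  # placeholder model name
    messages=[{"role": "user", "content": "What is Exa?"}],
)

# Opt out of Exa for one call
plain = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Hello"}],
    use_exa="none",
)
```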
@@ -1300,6 +1366,7 @@ class Exa:
  type: Optional[str] = None,
  category: Optional[str] = None,
  result_max_len: int = 2048,
+ flags: Optional[List[str]] = None,
  # OpenAI args
  **openai_kwargs,
  ):
@@ -1317,6 +1384,7 @@ class Exa:
  "use_autoprompt": use_autoprompt,
  "type": type,
  "category": category,
+ "flags": flags,
  }

  create_kwargs = {
@@ -1333,7 +1401,7 @@ class Exa:
  )

  print("Wrapping OpenAI client with Exa functionality.", type(create_with_rag))
- client.chat.completions.create = create_with_rag # type: ignore
+ client.chat.completions.create = create_with_rag  # type: ignore

  return client

@@ -1372,7 +1440,9 @@ class Exa:
  query = maybe_get_query(completion)

  if not query:
- return ExaOpenAICompletion.from_completion(completion=completion, exa_result=None)
+ return ExaOpenAICompletion.from_completion(
+ completion=completion, exa_result=None
+ )

  exa_result = self.search_and_contents(query, **exa_kwargs)
  exa_str = format_exa_result(exa_result, max_len=max_len)
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "exa-py"
- version = "1.0.18"
+ version = "1.7.3"
  description = "Python SDK for Exa API."
  authors = ["Exa AI <hello@exa.ai>"]
  readme = "README.md"
@@ -1,94 +0,0 @@
- Metadata-Version: 2.1
- Name: exa_py
- Version: 1.7.1
- Summary: Python SDK for Exa API.
- Home-page: https://github.com/exa-labs/exa-py
- Author: Exa
- Author-email: hello@exa.ai
- Classifier: Development Status :: 5 - Production/Stable
- Classifier: Intended Audience :: Developers
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Typing :: Typed
- Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Description-Content-Type: text/markdown
- Requires-Dist: requests
- Requires-Dist: typing-extensions
- Requires-Dist: openai>=1.10.0
-
- # Exa
-
- Exa (formerly Metaphor) API in Python
-
- Note: This API is basically the same as `metaphor-python` but reflects new
- features associated with Metaphor's rename to Exa. New site is https://exa.ai
-
- ## Installation
-
- ```bash
- pip install exa_py
- ```
-
- ## Usage
-
- Import the package and initialize the Exa client with your API key:
-
- ```python
- from exa_py import Exa
-
- exa = Exa(api_key="your-api-key")
- ```
-
- ## Common requests
- ```python
-
- # basic search
- results = exa.search("This is a Exa query:")
-
- # autoprompted search
- results = exa.search("autopromptable query", use_autoprompt=True)
-
- # keyword search (non-neural)
- results = exa.search("Google-style query", type="keyword")
-
- # search with date filters
- results = exa.search("This is a Exa query:", start_published_date="2019-01-01", end_published_date="2019-01-31")
-
- # search with domain filters
- results = exa.search("This is a Exa query:", include_domains=["www.cnn.com", "www.nytimes.com"])
-
- # search and get text contents
- results = exa.search_and_contents("This is a Exa query:")
-
- # search and get highlights
- results = exa.search_and_contents("This is a Exa query:", highlights=True)
-
- # search and get contents with contents options
- results = exa.search_and_contents("This is a Exa query:",
- text={"include_html_tags": True, "max_characters": 1000},
- highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
-
- # find similar documents
- results = exa.find_similar("https://example.com")
-
- # find similar excluding source domain
- results = exa.find_similar("https://example.com", exclude_source_domain=True)
-
- # find similar with contents
- results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)
-
- # get text contents
- results = exa.get_contents(["ids"])
-
- # get highlights
- results = exa.get_contents(["ids"], highlights=True)
-
- # get contents with contents options
- results = exa.get_contents(["ids"],
- text={"include_html_tags": True, "max_characters": 1000},
- highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
- ```
-
@@ -1,12 +0,0 @@
- README.md
- pyproject.toml
- setup.py
- exa_py/__init__.py
- exa_py/api.py
- exa_py/py.typed
- exa_py/utils.py
- exa_py.egg-info/PKG-INFO
- exa_py.egg-info/SOURCES.txt
- exa_py.egg-info/dependency_links.txt
- exa_py.egg-info/requires.txt
- exa_py.egg-info/top_level.txt
@@ -1,3 +0,0 @@
- requests
- typing-extensions
- openai>=1.10.0
@@ -1 +0,0 @@
- exa_py
exa_py-1.7.1/setup.cfg DELETED
@@ -1,4 +0,0 @@
- [egg_info]
- tag_build =
- tag_date = 0
-
exa_py-1.7.1/setup.py DELETED
@@ -1,30 +0,0 @@
- from setuptools import setup, find_packages
-
- setup(
- name="exa_py",
- version="1.7.1",
- description="Python SDK for Exa API.",
- long_description_content_type="text/markdown",
- long_description=open("README.md").read(),
- author="Exa",
- author_email="hello@exa.ai",
- package_data={"exa_py": ["py.typed"]},
- url="https://github.com/exa-labs/exa-py",
- packages=find_packages(),
- install_requires=[
- "requests",
- "typing-extensions",
- "openai>=1.10.0"
- ],
- classifiers=[
- "Development Status :: 5 - Production/Stable",
- "Intended Audience :: Developers",
- "License :: OSI Approved :: MIT License",
- "Typing :: Typed",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Programming Language :: Python :: 3.11",
- "Programming Language :: Python :: 3.12",
- ],
- )