exa-py 1.0.14.tar.gz → 1.0.16.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of exa-py might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: exa_py
-Version: 1.0.14
+Version: 1.0.16
 Summary: Python SDK for Exa API.
 Home-page: https://github.com/exa-labs/exa-py
 Author: Exa
@@ -18,7 +18,6 @@ from typing import (
 )
 from typing_extensions import TypedDict

-import httpx
 from openai import OpenAI
 from openai.types.chat.chat_completion_message_param import ChatCompletionMessageParam
 from openai.types.chat_model import ChatModel
@@ -30,8 +29,6 @@ from exa_py.utils import (
 )


-
-
 def snake_to_camel(snake_str: str) -> str:
     """Convert snake_case string to camelCase.

@@ -97,6 +94,10 @@ SEARCH_OPTIONS_TYPES = {
     "end_published_date": [
         str
     ], # Results before this publish date; excludes links with no date. ISO 8601 format.
+    "include_text": [
+        list
+    ], # list of strings that must be present in webpage text of results. Currently, only one string is supported, up to 5 words.
+    "exclude_text": [list], # list of strings that must not be present in webpage text of result. Currently, only one string is supported, up to 5 words.
     "use_autoprompt": [bool], # Convert query to Exa (Higher latency, Default: false).
     "type": [
         str
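
The two new filters above flow through the search-style endpoints. A minimal sketch of how they might be used (the API key, query, and phrases are placeholders):

    from exa_py import Exa

    exa = Exa(api_key="YOUR-EXA-API-KEY")  # placeholder key

    # Each filter takes a list with a single phrase of up to 5 words,
    # per the comments in the diff above.
    response = exa.search(
        "renewable energy storage startups",
        include_text=["battery storage"],
        exclude_text=["press release"],
        num_results=5,
    )
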
@@ -115,6 +116,8 @@ FIND_SIMILAR_OPTIONS_TYPES = {
     "end_crawl_date": [str],
     "start_published_date": [str],
     "end_published_date": [str],
+    "include_text": [list],
+    "exclude_text": [list],
     "exclude_source_domain": [bool],
     "category": [str],
 }
@@ -123,6 +126,7 @@ CONTENTS_OPTIONS_TYPES = {
     "ids": [list],
     "text": [dict, bool],
     "highlights": [dict, bool],
+    "summary": [dict, bool],
 }


@@ -172,6 +176,14 @@ class HighlightsContentsOptions(TypedDict, total=False):
     num_sentences: int
     highlights_per_url: int

+class SummaryContentsOptions(TypedDict, total=False):
+    """A class representing the options that you can specify when requesting summary
+
+    Attributes:
+        query (str): The query string for the summary. Summary will bias towards answering the query.
+    """
+
+    query: str

 @dataclass
 class _Result:
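
SummaryContentsOptions slots in next to the existing text and highlights options: passing summary=True requests a default summary, while a dict matching the TypedDict above biases the summary toward a query. A hedged sketch (placeholder key and query):

    from exa_py import Exa

    exa = Exa(api_key="YOUR-EXA-API-KEY")  # placeholder key

    # A dict matching SummaryContentsOptions steers the summary toward a question;
    # summary=True would request a default summary instead.
    response = exa.search_and_contents(
        "large language model evaluation",
        summary={"query": "Which evaluation benchmarks are discussed?"},
    )
    for result in response.results:
        print(result.title, "->", result.summary)
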
@@ -213,11 +225,13 @@ class Result(_Result):
         text (str, optional): The text of the search result page.
         highlights (List[str], optional): The highlights of the search result.
         highlight_scores (List[float], optional): The scores of the highlights of the search result.
+        summary (str, optional): The summary of the search result.
     """

     text: Optional[str] = None
     highlights: Optional[List[str]] = None
     highlight_scores: Optional[List[float]] = None
+    summary: Optional[str] = None

     def __str__(self):
         base_str = super().__str__()
@@ -225,6 +239,7 @@ class Result(_Result):
             f"Text: {self.text}\n"
             f"Highlights: {self.highlights}\n"
             f"Highlight Scores: {self.highlight_scores}\n"
+            f"Summary: {self.summary}\n"
         )


@@ -288,6 +303,86 @@ class ResultWithTextAndHighlights(_Result):
             f"Highlight Scores: {self.highlight_scores}\n"
         )

+@dataclass
+class ResultWithSummary(_Result):
+    """
+    A class representing a search result with summary present.
+
+    Attributes:
+        summary (str): The summary of the search result.
+    """
+
+    summary: str = dataclasses.field(default_factory=str)
+
+    def __str__(self):
+        base_str = super().__str__()
+        return base_str + f"Summary: {self.summary}\n"
+
+@dataclass
+class ResultWithTextAndSummary(_Result):
+    """
+    A class representing a search result with text and summary present.
+
+    Attributes:
+        text (str): The text of the search result page.
+        summary (str): The summary of the search result.
+    """
+
+    text: str = dataclasses.field(default_factory=str)
+    summary: str = dataclasses.field(default_factory=str)
+
+    def __str__(self):
+        base_str = super().__str__()
+        return base_str + f"Text: {self.text}\n" + f"Summary: {self.summary}\n"
+
+@dataclass
+class ResultWithHighlightsAndSummary(_Result):
+    """
+    A class representing a search result with highlights and summary present.
+
+    Attributes:
+        highlights (List[str]): The highlights of the search result.
+        highlight_scores (List[float]): The scores of the highlights of the search result.
+        summary (str): The summary of the search result.
+    """
+
+    highlights: List[str] = dataclasses.field(default_factory=list)
+    highlight_scores: List[float] = dataclasses.field(default_factory=list)
+    summary: str = dataclasses.field(default_factory=str)
+
+    def __str__(self):
+        base_str = super().__str__()
+        return base_str + (
+            f"Highlights: {self.highlights}\n"
+            f"Highlight Scores: {self.highlight_scores}\n"
+            f"Summary: {self.summary}\n"
+        )
+
+@dataclass
+class ResultWithTextAndHighlightsAndSummary(_Result):
+    """
+    A class representing a search result with text, highlights, and summary present.
+
+    Attributes:
+        text (str): The text of the search result page.
+        highlights (List[str]): The highlights of the search result.
+        highlight_scores (List[float]): The scores of the highlights of the search result.
+        summary (str): The summary of the search result.
+    """
+
+    text: str = dataclasses.field(default_factory=str)
+    highlights: List[str] = dataclasses.field(default_factory=list)
+    highlight_scores: List[float] = dataclasses.field(default_factory=list)
+    summary: str = dataclasses.field(default_factory=str)
+
+    def __str__(self):
+        base_str = super().__str__()
+        return base_str + (
+            f"Text: {self.text}\n"
+            f"Highlights: {self.highlights}\n"
+            f"Highlight Scores: {self.highlight_scores}\n"
+            f"Summary: {self.summary}\n"
+        )

 T = TypeVar("T")

@@ -335,7 +430,7 @@ class Exa:
         self,
         api_key: Optional[str],
         base_url: str = "https://api.exa.ai",
-        user_agent: str = "exa-py 1.0.14",
+        user_agent: str = "exa-py 1.0.16",
     ):
         """Initialize the Exa client with the provided API key and optional base URL and user agent.

@@ -373,6 +468,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         use_autoprompt: Optional[bool] = None,
         type: Optional[str] = None,
         category: Optional[str] = None,
@@ -388,6 +485,8 @@ class Exa:
             end_crawl_date (str, optional): Results will only include links crawled before this date.
             start_published_date (str, optional): Results will only include links with a published date after this date.
             end_published_date (str, optional): Results will only include links with a published date before this date.
+            include_text (List[str], optional): List of strings that must be present in the webpage text of results. Currently, only one string is supported, up to 5 words.
+            exclude_text (List[str], optional): List of strings that must not be present in the webpage text of results. Currently, only one string is supported, up to 5 words.
             use_autoprompt (bool, optional): If true, convert query to a Exa query. Defaults to False.
             type (str, optional): The type of search, 'keyword' or 'neural'. Defaults to "neural".
             category (str, optional): A data category to focus on, with higher comprehensivity and data cleanliness. Currently, the only category is company.
@@ -415,6 +514,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         use_autoprompt: Optional[bool] = None,
         type: Optional[str] = None,
         category: Optional[str] = None,
@@ -434,6 +535,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         use_autoprompt: Optional[bool] = None,
         type: Optional[str] = None,
         category: Optional[str] = None,
@@ -453,6 +556,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         use_autoprompt: Optional[bool] = None,
         type: Optional[str] = None,
         category: Optional[str] = None,
@@ -473,24 +578,114 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         use_autoprompt: Optional[bool] = None,
         type: Optional[str] = None,
         category: Optional[str] = None,
     ) -> SearchResponse[ResultWithTextAndHighlights]:
         ...

+    @overload
+    def search_and_contents(
+        self,
+        query: str,
+        *,
+        summary: Union[SummaryContentsOptions, Literal[True]],
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        use_autoprompt: Optional[bool] = None,
+        type: Optional[str] = None,
+        category: Optional[str] = None,
+    ) -> SearchResponse[ResultWithSummary]:
+        ...
+
+    @overload
+    def search_and_contents(
+        self,
+        query: str,
+        *,
+        text: Union[TextContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        use_autoprompt: Optional[bool] = None,
+        type: Optional[str] = None,
+        category: Optional[str] = None,
+    ) -> SearchResponse[ResultWithTextAndSummary]:
+        ...
+
+    @overload
+    def search_and_contents(
+        self,
+        query: str,
+        *,
+        highlights: Union[HighlightsContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        use_autoprompt: Optional[bool] = None,
+        type: Optional[str] = None,
+        category: Optional[str] = None,
+    ) -> SearchResponse[ResultWithHighlightsAndSummary]:
+        ...
+
+    @overload
+    def search_and_contents(
+        self,
+        query: str,
+        *,
+        text: Union[TextContentsOptions, Literal[True]],
+        highlights: Union[HighlightsContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        use_autoprompt: Optional[bool] = None,
+        type: Optional[str] = None,
+        category: Optional[str] = None,
+    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
+        ...
+
     def search_and_contents(self, query: str, **kwargs):
         options = {
             k: v
             for k, v in {"query": query, **kwargs}.items()
             if k != "self" and v is not None
         }
-        if "text" not in options and "highlights" not in options:
+        if "text" not in options and "highlights" not in options and "summary" not in options:
             options["text"] = True
         validate_search_options(
             options, {**SEARCH_OPTIONS_TYPES, **CONTENTS_OPTIONS_TYPES}
         )
-        options = nest_fields(options, ["text", "highlights"], "contents")
+        options = nest_fields(options, ["text", "highlights", "summary"], "contents")
         options = to_camel_case(options)
         data = self.request("/search", options)
         return SearchResponse(
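
Two behavioral notes fall out of the implementation above: summary now counts toward the "no content requested" check (text=True is only defaulted when none of text, highlights, or summary is passed), and summary is nested under contents alongside the others. A hedged usage sketch combining all three (placeholder key and query):

    from exa_py import Exa

    exa = Exa(api_key="YOUR-EXA-API-KEY")  # placeholder key

    # Per the overloads above, type checkers should infer
    # SearchResponse[ResultWithTextAndHighlightsAndSummary] here.
    response = exa.search_and_contents(
        "open-source observability tools",
        text=True,
        highlights={"num_sentences": 2},
        summary=True,
        num_results=3,
    )
    for result in response.results:
        print(result.title)
        print(result.highlights)
        print(result.summary)
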
@@ -533,13 +728,53 @@ class Exa:
     ) -> SearchResponse[ResultWithTextAndHighlights]:
         ...

+    @overload
+    def get_contents(
+        self,
+        ids: Union[str, List[str], List[_Result]],
+        *,
+        summary: Union[SummaryContentsOptions, Literal[True]],
+    ) -> SearchResponse[ResultWithSummary]:
+        ...
+
+    @overload
+    def get_contents(
+        self,
+        ids: Union[str, List[str], List[_Result]],
+        *,
+        text: Union[TextContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+    ) -> SearchResponse[ResultWithTextAndSummary]:
+        ...
+
+    @overload
+    def get_contents(
+        self,
+        ids: Union[str, List[str], List[_Result]],
+        *,
+        highlights: Union[HighlightsContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+    ) -> SearchResponse[ResultWithHighlightsAndSummary]:
+        ...
+
+    @overload
+    def get_contents(
+        self,
+        ids: Union[str, List[str], List[_Result]],
+        *,
+        text: Union[TextContentsOptions, Literal[True]],
+        highlights: Union[HighlightsContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
+        ...
+
     def get_contents(self, ids: Union[str, List[str], List[_Result]], **kwargs):
         options = {
             k: v
             for k, v in {"ids": ids, **kwargs}.items()
             if k != "self" and v is not None
         }
-        if "text" not in options and "highlights" not in options:
+        if "text" not in options and "highlights" not in options and "summary" not in options:
             options["text"] = True
         validate_search_options(options, {**CONTENTS_OPTIONS_TYPES})
         options = to_camel_case(options)
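
get_contents gains the same summary overloads, and its default-to-text check now also considers summary. A minimal sketch (placeholder key; the id/URL is made up):

    from exa_py import Exa

    exa = Exa(api_key="YOUR-EXA-API-KEY")  # placeholder key

    response = exa.get_contents(
        ["https://example.com/some-article"],  # made-up id; prior results also work
        text=True,
        summary={"query": "What is the main argument?"},
    )
    for result in response.results:
        print(result.summary)
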
@@ -560,6 +795,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         exclude_source_domain: Optional[bool] = None,
         category: Optional[str] = None,
     ) -> SearchResponse[_Result]:
@@ -584,6 +821,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         exclude_source_domain: Optional[bool] = None,
         category: Optional[str] = None,
     ) -> SearchResponse[ResultWithText]:
@@ -602,6 +841,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         exclude_source_domain: Optional[bool] = None,
         category: Optional[str] = None,
     ) -> SearchResponse[ResultWithText]:
@@ -620,6 +861,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         exclude_source_domain: Optional[bool] = None,
         category: Optional[str] = None,
     ) -> SearchResponse[ResultWithHighlights]:
@@ -639,11 +882,97 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         exclude_source_domain: Optional[bool] = None,
         category: Optional[str] = None,
     ) -> SearchResponse[ResultWithTextAndHighlights]:
         ...

+    @overload
+    def find_similar_and_contents(
+        self,
+        url: str,
+        *,
+        summary: Union[SummaryContentsOptions, Literal[True]],
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        exclude_source_domain: Optional[bool] = None,
+        category: Optional[str] = None,
+    ) -> SearchResponse[ResultWithSummary]:
+        ...
+
+    @overload
+    def find_similar_and_contents(
+        self,
+        url: str,
+        *,
+        text: Union[TextContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        exclude_source_domain: Optional[bool] = None,
+        category: Optional[str] = None,
+    ) -> SearchResponse[ResultWithTextAndSummary]:
+        ...
+
+    @overload
+    def find_similar_and_contents(
+        self,
+        url: str,
+        *,
+        highlights: Union[HighlightsContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        exclude_source_domain: Optional[bool] = None,
+        category: Optional[str] = None,
+    ) -> SearchResponse[ResultWithHighlightsAndSummary]:
+        ...
+
+    @overload
+    def find_similar_and_contents(
+        self,
+        url: str,
+        *,
+        text: Union[TextContentsOptions, Literal[True]],
+        highlights: Union[HighlightsContentsOptions, Literal[True]],
+        summary: Union[SummaryContentsOptions, Literal[True]],
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        exclude_source_domain: Optional[bool] = None,
+        category: Optional[str] = None,
+    ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
+        ...
+
     def find_similar_and_contents(self, url: str, **kwargs):
         options = {
             k: v
@@ -656,12 +985,13 @@ class Exa:
             options, {**FIND_SIMILAR_OPTIONS_TYPES, **CONTENTS_OPTIONS_TYPES}
         )
         options = to_camel_case(options)
-        options = nest_fields(options, ["text", "highlights"], "contents")
+        options = nest_fields(options, ["text", "highlights", "summary"], "contents")
        data = self.request("/findSimilar", options)
         return SearchResponse(
             [Result(**to_snake_case(result)) for result in data["results"]],
             data["autopromptString"] if "autopromptString" in data else None,
         )
+
     def wrap(self, client: OpenAI):
         """Wrap an OpenAI client with Exa functionality.
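
find_similar_and_contents mirrors the same pattern: the new overloads add summary-bearing return types, and the implementation nests summary under contents. A hedged sketch (placeholder key and URL):

    from exa_py import Exa

    exa = Exa(api_key="YOUR-EXA-API-KEY")  # placeholder key

    response = exa.find_similar_and_contents(
        "https://example.com/blog/vector-databases",  # made-up URL
        summary=True,
        include_text=["vector database"],
        exclude_source_domain=True,
    )
    for result in response.results:
        print(result.url, "->", result.summary)
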
 
@@ -693,6 +1023,8 @@ class Exa:
         end_crawl_date: Optional[str] = None,
         start_published_date: Optional[str] = None,
         end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
         use_autoprompt: Optional[bool] = True,
         type: Optional[str] = None,
         category: Optional[str] = None,
@@ -709,6 +1041,8 @@ class Exa:
             "end_crawl_date": end_crawl_date,
             "start_published_date": start_published_date,
             "end_published_date": end_published_date,
+            "include_text": include_text,
+            "exclude_text": exclude_text,
             "use_autoprompt": use_autoprompt,
             "type": type,
             "category": category,
@@ -767,7 +1101,7 @@ class Exa:
             create_kwargs["tools"] = tools

         completion = create_fn(messages=messages, **create_kwargs)
-
+
         query = maybe_get_query(completion)

         if not query:
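
The wrap-related hunks above thread include_text / exclude_text through the OpenAI integration as well. Assuming wrap() returns the patched client and that its chat.completions.create accepts the Exa keyword arguments shown in the modified signature (keys and model name below are placeholders), usage might look like:

    from openai import OpenAI
    from exa_py import Exa

    exa = Exa(api_key="YOUR-EXA-API-KEY")  # placeholder key
    openai_client = exa.wrap(OpenAI(api_key="YOUR-OPENAI-API-KEY"))  # placeholder key

    completion = openai_client.chat.completions.create(
        model="gpt-4o",  # placeholder model name
        messages=[{"role": "user", "content": "Summarize recent battery storage news."}],
        include_text=["battery storage"],  # forwarded to the Exa search, per the diff
    )
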
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: exa-py
-Version: 1.0.14
+Version: 1.0.16
 Summary: Python SDK for Exa API.
 Home-page: https://github.com/exa-labs/exa-py
 Author: Exa
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

 setup(
     name="exa_py",
-    version="1.0.14",
+    version="1.0.16",
     description="Python SDK for Exa API.",
     long_description_content_type="text/markdown",
     long_description=open("README.md").read(),
5 files without changes.