exa-py 1.7.2__tar.gz → 1.8.3__tar.gz
This diff shows the changes between publicly released versions of this package as they appear in their public registries, and is provided for informational purposes only.
Potentially problematic release: this version of exa-py might be problematic.
- {exa_py-1.7.2/exa_py.egg-info → exa_py-1.8.3}/PKG-INFO +26 -12
- {exa_py-1.7.2 → exa_py-1.8.3}/README.md +16 -3
- {exa_py-1.7.2 → exa_py-1.8.3}/exa_py/api.py +128 -29
- {exa_py-1.7.2 → exa_py-1.8.3}/pyproject.toml +1 -1
- exa_py-1.7.2/PKG-INFO +0 -91
- exa_py-1.7.2/exa_py.egg-info/SOURCES.txt +0 -12
- exa_py-1.7.2/exa_py.egg-info/dependency_links.txt +0 -1
- exa_py-1.7.2/exa_py.egg-info/requires.txt +0 -3
- exa_py-1.7.2/exa_py.egg-info/top_level.txt +0 -1
- exa_py-1.7.2/setup.cfg +0 -4
- exa_py-1.7.2/setup.py +0 -30
- {exa_py-1.7.2 → exa_py-1.8.3}/exa_py/__init__.py +0 -0
- {exa_py-1.7.2 → exa_py-1.8.3}/exa_py/py.typed +0 -0
- {exa_py-1.7.2 → exa_py-1.8.3}/exa_py/utils.py +0 -0
{exa_py-1.7.2/exa_py.egg-info → exa_py-1.8.3}/PKG-INFO

@@ -1,19 +1,19 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: exa-py
-Version: 1.7.2
+Version: 1.8.3
 Summary: Python SDK for Exa API.
-Home-page: https://github.com/exa-labs/exa-py
-Author: Exa
+Author: Exa AI
 Author-email: hello@exa.ai
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Typing :: Typed
-Classifier: Programming Language :: Python :: 3.8
+Requires-Python: >=3.9,<4.0
+Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Requires-Dist: openai (>=1.48,<2.0)
+Requires-Dist: requests (>=2.32.3,<3.0.0)
+Requires-Dist: typing-extensions (>=4.12.2,<5.0.0)
 Description-Content-Type: text/markdown
 
 # Exa
@@ -78,14 +78,28 @@ exa = Exa(api_key="your-api-key")
 results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)
 
 # get text contents
-results = exa.get_contents(["ids"])
+results = exa.get_contents(["urls"])
 
 # get highlights
-results = exa.get_contents(["ids"], highlights=True)
+results = exa.get_contents(["urls"], highlights=True)
 
 # get contents with contents options
-results = exa.get_contents(["ids"],
+results = exa.get_contents(["urls"],
   text={"include_html_tags": True, "max_characters": 1000},
   highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
+
+# basic answer
+response = exa.answer("This is a query to answer a question")
+
+# answer with expanded queries and full text
+response = exa.answer("This is a query to answer a question", expanded_queries_limit=3, include_text=True)
+
+# answer with streaming
+response = exa.answer("This is a query to answer with streaming:", stream=True)
+
+# Print each chunk as it arrives when answer streaming is enabled
+for chunk in response:
+    print(chunk)
 ```
 
+
{exa_py-1.7.2 → exa_py-1.8.3}/README.md

@@ -60,14 +60,27 @@ exa = Exa(api_key="your-api-key")
 results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)
 
 # get text contents
-results = exa.get_contents(["ids"])
+results = exa.get_contents(["urls"])
 
 # get highlights
-results = exa.get_contents(["ids"], highlights=True)
+results = exa.get_contents(["urls"], highlights=True)
 
 # get contents with contents options
-results = exa.get_contents(["ids"],
+results = exa.get_contents(["urls"],
   text={"include_html_tags": True, "max_characters": 1000},
   highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
+
+# basic answer
+response = exa.answer("This is a query to answer a question")
+
+# answer with expanded queries and full text
+response = exa.answer("This is a query to answer a question", expanded_queries_limit=3, include_text=True)
+
+# answer with streaming
+response = exa.answer("This is a query to answer with streaming:", stream=True)
+
+# Print each chunk as it arrives when answer streaming is enabled
+for chunk in response:
+    print(chunk)
 ```
 
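Worth calling out from the README hunk above: `get_contents` now takes URLs (or previously returned results) where 1.7.2 took document IDs. A minimal sketch of the updated call, assuming a valid API key; the key and URL below are placeholders, not values from this diff:

```python
from exa_py import Exa

exa = Exa(api_key="your-api-key")  # placeholder key

# 1.8.3: pass URLs (or prior Result objects) instead of the document IDs used in 1.7.2.
results = exa.get_contents(
    ["https://example.com"],       # placeholder URL
    text={"max_characters": 500},  # same contents options as before
)
for result in results.results:
    print(result.url)
```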
{exa_py-1.7.2 → exa_py-1.8.3}/exa_py/api.py

@@ -30,6 +30,7 @@ from exa_py.utils import (
     maybe_get_query,
 )
 import os
+from typing import Iterator
 
 is_beta = os.getenv("IS_BETA") == "True"
 
@@ -100,13 +101,13 @@ SEARCH_OPTIONS_TYPES = {
     "exclude_domains": [list], # Domains to omit; exclusive with 'include_domains'.
     "start_crawl_date": [str], # Results after this crawl date. ISO 8601 format.
     "end_crawl_date": [str], # Results before this crawl date. ISO 8601 format.
-    "start_published_date": [str], # Results after this publish date; excludes links with no date.
-    "end_published_date": [str], # Results before this publish date; excludes links with no date.
+    "start_published_date": [str], # Results after this publish date; excludes links with no date. ISO 8601 format.
+    "end_published_date": [str], # Results before this publish date; excludes links with no date. ISO 8601 format.
     "include_text": [list], # Must be present in webpage text. (One string, up to 5 words)
     "exclude_text": [list], # Must not be present in webpage text. (One string, up to 5 words)
     "use_autoprompt": [bool], # Convert query to Exa. (Default: false)
-    "type": [str], # 'keyword' or '
-    "category": [str], #
+    "type": [str], # 'keyword', 'neural', or 'auto' (Default: auto).'neural' uses embeddings search, 'keyword' is SERP and 'auto' decides the best search type based on your query
+    "category": [str], # A data category to focus on: 'company', 'research paper', 'news', 'pdf', 'github', 'tweet', 'personal site', 'linkedin profile', 'financial report'
     "flags": [list], # Experimental flags array for Exa usage.
 }
 
@@ -130,7 +131,7 @@ FIND_SIMILAR_OPTIONS_TYPES = {
 LIVECRAWL_OPTIONS = Literal["always", "fallback", "never", "auto"]
 
 CONTENTS_OPTIONS_TYPES = {
-    "ids": [list],
+    "urls": [list],
     "text": [dict, bool],
     "highlights": [dict, bool],
     "summary": [dict, bool],
@@ -216,7 +217,6 @@ class SummaryContentsOptions(TypedDict, total=False):
 
     query: str
 
-
 class ExtrasOptions(TypedDict, total=False):
     """A class representing additional extraction fields (e.g. links, images)"""
 
@@ -273,7 +273,8 @@ class _Result:
             f"Published Date: {self.published_date}\n"
             f"Author: {self.author}\n"
             f"Image: {self.image}\n"
-            f"
+            f"Favicon: {self.favicon}\n"
+            f"Extras: {self.extras}\n"
             f"Subpages: {self.subpages}\n"
         )
 
@@ -494,6 +495,56 @@ class ResultWithTextAndHighlightsAndSummary(_Result):
             f"Summary: {self.summary}\n"
         )
 
+@dataclass
+class AnswerResult:
+    """A class representing a source result for an answer.
+
+    Attributes:
+        title (str): The title of the search result.
+        url (str): The URL of the search result.
+        id (str): The temporary ID for the document.
+        published_date (str, optional): An estimate of the creation date, from parsing HTML content.
+        author (str, optional): If available, the author of the content.
+    """
+
+    url: str
+    id: str
+    title: Optional[str] = None
+    published_date: Optional[str] = None
+    author: Optional[str] = None
+
+    def __init__(self, **kwargs):
+        self.url = kwargs['url']
+        self.id = kwargs['id']
+        self.title = kwargs.get('title')
+        self.published_date = kwargs.get('published_date')
+        self.author = kwargs.get('author')
+
+    def __str__(self):
+        return (
+            f"Title: {self.title}\n"
+            f"URL: {self.url}\n"
+            f"ID: {self.id}\n"
+            f"Published Date: {self.published_date}\n"
+            f"Author: {self.author}\n"
+        )
+
+@dataclass
+class AnswerResponse:
+    """A class representing the response for an answer operation.
+
+    Attributes:
+        answer (str): The generated answer.
+        sources (List[AnswerResult]): A list of sources used to generate the answer.
+    """
+
+    answer: str
+    sources: List[AnswerResult]
+
+    def __str__(self):
+        output = f"Answer: {self.answer}\n\nSources:\n"
+        output += "\n\n".join(str(source) for source in self.sources)
+        return output
 
 T = TypeVar("T")
 
@@ -548,7 +599,7 @@ class Exa:
         self,
         api_key: Optional[str],
         base_url: str = "https://api.exa.ai",
-        user_agent: str = "exa-py 1.7.2",
+        user_agent: str = "exa-py 1.8.3",
     ):
         """Initialize the Exa client with the provided API key and optional base URL and user agent.
 
@@ -568,11 +619,15 @@ class Exa:
         self.headers = {"x-api-key": api_key, "User-Agent": user_agent}
 
     def request(self, endpoint: str, data):
+        if data.get("stream"):
+            res = requests.post(self.base_url + endpoint, json=data, headers=self.headers, stream=True)
+            if res.status_code != 200:
+                raise ValueError(f"Request failed with status code {res.status_code}: {res.text}")
+            return (line.decode('utf-8') for line in res.iter_lines() if line)
+
         res = requests.post(self.base_url + endpoint, json=data, headers=self.headers)
         if res.status_code != 200:
-            raise ValueError(
-                f"Request failed with status code {res.status_code}: {res.text}"
-            )
+            raise ValueError(f"Request failed with status code {res.status_code}: {res.text}")
         return res.json()
 
     def search(
@@ -894,7 +949,7 @@ class Exa:
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         livecrawl_timeout: Optional[int] = None,
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
         filter_empty_results: Optional[bool] = None,

@@ -908,7 +963,7 @@ class Exa:
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         text: Union[TextContentsOptions, Literal[True]],
         livecrawl_timeout: Optional[int] = None,

@@ -924,7 +979,7 @@ class Exa:
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         highlights: Union[HighlightsContentsOptions, Literal[True]],
         livecrawl_timeout: Optional[int] = None,

@@ -940,7 +995,7 @@ class Exa:
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         text: Union[TextContentsOptions, Literal[True]],
         highlights: Union[HighlightsContentsOptions, Literal[True]],

@@ -957,7 +1012,7 @@ class Exa:
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         summary: Union[SummaryContentsOptions, Literal[True]],
         livecrawl_timeout: Optional[int] = None,

@@ -973,7 +1028,7 @@ class Exa:
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         text: Union[TextContentsOptions, Literal[True]],
         summary: Union[SummaryContentsOptions, Literal[True]],

@@ -990,7 +1045,7 @@ class Exa:
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         highlights: Union[HighlightsContentsOptions, Literal[True]],
         summary: Union[SummaryContentsOptions, Literal[True]],

@@ -1007,7 +1062,7 @@ class Exa:
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         text: Union[TextContentsOptions, Literal[True]],
         highlights: Union[HighlightsContentsOptions, Literal[True]],

@@ -1021,16 +1076,13 @@ class Exa:
         flags: Optional[List[str]] = None,
     ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
         ...
-
-
-
-
-
-
-
-            and "summary" not in options
-            and "extras" not in options
-        ):
+    def get_contents(self, urls: Union[str, List[str], List[_Result]], **kwargs):
+        options = {
+            k: v
+            for k, v in {"urls": urls, **kwargs}.items()
+            if k != "self" and v is not None
+        }
+        if "text" not in options and "highlights" not in options and "summary" not in options and "extras" not in options:
             options["text"] = True
 
         validate_search_options(
@@ -1456,3 +1508,50 @@ class Exa:
             completion=completion, exa_result=exa_result
         )
         return exa_completion
+
+    @overload
+    def answer(
+        self,
+        query: str,
+        *,
+        expanded_queries_limit: Optional[int] = 1,
+        stream: Optional[bool] = False,
+        include_text: Optional[bool] = False,
+    ) -> Union[AnswerResponse, Iterator[Union[str, List[AnswerResult]]]]:
+        ...
+
+    def answer(
+        self,
+        query: str,
+        *,
+        expanded_queries_limit: Optional[int] = 1,
+        stream: Optional[bool] = False,
+        include_text: Optional[bool] = False,
+    ) -> Union[AnswerResponse, Iterator[Union[str, List[AnswerResult]]]]:
+        """Generate an answer to a query using Exa's search and LLM capabilities.
+
+        Args:
+            query (str): The query to answer.
+            expanded_queries_limit (int, optional): Maximum number of query variations (0-4). Defaults to 1.
+            stream (bool, optional): Whether to stream the response. Defaults to False.
+            include_text (bool, optional): Whether to include full text in the results. Defaults to False.
+
+        Returns:
+            Union[AnswerResponse, Iterator[Union[str, List[AnswerResult]]]]: Either an AnswerResponse object containing the answer and sources,
+            or an iterator that yields either answer chunks or sources when streaming is enabled.
+        """
+        options = {
+            k: v
+            for k, v in locals().items()
+            if k != "self" and v is not None
+        }
+        options = to_camel_case(options)
+        response = self.request("/answer", options)
+
+        if stream:
+            return response
+
+        return AnswerResponse(
+            response["answer"],
+            [AnswerResult(**to_snake_case(result)) for result in response["sources"]]
+        )
exa_py-1.7.2/PKG-INFO
DELETED

@@ -1,91 +0,0 @@
-Metadata-Version: 2.1
-Name: exa_py
-Version: 1.7.2
-Summary: Python SDK for Exa API.
-Home-page: https://github.com/exa-labs/exa-py
-Author: Exa
-Author-email: hello@exa.ai
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Typing :: Typed
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Description-Content-Type: text/markdown
-
-# Exa
-
-Exa (formerly Metaphor) API in Python
-
-Note: This API is basically the same as `metaphor-python` but reflects new
-features associated with Metaphor's rename to Exa. New site is https://exa.ai
-
-## Installation
-
-```bash
-pip install exa_py
-```
-
-## Usage
-
-Import the package and initialize the Exa client with your API key:
-
-```python
-from exa_py import Exa
-
-exa = Exa(api_key="your-api-key")
-```
-
-## Common requests
-```python
-
-# basic search
-results = exa.search("This is a Exa query:")
-
-# autoprompted search
-results = exa.search("autopromptable query", use_autoprompt=True)
-
-# keyword search (non-neural)
-results = exa.search("Google-style query", type="keyword")
-
-# search with date filters
-results = exa.search("This is a Exa query:", start_published_date="2019-01-01", end_published_date="2019-01-31")
-
-# search with domain filters
-results = exa.search("This is a Exa query:", include_domains=["www.cnn.com", "www.nytimes.com"])
-
-# search and get text contents
-results = exa.search_and_contents("This is a Exa query:")
-
-# search and get highlights
-results = exa.search_and_contents("This is a Exa query:", highlights=True)
-
-# search and get contents with contents options
-results = exa.search_and_contents("This is a Exa query:",
-  text={"include_html_tags": True, "max_characters": 1000},
-  highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
-
-# find similar documents
-results = exa.find_similar("https://example.com")
-
-# find similar excluding source domain
-results = exa.find_similar("https://example.com", exclude_source_domain=True)
-
-# find similar with contents
-results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)
-
-# get text contents
-results = exa.get_contents(["ids"])
-
-# get highlights
-results = exa.get_contents(["ids"], highlights=True)
-
-# get contents with contents options
-results = exa.get_contents(["ids"],
-  text={"include_html_tags": True, "max_characters": 1000},
-  highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
-```
-
exa_py-1.7.2/exa_py.egg-info/SOURCES.txt
DELETED

@@ -1,12 +0,0 @@
-README.md
-pyproject.toml
-setup.py
-exa_py/__init__.py
-exa_py/api.py
-exa_py/py.typed
-exa_py/utils.py
-exa_py.egg-info/PKG-INFO
-exa_py.egg-info/SOURCES.txt
-exa_py.egg-info/dependency_links.txt
-exa_py.egg-info/requires.txt
-exa_py.egg-info/top_level.txt
exa_py-1.7.2/exa_py.egg-info/dependency_links.txt
DELETED

@@ -1 +0,0 @@
-
exa_py-1.7.2/exa_py.egg-info/top_level.txt
DELETED

@@ -1 +0,0 @@
-exa_py
exa_py-1.7.2/setup.cfg
DELETED

exa_py-1.7.2/setup.py
DELETED

@@ -1,30 +0,0 @@
-from setuptools import setup, find_packages
-
-setup(
-    name="exa_py",
-    version="1.7.2",
-    description="Python SDK for Exa API.",
-    long_description_content_type="text/markdown",
-    long_description=open("README.md").read(),
-    author="Exa",
-    author_email="hello@exa.ai",
-    package_data={"exa_py": ["py.typed"]},
-    url="https://github.com/exa-labs/exa-py",
-    packages=find_packages(),
-    install_requires=[
-        "requests",
-        "typing-extensions",
-        "openai>=1.10.0"
-    ],
-    classifiers=[
-        "Development Status :: 5 - Production/Stable",
-        "Intended Audience :: Developers",
-        "License :: OSI Approved :: MIT License",
-        "Typing :: Typed",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-    ],
-)
{exa_py-1.7.2 → exa_py-1.8.3}/exa_py/__init__.py: File without changes
{exa_py-1.7.2 → exa_py-1.8.3}/exa_py/py.typed: File without changes
{exa_py-1.7.2 → exa_py-1.8.3}/exa_py/utils.py: File without changes