exa-py 1.7.2__tar.gz → 1.7.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of exa-py might be problematic.
- {exa_py-1.7.2/exa_py.egg-info → exa_py-1.7.3}/PKG-INFO +13 -12
- {exa_py-1.7.2 → exa_py-1.7.3}/README.md +3 -3
- {exa_py-1.7.2 → exa_py-1.7.3}/exa_py/api.py +23 -26
- {exa_py-1.7.2 → exa_py-1.7.3}/pyproject.toml +1 -1
- exa_py-1.7.2/PKG-INFO +0 -91
- exa_py-1.7.2/exa_py.egg-info/SOURCES.txt +0 -12
- exa_py-1.7.2/exa_py.egg-info/dependency_links.txt +0 -1
- exa_py-1.7.2/exa_py.egg-info/requires.txt +0 -3
- exa_py-1.7.2/exa_py.egg-info/top_level.txt +0 -1
- exa_py-1.7.2/setup.cfg +0 -4
- exa_py-1.7.2/setup.py +0 -30
- {exa_py-1.7.2 → exa_py-1.7.3}/exa_py/__init__.py +0 -0
- {exa_py-1.7.2 → exa_py-1.7.3}/exa_py/py.typed +0 -0
- {exa_py-1.7.2 → exa_py-1.7.3}/exa_py/utils.py +0 -0
{exa_py-1.7.2/exa_py.egg-info → exa_py-1.7.3}/PKG-INFO

@@ -1,19 +1,19 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: exa-py
-Version: 1.7.2
+Version: 1.7.3
 Summary: Python SDK for Exa API.
-Home-page: https://github.com/exa-labs/exa-py
-Author: Exa
+Author: Exa AI
 Author-email: hello@exa.ai
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Typing :: Typed
-Classifier: Programming Language :: Python :: 3.8
+Requires-Python: >=3.9,<4.0
+Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Requires-Dist: openai (>=1.48,<2.0)
+Requires-Dist: requests (>=2.32.3,<3.0.0)
+Requires-Dist: typing-extensions (>=4.12.2,<5.0.0)
 Description-Content-Type: text/markdown
 
 # Exa
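The metadata hunk above narrows the supported interpreter range (Requires-Python >=3.9,<4.0) and pins the runtime dependencies. As a quick, illustrative check (not part of the package) that an installed environment satisfies the new constraints:

```python
# Illustrative only: compare an environment against the 1.7.3 metadata shown above.
import sys
from importlib.metadata import PackageNotFoundError, version

assert (3, 9) <= sys.version_info[:2] < (4, 0), "exa-py 1.7.3 declares Requires-Python >=3.9,<4.0"
for dist in ("openai", "requests", "typing-extensions"):
    try:
        print(dist, version(dist))  # compare against the Requires-Dist ranges above
    except PackageNotFoundError:
        print(dist, "not installed")
```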
@@ -78,14 +78,15 @@ exa = Exa(api_key="your-api-key")
 results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)
 
 # get text contents
-results = exa.get_contents(["ids"])
+results = exa.get_contents(["urls"])
 
 # get highlights
-results = exa.get_contents(["ids"], highlights=True)
+results = exa.get_contents(["urls"], highlights=True)
 
 # get contents with contents options
-results = exa.get_contents(["ids"],
+results = exa.get_contents(["urls"],
                            text={"include_html_tags": True, "max_characters": 1000},
                            highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
 ```
 
+
{exa_py-1.7.2 → exa_py-1.7.3}/README.md

@@ -60,13 +60,13 @@ exa = Exa(api_key="your-api-key")
 results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)
 
 # get text contents
-results = exa.get_contents(["ids"])
+results = exa.get_contents(["urls"])
 
 # get highlights
-results = exa.get_contents(["ids"], highlights=True)
+results = exa.get_contents(["urls"], highlights=True)
 
 # get contents with contents options
-results = exa.get_contents(["ids"],
+results = exa.get_contents(["urls"],
                            text={"include_html_tags": True, "max_characters": 1000},
                            highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
 ```
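The README change above tracks the 1.7.3 rename of get_contents' first argument from result IDs to URLs. A minimal sketch of the updated call, assuming a valid API key (the URL and query strings are placeholders):

```python
from exa_py import Exa

exa = Exa(api_key="your-api-key")  # placeholder key

# get_contents is now keyed by URLs (or previously returned results) rather than result IDs
results = exa.get_contents(
    ["https://example.com"],
    text={"include_html_tags": True, "max_characters": 1000},
    highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "example highlight query"},
)
```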
{exa_py-1.7.2 → exa_py-1.7.3}/exa_py/api.py

@@ -100,13 +100,13 @@ SEARCH_OPTIONS_TYPES = {
     "exclude_domains": [list], # Domains to omit; exclusive with 'include_domains'.
     "start_crawl_date": [str], # Results after this crawl date. ISO 8601 format.
     "end_crawl_date": [str], # Results before this crawl date. ISO 8601 format.
-    "start_published_date": [str], # Results after this publish date; excludes links with no date.
-    "end_published_date": [str], # Results before this publish date; excludes links with no date.
+    "start_published_date": [str], # Results after this publish date; excludes links with no date. ISO 8601 format.
+    "end_published_date": [str], # Results before this publish date; excludes links with no date. ISO 8601 format.
     "include_text": [list], # Must be present in webpage text. (One string, up to 5 words)
     "exclude_text": [list], # Must not be present in webpage text. (One string, up to 5 words)
     "use_autoprompt": [bool], # Convert query to Exa. (Default: false)
-    "type": [str], # 'keyword' or '
-    "category": [str], #
+    "type": [str], # 'keyword', 'neural', or 'auto' (Default: auto).'neural' uses embeddings search, 'keyword' is SERP and 'auto' decides the best search type based on your query
+    "category": [str], # A data category to focus on: 'company', 'research paper', 'news', 'pdf', 'github', 'tweet', 'personal site', 'linkedin profile', 'financial report'
     "flags": [list], # Experimental flags array for Exa usage.
 }
 
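The rewritten comments above document the 'auto' search type and the category filter. A hedged sketch of a search call combining these options, reusing the `exa` client from the README example (query, dates, and category values are illustrative):

```python
# Illustrative search combining the options documented in SEARCH_OPTIONS_TYPES.
results = exa.search(
    "state of retrieval-augmented generation",
    type="auto",                        # 'keyword', 'neural', or 'auto'
    category="research paper",          # one of the documented data categories
    start_published_date="2024-01-01",  # ISO 8601
    end_published_date="2024-12-31",    # ISO 8601
    use_autoprompt=True,
)
```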
@@ -130,7 +130,7 @@ FIND_SIMILAR_OPTIONS_TYPES = {
 LIVECRAWL_OPTIONS = Literal["always", "fallback", "never", "auto"]
 
 CONTENTS_OPTIONS_TYPES = {
-    "ids": [list],
+    "urls": [list],
     "text": [dict, bool],
     "highlights": [dict, bool],
     "summary": [dict, bool],
@@ -216,7 +216,6 @@ class SummaryContentsOptions(TypedDict, total=False):
 
     query: str
 
-
 class ExtrasOptions(TypedDict, total=False):
     """A class representing additional extraction fields (e.g. links, images)"""
 

@@ -273,7 +272,8 @@ class _Result:
             f"Published Date: {self.published_date}\n"
             f"Author: {self.author}\n"
             f"Image: {self.image}\n"
-            f"
+            f"Favicon: {self.favicon}\n"
+            f"Extras: {self.extras}\n"
             f"Subpages: {self.subpages}\n"
         )
 
@@ -548,7 +548,7 @@ class Exa:
         self,
         api_key: Optional[str],
         base_url: str = "https://api.exa.ai",
-        user_agent: str = "exa-py 1.7.2",
+        user_agent: str = "exa-py 1.7.3",
     ):
         """Initialize the Exa client with the provided API key and optional base URL and user agent.
 
@@ -894,7 +894,7 @@
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         livecrawl_timeout: Optional[int] = None,
         livecrawl: Optional[LIVECRAWL_OPTIONS] = None,
        filter_empty_results: Optional[bool] = None,

@@ -908,7 +908,7 @@
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         text: Union[TextContentsOptions, Literal[True]],
         livecrawl_timeout: Optional[int] = None,

@@ -924,7 +924,7 @@
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         highlights: Union[HighlightsContentsOptions, Literal[True]],
         livecrawl_timeout: Optional[int] = None,

@@ -940,7 +940,7 @@
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         text: Union[TextContentsOptions, Literal[True]],
         highlights: Union[HighlightsContentsOptions, Literal[True]],

@@ -957,7 +957,7 @@
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         summary: Union[SummaryContentsOptions, Literal[True]],
         livecrawl_timeout: Optional[int] = None,

@@ -973,7 +973,7 @@
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         text: Union[TextContentsOptions, Literal[True]],
         summary: Union[SummaryContentsOptions, Literal[True]],

@@ -990,7 +990,7 @@
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         highlights: Union[HighlightsContentsOptions, Literal[True]],
         summary: Union[SummaryContentsOptions, Literal[True]],

@@ -1007,7 +1007,7 @@
     @overload
     def get_contents(
         self,
-        ids: Union[str, List[str], List[_Result]],
+        urls: Union[str, List[str], List[_Result]],
         *,
         text: Union[TextContentsOptions, Literal[True]],
         highlights: Union[HighlightsContentsOptions, Literal[True]],
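The eight overload hunks above make the same one-line change: the first parameter of every get_contents overload is now urls. As a standalone, simplified illustration of the pattern (class and result names here are invented for the sketch, not taken from exa_py), @overload stubs let type checkers narrow the return type based on which content options are passed:

```python
from typing import List, Literal, Union, overload

class TextResult: ...
class HighlightsResult: ...

class DemoClient:
    @overload
    def get_contents(self, urls: Union[str, List[str]]) -> List[TextResult]: ...
    @overload
    def get_contents(
        self, urls: Union[str, List[str]], *, highlights: Literal[True]
    ) -> List[HighlightsResult]: ...
    def get_contents(self, urls, **kwargs):
        # Single runtime implementation; the @overload stubs above exist only for type checkers.
        return []
```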
@@ -1021,16 +1021,13 @@
         flags: Optional[List[str]] = None,
     ) -> SearchResponse[ResultWithTextAndHighlightsAndSummary]:
         ...
-
-
-
-
-
-
-
-            and "summary" not in options
-            and "extras" not in options
-        ):
+    def get_contents(self, urls: Union[str, List[str], List[_Result]], **kwargs):
+        options = {
+            k: v
+            for k, v in {"urls": urls, **kwargs}.items()
+            if k != "self" and v is not None
+        }
+        if "text" not in options and "highlights" not in options and "summary" not in options and "extras" not in options:
             options["text"] = True
 
         validate_search_options(
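The consolidated implementation above drops None-valued keyword arguments and falls back to text=True when no content type is requested. A standalone sketch of that filtering behaviour (the helper name is illustrative, not part of the library):

```python
# Illustrative helper mirroring the option-filtering logic shown in the hunk above.
def build_contents_options(urls, **kwargs):
    options = {k: v for k, v in {"urls": urls, **kwargs}.items() if v is not None}
    if not any(k in options for k in ("text", "highlights", "summary", "extras")):
        options["text"] = True  # default when no content type was requested
    return options

print(build_contents_options(["https://example.com"]))
# {'urls': ['https://example.com'], 'text': True}
print(build_contents_options(["https://example.com"], highlights=True))
# {'urls': ['https://example.com'], 'highlights': True}
```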
exa_py-1.7.2/PKG-INFO DELETED

@@ -1,91 +0,0 @@
-Metadata-Version: 2.1
-Name: exa_py
-Version: 1.7.2
-Summary: Python SDK for Exa API.
-Home-page: https://github.com/exa-labs/exa-py
-Author: Exa
-Author-email: hello@exa.ai
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Typing :: Typed
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Description-Content-Type: text/markdown
-
-# Exa
-
-Exa (formerly Metaphor) API in Python
-
-Note: This API is basically the same as `metaphor-python` but reflects new
-features associated with Metaphor's rename to Exa. New site is https://exa.ai
-
-## Installation
-
-```bash
-pip install exa_py
-```
-
-## Usage
-
-Import the package and initialize the Exa client with your API key:
-
-```python
-from exa_py import Exa
-
-exa = Exa(api_key="your-api-key")
-```
-
-## Common requests
-```python
-
-# basic search
-results = exa.search("This is a Exa query:")
-
-# autoprompted search
-results = exa.search("autopromptable query", use_autoprompt=True)
-
-# keyword search (non-neural)
-results = exa.search("Google-style query", type="keyword")
-
-# search with date filters
-results = exa.search("This is a Exa query:", start_published_date="2019-01-01", end_published_date="2019-01-31")
-
-# search with domain filters
-results = exa.search("This is a Exa query:", include_domains=["www.cnn.com", "www.nytimes.com"])
-
-# search and get text contents
-results = exa.search_and_contents("This is a Exa query:")
-
-# search and get highlights
-results = exa.search_and_contents("This is a Exa query:", highlights=True)
-
-# search and get contents with contents options
-results = exa.search_and_contents("This is a Exa query:",
-                                  text={"include_html_tags": True, "max_characters": 1000},
-                                  highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
-
-# find similar documents
-results = exa.find_similar("https://example.com")
-
-# find similar excluding source domain
-results = exa.find_similar("https://example.com", exclude_source_domain=True)
-
-# find similar with contents
-results = exa.find_similar_and_contents("https://example.com", text=True, highlights=True)
-
-# get text contents
-results = exa.get_contents(["ids"])
-
-# get highlights
-results = exa.get_contents(["ids"], highlights=True)
-
-# get contents with contents options
-results = exa.get_contents(["ids"],
-                           text={"include_html_tags": True, "max_characters": 1000},
-                           highlights={"highlights_per_url": 2, "num_sentences": 1, "query": "This is the highlight query:"})
-```
-
exa_py-1.7.2/exa_py.egg-info/SOURCES.txt DELETED

@@ -1,12 +0,0 @@
-README.md
-pyproject.toml
-setup.py
-exa_py/__init__.py
-exa_py/api.py
-exa_py/py.typed
-exa_py/utils.py
-exa_py.egg-info/PKG-INFO
-exa_py.egg-info/SOURCES.txt
-exa_py.egg-info/dependency_links.txt
-exa_py.egg-info/requires.txt
-exa_py.egg-info/top_level.txt
exa_py-1.7.2/exa_py.egg-info/dependency_links.txt DELETED

@@ -1 +0,0 @@
-
exa_py-1.7.2/exa_py.egg-info/top_level.txt DELETED

@@ -1 +0,0 @@
-exa_py
exa_py-1.7.2/setup.cfg DELETED

exa_py-1.7.2/setup.py DELETED

@@ -1,30 +0,0 @@
-from setuptools import setup, find_packages
-
-setup(
-    name="exa_py",
-    version="1.7.2",
-    description="Python SDK for Exa API.",
-    long_description_content_type="text/markdown",
-    long_description=open("README.md").read(),
-    author="Exa",
-    author_email="hello@exa.ai",
-    package_data={"exa_py": ["py.typed"]},
-    url="https://github.com/exa-labs/exa-py",
-    packages=find_packages(),
-    install_requires=[
-        "requests",
-        "typing-extensions",
-        "openai>=1.10.0"
-    ],
-    classifiers=[
-        "Development Status :: 5 - Production/Stable",
-        "Intended Audience :: Developers",
-        "License :: OSI Approved :: MIT License",
-        "Typing :: Typed",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-    ],
-)
Files without changes: exa_py/__init__.py, exa_py/py.typed, exa_py/utils.py