exa-py 1.9.0__py3-none-any.whl → 1.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- exa_py/__init__.py +1 -0
- exa_py/api.py +410 -7
- exa_py/websets/__init__.py +5 -0
- exa_py/websets/_generator/pydantic/BaseModel.jinja2 +42 -0
- exa_py/websets/client.py +126 -0
- exa_py/websets/core/__init__.py +9 -0
- exa_py/websets/core/base.py +41 -0
- exa_py/websets/enrichments/__init__.py +3 -0
- exa_py/websets/enrichments/client.py +65 -0
- exa_py/websets/items/__init__.py +3 -0
- exa_py/websets/items/client.py +78 -0
- exa_py/websets/searches/__init__.py +3 -0
- exa_py/websets/searches/client.py +52 -0
- exa_py/websets/types.py +1054 -0
- exa_py/websets/webhooks/__init__.py +3 -0
- exa_py/websets/webhooks/client.py +80 -0
- {exa_py-1.9.0.dist-info → exa_py-1.10.0.dist-info}/METADATA +10 -5
- exa_py-1.10.0.dist-info/RECORD +21 -0
- {exa_py-1.9.0.dist-info → exa_py-1.10.0.dist-info}/WHEEL +1 -1
- exa_py-1.9.0.dist-info/RECORD +0 -7
exa_py/__init__.py
CHANGED
exa_py/api.py
CHANGED
@@ -22,6 +22,7 @@ from typing import (
     overload,
 )
 
+import httpx
 import requests
 from openai import OpenAI
 from openai.types.chat.chat_completion_message_param import ChatCompletionMessageParam
@@ -34,6 +35,7 @@ from exa_py.utils import (
     format_exa_result,
     maybe_get_query,
 )
+from .websets import WebsetsClient
 
 is_beta = os.getenv("IS_BETA") == "True"
 
@@ -729,6 +731,56 @@ class StreamAnswerResponse:
         self._raw_response.close()
 
 
+class AsyncStreamAnswerResponse:
+    """A class representing a streaming answer response."""
+
+    def __init__(self, raw_response: httpx.Response):
+        self._raw_response = raw_response
+        self._ensure_ok_status()
+
+    def _ensure_ok_status(self):
+        if self._raw_response.status_code != 200:
+            raise ValueError(
+                f"Request failed with status code {self._raw_response.status_code}: {self._raw_response.text}"
+            )
+
+    def __aiter__(self):
+        async def generator():
+            async for line in self._raw_response.aiter_lines():
+                if not line:
+                    continue
+                decoded_line = line.removeprefix("data: ")
+                try:
+                    chunk = json.loads(decoded_line)
+                except json.JSONDecodeError:
+                    continue
+
+                content = None
+                citations = None
+
+                if "choices" in chunk and chunk["choices"]:
+                    if "delta" in chunk["choices"][0]:
+                        content = chunk["choices"][0]["delta"].get("content")
+
+                if (
+                    "citations" in chunk
+                    and chunk["citations"]
+                    and chunk["citations"] != "null"
+                ):
+                    citations = [
+                        AnswerResult(**to_snake_case(s)) for s in chunk["citations"]
+                    ]
+
+                stream_chunk = StreamChunk(content=content, citations=citations)
+                if stream_chunk.has_data():
+                    yield stream_chunk
+        return generator()
+
+    def close(self) -> None:
+        """Close the underlying raw response to release the network socket."""
+        self._raw_response.close()
+
+
 T = TypeVar("T")
 
 
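For orientation, a hedged sketch of consuming this class once an AsyncStreamAnswerResponse is in hand; the `stream` argument is assumed to come from the AsyncExa.stream_answer method added later in this diff.

    # Hedged sketch: iterate an AsyncStreamAnswerResponse; `stream` is assumed to be
    # the return value of AsyncExa.stream_answer(...) shown further down in this diff.
    async def consume(stream):
        async for chunk in stream:            # each chunk is a StreamChunk
            if chunk.content:
                print(chunk.content, end="", flush=True)
            if chunk.citations:
                print(chunk.citations)
        stream.close()                        # release the underlying socket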
@@ -788,7 +840,7 @@ class Exa:
         self,
         api_key: Optional[str],
         base_url: str = "https://api.exa.ai",
-        user_agent: str = "exa-py 1.9.0",
+        user_agent: str = "exa-py 1.10.0",
     ):
         """Initialize the Exa client with the provided API key and optional base URL and user agent.
 
@@ -806,13 +858,16 @@ class Exa:
         )
         self.base_url = base_url
         self.headers = {"x-api-key": api_key, "User-Agent": user_agent}
+        self.websets = WebsetsClient(self)
 
-    def request(self, endpoint: str, data):
-        """Send a POST request to the Exa API, optionally streaming if data['stream'] is True.
+    def request(self, endpoint: str, data=None, method="POST", params=None):
+        """Send a request to the Exa API, optionally streaming if data['stream'] is True.
 
         Args:
             endpoint (str): The API endpoint (path).
-            data (dict): The JSON payload to send.
+            data (dict, optional): The JSON payload to send. Defaults to None.
+            method (str, optional): The HTTP method to use. Defaults to "POST".
+            params (dict, optional): Query parameters to include. Defaults to None.
 
         Returns:
             Union[dict, requests.Response]: If streaming, returns the Response object.
@@ -821,14 +876,32 @@ class Exa:
         Raises:
             ValueError: If the request fails (non-200 status code).
         """
-        if data.get("stream"):
+        if data and data.get("stream"):
             res = requests.post(
                 self.base_url + endpoint, json=data, headers=self.headers, stream=True
             )
             return res
 
-        res = requests.post(self.base_url + endpoint, json=data, headers=self.headers)
-        if res.status_code != 200:
+        if method.upper() == "GET":
+            res = requests.get(
+                self.base_url + endpoint, headers=self.headers, params=params
+            )
+        elif method.upper() == "POST":
+            res = requests.post(
+                self.base_url + endpoint, json=data, headers=self.headers
+            )
+        elif method.upper() == "PATCH":
+            res = requests.patch(
+                self.base_url + endpoint, json=data, headers=self.headers
+            )
+        elif method.upper() == "DELETE":
+            res = requests.delete(
+                self.base_url + endpoint, headers=self.headers
+            )
+        else:
+            raise ValueError(f"Unsupported HTTP method: {method}")
+
+        if res.status_code >= 400:
             raise ValueError(
                 f"Request failed with status code {res.status_code}: {res.text}"
             )
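For orientation, a hedged sketch of how the generalized request() is exercised by the Websets endpoints added below; the endpoint paths match this diff, while the API key and webset id are illustrative.

    # Hedged sketch: the new method/params arguments on Exa.request().
    from exa_py.api import Exa

    exa = Exa(api_key="YOUR_API_KEY")
    listing = exa.request("/v0/websets", method="GET", params={"limit": 10})
    exa.request("/v0/websets/ws_example", method="DELETE")  # "ws_example" is illustrative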
@@ -1815,3 +1888,333 @@ class Exa:
         options["stream"] = True
         raw_response = self.request("/answer", options)
         return StreamAnswerResponse(raw_response)
+
+class AsyncExa(Exa):
+    def __init__(self, api_key: str, api_base: str = "https://api.exa.ai"):
+        super().__init__(api_key, api_base)
+        self._client = None
+
+    @property
+    def client(self) -> httpx.AsyncClient:
+        # this may only be a
+        if self._client is None:
+            self._client = httpx.AsyncClient(
+                base_url=self.base_url,
+                headers=self.headers,
+                timeout=60
+            )
+        return self._client
+
+    async def async_request(self, endpoint: str, data):
+        """Send a POST request to the Exa API, optionally streaming if data['stream'] is True.
+
+        Args:
+            endpoint (str): The API endpoint (path).
+            data (dict): The JSON payload to send.
+
+        Returns:
+            Union[dict, requests.Response]: If streaming, returns the Response object.
+                Otherwise, returns the JSON-decoded response as a dict.
+
+        Raises:
+            ValueError: If the request fails (non-200 status code).
+        """
+        if data.get("stream"):
+            request = httpx.Request(
+                'POST',
+                self.base_url + endpoint,
+                json=data,
+                headers=self.headers
+            )
+            res = await self.client.send(request, stream=True)
+            return res
+
+        res = await self.client.post(self.base_url + endpoint, json=data, headers=self.headers)
+        if res.status_code != 200:
+            raise ValueError(
+                f"Request failed with status code {res.status_code}: {res.text}"
+            )
+        return res.json()
+
+    async def search(
+        self,
+        query: str,
+        *,
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        use_autoprompt: Optional[bool] = None,
+        type: Optional[str] = None,
+        category: Optional[str] = None,
+        flags: Optional[List[str]] = None,
+        moderation: Optional[bool] = None,
+    ) -> SearchResponse[_Result]:
+        """Perform a search with a prompt-engineered query to retrieve relevant results.
+
+        Args:
+            query (str): The query string.
+            num_results (int, optional): Number of search results to return (default 10).
+            include_domains (List[str], optional): Domains to include in the search.
+            exclude_domains (List[str], optional): Domains to exclude from the search.
+            start_crawl_date (str, optional): Only links crawled after this date.
+            end_crawl_date (str, optional): Only links crawled before this date.
+            start_published_date (str, optional): Only links published after this date.
+            end_published_date (str, optional): Only links published before this date.
+            include_text (List[str], optional): Strings that must appear in the page text.
+            exclude_text (List[str], optional): Strings that must not appear in the page text.
+            use_autoprompt (bool, optional): Convert query to Exa (default False).
+            type (str, optional): 'keyword' or 'neural' (default 'neural').
+            category (str, optional): e.g. 'company'
+            flags (List[str], optional): Experimental flags for Exa usage.
+            moderation (bool, optional): If True, the search results will be moderated for safety.
+
+        Returns:
+            SearchResponse: The response containing search results, etc.
+        """
+        options = {k: v for k, v in locals().items() if k != "self" and v is not None}
+        validate_search_options(options, SEARCH_OPTIONS_TYPES)
+        options = to_camel_case(options)
+        data = await self.async_request("/search", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
+        return SearchResponse(
+            [Result(**to_snake_case(result)) for result in data["results"]],
+            data["autopromptString"] if "autopromptString" in data else None,
+            data["resolvedSearchType"] if "resolvedSearchType" in data else None,
+            data["autoDate"] if "autoDate" in data else None,
+            cost_dollars=cost_dollars,
+        )
+
+    async def search_and_contents(self, query: str, **kwargs):
+        options = {k: v for k, v in {"query": query, **kwargs}.items() if v is not None}
+        # If user didn't ask for any particular content, default to text
+        if (
+            "text" not in options
+            and "highlights" not in options
+            and "summary" not in options
+            and "extras" not in options
+        ):
+            options["text"] = True
+
+        validate_search_options(
+            options,
+            {
+                **SEARCH_OPTIONS_TYPES,
+                **CONTENTS_OPTIONS_TYPES,
+                **CONTENTS_ENDPOINT_OPTIONS_TYPES,
+            },
+        )
+
+        # Nest the appropriate fields under "contents"
+        options = nest_fields(
+            options,
+            [
+                "text",
+                "highlights",
+                "summary",
+                "subpages",
+                "subpage_target",
+                "livecrawl",
+                "livecrawl_timeout",
+                "extras",
+            ],
+            "contents",
+        )
+        options = to_camel_case(options)
+        data = await self.async_request("/search", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
+        return SearchResponse(
+            [Result(**to_snake_case(result)) for result in data["results"]],
+            data["autopromptString"] if "autopromptString" in data else None,
+            data["resolvedSearchType"] if "resolvedSearchType" in data else None,
+            data["autoDate"] if "autoDate" in data else None,
+            cost_dollars=cost_dollars,
+        )
+
+    async def get_contents(self, urls: Union[str, List[str], List[_Result]], **kwargs):
+        options = {
+            k: v
+            for k, v in {"urls": urls, **kwargs}.items()
+            if k != "self" and v is not None
+        }
+        if (
+            "text" not in options
+            and "highlights" not in options
+            and "summary" not in options
+            and "extras" not in options
+        ):
+            options["text"] = True
+
+        validate_search_options(
+            options,
+            {**CONTENTS_OPTIONS_TYPES, **CONTENTS_ENDPOINT_OPTIONS_TYPES},
+        )
+        options = to_camel_case(options)
+        data = await self.async_request("/contents", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
+        return SearchResponse(
+            [Result(**to_snake_case(result)) for result in data["results"]],
+            data.get("autopromptString"),
+            data.get("resolvedSearchType"),
+            data.get("autoDate"),
+            cost_dollars=cost_dollars,
+        )
+
+    async def find_similar(
+        self,
+        url: str,
+        *,
+        num_results: Optional[int] = None,
+        include_domains: Optional[List[str]] = None,
+        exclude_domains: Optional[List[str]] = None,
+        start_crawl_date: Optional[str] = None,
+        end_crawl_date: Optional[str] = None,
+        start_published_date: Optional[str] = None,
+        end_published_date: Optional[str] = None,
+        include_text: Optional[List[str]] = None,
+        exclude_text: Optional[List[str]] = None,
+        exclude_source_domain: Optional[bool] = None,
+        category: Optional[str] = None,
+        flags: Optional[List[str]] = None,
+    ) -> SearchResponse[_Result]:
+        """Finds similar pages to a given URL, potentially with domain filters and date filters.
+
+        Args:
+            url (str): The URL to find similar pages for.
+            num_results (int, optional): Number of results to return. Default is None (server default).
+            include_domains (List[str], optional): Domains to include in the search.
+            exclude_domains (List[str], optional): Domains to exclude from the search.
+            start_crawl_date (str, optional): Only links crawled after this date.
+            end_crawl_date (str, optional): Only links crawled before this date.
+            start_published_date (str, optional): Only links published after this date.
+            end_published_date (str, optional): Only links published before this date.
+            include_text (List[str], optional): Strings that must appear in the page text.
+            exclude_text (List[str], optional): Strings that must not appear in the page text.
+            exclude_source_domain (bool, optional): Whether to exclude the source domain.
+            category (str, optional): A data category to focus on.
+            flags (List[str], optional): Experimental flags.
+
+        Returns:
+            SearchResponse[_Result]
+        """
+        options = {k: v for k, v in locals().items() if k != "self" and v is not None}
+        validate_search_options(options, FIND_SIMILAR_OPTIONS_TYPES)
+        options = to_camel_case(options)
+        data = await self.async_request("/findSimilar", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
+        return SearchResponse(
+            [Result(**to_snake_case(result)) for result in data["results"]],
+            data.get("autopromptString"),
+            data.get("resolvedSearchType"),
+            data.get("autoDate"),
+            cost_dollars=cost_dollars,
+        )
+
+    async def find_similar_and_contents(self, url: str, **kwargs):
+        options = {k: v for k, v in {"url": url, **kwargs}.items() if v is not None}
+        # Default to text if none specified
+        if (
+            "text" not in options
+            and "highlights" not in options
+            and "summary" not in options
+        ):
+            options["text"] = True
+
+        validate_search_options(
+            options,
+            {
+                **FIND_SIMILAR_OPTIONS_TYPES,
+                **CONTENTS_OPTIONS_TYPES,
+                **CONTENTS_ENDPOINT_OPTIONS_TYPES,
+            },
+        )
+        # We nest the content fields
+        options = nest_fields(
+            options,
+            [
+                "text",
+                "highlights",
+                "summary",
+                "subpages",
+                "subpage_target",
+                "livecrawl",
+                "livecrawl_timeout",
+                "extras",
+            ],
+            "contents",
+        )
+        options = to_camel_case(options)
+        data = await self.async_request("/findSimilar", options)
+        cost_dollars = parse_cost_dollars(data.get("costDollars"))
+        return SearchResponse(
+            [Result(**to_snake_case(result)) for result in data["results"]],
+            data.get("autopromptString"),
+            data.get("resolvedSearchType"),
+            data.get("autoDate"),
+            cost_dollars=cost_dollars,
+        )
+
+    async def answer(
+        self,
+        query: str,
+        *,
+        stream: Optional[bool] = False,
+        text: Optional[bool] = False,
+        model: Optional[Literal["exa", "exa-pro"]] = None,
+    ) -> Union[AnswerResponse, StreamAnswerResponse]:
+        """Generate an answer to a query using Exa's search and LLM capabilities.
+
+        Args:
+            query (str): The query to answer.
+            text (bool, optional): Whether to include full text in the results. Defaults to False.
+            model (str, optional): The model to use for answering. Either "exa" or "exa-pro". Defaults to None.
+
+        Returns:
+            AnswerResponse: An object containing the answer and citations.
+
+        Raises:
+            ValueError: If stream=True is provided. Use stream_answer() instead for streaming responses.
+        """
+        if stream:
+            raise ValueError(
+                "stream=True is not supported in `answer()`. "
+                "Please use `stream_answer(...)` for streaming."
+            )
+
+        options = {k: v for k, v in locals().items() if k != "self" and v is not None}
+        options = to_camel_case(options)
+        response = await self.async_request("/answer", options)
+
+        return AnswerResponse(
+            response["answer"],
+            [AnswerResult(**to_snake_case(result)) for result in response["citations"]],
+        )
+
+    async def stream_answer(
+        self,
+        query: str,
+        *,
+        text: bool = False,
+        model: Optional[Literal["exa", "exa-pro"]] = None,
+    ) -> AsyncStreamAnswerResponse:
+        """Generate a streaming answer response.
+
+        Args:
+            query (str): The query to answer.
+            text (bool): Whether to include full text in the results. Defaults to False.
+            model (str, optional): The model to use for answering. Either "exa" or "exa-pro". Defaults to None.
+
+        Returns:
+            AsyncStreamAnswerResponse: An object that can be iterated over to retrieve (partial text, partial citations).
+                Each iteration yields a tuple of (Optional[str], Optional[List[AnswerResult]]).
+        """
+        options = {k: v for k, v in locals().items() if k != "self" and v is not None}
+        options = to_camel_case(options)
+        options["stream"] = True
+        raw_response = await self.async_request("/answer", options)
+        return AsyncStreamAnswerResponse(raw_response)
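A minimal, hedged end-to-end sketch of the new AsyncExa client; the queries and printed attributes are illustrative, while SearchResponse.results, Result.title/url, and AnswerResponse.answer come from the existing synchronous API.

    # Hedged sketch of AsyncExa usage; queries and printed fields are illustrative.
    import asyncio

    from exa_py.api import AsyncExa

    async def main():
        exa = AsyncExa(api_key="YOUR_API_KEY")

        search = await exa.search("hottest AI startups", num_results=5)
        for result in search.results:
            print(result.title, result.url)

        answer = await exa.answer("What is the capital of France?")
        print(answer.answer)

    asyncio.run(main())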
exa_py/websets/_generator/pydantic/BaseModel.jinja2
ADDED
@@ -0,0 +1,42 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %} # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- if config %}
+    {%- filter indent(4) %}
+    {%- endfilter %}
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.name == "type" and field.field %}
+    type: Literal['{{ field.default }}']
+    {%- elif field.name == "object" and field.field %}
+    object: Literal['{{ field.default }}']
+    {%- elif not field.annotated and field.field %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    {{ field.name }}: {{ field.annotated }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none)) or field.data_type.is_optional
+    %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+    {%- for method in methods -%}
+    {{ method }}
+    {%- endfor -%}
+{%- endfor -%}
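For orientation, a template of this shape renders Pydantic models along roughly these lines; the class and field names below are illustrative, not an excerpt from the generated websets/types.py.

    # Illustrative shape of a generated model, not copied from types.py.
    from typing import Literal, Optional
    from pydantic import BaseModel

    class ExampleEvent(BaseModel):
        """Docstring emitted from the schema description."""
        type: Literal['example.created']
        object: Literal['event']
        name: Optional[str] = None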
exa_py/websets/client.py
ADDED
@@ -0,0 +1,126 @@
+from __future__ import annotations
+
+import time
+from datetime import datetime
+from typing import List, Optional, Literal
+
+from .types import (
+    Webset,
+    ListWebsetsResponse,
+    GetWebsetResponse,
+    UpdateWebsetRequest,
+    WebsetStatus,
+    CreateWebsetParameters,
+)
+from .core.base import WebsetsBaseClient
+from .items import WebsetItemsClient
+from .searches import WebsetSearchesClient
+from .enrichments import WebsetEnrichmentsClient
+from .webhooks import WebsetWebhooksClient
+
+class WebsetsClient(WebsetsBaseClient):
+    """Client for managing Websets."""
+
+    def __init__(self, client):
+        super().__init__(client)
+        self.items = WebsetItemsClient(client)
+        self.searches = WebsetSearchesClient(client)
+        self.enrichments = WebsetEnrichmentsClient(client)
+        self.webhooks = WebsetWebhooksClient(client)
+
+    def create(self, params: CreateWebsetParameters) -> Webset:
+        """Create a new Webset.
+
+        Args:
+            params (CreateWebsetRequest): The parameters for creating a webset.
+
+        Returns:
+            Webset: The created webset.
+        """
+        response = self.request("/v0/websets", data=params.model_dump(by_alias=True, exclude_none=True))
+        return Webset.model_validate(response)
+
+    def get(self, id: str, *, expand: Optional[List[Literal["items"]]] = None) -> GetWebsetResponse:
+        """Get a Webset by ID.
+
+        Args:
+            id (str): The id or externalId of the Webset.
+            expand (List[Literal["items"]], optional): Expand the response with specified resources.
+                Allowed values: ["items"]
+
+        Returns:
+            GetWebsetResponse: The retrieved webset.
+        """
+        params = {"expand": expand} if expand else {}
+        response = self.request(f"/v0/websets/{id}", params=params, method="GET")
+        return GetWebsetResponse.model_validate(response)
+
+    def list(self, *, cursor: Optional[str] = None, limit: Optional[int] = None) -> ListWebsetsResponse:
+        """List all Websets.
+
+        Args:
+            cursor (str, optional): The cursor to paginate through the results.
+            limit (int, optional): The number of results to return (max 200).
+
+        Returns:
+            ListWebsetsResponse: List of websets.
+        """
+        params = {k: v for k, v in {"cursor": cursor, "limit": limit}.items() if v is not None}
+        response = self.request("/v0/websets", params=params, method="GET")
+        return ListWebsetsResponse.model_validate(response)
+
+    def update(self, id: str, params: UpdateWebsetRequest) -> Webset:
+        """Update a Webset.
+
+        Args:
+            id (str): The id or externalId of the Webset.
+            params (UpdateWebsetRequest): The parameters for updating a webset.
+
+        Returns:
+            Webset: The updated webset.
+        """
+        response = self.request(f"/v0/websets/{id}", data=params.model_dump(by_alias=True, exclude_none=True), method="POST")
+        return Webset.model_validate(response)
+
+    def delete(self, id: str) -> Webset:
+        """Delete a Webset.
+
+        Args:
+            id (str): The id or externalId of the Webset.
+
+        Returns:
+            Webset: The deleted webset.
+        """
+        response = self.request(f"/v0/websets/{id}", method="DELETE")
+        return Webset.model_validate(response)
+
+    def cancel(self, id: str) -> Webset:
+        """Cancel a running Webset.
+
+        Args:
+            id (str): The id or externalId of the Webset.
+
+        Returns:
+            Webset: The canceled webset.
+        """
+        response = self.request(f"/v0/websets/{id}/cancel", method="POST")
+        return Webset.model_validate(response)
+
+    def wait_until_idle(self, id: str, *, timeout: Optional[int] = None) -> Webset:
+        """Wait until a Webset is idle.
+
+        Args:
+            id (str): The id or externalId of the Webset.
+
+        Returns:
+            Webset: The webset.
+        """
+        start_time = time.time()
+        while True:
+            webset = self.get(id)
+            if webset.status == WebsetStatus.idle:
+                break
+            time.sleep(1)
+            if timeout and time.time() - start_time > timeout:
+                raise Exception("Webset timed out")
+        return webset
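A short, hedged sketch of reaching the new Websets surface from the synchronous client; the methods and endpoints match this diff, while the webset id and timeout value are illustrative.

    # Hedged sketch: the websets sub-client attached to Exa in this release.
    from exa_py.api import Exa

    exa = Exa(api_key="YOUR_API_KEY")
    page = exa.websets.list(limit=25)                          # GET /v0/websets
    webset = exa.websets.get("ws_example", expand=["items"])   # "ws_example" is illustrative
    webset = exa.websets.wait_until_idle("ws_example", timeout=600)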
exa_py/websets/core/__init__.py
ADDED
@@ -0,0 +1,9 @@
+from ..types import *
+import sys
+
+# Get all public names from model module that don't start with underscore
+model_module = sys.modules[__name__]
+__all__ = ['WebsetsBaseClient', 'ExaBaseModel'] + [
+    name for name in dir(model_module)
+    if not name.startswith('_') and name not in ('WebsetsBaseClient', 'ExaBaseModel')
+]