athena-intelligence 0.1.40__py3-none-any.whl → 0.1.41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
athena/__init__.py CHANGED
@@ -2,16 +2,18 @@
 
 from .types import (
     Dataset,
+    Document,
+    ExcecuteToolFirstWorkflowOut,
     FirecrawlScrapeUrlDataReponseDto,
     FirecrawlScrapeUrlMetadata,
     GetDatasetsResponse,
     GetSnippetsResponse,
     HttpValidationError,
+    LangchainDocumentsRequestOut,
     MessageOut,
     MessageOutDto,
     Model,
     Report,
-    ScrapeMarkdownResult,
     Snippet,
     SqlResults,
     StatusEnum,
@@ -23,27 +25,29 @@ from .types import (
 )
 from .errors import UnprocessableEntityError
 from . import chain, dataset, message, query, report, search, snippet, tools
-from .chain import StructuredToolDataParsingModel
+from .chain import StructuredParseInParsingModel
 from .environment import AthenaEnvironment
 
 __all__ = [
     "AthenaEnvironment",
     "Dataset",
+    "Document",
+    "ExcecuteToolFirstWorkflowOut",
     "FirecrawlScrapeUrlDataReponseDto",
     "FirecrawlScrapeUrlMetadata",
     "GetDatasetsResponse",
     "GetSnippetsResponse",
     "HttpValidationError",
+    "LangchainDocumentsRequestOut",
     "MessageOut",
     "MessageOutDto",
     "Model",
     "Report",
-    "ScrapeMarkdownResult",
     "Snippet",
     "SqlResults",
     "StatusEnum",
+    "StructuredParseInParsingModel",
     "StructuredParseResult",
-    "StructuredToolDataParsingModel",
     "Tools",
     "UnprocessableEntityError",
     "UrlResult",
athena/chain/__init__.py CHANGED
@@ -1,5 +1,5 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from .types import StructuredToolDataParsingModel
+from .types import StructuredParseInParsingModel
 
-__all__ = ["StructuredToolDataParsingModel"]
+__all__ = ["StructuredParseInParsingModel"]
athena/chain/client.py CHANGED
@@ -12,7 +12,7 @@ from ..core.request_options import RequestOptions
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
 from ..types.http_validation_error import HttpValidationError
 from ..types.structured_parse_result import StructuredParseResult
-from .types.structured_tool_data_parsing_model import StructuredToolDataParsingModel
+from .types.structured_parse_in_parsing_model import StructuredParseInParsingModel
 
 try:
     import pydantic.v1 as pydantic  # type: ignore
@@ -32,7 +32,7 @@ class ChainClient:
         *,
         text_input: str,
         custom_type_dict: typing.Dict[str, typing.Any],
-        parsing_model: typing.Optional[StructuredToolDataParsingModel] = OMIT,
+        parsing_model: typing.Optional[StructuredParseInParsingModel] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> StructuredParseResult:
         """
@@ -41,19 +41,19 @@ class ChainClient:
 
             - custom_type_dict: typing.Dict[str, typing.Any]. A dictionary of field names and their default values.
 
-            - parsing_model: typing.Optional[StructuredToolDataParsingModel]. The model to be used for parsing.
+            - parsing_model: typing.Optional[StructuredParseInParsingModel]. The model to be used for parsing.
 
             - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
         ---
+        from athena import StructuredParseInParsingModel
         from athena.client import Athena
 
-        client = Athena(
-            api_key="YOUR_API_KEY",
-        )
-        client.chain.structured_parse(
-            text_input="text_input",
-            custom_type_dict={},
-        )
+        client = Athena(api_key="YOUR_API_KEY", )
+        client.chain.structured_parse(text_input='Athena is an AI-native analytics platform and artificial employee built to accelerate analytics workflows
+        by offering enterprise teams co-pilot and auto-pilot modes. Athena learns your workflow as a co-pilot,
+        allowing you to hand over controls to her for autonomous execution with confidence."
+
+        Give me all of the modes Athena provides.', custom_type_dict={"modes": {}}, parsing_model=StructuredParseInParsingModel.GPT_4_TURBO, )
         """
         _request: typing.Dict[str, typing.Any] = {"text_input": text_input, "custom_type_dict": custom_type_dict}
         if parsing_model is not OMIT:
@@ -104,7 +104,7 @@ class AsyncChainClient:
         *,
         text_input: str,
         custom_type_dict: typing.Dict[str, typing.Any],
-        parsing_model: typing.Optional[StructuredToolDataParsingModel] = OMIT,
+        parsing_model: typing.Optional[StructuredParseInParsingModel] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> StructuredParseResult:
         """
@@ -113,19 +113,19 @@ class AsyncChainClient:
 
             - custom_type_dict: typing.Dict[str, typing.Any]. A dictionary of field names and their default values.
 
-            - parsing_model: typing.Optional[StructuredToolDataParsingModel]. The model to be used for parsing.
+            - parsing_model: typing.Optional[StructuredParseInParsingModel]. The model to be used for parsing.
 
             - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
         ---
+        from athena import StructuredParseInParsingModel
         from athena.client import AsyncAthena
 
-        client = AsyncAthena(
-            api_key="YOUR_API_KEY",
-        )
-        await client.chain.structured_parse(
-            text_input="text_input",
-            custom_type_dict={},
-        )
+        client = AsyncAthena(api_key="YOUR_API_KEY", )
+        await client.chain.structured_parse(text_input='Athena is an AI-native analytics platform and artificial employee built to accelerate analytics workflows
+        by offering enterprise teams co-pilot and auto-pilot modes. Athena learns your workflow as a co-pilot,
+        allowing you to hand over controls to her for autonomous execution with confidence."
+
+        Give me all of the modes Athena provides.', custom_type_dict={"modes": {}}, parsing_model=StructuredParseInParsingModel.GPT_4_TURBO, )
         """
         _request: typing.Dict[str, typing.Any] = {"text_input": text_input, "custom_type_dict": custom_type_dict}
         if parsing_model is not OMIT:
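
The new docstring example shipped in both clients opens its text_input with a single quote but closes the quoted narration with a double quote, so it will not paste cleanly. A runnable equivalent, assuming only the signatures shown in this diff, would be:

    from athena import StructuredParseInParsingModel
    from athena.client import Athena

    client = Athena(api_key="YOUR_API_KEY")
    result = client.chain.structured_parse(
        text_input=(
            "Athena is an AI-native analytics platform and artificial employee "
            "built to accelerate analytics workflows by offering enterprise teams "
            "co-pilot and auto-pilot modes.\n\n"
            "Give me all of the modes Athena provides."
        ),
        custom_type_dict={"modes": {}},  # field names mapped to default values
        parsing_model=StructuredParseInParsingModel.GPT_4_TURBO,
    )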
athena/chain/types/__init__.py CHANGED
@@ -1,5 +1,5 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from .structured_tool_data_parsing_model import StructuredToolDataParsingModel
+from .structured_parse_in_parsing_model import StructuredParseInParsingModel
 
-__all__ = ["StructuredToolDataParsingModel"]
+__all__ = ["StructuredParseInParsingModel"]
athena/chain/types/structured_tool_data_parsing_model.py → athena/chain/types/structured_parse_in_parsing_model.py RENAMED
@@ -6,7 +6,7 @@ import typing
 T_Result = typing.TypeVar("T_Result")
 
 
-class StructuredToolDataParsingModel(str, enum.Enum):
+class StructuredParseInParsingModel(str, enum.Enum):
     """
     The model to be used for parsing.
     """
@@ -33,21 +33,21 @@ class StructuredToolDataParsingModel(str, enum.Enum):
         claude_3_sonnet_20240229: typing.Callable[[], T_Result],
         claude_3_haiku_20240307: typing.Callable[[], T_Result],
     ) -> T_Result:
-        if self is StructuredToolDataParsingModel.GPT_4_TURBO:
+        if self is StructuredParseInParsingModel.GPT_4_TURBO:
             return gpt_4_turbo()
-        if self is StructuredToolDataParsingModel.GPT_4_TURBO_PREVIEW:
+        if self is StructuredParseInParsingModel.GPT_4_TURBO_PREVIEW:
             return gpt_4_turbo_preview()
-        if self is StructuredToolDataParsingModel.GPT_4:
+        if self is StructuredParseInParsingModel.GPT_4:
             return gpt_4()
-        if self is StructuredToolDataParsingModel.GPT_35_TURBO:
+        if self is StructuredParseInParsingModel.GPT_35_TURBO:
             return gpt_35_turbo()
-        if self is StructuredToolDataParsingModel.MIXTRAL_SMALL_8_X_7_B_0211:
+        if self is StructuredParseInParsingModel.MIXTRAL_SMALL_8_X_7_B_0211:
             return mixtral_small_8_x_7_b_0211()
-        if self is StructuredToolDataParsingModel.MISTRAL_LARGE_0224:
+        if self is StructuredParseInParsingModel.MISTRAL_LARGE_0224:
             return mistral_large_0224()
-        if self is StructuredToolDataParsingModel.CLAUDE_3_OPUS_20240229:
+        if self is StructuredParseInParsingModel.CLAUDE_3_OPUS_20240229:
             return claude_3_opus_20240229()
-        if self is StructuredToolDataParsingModel.CLAUDE_3_SONNET_20240229:
+        if self is StructuredParseInParsingModel.CLAUDE_3_SONNET_20240229:
             return claude_3_sonnet_20240229()
-        if self is StructuredToolDataParsingModel.CLAUDE_3_HAIKU_20240307:
+        if self is StructuredParseInParsingModel.CLAUDE_3_HAIKU_20240307:
             return claude_3_haiku_20240307()
athena/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "athena-intelligence",
-            "X-Fern-SDK-Version": "0.1.40",
+            "X-Fern-SDK-Version": "0.1.41",
         }
         headers["X-API-KEY"] = self.api_key
         return headers
athena/search/client.py CHANGED
@@ -11,7 +11,6 @@ from ..core.remove_none_from_dict import remove_none_from_dict
 from ..core.request_options import RequestOptions
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
 from ..types.http_validation_error import HttpValidationError
-from ..types.scrape_markdown_result import ScrapeMarkdownResult
 from ..types.url_result import UrlResult
 
 try:
@@ -60,9 +59,9 @@ class SearchClient:
             api_key="YOUR_API_KEY",
         )
         client.search.get_urls(
-            query="query",
-            num_urls=1,
-            tbs="tbs",
+            query="Dogs",
+            num_urls=10,
+            tbs="qdr:m",
         )
         """
         _request: typing.Dict[str, typing.Any] = {"query": query, "num_urls": num_urls, "tbs": tbs}
@@ -74,7 +73,7 @@ class SearchClient:
             _request["site"] = site
         _response = self._client_wrapper.httpx_client.request(
             "POST",
-            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/get-urls"),
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/search/get-urls"),
             params=jsonable_encoder(
                 request_options.get("additional_query_parameters") if request_options is not None else None
             ),
@@ -108,60 +107,6 @@ class SearchClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def get_markdown(
-        self, *, url: str, request_options: typing.Optional[RequestOptions] = None
-    ) -> ScrapeMarkdownResult:
-        """
-        Parameters:
-            - url: str.
-
-            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
-        ---
-        from athena.client import Athena
-
-        client = Athena(
-            api_key="YOUR_API_KEY",
-        )
-        client.search.get_markdown(
-            url="url",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            "POST",
-            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/get-markdown"),
-            params=jsonable_encoder(
-                request_options.get("additional_query_parameters") if request_options is not None else None
-            ),
-            json=jsonable_encoder({"url": url})
-            if request_options is None or request_options.get("additional_body_parameters") is None
-            else {
-                **jsonable_encoder({"url": url}),
-                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
-            },
-            headers=jsonable_encoder(
-                remove_none_from_dict(
-                    {
-                        **self._client_wrapper.get_headers(),
-                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
-                    }
-                )
-            ),
-            timeout=request_options.get("timeout_in_seconds")
-            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else 60,
-            retries=0,
-            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
-        )
-        if 200 <= _response.status_code < 300:
-            return pydantic.parse_obj_as(ScrapeMarkdownResult, _response.json())  # type: ignore
-        if _response.status_code == 422:
-            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
-        try:
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
 
 class AsyncSearchClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -200,9 +145,9 @@ class AsyncSearchClient:
             api_key="YOUR_API_KEY",
         )
         await client.search.get_urls(
-            query="query",
-            num_urls=1,
-            tbs="tbs",
+            query="Dogs",
+            num_urls=10,
+            tbs="qdr:m",
         )
         """
         _request: typing.Dict[str, typing.Any] = {"query": query, "num_urls": num_urls, "tbs": tbs}
@@ -214,7 +159,7 @@ class AsyncSearchClient:
             _request["site"] = site
         _response = await self._client_wrapper.httpx_client.request(
             "POST",
-            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/get-urls"),
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/search/get-urls"),
             params=jsonable_encoder(
                 request_options.get("additional_query_parameters") if request_options is not None else None
             ),
@@ -247,57 +192,3 @@ class AsyncSearchClient:
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    async def get_markdown(
-        self, *, url: str, request_options: typing.Optional[RequestOptions] = None
-    ) -> ScrapeMarkdownResult:
-        """
-        Parameters:
-            - url: str.
-
-            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
-        ---
-        from athena.client import AsyncAthena
-
-        client = AsyncAthena(
-            api_key="YOUR_API_KEY",
-        )
-        await client.search.get_markdown(
-            url="url",
-        )
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            "POST",
-            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/get-markdown"),
-            params=jsonable_encoder(
-                request_options.get("additional_query_parameters") if request_options is not None else None
-            ),
-            json=jsonable_encoder({"url": url})
-            if request_options is None or request_options.get("additional_body_parameters") is None
-            else {
-                **jsonable_encoder({"url": url}),
-                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
-            },
-            headers=jsonable_encoder(
-                remove_none_from_dict(
-                    {
-                        **self._client_wrapper.get_headers(),
-                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
-                    }
-                )
-            ),
-            timeout=request_options.get("timeout_in_seconds")
-            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else 60,
-            retries=0,
-            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
-        )
-        if 200 <= _response.status_code < 300:
-            return pydantic.parse_obj_as(ScrapeMarkdownResult, _response.json())  # type: ignore
-        if _response.status_code == 422:
-            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
-        try:
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
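
Two changes for search consumers follow from this file: the get-urls endpoint moved under the tools namespace (api/v0/tools/search/get-urls), and get_markdown is gone entirely. A minimal sketch of the surviving call, using the example values from the updated docstring (tbs appears to take Google-style time filters; "qdr:m" restricts to the past month):

    from athena.client import Athena

    client = Athena(api_key="YOUR_API_KEY")
    urls = client.search.get_urls(
        query="Dogs",
        num_urls=10,
        tbs="qdr:m",  # assumed Google-style time filter: past month
    )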
athena/tools/client.py CHANGED
@@ -10,8 +10,10 @@ from ..core.jsonable_encoder import jsonable_encoder
 from ..core.remove_none_from_dict import remove_none_from_dict
 from ..core.request_options import RequestOptions
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
+from ..types.excecute_tool_first_workflow_out import ExcecuteToolFirstWorkflowOut
 from ..types.firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
 from ..types.http_validation_error import HttpValidationError
+from ..types.langchain_documents_request_out import LangchainDocumentsRequestOut
 
 try:
     import pydantic.v1 as pydantic  # type: ignore
@@ -89,6 +91,148 @@ class ToolsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    def langchain_documents(
+        self,
+        *,
+        document_id: str,
+        pagination_limit: typing.Optional[int] = OMIT,
+        pagination_offset: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> LangchainDocumentsRequestOut:
+        """
+        Parameters:
+            - document_id: str.
+
+            - pagination_limit: typing.Optional[int].
+
+            - pagination_offset: typing.Optional[int].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import Athena
+
+        client = Athena(
+            api_key="YOUR_API_KEY",
+        )
+        client.tools.langchain_documents(
+            document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",
+            pagination_limit=250,
+            pagination_offset=0,
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"document_id": document_id}
+        if pagination_limit is not OMIT:
+            _request["pagination_limit"] = pagination_limit
+        if pagination_offset is not OMIT:
+            _request["pagination_offset"] = pagination_offset
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/file/langchain-documents"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(LangchainDocumentsRequestOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def tool_first_workflow(
+        self,
+        *,
+        model_name: str,
+        tool_name: str,
+        content: str,
+        tool_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> ExcecuteToolFirstWorkflowOut:
+        """
+        Parameters:
+            - model_name: str.
+
+            - tool_name: str.
+
+            - content: str.
+
+            - tool_kwargs: typing.Optional[typing.Dict[str, typing.Any]].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import Athena
+
+        client = Athena(
+            api_key="YOUR_API_KEY",
+        )
+        client.tools.tool_first_workflow(
+            model_name="gpt-3.5-turbo",
+            tool_name="tavily_search",
+            content="summarize the website in one paragraph",
+            tool_kwargs={"query": "website: www.athenaintelligence.ai"},
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"model_name": model_name, "tool_name": tool_name, "content": content}
+        if tool_kwargs is not OMIT:
+            _request["tool_kwargs"] = tool_kwargs
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/first-agent"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(ExcecuteToolFirstWorkflowOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
 
 class AsyncToolsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -156,3 +300,145 @@ class AsyncToolsClient:
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def langchain_documents(
+        self,
+        *,
+        document_id: str,
+        pagination_limit: typing.Optional[int] = OMIT,
+        pagination_offset: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> LangchainDocumentsRequestOut:
+        """
+        Parameters:
+            - document_id: str.
+
+            - pagination_limit: typing.Optional[int].
+
+            - pagination_offset: typing.Optional[int].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import AsyncAthena
+
+        client = AsyncAthena(
+            api_key="YOUR_API_KEY",
+        )
+        await client.tools.langchain_documents(
+            document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",
+            pagination_limit=250,
+            pagination_offset=0,
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"document_id": document_id}
+        if pagination_limit is not OMIT:
+            _request["pagination_limit"] = pagination_limit
+        if pagination_offset is not OMIT:
+            _request["pagination_offset"] = pagination_offset
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/file/langchain-documents"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(LangchainDocumentsRequestOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def tool_first_workflow(
+        self,
+        *,
+        model_name: str,
+        tool_name: str,
+        content: str,
+        tool_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> ExcecuteToolFirstWorkflowOut:
+        """
+        Parameters:
+            - model_name: str.
+
+            - tool_name: str.
+
+            - content: str.
+
+            - tool_kwargs: typing.Optional[typing.Dict[str, typing.Any]].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import AsyncAthena
+
+        client = AsyncAthena(
+            api_key="YOUR_API_KEY",
+        )
+        await client.tools.tool_first_workflow(
+            model_name="gpt-3.5-turbo",
+            tool_name="tavily_search",
+            content="summarize the website in one paragraph",
+            tool_kwargs={"query": "website: www.athenaintelligence.ai"},
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"model_name": model_name, "tool_name": tool_name, "content": content}
+        if tool_kwargs is not OMIT:
+            _request["tool_kwargs"] = tool_kwargs
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/first-agent"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(ExcecuteToolFirstWorkflowOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
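
Putting the two new endpoints together with the response models defined later in this diff, client usage presumably looks like the following (a sketch assembled from the docstring examples above; the document id is the docstring's placeholder):

    from athena.client import Athena

    client = Athena(api_key="YOUR_API_KEY")

    # Fetch a stored file as LangChain-style Document objects, 250 per page.
    docs_out = client.tools.langchain_documents(
        document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",  # placeholder id
        pagination_limit=250,
        pagination_offset=0,
    )
    for doc in docs_out.documents:
        print(doc.page_content[:80], doc.metadata)

    # "Tool-first" workflow: run the named tool, then hand its output to the model.
    # (Interpretation of the endpoint name; the diff itself only shows the signature.)
    result = client.tools.tool_first_workflow(
        model_name="gpt-3.5-turbo",
        tool_name="tavily_search",
        content="summarize the website in one paragraph",
        tool_kwargs={"query": "website: www.athenaintelligence.ai"},
    )
    print(result.output_message)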
athena/types/__init__.py CHANGED
@@ -1,16 +1,18 @@
 # This file was auto-generated by Fern from our API Definition.
 
 from .dataset import Dataset
+from .document import Document
+from .excecute_tool_first_workflow_out import ExcecuteToolFirstWorkflowOut
 from .firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
 from .firecrawl_scrape_url_metadata import FirecrawlScrapeUrlMetadata
 from .get_datasets_response import GetDatasetsResponse
 from .get_snippets_response import GetSnippetsResponse
 from .http_validation_error import HttpValidationError
+from .langchain_documents_request_out import LangchainDocumentsRequestOut
 from .message_out import MessageOut
 from .message_out_dto import MessageOutDto
 from .model import Model
 from .report import Report
-from .scrape_markdown_result import ScrapeMarkdownResult
 from .snippet import Snippet
 from .sql_results import SqlResults
 from .status_enum import StatusEnum
@@ -22,16 +24,18 @@ from .validation_error_loc_item import ValidationErrorLocItem
 
 __all__ = [
     "Dataset",
+    "Document",
+    "ExcecuteToolFirstWorkflowOut",
     "FirecrawlScrapeUrlDataReponseDto",
     "FirecrawlScrapeUrlMetadata",
     "GetDatasetsResponse",
     "GetSnippetsResponse",
     "HttpValidationError",
+    "LangchainDocumentsRequestOut",
     "MessageOut",
     "MessageOutDto",
     "Model",
     "Report",
-    "ScrapeMarkdownResult",
     "Snippet",
     "SqlResults",
     "StatusEnum",
athena/types/document.py ADDED
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class Document(pydantic.BaseModel):
+    """
+    Class for storing a piece of text and associated metadata.
+    """
+
+    page_content: str
+    metadata: typing.Optional[typing.Dict[str, typing.Any]] = None
+    type: typing.Optional[typing.Literal["Document"]] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
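
Since Document is a frozen pydantic-v1-style model, a quick sanity-check sketch of how it behaves (hypothetical values):

    from athena import Document

    doc = Document(page_content="Athena is an AI-native analytics platform.", metadata={"page": 1})
    print(doc.json())        # serializes with by_alias=True, exclude_unset=True
    # doc.page_content = "x"  # would raise: Config.frozen = True makes instances immutable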
athena/types/scrape_markdown_result.py → athena/types/excecute_tool_first_workflow_out.py RENAMED
@@ -11,8 +11,8 @@ except ImportError:
     import pydantic  # type: ignore
 
 
-class ScrapeMarkdownResult(pydantic.BaseModel):
-    markdown: str
+class ExcecuteToolFirstWorkflowOut(pydantic.BaseModel):
+    output_message: str
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
athena/types/langchain_documents_request_out.py ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .document import Document
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class LangchainDocumentsRequestOut(pydantic.BaseModel):
+    documents: typing.List[Document]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
athena_intelligence-0.1.40.dist-info/METADATA → athena_intelligence-0.1.41.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: athena-intelligence
-Version: 0.1.40
+Version: 0.1.41
 Summary:
 Requires-Python: >=3.8,<4.0
 Classifier: Programming Language :: Python :: 3
athena_intelligence-0.1.40.dist-info/RECORD → athena_intelligence-0.1.41.dist-info/RECORD RENAMED
@@ -1,13 +1,13 @@
-athena/__init__.py,sha256=6SG9UQ4iQ8rFALsqP-LPImAjHTKLpmUHeSUapfxvmMo,1341
+athena/__init__.py,sha256=tOoLwN-O8vyWKTVN97cDo4WCjAQrwDKc_EDh-p9fYO0,1455
 athena/base_client.py,sha256=tvX2FKc9AnNBm0k64njjUCOt7dPxBBn6BpClJpcPqgc,5416
-athena/chain/__init__.py,sha256=Kv3-525ENLEbHHcRZLT-ioQrzUNmFH9e5mUkApSyvp8,161
-athena/chain/client.py,sha256=mF5A0sXw2GvZexy4bK9HPgDx7DMzWVSR7f0WuGbkAIg,7401
-athena/chain/types/__init__.py,sha256=la4c4cmiMbVbZpB_XqpBB2Rzgl2-xkEUVZrXDowr1lc,190
-athena/chain/types/structured_tool_data_parsing_model.py,sha256=H2Zhh3Sav6foUCyIQIrgIR9iaMjk9ZQNJfHfZgJa1uQ,2213
+athena/chain/__init__.py,sha256=I1CBCogKCvJBIPdXXFQWdGJyFs6pdp0_dp6rnybd5mI,159
+athena/chain/client.py,sha256=SdlDtNr1QBbDgZGBAFW-GNT0FqPdhvAUcrQaHcGuUds,8333
+athena/chain/types/__init__.py,sha256=s4rY--H5yj6slggsUnRQNrKBHZ3QGE9jQWopTCQOFpg,187
+athena/chain/types/structured_parse_in_parsing_model.py,sha256=tr6DLP2v71IUScCOeLrUjUtXrAr3WlaW0sSD3ns2r-Q,2203
 athena/client.py,sha256=8QypiDlbZ0C1YsJh6GzhylLVCZXDQc1MCJTURo2_vvI,3576
 athena/core/__init__.py,sha256=RWfyDqkzWsf8e3VGc3NV60MovfJbg5XWzNFGB2DZ0hA,790
 athena/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-athena/core/client_wrapper.py,sha256=MicE6puzRD25jmVhsLDi58I2wkLvm4_csCbMBH9vT38,1198
+athena/core/client_wrapper.py,sha256=GqP993lqfPIGgs1ep0UgLKCBKNFa9R6tBOZ6dRC5GMA,1198
 athena/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 athena/core/file.py,sha256=sy1RUGZ3aJYuw998bZytxxo6QdgKmlnlgBaMvwEKCGg,1480
 athena/core/http_client.py,sha256=LI0yP3jUyE0Ue7oyBcI9nyo1pljOwh9Y5ycTeIpKwOg,4882
@@ -28,23 +28,25 @@ athena/query/client.py,sha256=UOx-Bq-xFFm-sTMTmJjWGrC6q_7vhVno3nYzmi81xwI,6243
 athena/report/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 athena/report/client.py,sha256=sGJDrgk_E1SPleRYNhvspmsz-G3FQwMW-3alFzZPquE,6528
 athena/search/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-athena/search/client.py,sha256=DLfHjgflIXAff20yswQK1h4BNxvY4SjZzhsywpQOM-g,12391
+athena/search/client.py,sha256=zhE86fKD61gHQ1w45DYfWVHIZ1APGK6yUrWAHYi0OVM,7515
 athena/snippet/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 athena/snippet/client.py,sha256=D0rSpm6ql9cnUj-mMe3z8OHRgRQQuk3bBW2CZSRnyp4,6087
 athena/tools/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-athena/tools/client.py,sha256=XCkyxX3rau3Xbppj74rni8bScZrTFlYDkep_c3u4kWU,6657
-athena/types/__init__.py,sha256=6TfFMQqEF5dfsRSFsoHdH-qgIm8qPrzEeFPBorEcVq8,1370
+athena/tools/client.py,sha256=b3BvnUTBBmlDAw3RfffWSYvKGVnukDaPCbSmGsSFGC0,19332
+athena/types/__init__.py,sha256=Nbrzcoh-fM9lFISWhmZxAmD1m-8Q2dA4qJCbPhSHOuk,1553
 athena/types/dataset.py,sha256=70OJPxKBAYu7xthGEgrUolSdyLqiyh6X49INw1oN0sA,1014
+athena/types/document.py,sha256=CAByS_smWjl-edYLCCc4fkdhZYXSVUKQqEamjbhpgU0,1081
+athena/types/excecute_tool_first_workflow_out.py,sha256=tGL6pNN4uhL_knWf-SQc-Z1IPJFNVsAmYtDI-VjXS2s,895
 athena/types/firecrawl_scrape_url_data_reponse_dto.py,sha256=LbJY-SC_WNQG3nbswG8NTfVl_u_tpV1HO7Y3MWCk5gc,1021
 athena/types/firecrawl_scrape_url_metadata.py,sha256=sqHTtq5_5vujsMixJBDJULPK7MrvxEpB2wGPwC-bTdM,1128
 athena/types/get_datasets_response.py,sha256=BCdT8yTLfOsXeyFadlyoas4zzseFWGPAdGpkgkOuaD8,989
 athena/types/get_snippets_response.py,sha256=Lpn7bHJLpPQozN93unCV-8eByAAfz1MhQWR3G3Z1vl4,989
 athena/types/http_validation_error.py,sha256=Fcv_CTMMrLvCeTHjF0n5xf5tskMDgt-J6H9gp654eQw,973
+athena/types/langchain_documents_request_out.py,sha256=P5Qq7BkCWILORC3yOkvt8pssdYgd_vnTTANNtrWF-l8,939
 athena/types/message_out.py,sha256=uvZY_Podv2XccEk8CICug9I_S2hFJTSzCBwcHiauW7A,865
 athena/types/message_out_dto.py,sha256=qgRibRbDNOWVnVGP7Rribh9WdoCT2CSiPUXeIWECqq4,1051
 athena/types/model.py,sha256=XbXkKXbmnfZ8bPTAn1xnWGjqKK1SVOLdxf1RGk5ON5k,2545
 athena/types/report.py,sha256=QVaqVfHMAV3s9_V2CqjIEMcRrbJhD8zmi82vrk2A8x0,946
-athena/types/scrape_markdown_result.py,sha256=uRpIxoLV9oyLdbJeehm3zmZk_qXZeYqYYcS2SeQmwbA,881
 athena/types/snippet.py,sha256=POIVJNV9iQxiVegB_qwQx-PZPPSyoIPhyxTsueNVUGA,1126
 athena/types/sql_results.py,sha256=pNH32nyf1bzoYJs3FgHctLdLO02oOjyGgLkHACACB6k,900
 athena/types/status_enum.py,sha256=0UZbhdAx215GHC-U53RS98mYHtn1N3On4VBe4j02Qtc,672
@@ -53,6 +55,6 @@ athena/types/tools.py,sha256=mhRkKAwlsDud-fFOhsx2T3hBD-FAtuCnGHyU9cLPcGU,1422
 athena/types/url_result.py,sha256=zajsW46qJnD6GPimb5kHkUncjqBfzHUlGOcKuUGMX-E,893
 athena/types/validation_error.py,sha256=2JhGNJouo8QpfrMBoT_JCwYSn1nFN2Nnq0p9uPLDH-U,992
 athena/types/validation_error_loc_item.py,sha256=LAtjCHIllWRBFXvAZ5QZpp7CPXjdtN9EB7HrLVo6EP0,128
-athena_intelligence-0.1.40.dist-info/METADATA,sha256=hV93D3CuM01fBTO3fShNwAEURnu-rJbcCHdSju2SN_I,4738
-athena_intelligence-0.1.40.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-athena_intelligence-0.1.40.dist-info/RECORD,,
+athena_intelligence-0.1.41.dist-info/METADATA,sha256=x6Bd7D06Ob6esJAQDGRuRWFkTtlIuixQTWFFuVc3Xvk,4738
+athena_intelligence-0.1.41.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+athena_intelligence-0.1.41.dist-info/RECORD,,