athena-intelligence 0.1.40__py3-none-any.whl → 0.1.42__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
athena/__init__.py CHANGED
@@ -2,20 +2,23 @@
 
 from .types import (
     Dataset,
+    Document,
+    ExcecuteToolFirstWorkflowOut,
     FirecrawlScrapeUrlDataReponseDto,
     FirecrawlScrapeUrlMetadata,
     GetDatasetsResponse,
     GetSnippetsResponse,
     HttpValidationError,
+    LangchainDocumentsRequestOut,
     MessageOut,
     MessageOutDto,
     Model,
     Report,
-    ScrapeMarkdownResult,
     Snippet,
     SqlResults,
     StatusEnum,
     StructuredParseResult,
+    ToolModels,
     Tools,
     UrlResult,
     ValidationError,
@@ -23,27 +26,30 @@ from .types import (
 )
 from .errors import UnprocessableEntityError
 from . import chain, dataset, message, query, report, search, snippet, tools
-from .chain import StructuredToolDataParsingModel
+from .chain import StructuredParseInParsingModel
 from .environment import AthenaEnvironment
 
 __all__ = [
     "AthenaEnvironment",
     "Dataset",
+    "Document",
+    "ExcecuteToolFirstWorkflowOut",
     "FirecrawlScrapeUrlDataReponseDto",
     "FirecrawlScrapeUrlMetadata",
     "GetDatasetsResponse",
     "GetSnippetsResponse",
     "HttpValidationError",
+    "LangchainDocumentsRequestOut",
     "MessageOut",
     "MessageOutDto",
     "Model",
     "Report",
-    "ScrapeMarkdownResult",
     "Snippet",
    "SqlResults",
     "StatusEnum",
+    "StructuredParseInParsingModel",
     "StructuredParseResult",
-    "StructuredToolDataParsingModel",
+    "ToolModels",
     "Tools",
     "UnprocessableEntityError",
     "UrlResult",
athena/chain/__init__.py CHANGED
@@ -1,5 +1,5 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from .types import StructuredToolDataParsingModel
+from .types import StructuredParseInParsingModel
 
-__all__ = ["StructuredToolDataParsingModel"]
+__all__ = ["StructuredParseInParsingModel"]
athena/chain/client.py CHANGED
@@ -12,7 +12,7 @@ from ..core.request_options import RequestOptions
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
 from ..types.http_validation_error import HttpValidationError
 from ..types.structured_parse_result import StructuredParseResult
-from .types.structured_tool_data_parsing_model import StructuredToolDataParsingModel
+from .types.structured_parse_in_parsing_model import StructuredParseInParsingModel
 
 try:
     import pydantic.v1 as pydantic  # type: ignore
@@ -32,7 +32,7 @@ class ChainClient:
         *,
         text_input: str,
         custom_type_dict: typing.Dict[str, typing.Any],
-        parsing_model: typing.Optional[StructuredToolDataParsingModel] = OMIT,
+        parsing_model: typing.Optional[StructuredParseInParsingModel] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> StructuredParseResult:
         """
@@ -41,19 +41,19 @@ class ChainClient:
 
             - custom_type_dict: typing.Dict[str, typing.Any]. A dictionary of field names and their default values.
 
-            - parsing_model: typing.Optional[StructuredToolDataParsingModel]. The model to be used for parsing.
+            - parsing_model: typing.Optional[StructuredParseInParsingModel]. The model to be used for parsing.
 
             - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
         ---
+        from athena import StructuredParseInParsingModel
         from athena.client import Athena
 
-        client = Athena(
-            api_key="YOUR_API_KEY",
-        )
-        client.chain.structured_parse(
-            text_input="text_input",
-            custom_type_dict={},
-        )
+        client = Athena(api_key="YOUR_API_KEY", )
+        client.chain.structured_parse(text_input='Athena is an AI-native analytics platform and artificial employee built to accelerate analytics workflows
+        by offering enterprise teams co-pilot and auto-pilot modes. Athena learns your workflow as a co-pilot,
+        allowing you to hand over controls to her for autonomous execution with confidence."
+
+        Give me all of the modes Athena provides.', custom_type_dict={"modes": {}}, parsing_model=StructuredParseInParsingModel.GPT_4_TURBO, )
         """
         _request: typing.Dict[str, typing.Any] = {"text_input": text_input, "custom_type_dict": custom_type_dict}
         if parsing_model is not OMIT:
@@ -104,7 +104,7 @@ class AsyncChainClient:
         *,
         text_input: str,
         custom_type_dict: typing.Dict[str, typing.Any],
-        parsing_model: typing.Optional[StructuredToolDataParsingModel] = OMIT,
+        parsing_model: typing.Optional[StructuredParseInParsingModel] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> StructuredParseResult:
         """
@@ -113,19 +113,19 @@ class AsyncChainClient:
 
             - custom_type_dict: typing.Dict[str, typing.Any]. A dictionary of field names and their default values.
 
-            - parsing_model: typing.Optional[StructuredToolDataParsingModel]. The model to be used for parsing.
+            - parsing_model: typing.Optional[StructuredParseInParsingModel]. The model to be used for parsing.
 
             - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
         ---
+        from athena import StructuredParseInParsingModel
         from athena.client import AsyncAthena
 
-        client = AsyncAthena(
-            api_key="YOUR_API_KEY",
-        )
-        await client.chain.structured_parse(
-            text_input="text_input",
-            custom_type_dict={},
-        )
+        client = AsyncAthena(api_key="YOUR_API_KEY", )
+        await client.chain.structured_parse(text_input='Athena is an AI-native analytics platform and artificial employee built to accelerate analytics workflows
+        by offering enterprise teams co-pilot and auto-pilot modes. Athena learns your workflow as a co-pilot,
+        allowing you to hand over controls to her for autonomous execution with confidence."
+
+        Give me all of the modes Athena provides.', custom_type_dict={"modes": {}}, parsing_model=StructuredParseInParsingModel.GPT_4_TURBO, )
         """
         _request: typing.Dict[str, typing.Any] = {"text_input": text_input, "custom_type_dict": custom_type_dict}
         if parsing_model is not OMIT:
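
The only caller-visible change in the chain client is the renamed parsing-model enum. A minimal migration sketch (not part of the diff), assuming the signature shown above; the text_input and custom_type_dict values are illustrative:

from athena import StructuredParseInParsingModel  # was StructuredToolDataParsingModel in 0.1.40
from athena.client import Athena

client = Athena(api_key="YOUR_API_KEY")
result = client.chain.structured_parse(
    text_input="Athena offers enterprise teams co-pilot and auto-pilot modes.",
    custom_type_dict={"modes": {}},  # field names and their default values
    parsing_model=StructuredParseInParsingModel.GPT_4_TURBO,
)
print(result)  # a StructuredParseResult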
athena/chain/types/__init__.py CHANGED
@@ -1,5 +1,5 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from .structured_tool_data_parsing_model import StructuredToolDataParsingModel
+from .structured_parse_in_parsing_model import StructuredParseInParsingModel
 
-__all__ = ["StructuredToolDataParsingModel"]
+__all__ = ["StructuredParseInParsingModel"]
athena/chain/types/structured_tool_data_parsing_model.py → athena/chain/types/structured_parse_in_parsing_model.py RENAMED
@@ -6,7 +6,7 @@ import typing
 T_Result = typing.TypeVar("T_Result")
 
 
-class StructuredToolDataParsingModel(str, enum.Enum):
+class StructuredParseInParsingModel(str, enum.Enum):
     """
     The model to be used for parsing.
     """
@@ -33,21 +33,21 @@ class StructuredToolDataParsingModel(str, enum.Enum):
         claude_3_sonnet_20240229: typing.Callable[[], T_Result],
         claude_3_haiku_20240307: typing.Callable[[], T_Result],
     ) -> T_Result:
-        if self is StructuredToolDataParsingModel.GPT_4_TURBO:
+        if self is StructuredParseInParsingModel.GPT_4_TURBO:
            return gpt_4_turbo()
-        if self is StructuredToolDataParsingModel.GPT_4_TURBO_PREVIEW:
+        if self is StructuredParseInParsingModel.GPT_4_TURBO_PREVIEW:
            return gpt_4_turbo_preview()
-        if self is StructuredToolDataParsingModel.GPT_4:
+        if self is StructuredParseInParsingModel.GPT_4:
            return gpt_4()
-        if self is StructuredToolDataParsingModel.GPT_35_TURBO:
+        if self is StructuredParseInParsingModel.GPT_35_TURBO:
            return gpt_35_turbo()
-        if self is StructuredToolDataParsingModel.MIXTRAL_SMALL_8_X_7_B_0211:
+        if self is StructuredParseInParsingModel.MIXTRAL_SMALL_8_X_7_B_0211:
            return mixtral_small_8_x_7_b_0211()
-        if self is StructuredToolDataParsingModel.MISTRAL_LARGE_0224:
+        if self is StructuredParseInParsingModel.MISTRAL_LARGE_0224:
            return mistral_large_0224()
-        if self is StructuredToolDataParsingModel.CLAUDE_3_OPUS_20240229:
+        if self is StructuredParseInParsingModel.CLAUDE_3_OPUS_20240229:
            return claude_3_opus_20240229()
-        if self is StructuredToolDataParsingModel.CLAUDE_3_SONNET_20240229:
+        if self is StructuredParseInParsingModel.CLAUDE_3_SONNET_20240229:
            return claude_3_sonnet_20240229()
-        if self is StructuredToolDataParsingModel.CLAUDE_3_HAIKU_20240307:
+        if self is StructuredParseInParsingModel.CLAUDE_3_HAIKU_20240307:
            return claude_3_haiku_20240307()
athena/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "athena-intelligence",
-            "X-Fern-SDK-Version": "0.1.40",
+            "X-Fern-SDK-Version": "0.1.42",
         }
         headers["X-API-KEY"] = self.api_key
         return headers
athena/search/client.py CHANGED
@@ -11,7 +11,6 @@ from ..core.remove_none_from_dict import remove_none_from_dict
 from ..core.request_options import RequestOptions
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
 from ..types.http_validation_error import HttpValidationError
-from ..types.scrape_markdown_result import ScrapeMarkdownResult
 from ..types.url_result import UrlResult
 
 try:
@@ -60,9 +59,9 @@ class SearchClient:
             api_key="YOUR_API_KEY",
         )
         client.search.get_urls(
-            query="query",
-            num_urls=1,
-            tbs="tbs",
+            query="Dogs",
+            num_urls=10,
+            tbs="qdr:m",
         )
         """
         _request: typing.Dict[str, typing.Any] = {"query": query, "num_urls": num_urls, "tbs": tbs}
@@ -74,7 +73,7 @@
         _request["site"] = site
         _response = self._client_wrapper.httpx_client.request(
             "POST",
-            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/get-urls"),
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/search/get-urls"),
             params=jsonable_encoder(
                 request_options.get("additional_query_parameters") if request_options is not None else None
             ),
@@ -108,60 +107,6 @@
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def get_markdown(
-        self, *, url: str, request_options: typing.Optional[RequestOptions] = None
-    ) -> ScrapeMarkdownResult:
-        """
-        Parameters:
-            - url: str.
-
-            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
-        ---
-        from athena.client import Athena
-
-        client = Athena(
-            api_key="YOUR_API_KEY",
-        )
-        client.search.get_markdown(
-            url="url",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            "POST",
-            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/get-markdown"),
-            params=jsonable_encoder(
-                request_options.get("additional_query_parameters") if request_options is not None else None
-            ),
-            json=jsonable_encoder({"url": url})
-            if request_options is None or request_options.get("additional_body_parameters") is None
-            else {
-                **jsonable_encoder({"url": url}),
-                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
-            },
-            headers=jsonable_encoder(
-                remove_none_from_dict(
-                    {
-                        **self._client_wrapper.get_headers(),
-                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
-                    }
-                )
-            ),
-            timeout=request_options.get("timeout_in_seconds")
-            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else 60,
-            retries=0,
-            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
-        )
-        if 200 <= _response.status_code < 300:
-            return pydantic.parse_obj_as(ScrapeMarkdownResult, _response.json())  # type: ignore
-        if _response.status_code == 422:
-            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
-        try:
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
 
 class AsyncSearchClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -200,9 +145,9 @@ class AsyncSearchClient:
             api_key="YOUR_API_KEY",
         )
         await client.search.get_urls(
-            query="query",
-            num_urls=1,
-            tbs="tbs",
+            query="Dogs",
+            num_urls=10,
+            tbs="qdr:m",
         )
         """
         _request: typing.Dict[str, typing.Any] = {"query": query, "num_urls": num_urls, "tbs": tbs}
@@ -214,7 +159,7 @@
         _request["site"] = site
         _response = await self._client_wrapper.httpx_client.request(
             "POST",
-            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/get-urls"),
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/search/get-urls"),
             params=jsonable_encoder(
                 request_options.get("additional_query_parameters") if request_options is not None else None
             ),
@@ -247,57 +192,3 @@
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    async def get_markdown(
-        self, *, url: str, request_options: typing.Optional[RequestOptions] = None
-    ) -> ScrapeMarkdownResult:
-        """
-        Parameters:
-            - url: str.
-
-            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
-        ---
-        from athena.client import AsyncAthena
-
-        client = AsyncAthena(
-            api_key="YOUR_API_KEY",
-        )
-        await client.search.get_markdown(
-            url="url",
-        )
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            "POST",
-            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/get-markdown"),
-            params=jsonable_encoder(
-                request_options.get("additional_query_parameters") if request_options is not None else None
-            ),
-            json=jsonable_encoder({"url": url})
-            if request_options is None or request_options.get("additional_body_parameters") is None
-            else {
-                **jsonable_encoder({"url": url}),
-                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
-            },
-            headers=jsonable_encoder(
-                remove_none_from_dict(
-                    {
-                        **self._client_wrapper.get_headers(),
-                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
-                    }
-                )
-            ),
-            timeout=request_options.get("timeout_in_seconds")
-            if request_options is not None and request_options.get("timeout_in_seconds") is not None
-            else 60,
-            retries=0,
-            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
-        )
-        if 200 <= _response.status_code < 300:
-            return pydantic.parse_obj_as(ScrapeMarkdownResult, _response.json())  # type: ignore
-        if _response.status_code == 422:
-            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
-        try:
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
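
Two things change in the search client: get_urls now posts to api/v0/tools/search/get-urls (transparent at the call site), and get_markdown is removed outright. A usage sketch under those assumptions (not part of the diff); the argument values mirror the updated docstring:

from athena.client import Athena

client = Athena(api_key="YOUR_API_KEY")

# Call site is unchanged; only the route underneath moved.
# "qdr:m" appears to be Google's time-based-search filter for the past month.
urls = client.search.get_urls(query="Dogs", num_urls=10, tbs="qdr:m")

# client.search.get_markdown(url=...) no longer exists in 0.1.42;
# this diff offers no direct replacement for markdown scraping.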
athena/tools/client.py CHANGED
@@ -10,8 +10,11 @@ from ..core.jsonable_encoder import jsonable_encoder
 from ..core.remove_none_from_dict import remove_none_from_dict
 from ..core.request_options import RequestOptions
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
+from ..types.excecute_tool_first_workflow_out import ExcecuteToolFirstWorkflowOut
 from ..types.firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
 from ..types.http_validation_error import HttpValidationError
+from ..types.langchain_documents_request_out import LangchainDocumentsRequestOut
+from ..types.tool_models import ToolModels
 
 try:
     import pydantic.v1 as pydantic  # type: ignore
@@ -89,6 +92,151 @@ class ToolsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    def langchain_documents(
+        self,
+        *,
+        document_id: str,
+        pagination_limit: typing.Optional[int] = OMIT,
+        pagination_offset: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> LangchainDocumentsRequestOut:
+        """
+        Parameters:
+            - document_id: str.
+
+            - pagination_limit: typing.Optional[int].
+
+            - pagination_offset: typing.Optional[int].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import Athena
+
+        client = Athena(
+            api_key="YOUR_API_KEY",
+        )
+        client.tools.langchain_documents(
+            document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",
+            pagination_limit=250,
+            pagination_offset=0,
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"document_id": document_id}
+        if pagination_limit is not OMIT:
+            _request["pagination_limit"] = pagination_limit
+        if pagination_offset is not OMIT:
+            _request["pagination_offset"] = pagination_offset
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/file/langchain-documents"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(LangchainDocumentsRequestOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def tool_first_workflow(
+        self,
+        *,
+        model: typing.Optional[ToolModels] = OMIT,
+        tool_name: str,
+        content: str,
+        tool_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> ExcecuteToolFirstWorkflowOut:
+        """
+        Parameters:
+            - model: typing.Optional[ToolModels].
+
+            - tool_name: str.
+
+            - content: str.
+
+            - tool_kwargs: typing.Optional[typing.Dict[str, typing.Any]].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena import ToolModels
+        from athena.client import Athena
+
+        client = Athena(
+            api_key="YOUR_API_KEY",
+        )
+        client.tools.tool_first_workflow(
+            model=ToolModels.MISTRAL_LARGE_0224,
+            tool_name="tavily_search",
+            content="summarize the website in one paragraph",
+            tool_kwargs={"query": "website: www.athenaintelligence.ai"},
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"tool_name": tool_name, "content": content}
+        if model is not OMIT:
+            _request["model"] = model
+        if tool_kwargs is not OMIT:
+            _request["tool_kwargs"] = tool_kwargs
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/first-agent"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(ExcecuteToolFirstWorkflowOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
 
 class AsyncToolsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -156,3 +304,148 @@ class AsyncToolsClient:
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def langchain_documents(
+        self,
+        *,
+        document_id: str,
+        pagination_limit: typing.Optional[int] = OMIT,
+        pagination_offset: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> LangchainDocumentsRequestOut:
+        """
+        Parameters:
+            - document_id: str.
+
+            - pagination_limit: typing.Optional[int].
+
+            - pagination_offset: typing.Optional[int].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import AsyncAthena
+
+        client = AsyncAthena(
+            api_key="YOUR_API_KEY",
+        )
+        await client.tools.langchain_documents(
+            document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",
+            pagination_limit=250,
+            pagination_offset=0,
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"document_id": document_id}
+        if pagination_limit is not OMIT:
+            _request["pagination_limit"] = pagination_limit
+        if pagination_offset is not OMIT:
+            _request["pagination_offset"] = pagination_offset
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/file/langchain-documents"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(LangchainDocumentsRequestOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def tool_first_workflow(
+        self,
+        *,
+        model: typing.Optional[ToolModels] = OMIT,
+        tool_name: str,
+        content: str,
+        tool_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> ExcecuteToolFirstWorkflowOut:
+        """
+        Parameters:
+            - model: typing.Optional[ToolModels].
+
+            - tool_name: str.
+
+            - content: str.
+
+            - tool_kwargs: typing.Optional[typing.Dict[str, typing.Any]].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena import ToolModels
+        from athena.client import AsyncAthena
+
+        client = AsyncAthena(
+            api_key="YOUR_API_KEY",
+        )
+        await client.tools.tool_first_workflow(
+            model=ToolModels.MISTRAL_LARGE_0224,
+            tool_name="tavily_search",
+            content="summarize the website in one paragraph",
+            tool_kwargs={"query": "website: www.athenaintelligence.ai"},
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"tool_name": tool_name, "content": content}
+        if model is not OMIT:
+            _request["model"] = model
+        if tool_kwargs is not OMIT:
+            _request["tool_kwargs"] = tool_kwargs
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/first-agent"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(ExcecuteToolFirstWorkflowOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
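
The two new endpoints are easiest to read from the caller's side. A combined sketch built from the docstring examples above (not part of the diff; document_id and the tool arguments are the illustrative values from those docstrings):

from athena import ToolModels
from athena.client import Athena

client = Athena(api_key="YOUR_API_KEY")

# Page through a stored file as LangChain-style documents
# (POSTs to api/v0/tools/file/langchain-documents).
out = client.tools.langchain_documents(
    document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",
    pagination_limit=250,
    pagination_offset=0,
)
for doc in out.documents:  # each item is an athena.types.Document
    print(doc.page_content[:80])

# Run one named tool before the model answers ("tool-first" workflow,
# POSTed to api/v0/tools/first-agent).
answer = client.tools.tool_first_workflow(
    model=ToolModels.MISTRAL_LARGE_0224,
    tool_name="tavily_search",
    content="summarize the website in one paragraph",
    tool_kwargs={"query": "website: www.athenaintelligence.ai"},
)
print(answer.output_message)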
athena/types/__init__.py CHANGED
@@ -1,20 +1,23 @@
 # This file was auto-generated by Fern from our API Definition.
 
 from .dataset import Dataset
+from .document import Document
+from .excecute_tool_first_workflow_out import ExcecuteToolFirstWorkflowOut
 from .firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
 from .firecrawl_scrape_url_metadata import FirecrawlScrapeUrlMetadata
 from .get_datasets_response import GetDatasetsResponse
 from .get_snippets_response import GetSnippetsResponse
 from .http_validation_error import HttpValidationError
+from .langchain_documents_request_out import LangchainDocumentsRequestOut
 from .message_out import MessageOut
 from .message_out_dto import MessageOutDto
 from .model import Model
 from .report import Report
-from .scrape_markdown_result import ScrapeMarkdownResult
 from .snippet import Snippet
 from .sql_results import SqlResults
 from .status_enum import StatusEnum
 from .structured_parse_result import StructuredParseResult
+from .tool_models import ToolModels
 from .tools import Tools
 from .url_result import UrlResult
 from .validation_error import ValidationError
@@ -22,20 +25,23 @@ from .validation_error_loc_item import ValidationErrorLocItem
 
 __all__ = [
     "Dataset",
+    "Document",
+    "ExcecuteToolFirstWorkflowOut",
     "FirecrawlScrapeUrlDataReponseDto",
     "FirecrawlScrapeUrlMetadata",
     "GetDatasetsResponse",
     "GetSnippetsResponse",
     "HttpValidationError",
+    "LangchainDocumentsRequestOut",
     "MessageOut",
     "MessageOutDto",
     "Model",
     "Report",
-    "ScrapeMarkdownResult",
     "Snippet",
     "SqlResults",
     "StatusEnum",
     "StructuredParseResult",
+    "ToolModels",
     "Tools",
     "UrlResult",
     "ValidationError",
athena/types/document.py ADDED
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class Document(pydantic.BaseModel):
+    """
+    Class for storing a piece of text and associated metadata.
+    """
+
+    page_content: str
+    metadata: typing.Optional[typing.Dict[str, typing.Any]] = None
+    type: typing.Optional[typing.Literal["Document"]] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
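
A small sketch of the new model (not part of the diff), assuming pydantic v1 semantics as imported above:

from athena.types import Document

doc = Document(
    page_content="Athena is an AI-native analytics platform.",
    metadata={"source": "about-page"},  # hypothetical metadata
)
print(doc.json())  # exclude_unset=True, so only the fields set above are emitted

# Config.frozen = True: instances are immutable (assignment raises a TypeError)
# and hashable, so documents can be collected in sets or used as dict keys.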
athena/types/scrape_markdown_result.py → athena/types/excecute_tool_first_workflow_out.py RENAMED
@@ -11,8 +11,8 @@ except ImportError:
     import pydantic  # type: ignore
 
 
-class ScrapeMarkdownResult(pydantic.BaseModel):
-    markdown: str
+class ExcecuteToolFirstWorkflowOut(pydantic.BaseModel):
+    output_message: str
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
athena/types/langchain_documents_request_out.py ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .document import Document
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class LangchainDocumentsRequestOut(pydantic.BaseModel):
+    documents: typing.List[Document]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
athena/types/tool_models.py ADDED
@@ -0,0 +1,49 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ToolModels(str, enum.Enum):
+    """
+    An enumeration.
+    """
+
+    GPT_4_TURBO = "gpt-4-turbo"
+    GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview"
+    GPT_4 = "gpt-4"
+    MIXTRAL_SMALL_8_X_7_B_0211 = "mixtral-small-8x7b-0211"
+    MISTRAL_LARGE_0224 = "mistral-large-0224"
+    CLAUDE_3_OPUS_20240229 = "claude-3-opus-20240229"
+    CLAUDE_3_SONNET_20240229 = "claude-3-sonnet-20240229"
+    CLAUDE_3_HAIKU_20240307 = "claude-3-haiku-20240307"
+
+    def visit(
+        self,
+        gpt_4_turbo: typing.Callable[[], T_Result],
+        gpt_4_turbo_preview: typing.Callable[[], T_Result],
+        gpt_4: typing.Callable[[], T_Result],
+        mixtral_small_8_x_7_b_0211: typing.Callable[[], T_Result],
+        mistral_large_0224: typing.Callable[[], T_Result],
+        claude_3_opus_20240229: typing.Callable[[], T_Result],
+        claude_3_sonnet_20240229: typing.Callable[[], T_Result],
+        claude_3_haiku_20240307: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ToolModels.GPT_4_TURBO:
+            return gpt_4_turbo()
+        if self is ToolModels.GPT_4_TURBO_PREVIEW:
+            return gpt_4_turbo_preview()
+        if self is ToolModels.GPT_4:
+            return gpt_4()
+        if self is ToolModels.MIXTRAL_SMALL_8_X_7_B_0211:
+            return mixtral_small_8_x_7_b_0211()
+        if self is ToolModels.MISTRAL_LARGE_0224:
+            return mistral_large_0224()
+        if self is ToolModels.CLAUDE_3_OPUS_20240229:
+            return claude_3_opus_20240229()
+        if self is ToolModels.CLAUDE_3_SONNET_20240229:
+            return claude_3_sonnet_20240229()
+        if self is ToolModels.CLAUDE_3_HAIKU_20240307:
+            return claude_3_haiku_20240307()
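
Fern emits a visit() helper on its enums for exhaustive dispatch: every variant takes a callback, so a newly added model value surfaces as a missing argument rather than a silently unhandled case. A sketch (not part of the diff) using the variants defined above; the provider mapping is the author's illustration:

from athena.types import ToolModels

def provider(model: ToolModels) -> str:
    # One callback per enum member; visit() invokes the one matching `model`.
    return model.visit(
        gpt_4_turbo=lambda: "openai",
        gpt_4_turbo_preview=lambda: "openai",
        gpt_4=lambda: "openai",
        mixtral_small_8_x_7_b_0211=lambda: "mistral",
        mistral_large_0224=lambda: "mistral",
        claude_3_opus_20240229=lambda: "anthropic",
        claude_3_sonnet_20240229=lambda: "anthropic",
        claude_3_haiku_20240307=lambda: "anthropic",
    )

assert provider(ToolModels.MISTRAL_LARGE_0224) == "mistral"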
athena_intelligence-0.1.40.dist-info/METADATA → athena_intelligence-0.1.42.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: athena-intelligence
-Version: 0.1.40
+Version: 0.1.42
 Summary:
 Requires-Python: >=3.8,<4.0
 Classifier: Programming Language :: Python :: 3
athena_intelligence-0.1.40.dist-info/RECORD → athena_intelligence-0.1.42.dist-info/RECORD RENAMED
@@ -1,13 +1,13 @@
-athena/__init__.py,sha256=6SG9UQ4iQ8rFALsqP-LPImAjHTKLpmUHeSUapfxvmMo,1341
+athena/__init__.py,sha256=uXIyAvCSPx3g5Y96sk21HkSjNLz4Hi4XIKncoAxNT90,1489
 athena/base_client.py,sha256=tvX2FKc9AnNBm0k64njjUCOt7dPxBBn6BpClJpcPqgc,5416
-athena/chain/__init__.py,sha256=Kv3-525ENLEbHHcRZLT-ioQrzUNmFH9e5mUkApSyvp8,161
-athena/chain/client.py,sha256=mF5A0sXw2GvZexy4bK9HPgDx7DMzWVSR7f0WuGbkAIg,7401
-athena/chain/types/__init__.py,sha256=la4c4cmiMbVbZpB_XqpBB2Rzgl2-xkEUVZrXDowr1lc,190
-athena/chain/types/structured_tool_data_parsing_model.py,sha256=H2Zhh3Sav6foUCyIQIrgIR9iaMjk9ZQNJfHfZgJa1uQ,2213
+athena/chain/__init__.py,sha256=I1CBCogKCvJBIPdXXFQWdGJyFs6pdp0_dp6rnybd5mI,159
+athena/chain/client.py,sha256=SdlDtNr1QBbDgZGBAFW-GNT0FqPdhvAUcrQaHcGuUds,8333
+athena/chain/types/__init__.py,sha256=s4rY--H5yj6slggsUnRQNrKBHZ3QGE9jQWopTCQOFpg,187
+athena/chain/types/structured_parse_in_parsing_model.py,sha256=tr6DLP2v71IUScCOeLrUjUtXrAr3WlaW0sSD3ns2r-Q,2203
 athena/client.py,sha256=8QypiDlbZ0C1YsJh6GzhylLVCZXDQc1MCJTURo2_vvI,3576
 athena/core/__init__.py,sha256=RWfyDqkzWsf8e3VGc3NV60MovfJbg5XWzNFGB2DZ0hA,790
 athena/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-athena/core/client_wrapper.py,sha256=MicE6puzRD25jmVhsLDi58I2wkLvm4_csCbMBH9vT38,1198
+athena/core/client_wrapper.py,sha256=coayF2VxKYBTqahtCxLUrG-2ksro8yQZgP2yiqSmnUg,1198
 athena/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 athena/core/file.py,sha256=sy1RUGZ3aJYuw998bZytxxo6QdgKmlnlgBaMvwEKCGg,1480
 athena/core/http_client.py,sha256=LI0yP3jUyE0Ue7oyBcI9nyo1pljOwh9Y5ycTeIpKwOg,4882
@@ -28,31 +28,34 @@ athena/query/client.py,sha256=UOx-Bq-xFFm-sTMTmJjWGrC6q_7vhVno3nYzmi81xwI,6243
 athena/report/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 athena/report/client.py,sha256=sGJDrgk_E1SPleRYNhvspmsz-G3FQwMW-3alFzZPquE,6528
 athena/search/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-athena/search/client.py,sha256=DLfHjgflIXAff20yswQK1h4BNxvY4SjZzhsywpQOM-g,12391
+athena/search/client.py,sha256=zhE86fKD61gHQ1w45DYfWVHIZ1APGK6yUrWAHYi0OVM,7515
 athena/snippet/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 athena/snippet/client.py,sha256=D0rSpm6ql9cnUj-mMe3z8OHRgRQQuk3bBW2CZSRnyp4,6087
 athena/tools/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-athena/tools/client.py,sha256=XCkyxX3rau3Xbppj74rni8bScZrTFlYDkep_c3u4kWU,6657
-athena/types/__init__.py,sha256=6TfFMQqEF5dfsRSFsoHdH-qgIm8qPrzEeFPBorEcVq8,1370
+athena/tools/client.py,sha256=qk9PJ70meZVKZXReLEsxp1woUMpZ6UPOcuLYqHhMt1Q,19643
+athena/types/__init__.py,sha256=ClUrCuQeInMWgv8vO1AGSV0rQEm51YABwBPvB250fno,1607
 athena/types/dataset.py,sha256=70OJPxKBAYu7xthGEgrUolSdyLqiyh6X49INw1oN0sA,1014
+athena/types/document.py,sha256=CAByS_smWjl-edYLCCc4fkdhZYXSVUKQqEamjbhpgU0,1081
+athena/types/excecute_tool_first_workflow_out.py,sha256=tGL6pNN4uhL_knWf-SQc-Z1IPJFNVsAmYtDI-VjXS2s,895
 athena/types/firecrawl_scrape_url_data_reponse_dto.py,sha256=LbJY-SC_WNQG3nbswG8NTfVl_u_tpV1HO7Y3MWCk5gc,1021
 athena/types/firecrawl_scrape_url_metadata.py,sha256=sqHTtq5_5vujsMixJBDJULPK7MrvxEpB2wGPwC-bTdM,1128
 athena/types/get_datasets_response.py,sha256=BCdT8yTLfOsXeyFadlyoas4zzseFWGPAdGpkgkOuaD8,989
 athena/types/get_snippets_response.py,sha256=Lpn7bHJLpPQozN93unCV-8eByAAfz1MhQWR3G3Z1vl4,989
 athena/types/http_validation_error.py,sha256=Fcv_CTMMrLvCeTHjF0n5xf5tskMDgt-J6H9gp654eQw,973
+athena/types/langchain_documents_request_out.py,sha256=P5Qq7BkCWILORC3yOkvt8pssdYgd_vnTTANNtrWF-l8,939
 athena/types/message_out.py,sha256=uvZY_Podv2XccEk8CICug9I_S2hFJTSzCBwcHiauW7A,865
 athena/types/message_out_dto.py,sha256=qgRibRbDNOWVnVGP7Rribh9WdoCT2CSiPUXeIWECqq4,1051
 athena/types/model.py,sha256=XbXkKXbmnfZ8bPTAn1xnWGjqKK1SVOLdxf1RGk5ON5k,2545
 athena/types/report.py,sha256=QVaqVfHMAV3s9_V2CqjIEMcRrbJhD8zmi82vrk2A8x0,946
-athena/types/scrape_markdown_result.py,sha256=uRpIxoLV9oyLdbJeehm3zmZk_qXZeYqYYcS2SeQmwbA,881
 athena/types/snippet.py,sha256=POIVJNV9iQxiVegB_qwQx-PZPPSyoIPhyxTsueNVUGA,1126
 athena/types/sql_results.py,sha256=pNH32nyf1bzoYJs3FgHctLdLO02oOjyGgLkHACACB6k,900
 athena/types/status_enum.py,sha256=0UZbhdAx215GHC-U53RS98mYHtn1N3On4VBe4j02Qtc,672
 athena/types/structured_parse_result.py,sha256=7I-w06OmtxXFY01k7FXFSNPe5PpM3z54xNUbs62lSv0,905
+athena/types/tool_models.py,sha256=ltl4RX0QhFIj_wsKLZMDip72_Ko7P-7v0aL9HACXVFo,1829
 athena/types/tools.py,sha256=mhRkKAwlsDud-fFOhsx2T3hBD-FAtuCnGHyU9cLPcGU,1422
 athena/types/url_result.py,sha256=zajsW46qJnD6GPimb5kHkUncjqBfzHUlGOcKuUGMX-E,893
 athena/types/validation_error.py,sha256=2JhGNJouo8QpfrMBoT_JCwYSn1nFN2Nnq0p9uPLDH-U,992
 athena/types/validation_error_loc_item.py,sha256=LAtjCHIllWRBFXvAZ5QZpp7CPXjdtN9EB7HrLVo6EP0,128
-athena_intelligence-0.1.40.dist-info/METADATA,sha256=hV93D3CuM01fBTO3fShNwAEURnu-rJbcCHdSju2SN_I,4738
-athena_intelligence-0.1.40.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-athena_intelligence-0.1.40.dist-info/RECORD,,
+athena_intelligence-0.1.42.dist-info/METADATA,sha256=oDpxo7eQ8WaMzSnp7O856K9siIIlLNS8lILnRJSIgUI,4738
+athena_intelligence-0.1.42.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+athena_intelligence-0.1.42.dist-info/RECORD,,