athena-intelligence 0.1.77__tar.gz → 0.1.79__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90) hide show
  1. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/PKG-INFO +1 -1
  2. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/pyproject.toml +1 -1
  3. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/__init__.py +17 -1
  4. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/chain/client.py +2 -2
  5. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/client_wrapper.py +1 -1
  6. athena_intelligence-0.1.79/src/athena/errors/__init__.py +7 -0
  7. athena_intelligence-0.1.79/src/athena/errors/internal_server_error.py +9 -0
  8. athena_intelligence-0.1.79/src/athena/errors/not_found_error.py +9 -0
  9. athena_intelligence-0.1.79/src/athena/tools/__init__.py +5 -0
  10. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/tools/client.py +304 -2
  11. athena_intelligence-0.1.79/src/athena/tools/types/__init__.py +5 -0
  12. athena_intelligence-0.1.79/src/athena/tools/types/tools_data_frame_request_columns_item.py +5 -0
  13. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/__init__.py +12 -0
  14. athena_intelligence-0.1.79/src/athena/types/data_frame_parsing_error.py +26 -0
  15. athena_intelligence-0.1.79/src/athena/types/data_frame_request_out.py +30 -0
  16. athena_intelligence-0.1.79/src/athena/types/data_frame_request_out_columns_item.py +5 -0
  17. athena_intelligence-0.1.79/src/athena/types/data_frame_request_out_data_item_item.py +5 -0
  18. athena_intelligence-0.1.79/src/athena/types/data_frame_request_out_index_item.py +5 -0
  19. athena_intelligence-0.1.79/src/athena/types/file_fetch_error.py +26 -0
  20. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/llm_model.py +24 -0
  21. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/model.py +4 -0
  22. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/query_model.py +1 -0
  23. athena_intelligence-0.1.77/src/athena/errors/__init__.py +0 -5
  24. athena_intelligence-0.1.77/src/athena/workflow/__init__.py +0 -2
  25. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/README.md +0 -0
  26. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/base_client.py +0 -0
  27. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/chain/__init__.py +0 -0
  28. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/client.py +0 -0
  29. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/__init__.py +0 -0
  30. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/api_error.py +0 -0
  31. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/datetime_utils.py +0 -0
  32. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/file.py +0 -0
  33. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/http_client.py +0 -0
  34. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/jsonable_encoder.py +0 -0
  35. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/pydantic_utilities.py +0 -0
  36. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/remove_none_from_dict.py +0 -0
  37. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/core/request_options.py +0 -0
  38. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/dataset/__init__.py +0 -0
  39. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/dataset/client.py +0 -0
  40. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/environment.py +0 -0
  41. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/errors/unprocessable_entity_error.py +0 -0
  42. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/message/__init__.py +0 -0
  43. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/message/client.py +0 -0
  44. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/polling_message_client.py +0 -0
  45. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/py.typed +0 -0
  46. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/query/__init__.py +0 -0
  47. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/query/client.py +0 -0
  48. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/report/__init__.py +0 -0
  49. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/report/client.py +0 -0
  50. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/search/__init__.py +0 -0
  51. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/search/client.py +0 -0
  52. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/snippet/__init__.py +0 -0
  53. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/snippet/client.py +0 -0
  54. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/convert_pdf_to_sheet_out.py +0 -0
  55. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/dataset.py +0 -0
  56. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/document.py +0 -0
  57. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/excecute_tool_first_workflow_out.py +0 -0
  58. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/file_data_response.py +0 -0
  59. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/filter_model.py +0 -0
  60. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/filter_operator.py +0 -0
  61. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/firecrawl_scrape_url_data_reponse_dto.py +0 -0
  62. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/firecrawl_scrape_url_metadata.py +0 -0
  63. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/get_datasets_response.py +0 -0
  64. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/get_snippet_out.py +0 -0
  65. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/get_snippets_response.py +0 -0
  66. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/http_validation_error.py +0 -0
  67. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/langchain_documents_request_out.py +0 -0
  68. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/map_reduce_chain_out.py +0 -0
  69. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/message_out.py +0 -0
  70. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/message_out_dto.py +0 -0
  71. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/publish_formats.py +0 -0
  72. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/report.py +0 -0
  73. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/researcher_out.py +0 -0
  74. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/semantic_query_out.py +0 -0
  75. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/snippet.py +0 -0
  76. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/sql_results.py +0 -0
  77. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/status_enum.py +0 -0
  78. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/structured_parse_result.py +0 -0
  79. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/time_dimension_model.py +0 -0
  80. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/tools.py +0 -0
  81. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/upload_documents_out.py +0 -0
  82. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/url_result.py +0 -0
  83. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/validation_error.py +0 -0
  84. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/validation_error_loc_item.py +0 -0
  85. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/types/workflow_status_out.py +0 -0
  86. {athena_intelligence-0.1.77/src/athena/tools → athena_intelligence-0.1.79/src/athena/upload}/__init__.py +0 -0
  87. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/upload/client.py +0 -0
  88. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/version.py +0 -0
  89. {athena_intelligence-0.1.77/src/athena/upload → athena_intelligence-0.1.79/src/athena/workflow}/__init__.py +0 -0
  90. {athena_intelligence-0.1.77 → athena_intelligence-0.1.79}/src/athena/workflow/client.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: athena-intelligence
3
- Version: 0.1.77
3
+ Version: 0.1.79
4
4
  Summary:
5
5
  Requires-Python: >=3.8,<4.0
6
6
  Classifier: Programming Language :: Python :: 3
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "athena-intelligence"
3
- version = "0.1.77"
3
+ version = "0.1.79"
4
4
  description = ""
5
5
  readme = "README.md"
6
6
  authors = []
@@ -2,10 +2,16 @@
2
2
 
3
3
  from .types import (
4
4
  ConvertPdfToSheetOut,
5
+ DataFrameParsingError,
6
+ DataFrameRequestOut,
7
+ DataFrameRequestOutColumnsItem,
8
+ DataFrameRequestOutDataItemItem,
9
+ DataFrameRequestOutIndexItem,
5
10
  Dataset,
6
11
  Document,
7
12
  ExcecuteToolFirstWorkflowOut,
8
13
  FileDataResponse,
14
+ FileFetchError,
9
15
  FilterModel,
10
16
  FilterOperator,
11
17
  FirecrawlScrapeUrlDataReponseDto,
@@ -37,18 +43,25 @@ from .types import (
37
43
  ValidationErrorLocItem,
38
44
  WorkflowStatusOut,
39
45
  )
40
- from .errors import UnprocessableEntityError
46
+ from .errors import InternalServerError, NotFoundError, UnprocessableEntityError
41
47
  from . import chain, dataset, message, query, report, search, snippet, tools, upload, workflow
42
48
  from .environment import AthenaEnvironment
49
+ from .tools import ToolsDataFrameRequestColumnsItem
43
50
  from .version import __version__
44
51
 
45
52
  __all__ = [
46
53
  "AthenaEnvironment",
47
54
  "ConvertPdfToSheetOut",
55
+ "DataFrameParsingError",
56
+ "DataFrameRequestOut",
57
+ "DataFrameRequestOutColumnsItem",
58
+ "DataFrameRequestOutDataItemItem",
59
+ "DataFrameRequestOutIndexItem",
48
60
  "Dataset",
49
61
  "Document",
50
62
  "ExcecuteToolFirstWorkflowOut",
51
63
  "FileDataResponse",
64
+ "FileFetchError",
52
65
  "FilterModel",
53
66
  "FilterOperator",
54
67
  "FirecrawlScrapeUrlDataReponseDto",
@@ -57,12 +70,14 @@ __all__ = [
57
70
  "GetSnippetOut",
58
71
  "GetSnippetsResponse",
59
72
  "HttpValidationError",
73
+ "InternalServerError",
60
74
  "LangchainDocumentsRequestOut",
61
75
  "LlmModel",
62
76
  "MapReduceChainOut",
63
77
  "MessageOut",
64
78
  "MessageOutDto",
65
79
  "Model",
80
+ "NotFoundError",
66
81
  "PublishFormats",
67
82
  "QueryModel",
68
83
  "Report",
@@ -74,6 +89,7 @@ __all__ = [
74
89
  "StructuredParseResult",
75
90
  "TimeDimensionModel",
76
91
  "Tools",
92
+ "ToolsDataFrameRequestColumnsItem",
77
93
  "UnprocessableEntityError",
78
94
  "UploadDocumentsOut",
79
95
  "UrlResult",
@@ -50,7 +50,7 @@ class ChainClient:
50
50
  api_key="YOUR_API_KEY",
51
51
  )
52
52
  client.chain.structured_parse(
53
- text_input='Athena is an AI-native analytics platform and artificial employee built to accelerate analytics workflows \n by offering enterprise teams co-pilot and auto-pilot modes. Athena learns your workflow as a co-pilot, \n allowing you to hand over controls to her for autonomous execution with confidence." \n \n Give me all of the modes Athena provides.',
53
+ text_input='Athena is an AI-native analytics platform and artificial employee built to accelerate analytics workflows\n by offering enterprise teams co-pilot and auto-pilot modes. Athena learns your workflow as a co-pilot,\n allowing you to hand over controls to her for autonomous execution with confidence."\n\n Give me all of the modes Athena provides.',
54
54
  custom_type_dict={"modes": {}},
55
55
  model=LlmModel.GPT_4_TURBO,
56
56
  )
@@ -219,7 +219,7 @@ class AsyncChainClient:
219
219
  api_key="YOUR_API_KEY",
220
220
  )
221
221
  await client.chain.structured_parse(
222
- text_input='Athena is an AI-native analytics platform and artificial employee built to accelerate analytics workflows \n by offering enterprise teams co-pilot and auto-pilot modes. Athena learns your workflow as a co-pilot, \n allowing you to hand over controls to her for autonomous execution with confidence." \n \n Give me all of the modes Athena provides.',
222
+ text_input='Athena is an AI-native analytics platform and artificial employee built to accelerate analytics workflows\n by offering enterprise teams co-pilot and auto-pilot modes. Athena learns your workflow as a co-pilot,\n allowing you to hand over controls to her for autonomous execution with confidence."\n\n Give me all of the modes Athena provides.',
223
223
  custom_type_dict={"modes": {}},
224
224
  model=LlmModel.GPT_4_TURBO,
225
225
  )
@@ -17,7 +17,7 @@ class BaseClientWrapper:
17
17
  headers: typing.Dict[str, str] = {
18
18
  "X-Fern-Language": "Python",
19
19
  "X-Fern-SDK-Name": "athena-intelligence",
20
- "X-Fern-SDK-Version": "0.1.77",
20
+ "X-Fern-SDK-Version": "0.1.79",
21
21
  }
22
22
  headers["X-API-KEY"] = self.api_key
23
23
  return headers
@@ -0,0 +1,7 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from .internal_server_error import InternalServerError
4
+ from .not_found_error import NotFoundError
5
+ from .unprocessable_entity_error import UnprocessableEntityError
6
+
7
+ __all__ = ["InternalServerError", "NotFoundError", "UnprocessableEntityError"]
@@ -0,0 +1,9 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from ..core.api_error import ApiError
4
+ from ..types.data_frame_parsing_error import DataFrameParsingError
5
+
6
+
7
+ class InternalServerError(ApiError):
8
+ def __init__(self, body: DataFrameParsingError):
9
+ super().__init__(status_code=500, body=body)
@@ -0,0 +1,9 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from ..core.api_error import ApiError
4
+ from ..types.file_fetch_error import FileFetchError
5
+
6
+
7
+ class NotFoundError(ApiError):
8
+ def __init__(self, body: FileFetchError):
9
+ super().__init__(status_code=404, body=body)
@@ -0,0 +1,5 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from .types import ToolsDataFrameRequestColumnsItem
4
+
5
+ __all__ = ["ToolsDataFrameRequestColumnsItem"]
@@ -10,9 +10,14 @@ from ..core.jsonable_encoder import jsonable_encoder
10
10
  from ..core.pydantic_utilities import pydantic_v1
11
11
  from ..core.remove_none_from_dict import remove_none_from_dict
12
12
  from ..core.request_options import RequestOptions
13
+ from ..errors.internal_server_error import InternalServerError
14
+ from ..errors.not_found_error import NotFoundError
13
15
  from ..errors.unprocessable_entity_error import UnprocessableEntityError
14
16
  from ..types.convert_pdf_to_sheet_out import ConvertPdfToSheetOut
17
+ from ..types.data_frame_parsing_error import DataFrameParsingError
18
+ from ..types.data_frame_request_out import DataFrameRequestOut
15
19
  from ..types.excecute_tool_first_workflow_out import ExcecuteToolFirstWorkflowOut
20
+ from ..types.file_fetch_error import FileFetchError
16
21
  from ..types.firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
17
22
  from ..types.http_validation_error import HttpValidationError
18
23
  from ..types.langchain_documents_request_out import LangchainDocumentsRequestOut
@@ -21,6 +26,7 @@ from ..types.publish_formats import PublishFormats
21
26
  from ..types.query_model import QueryModel
22
27
  from ..types.researcher_out import ResearcherOut
23
28
  from ..types.semantic_query_out import SemanticQueryOut
29
+ from .types.tools_data_frame_request_columns_item import ToolsDataFrameRequestColumnsItem
24
30
 
25
31
  # this is used as the default value for optional parameters
26
32
  OMIT = typing.cast(typing.Any, ...)
@@ -51,7 +57,7 @@ class ToolsClient:
51
57
  api_key="YOUR_API_KEY",
52
58
  )
53
59
  client.tools.scrape_url(
54
- url="https://athenaintelligence.ai",
60
+ url="https://www.athenaintelligence.ai",
55
61
  )
56
62
  """
57
63
  _request: typing.Dict[str, typing.Any] = {"url": url}
@@ -169,6 +175,153 @@ class ToolsClient:
169
175
  raise ApiError(status_code=_response.status_code, body=_response.text)
170
176
  raise ApiError(status_code=_response.status_code, body=_response_json)
171
177
 
178
+ def data_frame(
179
+ self,
180
+ *,
181
+ document_id: str,
182
+ row_limit: typing.Optional[int] = None,
183
+ index_column: typing.Optional[int] = None,
184
+ columns: typing.Optional[
185
+ typing.Union[ToolsDataFrameRequestColumnsItem, typing.Sequence[ToolsDataFrameRequestColumnsItem]]
186
+ ] = None,
187
+ sheet_name: typing.Optional[str] = None,
188
+ separator: typing.Optional[str] = None,
189
+ request_options: typing.Optional[RequestOptions] = None,
190
+ ) -> DataFrameRequestOut:
191
+ """
192
+ Parameters:
193
+ - document_id: str.
194
+
195
+ - row_limit: typing.Optional[int].
196
+
197
+ - index_column: typing.Optional[int].
198
+
199
+ - columns: typing.Optional[typing.Union[ToolsDataFrameRequestColumnsItem, typing.Sequence[ToolsDataFrameRequestColumnsItem]]]. should be a list of strings or a list of integers
200
+
201
+ - sheet_name: typing.Optional[str]. only for excel files
202
+
203
+ - separator: typing.Optional[str]. only for csv files
204
+
205
+ - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
206
+ ---
207
+ from athena.client import Athena
208
+
209
+ client = Athena(
210
+ api_key="YOUR_API_KEY",
211
+ )
212
+ client.tools.data_frame(
213
+ document_id="document_id",
214
+ )
215
+ """
216
+ _response = self._client_wrapper.httpx_client.request(
217
+ method="GET",
218
+ url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/file/data-frame"),
219
+ params=jsonable_encoder(
220
+ remove_none_from_dict(
221
+ {
222
+ "document_id": document_id,
223
+ "row_limit": row_limit,
224
+ "index_column": index_column,
225
+ "columns": columns,
226
+ "sheet_name": sheet_name,
227
+ "separator": separator,
228
+ **(
229
+ request_options.get("additional_query_parameters", {})
230
+ if request_options is not None
231
+ else {}
232
+ ),
233
+ }
234
+ )
235
+ ),
236
+ headers=jsonable_encoder(
237
+ remove_none_from_dict(
238
+ {
239
+ **self._client_wrapper.get_headers(),
240
+ **(request_options.get("additional_headers", {}) if request_options is not None else {}),
241
+ }
242
+ )
243
+ ),
244
+ timeout=request_options.get("timeout_in_seconds")
245
+ if request_options is not None and request_options.get("timeout_in_seconds") is not None
246
+ else self._client_wrapper.get_timeout(),
247
+ retries=0,
248
+ max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
249
+ )
250
+ if 200 <= _response.status_code < 300:
251
+ return pydantic_v1.parse_obj_as(DataFrameRequestOut, _response.json()) # type: ignore
252
+ if _response.status_code == 404:
253
+ raise NotFoundError(pydantic_v1.parse_obj_as(FileFetchError, _response.json())) # type: ignore
254
+ if _response.status_code == 422:
255
+ raise UnprocessableEntityError(
256
+ pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore
257
+ )
258
+ if _response.status_code == 500:
259
+ raise InternalServerError(pydantic_v1.parse_obj_as(DataFrameParsingError, _response.json())) # type: ignore
260
+ try:
261
+ _response_json = _response.json()
262
+ except JSONDecodeError:
263
+ raise ApiError(status_code=_response.status_code, body=_response.text)
264
+ raise ApiError(status_code=_response.status_code, body=_response_json)
265
+
266
+ def raw_data(self, *, document_id: str, request_options: typing.Optional[RequestOptions] = None) -> typing.Any:
267
+ """
268
+ Parameters:
269
+ - document_id: str.
270
+
271
+ - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
272
+ ---
273
+ from athena.client import Athena
274
+
275
+ client = Athena(
276
+ api_key="YOUR_API_KEY",
277
+ )
278
+ client.tools.raw_data(
279
+ document_id="document_id",
280
+ )
281
+ """
282
+ _response = self._client_wrapper.httpx_client.request(
283
+ method="GET",
284
+ url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/file/raw-data"),
285
+ params=jsonable_encoder(
286
+ remove_none_from_dict(
287
+ {
288
+ "document_id": document_id,
289
+ **(
290
+ request_options.get("additional_query_parameters", {})
291
+ if request_options is not None
292
+ else {}
293
+ ),
294
+ }
295
+ )
296
+ ),
297
+ headers=jsonable_encoder(
298
+ remove_none_from_dict(
299
+ {
300
+ **self._client_wrapper.get_headers(),
301
+ **(request_options.get("additional_headers", {}) if request_options is not None else {}),
302
+ }
303
+ )
304
+ ),
305
+ timeout=request_options.get("timeout_in_seconds")
306
+ if request_options is not None and request_options.get("timeout_in_seconds") is not None
307
+ else self._client_wrapper.get_timeout(),
308
+ retries=0,
309
+ max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
310
+ )
311
+ if 200 <= _response.status_code < 300:
312
+ return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore
313
+ if _response.status_code == 404:
314
+ raise NotFoundError(pydantic_v1.parse_obj_as(FileFetchError, _response.json())) # type: ignore
315
+ if _response.status_code == 422:
316
+ raise UnprocessableEntityError(
317
+ pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore
318
+ )
319
+ try:
320
+ _response_json = _response.json()
321
+ except JSONDecodeError:
322
+ raise ApiError(status_code=_response.status_code, body=_response.text)
323
+ raise ApiError(status_code=_response.status_code, body=_response_json)
324
+
172
325
  def tool_first_workflow(
173
326
  self,
174
327
  *,
@@ -510,7 +663,7 @@ class AsyncToolsClient:
510
663
  api_key="YOUR_API_KEY",
511
664
  )
512
665
  await client.tools.scrape_url(
513
- url="https://athenaintelligence.ai",
666
+ url="https://www.athenaintelligence.ai",
514
667
  )
515
668
  """
516
669
  _request: typing.Dict[str, typing.Any] = {"url": url}
@@ -628,6 +781,155 @@ class AsyncToolsClient:
628
781
  raise ApiError(status_code=_response.status_code, body=_response.text)
629
782
  raise ApiError(status_code=_response.status_code, body=_response_json)
630
783
 
784
+ async def data_frame(
785
+ self,
786
+ *,
787
+ document_id: str,
788
+ row_limit: typing.Optional[int] = None,
789
+ index_column: typing.Optional[int] = None,
790
+ columns: typing.Optional[
791
+ typing.Union[ToolsDataFrameRequestColumnsItem, typing.Sequence[ToolsDataFrameRequestColumnsItem]]
792
+ ] = None,
793
+ sheet_name: typing.Optional[str] = None,
794
+ separator: typing.Optional[str] = None,
795
+ request_options: typing.Optional[RequestOptions] = None,
796
+ ) -> DataFrameRequestOut:
797
+ """
798
+ Parameters:
799
+ - document_id: str.
800
+
801
+ - row_limit: typing.Optional[int].
802
+
803
+ - index_column: typing.Optional[int].
804
+
805
+ - columns: typing.Optional[typing.Union[ToolsDataFrameRequestColumnsItem, typing.Sequence[ToolsDataFrameRequestColumnsItem]]]. should be a list of strings or a list of integers
806
+
807
+ - sheet_name: typing.Optional[str]. only for excel files
808
+
809
+ - separator: typing.Optional[str]. only for csv files
810
+
811
+ - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
812
+ ---
813
+ from athena.client import AsyncAthena
814
+
815
+ client = AsyncAthena(
816
+ api_key="YOUR_API_KEY",
817
+ )
818
+ await client.tools.data_frame(
819
+ document_id="document_id",
820
+ )
821
+ """
822
+ _response = await self._client_wrapper.httpx_client.request(
823
+ method="GET",
824
+ url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/file/data-frame"),
825
+ params=jsonable_encoder(
826
+ remove_none_from_dict(
827
+ {
828
+ "document_id": document_id,
829
+ "row_limit": row_limit,
830
+ "index_column": index_column,
831
+ "columns": columns,
832
+ "sheet_name": sheet_name,
833
+ "separator": separator,
834
+ **(
835
+ request_options.get("additional_query_parameters", {})
836
+ if request_options is not None
837
+ else {}
838
+ ),
839
+ }
840
+ )
841
+ ),
842
+ headers=jsonable_encoder(
843
+ remove_none_from_dict(
844
+ {
845
+ **self._client_wrapper.get_headers(),
846
+ **(request_options.get("additional_headers", {}) if request_options is not None else {}),
847
+ }
848
+ )
849
+ ),
850
+ timeout=request_options.get("timeout_in_seconds")
851
+ if request_options is not None and request_options.get("timeout_in_seconds") is not None
852
+ else self._client_wrapper.get_timeout(),
853
+ retries=0,
854
+ max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
855
+ )
856
+ if 200 <= _response.status_code < 300:
857
+ return pydantic_v1.parse_obj_as(DataFrameRequestOut, _response.json()) # type: ignore
858
+ if _response.status_code == 404:
859
+ raise NotFoundError(pydantic_v1.parse_obj_as(FileFetchError, _response.json())) # type: ignore
860
+ if _response.status_code == 422:
861
+ raise UnprocessableEntityError(
862
+ pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore
863
+ )
864
+ if _response.status_code == 500:
865
+ raise InternalServerError(pydantic_v1.parse_obj_as(DataFrameParsingError, _response.json())) # type: ignore
866
+ try:
867
+ _response_json = _response.json()
868
+ except JSONDecodeError:
869
+ raise ApiError(status_code=_response.status_code, body=_response.text)
870
+ raise ApiError(status_code=_response.status_code, body=_response_json)
871
+
872
+ async def raw_data(
873
+ self, *, document_id: str, request_options: typing.Optional[RequestOptions] = None
874
+ ) -> typing.Any:
875
+ """
876
+ Parameters:
877
+ - document_id: str.
878
+
879
+ - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
880
+ ---
881
+ from athena.client import AsyncAthena
882
+
883
+ client = AsyncAthena(
884
+ api_key="YOUR_API_KEY",
885
+ )
886
+ await client.tools.raw_data(
887
+ document_id="document_id",
888
+ )
889
+ """
890
+ _response = await self._client_wrapper.httpx_client.request(
891
+ method="GET",
892
+ url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/file/raw-data"),
893
+ params=jsonable_encoder(
894
+ remove_none_from_dict(
895
+ {
896
+ "document_id": document_id,
897
+ **(
898
+ request_options.get("additional_query_parameters", {})
899
+ if request_options is not None
900
+ else {}
901
+ ),
902
+ }
903
+ )
904
+ ),
905
+ headers=jsonable_encoder(
906
+ remove_none_from_dict(
907
+ {
908
+ **self._client_wrapper.get_headers(),
909
+ **(request_options.get("additional_headers", {}) if request_options is not None else {}),
910
+ }
911
+ )
912
+ ),
913
+ timeout=request_options.get("timeout_in_seconds")
914
+ if request_options is not None and request_options.get("timeout_in_seconds") is not None
915
+ else self._client_wrapper.get_timeout(),
916
+ retries=0,
917
+ max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
918
+ )
919
+ if 200 <= _response.status_code < 300:
920
+ return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore
921
+ if _response.status_code == 404:
922
+ raise NotFoundError(pydantic_v1.parse_obj_as(FileFetchError, _response.json())) # type: ignore
923
+ if _response.status_code == 422:
924
+ raise UnprocessableEntityError(
925
+ pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore
926
+ )
927
+ try:
928
+ _response_json = _response.json()
929
+ except JSONDecodeError:
930
+ raise ApiError(status_code=_response.status_code, body=_response.text)
931
+ raise ApiError(status_code=_response.status_code, body=_response_json)
932
+
631
933
  async def tool_first_workflow(
632
934
  self,
633
935
  *,
@@ -0,0 +1,5 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from .tools_data_frame_request_columns_item import ToolsDataFrameRequestColumnsItem
4
+
5
+ __all__ = ["ToolsDataFrameRequestColumnsItem"]
@@ -0,0 +1,5 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import typing
4
+
5
+ ToolsDataFrameRequestColumnsItem = typing.Union[int, str]
@@ -1,10 +1,16 @@
1
1
  # This file was auto-generated by Fern from our API Definition.
2
2
 
3
3
  from .convert_pdf_to_sheet_out import ConvertPdfToSheetOut
4
+ from .data_frame_parsing_error import DataFrameParsingError
5
+ from .data_frame_request_out import DataFrameRequestOut
6
+ from .data_frame_request_out_columns_item import DataFrameRequestOutColumnsItem
7
+ from .data_frame_request_out_data_item_item import DataFrameRequestOutDataItemItem
8
+ from .data_frame_request_out_index_item import DataFrameRequestOutIndexItem
4
9
  from .dataset import Dataset
5
10
  from .document import Document
6
11
  from .excecute_tool_first_workflow_out import ExcecuteToolFirstWorkflowOut
7
12
  from .file_data_response import FileDataResponse
13
+ from .file_fetch_error import FileFetchError
8
14
  from .filter_model import FilterModel
9
15
  from .filter_operator import FilterOperator
10
16
  from .firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
@@ -38,10 +44,16 @@ from .workflow_status_out import WorkflowStatusOut
38
44
 
39
45
  __all__ = [
40
46
  "ConvertPdfToSheetOut",
47
+ "DataFrameParsingError",
48
+ "DataFrameRequestOut",
49
+ "DataFrameRequestOutColumnsItem",
50
+ "DataFrameRequestOutDataItemItem",
51
+ "DataFrameRequestOutIndexItem",
41
52
  "Dataset",
42
53
  "Document",
43
54
  "ExcecuteToolFirstWorkflowOut",
44
55
  "FileDataResponse",
56
+ "FileFetchError",
45
57
  "FilterModel",
46
58
  "FilterOperator",
47
59
  "FirecrawlScrapeUrlDataReponseDto",
@@ -0,0 +1,26 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import datetime as dt
4
+ import typing
5
+
6
+ from ..core.datetime_utils import serialize_datetime
7
+ from ..core.pydantic_utilities import pydantic_v1
8
+
9
+
10
+ class DataFrameParsingError(pydantic_v1.BaseModel):
11
+ message: str
12
+ document_id: str
13
+
14
+ def json(self, **kwargs: typing.Any) -> str:
15
+ kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
16
+ return super().json(**kwargs_with_defaults)
17
+
18
+ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
19
+ kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
20
+ return super().dict(**kwargs_with_defaults)
21
+
22
+ class Config:
23
+ frozen = True
24
+ smart_union = True
25
+ extra = pydantic_v1.Extra.allow
26
+ json_encoders = {dt.datetime: serialize_datetime}
@@ -0,0 +1,30 @@
1
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing

from ..core.datetime_utils import serialize_datetime
from ..core.pydantic_utilities import pydantic_v1
from .data_frame_request_out_columns_item import DataFrameRequestOutColumnsItem
from .data_frame_request_out_data_item_item import DataFrameRequestOutDataItemItem
from .data_frame_request_out_index_item import DataFrameRequestOutIndexItem


class DataFrameRequestOut(pydantic_v1.BaseModel):
    """Tabular payload in pandas "split" orientation: columns, index, and row data."""

    # Column labels, one per column of ``data``.
    columns: typing.List[DataFrameRequestOutColumnsItem]
    # Row labels, one per row of ``data``.
    index: typing.List[DataFrameRequestOutIndexItem]
    # Row-major cell values; each inner list is one row.
    data: typing.List[typing.List[DataFrameRequestOutDataItemItem]]

    def json(self, **kwargs: typing.Any) -> str:
        # Default to aliased field names and skip unset fields; explicit
        # caller-supplied kwargs still take precedence.
        kwargs.setdefault("by_alias", True)
        kwargs.setdefault("exclude_unset", True)
        return super().json(**kwargs)

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        # Mirror json(): aliased keys, unset fields excluded, caller overrides win.
        kwargs.setdefault("by_alias", True)
        kwargs.setdefault("exclude_unset", True)
        return super().dict(**kwargs)

    class Config:
        frozen = True
        smart_union = True
        extra = pydantic_v1.Extra.allow
        json_encoders = {dt.datetime: serialize_datetime}
@@ -0,0 +1,5 @@
1
# This file was auto-generated by Fern from our API Definition.

import typing

# A single column label in a data-frame response: free-form text or a number.
DataFrameRequestOutColumnsItem = typing.Union[str, int, float]
@@ -0,0 +1,5 @@
1
# This file was auto-generated by Fern from our API Definition.

import typing

# A single cell value in a data-frame response: free-form text or a number.
DataFrameRequestOutDataItemItem = typing.Union[str, int, float]
@@ -0,0 +1,5 @@
1
# This file was auto-generated by Fern from our API Definition.

import typing

# A single row (index) label in a data-frame response: free-form text or a number.
DataFrameRequestOutIndexItem = typing.Union[str, int, float]
@@ -0,0 +1,26 @@
1
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing

from ..core.datetime_utils import serialize_datetime
from ..core.pydantic_utilities import pydantic_v1


class FileFetchError(pydantic_v1.BaseModel):
    """Error payload raised when a remote file could not be fetched."""

    # Human-readable description of the fetch failure.
    message: str
    # HTTP status code of the failed fetch, when one was received.
    status_code: typing.Optional[int] = None

    def json(self, **kwargs: typing.Any) -> str:
        # Default to aliased field names and skip unset fields; explicit
        # caller-supplied kwargs still take precedence.
        kwargs.setdefault("by_alias", True)
        kwargs.setdefault("exclude_unset", True)
        return super().json(**kwargs)

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        # Mirror json(): aliased keys, unset fields excluded, caller overrides win.
        kwargs.setdefault("by_alias", True)
        kwargs.setdefault("exclude_unset", True)
        return super().dict(**kwargs)

    class Config:
        frozen = True
        smart_union = True
        extra = pydantic_v1.Extra.allow
        json_encoders = {dt.datetime: serialize_datetime}
@@ -22,6 +22,7 @@ class LlmModel(str, enum.Enum):
22
22
  MIXTRAL_8_X_22_B_INSTRUCT = "mixtral-8x22b-instruct"
23
23
  LLAMA_V_38_B_INSTRUCT = "llama-v3-8b-instruct"
24
24
  LLAMA_V_370_B_INSTRUCT = "llama-v3-70b-instruct"
25
+ LLAMA_V_3_P_1405_B_INSTRUCT = "llama-v3p1-405b-instruct"
25
26
  CLAUDE_35_SONNET_20240620 = "claude-3-5-sonnet-20240620"
26
27
  CLAUDE_3_OPUS_20240229 = "claude-3-opus-20240229"
27
28
  CLAUDE_3_SONNET_20240229 = "claude-3-sonnet-20240229"
@@ -30,6 +31,11 @@ class LlmModel(str, enum.Enum):
30
31
  GROQ_LLAMA_38_B_8192 = "groq-llama3-8b-8192"
31
32
  GROQ_LLAMA_370_B_8192 = "groq-llama3-70b-8192"
32
33
  GROQ_GEMMA_7_B_IT = "groq-gemma-7b-it"
34
+ GROQ_LLAMA_31405_B_REASONING = "groq-llama-3.1-405b-reasoning"
35
+ GROQ_LLAMA_3170_B_VERSATILE = "groq-llama-3.1-70b-versatile"
36
+ GROQ_LLAMA_318_B_INSTANT = "groq-llama-3.1-8b-instant"
37
+ GROQ_LLAMA_3_GROQ_70_B_8192_TOOL_USE_PREVIEW = "groq-llama3-groq-70b-8192-tool-use-preview"
38
+ GROQ_LLAMA_3_GROQ_8_B_8192_TOOL_USE_PREVIEW = "groq-llama3-groq-8b-8192-tool-use-preview"
33
39
  DATABRICKS_DBRX = "databricks-dbrx"
34
40
  GOOGLE_GEMINI_10_PRO_LATEST = "google-gemini-1.0-pro-latest"
35
41
  GOOGLE_GEMINI_15_PRO_LATEST = "google-gemini-1.5-pro-latest"
@@ -49,6 +55,7 @@ class LlmModel(str, enum.Enum):
49
55
  mixtral_8_x_22_b_instruct: typing.Callable[[], T_Result],
50
56
  llama_v_38_b_instruct: typing.Callable[[], T_Result],
51
57
  llama_v_370_b_instruct: typing.Callable[[], T_Result],
58
+ llama_v_3_p_1405_b_instruct: typing.Callable[[], T_Result],
52
59
  claude_35_sonnet_20240620: typing.Callable[[], T_Result],
53
60
  claude_3_opus_20240229: typing.Callable[[], T_Result],
54
61
  claude_3_sonnet_20240229: typing.Callable[[], T_Result],
@@ -57,6 +64,11 @@ class LlmModel(str, enum.Enum):
57
64
  groq_llama_38_b_8192: typing.Callable[[], T_Result],
58
65
  groq_llama_370_b_8192: typing.Callable[[], T_Result],
59
66
  groq_gemma_7_b_it: typing.Callable[[], T_Result],
67
+ groq_llama_31405_b_reasoning: typing.Callable[[], T_Result],
68
+ groq_llama_3170_b_versatile: typing.Callable[[], T_Result],
69
+ groq_llama_318_b_instant: typing.Callable[[], T_Result],
70
+ groq_llama_3_groq_70_b_8192_tool_use_preview: typing.Callable[[], T_Result],
71
+ groq_llama_3_groq_8_b_8192_tool_use_preview: typing.Callable[[], T_Result],
60
72
  databricks_dbrx: typing.Callable[[], T_Result],
61
73
  google_gemini_10_pro_latest: typing.Callable[[], T_Result],
62
74
  google_gemini_15_pro_latest: typing.Callable[[], T_Result],
@@ -85,6 +97,8 @@ class LlmModel(str, enum.Enum):
85
97
  return llama_v_38_b_instruct()
86
98
  if self is LlmModel.LLAMA_V_370_B_INSTRUCT:
87
99
  return llama_v_370_b_instruct()
100
+ if self is LlmModel.LLAMA_V_3_P_1405_B_INSTRUCT:
101
+ return llama_v_3_p_1405_b_instruct()
88
102
  if self is LlmModel.CLAUDE_35_SONNET_20240620:
89
103
  return claude_35_sonnet_20240620()
90
104
  if self is LlmModel.CLAUDE_3_OPUS_20240229:
@@ -101,6 +115,16 @@ class LlmModel(str, enum.Enum):
101
115
  return groq_llama_370_b_8192()
102
116
  if self is LlmModel.GROQ_GEMMA_7_B_IT:
103
117
  return groq_gemma_7_b_it()
118
+ if self is LlmModel.GROQ_LLAMA_31405_B_REASONING:
119
+ return groq_llama_31405_b_reasoning()
120
+ if self is LlmModel.GROQ_LLAMA_3170_B_VERSATILE:
121
+ return groq_llama_3170_b_versatile()
122
+ if self is LlmModel.GROQ_LLAMA_318_B_INSTANT:
123
+ return groq_llama_318_b_instant()
124
+ if self is LlmModel.GROQ_LLAMA_3_GROQ_70_B_8192_TOOL_USE_PREVIEW:
125
+ return groq_llama_3_groq_70_b_8192_tool_use_preview()
126
+ if self is LlmModel.GROQ_LLAMA_3_GROQ_8_B_8192_TOOL_USE_PREVIEW:
127
+ return groq_llama_3_groq_8_b_8192_tool_use_preview()
104
128
  if self is LlmModel.DATABRICKS_DBRX:
105
129
  return databricks_dbrx()
106
130
  if self is LlmModel.GOOGLE_GEMINI_10_PRO_LATEST:
@@ -25,6 +25,7 @@ class Model(str, enum.Enum):
25
25
  CLAUDE_3_OPUS_20240229 = "claude-3-opus-20240229"
26
26
  CLAUDE_3_SONNET_20240229 = "claude-3-sonnet-20240229"
27
27
  CLAUDE_3_HAIKU_20240307 = "claude-3-haiku-20240307"
28
+ CLAUDE_35_SONNET_20240620 = "claude-3-5-sonnet-20240620"
28
29
  GOOGLE_GEMINI_10_PRO_LATEST = "google-gemini-1.0-pro-latest"
29
30
  DATABRICKS_DBRX = "databricks-dbrx"
30
31
 
@@ -44,6 +45,7 @@ class Model(str, enum.Enum):
44
45
  claude_3_opus_20240229: typing.Callable[[], T_Result],
45
46
  claude_3_sonnet_20240229: typing.Callable[[], T_Result],
46
47
  claude_3_haiku_20240307: typing.Callable[[], T_Result],
48
+ claude_35_sonnet_20240620: typing.Callable[[], T_Result],
47
49
  google_gemini_10_pro_latest: typing.Callable[[], T_Result],
48
50
  databricks_dbrx: typing.Callable[[], T_Result],
49
51
  ) -> T_Result:
@@ -75,6 +77,8 @@ class Model(str, enum.Enum):
75
77
  return claude_3_sonnet_20240229()
76
78
  if self is Model.CLAUDE_3_HAIKU_20240307:
77
79
  return claude_3_haiku_20240307()
80
+ if self is Model.CLAUDE_35_SONNET_20240620:
81
+ return claude_35_sonnet_20240620()
78
82
  if self is Model.GOOGLE_GEMINI_10_PRO_LATEST:
79
83
  return google_gemini_10_pro_latest()
80
84
  if self is Model.DATABRICKS_DBRX:
@@ -16,6 +16,7 @@ class QueryModel(pydantic_v1.BaseModel):
16
16
  )
17
17
  dimensions: typing.Optional[typing.List[str]] = None
18
18
  filters: typing.Optional[typing.List[FilterModel]] = None
19
+ limit: typing.Optional[int] = None
19
20
 
20
21
  def json(self, **kwargs: typing.Any) -> str:
21
22
  kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
@@ -1,5 +0,0 @@
1
- # This file was auto-generated by Fern from our API Definition.
2
-
3
- from .unprocessable_entity_error import UnprocessableEntityError
4
-
5
- __all__ = ["UnprocessableEntityError"]
@@ -1,2 +0,0 @@
1
- # This file was auto-generated by Fern from our API Definition.
2
-