athena-intelligence 0.1.93__py3-none-any.whl → 0.1.95__py3-none-any.whl
This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- athena/__init__.py +1 -76
- athena/base_client.py +4 -31
- athena/client.py +4 -9
- athena/core/client_wrapper.py +1 -1
- athena/environment.py +1 -1
- athena/tools/client.py +61 -916
- athena/tools/types/tools_data_frame_request_columns_item.py +1 -1
- athena/types/__init__.py +0 -66
- athena/types/data_frame_parsing_error.py +1 -1
- athena/types/data_frame_request_out.py +5 -1
- athena/types/data_frame_unknown_format_error.py +1 -1
- {athena_intelligence-0.1.93.dist-info → athena_intelligence-0.1.95.dist-info}/METADATA +1 -1
- athena_intelligence-0.1.95.dist-info/RECORD +40 -0
- athena/chain/__init__.py +0 -2
- athena/chain/client.py +0 -302
- athena/dataset/__init__.py +0 -2
- athena/dataset/client.py +0 -124
- athena/message/__init__.py +0 -2
- athena/message/client.py +0 -249
- athena/polling_message_client.py +0 -125
- athena/query/__init__.py +0 -2
- athena/query/client.py +0 -123
- athena/report/__init__.py +0 -2
- athena/report/client.py +0 -125
- athena/search/__init__.py +0 -2
- athena/search/client.py +0 -171
- athena/snippet/__init__.py +0 -2
- athena/snippet/client.py +0 -208
- athena/types/athena_document_v_2_out.py +0 -29
- athena/types/convert_pdf_to_sheet_out.py +0 -30
- athena/types/dataset.py +0 -33
- athena/types/document.py +0 -47
- athena/types/excecute_tool_first_workflow_out.py +0 -29
- athena/types/file_data_response.py +0 -36
- athena/types/filter_model.py +0 -32
- athena/types/filter_operator.py +0 -73
- athena/types/firecrawl_scrape_url_data_reponse_dto.py +0 -32
- athena/types/firecrawl_scrape_url_metadata.py +0 -34
- athena/types/get_datasets_response.py +0 -34
- athena/types/get_snippet_out.py +0 -29
- athena/types/get_snippets_response.py +0 -34
- athena/types/langchain_documents_request_out.py +0 -29
- athena/types/llm_model.py +0 -141
- athena/types/map_reduce_chain_out.py +0 -32
- athena/types/message_out.py +0 -29
- athena/types/message_out_dto.py +0 -35
- athena/types/model.py +0 -93
- athena/types/publish_formats.py +0 -31
- athena/types/query_model.py +0 -40
- athena/types/report.py +0 -32
- athena/types/researcher_out.py +0 -29
- athena/types/semantic_query_out.py +0 -29
- athena/types/snippet.py +0 -35
- athena/types/sql_results.py +0 -29
- athena/types/status_enum.py +0 -29
- athena/types/structured_parse_result.py +0 -29
- athena/types/time_dimension_model.py +0 -33
- athena/types/tools.py +0 -57
- athena/types/upload_documents_out.py +0 -31
- athena/types/url_result.py +0 -29
- athena/types/workflow_status_out.py +0 -31
- athena/upload/__init__.py +0 -2
- athena/upload/client.py +0 -108
- athena/workflow/__init__.py +0 -2
- athena/workflow/client.py +0 -117
- athena_intelligence-0.1.93.dist-info/RECORD +0 -92
- {athena_intelligence-0.1.93.dist-info → athena_intelligence-0.1.95.dist-info}/WHEEL +0 -0
athena/types/filter_operator.py
DELETED
@@ -1,73 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import enum
-import typing
-
-T_Result = typing.TypeVar("T_Result")
-
-
-class FilterOperator(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
-    EQUALS = "equals"
-    NOT_EQUALS = "notEquals"
-    CONTAINS = "contains"
-    NOT_CONTAINS = "notContains"
-    STARTS_WITH = "startsWith"
-    ENDS_WITH = "endsWith"
-    GT = "gt"
-    GTE = "gte"
-    LT = "lt"
-    LTE = "lte"
-    SET = "set"
-    NOT_SET = "notSet"
-    IN = "in"
-    NOT_IN = "notIn"
-
-    def visit(
-        self,
-        equals: typing.Callable[[], T_Result],
-        not_equals: typing.Callable[[], T_Result],
-        contains: typing.Callable[[], T_Result],
-        not_contains: typing.Callable[[], T_Result],
-        starts_with: typing.Callable[[], T_Result],
-        ends_with: typing.Callable[[], T_Result],
-        gt: typing.Callable[[], T_Result],
-        gte: typing.Callable[[], T_Result],
-        lt: typing.Callable[[], T_Result],
-        lte: typing.Callable[[], T_Result],
-        set_: typing.Callable[[], T_Result],
-        not_set: typing.Callable[[], T_Result],
-        in_: typing.Callable[[], T_Result],
-        not_in: typing.Callable[[], T_Result],
-    ) -> T_Result:
-        if self is FilterOperator.EQUALS:
-            return equals()
-        if self is FilterOperator.NOT_EQUALS:
-            return not_equals()
-        if self is FilterOperator.CONTAINS:
-            return contains()
-        if self is FilterOperator.NOT_CONTAINS:
-            return not_contains()
-        if self is FilterOperator.STARTS_WITH:
-            return starts_with()
-        if self is FilterOperator.ENDS_WITH:
-            return ends_with()
-        if self is FilterOperator.GT:
-            return gt()
-        if self is FilterOperator.GTE:
-            return gte()
-        if self is FilterOperator.LT:
-            return lt()
-        if self is FilterOperator.LTE:
-            return lte()
-        if self is FilterOperator.SET:
-            return set_()
-        if self is FilterOperator.NOT_SET:
-            return not_set()
-        if self is FilterOperator.IN:
-            return in_()
-        if self is FilterOperator.NOT_IN:
-            return not_in()
athena/types/firecrawl_scrape_url_data_reponse_dto.py
DELETED
@@ -1,32 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-from .firecrawl_scrape_url_metadata import FirecrawlScrapeUrlMetadata
-
-
-class FirecrawlScrapeUrlDataReponseDto(pydantic_v1.BaseModel):
-    content: str
-    markdown: str
-    metadata: FirecrawlScrapeUrlMetadata
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/firecrawl_scrape_url_metadata.py
DELETED
@@ -1,34 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-
-
-class FirecrawlScrapeUrlMetadata(pydantic_v1.BaseModel):
-    title: typing.Optional[str] = None
-    description: typing.Optional[str] = None
-    language: typing.Optional[str] = None
-    source_url: typing.Optional[str] = pydantic_v1.Field(alias="sourceURL", default=None)
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        populate_by_name = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/get_datasets_response.py
DELETED
@@ -1,34 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-from .dataset import Dataset
-
-
-class GetDatasetsResponse(pydantic_v1.BaseModel):
-    datasets: typing.List[Dataset]
-    total: int
-    page: int
-    page_size: int
-    pages: int
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/get_snippet_out.py
DELETED
@@ -1,29 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-
-
-class GetSnippetOut(pydantic_v1.BaseModel):
-    response: typing.Dict[str, typing.Any]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/get_snippets_response.py
DELETED
@@ -1,34 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-from .snippet import Snippet
-
-
-class GetSnippetsResponse(pydantic_v1.BaseModel):
-    snippets: typing.List[Snippet]
-    total: int
-    page: int
-    page_size: int
-    pages: int
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/langchain_documents_request_out.py
DELETED
@@ -1,29 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-
-
-class LangchainDocumentsRequestOut(pydantic_v1.BaseModel):
-    output: typing.Dict[str, typing.Any]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/llm_model.py
DELETED
@@ -1,141 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import enum
-import typing
-
-T_Result = typing.TypeVar("T_Result")
-
-
-class LlmModel(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
-    GPT_35_TURBO = "gpt-3.5-turbo"
-    GPT_4_TURBO = "gpt-4-turbo"
-    GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview"
-    GPT_4_O_MINI = "gpt-4o-mini"
-    GPT_4_O = "gpt-4o"
-    GPT_4_O_20240806 = "gpt-4o-2024-08-06"
-    GPT_4 = "gpt-4"
-    MIXTRAL_SMALL_8_X_7_B_0211 = "mixtral-small-8x7b-0211"
-    MISTRAL_LARGE_0224 = "mistral-large-0224"
-    MIXTRAL_8_X_22_B_INSTRUCT = "mixtral-8x22b-instruct"
-    LLAMA_V_38_B_INSTRUCT = "llama-v3-8b-instruct"
-    LLAMA_V_370_B_INSTRUCT = "llama-v3-70b-instruct"
-    LLAMA_V_3_P_1405_B_INSTRUCT = "llama-v3p1-405b-instruct"
-    CLAUDE_35_SONNET_20240620 = "claude-3-5-sonnet-20240620"
-    CLAUDE_3_OPUS_20240229 = "claude-3-opus-20240229"
-    CLAUDE_3_SONNET_20240229 = "claude-3-sonnet-20240229"
-    CLAUDE_3_HAIKU_20240307 = "claude-3-haiku-20240307"
-    GROQ_MIXTRAL_8_X_7_B_32768 = "groq-mixtral-8x7b-32768"
-    GROQ_LLAMA_38_B_8192 = "groq-llama3-8b-8192"
-    GROQ_LLAMA_370_B_8192 = "groq-llama3-70b-8192"
-    GROQ_GEMMA_7_B_IT = "groq-gemma-7b-it"
-    GROQ_LLAMA_31405_B_REASONING = "groq-llama-3.1-405b-reasoning"
-    GROQ_LLAMA_3170_B_VERSATILE = "groq-llama-3.1-70b-versatile"
-    GROQ_LLAMA_318_B_INSTANT = "groq-llama-3.1-8b-instant"
-    GROQ_LLAMA_3_GROQ_70_B_8192_TOOL_USE_PREVIEW = "groq-llama3-groq-70b-8192-tool-use-preview"
-    GROQ_LLAMA_3_GROQ_8_B_8192_TOOL_USE_PREVIEW = "groq-llama3-groq-8b-8192-tool-use-preview"
-    DATABRICKS_DBRX = "databricks-dbrx"
-    GOOGLE_GEMINI_10_PRO_LATEST = "google-gemini-1.0-pro-latest"
-    GOOGLE_GEMINI_15_PRO_LATEST = "google-gemini-1.5-pro-latest"
-    GOOGLE_GEMINI_15_FLASH_001 = "google-gemini-1.5-flash-001"
-    GOOGLE_GEMINI_15_PRO_001 = "google-gemini-1.5-pro-001"
-
-    def visit(
-        self,
-        gpt_35_turbo: typing.Callable[[], T_Result],
-        gpt_4_turbo: typing.Callable[[], T_Result],
-        gpt_4_turbo_preview: typing.Callable[[], T_Result],
-        gpt_4_o_mini: typing.Callable[[], T_Result],
-        gpt_4_o: typing.Callable[[], T_Result],
-        gpt_4_o_20240806: typing.Callable[[], T_Result],
-        gpt_4: typing.Callable[[], T_Result],
-        mixtral_small_8_x_7_b_0211: typing.Callable[[], T_Result],
-        mistral_large_0224: typing.Callable[[], T_Result],
-        mixtral_8_x_22_b_instruct: typing.Callable[[], T_Result],
-        llama_v_38_b_instruct: typing.Callable[[], T_Result],
-        llama_v_370_b_instruct: typing.Callable[[], T_Result],
-        llama_v_3_p_1405_b_instruct: typing.Callable[[], T_Result],
-        claude_35_sonnet_20240620: typing.Callable[[], T_Result],
-        claude_3_opus_20240229: typing.Callable[[], T_Result],
-        claude_3_sonnet_20240229: typing.Callable[[], T_Result],
-        claude_3_haiku_20240307: typing.Callable[[], T_Result],
-        groq_mixtral_8_x_7_b_32768: typing.Callable[[], T_Result],
-        groq_llama_38_b_8192: typing.Callable[[], T_Result],
-        groq_llama_370_b_8192: typing.Callable[[], T_Result],
-        groq_gemma_7_b_it: typing.Callable[[], T_Result],
-        groq_llama_31405_b_reasoning: typing.Callable[[], T_Result],
-        groq_llama_3170_b_versatile: typing.Callable[[], T_Result],
-        groq_llama_318_b_instant: typing.Callable[[], T_Result],
-        groq_llama_3_groq_70_b_8192_tool_use_preview: typing.Callable[[], T_Result],
-        groq_llama_3_groq_8_b_8192_tool_use_preview: typing.Callable[[], T_Result],
-        databricks_dbrx: typing.Callable[[], T_Result],
-        google_gemini_10_pro_latest: typing.Callable[[], T_Result],
-        google_gemini_15_pro_latest: typing.Callable[[], T_Result],
-        google_gemini_15_flash_001: typing.Callable[[], T_Result],
-        google_gemini_15_pro_001: typing.Callable[[], T_Result],
-    ) -> T_Result:
-        if self is LlmModel.GPT_35_TURBO:
-            return gpt_35_turbo()
-        if self is LlmModel.GPT_4_TURBO:
-            return gpt_4_turbo()
-        if self is LlmModel.GPT_4_TURBO_PREVIEW:
-            return gpt_4_turbo_preview()
-        if self is LlmModel.GPT_4_O_MINI:
-            return gpt_4_o_mini()
-        if self is LlmModel.GPT_4_O:
-            return gpt_4_o()
-        if self is LlmModel.GPT_4_O_20240806:
-            return gpt_4_o_20240806()
-        if self is LlmModel.GPT_4:
-            return gpt_4()
-        if self is LlmModel.MIXTRAL_SMALL_8_X_7_B_0211:
-            return mixtral_small_8_x_7_b_0211()
-        if self is LlmModel.MISTRAL_LARGE_0224:
-            return mistral_large_0224()
-        if self is LlmModel.MIXTRAL_8_X_22_B_INSTRUCT:
-            return mixtral_8_x_22_b_instruct()
-        if self is LlmModel.LLAMA_V_38_B_INSTRUCT:
-            return llama_v_38_b_instruct()
-        if self is LlmModel.LLAMA_V_370_B_INSTRUCT:
-            return llama_v_370_b_instruct()
-        if self is LlmModel.LLAMA_V_3_P_1405_B_INSTRUCT:
-            return llama_v_3_p_1405_b_instruct()
-        if self is LlmModel.CLAUDE_35_SONNET_20240620:
-            return claude_35_sonnet_20240620()
-        if self is LlmModel.CLAUDE_3_OPUS_20240229:
-            return claude_3_opus_20240229()
-        if self is LlmModel.CLAUDE_3_SONNET_20240229:
-            return claude_3_sonnet_20240229()
-        if self is LlmModel.CLAUDE_3_HAIKU_20240307:
-            return claude_3_haiku_20240307()
-        if self is LlmModel.GROQ_MIXTRAL_8_X_7_B_32768:
-            return groq_mixtral_8_x_7_b_32768()
-        if self is LlmModel.GROQ_LLAMA_38_B_8192:
-            return groq_llama_38_b_8192()
-        if self is LlmModel.GROQ_LLAMA_370_B_8192:
-            return groq_llama_370_b_8192()
-        if self is LlmModel.GROQ_GEMMA_7_B_IT:
-            return groq_gemma_7_b_it()
-        if self is LlmModel.GROQ_LLAMA_31405_B_REASONING:
-            return groq_llama_31405_b_reasoning()
-        if self is LlmModel.GROQ_LLAMA_3170_B_VERSATILE:
-            return groq_llama_3170_b_versatile()
-        if self is LlmModel.GROQ_LLAMA_318_B_INSTANT:
-            return groq_llama_318_b_instant()
-        if self is LlmModel.GROQ_LLAMA_3_GROQ_70_B_8192_TOOL_USE_PREVIEW:
-            return groq_llama_3_groq_70_b_8192_tool_use_preview()
-        if self is LlmModel.GROQ_LLAMA_3_GROQ_8_B_8192_TOOL_USE_PREVIEW:
-            return groq_llama_3_groq_8_b_8192_tool_use_preview()
-        if self is LlmModel.DATABRICKS_DBRX:
-            return databricks_dbrx()
-        if self is LlmModel.GOOGLE_GEMINI_10_PRO_LATEST:
-            return google_gemini_10_pro_latest()
-        if self is LlmModel.GOOGLE_GEMINI_15_PRO_LATEST:
-            return google_gemini_15_pro_latest()
-        if self is LlmModel.GOOGLE_GEMINI_15_FLASH_001:
-            return google_gemini_15_flash_001()
-        if self is LlmModel.GOOGLE_GEMINI_15_PRO_001:
-            return google_gemini_15_pro_001()
athena/types/map_reduce_chain_out.py
DELETED
@@ -1,32 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-from .document import Document
-
-
-class MapReduceChainOut(pydantic_v1.BaseModel):
-    input: str
-    input_documents: typing.List[Document]
-    output_text: str
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/message_out.py
DELETED
@@ -1,29 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-
-
-class MessageOut(pydantic_v1.BaseModel):
-    id: str
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/message_out_dto.py
DELETED
@@ -1,35 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-from .status_enum import StatusEnum
-
-
-class MessageOutDto(pydantic_v1.BaseModel):
-    id: str
-    conversation_id: str
-    logs: typing.Optional[str] = None
-    content: typing.Optional[str] = None
-    created_at: str
-    status: StatusEnum
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/model.py
DELETED
@@ -1,93 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import enum
-import typing
-
-T_Result = typing.TypeVar("T_Result")
-
-
-class Model(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
-    GPT_35_TURBO = "gpt-3.5-turbo"
-    GPT_4_TURBO = "gpt-4-turbo"
-    GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview"
-    GPT_4 = "gpt-4"
-    GPT_4_O = "gpt-4o"
-    GPT_4_O_MINI = "gpt-4o-mini"
-    GPT_4_O_20240806 = "gpt-4o-2024-08-06"
-    MIXTRAL_SMALL_8_X_7_B_0211 = "mixtral-small-8x7b-0211"
-    MISTRAL_LARGE_0224 = "mistral-large-0224"
-    MIXTRAL_8_X_22_B_INSTRUCT = "mixtral-8x22b-instruct"
-    LLAMA_V_370_B_INSTRUCT = "llama-v3-70b-instruct"
-    LLAMA_V_38_B_INSTRUCT = "llama-v3-8b-instruct"
-    CLAUDE_3_OPUS_20240229 = "claude-3-opus-20240229"
-    CLAUDE_3_SONNET_20240229 = "claude-3-sonnet-20240229"
-    CLAUDE_3_HAIKU_20240307 = "claude-3-haiku-20240307"
-    CLAUDE_35_SONNET_20240620 = "claude-3-5-sonnet-20240620"
-    GOOGLE_GEMINI_10_PRO_LATEST = "google-gemini-1.0-pro-latest"
-    GOOGLE_GEMINI_15_PRO_LATEST = "google-gemini-1.5-pro-latest"
-    DATABRICKS_DBRX = "databricks-dbrx"
-
-    def visit(
-        self,
-        gpt_35_turbo: typing.Callable[[], T_Result],
-        gpt_4_turbo: typing.Callable[[], T_Result],
-        gpt_4_turbo_preview: typing.Callable[[], T_Result],
-        gpt_4: typing.Callable[[], T_Result],
-        gpt_4_o: typing.Callable[[], T_Result],
-        gpt_4_o_mini: typing.Callable[[], T_Result],
-        gpt_4_o_20240806: typing.Callable[[], T_Result],
-        mixtral_small_8_x_7_b_0211: typing.Callable[[], T_Result],
-        mistral_large_0224: typing.Callable[[], T_Result],
-        mixtral_8_x_22_b_instruct: typing.Callable[[], T_Result],
-        llama_v_370_b_instruct: typing.Callable[[], T_Result],
-        llama_v_38_b_instruct: typing.Callable[[], T_Result],
-        claude_3_opus_20240229: typing.Callable[[], T_Result],
-        claude_3_sonnet_20240229: typing.Callable[[], T_Result],
-        claude_3_haiku_20240307: typing.Callable[[], T_Result],
-        claude_35_sonnet_20240620: typing.Callable[[], T_Result],
-        google_gemini_10_pro_latest: typing.Callable[[], T_Result],
-        google_gemini_15_pro_latest: typing.Callable[[], T_Result],
-        databricks_dbrx: typing.Callable[[], T_Result],
-    ) -> T_Result:
-        if self is Model.GPT_35_TURBO:
-            return gpt_35_turbo()
-        if self is Model.GPT_4_TURBO:
-            return gpt_4_turbo()
-        if self is Model.GPT_4_TURBO_PREVIEW:
-            return gpt_4_turbo_preview()
-        if self is Model.GPT_4:
-            return gpt_4()
-        if self is Model.GPT_4_O:
-            return gpt_4_o()
-        if self is Model.GPT_4_O_MINI:
-            return gpt_4_o_mini()
-        if self is Model.GPT_4_O_20240806:
-            return gpt_4_o_20240806()
-        if self is Model.MIXTRAL_SMALL_8_X_7_B_0211:
-            return mixtral_small_8_x_7_b_0211()
-        if self is Model.MISTRAL_LARGE_0224:
-            return mistral_large_0224()
-        if self is Model.MIXTRAL_8_X_22_B_INSTRUCT:
-            return mixtral_8_x_22_b_instruct()
-        if self is Model.LLAMA_V_370_B_INSTRUCT:
-            return llama_v_370_b_instruct()
-        if self is Model.LLAMA_V_38_B_INSTRUCT:
-            return llama_v_38_b_instruct()
-        if self is Model.CLAUDE_3_OPUS_20240229:
-            return claude_3_opus_20240229()
-        if self is Model.CLAUDE_3_SONNET_20240229:
-            return claude_3_sonnet_20240229()
-        if self is Model.CLAUDE_3_HAIKU_20240307:
-            return claude_3_haiku_20240307()
-        if self is Model.CLAUDE_35_SONNET_20240620:
-            return claude_35_sonnet_20240620()
-        if self is Model.GOOGLE_GEMINI_10_PRO_LATEST:
-            return google_gemini_10_pro_latest()
-        if self is Model.GOOGLE_GEMINI_15_PRO_LATEST:
-            return google_gemini_15_pro_latest()
-        if self is Model.DATABRICKS_DBRX:
-            return databricks_dbrx()
athena/types/publish_formats.py
DELETED
@@ -1,31 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-
-
-class PublishFormats(pydantic_v1.BaseModel):
-    markdown: bool
-    pdf: bool
-    docx: bool
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
athena/types/query_model.py
DELETED
@@ -1,40 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-from .filter_model import FilterModel
-from .time_dimension_model import TimeDimensionModel
-
-
-class QueryModel(pydantic_v1.BaseModel):
-    measures: typing.Optional[typing.List[str]] = None
-    time_dimensions: typing.Optional[typing.List[TimeDimensionModel]] = pydantic_v1.Field(
-        alias="timeDimensions", default=None
-    )
-    dimensions: typing.Optional[typing.List[str]] = None
-    filters: typing.Optional[typing.List[FilterModel]] = None
-    order: typing.Optional[typing.Dict[str, typing.Any]] = None
-    limit: typing.Optional[int] = None
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        populate_by_name = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}