athena-intelligence 0.1.39__py3-none-any.whl → 0.1.40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
athena/__init__.py CHANGED
@@ -2,6 +2,8 @@
 
 from .types import (
     Dataset,
+    FirecrawlScrapeUrlDataReponseDto,
+    FirecrawlScrapeUrlMetadata,
     GetDatasetsResponse,
     GetSnippetsResponse,
     HttpValidationError,
@@ -13,18 +15,22 @@ from .types import (
     Snippet,
     SqlResults,
     StatusEnum,
+    StructuredParseResult,
     Tools,
     UrlResult,
     ValidationError,
     ValidationErrorLocItem,
 )
 from .errors import UnprocessableEntityError
-from . import dataset, message, query, report, search, snippet
+from . import chain, dataset, message, query, report, search, snippet, tools
+from .chain import StructuredToolDataParsingModel
 from .environment import AthenaEnvironment
 
 __all__ = [
     "AthenaEnvironment",
     "Dataset",
+    "FirecrawlScrapeUrlDataReponseDto",
+    "FirecrawlScrapeUrlMetadata",
     "GetDatasetsResponse",
     "GetSnippetsResponse",
     "HttpValidationError",
@@ -36,15 +42,19 @@ __all__ = [
     "Snippet",
     "SqlResults",
     "StatusEnum",
+    "StructuredParseResult",
+    "StructuredToolDataParsingModel",
     "Tools",
     "UnprocessableEntityError",
     "UrlResult",
     "ValidationError",
     "ValidationErrorLocItem",
+    "chain",
     "dataset",
     "message",
     "query",
     "report",
     "search",
     "snippet",
+    "tools",
 ]
athena/base_client.py CHANGED
@@ -4,6 +4,7 @@ import typing
 
 import httpx
 
+from .chain.client import AsyncChainClient, ChainClient
 from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from .dataset.client import AsyncDatasetClient, DatasetClient
 from .environment import AthenaEnvironment
@@ -12,6 +13,7 @@ from .query.client import AsyncQueryClient, QueryClient
 from .report.client import AsyncReportClient, ReportClient
 from .search.client import AsyncSearchClient, SearchClient
 from .snippet.client import AsyncSnippetClient, SnippetClient
+from .tools.client import AsyncToolsClient, ToolsClient
 
 
 class BaseAthena:
@@ -58,6 +60,8 @@ class BaseAthena:
         self.report = ReportClient(client_wrapper=self._client_wrapper)
         self.query = QueryClient(client_wrapper=self._client_wrapper)
         self.search = SearchClient(client_wrapper=self._client_wrapper)
+        self.chain = ChainClient(client_wrapper=self._client_wrapper)
+        self.tools = ToolsClient(client_wrapper=self._client_wrapper)
 
 
 class AsyncBaseAthena:
@@ -104,6 +108,8 @@ class AsyncBaseAthena:
         self.report = AsyncReportClient(client_wrapper=self._client_wrapper)
         self.query = AsyncQueryClient(client_wrapper=self._client_wrapper)
         self.search = AsyncSearchClient(client_wrapper=self._client_wrapper)
+        self.chain = AsyncChainClient(client_wrapper=self._client_wrapper)
+        self.tools = AsyncToolsClient(client_wrapper=self._client_wrapper)
 
 
 def _get_base_url(*, base_url: typing.Optional[str] = None, environment: AthenaEnvironment) -> str:
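
Taken together, the base_client.py changes mean the 0.1.40 client exposes the new chain and tools sub-clients alongside the existing ones. A minimal sketch of reaching them (the API key value is a placeholder, not a real credential):

    from athena.client import Athena

    # Placeholder credentials; substitute a real key.
    client = Athena(api_key="YOUR_API_KEY")

    # New in 0.1.40: chain and tools sub-clients attached to the top-level client.
    chain_client = client.chain  # ChainClient; AsyncAthena exposes AsyncChainClient
    tools_client = client.tools  # ToolsClient; AsyncAthena exposes AsyncToolsClient
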
athena/chain/__init__.py ADDED
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .types import StructuredToolDataParsingModel
+
+__all__ = ["StructuredToolDataParsingModel"]
athena/chain/client.py ADDED
@@ -0,0 +1,167 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+import urllib.parse
+from json.decoder import JSONDecodeError
+
+from ..core.api_error import ApiError
+from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ..core.jsonable_encoder import jsonable_encoder
+from ..core.remove_none_from_dict import remove_none_from_dict
+from ..core.request_options import RequestOptions
+from ..errors.unprocessable_entity_error import UnprocessableEntityError
+from ..types.http_validation_error import HttpValidationError
+from ..types.structured_parse_result import StructuredParseResult
+from .types.structured_tool_data_parsing_model import StructuredToolDataParsingModel
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class ChainClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def structured_parse(
+        self,
+        *,
+        text_input: str,
+        custom_type_dict: typing.Dict[str, typing.Any],
+        parsing_model: typing.Optional[StructuredToolDataParsingModel] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> StructuredParseResult:
+        """
+        Parameters:
+            - text_input: str. The text input to be parsed.
+
+            - custom_type_dict: typing.Dict[str, typing.Any]. A dictionary of field names and their default values.
+
+            - parsing_model: typing.Optional[StructuredToolDataParsingModel]. The model to be used for parsing.
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import Athena
+
+        client = Athena(
+            api_key="YOUR_API_KEY",
+        )
+        client.chain.structured_parse(
+            text_input="text_input",
+            custom_type_dict={},
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"text_input": text_input, "custom_type_dict": custom_type_dict}
+        if parsing_model is not OMIT:
+            _request["parsing_model"] = parsing_model
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/structured-parse"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(StructuredParseResult, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncChainClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def structured_parse(
+        self,
+        *,
+        text_input: str,
+        custom_type_dict: typing.Dict[str, typing.Any],
+        parsing_model: typing.Optional[StructuredToolDataParsingModel] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> StructuredParseResult:
+        """
+        Parameters:
+            - text_input: str. The text input to be parsed.
+
+            - custom_type_dict: typing.Dict[str, typing.Any]. A dictionary of field names and their default values.
+
+            - parsing_model: typing.Optional[StructuredToolDataParsingModel]. The model to be used for parsing.
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import AsyncAthena
+
+        client = AsyncAthena(
+            api_key="YOUR_API_KEY",
+        )
+        await client.chain.structured_parse(
+            text_input="text_input",
+            custom_type_dict={},
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"text_input": text_input, "custom_type_dict": custom_type_dict}
+        if parsing_model is not OMIT:
+            _request["parsing_model"] = parsing_model
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/structured-parse"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(StructuredParseResult, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
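
For context, a hedged usage sketch of the new POST api/v0/structured-parse endpoint: the call shape mirrors the generated docstring, but the text_input value and the custom_type_dict keys/defaults below are invented for illustration, and the enum member is just one of the allowed values.

    from athena.client import Athena
    from athena.chain import StructuredToolDataParsingModel

    client = Athena(api_key="YOUR_API_KEY")  # placeholder key

    # custom_type_dict maps field names to default values; these keys are illustrative.
    parsed = client.chain.structured_parse(
        text_input="Invoice 123 for $45.00, due 2024-05-01.",
        custom_type_dict={"invoice_number": "", "amount": 0.0, "due_date": ""},
        parsing_model=StructuredToolDataParsingModel.GPT_4_TURBO,
    )
    print(parsed.result)  # StructuredParseResult.result is a plain dict of parsed fields
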
athena/chain/types/__init__.py ADDED
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .structured_tool_data_parsing_model import StructuredToolDataParsingModel
+
+__all__ = ["StructuredToolDataParsingModel"]
athena/chain/types/structured_tool_data_parsing_model.py ADDED
@@ -0,0 +1,53 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class StructuredToolDataParsingModel(str, enum.Enum):
+    """
+    The model to be used for parsing.
+    """
+
+    GPT_4_TURBO = "gpt-4-turbo"
+    GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview"
+    GPT_4 = "gpt-4"
+    GPT_35_TURBO = "gpt-3.5-turbo"
+    MIXTRAL_SMALL_8_X_7_B_0211 = "mixtral-small-8x7b-0211"
+    MISTRAL_LARGE_0224 = "mistral-large-0224"
+    CLAUDE_3_OPUS_20240229 = "claude-3-opus-20240229"
+    CLAUDE_3_SONNET_20240229 = "claude-3-sonnet-20240229"
+    CLAUDE_3_HAIKU_20240307 = "claude-3-haiku-20240307"
+
+    def visit(
+        self,
+        gpt_4_turbo: typing.Callable[[], T_Result],
+        gpt_4_turbo_preview: typing.Callable[[], T_Result],
+        gpt_4: typing.Callable[[], T_Result],
+        gpt_35_turbo: typing.Callable[[], T_Result],
+        mixtral_small_8_x_7_b_0211: typing.Callable[[], T_Result],
+        mistral_large_0224: typing.Callable[[], T_Result],
+        claude_3_opus_20240229: typing.Callable[[], T_Result],
+        claude_3_sonnet_20240229: typing.Callable[[], T_Result],
+        claude_3_haiku_20240307: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is StructuredToolDataParsingModel.GPT_4_TURBO:
+            return gpt_4_turbo()
+        if self is StructuredToolDataParsingModel.GPT_4_TURBO_PREVIEW:
+            return gpt_4_turbo_preview()
+        if self is StructuredToolDataParsingModel.GPT_4:
+            return gpt_4()
+        if self is StructuredToolDataParsingModel.GPT_35_TURBO:
+            return gpt_35_turbo()
+        if self is StructuredToolDataParsingModel.MIXTRAL_SMALL_8_X_7_B_0211:
+            return mixtral_small_8_x_7_b_0211()
+        if self is StructuredToolDataParsingModel.MISTRAL_LARGE_0224:
+            return mistral_large_0224()
+        if self is StructuredToolDataParsingModel.CLAUDE_3_OPUS_20240229:
+            return claude_3_opus_20240229()
+        if self is StructuredToolDataParsingModel.CLAUDE_3_SONNET_20240229:
+            return claude_3_sonnet_20240229()
+        if self is StructuredToolDataParsingModel.CLAUDE_3_HAIKU_20240307:
+            return claude_3_haiku_20240307()
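
Because the enum subclasses str, members compare equal to their wire values, and visit() gives exhaustive dispatch over them. A small sketch; the size labels used here are made up for illustration:

    from athena.chain import StructuredToolDataParsingModel

    model = StructuredToolDataParsingModel.CLAUDE_3_HAIKU_20240307
    assert model == "claude-3-haiku-20240307"  # str-valued enum

    # visit() takes one callable per member, so adding a member breaks callers loudly.
    size = model.visit(
        gpt_4_turbo=lambda: "large",
        gpt_4_turbo_preview=lambda: "large",
        gpt_4=lambda: "large",
        gpt_35_turbo=lambda: "small",
        mixtral_small_8_x_7_b_0211=lambda: "small",
        mistral_large_0224=lambda: "large",
        claude_3_opus_20240229=lambda: "large",
        claude_3_sonnet_20240229=lambda: "medium",
        claude_3_haiku_20240307=lambda: "small",
    )
    print(size)  # "small"
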
athena/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "athena-intelligence",
-            "X-Fern-SDK-Version": "0.1.39",
+            "X-Fern-SDK-Version": "0.1.40",
         }
         headers["X-API-KEY"] = self.api_key
         return headers
athena/tools/__init__.py ADDED
@@ -0,0 +1,2 @@
+# This file was auto-generated by Fern from our API Definition.
+
athena/tools/client.py ADDED
@@ -0,0 +1,158 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+import urllib.parse
+from json.decoder import JSONDecodeError
+
+from ..core.api_error import ApiError
+from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ..core.jsonable_encoder import jsonable_encoder
+from ..core.remove_none_from_dict import remove_none_from_dict
+from ..core.request_options import RequestOptions
+from ..errors.unprocessable_entity_error import UnprocessableEntityError
+from ..types.firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
+from ..types.http_validation_error import HttpValidationError
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class ToolsClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def scrape_url(
+        self,
+        *,
+        url: str,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> FirecrawlScrapeUrlDataReponseDto:
+        """
+        Parameters:
+            - url: str.
+
+            - params: typing.Optional[typing.Dict[str, typing.Any]].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import Athena
+
+        client = Athena(
+            api_key="YOUR_API_KEY",
+        )
+        client.tools.scrape_url(
+            url="https://athenaintelligence.ai",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"url": url}
+        if params is not OMIT:
+            _request["params"] = params
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/firecrawl/scrape-url"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(FirecrawlScrapeUrlDataReponseDto, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncToolsClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def scrape_url(
+        self,
+        *,
+        url: str,
+        params: typing.Optional[typing.Dict[str, typing.Any]] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> FirecrawlScrapeUrlDataReponseDto:
+        """
+        Parameters:
+            - url: str.
+
+            - params: typing.Optional[typing.Dict[str, typing.Any]].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import AsyncAthena
+
+        client = AsyncAthena(
+            api_key="YOUR_API_KEY",
+        )
+        await client.tools.scrape_url(
+            url="https://athenaintelligence.ai",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"url": url}
+        if params is not OMIT:
+            _request["params"] = params
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/firecrawl/scrape-url"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else 60,
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(FirecrawlScrapeUrlDataReponseDto, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
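
A hedged sketch of calling the new POST api/v0/tools/firecrawl/scrape-url endpoint: params is an optional free-form dict that the client sends in the request body, and the key shown below is purely illustrative rather than documented by this diff.

    from athena.client import Athena

    client = Athena(api_key="YOUR_API_KEY")  # placeholder key

    page = client.tools.scrape_url(
        url="https://athenaintelligence.ai",
        params={"onlyMainContent": True},  # illustrative key; omit params to use server defaults
    )
    print(page.metadata.title)   # FirecrawlScrapeUrlMetadata fields are optional and may be None
    print(page.markdown[:200])   # scraped page content as markdown
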
athena/types/__init__.py CHANGED
@@ -1,6 +1,8 @@
 # This file was auto-generated by Fern from our API Definition.
 
 from .dataset import Dataset
+from .firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
+from .firecrawl_scrape_url_metadata import FirecrawlScrapeUrlMetadata
 from .get_datasets_response import GetDatasetsResponse
 from .get_snippets_response import GetSnippetsResponse
 from .http_validation_error import HttpValidationError
@@ -12,6 +14,7 @@ from .scrape_markdown_result import ScrapeMarkdownResult
 from .snippet import Snippet
 from .sql_results import SqlResults
 from .status_enum import StatusEnum
+from .structured_parse_result import StructuredParseResult
 from .tools import Tools
 from .url_result import UrlResult
 from .validation_error import ValidationError
@@ -19,6 +22,8 @@ from .validation_error_loc_item import ValidationErrorLocItem
 
 __all__ = [
     "Dataset",
+    "FirecrawlScrapeUrlDataReponseDto",
+    "FirecrawlScrapeUrlMetadata",
     "GetDatasetsResponse",
     "GetSnippetsResponse",
     "HttpValidationError",
@@ -30,6 +35,7 @@ __all__ = [
     "Snippet",
     "SqlResults",
     "StatusEnum",
+    "StructuredParseResult",
    "Tools",
    "UrlResult",
    "ValidationError",
athena/types/firecrawl_scrape_url_data_reponse_dto.py ADDED
@@ -0,0 +1,31 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .firecrawl_scrape_url_metadata import FirecrawlScrapeUrlMetadata
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class FirecrawlScrapeUrlDataReponseDto(pydantic.BaseModel):
+    content: str
+    markdown: str
+    metadata: FirecrawlScrapeUrlMetadata
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
athena/types/firecrawl_scrape_url_metadata.py ADDED
@@ -0,0 +1,32 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class FirecrawlScrapeUrlMetadata(pydantic.BaseModel):
+    title: typing.Optional[str] = None
+    description: typing.Optional[str] = None
+    language: typing.Optional[str] = None
+    source_url: typing.Optional[str] = pydantic.Field(alias="sourceURL", default=None)
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        json_encoders = {dt.datetime: serialize_datetime}
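
Because the model aliases source_url to sourceURL and sets allow_population_by_field_name, it accepts either spelling on input and serializes with the alias. A quick sketch of that behaviour; the values are placeholders, and in normal use the SDK builds this model from the scrape-url response:

    from athena.types import FirecrawlScrapeUrlMetadata

    meta = FirecrawlScrapeUrlMetadata(sourceURL="https://athenaintelligence.ai", title="Athena")
    assert meta.source_url == "https://athenaintelligence.ai"
    print(meta.json())  # json()/dict() default to by_alias=True, so "sourceURL" appears in the output
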
athena/types/structured_parse_result.py ADDED
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class StructuredParseResult(pydantic.BaseModel):
+    result: typing.Dict[str, typing.Any]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
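
StructuredParseResult is a thin wrapper: the parsed output lives in a single result dict. A minimal sketch with an invented payload:

    from athena.types import StructuredParseResult

    parsed = StructuredParseResult(result={"invoice_number": "123", "amount": 45.0})
    print(parsed.result["amount"])  # 45.0
    print(parsed.json())            # serialized as {"result": {...}}
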
athena_intelligence-0.1.40.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: athena-intelligence
-Version: 0.1.39
+Version: 0.1.40
 Summary:
 Requires-Python: >=3.8,<4.0
 Classifier: Programming Language :: Python :: 3
athena_intelligence-0.1.40.dist-info/RECORD CHANGED
@@ -1,9 +1,13 @@
-athena/__init__.py,sha256=4F_he5MdULBjh5FEtmnAlXK61f8Y5fhP4-bLVzj1K3s,1013
-athena/base_client.py,sha256=9CD18sBT5meilMnX4WfnNBagwlyNWnc8NH0bSL9D0Ao,5014
+athena/__init__.py,sha256=6SG9UQ4iQ8rFALsqP-LPImAjHTKLpmUHeSUapfxvmMo,1341
+athena/base_client.py,sha256=tvX2FKc9AnNBm0k64njjUCOt7dPxBBn6BpClJpcPqgc,5416
+athena/chain/__init__.py,sha256=Kv3-525ENLEbHHcRZLT-ioQrzUNmFH9e5mUkApSyvp8,161
+athena/chain/client.py,sha256=mF5A0sXw2GvZexy4bK9HPgDx7DMzWVSR7f0WuGbkAIg,7401
+athena/chain/types/__init__.py,sha256=la4c4cmiMbVbZpB_XqpBB2Rzgl2-xkEUVZrXDowr1lc,190
+athena/chain/types/structured_tool_data_parsing_model.py,sha256=H2Zhh3Sav6foUCyIQIrgIR9iaMjk9ZQNJfHfZgJa1uQ,2213
 athena/client.py,sha256=8QypiDlbZ0C1YsJh6GzhylLVCZXDQc1MCJTURo2_vvI,3576
 athena/core/__init__.py,sha256=RWfyDqkzWsf8e3VGc3NV60MovfJbg5XWzNFGB2DZ0hA,790
 athena/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-athena/core/client_wrapper.py,sha256=IZ6Zo7IR39BxLeVgeprl1_Sh_0ewYHA8uloBT_m_axc,1198
+athena/core/client_wrapper.py,sha256=MicE6puzRD25jmVhsLDi58I2wkLvm4_csCbMBH9vT38,1198
 athena/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 athena/core/file.py,sha256=sy1RUGZ3aJYuw998bZytxxo6QdgKmlnlgBaMvwEKCGg,1480
 athena/core/http_client.py,sha256=LI0yP3jUyE0Ue7oyBcI9nyo1pljOwh9Y5ycTeIpKwOg,4882
@@ -27,8 +31,12 @@ athena/search/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 athena/search/client.py,sha256=DLfHjgflIXAff20yswQK1h4BNxvY4SjZzhsywpQOM-g,12391
 athena/snippet/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 athena/snippet/client.py,sha256=D0rSpm6ql9cnUj-mMe3z8OHRgRQQuk3bBW2CZSRnyp4,6087
-athena/types/__init__.py,sha256=WkQQXOAvm87RuRDk2_W9uJs9y7LWGejK1ZC666NTHXA,1054
+athena/tools/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
+athena/tools/client.py,sha256=XCkyxX3rau3Xbppj74rni8bScZrTFlYDkep_c3u4kWU,6657
+athena/types/__init__.py,sha256=6TfFMQqEF5dfsRSFsoHdH-qgIm8qPrzEeFPBorEcVq8,1370
 athena/types/dataset.py,sha256=70OJPxKBAYu7xthGEgrUolSdyLqiyh6X49INw1oN0sA,1014
+athena/types/firecrawl_scrape_url_data_reponse_dto.py,sha256=LbJY-SC_WNQG3nbswG8NTfVl_u_tpV1HO7Y3MWCk5gc,1021
+athena/types/firecrawl_scrape_url_metadata.py,sha256=sqHTtq5_5vujsMixJBDJULPK7MrvxEpB2wGPwC-bTdM,1128
 athena/types/get_datasets_response.py,sha256=BCdT8yTLfOsXeyFadlyoas4zzseFWGPAdGpkgkOuaD8,989
 athena/types/get_snippets_response.py,sha256=Lpn7bHJLpPQozN93unCV-8eByAAfz1MhQWR3G3Z1vl4,989
 athena/types/http_validation_error.py,sha256=Fcv_CTMMrLvCeTHjF0n5xf5tskMDgt-J6H9gp654eQw,973
@@ -40,10 +48,11 @@ athena/types/scrape_markdown_result.py,sha256=uRpIxoLV9oyLdbJeehm3zmZk_qXZeYqYYc
 athena/types/snippet.py,sha256=POIVJNV9iQxiVegB_qwQx-PZPPSyoIPhyxTsueNVUGA,1126
 athena/types/sql_results.py,sha256=pNH32nyf1bzoYJs3FgHctLdLO02oOjyGgLkHACACB6k,900
 athena/types/status_enum.py,sha256=0UZbhdAx215GHC-U53RS98mYHtn1N3On4VBe4j02Qtc,672
+athena/types/structured_parse_result.py,sha256=7I-w06OmtxXFY01k7FXFSNPe5PpM3z54xNUbs62lSv0,905
 athena/types/tools.py,sha256=mhRkKAwlsDud-fFOhsx2T3hBD-FAtuCnGHyU9cLPcGU,1422
 athena/types/url_result.py,sha256=zajsW46qJnD6GPimb5kHkUncjqBfzHUlGOcKuUGMX-E,893
 athena/types/validation_error.py,sha256=2JhGNJouo8QpfrMBoT_JCwYSn1nFN2Nnq0p9uPLDH-U,992
 athena/types/validation_error_loc_item.py,sha256=LAtjCHIllWRBFXvAZ5QZpp7CPXjdtN9EB7HrLVo6EP0,128
-athena_intelligence-0.1.39.dist-info/METADATA,sha256=d8EmTn45t-7O63xpZzW5vbZbpxhagqY9Frj1559JYWw,4738
-athena_intelligence-0.1.39.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-athena_intelligence-0.1.39.dist-info/RECORD,,
+athena_intelligence-0.1.40.dist-info/METADATA,sha256=hV93D3CuM01fBTO3fShNwAEURnu-rJbcCHdSju2SN_I,4738
+athena_intelligence-0.1.40.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+athena_intelligence-0.1.40.dist-info/RECORD,,