athena-intelligence 0.1.71__tar.gz → 0.1.73__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in the public registry, and is provided for informational purposes only.
Files changed (78)
  1. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/PKG-INFO +1 -1
  2. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/pyproject.toml +1 -1
  3. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/__init__.py +12 -0
  4. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/client_wrapper.py +1 -1
  5. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/tools/client.py +305 -0
  6. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/__init__.py +12 -0
  7. athena_intelligence-0.1.73/src/athena/types/convert_pdf_to_sheet_out.py +26 -0
  8. athena_intelligence-0.1.73/src/athena/types/filter_model.py +28 -0
  9. athena_intelligence-0.1.73/src/athena/types/filter_operator.py +73 -0
  10. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/llm_model.py +8 -0
  11. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/model.py +4 -0
  12. athena_intelligence-0.1.73/src/athena/types/query_model.py +34 -0
  13. athena_intelligence-0.1.73/src/athena/types/semantic_query_out.py +25 -0
  14. athena_intelligence-0.1.73/src/athena/types/time_dimension_model.py +29 -0
  15. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/README.md +0 -0
  16. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/base_client.py +0 -0
  17. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/chain/__init__.py +0 -0
  18. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/chain/client.py +0 -0
  19. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/client.py +0 -0
  20. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/__init__.py +0 -0
  21. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/api_error.py +0 -0
  22. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/datetime_utils.py +0 -0
  23. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/file.py +0 -0
  24. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/http_client.py +0 -0
  25. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/jsonable_encoder.py +0 -0
  26. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/pydantic_utilities.py +0 -0
  27. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/remove_none_from_dict.py +0 -0
  28. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/core/request_options.py +0 -0
  29. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/dataset/__init__.py +0 -0
  30. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/dataset/client.py +0 -0
  31. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/environment.py +0 -0
  32. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/errors/__init__.py +0 -0
  33. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/errors/unprocessable_entity_error.py +0 -0
  34. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/message/__init__.py +0 -0
  35. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/message/client.py +0 -0
  36. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/polling_message_client.py +0 -0
  37. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/py.typed +0 -0
  38. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/query/__init__.py +0 -0
  39. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/query/client.py +0 -0
  40. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/report/__init__.py +0 -0
  41. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/report/client.py +0 -0
  42. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/search/__init__.py +0 -0
  43. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/search/client.py +0 -0
  44. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/snippet/__init__.py +0 -0
  45. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/snippet/client.py +0 -0
  46. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/tools/__init__.py +0 -0
  47. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/dataset.py +0 -0
  48. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/document.py +0 -0
  49. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/excecute_tool_first_workflow_out.py +0 -0
  50. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/file_data_response.py +0 -0
  51. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/firecrawl_scrape_url_data_reponse_dto.py +0 -0
  52. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/firecrawl_scrape_url_metadata.py +0 -0
  53. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/get_datasets_response.py +0 -0
  54. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/get_snippet_out.py +0 -0
  55. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/get_snippets_response.py +0 -0
  56. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/http_validation_error.py +0 -0
  57. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/langchain_documents_request_out.py +0 -0
  58. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/map_reduce_chain_out.py +0 -0
  59. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/message_out.py +0 -0
  60. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/message_out_dto.py +0 -0
  61. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/publish_formats.py +0 -0
  62. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/report.py +0 -0
  63. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/researcher_out.py +0 -0
  64. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/snippet.py +0 -0
  65. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/sql_results.py +0 -0
  66. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/status_enum.py +0 -0
  67. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/structured_parse_result.py +0 -0
  68. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/tools.py +0 -0
  69. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/upload_documents_out.py +0 -0
  70. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/url_result.py +0 -0
  71. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/validation_error.py +0 -0
  72. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/validation_error_loc_item.py +0 -0
  73. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/types/workflow_status_out.py +0 -0
  74. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/upload/__init__.py +0 -0
  75. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/upload/client.py +0 -0
  76. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/version.py +0 -0
  77. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/workflow/__init__.py +0 -0
  78. {athena_intelligence-0.1.71 → athena_intelligence-0.1.73}/src/athena/workflow/client.py +0 -0
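At a glance, 0.1.73 adds two endpoints to the tools client (convert_pdf_to_sheet and semantic_query, the latter backed by the strict-semantic-query route), the supporting types (ConvertPdfToSheetOut, FilterModel, FilterOperator, QueryModel, SemanticQueryOut, TimeDimensionModel), gpt-4o-mini and claude-3-5-sonnet-20240620 entries in the model enums, and optional source / athena_document_ids parameters on the researcher tool. A minimal sketch of the new surface, adapted from the generated docstrings shown in the hunks below (the API key is a placeholder; the document id is the SDK's own example value):

    from athena.client import Athena

    client = Athena(api_key="YOUR_API_KEY")

    # Convert a previously uploaded PDF into a sheet; the response carries the
    # original document_id plus a new_document_id for the converted document.
    sheet = client.tools.convert_pdf_to_sheet(
        document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",
    )
    print(sheet.new_document_id)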
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: athena-intelligence
-Version: 0.1.71
+Version: 0.1.73
 Summary:
 Requires-Python: >=3.8,<4.0
 Classifier: Programming Language :: Python :: 3
pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "athena-intelligence"
-version = "0.1.71"
+version = "0.1.73"
 description = ""
 readme = "README.md"
 authors = []
src/athena/__init__.py
@@ -1,10 +1,13 @@
 # This file was auto-generated by Fern from our API Definition.

 from .types import (
+    ConvertPdfToSheetOut,
     Dataset,
     Document,
     ExcecuteToolFirstWorkflowOut,
     FileDataResponse,
+    FilterModel,
+    FilterOperator,
     FirecrawlScrapeUrlDataReponseDto,
     FirecrawlScrapeUrlMetadata,
     GetDatasetsResponse,
@@ -18,12 +21,15 @@ from .types import (
     MessageOutDto,
     Model,
     PublishFormats,
+    QueryModel,
     Report,
     ResearcherOut,
+    SemanticQueryOut,
     Snippet,
     SqlResults,
     StatusEnum,
     StructuredParseResult,
+    TimeDimensionModel,
     Tools,
     UploadDocumentsOut,
     UrlResult,
@@ -38,10 +44,13 @@ from .version import __version__

 __all__ = [
     "AthenaEnvironment",
+    "ConvertPdfToSheetOut",
     "Dataset",
     "Document",
     "ExcecuteToolFirstWorkflowOut",
     "FileDataResponse",
+    "FilterModel",
+    "FilterOperator",
     "FirecrawlScrapeUrlDataReponseDto",
     "FirecrawlScrapeUrlMetadata",
     "GetDatasetsResponse",
@@ -55,12 +64,15 @@ __all__ = [
     "MessageOutDto",
     "Model",
     "PublishFormats",
+    "QueryModel",
     "Report",
     "ResearcherOut",
+    "SemanticQueryOut",
     "Snippet",
     "SqlResults",
     "StatusEnum",
     "StructuredParseResult",
+    "TimeDimensionModel",
     "Tools",
     "UnprocessableEntityError",
     "UploadDocumentsOut",
src/athena/core/client_wrapper.py
@@ -17,7 +17,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "athena-intelligence",
-            "X-Fern-SDK-Version": "0.1.71",
+            "X-Fern-SDK-Version": "0.1.73",
         }
         headers["X-API-KEY"] = self.api_key
         return headers
src/athena/tools/client.py
@@ -11,13 +11,16 @@ from ..core.pydantic_utilities import pydantic_v1
 from ..core.remove_none_from_dict import remove_none_from_dict
 from ..core.request_options import RequestOptions
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
+from ..types.convert_pdf_to_sheet_out import ConvertPdfToSheetOut
 from ..types.excecute_tool_first_workflow_out import ExcecuteToolFirstWorkflowOut
 from ..types.firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
 from ..types.http_validation_error import HttpValidationError
 from ..types.langchain_documents_request_out import LangchainDocumentsRequestOut
 from ..types.llm_model import LlmModel
 from ..types.publish_formats import PublishFormats
+from ..types.query_model import QueryModel
 from ..types.researcher_out import ResearcherOut
+from ..types.semantic_query_out import SemanticQueryOut

 # this is used as the default value for optional parameters
 OMIT = typing.cast(typing.Any, ...)
@@ -250,6 +253,8 @@ class ToolsClient:
         max_sections: int,
         guidelines: typing.Sequence[str],
         publish_formats: typing.Optional[PublishFormats] = OMIT,
+        source: typing.Optional[str] = OMIT,
+        athena_document_ids: typing.Optional[typing.Sequence[str]] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> ResearcherOut:
         """
@@ -262,6 +267,10 @@ class ToolsClient:

             - publish_formats: typing.Optional[PublishFormats].

+            - source: typing.Optional[str].
+
+            - athena_document_ids: typing.Optional[typing.Sequence[str]].
+
             - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
         ---
         from athena import PublishFormats
@@ -283,6 +292,8 @@ class ToolsClient:
                 pdf=True,
                 docx=False,
             ),
+            source="web",
+            athena_document_ids=["doc_1", "doc_2"],
         )
         """
         _request: typing.Dict[str, typing.Any] = {
@@ -292,6 +303,10 @@ class ToolsClient:
         }
         if publish_formats is not OMIT:
             _request["publish_formats"] = publish_formats
+        if source is not OMIT:
+            _request["source"] = source
+        if athena_document_ids is not OMIT:
+            _request["athena_document_ids"] = athena_document_ids
         _response = self._client_wrapper.httpx_client.request(
             method="POST",
             url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/researcher"),
@@ -330,6 +345,145 @@ class ToolsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

+    def convert_pdf_to_sheet(
+        self, *, document_id: str, request_options: typing.Optional[RequestOptions] = None
+    ) -> ConvertPdfToSheetOut:
+        """
+        Parameters:
+            - document_id: str.
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import Athena
+
+        client = Athena(
+            api_key="YOUR_API_KEY",
+        )
+        client.tools.convert_pdf_to_sheet(
+            document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            method="POST",
+            url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/convert-pdf-to-sheet"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder({"document_id": document_id})
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder({"document_id": document_id}),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(ConvertPdfToSheetOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(
+                pydantic_v1.parse_obj_as(HttpValidationError, _response.json())  # type: ignore
+            )
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def semantic_query(
+        self,
+        *,
+        query: QueryModel,
+        table_name: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> SemanticQueryOut:
+        """
+        Parameters:
+            - query: QueryModel.
+
+            - table_name: typing.Optional[str].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena import FilterModel, FilterOperator, QueryModel, TimeDimensionModel
+        from athena.client import Athena
+
+        client = Athena(
+            api_key="YOUR_API_KEY",
+        )
+        client.tools.semantic_query(
+            query=QueryModel(
+                measures=["count"],
+                time_dimensions=[
+                    TimeDimensionModel(
+                        dimension="dimension",
+                        granularity="granularity",
+                        date_range=["dateRange"],
+                    )
+                ],
+                dimensions=["manufacturer"],
+                filters=[
+                    FilterModel(
+                        dimension="manufacturer",
+                        operator=FilterOperator.EQUALS,
+                        values=["values"],
+                    )
+                ],
+            ),
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"query": query}
+        if table_name is not OMIT:
+            _request["table_name"] = table_name
+        _response = self._client_wrapper.httpx_client.request(
+            method="POST",
+            url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/strict-semantic-query"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(SemanticQueryOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(
+                pydantic_v1.parse_obj_as(HttpValidationError, _response.json())  # type: ignore
+            )
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+

 class AsyncToolsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -558,6 +712,8 @@ class AsyncToolsClient:
         max_sections: int,
         guidelines: typing.Sequence[str],
         publish_formats: typing.Optional[PublishFormats] = OMIT,
+        source: typing.Optional[str] = OMIT,
+        athena_document_ids: typing.Optional[typing.Sequence[str]] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> ResearcherOut:
         """
@@ -570,6 +726,10 @@ class AsyncToolsClient:

             - publish_formats: typing.Optional[PublishFormats].

+            - source: typing.Optional[str].
+
+            - athena_document_ids: typing.Optional[typing.Sequence[str]].
+
             - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
         ---
         from athena import PublishFormats
@@ -591,6 +751,8 @@ class AsyncToolsClient:
                 pdf=True,
                 docx=False,
             ),
+            source="web",
+            athena_document_ids=["doc_1", "doc_2"],
         )
         """
         _request: typing.Dict[str, typing.Any] = {
@@ -600,6 +762,10 @@ class AsyncToolsClient:
         }
         if publish_formats is not OMIT:
             _request["publish_formats"] = publish_formats
+        if source is not OMIT:
+            _request["source"] = source
+        if athena_document_ids is not OMIT:
+            _request["athena_document_ids"] = athena_document_ids
         _response = await self._client_wrapper.httpx_client.request(
             method="POST",
             url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/researcher"),
@@ -637,3 +803,142 @@ class AsyncToolsClient:
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def convert_pdf_to_sheet(
+        self, *, document_id: str, request_options: typing.Optional[RequestOptions] = None
+    ) -> ConvertPdfToSheetOut:
+        """
+        Parameters:
+            - document_id: str.
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena.client import AsyncAthena
+
+        client = AsyncAthena(
+            api_key="YOUR_API_KEY",
+        )
+        await client.tools.convert_pdf_to_sheet(
+            document_id="doc_9249292-d118-42d3-95b4-00eccfe0754f",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            method="POST",
+            url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/convert-pdf-to-sheet"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder({"document_id": document_id})
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder({"document_id": document_id}),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(ConvertPdfToSheetOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(
+                pydantic_v1.parse_obj_as(HttpValidationError, _response.json())  # type: ignore
+            )
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def semantic_query(
+        self,
+        *,
+        query: QueryModel,
+        table_name: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> SemanticQueryOut:
+        """
+        Parameters:
+            - query: QueryModel.
+
+            - table_name: typing.Optional[str].
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from athena import FilterModel, FilterOperator, QueryModel, TimeDimensionModel
+        from athena.client import AsyncAthena
+
+        client = AsyncAthena(
+            api_key="YOUR_API_KEY",
+        )
+        await client.tools.semantic_query(
+            query=QueryModel(
+                measures=["count"],
+                time_dimensions=[
+                    TimeDimensionModel(
+                        dimension="dimension",
+                        granularity="granularity",
+                        date_range=["dateRange"],
+                    )
+                ],
+                dimensions=["manufacturer"],
+                filters=[
+                    FilterModel(
+                        dimension="manufacturer",
+                        operator=FilterOperator.EQUALS,
+                        values=["values"],
+                    )
+                ],
+            ),
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"query": query}
+        if table_name is not OMIT:
+            _request["table_name"] = table_name
+        _response = await self._client_wrapper.httpx_client.request(
+            method="POST",
+            url=urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v0/tools/strict-semantic-query"),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(SemanticQueryOut, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(
+                pydantic_v1.parse_obj_as(HttpValidationError, _response.json())  # type: ignore
+            )
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
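Both new methods accept the same optional request_options mapping as the rest of the generated client. As the hunks above show, the code only reads the keys additional_query_parameters, additional_body_parameters, additional_headers, timeout_in_seconds, and max_retries via .get(), so at runtime any mapping with those keys works. A hedged sketch (the header name and timeout value are purely illustrative):

    from athena import QueryModel
    from athena.client import Athena

    client = Athena(api_key="YOUR_API_KEY")
    result = client.tools.semantic_query(
        query=QueryModel(measures=["count"], dimensions=["manufacturer"]),
        request_options={
            "timeout_in_seconds": 60,  # overrides the client-level timeout for this call
            "additional_headers": {"X-Trace-Id": "debug-123"},  # illustrative extra header
        },
    )
    print(result.output)  # SemanticQueryOut.output is a free-form dict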
src/athena/types/__init__.py
@@ -1,9 +1,12 @@
 # This file was auto-generated by Fern from our API Definition.

+from .convert_pdf_to_sheet_out import ConvertPdfToSheetOut
 from .dataset import Dataset
 from .document import Document
 from .excecute_tool_first_workflow_out import ExcecuteToolFirstWorkflowOut
 from .file_data_response import FileDataResponse
+from .filter_model import FilterModel
+from .filter_operator import FilterOperator
 from .firecrawl_scrape_url_data_reponse_dto import FirecrawlScrapeUrlDataReponseDto
 from .firecrawl_scrape_url_metadata import FirecrawlScrapeUrlMetadata
 from .get_datasets_response import GetDatasetsResponse
@@ -17,12 +20,15 @@ from .message_out import MessageOut
 from .message_out_dto import MessageOutDto
 from .model import Model
 from .publish_formats import PublishFormats
+from .query_model import QueryModel
 from .report import Report
 from .researcher_out import ResearcherOut
+from .semantic_query_out import SemanticQueryOut
 from .snippet import Snippet
 from .sql_results import SqlResults
 from .status_enum import StatusEnum
 from .structured_parse_result import StructuredParseResult
+from .time_dimension_model import TimeDimensionModel
 from .tools import Tools
 from .upload_documents_out import UploadDocumentsOut
 from .url_result import UrlResult
@@ -31,10 +37,13 @@ from .validation_error_loc_item import ValidationErrorLocItem
 from .workflow_status_out import WorkflowStatusOut

 __all__ = [
+    "ConvertPdfToSheetOut",
     "Dataset",
     "Document",
     "ExcecuteToolFirstWorkflowOut",
     "FileDataResponse",
+    "FilterModel",
+    "FilterOperator",
     "FirecrawlScrapeUrlDataReponseDto",
     "FirecrawlScrapeUrlMetadata",
     "GetDatasetsResponse",
@@ -48,12 +57,15 @@ __all__ = [
     "MessageOutDto",
     "Model",
     "PublishFormats",
+    "QueryModel",
     "Report",
     "ResearcherOut",
+    "SemanticQueryOut",
     "Snippet",
     "SqlResults",
     "StatusEnum",
     "StructuredParseResult",
+    "TimeDimensionModel",
     "Tools",
     "UploadDocumentsOut",
     "UrlResult",
src/athena/types/convert_pdf_to_sheet_out.py (new file)
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+
+
+class ConvertPdfToSheetOut(pydantic_v1.BaseModel):
+    document_id: str
+    new_document_id: str
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
src/athena/types/filter_model.py (new file)
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+from .filter_operator import FilterOperator
+
+
+class FilterModel(pydantic_v1.BaseModel):
+    dimension: str
+    operator: FilterOperator
+    values: typing.List[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
src/athena/types/filter_operator.py (new file)
@@ -0,0 +1,73 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class FilterOperator(str, enum.Enum):
+    """
+    An enumeration.
+    """
+
+    EQUALS = "equals"
+    NOT_EQUALS = "notEquals"
+    CONTAINS = "contains"
+    NOT_CONTAINS = "notContains"
+    STARTS_WITH = "startsWith"
+    ENDS_WITH = "endsWith"
+    GT = "gt"
+    GTE = "gte"
+    LT = "lt"
+    LTE = "lte"
+    SET = "set"
+    NOT_SET = "notSet"
+    IN = "in"
+    NOT_IN = "notIn"
+
+    def visit(
+        self,
+        equals: typing.Callable[[], T_Result],
+        not_equals: typing.Callable[[], T_Result],
+        contains: typing.Callable[[], T_Result],
+        not_contains: typing.Callable[[], T_Result],
+        starts_with: typing.Callable[[], T_Result],
+        ends_with: typing.Callable[[], T_Result],
+        gt: typing.Callable[[], T_Result],
+        gte: typing.Callable[[], T_Result],
+        lt: typing.Callable[[], T_Result],
+        lte: typing.Callable[[], T_Result],
+        set_: typing.Callable[[], T_Result],
+        not_set: typing.Callable[[], T_Result],
+        in_: typing.Callable[[], T_Result],
+        not_in: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is FilterOperator.EQUALS:
+            return equals()
+        if self is FilterOperator.NOT_EQUALS:
+            return not_equals()
+        if self is FilterOperator.CONTAINS:
+            return contains()
+        if self is FilterOperator.NOT_CONTAINS:
+            return not_contains()
+        if self is FilterOperator.STARTS_WITH:
+            return starts_with()
+        if self is FilterOperator.ENDS_WITH:
+            return ends_with()
+        if self is FilterOperator.GT:
+            return gt()
+        if self is FilterOperator.GTE:
+            return gte()
+        if self is FilterOperator.LT:
+            return lt()
+        if self is FilterOperator.LTE:
+            return lte()
+        if self is FilterOperator.SET:
+            return set_()
+        if self is FilterOperator.NOT_SET:
+            return not_set()
+        if self is FilterOperator.IN:
+            return in_()
+        if self is FilterOperator.NOT_IN:
+            return not_in()
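FilterOperator follows the visitor-style dispatch Fern generates for its enums: visit() takes one callable per member and invokes the one matching self. A small illustrative sketch that maps each operator to a short human-readable label (the labels are made up for the example):

    from athena import FilterOperator

    def describe(op: FilterOperator) -> str:
        # Exhaustive dispatch: visit() requires a callable for every member,
        # so omitting one raises a TypeError at the call site.
        return op.visit(
            equals=lambda: "is",
            not_equals=lambda: "is not",
            contains=lambda: "contains",
            not_contains=lambda: "does not contain",
            starts_with=lambda: "starts with",
            ends_with=lambda: "ends with",
            gt=lambda: "is greater than",
            gte=lambda: "is at least",
            lt=lambda: "is less than",
            lte=lambda: "is at most",
            set_=lambda: "is set",
            not_set=lambda: "is not set",
            in_=lambda: "is one of",
            not_in=lambda: "is not one of",
        )

    print(describe(FilterOperator.NOT_IN))  # "is not one of"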
src/athena/types/llm_model.py
@@ -14,6 +14,7 @@ class LlmModel(str, enum.Enum):
     GPT_35_TURBO = "gpt-3.5-turbo"
     GPT_4_TURBO = "gpt-4-turbo"
     GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview"
+    GPT_4_O_MINI = "gpt-4o-mini"
     GPT_4_O = "gpt-4o"
     GPT_4 = "gpt-4"
     MIXTRAL_SMALL_8_X_7_B_0211 = "mixtral-small-8x7b-0211"
@@ -21,6 +22,7 @@ class LlmModel(str, enum.Enum):
     MIXTRAL_8_X_22_B_INSTRUCT = "mixtral-8x22b-instruct"
     LLAMA_V_38_B_INSTRUCT = "llama-v3-8b-instruct"
     LLAMA_V_370_B_INSTRUCT = "llama-v3-70b-instruct"
+    CLAUDE_35_SONNET_20240620 = "claude-3-5-sonnet-20240620"
     CLAUDE_3_OPUS_20240229 = "claude-3-opus-20240229"
     CLAUDE_3_SONNET_20240229 = "claude-3-sonnet-20240229"
     CLAUDE_3_HAIKU_20240307 = "claude-3-haiku-20240307"
@@ -39,6 +41,7 @@ class LlmModel(str, enum.Enum):
         gpt_35_turbo: typing.Callable[[], T_Result],
         gpt_4_turbo: typing.Callable[[], T_Result],
         gpt_4_turbo_preview: typing.Callable[[], T_Result],
+        gpt_4_o_mini: typing.Callable[[], T_Result],
         gpt_4_o: typing.Callable[[], T_Result],
         gpt_4: typing.Callable[[], T_Result],
         mixtral_small_8_x_7_b_0211: typing.Callable[[], T_Result],
@@ -46,6 +49,7 @@ class LlmModel(str, enum.Enum):
         mixtral_8_x_22_b_instruct: typing.Callable[[], T_Result],
         llama_v_38_b_instruct: typing.Callable[[], T_Result],
         llama_v_370_b_instruct: typing.Callable[[], T_Result],
+        claude_35_sonnet_20240620: typing.Callable[[], T_Result],
         claude_3_opus_20240229: typing.Callable[[], T_Result],
         claude_3_sonnet_20240229: typing.Callable[[], T_Result],
         claude_3_haiku_20240307: typing.Callable[[], T_Result],
@@ -65,6 +69,8 @@ class LlmModel(str, enum.Enum):
             return gpt_4_turbo()
         if self is LlmModel.GPT_4_TURBO_PREVIEW:
             return gpt_4_turbo_preview()
+        if self is LlmModel.GPT_4_O_MINI:
+            return gpt_4_o_mini()
         if self is LlmModel.GPT_4_O:
             return gpt_4_o()
         if self is LlmModel.GPT_4:
@@ -79,6 +85,8 @@ class LlmModel(str, enum.Enum):
             return llama_v_38_b_instruct()
         if self is LlmModel.LLAMA_V_370_B_INSTRUCT:
             return llama_v_370_b_instruct()
+        if self is LlmModel.CLAUDE_35_SONNET_20240620:
+            return claude_35_sonnet_20240620()
         if self is LlmModel.CLAUDE_3_OPUS_20240229:
             return claude_3_opus_20240229()
         if self is LlmModel.CLAUDE_3_SONNET_20240229:
src/athena/types/model.py
@@ -16,6 +16,7 @@ class Model(str, enum.Enum):
     GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview"
     GPT_4 = "gpt-4"
     GPT_4_O = "gpt-4o"
+    GPT_4_O_MINI = "gpt-4o-mini"
     MIXTRAL_SMALL_8_X_7_B_0211 = "mixtral-small-8x7b-0211"
     MISTRAL_LARGE_0224 = "mistral-large-0224"
     MIXTRAL_8_X_22_B_INSTRUCT = "mixtral-8x22b-instruct"
@@ -34,6 +35,7 @@ class Model(str, enum.Enum):
         gpt_4_turbo_preview: typing.Callable[[], T_Result],
         gpt_4: typing.Callable[[], T_Result],
         gpt_4_o: typing.Callable[[], T_Result],
+        gpt_4_o_mini: typing.Callable[[], T_Result],
         mixtral_small_8_x_7_b_0211: typing.Callable[[], T_Result],
         mistral_large_0224: typing.Callable[[], T_Result],
         mixtral_8_x_22_b_instruct: typing.Callable[[], T_Result],
@@ -55,6 +57,8 @@ class Model(str, enum.Enum):
             return gpt_4()
         if self is Model.GPT_4_O:
             return gpt_4_o()
+        if self is Model.GPT_4_O_MINI:
+            return gpt_4_o_mini()
         if self is Model.MIXTRAL_SMALL_8_X_7_B_0211:
             return mixtral_small_8_x_7_b_0211()
         if self is Model.MISTRAL_LARGE_0224:
src/athena/types/query_model.py (new file)
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+from .filter_model import FilterModel
+from .time_dimension_model import TimeDimensionModel
+
+
+class QueryModel(pydantic_v1.BaseModel):
+    measures: typing.Optional[typing.List[str]] = None
+    time_dimensions: typing.Optional[typing.List[TimeDimensionModel]] = pydantic_v1.Field(
+        alias="timeDimensions", default=None
+    )
+    dimensions: typing.Optional[typing.List[str]] = None
+    filters: typing.Optional[typing.List[FilterModel]] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
src/athena/types/semantic_query_out.py (new file)
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+
+
+class SemanticQueryOut(pydantic_v1.BaseModel):
+    output: typing.Dict[str, typing.Any]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
src/athena/types/time_dimension_model.py (new file)
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+
+
+class TimeDimensionModel(pydantic_v1.BaseModel):
+    dimension: str
+    granularity: str
+    date_range: typing.List[str] = pydantic_v1.Field(alias="dateRange")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
+ json_encoders = {dt.datetime: serialize_datetime}