llama-cloud 0.0.1 (llama_cloud-0.0.1-py3-none-any.whl)
This diff shows the contents of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects changes between package versions as they appear in those registries.
Potentially problematic release.
This version of llama-cloud might be problematic.
- llama_cloud/__init__.py +295 -0
- llama_cloud/client.py +72 -0
- llama_cloud/core/__init__.py +17 -0
- llama_cloud/core/api_error.py +15 -0
- llama_cloud/core/client_wrapper.py +51 -0
- llama_cloud/core/datetime_utils.py +28 -0
- llama_cloud/core/jsonable_encoder.py +103 -0
- llama_cloud/core/remove_none_from_dict.py +11 -0
- llama_cloud/errors/__init__.py +5 -0
- llama_cloud/errors/unprocessable_entity_error.py +9 -0
- llama_cloud/resources/__init__.py +40 -0
- llama_cloud/resources/api_keys/__init__.py +2 -0
- llama_cloud/resources/api_keys/client.py +302 -0
- llama_cloud/resources/billing/__init__.py +2 -0
- llama_cloud/resources/billing/client.py +234 -0
- llama_cloud/resources/component_definitions/__init__.py +2 -0
- llama_cloud/resources/component_definitions/client.py +192 -0
- llama_cloud/resources/data_sinks/__init__.py +5 -0
- llama_cloud/resources/data_sinks/client.py +506 -0
- llama_cloud/resources/data_sinks/types/__init__.py +6 -0
- llama_cloud/resources/data_sinks/types/data_sink_update_component.py +7 -0
- llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +17 -0
- llama_cloud/resources/data_sources/__init__.py +5 -0
- llama_cloud/resources/data_sources/client.py +521 -0
- llama_cloud/resources/data_sources/types/__init__.py +7 -0
- llama_cloud/resources/data_sources/types/data_source_update_component.py +7 -0
- llama_cloud/resources/data_sources/types/data_source_update_component_one.py +19 -0
- llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py +7 -0
- llama_cloud/resources/deprecated/__init__.py +2 -0
- llama_cloud/resources/deprecated/client.py +982 -0
- llama_cloud/resources/evals/__init__.py +2 -0
- llama_cloud/resources/evals/client.py +745 -0
- llama_cloud/resources/files/__init__.py +5 -0
- llama_cloud/resources/files/client.py +560 -0
- llama_cloud/resources/files/types/__init__.py +5 -0
- llama_cloud/resources/files/types/file_create_resource_info_value.py +5 -0
- llama_cloud/resources/parsing/__init__.py +2 -0
- llama_cloud/resources/parsing/client.py +982 -0
- llama_cloud/resources/pipelines/__init__.py +5 -0
- llama_cloud/resources/pipelines/client.py +2599 -0
- llama_cloud/resources/pipelines/types/__init__.py +5 -0
- llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py +7 -0
- llama_cloud/resources/projects/__init__.py +2 -0
- llama_cloud/resources/projects/client.py +1231 -0
- llama_cloud/types/__init__.py +253 -0
- llama_cloud/types/api_key.py +37 -0
- llama_cloud/types/azure_open_ai_embedding.py +75 -0
- llama_cloud/types/base.py +26 -0
- llama_cloud/types/base_prompt_template.py +44 -0
- llama_cloud/types/bedrock_embedding.py +56 -0
- llama_cloud/types/chat_message.py +35 -0
- llama_cloud/types/cloud_az_storage_blob_data_source.py +40 -0
- llama_cloud/types/cloud_chroma_vector_store.py +40 -0
- llama_cloud/types/cloud_document.py +36 -0
- llama_cloud/types/cloud_document_create.py +36 -0
- llama_cloud/types/cloud_gcs_data_source.py +37 -0
- llama_cloud/types/cloud_google_drive_data_source.py +36 -0
- llama_cloud/types/cloud_one_drive_data_source.py +38 -0
- llama_cloud/types/cloud_pinecone_vector_store.py +46 -0
- llama_cloud/types/cloud_postgres_vector_store.py +44 -0
- llama_cloud/types/cloud_qdrant_vector_store.py +48 -0
- llama_cloud/types/cloud_s_3_data_source.py +42 -0
- llama_cloud/types/cloud_sharepoint_data_source.py +38 -0
- llama_cloud/types/cloud_weaviate_vector_store.py +38 -0
- llama_cloud/types/code_splitter.py +46 -0
- llama_cloud/types/cohere_embedding.py +46 -0
- llama_cloud/types/configurable_data_sink_names.py +37 -0
- llama_cloud/types/configurable_data_source_names.py +41 -0
- llama_cloud/types/configurable_transformation_definition.py +45 -0
- llama_cloud/types/configurable_transformation_names.py +73 -0
- llama_cloud/types/configured_transformation_item.py +43 -0
- llama_cloud/types/configured_transformation_item_component.py +9 -0
- llama_cloud/types/configured_transformation_item_component_one.py +35 -0
- llama_cloud/types/data_sink.py +40 -0
- llama_cloud/types/data_sink_component.py +7 -0
- llama_cloud/types/data_sink_component_one.py +17 -0
- llama_cloud/types/data_sink_create.py +36 -0
- llama_cloud/types/data_sink_create_component.py +7 -0
- llama_cloud/types/data_sink_create_component_one.py +17 -0
- llama_cloud/types/data_sink_definition.py +41 -0
- llama_cloud/types/data_source.py +44 -0
- llama_cloud/types/data_source_component.py +7 -0
- llama_cloud/types/data_source_component_one.py +19 -0
- llama_cloud/types/data_source_create.py +40 -0
- llama_cloud/types/data_source_create_component.py +7 -0
- llama_cloud/types/data_source_create_component_one.py +19 -0
- llama_cloud/types/data_source_create_custom_metadata_value.py +7 -0
- llama_cloud/types/data_source_custom_metadata_value.py +7 -0
- llama_cloud/types/data_source_definition.py +41 -0
- llama_cloud/types/eval_dataset.py +37 -0
- llama_cloud/types/eval_dataset_job_params.py +36 -0
- llama_cloud/types/eval_dataset_job_record.py +59 -0
- llama_cloud/types/eval_execution_params.py +38 -0
- llama_cloud/types/eval_execution_params_override.py +38 -0
- llama_cloud/types/eval_llm_model_data.py +33 -0
- llama_cloud/types/eval_question.py +39 -0
- llama_cloud/types/eval_question_create.py +28 -0
- llama_cloud/types/eval_question_result.py +49 -0
- llama_cloud/types/file.py +46 -0
- llama_cloud/types/file_resource_info_value.py +5 -0
- llama_cloud/types/filter_condition.py +21 -0
- llama_cloud/types/filter_operator.py +65 -0
- llama_cloud/types/gemini_embedding.py +51 -0
- llama_cloud/types/html_node_parser.py +44 -0
- llama_cloud/types/http_validation_error.py +29 -0
- llama_cloud/types/hugging_face_inference_api_embedding.py +68 -0
- llama_cloud/types/hugging_face_inference_api_embedding_token.py +5 -0
- llama_cloud/types/json_node_parser.py +43 -0
- llama_cloud/types/llama_parse_supported_file_extensions.py +161 -0
- llama_cloud/types/llm.py +55 -0
- llama_cloud/types/local_eval.py +46 -0
- llama_cloud/types/local_eval_results.py +37 -0
- llama_cloud/types/local_eval_sets.py +30 -0
- llama_cloud/types/managed_ingestion_status.py +37 -0
- llama_cloud/types/markdown_element_node_parser.py +49 -0
- llama_cloud/types/markdown_node_parser.py +43 -0
- llama_cloud/types/message_role.py +45 -0
- llama_cloud/types/metadata_filter.py +41 -0
- llama_cloud/types/metadata_filter_value.py +5 -0
- llama_cloud/types/metadata_filters.py +41 -0
- llama_cloud/types/metadata_filters_filters_item.py +8 -0
- llama_cloud/types/metric_result.py +30 -0
- llama_cloud/types/node_parser.py +37 -0
- llama_cloud/types/object_type.py +33 -0
- llama_cloud/types/open_ai_embedding.py +73 -0
- llama_cloud/types/parser_languages.py +361 -0
- llama_cloud/types/parsing_history_item.py +36 -0
- llama_cloud/types/parsing_job.py +30 -0
- llama_cloud/types/parsing_job_json_result.py +29 -0
- llama_cloud/types/parsing_job_markdown_result.py +29 -0
- llama_cloud/types/parsing_job_text_result.py +29 -0
- llama_cloud/types/parsing_usage.py +29 -0
- llama_cloud/types/pipeline.py +64 -0
- llama_cloud/types/pipeline_create.py +61 -0
- llama_cloud/types/pipeline_data_source.py +46 -0
- llama_cloud/types/pipeline_data_source_component.py +7 -0
- llama_cloud/types/pipeline_data_source_component_one.py +19 -0
- llama_cloud/types/pipeline_data_source_create.py +32 -0
- llama_cloud/types/pipeline_data_source_custom_metadata_value.py +7 -0
- llama_cloud/types/pipeline_deployment.py +38 -0
- llama_cloud/types/pipeline_file.py +52 -0
- llama_cloud/types/pipeline_file_create.py +36 -0
- llama_cloud/types/pipeline_file_create_custom_metadata_value.py +7 -0
- llama_cloud/types/pipeline_file_custom_metadata_value.py +7 -0
- llama_cloud/types/pipeline_file_resource_info_value.py +7 -0
- llama_cloud/types/pipeline_file_status_response.py +35 -0
- llama_cloud/types/pipeline_type.py +21 -0
- llama_cloud/types/pooling.py +29 -0
- llama_cloud/types/preset_retrieval_params.py +40 -0
- llama_cloud/types/presigned_url.py +36 -0
- llama_cloud/types/project.py +42 -0
- llama_cloud/types/project_create.py +32 -0
- llama_cloud/types/prompt_mixin_prompts.py +36 -0
- llama_cloud/types/prompt_spec.py +35 -0
- llama_cloud/types/pydantic_program_mode.py +41 -0
- llama_cloud/types/related_node_info.py +37 -0
- llama_cloud/types/retrieve_results.py +40 -0
- llama_cloud/types/sentence_splitter.py +48 -0
- llama_cloud/types/simple_file_node_parser.py +44 -0
- llama_cloud/types/status_enum.py +33 -0
- llama_cloud/types/supported_eval_llm_model.py +35 -0
- llama_cloud/types/supported_eval_llm_model_names.py +29 -0
- llama_cloud/types/text_node.py +62 -0
- llama_cloud/types/text_node_relationships_value.py +7 -0
- llama_cloud/types/text_node_with_score.py +36 -0
- llama_cloud/types/token_text_splitter.py +43 -0
- llama_cloud/types/transformation_category_names.py +21 -0
- llama_cloud/types/validation_error.py +31 -0
- llama_cloud/types/validation_error_loc_item.py +5 -0
- llama_cloud-0.0.1.dist-info/LICENSE +21 -0
- llama_cloud-0.0.1.dist-info/METADATA +25 -0
- llama_cloud-0.0.1.dist-info/RECORD +173 -0
- llama_cloud-0.0.1.dist-info/WHEEL +4 -0

llama_cloud/types/node_parser.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class NodeParser(pydantic.BaseModel):
+    """
+    Base interface for node parser.
+    """
+
+    include_metadata: typing.Optional[bool] = pydantic.Field(
+        description="Whether or not to consider metadata when splitting."
+    )
+    include_prev_next_rel: typing.Optional[bool] = pydantic.Field(description="Include prev/next node relationships.")
+    callback_manager: typing.Optional[typing.Dict[str, typing.Any]]
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
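
For orientation, a minimal usage sketch (not part of the package itself) of how these generated models behave: the overridden json()/dict() methods default to by_alias=True and exclude_unset=True, and Config.frozen = True makes instances immutable. The import path is assumed from the file listing above.

```python
# Hypothetical usage sketch; assumes the wheel above is installed.
from llama_cloud.types.node_parser import NodeParser

parser = NodeParser(include_metadata=True, include_prev_next_rel=False)

# exclude_unset=True means fields that were never set (callback_manager,
# class_name) are omitted from the serialized output.
print(parser.json())  # -> {"include_metadata": true, "include_prev_next_rel": false}

# frozen = True: attempting to mutate an instance raises an error.
# parser.include_metadata = False  # would fail
```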

llama_cloud/types/object_type.py
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ObjectType(str, enum.Enum):
+    """
+    An enumeration.
+    """
+
+    ONE = "1"
+    TWO = "2"
+    THREE = "3"
+    FOUR = "4"
+
+    def visit(
+        self,
+        one: typing.Callable[[], T_Result],
+        two: typing.Callable[[], T_Result],
+        three: typing.Callable[[], T_Result],
+        four: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ObjectType.ONE:
+            return one()
+        if self is ObjectType.TWO:
+            return two()
+        if self is ObjectType.THREE:
+            return three()
+        if self is ObjectType.FOUR:
+            return four()
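
The generated enums pair their members with a visit() helper that makes the caller handle every member explicitly. A small sketch (the labels are illustrative; the diff itself does not document what the four object types mean):

```python
from llama_cloud.types.object_type import ObjectType

obj = ObjectType("4")  # construct from the wire value
assert obj is ObjectType.FOUR

# visit() dispatches to exactly one callback, one per enum member.
label = obj.visit(
    one=lambda: "object type 1",
    two=lambda: "object type 2",
    three=lambda: "object type 3",
    four=lambda: "object type 4",
)
print(label)  # object type 4
```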

llama_cloud/types/open_ai_embedding.py
@@ -0,0 +1,73 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class OpenAiEmbedding(pydantic.BaseModel):
+    """
+    OpenAI class for embeddings.
+
+    Args:
+        mode (str): Mode for embedding.
+            Defaults to OpenAIEmbeddingMode.TEXT_SEARCH_MODE.
+            Options are:
+
+            - OpenAIEmbeddingMode.SIMILARITY_MODE
+            - OpenAIEmbeddingMode.TEXT_SEARCH_MODE
+
+        model (str): Model for embedding.
+            Defaults to OpenAIEmbeddingModelType.TEXT_EMBED_ADA_002.
+            Options are:
+
+            - OpenAIEmbeddingModelType.DAVINCI
+            - OpenAIEmbeddingModelType.CURIE
+            - OpenAIEmbeddingModelType.BABBAGE
+            - OpenAIEmbeddingModelType.ADA
+            - OpenAIEmbeddingModelType.TEXT_EMBED_ADA_002
+    """
+
+    model_name: typing.Optional[str] = pydantic.Field(description="The name of the embedding model.")
+    embed_batch_size: typing.Optional[int] = pydantic.Field(description="The batch size for embedding calls.")
+    callback_manager: typing.Optional[typing.Dict[str, typing.Any]]
+    num_workers: typing.Optional[int] = pydantic.Field(
+        description="The number of workers to use for async embedding calls."
+    )
+    additional_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field(
+        description="Additional kwargs for the OpenAI API."
+    )
+    api_key: str = pydantic.Field(description="The OpenAI API key.")
+    api_base: typing.Optional[str] = pydantic.Field(description="The base URL for OpenAI API.")
+    api_version: typing.Optional[str] = pydantic.Field(description="The version for OpenAI API.")
+    max_retries: typing.Optional[int] = pydantic.Field(description="Maximum number of retries.")
+    timeout: typing.Optional[float] = pydantic.Field(description="Timeout for each request.")
+    default_headers: typing.Optional[typing.Dict[str, str]] = pydantic.Field(
+        description="The default headers for API requests."
+    )
+    reuse_client: typing.Optional[bool] = pydantic.Field(
+        description="Reuse the OpenAI client between requests. When doing anything with large volumes of async API calls, setting this to false can improve stability."
+    )
+    dimensions: typing.Optional[int] = pydantic.Field(
+        description="The number of dimensions on the output embedding vectors. Works only with v3 embedding models."
+    )
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
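
A hedged construction sketch for the embedding configuration model above. Only api_key is a required field; every other value shown is an illustrative assumption, not a default taken from the package.

```python
from llama_cloud.types.open_ai_embedding import OpenAiEmbedding

embedding_config = OpenAiEmbedding(
    api_key="sk-...",                     # required per the model definition above
    model_name="text-embedding-3-small",  # assumed example value, not a package default
    dimensions=256,                       # per the field description: v3 embedding models only
    reuse_client=False,                   # the field description suggests this for heavy async use
)

# Serialization again defaults to by_alias=True / exclude_unset=True,
# so only the fields set above appear in the payload.
payload = embedding_config.dict()
```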

llama_cloud/types/parser_languages.py
@@ -0,0 +1,361 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ParserLanguages(str, enum.Enum):
+    """
+    Enum for representing the languages supported by the parser
+    """
+
+    AF = "af"
+    AZ = "az"
+    BS = "bs"
+    CS = "cs"
+    CY = "cy"
+    DA = "da"
+    DE = "de"
+    EN = "en"
+    ES = "es"
+    ET = "et"
+    FR = "fr"
+    GA = "ga"
+    HR = "hr"
+    HU = "hu"
+    ID = "id"
+    IS = "is"
+    IT = "it"
+    KU = "ku"
+    LA = "la"
+    LT = "lt"
+    LV = "lv"
+    MI = "mi"
+    MS = "ms"
+    MT = "mt"
+    NL = "nl"
+    NO = "no"
+    OC = "oc"
+    PI = "pi"
+    PL = "pl"
+    PT = "pt"
+    RO = "ro"
+    RS_LATIN = "rs_latin"
+    SK = "sk"
+    SL = "sl"
+    SQ = "sq"
+    SV = "sv"
+    SW = "sw"
+    TL = "tl"
+    TR = "tr"
+    UZ = "uz"
+    VI = "vi"
+    AR = "ar"
+    FA = "fa"
+    UG = "ug"
+    UR = "ur"
+    BN = "bn"
+    AS = "as"
+    MNI = "mni"
+    RU = "ru"
+    RS_CYRILLIC = "rs_cyrillic"
+    BE = "be"
+    BG = "bg"
+    UK = "uk"
+    MN = "mn"
+    ABQ = "abq"
+    ADY = "ady"
+    KBD = "kbd"
+    AVA = "ava"
+    DAR = "dar"
+    INH = "inh"
+    CHE = "che"
+    LBE = "lbe"
+    LEZ = "lez"
+    TAB = "tab"
+    TJK = "tjk"
+    HI = "hi"
+    MR = "mr"
+    NE = "ne"
+    BH = "bh"
+    MAI = "mai"
+    ANG = "ang"
+    BHO = "bho"
+    MAH = "mah"
+    SCK = "sck"
+    NEW = "new"
+    GOM = "gom"
+    SA = "sa"
+    BGC = "bgc"
+    TH = "th"
+    CH_SIM = "ch_sim"
+    CH_TRA = "ch_tra"
+    JA = "ja"
+    KO = "ko"
+    TA = "ta"
+    TE = "te"
+    KN = "kn"
+
+    def visit(
+        self,
+        af: typing.Callable[[], T_Result],
+        az: typing.Callable[[], T_Result],
+        bs: typing.Callable[[], T_Result],
+        cs: typing.Callable[[], T_Result],
+        cy: typing.Callable[[], T_Result],
+        da: typing.Callable[[], T_Result],
+        de: typing.Callable[[], T_Result],
+        en: typing.Callable[[], T_Result],
+        es: typing.Callable[[], T_Result],
+        et: typing.Callable[[], T_Result],
+        fr: typing.Callable[[], T_Result],
+        ga: typing.Callable[[], T_Result],
+        hr: typing.Callable[[], T_Result],
+        hu: typing.Callable[[], T_Result],
+        id: typing.Callable[[], T_Result],
+        is_: typing.Callable[[], T_Result],
+        it: typing.Callable[[], T_Result],
+        ku: typing.Callable[[], T_Result],
+        la: typing.Callable[[], T_Result],
+        lt: typing.Callable[[], T_Result],
+        lv: typing.Callable[[], T_Result],
+        mi: typing.Callable[[], T_Result],
+        ms: typing.Callable[[], T_Result],
+        mt: typing.Callable[[], T_Result],
+        nl: typing.Callable[[], T_Result],
+        no: typing.Callable[[], T_Result],
+        oc: typing.Callable[[], T_Result],
+        pi: typing.Callable[[], T_Result],
+        pl: typing.Callable[[], T_Result],
+        pt: typing.Callable[[], T_Result],
+        ro: typing.Callable[[], T_Result],
+        rs_latin: typing.Callable[[], T_Result],
+        sk: typing.Callable[[], T_Result],
+        sl: typing.Callable[[], T_Result],
+        sq: typing.Callable[[], T_Result],
+        sv: typing.Callable[[], T_Result],
+        sw: typing.Callable[[], T_Result],
+        tl: typing.Callable[[], T_Result],
+        tr: typing.Callable[[], T_Result],
+        uz: typing.Callable[[], T_Result],
+        vi: typing.Callable[[], T_Result],
+        ar: typing.Callable[[], T_Result],
+        fa: typing.Callable[[], T_Result],
+        ug: typing.Callable[[], T_Result],
+        ur: typing.Callable[[], T_Result],
+        bn: typing.Callable[[], T_Result],
+        as_: typing.Callable[[], T_Result],
+        mni: typing.Callable[[], T_Result],
+        ru: typing.Callable[[], T_Result],
+        rs_cyrillic: typing.Callable[[], T_Result],
+        be: typing.Callable[[], T_Result],
+        bg: typing.Callable[[], T_Result],
+        uk: typing.Callable[[], T_Result],
+        mn: typing.Callable[[], T_Result],
+        abq: typing.Callable[[], T_Result],
+        ady: typing.Callable[[], T_Result],
+        kbd: typing.Callable[[], T_Result],
+        ava: typing.Callable[[], T_Result],
+        dar: typing.Callable[[], T_Result],
+        inh: typing.Callable[[], T_Result],
+        che: typing.Callable[[], T_Result],
+        lbe: typing.Callable[[], T_Result],
+        lez: typing.Callable[[], T_Result],
+        tab: typing.Callable[[], T_Result],
+        tjk: typing.Callable[[], T_Result],
+        hi: typing.Callable[[], T_Result],
+        mr: typing.Callable[[], T_Result],
+        ne: typing.Callable[[], T_Result],
+        bh: typing.Callable[[], T_Result],
+        mai: typing.Callable[[], T_Result],
+        ang: typing.Callable[[], T_Result],
+        bho: typing.Callable[[], T_Result],
+        mah: typing.Callable[[], T_Result],
+        sck: typing.Callable[[], T_Result],
+        new: typing.Callable[[], T_Result],
+        gom: typing.Callable[[], T_Result],
+        sa: typing.Callable[[], T_Result],
+        bgc: typing.Callable[[], T_Result],
+        th: typing.Callable[[], T_Result],
+        ch_sim: typing.Callable[[], T_Result],
+        ch_tra: typing.Callable[[], T_Result],
+        ja: typing.Callable[[], T_Result],
+        ko: typing.Callable[[], T_Result],
+        ta: typing.Callable[[], T_Result],
+        te: typing.Callable[[], T_Result],
+        kn: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ParserLanguages.AF:
+            return af()
+        if self is ParserLanguages.AZ:
+            return az()
+        if self is ParserLanguages.BS:
+            return bs()
+        if self is ParserLanguages.CS:
+            return cs()
+        if self is ParserLanguages.CY:
+            return cy()
+        if self is ParserLanguages.DA:
+            return da()
+        if self is ParserLanguages.DE:
+            return de()
+        if self is ParserLanguages.EN:
+            return en()
+        if self is ParserLanguages.ES:
+            return es()
+        if self is ParserLanguages.ET:
+            return et()
+        if self is ParserLanguages.FR:
+            return fr()
+        if self is ParserLanguages.GA:
+            return ga()
+        if self is ParserLanguages.HR:
+            return hr()
+        if self is ParserLanguages.HU:
+            return hu()
+        if self is ParserLanguages.ID:
+            return id()
+        if self is ParserLanguages.IS:
+            return is_()
+        if self is ParserLanguages.IT:
+            return it()
+        if self is ParserLanguages.KU:
+            return ku()
+        if self is ParserLanguages.LA:
+            return la()
+        if self is ParserLanguages.LT:
+            return lt()
+        if self is ParserLanguages.LV:
+            return lv()
+        if self is ParserLanguages.MI:
+            return mi()
+        if self is ParserLanguages.MS:
+            return ms()
+        if self is ParserLanguages.MT:
+            return mt()
+        if self is ParserLanguages.NL:
+            return nl()
+        if self is ParserLanguages.NO:
+            return no()
+        if self is ParserLanguages.OC:
+            return oc()
+        if self is ParserLanguages.PI:
+            return pi()
+        if self is ParserLanguages.PL:
+            return pl()
+        if self is ParserLanguages.PT:
+            return pt()
+        if self is ParserLanguages.RO:
+            return ro()
+        if self is ParserLanguages.RS_LATIN:
+            return rs_latin()
+        if self is ParserLanguages.SK:
+            return sk()
+        if self is ParserLanguages.SL:
+            return sl()
+        if self is ParserLanguages.SQ:
+            return sq()
+        if self is ParserLanguages.SV:
+            return sv()
+        if self is ParserLanguages.SW:
+            return sw()
+        if self is ParserLanguages.TL:
+            return tl()
+        if self is ParserLanguages.TR:
+            return tr()
+        if self is ParserLanguages.UZ:
+            return uz()
+        if self is ParserLanguages.VI:
+            return vi()
+        if self is ParserLanguages.AR:
+            return ar()
+        if self is ParserLanguages.FA:
+            return fa()
+        if self is ParserLanguages.UG:
+            return ug()
+        if self is ParserLanguages.UR:
+            return ur()
+        if self is ParserLanguages.BN:
+            return bn()
+        if self is ParserLanguages.AS:
+            return as_()
+        if self is ParserLanguages.MNI:
+            return mni()
+        if self is ParserLanguages.RU:
+            return ru()
+        if self is ParserLanguages.RS_CYRILLIC:
+            return rs_cyrillic()
+        if self is ParserLanguages.BE:
+            return be()
+        if self is ParserLanguages.BG:
+            return bg()
+        if self is ParserLanguages.UK:
+            return uk()
+        if self is ParserLanguages.MN:
+            return mn()
+        if self is ParserLanguages.ABQ:
+            return abq()
+        if self is ParserLanguages.ADY:
+            return ady()
+        if self is ParserLanguages.KBD:
+            return kbd()
+        if self is ParserLanguages.AVA:
+            return ava()
+        if self is ParserLanguages.DAR:
+            return dar()
+        if self is ParserLanguages.INH:
+            return inh()
+        if self is ParserLanguages.CHE:
+            return che()
+        if self is ParserLanguages.LBE:
+            return lbe()
+        if self is ParserLanguages.LEZ:
+            return lez()
+        if self is ParserLanguages.TAB:
+            return tab()
+        if self is ParserLanguages.TJK:
+            return tjk()
+        if self is ParserLanguages.HI:
+            return hi()
+        if self is ParserLanguages.MR:
+            return mr()
+        if self is ParserLanguages.NE:
+            return ne()
+        if self is ParserLanguages.BH:
+            return bh()
+        if self is ParserLanguages.MAI:
+            return mai()
+        if self is ParserLanguages.ANG:
+            return ang()
+        if self is ParserLanguages.BHO:
+            return bho()
+        if self is ParserLanguages.MAH:
+            return mah()
+        if self is ParserLanguages.SCK:
+            return sck()
+        if self is ParserLanguages.NEW:
+            return new()
+        if self is ParserLanguages.GOM:
+            return gom()
+        if self is ParserLanguages.SA:
+            return sa()
+        if self is ParserLanguages.BGC:
+            return bgc()
+        if self is ParserLanguages.TH:
+            return th()
+        if self is ParserLanguages.CH_SIM:
+            return ch_sim()
+        if self is ParserLanguages.CH_TRA:
+            return ch_tra()
+        if self is ParserLanguages.JA:
+            return ja()
+        if self is ParserLanguages.KO:
+            return ko()
+        if self is ParserLanguages.TA:
+            return ta()
+        if self is ParserLanguages.TE:
+            return te()
+        if self is ParserLanguages.KN:
+            return kn()
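
Because visit() on this enum takes one callback per supported language, the lighter-weight way to use it is plain value construction and comparison. A small sketch, assuming the import path from the file listing above:

```python
from llama_cloud.types.parser_languages import ParserLanguages

lang = ParserLanguages("ch_sim")  # construct from the wire value
assert lang is ParserLanguages.CH_SIM

# As a str-backed enum, the member's value is the string presumably
# accepted by the parsing endpoints.
print(lang.value)  # "ch_sim"
```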

llama_cloud/types/parsing_history_item.py
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ParsingHistoryItem(pydantic.BaseModel):
+    user_id: str
+    day: str
+    job_id: str
+    file_name: str
+    original_file_name: str
+    expired: typing.Optional[bool]
+    pages: typing.Optional[float]
+    images: typing.Optional[float]
+    time: typing.Optional[float]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/parsing_job.py
@@ -0,0 +1,30 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .status_enum import StatusEnum
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ParsingJob(pydantic.BaseModel):
+    id: str
+    status: StatusEnum
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/parsing_job_json_result.py
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ParsingJobJsonResult(pydantic.BaseModel):
+    pages: typing.Optional[typing.Any]
+    job_metadata: typing.Optional[typing.Any]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/parsing_job_markdown_result.py
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ParsingJobMarkdownResult(pydantic.BaseModel):
+    markdown: str = pydantic.Field(description="The markdown result of the parsing job")
+    job_metadata: typing.Optional[typing.Any]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
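
A sketch of deserializing a result payload into the model above. The payload shape is assumed from the field definitions, not taken from any API documentation.

```python
from llama_cloud.types.parsing_job_markdown_result import ParsingJobMarkdownResult

# Hypothetical response body; markdown is required, job_metadata accepts anything.
raw = {"markdown": "# Invoice 42\n\nTotal: $10.00", "job_metadata": {"credits_used": 1}}
result = ParsingJobMarkdownResult.parse_obj(raw)  # pydantic v1-style parsing

print(result.markdown.splitlines()[0])  # "# Invoice 42"
```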

llama_cloud/types/parsing_job_text_result.py
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ParsingJobTextResult(pydantic.BaseModel):
+    text: str = pydantic.Field(description="The text result of the parsing job")
+    job_metadata: typing.Optional[typing.Any]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/parsing_usage.py
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ParsingUsage(pydantic.BaseModel):
+    usage_pdf_pages: int
+    max_pdf_pages: int
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
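
Finally, a tiny worked example with the usage model above; the numbers are made up and the arithmetic simply shows how the two counters relate.

```python
from llama_cloud.types.parsing_usage import ParsingUsage

usage = ParsingUsage(usage_pdf_pages=120, max_pdf_pages=1000)

# Pages remaining in the quota is just the difference of the two fields.
remaining = usage.max_pdf_pages - usage.usage_pdf_pages
print(f"{remaining} of {usage.max_pdf_pages} PDF pages left")  # 880 of 1000 PDF pages left
```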