llama-cloud 0.1.25__py3-none-any.whl → 0.1.27__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of llama-cloud might be problematic.

Files changed (37)
  1. llama_cloud/__init__.py +28 -2
  2. llama_cloud/client.py +6 -0
  3. llama_cloud/resources/__init__.py +4 -0
  4. llama_cloud/resources/beta/client.py +14 -2
  5. llama_cloud/resources/llama_apps/__init__.py +2 -0
  6. llama_cloud/resources/llama_apps/client.py +160 -0
  7. llama_cloud/resources/llama_extract/client.py +129 -0
  8. llama_cloud/resources/parsing/client.py +8 -0
  9. llama_cloud/resources/responses/__init__.py +2 -0
  10. llama_cloud/resources/responses/client.py +137 -0
  11. llama_cloud/types/__init__.py +24 -2
  12. llama_cloud/types/agent_deployment_list.py +32 -0
  13. llama_cloud/types/agent_deployment_summary.py +38 -0
  14. llama_cloud/types/app_schema_chat_chat_message.py +2 -2
  15. llama_cloud/types/app_schema_responses_message_role.py +33 -0
  16. llama_cloud/types/cloud_google_drive_data_source.py +1 -3
  17. llama_cloud/types/extract_config.py +2 -0
  18. llama_cloud/types/extract_config_priority.py +29 -0
  19. llama_cloud/types/extract_models.py +8 -0
  20. llama_cloud/types/extract_schema_generate_response.py +38 -0
  21. llama_cloud/types/extract_schema_generate_response_data_schema_value.py +7 -0
  22. llama_cloud/types/input_message.py +2 -2
  23. llama_cloud/types/legacy_parse_job_config.py +1 -0
  24. llama_cloud/types/llama_index_core_base_llms_types_chat_message.py +2 -2
  25. llama_cloud/types/{message_role.py → llama_index_core_base_llms_types_message_role.py} +9 -9
  26. llama_cloud/types/llama_parse_parameters.py +3 -0
  27. llama_cloud/types/llama_parse_parameters_priority.py +29 -0
  28. llama_cloud/types/message.py +38 -0
  29. llama_cloud/types/metadata_filter.py +1 -1
  30. llama_cloud/types/model_configuration.py +39 -0
  31. llama_cloud/types/parse_job_config.py +3 -0
  32. llama_cloud/types/parse_job_config_priority.py +29 -0
  33. llama_cloud/types/text_content_block.py +34 -0
  34. {llama_cloud-0.1.25.dist-info → llama_cloud-0.1.27.dist-info}/METADATA +3 -2
  35. {llama_cloud-0.1.25.dist-info → llama_cloud-0.1.27.dist-info}/RECORD +37 -22
  36. {llama_cloud-0.1.25.dist-info → llama_cloud-0.1.27.dist-info}/WHEEL +1 -1
  37. {llama_cloud-0.1.25.dist-info → llama_cloud-0.1.27.dist-info}/LICENSE +0 -0
llama_cloud/types/agent_deployment_summary.py
@@ -0,0 +1,38 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class AgentDeploymentSummary(pydantic.BaseModel):
+    id: str = pydantic.Field(description="Deployment ID. Prefixed with dpl-")
+    project_id: str = pydantic.Field(description="Project ID")
+    app_slug: str = pydantic.Field(description="readable ID of the deployed app")
+    thumbnail_url: typing.Optional[str]
+    base_url: str = pydantic.Field(description="Base URL of the deployed app")
+    display_name: str = pydantic.Field(description="Display name of the deployed app")
+    created_at: dt.datetime = pydantic.Field(description="Timestamp when the app deployment was created")
+    updated_at: dt.datetime = pydantic.Field(description="Timestamp when the app deployment was last updated")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
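
A minimal usage sketch of the new model (import path taken from the file list above; all field values below are illustrative, not from the source):

import datetime as dt

from llama_cloud.types.agent_deployment_summary import AgentDeploymentSummary

summary = AgentDeploymentSummary(
    id="dpl-1234",
    project_id="proj-5678",
    app_slug="my-agent-app",
    base_url="https://my-agent-app.example.com",
    display_name="My Agent App",
    created_at=dt.datetime(2024, 1, 1, tzinfo=dt.timezone.utc),
    updated_at=dt.datetime(2024, 1, 2, tzinfo=dt.timezone.utc),
)

# json()/dict() default to by_alias=True and exclude_unset=True, so the
# optional thumbnail_url is simply omitted from the output when not set.
print(summary.json())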
llama_cloud/types/app_schema_chat_chat_message.py
@@ -4,8 +4,8 @@ import datetime as dt
 import typing

 from ..core.datetime_utils import serialize_datetime
+from .llama_index_core_base_llms_types_message_role import LlamaIndexCoreBaseLlmsTypesMessageRole
 from .message_annotation import MessageAnnotation
-from .message_role import MessageRole

 try:
     import pydantic
@@ -22,7 +22,7 @@ class AppSchemaChatChatMessage(pydantic.BaseModel):
     annotations: typing.Optional[typing.List[MessageAnnotation]] = pydantic.Field(
         description="Retrieval annotations for the message."
     )
-    role: MessageRole = pydantic.Field(description="The role of the message.")
+    role: LlamaIndexCoreBaseLlmsTypesMessageRole = pydantic.Field(description="The role of the message.")
     content: typing.Optional[str]
     additional_kwargs: typing.Optional[typing.Dict[str, str]] = pydantic.Field(
         description="Additional arguments passed to the model"
llama_cloud/types/app_schema_responses_message_role.py
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class AppSchemaResponsesMessageRole(str, enum.Enum):
+    """
+    Enum representing the role of a message in a conversation.
+
+    - system: The system message that sets the context or instructions.
+    - user: The user's message in the conversation.
+    - assistant: The AI assistant's response in the conversation.
+    """
+
+    SYSTEM = "system"
+    USER = "user"
+    ASSISTANT = "assistant"
+
+    def visit(
+        self,
+        system: typing.Callable[[], T_Result],
+        user: typing.Callable[[], T_Result],
+        assistant: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is AppSchemaResponsesMessageRole.SYSTEM:
+            return system()
+        if self is AppSchemaResponsesMessageRole.USER:
+            return user()
+        if self is AppSchemaResponsesMessageRole.ASSISTANT:
+            return assistant()
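
A short sketch of the visit() helper defined above (the label strings are illustrative):

from llama_cloud.types.app_schema_responses_message_role import AppSchemaResponsesMessageRole

role = AppSchemaResponsesMessageRole.ASSISTANT

# visit() requires one callback per member, so call sites stay exhaustive
# if new roles are ever added to the enum.
label = role.visit(
    system=lambda: "context-setting message",
    user=lambda: "end-user message",
    assistant=lambda: "model response",
)
print(label)  # "model response"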
llama_cloud/types/cloud_google_drive_data_source.py
@@ -17,9 +17,7 @@ except ImportError:
 class CloudGoogleDriveDataSource(pydantic.BaseModel):
     supports_access_control: typing.Optional[bool]
     folder_id: str = pydantic.Field(description="The ID of the Google Drive folder to read from.")
-    service_account_key: typing.Dict[str, typing.Any] = pydantic.Field(
-        description="The service account key JSON to use for authentication."
-    )
+    service_account_key: typing.Optional[typing.Dict[str, typing.Any]]
     class_name: typing.Optional[str]

     def json(self, **kwargs: typing.Any) -> str:
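
With service_account_key now optional, a data source can be declared without inlining credentials; a minimal sketch (the folder ID is a placeholder):

from llama_cloud.types.cloud_google_drive_data_source import CloudGoogleDriveDataSource

# In 0.1.25 the service account key JSON was required; in 0.1.27 it may be omitted.
source = CloudGoogleDriveDataSource(folder_id="1AbCdEfGhIjKlMnOp")
print(source.dict())  # {'folder_id': '1AbCdEfGhIjKlMnOp'}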
llama_cloud/types/extract_config.py
@@ -5,6 +5,7 @@ import typing

 from ..core.datetime_utils import serialize_datetime
 from .document_chunk_mode import DocumentChunkMode
+from .extract_config_priority import ExtractConfigPriority
 from .extract_mode import ExtractMode
 from .extract_target import ExtractTarget

@@ -22,6 +23,7 @@ class ExtractConfig(pydantic.BaseModel):
     Additional parameters for the extraction agent.
     """

+    priority: typing.Optional[ExtractConfigPriority]
     extraction_target: typing.Optional[ExtractTarget] = pydantic.Field(description="The extraction target specified.")
     extraction_mode: typing.Optional[ExtractMode] = pydantic.Field(description="The extraction mode specified.")
     multimodal_fast_mode: typing.Optional[bool] = pydantic.Field(
llama_cloud/types/extract_config_priority.py
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ExtractConfigPriority(str, enum.Enum):
+    LOW = "low"
+    MEDIUM = "medium"
+    HIGH = "high"
+    CRITICAL = "critical"
+
+    def visit(
+        self,
+        low: typing.Callable[[], T_Result],
+        medium: typing.Callable[[], T_Result],
+        high: typing.Callable[[], T_Result],
+        critical: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ExtractConfigPriority.LOW:
+            return low()
+        if self is ExtractConfigPriority.MEDIUM:
+            return medium()
+        if self is ExtractConfigPriority.HIGH:
+            return high()
+        if self is ExtractConfigPriority.CRITICAL:
+            return critical()
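
A sketch combining the two ExtractConfig changes above, assuming the fields not shown in the hunks are likewise optional:

from llama_cloud.types.extract_config import ExtractConfig
from llama_cloud.types.extract_config_priority import ExtractConfigPriority

# priority is a new optional knob on the extraction agent config.
config = ExtractConfig(priority=ExtractConfigPriority.HIGH)

# As a str-based enum, the priority serializes as its plain value.
print(config.json())  # {"priority": "high"}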
llama_cloud/types/extract_models.py
@@ -9,10 +9,12 @@ T_Result = typing.TypeVar("T_Result")
 class ExtractModels(str, enum.Enum):
     GPT_41 = "gpt-4.1"
     GPT_41_MINI = "gpt-4.1-mini"
+    GPT_41_NANO = "gpt-4.1-nano"
     GEMINI_20_FLASH = "gemini-2.0-flash"
     O_3_MINI = "o3-mini"
     GEMINI_25_FLASH = "gemini-2.5-flash"
     GEMINI_25_PRO = "gemini-2.5-pro"
+    GEMINI_25_FLASH_LITE_001 = "gemini-2.5-flash-lite-001"
     GPT_4_O = "gpt-4o"
     GPT_4_O_MINI = "gpt-4o-mini"

@@ -20,10 +22,12 @@ class ExtractModels(str, enum.Enum):
         self,
         gpt_41: typing.Callable[[], T_Result],
         gpt_41_mini: typing.Callable[[], T_Result],
+        gpt_41_nano: typing.Callable[[], T_Result],
         gemini_20_flash: typing.Callable[[], T_Result],
         o_3_mini: typing.Callable[[], T_Result],
         gemini_25_flash: typing.Callable[[], T_Result],
         gemini_25_pro: typing.Callable[[], T_Result],
+        gemini_25_flash_lite_001: typing.Callable[[], T_Result],
         gpt_4_o: typing.Callable[[], T_Result],
         gpt_4_o_mini: typing.Callable[[], T_Result],
     ) -> T_Result:
@@ -31,6 +35,8 @@ class ExtractModels(str, enum.Enum):
             return gpt_41()
         if self is ExtractModels.GPT_41_MINI:
             return gpt_41_mini()
+        if self is ExtractModels.GPT_41_NANO:
+            return gpt_41_nano()
         if self is ExtractModels.GEMINI_20_FLASH:
             return gemini_20_flash()
         if self is ExtractModels.O_3_MINI:
@@ -39,6 +45,8 @@ class ExtractModels(str, enum.Enum):
             return gemini_25_flash()
         if self is ExtractModels.GEMINI_25_PRO:
             return gemini_25_pro()
+        if self is ExtractModels.GEMINI_25_FLASH_LITE_001:
+            return gemini_25_flash_lite_001()
         if self is ExtractModels.GPT_4_O:
             return gpt_4_o()
         if self is ExtractModels.GPT_4_O_MINI:
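
The two added model names are addressable like any other member of the str-based enum; a quick sketch:

from llama_cloud.types.extract_models import ExtractModels

# New members in this release.
print(ExtractModels.GPT_41_NANO.value)               # gpt-4.1-nano
print(ExtractModels.GEMINI_25_FLASH_LITE_001.value)  # gemini-2.5-flash-lite-001

# Note that existing visit() call sites must now also pass gpt_41_nano= and
# gemini_25_flash_lite_001= callbacks, since visit() takes one callback per member.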
llama_cloud/types/extract_schema_generate_response.py
@@ -0,0 +1,38 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .extract_schema_generate_response_data_schema_value import ExtractSchemaGenerateResponseDataSchemaValue
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ExtractSchemaGenerateResponse(pydantic.BaseModel):
+    """
+    Response schema for schema generation.
+    """
+
+    data_schema: typing.Dict[str, typing.Optional[ExtractSchemaGenerateResponseDataSchemaValue]] = pydantic.Field(
+        description="The generated JSON schema"
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/extract_schema_generate_response_data_schema_value.py
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+ExtractSchemaGenerateResponseDataSchemaValue = typing.Union[
+    typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool
+]
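
A sketch of the new response model with a hypothetical generated schema as payload; per the alias above, data_schema values may be nested dicts, lists, or JSON scalars:

from llama_cloud.types.extract_schema_generate_response import ExtractSchemaGenerateResponse

response = ExtractSchemaGenerateResponse(
    data_schema={
        "type": "object",
        "properties": {"invoice_number": {"type": "string"}},
        "required": ["invoice_number"],
    }
)
print(response.json())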
llama_cloud/types/input_message.py
@@ -4,7 +4,7 @@ import datetime as dt
 import typing

 from ..core.datetime_utils import serialize_datetime
-from .message_role import MessageRole
+from .llama_index_core_base_llms_types_message_role import LlamaIndexCoreBaseLlmsTypesMessageRole

 try:
     import pydantic
@@ -21,7 +21,7 @@ class InputMessage(pydantic.BaseModel):
     """

     id: typing.Optional[str] = pydantic.Field(description="ID of the message, if any. a UUID.")
-    role: MessageRole
+    role: LlamaIndexCoreBaseLlmsTypesMessageRole
     content: str
     data: typing.Optional[typing.Dict[str, typing.Any]]
     class_name: typing.Optional[str]
llama_cloud/types/legacy_parse_job_config.py
@@ -54,6 +54,7 @@ class LegacyParseJobConfig(pydantic.BaseModel):
     )
     spread_sheet_extract_sub_tables: typing.Optional[bool] = pydantic.Field(alias="spreadSheetExtractSubTables")
     extract_layout: typing.Optional[bool] = pydantic.Field(alias="extractLayout")
+    high_res_ocr: typing.Optional[bool] = pydantic.Field(alias="highResOcr")
     html_make_all_elements_visible: typing.Optional[bool] = pydantic.Field(alias="htmlMakeAllElementsVisible")
     html_remove_fixed_elements: typing.Optional[bool] = pydantic.Field(alias="htmlRemoveFixedElements")
     html_remove_navigation_elements: typing.Optional[bool] = pydantic.Field(alias="htmlRemoveNavigationElements")
llama_cloud/types/llama_index_core_base_llms_types_chat_message.py
@@ -5,7 +5,7 @@ import typing

 from ..core.datetime_utils import serialize_datetime
 from .llama_index_core_base_llms_types_chat_message_blocks_item import LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem
-from .message_role import MessageRole
+from .llama_index_core_base_llms_types_message_role import LlamaIndexCoreBaseLlmsTypesMessageRole

 try:
     import pydantic
@@ -21,7 +21,7 @@ class LlamaIndexCoreBaseLlmsTypesChatMessage(pydantic.BaseModel):
     Chat message.
     """

-    role: typing.Optional[MessageRole]
+    role: typing.Optional[LlamaIndexCoreBaseLlmsTypesMessageRole]
     additional_kwargs: typing.Optional[typing.Dict[str, typing.Any]]
     blocks: typing.Optional[typing.List[LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem]]

llama_cloud/types/{message_role.py → llama_index_core_base_llms_types_message_role.py}
@@ -6,7 +6,7 @@ import typing
 T_Result = typing.TypeVar("T_Result")


-class MessageRole(str, enum.Enum):
+class LlamaIndexCoreBaseLlmsTypesMessageRole(str, enum.Enum):
     """
     Message role.
     """
@@ -31,19 +31,19 @@ class MessageRole(str, enum.Enum):
         chatbot: typing.Callable[[], T_Result],
         model: typing.Callable[[], T_Result],
     ) -> T_Result:
-        if self is MessageRole.SYSTEM:
+        if self is LlamaIndexCoreBaseLlmsTypesMessageRole.SYSTEM:
             return system()
-        if self is MessageRole.DEVELOPER:
+        if self is LlamaIndexCoreBaseLlmsTypesMessageRole.DEVELOPER:
             return developer()
-        if self is MessageRole.USER:
+        if self is LlamaIndexCoreBaseLlmsTypesMessageRole.USER:
             return user()
-        if self is MessageRole.ASSISTANT:
+        if self is LlamaIndexCoreBaseLlmsTypesMessageRole.ASSISTANT:
             return assistant()
-        if self is MessageRole.FUNCTION:
+        if self is LlamaIndexCoreBaseLlmsTypesMessageRole.FUNCTION:
             return function()
-        if self is MessageRole.TOOL:
+        if self is LlamaIndexCoreBaseLlmsTypesMessageRole.TOOL:
             return tool()
-        if self is MessageRole.CHATBOT:
+        if self is LlamaIndexCoreBaseLlmsTypesMessageRole.CHATBOT:
             return chatbot()
-        if self is MessageRole.MODEL:
+        if self is LlamaIndexCoreBaseLlmsTypesMessageRole.MODEL:
             return model()
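
Downstream code that imported MessageRole directly from its module needs the renamed import; roughly (old path from the 0.1.25 file list, new path from this hunk):

# 0.1.25:
# from llama_cloud.types.message_role import MessageRole

# 0.1.27:
from llama_cloud.types.llama_index_core_base_llms_types_message_role import (
    LlamaIndexCoreBaseLlmsTypesMessageRole,
)

# The enum members themselves are unchanged by this hunk, e.g.:
assert LlamaIndexCoreBaseLlmsTypesMessageRole.USER.value == "user"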
llama_cloud/types/llama_parse_parameters.py
@@ -5,6 +5,7 @@ import typing

 from ..core.datetime_utils import serialize_datetime
 from .fail_page_mode import FailPageMode
+from .llama_parse_parameters_priority import LlamaParseParametersPriority
 from .parser_languages import ParserLanguages
 from .parsing_mode import ParsingMode

@@ -22,6 +23,7 @@ class LlamaParseParameters(pydantic.BaseModel):
     Settings that can be configured for how to use LlamaParse to parse files within a LlamaCloud pipeline.
     """

+    priority: typing.Optional[LlamaParseParametersPriority]
     languages: typing.Optional[typing.List[ParserLanguages]]
     parsing_instruction: typing.Optional[str]
     disable_ocr: typing.Optional[bool]
@@ -41,6 +43,7 @@ class LlamaParseParameters(pydantic.BaseModel):
     gpt_4_o_api_key: typing.Optional[str] = pydantic.Field(alias="gpt4o_api_key")
     do_not_unroll_columns: typing.Optional[bool]
     extract_layout: typing.Optional[bool]
+    high_res_ocr: typing.Optional[bool]
     html_make_all_elements_visible: typing.Optional[bool]
     html_remove_navigation_elements: typing.Optional[bool]
     html_remove_fixed_elements: typing.Optional[bool]
llama_cloud/types/llama_parse_parameters_priority.py
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class LlamaParseParametersPriority(str, enum.Enum):
+    LOW = "low"
+    MEDIUM = "medium"
+    HIGH = "high"
+    CRITICAL = "critical"
+
+    def visit(
+        self,
+        low: typing.Callable[[], T_Result],
+        medium: typing.Callable[[], T_Result],
+        high: typing.Callable[[], T_Result],
+        critical: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is LlamaParseParametersPriority.LOW:
+            return low()
+        if self is LlamaParseParametersPriority.MEDIUM:
+            return medium()
+        if self is LlamaParseParametersPriority.HIGH:
+            return high()
+        if self is LlamaParseParametersPriority.CRITICAL:
+            return critical()
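
Taken together with the llama_parse_parameters.py hunks above, the new knobs can be set like this (a sketch, assuming the parameters not shown in the hunks keep optional defaults):

from llama_cloud.types.llama_parse_parameters import LlamaParseParameters
from llama_cloud.types.llama_parse_parameters_priority import LlamaParseParametersPriority

params = LlamaParseParameters(
    priority=LlamaParseParametersPriority.CRITICAL,  # new in 0.1.27
    high_res_ocr=True,                               # new in 0.1.27
)
print(params.json())  # {"priority": "critical", "high_res_ocr": true}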
llama_cloud/types/message.py
@@ -0,0 +1,38 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .app_schema_responses_message_role import AppSchemaResponsesMessageRole
+from .text_content_block import TextContentBlock
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class Message(pydantic.BaseModel):
+    role: AppSchemaResponsesMessageRole = pydantic.Field(
+        description="Role of the message in the conversation (system, user, assistant)"
+    )
+    blocks: typing.List[TextContentBlock] = pydantic.Field(
+        description="Content of the message. Can be an input or output content block."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/metadata_filter.py
@@ -20,7 +20,7 @@ class MetadataFilter(pydantic.BaseModel):
     """
     Comprehensive metadata filter for vector stores to support more operators.

-    Value uses Strict\* types, as int, float and str are compatible types and were all
+    Value uses Strict types, as int, float and str are compatible types and were all
     converted to string before.

     See: https://docs.pydantic.dev/latest/usage/types/#strict-types
llama_cloud/types/model_configuration.py
@@ -0,0 +1,39 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .supported_llm_model_names import SupportedLlmModelNames
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ModelConfiguration(pydantic.BaseModel):
+    """
+    Configuration for the language model used in response generation.
+    """
+
+    model_name: typing.Optional[SupportedLlmModelNames] = pydantic.Field(
+        description="The name of the model to use for LLM completions."
+    )
+    temperature: typing.Optional[float] = pydantic.Field(description="The temperature to use for LLM completions.")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
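
Both fields are optional, so a partial configuration is valid; a minimal sketch (a model_name value would come from the SupportedLlmModelNames enum, which is not shown in this diff):

from llama_cloud.types.model_configuration import ModelConfiguration

# Only temperature is set; model_name stays unset and is dropped from the
# serialized output because exclude_unset=True is the default in json()/dict().
config = ModelConfiguration(temperature=0.2)
print(config.json())  # {"temperature": 0.2}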
llama_cloud/types/parse_job_config.py
@@ -5,6 +5,7 @@ import typing

 from ..core.datetime_utils import serialize_datetime
 from .fail_page_mode import FailPageMode
+from .parse_job_config_priority import ParseJobConfigPriority
 from .parser_languages import ParserLanguages
 from .parsing_mode import ParsingMode

@@ -22,6 +23,7 @@ class ParseJobConfig(pydantic.BaseModel):
     Configuration for llamaparse job
     """

+    priority: typing.Optional[ParseJobConfigPriority]
     custom_metadata: typing.Optional[typing.Dict[str, typing.Any]]
     resource_info: typing.Optional[typing.Dict[str, typing.Any]]
     languages: typing.Optional[typing.List[ParserLanguages]]
@@ -43,6 +45,7 @@ class ParseJobConfig(pydantic.BaseModel):
     gpt_4_o_api_key: typing.Optional[str] = pydantic.Field(alias="gpt4o_api_key")
     do_not_unroll_columns: typing.Optional[bool]
     extract_layout: typing.Optional[bool]
+    high_res_ocr: typing.Optional[bool]
     html_make_all_elements_visible: typing.Optional[bool]
     html_remove_navigation_elements: typing.Optional[bool]
     html_remove_fixed_elements: typing.Optional[bool]
llama_cloud/types/parse_job_config_priority.py
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ParseJobConfigPriority(str, enum.Enum):
+    LOW = "low"
+    MEDIUM = "medium"
+    HIGH = "high"
+    CRITICAL = "critical"
+
+    def visit(
+        self,
+        low: typing.Callable[[], T_Result],
+        medium: typing.Callable[[], T_Result],
+        high: typing.Callable[[], T_Result],
+        critical: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ParseJobConfigPriority.LOW:
+            return low()
+        if self is ParseJobConfigPriority.MEDIUM:
+            return medium()
+        if self is ParseJobConfigPriority.HIGH:
+            return high()
+        if self is ParseJobConfigPriority.CRITICAL:
+            return critical()
llama_cloud/types/text_content_block.py
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import typing_extensions
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class TextContentBlock(pydantic.BaseModel):
+    type: typing.Optional[typing_extensions.Literal["text"]]
+    content: typing.Optional[str] = pydantic.Field(description="Content of the text block")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
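
Putting the new responses types together (Message, TextContentBlock, AppSchemaResponsesMessageRole); a sketch with illustrative content:

from llama_cloud.types.app_schema_responses_message_role import AppSchemaResponsesMessageRole
from llama_cloud.types.message import Message
from llama_cloud.types.text_content_block import TextContentBlock

# A single-block user message; "text" is the only literal accepted for
# TextContentBlock.type, per the definition above.
message = Message(
    role=AppSchemaResponsesMessageRole.USER,
    blocks=[TextContentBlock(type="text", content="Summarize the attached invoice.")],
)
print(message.json())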
{llama_cloud-0.1.25.dist-info → llama_cloud-0.1.27.dist-info}/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: llama-cloud
-Version: 0.1.25
+Version: 0.1.27
 Summary:
 License: MIT
 Author: Logan Markewich
@@ -13,6 +13,7 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: certifi (>=2024.7.4)
 Requires-Dist: httpx (>=0.20.0)
 Requires-Dist: pydantic (>=1.10)