llama-cloud 0.1.35__py3-none-any.whl → 0.1.37__py3-none-any.whl

This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.

Potentially problematic release.

Files changed (38)
  1. llama_cloud/__init__.py +32 -4
  2. llama_cloud/resources/__init__.py +1 -3
  3. llama_cloud/resources/admin/client.py +108 -0
  4. llama_cloud/resources/beta/client.py +460 -0
  5. llama_cloud/resources/classifier/client.py +231 -181
  6. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +2 -0
  7. llama_cloud/resources/files/__init__.py +2 -2
  8. llama_cloud/resources/files/client.py +15 -73
  9. llama_cloud/resources/files/types/__init__.py +1 -3
  10. llama_cloud/resources/llama_extract/client.py +96 -4
  11. llama_cloud/types/__init__.py +34 -2
  12. llama_cloud/types/classification_result.py +4 -5
  13. llama_cloud/types/classifier_rule.py +43 -0
  14. llama_cloud/types/classify_job.py +45 -0
  15. llama_cloud/types/{classify_response.py → classify_job_results.py} +3 -6
  16. llama_cloud/types/classify_job_with_status.py +47 -0
  17. llama_cloud/types/classify_parsing_configuration.py +38 -0
  18. llama_cloud/types/cloud_astra_db_vector_store.py +51 -0
  19. llama_cloud/types/cloud_confluence_data_source.py +15 -0
  20. llama_cloud/types/configurable_data_sink_names.py +4 -0
  21. llama_cloud/types/data_sink_component.py +2 -0
  22. llama_cloud/types/data_sink_create_component.py +2 -0
  23. llama_cloud/types/failure_handling_config.py +37 -0
  24. llama_cloud/types/file_classification.py +41 -0
  25. llama_cloud/types/file_create.py +41 -0
  26. llama_cloud/types/file_filter.py +40 -0
  27. llama_cloud/types/file_query_response.py +38 -0
  28. llama_cloud/types/file_store_info_response.py +34 -0
  29. llama_cloud/types/file_store_info_response_status.py +25 -0
  30. llama_cloud/types/llama_extract_mode_availability.py +37 -0
  31. llama_cloud/types/llama_extract_mode_availability_status.py +17 -0
  32. llama_cloud/types/supported_llm_model_names.py +12 -0
  33. {llama_cloud-0.1.35.dist-info → llama_cloud-0.1.37.dist-info}/METADATA +1 -1
  34. {llama_cloud-0.1.35.dist-info → llama_cloud-0.1.37.dist-info}/RECORD +38 -24
  35. /llama_cloud/{resources/files/types → types}/file_create_permission_info_value.py +0 -0
  36. /llama_cloud/{resources/files/types → types}/file_create_resource_info_value.py +0 -0
  37. {llama_cloud-0.1.35.dist-info → llama_cloud-0.1.37.dist-info}/LICENSE +0 -0
  38. {llama_cloud-0.1.35.dist-info → llama_cloud-0.1.37.dist-info}/WHEEL +0 -0
llama_cloud/types/cloud_astra_db_vector_store.py
@@ -0,0 +1,51 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class CloudAstraDbVectorStore(pydantic.BaseModel):
+    """
+    Cloud AstraDB Vector Store.
+
+    This class is used to store the configuration for an AstraDB vector store, so that it can be
+    created and used in LlamaCloud.
+
+    Args:
+        token (str): The Astra DB Application Token to use.
+        api_endpoint (str): The Astra DB JSON API endpoint for your database.
+        collection_name (str): Collection name to use. If not existing, it will be created.
+        embedding_dimension (int): Length of the embedding vectors in use.
+        keyspace (optional[str]): The keyspace to use. If not provided, 'default_keyspace'
+    """
+
+    supports_nested_metadata_filters: typing.Optional[bool]
+    token: str = pydantic.Field(description="The Astra DB Application Token to use")
+    api_endpoint: str = pydantic.Field(description="The Astra DB JSON API endpoint for your database")
+    collection_name: str = pydantic.Field(description="Collection name to use. If not existing, it will be created")
+    embedding_dimension: int = pydantic.Field(description="Length of the embedding vectors in use")
+    keyspace: typing.Optional[str]
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
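
For orientation, a minimal sketch of constructing the new sink model — the credential values are placeholders, and the top-level import assumes the type is re-exported from `llama_cloud`, as the expanded `llama_cloud/__init__.py` in the file list suggests:

```python
from llama_cloud import CloudAstraDbVectorStore

# Placeholder credentials; real values come from the Astra DB dashboard.
store = CloudAstraDbVectorStore(
    token="AstraCS:placeholder",
    api_endpoint="https://01234567-89ab-cdef-0123-456789abcdef-us-east1.apps.astra.datastax.com",
    collection_name="documents",
    embedding_dimension=1536,
)

# With exclude_unset=True (see json() above), untouched optional fields
# such as keyspace and class_name are omitted from the payload.
print(store.json())
```
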
llama_cloud/types/cloud_confluence_data_source.py
@@ -4,6 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from .failure_handling_config import FailureHandlingConfig
 
 try:
     import pydantic
@@ -28,6 +29,20 @@ class CloudConfluenceDataSource(pydantic.BaseModel):
     label: typing.Optional[str]
     index_restricted_pages: typing.Optional[bool] = pydantic.Field(description="Whether to index restricted pages.")
     keep_markdown_format: typing.Optional[bool] = pydantic.Field(description="Whether to keep the markdown format.")
+    failure_handling: typing.Optional[FailureHandlingConfig] = pydantic.Field(
+        description=(
+            "Configuration for handling failures during processing. Key-value object controlling failure handling behaviors.\n"
+            "\n"
+            "Example:\n"
+            "{\n"
+            '"skip_list_failures": true\n'
+            "}\n"
+            "\n"
+            "Currently supports:\n"
+            "\n"
+            "- skip_list_failures: Skip failed batches/lists and continue processing\n"
+        )
+    )
     class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
llama_cloud/types/configurable_data_sink_names.py
@@ -13,6 +13,7 @@ class ConfigurableDataSinkNames(str, enum.Enum):
     AZUREAI_SEARCH = "AZUREAI_SEARCH"
     MONGODB_ATLAS = "MONGODB_ATLAS"
     MILVUS = "MILVUS"
+    ASTRA_DB = "ASTRA_DB"
 
     def visit(
         self,
@@ -22,6 +23,7 @@ class ConfigurableDataSinkNames(str, enum.Enum):
         azureai_search: typing.Callable[[], T_Result],
         mongodb_atlas: typing.Callable[[], T_Result],
         milvus: typing.Callable[[], T_Result],
+        astra_db: typing.Callable[[], T_Result],
     ) -> T_Result:
         if self is ConfigurableDataSinkNames.PINECONE:
             return pinecone()
@@ -35,3 +37,5 @@ class ConfigurableDataSinkNames(str, enum.Enum):
             return mongodb_atlas()
         if self is ConfigurableDataSinkNames.MILVUS:
             return milvus()
+        if self is ConfigurableDataSinkNames.ASTRA_DB:
+            return astra_db()
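
Because the members subclass `str`, the new value compares equal to its raw wire string; note also that `visit()` now requires an `astra_db` callback, which is a source-breaking change for existing callers of `visit()`. A quick check (assuming the enum is re-exported at the package top level):

```python
from llama_cloud import ConfigurableDataSinkNames

# str-subclass enum: the member compares equal to its wire value.
assert ConfigurableDataSinkNames.ASTRA_DB == "ASTRA_DB"
```
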
llama_cloud/types/data_sink_component.py
@@ -2,6 +2,7 @@
 
 import typing
 
+from .cloud_astra_db_vector_store import CloudAstraDbVectorStore
 from .cloud_azure_ai_search_vector_store import CloudAzureAiSearchVectorStore
 from .cloud_milvus_vector_store import CloudMilvusVectorStore
 from .cloud_mongo_db_atlas_vector_search import CloudMongoDbAtlasVectorSearch
@@ -17,4 +18,5 @@ DataSinkComponent = typing.Union[
     CloudAzureAiSearchVectorStore,
     CloudMongoDbAtlasVectorSearch,
     CloudMilvusVectorStore,
+    CloudAstraDbVectorStore,
 ]
llama_cloud/types/data_sink_create_component.py
@@ -2,6 +2,7 @@
 
 import typing
 
+from .cloud_astra_db_vector_store import CloudAstraDbVectorStore
 from .cloud_azure_ai_search_vector_store import CloudAzureAiSearchVectorStore
 from .cloud_milvus_vector_store import CloudMilvusVectorStore
 from .cloud_mongo_db_atlas_vector_search import CloudMongoDbAtlasVectorSearch
@@ -17,4 +18,5 @@ DataSinkCreateComponent = typing.Union[
     CloudAzureAiSearchVectorStore,
     CloudMongoDbAtlasVectorSearch,
     CloudMilvusVectorStore,
+    CloudAstraDbVectorStore,
 ]
llama_cloud/types/failure_handling_config.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class FailureHandlingConfig(pydantic.BaseModel):
+    """
+    Configuration for handling different types of failures during data source processing.
+    """
+
+    skip_list_failures: typing.Optional[bool] = pydantic.Field(
+        description="Whether to skip failed batches/lists and continue processing"
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
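
A small sketch of how the config serializes — the top-level import is an assumption based on the expanded `types/__init__.py`:

```python
from llama_cloud import FailureHandlingConfig

# Skip failed listing batches instead of failing the whole sync.
config = FailureHandlingConfig(skip_list_failures=True)

# json() applies exclude_unset=True, so this prints exactly
# {"skip_list_failures": true}, matching the example in the
# failure_handling field description above.
print(config.json())
```

This is the same object that `CloudConfluenceDataSource` now accepts in its `failure_handling` field.
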
llama_cloud/types/file_classification.py
@@ -0,0 +1,41 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .classification_result import ClassificationResult
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class FileClassification(pydantic.BaseModel):
+    """
+    A file classification.
+    """
+
+    id: str = pydantic.Field(description="Unique identifier")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
+    classify_job_id: str = pydantic.Field(description="The ID of the classify job")
+    file_id: str = pydantic.Field(description="The ID of the classified file")
+    result: typing.Optional[ClassificationResult]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/file_create.py
@@ -0,0 +1,41 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .file_create_permission_info_value import FileCreatePermissionInfoValue
+from .file_create_resource_info_value import FileCreateResourceInfoValue
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class FileCreate(pydantic.BaseModel):
+    name: str = pydantic.Field(
+        description="Name that will be used for created file. If possible, always include the file extension in the name."
+    )
+    external_file_id: typing.Optional[str]
+    file_size: typing.Optional[int]
+    last_modified_at: typing.Optional[dt.datetime]
+    resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]]
+    permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]]
+    data_source_id: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
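
Only `name` is required; the rest default to None under pydantic v1's Optional semantics. A sketch with hypothetical identifiers:

```python
from llama_cloud import FileCreate

record = FileCreate(
    name="quarterly-report.pdf",         # keep the extension, per the field description
    external_file_id="sharepoint-1234",  # hypothetical external ID
    file_size=204_800,                   # bytes
)
```
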
llama_cloud/types/file_filter.py
@@ -0,0 +1,40 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class FileFilter(pydantic.BaseModel):
+    """
+    Filter parameters for file queries.
+    """
+
+    project_id: typing.Optional[str]
+    file_ids: typing.Optional[typing.List[str]]
+    file_name: typing.Optional[str]
+    data_source_id: typing.Optional[str]
+    external_file_id: typing.Optional[str]
+    only_manually_uploaded: typing.Optional[bool]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
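
All filter fields are optional, so callers set only the dimensions they care about. A sketch with hypothetical IDs:

```python
from llama_cloud import FileFilter

# Narrow a query to manually uploaded files with a given name in one project.
file_filter = FileFilter(
    project_id="proj_123",
    file_name="quarterly-report.pdf",
    only_manually_uploaded=True,
)
```
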
llama_cloud/types/file_query_response.py
@@ -0,0 +1,38 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .file import File
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class FileQueryResponse(pydantic.BaseModel):
+    """
+    Response schema for paginated file queries.
+    """
+
+    items: typing.List[File] = pydantic.Field(description="The list of items.")
+    next_page_token: typing.Optional[str]
+    total_size: typing.Optional[int]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
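
The client method that actually returns this response is not among the rendered hunks, so the sketch below stays agnostic: it takes any callable that maps a page token to a `FileQueryResponse` and drains the token-based pagination the schema implies:

```python
import typing

from llama_cloud import File, FileQueryResponse

def iter_all_files(
    query: typing.Callable[[typing.Optional[str]], FileQueryResponse],
) -> typing.Iterator[File]:
    # Follow next_page_token until the server stops returning one.
    token: typing.Optional[str] = None
    while True:
        page = query(token)
        yield from page.items
        token = page.next_page_token
        if token is None:
            return
```
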
llama_cloud/types/file_store_info_response.py
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .file_store_info_response_status import FileStoreInfoResponseStatus
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class FileStoreInfoResponse(pydantic.BaseModel):
+    available_buckets: typing.Optional[typing.Dict[str, str]]
+    unavailable_buckets: typing.Optional[typing.Dict[str, str]]
+    status: FileStoreInfoResponseStatus
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/file_store_info_response_status.py
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class FileStoreInfoResponseStatus(str, enum.Enum):
+    OK = "ok"
+    MISSING_BUCKETS = "missing_buckets"
+    MISSING_CREDENTIALS = "missing_credentials"
+
+    def visit(
+        self,
+        ok: typing.Callable[[], T_Result],
+        missing_buckets: typing.Callable[[], T_Result],
+        missing_credentials: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is FileStoreInfoResponseStatus.OK:
+            return ok()
+        if self is FileStoreInfoResponseStatus.MISSING_BUCKETS:
+            return missing_buckets()
+        if self is FileStoreInfoResponseStatus.MISSING_CREDENTIALS:
+            return missing_credentials()
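
The generated `visit()` method takes one callback per member, so a match is always exhaustive — adding a member later breaks callers loudly instead of falling through silently. A sketch (assuming the enum is re-exported at the top level):

```python
from llama_cloud import FileStoreInfoResponseStatus

status = FileStoreInfoResponseStatus.MISSING_CREDENTIALS

message = status.visit(
    ok=lambda: "file store healthy",
    missing_buckets=lambda: "create the expected buckets",
    missing_credentials=lambda: "configure storage credentials",
)
print(message)  # configure storage credentials
```
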
llama_cloud/types/llama_extract_mode_availability.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .llama_extract_mode_availability_status import LlamaExtractModeAvailabilityStatus
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class LlamaExtractModeAvailability(pydantic.BaseModel):
+    mode: str
+    status: LlamaExtractModeAvailabilityStatus
+    parse_mode: str
+    parse_models: typing.List[str]
+    extract_models: typing.List[str]
+    missing_models: typing.Optional[typing.List[str]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
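
A sketch of filtering for usable extract modes with the companion status enum:

```python
import typing

from llama_cloud import LlamaExtractModeAvailability, LlamaExtractModeAvailabilityStatus

def available_modes(
    modes: typing.List[LlamaExtractModeAvailability],
) -> typing.List[str]:
    # Keep only modes whose required parse/extract models are all present.
    return [m.mode for m in modes if m.status is LlamaExtractModeAvailabilityStatus.AVAILABLE]
```
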
llama_cloud/types/llama_extract_mode_availability_status.py
@@ -0,0 +1,17 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class LlamaExtractModeAvailabilityStatus(str, enum.Enum):
+    AVAILABLE = "available"
+    UNAVAILABLE = "unavailable"
+
+    def visit(self, available: typing.Callable[[], T_Result], unavailable: typing.Callable[[], T_Result]) -> T_Result:
+        if self is LlamaExtractModeAvailabilityStatus.AVAILABLE:
+            return available()
+        if self is LlamaExtractModeAvailabilityStatus.UNAVAILABLE:
+            return unavailable()
llama_cloud/types/supported_llm_model_names.py
@@ -14,6 +14,9 @@ class SupportedLlmModelNames(str, enum.Enum):
     GPT_4_1_MINI = "GPT_4_1_MINI"
     AZURE_OPENAI_GPT_4_O = "AZURE_OPENAI_GPT_4O"
     AZURE_OPENAI_GPT_4_O_MINI = "AZURE_OPENAI_GPT_4O_MINI"
+    AZURE_OPENAI_GPT_4_1 = "AZURE_OPENAI_GPT_4_1"
+    AZURE_OPENAI_GPT_4_1_MINI = "AZURE_OPENAI_GPT_4_1_MINI"
+    AZURE_OPENAI_GPT_4_1_NANO = "AZURE_OPENAI_GPT_4_1_NANO"
     CLAUDE_3_5_SONNET = "CLAUDE_3_5_SONNET"
     BEDROCK_CLAUDE_3_5_SONNET_V_1 = "BEDROCK_CLAUDE_3_5_SONNET_V1"
     BEDROCK_CLAUDE_3_5_SONNET_V_2 = "BEDROCK_CLAUDE_3_5_SONNET_V2"
@@ -28,6 +31,9 @@ class SupportedLlmModelNames(str, enum.Enum):
         gpt_4_1_mini: typing.Callable[[], T_Result],
         azure_openai_gpt_4_o: typing.Callable[[], T_Result],
         azure_openai_gpt_4_o_mini: typing.Callable[[], T_Result],
+        azure_openai_gpt_4_1: typing.Callable[[], T_Result],
+        azure_openai_gpt_4_1_mini: typing.Callable[[], T_Result],
+        azure_openai_gpt_4_1_nano: typing.Callable[[], T_Result],
         claude_3_5_sonnet: typing.Callable[[], T_Result],
         bedrock_claude_3_5_sonnet_v_1: typing.Callable[[], T_Result],
         bedrock_claude_3_5_sonnet_v_2: typing.Callable[[], T_Result],
@@ -47,6 +53,12 @@ class SupportedLlmModelNames(str, enum.Enum):
             return azure_openai_gpt_4_o()
         if self is SupportedLlmModelNames.AZURE_OPENAI_GPT_4_O_MINI:
             return azure_openai_gpt_4_o_mini()
+        if self is SupportedLlmModelNames.AZURE_OPENAI_GPT_4_1:
+            return azure_openai_gpt_4_1()
+        if self is SupportedLlmModelNames.AZURE_OPENAI_GPT_4_1_MINI:
+            return azure_openai_gpt_4_1_mini()
+        if self is SupportedLlmModelNames.AZURE_OPENAI_GPT_4_1_NANO:
+            return azure_openai_gpt_4_1_nano()
         if self is SupportedLlmModelNames.CLAUDE_3_5_SONNET:
             return claude_3_5_sonnet()
         if self is SupportedLlmModelNames.BEDROCK_CLAUDE_3_5_SONNET_V_1:
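
Note the naming convention: the new 4.1 members' names match their wire values exactly, unlike the older GPT-4o members, whose names insert an extra underscore. Worth checking when mapping config strings to members:

```python
from llama_cloud import SupportedLlmModelNames

assert SupportedLlmModelNames.AZURE_OPENAI_GPT_4_1.value == "AZURE_OPENAI_GPT_4_1"
assert SupportedLlmModelNames.AZURE_OPENAI_GPT_4_O.value == "AZURE_OPENAI_GPT_4O"  # name != value
```
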
llama_cloud-0.1.37.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: llama-cloud
-Version: 0.1.35
+Version: 0.1.37
 Summary: 
 License: MIT
 Author: Logan Markewich
llama_cloud-0.1.37.dist-info/RECORD
@@ -1,4 +1,4 @@
-llama_cloud/__init__.py,sha256=F93sJ9Sc7tj6XcFHPCy3X1T4VX2f-IJ0GLEG9NYvk0s,26921
+llama_cloud/__init__.py,sha256=h1hssl79cA7z_YQpPphW_UcHPXLfxyPqV8WqhJwbjUM,27687
 llama_cloud/client.py,sha256=xIC_pTNYLA3AfLE8esqhrzam93LLo7oc6Vrog64Bwzw,6399
 llama_cloud/core/__init__.py,sha256=QJS3CJ2TYP2E1Tge0CS6Z7r8LTNzJHQVX1hD3558eP0,519
 llama_cloud/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
@@ -9,21 +9,21 @@ llama_cloud/core/remove_none_from_dict.py,sha256=8m91FC3YuVem0Gm9_sXhJ2tGvP33owJ
 llama_cloud/environment.py,sha256=feTjOebeFZMrBdnHat4RE5aHlpt-sJm4NhK4ntV1htI,167
 llama_cloud/errors/__init__.py,sha256=pbbVUFtB9LCocA1RMWMMF_RKjsy5YkOKX5BAuE49w6g,170
 llama_cloud/errors/unprocessable_entity_error.py,sha256=FvR7XPlV3Xx5nu8HNlmLhBRdk4so_gCHjYT5PyZe6sM,313
-llama_cloud/resources/__init__.py,sha256=YEJrxAIFcQ6-d8qKlUYidwJqWFVWLKUw4B3gQrn1nKI,4429
+llama_cloud/resources/__init__.py,sha256=PYMjBpAdnSZdd_tF9vTFuPbD4fyUDnhALjawaJ393H0,4297
 llama_cloud/resources/admin/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-llama_cloud/resources/admin/client.py,sha256=mzA_ezCjugKNmvWCMWEF0Z0k86ErACWov1VtPV1J2tU,3678
+llama_cloud/resources/admin/client.py,sha256=YIYy9kU1_xaE0gkpmZZbCgLzZj6XSrAUplS7S2uWmwM,8536
 llama_cloud/resources/agent_deployments/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/agent_deployments/client.py,sha256=3EOzOjmRs4KISgJ566enq3FCuN3YtskjO0OHqQGtkQ0,6122
 llama_cloud/resources/beta/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-llama_cloud/resources/beta/client.py,sha256=KMveY6Uj_lurX9DcY198GoOW7rhww_emrvHFHHD4W7o,46846
+llama_cloud/resources/beta/client.py,sha256=_GNkHQxyZxhZOkLIRzfCw6PexQx-E8r_7R-3Wd9Y0uE,63128
 llama_cloud/resources/chat_apps/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/chat_apps/client.py,sha256=orSI8rpQbUwVEToolEeiEi5Qe--suXFvfu6D9JDii5I,23595
 llama_cloud/resources/classifier/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-llama_cloud/resources/classifier/client.py,sha256=EJyTdjuKhESP1Ew_kEOP_GUz2o1I_Zh2xnGyjJkA5iI,11804
+llama_cloud/resources/classifier/client.py,sha256=4oiCJVFPl1reWlXk_wEoX_r5_FeSc2dH_MKQyX8Xzro,13650
 llama_cloud/resources/data_sinks/__init__.py,sha256=ZHUjn3HbKhq_7QS1q74r2m5RGKF5lxcvF2P6pGvpcis,147
 llama_cloud/resources/data_sinks/client.py,sha256=GpD6FhbGqkg2oUToyMG6J8hPxG_iG7W5ZJRo0qg3yzk,20639
 llama_cloud/resources/data_sinks/types/__init__.py,sha256=M1aTcufJwiEZo9B0KmYj9PfkSd6I1ooFt9tpIRGwgg8,168
-llama_cloud/resources/data_sinks/types/data_sink_update_component.py,sha256=EWbsPt3k_w_vySf01iiFanyN7UVNzSOM3weHzx-Y_rk,809
+llama_cloud/resources/data_sinks/types/data_sink_update_component.py,sha256=ynPdEg844hZaD6EcAK0jrMY_vogtvmLTZ7FZSwWcor8,912
 llama_cloud/resources/data_sources/__init__.py,sha256=McURkcNBGHXH1hmRDRmZI1dRzJrekCTHZsgv03r2oZI,227
 llama_cloud/resources/data_sources/client.py,sha256=SZFm8bW5nkaXringdSnmxHqvVjKM7cNNOtqVXjgTKhc,21855
 llama_cloud/resources/data_sources/types/__init__.py,sha256=Cd5xEECTzXqQSfJALfJPSjudlSLeb3RENeJVi8vwPbM,303
@@ -35,16 +35,14 @@ llama_cloud/resources/embedding_model_configs/types/__init__.py,sha256=6-rcDwJhw
 llama_cloud/resources/embedding_model_configs/types/embedding_model_config_create_embedding_config.py,sha256=SQCHJk0AmBbKS5XKdcEJxhDhIMLQCmCI13IHC28v7vQ,3054
 llama_cloud/resources/evals/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/evals/client.py,sha256=v2AyeQV0hVgC6xoP2gJNgneJMaeXALV1hIeirYGxlPw,3242
-llama_cloud/resources/files/__init__.py,sha256=3B0SNM8EE6PddD5LpxYllci9vflEXy1xjPzhEEd-OUk,293
-llama_cloud/resources/files/client.py,sha256=41iMAtvSIz019jGFJ5aBVG-Haxho_bUYKBavBdFYV2I,59400
-llama_cloud/resources/files/types/__init__.py,sha256=EPYENAwkjBWv1MLf8s7R5-RO-cxZ_8NPrqfR4ZoR7jY,418
+llama_cloud/resources/files/__init__.py,sha256=Ws53l-S3kyAGFinYPOb9WpN84DtbFn6gLYZtI2akBLQ,169
+llama_cloud/resources/files/client.py,sha256=Crd0IR0cV5fld4jUGAHE8VsIbw7vCYrOIyBTSwDyitA,56242
+llama_cloud/resources/files/types/__init__.py,sha256=ZZuDQsYsxmQ9VwpfN7oqftzGRnFTR2EMYdCa7zARo4g,204
 llama_cloud/resources/files/types/file_create_from_url_resource_info_value.py,sha256=Wc8wFgujOO5pZvbbh2TMMzpa37GKZd14GYNJ9bdq7BE,214
-llama_cloud/resources/files/types/file_create_permission_info_value.py,sha256=KPCFuEaa8NiB85A5MfdXRAQ0poAUTl7Feg6BTfmdWas,209
-llama_cloud/resources/files/types/file_create_resource_info_value.py,sha256=R7Y-CJf7fnbvIqE3xOI5XOrmPwLbVJLC7zpxMu8Zopk,201
 llama_cloud/resources/jobs/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/jobs/client.py,sha256=gv_N8e0lay7cjt6MCwx-Cj4FiCXKhbyCDaWbadaJpgY,6270
 llama_cloud/resources/llama_extract/__init__.py,sha256=V6VZ8hQXwAuvOOZyk43nnbINoDQqEr03AjKQPhYKluk,997
-llama_cloud/resources/llama_extract/client.py,sha256=wXDJy3gIiWgcQaeMXk60AWAExQLVK-s_90mnoEA5oFQ,79256
+llama_cloud/resources/llama_extract/client.py,sha256=B_qhVsk-Qs81qrFOVgWqcvelSB3TLWFJCibnn--3BjE,83096
 llama_cloud/resources/llama_extract/types/__init__.py,sha256=2Iu4w5LXZY2Govr1RzahIfY0b84y658SQjMDtj7rH_0,1497
 llama_cloud/resources/llama_extract/types/extract_agent_create_data_schema.py,sha256=zB31hJQ8hKaIsPkfTWiX5hqsPVFMyyeWEDZ_Aq237jo,305
 llama_cloud/resources/llama_extract/types/extract_agent_create_data_schema_zero_value.py,sha256=xoyXH3f0Y5beMWBxmtXSz6QoB_df_-0QBsYdjBhZnGw,217
@@ -77,7 +75,7 @@ llama_cloud/resources/retrievers/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-
 llama_cloud/resources/retrievers/client.py,sha256=z2LhmA-cZVFzr9P6loeCZYnJbvSIk0QitFeVFp-IyZk,32126
 llama_cloud/resources/users/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/users/client.py,sha256=A2s8e2syQHkkSwPz-Lrt_Zxp1K-8nqJqj5EafE6NWYs,5545
-llama_cloud/types/__init__.py,sha256=NH19mNyDXsPsrT7EpEq9A8Wp-aCinsbtf-KwLPfUrJo,32253
+llama_cloud/types/__init__.py,sha256=l9f6mCL1dMOR2kl7HUdKLuHOUOXHHo6sxi2-8ZP8Tx8,33685
 llama_cloud/types/advanced_mode_transform_config.py,sha256=4xCXye0_cPmVS1F8aNTx81sIaEPjQH9kiCCAIoqUzlI,1502
 llama_cloud/types/advanced_mode_transform_config_chunking_config.py,sha256=wYbJnWLpeQDfhmDZz-wJfYzD1iGT5Jcxb9ga3mzUuvk,1983
 llama_cloud/types/advanced_mode_transform_config_segmentation_config.py,sha256=anNGq0F5-IlbIW3kpC8OilzLJnUq5tdIcWHnRnmlYsg,1303
@@ -106,12 +104,17 @@ llama_cloud/types/chat_app.py,sha256=fLuzYkXLq51C_Y23hoLwfmG-OiT7jlyHt2JGe6-f1IA
 llama_cloud/types/chat_app_response.py,sha256=WSKr1KI9_pGTSstr3I53kZ8qb3y87Q4ulh8fR0C7sSU,1784
 llama_cloud/types/chat_data.py,sha256=ZYqVtjXF6qPGajU4IWZu3InpU54TXJwBFiqxBepylP0,1197
 llama_cloud/types/chunk_mode.py,sha256=J4vqAQfQG6PWsIv1Fe_99nVsAfDbv_P81_KVsJ9AkU4,790
-llama_cloud/types/classification_result.py,sha256=aRuD2xfIQQUxGsW1jFA091b4SZFTnDFDrJxv3z0kP5E,1425
-llama_cloud/types/classify_response.py,sha256=qhw71pDfClb9karjfP2cmZHbRBZgm1i6pWUM7r7IF8o,1467
+llama_cloud/types/classification_result.py,sha256=1faExxbtJLoYjy0h0Gl38Shk2idySEOenJBjQlcRpXs,1309
+llama_cloud/types/classifier_rule.py,sha256=-64iBABkQ_IXN8rA77xA6L4xSsj8epTVT9Z1C7ypGx0,1533
+llama_cloud/types/classify_job.py,sha256=tT1_9g_PkBqTkq7xMpsfT4nMICwXXcqyl4ldsfCHICc,1654
+llama_cloud/types/classify_job_results.py,sha256=gasxmGX4D1cJuAY0z8Sm7PuZ1TIeEPYzp6VggWeelko,1279
+llama_cloud/types/classify_job_with_status.py,sha256=DwJIfzuaUoLZ0KAA2dGj6DhZ9FtrUFtZRRrkH0BmekM,1798
+llama_cloud/types/classify_parsing_configuration.py,sha256=FMbDCtz9gGu557WRtiKkMsKfzPWuDF-XKW8Z2DeOL9g,1270
+llama_cloud/types/cloud_astra_db_vector_store.py,sha256=uvPZcMk0QISyMee0n2Z6QapCIejvibY94XWn5gmieO8,2065
 llama_cloud/types/cloud_az_storage_blob_data_source.py,sha256=NT4cYsD1M868_bSJxKM9cvTMtjQtQxKloE4vRv8_lwg,1534
 llama_cloud/types/cloud_azure_ai_search_vector_store.py,sha256=9GTaft7BaKsR9RJQp5dlpbslXUlTMA1AcDdKV1ApfqI,1513
 llama_cloud/types/cloud_box_data_source.py,sha256=9bffCaKGvctSsk9OdTpzzP__O1NDpb9wdvKY2uwjpwY,1470
-llama_cloud/types/cloud_confluence_data_source.py,sha256=ok8BOv51SC4Ia9kX3DC8LuZjnP8hmdy-vqzOrTZek2A,1720
+llama_cloud/types/cloud_confluence_data_source.py,sha256=q-bBwkG5L2QZqZdPDlvgzdz9Fv0jz4nn2Qpea3J0xu0,2303
 llama_cloud/types/cloud_document.py,sha256=Rg_H8lcz2TzxEAIdU-m5mGpkM7s0j1Cn4JHkXYddmGs,1255
 llama_cloud/types/cloud_document_create.py,sha256=fQ1gZAtLCpr-a-sPbMez_5fK9JMU3uyp2tNvIzWNG3U,1278
 llama_cloud/types/cloud_jira_data_source.py,sha256=9R20k8Ne0Bl9X5dgSxpM_IGOFmC70Llz0pJ93rAKRvw,1458
@@ -131,13 +134,13 @@ llama_cloud/types/composite_retrieval_mode.py,sha256=PtN0vQ90xyAJL4vyGRG4lMNOpnJ
 llama_cloud/types/composite_retrieval_result.py,sha256=EulVseVvpK50kto4wQweLO7jJe6l6Ym1erKa4dOl4CU,1801
 llama_cloud/types/composite_retrieved_text_node.py,sha256=eTQ99cdZ2PASff5n4oVV1oaNiS9Ie3AtY_E55kBYpBs,1702
 llama_cloud/types/composite_retrieved_text_node_with_score.py,sha256=o-HvmyjqODc68zYuobtj10_62FMBAKRLfRoTHGDdmxw,1148
-llama_cloud/types/configurable_data_sink_names.py,sha256=0Yk9i8hcNXKCcSKpa5KwsCwy_EDeodqbny7qmF86_lM,1225
+llama_cloud/types/configurable_data_sink_names.py,sha256=eGSnwk5yWffBBc0C3Iuh8RlynGTmRC1hqVv0JlUfbNE,1385
 llama_cloud/types/configurable_data_source_names.py,sha256=mNW71sSgcVhU3kePAOUgRxeqK1Vo7F_J1xIzmYKPRq0,1971
 llama_cloud/types/credit_type.py,sha256=nwSRKDWgHk_msdWitctqtyeZwj5EFd6VLto6NF2yCd4,971
 llama_cloud/types/data_sink.py,sha256=PeexYHHoD8WkVp9WsFtfC-AIWszcgeJUprG1bwC8WsQ,1498
-llama_cloud/types/data_sink_component.py,sha256=uvuxLY3MPDpv_bkT0y-tHSZVPRSHCkDBDHVff-036Dg,749
+llama_cloud/types/data_sink_component.py,sha256=yNX2YbevUd6RIbaAvkB40ttU0VSx2JBF-eCuLB_Au9Y,843
 llama_cloud/types/data_sink_create.py,sha256=dAaFPCwZ5oX0Fbf7ij62dzSaYnrhj3EHmnLnYnw2KgI,1360
-llama_cloud/types/data_sink_create_component.py,sha256=8QfNKSTJV_sQ0nJxlpfh0fBkMTSnQD1DTJR8ZMYaesI,755
+llama_cloud/types/data_sink_create_component.py,sha256=0LWeqGDeQh3cZm2h5_IrSlFoU5VKmIILaOdE1VtPtfc,849
 llama_cloud/types/data_source.py,sha256=QkJsQBlLt7cX0FxYuNF1w9yZw1BnNcGiQTTfMAuxiEM,1852
 llama_cloud/types/data_source_component.py,sha256=QBxAneOFe8crS0z-eFo3gd1siToQ4hYsLdfB4p3ZeVU,974
 llama_cloud/types/data_source_create.py,sha256=s0bAX_GUwiRdrL-PXS9ROrvq3xpmqbqzdMa6thqL2P4,1581
@@ -187,13 +190,22 @@ llama_cloud/types/extract_schema_validate_response_data_schema_value.py,sha256=l
 llama_cloud/types/extract_state.py,sha256=TNeVAXXKZaiM2srlbQlzRSn4_TDpR4xyT_yQhJUxFvk,775
 llama_cloud/types/extract_target.py,sha256=Gt-FNqblzcjdfq1hxsqEjWWu-HNLXdKy4w98nog52Ms,478
 llama_cloud/types/fail_page_mode.py,sha256=n4fgPpiEB5siPoEg0Sux4COg7ElNybjshxDoUihZwRU,786
+llama_cloud/types/failure_handling_config.py,sha256=EmAQW0qm7-JTSYFwhmIWxqkVNWym_AyAJIMEmeI9Cqc,1216
 llama_cloud/types/file.py,sha256=rQXitPRKOYw91nK5qOZ0vpOmIx_MCpRb0g78d9dQs6w,1822
+llama_cloud/types/file_classification.py,sha256=jKzAc_3rg0Usyf3TNr-bI5HZn9zGIj9vYH90RKoDtiY,1418
 llama_cloud/types/file_count_by_status_response.py,sha256=WuorbZvKjDs9Ql1hUiQu4gN5iCm8d6fr92KLyHpRvQU,1356
+llama_cloud/types/file_create.py,sha256=eLUC50CzXOdAR_P2mBtX_R7kGteIVbP1V3LzuP1s0Xs,1629
+llama_cloud/types/file_create_permission_info_value.py,sha256=KPCFuEaa8NiB85A5MfdXRAQ0poAUTl7Feg6BTfmdWas,209
+llama_cloud/types/file_create_resource_info_value.py,sha256=R7Y-CJf7fnbvIqE3xOI5XOrmPwLbVJLC7zpxMu8Zopk,201
 llama_cloud/types/file_data.py,sha256=dH2SNK9ZM-ZH7uKFIfBsk8bVixM33rUr40BdZWFXLhU,1225
+llama_cloud/types/file_filter.py,sha256=VMP_NxXhhyUKInwPTcGPXYO2r5Q17ilds_tXgy6jteo,1257
 llama_cloud/types/file_id_presigned_url.py,sha256=Yr_MGFKbuBEHK4efRSK53fHcoo5bbAKnqQGGhMycUc0,1398
 llama_cloud/types/file_parse_public.py,sha256=sshZ0BcjHMGpuz4ylSurv0K_3ejfPrUGGyDxBHCtdMg,1378
 llama_cloud/types/file_permission_info_value.py,sha256=RyQlNbhvIKS87Ywu7XUaw5jDToZX64M9Wqzu1U_q2Us,197
+llama_cloud/types/file_query_response.py,sha256=e92h6xJoqGPM9VSDy7wnrkQpsaxrVH8YVHzRIgTTl-g,1199
 llama_cloud/types/file_resource_info_value.py,sha256=g6T6ELeLK9jgcvX6r-EuAl_4JkwnyqdS0RRoabMReSU,195
+llama_cloud/types/file_store_info_response.py,sha256=YztOvESSDM52urD0gyO47RPWz-kZEjIpEYSeZYfkCLk,1195
+llama_cloud/types/file_store_info_response_status.py,sha256=UiPdZDEACVuiZ6zqkLnAYJVIxa-TIVwGN6_xF9lt9Xc,778
 llama_cloud/types/filter_condition.py,sha256=YEc-NaZbMha4oZVSKerZ6-gNYriNOZmTHTRMKX-9Ju0,678
 llama_cloud/types/filter_operation.py,sha256=lzyF_LQ-bT_wubU2bSbV6q2oncCE3mypz3D6qkAR86U,1663
 llama_cloud/types/filter_operation_eq.py,sha256=7UQkjycQvUFBvd1KRWfNacXAEgp2eGG6XNej0EikP1M,165
@@ -221,6 +233,8 @@ llama_cloud/types/job_record_with_usage_metrics.py,sha256=iNV2do5TB_0e3PoOz_DJyA
 llama_cloud/types/l_lama_parse_transform_config.py,sha256=YQRJZvKh1Ee2FUyW_N0nqYJoW599qBgH3JCH9SH6YLo,1249
 llama_cloud/types/legacy_parse_job_config.py,sha256=eEPExbkUi9J7lQoY0Fuc2HK_RlhPmO30cMkfjtmmizs,12832
 llama_cloud/types/license_info_response.py,sha256=fE9vcWO8k92SBqb_wOyBu_16C61s72utA-SifEi9iBc,1192
+llama_cloud/types/llama_extract_mode_availability.py,sha256=7XelUrLe9wteCeEnP_shnb485lwKo56A2EZ66bq9HQw,1257
+llama_cloud/types/llama_extract_mode_availability_status.py,sha256=_ildgVCsBdqOLD__qdEjcYxqgKunXhJ_VHUeqjZJX8c,566
 llama_cloud/types/llama_extract_settings.py,sha256=YKhhyUNgqpowTdTx715Uk13GdBsxCUZLVsLi5iYQIiY,2767
 llama_cloud/types/llama_index_core_base_llms_types_chat_message.py,sha256=NelHo-T-ebVMhRKsqE_xV8AJW4c7o6lS0uEQnPsmTwg,1365
 llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py,sha256=-aL8fh-w2Xf4uQs_LHzb3q6LL_onLAcVzCR5yMI4qJw,1571
@@ -356,7 +370,7 @@ llama_cloud/types/status_enum.py,sha256=cUBIlys89E8PUzmVqqawu7qTDF0aRqBwiijOmRDP
 llama_cloud/types/struct_mode.py,sha256=ROicwjXfFmgVU8_xSVxJlnFUzRNKG5VIEF1wYg9uOPU,1020
 llama_cloud/types/struct_parse_conf.py,sha256=3QQBy8VP9JB16d4fTGK_GiU6PUALIOWCN9GYI3in6ic,2439
 llama_cloud/types/supported_llm_model.py,sha256=hubSopFICVNEegbJbtbpK6zRHwFPwUNtrw_NAw_3bfg,1380
-llama_cloud/types/supported_llm_model_names.py,sha256=PXL0gA1lc0GJNzZHnjOscoxHpPW787A8Adh-2egAKo8,2512
+llama_cloud/types/supported_llm_model_names.py,sha256=w2FrfffSwpJflq1EoO6Kw7ViTOZNGX4hf60k0Qf3VLA,3213
 llama_cloud/types/text_block.py,sha256=X154sQkSyposXuRcEWNp_tWcDQ-AI6q_-MfJUN5exP8,958
 llama_cloud/types/text_node.py,sha256=Tq3QmuKC5cIHvC9wAtvhsXl1g2sACs2yJwQ0Uko8GSU,2846
 llama_cloud/types/text_node_relationships_value.py,sha256=qmXURTk1Xg7ZDzRSSV1uDEel0AXRLohND5ioezibHY0,217
@@ -380,7 +394,7 @@ llama_cloud/types/vertex_embedding_mode.py,sha256=yY23FjuWU_DkXjBb3JoKV4SCMqel2B
 llama_cloud/types/vertex_text_embedding.py,sha256=-C4fNCYfFl36ATdBMGFVPpiHIKxjk0KB1ERA2Ec20aU,1932
 llama_cloud/types/webhook_configuration.py,sha256=_Xm15whrWoKNBuCoO5y_NunA-ByhCAYK87LnC4W-Pzg,1350
 llama_cloud/types/webhook_configuration_webhook_events_item.py,sha256=OL3moFO_6hsKZYSBQBsSHmWA0NgLcLJgBPZfABwT60c,2544
-llama_cloud-0.1.35.dist-info/LICENSE,sha256=_iNqtPcw1Ue7dZKwOwgPtbegMUkWVy15hC7bffAdNmY,1067
-llama_cloud-0.1.35.dist-info/METADATA,sha256=-fwtdm5e3opq0GhkQGu4Pkw2RYvWew37uRjstjXmmwY,1194
-llama_cloud-0.1.35.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-llama_cloud-0.1.35.dist-info/RECORD,,
+llama_cloud-0.1.37.dist-info/LICENSE,sha256=_iNqtPcw1Ue7dZKwOwgPtbegMUkWVy15hC7bffAdNmY,1067
+llama_cloud-0.1.37.dist-info/METADATA,sha256=3ZhEF_4jLcePL5FpH8KPjt1q2VOMivGivd20sul-Hps,1194
+llama_cloud-0.1.37.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+llama_cloud-0.1.37.dist-info/RECORD,,