llama-cloud 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Files changed (38)
  1. llama_cloud/__init__.py +64 -0
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/resources/__init__.py +22 -1
  4. llama_cloud/resources/data_sinks/client.py +12 -6
  5. llama_cloud/resources/embedding_model_configs/__init__.py +23 -0
  6. llama_cloud/resources/embedding_model_configs/client.py +360 -0
  7. llama_cloud/resources/embedding_model_configs/types/__init__.py +23 -0
  8. llama_cloud/resources/embedding_model_configs/types/embedding_model_config_create_embedding_config.py +89 -0
  9. llama_cloud/resources/files/__init__.py +2 -2
  10. llama_cloud/resources/files/client.py +265 -34
  11. llama_cloud/resources/files/types/__init__.py +2 -1
  12. llama_cloud/resources/files/types/file_create_from_url_resource_info_value.py +7 -0
  13. llama_cloud/resources/organizations/client.py +65 -0
  14. llama_cloud/resources/parsing/client.py +157 -0
  15. llama_cloud/resources/pipelines/client.py +177 -14
  16. llama_cloud/resources/projects/client.py +71 -0
  17. llama_cloud/types/__init__.py +48 -0
  18. llama_cloud/types/base.py +29 -0
  19. llama_cloud/types/cloud_one_drive_data_source.py +1 -0
  20. llama_cloud/types/cloud_postgres_vector_store.py +1 -1
  21. llama_cloud/types/cloud_sharepoint_data_source.py +1 -0
  22. llama_cloud/types/embedding_model_config.py +43 -0
  23. llama_cloud/types/embedding_model_config_embedding_config.py +89 -0
  24. llama_cloud/types/embedding_model_config_update.py +33 -0
  25. llama_cloud/types/embedding_model_config_update_embedding_config.py +89 -0
  26. llama_cloud/types/interval_usage_and_plan.py +36 -0
  27. llama_cloud/types/llama_parse_parameters.py +10 -0
  28. llama_cloud/types/markdown_node_parser.py +2 -1
  29. llama_cloud/types/paginated_list_pipeline_files_response.py +35 -0
  30. llama_cloud/types/pipeline.py +1 -0
  31. llama_cloud/types/pipeline_create.py +1 -0
  32. llama_cloud/types/pipeline_file.py +1 -0
  33. llama_cloud/types/plan.py +40 -0
  34. llama_cloud/types/usage.py +41 -0
  35. {llama_cloud-0.1.3.dist-info → llama_cloud-0.1.5.dist-info}/METADATA +1 -2
  36. {llama_cloud-0.1.3.dist-info → llama_cloud-0.1.5.dist-info}/RECORD +38 -25
  37. {llama_cloud-0.1.3.dist-info → llama_cloud-0.1.5.dist-info}/WHEEL +1 -1
  38. {llama_cloud-0.1.3.dist-info → llama_cloud-0.1.5.dist-info}/LICENSE +0 -0
llama_cloud/types/embedding_model_config.py
@@ -0,0 +1,43 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .embedding_model_config_embedding_config import EmbeddingModelConfigEmbeddingConfig
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class EmbeddingModelConfig(pydantic.BaseModel):
+    """
+    Schema for an embedding model config.
+    """
+
+    id: str = pydantic.Field(description="Unique identifier")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
+    name: str = pydantic.Field(description="The name of the embedding model config.")
+    embedding_config: EmbeddingModelConfigEmbeddingConfig = pydantic.Field(
+        description="The embedding configuration for the embedding model config."
+    )
+    project_id: str
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
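Every generated model overrides json() and dict() to inject by_alias=True and exclude_unset=True, so serialized payloads use wire-format field names and omit anything the caller never set. A minimal self-contained sketch of that pattern (a stand-in model, not the generated class), using the same pydantic v1/v2 import shim as above:

import typing

try:
    import pydantic
    if pydantic.__version__.startswith("1."):
        raise ImportError
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class Example(pydantic.BaseModel):
    name: str
    created_at: typing.Optional[str]
    doc_id: str = pydantic.Field(alias="docId")

    def json(self, **kwargs: typing.Any) -> str:
        # Same defaults the generated models inject.
        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().json(**kwargs_with_defaults)


# created_at was never set, so it is omitted; doc_id serializes under its alias.
print(Example(name="my-config", docId="d1").json())  # {"name": "my-config", "docId": "d1"}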
llama_cloud/types/embedding_model_config_embedding_config.py
@@ -0,0 +1,89 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig
+from .bedrock_embedding_config import BedrockEmbeddingConfig
+from .cohere_embedding_config import CohereEmbeddingConfig
+from .gemini_embedding_config import GeminiEmbeddingConfig
+from .hugging_face_inference_api_embedding_config import HuggingFaceInferenceApiEmbeddingConfig
+from .open_ai_embedding_config import OpenAiEmbeddingConfig
+from .vertex_ai_embedding_config import VertexAiEmbeddingConfig
+
+
+class EmbeddingModelConfigEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_VertexaiEmbedding(VertexAiEmbeddingConfig):
+    type: typing_extensions.Literal["VERTEXAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+EmbeddingModelConfigEmbeddingConfig = typing.Union[
+    EmbeddingModelConfigEmbeddingConfig_AzureEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_BedrockEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_CohereEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_GeminiEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_HuggingfaceApiEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_OpenaiEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_VertexaiEmbedding,
+]
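Each variant pins type to a distinct Literal, so the union behaves as a tagged union: with smart_union, pydantic matches a raw payload to the variant whose tag agrees. A hedged sketch; the provider config fields are not shown in this diff, so model_name is an assumed field, and the top-level re-export from llama_cloud.types is inferred from the expanded types/__init__.py:

import pydantic  # pydantic v1 API; under pydantic v2 use pydantic.v1 to match the generated models

from llama_cloud.types import EmbeddingModelConfigEmbeddingConfig  # assuming the re-export

# "type" is the discriminating tag; "model_name" is an assumption, not a field shown in this diff.
payload = {"type": "OPENAI_EMBEDDING", "model_name": "text-embedding-3-small"}
config = pydantic.parse_obj_as(EmbeddingModelConfigEmbeddingConfig, payload)
print(type(config).__name__)  # EmbeddingModelConfigEmbeddingConfig_OpenaiEmbedding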
llama_cloud/types/embedding_model_config_update.py
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .embedding_model_config_update_embedding_config import EmbeddingModelConfigUpdateEmbeddingConfig
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class EmbeddingModelConfigUpdate(pydantic.BaseModel):
+    name: typing.Optional[str]
+    embedding_config: typing.Optional[EmbeddingModelConfigUpdateEmbeddingConfig]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/embedding_model_config_update_embedding_config.py
@@ -0,0 +1,89 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig
+from .bedrock_embedding_config import BedrockEmbeddingConfig
+from .cohere_embedding_config import CohereEmbeddingConfig
+from .gemini_embedding_config import GeminiEmbeddingConfig
+from .hugging_face_inference_api_embedding_config import HuggingFaceInferenceApiEmbeddingConfig
+from .open_ai_embedding_config import OpenAiEmbeddingConfig
+from .vertex_ai_embedding_config import VertexAiEmbeddingConfig
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_VertexaiEmbedding(VertexAiEmbeddingConfig):
+    type: typing_extensions.Literal["VERTEXAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+EmbeddingModelConfigUpdateEmbeddingConfig = typing.Union[
+    EmbeddingModelConfigUpdateEmbeddingConfig_AzureEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_BedrockEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_CohereEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_GeminiEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_HuggingfaceApiEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_OpenaiEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_VertexaiEmbedding,
+]
llama_cloud/types/interval_usage_and_plan.py
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .plan import Plan
+from .usage import Usage
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class IntervalUsageAndPlan(pydantic.BaseModel):
+    start_window: typing.Optional[dt.datetime]
+    end_window: typing.Optional[dt.datetime]
+    plan: typing.Optional[Plan]
+    usage: typing.Optional[Usage]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/llama_parse_parameters.py
@@ -25,6 +25,7 @@ class LlamaParseParameters(pydantic.BaseModel):
     disable_ocr: typing.Optional[bool]
     annotate_links: typing.Optional[bool]
     disable_reconstruction: typing.Optional[bool]
+    disable_image_extraction: typing.Optional[bool]
     invalidate_cache: typing.Optional[bool]
     do_not_cache: typing.Optional[bool]
     fast_mode: typing.Optional[bool]
@@ -32,6 +33,7 @@ class LlamaParseParameters(pydantic.BaseModel):
     gpt_4_o_mode: typing.Optional[bool] = pydantic.Field(alias="gpt4o_mode")
     gpt_4_o_api_key: typing.Optional[str] = pydantic.Field(alias="gpt4o_api_key")
     do_not_unroll_columns: typing.Optional[bool]
+    guess_xlsx_sheet_name: typing.Optional[bool]
     page_separator: typing.Optional[str]
     bounding_box: typing.Optional[str]
     target_pages: typing.Optional[str]
@@ -47,10 +49,18 @@ class LlamaParseParameters(pydantic.BaseModel):
     continuous_mode: typing.Optional[bool]
     s_3_input_path: typing.Optional[str] = pydantic.Field(alias="s3_input_path")
     s_3_output_path_prefix: typing.Optional[str] = pydantic.Field(alias="s3_output_path_prefix")
+    project_id: typing.Optional[str]
     azure_openai_deployment_name: typing.Optional[str]
     azure_openai_endpoint: typing.Optional[str]
     azure_openai_api_version: typing.Optional[str]
     azure_openai_key: typing.Optional[str]
+    input_url: typing.Optional[str]
+    http_proxy: typing.Optional[str]
+    auto_mode: typing.Optional[bool]
+    auto_mode_trigger_on_regexp_in_page: typing.Optional[str]
+    auto_mode_trigger_on_text_in_page: typing.Optional[str]
+    auto_mode_trigger_on_table_in_page: typing.Optional[bool]
+    auto_mode_trigger_on_image_in_page: typing.Optional[bool]

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
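The new auto_mode fields let a parse job escalate to a more capable mode only on pages that match a trigger. A sketch setting just the fields added in this release; it assumes the remaining LlamaParseParameters fields are optional, as every field visible in this diff is, and the regex is purely illustrative:

from llama_cloud.types import LlamaParseParameters  # assuming the top-level re-export

params = LlamaParseParameters(
    disable_image_extraction=True,
    guess_xlsx_sheet_name=True,
    auto_mode=True,
    auto_mode_trigger_on_table_in_page=True,               # escalate pages that contain tables
    auto_mode_trigger_on_regexp_in_page=r"Q[1-4] 20\d\d",  # ...or whose text matches this (illustrative) regex
)

# exclude_unset in the generated json() keeps every untouched field out of the request body.
print(params.json())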
llama_cloud/types/markdown_node_parser.py
@@ -18,7 +18,8 @@ class MarkdownNodeParser(pydantic.BaseModel):
     """
     Markdown node parser.

-    Splits a document into Nodes using custom Markdown splitting logic.
+    Splits a document into Nodes using Markdown header-based splitting logic.
+    Each node contains its text content and the path of headers leading to it.

     Args:
         include_metadata (bool): whether to include metadata in nodes
llama_cloud/types/paginated_list_pipeline_files_response.py
@@ -0,0 +1,35 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .pipeline_file import PipelineFile
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class PaginatedListPipelineFilesResponse(pydantic.BaseModel):
+    files: typing.List[PipelineFile] = pydantic.Field(description="The files to list")
+    limit: int = pydantic.Field(description="The limit of the files")
+    offset: int = pydantic.Field(description="The offset of the files")
+    total_count: int = pydantic.Field(description="The total number of files")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
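The limit/offset/total_count trio supports a conventional pagination loop. A sketch with a hypothetical fetch_page callable standing in for whichever client method returns this response type:

import typing

from llama_cloud.types import PaginatedListPipelineFilesResponse, PipelineFile  # assuming the re-exports


def iter_pipeline_files(
    fetch_page: typing.Callable[[int, int], PaginatedListPipelineFilesResponse],
    page_size: int = 100,
) -> typing.Iterator[PipelineFile]:
    """Yield every file by walking offsets until total_count is exhausted.

    fetch_page(offset, limit) is a hypothetical adapter around the real client call.
    """
    offset = 0
    while True:
        page = fetch_page(offset, page_size)
        yield from page.files
        offset += len(page.files)
        if not page.files or offset >= page.total_count:
            return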
llama_cloud/types/pipeline.py
@@ -33,6 +33,7 @@ class Pipeline(pydantic.BaseModel):
     updated_at: typing.Optional[dt.datetime]
     name: str
     project_id: str
+    embedding_model_config_id: typing.Optional[str]
     pipeline_type: typing.Optional[PipelineType] = pydantic.Field(
         description="Type of pipeline. Either PLAYGROUND or MANAGED."
     )
llama_cloud/types/pipeline_create.py
@@ -33,6 +33,7 @@ class PipelineCreate(pydantic.BaseModel):
     )
     configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]]
     data_sink_id: typing.Optional[str]
+    embedding_model_config_id: typing.Optional[str]
     data_sink: typing.Optional[DataSinkCreate]
     preset_retrieval_parameters: typing.Optional[PresetRetrievalParams] = pydantic.Field(
         description="Preset retrieval parameters for the pipeline."
llama_cloud/types/pipeline_file.py
@@ -36,6 +36,7 @@ class PipelineFile(pydantic.BaseModel):
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline that the file is associated with")
     custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileCustomMetadataValue]]]
     config_hash: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileConfigHashValue]]]
+    indexed_page_count: typing.Optional[int]

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/plan.py
@@ -0,0 +1,40 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class Plan(pydantic.BaseModel):
+    total_users: typing.Optional[int]
+    total_indexes: typing.Optional[int]
+    total_indexed_pages: typing.Optional[int]
+    credits: typing.Optional[int]
+    has_payment_method: typing.Optional[bool]
+    free: typing.Optional[bool] = pydantic.Field(description="If is a free plan")
+    allowed_index: typing.Optional[bool] = pydantic.Field(description="If is allowed to use indexes")
+    allowed_external_index: typing.Optional[bool] = pydantic.Field(
+        description="If is allowed to use external data sources or sinks in indexes"
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/usage.py
@@ -0,0 +1,41 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class Usage(pydantic.BaseModel):
+    """
+    Response model; use UsageSubmission for tracking
+    """
+
+    total_users: typing.Optional[int]
+    total_indexes: typing.Optional[int]
+    total_indexed_pages: typing.Optional[int]
+    extract_pages: typing.Optional[int]
+    parse_pages: typing.Optional[int]
+    index_pages: typing.Optional[int]
+    credits: typing.Optional[int]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
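Plan and Usage pair up inside IntervalUsageAndPlan, so per-interval headroom can be computed by subtracting consumption from the allowance. A sketch; reading plan.credits as the allowance and usage.credits as consumption is an interpretation, and the None guards reflect that every field here is Optional:

import typing

from llama_cloud.types import IntervalUsageAndPlan  # assuming the top-level re-export


def remaining_credits(interval: IntervalUsageAndPlan) -> typing.Optional[int]:
    # None when the plan side is missing; zero usage when no usage was recorded.
    if interval.plan is None or interval.plan.credits is None:
        return None
    used = interval.usage.credits if interval.usage is not None and interval.usage.credits is not None else 0
    return interval.plan.credits - used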
{llama_cloud-0.1.3.dist-info → llama_cloud-0.1.5.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-cloud
-Version: 0.1.3
+Version: 0.1.5
 Summary:
 License: MIT
 Author: Logan Markewich
@@ -12,7 +12,6 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: httpx (>=0.20.0)
 Requires-Dist: pydantic (>=1.10)
 Description-Content-Type: text/markdown