llama-cloud 0.1.33__py3-none-any.whl → 0.1.35__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of llama-cloud might be problematic. Click here for more details.

Files changed (45):
  1. llama_cloud/__init__.py +36 -0
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/resources/__init__.py +6 -0
  4. llama_cloud/resources/beta/client.py +211 -8
  5. llama_cloud/resources/files/client.py +226 -0
  6. llama_cloud/resources/llama_extract/__init__.py +4 -0
  7. llama_cloud/resources/llama_extract/client.py +179 -0
  8. llama_cloud/resources/llama_extract/types/__init__.py +4 -0
  9. llama_cloud/resources/llama_extract/types/extract_stateless_request_data_schema.py +9 -0
  10. llama_cloud/resources/llama_extract/types/extract_stateless_request_data_schema_zero_value.py +7 -0
  11. llama_cloud/resources/organizations/client.py +10 -6
  12. llama_cloud/resources/parsing/client.py +24 -0
  13. llama_cloud/resources/users/__init__.py +2 -0
  14. llama_cloud/resources/users/client.py +155 -0
  15. llama_cloud/types/__init__.py +30 -0
  16. llama_cloud/types/data_source.py +2 -1
  17. llama_cloud/types/data_source_reader_version_metadata.py +32 -0
  18. llama_cloud/types/data_source_reader_version_metadata_reader_version.py +17 -0
  19. llama_cloud/types/extract_agent.py +3 -0
  20. llama_cloud/types/extract_config.py +7 -0
  21. llama_cloud/types/file_data.py +36 -0
  22. llama_cloud/types/legacy_parse_job_config.py +3 -0
  23. llama_cloud/types/llama_extract_settings.py +4 -0
  24. llama_cloud/types/llama_parse_parameters.py +3 -0
  25. llama_cloud/types/managed_open_ai_embedding.py +36 -0
  26. llama_cloud/types/managed_open_ai_embedding_config.py +34 -0
  27. llama_cloud/types/multimodal_parse_resolution.py +17 -0
  28. llama_cloud/types/paginated_response_quota_configuration.py +36 -0
  29. llama_cloud/types/parse_job_config.py +3 -0
  30. llama_cloud/types/pipeline_data_source.py +2 -1
  31. llama_cloud/types/pipeline_embedding_config.py +11 -0
  32. llama_cloud/types/quota_configuration.py +53 -0
  33. llama_cloud/types/quota_configuration_configuration_type.py +33 -0
  34. llama_cloud/types/quota_configuration_status.py +21 -0
  35. llama_cloud/types/quota_rate_limit_configuration_value.py +38 -0
  36. llama_cloud/types/quota_rate_limit_configuration_value_denominator_units.py +29 -0
  37. llama_cloud/types/struct_parse_conf.py +3 -0
  38. llama_cloud/types/update_user_response.py +33 -0
  39. llama_cloud/types/usage_response_active_alerts_item.py +4 -0
  40. llama_cloud/types/user_summary.py +38 -0
  41. llama_cloud/types/webhook_configuration_webhook_events_item.py +20 -0
  42. {llama_cloud-0.1.33.dist-info → llama_cloud-0.1.35.dist-info}/METADATA +1 -1
  43. {llama_cloud-0.1.33.dist-info → llama_cloud-0.1.35.dist-info}/RECORD +45 -27
  44. {llama_cloud-0.1.33.dist-info → llama_cloud-0.1.35.dist-info}/LICENSE +0 -0
  45. {llama_cloud-0.1.33.dist-info → llama_cloud-0.1.35.dist-info}/WHEEL +0 -0
@@ -118,6 +118,7 @@ class ParsingClient:
118
118
  output_s_3_region: str,
119
119
  target_pages: str,
120
120
  webhook_url: str,
121
+ webhook_configurations: str,
121
122
  job_timeout_in_seconds: float,
122
123
  job_timeout_extra_time_per_page_in_seconds: float,
123
124
  ) -> ParsingJob:
@@ -151,6 +152,8 @@ class ParsingClient:
151
152
 
152
153
  - webhook_url: str.
153
154
 
155
+ - webhook_configurations: str.
156
+
154
157
  - job_timeout_in_seconds: float.
155
158
 
156
159
  - job_timeout_extra_time_per_page_in_seconds: float.
@@ -166,6 +169,7 @@ class ParsingClient:
166
169
  "output_s3_region": output_s_3_region,
167
170
  "target_pages": target_pages,
168
171
  "webhook_url": webhook_url,
172
+ "webhook_configurations": webhook_configurations,
169
173
  "job_timeout_in_seconds": job_timeout_in_seconds,
170
174
  "job_timeout_extra_time_per_page_in_seconds": job_timeout_extra_time_per_page_in_seconds,
171
175
  }
@@ -242,6 +246,7 @@ class ParsingClient:
242
246
  page_separator: str,
243
247
  page_suffix: str,
244
248
  preserve_layout_alignment_across_pages: bool,
249
+ preserve_very_small_text: bool,
245
250
  skip_diagonal_text: bool,
246
251
  spreadsheet_extract_sub_tables: bool,
247
252
  structured_output: bool,
@@ -253,6 +258,7 @@ class ParsingClient:
253
258
  vendor_multimodal_model_name: str,
254
259
  model: str,
255
260
  webhook_url: str,
261
+ webhook_configurations: str,
256
262
  preset: str,
257
263
  parse_mode: typing.Optional[ParsingMode] = OMIT,
258
264
  page_error_tolerance: float,
@@ -389,6 +395,8 @@ class ParsingClient:
389
395
 
390
396
  - preserve_layout_alignment_across_pages: bool.
391
397
 
398
+ - preserve_very_small_text: bool.
399
+
392
400
  - skip_diagonal_text: bool.
393
401
 
394
402
  - spreadsheet_extract_sub_tables: bool.
@@ -411,6 +419,8 @@ class ParsingClient:
411
419
 
412
420
  - webhook_url: str.
413
421
 
422
+ - webhook_configurations: str.
423
+
414
424
  - preset: str.
415
425
 
416
426
  - parse_mode: typing.Optional[ParsingMode].
@@ -530,6 +540,7 @@ class ParsingClient:
530
540
  "page_separator": page_separator,
531
541
  "page_suffix": page_suffix,
532
542
  "preserve_layout_alignment_across_pages": preserve_layout_alignment_across_pages,
543
+ "preserve_very_small_text": preserve_very_small_text,
533
544
  "skip_diagonal_text": skip_diagonal_text,
534
545
  "spreadsheet_extract_sub_tables": spreadsheet_extract_sub_tables,
535
546
  "structured_output": structured_output,
@@ -541,6 +552,7 @@ class ParsingClient:
541
552
  "vendor_multimodal_model_name": vendor_multimodal_model_name,
542
553
  "model": model,
543
554
  "webhook_url": webhook_url,
555
+ "webhook_configurations": webhook_configurations,
544
556
  "preset": preset,
545
557
  "page_error_tolerance": page_error_tolerance,
546
558
  "replace_failed_page_with_error_message_prefix": replace_failed_page_with_error_message_prefix,
@@ -1278,6 +1290,7 @@ class AsyncParsingClient:
1278
1290
  output_s_3_region: str,
1279
1291
  target_pages: str,
1280
1292
  webhook_url: str,
1293
+ webhook_configurations: str,
1281
1294
  job_timeout_in_seconds: float,
1282
1295
  job_timeout_extra_time_per_page_in_seconds: float,
1283
1296
  ) -> ParsingJob:
@@ -1311,6 +1324,8 @@ class AsyncParsingClient:
1311
1324
 
1312
1325
  - webhook_url: str.
1313
1326
 
1327
+ - webhook_configurations: str.
1328
+
1314
1329
  - job_timeout_in_seconds: float.
1315
1330
 
1316
1331
  - job_timeout_extra_time_per_page_in_seconds: float.
@@ -1326,6 +1341,7 @@ class AsyncParsingClient:
1326
1341
  "output_s3_region": output_s_3_region,
1327
1342
  "target_pages": target_pages,
1328
1343
  "webhook_url": webhook_url,
1344
+ "webhook_configurations": webhook_configurations,
1329
1345
  "job_timeout_in_seconds": job_timeout_in_seconds,
1330
1346
  "job_timeout_extra_time_per_page_in_seconds": job_timeout_extra_time_per_page_in_seconds,
1331
1347
  }
@@ -1402,6 +1418,7 @@ class AsyncParsingClient:
1402
1418
  page_separator: str,
1403
1419
  page_suffix: str,
1404
1420
  preserve_layout_alignment_across_pages: bool,
1421
+ preserve_very_small_text: bool,
1405
1422
  skip_diagonal_text: bool,
1406
1423
  spreadsheet_extract_sub_tables: bool,
1407
1424
  structured_output: bool,
@@ -1413,6 +1430,7 @@ class AsyncParsingClient:
1413
1430
  vendor_multimodal_model_name: str,
1414
1431
  model: str,
1415
1432
  webhook_url: str,
1433
+ webhook_configurations: str,
1416
1434
  preset: str,
1417
1435
  parse_mode: typing.Optional[ParsingMode] = OMIT,
1418
1436
  page_error_tolerance: float,
@@ -1549,6 +1567,8 @@ class AsyncParsingClient:
1549
1567
 
1550
1568
  - preserve_layout_alignment_across_pages: bool.
1551
1569
 
1570
+ - preserve_very_small_text: bool.
1571
+
1552
1572
  - skip_diagonal_text: bool.
1553
1573
 
1554
1574
  - spreadsheet_extract_sub_tables: bool.
@@ -1571,6 +1591,8 @@ class AsyncParsingClient:
1571
1591
 
1572
1592
  - webhook_url: str.
1573
1593
 
1594
+ - webhook_configurations: str.
1595
+
1574
1596
  - preset: str.
1575
1597
 
1576
1598
  - parse_mode: typing.Optional[ParsingMode].
@@ -1690,6 +1712,7 @@ class AsyncParsingClient:
1690
1712
  "page_separator": page_separator,
1691
1713
  "page_suffix": page_suffix,
1692
1714
  "preserve_layout_alignment_across_pages": preserve_layout_alignment_across_pages,
1715
+ "preserve_very_small_text": preserve_very_small_text,
1693
1716
  "skip_diagonal_text": skip_diagonal_text,
1694
1717
  "spreadsheet_extract_sub_tables": spreadsheet_extract_sub_tables,
1695
1718
  "structured_output": structured_output,
@@ -1701,6 +1724,7 @@ class AsyncParsingClient:
1701
1724
  "vendor_multimodal_model_name": vendor_multimodal_model_name,
1702
1725
  "model": model,
1703
1726
  "webhook_url": webhook_url,
1727
+ "webhook_configurations": webhook_configurations,
1704
1728
  "preset": preset,
1705
1729
  "page_error_tolerance": page_error_tolerance,
1706
1730
  "replace_failed_page_with_error_message_prefix": replace_failed_page_with_error_message_prefix,
@@ -0,0 +1,2 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
@@ -0,0 +1,155 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import typing
4
+ import urllib.parse
5
+ from json.decoder import JSONDecodeError
6
+
7
+ from ...core.api_error import ApiError
8
+ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
9
+ from ...core.jsonable_encoder import jsonable_encoder
10
+ from ...errors.unprocessable_entity_error import UnprocessableEntityError
11
+ from ...types.http_validation_error import HttpValidationError
12
+ from ...types.update_user_response import UpdateUserResponse
13
+
14
+ try:
15
+ import pydantic
16
+ if pydantic.__version__.startswith("1."):
17
+ raise ImportError
18
+ import pydantic.v1 as pydantic # type: ignore
19
+ except ImportError:
20
+ import pydantic # type: ignore
21
+
22
+ # this is used as the default value for optional parameters
23
+ OMIT = typing.cast(typing.Any, ...)
24
+
25
+
26
class UsersClient:
    """Synchronous client for the user-management endpoints."""

    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._client_wrapper = client_wrapper

    def update_user(
        self,
        user_id: str,
        *,
        first_name: typing.Optional[str] = OMIT,
        last_name: typing.Optional[str] = OMIT,
        email: typing.Optional[str] = OMIT,
        current_password: typing.Optional[str] = OMIT,
        new_password: typing.Optional[str] = OMIT,
    ) -> UpdateUserResponse:
        """
        Update profile fields of an existing user via PUT /api/v1/users/{user_id}.

        Parameters:
            - user_id: str. Identifier of the user to update.

            - first_name: typing.Optional[str].

            - last_name: typing.Optional[str].

            - email: typing.Optional[str].

            - current_password: typing.Optional[str].

            - new_password: typing.Optional[str].

        Raises UnprocessableEntityError on HTTP 422, ApiError on any other
        non-2xx response.
        ---
        from llama_cloud.client import LlamaCloud

        client = LlamaCloud(
            token="YOUR_TOKEN",
        )
        client.users.update_user(
            user_id="string",
        )
        """
        # Only include fields the caller explicitly supplied; OMIT is the
        # "not provided" sentinel, so omitted fields are never serialized.
        _request: typing.Dict[str, typing.Any] = {}
        for _field, _value in (
            ("first_name", first_name),
            ("last_name", last_name),
            ("email", email),
            ("current_password", current_password),
            ("new_password", new_password),
        ):
            if _value is not OMIT:
                _request[_field] = _value
        _response = self._client_wrapper.httpx_client.request(
            "PUT",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/users/{user_id}"),
            json=jsonable_encoder(_request),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(UpdateUserResponse, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        # Fall through: surface the raw body (JSON if decodable, text otherwise).
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
90
+
91
+
92
class AsyncUsersClient:
    """Asynchronous client for the user-management endpoints."""

    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._client_wrapper = client_wrapper

    async def update_user(
        self,
        user_id: str,
        *,
        first_name: typing.Optional[str] = OMIT,
        last_name: typing.Optional[str] = OMIT,
        email: typing.Optional[str] = OMIT,
        current_password: typing.Optional[str] = OMIT,
        new_password: typing.Optional[str] = OMIT,
    ) -> UpdateUserResponse:
        """
        Update profile fields of an existing user via PUT /api/v1/users/{user_id}.

        Parameters:
            - user_id: str. Identifier of the user to update.

            - first_name: typing.Optional[str].

            - last_name: typing.Optional[str].

            - email: typing.Optional[str].

            - current_password: typing.Optional[str].

            - new_password: typing.Optional[str].

        Raises UnprocessableEntityError on HTTP 422, ApiError on any other
        non-2xx response.
        ---
        from llama_cloud.client import AsyncLlamaCloud

        client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
        )
        await client.users.update_user(
            user_id="string",
        )
        """
        # Only include fields the caller explicitly supplied; OMIT is the
        # "not provided" sentinel, so omitted fields are never serialized.
        _request: typing.Dict[str, typing.Any] = {}
        for _field, _value in (
            ("first_name", first_name),
            ("last_name", last_name),
            ("email", email),
            ("current_password", current_password),
            ("new_password", new_password),
        ):
            if _value is not OMIT:
                _request[_field] = _value
        _response = await self._client_wrapper.httpx_client.request(
            "PUT",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/users/{user_id}"),
            json=jsonable_encoder(_request),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(UpdateUserResponse, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        # Fall through: surface the raw body (JSON if decodable, text otherwise).
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
@@ -78,6 +78,8 @@ from .data_source_create import DataSourceCreate
78
78
  from .data_source_create_component import DataSourceCreateComponent
79
79
  from .data_source_create_custom_metadata_value import DataSourceCreateCustomMetadataValue
80
80
  from .data_source_custom_metadata_value import DataSourceCustomMetadataValue
81
+ from .data_source_reader_version_metadata import DataSourceReaderVersionMetadata
82
+ from .data_source_reader_version_metadata_reader_version import DataSourceReaderVersionMetadataReaderVersion
81
83
  from .data_source_update_dispatcher_config import DataSourceUpdateDispatcherConfig
82
84
  from .delete_params import DeleteParams
83
85
  from .document_block import DocumentBlock
@@ -139,6 +141,7 @@ from .extract_target import ExtractTarget
139
141
  from .fail_page_mode import FailPageMode
140
142
  from .file import File
141
143
  from .file_count_by_status_response import FileCountByStatusResponse
144
+ from .file_data import FileData
142
145
  from .file_id_presigned_url import FileIdPresignedUrl
143
146
  from .file_parse_public import FileParsePublic
144
147
  from .file_permission_info_value import FilePermissionInfoValue
@@ -198,12 +201,15 @@ from .llm_parameters import LlmParameters
198
201
  from .load_files_job_config import LoadFilesJobConfig
199
202
  from .managed_ingestion_status import ManagedIngestionStatus
200
203
  from .managed_ingestion_status_response import ManagedIngestionStatusResponse
204
+ from .managed_open_ai_embedding import ManagedOpenAiEmbedding
205
+ from .managed_open_ai_embedding_config import ManagedOpenAiEmbeddingConfig
201
206
  from .message_annotation import MessageAnnotation
202
207
  from .message_role import MessageRole
203
208
  from .metadata_filter import MetadataFilter
204
209
  from .metadata_filter_value import MetadataFilterValue
205
210
  from .metadata_filters import MetadataFilters
206
211
  from .metadata_filters_filters_item import MetadataFiltersFiltersItem
212
+ from .multimodal_parse_resolution import MultimodalParseResolution
207
213
  from .node_relationship import NodeRelationship
208
214
  from .none_chunking_config import NoneChunkingConfig
209
215
  from .none_segmentation_config import NoneSegmentationConfig
@@ -224,6 +230,7 @@ from .paginated_list_pipeline_files_response import PaginatedListPipelineFilesRe
224
230
  from .paginated_report_response import PaginatedReportResponse
225
231
  from .paginated_response_agent_data import PaginatedResponseAgentData
226
232
  from .paginated_response_aggregate_group import PaginatedResponseAggregateGroup
233
+ from .paginated_response_quota_configuration import PaginatedResponseQuotaConfiguration
227
234
  from .parse_job_config import ParseJobConfig
228
235
  from .parse_job_config_priority import ParseJobConfigPriority
229
236
  from .parse_plan_level import ParsePlanLevel
@@ -267,6 +274,7 @@ from .pipeline_embedding_config import (
267
274
  PipelineEmbeddingConfig_CohereEmbedding,
268
275
  PipelineEmbeddingConfig_GeminiEmbedding,
269
276
  PipelineEmbeddingConfig_HuggingfaceApiEmbedding,
277
+ PipelineEmbeddingConfig_ManagedOpenaiEmbedding,
270
278
  PipelineEmbeddingConfig_OpenaiEmbedding,
271
279
  PipelineEmbeddingConfig_VertexaiEmbedding,
272
280
  )
@@ -303,6 +311,11 @@ from .progress_event_status import ProgressEventStatus
303
311
  from .project import Project
304
312
  from .project_create import ProjectCreate
305
313
  from .prompt_conf import PromptConf
314
+ from .quota_configuration import QuotaConfiguration
315
+ from .quota_configuration_configuration_type import QuotaConfigurationConfigurationType
316
+ from .quota_configuration_status import QuotaConfigurationStatus
317
+ from .quota_rate_limit_configuration_value import QuotaRateLimitConfigurationValue
318
+ from .quota_rate_limit_configuration_value_denominator_units import QuotaRateLimitConfigurationValueDenominatorUnits
306
319
  from .re_rank_config import ReRankConfig
307
320
  from .re_ranker_type import ReRankerType
308
321
  from .recurring_credit_grant import RecurringCreditGrant
@@ -348,6 +361,7 @@ from .text_node import TextNode
348
361
  from .text_node_relationships_value import TextNodeRelationshipsValue
349
362
  from .text_node_with_score import TextNodeWithScore
350
363
  from .token_chunking_config import TokenChunkingConfig
364
+ from .update_user_response import UpdateUserResponse
351
365
  from .usage_and_plan import UsageAndPlan
352
366
  from .usage_metric_response import UsageMetricResponse
353
367
  from .usage_response import UsageResponse
@@ -357,6 +371,7 @@ from .user_organization import UserOrganization
357
371
  from .user_organization_create import UserOrganizationCreate
358
372
  from .user_organization_delete import UserOrganizationDelete
359
373
  from .user_organization_role import UserOrganizationRole
374
+ from .user_summary import UserSummary
360
375
  from .validation_error import ValidationError
361
376
  from .validation_error_loc_item import ValidationErrorLocItem
362
377
  from .vertex_ai_embedding_config import VertexAiEmbeddingConfig
@@ -440,6 +455,8 @@ __all__ = [
440
455
  "DataSourceCreateComponent",
441
456
  "DataSourceCreateCustomMetadataValue",
442
457
  "DataSourceCustomMetadataValue",
458
+ "DataSourceReaderVersionMetadata",
459
+ "DataSourceReaderVersionMetadataReaderVersion",
443
460
  "DataSourceUpdateDispatcherConfig",
444
461
  "DeleteParams",
445
462
  "DocumentBlock",
@@ -497,6 +514,7 @@ __all__ = [
497
514
  "FailPageMode",
498
515
  "File",
499
516
  "FileCountByStatusResponse",
517
+ "FileData",
500
518
  "FileIdPresignedUrl",
501
519
  "FileParsePublic",
502
520
  "FilePermissionInfoValue",
@@ -552,12 +570,15 @@ __all__ = [
552
570
  "LoadFilesJobConfig",
553
571
  "ManagedIngestionStatus",
554
572
  "ManagedIngestionStatusResponse",
573
+ "ManagedOpenAiEmbedding",
574
+ "ManagedOpenAiEmbeddingConfig",
555
575
  "MessageAnnotation",
556
576
  "MessageRole",
557
577
  "MetadataFilter",
558
578
  "MetadataFilterValue",
559
579
  "MetadataFilters",
560
580
  "MetadataFiltersFiltersItem",
581
+ "MultimodalParseResolution",
561
582
  "NodeRelationship",
562
583
  "NoneChunkingConfig",
563
584
  "NoneSegmentationConfig",
@@ -578,6 +599,7 @@ __all__ = [
578
599
  "PaginatedReportResponse",
579
600
  "PaginatedResponseAgentData",
580
601
  "PaginatedResponseAggregateGroup",
602
+ "PaginatedResponseQuotaConfiguration",
581
603
  "ParseJobConfig",
582
604
  "ParseJobConfigPriority",
583
605
  "ParsePlanLevel",
@@ -618,6 +640,7 @@ __all__ = [
618
640
  "PipelineEmbeddingConfig_CohereEmbedding",
619
641
  "PipelineEmbeddingConfig_GeminiEmbedding",
620
642
  "PipelineEmbeddingConfig_HuggingfaceApiEmbedding",
643
+ "PipelineEmbeddingConfig_ManagedOpenaiEmbedding",
621
644
  "PipelineEmbeddingConfig_OpenaiEmbedding",
622
645
  "PipelineEmbeddingConfig_VertexaiEmbedding",
623
646
  "PipelineFile",
@@ -649,6 +672,11 @@ __all__ = [
649
672
  "Project",
650
673
  "ProjectCreate",
651
674
  "PromptConf",
675
+ "QuotaConfiguration",
676
+ "QuotaConfigurationConfigurationType",
677
+ "QuotaConfigurationStatus",
678
+ "QuotaRateLimitConfigurationValue",
679
+ "QuotaRateLimitConfigurationValueDenominatorUnits",
652
680
  "ReRankConfig",
653
681
  "ReRankerType",
654
682
  "RecurringCreditGrant",
@@ -692,6 +720,7 @@ __all__ = [
692
720
  "TextNodeRelationshipsValue",
693
721
  "TextNodeWithScore",
694
722
  "TokenChunkingConfig",
723
+ "UpdateUserResponse",
695
724
  "UsageAndPlan",
696
725
  "UsageMetricResponse",
697
726
  "UsageResponse",
@@ -701,6 +730,7 @@ __all__ = [
701
730
  "UserOrganizationCreate",
702
731
  "UserOrganizationDelete",
703
732
  "UserOrganizationRole",
733
+ "UserSummary",
704
734
  "ValidationError",
705
735
  "ValidationErrorLocItem",
706
736
  "VertexAiEmbeddingConfig",
@@ -7,6 +7,7 @@ from ..core.datetime_utils import serialize_datetime
7
7
  from .configurable_data_source_names import ConfigurableDataSourceNames
8
8
  from .data_source_component import DataSourceComponent
9
9
  from .data_source_custom_metadata_value import DataSourceCustomMetadataValue
10
+ from .data_source_reader_version_metadata import DataSourceReaderVersionMetadata
10
11
 
11
12
  try:
12
13
  import pydantic
@@ -29,7 +30,7 @@ class DataSource(pydantic.BaseModel):
29
30
  source_type: ConfigurableDataSourceNames
30
31
  custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[DataSourceCustomMetadataValue]]]
31
32
  component: DataSourceComponent = pydantic.Field(description="Component that implements the data source")
32
- version_metadata: typing.Optional[typing.Dict[str, typing.Any]]
33
+ version_metadata: typing.Optional[DataSourceReaderVersionMetadata]
33
34
  project_id: str
34
35
 
35
36
  def json(self, **kwargs: typing.Any) -> str:
@@ -0,0 +1,32 @@
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing

from ..core.datetime_utils import serialize_datetime
from .data_source_reader_version_metadata_reader_version import DataSourceReaderVersionMetadataReaderVersion

try:
    import pydantic
    if pydantic.__version__.startswith("1."):
        raise ImportError
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class DataSourceReaderVersionMetadata(pydantic.BaseModel):
    """Version metadata attached to a data source; carries the optional reader version."""

    reader_version: typing.Optional[DataSourceReaderVersionMetadataReaderVersion]

    def json(self, **kwargs: typing.Any) -> str:
        # Serialize by alias and drop unset fields; caller kwargs win.
        merged: typing.Any = {"by_alias": True, "exclude_unset": True}
        merged.update(kwargs)
        return super().json(**merged)

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        # Same defaults as json(): alias keys, unset fields excluded.
        merged: typing.Any = {"by_alias": True, "exclude_unset": True}
        merged.update(kwargs)
        return super().dict(**merged)

    class Config:
        frozen = True
        smart_union = True
        json_encoders = {dt.datetime: serialize_datetime}
@@ -0,0 +1,17 @@
# This file was auto-generated by Fern from our API Definition.

import enum
import typing

T_Result = typing.TypeVar("T_Result")


class DataSourceReaderVersionMetadataReaderVersion(str, enum.Enum):
    """Reader version attached to a data source; one of "1.0" or "2.0"."""

    ONE_0 = "1.0"
    TWO_0 = "2.0"

    def visit(self, one_0: typing.Callable[[], T_Result], two_0: typing.Callable[[], T_Result]) -> T_Result:
        """Invoke and return the callback that corresponds to this member."""
        dispatch = {
            DataSourceReaderVersionMetadataReaderVersion.ONE_0: one_0,
            DataSourceReaderVersionMetadataReaderVersion.TWO_0: two_0,
        }
        return dispatch[self]()
@@ -3,6 +3,8 @@
3
3
  import datetime as dt
4
4
  import typing
5
5
 
6
+ import typing_extensions
7
+
6
8
  from ..core.datetime_utils import serialize_datetime
7
9
  from .extract_agent_data_schema_value import ExtractAgentDataSchemaValue
8
10
  from .extract_config import ExtractConfig
@@ -28,6 +30,7 @@ class ExtractAgent(pydantic.BaseModel):
28
30
  description="The schema of the data."
29
31
  )
30
32
  config: ExtractConfig = pydantic.Field(description="The configuration parameters for the extraction agent.")
33
+ custom_configuration: typing.Optional[typing_extensions.Literal["default"]]
31
34
  created_at: typing.Optional[dt.datetime]
32
35
  updated_at: typing.Optional[dt.datetime]
33
36
 
@@ -32,12 +32,19 @@ class ExtractConfig(pydantic.BaseModel):
32
32
  system_prompt: typing.Optional[str]
33
33
  use_reasoning: typing.Optional[bool] = pydantic.Field(description="Whether to use reasoning for the extraction.")
34
34
  cite_sources: typing.Optional[bool] = pydantic.Field(description="Whether to cite sources for the extraction.")
35
+ confidence_scores: typing.Optional[bool] = pydantic.Field(
36
+ description="Whether to fetch confidence scores for the extraction."
37
+ )
35
38
  chunk_mode: typing.Optional[DocumentChunkMode] = pydantic.Field(
36
39
  description="The mode to use for chunking the document."
37
40
  )
41
+ high_resolution_mode: typing.Optional[bool] = pydantic.Field(
42
+ description="Whether to use high resolution mode for the extraction."
43
+ )
38
44
  invalidate_cache: typing.Optional[bool] = pydantic.Field(
39
45
  description="Whether to invalidate the cache for the extraction."
40
46
  )
47
+ page_range: typing.Optional[str]
41
48
 
42
49
  def json(self, **kwargs: typing.Any) -> str:
43
50
  kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
@@ -0,0 +1,36 @@
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing

from ..core.datetime_utils import serialize_datetime

try:
    import pydantic
    if pydantic.__version__.startswith("1."):
        raise ImportError
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class FileData(pydantic.BaseModel):
    """
    File payload: base64-encoded content plus its MIME type.
    """

    data: str = pydantic.Field(description="The file content as base64-encoded string")
    mime_type: str = pydantic.Field(description="The MIME type of the file (e.g., 'application/pdf', 'text/plain')")

    def json(self, **kwargs: typing.Any) -> str:
        # Serialize by alias and drop unset fields; caller kwargs win.
        merged: typing.Any = {"by_alias": True, "exclude_unset": True}
        merged.update(kwargs)
        return super().json(**merged)

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        # Same defaults as json(): alias keys, unset fields excluded.
        merged: typing.Any = {"by_alias": True, "exclude_unset": True}
        merged.update(kwargs)
        return super().dict(**merged)

    class Config:
        frozen = True
        smart_union = True
        json_encoders = {dt.datetime: serialize_datetime}
@@ -43,6 +43,9 @@ class LegacyParseJobConfig(pydantic.BaseModel):
43
43
  preserve_layout_alignment_across_pages: typing.Optional[bool] = pydantic.Field(
44
44
  alias="preserveLayoutAlignmentAcrossPages", description="Whether to preserve layout alignment across pages."
45
45
  )
46
+ preserve_very_small_text: typing.Optional[bool] = pydantic.Field(
47
+ alias="preserveVerySmallText", description="Whether to preserve very small text lines."
48
+ )
46
49
  invalidate_cache: bool = pydantic.Field(alias="invalidateCache", description="Whether to invalidate the cache.")
47
50
  output_pdf_of_document: typing.Optional[bool] = pydantic.Field(alias="outputPDFOfDocument")
48
51
  outlined_table_extraction: typing.Optional[bool] = pydantic.Field(alias="outlinedTableExtraction")
@@ -6,6 +6,7 @@ import typing
6
6
  from ..core.datetime_utils import serialize_datetime
7
7
  from .chunk_mode import ChunkMode
8
8
  from .llama_parse_parameters import LlamaParseParameters
9
+ from .multimodal_parse_resolution import MultimodalParseResolution
9
10
  from .struct_parse_conf import StructParseConf
10
11
 
11
12
  try:
@@ -48,6 +49,9 @@ class LlamaExtractSettings(pydantic.BaseModel):
48
49
  llama_parse_params: typing.Optional[LlamaParseParameters] = pydantic.Field(
49
50
  description="LlamaParse related settings."
50
51
  )
52
+ multimodal_parse_resolution: typing.Optional[MultimodalParseResolution] = pydantic.Field(
53
+ description="The resolution to use for multimodal parsing."
54
+ )
51
55
 
52
56
  def json(self, **kwargs: typing.Any) -> str:
53
57
  kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
@@ -8,6 +8,7 @@ from .fail_page_mode import FailPageMode
8
8
  from .llama_parse_parameters_priority import LlamaParseParametersPriority
9
9
  from .parser_languages import ParserLanguages
10
10
  from .parsing_mode import ParsingMode
11
+ from .webhook_configuration import WebhookConfiguration
11
12
 
12
13
  try:
13
14
  import pydantic
@@ -23,6 +24,7 @@ class LlamaParseParameters(pydantic.BaseModel):
23
24
  Settings that can be configured for how to use LlamaParse to parse files within a LlamaCloud pipeline.
24
25
  """
25
26
 
27
+ webhook_configurations: typing.Optional[typing.List[WebhookConfiguration]]
26
28
  priority: typing.Optional[LlamaParseParametersPriority]
27
29
  languages: typing.Optional[typing.List[ParserLanguages]]
28
30
  parsing_instruction: typing.Optional[str]
@@ -40,6 +42,7 @@ class LlamaParseParameters(pydantic.BaseModel):
40
42
  fast_mode: typing.Optional[bool]
41
43
  skip_diagonal_text: typing.Optional[bool]
42
44
  preserve_layout_alignment_across_pages: typing.Optional[bool]
45
+ preserve_very_small_text: typing.Optional[bool]
43
46
  gpt_4_o_mode: typing.Optional[bool] = pydantic.Field(alias="gpt4o_mode")
44
47
  gpt_4_o_api_key: typing.Optional[str] = pydantic.Field(alias="gpt4o_api_key")
45
48
  do_not_unroll_columns: typing.Optional[bool]