llama-cloud 0.1.41__py3-none-any.whl → 0.1.43__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (43)
  1. llama_cloud/__init__.py +19 -19
  2. llama_cloud/resources/__init__.py +6 -0
  3. llama_cloud/resources/alpha/client.py +14 -30
  4. llama_cloud/resources/beta/client.py +1045 -59
  5. llama_cloud/resources/jobs/client.py +0 -8
  6. llama_cloud/resources/llama_extract/__init__.py +6 -0
  7. llama_cloud/resources/llama_extract/client.py +825 -941
  8. llama_cloud/resources/llama_extract/types/__init__.py +6 -0
  9. llama_cloud/resources/organizations/client.py +18 -4
  10. llama_cloud/resources/parsing/client.py +56 -0
  11. llama_cloud/resources/pipelines/client.py +164 -0
  12. llama_cloud/types/__init__.py +16 -22
  13. llama_cloud/types/agent_data.py +1 -1
  14. llama_cloud/types/agent_deployment_summary.py +1 -2
  15. llama_cloud/types/{prompt_conf.py → api_key.py} +14 -9
  16. llama_cloud/types/{extract_job_create.py → api_key_query_response.py} +6 -14
  17. llama_cloud/types/api_key_type.py +17 -0
  18. llama_cloud/types/delete_response.py +35 -0
  19. llama_cloud/types/extract_config.py +1 -0
  20. llama_cloud/types/extract_models.py +4 -0
  21. llama_cloud/types/extracted_table.py +40 -0
  22. llama_cloud/types/legacy_parse_job_config.py +3 -0
  23. llama_cloud/types/llama_parse_parameters.py +7 -0
  24. llama_cloud/types/organization.py +1 -0
  25. llama_cloud/types/paginated_response_spreadsheet_job.py +34 -0
  26. llama_cloud/types/parse_job_config.py +7 -0
  27. llama_cloud/types/public_model_name.py +4 -0
  28. llama_cloud/types/quota_configuration_configuration_type.py +4 -0
  29. llama_cloud/types/spreadsheet_job.py +50 -0
  30. llama_cloud/types/spreadsheet_parsing_config.py +35 -0
  31. {llama_cloud-0.1.41.dist-info → llama_cloud-0.1.43.dist-info}/METADATA +1 -1
  32. {llama_cloud-0.1.41.dist-info → llama_cloud-0.1.43.dist-info}/RECORD +37 -37
  33. llama_cloud/types/chunk_mode.py +0 -29
  34. llama_cloud/types/llama_extract_settings.py +0 -67
  35. llama_cloud/types/multimodal_parse_resolution.py +0 -17
  36. llama_cloud/types/schema_relax_mode.py +0 -25
  37. llama_cloud/types/struct_mode.py +0 -33
  38. llama_cloud/types/struct_parse_conf.py +0 -63
  39. /llama_cloud/{types → resources/llama_extract/types}/extract_job_create_data_schema_override.py +0 -0
  40. /llama_cloud/{types → resources/llama_extract/types}/extract_job_create_data_schema_override_zero_value.py +0 -0
  41. /llama_cloud/{types → resources/llama_extract/types}/extract_job_create_priority.py +0 -0
  42. {llama_cloud-0.1.41.dist-info → llama_cloud-0.1.43.dist-info}/LICENSE +0 -0
  43. {llama_cloud-0.1.41.dist-info → llama_cloud-0.1.43.dist-info}/WHEEL +0 -0
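
Beyond the line counts, the most notable structural change in this listing is the relocation of the extract_job_create_* helper modules from llama_cloud/types to llama_cloud/resources/llama_extract/types (items 39-41), while llama_cloud/__init__.py (diffed below) switches to re-exporting those names from .resources instead of .types. A minimal compatibility sketch, assuming that re-export and inferring the new deep module path from the file moves (the deep path is an assumption, not confirmed by the diff):

    # Top-level imports of the relocated helper types still resolve, because
    # llama_cloud/__init__.py now re-exports them from .resources (see the
    # __init__.py diff below).
    from llama_cloud import (
        ExtractJobCreateDataSchemaOverride,
        ExtractJobCreateDataSchemaOverrideZeroValue,
        ExtractJobCreatePriority,
    )

    # Only deep imports need to follow the move.
    try:
        # pre-0.1.43 module location (item 41 in the listing)
        from llama_cloud.types.extract_job_create_priority import ExtractJobCreatePriority
    except ImportError:
        # new location after the move; inferred from the relocated file paths
        from llama_cloud.resources.llama_extract.types.extract_job_create_priority import (
            ExtractJobCreatePriority,
        )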
llama_cloud/__init__.py CHANGED
@@ -16,6 +16,9 @@ from .types import (
     AgentDeploymentList,
     AgentDeploymentSummary,
     AggregateGroup,
+    ApiKey,
+    ApiKeyQueryResponse,
+    ApiKeyType,
     AutoTransformConfig,
     AzureOpenAiEmbedding,
     AzureOpenAiEmbeddingConfig,
@@ -36,7 +39,6 @@ from .types import (
     ChatAppResponse,
     ChatData,
     ChatMessage,
-    ChunkMode,
     ClassificationResult,
     ClassifierRule,
     ClassifyJob,
@@ -85,6 +87,7 @@ from .types import (
     DataSourceReaderVersionMetadataReaderVersion,
     DataSourceUpdateDispatcherConfig,
     DeleteParams,
+    DeleteResponse,
     DocumentChunkMode,
     DocumentIngestionJobParams,
     ElementSegmentationConfig,
@@ -112,10 +115,6 @@ from .types import (
     ExtractConfig,
     ExtractConfigPriority,
     ExtractJob,
-    ExtractJobCreate,
-    ExtractJobCreateDataSchemaOverride,
-    ExtractJobCreateDataSchemaOverrideZeroValue,
-    ExtractJobCreatePriority,
     ExtractMode,
     ExtractModels,
     ExtractResultset,
@@ -135,6 +134,7 @@ from .types import (
     ExtractSchemaValidateResponseDataSchemaValue,
     ExtractState,
     ExtractTarget,
+    ExtractedTable,
     FailPageMode,
     FailureHandlingConfig,
     File,
@@ -190,7 +190,6 @@ from .types import (
     LlamaExtractFeatureAvailability,
     LlamaExtractModeAvailability,
     LlamaExtractModeAvailabilityStatus,
-    LlamaExtractSettings,
     LlamaParseParameters,
     LlamaParseParametersPriority,
     LlamaParseSupportedFileExtensions,
@@ -207,7 +206,6 @@ from .types import (
     MetadataFilterValue,
     MetadataFilters,
     MetadataFiltersFiltersItem,
-    MultimodalParseResolution,
     NodeRelationship,
     NoneChunkingConfig,
     NoneSegmentationConfig,
@@ -229,6 +227,7 @@ from .types import (
     PaginatedResponseAggregateGroup,
     PaginatedResponseClassifyJob,
     PaginatedResponseQuotaConfiguration,
+    PaginatedResponseSpreadsheetJob,
     ParseConfiguration,
     ParseConfigurationCreate,
     ParseConfigurationFilter,
@@ -302,7 +301,6 @@ from .types import (
     PresignedUrl,
     Project,
     ProjectCreate,
-    PromptConf,
     PublicModelName,
     QuotaConfiguration,
     QuotaConfigurationConfigurationType,
@@ -322,14 +320,13 @@ from .types import (
     Role,
     SchemaGenerationAvailability,
     SchemaGenerationAvailabilityStatus,
-    SchemaRelaxMode,
     SemanticChunkingConfig,
     SentenceChunkingConfig,
     SparseModelConfig,
     SparseModelType,
+    SpreadsheetJob,
+    SpreadsheetParsingConfig,
     StatusEnum,
-    StructMode,
-    StructParseConf,
     SupportedLlmModel,
     SupportedLlmModelNames,
     TextNode,
@@ -374,6 +371,9 @@ from .resources import (
     ExtractAgentUpdateDataSchemaZeroValue,
     ExtractJobCreateBatchDataSchemaOverride,
     ExtractJobCreateBatchDataSchemaOverrideZeroValue,
+    ExtractJobCreateDataSchemaOverride,
+    ExtractJobCreateDataSchemaOverrideZeroValue,
+    ExtractJobCreatePriority,
     ExtractSchemaValidateRequestDataSchema,
     ExtractSchemaValidateRequestDataSchemaZeroValue,
     ExtractStatelessRequestDataSchema,
@@ -428,6 +428,9 @@ __all__ = [
     "AgentDeploymentList",
     "AgentDeploymentSummary",
     "AggregateGroup",
+    "ApiKey",
+    "ApiKeyQueryResponse",
+    "ApiKeyType",
     "AutoTransformConfig",
     "AzureOpenAiEmbedding",
     "AzureOpenAiEmbeddingConfig",
@@ -448,7 +451,6 @@ __all__ = [
     "ChatAppResponse",
     "ChatData",
     "ChatMessage",
-    "ChunkMode",
     "ClassificationResult",
     "ClassifierRule",
     "ClassifyJob",
@@ -500,6 +502,7 @@ __all__ = [
     "DataSourceUpdateCustomMetadataValue",
     "DataSourceUpdateDispatcherConfig",
     "DeleteParams",
+    "DeleteResponse",
     "DocumentChunkMode",
     "DocumentIngestionJobParams",
     "ElementSegmentationConfig",
@@ -539,7 +542,6 @@ __all__ = [
     "ExtractConfig",
     "ExtractConfigPriority",
     "ExtractJob",
-    "ExtractJobCreate",
     "ExtractJobCreateBatchDataSchemaOverride",
     "ExtractJobCreateBatchDataSchemaOverrideZeroValue",
     "ExtractJobCreateDataSchemaOverride",
@@ -568,6 +570,7 @@ __all__ = [
     "ExtractStatelessRequestDataSchema",
     "ExtractStatelessRequestDataSchemaZeroValue",
     "ExtractTarget",
+    "ExtractedTable",
     "FailPageMode",
     "FailureHandlingConfig",
     "File",
@@ -625,7 +628,6 @@ __all__ = [
     "LlamaExtractFeatureAvailability",
     "LlamaExtractModeAvailability",
     "LlamaExtractModeAvailabilityStatus",
-    "LlamaExtractSettings",
     "LlamaParseParameters",
     "LlamaParseParametersPriority",
     "LlamaParseSupportedFileExtensions",
@@ -642,7 +644,6 @@ __all__ = [
     "MetadataFilterValue",
     "MetadataFilters",
     "MetadataFiltersFiltersItem",
-    "MultimodalParseResolution",
     "NodeRelationship",
     "NoneChunkingConfig",
     "NoneSegmentationConfig",
@@ -664,6 +665,7 @@ __all__ = [
     "PaginatedResponseAggregateGroup",
     "PaginatedResponseClassifyJob",
     "PaginatedResponseQuotaConfiguration",
+    "PaginatedResponseSpreadsheetJob",
     "ParseConfiguration",
     "ParseConfigurationCreate",
     "ParseConfigurationFilter",
@@ -747,7 +749,6 @@ __all__ = [
     "PresignedUrl",
     "Project",
     "ProjectCreate",
-    "PromptConf",
     "PublicModelName",
     "QuotaConfiguration",
     "QuotaConfigurationConfigurationType",
@@ -768,14 +769,13 @@ __all__ = [
     "Role",
     "SchemaGenerationAvailability",
     "SchemaGenerationAvailabilityStatus",
-    "SchemaRelaxMode",
     "SemanticChunkingConfig",
     "SentenceChunkingConfig",
     "SparseModelConfig",
     "SparseModelType",
+    "SpreadsheetJob",
+    "SpreadsheetParsingConfig",
     "StatusEnum",
-    "StructMode",
-    "StructParseConf",
     "SupportedLlmModel",
     "SupportedLlmModelNames",
     "TextNode",
llama_cloud/resources/__init__.py CHANGED
@@ -41,6 +41,9 @@ from .llama_extract import (
     ExtractAgentUpdateDataSchemaZeroValue,
     ExtractJobCreateBatchDataSchemaOverride,
     ExtractJobCreateBatchDataSchemaOverrideZeroValue,
+    ExtractJobCreateDataSchemaOverride,
+    ExtractJobCreateDataSchemaOverrideZeroValue,
+    ExtractJobCreatePriority,
     ExtractSchemaValidateRequestDataSchema,
     ExtractSchemaValidateRequestDataSchemaZeroValue,
     ExtractStatelessRequestDataSchema,
@@ -78,6 +81,9 @@ __all__ = [
     "ExtractAgentUpdateDataSchemaZeroValue",
     "ExtractJobCreateBatchDataSchemaOverride",
     "ExtractJobCreateBatchDataSchemaOverrideZeroValue",
+    "ExtractJobCreateDataSchemaOverride",
+    "ExtractJobCreateDataSchemaOverrideZeroValue",
+    "ExtractJobCreatePriority",
     "ExtractSchemaValidateRequestDataSchema",
     "ExtractSchemaValidateRequestDataSchemaZeroValue",
     "ExtractStatelessRequestDataSchema",
llama_cloud/resources/alpha/client.py CHANGED
@@ -6,7 +6,6 @@ from json.decoder import JSONDecodeError
 
 from ...core.api_error import ApiError
 from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
-from ...core.jsonable_encoder import jsonable_encoder
 from ...core.remove_none_from_dict import remove_none_from_dict
 from ...errors.unprocessable_entity_error import UnprocessableEntityError
 from ...types.http_validation_error import HttpValidationError
@@ -20,40 +19,31 @@ try:
 except ImportError:
     import pydantic  # type: ignore
 
-# this is used as the default value for optional parameters
-OMIT = typing.cast(typing.Any, ...)
-
 
 class AlphaClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper
 
     def upload_file_v_2(
-        self,
-        *,
-        project_id: typing.Optional[str] = None,
-        organization_id: typing.Optional[str] = None,
-        configuration: str,
-        file: typing.Optional[str] = OMIT,
+        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
     ) -> ParsingJob:
         """
         Parameters:
             - project_id: typing.Optional[str].
 
             - organization_id: typing.Optional[str].
+        ---
+        from llama_cloud.client import LlamaCloud
 
-            - configuration: str.
-
-            - file: typing.Optional[str].
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.alpha.upload_file_v_2()
         """
-        _request: typing.Dict[str, typing.Any] = {"configuration": configuration}
-        if file is not OMIT:
-            _request["file"] = file
         _response = self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v2alpha1/parse/upload"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
-            json=jsonable_encoder(_request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -73,31 +63,25 @@ class AsyncAlphaClient:
         self._client_wrapper = client_wrapper
 
     async def upload_file_v_2(
-        self,
-        *,
-        project_id: typing.Optional[str] = None,
-        organization_id: typing.Optional[str] = None,
-        configuration: str,
-        file: typing.Optional[str] = OMIT,
+        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
     ) -> ParsingJob:
         """
         Parameters:
             - project_id: typing.Optional[str].
 
             - organization_id: typing.Optional[str].
+        ---
+        from llama_cloud.client import AsyncLlamaCloud
 
-            - configuration: str.
-
-            - file: typing.Optional[str].
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.alpha.upload_file_v_2()
         """
-        _request: typing.Dict[str, typing.Any] = {"configuration": configuration}
-        if file is not OMIT:
-            _request["file"] = file
        _response = await self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v2alpha1/parse/upload"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
-            json=jsonable_encoder(_request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
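
The net change to the alpha client is a narrower upload_file_v_2 signature: the required configuration argument, the optional file argument, and the JSON body built from them are gone in both the sync and async clients, leaving only the project_id and organization_id query parameters. A hedged usage sketch following the docstring example added in the diff; the token and project id below are placeholders:

    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(token="YOUR_TOKEN")  # placeholder token

    # 0.1.41 required configuration= and accepted file=, serializing both into a
    # JSON body; on 0.1.43 those keywords raise TypeError and no body is sent.
    job = client.alpha.upload_file_v_2(project_id="YOUR_PROJECT_ID")  # placeholder id
    print(job)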