llama-cloud 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of llama-cloud might be problematic.
- llama_cloud/client.py +2 -2
- llama_cloud/core/jsonable_encoder.py +3 -0
- llama_cloud/resources/api_keys/client.py +19 -16
- llama_cloud/resources/billing/client.py +15 -12
- llama_cloud/resources/component_definitions/client.py +15 -12
- llama_cloud/resources/data_sinks/client.py +33 -30
- llama_cloud/resources/data_sources/client.py +33 -30
- llama_cloud/resources/deprecated/client.py +51 -48
- llama_cloud/resources/evals/client.py +47 -44
- llama_cloud/resources/files/client.py +27 -24
- llama_cloud/resources/parsing/client.py +51 -48
- llama_cloud/resources/pipelines/client.py +238 -164
- llama_cloud/resources/projects/client.py +75 -72
- llama_cloud/types/api_key.py +3 -0
- llama_cloud/types/azure_open_ai_embedding.py +3 -0
- llama_cloud/types/base.py +3 -0
- llama_cloud/types/base_prompt_template.py +3 -0
- llama_cloud/types/bedrock_embedding.py +3 -0
- llama_cloud/types/chat_message.py +3 -0
- llama_cloud/types/cloud_az_storage_blob_data_source.py +3 -0
- llama_cloud/types/cloud_chroma_vector_store.py +3 -0
- llama_cloud/types/cloud_document.py +3 -0
- llama_cloud/types/cloud_document_create.py +3 -0
- llama_cloud/types/cloud_gcs_data_source.py +3 -0
- llama_cloud/types/cloud_google_drive_data_source.py +3 -0
- llama_cloud/types/cloud_one_drive_data_source.py +3 -0
- llama_cloud/types/cloud_pinecone_vector_store.py +3 -0
- llama_cloud/types/cloud_postgres_vector_store.py +3 -0
- llama_cloud/types/cloud_qdrant_vector_store.py +3 -0
- llama_cloud/types/cloud_s_3_data_source.py +3 -0
- llama_cloud/types/cloud_sharepoint_data_source.py +3 -0
- llama_cloud/types/cloud_weaviate_vector_store.py +3 -0
- llama_cloud/types/code_splitter.py +3 -0
- llama_cloud/types/cohere_embedding.py +3 -0
- llama_cloud/types/configurable_transformation_definition.py +3 -0
- llama_cloud/types/configured_transformation_item.py +3 -0
- llama_cloud/types/data_sink.py +3 -0
- llama_cloud/types/data_sink_create.py +3 -0
- llama_cloud/types/data_sink_definition.py +3 -0
- llama_cloud/types/data_source.py +3 -0
- llama_cloud/types/data_source_create.py +3 -0
- llama_cloud/types/data_source_definition.py +3 -0
- llama_cloud/types/eval_dataset.py +3 -0
- llama_cloud/types/eval_dataset_job_params.py +3 -0
- llama_cloud/types/eval_dataset_job_record.py +3 -0
- llama_cloud/types/eval_execution_params.py +3 -0
- llama_cloud/types/eval_execution_params_override.py +3 -0
- llama_cloud/types/eval_llm_model_data.py +3 -0
- llama_cloud/types/eval_question.py +3 -0
- llama_cloud/types/eval_question_create.py +3 -0
- llama_cloud/types/eval_question_result.py +3 -0
- llama_cloud/types/file.py +3 -0
- llama_cloud/types/gemini_embedding.py +3 -0
- llama_cloud/types/html_node_parser.py +3 -0
- llama_cloud/types/http_validation_error.py +3 -0
- llama_cloud/types/hugging_face_inference_api_embedding.py +3 -0
- llama_cloud/types/json_node_parser.py +3 -0
- llama_cloud/types/llm.py +3 -0
- llama_cloud/types/local_eval.py +3 -0
- llama_cloud/types/local_eval_results.py +3 -0
- llama_cloud/types/local_eval_sets.py +3 -0
- llama_cloud/types/markdown_element_node_parser.py +3 -0
- llama_cloud/types/markdown_node_parser.py +3 -0
- llama_cloud/types/metadata_filter.py +3 -0
- llama_cloud/types/metadata_filters.py +3 -0
- llama_cloud/types/metric_result.py +3 -0
- llama_cloud/types/node_parser.py +3 -0
- llama_cloud/types/open_ai_embedding.py +3 -0
- llama_cloud/types/parsing_history_item.py +3 -0
- llama_cloud/types/parsing_job.py +3 -0
- llama_cloud/types/parsing_job_json_result.py +3 -0
- llama_cloud/types/parsing_job_markdown_result.py +3 -0
- llama_cloud/types/parsing_job_text_result.py +3 -0
- llama_cloud/types/parsing_usage.py +3 -0
- llama_cloud/types/pipeline.py +3 -0
- llama_cloud/types/pipeline_create.py +3 -0
- llama_cloud/types/pipeline_data_source.py +3 -0
- llama_cloud/types/pipeline_data_source_create.py +3 -0
- llama_cloud/types/pipeline_deployment.py +3 -0
- llama_cloud/types/pipeline_file.py +3 -0
- llama_cloud/types/pipeline_file_create.py +3 -0
- llama_cloud/types/pipeline_file_status_response.py +3 -0
- llama_cloud/types/preset_retrieval_params.py +3 -0
- llama_cloud/types/presigned_url.py +3 -0
- llama_cloud/types/project.py +3 -0
- llama_cloud/types/project_create.py +3 -0
- llama_cloud/types/prompt_mixin_prompts.py +3 -0
- llama_cloud/types/prompt_spec.py +3 -0
- llama_cloud/types/related_node_info.py +3 -0
- llama_cloud/types/retrieve_results.py +3 -0
- llama_cloud/types/sentence_splitter.py +3 -0
- llama_cloud/types/simple_file_node_parser.py +3 -0
- llama_cloud/types/supported_eval_llm_model.py +3 -0
- llama_cloud/types/text_node.py +3 -0
- llama_cloud/types/text_node_with_score.py +3 -0
- llama_cloud/types/token_text_splitter.py +3 -0
- llama_cloud/types/validation_error.py +3 -0
- {llama_cloud-0.0.1.dist-info → llama_cloud-0.0.3.dist-info}/METADATA +1 -1
- llama_cloud-0.0.3.dist-info/RECORD +173 -0
- llama_cloud-0.0.1.dist-info/RECORD +0 -173
- {llama_cloud-0.0.1.dist-info → llama_cloud-0.0.3.dist-info}/LICENSE +0 -0
- {llama_cloud-0.0.1.dist-info → llama_cloud-0.0.3.dist-info}/WHEEL +0 -0
@@ -19,6 +19,9 @@ from ...types.project_create import ProjectCreate
 from ...types.prompt_mixin_prompts import PromptMixinPrompts

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore
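This hunk, and the identical one repeated in every generated module further down, adds a version guard in front of the existing pydantic.v1 fallback: Pydantic is imported directly first, and if the installed version is 1.x an ImportError is raised deliberately so the plain import branch is taken. A minimal standalone sketch of the shim as it looks after this release; the ExampleModel class is illustrative only and not part of the package:

# Version-guarded Pydantic import shim added in 0.0.3.
# Pydantic 2.x: the guard passes and pydantic.v1 is re-bound as `pydantic`.
# Pydantic 1.x: the guard raises ImportError, so the plain import is used.
try:
    import pydantic
    if pydantic.__version__.startswith("1."):
        raise ImportError
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class ExampleModel(pydantic.BaseModel):  # hypothetical model, for illustration only
    name: str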
@@ -38,9 +41,9 @@ class ProjectsClient:
 Parameters:
 - project_name: typing.Optional[str].
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -70,10 +73,10 @@ class ProjectsClient:
 Parameters:
 - request: ProjectCreate.
 ---
-from
-from
+from llama_cloud import ProjectCreate
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -108,10 +111,10 @@ class ProjectsClient:
 Parameters:
 - request: ProjectCreate.
 ---
-from
-from
+from llama_cloud import ProjectCreate
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -145,9 +148,9 @@ class ProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -180,9 +183,9 @@ class ProjectsClient:

 - name: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -215,9 +218,9 @@ class ProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -248,9 +251,9 @@ class ProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -285,9 +288,9 @@ class ProjectsClient:

 - name: str. The name of the EvalDataset.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -328,9 +331,9 @@ class ProjectsClient:

 - results: typing.Dict[str, typing.List[LocalEval]]. The eval results.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -366,9 +369,9 @@ class ProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -399,9 +402,9 @@ class ProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -436,9 +439,9 @@ class ProjectsClient:

 - local_eval_set_id: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -473,9 +476,9 @@ class ProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -508,10 +511,10 @@ class ProjectsClient:

 - request: PromptMixinPrompts.
 ---
-from
-from
+from llama_cloud import PromptMixinPrompts
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -554,10 +557,10 @@ class ProjectsClient:

 - request: PromptMixinPrompts.
 ---
-from
-from
+from llama_cloud import PromptMixinPrompts
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -599,9 +602,9 @@ class ProjectsClient:

 - prompt_set_id: str.
 ---
-from
+from llama_cloud.client import LlamaCloud

-client =
+client = LlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -640,9 +643,9 @@ class AsyncProjectsClient:
 Parameters:
 - project_name: typing.Optional[str].
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -672,10 +675,10 @@ class AsyncProjectsClient:
 Parameters:
 - request: ProjectCreate.
 ---
-from
-from
+from llama_cloud import ProjectCreate
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -710,10 +713,10 @@ class AsyncProjectsClient:
 Parameters:
 - request: ProjectCreate.
 ---
-from
-from
+from llama_cloud import ProjectCreate
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -747,9 +750,9 @@ class AsyncProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -782,9 +785,9 @@ class AsyncProjectsClient:

 - name: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -817,9 +820,9 @@ class AsyncProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -850,9 +853,9 @@ class AsyncProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -887,9 +890,9 @@ class AsyncProjectsClient:

 - name: str. The name of the EvalDataset.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -930,9 +933,9 @@ class AsyncProjectsClient:

 - results: typing.Dict[str, typing.List[LocalEval]]. The eval results.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -968,9 +971,9 @@ class AsyncProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -1001,9 +1004,9 @@ class AsyncProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -1038,9 +1041,9 @@ class AsyncProjectsClient:

 - local_eval_set_id: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -1075,9 +1078,9 @@ class AsyncProjectsClient:
 Parameters:
 - project_id: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -1110,10 +1113,10 @@ class AsyncProjectsClient:

 - request: PromptMixinPrompts.
 ---
-from
-from
+from llama_cloud import PromptMixinPrompts
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -1156,10 +1159,10 @@ class AsyncProjectsClient:

 - request: PromptMixinPrompts.
 ---
-from
-from
+from llama_cloud import PromptMixinPrompts
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )

@@ -1201,9 +1204,9 @@ class AsyncProjectsClient:

 - prompt_set_id: str.
 ---
-from
+from llama_cloud.client import AsyncLlamaCloud

-client =
+client = AsyncLlamaCloud(
     token="YOUR_TOKEN",
     base_url="https://yourhost.com/path/to/api",
 )
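The docstring hunks above only repair truncated example code: the 0.0.1 wheel shipped docstrings whose examples stopped at bare "from" and "client =" lines, and 0.0.3 completes them with real imports and a full client constructor. A usage sketch assembled from those docstrings; the projects attribute and any method names are assumptions, since only the imports and constructor arguments appear in this diff:

import asyncio

from llama_cloud.client import AsyncLlamaCloud, LlamaCloud

# Synchronous client, constructed exactly as in the repaired docstring examples.
client = LlamaCloud(
    token="YOUR_TOKEN",
    base_url="https://yourhost.com/path/to/api",
)
# Resource calls are assumed to hang off client attributes, e.g.
# client.projects.<method>(...); concrete method names are not shown in this diff.


async def main() -> None:
    # Async variant used by the AsyncProjectsClient docstrings.
    async_client = AsyncLlamaCloud(
        token="YOUR_TOKEN",
        base_url="https://yourhost.com/path/to/api",
    )
    # await async_client.projects.<method>(...)  # same caveat as above


asyncio.run(main())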
llama_cloud/types/api_key.py
CHANGED
@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore
llama_cloud/types/base.py
CHANGED
@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -7,6 +7,9 @@ from ..core.datetime_utils import serialize_datetime
 from .message_role import MessageRole

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -6,6 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -8,6 +8,9 @@ from .configurable_transformation_names import ConfigurableTransformationNames
 from .transformation_category_names import TransformationCategoryNames

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore

@@ -8,6 +8,9 @@ from .configurable_transformation_names import ConfigurableTransformationNames
 from .configured_transformation_item_component import ConfiguredTransformationItemComponent

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore
llama_cloud/types/data_sink.py
CHANGED
@@ -8,6 +8,9 @@ from .configurable_data_sink_names import ConfigurableDataSinkNames
 from .data_sink_component import DataSinkComponent

 try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
     import pydantic.v1 as pydantic  # type: ignore
 except ImportError:
     import pydantic  # type: ignore