llama-cloud 0.0.1__py3-none-any.whl → 0.0.2__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of llama-cloud has been flagged as potentially problematic (details are available on the registry's advisory page).
- llama_cloud/client.py +2 -2
- llama_cloud/resources/api_keys/client.py +16 -16
- llama_cloud/resources/billing/client.py +12 -12
- llama_cloud/resources/component_definitions/client.py +12 -12
- llama_cloud/resources/data_sinks/client.py +24 -24
- llama_cloud/resources/data_sources/client.py +24 -24
- llama_cloud/resources/deprecated/client.py +48 -48
- llama_cloud/resources/evals/client.py +40 -40
- llama_cloud/resources/files/client.py +24 -24
- llama_cloud/resources/parsing/client.py +48 -48
- llama_cloud/resources/pipelines/client.py +223 -152
- llama_cloud/resources/projects/client.py +64 -64
- {llama_cloud-0.0.1.dist-info → llama_cloud-0.0.2.dist-info}/METADATA +1 -1
- {llama_cloud-0.0.1.dist-info → llama_cloud-0.0.2.dist-info}/RECORD +16 -16
- {llama_cloud-0.0.1.dist-info → llama_cloud-0.0.2.dist-info}/LICENSE +0 -0
- {llama_cloud-0.0.1.dist-info → llama_cloud-0.0.2.dist-info}/WHEEL +0 -0
@@ -41,9 +41,9 @@ class ParsingClient:

        - name: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -75,9 +75,9 @@ class ParsingClient:
    Get a list of supported file extensions

    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -174,9 +174,9 @@ class ParsingClient:
    Get parsing usage for user

    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -205,9 +205,9 @@ class ParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -238,9 +238,9 @@ class ParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -271,9 +271,9 @@ class ParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -306,9 +306,9 @@ class ParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -341,9 +341,9 @@ class ParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -376,9 +376,9 @@ class ParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -409,9 +409,9 @@ class ParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -442,9 +442,9 @@ class ParsingClient:
    Get parsing history for user

    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -475,9 +475,9 @@ class ParsingClient:

        - filename: str.
    ---
-       from platform.client import
+       from platform.client import LlamaCloud

-       client =
+       client = LlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )
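Every ParsingClient hunk above makes the same substitution: the generated docstring examples, which previously left the client class name blank, now spell out LlamaCloud. A minimal consolidated sketch of the corrected synchronous usage is shown below; it assumes the class is exported from llama_cloud.client, as the two-line change to llama_cloud/client.py suggests, since the platform.client path printed in the docstrings appears to be a codegen artifact and is not verified here.

# Sketch only: import location is an assumption (the docstrings say platform.client,
# but the wheel ships the client module as llama_cloud/client.py).
from llama_cloud.client import LlamaCloud

# Placeholder token and base URL copied from the docstring examples in the diff.
client = LlamaCloud(
    token="YOUR_TOKEN",
    base_url="https://yourhost.com/path/to/api",
)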
@@ -518,9 +518,9 @@ class AsyncParsingClient:

        - name: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -552,9 +552,9 @@ class AsyncParsingClient:
    Get a list of supported file extensions

    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -651,9 +651,9 @@ class AsyncParsingClient:
    Get parsing usage for user

    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -682,9 +682,9 @@ class AsyncParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -715,9 +715,9 @@ class AsyncParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -748,9 +748,9 @@ class AsyncParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -783,9 +783,9 @@ class AsyncParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -818,9 +818,9 @@ class AsyncParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -853,9 +853,9 @@ class AsyncParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -886,9 +886,9 @@ class AsyncParsingClient:
    Parameters:
        - job_id: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -919,9 +919,9 @@ class AsyncParsingClient:
    Get parsing history for user

    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )

@@ -952,9 +952,9 @@ class AsyncParsingClient:

        - filename: str.
    ---
-       from platform.client import
+       from platform.client import AsyncLlamaCloud

-       client =
+       client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
            base_url="https://yourhost.com/path/to/api",
        )