llama-cloud 0.1.5__py3-none-any.whl → 0.1.7a1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
- llama_cloud/__init__.py +138 -2
- llama_cloud/client.py +15 -0
- llama_cloud/resources/__init__.py +17 -1
- llama_cloud/resources/chat_apps/__init__.py +2 -0
- llama_cloud/resources/chat_apps/client.py +620 -0
- llama_cloud/resources/data_sinks/client.py +2 -2
- llama_cloud/resources/data_sources/client.py +2 -2
- llama_cloud/resources/embedding_model_configs/client.py +4 -4
- llama_cloud/resources/files/__init__.py +2 -2
- llama_cloud/resources/files/client.py +21 -0
- llama_cloud/resources/files/types/__init__.py +2 -1
- llama_cloud/resources/files/types/file_create_permission_info_value.py +7 -0
- llama_cloud/resources/jobs/__init__.py +2 -0
- llama_cloud/resources/jobs/client.py +148 -0
- llama_cloud/resources/llama_extract/__init__.py +5 -0
- llama_cloud/resources/llama_extract/client.py +1038 -0
- llama_cloud/resources/llama_extract/types/__init__.py +6 -0
- llama_cloud/resources/llama_extract/types/extract_agent_create_data_schema_value.py +7 -0
- llama_cloud/resources/llama_extract/types/extract_agent_update_data_schema_value.py +7 -0
- llama_cloud/resources/organizations/client.py +14 -14
- llama_cloud/resources/parsing/client.py +480 -229
- llama_cloud/resources/pipelines/client.py +182 -126
- llama_cloud/resources/projects/client.py +210 -102
- llama_cloud/resources/reports/__init__.py +5 -0
- llama_cloud/resources/reports/client.py +1198 -0
- llama_cloud/resources/reports/types/__init__.py +7 -0
- llama_cloud/resources/reports/types/update_report_plan_api_v_1_reports_report_id_plan_patch_request_action.py +25 -0
- llama_cloud/resources/retrievers/__init__.py +2 -0
- llama_cloud/resources/retrievers/client.py +654 -0
- llama_cloud/types/__init__.py +124 -2
- llama_cloud/types/{chat_message.py → app_schema_chat_chat_message.py} +2 -2
- llama_cloud/types/chat_app.py +44 -0
- llama_cloud/types/chat_app_response.py +41 -0
- llama_cloud/types/cloud_az_storage_blob_data_source.py +1 -0
- llama_cloud/types/cloud_box_data_source.py +1 -0
- llama_cloud/types/cloud_confluence_data_source.py +1 -0
- llama_cloud/types/cloud_google_drive_data_source.py +1 -0
- llama_cloud/types/cloud_jira_data_source.py +1 -0
- llama_cloud/types/cloud_notion_page_data_source.py +1 -0
- llama_cloud/types/cloud_one_drive_data_source.py +1 -0
- llama_cloud/types/cloud_postgres_vector_store.py +1 -0
- llama_cloud/types/cloud_s_3_data_source.py +1 -0
- llama_cloud/types/cloud_sharepoint_data_source.py +1 -0
- llama_cloud/types/cloud_slack_data_source.py +1 -0
- llama_cloud/types/composite_retrieval_mode.py +21 -0
- llama_cloud/types/composite_retrieval_result.py +38 -0
- llama_cloud/types/composite_retrieved_text_node.py +42 -0
- llama_cloud/types/data_sink.py +1 -1
- llama_cloud/types/data_sink_create.py +1 -1
- llama_cloud/types/data_source.py +1 -1
- llama_cloud/types/data_source_create.py +1 -1
- llama_cloud/types/edit_suggestion.py +39 -0
- llama_cloud/types/eval_dataset_job_record.py +1 -0
- llama_cloud/types/extract_agent.py +45 -0
- llama_cloud/types/extract_agent_data_schema_value.py +5 -0
- llama_cloud/types/extract_config.py +40 -0
- llama_cloud/types/extract_job.py +35 -0
- llama_cloud/types/extract_job_create.py +40 -0
- llama_cloud/types/extract_job_create_data_schema_override_value.py +7 -0
- llama_cloud/types/extract_mode.py +17 -0
- llama_cloud/types/extract_resultset.py +46 -0
- llama_cloud/types/extract_resultset_data.py +11 -0
- llama_cloud/types/extract_resultset_data_item_value.py +7 -0
- llama_cloud/types/extract_resultset_data_zero_value.py +7 -0
- llama_cloud/types/extract_resultset_extraction_metadata_value.py +7 -0
- llama_cloud/types/file.py +3 -0
- llama_cloud/types/file_permission_info_value.py +5 -0
- llama_cloud/types/filter_condition.py +9 -1
- llama_cloud/types/filter_operator.py +4 -0
- llama_cloud/types/image_block.py +35 -0
- llama_cloud/types/input_message.py +1 -1
- llama_cloud/types/job_name_mapping.py +4 -0
- llama_cloud/types/job_names.py +89 -0
- llama_cloud/types/job_record.py +57 -0
- llama_cloud/types/job_record_with_usage_metrics.py +36 -0
- llama_cloud/types/llama_index_core_base_llms_types_chat_message.py +39 -0
- llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py +33 -0
- llama_cloud/types/llama_parse_parameters.py +15 -0
- llama_cloud/types/llm.py +1 -0
- llama_cloud/types/llm_model_data.py +1 -0
- llama_cloud/types/llm_parameters.py +1 -0
- llama_cloud/types/managed_ingestion_status.py +4 -0
- llama_cloud/types/managed_ingestion_status_response.py +1 -0
- llama_cloud/types/object_type.py +4 -0
- llama_cloud/types/organization.py +5 -0
- llama_cloud/types/paginated_jobs_history_with_metrics.py +35 -0
- llama_cloud/types/paginated_report_response.py +35 -0
- llama_cloud/types/parse_plan_level.py +21 -0
- llama_cloud/types/parsing_job_structured_result.py +32 -0
- llama_cloud/types/pipeline_create.py +3 -1
- llama_cloud/types/pipeline_data_source.py +1 -1
- llama_cloud/types/pipeline_file.py +3 -0
- llama_cloud/types/pipeline_file_permission_info_value.py +7 -0
- llama_cloud/types/playground_session.py +2 -2
- llama_cloud/types/preset_retrieval_params.py +1 -0
- llama_cloud/types/progress_event.py +44 -0
- llama_cloud/types/progress_event_status.py +33 -0
- llama_cloud/types/prompt_spec.py +2 -2
- llama_cloud/types/related_node_info.py +2 -2
- llama_cloud/types/related_node_info_node_type.py +7 -0
- llama_cloud/types/report.py +33 -0
- llama_cloud/types/report_block.py +34 -0
- llama_cloud/types/report_block_dependency.py +29 -0
- llama_cloud/types/report_create_response.py +31 -0
- llama_cloud/types/report_event_item.py +40 -0
- llama_cloud/types/report_event_item_event_data.py +45 -0
- llama_cloud/types/report_event_type.py +37 -0
- llama_cloud/types/report_metadata.py +39 -0
- llama_cloud/types/report_plan.py +36 -0
- llama_cloud/types/report_plan_block.py +36 -0
- llama_cloud/types/report_query.py +33 -0
- llama_cloud/types/report_response.py +41 -0
- llama_cloud/types/report_state.py +37 -0
- llama_cloud/types/report_state_event.py +38 -0
- llama_cloud/types/report_update_event.py +38 -0
- llama_cloud/types/retrieve_results.py +1 -1
- llama_cloud/types/retriever.py +45 -0
- llama_cloud/types/retriever_create.py +37 -0
- llama_cloud/types/retriever_pipeline.py +37 -0
- llama_cloud/types/status_enum.py +4 -0
- llama_cloud/types/supported_llm_model_names.py +4 -0
- llama_cloud/types/text_block.py +31 -0
- llama_cloud/types/text_node.py +13 -6
- llama_cloud/types/usage_metric_response.py +34 -0
- llama_cloud/types/user_job_record.py +32 -0
- {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/METADATA +3 -1
- {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/RECORD +129 -59
- {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/WHEEL +1 -1
- {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/LICENSE +0 -0
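
The dominant change in the projects client, and the source of most of the +210/-102 churn shown below, is that every `ProjectsClient` method gains an optional `organization_id` keyword, forwarded as a query parameter, while `project_id` is loosened to `typing.Optional[str]` (presumably so the backend can fall back to a default project when it is omitted or None). A minimal sketch of the new call shapes, with a placeholder token and placeholder IDs:

```python
from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_TOKEN")

# project_id stays the first positional argument, but is now typed
# Optional[str] rather than str.
project = client.projects.get_project("my-project-id")

# organization_id is the new keyword-only argument; when left at its None
# default it is stripped from the query string by remove_none_from_dict.
datasets = client.projects.list_datasets_for_project(
    "my-project-id",
    organization_id="my-organization-id",
)
```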
--- llama_cloud-0.1.5/llama_cloud/resources/projects/client.py
+++ llama_cloud-0.1.7a1/llama_cloud/resources/projects/client.py
@@ -149,25 +149,26 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def get_project(self, project_id: str) -> Project:
+    def get_project(self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None) -> Project:
         """
         Get a project by ID.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

         client = LlamaCloud(
             token="YOUR_TOKEN",
         )
-        client.projects.get_project(
-            project_id="string",
-        )
+        client.projects.get_project()
         """
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -181,12 +182,16 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def update_existing_project(self, project_id: str, *, name: str) -> Project:
+    def update_existing_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
+    ) -> Project:
         """
         Update an existing project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].

             - name: str.
         ---
@@ -196,13 +201,13 @@ class ProjectsClient:
             token="YOUR_TOKEN",
         )
         client.projects.update_existing_project(
-            project_id="string",
             name="string",
         )
         """
         _response = self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder({"name": name}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -217,25 +222,26 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def delete_project(self, project_id: str) -> None:
+    def delete_project(self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None) -> None:
         """
         Delete a project by ID.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

         client = LlamaCloud(
             token="YOUR_TOKEN",
         )
-        client.projects.delete_project(
-            project_id="string",
-        )
+        client.projects.delete_project()
         """
         _response = self._client_wrapper.httpx_client.request(
             "DELETE",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -284,27 +290,30 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def list_datasets_for_project(self, project_id: str) -> typing.List[EvalDataset]:
+    def list_datasets_for_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> typing.List[EvalDataset]:
         """
         List eval datasets for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

         client = LlamaCloud(
             token="YOUR_TOKEN",
         )
-        client.projects.list_datasets_for_project(
-            project_id="string",
-        )
+        client.projects.list_datasets_for_project()
         """
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/eval/dataset"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -318,12 +327,16 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def create_eval_dataset_for_project(self, project_id: str, *, name: str) -> EvalDataset:
+    def create_eval_dataset_for_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
+    ) -> EvalDataset:
         """
         Create a new eval dataset for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].

             - name: str. The name of the EvalDataset.
         ---
@@ -333,7 +346,6 @@ class ProjectsClient:
             token="YOUR_TOKEN",
         )
         client.projects.create_eval_dataset_for_project(
-            project_id="string",
             name="string",
         )
         """
@@ -342,6 +354,7 @@ class ProjectsClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/eval/dataset"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder({"name": name}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -357,13 +370,20 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     def create_local_eval_set_for_project(
-        self, project_id: str, *, app_name: str, results: typing.Dict[str, typing.List[LocalEval]]
+        self,
+        project_id: typing.Optional[str],
+        *,
+        organization_id: typing.Optional[str] = None,
+        app_name: str,
+        results: typing.Dict[str, typing.List[LocalEval]],
     ) -> typing.List[LocalEvalResults]:
         """
         Create a new local eval set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].

             - app_name: str. The name of the app.

@@ -375,7 +395,6 @@ class ProjectsClient:
             token="YOUR_TOKEN",
         )
         client.projects.create_local_eval_set_for_project(
-            project_id="string",
             app_name="string",
             results={"string": []},
         )
@@ -385,6 +404,7 @@ class ProjectsClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localevalset"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder({"app_name": app_name, "results": results}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -399,25 +419,28 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def list_local_evals_for_project(self, project_id: str) -> typing.List[LocalEvalResults]:
+    def list_local_evals_for_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> typing.List[LocalEvalResults]:
         """
         List local eval results for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

         client = LlamaCloud(
             token="YOUR_TOKEN",
         )
-        client.projects.list_local_evals_for_project(
-            project_id="string",
-        )
+        client.projects.list_local_evals_for_project()
         """
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localeval"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -431,27 +454,30 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def list_local_eval_sets_for_project(self, project_id: str) -> typing.List[LocalEvalSets]:
+    def list_local_eval_sets_for_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> typing.List[LocalEvalSets]:
         """
         List local eval sets for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

         client = LlamaCloud(
             token="YOUR_TOKEN",
         )
-        client.projects.list_local_eval_sets_for_project(
-            project_id="string",
-        )
+        client.projects.list_local_eval_sets_for_project()
         """
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localevalsets"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -465,14 +491,18 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def delete_local_eval_set(self, project_id: str, local_eval_set_id: str) -> typing.Any:
+    def delete_local_eval_set(
+        self, project_id: typing.Optional[str], local_eval_set_id: str, *, organization_id: typing.Optional[str] = None
+    ) -> typing.Any:
         """
         Delete a local eval set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].

             - local_eval_set_id: str.
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -480,7 +510,6 @@ class ProjectsClient:
             token="YOUR_TOKEN",
         )
         client.projects.delete_local_eval_set(
-            project_id="string",
             local_eval_set_id="string",
         )
         """
@@ -490,6 +519,7 @@ class ProjectsClient:
                 f"{self._client_wrapper.get_base_url()}/",
                 f"api/v1/projects/{project_id}/localevalset/{local_eval_set_id}",
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -503,25 +533,28 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def list_promptmixin_prompts(self, project_id: str) -> typing.List[PromptMixinPrompts]:
+    def list_promptmixin_prompts(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> typing.List[PromptMixinPrompts]:
         """
         List PromptMixin prompt sets for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

         client = LlamaCloud(
             token="YOUR_TOKEN",
         )
-        client.projects.list_promptmixin_prompts(
-            project_id="string",
-        )
+        client.projects.list_promptmixin_prompts()
         """
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -535,12 +568,20 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def create_prompt_mixin_prompts(self, project_id: str, *, request: PromptMixinPrompts) -> PromptMixinPrompts:
+    def create_prompt_mixin_prompts(
+        self,
+        project_id: typing.Optional[str],
+        *,
+        organization_id: typing.Optional[str] = None,
+        request: PromptMixinPrompts,
+    ) -> PromptMixinPrompts:
         """
         Create a new PromptMixin prompt set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].

             - request: PromptMixinPrompts.
         ---
@@ -551,7 +592,6 @@ class ProjectsClient:
             token="YOUR_TOKEN",
         )
         client.projects.create_prompt_mixin_prompts(
-            project_id="string",
             request=PromptMixinPrompts(
                 project_id="string",
                 name="string",
@@ -562,6 +602,7 @@ class ProjectsClient:
         _response = self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -577,16 +618,23 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     def update_promptmixin_prompts(
-        self, project_id: str, prompt_set_id: str, *, request: PromptMixinPrompts
+        self,
+        project_id: typing.Optional[str],
+        prompt_set_id: str,
+        *,
+        organization_id: typing.Optional[str] = None,
+        request: PromptMixinPrompts,
     ) -> PromptMixinPrompts:
         """
         Update a PromptMixin prompt set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].

             - prompt_set_id: str.

+            - organization_id: typing.Optional[str].
+
             - request: PromptMixinPrompts.
         ---
         from llama_cloud import PromptMixinPrompts
@@ -596,7 +644,6 @@ class ProjectsClient:
             token="YOUR_TOKEN",
         )
         client.projects.update_promptmixin_prompts(
-            project_id="string",
             prompt_set_id="string",
             request=PromptMixinPrompts(
                 project_id="string",
@@ -610,6 +657,7 @@ class ProjectsClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts/{prompt_set_id}"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -624,14 +672,18 @@ class ProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def delete_prompt_mixin_prompts(self, project_id: str, prompt_set_id: str) -> typing.Any:
+    def delete_prompt_mixin_prompts(
+        self, project_id: typing.Optional[str], prompt_set_id: str, *, organization_id: typing.Optional[str] = None
+    ) -> typing.Any:
         """
         Delete a PromptMixin prompt set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].

             - prompt_set_id: str.
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -639,7 +691,6 @@ class ProjectsClient:
             token="YOUR_TOKEN",
         )
         client.projects.delete_prompt_mixin_prompts(
-            project_id="string",
             prompt_set_id="string",
         )
         """
@@ -648,6 +699,7 @@ class ProjectsClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts/{prompt_set_id}"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -780,25 +832,28 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def get_project(self, project_id: str) -> Project:
+    async def get_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> Project:
         """
         Get a project by ID.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
        from llama_cloud.client import AsyncLlamaCloud

         client = AsyncLlamaCloud(
             token="YOUR_TOKEN",
         )
-        await client.projects.get_project(
-            project_id="string",
-        )
+        await client.projects.get_project()
         """
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -812,12 +867,16 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def update_existing_project(self, project_id: str, *, name: str) -> Project:
+    async def update_existing_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
+    ) -> Project:
         """
         Update an existing project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].

             - name: str.
         ---
@@ -827,13 +886,13 @@ class AsyncProjectsClient:
             token="YOUR_TOKEN",
         )
         await client.projects.update_existing_project(
-            project_id="string",
             name="string",
         )
         """
         _response = await self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder({"name": name}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -848,25 +907,28 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def delete_project(self, project_id: str) -> None:
+    async def delete_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> None:
         """
         Delete a project by ID.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

         client = AsyncLlamaCloud(
             token="YOUR_TOKEN",
         )
-        await client.projects.delete_project(
-            project_id="string",
-        )
+        await client.projects.delete_project()
         """
         _response = await self._client_wrapper.httpx_client.request(
             "DELETE",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -915,27 +977,30 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def list_datasets_for_project(self, project_id: str) -> typing.List[EvalDataset]:
+    async def list_datasets_for_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> typing.List[EvalDataset]:
         """
         List eval datasets for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

         client = AsyncLlamaCloud(
             token="YOUR_TOKEN",
         )
-        await client.projects.list_datasets_for_project(
-            project_id="string",
-        )
+        await client.projects.list_datasets_for_project()
         """
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/eval/dataset"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -949,12 +1014,16 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def create_eval_dataset_for_project(self, project_id: str, *, name: str) -> EvalDataset:
+    async def create_eval_dataset_for_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
+    ) -> EvalDataset:
         """
         Create a new eval dataset for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].

             - name: str. The name of the EvalDataset.
         ---
@@ -964,7 +1033,6 @@ class AsyncProjectsClient:
             token="YOUR_TOKEN",
         )
         await client.projects.create_eval_dataset_for_project(
-            project_id="string",
             name="string",
         )
         """
@@ -973,6 +1041,7 @@ class AsyncProjectsClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/eval/dataset"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder({"name": name}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -988,13 +1057,20 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     async def create_local_eval_set_for_project(
-        self, project_id: str, *, app_name: str, results: typing.Dict[str, typing.List[LocalEval]]
+        self,
+        project_id: typing.Optional[str],
+        *,
+        organization_id: typing.Optional[str] = None,
+        app_name: str,
+        results: typing.Dict[str, typing.List[LocalEval]],
     ) -> typing.List[LocalEvalResults]:
         """
         Create a new local eval set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].

             - app_name: str. The name of the app.

@@ -1006,7 +1082,6 @@ class AsyncProjectsClient:
             token="YOUR_TOKEN",
         )
         await client.projects.create_local_eval_set_for_project(
-            project_id="string",
             app_name="string",
             results={"string": []},
         )
@@ -1016,6 +1091,7 @@ class AsyncProjectsClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localevalset"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder({"app_name": app_name, "results": results}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -1030,25 +1106,28 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def list_local_evals_for_project(self, project_id: str) -> typing.List[LocalEvalResults]:
+    async def list_local_evals_for_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> typing.List[LocalEvalResults]:
         """
         List local eval results for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

         client = AsyncLlamaCloud(
             token="YOUR_TOKEN",
         )
-        await client.projects.list_local_evals_for_project(
-            project_id="string",
-        )
+        await client.projects.list_local_evals_for_project()
         """
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localeval"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -1062,27 +1141,30 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def list_local_eval_sets_for_project(self, project_id: str) -> typing.List[LocalEvalSets]:
+    async def list_local_eval_sets_for_project(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> typing.List[LocalEvalSets]:
         """
         List local eval sets for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

         client = AsyncLlamaCloud(
             token="YOUR_TOKEN",
         )
-        await client.projects.list_local_eval_sets_for_project(
-            project_id="string",
-        )
+        await client.projects.list_local_eval_sets_for_project()
         """
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localevalsets"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -1096,14 +1178,18 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def delete_local_eval_set(self, project_id: str, local_eval_set_id: str) -> typing.Any:
+    async def delete_local_eval_set(
+        self, project_id: typing.Optional[str], local_eval_set_id: str, *, organization_id: typing.Optional[str] = None
+    ) -> typing.Any:
         """
         Delete a local eval set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].

             - local_eval_set_id: str.
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -1111,7 +1197,6 @@ class AsyncProjectsClient:
             token="YOUR_TOKEN",
         )
         await client.projects.delete_local_eval_set(
-            project_id="string",
             local_eval_set_id="string",
         )
         """
@@ -1121,6 +1206,7 @@ class AsyncProjectsClient:
                 f"{self._client_wrapper.get_base_url()}/",
                 f"api/v1/projects/{project_id}/localevalset/{local_eval_set_id}",
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -1134,25 +1220,28 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def list_promptmixin_prompts(self, project_id: str) -> typing.List[PromptMixinPrompts]:
+    async def list_promptmixin_prompts(
+        self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+    ) -> typing.List[PromptMixinPrompts]:
         """
         List PromptMixin prompt sets for a project.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

         client = AsyncLlamaCloud(
             token="YOUR_TOKEN",
         )
-        await client.projects.list_promptmixin_prompts(
-            project_id="string",
-        )
+        await client.projects.list_promptmixin_prompts()
         """
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -1166,12 +1255,20 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def create_prompt_mixin_prompts(self, project_id: str, *, request: PromptMixinPrompts) -> PromptMixinPrompts:
+    async def create_prompt_mixin_prompts(
+        self,
+        project_id: typing.Optional[str],
+        *,
+        organization_id: typing.Optional[str] = None,
+        request: PromptMixinPrompts,
+    ) -> PromptMixinPrompts:
         """
         Create a new PromptMixin prompt set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].

             - request: PromptMixinPrompts.
         ---
@@ -1182,7 +1279,6 @@ class AsyncProjectsClient:
             token="YOUR_TOKEN",
         )
         await client.projects.create_prompt_mixin_prompts(
-            project_id="string",
             request=PromptMixinPrompts(
                 project_id="string",
                 name="string",
@@ -1193,6 +1289,7 @@ class AsyncProjectsClient:
         _response = await self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts"),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -1208,16 +1305,23 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     async def update_promptmixin_prompts(
-        self, project_id: str, prompt_set_id: str, *, request: PromptMixinPrompts
+        self,
+        project_id: typing.Optional[str],
+        prompt_set_id: str,
+        *,
+        organization_id: typing.Optional[str] = None,
+        request: PromptMixinPrompts,
     ) -> PromptMixinPrompts:
         """
         Update a PromptMixin prompt set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].

             - prompt_set_id: str.

+            - organization_id: typing.Optional[str].
+
             - request: PromptMixinPrompts.
         ---
         from llama_cloud import PromptMixinPrompts
@@ -1227,7 +1331,6 @@ class AsyncProjectsClient:
             token="YOUR_TOKEN",
         )
         await client.projects.update_promptmixin_prompts(
-            project_id="string",
             prompt_set_id="string",
             request=PromptMixinPrompts(
                 project_id="string",
@@ -1241,6 +1344,7 @@ class AsyncProjectsClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts/{prompt_set_id}"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             json=jsonable_encoder(request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -1255,14 +1359,18 @@ class AsyncProjectsClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def delete_prompt_mixin_prompts(self, project_id: str, prompt_set_id: str) -> typing.Any:
+    async def delete_prompt_mixin_prompts(
+        self, project_id: typing.Optional[str], prompt_set_id: str, *, organization_id: typing.Optional[str] = None
+    ) -> typing.Any:
         """
         Delete a PromptMixin prompt set.

         Parameters:
-            - project_id: str.
+            - project_id: typing.Optional[str].

             - prompt_set_id: str.
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -1270,7 +1378,6 @@ class AsyncProjectsClient:
             token="YOUR_TOKEN",
         )
         await client.projects.delete_prompt_mixin_prompts(
-            project_id="string",
             prompt_set_id="string",
         )
         """
@@ -1279,6 +1386,7 @@ class AsyncProjectsClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts/{prompt_set_id}"
             ),
+            params=remove_none_from_dict({"organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
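
The `AsyncProjectsClient` hunks above apply the identical signature change to the async client. A corresponding sketch, again with a placeholder token and placeholder IDs:

```python
import asyncio

from llama_cloud.client import AsyncLlamaCloud


async def main() -> None:
    client = AsyncLlamaCloud(token="YOUR_TOKEN")
    # Same shape as the sync client: organization_id is keyword-only and is
    # sent as a query parameter only when it is not None.
    project = await client.projects.get_project(
        "my-project-id",
        organization_id="my-organization-id",
    )
    print(project)


asyncio.run(main())
```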