llama-cloud 0.1.5__py3-none-any.whl → 0.1.7a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud might be problematic; consult the registry's advisory page for details.

Files changed (129)
  1. llama_cloud/__init__.py +138 -2
  2. llama_cloud/client.py +15 -0
  3. llama_cloud/resources/__init__.py +17 -1
  4. llama_cloud/resources/chat_apps/__init__.py +2 -0
  5. llama_cloud/resources/chat_apps/client.py +620 -0
  6. llama_cloud/resources/data_sinks/client.py +2 -2
  7. llama_cloud/resources/data_sources/client.py +2 -2
  8. llama_cloud/resources/embedding_model_configs/client.py +4 -4
  9. llama_cloud/resources/files/__init__.py +2 -2
  10. llama_cloud/resources/files/client.py +21 -0
  11. llama_cloud/resources/files/types/__init__.py +2 -1
  12. llama_cloud/resources/files/types/file_create_permission_info_value.py +7 -0
  13. llama_cloud/resources/jobs/__init__.py +2 -0
  14. llama_cloud/resources/jobs/client.py +148 -0
  15. llama_cloud/resources/llama_extract/__init__.py +5 -0
  16. llama_cloud/resources/llama_extract/client.py +1038 -0
  17. llama_cloud/resources/llama_extract/types/__init__.py +6 -0
  18. llama_cloud/resources/llama_extract/types/extract_agent_create_data_schema_value.py +7 -0
  19. llama_cloud/resources/llama_extract/types/extract_agent_update_data_schema_value.py +7 -0
  20. llama_cloud/resources/organizations/client.py +14 -14
  21. llama_cloud/resources/parsing/client.py +480 -229
  22. llama_cloud/resources/pipelines/client.py +182 -126
  23. llama_cloud/resources/projects/client.py +210 -102
  24. llama_cloud/resources/reports/__init__.py +5 -0
  25. llama_cloud/resources/reports/client.py +1198 -0
  26. llama_cloud/resources/reports/types/__init__.py +7 -0
  27. llama_cloud/resources/reports/types/update_report_plan_api_v_1_reports_report_id_plan_patch_request_action.py +25 -0
  28. llama_cloud/resources/retrievers/__init__.py +2 -0
  29. llama_cloud/resources/retrievers/client.py +654 -0
  30. llama_cloud/types/__init__.py +124 -2
  31. llama_cloud/types/{chat_message.py → app_schema_chat_chat_message.py} +2 -2
  32. llama_cloud/types/chat_app.py +44 -0
  33. llama_cloud/types/chat_app_response.py +41 -0
  34. llama_cloud/types/cloud_az_storage_blob_data_source.py +1 -0
  35. llama_cloud/types/cloud_box_data_source.py +1 -0
  36. llama_cloud/types/cloud_confluence_data_source.py +1 -0
  37. llama_cloud/types/cloud_google_drive_data_source.py +1 -0
  38. llama_cloud/types/cloud_jira_data_source.py +1 -0
  39. llama_cloud/types/cloud_notion_page_data_source.py +1 -0
  40. llama_cloud/types/cloud_one_drive_data_source.py +1 -0
  41. llama_cloud/types/cloud_postgres_vector_store.py +1 -0
  42. llama_cloud/types/cloud_s_3_data_source.py +1 -0
  43. llama_cloud/types/cloud_sharepoint_data_source.py +1 -0
  44. llama_cloud/types/cloud_slack_data_source.py +1 -0
  45. llama_cloud/types/composite_retrieval_mode.py +21 -0
  46. llama_cloud/types/composite_retrieval_result.py +38 -0
  47. llama_cloud/types/composite_retrieved_text_node.py +42 -0
  48. llama_cloud/types/data_sink.py +1 -1
  49. llama_cloud/types/data_sink_create.py +1 -1
  50. llama_cloud/types/data_source.py +1 -1
  51. llama_cloud/types/data_source_create.py +1 -1
  52. llama_cloud/types/edit_suggestion.py +39 -0
  53. llama_cloud/types/eval_dataset_job_record.py +1 -0
  54. llama_cloud/types/extract_agent.py +45 -0
  55. llama_cloud/types/extract_agent_data_schema_value.py +5 -0
  56. llama_cloud/types/extract_config.py +40 -0
  57. llama_cloud/types/extract_job.py +35 -0
  58. llama_cloud/types/extract_job_create.py +40 -0
  59. llama_cloud/types/extract_job_create_data_schema_override_value.py +7 -0
  60. llama_cloud/types/extract_mode.py +17 -0
  61. llama_cloud/types/extract_resultset.py +46 -0
  62. llama_cloud/types/extract_resultset_data.py +11 -0
  63. llama_cloud/types/extract_resultset_data_item_value.py +7 -0
  64. llama_cloud/types/extract_resultset_data_zero_value.py +7 -0
  65. llama_cloud/types/extract_resultset_extraction_metadata_value.py +7 -0
  66. llama_cloud/types/file.py +3 -0
  67. llama_cloud/types/file_permission_info_value.py +5 -0
  68. llama_cloud/types/filter_condition.py +9 -1
  69. llama_cloud/types/filter_operator.py +4 -0
  70. llama_cloud/types/image_block.py +35 -0
  71. llama_cloud/types/input_message.py +1 -1
  72. llama_cloud/types/job_name_mapping.py +4 -0
  73. llama_cloud/types/job_names.py +89 -0
  74. llama_cloud/types/job_record.py +57 -0
  75. llama_cloud/types/job_record_with_usage_metrics.py +36 -0
  76. llama_cloud/types/llama_index_core_base_llms_types_chat_message.py +39 -0
  77. llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py +33 -0
  78. llama_cloud/types/llama_parse_parameters.py +15 -0
  79. llama_cloud/types/llm.py +1 -0
  80. llama_cloud/types/llm_model_data.py +1 -0
  81. llama_cloud/types/llm_parameters.py +1 -0
  82. llama_cloud/types/managed_ingestion_status.py +4 -0
  83. llama_cloud/types/managed_ingestion_status_response.py +1 -0
  84. llama_cloud/types/object_type.py +4 -0
  85. llama_cloud/types/organization.py +5 -0
  86. llama_cloud/types/paginated_jobs_history_with_metrics.py +35 -0
  87. llama_cloud/types/paginated_report_response.py +35 -0
  88. llama_cloud/types/parse_plan_level.py +21 -0
  89. llama_cloud/types/parsing_job_structured_result.py +32 -0
  90. llama_cloud/types/pipeline_create.py +3 -1
  91. llama_cloud/types/pipeline_data_source.py +1 -1
  92. llama_cloud/types/pipeline_file.py +3 -0
  93. llama_cloud/types/pipeline_file_permission_info_value.py +7 -0
  94. llama_cloud/types/playground_session.py +2 -2
  95. llama_cloud/types/preset_retrieval_params.py +1 -0
  96. llama_cloud/types/progress_event.py +44 -0
  97. llama_cloud/types/progress_event_status.py +33 -0
  98. llama_cloud/types/prompt_spec.py +2 -2
  99. llama_cloud/types/related_node_info.py +2 -2
  100. llama_cloud/types/related_node_info_node_type.py +7 -0
  101. llama_cloud/types/report.py +33 -0
  102. llama_cloud/types/report_block.py +34 -0
  103. llama_cloud/types/report_block_dependency.py +29 -0
  104. llama_cloud/types/report_create_response.py +31 -0
  105. llama_cloud/types/report_event_item.py +40 -0
  106. llama_cloud/types/report_event_item_event_data.py +45 -0
  107. llama_cloud/types/report_event_type.py +37 -0
  108. llama_cloud/types/report_metadata.py +39 -0
  109. llama_cloud/types/report_plan.py +36 -0
  110. llama_cloud/types/report_plan_block.py +36 -0
  111. llama_cloud/types/report_query.py +33 -0
  112. llama_cloud/types/report_response.py +41 -0
  113. llama_cloud/types/report_state.py +37 -0
  114. llama_cloud/types/report_state_event.py +38 -0
  115. llama_cloud/types/report_update_event.py +38 -0
  116. llama_cloud/types/retrieve_results.py +1 -1
  117. llama_cloud/types/retriever.py +45 -0
  118. llama_cloud/types/retriever_create.py +37 -0
  119. llama_cloud/types/retriever_pipeline.py +37 -0
  120. llama_cloud/types/status_enum.py +4 -0
  121. llama_cloud/types/supported_llm_model_names.py +4 -0
  122. llama_cloud/types/text_block.py +31 -0
  123. llama_cloud/types/text_node.py +13 -6
  124. llama_cloud/types/usage_metric_response.py +34 -0
  125. llama_cloud/types/user_job_record.py +32 -0
  126. {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/METADATA +3 -1
  127. {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/RECORD +129 -59
  128. {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/WHEEL +1 -1
  129. {llama_cloud-0.1.5.dist-info → llama_cloud-0.1.7a1.dist-info}/LICENSE +0 -0
@@ -0,0 +1,1198 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import typing
4
+ import urllib.parse
5
+ from json.decoder import JSONDecodeError
6
+
7
+ from ...core.api_error import ApiError
8
+ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
9
+ from ...core.jsonable_encoder import jsonable_encoder
10
+ from ...core.remove_none_from_dict import remove_none_from_dict
11
+ from ...errors.unprocessable_entity_error import UnprocessableEntityError
12
+ from ...types.edit_suggestion import EditSuggestion
13
+ from ...types.http_validation_error import HttpValidationError
14
+ from ...types.llama_index_core_base_llms_types_chat_message import LlamaIndexCoreBaseLlmsTypesChatMessage
15
+ from ...types.paginated_report_response import PaginatedReportResponse
16
+ from ...types.report import Report
17
+ from ...types.report_create_response import ReportCreateResponse
18
+ from ...types.report_event_item import ReportEventItem
19
+ from ...types.report_metadata import ReportMetadata
20
+ from ...types.report_plan import ReportPlan
21
+ from ...types.report_response import ReportResponse
22
+ from ...types.report_state import ReportState
23
+ from .types.update_report_plan_api_v_1_reports_report_id_plan_patch_request_action import (
24
+ UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction,
25
+ )
26
+
27
+ try:
28
+ import pydantic
29
+ if pydantic.__version__.startswith("1."):
30
+ raise ImportError
31
+ import pydantic.v1 as pydantic # type: ignore
32
+ except ImportError:
33
+ import pydantic # type: ignore
34
+
35
+ # this is used as the default value for optional parameters
36
+ OMIT = typing.cast(typing.Any, ...)
37
+
38
+
39
+ class ReportsClient:
40
+ def __init__(self, *, client_wrapper: SyncClientWrapper):
41
+ self._client_wrapper = client_wrapper
42
+
43
+ def create_report_api_v_1_reports_post(
44
+ self,
45
+ *,
46
+ project_id: typing.Optional[str] = None,
47
+ organization_id: typing.Optional[str] = None,
48
+ name: str,
49
+ template_text: str,
50
+ template_instructions: str,
51
+ files: typing.List[str],
52
+ template_file: typing.IO,
53
+ ) -> ReportCreateResponse:
54
+ """
55
+ Create a new report.
56
+
57
+ Parameters:
58
+ - project_id: typing.Optional[str].
59
+
60
+ - organization_id: typing.Optional[str].
61
+
62
+ - name: str.
63
+
64
+ - template_text: str.
65
+
66
+ - template_instructions: str.
67
+
68
+ - files: typing.List[str].
69
+
70
+ - template_file: typing.IO.
71
+ """
72
+ _response = self._client_wrapper.httpx_client.request(
73
+ "POST",
74
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/reports"),
75
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
76
+ data=jsonable_encoder(
77
+ {
78
+ "name": name,
79
+ "template_text": template_text,
80
+ "template_instructions": template_instructions,
81
+ "files": files,
82
+ }
83
+ ),
84
+ files={"template_file": template_file},
85
+ headers=self._client_wrapper.get_headers(),
86
+ timeout=60,
87
+ )
88
+ if 200 <= _response.status_code < 300:
89
+ return pydantic.parse_obj_as(ReportCreateResponse, _response.json()) # type: ignore
90
+ if _response.status_code == 422:
91
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
92
+ try:
93
+ _response_json = _response.json()
94
+ except JSONDecodeError:
95
+ raise ApiError(status_code=_response.status_code, body=_response.text)
96
+ raise ApiError(status_code=_response.status_code, body=_response_json)
97
+
98
+ def list_reports(
99
+ self,
100
+ *,
101
+ state: typing.Optional[ReportState] = None,
102
+ limit: typing.Optional[int] = None,
103
+ offset: typing.Optional[int] = None,
104
+ project_id: typing.Optional[str] = None,
105
+ organization_id: typing.Optional[str] = None,
106
+ ) -> PaginatedReportResponse:
107
+ """
108
+ List all reports for a project.
109
+
110
+ Parameters:
111
+ - state: typing.Optional[ReportState].
112
+
113
+ - limit: typing.Optional[int].
114
+
115
+ - offset: typing.Optional[int].
116
+
117
+ - project_id: typing.Optional[str].
118
+
119
+ - organization_id: typing.Optional[str].
120
+ ---
121
+ from llama_cloud import ReportState
122
+ from llama_cloud.client import LlamaCloud
123
+
124
+ client = LlamaCloud(
125
+ token="YOUR_TOKEN",
126
+ )
127
+ client.reports.list_reports(
128
+ state=ReportState.PENDING,
129
+ )
130
+ """
131
+ _response = self._client_wrapper.httpx_client.request(
132
+ "GET",
133
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/reports/list"),
134
+ params=remove_none_from_dict(
135
+ {
136
+ "state": state,
137
+ "limit": limit,
138
+ "offset": offset,
139
+ "project_id": project_id,
140
+ "organization_id": organization_id,
141
+ }
142
+ ),
143
+ headers=self._client_wrapper.get_headers(),
144
+ timeout=60,
145
+ )
146
+ if 200 <= _response.status_code < 300:
147
+ return pydantic.parse_obj_as(PaginatedReportResponse, _response.json()) # type: ignore
148
+ if _response.status_code == 422:
149
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
150
+ try:
151
+ _response_json = _response.json()
152
+ except JSONDecodeError:
153
+ raise ApiError(status_code=_response.status_code, body=_response.text)
154
+ raise ApiError(status_code=_response.status_code, body=_response_json)
155
+
156
+ def get_report(
157
+ self,
158
+ report_id: str,
159
+ *,
160
+ version: typing.Optional[int] = None,
161
+ project_id: typing.Optional[str] = None,
162
+ organization_id: typing.Optional[str] = None,
163
+ ) -> ReportResponse:
164
+ """
165
+ Get a specific report.
166
+
167
+ Parameters:
168
+ - report_id: str.
169
+
170
+ - version: typing.Optional[int].
171
+
172
+ - project_id: typing.Optional[str].
173
+
174
+ - organization_id: typing.Optional[str].
175
+ ---
176
+ from llama_cloud.client import LlamaCloud
177
+
178
+ client = LlamaCloud(
179
+ token="YOUR_TOKEN",
180
+ )
181
+ client.reports.get_report(
182
+ report_id="string",
183
+ )
184
+ """
185
+ _response = self._client_wrapper.httpx_client.request(
186
+ "GET",
187
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
188
+ params=remove_none_from_dict(
189
+ {"version": version, "project_id": project_id, "organization_id": organization_id}
190
+ ),
191
+ headers=self._client_wrapper.get_headers(),
192
+ timeout=60,
193
+ )
194
+ if 200 <= _response.status_code < 300:
195
+ return pydantic.parse_obj_as(ReportResponse, _response.json()) # type: ignore
196
+ if _response.status_code == 422:
197
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
198
+ try:
199
+ _response_json = _response.json()
200
+ except JSONDecodeError:
201
+ raise ApiError(status_code=_response.status_code, body=_response.text)
202
+ raise ApiError(status_code=_response.status_code, body=_response_json)
203
+
204
+ def update_report_metadata(
205
+ self,
206
+ report_id: str,
207
+ *,
208
+ project_id: typing.Optional[str] = None,
209
+ organization_id: typing.Optional[str] = None,
210
+ name: str,
211
+ ) -> ReportMetadata:
212
+ """
213
+ Update metadata for a report.
214
+
215
+ Parameters:
216
+ - report_id: str.
217
+
218
+ - project_id: typing.Optional[str].
219
+
220
+ - organization_id: typing.Optional[str].
221
+
222
+ - name: str. The name of the report
223
+ ---
224
+ from llama_cloud.client import LlamaCloud
225
+
226
+ client = LlamaCloud(
227
+ token="YOUR_TOKEN",
228
+ )
229
+ client.reports.update_report_metadata(
230
+ report_id="string",
231
+ name="string",
232
+ )
233
+ """
234
+ _response = self._client_wrapper.httpx_client.request(
235
+ "POST",
236
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
237
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
238
+ json=jsonable_encoder({"name": name}),
239
+ headers=self._client_wrapper.get_headers(),
240
+ timeout=60,
241
+ )
242
+ if 200 <= _response.status_code < 300:
243
+ return pydantic.parse_obj_as(ReportMetadata, _response.json()) # type: ignore
244
+ if _response.status_code == 422:
245
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
246
+ try:
247
+ _response_json = _response.json()
248
+ except JSONDecodeError:
249
+ raise ApiError(status_code=_response.status_code, body=_response.text)
250
+ raise ApiError(status_code=_response.status_code, body=_response_json)
251
+
252
+ def delete_report(
253
+ self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
254
+ ) -> typing.Any:
255
+ """
256
+ Delete a report.
257
+
258
+ Parameters:
259
+ - report_id: str.
260
+
261
+ - project_id: typing.Optional[str].
262
+
263
+ - organization_id: typing.Optional[str].
264
+ ---
265
+ from llama_cloud.client import LlamaCloud
266
+
267
+ client = LlamaCloud(
268
+ token="YOUR_TOKEN",
269
+ )
270
+ client.reports.delete_report(
271
+ report_id="string",
272
+ )
273
+ """
274
+ _response = self._client_wrapper.httpx_client.request(
275
+ "DELETE",
276
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
277
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
278
+ headers=self._client_wrapper.get_headers(),
279
+ timeout=60,
280
+ )
281
+ if 200 <= _response.status_code < 300:
282
+ return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
283
+ if _response.status_code == 422:
284
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
285
+ try:
286
+ _response_json = _response.json()
287
+ except JSONDecodeError:
288
+ raise ApiError(status_code=_response.status_code, body=_response.text)
289
+ raise ApiError(status_code=_response.status_code, body=_response_json)
290
+
291
+ def update_report(
292
+ self,
293
+ report_id: str,
294
+ *,
295
+ project_id: typing.Optional[str] = None,
296
+ organization_id: typing.Optional[str] = None,
297
+ content: Report,
298
+ ) -> ReportResponse:
299
+ """
300
+ Update a report's content.
301
+
302
+ Parameters:
303
+ - report_id: str.
304
+
305
+ - project_id: typing.Optional[str].
306
+
307
+ - organization_id: typing.Optional[str].
308
+
309
+ - content: Report. The content of the report version
310
+ ---
311
+ from llama_cloud import Report
312
+ from llama_cloud.client import LlamaCloud
313
+
314
+ client = LlamaCloud(
315
+ token="YOUR_TOKEN",
316
+ )
317
+ client.reports.update_report(
318
+ report_id="string",
319
+ content=Report(
320
+ id="string",
321
+ ),
322
+ )
323
+ """
324
+ _response = self._client_wrapper.httpx_client.request(
325
+ "PATCH",
326
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
327
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
328
+ json=jsonable_encoder({"content": content}),
329
+ headers=self._client_wrapper.get_headers(),
330
+ timeout=60,
331
+ )
332
+ if 200 <= _response.status_code < 300:
333
+ return pydantic.parse_obj_as(ReportResponse, _response.json()) # type: ignore
334
+ if _response.status_code == 422:
335
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
336
+ try:
337
+ _response_json = _response.json()
338
+ except JSONDecodeError:
339
+ raise ApiError(status_code=_response.status_code, body=_response.text)
340
+ raise ApiError(status_code=_response.status_code, body=_response_json)
341
+
342
+ def get_report_plan(
343
+ self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
344
+ ) -> ReportPlan:
345
+ """
346
+ Get the plan for a report.
347
+
348
+ Parameters:
349
+ - report_id: str.
350
+
351
+ - project_id: typing.Optional[str].
352
+
353
+ - organization_id: typing.Optional[str].
354
+ ---
355
+ from llama_cloud.client import LlamaCloud
356
+
357
+ client = LlamaCloud(
358
+ token="YOUR_TOKEN",
359
+ )
360
+ client.reports.get_report_plan(
361
+ report_id="string",
362
+ )
363
+ """
364
+ _response = self._client_wrapper.httpx_client.request(
365
+ "GET",
366
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/plan"),
367
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
368
+ headers=self._client_wrapper.get_headers(),
369
+ timeout=60,
370
+ )
371
+ if 200 <= _response.status_code < 300:
372
+ return pydantic.parse_obj_as(ReportPlan, _response.json()) # type: ignore
373
+ if _response.status_code == 422:
374
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
375
+ try:
376
+ _response_json = _response.json()
377
+ except JSONDecodeError:
378
+ raise ApiError(status_code=_response.status_code, body=_response.text)
379
+ raise ApiError(status_code=_response.status_code, body=_response_json)
380
+
381
+ def update_report_plan(
382
+ self,
383
+ report_id: str,
384
+ *,
385
+ action: UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction,
386
+ project_id: typing.Optional[str] = None,
387
+ organization_id: typing.Optional[str] = None,
388
+ request: typing.Optional[ReportPlan] = None,
389
+ ) -> ReportResponse:
390
+ """
391
+ Update the plan of a report, including approval, rejection, and editing.
392
+
393
+ Parameters:
394
+ - report_id: str.
395
+
396
+ - action: UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction.
397
+
398
+ - project_id: typing.Optional[str].
399
+
400
+ - organization_id: typing.Optional[str].
401
+
402
+ - request: typing.Optional[ReportPlan].
403
+ ---
404
+ from llama_cloud import (
405
+ ReportPlan,
406
+ UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction,
407
+ )
408
+ from llama_cloud.client import LlamaCloud
409
+
410
+ client = LlamaCloud(
411
+ token="YOUR_TOKEN",
412
+ )
413
+ client.reports.update_report_plan(
414
+ report_id="string",
415
+ action=UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction.APPROVE,
416
+ request=ReportPlan(),
417
+ )
418
+ """
419
+ _response = self._client_wrapper.httpx_client.request(
420
+ "PATCH",
421
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/plan"),
422
+ params=remove_none_from_dict(
423
+ {"action": action, "project_id": project_id, "organization_id": organization_id}
424
+ ),
425
+ json=jsonable_encoder(request),
426
+ headers=self._client_wrapper.get_headers(),
427
+ timeout=60,
428
+ )
429
+ if 200 <= _response.status_code < 300:
430
+ return pydantic.parse_obj_as(ReportResponse, _response.json()) # type: ignore
431
+ if _response.status_code == 422:
432
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
433
+ try:
434
+ _response_json = _response.json()
435
+ except JSONDecodeError:
436
+ raise ApiError(status_code=_response.status_code, body=_response.text)
437
+ raise ApiError(status_code=_response.status_code, body=_response_json)
438
+
439
+ def get_report_events(
440
+ self,
441
+ report_id: str,
442
+ *,
443
+ last_sequence: typing.Optional[int] = None,
444
+ project_id: typing.Optional[str] = None,
445
+ organization_id: typing.Optional[str] = None,
446
+ ) -> typing.List[ReportEventItem]:
447
+ """
448
+ Get all historical events for a report.
449
+
450
+ Parameters:
451
+ - report_id: str.
452
+
453
+ - last_sequence: typing.Optional[int].
454
+
455
+ - project_id: typing.Optional[str].
456
+
457
+ - organization_id: typing.Optional[str].
458
+ ---
459
+ from llama_cloud.client import LlamaCloud
460
+
461
+ client = LlamaCloud(
462
+ token="YOUR_TOKEN",
463
+ )
464
+ client.reports.get_report_events(
465
+ report_id="string",
466
+ )
467
+ """
468
+ _response = self._client_wrapper.httpx_client.request(
469
+ "GET",
470
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/events"),
471
+ params=remove_none_from_dict(
472
+ {"last_sequence": last_sequence, "project_id": project_id, "organization_id": organization_id}
473
+ ),
474
+ headers=self._client_wrapper.get_headers(),
475
+ timeout=60,
476
+ )
477
+ if 200 <= _response.status_code < 300:
478
+ return pydantic.parse_obj_as(typing.List[ReportEventItem], _response.json()) # type: ignore
479
+ if _response.status_code == 422:
480
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
481
+ try:
482
+ _response_json = _response.json()
483
+ except JSONDecodeError:
484
+ raise ApiError(status_code=_response.status_code, body=_response.text)
485
+ raise ApiError(status_code=_response.status_code, body=_response_json)
486
+
487
+ def get_report_metadata(
488
+ self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
489
+ ) -> ReportMetadata:
490
+ """
491
+ Get metadata for a report.
492
+
493
+ Parameters:
494
+ - report_id: str.
495
+
496
+ - project_id: typing.Optional[str].
497
+
498
+ - organization_id: typing.Optional[str].
499
+ ---
500
+ from llama_cloud.client import LlamaCloud
501
+
502
+ client = LlamaCloud(
503
+ token="YOUR_TOKEN",
504
+ )
505
+ client.reports.get_report_metadata(
506
+ report_id="string",
507
+ )
508
+ """
509
+ _response = self._client_wrapper.httpx_client.request(
510
+ "GET",
511
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/metadata"),
512
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
513
+ headers=self._client_wrapper.get_headers(),
514
+ timeout=60,
515
+ )
516
+ if 200 <= _response.status_code < 300:
517
+ return pydantic.parse_obj_as(ReportMetadata, _response.json()) # type: ignore
518
+ if _response.status_code == 422:
519
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
520
+ try:
521
+ _response_json = _response.json()
522
+ except JSONDecodeError:
523
+ raise ApiError(status_code=_response.status_code, body=_response.text)
524
+ raise ApiError(status_code=_response.status_code, body=_response_json)
525
+
526
+ def suggest_edits_endpoint(
527
+ self,
528
+ report_id: str,
529
+ *,
530
+ project_id: typing.Optional[str] = None,
531
+ organization_id: typing.Optional[str] = None,
532
+ user_query: str,
533
+ chat_history: typing.List[LlamaIndexCoreBaseLlmsTypesChatMessage],
534
+ ) -> typing.List[EditSuggestion]:
535
+ """
536
+ Suggest edits to a report based on user query and chat history.
537
+
538
+ Parameters:
539
+ - report_id: str.
540
+
541
+ - project_id: typing.Optional[str].
542
+
543
+ - organization_id: typing.Optional[str].
544
+
545
+ - user_query: str.
546
+
547
+ - chat_history: typing.List[LlamaIndexCoreBaseLlmsTypesChatMessage].
548
+ ---
549
+ from llama_cloud.client import LlamaCloud
550
+
551
+ client = LlamaCloud(
552
+ token="YOUR_TOKEN",
553
+ )
554
+ client.reports.suggest_edits_endpoint(
555
+ report_id="string",
556
+ user_query="string",
557
+ chat_history=[],
558
+ )
559
+ """
560
+ _response = self._client_wrapper.httpx_client.request(
561
+ "POST",
562
+ urllib.parse.urljoin(
563
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/suggest_edits"
564
+ ),
565
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
566
+ json=jsonable_encoder({"user_query": user_query, "chat_history": chat_history}),
567
+ headers=self._client_wrapper.get_headers(),
568
+ timeout=60,
569
+ )
570
+ if 200 <= _response.status_code < 300:
571
+ return pydantic.parse_obj_as(typing.List[EditSuggestion], _response.json()) # type: ignore
572
+ if _response.status_code == 422:
573
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
574
+ try:
575
+ _response_json = _response.json()
576
+ except JSONDecodeError:
577
+ raise ApiError(status_code=_response.status_code, body=_response.text)
578
+ raise ApiError(status_code=_response.status_code, body=_response_json)
579
+
580
+ def restart_report(
581
+ self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
582
+ ) -> typing.Any:
583
+ """
584
+ Restart a report.
585
+
586
+ Parameters:
587
+ - report_id: str.
588
+
589
+ - project_id: typing.Optional[str].
590
+
591
+ - organization_id: typing.Optional[str].
592
+ ---
593
+ from llama_cloud.client import LlamaCloud
594
+
595
+ client = LlamaCloud(
596
+ token="YOUR_TOKEN",
597
+ )
598
+ client.reports.restart_report(
599
+ report_id="string",
600
+ )
601
+ """
602
+ _response = self._client_wrapper.httpx_client.request(
603
+ "POST",
604
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/retry"),
605
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
606
+ headers=self._client_wrapper.get_headers(),
607
+ timeout=60,
608
+ )
609
+ if 200 <= _response.status_code < 300:
610
+ return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
611
+ if _response.status_code == 422:
612
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
613
+ try:
614
+ _response_json = _response.json()
615
+ except JSONDecodeError:
616
+ raise ApiError(status_code=_response.status_code, body=_response.text)
617
+ raise ApiError(status_code=_response.status_code, body=_response_json)
618
+
619
+
620
+ class AsyncReportsClient:
621
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
622
+ self._client_wrapper = client_wrapper
623
+
624
+ async def create_report_api_v_1_reports_post(
625
+ self,
626
+ *,
627
+ project_id: typing.Optional[str] = None,
628
+ organization_id: typing.Optional[str] = None,
629
+ name: str,
630
+ template_text: str,
631
+ template_instructions: str,
632
+ files: typing.List[str],
633
+ template_file: typing.IO,
634
+ ) -> ReportCreateResponse:
635
+ """
636
+ Create a new report.
637
+
638
+ Parameters:
639
+ - project_id: typing.Optional[str].
640
+
641
+ - organization_id: typing.Optional[str].
642
+
643
+ - name: str.
644
+
645
+ - template_text: str.
646
+
647
+ - template_instructions: str.
648
+
649
+ - files: typing.List[str].
650
+
651
+ - template_file: typing.IO.
652
+ """
653
+ _response = await self._client_wrapper.httpx_client.request(
654
+ "POST",
655
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/reports"),
656
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
657
+ data=jsonable_encoder(
658
+ {
659
+ "name": name,
660
+ "template_text": template_text,
661
+ "template_instructions": template_instructions,
662
+ "files": files,
663
+ }
664
+ ),
665
+ files={"template_file": template_file},
666
+ headers=self._client_wrapper.get_headers(),
667
+ timeout=60,
668
+ )
669
+ if 200 <= _response.status_code < 300:
670
+ return pydantic.parse_obj_as(ReportCreateResponse, _response.json()) # type: ignore
671
+ if _response.status_code == 422:
672
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
673
+ try:
674
+ _response_json = _response.json()
675
+ except JSONDecodeError:
676
+ raise ApiError(status_code=_response.status_code, body=_response.text)
677
+ raise ApiError(status_code=_response.status_code, body=_response_json)
678
+
679
+ async def list_reports(
680
+ self,
681
+ *,
682
+ state: typing.Optional[ReportState] = None,
683
+ limit: typing.Optional[int] = None,
684
+ offset: typing.Optional[int] = None,
685
+ project_id: typing.Optional[str] = None,
686
+ organization_id: typing.Optional[str] = None,
687
+ ) -> PaginatedReportResponse:
688
+ """
689
+ List all reports for a project.
690
+
691
+ Parameters:
692
+ - state: typing.Optional[ReportState].
693
+
694
+ - limit: typing.Optional[int].
695
+
696
+ - offset: typing.Optional[int].
697
+
698
+ - project_id: typing.Optional[str].
699
+
700
+ - organization_id: typing.Optional[str].
701
+ ---
702
+ from llama_cloud import ReportState
703
+ from llama_cloud.client import AsyncLlamaCloud
704
+
705
+ client = AsyncLlamaCloud(
706
+ token="YOUR_TOKEN",
707
+ )
708
+ await client.reports.list_reports(
709
+ state=ReportState.PENDING,
710
+ )
711
+ """
712
+ _response = await self._client_wrapper.httpx_client.request(
713
+ "GET",
714
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/reports/list"),
715
+ params=remove_none_from_dict(
716
+ {
717
+ "state": state,
718
+ "limit": limit,
719
+ "offset": offset,
720
+ "project_id": project_id,
721
+ "organization_id": organization_id,
722
+ }
723
+ ),
724
+ headers=self._client_wrapper.get_headers(),
725
+ timeout=60,
726
+ )
727
+ if 200 <= _response.status_code < 300:
728
+ return pydantic.parse_obj_as(PaginatedReportResponse, _response.json()) # type: ignore
729
+ if _response.status_code == 422:
730
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
731
+ try:
732
+ _response_json = _response.json()
733
+ except JSONDecodeError:
734
+ raise ApiError(status_code=_response.status_code, body=_response.text)
735
+ raise ApiError(status_code=_response.status_code, body=_response_json)
736
+
737
+ async def get_report(
738
+ self,
739
+ report_id: str,
740
+ *,
741
+ version: typing.Optional[int] = None,
742
+ project_id: typing.Optional[str] = None,
743
+ organization_id: typing.Optional[str] = None,
744
+ ) -> ReportResponse:
745
+ """
746
+ Get a specific report.
747
+
748
+ Parameters:
749
+ - report_id: str.
750
+
751
+ - version: typing.Optional[int].
752
+
753
+ - project_id: typing.Optional[str].
754
+
755
+ - organization_id: typing.Optional[str].
756
+ ---
757
+ from llama_cloud.client import AsyncLlamaCloud
758
+
759
+ client = AsyncLlamaCloud(
760
+ token="YOUR_TOKEN",
761
+ )
762
+ await client.reports.get_report(
763
+ report_id="string",
764
+ )
765
+ """
766
+ _response = await self._client_wrapper.httpx_client.request(
767
+ "GET",
768
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
769
+ params=remove_none_from_dict(
770
+ {"version": version, "project_id": project_id, "organization_id": organization_id}
771
+ ),
772
+ headers=self._client_wrapper.get_headers(),
773
+ timeout=60,
774
+ )
775
+ if 200 <= _response.status_code < 300:
776
+ return pydantic.parse_obj_as(ReportResponse, _response.json()) # type: ignore
777
+ if _response.status_code == 422:
778
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
779
+ try:
780
+ _response_json = _response.json()
781
+ except JSONDecodeError:
782
+ raise ApiError(status_code=_response.status_code, body=_response.text)
783
+ raise ApiError(status_code=_response.status_code, body=_response_json)
784
+
785
+ async def update_report_metadata(
786
+ self,
787
+ report_id: str,
788
+ *,
789
+ project_id: typing.Optional[str] = None,
790
+ organization_id: typing.Optional[str] = None,
791
+ name: str,
792
+ ) -> ReportMetadata:
793
+ """
794
+ Update metadata for a report.
795
+
796
+ Parameters:
797
+ - report_id: str.
798
+
799
+ - project_id: typing.Optional[str].
800
+
801
+ - organization_id: typing.Optional[str].
802
+
803
+ - name: str. The name of the report
804
+ ---
805
+ from llama_cloud.client import AsyncLlamaCloud
806
+
807
+ client = AsyncLlamaCloud(
808
+ token="YOUR_TOKEN",
809
+ )
810
+ await client.reports.update_report_metadata(
811
+ report_id="string",
812
+ name="string",
813
+ )
814
+ """
815
+ _response = await self._client_wrapper.httpx_client.request(
816
+ "POST",
817
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
818
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
819
+ json=jsonable_encoder({"name": name}),
820
+ headers=self._client_wrapper.get_headers(),
821
+ timeout=60,
822
+ )
823
+ if 200 <= _response.status_code < 300:
824
+ return pydantic.parse_obj_as(ReportMetadata, _response.json()) # type: ignore
825
+ if _response.status_code == 422:
826
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
827
+ try:
828
+ _response_json = _response.json()
829
+ except JSONDecodeError:
830
+ raise ApiError(status_code=_response.status_code, body=_response.text)
831
+ raise ApiError(status_code=_response.status_code, body=_response_json)
832
+
833
+ async def delete_report(
834
+ self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
835
+ ) -> typing.Any:
836
+ """
837
+ Delete a report.
838
+
839
+ Parameters:
840
+ - report_id: str.
841
+
842
+ - project_id: typing.Optional[str].
843
+
844
+ - organization_id: typing.Optional[str].
845
+ ---
846
+ from llama_cloud.client import AsyncLlamaCloud
847
+
848
+ client = AsyncLlamaCloud(
849
+ token="YOUR_TOKEN",
850
+ )
851
+ await client.reports.delete_report(
852
+ report_id="string",
853
+ )
854
+ """
855
+ _response = await self._client_wrapper.httpx_client.request(
856
+ "DELETE",
857
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
858
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
859
+ headers=self._client_wrapper.get_headers(),
860
+ timeout=60,
861
+ )
862
+ if 200 <= _response.status_code < 300:
863
+ return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
864
+ if _response.status_code == 422:
865
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
866
+ try:
867
+ _response_json = _response.json()
868
+ except JSONDecodeError:
869
+ raise ApiError(status_code=_response.status_code, body=_response.text)
870
+ raise ApiError(status_code=_response.status_code, body=_response_json)
871
+
872
+ async def update_report(
873
+ self,
874
+ report_id: str,
875
+ *,
876
+ project_id: typing.Optional[str] = None,
877
+ organization_id: typing.Optional[str] = None,
878
+ content: Report,
879
+ ) -> ReportResponse:
880
+ """
881
+ Update a report's content.
882
+
883
+ Parameters:
884
+ - report_id: str.
885
+
886
+ - project_id: typing.Optional[str].
887
+
888
+ - organization_id: typing.Optional[str].
889
+
890
+ - content: Report. The content of the report version
891
+ ---
892
+ from llama_cloud import Report
893
+ from llama_cloud.client import AsyncLlamaCloud
894
+
895
+ client = AsyncLlamaCloud(
896
+ token="YOUR_TOKEN",
897
+ )
898
+ await client.reports.update_report(
899
+ report_id="string",
900
+ content=Report(
901
+ id="string",
902
+ ),
903
+ )
904
+ """
905
+ _response = await self._client_wrapper.httpx_client.request(
906
+ "PATCH",
907
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
908
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
909
+ json=jsonable_encoder({"content": content}),
910
+ headers=self._client_wrapper.get_headers(),
911
+ timeout=60,
912
+ )
913
+ if 200 <= _response.status_code < 300:
914
+ return pydantic.parse_obj_as(ReportResponse, _response.json()) # type: ignore
915
+ if _response.status_code == 422:
916
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
917
+ try:
918
+ _response_json = _response.json()
919
+ except JSONDecodeError:
920
+ raise ApiError(status_code=_response.status_code, body=_response.text)
921
+ raise ApiError(status_code=_response.status_code, body=_response_json)
922
+
923
+ async def get_report_plan(
924
+ self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
925
+ ) -> ReportPlan:
926
+ """
927
+ Get the plan for a report.
928
+
929
+ Parameters:
930
+ - report_id: str.
931
+
932
+ - project_id: typing.Optional[str].
933
+
934
+ - organization_id: typing.Optional[str].
935
+ ---
936
+ from llama_cloud.client import AsyncLlamaCloud
937
+
938
+ client = AsyncLlamaCloud(
939
+ token="YOUR_TOKEN",
940
+ )
941
+ await client.reports.get_report_plan(
942
+ report_id="string",
943
+ )
944
+ """
945
+ _response = await self._client_wrapper.httpx_client.request(
946
+ "GET",
947
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/plan"),
948
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
949
+ headers=self._client_wrapper.get_headers(),
950
+ timeout=60,
951
+ )
952
+ if 200 <= _response.status_code < 300:
953
+ return pydantic.parse_obj_as(ReportPlan, _response.json()) # type: ignore
954
+ if _response.status_code == 422:
955
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
956
+ try:
957
+ _response_json = _response.json()
958
+ except JSONDecodeError:
959
+ raise ApiError(status_code=_response.status_code, body=_response.text)
960
+ raise ApiError(status_code=_response.status_code, body=_response_json)
961
+
962
+ async def update_report_plan(
963
+ self,
964
+ report_id: str,
965
+ *,
966
+ action: UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction,
967
+ project_id: typing.Optional[str] = None,
968
+ organization_id: typing.Optional[str] = None,
969
+ request: typing.Optional[ReportPlan] = None,
970
+ ) -> ReportResponse:
971
+ """
972
+ Update the plan of a report, including approval, rejection, and editing.
973
+
974
+ Parameters:
975
+ - report_id: str.
976
+
977
+ - action: UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction.
978
+
979
+ - project_id: typing.Optional[str].
980
+
981
+ - organization_id: typing.Optional[str].
982
+
983
+ - request: typing.Optional[ReportPlan].
984
+ ---
985
+ from llama_cloud import (
986
+ ReportPlan,
987
+ UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction,
988
+ )
989
+ from llama_cloud.client import AsyncLlamaCloud
990
+
991
+ client = AsyncLlamaCloud(
992
+ token="YOUR_TOKEN",
993
+ )
994
+ await client.reports.update_report_plan(
995
+ report_id="string",
996
+ action=UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction.APPROVE,
997
+ request=ReportPlan(),
998
+ )
999
+ """
1000
+ _response = await self._client_wrapper.httpx_client.request(
1001
+ "PATCH",
1002
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/plan"),
1003
+ params=remove_none_from_dict(
1004
+ {"action": action, "project_id": project_id, "organization_id": organization_id}
1005
+ ),
1006
+ json=jsonable_encoder(request),
1007
+ headers=self._client_wrapper.get_headers(),
1008
+ timeout=60,
1009
+ )
1010
+ if 200 <= _response.status_code < 300:
1011
+ return pydantic.parse_obj_as(ReportResponse, _response.json()) # type: ignore
1012
+ if _response.status_code == 422:
1013
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1014
+ try:
1015
+ _response_json = _response.json()
1016
+ except JSONDecodeError:
1017
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1018
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1019
+
1020
+ async def get_report_events(
1021
+ self,
1022
+ report_id: str,
1023
+ *,
1024
+ last_sequence: typing.Optional[int] = None,
1025
+ project_id: typing.Optional[str] = None,
1026
+ organization_id: typing.Optional[str] = None,
1027
+ ) -> typing.List[ReportEventItem]:
1028
+ """
1029
+ Get all historical events for a report.
1030
+
1031
+ Parameters:
1032
+ - report_id: str.
1033
+
1034
+ - last_sequence: typing.Optional[int].
1035
+
1036
+ - project_id: typing.Optional[str].
1037
+
1038
+ - organization_id: typing.Optional[str].
1039
+ ---
1040
+ from llama_cloud.client import AsyncLlamaCloud
1041
+
1042
+ client = AsyncLlamaCloud(
1043
+ token="YOUR_TOKEN",
1044
+ )
1045
+ await client.reports.get_report_events(
1046
+ report_id="string",
1047
+ )
1048
+ """
1049
+ _response = await self._client_wrapper.httpx_client.request(
1050
+ "GET",
1051
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/events"),
1052
+ params=remove_none_from_dict(
1053
+ {"last_sequence": last_sequence, "project_id": project_id, "organization_id": organization_id}
1054
+ ),
1055
+ headers=self._client_wrapper.get_headers(),
1056
+ timeout=60,
1057
+ )
1058
+ if 200 <= _response.status_code < 300:
1059
+ return pydantic.parse_obj_as(typing.List[ReportEventItem], _response.json()) # type: ignore
1060
+ if _response.status_code == 422:
1061
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1062
+ try:
1063
+ _response_json = _response.json()
1064
+ except JSONDecodeError:
1065
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1066
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1067
+
1068
+ async def get_report_metadata(
1069
+ self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1070
+ ) -> ReportMetadata:
1071
+ """
1072
+ Get metadata for a report.
1073
+
1074
+ Parameters:
1075
+ - report_id: str.
1076
+
1077
+ - project_id: typing.Optional[str].
1078
+
1079
+ - organization_id: typing.Optional[str].
1080
+ ---
1081
+ from llama_cloud.client import AsyncLlamaCloud
1082
+
1083
+ client = AsyncLlamaCloud(
1084
+ token="YOUR_TOKEN",
1085
+ )
1086
+ await client.reports.get_report_metadata(
1087
+ report_id="string",
1088
+ )
1089
+ """
1090
+ _response = await self._client_wrapper.httpx_client.request(
1091
+ "GET",
1092
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/metadata"),
1093
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1094
+ headers=self._client_wrapper.get_headers(),
1095
+ timeout=60,
1096
+ )
1097
+ if 200 <= _response.status_code < 300:
1098
+ return pydantic.parse_obj_as(ReportMetadata, _response.json()) # type: ignore
1099
+ if _response.status_code == 422:
1100
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1101
+ try:
1102
+ _response_json = _response.json()
1103
+ except JSONDecodeError:
1104
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1105
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1106
+
1107
+ async def suggest_edits_endpoint(
1108
+ self,
1109
+ report_id: str,
1110
+ *,
1111
+ project_id: typing.Optional[str] = None,
1112
+ organization_id: typing.Optional[str] = None,
1113
+ user_query: str,
1114
+ chat_history: typing.List[LlamaIndexCoreBaseLlmsTypesChatMessage],
1115
+ ) -> typing.List[EditSuggestion]:
1116
+ """
1117
+ Suggest edits to a report based on user query and chat history.
1118
+
1119
+ Parameters:
1120
+ - report_id: str.
1121
+
1122
+ - project_id: typing.Optional[str].
1123
+
1124
+ - organization_id: typing.Optional[str].
1125
+
1126
+ - user_query: str.
1127
+
1128
+ - chat_history: typing.List[LlamaIndexCoreBaseLlmsTypesChatMessage].
1129
+ ---
1130
+ from llama_cloud.client import AsyncLlamaCloud
1131
+
1132
+ client = AsyncLlamaCloud(
1133
+ token="YOUR_TOKEN",
1134
+ )
1135
+ await client.reports.suggest_edits_endpoint(
1136
+ report_id="string",
1137
+ user_query="string",
1138
+ chat_history=[],
1139
+ )
1140
+ """
1141
+ _response = await self._client_wrapper.httpx_client.request(
1142
+ "POST",
1143
+ urllib.parse.urljoin(
1144
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/suggest_edits"
1145
+ ),
1146
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1147
+ json=jsonable_encoder({"user_query": user_query, "chat_history": chat_history}),
1148
+ headers=self._client_wrapper.get_headers(),
1149
+ timeout=60,
1150
+ )
1151
+ if 200 <= _response.status_code < 300:
1152
+ return pydantic.parse_obj_as(typing.List[EditSuggestion], _response.json()) # type: ignore
1153
+ if _response.status_code == 422:
1154
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1155
+ try:
1156
+ _response_json = _response.json()
1157
+ except JSONDecodeError:
1158
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1159
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1160
+
1161
+ async def restart_report(
1162
+ self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1163
+ ) -> typing.Any:
1164
+ """
1165
+ Restart a report.
1166
+
1167
+ Parameters:
1168
+ - report_id: str.
1169
+
1170
+ - project_id: typing.Optional[str].
1171
+
1172
+ - organization_id: typing.Optional[str].
1173
+ ---
1174
+ from llama_cloud.client import AsyncLlamaCloud
1175
+
1176
+ client = AsyncLlamaCloud(
1177
+ token="YOUR_TOKEN",
1178
+ )
1179
+ await client.reports.restart_report(
1180
+ report_id="string",
1181
+ )
1182
+ """
1183
+ _response = await self._client_wrapper.httpx_client.request(
1184
+ "POST",
1185
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/retry"),
1186
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1187
+ headers=self._client_wrapper.get_headers(),
1188
+ timeout=60,
1189
+ )
1190
+ if 200 <= _response.status_code < 300:
1191
+ return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
1192
+ if _response.status_code == 422:
1193
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1194
+ try:
1195
+ _response_json = _response.json()
1196
+ except JSONDecodeError:
1197
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1198
+ raise ApiError(status_code=_response.status_code, body=_response_json)