llama-cloud 0.1.39__py3-none-any.whl → 0.1.41__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (61)
  1. llama_cloud/__init__.py +24 -72
  2. llama_cloud/client.py +2 -5
  3. llama_cloud/resources/__init__.py +0 -4
  4. llama_cloud/resources/admin/client.py +5 -5
  5. llama_cloud/resources/beta/client.py +1351 -335
  6. llama_cloud/resources/llama_extract/client.py +56 -0
  7. llama_cloud/resources/parsing/client.py +8 -0
  8. llama_cloud/resources/pipelines/client.py +37 -0
  9. llama_cloud/types/__init__.py +24 -72
  10. llama_cloud/types/{src_app_schema_chat_chat_message.py → chat_message.py} +1 -1
  11. llama_cloud/types/extract_config.py +8 -2
  12. llama_cloud/types/extract_job_create.py +2 -0
  13. llama_cloud/types/extract_job_create_priority.py +29 -0
  14. llama_cloud/types/extract_models.py +28 -28
  15. llama_cloud/types/job_names.py +0 -4
  16. llama_cloud/types/{document_block.py → llama_extract_feature_availability.py} +5 -6
  17. llama_cloud/types/llama_extract_mode_availability.py +4 -3
  18. llama_cloud/types/llama_extract_settings.py +1 -1
  19. llama_cloud/types/llama_parse_parameters.py +1 -0
  20. llama_cloud/types/{progress_event.py → parse_configuration.py} +12 -12
  21. llama_cloud/types/{llama_index_core_base_llms_types_chat_message.py → parse_configuration_create.py} +9 -7
  22. llama_cloud/types/{edit_suggestion.py → parse_configuration_filter.py} +8 -6
  23. llama_cloud/types/{report_update_event.py → parse_configuration_query_response.py} +6 -6
  24. llama_cloud/types/parse_job_config.py +1 -0
  25. llama_cloud/types/pipeline.py +4 -0
  26. llama_cloud/types/pipeline_create.py +2 -0
  27. llama_cloud/types/playground_session.py +2 -2
  28. llama_cloud/types/public_model_name.py +97 -0
  29. llama_cloud/types/{report_create_response.py → schema_generation_availability.py} +4 -2
  30. llama_cloud/types/schema_generation_availability_status.py +17 -0
  31. llama_cloud/types/{report_event_item.py → sparse_model_config.py} +10 -8
  32. llama_cloud/types/sparse_model_type.py +33 -0
  33. llama_cloud/types/webhook_configuration.py +1 -0
  34. llama_cloud-0.1.41.dist-info/METADATA +106 -0
  35. {llama_cloud-0.1.39.dist-info → llama_cloud-0.1.41.dist-info}/RECORD +37 -56
  36. {llama_cloud-0.1.39.dist-info → llama_cloud-0.1.41.dist-info}/WHEEL +1 -1
  37. llama_cloud/resources/reports/__init__.py +0 -5
  38. llama_cloud/resources/reports/client.py +0 -1230
  39. llama_cloud/resources/reports/types/__init__.py +0 -7
  40. llama_cloud/resources/reports/types/update_report_plan_api_v_1_reports_report_id_plan_patch_request_action.py +0 -25
  41. llama_cloud/types/audio_block.py +0 -34
  42. llama_cloud/types/edit_suggestion_blocks_item.py +0 -8
  43. llama_cloud/types/image_block.py +0 -35
  44. llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py +0 -56
  45. llama_cloud/types/paginated_report_response.py +0 -35
  46. llama_cloud/types/progress_event_status.py +0 -33
  47. llama_cloud/types/report.py +0 -33
  48. llama_cloud/types/report_block.py +0 -35
  49. llama_cloud/types/report_block_dependency.py +0 -29
  50. llama_cloud/types/report_event_item_event_data.py +0 -45
  51. llama_cloud/types/report_event_type.py +0 -37
  52. llama_cloud/types/report_metadata.py +0 -43
  53. llama_cloud/types/report_plan.py +0 -36
  54. llama_cloud/types/report_plan_block.py +0 -36
  55. llama_cloud/types/report_query.py +0 -33
  56. llama_cloud/types/report_response.py +0 -41
  57. llama_cloud/types/report_state.py +0 -37
  58. llama_cloud/types/report_state_event.py +0 -38
  59. llama_cloud/types/text_block.py +0 -31
  60. llama_cloud-0.1.39.dist-info/METADATA +0 -32
  61. {llama_cloud-0.1.39.dist-info → llama_cloud-0.1.41.dist-info}/LICENSE +0 -0
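The most visible changes in this release are the removal of the beta reports API (the reports resource, its client, and the Report* and chat-block types listed near the end of the table above) and a new parse-configurations API on the beta client, backed by the new ParseConfiguration, ParseConfigurationCreate, ParseConfigurationFilter, and ParseConfigurationQueryResponse types. The sketch below is assembled from the docstring examples visible in this diff; the configuration name, version, page size, and ID values are placeholders, and no response fields beyond what the diff shows are assumed.

from llama_cloud import (
    FailPageMode,
    LlamaParseParameters,
    LlamaParseParametersPriority,
    ParseConfigurationCreate,
    ParsingMode,
)
from llama_cloud.client import LlamaCloud

client = LlamaCloud(
    token="YOUR_TOKEN",
)

# Create or update a named configuration (PUT api/v1/beta/parse-configurations).
config = client.beta.upsert_parse_configuration(
    request=ParseConfigurationCreate(
        name="invoices",  # placeholder name
        version="1",      # placeholder version
        parameters=LlamaParseParameters(
            priority=LlamaParseParametersPriority.LOW,
            parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
            replace_failed_page_mode=FailPageMode.RAW_TEXT,
        ),
    ),
)

# List configurations for the current project, optionally filtered by name/creator/version.
page = client.beta.list_parse_configurations(page_size=10, name="invoices")

# Fetch the most recently created configuration, or None if there is none yet.
latest = client.beta.get_latest_parse_configuration()

# Delete a configuration by ID; returns None on success.
client.beta.delete_parse_configuration(config_id="<config-id>")

Matching awaitable methods are added to AsyncLlamaCloud as well; the tail of this diff shows the async list_parse_configurations.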
@@ -25,6 +25,10 @@ from ...types.llama_parse_parameters import LlamaParseParameters
  from ...types.paginated_response_agent_data import PaginatedResponseAgentData
  from ...types.paginated_response_aggregate_group import PaginatedResponseAggregateGroup
  from ...types.paginated_response_quota_configuration import PaginatedResponseQuotaConfiguration
+ from ...types.parse_configuration import ParseConfiguration
+ from ...types.parse_configuration_create import ParseConfigurationCreate
+ from ...types.parse_configuration_filter import ParseConfigurationFilter
+ from ...types.parse_configuration_query_response import ParseConfigurationQueryResponse

  try:
      import pydantic
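Two of the new imports above, ParseConfigurationFilter and ParseConfigurationQueryResponse, support the query endpoint added further down in this file. A minimal sketch based on the docstring example in this diff; the empty filter and the page size are placeholders, and the shape of the response beyond its type name is not shown here.

from llama_cloud import ParseConfigurationFilter
from llama_cloud.client import LlamaCloud

client = LlamaCloud(
    token="YOUR_TOKEN",
)

# POST api/v1/beta/parse-configurations/query; returns a ParseConfigurationQueryResponse.
response = client.beta.query_parse_configurations(
    filter=ParseConfigurationFilter(),  # filter fields are not shown in this diff; populate as needed
    page_size=25,                       # placeholder page size
)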
@@ -842,47 +846,74 @@ class BetaClient:
842
846
  raise ApiError(status_code=_response.status_code, body=_response.text)
843
847
  raise ApiError(status_code=_response.status_code, body=_response_json)
844
848
 
845
-
846
- class AsyncBetaClient:
847
- def __init__(self, *, client_wrapper: AsyncClientWrapper):
848
- self._client_wrapper = client_wrapper
849
-
850
- async def list_batches(
849
+ def list_parse_configurations(
851
850
  self,
852
851
  *,
853
- limit: typing.Optional[int] = None,
854
- offset: typing.Optional[int] = None,
852
+ page_size: typing.Optional[int] = None,
853
+ page_token: typing.Optional[str] = None,
854
+ name: typing.Optional[str] = None,
855
+ creator: typing.Optional[str] = None,
856
+ version: typing.Optional[str] = None,
855
857
  project_id: typing.Optional[str] = None,
856
858
  organization_id: typing.Optional[str] = None,
857
- ) -> BatchPaginatedList:
859
+ ) -> ParseConfigurationQueryResponse:
858
860
  """
861
+ List parse configurations for the current project.
862
+
863
+ Args:
864
+ project: Validated project from dependency
865
+ user: Current user
866
+ db: Database session
867
+ page_size: Number of items per page
868
+ page_token: Token for pagination
869
+ name: Filter by configuration name
870
+ creator: Filter by creator
871
+ version: Filter by version
872
+
873
+ Returns:
874
+ Paginated response with parse configurations
875
+
859
876
  Parameters:
860
- - limit: typing.Optional[int].
877
+ - page_size: typing.Optional[int].
861
878
 
862
- - offset: typing.Optional[int].
879
+ - page_token: typing.Optional[str].
880
+
881
+ - name: typing.Optional[str].
882
+
883
+ - creator: typing.Optional[str].
884
+
885
+ - version: typing.Optional[str].
863
886
 
864
887
  - project_id: typing.Optional[str].
865
888
 
866
889
  - organization_id: typing.Optional[str].
867
890
  ---
868
- from llama_cloud.client import AsyncLlamaCloud
891
+ from llama_cloud.client import LlamaCloud
869
892
 
870
- client = AsyncLlamaCloud(
893
+ client = LlamaCloud(
871
894
  token="YOUR_TOKEN",
872
895
  )
873
- await client.beta.list_batches()
896
+ client.beta.list_parse_configurations()
874
897
  """
875
- _response = await self._client_wrapper.httpx_client.request(
898
+ _response = self._client_wrapper.httpx_client.request(
876
899
  "GET",
877
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
900
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
878
901
  params=remove_none_from_dict(
879
- {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
902
+ {
903
+ "page_size": page_size,
904
+ "page_token": page_token,
905
+ "name": name,
906
+ "creator": creator,
907
+ "version": version,
908
+ "project_id": project_id,
909
+ "organization_id": organization_id,
910
+ }
880
911
  ),
881
912
  headers=self._client_wrapper.get_headers(),
882
913
  timeout=60,
883
914
  )
884
915
  if 200 <= _response.status_code < 300:
885
- return pydantic.parse_obj_as(BatchPaginatedList, _response.json()) # type: ignore
916
+ return pydantic.parse_obj_as(ParseConfigurationQueryResponse, _response.json()) # type: ignore
886
917
  if _response.status_code == 422:
887
918
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
888
919
  try:
@@ -891,94 +922,66 @@ class AsyncBetaClient:
891
922
  raise ApiError(status_code=_response.status_code, body=_response.text)
892
923
  raise ApiError(status_code=_response.status_code, body=_response_json)
893
924
 
894
- async def create_batch(
925
+ def create_parse_configuration(
895
926
  self,
896
927
  *,
897
- organization_id: typing.Optional[str] = None,
898
928
  project_id: typing.Optional[str] = None,
899
- tool: str,
900
- tool_data: typing.Optional[LlamaParseParameters] = OMIT,
901
- input_type: str,
902
- input_id: str,
903
- output_type: typing.Optional[str] = OMIT,
904
- output_id: typing.Optional[str] = OMIT,
905
- batch_create_project_id: str,
906
- external_id: str,
907
- completion_window: typing.Optional[int] = OMIT,
908
- ) -> Batch:
929
+ organization_id: typing.Optional[str] = None,
930
+ request: ParseConfigurationCreate,
931
+ ) -> ParseConfiguration:
909
932
  """
910
- Parameters:
911
- - organization_id: typing.Optional[str].
912
-
913
- - project_id: typing.Optional[str].
914
-
915
- - tool: str. The tool to be used for all requests in the batch.
916
-
917
- - tool_data: typing.Optional[LlamaParseParameters].
918
-
919
- - input_type: str. The type of input file. Currently only 'datasource' is supported.
920
-
921
- - input_id: str. The ID of the input file for the batch.
933
+ Create a new parse configuration.
922
934
 
923
- - output_type: typing.Optional[str].
935
+ Args:
936
+ config_create: Parse configuration creation data
937
+ project: Validated project from dependency
938
+ user: Current user
939
+ db: Database session
924
940
 
925
- - output_id: typing.Optional[str].
941
+ Returns:
942
+ The created parse configuration
926
943
 
927
- - batch_create_project_id: str. The ID of the project to which the batch belongs
944
+ Parameters:
945
+ - project_id: typing.Optional[str].
928
946
 
929
- - external_id: str. A developer-provided ID for the batch. This ID will be returned in the response.
947
+ - organization_id: typing.Optional[str].
930
948
 
931
- - completion_window: typing.Optional[int]. The time frame within which the batch should be processed. Currently only 24h is supported.
949
+ - request: ParseConfigurationCreate.
932
950
  ---
933
951
  from llama_cloud import (
934
952
  FailPageMode,
935
953
  LlamaParseParameters,
936
954
  LlamaParseParametersPriority,
955
+ ParseConfigurationCreate,
937
956
  ParsingMode,
938
957
  )
939
- from llama_cloud.client import AsyncLlamaCloud
958
+ from llama_cloud.client import LlamaCloud
940
959
 
941
- client = AsyncLlamaCloud(
960
+ client = LlamaCloud(
942
961
  token="YOUR_TOKEN",
943
962
  )
944
- await client.beta.create_batch(
945
- tool="string",
946
- tool_data=LlamaParseParameters(
947
- priority=LlamaParseParametersPriority.LOW,
948
- parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
949
- replace_failed_page_mode=FailPageMode.RAW_TEXT,
963
+ client.beta.create_parse_configuration(
964
+ request=ParseConfigurationCreate(
965
+ name="string",
966
+ version="string",
967
+ parameters=LlamaParseParameters(
968
+ priority=LlamaParseParametersPriority.LOW,
969
+ parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
970
+ replace_failed_page_mode=FailPageMode.RAW_TEXT,
971
+ ),
950
972
  ),
951
- input_type="string",
952
- input_id="string",
953
- batch_create_project_id="string",
954
- external_id="string",
955
973
  )
956
974
  """
957
- _request: typing.Dict[str, typing.Any] = {
958
- "tool": tool,
959
- "input_type": input_type,
960
- "input_id": input_id,
961
- "project_id": batch_create_project_id,
962
- "external_id": external_id,
963
- }
964
- if tool_data is not OMIT:
965
- _request["tool_data"] = tool_data
966
- if output_type is not OMIT:
967
- _request["output_type"] = output_type
968
- if output_id is not OMIT:
969
- _request["output_id"] = output_id
970
- if completion_window is not OMIT:
971
- _request["completion_window"] = completion_window
972
- _response = await self._client_wrapper.httpx_client.request(
975
+ _response = self._client_wrapper.httpx_client.request(
973
976
  "POST",
974
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
975
- params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
976
- json=jsonable_encoder(_request),
977
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
978
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
979
+ json=jsonable_encoder(request),
977
980
  headers=self._client_wrapper.get_headers(),
978
981
  timeout=60,
979
982
  )
980
983
  if 200 <= _response.status_code < 300:
981
- return pydantic.parse_obj_as(Batch, _response.json()) # type: ignore
984
+ return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
982
985
  if _response.status_code == 422:
983
986
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
984
987
  try:
@@ -987,31 +990,66 @@ class AsyncBetaClient:
987
990
  raise ApiError(status_code=_response.status_code, body=_response.text)
988
991
  raise ApiError(status_code=_response.status_code, body=_response_json)
989
992
 
990
- async def get_batch(self, batch_id: str, *, organization_id: typing.Optional[str] = None) -> BatchPublicOutput:
993
+ def upsert_parse_configuration(
994
+ self,
995
+ *,
996
+ project_id: typing.Optional[str] = None,
997
+ organization_id: typing.Optional[str] = None,
998
+ request: ParseConfigurationCreate,
999
+ ) -> ParseConfiguration:
991
1000
  """
1001
+ Create or update a parse configuration by name.
1002
+
1003
+ Args:
1004
+ config_create: Parse configuration creation data
1005
+ project: Validated project from dependency
1006
+ user: Current user
1007
+ db: Database session
1008
+
1009
+ Returns:
1010
+ The created or updated parse configuration
1011
+
992
1012
  Parameters:
993
- - batch_id: str.
1013
+ - project_id: typing.Optional[str].
994
1014
 
995
1015
  - organization_id: typing.Optional[str].
1016
+
1017
+ - request: ParseConfigurationCreate.
996
1018
  ---
997
- from llama_cloud.client import AsyncLlamaCloud
1019
+ from llama_cloud import (
1020
+ FailPageMode,
1021
+ LlamaParseParameters,
1022
+ LlamaParseParametersPriority,
1023
+ ParseConfigurationCreate,
1024
+ ParsingMode,
1025
+ )
1026
+ from llama_cloud.client import LlamaCloud
998
1027
 
999
- client = AsyncLlamaCloud(
1028
+ client = LlamaCloud(
1000
1029
  token="YOUR_TOKEN",
1001
1030
  )
1002
- await client.beta.get_batch(
1003
- batch_id="string",
1031
+ client.beta.upsert_parse_configuration(
1032
+ request=ParseConfigurationCreate(
1033
+ name="string",
1034
+ version="string",
1035
+ parameters=LlamaParseParameters(
1036
+ priority=LlamaParseParametersPriority.LOW,
1037
+ parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
1038
+ replace_failed_page_mode=FailPageMode.RAW_TEXT,
1039
+ ),
1040
+ ),
1004
1041
  )
1005
1042
  """
1006
- _response = await self._client_wrapper.httpx_client.request(
1007
- "GET",
1008
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/batches/{batch_id}"),
1009
- params=remove_none_from_dict({"organization_id": organization_id}),
1043
+ _response = self._client_wrapper.httpx_client.request(
1044
+ "PUT",
1045
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
1046
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1047
+ json=jsonable_encoder(request),
1010
1048
  headers=self._client_wrapper.get_headers(),
1011
1049
  timeout=60,
1012
1050
  )
1013
1051
  if 200 <= _response.status_code < 300:
1014
- return pydantic.parse_obj_as(BatchPublicOutput, _response.json()) # type: ignore
1052
+ return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
1015
1053
  if _response.status_code == 422:
1016
1054
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1017
1055
  try:
@@ -1020,37 +1058,48 @@ class AsyncBetaClient:
1020
1058
  raise ApiError(status_code=_response.status_code, body=_response.text)
1021
1059
  raise ApiError(status_code=_response.status_code, body=_response_json)
1022
1060
 
1023
- async def get_agent_data(
1024
- self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1025
- ) -> AgentData:
1061
+ def get_parse_configuration(
1062
+ self, config_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1063
+ ) -> ParseConfiguration:
1026
1064
  """
1027
- Get agent data by ID.
1065
+ Get a parse configuration by ID.
1066
+
1067
+ Args:
1068
+ config_id: The ID of the parse configuration
1069
+ project: Validated project from dependency
1070
+ user: Current user
1071
+ db: Database session
1072
+
1073
+ Returns:
1074
+ The parse configuration
1028
1075
 
1029
1076
  Parameters:
1030
- - item_id: str.
1077
+ - config_id: str.
1031
1078
 
1032
1079
  - project_id: typing.Optional[str].
1033
1080
 
1034
1081
  - organization_id: typing.Optional[str].
1035
1082
  ---
1036
- from llama_cloud.client import AsyncLlamaCloud
1083
+ from llama_cloud.client import LlamaCloud
1037
1084
 
1038
- client = AsyncLlamaCloud(
1085
+ client = LlamaCloud(
1039
1086
  token="YOUR_TOKEN",
1040
1087
  )
1041
- await client.beta.get_agent_data(
1042
- item_id="string",
1088
+ client.beta.get_parse_configuration(
1089
+ config_id="string",
1043
1090
  )
1044
1091
  """
1045
- _response = await self._client_wrapper.httpx_client.request(
1092
+ _response = self._client_wrapper.httpx_client.request(
1046
1093
  "GET",
1047
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1094
+ urllib.parse.urljoin(
1095
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
1096
+ ),
1048
1097
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1049
1098
  headers=self._client_wrapper.get_headers(),
1050
1099
  timeout=60,
1051
1100
  )
1052
1101
  if 200 <= _response.status_code < 300:
1053
- return pydantic.parse_obj_as(AgentData, _response.json()) # type: ignore
1102
+ return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
1054
1103
  if _response.status_code == 422:
1055
1104
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1056
1105
  try:
@@ -1059,46 +1108,71 @@ class AsyncBetaClient:
1059
1108
  raise ApiError(status_code=_response.status_code, body=_response.text)
1060
1109
  raise ApiError(status_code=_response.status_code, body=_response_json)
1061
1110
 
1062
- async def update_agent_data(
1111
+ def update_parse_configuration(
1063
1112
  self,
1064
- item_id: str,
1113
+ config_id: str,
1065
1114
  *,
1066
1115
  project_id: typing.Optional[str] = None,
1067
1116
  organization_id: typing.Optional[str] = None,
1068
- data: typing.Dict[str, typing.Any],
1069
- ) -> AgentData:
1117
+ parameters: typing.Optional[LlamaParseParameters] = OMIT,
1118
+ ) -> ParseConfiguration:
1070
1119
  """
1071
- Update agent data by ID (overwrites).
1120
+ Update a parse configuration.
1121
+
1122
+ Args:
1123
+ config_id: The ID of the parse configuration to update
1124
+ config_update: Update data
1125
+ project: Validated project from dependency
1126
+ user: Current user
1127
+ db: Database session
1128
+
1129
+ Returns:
1130
+ The updated parse configuration
1072
1131
 
1073
1132
  Parameters:
1074
- - item_id: str.
1133
+ - config_id: str.
1075
1134
 
1076
1135
  - project_id: typing.Optional[str].
1077
1136
 
1078
1137
  - organization_id: typing.Optional[str].
1079
1138
 
1080
- - data: typing.Dict[str, typing.Any].
1139
+ - parameters: typing.Optional[LlamaParseParameters].
1081
1140
  ---
1082
- from llama_cloud.client import AsyncLlamaCloud
1141
+ from llama_cloud import (
1142
+ FailPageMode,
1143
+ LlamaParseParameters,
1144
+ LlamaParseParametersPriority,
1145
+ ParsingMode,
1146
+ )
1147
+ from llama_cloud.client import LlamaCloud
1083
1148
 
1084
- client = AsyncLlamaCloud(
1149
+ client = LlamaCloud(
1085
1150
  token="YOUR_TOKEN",
1086
1151
  )
1087
- await client.beta.update_agent_data(
1088
- item_id="string",
1089
- data={"string": {}},
1152
+ client.beta.update_parse_configuration(
1153
+ config_id="string",
1154
+ parameters=LlamaParseParameters(
1155
+ priority=LlamaParseParametersPriority.LOW,
1156
+ parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
1157
+ replace_failed_page_mode=FailPageMode.RAW_TEXT,
1158
+ ),
1090
1159
  )
1091
1160
  """
1092
- _response = await self._client_wrapper.httpx_client.request(
1161
+ _request: typing.Dict[str, typing.Any] = {}
1162
+ if parameters is not OMIT:
1163
+ _request["parameters"] = parameters
1164
+ _response = self._client_wrapper.httpx_client.request(
1093
1165
  "PUT",
1094
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1166
+ urllib.parse.urljoin(
1167
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
1168
+ ),
1095
1169
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1096
- json=jsonable_encoder({"data": data}),
1170
+ json=jsonable_encoder(_request),
1097
1171
  headers=self._client_wrapper.get_headers(),
1098
1172
  timeout=60,
1099
1173
  )
1100
1174
  if 200 <= _response.status_code < 300:
1101
- return pydantic.parse_obj_as(AgentData, _response.json()) # type: ignore
1175
+ return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
1102
1176
  if _response.status_code == 422:
1103
1177
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1104
1178
  try:
@@ -1107,37 +1181,45 @@ class AsyncBetaClient:
1107
1181
  raise ApiError(status_code=_response.status_code, body=_response.text)
1108
1182
  raise ApiError(status_code=_response.status_code, body=_response_json)
1109
1183
 
1110
- async def delete_agent_data(
1111
- self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1112
- ) -> typing.Dict[str, str]:
1184
+ def delete_parse_configuration(
1185
+ self, config_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1186
+ ) -> None:
1113
1187
  """
1114
- Delete agent data by ID.
1188
+ Delete a parse configuration.
1189
+
1190
+ Args:
1191
+ config_id: The ID of the parse configuration to delete
1192
+ project: Validated project from dependency
1193
+ user: Current user
1194
+ db: Database session
1115
1195
 
1116
1196
  Parameters:
1117
- - item_id: str.
1197
+ - config_id: str.
1118
1198
 
1119
1199
  - project_id: typing.Optional[str].
1120
1200
 
1121
1201
  - organization_id: typing.Optional[str].
1122
1202
  ---
1123
- from llama_cloud.client import AsyncLlamaCloud
1203
+ from llama_cloud.client import LlamaCloud
1124
1204
 
1125
- client = AsyncLlamaCloud(
1205
+ client = LlamaCloud(
1126
1206
  token="YOUR_TOKEN",
1127
1207
  )
1128
- await client.beta.delete_agent_data(
1129
- item_id="string",
1208
+ client.beta.delete_parse_configuration(
1209
+ config_id="string",
1130
1210
  )
1131
1211
  """
1132
- _response = await self._client_wrapper.httpx_client.request(
1212
+ _response = self._client_wrapper.httpx_client.request(
1133
1213
  "DELETE",
1134
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1214
+ urllib.parse.urljoin(
1215
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
1216
+ ),
1135
1217
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1136
1218
  headers=self._client_wrapper.get_headers(),
1137
1219
  timeout=60,
1138
1220
  )
1139
1221
  if 200 <= _response.status_code < 300:
1140
- return pydantic.parse_obj_as(typing.Dict[str, str], _response.json()) # type: ignore
1222
+ return
1141
1223
  if _response.status_code == 422:
1142
1224
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1143
1225
  try:
@@ -1146,52 +1228,1003 @@ class AsyncBetaClient:
1146
1228
  raise ApiError(status_code=_response.status_code, body=_response.text)
1147
1229
  raise ApiError(status_code=_response.status_code, body=_response_json)
1148
1230
 
1149
- async def create_agent_data(
1231
+ def query_parse_configurations(
1150
1232
  self,
1151
1233
  *,
1152
1234
  project_id: typing.Optional[str] = None,
1153
1235
  organization_id: typing.Optional[str] = None,
1154
- agent_slug: str,
1155
- collection: typing.Optional[str] = OMIT,
1156
- data: typing.Dict[str, typing.Any],
1157
- ) -> AgentData:
1236
+ page_size: typing.Optional[int] = OMIT,
1237
+ page_token: typing.Optional[str] = OMIT,
1238
+ filter: typing.Optional[ParseConfigurationFilter] = OMIT,
1239
+ order_by: typing.Optional[str] = OMIT,
1240
+ ) -> ParseConfigurationQueryResponse:
1158
1241
  """
1159
- Create new agent data.
1242
+ Query parse configurations with filtering and pagination.
1243
+
1244
+ Args:
1245
+ query_request: Query request with filters and pagination
1246
+ project: Validated project from dependency
1247
+ user: Current user
1248
+ db: Database session
1249
+
1250
+ Returns:
1251
+ Paginated response with parse configurations
1160
1252
 
1161
1253
  Parameters:
1162
1254
  - project_id: typing.Optional[str].
1163
1255
 
1164
1256
  - organization_id: typing.Optional[str].
1165
1257
 
1166
- - agent_slug: str.
1258
+ - page_size: typing.Optional[int].
1167
1259
 
1168
- - collection: typing.Optional[str].
1260
+ - page_token: typing.Optional[str].
1169
1261
 
1170
- - data: typing.Dict[str, typing.Any].
1262
+ - filter: typing.Optional[ParseConfigurationFilter].
1263
+
1264
+ - order_by: typing.Optional[str].
1171
1265
  ---
1172
- from llama_cloud.client import AsyncLlamaCloud
1266
+ from llama_cloud import ParseConfigurationFilter
1267
+ from llama_cloud.client import LlamaCloud
1173
1268
 
1174
- client = AsyncLlamaCloud(
1269
+ client = LlamaCloud(
1175
1270
  token="YOUR_TOKEN",
1176
1271
  )
1177
- await client.beta.create_agent_data(
1272
+ client.beta.query_parse_configurations(
1273
+ filter=ParseConfigurationFilter(),
1274
+ )
1275
+ """
1276
+ _request: typing.Dict[str, typing.Any] = {}
1277
+ if page_size is not OMIT:
1278
+ _request["page_size"] = page_size
1279
+ if page_token is not OMIT:
1280
+ _request["page_token"] = page_token
1281
+ if filter is not OMIT:
1282
+ _request["filter"] = filter
1283
+ if order_by is not OMIT:
1284
+ _request["order_by"] = order_by
1285
+ _response = self._client_wrapper.httpx_client.request(
1286
+ "POST",
1287
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations/query"),
1288
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1289
+ json=jsonable_encoder(_request),
1290
+ headers=self._client_wrapper.get_headers(),
1291
+ timeout=60,
1292
+ )
1293
+ if 200 <= _response.status_code < 300:
1294
+ return pydantic.parse_obj_as(ParseConfigurationQueryResponse, _response.json()) # type: ignore
1295
+ if _response.status_code == 422:
1296
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1297
+ try:
1298
+ _response_json = _response.json()
1299
+ except JSONDecodeError:
1300
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1301
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1302
+
1303
+ def get_latest_parse_configuration(
1304
+ self,
1305
+ *,
1306
+ creator: typing.Optional[str] = None,
1307
+ project_id: typing.Optional[str] = None,
1308
+ organization_id: typing.Optional[str] = None,
1309
+ ) -> typing.Optional[ParseConfiguration]:
1310
+ """
1311
+ Get the latest parse configuration for the current project.
1312
+
1313
+ Args:
1314
+ project: Validated project from dependency
1315
+ user: Current user
1316
+ db: Database session
1317
+ creator: Optional creator filter
1318
+
1319
+ Returns:
1320
+ The latest parse configuration or None if not found
1321
+
1322
+ Parameters:
1323
+ - creator: typing.Optional[str].
1324
+
1325
+ - project_id: typing.Optional[str].
1326
+
1327
+ - organization_id: typing.Optional[str].
1328
+ ---
1329
+ from llama_cloud.client import LlamaCloud
1330
+
1331
+ client = LlamaCloud(
1332
+ token="YOUR_TOKEN",
1333
+ )
1334
+ client.beta.get_latest_parse_configuration()
1335
+ """
1336
+ _response = self._client_wrapper.httpx_client.request(
1337
+ "GET",
1338
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations/latest"),
1339
+ params=remove_none_from_dict(
1340
+ {"creator": creator, "project_id": project_id, "organization_id": organization_id}
1341
+ ),
1342
+ headers=self._client_wrapper.get_headers(),
1343
+ timeout=60,
1344
+ )
1345
+ if 200 <= _response.status_code < 300:
1346
+ return pydantic.parse_obj_as(typing.Optional[ParseConfiguration], _response.json()) # type: ignore
1347
+ if _response.status_code == 422:
1348
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1349
+ try:
1350
+ _response_json = _response.json()
1351
+ except JSONDecodeError:
1352
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1353
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1354
+
1355
+
1356
+ class AsyncBetaClient:
1357
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
1358
+ self._client_wrapper = client_wrapper
1359
+
1360
+ async def list_batches(
1361
+ self,
1362
+ *,
1363
+ limit: typing.Optional[int] = None,
1364
+ offset: typing.Optional[int] = None,
1365
+ project_id: typing.Optional[str] = None,
1366
+ organization_id: typing.Optional[str] = None,
1367
+ ) -> BatchPaginatedList:
1368
+ """
1369
+ Parameters:
1370
+ - limit: typing.Optional[int].
1371
+
1372
+ - offset: typing.Optional[int].
1373
+
1374
+ - project_id: typing.Optional[str].
1375
+
1376
+ - organization_id: typing.Optional[str].
1377
+ ---
1378
+ from llama_cloud.client import AsyncLlamaCloud
1379
+
1380
+ client = AsyncLlamaCloud(
1381
+ token="YOUR_TOKEN",
1382
+ )
1383
+ await client.beta.list_batches()
1384
+ """
1385
+ _response = await self._client_wrapper.httpx_client.request(
1386
+ "GET",
1387
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
1388
+ params=remove_none_from_dict(
1389
+ {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
1390
+ ),
1391
+ headers=self._client_wrapper.get_headers(),
1392
+ timeout=60,
1393
+ )
1394
+ if 200 <= _response.status_code < 300:
1395
+ return pydantic.parse_obj_as(BatchPaginatedList, _response.json()) # type: ignore
1396
+ if _response.status_code == 422:
1397
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1398
+ try:
1399
+ _response_json = _response.json()
1400
+ except JSONDecodeError:
1401
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1402
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1403
+
1404
+ async def create_batch(
1405
+ self,
1406
+ *,
1407
+ organization_id: typing.Optional[str] = None,
1408
+ project_id: typing.Optional[str] = None,
1409
+ tool: str,
1410
+ tool_data: typing.Optional[LlamaParseParameters] = OMIT,
1411
+ input_type: str,
1412
+ input_id: str,
1413
+ output_type: typing.Optional[str] = OMIT,
1414
+ output_id: typing.Optional[str] = OMIT,
1415
+ batch_create_project_id: str,
1416
+ external_id: str,
1417
+ completion_window: typing.Optional[int] = OMIT,
1418
+ ) -> Batch:
1419
+ """
1420
+ Parameters:
1421
+ - organization_id: typing.Optional[str].
1422
+
1423
+ - project_id: typing.Optional[str].
1424
+
1425
+ - tool: str. The tool to be used for all requests in the batch.
1426
+
1427
+ - tool_data: typing.Optional[LlamaParseParameters].
1428
+
1429
+ - input_type: str. The type of input file. Currently only 'datasource' is supported.
1430
+
1431
+ - input_id: str. The ID of the input file for the batch.
1432
+
1433
+ - output_type: typing.Optional[str].
1434
+
1435
+ - output_id: typing.Optional[str].
1436
+
1437
+ - batch_create_project_id: str. The ID of the project to which the batch belongs
1438
+
1439
+ - external_id: str. A developer-provided ID for the batch. This ID will be returned in the response.
1440
+
1441
+ - completion_window: typing.Optional[int]. The time frame within which the batch should be processed. Currently only 24h is supported.
1442
+ ---
1443
+ from llama_cloud import (
1444
+ FailPageMode,
1445
+ LlamaParseParameters,
1446
+ LlamaParseParametersPriority,
1447
+ ParsingMode,
1448
+ )
1449
+ from llama_cloud.client import AsyncLlamaCloud
1450
+
1451
+ client = AsyncLlamaCloud(
1452
+ token="YOUR_TOKEN",
1453
+ )
1454
+ await client.beta.create_batch(
1455
+ tool="string",
1456
+ tool_data=LlamaParseParameters(
1457
+ priority=LlamaParseParametersPriority.LOW,
1458
+ parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
1459
+ replace_failed_page_mode=FailPageMode.RAW_TEXT,
1460
+ ),
1461
+ input_type="string",
1462
+ input_id="string",
1463
+ batch_create_project_id="string",
1464
+ external_id="string",
1465
+ )
1466
+ """
1467
+ _request: typing.Dict[str, typing.Any] = {
1468
+ "tool": tool,
1469
+ "input_type": input_type,
1470
+ "input_id": input_id,
1471
+ "project_id": batch_create_project_id,
1472
+ "external_id": external_id,
1473
+ }
1474
+ if tool_data is not OMIT:
1475
+ _request["tool_data"] = tool_data
1476
+ if output_type is not OMIT:
1477
+ _request["output_type"] = output_type
1478
+ if output_id is not OMIT:
1479
+ _request["output_id"] = output_id
1480
+ if completion_window is not OMIT:
1481
+ _request["completion_window"] = completion_window
1482
+ _response = await self._client_wrapper.httpx_client.request(
1483
+ "POST",
1484
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
1485
+ params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
1486
+ json=jsonable_encoder(_request),
1487
+ headers=self._client_wrapper.get_headers(),
1488
+ timeout=60,
1489
+ )
1490
+ if 200 <= _response.status_code < 300:
1491
+ return pydantic.parse_obj_as(Batch, _response.json()) # type: ignore
1492
+ if _response.status_code == 422:
1493
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1494
+ try:
1495
+ _response_json = _response.json()
1496
+ except JSONDecodeError:
1497
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1498
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1499
+
1500
+ async def get_batch(self, batch_id: str, *, organization_id: typing.Optional[str] = None) -> BatchPublicOutput:
1501
+ """
1502
+ Parameters:
1503
+ - batch_id: str.
1504
+
1505
+ - organization_id: typing.Optional[str].
1506
+ ---
1507
+ from llama_cloud.client import AsyncLlamaCloud
1508
+
1509
+ client = AsyncLlamaCloud(
1510
+ token="YOUR_TOKEN",
1511
+ )
1512
+ await client.beta.get_batch(
1513
+ batch_id="string",
1514
+ )
1515
+ """
1516
+ _response = await self._client_wrapper.httpx_client.request(
1517
+ "GET",
1518
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/batches/{batch_id}"),
1519
+ params=remove_none_from_dict({"organization_id": organization_id}),
1520
+ headers=self._client_wrapper.get_headers(),
1521
+ timeout=60,
1522
+ )
1523
+ if 200 <= _response.status_code < 300:
1524
+ return pydantic.parse_obj_as(BatchPublicOutput, _response.json()) # type: ignore
1525
+ if _response.status_code == 422:
1526
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1527
+ try:
1528
+ _response_json = _response.json()
1529
+ except JSONDecodeError:
1530
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1531
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1532
+
1533
+ async def get_agent_data(
1534
+ self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1535
+ ) -> AgentData:
1536
+ """
1537
+ Get agent data by ID.
1538
+
1539
+ Parameters:
1540
+ - item_id: str.
1541
+
1542
+ - project_id: typing.Optional[str].
1543
+
1544
+ - organization_id: typing.Optional[str].
1545
+ ---
1546
+ from llama_cloud.client import AsyncLlamaCloud
1547
+
1548
+ client = AsyncLlamaCloud(
1549
+ token="YOUR_TOKEN",
1550
+ )
1551
+ await client.beta.get_agent_data(
1552
+ item_id="string",
1553
+ )
1554
+ """
1555
+ _response = await self._client_wrapper.httpx_client.request(
1556
+ "GET",
1557
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1558
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1559
+ headers=self._client_wrapper.get_headers(),
1560
+ timeout=60,
1561
+ )
1562
+ if 200 <= _response.status_code < 300:
1563
+ return pydantic.parse_obj_as(AgentData, _response.json()) # type: ignore
1564
+ if _response.status_code == 422:
1565
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1566
+ try:
1567
+ _response_json = _response.json()
1568
+ except JSONDecodeError:
1569
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1570
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1571
+
1572
+ async def update_agent_data(
1573
+ self,
1574
+ item_id: str,
1575
+ *,
1576
+ project_id: typing.Optional[str] = None,
1577
+ organization_id: typing.Optional[str] = None,
1578
+ data: typing.Dict[str, typing.Any],
1579
+ ) -> AgentData:
1580
+ """
1581
+ Update agent data by ID (overwrites).
1582
+
1583
+ Parameters:
1584
+ - item_id: str.
1585
+
1586
+ - project_id: typing.Optional[str].
1587
+
1588
+ - organization_id: typing.Optional[str].
1589
+
1590
+ - data: typing.Dict[str, typing.Any].
1591
+ ---
1592
+ from llama_cloud.client import AsyncLlamaCloud
1593
+
1594
+ client = AsyncLlamaCloud(
1595
+ token="YOUR_TOKEN",
1596
+ )
1597
+ await client.beta.update_agent_data(
1598
+ item_id="string",
1599
+ data={"string": {}},
1600
+ )
1601
+ """
1602
+ _response = await self._client_wrapper.httpx_client.request(
1603
+ "PUT",
1604
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1605
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1606
+ json=jsonable_encoder({"data": data}),
1607
+ headers=self._client_wrapper.get_headers(),
1608
+ timeout=60,
1609
+ )
1610
+ if 200 <= _response.status_code < 300:
1611
+ return pydantic.parse_obj_as(AgentData, _response.json()) # type: ignore
1612
+ if _response.status_code == 422:
1613
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1614
+ try:
1615
+ _response_json = _response.json()
1616
+ except JSONDecodeError:
1617
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1618
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1619
+
1620
+ async def delete_agent_data(
1621
+ self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1622
+ ) -> typing.Dict[str, str]:
1623
+ """
1624
+ Delete agent data by ID.
1625
+
1626
+ Parameters:
1627
+ - item_id: str.
1628
+
1629
+ - project_id: typing.Optional[str].
1630
+
1631
+ - organization_id: typing.Optional[str].
1632
+ ---
1633
+ from llama_cloud.client import AsyncLlamaCloud
1634
+
1635
+ client = AsyncLlamaCloud(
1636
+ token="YOUR_TOKEN",
1637
+ )
1638
+ await client.beta.delete_agent_data(
1639
+ item_id="string",
1640
+ )
1641
+ """
1642
+ _response = await self._client_wrapper.httpx_client.request(
1643
+ "DELETE",
1644
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1645
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1646
+ headers=self._client_wrapper.get_headers(),
1647
+ timeout=60,
1648
+ )
1649
+ if 200 <= _response.status_code < 300:
1650
+ return pydantic.parse_obj_as(typing.Dict[str, str], _response.json()) # type: ignore
1651
+ if _response.status_code == 422:
1652
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1653
+ try:
1654
+ _response_json = _response.json()
1655
+ except JSONDecodeError:
1656
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1657
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1658
+
1659
+ async def create_agent_data(
1660
+ self,
1661
+ *,
1662
+ project_id: typing.Optional[str] = None,
1663
+ organization_id: typing.Optional[str] = None,
1664
+ agent_slug: str,
1665
+ collection: typing.Optional[str] = OMIT,
1666
+ data: typing.Dict[str, typing.Any],
1667
+ ) -> AgentData:
1668
+ """
1669
+ Create new agent data.
1670
+
1671
+ Parameters:
1672
+ - project_id: typing.Optional[str].
1673
+
1674
+ - organization_id: typing.Optional[str].
1675
+
1676
+ - agent_slug: str.
1677
+
1678
+ - collection: typing.Optional[str].
1679
+
1680
+ - data: typing.Dict[str, typing.Any].
1681
+ ---
1682
+ from llama_cloud.client import AsyncLlamaCloud
1683
+
1684
+ client = AsyncLlamaCloud(
1685
+ token="YOUR_TOKEN",
1686
+ )
1687
+ await client.beta.create_agent_data(
1178
1688
  agent_slug="string",
1179
1689
  data={"string": {}},
1180
1690
  )
1181
1691
  """
1182
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug, "data": data}
1183
- if collection is not OMIT:
1184
- _request["collection"] = collection
1692
+ _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug, "data": data}
1693
+ if collection is not OMIT:
1694
+ _request["collection"] = collection
1695
+ _response = await self._client_wrapper.httpx_client.request(
1696
+ "POST",
1697
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
1698
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1699
+ json=jsonable_encoder(_request),
1700
+ headers=self._client_wrapper.get_headers(),
1701
+ timeout=60,
1702
+ )
1703
+ if 200 <= _response.status_code < 300:
1704
+ return pydantic.parse_obj_as(AgentData, _response.json()) # type: ignore
1705
+ if _response.status_code == 422:
1706
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1707
+ try:
1708
+ _response_json = _response.json()
1709
+ except JSONDecodeError:
1710
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1711
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1712
+
1713
+ async def search_agent_data_api_v_1_beta_agent_data_search_post(
1714
+ self,
1715
+ *,
1716
+ project_id: typing.Optional[str] = None,
1717
+ organization_id: typing.Optional[str] = None,
1718
+ page_size: typing.Optional[int] = OMIT,
1719
+ page_token: typing.Optional[str] = OMIT,
1720
+ filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
1721
+ order_by: typing.Optional[str] = OMIT,
1722
+ agent_slug: str,
1723
+ collection: typing.Optional[str] = OMIT,
1724
+ include_total: typing.Optional[bool] = OMIT,
1725
+ offset: typing.Optional[int] = OMIT,
1726
+ ) -> PaginatedResponseAgentData:
1727
+ """
1728
+ Search agent data with filtering, sorting, and pagination.
1729
+
1730
+ Parameters:
1731
+ - project_id: typing.Optional[str].
1732
+
1733
+ - organization_id: typing.Optional[str].
1734
+
1735
+ - page_size: typing.Optional[int].
1736
+
1737
+ - page_token: typing.Optional[str].
1738
+
1739
+ - filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
1740
+
1741
+ - order_by: typing.Optional[str].
1742
+
1743
+ - agent_slug: str. The agent deployment's agent_slug to search within
1744
+
1745
+ - collection: typing.Optional[str]. The logical agent data collection to search within
1746
+
1747
+ - include_total: typing.Optional[bool]. Whether to include the total number of items in the response
1748
+
1749
+ - offset: typing.Optional[int].
1750
+ ---
1751
+ from llama_cloud.client import AsyncLlamaCloud
1752
+
1753
+ client = AsyncLlamaCloud(
1754
+ token="YOUR_TOKEN",
1755
+ )
1756
+ await client.beta.search_agent_data_api_v_1_beta_agent_data_search_post(
1757
+ agent_slug="string",
1758
+ )
1759
+ """
1760
+ _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
1761
+ if page_size is not OMIT:
1762
+ _request["page_size"] = page_size
1763
+ if page_token is not OMIT:
1764
+ _request["page_token"] = page_token
1765
+ if filter is not OMIT:
1766
+ _request["filter"] = filter
1767
+ if order_by is not OMIT:
1768
+ _request["order_by"] = order_by
1769
+ if collection is not OMIT:
1770
+ _request["collection"] = collection
1771
+ if include_total is not OMIT:
1772
+ _request["include_total"] = include_total
1773
+ if offset is not OMIT:
1774
+ _request["offset"] = offset
1775
+ _response = await self._client_wrapper.httpx_client.request(
1776
+ "POST",
1777
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
1778
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1779
+ json=jsonable_encoder(_request),
1780
+ headers=self._client_wrapper.get_headers(),
1781
+ timeout=60,
1782
+ )
1783
+ if 200 <= _response.status_code < 300:
1784
+ return pydantic.parse_obj_as(PaginatedResponseAgentData, _response.json()) # type: ignore
1785
+ if _response.status_code == 422:
1786
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1787
+ try:
1788
+ _response_json = _response.json()
1789
+ except JSONDecodeError:
1790
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1791
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1792
+
1793
+ async def aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
1794
+ self,
1795
+ *,
1796
+ project_id: typing.Optional[str] = None,
1797
+ organization_id: typing.Optional[str] = None,
1798
+ page_size: typing.Optional[int] = OMIT,
1799
+ page_token: typing.Optional[str] = OMIT,
1800
+ filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
1801
+ order_by: typing.Optional[str] = OMIT,
1802
+ agent_slug: str,
1803
+ collection: typing.Optional[str] = OMIT,
1804
+ group_by: typing.Optional[typing.List[str]] = OMIT,
1805
+ count: typing.Optional[bool] = OMIT,
1806
+ first: typing.Optional[bool] = OMIT,
1807
+ offset: typing.Optional[int] = OMIT,
1808
+ ) -> PaginatedResponseAggregateGroup:
1809
+ """
1810
+ Aggregate agent data with grouping and optional counting/first item retrieval.
1811
+
1812
+ Parameters:
1813
+ - project_id: typing.Optional[str].
1814
+
1815
+ - organization_id: typing.Optional[str].
1816
+
1817
+ - page_size: typing.Optional[int].
1818
+
1819
+ - page_token: typing.Optional[str].
1820
+
1821
+ - filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
1822
+
1823
+ - order_by: typing.Optional[str].
1824
+
1825
+ - agent_slug: str. The agent deployment's agent_slug to aggregate data for
1826
+
1827
+ - collection: typing.Optional[str]. The logical agent data collection to aggregate data for
1828
+
1829
+ - group_by: typing.Optional[typing.List[str]].
1830
+
1831
+ - count: typing.Optional[bool].
1832
+
1833
+ - first: typing.Optional[bool].
1834
+
1835
+ - offset: typing.Optional[int].
1836
+ ---
1837
+ from llama_cloud.client import AsyncLlamaCloud
1838
+
1839
+ client = AsyncLlamaCloud(
1840
+ token="YOUR_TOKEN",
1841
+ )
1842
+ await client.beta.aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
1843
+ agent_slug="string",
1844
+ )
1845
+ """
1846
+ _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
1847
+ if page_size is not OMIT:
1848
+ _request["page_size"] = page_size
1849
+ if page_token is not OMIT:
1850
+ _request["page_token"] = page_token
1851
+ if filter is not OMIT:
1852
+ _request["filter"] = filter
1853
+ if order_by is not OMIT:
1854
+ _request["order_by"] = order_by
1855
+ if collection is not OMIT:
1856
+ _request["collection"] = collection
1857
+ if group_by is not OMIT:
1858
+ _request["group_by"] = group_by
1859
+ if count is not OMIT:
1860
+ _request["count"] = count
1861
+ if first is not OMIT:
1862
+ _request["first"] = first
1863
+ if offset is not OMIT:
1864
+ _request["offset"] = offset
1865
+ _response = await self._client_wrapper.httpx_client.request(
1866
+ "POST",
1867
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
1868
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1869
+ json=jsonable_encoder(_request),
1870
+ headers=self._client_wrapper.get_headers(),
1871
+ timeout=60,
1872
+ )
1873
+ if 200 <= _response.status_code < 300:
1874
+ return pydantic.parse_obj_as(PaginatedResponseAggregateGroup, _response.json()) # type: ignore
1875
+ if _response.status_code == 422:
1876
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1877
+ try:
1878
+ _response_json = _response.json()
1879
+ except JSONDecodeError:
1880
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1881
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1882
+
1883
+ async def list_quota_configurations(
1884
+ self,
1885
+ *,
1886
+ source_type: typing_extensions.Literal["organization"],
1887
+ source_id: str,
1888
+ page: typing.Optional[int] = None,
1889
+ page_size: typing.Optional[int] = None,
1890
+ ) -> PaginatedResponseQuotaConfiguration:
1891
+ """
1892
+ Retrieve a paginated list of quota configurations with optional filtering.
1893
+
1894
+ Parameters:
1895
+ - source_type: typing_extensions.Literal["organization"].
1896
+
1897
+ - source_id: str.
1898
+
1899
+ - page: typing.Optional[int].
1900
+
1901
+ - page_size: typing.Optional[int].
1902
+ ---
1903
+ from llama_cloud.client import AsyncLlamaCloud
1904
+
1905
+ client = AsyncLlamaCloud(
1906
+ token="YOUR_TOKEN",
1907
+ )
1908
+ await client.beta.list_quota_configurations(
1909
+ source_type="organization",
1910
+ source_id="string",
1911
+ )
1912
+ """
1913
+ _response = await self._client_wrapper.httpx_client.request(
1914
+ "GET",
1915
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/quota-management"),
1916
+ params=remove_none_from_dict(
1917
+ {"source_type": source_type, "source_id": source_id, "page": page, "page_size": page_size}
1918
+ ),
1919
+ headers=self._client_wrapper.get_headers(),
1920
+ timeout=60,
1921
+ )
1922
+ if 200 <= _response.status_code < 300:
1923
+ return pydantic.parse_obj_as(PaginatedResponseQuotaConfiguration, _response.json()) # type: ignore
1924
+ if _response.status_code == 422:
1925
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1926
+ try:
1927
+ _response_json = _response.json()
1928
+ except JSONDecodeError:
1929
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1930
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1931
+
1932
+ async def create_file(
1933
+ self,
1934
+ *,
1935
+ project_id: typing.Optional[str] = None,
1936
+ organization_id: typing.Optional[str] = None,
1937
+ request: FileCreate,
1938
+ ) -> File:
1939
+ """
1940
+ Create a new file in the project.
1941
+
1942
+ Args:
1943
+ file_create: File creation data
1944
+ project: Validated project from dependency
1945
+ db: Database session
1946
+
1947
+ Returns:
1948
+ The created file
1949
+
1950
+ Parameters:
1951
+ - project_id: typing.Optional[str].
1952
+
1953
+ - organization_id: typing.Optional[str].
1954
+
1955
+ - request: FileCreate.
1956
+ ---
1957
+ from llama_cloud import FileCreate
1958
+ from llama_cloud.client import AsyncLlamaCloud
1959
+
1960
+ client = AsyncLlamaCloud(
1961
+ token="YOUR_TOKEN",
1962
+ )
1963
+ await client.beta.create_file(
1964
+ request=FileCreate(
1965
+ name="string",
1966
+ ),
1967
+ )
1968
+ """
1185
1969
  _response = await self._client_wrapper.httpx_client.request(
1186
1970
  "POST",
1187
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
1971
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
1972
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1973
+ json=jsonable_encoder(request),
1974
+ headers=self._client_wrapper.get_headers(),
1975
+ timeout=60,
1976
+ )
1977
+ if 200 <= _response.status_code < 300:
1978
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
1979
+ if _response.status_code == 422:
1980
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1981
+ try:
1982
+ _response_json = _response.json()
1983
+ except JSONDecodeError:
1984
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1985
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1986
+
1987
+ async def upsert_file(
1988
+ self,
1989
+ *,
1990
+ project_id: typing.Optional[str] = None,
1991
+ organization_id: typing.Optional[str] = None,
1992
+ request: FileCreate,
1993
+ ) -> File:
1994
+ """
1995
+ Upsert a file (create or update if exists) in the project.
1996
+
1997
+ Args:
1998
+ file_create: File creation/update data
1999
+ project: Validated project from dependency
2000
+ db: Database session
2001
+
2002
+ Returns:
2003
+ The upserted file
2004
+
2005
+ Parameters:
2006
+ - project_id: typing.Optional[str].
2007
+
2008
+ - organization_id: typing.Optional[str].
2009
+
2010
+ - request: FileCreate.
2011
+ ---
2012
+ from llama_cloud import FileCreate
2013
+ from llama_cloud.client import AsyncLlamaCloud
2014
+
2015
+ client = AsyncLlamaCloud(
2016
+ token="YOUR_TOKEN",
2017
+ )
2018
+ await client.beta.upsert_file(
2019
+ request=FileCreate(
2020
+ name="string",
2021
+ ),
2022
+ )
2023
+ """
2024
+ _response = await self._client_wrapper.httpx_client.request(
2025
+ "PUT",
2026
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
2027
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
2028
+ json=jsonable_encoder(request),
2029
+ headers=self._client_wrapper.get_headers(),
2030
+ timeout=60,
2031
+ )
2032
+ if 200 <= _response.status_code < 300:
2033
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
2034
+ if _response.status_code == 422:
2035
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
2036
+ try:
2037
+ _response_json = _response.json()
2038
+ except JSONDecodeError:
2039
+ raise ApiError(status_code=_response.status_code, body=_response.text)
2040
+ raise ApiError(status_code=_response.status_code, body=_response_json)
2041
+
2042
+ async def query_files(
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ page_size: typing.Optional[int] = OMIT,
+ page_token: typing.Optional[str] = OMIT,
+ filter: typing.Optional[FileFilter] = OMIT,
+ order_by: typing.Optional[str] = OMIT,
+ ) -> FileQueryResponse:
+ """
+ Query files with flexible filtering and pagination.
+
+ Args:
+ request: The query request with filters and pagination
+ project: Validated project from dependency
+ db: Database session
+
+ Returns:
+ Paginated response with files
+
+ Parameters:
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+
+ - page_size: typing.Optional[int].
+
+ - page_token: typing.Optional[str].
+
+ - filter: typing.Optional[FileFilter].
+
+ - order_by: typing.Optional[str].
+ ---
+ from llama_cloud import FileFilter
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.query_files(
+ filter=FileFilter(),
+ )
+ """
+ _request: typing.Dict[str, typing.Any] = {}
+ if page_size is not OMIT:
+ _request["page_size"] = page_size
+ if page_token is not OMIT:
+ _request["page_token"] = page_token
+ if filter is not OMIT:
+ _request["filter"] = filter
+ if order_by is not OMIT:
+ _request["order_by"] = order_by
+ _response = await self._client_wrapper.httpx_client.request(
+ "POST",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return pydantic.parse_obj_as(AgentData, _response.json()) # type: ignore
+ return pydantic.parse_obj_as(FileQueryResponse, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def delete_file(
+ self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ ) -> None:
+ """
+ Delete a single file from the project.
+
+ Args:
+ file_id: The ID of the file to delete
+ project: Validated project from dependency
+ db: Database session
+
+ Returns:
+ None (204 No Content on success)
+
+ Parameters:
+ - file_id: str.
+
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.delete_file(
+ file_id="string",
+ )
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "DELETE",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def list_parse_configurations(
+ self,
+ *,
+ page_size: typing.Optional[int] = None,
+ page_token: typing.Optional[str] = None,
+ name: typing.Optional[str] = None,
+ creator: typing.Optional[str] = None,
+ version: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ ) -> ParseConfigurationQueryResponse:
+ """
+ List parse configurations for the current project.
+
+ Args:
+ project: Validated project from dependency
+ user: Current user
+ db: Database session
+ page_size: Number of items per page
+ page_token: Token for pagination
+ name: Filter by configuration name
+ creator: Filter by creator
+ version: Filter by version
+
+ Returns:
+ Paginated response with parse configurations
+
+ Parameters:
+ - page_size: typing.Optional[int].
+
+ - page_token: typing.Optional[str].
+
+ - name: typing.Optional[str].
+
+ - creator: typing.Optional[str].
+
+ - version: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.list_parse_configurations()
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
+ params=remove_none_from_dict(
+ {
+ "page_size": page_size,
+ "page_token": page_token,
+ "name": name,
+ "creator": creator,
+ "version": version,
+ "project_id": project_id,
+ "organization_id": organization_id,
+ }
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(ParseConfigurationQueryResponse, _response.json()) # type: ignore
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try:
@@ -1200,78 +2233,66 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
- async def search_agent_data_api_v_1_beta_agent_data_search_post(
+ async def create_parse_configuration(
  self,
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- page_size: typing.Optional[int] = OMIT,
- page_token: typing.Optional[str] = OMIT,
- filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
- order_by: typing.Optional[str] = OMIT,
- agent_slug: str,
- collection: typing.Optional[str] = OMIT,
- include_total: typing.Optional[bool] = OMIT,
- offset: typing.Optional[int] = OMIT,
- ) -> PaginatedResponseAgentData:
+ request: ParseConfigurationCreate,
+ ) -> ParseConfiguration:
  """
- Search agent data with filtering, sorting, and pagination.
+ Create a new parse configuration.
+
+ Args:
+ config_create: Parse configuration creation data
+ project: Validated project from dependency
+ user: Current user
+ db: Database session
+
+ Returns:
+ The created parse configuration
 
  Parameters:
  - project_id: typing.Optional[str].
 
  - organization_id: typing.Optional[str].
 
- - page_size: typing.Optional[int].
-
- - page_token: typing.Optional[str].
-
- - filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
-
- - order_by: typing.Optional[str].
-
- - agent_slug: str. The agent deployment's agent_slug to search within
-
- - collection: typing.Optional[str]. The logical agent data collection to search within
-
- - include_total: typing.Optional[bool]. Whether to include the total number of items in the response
-
- - offset: typing.Optional[int].
+ - request: ParseConfigurationCreate.
  ---
+ from llama_cloud import (
+ FailPageMode,
+ LlamaParseParameters,
+ LlamaParseParametersPriority,
+ ParseConfigurationCreate,
+ ParsingMode,
+ )
  from llama_cloud.client import AsyncLlamaCloud
 
  client = AsyncLlamaCloud(
  token="YOUR_TOKEN",
  )
- await client.beta.search_agent_data_api_v_1_beta_agent_data_search_post(
- agent_slug="string",
+ await client.beta.create_parse_configuration(
+ request=ParseConfigurationCreate(
+ name="string",
+ version="string",
+ parameters=LlamaParseParameters(
+ priority=LlamaParseParametersPriority.LOW,
+ parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
+ replace_failed_page_mode=FailPageMode.RAW_TEXT,
+ ),
+ ),
  )
  """
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
- if page_size is not OMIT:
- _request["page_size"] = page_size
- if page_token is not OMIT:
- _request["page_token"] = page_token
- if filter is not OMIT:
- _request["filter"] = filter
- if order_by is not OMIT:
- _request["order_by"] = order_by
- if collection is not OMIT:
- _request["collection"] = collection
- if include_total is not OMIT:
- _request["include_total"] = include_total
- if offset is not OMIT:
- _request["offset"] = offset
  _response = await self._client_wrapper.httpx_client.request(
  "POST",
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- json=jsonable_encoder(_request),
+ json=jsonable_encoder(request),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return pydantic.parse_obj_as(PaginatedResponseAgentData, _response.json()) # type: ignore
+ return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try:
@@ -1280,88 +2301,66 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
- async def aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
+ async def upsert_parse_configuration(
  self,
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- page_size: typing.Optional[int] = OMIT,
- page_token: typing.Optional[str] = OMIT,
- filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
- order_by: typing.Optional[str] = OMIT,
- agent_slug: str,
- collection: typing.Optional[str] = OMIT,
- group_by: typing.Optional[typing.List[str]] = OMIT,
- count: typing.Optional[bool] = OMIT,
- first: typing.Optional[bool] = OMIT,
- offset: typing.Optional[int] = OMIT,
- ) -> PaginatedResponseAggregateGroup:
+ request: ParseConfigurationCreate,
+ ) -> ParseConfiguration:
  """
- Aggregate agent data with grouping and optional counting/first item retrieval.
+ Create or update a parse configuration by name.
+
+ Args:
+ config_create: Parse configuration creation data
+ project: Validated project from dependency
+ user: Current user
+ db: Database session
+
+ Returns:
+ The created or updated parse configuration
 
  Parameters:
  - project_id: typing.Optional[str].
 
  - organization_id: typing.Optional[str].
 
- - page_size: typing.Optional[int].
-
- - page_token: typing.Optional[str].
-
- - filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
-
- - order_by: typing.Optional[str].
-
- - agent_slug: str. The agent deployment's agent_slug to aggregate data for
-
- - collection: typing.Optional[str]. The logical agent data collection to aggregate data for
-
- - group_by: typing.Optional[typing.List[str]].
-
- - count: typing.Optional[bool].
-
- - first: typing.Optional[bool].
-
- - offset: typing.Optional[int].
+ - request: ParseConfigurationCreate.
  ---
+ from llama_cloud import (
+ FailPageMode,
+ LlamaParseParameters,
+ LlamaParseParametersPriority,
+ ParseConfigurationCreate,
+ ParsingMode,
+ )
  from llama_cloud.client import AsyncLlamaCloud
 
  client = AsyncLlamaCloud(
  token="YOUR_TOKEN",
  )
- await client.beta.aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
- agent_slug="string",
+ await client.beta.upsert_parse_configuration(
+ request=ParseConfigurationCreate(
+ name="string",
+ version="string",
+ parameters=LlamaParseParameters(
+ priority=LlamaParseParametersPriority.LOW,
+ parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
+ replace_failed_page_mode=FailPageMode.RAW_TEXT,
+ ),
+ ),
  )
  """
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
- if page_size is not OMIT:
- _request["page_size"] = page_size
- if page_token is not OMIT:
- _request["page_token"] = page_token
- if filter is not OMIT:
- _request["filter"] = filter
- if order_by is not OMIT:
- _request["order_by"] = order_by
- if collection is not OMIT:
- _request["collection"] = collection
- if group_by is not OMIT:
- _request["group_by"] = group_by
- if count is not OMIT:
- _request["count"] = count
- if first is not OMIT:
- _request["first"] = first
- if offset is not OMIT:
- _request["offset"] = offset
  _response = await self._client_wrapper.httpx_client.request(
- "POST",
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
+ "PUT",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- json=jsonable_encoder(_request),
+ json=jsonable_encoder(request),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return pydantic.parse_obj_as(PaginatedResponseAggregateGroup, _response.json()) # type: ignore
+ return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try:
@@ -1370,47 +2369,48 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
- async def list_quota_configurations(
- self,
- *,
- source_type: typing_extensions.Literal["organization"],
- source_id: str,
- page: typing.Optional[int] = None,
- page_size: typing.Optional[int] = None,
- ) -> PaginatedResponseQuotaConfiguration:
+ async def get_parse_configuration(
+ self, config_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ ) -> ParseConfiguration:
  """
- Retrieve a paginated list of quota configurations with optional filtering.
+ Get a parse configuration by ID.
 
- Parameters:
- - source_type: typing_extensions.Literal["organization"].
+ Args:
+ config_id: The ID of the parse configuration
+ project: Validated project from dependency
+ user: Current user
+ db: Database session
 
- - source_id: str.
+ Returns:
+ The parse configuration
 
- - page: typing.Optional[int].
+ Parameters:
+ - config_id: str.
 
- - page_size: typing.Optional[int].
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud
 
  client = AsyncLlamaCloud(
  token="YOUR_TOKEN",
  )
- await client.beta.list_quota_configurations(
- source_type="organization",
- source_id="string",
+ await client.beta.get_parse_configuration(
+ config_id="string",
  )
  """
  _response = await self._client_wrapper.httpx_client.request(
  "GET",
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/quota-management"),
- params=remove_none_from_dict(
- {"source_type": source_type, "source_id": source_id, "page": page, "page_size": page_size}
+ urllib.parse.urljoin(
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
  ),
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return pydantic.parse_obj_as(PaginatedResponseQuotaConfiguration, _response.json()) # type: ignore
+ return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try:
@@ -1419,53 +2419,71 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
- async def create_file(
+ async def update_parse_configuration(
  self,
+ config_id: str,
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- request: FileCreate,
- ) -> File:
+ parameters: typing.Optional[LlamaParseParameters] = OMIT,
+ ) -> ParseConfiguration:
  """
- Create a new file in the project.
+ Update a parse configuration.
 
  Args:
- file_create: File creation data
+ config_id: The ID of the parse configuration to update
+ config_update: Update data
  project: Validated project from dependency
+ user: Current user
  db: Database session
 
  Returns:
- The created file
+ The updated parse configuration
 
  Parameters:
+ - config_id: str.
+
  - project_id: typing.Optional[str].
 
  - organization_id: typing.Optional[str].
 
- - request: FileCreate.
+ - parameters: typing.Optional[LlamaParseParameters].
  ---
- from llama_cloud import FileCreate
+ from llama_cloud import (
+ FailPageMode,
+ LlamaParseParameters,
+ LlamaParseParametersPriority,
+ ParsingMode,
+ )
  from llama_cloud.client import AsyncLlamaCloud
 
  client = AsyncLlamaCloud(
  token="YOUR_TOKEN",
  )
- await client.beta.create_file(
- request=FileCreate(
- name="string",
+ await client.beta.update_parse_configuration(
+ config_id="string",
+ parameters=LlamaParseParameters(
+ priority=LlamaParseParametersPriority.LOW,
+ parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
+ replace_failed_page_mode=FailPageMode.RAW_TEXT,
  ),
  )
  """
+ _request: typing.Dict[str, typing.Any] = {}
+ if parameters is not OMIT:
+ _request["parameters"] = parameters
  _response = await self._client_wrapper.httpx_client.request(
- "POST",
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
+ "PUT",
+ urllib.parse.urljoin(
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
+ ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- json=jsonable_encoder(request),
+ json=jsonable_encoder(_request),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return pydantic.parse_obj_as(File, _response.json()) # type: ignore
+ return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try:
@@ -1474,53 +2492,45 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
- async def upsert_file(
- self,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- request: FileCreate,
- ) -> File:
+ async def delete_parse_configuration(
+ self, config_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ ) -> None:
  """
- Upsert a file (create or update if exists) in the project.
+ Delete a parse configuration.
 
  Args:
- file_create: File creation/update data
+ config_id: The ID of the parse configuration to delete
  project: Validated project from dependency
+ user: Current user
  db: Database session
 
- Returns:
- The upserted file
-
  Parameters:
+ - config_id: str.
+
  - project_id: typing.Optional[str].
 
  - organization_id: typing.Optional[str].
-
- - request: FileCreate.
  ---
- from llama_cloud import FileCreate
  from llama_cloud.client import AsyncLlamaCloud
 
  client = AsyncLlamaCloud(
  token="YOUR_TOKEN",
  )
- await client.beta.upsert_file(
- request=FileCreate(
- name="string",
- ),
+ await client.beta.delete_parse_configuration(
+ config_id="string",
  )
  """
  _response = await self._client_wrapper.httpx_client.request(
- "PUT",
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
+ "DELETE",
+ urllib.parse.urljoin(
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
+ ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- json=jsonable_encoder(request),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return pydantic.parse_obj_as(File, _response.json()) # type: ignore
+ return
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try:
@@ -1529,26 +2539,27 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
- async def query_files(
+ async def query_parse_configurations(
  self,
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  page_size: typing.Optional[int] = OMIT,
  page_token: typing.Optional[str] = OMIT,
- filter: typing.Optional[FileFilter] = OMIT,
+ filter: typing.Optional[ParseConfigurationFilter] = OMIT,
  order_by: typing.Optional[str] = OMIT,
- ) -> FileQueryResponse:
+ ) -> ParseConfigurationQueryResponse:
  """
- Query files with flexible filtering and pagination.
+ Query parse configurations with filtering and pagination.
 
  Args:
- request: The query request with filters and pagination
+ query_request: Query request with filters and pagination
  project: Validated project from dependency
+ user: Current user
  db: Database session
 
  Returns:
- Paginated response with files
+ Paginated response with parse configurations
 
  Parameters:
  - project_id: typing.Optional[str].
@@ -1559,18 +2570,18 @@ class AsyncBetaClient:
 
  - page_token: typing.Optional[str].
 
- - filter: typing.Optional[FileFilter].
+ - filter: typing.Optional[ParseConfigurationFilter].
 
  - order_by: typing.Optional[str].
  ---
- from llama_cloud import FileFilter
+ from llama_cloud import ParseConfigurationFilter
  from llama_cloud.client import AsyncLlamaCloud
 
  client = AsyncLlamaCloud(
  token="YOUR_TOKEN",
  )
- await client.beta.query_files(
- filter=FileFilter(),
+ await client.beta.query_parse_configurations(
+ filter=ParseConfigurationFilter(),
  )
  """
  _request: typing.Dict[str, typing.Any] = {}
@@ -1584,14 +2595,14 @@ class AsyncBetaClient:
  _request["order_by"] = order_by
  _response = await self._client_wrapper.httpx_client.request(
  "POST",
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations/query"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return pydantic.parse_obj_as(FileQueryResponse, _response.json()) # type: ignore
+ return pydantic.parse_obj_as(ParseConfigurationQueryResponse, _response.json()) # type: ignore
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try:
@@ -1600,22 +2611,27 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
- async def delete_file(
- self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
- ) -> None:
+ async def get_latest_parse_configuration(
+ self,
+ *,
+ creator: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ ) -> typing.Optional[ParseConfiguration]:
  """
- Delete a single file from the project.
+ Get the latest parse configuration for the current project.
 
  Args:
- file_id: The ID of the file to delete
  project: Validated project from dependency
+ user: Current user
  db: Database session
+ creator: Optional creator filter
 
  Returns:
- None (204 No Content on success)
+ The latest parse configuration or None if not found
 
  Parameters:
- - file_id: str.
+ - creator: typing.Optional[str].
 
  - project_id: typing.Optional[str].
 
@@ -1626,19 +2642,19 @@ class AsyncBetaClient:
  client = AsyncLlamaCloud(
  token="YOUR_TOKEN",
  )
- await client.beta.delete_file(
- file_id="string",
- )
+ await client.beta.get_latest_parse_configuration()
  """
  _response = await self._client_wrapper.httpx_client.request(
- "DELETE",
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
- params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+ "GET",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations/latest"),
+ params=remove_none_from_dict(
+ {"creator": creator, "project_id": project_id, "organization_id": organization_id}
+ ),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return
+ return pydantic.parse_obj_as(typing.Optional[ParseConfiguration], _response.json()) # type: ignore
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try: