llama-cloud 0.1.38__py3-none-any.whl → 0.1.39__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud might be problematic; see the package registry's advisory page for more details.

@@ -34,7 +34,6 @@ class JobsClient:
34
34
  include_usage_metrics: typing.Optional[bool] = None,
35
35
  project_id: typing.Optional[str] = None,
36
36
  organization_id: typing.Optional[str] = None,
37
- project_id: typing.Optional[str] = None,
38
37
  ) -> PaginatedJobsHistoryWithMetrics:
39
38
  """
40
39
  Get jobs for a project.
@@ -57,8 +56,6 @@ class JobsClient:
57
56
  - project_id: typing.Optional[str].
58
57
 
59
58
  - organization_id: typing.Optional[str].
60
-
61
- - project_id: typing.Optional[str].
62
59
  ---
63
60
  from llama_cloud.client import LlamaCloud
64
61
 
@@ -81,7 +78,7 @@ class JobsClient:
81
78
  "organization_id": organization_id,
82
79
  }
83
80
  ),
84
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
81
+ headers=self._client_wrapper.get_headers(),
85
82
  timeout=60,
86
83
  )
87
84
  if 200 <= _response.status_code < 300:
@@ -109,7 +106,6 @@ class AsyncJobsClient:
109
106
  include_usage_metrics: typing.Optional[bool] = None,
110
107
  project_id: typing.Optional[str] = None,
111
108
  organization_id: typing.Optional[str] = None,
112
- project_id: typing.Optional[str] = None,
113
109
  ) -> PaginatedJobsHistoryWithMetrics:
114
110
  """
115
111
  Get jobs for a project.
@@ -132,8 +128,6 @@ class AsyncJobsClient:
132
128
  - project_id: typing.Optional[str].
133
129
 
134
130
  - organization_id: typing.Optional[str].
135
-
136
- - project_id: typing.Optional[str].
137
131
  ---
138
132
  from llama_cloud.client import AsyncLlamaCloud
139
133
 
@@ -156,7 +150,7 @@ class AsyncJobsClient:
156
150
  "organization_id": organization_id,
157
151
  }
158
152
  ),
159
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
153
+ headers=self._client_wrapper.get_headers(),
160
154
  timeout=60,
161
155
  )
162
156
  if 200 <= _response.status_code < 300:
@@ -50,7 +50,6 @@ class LlamaExtractClient:
50
50
  include_default: typing.Optional[bool] = None,
51
51
  project_id: typing.Optional[str] = None,
52
52
  organization_id: typing.Optional[str] = None,
53
- project_id: typing.Optional[str] = None,
54
53
  ) -> typing.List[ExtractAgent]:
55
54
  """
56
55
  Parameters:
@@ -59,8 +58,6 @@ class LlamaExtractClient:
59
58
  - project_id: typing.Optional[str].
60
59
 
61
60
  - organization_id: typing.Optional[str].
62
-
63
- - project_id: typing.Optional[str].
64
61
  ---
65
62
  from llama_cloud.client import LlamaCloud
66
63
 
@@ -75,7 +72,7 @@ class LlamaExtractClient:
75
72
  params=remove_none_from_dict(
76
73
  {"include_default": include_default, "project_id": project_id, "organization_id": organization_id}
77
74
  ),
78
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
75
+ headers=self._client_wrapper.get_headers(),
79
76
  timeout=60,
80
77
  )
81
78
  if 200 <= _response.status_code < 300:
@@ -96,7 +93,6 @@ class LlamaExtractClient:
96
93
  name: str,
97
94
  data_schema: ExtractAgentCreateDataSchema,
98
95
  config: ExtractConfig,
99
- project_id: typing.Optional[str] = None,
100
96
  ) -> ExtractAgent:
101
97
  """
102
98
  Parameters:
@@ -109,8 +105,6 @@ class LlamaExtractClient:
109
105
  - data_schema: ExtractAgentCreateDataSchema. The schema of the data.
110
106
 
111
107
  - config: ExtractConfig. The configuration parameters for the extraction agent.
112
-
113
- - project_id: typing.Optional[str].
114
108
  ---
115
109
  from llama_cloud import (
116
110
  DocumentChunkMode,
@@ -139,7 +133,7 @@ class LlamaExtractClient:
139
133
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/extraction/extraction-agents"),
140
134
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
141
135
  json=jsonable_encoder({"name": name, "data_schema": data_schema, "config": config}),
142
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
136
+ headers=self._client_wrapper.get_headers(),
143
137
  timeout=60,
144
138
  )
145
139
  if 200 <= _response.status_code < 300:
@@ -195,7 +189,6 @@ class LlamaExtractClient:
195
189
  organization_id: typing.Optional[str] = None,
196
190
  prompt: typing.Optional[str] = OMIT,
197
191
  file_id: typing.Optional[str] = OMIT,
198
- project_id: typing.Optional[str] = None,
199
192
  ) -> ExtractSchemaGenerateResponse:
200
193
  """
201
194
  Generates an extraction agent's schema definition from a file and/or natural language prompt.
@@ -208,8 +201,6 @@ class LlamaExtractClient:
208
201
  - prompt: typing.Optional[str].
209
202
 
210
203
  - file_id: typing.Optional[str].
211
-
212
- - project_id: typing.Optional[str].
213
204
  ---
214
205
  from llama_cloud.client import LlamaCloud
215
206
 
@@ -230,7 +221,7 @@ class LlamaExtractClient:
230
221
  ),
231
222
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
232
223
  json=jsonable_encoder(_request),
233
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
224
+ headers=self._client_wrapper.get_headers(),
234
225
  timeout=60,
235
226
  )
236
227
  if 200 <= _response.status_code < 300:
@@ -244,12 +235,7 @@ class LlamaExtractClient:
244
235
  raise ApiError(status_code=_response.status_code, body=_response_json)
245
236
 
246
237
  def get_extraction_agent_by_name(
247
- self,
248
- name: str,
249
- *,
250
- project_id: typing.Optional[str] = None,
251
- organization_id: typing.Optional[str] = None,
252
- project_id: typing.Optional[str] = None,
238
+ self, name: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
253
239
  ) -> ExtractAgent:
254
240
  """
255
241
  Parameters:
@@ -258,8 +244,6 @@ class LlamaExtractClient:
258
244
  - project_id: typing.Optional[str].
259
245
 
260
246
  - organization_id: typing.Optional[str].
261
-
262
- - project_id: typing.Optional[str].
263
247
  ---
264
248
  from llama_cloud.client import LlamaCloud
265
249
 
@@ -276,7 +260,7 @@ class LlamaExtractClient:
276
260
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/extraction-agents/by-name/{name}"
277
261
  ),
278
262
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
279
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
263
+ headers=self._client_wrapper.get_headers(),
280
264
  timeout=60,
281
265
  )
282
266
  if 200 <= _response.status_code < 300:
@@ -290,11 +274,7 @@ class LlamaExtractClient:
290
274
  raise ApiError(status_code=_response.status_code, body=_response_json)
291
275
 
292
276
  def get_or_create_default_extraction_agent(
293
- self,
294
- *,
295
- project_id: typing.Optional[str] = None,
296
- organization_id: typing.Optional[str] = None,
297
- project_id: typing.Optional[str] = None,
277
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
298
278
  ) -> ExtractAgent:
299
279
  """
300
280
  Get or create a default extraction agent for the current project.
@@ -304,8 +284,6 @@ class LlamaExtractClient:
304
284
  - project_id: typing.Optional[str].
305
285
 
306
286
  - organization_id: typing.Optional[str].
307
-
308
- - project_id: typing.Optional[str].
309
287
  ---
310
288
  from llama_cloud.client import LlamaCloud
311
289
 
@@ -320,7 +298,7 @@ class LlamaExtractClient:
320
298
  f"{self._client_wrapper.get_base_url()}/", "api/v1/extraction/extraction-agents/default"
321
299
  ),
322
300
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
323
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
301
+ headers=self._client_wrapper.get_headers(),
324
302
  timeout=60,
325
303
  )
326
304
  if 200 <= _response.status_code < 300:
@@ -753,12 +731,7 @@ class LlamaExtractClient:
753
731
  raise ApiError(status_code=_response.status_code, body=_response_json)
754
732
 
755
733
  def get_job_result(
756
- self,
757
- job_id: str,
758
- *,
759
- project_id: typing.Optional[str] = None,
760
- organization_id: typing.Optional[str] = None,
761
- project_id: typing.Optional[str] = None,
734
+ self, job_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
762
735
  ) -> ExtractResultset:
763
736
  """
764
737
  Parameters:
@@ -767,8 +740,6 @@ class LlamaExtractClient:
767
740
  - project_id: typing.Optional[str].
768
741
 
769
742
  - organization_id: typing.Optional[str].
770
-
771
- - project_id: typing.Optional[str].
772
743
  ---
773
744
  from llama_cloud.client import LlamaCloud
774
745
 
@@ -783,7 +754,7 @@ class LlamaExtractClient:
783
754
  "GET",
784
755
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/jobs/{job_id}/result"),
785
756
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
786
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
757
+ headers=self._client_wrapper.get_headers(),
787
758
  timeout=60,
788
759
  )
789
760
  if 200 <= _response.status_code < 300:
@@ -865,12 +836,7 @@ class LlamaExtractClient:
865
836
  raise ApiError(status_code=_response.status_code, body=_response_json)
866
837
 
867
838
  def get_run_by_job_id(
868
- self,
869
- job_id: str,
870
- *,
871
- project_id: typing.Optional[str] = None,
872
- organization_id: typing.Optional[str] = None,
873
- project_id: typing.Optional[str] = None,
839
+ self, job_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
874
840
  ) -> ExtractRun:
875
841
  """
876
842
  Parameters:
@@ -879,8 +845,6 @@ class LlamaExtractClient:
879
845
  - project_id: typing.Optional[str].
880
846
 
881
847
  - organization_id: typing.Optional[str].
882
-
883
- - project_id: typing.Optional[str].
884
848
  ---
885
849
  from llama_cloud.client import LlamaCloud
886
850
 
@@ -895,7 +859,7 @@ class LlamaExtractClient:
895
859
  "GET",
896
860
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/runs/by-job/{job_id}"),
897
861
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
898
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
862
+ headers=self._client_wrapper.get_headers(),
899
863
  timeout=60,
900
864
  )
901
865
  if 200 <= _response.status_code < 300:
@@ -909,12 +873,7 @@ class LlamaExtractClient:
909
873
  raise ApiError(status_code=_response.status_code, body=_response_json)
910
874
 
911
875
  def get_run(
912
- self,
913
- run_id: str,
914
- *,
915
- project_id: typing.Optional[str] = None,
916
- organization_id: typing.Optional[str] = None,
917
- project_id: typing.Optional[str] = None,
876
+ self, run_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
918
877
  ) -> ExtractRun:
919
878
  """
920
879
  Parameters:
@@ -923,8 +882,6 @@ class LlamaExtractClient:
923
882
  - project_id: typing.Optional[str].
924
883
 
925
884
  - organization_id: typing.Optional[str].
926
-
927
- - project_id: typing.Optional[str].
928
885
  ---
929
886
  from llama_cloud.client import LlamaCloud
930
887
 
@@ -939,7 +896,7 @@ class LlamaExtractClient:
939
896
  "GET",
940
897
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/runs/{run_id}"),
941
898
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
942
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
899
+ headers=self._client_wrapper.get_headers(),
943
900
  timeout=60,
944
901
  )
945
902
  if 200 <= _response.status_code < 300:
@@ -953,12 +910,7 @@ class LlamaExtractClient:
953
910
  raise ApiError(status_code=_response.status_code, body=_response_json)
954
911
 
955
912
  def delete_extraction_run(
956
- self,
957
- run_id: str,
958
- *,
959
- project_id: typing.Optional[str] = None,
960
- organization_id: typing.Optional[str] = None,
961
- project_id: typing.Optional[str] = None,
913
+ self, run_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
962
914
  ) -> typing.Any:
963
915
  """
964
916
  Parameters:
@@ -967,8 +919,6 @@ class LlamaExtractClient:
967
919
  - project_id: typing.Optional[str].
968
920
 
969
921
  - organization_id: typing.Optional[str].
970
-
971
- - project_id: typing.Optional[str].
972
922
  ---
973
923
  from llama_cloud.client import LlamaCloud
974
924
 
@@ -983,7 +933,7 @@ class LlamaExtractClient:
983
933
  "DELETE",
984
934
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/runs/{run_id}"),
985
935
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
986
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
936
+ headers=self._client_wrapper.get_headers(),
987
937
  timeout=60,
988
938
  )
989
939
  if 200 <= _response.status_code < 300:
@@ -1007,7 +957,6 @@ class LlamaExtractClient:
1007
957
  file_id: typing.Optional[str] = OMIT,
1008
958
  text: typing.Optional[str] = OMIT,
1009
959
  file: typing.Optional[FileData] = OMIT,
1010
- project_id: typing.Optional[str] = None,
1011
960
  ) -> ExtractJob:
1012
961
  """
1013
962
  Stateless extraction endpoint that uses a default extraction agent in the user's default project.
@@ -1029,8 +978,6 @@ class LlamaExtractClient:
1029
978
  - text: typing.Optional[str].
1030
979
 
1031
980
  - file: typing.Optional[FileData].
1032
-
1033
- - project_id: typing.Optional[str].
1034
981
  ---
1035
982
  from llama_cloud import (
1036
983
  DocumentChunkMode,
@@ -1072,7 +1019,7 @@ class LlamaExtractClient:
1072
1019
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/extraction/run"),
1073
1020
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1074
1021
  json=jsonable_encoder(_request),
1075
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1022
+ headers=self._client_wrapper.get_headers(),
1076
1023
  timeout=60,
1077
1024
  )
1078
1025
  if 200 <= _response.status_code < 300:
@@ -1096,7 +1043,6 @@ class AsyncLlamaExtractClient:
1096
1043
  include_default: typing.Optional[bool] = None,
1097
1044
  project_id: typing.Optional[str] = None,
1098
1045
  organization_id: typing.Optional[str] = None,
1099
- project_id: typing.Optional[str] = None,
1100
1046
  ) -> typing.List[ExtractAgent]:
1101
1047
  """
1102
1048
  Parameters:
@@ -1105,8 +1051,6 @@ class AsyncLlamaExtractClient:
1105
1051
  - project_id: typing.Optional[str].
1106
1052
 
1107
1053
  - organization_id: typing.Optional[str].
1108
-
1109
- - project_id: typing.Optional[str].
1110
1054
  ---
1111
1055
  from llama_cloud.client import AsyncLlamaCloud
1112
1056
 
@@ -1121,7 +1065,7 @@ class AsyncLlamaExtractClient:
1121
1065
  params=remove_none_from_dict(
1122
1066
  {"include_default": include_default, "project_id": project_id, "organization_id": organization_id}
1123
1067
  ),
1124
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1068
+ headers=self._client_wrapper.get_headers(),
1125
1069
  timeout=60,
1126
1070
  )
1127
1071
  if 200 <= _response.status_code < 300:
@@ -1142,7 +1086,6 @@ class AsyncLlamaExtractClient:
1142
1086
  name: str,
1143
1087
  data_schema: ExtractAgentCreateDataSchema,
1144
1088
  config: ExtractConfig,
1145
- project_id: typing.Optional[str] = None,
1146
1089
  ) -> ExtractAgent:
1147
1090
  """
1148
1091
  Parameters:
@@ -1155,8 +1098,6 @@ class AsyncLlamaExtractClient:
1155
1098
  - data_schema: ExtractAgentCreateDataSchema. The schema of the data.
1156
1099
 
1157
1100
  - config: ExtractConfig. The configuration parameters for the extraction agent.
1158
-
1159
- - project_id: typing.Optional[str].
1160
1101
  ---
1161
1102
  from llama_cloud import (
1162
1103
  DocumentChunkMode,
@@ -1185,7 +1126,7 @@ class AsyncLlamaExtractClient:
1185
1126
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/extraction/extraction-agents"),
1186
1127
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1187
1128
  json=jsonable_encoder({"name": name, "data_schema": data_schema, "config": config}),
1188
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1129
+ headers=self._client_wrapper.get_headers(),
1189
1130
  timeout=60,
1190
1131
  )
1191
1132
  if 200 <= _response.status_code < 300:
@@ -1241,7 +1182,6 @@ class AsyncLlamaExtractClient:
1241
1182
  organization_id: typing.Optional[str] = None,
1242
1183
  prompt: typing.Optional[str] = OMIT,
1243
1184
  file_id: typing.Optional[str] = OMIT,
1244
- project_id: typing.Optional[str] = None,
1245
1185
  ) -> ExtractSchemaGenerateResponse:
1246
1186
  """
1247
1187
  Generates an extraction agent's schema definition from a file and/or natural language prompt.
@@ -1254,8 +1194,6 @@ class AsyncLlamaExtractClient:
1254
1194
  - prompt: typing.Optional[str].
1255
1195
 
1256
1196
  - file_id: typing.Optional[str].
1257
-
1258
- - project_id: typing.Optional[str].
1259
1197
  ---
1260
1198
  from llama_cloud.client import AsyncLlamaCloud
1261
1199
 
@@ -1276,7 +1214,7 @@ class AsyncLlamaExtractClient:
1276
1214
  ),
1277
1215
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1278
1216
  json=jsonable_encoder(_request),
1279
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1217
+ headers=self._client_wrapper.get_headers(),
1280
1218
  timeout=60,
1281
1219
  )
1282
1220
  if 200 <= _response.status_code < 300:
@@ -1290,12 +1228,7 @@ class AsyncLlamaExtractClient:
1290
1228
  raise ApiError(status_code=_response.status_code, body=_response_json)
1291
1229
 
1292
1230
  async def get_extraction_agent_by_name(
1293
- self,
1294
- name: str,
1295
- *,
1296
- project_id: typing.Optional[str] = None,
1297
- organization_id: typing.Optional[str] = None,
1298
- project_id: typing.Optional[str] = None,
1231
+ self, name: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1299
1232
  ) -> ExtractAgent:
1300
1233
  """
1301
1234
  Parameters:
@@ -1304,8 +1237,6 @@ class AsyncLlamaExtractClient:
1304
1237
  - project_id: typing.Optional[str].
1305
1238
 
1306
1239
  - organization_id: typing.Optional[str].
1307
-
1308
- - project_id: typing.Optional[str].
1309
1240
  ---
1310
1241
  from llama_cloud.client import AsyncLlamaCloud
1311
1242
 
@@ -1322,7 +1253,7 @@ class AsyncLlamaExtractClient:
1322
1253
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/extraction-agents/by-name/{name}"
1323
1254
  ),
1324
1255
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1325
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1256
+ headers=self._client_wrapper.get_headers(),
1326
1257
  timeout=60,
1327
1258
  )
1328
1259
  if 200 <= _response.status_code < 300:
@@ -1336,11 +1267,7 @@ class AsyncLlamaExtractClient:
1336
1267
  raise ApiError(status_code=_response.status_code, body=_response_json)
1337
1268
 
1338
1269
  async def get_or_create_default_extraction_agent(
1339
- self,
1340
- *,
1341
- project_id: typing.Optional[str] = None,
1342
- organization_id: typing.Optional[str] = None,
1343
- project_id: typing.Optional[str] = None,
1270
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1344
1271
  ) -> ExtractAgent:
1345
1272
  """
1346
1273
  Get or create a default extraction agent for the current project.
@@ -1350,8 +1277,6 @@ class AsyncLlamaExtractClient:
1350
1277
  - project_id: typing.Optional[str].
1351
1278
 
1352
1279
  - organization_id: typing.Optional[str].
1353
-
1354
- - project_id: typing.Optional[str].
1355
1280
  ---
1356
1281
  from llama_cloud.client import AsyncLlamaCloud
1357
1282
 
@@ -1366,7 +1291,7 @@ class AsyncLlamaExtractClient:
1366
1291
  f"{self._client_wrapper.get_base_url()}/", "api/v1/extraction/extraction-agents/default"
1367
1292
  ),
1368
1293
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1369
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1294
+ headers=self._client_wrapper.get_headers(),
1370
1295
  timeout=60,
1371
1296
  )
1372
1297
  if 200 <= _response.status_code < 300:
@@ -1799,12 +1724,7 @@ class AsyncLlamaExtractClient:
1799
1724
  raise ApiError(status_code=_response.status_code, body=_response_json)
1800
1725
 
1801
1726
  async def get_job_result(
1802
- self,
1803
- job_id: str,
1804
- *,
1805
- project_id: typing.Optional[str] = None,
1806
- organization_id: typing.Optional[str] = None,
1807
- project_id: typing.Optional[str] = None,
1727
+ self, job_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1808
1728
  ) -> ExtractResultset:
1809
1729
  """
1810
1730
  Parameters:
@@ -1813,8 +1733,6 @@ class AsyncLlamaExtractClient:
1813
1733
  - project_id: typing.Optional[str].
1814
1734
 
1815
1735
  - organization_id: typing.Optional[str].
1816
-
1817
- - project_id: typing.Optional[str].
1818
1736
  ---
1819
1737
  from llama_cloud.client import AsyncLlamaCloud
1820
1738
 
@@ -1829,7 +1747,7 @@ class AsyncLlamaExtractClient:
1829
1747
  "GET",
1830
1748
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/jobs/{job_id}/result"),
1831
1749
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1832
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1750
+ headers=self._client_wrapper.get_headers(),
1833
1751
  timeout=60,
1834
1752
  )
1835
1753
  if 200 <= _response.status_code < 300:
@@ -1911,12 +1829,7 @@ class AsyncLlamaExtractClient:
1911
1829
  raise ApiError(status_code=_response.status_code, body=_response_json)
1912
1830
 
1913
1831
  async def get_run_by_job_id(
1914
- self,
1915
- job_id: str,
1916
- *,
1917
- project_id: typing.Optional[str] = None,
1918
- organization_id: typing.Optional[str] = None,
1919
- project_id: typing.Optional[str] = None,
1832
+ self, job_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1920
1833
  ) -> ExtractRun:
1921
1834
  """
1922
1835
  Parameters:
@@ -1925,8 +1838,6 @@ class AsyncLlamaExtractClient:
1925
1838
  - project_id: typing.Optional[str].
1926
1839
 
1927
1840
  - organization_id: typing.Optional[str].
1928
-
1929
- - project_id: typing.Optional[str].
1930
1841
  ---
1931
1842
  from llama_cloud.client import AsyncLlamaCloud
1932
1843
 
@@ -1941,7 +1852,7 @@ class AsyncLlamaExtractClient:
1941
1852
  "GET",
1942
1853
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/runs/by-job/{job_id}"),
1943
1854
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1944
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1855
+ headers=self._client_wrapper.get_headers(),
1945
1856
  timeout=60,
1946
1857
  )
1947
1858
  if 200 <= _response.status_code < 300:
@@ -1955,12 +1866,7 @@ class AsyncLlamaExtractClient:
1955
1866
  raise ApiError(status_code=_response.status_code, body=_response_json)
1956
1867
 
1957
1868
  async def get_run(
1958
- self,
1959
- run_id: str,
1960
- *,
1961
- project_id: typing.Optional[str] = None,
1962
- organization_id: typing.Optional[str] = None,
1963
- project_id: typing.Optional[str] = None,
1869
+ self, run_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1964
1870
  ) -> ExtractRun:
1965
1871
  """
1966
1872
  Parameters:
@@ -1969,8 +1875,6 @@ class AsyncLlamaExtractClient:
1969
1875
  - project_id: typing.Optional[str].
1970
1876
 
1971
1877
  - organization_id: typing.Optional[str].
1972
-
1973
- - project_id: typing.Optional[str].
1974
1878
  ---
1975
1879
  from llama_cloud.client import AsyncLlamaCloud
1976
1880
 
@@ -1985,7 +1889,7 @@ class AsyncLlamaExtractClient:
1985
1889
  "GET",
1986
1890
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/runs/{run_id}"),
1987
1891
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1988
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1892
+ headers=self._client_wrapper.get_headers(),
1989
1893
  timeout=60,
1990
1894
  )
1991
1895
  if 200 <= _response.status_code < 300:
@@ -1999,12 +1903,7 @@ class AsyncLlamaExtractClient:
1999
1903
  raise ApiError(status_code=_response.status_code, body=_response_json)
2000
1904
 
2001
1905
  async def delete_extraction_run(
2002
- self,
2003
- run_id: str,
2004
- *,
2005
- project_id: typing.Optional[str] = None,
2006
- organization_id: typing.Optional[str] = None,
2007
- project_id: typing.Optional[str] = None,
1906
+ self, run_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
2008
1907
  ) -> typing.Any:
2009
1908
  """
2010
1909
  Parameters:
@@ -2013,8 +1912,6 @@ class AsyncLlamaExtractClient:
2013
1912
  - project_id: typing.Optional[str].
2014
1913
 
2015
1914
  - organization_id: typing.Optional[str].
2016
-
2017
- - project_id: typing.Optional[str].
2018
1915
  ---
2019
1916
  from llama_cloud.client import AsyncLlamaCloud
2020
1917
 
@@ -2029,7 +1926,7 @@ class AsyncLlamaExtractClient:
2029
1926
  "DELETE",
2030
1927
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/extraction/runs/{run_id}"),
2031
1928
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
2032
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1929
+ headers=self._client_wrapper.get_headers(),
2033
1930
  timeout=60,
2034
1931
  )
2035
1932
  if 200 <= _response.status_code < 300:
@@ -2053,7 +1950,6 @@ class AsyncLlamaExtractClient:
2053
1950
  file_id: typing.Optional[str] = OMIT,
2054
1951
  text: typing.Optional[str] = OMIT,
2055
1952
  file: typing.Optional[FileData] = OMIT,
2056
- project_id: typing.Optional[str] = None,
2057
1953
  ) -> ExtractJob:
2058
1954
  """
2059
1955
  Stateless extraction endpoint that uses a default extraction agent in the user's default project.
@@ -2075,8 +1971,6 @@ class AsyncLlamaExtractClient:
2075
1971
  - text: typing.Optional[str].
2076
1972
 
2077
1973
  - file: typing.Optional[FileData].
2078
-
2079
- - project_id: typing.Optional[str].
2080
1974
  ---
2081
1975
  from llama_cloud import (
2082
1976
  DocumentChunkMode,
@@ -2118,7 +2012,7 @@ class AsyncLlamaExtractClient:
2118
2012
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/extraction/run"),
2119
2013
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
2120
2014
  json=jsonable_encoder(_request),
2121
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
2015
+ headers=self._client_wrapper.get_headers(),
2122
2016
  timeout=60,
2123
2017
  )
2124
2018
  if 200 <= _response.status_code < 300:
@@ -657,12 +657,7 @@ class OrganizationsClient:
657
657
  raise ApiError(status_code=_response.status_code, body=_response_json)
658
658
 
659
659
  def add_user_to_project(
660
- self,
661
- organization_id: typing.Optional[str],
662
- user_id: str,
663
- *,
664
- project_id: typing.Optional[str] = None,
665
- project_id: typing.Optional[str] = None,
660
+ self, organization_id: typing.Optional[str], user_id: str, *, project_id: typing.Optional[str] = None
666
661
  ) -> typing.Any:
667
662
  """
668
663
  Add a user to a project.
@@ -673,8 +668,6 @@ class OrganizationsClient:
673
668
  - user_id: str.
674
669
 
675
670
  - project_id: typing.Optional[str].
676
-
677
- - project_id: typing.Optional[str].
678
671
  ---
679
672
  from llama_cloud.client import LlamaCloud
680
673
 
@@ -692,7 +685,7 @@ class OrganizationsClient:
692
685
  f"api/v1/organizations/{organization_id}/users/{user_id}/projects",
693
686
  ),
694
687
  params=remove_none_from_dict({"project_id": project_id}),
695
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
688
+ headers=self._client_wrapper.get_headers(),
696
689
  timeout=60,
697
690
  )
698
691
  if 200 <= _response.status_code < 300:
@@ -1372,12 +1365,7 @@ class AsyncOrganizationsClient:
1372
1365
  raise ApiError(status_code=_response.status_code, body=_response_json)
1373
1366
 
1374
1367
  async def add_user_to_project(
1375
- self,
1376
- organization_id: typing.Optional[str],
1377
- user_id: str,
1378
- *,
1379
- project_id: typing.Optional[str] = None,
1380
- project_id: typing.Optional[str] = None,
1368
+ self, organization_id: typing.Optional[str], user_id: str, *, project_id: typing.Optional[str] = None
1381
1369
  ) -> typing.Any:
1382
1370
  """
1383
1371
  Add a user to a project.
@@ -1388,8 +1376,6 @@ class AsyncOrganizationsClient:
1388
1376
  - user_id: str.
1389
1377
 
1390
1378
  - project_id: typing.Optional[str].
1391
-
1392
- - project_id: typing.Optional[str].
1393
1379
  ---
1394
1380
  from llama_cloud.client import AsyncLlamaCloud
1395
1381
 
@@ -1407,7 +1393,7 @@ class AsyncOrganizationsClient:
1407
1393
  f"api/v1/organizations/{organization_id}/users/{user_id}/projects",
1408
1394
  ),
1409
1395
  params=remove_none_from_dict({"project_id": project_id}),
1410
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1396
+ headers=self._client_wrapper.get_headers(),
1411
1397
  timeout=60,
1412
1398
  )
1413
1399
  if 200 <= _response.status_code < 300: