groundx 2.2.4__py3-none-any.whl → 2.2.8__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- groundx/core/client_wrapper.py +1 -1
- groundx/ingest.py +314 -143
- {groundx-2.2.4.dist-info → groundx-2.2.8.dist-info}/METADATA +2 -1
- {groundx-2.2.4.dist-info → groundx-2.2.8.dist-info}/RECORD +6 -6
- {groundx-2.2.4.dist-info → groundx-2.2.8.dist-info}/LICENSE +0 -0
- {groundx-2.2.4.dist-info → groundx-2.2.8.dist-info}/WHEEL +0 -0
groundx/core/client_wrapper.py
CHANGED
groundx/ingest.py
CHANGED
@@ -1,16 +1,12 @@
-import
-from
-from
-
-from json.decoder import JSONDecodeError
+import requests, time, typing, os
+from pathlib import Path
+from tqdm import tqdm
+from urllib.parse import urlparse, urlunparse
 
 from .client import GroundXBase, AsyncGroundXBase
-from .core.api_error import ApiError
-from .core.pydantic_utilities import parse_obj_as
 from .core.request_options import RequestOptions
-from .errors.bad_request_error import BadRequestError
-from .errors.unauthorized_error import UnauthorizedError
 from .types.document import Document
+from .types.document_type import DocumentType
 from .types.ingest_remote_document import IngestRemoteDocument
 from .types.ingest_response import IngestResponse
 
@@ -19,6 +15,14 @@ OMIT = typing.cast(typing.Any, ...)
 
 
 DOCUMENT_TYPE_TO_MIME = {
+    "bmp": "image/bmp",
+    "gif": "image/gif",
+    "heif": "image/heif",
+    "hwp": "application/x-hwp",
+    "ico": "image/vnd.microsoft.icon",
+    "svg": "image/svg",
+    "tiff": "image/tiff",
+    "webp": "image/webp",
     "txt": "text/plain",
     "docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    "pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
@@ -32,14 +36,42 @@ DOCUMENT_TYPE_TO_MIME = {
 }
 MIME_TO_DOCUMENT_TYPE = {v: k for k, v in DOCUMENT_TYPE_TO_MIME.items()}
 
+ALLOWED_SUFFIXES = {f".{k}": v for k, v in DOCUMENT_TYPE_TO_MIME.items()}
+
+SUFFIX_ALIASES = {
+    ".jpeg": ".jpg",
+    ".heic": ".heif",
+    ".tif": ".tiff",
+}
+
+MAX_BATCH_SIZE = 50
+MIN_BATCH_SIZE = 1
+MAX_BATCH_SIZE_BYTES = 50 * 1024 * 1024
+
+def get_presigned_url(
+    endpoint: str,
+    file_name: str,
+    file_extension: str,
+) -> typing.Dict[str, typing.Any]:
+    params = {"name": file_name, "type": file_extension}
+    response = requests.get(endpoint, params=params)
+    response.raise_for_status()
+
+    return response.json()
+
+def strip_query_params(
+    url: str,
+) -> str:
+    parsed = urlparse(url)
+    clean_url = urlunparse((parsed.scheme, parsed.netloc, parsed.path, "", "", ""))
+
+    return clean_url
 
 def prep_documents(
     documents: typing.Sequence[Document],
 ) -> typing.Tuple[
     typing.List[IngestRemoteDocument],
-    typing.List[
-        typing.Tuple[str, typing.Tuple[typing.Union[str, None], typing.BinaryIO, str]]
-    ],
+    typing.List[Document],
 ]:
     """
     Process documents and separate them into remote and local documents.
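The two new module-level helpers above define the upload contract: get_presigned_url GETs the configured endpoint with 'name' and 'type' query parameters and expects a JSON response containing a 'URL' key, while strip_query_params reduces that signed URL to a stable source URL. strip_query_params is a pure function, so its behavior can be shown directly; the signed URL below is invented for illustration.

    from urllib.parse import urlparse, urlunparse

    def strip_query_params(url: str) -> str:
        # Keep scheme, host, and path; drop params, query, and fragment.
        parsed = urlparse(url)
        return urlunparse((parsed.scheme, parsed.netloc, parsed.path, "", "", ""))

    signed = "https://storage.example.com/bucket/report.pdf?X-Amz-Signature=abc123&X-Amz-Expires=3600"
    print(strip_query_params(signed))
    # https://storage.example.com/bucket/report.pdf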
@@ -58,9 +90,7 @@ def prep_documents(
         except ValueError:
             return False
 
-    local_documents: typing.List[
-        typing.Tuple[str, typing.Tuple[typing.Union[str, None], typing.BinaryIO, str]]
-    ] = []
+    local_documents: typing.List[Document] = []
     remote_documents: typing.List[IngestRemoteDocument] = []
 
     for document in documents:
@@ -78,53 +108,7 @@ def prep_documents(
             )
             remote_documents.append(remote_document)
         elif is_valid_local_path(document.file_path):
-
-            file_name = os.path.basename(expanded_path)
-            mime_type = mimetypes.guess_type(file_name)[0] or "application/octet-stream"
-            file_type = MIME_TO_DOCUMENT_TYPE.get(mime_type, None)
-            if document.file_type:
-                file_type = document.file_type
-                mime_type = DOCUMENT_TYPE_TO_MIME.get(
-                    document.file_type, "application/octet-stream"
-                )
-
-            if document.file_name:
-                file_name = document.file_name
-
-            try:
-                local_documents.append(
-                    (
-                        "blob",
-                        (
-                            file_name,
-                            open(expanded_path, "rb"),
-                            mime_type,
-                        ),
-                    )
-                )
-            except Exception as e:
-                raise ValueError(f"Error reading file {expanded_path}: {e}")
-
-            metadata = {
-                "bucketId": document.bucket_id,
-                "fileName": file_name,
-                "fileType": file_type,
-            }
-            if document.process_level:
-                metadata["processLevel"] = document.process_level
-            if document.search_data:
-                metadata["searchData"] = document.search_data
-
-            local_documents.append(
-                (
-                    "metadata",
-                    (
-                        f"data.json",
-                        io.BytesIO(json.dumps(metadata).encode("utf-8")),
-                        "application/json",
-                    ),
-                )
-            )
+            local_documents.append(document)
         else:
             raise ValueError(f"Invalid file path: {document.file_path}")
 
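prep_documents no longer opens local files and builds multipart 'blob' and 'metadata' parts; it now simply partitions the input into remote documents and untouched Document objects, leaving the upload to the presigned-URL path shown below. A sketch of the new contract, assuming the local file exists and that prep_documents is importable from groundx.ingest as defined in this diff:

    from groundx import Document
    from groundx.ingest import prep_documents

    remote, local = prep_documents([
        # URL file_path -> converted to an IngestRemoteDocument
        Document(bucket_id=1, file_path="https://example.com/remote.pdf"),
        # existing local file_path -> returned unchanged in the second list
        Document(bucket_id=1, file_path="~/reports/q3.pdf"),
    ])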
@@ -136,6 +120,7 @@ class GroundX(GroundXBase):
         self,
         *,
         documents: typing.Sequence[Document],
+        upload_api: typing.Optional[str] = "https://api.eyelevel.ai/upload/file",
         request_options: typing.Optional[RequestOptions] = None,
     ) -> IngestResponse:
         """
@@ -145,6 +130,10 @@ class GroundX(GroundXBase):
         ----------
         documents : typing.Sequence[Document]
 
+        # an endpoint that accepts 'name' and 'type' query params
+        # and returns a presigned URL in a JSON dictionary with key 'URL'
+        upload_api : typing.Optional[str]
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
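With the new upload_api parameter, ingesting local files proceeds in three steps: request a presigned URL, PUT the file bytes to it, then ingest the stripped URL as a remote document. A usage sketch; the API key and file path are placeholders, and upload_api may be omitted to use the default endpoint from the signature.

    from groundx import Document, GroundX

    client = GroundX(api_key="YOUR_API_KEY")

    # Local paths are presigned and uploaded first, then ingested remotely.
    response = client.ingest(
        documents=[Document(bucket_id=1, file_path="~/reports/q3.pdf")],
    )
    print(response.ingest.process_id)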
@@ -178,61 +167,215 @@ class GroundX(GroundXBase):
         raise ValueError("Documents must all be either local or remote, not a mix.")
 
         if len(remote_documents) > 0:
+            if len(remote_documents) > MAX_BATCH_SIZE:
+                raise ValueError("You have sent too many documents in this request")
+
             return self.documents.ingest_remote(
                 documents=remote_documents,
                 request_options=request_options,
             )
 
-
-
-
-
-
+        if len(local_documents) > MAX_BATCH_SIZE:
+            raise ValueError("You have sent too many documents in this request")
+
+        if len(local_documents) == 0:
+            raise ValueError("No valid documents were provided")
+
+        docs: typing.List[IngestRemoteDocument] = []
+        for d in local_documents:
+            url = self._upload_file(upload_api, Path(os.path.expanduser(d.file_path)))
+
+            docs.append(
+                IngestRemoteDocument(
+                    bucket_id=d.bucket_id,
+                    file_name=d.file_name,
+                    file_type=d.file_type,
+                    process_level=d.process_level,
+                    search_data=d.search_data,
+                    source_url=url,
+                )
+            )
+
+        return self.documents.ingest_remote(
+            documents=docs,
+            request_options=request_options,
+        )
+
+    def ingest_directory(
+        self,
+        *,
+        bucket_id: int,
+        path: str,
+        batch_size: typing.Optional[int] = 10,
+        upload_api: typing.Optional[str] = "https://api.eyelevel.ai/upload/file",
+        request_options: typing.Optional[RequestOptions] = None,
+    ):
+        """
+        Ingest documents from a local directory into a GroundX bucket.
+
+        Parameters
+        ----------
+        bucket_id : int
+        path : str
+        batch_size : type.Optional[int]
+
+        # an endpoint that accepts 'name' and 'type' query params
+        # and returns a presigned URL in a JSON dictionary with key 'URL'
+        upload_api : typing.Optional[str]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        IngestResponse
+            Documents successfully uploaded
+
+        Examples
+        --------
+        from groundx import Document, GroundX
+
+        client = GroundX(
+            api_key="YOUR_API_KEY",
         )
 
-
-
-
-            files=local_documents,
-            headers=headers,
-            timeout=timeout,
-            allow_redirects=follow_redirects,
+        client.ingest_directory(
+            bucket_id=0,
+            path="/path/to/directory"
         )
+        """
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    parse_obj_as(
-                        type_=typing.Optional[typing.Any],  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            )
-        if _response.status_code == 401:
-            raise UnauthorizedError(
-                typing.cast(
-                    typing.Optional[typing.Any],
-                    parse_obj_as(
-                        type_=typing.Optional[typing.Any],  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
+        def is_valid_local_directory(path: str) -> bool:
+            expanded_path = os.path.expanduser(path)
+            return os.path.isdir(expanded_path)
+
+        def load_directory_files(directory: str) -> typing.List[Path]:
+            dir_path = Path(directory)
+
+            matched_files = [
+                file
+                for file in dir_path.rglob("*")
+                if file.is_file() and (
+                    file.suffix.lower() in ALLOWED_SUFFIXES
+                    or file.suffix.lower() in SUFFIX_ALIASES
                 )
-
-
-
+            ]
+
+            return matched_files
+
+        if bucket_id < 1:
+            raise ValueError(f"Invalid bucket_id: {bucket_id}")
+
+        if is_valid_local_directory(path) is not True:
+            raise ValueError(f"Invalid directory path: {path}")
+
+        files = load_directory_files(path)
+
+        if len(files) < 1:
+            raise ValueError(f"No supported files found in directory: {path}")
+
+        current_batch: typing.List[Path] = []
+        current_batch_size: int = 0
+
+        n = max(MIN_BATCH_SIZE, min(batch_size or MIN_BATCH_SIZE, MAX_BATCH_SIZE))
+
+        with tqdm(total=len(files), desc="Ingesting Files", unit="file") as pbar:
+            for file in files:
+                file_size = file.stat().st_size
+
+                if (current_batch_size + file_size > MAX_BATCH_SIZE_BYTES) or (len(current_batch) >= n):
+                    self._upload_file_batch(bucket_id, current_batch, upload_api, request_options, pbar)
+                    current_batch = []
+                    current_batch_size = 0
+
+                current_batch.append(file)
+                current_batch_size += file_size
+
+            if current_batch:
+                self._upload_file_batch(bucket_id, current_batch, upload_api, request_options, pbar)
+
+    def _upload_file(
+        self,
+        endpoint,
+        file_path,
+    ):
+        file_name = os.path.basename(file_path)
+        file_extension = os.path.splitext(file_name)[1][1:].lower()
+
+        presigned_info = get_presigned_url(endpoint, file_name, file_extension)
+
+        upload_url = presigned_info["URL"]
+        headers = presigned_info.get("Header", {})
+        method = presigned_info.get("Method", "PUT").upper()
+
+        for key, value in headers.items():
+            if isinstance(value, list):
+                headers[key] = value[0]
+
+        try:
+            with open(file_path, "rb") as f:
+                file_data = f.read()
+        except Exception as e:
+            raise ValueError(f"Error reading file {file_path}: {e}")
+
+        if method == "PUT":
+            upload_response = requests.put(upload_url, data=file_data, headers=headers)
+        else:
+            raise ValueError(f"Unsupported HTTP method: {method}")
+
+        if upload_response.status_code not in (200, 201):
+            raise Exception(
+                f"Upload failed: {upload_response.status_code} - {upload_response.text}"
+            )
+
+        return strip_query_params(upload_url)
+
+    def _upload_file_batch(
+        self,
+        bucket_id,
+        batch,
+        upload_api,
+        request_options,
+        pbar,
+    ):
+        docs = []
+
+        progress = len(batch)
+        for file in batch:
+            url = self._upload_file(upload_api, file)
+            docs.append(
+                Document(
+                    bucket_id=bucket_id,
+                    file_path=url,
+                ),
+            )
+            pbar.update(0.25)
+            progress -= 0.25
+
+        if docs:
+            ingest = self.ingest(documents=docs, request_options=request_options)
+
+            completed_files = set()
+
+            while (
+                ingest is not None
+                and ingest.ingest.status not in ["complete", "error", "cancelled"]
+            ):
+                time.sleep(3)
+                ingest = self.documents.get_processing_status_by_id(ingest.ingest.process_id)
+
+                if ingest.ingest.progress and ingest.ingest.progress.processing:
+                    for doc in ingest.ingest.progress.processing.documents:
+                        if doc.status == "complete" and doc.document_id not in completed_files:
+                            pbar.update(0.75)
+                            progress -= 0.75
+
+                if ingest.ingest.status in ["error", "cancelled"]:
+                    raise ValueError(f"Ingest failed with status: {ingest.ingest.status}")
+
+            if progress > 0:
+                pbar.update(progress)
 
-        raise ApiError(status_code=_response.status_code, body=_response_json)
 
 
 class AsyncGroundX(AsyncGroundXBase):
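ingest_directory walks the directory tree, keeps only supported suffixes (including the aliases .jpeg, .heic, and .tif), and flushes a batch whenever the next file would push it past the clamped file count n or past 50 MiB of payload. A standalone sketch of that flush rule follows; it reuses the constants from this diff, plan_batches is a hypothetical helper (not part of the package), and unlike the method above it never flushes an empty batch.

    MAX_BATCH_SIZE = 50
    MIN_BATCH_SIZE = 1
    MAX_BATCH_SIZE_BYTES = 50 * 1024 * 1024  # 50 MiB per batch

    def plan_batches(sizes, batch_size=10):
        # Clamp the requested batch size to [MIN_BATCH_SIZE, MAX_BATCH_SIZE],
        # exactly as ingest_directory does.
        n = max(MIN_BATCH_SIZE, min(batch_size or MIN_BATCH_SIZE, MAX_BATCH_SIZE))
        batches, current, current_bytes = [], [], 0
        for size in sizes:
            # Flush before appending when the next file would overflow
            # the byte budget or the file-count budget.
            if current and (current_bytes + size > MAX_BATCH_SIZE_BYTES or len(current) >= n):
                batches.append(current)
                current, current_bytes = [], 0
            current.append(size)
            current_bytes += size
        if current:
            batches.append(current)
        return batches

    # Twelve invented 10 MiB files with batch_size=5 split into 5 + 5 + 2.
    print([len(b) for b in plan_batches([10 * 1024 * 1024] * 12, batch_size=5)])
    # [5, 5, 2]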
@@ -240,6 +383,7 @@ class AsyncGroundX(AsyncGroundXBase):
         self,
         *,
         documents: typing.Sequence[Document],
+        upload_api: str = "https://api.eyelevel.ai/upload/file",
         request_options: typing.Optional[RequestOptions] = None,
     ) -> IngestResponse:
         """
@@ -249,6 +393,10 @@ class AsyncGroundX(AsyncGroundXBase):
         ----------
         documents : typing.Sequence[Document]
 
+        # an endpoint that accepts 'name' and 'type' query params
+        # and returns a presigned URL in a JSON dictionary with key 'URL'
+        upload_api : typing.Optional[str]
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -287,49 +435,72 @@ class AsyncGroundX(AsyncGroundXBase):
         raise ValueError("Documents must all be either local or remote, not a mix.")
 
         if len(remote_documents) > 0:
+            if len(remote_documents) > MAX_BATCH_SIZE:
+                raise ValueError("You have sent too many documents in this request")
+
             return await self.documents.ingest_remote(
                 documents=remote_documents,
                 request_options=request_options,
             )
 
-
-
-
+        if len(local_documents) > MAX_BATCH_SIZE:
+            raise ValueError("You have sent too many documents in this request")
+
+        if len(local_documents) == 0:
+            raise ValueError("No valid documents were provided")
+
+        docs: typing.List[IngestRemoteDocument] = []
+        for d in local_documents:
+            url = self._upload_file(upload_api, Path(os.path.expanduser(d.file_path)))
+
+            docs.append(
+                IngestRemoteDocument(
+                    bucket_id=d.bucket_id,
+                    file_name=d.file_name,
+                    file_type=d.file_type,
+                    process_level=d.process_level,
+                    search_data=d.search_data,
+                    source_url=url,
+                )
+            )
+
+        return await self.documents.ingest_remote(
+            documents=docs,
+            request_options=request_options,
+        )
+
+    def _upload_file(
+        self,
+        endpoint,
+        file_path,
+    ):
+        file_name = os.path.basename(file_path)
+        file_extension = os.path.splitext(file_name)[1][1:].lower()
+
+        presigned_info = get_presigned_url(endpoint, file_name, file_extension)
+
+        upload_url = presigned_info["URL"]
+        headers = presigned_info.get("Header", {})
+        method = presigned_info.get("Method", "PUT").upper()
 
-
+        for key, value in headers.items():
+            if isinstance(value, list):
+                headers[key] = value[0]
 
         try:
-
-
-
-
-                        name=field_name,
-                        value=file_obj,
-                        filename=file_name,
-                        content_type=content_type,
-                    )
+            with open(file_path, "rb") as f:
+                file_data = f.read()
+        except Exception as e:
+            raise ValueError(f"Error reading file {file_path}: {e}")
 
-
-
-
-
-
-
-
-
-
-
-
-                )
-            if response.status == 400:
-                raise BadRequestError(await response.json())
-            if response.status == 401:
-                raise UnauthorizedError(await response.json())
-
-            raise ApiError(
-                status_code=response.status, body=await response.text()
-            )
-        except TimeoutError:
-            raise ApiError(status_code=408, body="Request timed out")
-        except aiohttp.ClientError as e:
-            raise ApiError(status_code=500, body=str(e))
+        if method == "PUT":
+            upload_response = requests.put(upload_url, data=file_data, headers=headers)
+        else:
+            raise ValueError(f"Unsupported HTTP method: {method}")
+
+        if upload_response.status_code not in (200, 201):
+            raise Exception(
+                f"Upload failed: {upload_response.status_code} - {upload_response.text}"
+            )
+
+        return strip_query_params(upload_url)
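The async client mirrors the sync flow: ingest awaits ingest_remote, while _upload_file itself remains a synchronous requests.put call. A usage sketch, assuming AsyncGroundX is exported from the package root the same way GroundX is; credentials and paths are placeholders.

    import asyncio

    from groundx import AsyncGroundX, Document

    async def main() -> None:
        client = AsyncGroundX(api_key="YOUR_API_KEY")
        # Awaits the remote ingest; the underlying file upload is blocking.
        response = await client.ingest(
            documents=[Document(bucket_id=1, file_path="~/reports/q3.pdf")],
        )
        print(response.ingest.process_id)

    asyncio.run(main())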
{groundx-2.2.4.dist-info → groundx-2.2.8.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: groundx
-Version: 2.2.4
+Version: 2.2.8
 Summary: 
 License: MIT
 Requires-Python: >=3.8,<4.0
@@ -26,6 +26,7 @@ Requires-Dist: pydantic (>=1.9.2)
 Requires-Dist: pydantic-core (>=2.18.2,<3.0.0)
 Requires-Dist: requests (>=2.4.0)
 Requires-Dist: tqdm (>=4.60.0)
+Requires-Dist: types-tqdm (>=4.60.0)
 Requires-Dist: typing_extensions (>=4.0.0)
 Description-Content-Type: text/markdown
 
{groundx-2.2.4.dist-info → groundx-2.2.8.dist-info}/RECORD
CHANGED
@@ -4,7 +4,7 @@ groundx/buckets/client.py,sha256=4jlc9vfIult1mMJ4FZW4_KFJybZPStZt1FUplIgrxbU,239
 groundx/client.py,sha256=dIW9OyrMyfC1N7HSxRrHh0w_8rJ8osNUOPdYD6ueQ6g,6515
 groundx/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 groundx/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-groundx/core/client_wrapper.py,sha256=
+groundx/core/client_wrapper.py,sha256=DckEncGF_W3v8kguKAXWcn2um2B9k-nkWvKmH0HoyGQ,1802
 groundx/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 groundx/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 groundx/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -26,7 +26,7 @@ groundx/groups/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 groundx/groups/client.py,sha256=bytQRh9m7e4vIuYHb7dD1kCTQZvyBxedCqGnmmLqrsI,35237
 groundx/health/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 groundx/health/client.py,sha256=fcTa21RWPyBuT77PQ0EncC6rBaW_DrYlRvudy9-0H58,7545
-groundx/ingest.py,sha256=
+groundx/ingest.py,sha256=mQB__GQmIDP6W5jGhmjORXh2o6HHTseHiahZz2Es-tM,16119
 groundx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 groundx/search/__init__.py,sha256=RagVzjShP33mDg9o4N3kGzV0egL1RYNjCpXPE8VzMYE,145
 groundx/search/client.py,sha256=zrrqFy0HowDUYPsMU4nfvDV2RgmkEQ4E8WYNktu3xcs,18684
@@ -81,7 +81,7 @@ groundx/types/subscription_detail.py,sha256=WNfUw2EMVECIvNYcV2s51zZ6T3Utc4zYXw63
 groundx/types/subscription_detail_meters.py,sha256=lBa8-1QlMVHjr5RLGqhiTKnD1KMM0AAHTWvz9TVtG8w,830
 groundx/types/website_source.py,sha256=3WeRCiilNKKBTfhwgjo3jbcVI3vLTeM-KxI6dVzpg9o,1578
 groundx/version.py,sha256=1yVogKaq260fQfckM2RYN2144SEw0QROsZW8ICtkG4U,74
-groundx-2.2.
-groundx-2.2.
-groundx-2.2.
-groundx-2.2.
+groundx-2.2.8.dist-info/LICENSE,sha256=dFE6nY1bHnSn6NqmdlghlU1gQqLqYNphrceGVehSa7o,1065
+groundx-2.2.8.dist-info/METADATA,sha256=Q67rLMlsO72-NH2qvK8Bw4ciSTDS5C4LNoPiBk8UWyI,5173
+groundx-2.2.8.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+groundx-2.2.8.dist-info/RECORD,,
{groundx-2.2.4.dist-info → groundx-2.2.8.dist-info}/LICENSE
File without changes

{groundx-2.2.4.dist-info → groundx-2.2.8.dist-info}/WHEEL
File without changes