groundx 2.2.9__py3-none-any.whl → 2.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. groundx/__init__.py +14 -16
  2. groundx/core/client_wrapper.py +1 -1
  3. groundx/documents/client.py +26 -13
  4. groundx/ingest.py +170 -60
  5. groundx/search/client.py +22 -2
  6. groundx/types/__init__.py +14 -16
  7. groundx/types/bucket_list_response.py +15 -1
  8. groundx/types/document_detail.py +5 -0
  9. groundx/types/document_lookup_response.py +5 -0
  10. groundx/types/group_list_response.py +15 -1
  11. groundx/types/ingest_local_document_metadata.py +5 -0
  12. groundx/types/ingest_remote_document.py +6 -1
  13. groundx/types/ingest_response.py +2 -2
  14. groundx/types/{process_status_response_ingest.py → ingest_status.py} +5 -4
  15. groundx/types/{ingest_response_ingest.py → ingest_status_light.py} +4 -2
  16. groundx/types/ingest_status_progress.py +26 -0
  17. groundx/types/{process_status_response_ingest_progress_errors.py → ingest_status_progress_cancelled.py} +1 -1
  18. groundx/types/{process_status_response_ingest_progress_complete.py → ingest_status_progress_complete.py} +1 -1
  19. groundx/types/{process_status_response_ingest_progress_cancelled.py → ingest_status_progress_errors.py} +1 -1
  20. groundx/types/{process_status_response_ingest_progress_processing.py → ingest_status_progress_processing.py} +1 -1
  21. groundx/types/processes_status_response.py +19 -2
  22. {groundx-2.2.9.dist-info → groundx-2.3.3.dist-info}/METADATA +1 -1
  23. {groundx-2.2.9.dist-info → groundx-2.3.3.dist-info}/RECORD +25 -26
  24. groundx/types/process_status_response.py +0 -20
  25. groundx/types/process_status_response_ingest_progress.py +0 -26
  26. {groundx-2.2.9.dist-info → groundx-2.3.3.dist-info}/LICENSE +0 -0
  27. {groundx-2.2.9.dist-info → groundx-2.3.3.dist-info}/WHEEL +0 -0
groundx/__init__.py CHANGED
@@ -27,17 +27,16 @@ from .types import (
27
27
  IngestLocalDocumentMetadata,
28
28
  IngestRemoteDocument,
29
29
  IngestResponse,
30
- IngestResponseIngest,
30
+ IngestStatus,
31
+ IngestStatusLight,
32
+ IngestStatusProgress,
33
+ IngestStatusProgressCancelled,
34
+ IngestStatusProgressComplete,
35
+ IngestStatusProgressErrors,
36
+ IngestStatusProgressProcessing,
31
37
  MessageResponse,
32
38
  MeterDetail,
33
39
  ProcessLevel,
34
- ProcessStatusResponse,
35
- ProcessStatusResponseIngest,
36
- ProcessStatusResponseIngestProgress,
37
- ProcessStatusResponseIngestProgressCancelled,
38
- ProcessStatusResponseIngestProgressComplete,
39
- ProcessStatusResponseIngestProgressErrors,
40
- ProcessStatusResponseIngestProgressProcessing,
41
40
  ProcessesStatusResponse,
42
41
  ProcessingStatus,
43
42
  SearchResponse,
@@ -87,17 +86,16 @@ __all__ = [
87
86
  "IngestLocalDocumentMetadata",
88
87
  "IngestRemoteDocument",
89
88
  "IngestResponse",
90
- "IngestResponseIngest",
89
+ "IngestStatus",
90
+ "IngestStatusLight",
91
+ "IngestStatusProgress",
92
+ "IngestStatusProgressCancelled",
93
+ "IngestStatusProgressComplete",
94
+ "IngestStatusProgressErrors",
95
+ "IngestStatusProgressProcessing",
91
96
  "MessageResponse",
92
97
  "MeterDetail",
93
98
  "ProcessLevel",
94
- "ProcessStatusResponse",
95
- "ProcessStatusResponseIngest",
96
- "ProcessStatusResponseIngestProgress",
97
- "ProcessStatusResponseIngestProgressCancelled",
98
- "ProcessStatusResponseIngestProgressComplete",
99
- "ProcessStatusResponseIngestProgressErrors",
100
- "ProcessStatusResponseIngestProgressProcessing",
101
99
  "ProcessesStatusResponse",
102
100
  "ProcessingStatus",
103
101
  "SearchContentRequestId",
@@ -16,7 +16,7 @@ class BaseClientWrapper:
16
16
  headers: typing.Dict[str, str] = {
17
17
  "X-Fern-Language": "Python",
18
18
  "X-Fern-SDK-Name": "groundx",
19
- "X-Fern-SDK-Version": "2.2.9",
19
+ "X-Fern-SDK-Version": "2.3.3",
20
20
  }
21
21
  headers["X-API-Key"] = self.api_key
22
22
  return headers
@@ -17,7 +17,6 @@ from ..types.sort import Sort
17
17
  from ..types.sort_order import SortOrder
18
18
  from ..types.processing_status import ProcessingStatus
19
19
  from ..types.document_list_response import DocumentListResponse
20
- from ..types.process_status_response import ProcessStatusResponse
21
20
  from ..core.jsonable_encoder import jsonable_encoder
22
21
  from ..types.document_lookup_response import DocumentLookupResponse
23
22
  from ..types.document_response import DocumentResponse
@@ -41,6 +40,8 @@ class DocumentsClient:
41
40
  """
42
41
  Ingest documents hosted on public URLs into a GroundX bucket.
43
42
 
43
+ [Supported Document Types and Ingest Capacities](https://docs.eyelevel.ai/documentation/fundamentals/document-types-and-ingest-capacities)
44
+
44
45
  Parameters
45
46
  ----------
46
47
  documents : typing.Sequence[IngestRemoteDocument]
@@ -125,6 +126,8 @@ class DocumentsClient:
125
126
  """
126
127
  Upload documents hosted on a local file system into a GroundX bucket.
127
128
 
129
+ [Supported Document Types and Ingest Capacities](https://docs.eyelevel.ai/documentation/fundamentals/document-types-and-ingest-capacities)
130
+
128
131
  Parameters
129
132
  ----------
130
133
  request : DocumentLocalIngestRequest
@@ -205,9 +208,12 @@ class DocumentsClient:
205
208
  ) -> IngestResponse:
206
209
  """
207
210
  Upload the content of a publicly accessible website for ingestion into a GroundX bucket. This is done by following links within a specified URL, recursively, up to a specified depth or number of pages.
211
+
208
212
  Note1: This endpoint is currently not supported for on-prem deployments.
209
213
  Note2: The `source_url` must include the protocol, http:// or https://.
210
214
 
215
+ [Supported Document Types and Ingest Capacities](https://docs.eyelevel.ai/documentation/fundamentals/document-types-and-ingest-capacities)
216
+
211
217
  Parameters
212
218
  ----------
213
219
  websites : typing.Sequence[WebsiteSource]
@@ -442,7 +448,7 @@ class DocumentsClient:
442
448
 
443
449
  def get_processing_status_by_id(
444
450
  self, process_id: str, *, request_options: typing.Optional[RequestOptions] = None
445
- ) -> ProcessStatusResponse:
451
+ ) -> IngestResponse:
446
452
  """
447
453
  Get the current status of an ingest, initiated with documents.ingest_remote, documents.ingest_local, or documents.crawl_website, by specifying the processId (the processId is included in the response of the documents.ingest functions).
448
454
 
@@ -456,7 +462,7 @@ class DocumentsClient:
456
462
 
457
463
  Returns
458
464
  -------
459
- ProcessStatusResponse
465
+ IngestResponse
460
466
  Look up success
461
467
 
462
468
  Examples
@@ -478,9 +484,9 @@ class DocumentsClient:
478
484
  try:
479
485
  if 200 <= _response.status_code < 300:
480
486
  return typing.cast(
481
- ProcessStatusResponse,
487
+ IngestResponse,
482
488
  parse_obj_as(
483
- type_=ProcessStatusResponse, # type: ignore
489
+ type_=IngestResponse, # type: ignore
484
490
  object_=_response.json(),
485
491
  ),
486
492
  )
@@ -522,12 +528,12 @@ class DocumentsClient:
522
528
  request_options: typing.Optional[RequestOptions] = None,
523
529
  ) -> DocumentLookupResponse:
524
530
  """
525
- lookup the document(s) associated with a processId, bucketId, groupId, or projectId.
531
+ lookup the document(s) associated with a processId, bucketId, or groupId.
526
532
 
527
533
  Parameters
528
534
  ----------
529
535
  id : int
530
- a processId, bucketId, groupId, or projectId
536
+ a processId, bucketId, or groupId
531
537
 
532
538
  n : typing.Optional[int]
533
539
  The maximum number of returned documents. Accepts 1-100 with a default of 20.
@@ -821,6 +827,8 @@ class AsyncDocumentsClient:
821
827
  """
822
828
  Ingest documents hosted on public URLs into a GroundX bucket.
823
829
 
830
+ [Supported Document Types and Ingest Capacities](https://docs.eyelevel.ai/documentation/fundamentals/document-types-and-ingest-capacities)
831
+
824
832
  Parameters
825
833
  ----------
826
834
  documents : typing.Sequence[IngestRemoteDocument]
@@ -913,6 +921,8 @@ class AsyncDocumentsClient:
913
921
  """
914
922
  Upload documents hosted on a local file system into a GroundX bucket.
915
923
 
924
+ [Supported Document Types and Ingest Capacities](https://docs.eyelevel.ai/documentation/fundamentals/document-types-and-ingest-capacities)
925
+
916
926
  Parameters
917
927
  ----------
918
928
  request : DocumentLocalIngestRequest
@@ -1005,9 +1015,12 @@ class AsyncDocumentsClient:
1005
1015
  ) -> IngestResponse:
1006
1016
  """
1007
1017
  Upload the content of a publicly accessible website for ingestion into a GroundX bucket. This is done by following links within a specified URL, recursively, up to a specified depth or number of pages.
1018
+
1008
1019
  Note1: This endpoint is currently not supported for on-prem deployments.
1009
1020
  Note2: The `source_url` must include the protocol, http:// or https://.
1010
1021
 
1022
+ [Supported Document Types and Ingest Capacities](https://docs.eyelevel.ai/documentation/fundamentals/document-types-and-ingest-capacities)
1023
+
1011
1024
  Parameters
1012
1025
  ----------
1013
1026
  websites : typing.Sequence[WebsiteSource]
@@ -1266,7 +1279,7 @@ class AsyncDocumentsClient:
1266
1279
 
1267
1280
  async def get_processing_status_by_id(
1268
1281
  self, process_id: str, *, request_options: typing.Optional[RequestOptions] = None
1269
- ) -> ProcessStatusResponse:
1282
+ ) -> IngestResponse:
1270
1283
  """
1271
1284
  Get the current status of an ingest, initiated with documents.ingest_remote, documents.ingest_local, or documents.crawl_website, by specifying the processId (the processId is included in the response of the documents.ingest functions).
1272
1285
 
@@ -1280,7 +1293,7 @@ class AsyncDocumentsClient:
1280
1293
 
1281
1294
  Returns
1282
1295
  -------
1283
- ProcessStatusResponse
1296
+ IngestResponse
1284
1297
  Look up success
1285
1298
 
1286
1299
  Examples
@@ -1310,9 +1323,9 @@ class AsyncDocumentsClient:
1310
1323
  try:
1311
1324
  if 200 <= _response.status_code < 300:
1312
1325
  return typing.cast(
1313
- ProcessStatusResponse,
1326
+ IngestResponse,
1314
1327
  parse_obj_as(
1315
- type_=ProcessStatusResponse, # type: ignore
1328
+ type_=IngestResponse, # type: ignore
1316
1329
  object_=_response.json(),
1317
1330
  ),
1318
1331
  )
@@ -1354,12 +1367,12 @@ class AsyncDocumentsClient:
1354
1367
  request_options: typing.Optional[RequestOptions] = None,
1355
1368
  ) -> DocumentLookupResponse:
1356
1369
  """
1357
- lookup the document(s) associated with a processId, bucketId, groupId, or projectId.
1370
+ lookup the document(s) associated with a processId, bucketId, or groupId.
1358
1371
 
1359
1372
  Parameters
1360
1373
  ----------
1361
1374
  id : int
1362
- a processId, bucketId, groupId, or projectId
1375
+ a processId, bucketId, or groupId
1363
1376
 
1364
1377
  n : typing.Optional[int]
1365
1378
  The maximum number of returned documents. Accepts 1-100 with a default of 20.
groundx/ingest.py CHANGED
@@ -9,6 +9,7 @@ from .csv_splitter import CSVSplitter
9
9
  from .types.document import Document
10
10
  from .types.ingest_remote_document import IngestRemoteDocument
11
11
  from .types.ingest_response import IngestResponse
12
+ from .types.ingest_status import IngestStatus
12
13
 
13
14
  # this is used as the default value for optional parameters
14
15
  OMIT = typing.cast(typing.Any, ...)
@@ -140,6 +141,8 @@ class GroundX(GroundXBase):
140
141
  self,
141
142
  *,
142
143
  documents: typing.Sequence[Document],
144
+ batch_size: typing.Optional[int] = 10,
145
+ wait_for_complete: typing.Optional[bool] = False,
143
146
  upload_api: typing.Optional[str] = "https://api.eyelevel.ai/upload/file",
144
147
  request_options: typing.Optional[RequestOptions] = None,
145
148
  ) -> IngestResponse:
@@ -150,6 +153,13 @@ class GroundX(GroundXBase):
150
153
  ----------
151
154
  documents : typing.Sequence[Document]
152
155
 
156
+ # defines how many files to send per batch
157
+ # ignored unless wait_for_complete is True
158
+ batch_size : typing.Optional[int]
159
+
160
+ # will turn on progress bar and wait for ingestion to complete
161
+ wait_for_complete : typing.Optional[bool]
162
+
153
163
  # an endpoint that accepts 'name' and 'type' query params
154
164
  # and returns a presigned URL in a JSON dictionary with key 'URL'
155
165
  upload_api : typing.Optional[str]
@@ -183,36 +193,84 @@ class GroundX(GroundXBase):
183
193
  """
184
194
  remote_documents, local_documents = prep_documents(documents)
185
195
 
186
- if len(remote_documents) + len(local_documents) > MAX_BATCH_SIZE:
187
- raise ValueError("You have sent too many documents in this request")
188
-
189
196
  if len(remote_documents) + len(local_documents) == 0:
190
197
  raise ValueError("No valid documents were provided")
191
198
 
192
- for d in local_documents:
193
- splits = split_doc(Path(os.path.expanduser(d.file_path)))
199
+ if wait_for_complete:
200
+ with tqdm(total=len(remote_documents) + len(local_documents), desc="Ingesting Files", unit="file") as pbar:
201
+ n = max(MIN_BATCH_SIZE, min(batch_size or MIN_BATCH_SIZE, MAX_BATCH_SIZE))
194
202
 
195
- for sd in splits:
196
- url = self._upload_file(upload_api, sd)
203
+ remote_batch: typing.List[IngestRemoteDocument] = []
204
+ ingest = IngestResponse(ingest=IngestStatus(process_id="",status="queued"))
197
205
 
198
- ft = d.file_type
199
- if sd.suffix.lower() in SUFFIX_ALIASES:
200
- ft = SUFFIX_ALIASES[sd.suffix.lower()]
206
+ progress = float(len(remote_documents))
207
+ for rd in remote_documents:
208
+ if len(remote_batch) >= n:
209
+ ingest = self.documents.ingest_remote(
210
+ documents=remote_batch,
211
+ request_options=request_options,
212
+ )
213
+ ingest, progress = self._monitor_batch(ingest, progress, pbar)
201
214
 
202
- fn = sd.name
203
- if len(splits) == 1 and d.file_name:
204
- fn = d.file_name
215
+ remote_batch = []
205
216
 
206
- remote_documents.append(
207
- IngestRemoteDocument(
208
- bucket_id=d.bucket_id,
209
- file_name=fn,
210
- file_type=ft,
211
- process_level=d.process_level,
212
- search_data=d.search_data,
213
- source_url=url,
217
+ remote_batch.append(rd)
218
+ pbar.update(0.25)
219
+ progress -= 0.25
220
+
221
+ if remote_batch:
222
+ ingest = self.documents.ingest_remote(
223
+ documents=remote_batch,
224
+ request_options=request_options,
214
225
  )
215
- )
226
+ ingest, progress = self._monitor_batch(ingest, progress, pbar)
227
+
228
+
229
+ if progress > 0:
230
+ pbar.update(progress)
231
+
232
+ current_batch_size = 0
233
+ local_batch: typing.List[Document] = []
234
+
235
+ progress = float(len(local_documents))
236
+ for ld in local_documents:
237
+ fp = Path(os.path.expanduser(ld.file_path))
238
+ file_size = fp.stat().st_size
239
+
240
+ if (current_batch_size + file_size > MAX_BATCH_SIZE_BYTES) or (len(local_batch) >= n):
241
+ up_docs, progress = self._process_local(local_batch, upload_api, progress, pbar)
242
+
243
+ ingest = self.documents.ingest_remote(
244
+ documents=up_docs,
245
+ request_options=request_options,
246
+ )
247
+ ingest, progress = self._monitor_batch(ingest, progress, pbar)
248
+
249
+ local_batch = []
250
+ current_batch_size = 0
251
+
252
+ local_batch.append(ld)
253
+ current_batch_size += file_size
254
+
255
+ if local_batch:
256
+ up_docs, progress = self._process_local(local_batch, upload_api, progress, pbar)
257
+
258
+ ingest = self.documents.ingest_remote(
259
+ documents=up_docs,
260
+ request_options=request_options,
261
+ )
262
+ ingest, progress = self._monitor_batch(ingest, progress, pbar)
263
+
264
+ if progress > 0:
265
+ pbar.update(progress)
266
+
267
+ return ingest
268
+ elif len(remote_documents) + len(local_documents) > MAX_BATCH_SIZE:
269
+ raise ValueError("You have sent too many documents in this request")
270
+
271
+
272
+ up_docs, _ = self._process_local(local_documents, upload_api)
273
+ remote_documents.extend(up_docs)
216
274
 
217
275
  return self.documents.ingest_remote(
218
276
  documents=remote_documents,
@@ -346,6 +404,92 @@ class GroundX(GroundXBase):
346
404
 
347
405
  return strip_query_params(upload_url)
348
406
 
407
+ def _process_local(
408
+ self,
409
+ local_docs,
410
+ upload_api,
411
+ progress = None,
412
+ pbar = None,
413
+ ):
414
+ remote_docs = []
415
+ for d in local_docs:
416
+ splits = split_doc(Path(os.path.expanduser(d.file_path)))
417
+
418
+ for sd in splits:
419
+ url = self._upload_file(upload_api, sd)
420
+
421
+ ft = d.file_type
422
+ if sd.suffix.lower() in SUFFIX_ALIASES:
423
+ ft = SUFFIX_ALIASES[sd.suffix.lower()]
424
+
425
+ fn = sd.name
426
+ if len(splits) == 1 and d.file_name:
427
+ fn = d.file_name
428
+
429
+ remote_docs.append(
430
+ IngestRemoteDocument(
431
+ bucket_id=d.bucket_id,
432
+ file_name=fn,
433
+ file_type=ft,
434
+ process_level=d.process_level,
435
+ search_data=d.search_data,
436
+ source_url=url,
437
+ )
438
+ )
439
+
440
+ if progress is not None and pbar is not None and pbar.update is not None:
441
+ pbar.update(0.25)
442
+ progress -= 0.25
443
+
444
+ return remote_docs, progress
445
+
446
+ def _monitor_batch(
447
+ self,
448
+ ingest,
449
+ progress,
450
+ pbar,
451
+ ):
452
+ completed_files = set()
453
+
454
+ while (
455
+ ingest is not None
456
+ and ingest.ingest.status not in ["complete", "error", "cancelled"]
457
+ ):
458
+ time.sleep(3)
459
+ ingest = self.documents.get_processing_status_by_id(ingest.ingest.process_id)
460
+
461
+ if ingest.ingest.progress:
462
+ if ingest.ingest.progress.processing and ingest.ingest.progress.processing.documents:
463
+ for doc in ingest.ingest.progress.processing.documents:
464
+ if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
465
+ pbar.update(0.75)
466
+ progress -= 0.75
467
+ completed_files.add(doc.document_id)
468
+ if ingest.ingest.progress.complete and ingest.ingest.progress.complete.documents:
469
+ for doc in ingest.ingest.progress.complete.documents:
470
+ if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
471
+ pbar.update(0.75)
472
+ progress -= 0.75
473
+ completed_files.add(doc.document_id)
474
+ if ingest.ingest.progress.cancelled and ingest.ingest.progress.cancelled.documents:
475
+ for doc in ingest.ingest.progress.cancelled.documents:
476
+ if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
477
+ pbar.update(0.75)
478
+ progress -= 0.75
479
+ completed_files.add(doc.document_id)
480
+ if ingest.ingest.progress.errors and ingest.ingest.progress.errors.documents:
481
+ for doc in ingest.ingest.progress.errors.documents:
482
+ if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
483
+ pbar.update(0.75)
484
+ progress -= 0.75
485
+ completed_files.add(doc.document_id)
486
+
487
+
488
+ if ingest.ingest.status in ["error", "cancelled"]:
489
+ raise ValueError(f"Ingest failed with status: {ingest.ingest.status}")
490
+
491
+ return ingest, progress
492
+
349
493
  def _upload_file_batch(
350
494
  self,
351
495
  bucket_id,
@@ -356,7 +500,7 @@ class GroundX(GroundXBase):
356
500
  ):
357
501
  docs = []
358
502
 
359
- progress = len(batch)
503
+ progress = float(len(batch))
360
504
  for file in batch:
361
505
  url = self._upload_file(upload_api, file)
362
506
  if file.suffix.lower() in SUFFIX_ALIASES:
@@ -381,44 +525,10 @@ class GroundX(GroundXBase):
381
525
 
382
526
  if docs:
383
527
  ingest = self.ingest(documents=docs, request_options=request_options)
528
+ ingest, progress = self._monitor_batch(ingest, progress, pbar)
384
529
 
385
- completed_files = set()
386
-
387
- while (
388
- ingest is not None
389
- and ingest.ingest.status not in ["complete", "error", "cancelled"]
390
- ):
391
- time.sleep(3)
392
- ingest = self.documents.get_processing_status_by_id(ingest.ingest.process_id)
393
-
394
- if ingest.ingest.progress:
395
- if ingest.ingest.progress.processing and ingest.ingest.progress.processing.documents:
396
- for doc in ingest.ingest.progress.processing.documents:
397
- if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
398
- pbar.update(0.75)
399
- progress -= 0.75
400
- if ingest.ingest.progress.complete and ingest.ingest.progress.complete.documents:
401
- for doc in ingest.ingest.progress.complete.documents:
402
- if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
403
- pbar.update(0.75)
404
- progress -= 0.75
405
- if ingest.ingest.progress.cancelled and ingest.ingest.progress.cancelled.documents:
406
- for doc in ingest.ingest.progress.cancelled.documents:
407
- if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
408
- pbar.update(0.75)
409
- progress -= 0.75
410
- if ingest.ingest.progress.errors and ingest.ingest.progress.errors.documents:
411
- for doc in ingest.ingest.progress.errors.documents:
412
- if doc.status in ["complete", "error", "cancelled"] and doc.document_id not in completed_files:
413
- pbar.update(0.75)
414
- progress -= 0.75
415
-
416
-
417
- if ingest.ingest.status in ["error", "cancelled"]:
418
- raise ValueError(f"Ingest failed with status: {ingest.ingest.status}")
419
-
420
- if progress > 0:
421
- pbar.update(progress)
530
+ if progress > 0:
531
+ pbar.update(progress)
422
532
 
423
533
 
424
534
 
groundx/search/client.py CHANGED
@@ -29,6 +29,7 @@ class SearchClient:
29
29
  n: typing.Optional[int] = None,
30
30
  next_token: typing.Optional[str] = None,
31
31
  verbosity: typing.Optional[int] = None,
32
+ filter: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
32
33
  relevance: typing.Optional[float] = OMIT,
33
34
  request_options: typing.Optional[RequestOptions] = None,
34
35
  ) -> SearchResponse:
@@ -39,7 +40,7 @@ class SearchClient:
39
40
  Parameters
40
41
  ----------
41
42
  id : SearchContentRequestId
42
- The bucketId, groupId, projectId, or documentId to be searched. The document or documents within the specified container will be compared to the query, and relevant information will be extracted.
43
+ The bucketId, groupId, or documentId to be searched. The document or documents within the specified container will be compared to the query, and relevant information will be extracted.
43
44
 
44
45
  query : str
45
46
  The search query to be used to find relevant documentation.
@@ -53,6 +54,9 @@ class SearchClient:
53
54
  verbosity : typing.Optional[int]
54
55
  The amount of data returned with each search result. 0 == no search results, only the recommended context. 1 == search results but no searchData. 2 == search results and searchData.
55
56
 
57
+ filter : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
58
+ A dictionary of key-value pairs that can be used to pre-filter documents prior to a search.
59
+
56
60
  relevance : typing.Optional[float]
57
61
  The minimum search relevance score required to include the result. By default, this is 10.0.
58
62
 
@@ -87,6 +91,7 @@ class SearchClient:
87
91
  },
88
92
  json={
89
93
  "query": query,
94
+ "filter": filter,
90
95
  "relevance": relevance,
91
96
  },
92
97
  headers={
@@ -137,6 +142,7 @@ class SearchClient:
137
142
  n: typing.Optional[int] = None,
138
143
  next_token: typing.Optional[str] = None,
139
144
  verbosity: typing.Optional[int] = None,
145
+ filter: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
140
146
  relevance: typing.Optional[float] = OMIT,
141
147
  request_options: typing.Optional[RequestOptions] = None,
142
148
  ) -> SearchResponse:
@@ -161,6 +167,9 @@ class SearchClient:
161
167
  verbosity : typing.Optional[int]
162
168
  The amount of data returned with each search result. 0 == no search results, only the recommended context. 1 == search results but no searchData. 2 == search results and searchData.
163
169
 
170
+ filter : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
171
+ A dictionary of key-value pairs that can be used to pre-filter documents prior to a search.
172
+
164
173
  relevance : typing.Optional[float]
165
174
  The minimum search relevance score required to include the result. By default, this is 10.0.
166
175
 
@@ -196,6 +205,7 @@ class SearchClient:
196
205
  json={
197
206
  "query": query,
198
207
  "documentIds": document_ids,
208
+ "filter": filter,
199
209
  "relevance": relevance,
200
210
  },
201
211
  headers={
@@ -251,6 +261,7 @@ class AsyncSearchClient:
251
261
  n: typing.Optional[int] = None,
252
262
  next_token: typing.Optional[str] = None,
253
263
  verbosity: typing.Optional[int] = None,
264
+ filter: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
254
265
  relevance: typing.Optional[float] = OMIT,
255
266
  request_options: typing.Optional[RequestOptions] = None,
256
267
  ) -> SearchResponse:
@@ -261,7 +272,7 @@ class AsyncSearchClient:
261
272
  Parameters
262
273
  ----------
263
274
  id : SearchContentRequestId
264
- The bucketId, groupId, projectId, or documentId to be searched. The document or documents within the specified container will be compared to the query, and relevant information will be extracted.
275
+ The bucketId, groupId, or documentId to be searched. The document or documents within the specified container will be compared to the query, and relevant information will be extracted.
265
276
 
266
277
  query : str
267
278
  The search query to be used to find relevant documentation.
@@ -275,6 +286,9 @@ class AsyncSearchClient:
275
286
  verbosity : typing.Optional[int]
276
287
  The amount of data returned with each search result. 0 == no search results, only the recommended context. 1 == search results but no searchData. 2 == search results and searchData.
277
288
 
289
+ filter : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
290
+ A dictionary of key-value pairs that can be used to pre-filter documents prior to a search.
291
+
278
292
  relevance : typing.Optional[float]
279
293
  The minimum search relevance score required to include the result. By default, this is 10.0.
280
294
 
@@ -317,6 +331,7 @@ class AsyncSearchClient:
317
331
  },
318
332
  json={
319
333
  "query": query,
334
+ "filter": filter,
320
335
  "relevance": relevance,
321
336
  },
322
337
  headers={
@@ -367,6 +382,7 @@ class AsyncSearchClient:
367
382
  n: typing.Optional[int] = None,
368
383
  next_token: typing.Optional[str] = None,
369
384
  verbosity: typing.Optional[int] = None,
385
+ filter: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
370
386
  relevance: typing.Optional[float] = OMIT,
371
387
  request_options: typing.Optional[RequestOptions] = None,
372
388
  ) -> SearchResponse:
@@ -391,6 +407,9 @@ class AsyncSearchClient:
391
407
  verbosity : typing.Optional[int]
392
408
  The amount of data returned with each search result. 0 == no search results, only the recommended context. 1 == search results but no searchData. 2 == search results and searchData.
393
409
 
410
+ filter : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
411
+ A dictionary of key-value pairs that can be used to pre-filter documents prior to a search.
412
+
394
413
  relevance : typing.Optional[float]
395
414
  The minimum search relevance score required to include the result. By default, this is 10.0.
396
415
 
@@ -434,6 +453,7 @@ class AsyncSearchClient:
434
453
  json={
435
454
  "query": query,
436
455
  "documentIds": document_ids,
456
+ "filter": filter,
437
457
  "relevance": relevance,
438
458
  },
439
459
  headers={
groundx/types/__init__.py CHANGED
@@ -26,17 +26,16 @@ from .ingest_local_document import IngestLocalDocument
26
26
  from .ingest_local_document_metadata import IngestLocalDocumentMetadata
27
27
  from .ingest_remote_document import IngestRemoteDocument
28
28
  from .ingest_response import IngestResponse
29
- from .ingest_response_ingest import IngestResponseIngest
29
+ from .ingest_status import IngestStatus
30
+ from .ingest_status_light import IngestStatusLight
31
+ from .ingest_status_progress import IngestStatusProgress
32
+ from .ingest_status_progress_cancelled import IngestStatusProgressCancelled
33
+ from .ingest_status_progress_complete import IngestStatusProgressComplete
34
+ from .ingest_status_progress_errors import IngestStatusProgressErrors
35
+ from .ingest_status_progress_processing import IngestStatusProgressProcessing
30
36
  from .message_response import MessageResponse
31
37
  from .meter_detail import MeterDetail
32
38
  from .process_level import ProcessLevel
33
- from .process_status_response import ProcessStatusResponse
34
- from .process_status_response_ingest import ProcessStatusResponseIngest
35
- from .process_status_response_ingest_progress import ProcessStatusResponseIngestProgress
36
- from .process_status_response_ingest_progress_cancelled import ProcessStatusResponseIngestProgressCancelled
37
- from .process_status_response_ingest_progress_complete import ProcessStatusResponseIngestProgressComplete
38
- from .process_status_response_ingest_progress_errors import ProcessStatusResponseIngestProgressErrors
39
- from .process_status_response_ingest_progress_processing import ProcessStatusResponseIngestProgressProcessing
40
39
  from .processes_status_response import ProcessesStatusResponse
41
40
  from .processing_status import ProcessingStatus
42
41
  from .search_response import SearchResponse
@@ -75,17 +74,16 @@ __all__ = [
75
74
  "IngestLocalDocumentMetadata",
76
75
  "IngestRemoteDocument",
77
76
  "IngestResponse",
78
- "IngestResponseIngest",
77
+ "IngestStatus",
78
+ "IngestStatusLight",
79
+ "IngestStatusProgress",
80
+ "IngestStatusProgressCancelled",
81
+ "IngestStatusProgressComplete",
82
+ "IngestStatusProgressErrors",
83
+ "IngestStatusProgressProcessing",
79
84
  "MessageResponse",
80
85
  "MeterDetail",
81
86
  "ProcessLevel",
82
- "ProcessStatusResponse",
83
- "ProcessStatusResponseIngest",
84
- "ProcessStatusResponseIngestProgress",
85
- "ProcessStatusResponseIngestProgressCancelled",
86
- "ProcessStatusResponseIngestProgressComplete",
87
- "ProcessStatusResponseIngestProgressErrors",
88
- "ProcessStatusResponseIngestProgressProcessing",
89
87
  "ProcessesStatusResponse",
90
88
  "ProcessingStatus",
91
89
  "SearchResponse",
@@ -3,12 +3,26 @@
3
3
  from ..core.pydantic_utilities import UniversalBaseModel
4
4
  import typing
5
5
  from .bucket_detail import BucketDetail
6
- from ..core.pydantic_utilities import IS_PYDANTIC_V2
7
6
  import pydantic
7
+ from ..core.pydantic_utilities import IS_PYDANTIC_V2
8
8
 
9
9
 
10
10
  class BucketListResponse(UniversalBaseModel):
11
11
  buckets: typing.Optional[typing.List[BucketDetail]] = None
12
+ count: typing.Optional[int] = pydantic.Field(default=None)
13
+ """
14
+ The number of buckets returned in the current response
15
+ """
16
+
17
+ remaining: typing.Optional[int] = pydantic.Field(default=None)
18
+ """
19
+ The number of buckets that have not been returned yet, will be null if there are no remaining buckets
20
+ """
21
+
22
+ total: typing.Optional[int] = pydantic.Field(default=None)
23
+ """
24
+ The total number of buckets found
25
+ """
12
26
 
13
27
  if IS_PYDANTIC_V2:
14
28
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
@@ -26,6 +26,11 @@ class DocumentDetail(UniversalBaseModel):
26
26
  """
27
27
 
28
28
  file_type: typing_extensions.Annotated[typing.Optional[DocumentType], FieldMetadata(alias="fileType")] = None
29
+ filter: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
30
+ """
31
+ A dictionary of key-value pairs that can be used to pre-filter documents prior to a search.
32
+ """
33
+
29
34
  process_id: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="processId")] = pydantic.Field(
30
35
  default=None
31
36
  )
@@ -17,6 +17,11 @@ class DocumentLookupResponse(UniversalBaseModel):
17
17
 
18
18
  documents: typing.Optional[typing.List[DocumentDetail]] = None
19
19
  next_token: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="nextToken")] = None
20
+ remaining: typing.Optional[int] = pydantic.Field(default=None)
21
+ """
22
+ The number of results that have not been returned yet. Will be null if there are no remaining results.
23
+ """
24
+
20
25
  total: typing.Optional[int] = pydantic.Field(default=None)
21
26
  """
22
27
  The total number of results found
@@ -3,12 +3,26 @@
3
3
  from ..core.pydantic_utilities import UniversalBaseModel
4
4
  import typing
5
5
  from .group_detail import GroupDetail
6
- from ..core.pydantic_utilities import IS_PYDANTIC_V2
7
6
  import pydantic
7
+ from ..core.pydantic_utilities import IS_PYDANTIC_V2
8
8
 
9
9
 
10
10
  class GroupListResponse(UniversalBaseModel):
11
11
  groups: typing.Optional[typing.List[GroupDetail]] = None
12
+ count: typing.Optional[int] = pydantic.Field(default=None)
13
+ """
14
+ The number of groups returned in the current response
15
+ """
16
+
17
+ remaining: typing.Optional[int] = pydantic.Field(default=None)
18
+ """
19
+ The number of groups that have not been returned yet, will be null if there are no remaining groups
20
+ """
21
+
22
+ total: typing.Optional[int] = pydantic.Field(default=None)
23
+ """
24
+ The total number of groups found
25
+ """
12
26
 
13
27
  if IS_PYDANTIC_V2:
14
28
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
@@ -26,6 +26,11 @@ class IngestLocalDocumentMetadata(UniversalBaseModel):
26
26
  """
27
27
 
28
28
  file_type: typing_extensions.Annotated[typing.Optional[DocumentType], FieldMetadata(alias="fileType")] = None
29
+ filter: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
30
+ """
31
+ A dictionary of key-value pairs that can be used to pre-filter documents prior to a search.
32
+ """
33
+
29
34
  process_level: typing_extensions.Annotated[typing.Optional[ProcessLevel], FieldMetadata(alias="processLevel")] = (
30
35
  None
31
36
  )
@@ -20,10 +20,15 @@ class IngestRemoteDocument(UniversalBaseModel):
20
20
  default=None
21
21
  )
22
22
  """
23
- The name of the file being ingested
23
+ The name of the file being ingested.
24
24
  """
25
25
 
26
26
  file_type: typing_extensions.Annotated[typing.Optional[DocumentType], FieldMetadata(alias="fileType")] = None
27
+ filter: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
28
+ """
29
+ A dictionary of key-value pairs that can be used to pre-filter documents prior to a search.
30
+ """
31
+
27
32
  process_level: typing_extensions.Annotated[typing.Optional[ProcessLevel], FieldMetadata(alias="processLevel")] = (
28
33
  None
29
34
  )
@@ -1,14 +1,14 @@
1
1
  # This file was auto-generated by Fern from our API Definition.
2
2
 
3
3
  from ..core.pydantic_utilities import UniversalBaseModel
4
- from .ingest_response_ingest import IngestResponseIngest
4
+ from .ingest_status import IngestStatus
5
5
  from ..core.pydantic_utilities import IS_PYDANTIC_V2
6
6
  import typing
7
7
  import pydantic
8
8
 
9
9
 
10
10
  class IngestResponse(UniversalBaseModel):
11
- ingest: IngestResponseIngest
11
+ ingest: IngestStatus
12
12
 
13
13
  if IS_PYDANTIC_V2:
14
14
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
@@ -1,18 +1,19 @@
1
1
  # This file was auto-generated by Fern from our API Definition.
2
2
 
3
3
  from ..core.pydantic_utilities import UniversalBaseModel
4
+ import typing
4
5
  import typing_extensions
5
6
  from ..core.serialization import FieldMetadata
6
- import typing
7
- from .process_status_response_ingest_progress import ProcessStatusResponseIngestProgress
7
+ from .ingest_status_progress import IngestStatusProgress
8
8
  from .processing_status import ProcessingStatus
9
9
  from ..core.pydantic_utilities import IS_PYDANTIC_V2
10
10
  import pydantic
11
11
 
12
12
 
13
- class ProcessStatusResponseIngest(UniversalBaseModel):
13
+ class IngestStatus(UniversalBaseModel):
14
+ id: typing.Optional[int] = None
14
15
  process_id: typing_extensions.Annotated[str, FieldMetadata(alias="processId")]
15
- progress: typing.Optional[ProcessStatusResponseIngestProgress] = None
16
+ progress: typing.Optional[IngestStatusProgress] = None
16
17
  status: ProcessingStatus
17
18
  status_message: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="statusMessage")] = None
18
19
 
@@ -1,17 +1,19 @@
1
1
  # This file was auto-generated by Fern from our API Definition.
2
2
 
3
3
  from ..core.pydantic_utilities import UniversalBaseModel
4
+ import typing
4
5
  import typing_extensions
5
6
  from ..core.serialization import FieldMetadata
6
7
  from .processing_status import ProcessingStatus
7
8
  from ..core.pydantic_utilities import IS_PYDANTIC_V2
8
- import typing
9
9
  import pydantic
10
10
 
11
11
 
12
- class IngestResponseIngest(UniversalBaseModel):
12
+ class IngestStatusLight(UniversalBaseModel):
13
+ id: typing.Optional[int] = None
13
14
  process_id: typing_extensions.Annotated[str, FieldMetadata(alias="processId")]
14
15
  status: ProcessingStatus
16
+ status_message: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="statusMessage")] = None
15
17
 
16
18
  if IS_PYDANTIC_V2:
17
19
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
@@ -0,0 +1,26 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from ..core.pydantic_utilities import UniversalBaseModel
4
+ import typing
5
+ from .ingest_status_progress_cancelled import IngestStatusProgressCancelled
6
+ from .ingest_status_progress_complete import IngestStatusProgressComplete
7
+ from .ingest_status_progress_errors import IngestStatusProgressErrors
8
+ from .ingest_status_progress_processing import IngestStatusProgressProcessing
9
+ from ..core.pydantic_utilities import IS_PYDANTIC_V2
10
+ import pydantic
11
+
12
+
13
+ class IngestStatusProgress(UniversalBaseModel):
14
+ cancelled: typing.Optional[IngestStatusProgressCancelled] = None
15
+ complete: typing.Optional[IngestStatusProgressComplete] = None
16
+ errors: typing.Optional[IngestStatusProgressErrors] = None
17
+ processing: typing.Optional[IngestStatusProgressProcessing] = None
18
+
19
+ if IS_PYDANTIC_V2:
20
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
21
+ else:
22
+
23
+ class Config:
24
+ frozen = True
25
+ smart_union = True
26
+ extra = pydantic.Extra.allow
@@ -7,7 +7,7 @@ from ..core.pydantic_utilities import IS_PYDANTIC_V2
7
7
  import pydantic
8
8
 
9
9
 
10
- class ProcessStatusResponseIngestProgressErrors(UniversalBaseModel):
10
+ class IngestStatusProgressCancelled(UniversalBaseModel):
11
11
  documents: typing.Optional[typing.List[DocumentDetail]] = None
12
12
  total: typing.Optional[int] = None
13
13
 
@@ -7,7 +7,7 @@ from ..core.pydantic_utilities import IS_PYDANTIC_V2
7
7
  import pydantic
8
8
 
9
9
 
10
- class ProcessStatusResponseIngestProgressComplete(UniversalBaseModel):
10
+ class IngestStatusProgressComplete(UniversalBaseModel):
11
11
  documents: typing.Optional[typing.List[DocumentDetail]] = None
12
12
  total: typing.Optional[int] = None
13
13
 
@@ -7,7 +7,7 @@ from ..core.pydantic_utilities import IS_PYDANTIC_V2
7
7
  import pydantic
8
8
 
9
9
 
10
- class ProcessStatusResponseIngestProgressCancelled(UniversalBaseModel):
10
+ class IngestStatusProgressErrors(UniversalBaseModel):
11
11
  documents: typing.Optional[typing.List[DocumentDetail]] = None
12
12
  total: typing.Optional[int] = None
13
13
 
@@ -7,7 +7,7 @@ from ..core.pydantic_utilities import IS_PYDANTIC_V2
7
7
  import pydantic
8
8
 
9
9
 
10
- class ProcessStatusResponseIngestProgressProcessing(UniversalBaseModel):
10
+ class IngestStatusProgressProcessing(UniversalBaseModel):
11
11
  documents: typing.Optional[typing.List[DocumentDetail]] = None
12
12
  total: typing.Optional[int] = None
13
13
 
@@ -1,6 +1,23 @@
1
1
  # This file was auto-generated by Fern from our API Definition.
2
2
 
3
+ from ..core.pydantic_utilities import UniversalBaseModel
3
4
  import typing
4
- from .process_status_response import ProcessStatusResponse
5
+ from .ingest_status_light import IngestStatusLight
6
+ import pydantic
7
+ from ..core.pydantic_utilities import IS_PYDANTIC_V2
5
8
 
6
- ProcessesStatusResponse = typing.List[ProcessStatusResponse]
9
+
10
+ class ProcessesStatusResponse(UniversalBaseModel):
11
+ processes: typing.List[IngestStatusLight] = pydantic.Field()
12
+ """
13
+ Processes
14
+ """
15
+
16
+ if IS_PYDANTIC_V2:
17
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
18
+ else:
19
+
20
+ class Config:
21
+ frozen = True
22
+ smart_union = True
23
+ extra = pydantic.Extra.allow
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: groundx
3
- Version: 2.2.9
3
+ Version: 2.3.3
4
4
  Summary:
5
5
  License: MIT
6
6
  Requires-Python: >=3.8,<4.0
@@ -1,10 +1,10 @@
1
- groundx/__init__.py,sha256=h2Hzs9yA7uz0N4s_xzpzbE9Rkkq0TA_My-xqjKZWKlU,3103
1
+ groundx/__init__.py,sha256=hPdwGKDv9pYpW077kZRiEV3rur8SBE3u-JMzc8sARvo,2861
2
2
  groundx/buckets/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
3
3
  groundx/buckets/client.py,sha256=4jlc9vfIult1mMJ4FZW4_KFJybZPStZt1FUplIgrxbU,23947
4
4
  groundx/client.py,sha256=dIW9OyrMyfC1N7HSxRrHh0w_8rJ8osNUOPdYD6ueQ6g,6515
5
5
  groundx/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
6
6
  groundx/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
7
- groundx/core/client_wrapper.py,sha256=D6uZpUYxYzmgxNNCTN7quiFvNlBQmPOyLstnrXfcJcs,1802
7
+ groundx/core/client_wrapper.py,sha256=H248fDvW8hSRD0zzzF3XWE4M8UCCdhDrfFGsET4UwOo,1802
8
8
  groundx/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
9
9
  groundx/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
10
10
  groundx/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -18,7 +18,7 @@ groundx/csv_splitter.py,sha256=6HGXdDpwBX_IJaCbla1WuirJERBTvjLzBf9OBtwGFWU,2254
18
18
  groundx/customer/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
19
19
  groundx/customer/client.py,sha256=C_JANeDewRD1Kg-q7LPxdiOSWbYSTOiYlBYZLRYPI44,3467
20
20
  groundx/documents/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
21
- groundx/documents/client.py,sha256=TVmQd5Q3DjuUbve7CNG90ufn65EyselMx7bAiUIgdSc,58805
21
+ groundx/documents/client.py,sha256=Uw99gixszCJLAzyIdiISURZCInuOur9aAPzQrmwExSY,59529
22
22
  groundx/environment.py,sha256=CInm1_DKtZ1mrxutmKb1qqv82P33r_S87hZD3Hc1VB0,159
23
23
  groundx/errors/__init__.py,sha256=-prNYsFd8xxM4va0vR1raZjcd10tllOJKyEWjX_pwdU,214
24
24
  groundx/errors/bad_request_error.py,sha256=_EbO8mWqN9kFZPvIap8qa1lL_EWkRcsZe1HKV9GDWJY,264
@@ -27,51 +27,50 @@ groundx/groups/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
27
27
  groundx/groups/client.py,sha256=bytQRh9m7e4vIuYHb7dD1kCTQZvyBxedCqGnmmLqrsI,35237
28
28
  groundx/health/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
29
29
  groundx/health/client.py,sha256=fcTa21RWPyBuT77PQ0EncC6rBaW_DrYlRvudy9-0H58,7545
30
- groundx/ingest.py,sha256=LtnUGcgtE1MNYL3PGFrzPqRMnLeOxr-fVsZ3fmTAUKI,18294
30
+ groundx/ingest.py,sha256=GbCpuzqipBARt3FOTxTk9PpLxkQ4IS1SYnwlZBBcy7s,22399
31
31
  groundx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
32
32
  groundx/search/__init__.py,sha256=RagVzjShP33mDg9o4N3kGzV0egL1RYNjCpXPE8VzMYE,145
33
- groundx/search/client.py,sha256=zrrqFy0HowDUYPsMU4nfvDV2RgmkEQ4E8WYNktu3xcs,18684
33
+ groundx/search/client.py,sha256=2pvs2KoGdqp8irltuKdWD7G0AJ_XBPM9vYy9wJlzpkw,19886
34
34
  groundx/search/types/__init__.py,sha256=fNFXQloPa1PHHO8VZim6KQNMA9N5EZtfSkissdxtY_c,165
35
35
  groundx/search/types/search_content_request_id.py,sha256=us7mYdzR0qPur_wR5I9BhHaLEzC5nLBRna6-xq4M1ec,128
36
- groundx/types/__init__.py,sha256=FIfetK3iKUmtyHI8_hUTybZ5mW_MUtOshC7sFm4-Qps,3963
36
+ groundx/types/__init__.py,sha256=SEzsG4JOnW7s9-IKZb7hUtQOWFyD7FG8HBaFVxb6gFw,3584
37
37
  groundx/types/bounding_box_detail.py,sha256=51qcen326NTHY2ZqH1cFXut0_MCmk39EbLoDAwotdq4,1832
38
38
  groundx/types/bucket_detail.py,sha256=bQjCvfyWydjItmzNNTvH-iWxNDOggd7R7X1alFZzlEY,1511
39
- groundx/types/bucket_list_response.py,sha256=jC0NBsLCYDSwQrBzuW0g3PWFycjtKl8YRkKhic_-1DA,650
39
+ groundx/types/bucket_list_response.py,sha256=XUIrshu_Vd8LNIFhgcRu5zKCHx8_drrW6muRPr8S17Y,1096
40
40
  groundx/types/bucket_response.py,sha256=E8V7H2_TVKdmMsGCBjwzdf2bg4rUjiXFnhXtVGVCqZQ,608
41
41
  groundx/types/bucket_update_detail.py,sha256=B4atQMDSXEdx7otcDbvgsrAHtXNz9swMnOsXRe25coc,738
42
42
  groundx/types/bucket_update_response.py,sha256=h5RJTEpc4WPI_C4sPvsJZo7IxKppnPR-I9VGEQryRlc,633
43
43
  groundx/types/customer_detail.py,sha256=RNm0qXvKx6YvVmkVJZeCNIz7n8_siFMXJ_AGtH3i5Z0,1094
44
44
  groundx/types/customer_response.py,sha256=_RbuanXhCWQkCeQ0dkwPgsjNBoBgNpixiNfRpXcMew8,618
45
45
  groundx/types/document.py,sha256=oU-rDYOVCVAxGdxI0OFwOsAVAx86_6MQ-je7Q9ES0mY,1869
46
- groundx/types/document_detail.py,sha256=i1UfcQAGYo9v1HwrrpzQPw_O0qA7IOXwOUuPV1yU8nI,2323
46
+ groundx/types/document_detail.py,sha256=w7qj9-0xuUV0IbEC7dcFefrQUkdghsI5USO8g1YpVM8,2542
47
47
  groundx/types/document_list_response.py,sha256=Z0Hm5VBwI0qatbSp6nYHh0RrGwJN3Gqh2D72FfDseZk,839
48
48
  groundx/types/document_local_ingest_request.py,sha256=zqaT_QgYcEc8AfVwZm-O5jLTEiYSsO-i3VVgZ_7xl7w,197
49
- groundx/types/document_lookup_response.py,sha256=hZBwUO2pI6xFfeh7DmX_l1xRoh-5oaVNgUVxd00ml14,1097
49
+ groundx/types/document_lookup_response.py,sha256=lkyuVR-LHH-lcslH4I9g3kYg3AFe1iu83DOgqL0KMiE,1288
50
50
  groundx/types/document_response.py,sha256=EBDrYhTYoA3Q3ZpqwFEYmTgE3tY86TScFPhBnc3_ItI,642
51
51
  groundx/types/document_type.py,sha256=JQiwb7ZQOCKHPyCR71RYBue3akPsEQj3C2T-FMDolb8,431
52
52
  groundx/types/group_detail.py,sha256=ms8iEEE1d88PTNwJOHeSiyoXn0hKdMrjJhkM2tlMdMs,1754
53
- groundx/types/group_list_response.py,sha256=mDIDaW8uWo4Wd-sk8_SzaZeUD1EC4ZqcJpbieQLRszc,645
53
+ groundx/types/group_list_response.py,sha256=YKcuTB2ncDiB4ozKhljxZDDL4_Pw7x3s_IEPQQKxQo8,1087
54
54
  groundx/types/group_response.py,sha256=SGqaQYPV9jaA4ET3x2adhkgL9NQQB9XwolXTuBQ1Xx0,603
55
55
  groundx/types/health_response.py,sha256=3UpYL2IZb56tTo-fOpSU-0OTRyWgpYiB3pMU3sfjWUU,633
56
56
  groundx/types/health_response_health.py,sha256=I0QeEljFp6l5LCJbCTArW031Om84egePgnGdtE6WXlI,632
57
57
  groundx/types/health_service.py,sha256=M1-h1EJSpAXw-j3pY-09_g_WKkO0spdj8e7pgPzSGf0,1083
58
58
  groundx/types/health_service_status.py,sha256=ugKJXlx8QGi83n_J6s1frFrW1hYfOn3Dlb_pPNexwMA,185
59
59
  groundx/types/ingest_local_document.py,sha256=am6TPgHu40S4Lzo9hMkDRauYnc-AWBuYL0Lgk85Fseg,753
60
- groundx/types/ingest_local_document_metadata.py,sha256=GBWalwdF6AzQSlfxgkwOh8dJ8T2aPB3XHcNaNYR0L8o,1738
61
- groundx/types/ingest_remote_document.py,sha256=u79fsF-zwkXs6vm80-aFaChi3rMpC1e1KHFOlqLNWwk,1863
62
- groundx/types/ingest_response.py,sha256=139rn8wpT44jlUzYXiy0r8XzN2U_OtdLltpSbRU0TyA,633
63
- groundx/types/ingest_response_ingest.py,sha256=8FKApYNvS6KFxEKm05pKpKJ0BAagxoE0cWeTt-qjm1g,781
60
+ groundx/types/ingest_local_document_metadata.py,sha256=MwkTUgkOdVCGicKjjJjpCMadse4ZXskwfvftTx39Dfo,1957
61
+ groundx/types/ingest_remote_document.py,sha256=-vLs-vEZwlyMXzTiW8Gv32cbbZDchWTLaj5L47dS_0g,2083
62
+ groundx/types/ingest_response.py,sha256=MYQSag2eyqEm7kH_7MPozPqmo1mtXfqeuvLFEYfhRJU,608
63
+ groundx/types/ingest_status.py,sha256=MPqgVctbBZjccwcCrris2U4xKSy2aWS1_rzHxFpOM74,1040
64
+ groundx/types/ingest_status_light.py,sha256=vpy5BuYw0yYZdz8WE6WxslV2GNkKR41E4TqKBWpHjUs,929
65
+ groundx/types/ingest_status_progress.py,sha256=E_fOdz1B8LljYkIrYpv57OHULnTwkXsneaxo8d8rC70,1117
66
+ groundx/types/ingest_status_progress_cancelled.py,sha256=jEjqUNxrCxUEz4MWStHHSHEKnsQ0ev3IvPN_RWg5N3s,708
67
+ groundx/types/ingest_status_progress_complete.py,sha256=rC6jrtIQgLYLUcZ770E-KxkeoTW4XwPYdHWqHVBLStU,707
68
+ groundx/types/ingest_status_progress_errors.py,sha256=ew-0exkJbvRBLWe7ikBkqzCSNt08kSUv79Fn-eT-ZRQ,705
69
+ groundx/types/ingest_status_progress_processing.py,sha256=ADWDSUmt3uoOZQeFFlZN_AWyAEl92Fsa2750DZSiN0I,709
64
70
  groundx/types/message_response.py,sha256=g_FJJyXYg_3fjZQueXkcy11q-qUfZGdVdALddHBieh4,585
65
71
  groundx/types/meter_detail.py,sha256=FybpzJj5QrtlDXT26ejw2CH1koOWe0ZeG-MS0n63HSI,1152
66
72
  groundx/types/process_level.py,sha256=gDFm3FKDpoL_W7jGlZcv9EMHwSALiT7mnJuV6EJ4dyA,152
67
- groundx/types/process_status_response.py,sha256=ScmEqF9TVGeugFLshyFbGQpM4MkAawsEJ3sUBILP87c,662
68
- groundx/types/process_status_response_ingest.py,sha256=tZteJy-DI_2jT6eLbTJ812qJc_WG9Sd44w7G1LVatLQ,1066
69
- groundx/types/process_status_response_ingest_progress.py,sha256=4rPMHdLEc6PC5qTjMBAB4IA-l38t5KPcN8qtET7qKfc,1320
70
- groundx/types/process_status_response_ingest_progress_cancelled.py,sha256=PpvHXzb6ewD-nWUSMpE0T4ZS0UtIo4iP57os1MP4CWQ,723
71
- groundx/types/process_status_response_ingest_progress_complete.py,sha256=6-UuZCHK28No15RPigumAJ4r_gAUjbElH3Brg7ED8pU,722
72
- groundx/types/process_status_response_ingest_progress_errors.py,sha256=aSpl99wXwcTFhtkB47BpBZATcqzQS6cXHssKLPdtnzQ,720
73
- groundx/types/process_status_response_ingest_progress_processing.py,sha256=rqaTkeki5Vf3SgeeZtuSRtioGqmj99JcbfKoeI-Hgos,724
74
- groundx/types/processes_status_response.py,sha256=LSCEf3i1Gs3QDCYye2lw8zmZDWODtDMNu9aMf1YIfH8,200
73
+ groundx/types/processes_status_response.py,sha256=eCf2h7H0R9mL978U6aqMQJ13V6ZWz4ScmCm9uRbj40Y,698
75
74
  groundx/types/processing_status.py,sha256=nUvsnKcDOFcT6NRDCTQ1vpm4KDNiKy8_b7PxaiKxnZM,226
76
75
  groundx/types/search_response.py,sha256=EUwAFEHfzEisHCSTxa5zAy7VWY-bebV5VLx0b7irNlI,633
77
76
  groundx/types/search_response_search.py,sha256=fhEbG9qQZHWlxpI_A9rLOHhm7VbzECrTLhicHkR2Xi0,1685
@@ -82,7 +81,7 @@ groundx/types/subscription_detail.py,sha256=WNfUw2EMVECIvNYcV2s51zZ6T3Utc4zYXw63
82
81
  groundx/types/subscription_detail_meters.py,sha256=lBa8-1QlMVHjr5RLGqhiTKnD1KMM0AAHTWvz9TVtG8w,830
83
82
  groundx/types/website_source.py,sha256=3WeRCiilNKKBTfhwgjo3jbcVI3vLTeM-KxI6dVzpg9o,1578
84
83
  groundx/version.py,sha256=1yVogKaq260fQfckM2RYN2144SEw0QROsZW8ICtkG4U,74
85
- groundx-2.2.9.dist-info/LICENSE,sha256=dFE6nY1bHnSn6NqmdlghlU1gQqLqYNphrceGVehSa7o,1065
86
- groundx-2.2.9.dist-info/METADATA,sha256=1BWmC2-Lx8AT1vEY0juSK4ugjxyn9W5ndJy0bomFEwQ,5173
87
- groundx-2.2.9.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
88
- groundx-2.2.9.dist-info/RECORD,,
84
+ groundx-2.3.3.dist-info/LICENSE,sha256=dFE6nY1bHnSn6NqmdlghlU1gQqLqYNphrceGVehSa7o,1065
85
+ groundx-2.3.3.dist-info/METADATA,sha256=UgcRA_9yyRfXYaGboHJL9KaO7PAY-LZLzXSWkUdK1BE,5173
86
+ groundx-2.3.3.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
87
+ groundx-2.3.3.dist-info/RECORD,,
@@ -1,20 +0,0 @@
1
- # This file was auto-generated by Fern from our API Definition.
2
-
3
- from ..core.pydantic_utilities import UniversalBaseModel
4
- from .process_status_response_ingest import ProcessStatusResponseIngest
5
- from ..core.pydantic_utilities import IS_PYDANTIC_V2
6
- import typing
7
- import pydantic
8
-
9
-
10
- class ProcessStatusResponse(UniversalBaseModel):
11
- ingest: ProcessStatusResponseIngest
12
-
13
- if IS_PYDANTIC_V2:
14
- model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
15
- else:
16
-
17
- class Config:
18
- frozen = True
19
- smart_union = True
20
- extra = pydantic.Extra.allow
@@ -1,26 +0,0 @@
1
- # This file was auto-generated by Fern from our API Definition.
2
-
3
- from ..core.pydantic_utilities import UniversalBaseModel
4
- import typing
5
- from .process_status_response_ingest_progress_cancelled import ProcessStatusResponseIngestProgressCancelled
6
- from .process_status_response_ingest_progress_complete import ProcessStatusResponseIngestProgressComplete
7
- from .process_status_response_ingest_progress_errors import ProcessStatusResponseIngestProgressErrors
8
- from .process_status_response_ingest_progress_processing import ProcessStatusResponseIngestProgressProcessing
9
- from ..core.pydantic_utilities import IS_PYDANTIC_V2
10
- import pydantic
11
-
12
-
13
- class ProcessStatusResponseIngestProgress(UniversalBaseModel):
14
- cancelled: typing.Optional[ProcessStatusResponseIngestProgressCancelled] = None
15
- complete: typing.Optional[ProcessStatusResponseIngestProgressComplete] = None
16
- errors: typing.Optional[ProcessStatusResponseIngestProgressErrors] = None
17
- processing: typing.Optional[ProcessStatusResponseIngestProgressProcessing] = None
18
-
19
- if IS_PYDANTIC_V2:
20
- model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
21
- else:
22
-
23
- class Config:
24
- frozen = True
25
- smart_union = True
26
- extra = pydantic.Extra.allow