groundx 2.2.7__py3-none-any.whl → 2.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

groundx/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "groundx",
-            "X-Fern-SDK-Version": "2.2.7",
+            "X-Fern-SDK-Version": "2.2.8",
         }
         headers["X-API-Key"] = self.api_key
         return headers
groundx/ingest.py CHANGED
@@ -1,17 +1,10 @@
-import aiohttp, io, json, mimetypes, requests, time, typing, os
-from asyncio import TimeoutError
+import requests, time, typing, os
 from pathlib import Path
 from tqdm import tqdm
 from urllib.parse import urlparse, urlunparse
 
-from json.decoder import JSONDecodeError
-
 from .client import GroundXBase, AsyncGroundXBase
-from .core.api_error import ApiError
-from .core.pydantic_utilities import parse_obj_as
 from .core.request_options import RequestOptions
-from .errors.bad_request_error import BadRequestError
-from .errors.unauthorized_error import UnauthorizedError
 from .types.document import Document
 from .types.document_type import DocumentType
 from .types.ingest_remote_document import IngestRemoteDocument
@@ -55,13 +48,30 @@ MAX_BATCH_SIZE = 50
 MIN_BATCH_SIZE = 1
 MAX_BATCH_SIZE_BYTES = 50 * 1024 * 1024
 
+def get_presigned_url(
+    endpoint: str,
+    file_name: str,
+    file_extension: str,
+) -> typing.Dict[str, typing.Any]:
+    params = {"name": file_name, "type": file_extension}
+    response = requests.get(endpoint, params=params)
+    response.raise_for_status()
+
+    return response.json()
+
+def strip_query_params(
+    url: str,
+) -> str:
+    parsed = urlparse(url)
+    clean_url = urlunparse((parsed.scheme, parsed.netloc, parsed.path, "", "", ""))
+
+    return clean_url
+
 def prep_documents(
     documents: typing.Sequence[Document],
 ) -> typing.Tuple[
     typing.List[IngestRemoteDocument],
-    typing.List[
-        typing.Tuple[str, typing.Tuple[typing.Union[str, None], typing.BinaryIO, str]]
-    ],
+    typing.List[Document],
 ]:
     """
     Process documents and separate them into remote and local documents.
@@ -80,9 +90,7 @@ def prep_documents(
         except ValueError:
             return False
 
-    local_documents: typing.List[
-        typing.Tuple[str, typing.Tuple[typing.Union[str, None], typing.BinaryIO, str]]
-    ] = []
+    local_documents: typing.List[Document] = []
     remote_documents: typing.List[IngestRemoteDocument] = []
 
     for document in documents:
@@ -100,53 +108,7 @@ def prep_documents(
             )
             remote_documents.append(remote_document)
         elif is_valid_local_path(document.file_path):
-            expanded_path = os.path.expanduser(document.file_path)
-            file_name = os.path.basename(expanded_path)
-            mime_type = mimetypes.guess_type(file_name)[0] or "application/octet-stream"
-            file_type = MIME_TO_DOCUMENT_TYPE.get(mime_type, None)
-            if document.file_type:
-                file_type = document.file_type
-                mime_type = DOCUMENT_TYPE_TO_MIME.get(
-                    document.file_type, "application/octet-stream"
-                )
-
-            if document.file_name:
-                file_name = document.file_name
-
-            try:
-                local_documents.append(
-                    (
-                        "blob",
-                        (
-                            file_name,
-                            open(expanded_path, "rb"),
-                            mime_type,
-                        ),
-                    )
-                )
-            except Exception as e:
-                raise ValueError(f"Error reading file {expanded_path}: {e}")
-
-            metadata = {
-                "bucketId": document.bucket_id,
-                "fileName": file_name,
-                "fileType": file_type,
-            }
-            if document.process_level:
-                metadata["processLevel"] = document.process_level
-            if document.search_data:
-                metadata["searchData"] = document.search_data
-
-            local_documents.append(
-                (
-                    "metadata",
-                    (
-                        f"data.json",
-                        io.BytesIO(json.dumps(metadata).encode("utf-8")),
-                        "application/json",
-                    ),
-                )
-            )
+            local_documents.append(document)
         else:
             raise ValueError(f"Invalid file path: {document.file_path}")
 
@@ -158,6 +120,7 @@ class GroundX(GroundXBase):
         self,
         *,
         documents: typing.Sequence[Document],
+        upload_api: typing.Optional[str] = "https://api.eyelevel.ai/upload/file",
         request_options: typing.Optional[RequestOptions] = None,
     ) -> IngestResponse:
         """
@@ -167,6 +130,10 @@ class GroundX(GroundXBase):
         ----------
         documents : typing.Sequence[Document]
 
+        # an endpoint that accepts 'name' and 'type' query params
+        # and returns a presigned URL in a JSON dictionary with key 'URL'
+        upload_api : typing.Optional[str]
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -200,61 +167,39 @@ class GroundX(GroundXBase):
             raise ValueError("Documents must all be either local or remote, not a mix.")
 
         if len(remote_documents) > 0:
+            if len(remote_documents) > MAX_BATCH_SIZE:
+                raise ValueError("You have sent too many documents in this request")
+
             return self.documents.ingest_remote(
                 documents=remote_documents,
                 request_options=request_options,
             )
 
-        timeout = self._client_wrapper.get_timeout()
-        headers = self._client_wrapper.get_headers()
-        base_url = self._client_wrapper.get_base_url().rstrip("/")
-        follow_redirects = getattr(
-            self._client_wrapper.httpx_client, "follow_redirects", True
-        )
+        if len(local_documents) > MAX_BATCH_SIZE:
+            raise ValueError("You have sent too many documents in this request")
 
-        url = f"{base_url}/v1/ingest/documents/local"
-        _response = requests.post(
-            url,
-            files=local_documents,
-            headers=headers,
-            timeout=timeout,
-            allow_redirects=follow_redirects,
-        )
+        if len(local_documents) == 0:
+            raise ValueError("No valid documents were provided")
 
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    IngestResponse,
-                    parse_obj_as(
-                        type_=IngestResponse, # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 400:
-                raise BadRequestError(
-                    typing.cast(
-                        typing.Optional[typing.Any],
-                        parse_obj_as(
-                            type_=typing.Optional[typing.Any], # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            if _response.status_code == 401:
-                raise UnauthorizedError(
-                    typing.cast(
-                        typing.Optional[typing.Any],
-                        parse_obj_as(
-                            type_=typing.Optional[typing.Any], # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
+        docs: typing.List[IngestRemoteDocument] = []
+        for d in local_documents:
+            url = self._upload_file(upload_api, Path(os.path.expanduser(d.file_path)))
+
+            docs.append(
+                IngestRemoteDocument(
+                    bucket_id=d.bucket_id,
+                    file_name=d.file_name,
+                    file_type=d.file_type,
+                    process_level=d.process_level,
+                    search_data=d.search_data,
+                    source_url=url,
                 )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
+            )
 
-        raise ApiError(status_code=_response.status_code, body=_response_json)
+        return self.documents.ingest_remote(
+            documents=docs,
+            request_options=request_options,
+        )
 
     def ingest_directory(
         self,
@@ -275,7 +220,7 @@ class GroundX(GroundXBase):
         batch_size : type.Optional[int]
 
         # an endpoint that accepts 'name' and 'type' query params
-        # and returns a presigned URL
+        # and returns a presigned URL in a JSON dictionary with key 'URL'
         upload_api : typing.Optional[str]
 
         request_options : typing.Optional[RequestOptions]
@@ -300,13 +245,6 @@ class GroundX(GroundXBase):
         )
         """
 
-        def get_presigned_url(endpoint, file_name, file_extension) -> typing.Dict[str, typing.Any]:
-            params = {"name": file_name, "type": file_extension}
-            response = requests.get(endpoint, params=params)
-            response.raise_for_status()
-
-            return response.json()
-
         def is_valid_local_directory(path: str) -> bool:
             expanded_path = os.path.expanduser(path)
             return os.path.isdir(expanded_path)
@@ -323,80 +261,7 @@ class GroundX(GroundXBase):
                 )
             ]
 
-            return matched_files
-
-        def strip_query_params(url: str) -> str:
-            parsed = urlparse(url)
-            clean_url = urlunparse((parsed.scheme, parsed.netloc, parsed.path, "", "", ""))
-            return clean_url
-
-        def _upload_file_batch(bucket_id, batch, upload_api, request_options, pbar):
-            docs = []
-
-            progress = len(batch)
-            for file in batch:
-                url = upload_file(upload_api, file)
-                docs.append(
-                    Document(
-                        bucket_id=bucket_id,
-                        file_path=url,
-                    ),
-                )
-                pbar.update(0.25)
-                progress -= 0.25
-
-            if docs:
-                ingest = self.ingest(documents=docs, request_options=request_options)
-
-                completed_files = set()
-
-                while (
-                    ingest is not None
-                    and ingest.ingest.status not in ["complete", "error", "cancelled"]
-                ):
-                    time.sleep(3)
-                    ingest = self.documents.get_processing_status_by_id(ingest.ingest.process_id)
-
-                    if ingest.ingest.progress and ingest.ingest.progress.processing:
-                        for doc in ingest.ingest.progress.processing.documents:
-                            if doc.status == "complete" and doc.document_id not in completed_files:
-                                pbar.update(0.75)
-                                progress -= 0.75
-
-                if ingest.ingest.status in ["error", "cancelled"]:
-                    raise ValueError(f"Ingest failed with status: {ingest.ingest.status}")
-
-            if progress > 0:
-                pbar.update(progress)
-
-        def upload_file(endpoint, file_path) -> str:
-            file_name = os.path.basename(file_path)
-            file_extension = os.path.splitext(file_name)[1][1:].lower()
-
-            presigned_info = get_presigned_url(endpoint, file_name, file_extension)
-
-            upload_url = presigned_info["URL"]
-            headers = presigned_info.get("Header", {})
-            method = presigned_info.get("Method", "PUT").upper()
-
-            for key, value in headers.items():
-                if isinstance(value, list):
-                    headers[key] = value[0]
-
-            with open(file_path, "rb") as f:
-                file_data = f.read()
-
-            if method == "PUT":
-                upload_response = requests.put(upload_url, data=file_data, headers=headers)
-            else:
-                raise ValueError(f"Unsupported HTTP method: {method}")
-
-            if upload_response.status_code not in (200, 201):
-                raise Exception(
-                    f"Upload failed: {upload_response.status_code} - {upload_response.text}"
-                )
-
-            return strip_query_params(upload_url)
+            return matched_files
 
         if bucket_id < 1:
             raise ValueError(f"Invalid bucket_id: {bucket_id}")
@@ -419,7 +284,7 @@ class GroundX(GroundXBase):
                 file_size = file.stat().st_size
 
                 if (current_batch_size + file_size > MAX_BATCH_SIZE_BYTES) or (len(current_batch) >= n):
-                    _upload_file_batch(bucket_id, current_batch, upload_api, request_options, pbar)
+                    self._upload_file_batch(bucket_id, current_batch, upload_api, request_options, pbar)
                     current_batch = []
                     current_batch_size = 0
 
@@ -427,7 +292,89 @@ class GroundX(GroundXBase):
                 current_batch_size += file_size
 
             if current_batch:
-                _upload_file_batch(bucket_id, current_batch, upload_api, request_options, pbar)
+                self._upload_file_batch(bucket_id, current_batch, upload_api, request_options, pbar)
+
+    def _upload_file(
+        self,
+        endpoint,
+        file_path,
+    ):
+        file_name = os.path.basename(file_path)
+        file_extension = os.path.splitext(file_name)[1][1:].lower()
+
+        presigned_info = get_presigned_url(endpoint, file_name, file_extension)
+
+        upload_url = presigned_info["URL"]
+        headers = presigned_info.get("Header", {})
+        method = presigned_info.get("Method", "PUT").upper()
+
+        for key, value in headers.items():
+            if isinstance(value, list):
+                headers[key] = value[0]
+
+        try:
+            with open(file_path, "rb") as f:
+                file_data = f.read()
+        except Exception as e:
+            raise ValueError(f"Error reading file {file_path}: {e}")
+
+        if method == "PUT":
+            upload_response = requests.put(upload_url, data=file_data, headers=headers)
+        else:
+            raise ValueError(f"Unsupported HTTP method: {method}")
+
+        if upload_response.status_code not in (200, 201):
+            raise Exception(
+                f"Upload failed: {upload_response.status_code} - {upload_response.text}"
+            )
+
+        return strip_query_params(upload_url)
+
+    def _upload_file_batch(
+        self,
+        bucket_id,
+        batch,
+        upload_api,
+        request_options,
+        pbar,
+    ):
+        docs = []
+
+        progress = len(batch)
+        for file in batch:
+            url = self._upload_file(upload_api, file)
+            docs.append(
+                Document(
+                    bucket_id=bucket_id,
+                    file_path=url,
+                ),
+            )
+            pbar.update(0.25)
+            progress -= 0.25
+
+        if docs:
+            ingest = self.ingest(documents=docs, request_options=request_options)
+
+            completed_files = set()
+
+            while (
+                ingest is not None
+                and ingest.ingest.status not in ["complete", "error", "cancelled"]
+            ):
+                time.sleep(3)
+                ingest = self.documents.get_processing_status_by_id(ingest.ingest.process_id)
+
+                if ingest.ingest.progress and ingest.ingest.progress.processing:
+                    for doc in ingest.ingest.progress.processing.documents:
+                        if doc.status == "complete" and doc.document_id not in completed_files:
+                            pbar.update(0.75)
+                            progress -= 0.75
+
+            if ingest.ingest.status in ["error", "cancelled"]:
+                raise ValueError(f"Ingest failed with status: {ingest.ingest.status}")
+
+        if progress > 0:
+            pbar.update(progress)
 
 
 
@@ -436,6 +383,7 @@ class AsyncGroundX(AsyncGroundXBase):
         self,
         *,
         documents: typing.Sequence[Document],
+        upload_api: str = "https://api.eyelevel.ai/upload/file",
         request_options: typing.Optional[RequestOptions] = None,
     ) -> IngestResponse:
         """
@@ -445,6 +393,10 @@ class AsyncGroundX(AsyncGroundXBase):
         ----------
         documents : typing.Sequence[Document]
 
+        # an endpoint that accepts 'name' and 'type' query params
+        # and returns a presigned URL in a JSON dictionary with key 'URL'
+        upload_api : typing.Optional[str]
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -483,49 +435,72 @@ class AsyncGroundX(AsyncGroundXBase):
             raise ValueError("Documents must all be either local or remote, not a mix.")
 
         if len(remote_documents) > 0:
+            if len(remote_documents) > MAX_BATCH_SIZE:
+                raise ValueError("You have sent too many documents in this request")
+
             return await self.documents.ingest_remote(
                 documents=remote_documents,
                 request_options=request_options,
             )
 
-        timeout = self._client_wrapper.get_timeout()
-        headers = self._client_wrapper.get_headers()
-        base_url = self._client_wrapper.get_base_url().rstrip("/")
+        if len(local_documents) > MAX_BATCH_SIZE:
+            raise ValueError("You have sent too many documents in this request")
+
+        if len(local_documents) == 0:
+            raise ValueError("No valid documents were provided")
 
-        url = f"{base_url}/v1/ingest/documents/local"
+        docs: typing.List[IngestRemoteDocument] = []
+        for d in local_documents:
+            url = self._upload_file(upload_api, Path(os.path.expanduser(d.file_path)))
+
+            docs.append(
+                IngestRemoteDocument(
+                    bucket_id=d.bucket_id,
+                    file_name=d.file_name,
+                    file_type=d.file_type,
+                    process_level=d.process_level,
+                    search_data=d.search_data,
+                    source_url=url,
+                )
+            )
+
+        return await self.documents.ingest_remote(
+            documents=docs,
+            request_options=request_options,
+        )
+
+    def _upload_file(
+        self,
+        endpoint,
+        file_path,
+    ):
+        file_name = os.path.basename(file_path)
+        file_extension = os.path.splitext(file_name)[1][1:].lower()
+
+        presigned_info = get_presigned_url(endpoint, file_name, file_extension)
+
+        upload_url = presigned_info["URL"]
+        headers = presigned_info.get("Header", {})
+        method = presigned_info.get("Method", "PUT").upper()
+
+        for key, value in headers.items():
+            if isinstance(value, list):
+                headers[key] = value[0]
 
         try:
-            async with aiohttp.ClientSession() as session:
-                data = aiohttp.FormData()
-                for field_name, (file_name, file_obj, content_type) in local_documents:
-                    data.add_field(
-                        name=field_name,
-                        value=file_obj,
-                        filename=file_name,
-                        content_type=content_type,
-                    )
+            with open(file_path, "rb") as f:
+                file_data = f.read()
+        except Exception as e:
+            raise ValueError(f"Error reading file {file_path}: {e}")
 
-            async with session.post(
-                url, data=data, headers=headers, timeout=timeout
-            ) as response:
-                if 200 <= response.status < 300:
-                    response_data = await response.json()
-                    return typing.cast(
-                        IngestResponse,
-                        parse_obj_as(
-                            type_=IngestResponse, # type: ignore
-                            object_=response_data,
-                        ),
-                    )
-                if response.status == 400:
-                    raise BadRequestError(await response.json())
-                if response.status == 401:
-                    raise UnauthorizedError(await response.json())
-
-                raise ApiError(
-                    status_code=response.status, body=await response.text()
-                )
-        except TimeoutError:
-            raise ApiError(status_code=408, body="Request timed out")
-        except aiohttp.ClientError as e:
-            raise ApiError(status_code=500, body=str(e))
+        if method == "PUT":
+            upload_response = requests.put(upload_url, data=file_data, headers=headers)
+        else:
+            raise ValueError(f"Unsupported HTTP method: {method}")
+
+        if upload_response.status_code not in (200, 201):
+            raise Exception(
+                f"Upload failed: {upload_response.status_code} - {upload_response.text}"
+            )
+
+        return strip_query_params(upload_url)
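
Taken together, the groundx/ingest.py hunks above replace the old multipart POST to /v1/ingest/documents/local: in 2.2.8 each local file is PUT to a presigned URL fetched from upload_api (an endpoint that accepts 'name' and 'type' query params and returns JSON with a 'URL' key, plus optional 'Header' and 'Method' entries), and the query-stripped URL is then handed to documents.ingest_remote. A minimal usage sketch against the new signature, assuming the package's usual top-level exports; the API key, bucket id, and file path below are placeholders:

    from groundx import Document, GroundX

    client = GroundX(api_key="YOUR_API_KEY")

    # Local files are first PUT to a presigned URL (upload_api defaults to
    # https://api.eyelevel.ai/upload/file), then ingested as remote documents.
    response = client.ingest(
        documents=[
            Document(
                bucket_id=1,                    # placeholder bucket id
                file_path="~/docs/report.pdf",  # expanded via os.path.expanduser
            )
        ],
    )

    # IngestResponse carries the process id used when polling ingest status.
    print(response.ingest.process_id)

Note the new guards in both the sync and async clients: a single ingest call is capped at MAX_BATCH_SIZE (50) documents and raises if no valid documents are provided; larger sets go through ingest_directory, which batches uploads via _upload_file_batch and polls get_processing_status_by_id until the ingest completes.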

groundx-2.2.7.dist-info/METADATA → groundx-2.2.8.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: groundx
-Version: 2.2.7
+Version: 2.2.8
 Summary:
 License: MIT
 Requires-Python: >=3.8,<4.0

groundx-2.2.7.dist-info/RECORD → groundx-2.2.8.dist-info/RECORD
@@ -4,7 +4,7 @@ groundx/buckets/client.py,sha256=4jlc9vfIult1mMJ4FZW4_KFJybZPStZt1FUplIgrxbU,239
 groundx/client.py,sha256=dIW9OyrMyfC1N7HSxRrHh0w_8rJ8osNUOPdYD6ueQ6g,6515
 groundx/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 groundx/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-groundx/core/client_wrapper.py,sha256=Bhc6L2UfeJoET17u-IIW6OWHD5GwdYaita2HNWDJjr4,1802
+groundx/core/client_wrapper.py,sha256=DckEncGF_W3v8kguKAXWcn2um2B9k-nkWvKmH0HoyGQ,1802
 groundx/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 groundx/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 groundx/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -26,7 +26,7 @@ groundx/groups/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 groundx/groups/client.py,sha256=bytQRh9m7e4vIuYHb7dD1kCTQZvyBxedCqGnmmLqrsI,35237
 groundx/health/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 groundx/health/client.py,sha256=fcTa21RWPyBuT77PQ0EncC6rBaW_DrYlRvudy9-0H58,7545
-groundx/ingest.py,sha256=RTgmeg_4cEaZynSEyf-3ArKGBcnhbcZhJl7BAeUeAMU,18187
+groundx/ingest.py,sha256=mQB__GQmIDP6W5jGhmjORXh2o6HHTseHiahZz2Es-tM,16119
 groundx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 groundx/search/__init__.py,sha256=RagVzjShP33mDg9o4N3kGzV0egL1RYNjCpXPE8VzMYE,145
 groundx/search/client.py,sha256=zrrqFy0HowDUYPsMU4nfvDV2RgmkEQ4E8WYNktu3xcs,18684
@@ -81,7 +81,7 @@ groundx/types/subscription_detail.py,sha256=WNfUw2EMVECIvNYcV2s51zZ6T3Utc4zYXw63
 groundx/types/subscription_detail_meters.py,sha256=lBa8-1QlMVHjr5RLGqhiTKnD1KMM0AAHTWvz9TVtG8w,830
 groundx/types/website_source.py,sha256=3WeRCiilNKKBTfhwgjo3jbcVI3vLTeM-KxI6dVzpg9o,1578
 groundx/version.py,sha256=1yVogKaq260fQfckM2RYN2144SEw0QROsZW8ICtkG4U,74
-groundx-2.2.7.dist-info/LICENSE,sha256=dFE6nY1bHnSn6NqmdlghlU1gQqLqYNphrceGVehSa7o,1065
-groundx-2.2.7.dist-info/METADATA,sha256=Gx6smhve9G7ECJbxQBrw3pJ_LosNgtehM3VETuW0c9I,5173
-groundx-2.2.7.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-groundx-2.2.7.dist-info/RECORD,,
+groundx-2.2.8.dist-info/LICENSE,sha256=dFE6nY1bHnSn6NqmdlghlU1gQqLqYNphrceGVehSa7o,1065
+groundx-2.2.8.dist-info/METADATA,sha256=Q67rLMlsO72-NH2qvK8Bw4ciSTDS5C4LNoPiBk8UWyI,5173
+groundx-2.2.8.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+groundx-2.2.8.dist-info/RECORD,,