groundx 2.0.15__py3-none-any.whl → 2.0.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
groundx/__init__.py CHANGED
@@ -7,6 +7,7 @@ from .types import (
     BucketResponse,
     BucketUpdateDetail,
     BucketUpdateResponse,
+    CrawlWebsiteSource,
     CustomerDetail,
     CustomerResponse,
     DocumentDetail,
@@ -21,6 +22,7 @@ from .types import (
     HealthResponseHealth,
     HealthService,
     HealthServiceStatus,
+    IngestLocalDocument,
     IngestRemoteDocument,
     IngestResponse,
     IngestResponseIngest,
@@ -45,7 +47,6 @@ from .types import (
 from .errors import BadRequestError, UnauthorizedError
 from . import buckets, customer, documents, groups, health, search
 from .client import AsyncGroundX, GroundX
-from .documents import DocumentsIngestLocalRequestFilesItem, WebsiteCrawlRequestWebsitesItem
 from .environment import GroundXEnvironment
 from .search import SearchContentRequestId
 from .version import __version__
@@ -59,6 +60,7 @@ __all__ = [
     "BucketResponse",
     "BucketUpdateDetail",
     "BucketUpdateResponse",
+    "CrawlWebsiteSource",
     "CustomerDetail",
     "CustomerResponse",
     "DocumentDetail",
@@ -66,7 +68,6 @@ __all__ = [
     "DocumentLookupResponse",
     "DocumentResponse",
     "DocumentType",
-    "DocumentsIngestLocalRequestFilesItem",
     "GroundX",
     "GroundXEnvironment",
     "GroupDetail",
@@ -76,6 +77,7 @@ __all__ = [
     "HealthResponseHealth",
     "HealthService",
     "HealthServiceStatus",
+    "IngestLocalDocument",
     "IngestRemoteDocument",
     "IngestResponse",
     "IngestResponseIngest",
@@ -98,7 +100,6 @@ __all__ = [
     "SubscriptionDetail",
     "SubscriptionDetailMeters",
     "UnauthorizedError",
-    "WebsiteCrawlRequestWebsitesItem",
     "__version__",
     "buckets",
     "customer",
groundx/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "groundx",
-            "X-Fern-SDK-Version": "2.0.15",
+            "X-Fern-SDK-Version": "2.0.16",
         }
         headers["X-API-Key"] = self.api_key
         return headers
groundx/documents/__init__.py CHANGED
@@ -1,5 +1,2 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from .types import DocumentsIngestLocalRequestFilesItem, WebsiteCrawlRequestWebsitesItem
-
-__all__ = ["DocumentsIngestLocalRequestFilesItem", "WebsiteCrawlRequestWebsitesItem"]
groundx/documents/client.py CHANGED
@@ -11,8 +11,8 @@ from ..errors.bad_request_error import BadRequestError
 from ..errors.unauthorized_error import UnauthorizedError
 from json.decoder import JSONDecodeError
 from ..core.api_error import ApiError
-from .types.documents_ingest_local_request_files_item import DocumentsIngestLocalRequestFilesItem
-from .types.website_crawl_request_websites_item import WebsiteCrawlRequestWebsitesItem
+from ..types.ingest_local_document import IngestLocalDocument
+from ..types.crawl_website_source import CrawlWebsiteSource
 from ..types.process_status_response import ProcessStatusResponse
 from ..core.jsonable_encoder import jsonable_encoder
 from ..types.sort import Sort
@@ -122,10 +122,7 @@ class DocumentsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     def ingest_local(
-        self,
-        *,
-        files: typing.List[DocumentsIngestLocalRequestFilesItem],
-        request_options: typing.Optional[RequestOptions] = None,
+        self, *, files: typing.List[IngestLocalDocument], request_options: typing.Optional[RequestOptions] = None
     ) -> IngestResponse:
         """
         Upload documents hosted on a local file system for ingestion into a GroundX bucket.
@@ -134,7 +131,7 @@
 
         Parameters
         ----------
-        files : typing.List[DocumentsIngestLocalRequestFilesItem]
+        files : typing.List[IngestLocalDocument]
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -146,18 +143,17 @@
 
         Examples
         --------
-        from groundx import GroundX
-        from groundx.documents import DocumentsIngestLocalRequestFilesItem
+        from groundx import GroundX, IngestLocalDocument
 
         client = GroundX(
             api_key="YOUR_API_KEY",
         )
         client.documents.ingest_local(
             files=[
-                DocumentsIngestLocalRequestFilesItem(
-                    bucket_id=1,
-                    file_data="fileData",
-                    file_name="fileName",
+                IngestLocalDocument(
+                    bucket_id=1234,
+                    file_data="binary data here",
+                    file_name="my_file.txt",
                     file_type="txt",
                 )
             ],
@@ -208,10 +204,7 @@ class DocumentsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     def crawl_website(
-        self,
-        *,
-        websites: typing.Sequence[WebsiteCrawlRequestWebsitesItem],
-        request_options: typing.Optional[RequestOptions] = None,
+        self, *, websites: typing.Sequence[CrawlWebsiteSource], request_options: typing.Optional[RequestOptions] = None
     ) -> IngestResponse:
         """
         Upload the content of a publicly accessible website for ingestion into a GroundX bucket. This is done by following links within a specified URL, recursively, up to a specified depth or number of pages.
@@ -220,7 +213,7 @@
 
         Parameters
         ----------
-        websites : typing.Sequence[WebsiteCrawlRequestWebsitesItem]
+        websites : typing.Sequence[CrawlWebsiteSource]
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -232,15 +225,14 @@
 
         Examples
        --------
-        from groundx import GroundX
-        from groundx.documents import WebsiteCrawlRequestWebsitesItem
+        from groundx import CrawlWebsiteSource, GroundX
 
        client = GroundX(
            api_key="YOUR_API_KEY",
        )
        client.documents.crawl_website(
            websites=[
-                WebsiteCrawlRequestWebsitesItem(
+                CrawlWebsiteSource(
                    bucket_id=123,
                    source_url="https://my.website.com",
                )
@@ -252,7 +244,7 @@
             method="POST",
             json={
                 "websites": convert_and_respect_annotation_metadata(
-                    object_=websites, annotation=typing.Sequence[WebsiteCrawlRequestWebsitesItem], direction="write"
+                    object_=websites, annotation=typing.Sequence[CrawlWebsiteSource], direction="write"
                 ),
             },
             headers={
@@ -871,10 +863,7 @@ class AsyncDocumentsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     async def ingest_local(
-        self,
-        *,
-        files: typing.List[DocumentsIngestLocalRequestFilesItem],
-        request_options: typing.Optional[RequestOptions] = None,
+        self, *, files: typing.List[IngestLocalDocument], request_options: typing.Optional[RequestOptions] = None
     ) -> IngestResponse:
         """
         Upload documents hosted on a local file system for ingestion into a GroundX bucket.
@@ -883,7 +872,7 @@
 
         Parameters
         ----------
-        files : typing.List[DocumentsIngestLocalRequestFilesItem]
+        files : typing.List[IngestLocalDocument]
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -897,8 +886,7 @@
         --------
         import asyncio
 
-        from groundx import AsyncGroundX
-        from groundx.documents import DocumentsIngestLocalRequestFilesItem
+        from groundx import AsyncGroundX, IngestLocalDocument
 
         client = AsyncGroundX(
             api_key="YOUR_API_KEY",
@@ -908,10 +896,10 @@
         async def main() -> None:
             await client.documents.ingest_local(
                 files=[
-                    DocumentsIngestLocalRequestFilesItem(
-                        bucket_id=1,
-                        file_data="fileData",
-                        file_name="fileName",
+                    IngestLocalDocument(
+                        bucket_id=1234,
+                        file_data="binary data here",
+                        file_name="my_file.txt",
                         file_type="txt",
                     )
                 ],
@@ -965,10 +953,7 @@
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     async def crawl_website(
-        self,
-        *,
-        websites: typing.Sequence[WebsiteCrawlRequestWebsitesItem],
-        request_options: typing.Optional[RequestOptions] = None,
+        self, *, websites: typing.Sequence[CrawlWebsiteSource], request_options: typing.Optional[RequestOptions] = None
     ) -> IngestResponse:
         """
         Upload the content of a publicly accessible website for ingestion into a GroundX bucket. This is done by following links within a specified URL, recursively, up to a specified depth or number of pages.
@@ -977,7 +962,7 @@
 
         Parameters
         ----------
-        websites : typing.Sequence[WebsiteCrawlRequestWebsitesItem]
+        websites : typing.Sequence[CrawlWebsiteSource]
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -991,8 +976,7 @@
         --------
         import asyncio
 
-        from groundx import AsyncGroundX
-        from groundx.documents import WebsiteCrawlRequestWebsitesItem
+        from groundx import AsyncGroundX, CrawlWebsiteSource
 
         client = AsyncGroundX(
             api_key="YOUR_API_KEY",
@@ -1002,7 +986,7 @@
         async def main() -> None:
             await client.documents.crawl_website(
                 websites=[
-                    WebsiteCrawlRequestWebsitesItem(
+                    CrawlWebsiteSource(
                         bucket_id=123,
                         source_url="https://my.website.com",
                     )
@@ -1017,7 +1001,7 @@
             method="POST",
             json={
                 "websites": convert_and_respect_annotation_metadata(
-                    object_=websites, annotation=typing.Sequence[WebsiteCrawlRequestWebsitesItem], direction="write"
+                    object_=websites, annotation=typing.Sequence[CrawlWebsiteSource], direction="write"
                 ),
             },
             headers={
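The updated async docstring examples end inside `main()`; the hunk context shows `import asyncio` but not the runner. For reference, a self-contained version of the new crawl_website example might look like this (a sketch assembled from the diff; the closing `asyncio.run(main())` is an assumed completion, not shown in the package source above):

```python
import asyncio

from groundx import AsyncGroundX, CrawlWebsiteSource

client = AsyncGroundX(
    api_key="YOUR_API_KEY",
)

async def main() -> None:
    # Crawl a public site into bucket 123, as in the updated docstring example.
    await client.documents.crawl_website(
        websites=[
            CrawlWebsiteSource(
                bucket_id=123,
                source_url="https://my.website.com",
            )
        ],
    )

# Assumed runner; the diff's docstring example stops before this line.
asyncio.run(main())
```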
groundx/types/__init__.py CHANGED
@@ -6,6 +6,7 @@ from .bucket_list_response import BucketListResponse
 from .bucket_response import BucketResponse
 from .bucket_update_detail import BucketUpdateDetail
 from .bucket_update_response import BucketUpdateResponse
+from .crawl_website_source import CrawlWebsiteSource
 from .customer_detail import CustomerDetail
 from .customer_response import CustomerResponse
 from .document_detail import DocumentDetail
@@ -20,6 +21,7 @@ from .health_response import HealthResponse
 from .health_response_health import HealthResponseHealth
 from .health_service import HealthService
 from .health_service_status import HealthServiceStatus
+from .ingest_local_document import IngestLocalDocument
 from .ingest_remote_document import IngestRemoteDocument
 from .ingest_response import IngestResponse
 from .ingest_response_ingest import IngestResponseIngest
@@ -48,6 +50,7 @@ __all__ = [
     "BucketResponse",
     "BucketUpdateDetail",
     "BucketUpdateResponse",
+    "CrawlWebsiteSource",
     "CustomerDetail",
     "CustomerResponse",
     "DocumentDetail",
@@ -62,6 +65,7 @@ __all__ = [
     "HealthResponseHealth",
     "HealthService",
     "HealthServiceStatus",
+    "IngestLocalDocument",
     "IngestRemoteDocument",
     "IngestResponse",
     "IngestResponseIngest",
groundx/documents/types/website_crawl_request_websites_item.py → groundx/types/crawl_website_source.py RENAMED
@@ -1,14 +1,14 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from ...core.pydantic_utilities import UniversalBaseModel
+from ..core.pydantic_utilities import UniversalBaseModel
 import typing_extensions
-from ...core.serialization import FieldMetadata
+from ..core.serialization import FieldMetadata
 import pydantic
 import typing
-from ...core.pydantic_utilities import IS_PYDANTIC_V2
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
 
 
-class WebsiteCrawlRequestWebsitesItem(UniversalBaseModel):
+class CrawlWebsiteSource(UniversalBaseModel):
     bucket_id: typing_extensions.Annotated[int, FieldMetadata(alias="bucketId")] = pydantic.Field()
     """
     the bucketId of the bucket which this website will be ingested to.
groundx/documents/types/documents_ingest_local_request_files_item.py → groundx/types/ingest_local_document.py RENAMED
@@ -1,15 +1,15 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from ...core.pydantic_utilities import UniversalBaseModel
+from ..core.pydantic_utilities import UniversalBaseModel
 import typing_extensions
-from ...core.serialization import FieldMetadata
+from ..core.serialization import FieldMetadata
 import pydantic
-from ...types.document_type import DocumentType
+from .document_type import DocumentType
 import typing
-from ...core.pydantic_utilities import IS_PYDANTIC_V2
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
 
 
-class DocumentsIngestLocalRequestFilesItem(UniversalBaseModel):
+class IngestLocalDocument(UniversalBaseModel):
     bucket_id: typing_extensions.Annotated[int, FieldMetadata(alias="bucketId")] = pydantic.Field()
     """
     the bucketId of the bucket which this local file will be ingested to.
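Both renamed models keep their camelCase wire aliases via `FieldMetadata(alias="bucketId")`, so the snake_case constructor arguments shown in the examples still serialize to the API's field names. A minimal sketch, assuming the standard pydantic by-alias dump that `UniversalBaseModel` builds on; the `sourceUrl` alias is an assumption by analogy with `bucketId`:

```python
from groundx import CrawlWebsiteSource

site = CrawlWebsiteSource(bucket_id=123, source_url="https://my.website.com")

# Assumption: UniversalBaseModel exposes pydantic's by-alias dump; the exact
# helper may differ between pydantic v1 and v2 (hence the IS_PYDANTIC_V2 import).
print(site.dict(by_alias=True))
# expected: {'bucketId': 123, 'sourceUrl': 'https://my.website.com'}
```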
groundx-2.0.15.dist-info/METADATA → groundx-2.0.16.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: groundx
-Version: 2.0.15
+Version: 2.0.16
 Summary:
 License: MIT
 Requires-Python: >=3.8,<4.0
groundx-2.0.15.dist-info/RECORD → groundx-2.0.16.dist-info/RECORD RENAMED
@@ -1,10 +1,10 @@
-groundx/__init__.py,sha256=k3hsC7MSufSELpIPOb1-G3Y_-YM2GV61ADC-EwSaPvw,2925
+groundx/__init__.py,sha256=22BLAf5omcgumWGCWHDb8o-ha8TnNbkO8LcEvpcYU6A,2851
 groundx/buckets/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 groundx/buckets/client.py,sha256=TofNrkej1AC_-FU5rf_y8KG8ubFUpHtLa8PQ7rqax6E,26537
 groundx/client.py,sha256=Q1Kw0z6K-z-ShhNyuuPe5fYonM9M2I_55-ukUrUWk1U,6507
 groundx/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 groundx/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-groundx/core/client_wrapper.py,sha256=66tcEWEACDxQIXDiX5s5Kn5XdmYZETM2b8YUiM7Spgs,1803
+groundx/core/client_wrapper.py,sha256=wcxyqdGn-zWPv0Bsf2g9MQ4dLpQZZXi_P-jQmMCcODM,1803
 groundx/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 groundx/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 groundx/core/http_client.py,sha256=siUQ6UV0ARZALlxubqWSSAAPC9B4VW8y6MGlHStfaeo,19552
@@ -16,11 +16,8 @@ groundx/core/request_options.py,sha256=h0QUNCFVdCW_7GclVySCAY2w4NhtXVBUCmHgmzaxp
 groundx/core/serialization.py,sha256=D9h_t-RQON3-CHWs1C4ESY9B-Yd5d-l5lnTLb_X896g,9601
 groundx/customer/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 groundx/customer/client.py,sha256=C_JANeDewRD1Kg-q7LPxdiOSWbYSTOiYlBYZLRYPI44,3467
-groundx/documents/__init__.py,sha256=-Z5Vjus4Uq29lfATYXWLh5zC9DvDMu-FLGWcm8chMdY,241
-groundx/documents/client.py,sha256=Ah3LPoJmo7H1vNb2b2xfQ6zRVmnrihdYJBhcriEgcEw,58935
-groundx/documents/types/__init__.py,sha256=fmIT0AisC1K0-EQIa1GDcYr70wpsi7QuXBeyqWiZD14,325
-groundx/documents/types/documents_ingest_local_request_files_item.py,sha256=EpD7TE1us1DAXdcPvI1li-AGUNpEy_f13bBXidmCAL8,1630
-groundx/documents/types/website_crawl_request_websites_item.py,sha256=6So4stWecfZYPbiQWg6-FgsfIqV4g2ujFXXgn70evNI,1597
+groundx/documents/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
+groundx/documents/client.py,sha256=hxChT8fhcSVp_y2zBam2gBd6gZvndhHEYE2UN1Mn7AE,58386
 groundx/environment.py,sha256=CInm1_DKtZ1mrxutmKb1qqv82P33r_S87hZD3Hc1VB0,159
 groundx/errors/__init__.py,sha256=-prNYsFd8xxM4va0vR1raZjcd10tllOJKyEWjX_pwdU,214
 groundx/errors/bad_request_error.py,sha256=_EbO8mWqN9kFZPvIap8qa1lL_EWkRcsZe1HKV9GDWJY,264
@@ -34,13 +31,14 @@ groundx/search/__init__.py,sha256=RagVzjShP33mDg9o4N3kGzV0egL1RYNjCpXPE8VzMYE,14
 groundx/search/client.py,sha256=10ifg9GyIwIZF13ULfCXF8iFIydq6H6QRDrGPDjpanw,19756
 groundx/search/types/__init__.py,sha256=fNFXQloPa1PHHO8VZim6KQNMA9N5EZtfSkissdxtY_c,165
 groundx/search/types/search_content_request_id.py,sha256=us7mYdzR0qPur_wR5I9BhHaLEzC5nLBRna6-xq4M1ec,128
-groundx/types/__init__.py,sha256=W5fN6ryfqki0p_bipBi7jKs6SXH2vVsrOe5ORcbngVY,3406
+groundx/types/__init__.py,sha256=yCuwY3aySGDKKYYbxFX_v0kyG_ePu1_M0ZryOwMZJVk,3567
 groundx/types/bounding_box_detail.py,sha256=51qcen326NTHY2ZqH1cFXut0_MCmk39EbLoDAwotdq4,1832
 groundx/types/bucket_detail.py,sha256=bQjCvfyWydjItmzNNTvH-iWxNDOggd7R7X1alFZzlEY,1511
 groundx/types/bucket_list_response.py,sha256=jC0NBsLCYDSwQrBzuW0g3PWFycjtKl8YRkKhic_-1DA,650
 groundx/types/bucket_response.py,sha256=E8V7H2_TVKdmMsGCBjwzdf2bg4rUjiXFnhXtVGVCqZQ,608
 groundx/types/bucket_update_detail.py,sha256=B4atQMDSXEdx7otcDbvgsrAHtXNz9swMnOsXRe25coc,738
 groundx/types/bucket_update_response.py,sha256=h5RJTEpc4WPI_C4sPvsJZo7IxKppnPR-I9VGEQryRlc,633
+groundx/types/crawl_website_source.py,sha256=K4-IoK__K4OekBwmJhClL2rZD1P4rFPR2-Ck02jP2uw,1581
 groundx/types/customer_detail.py,sha256=RNm0qXvKx6YvVmkVJZeCNIz7n8_siFMXJ_AGtH3i5Z0,1094
 groundx/types/customer_response.py,sha256=_RbuanXhCWQkCeQ0dkwPgsjNBoBgNpixiNfRpXcMew8,618
 groundx/types/document_detail.py,sha256=i1UfcQAGYo9v1HwrrpzQPw_O0qA7IOXwOUuPV1yU8nI,2323
@@ -55,6 +53,7 @@ groundx/types/health_response.py,sha256=3UpYL2IZb56tTo-fOpSU-0OTRyWgpYiB3pMU3sfj
 groundx/types/health_response_health.py,sha256=I0QeEljFp6l5LCJbCTArW031Om84egePgnGdtE6WXlI,632
 groundx/types/health_service.py,sha256=M1-h1EJSpAXw-j3pY-09_g_WKkO0spdj8e7pgPzSGf0,1083
 groundx/types/health_service_status.py,sha256=ugKJXlx8QGi83n_J6s1frFrW1hYfOn3Dlb_pPNexwMA,185
+groundx/types/ingest_local_document.py,sha256=2T1HXR2a-BDj5LEOTM98Sl4sgjVMWbVShQLn8MTR6QA,1602
 groundx/types/ingest_remote_document.py,sha256=xlPA4SYoUgoGXpxZhyORdezxIPGmr4wneav2ZEVmmOY,1683
 groundx/types/ingest_response.py,sha256=139rn8wpT44jlUzYXiy0r8XzN2U_OtdLltpSbRU0TyA,633
 groundx/types/ingest_response_ingest.py,sha256=8FKApYNvS6KFxEKm05pKpKJ0BAagxoE0cWeTt-qjm1g,781
@@ -76,7 +75,7 @@ groundx/types/sort_order.py,sha256=hfJkStz6zHf3iWQFaVLkNCZPdyj5JS7TsQlN4Ij8Q5A,1
 groundx/types/subscription_detail.py,sha256=WNfUw2EMVECIvNYcV2s51zZ6T3Utc4zYXw63bPaeM6U,768
 groundx/types/subscription_detail_meters.py,sha256=lBa8-1QlMVHjr5RLGqhiTKnD1KMM0AAHTWvz9TVtG8w,830
 groundx/version.py,sha256=1yVogKaq260fQfckM2RYN2144SEw0QROsZW8ICtkG4U,74
-groundx-2.0.15.dist-info/LICENSE,sha256=8dMPYAFBTA7O4DUxhrEKEks8CL2waCMYM6dHohW4xrI,1065
-groundx-2.0.15.dist-info/METADATA,sha256=7SUu2d02VHGaZCu06RSUl3dlZJ4Kuh-1Z3VBea6QNrg,5206
-groundx-2.0.15.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-groundx-2.0.15.dist-info/RECORD,,
+groundx-2.0.16.dist-info/LICENSE,sha256=8dMPYAFBTA7O4DUxhrEKEks8CL2waCMYM6dHohW4xrI,1065
+groundx-2.0.16.dist-info/METADATA,sha256=M0848SdVxMLZGlJo73ReJwhkVSU9R4WfkHPh3LkZo9k,5206
+groundx-2.0.16.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+groundx-2.0.16.dist-info/RECORD,,
groundx/documents/types/__init__.py DELETED
@@ -1,6 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from .documents_ingest_local_request_files_item import DocumentsIngestLocalRequestFilesItem
-from .website_crawl_request_websites_item import WebsiteCrawlRequestWebsitesItem
-
-__all__ = ["DocumentsIngestLocalRequestFilesItem", "WebsiteCrawlRequestWebsitesItem"]