groundx 2.3.6__tar.gz → 2.3.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. {groundx-2.3.6 → groundx-2.3.8}/PKG-INFO +1 -1
  2. {groundx-2.3.6 → groundx-2.3.8}/pyproject.toml +1 -1
  3. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/client_wrapper.py +2 -2
  4. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/documents/client.py +52 -6
  5. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/documents/raw_client.py +48 -2
  6. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/ingest.py +3 -0
  7. {groundx-2.3.6 → groundx-2.3.8}/LICENSE +0 -0
  8. {groundx-2.3.6 → groundx-2.3.8}/README.md +0 -0
  9. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/__init__.py +0 -0
  10. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/buckets/__init__.py +0 -0
  11. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/buckets/client.py +0 -0
  12. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/buckets/raw_client.py +0 -0
  13. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/client.py +0 -0
  14. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/__init__.py +0 -0
  15. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/api_error.py +0 -0
  16. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/datetime_utils.py +0 -0
  17. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/file.py +0 -0
  18. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/force_multipart.py +0 -0
  19. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/http_client.py +0 -0
  20. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/http_response.py +0 -0
  21. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/jsonable_encoder.py +0 -0
  22. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/pydantic_utilities.py +0 -0
  23. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/query_encoder.py +0 -0
  24. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/remove_none_from_dict.py +0 -0
  25. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/request_options.py +0 -0
  26. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/core/serialization.py +0 -0
  27. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/csv_splitter.py +0 -0
  28. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/customer/__init__.py +0 -0
  29. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/customer/client.py +0 -0
  30. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/customer/raw_client.py +0 -0
  31. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/documents/__init__.py +0 -0
  32. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/environment.py +0 -0
  33. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/errors/__init__.py +0 -0
  34. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/errors/bad_request_error.py +0 -0
  35. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/errors/unauthorized_error.py +0 -0
  36. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/groups/__init__.py +0 -0
  37. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/groups/client.py +0 -0
  38. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/groups/raw_client.py +0 -0
  39. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/health/__init__.py +0 -0
  40. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/health/client.py +0 -0
  41. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/health/raw_client.py +0 -0
  42. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/py.typed +0 -0
  43. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/search/__init__.py +0 -0
  44. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/search/client.py +0 -0
  45. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/search/raw_client.py +0 -0
  46. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/search/types/__init__.py +0 -0
  47. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/search/types/search_content_request_id.py +0 -0
  48. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/__init__.py +0 -0
  49. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/bounding_box_detail.py +0 -0
  50. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/bucket_detail.py +0 -0
  51. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/bucket_list_response.py +0 -0
  52. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/bucket_response.py +0 -0
  53. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/bucket_update_detail.py +0 -0
  54. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/bucket_update_response.py +0 -0
  55. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/customer_detail.py +0 -0
  56. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/customer_response.py +0 -0
  57. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/document.py +0 -0
  58. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/document_detail.py +0 -0
  59. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/document_list_response.py +0 -0
  60. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/document_local_ingest_request.py +0 -0
  61. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/document_lookup_response.py +0 -0
  62. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/document_response.py +0 -0
  63. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/document_type.py +0 -0
  64. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/group_detail.py +0 -0
  65. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/group_list_response.py +0 -0
  66. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/group_response.py +0 -0
  67. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/health_response.py +0 -0
  68. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/health_response_health.py +0 -0
  69. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/health_service.py +0 -0
  70. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/health_service_status.py +0 -0
  71. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_local_document.py +0 -0
  72. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_local_document_metadata.py +0 -0
  73. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_remote_document.py +0 -0
  74. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_response.py +0 -0
  75. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_status.py +0 -0
  76. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_status_light.py +0 -0
  77. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_status_progress.py +0 -0
  78. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_status_progress_cancelled.py +0 -0
  79. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_status_progress_complete.py +0 -0
  80. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_status_progress_errors.py +0 -0
  81. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/ingest_status_progress_processing.py +0 -0
  82. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/message_response.py +0 -0
  83. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/meter_detail.py +0 -0
  84. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/process_level.py +0 -0
  85. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/processes_status_response.py +0 -0
  86. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/processing_status.py +0 -0
  87. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/search_response.py +0 -0
  88. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/search_response_search.py +0 -0
  89. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/search_result_item.py +0 -0
  90. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/search_result_item_pages_item.py +0 -0
  91. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/sort.py +0 -0
  92. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/sort_order.py +0 -0
  93. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/subscription_detail.py +0 -0
  94. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/subscription_detail_meters.py +0 -0
  95. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/types/website_source.py +0 -0
  96. {groundx-2.3.6 → groundx-2.3.8}/src/groundx/version.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: groundx
-Version: 2.3.6
+Version: 2.3.8
 Summary:
 License: MIT
 Requires-Python: >=3.8,<4.0
pyproject.toml
@@ -3,7 +3,7 @@ name = "groundx"
 
 [tool.poetry]
 name = "groundx"
-version = "2.3.6"
+version = "2.3.8"
 description = ""
 readme = "README.md"
 authors = []
src/groundx/core/client_wrapper.py
@@ -14,10 +14,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "groundx/2.3.6",
+            "User-Agent": "groundx/2.3.8",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "groundx",
-            "X-Fern-SDK-Version": "2.3.6",
+            "X-Fern-SDK-Version": "2.3.8",
         }
         headers["X-API-Key"] = self.api_key
         return headers
src/groundx/documents/client.py
@@ -40,6 +40,8 @@ class DocumentsClient:
         self,
         *,
         documents: typing.Sequence[IngestRemoteDocument],
+        callback_url: typing.Optional[str] = OMIT,
+        callback_data: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> IngestResponse:
         """
@@ -51,6 +53,12 @@ class DocumentsClient:
         ----------
         documents : typing.Sequence[IngestRemoteDocument]
 
+        callback_url : typing.Optional[str]
+            An endpoint that will receive processing event updates as POST.
+
+        callback_data : typing.Optional[str]
+            A string that is returned, along with processing event updates, to the callback URL.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -77,7 +85,9 @@ class DocumentsClient:
             ],
         )
         """
-        _response = self._raw_client.ingest_remote(documents=documents, request_options=request_options)
+        _response = self._raw_client.ingest_remote(
+            documents=documents, callback_url=callback_url, callback_data=callback_data, request_options=request_options
+        )
         return _response.data
 
     def ingest_local(
@@ -124,7 +134,12 @@ class DocumentsClient:
         return _response.data
 
     def crawl_website(
-        self, *, websites: typing.Sequence[WebsiteSource], request_options: typing.Optional[RequestOptions] = None
+        self,
+        *,
+        websites: typing.Sequence[WebsiteSource],
+        callback_url: typing.Optional[str] = OMIT,
+        callback_data: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
     ) -> IngestResponse:
         """
         Upload the content of a publicly accessible website for ingestion into a GroundX bucket. This is done by following links within a specified URL, recursively, up to a specified depth or number of pages.
@@ -138,6 +153,12 @@ class DocumentsClient:
         ----------
         websites : typing.Sequence[WebsiteSource]
 
+        callback_url : typing.Optional[str]
+            The URL that will receive processing event updates.
+
+        callback_data : typing.Optional[str]
+            A string that is returned, along with processing event updates, to the callback URL.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -165,7 +186,9 @@ class DocumentsClient:
             ],
         )
         """
-        _response = self._raw_client.crawl_website(websites=websites, request_options=request_options)
+        _response = self._raw_client.crawl_website(
+            websites=websites, callback_url=callback_url, callback_data=callback_data, request_options=request_options
+        )
        return _response.data
 
     def list(
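
In practice, these DocumentsClient hunks mean callers of the synchronous SDK can now hand callback details straight to ingest_remote. A minimal sketch follows; the API key, bucket ID, URLs, and callback endpoint are placeholders, and the IngestRemoteDocument fields mirror the constructor calls in the ingest.py hunks further down this diff.

# Sketch only: exercising the new callback parameters on the synchronous client.
# The API key, bucket ID, URLs, and callback endpoint are placeholders.
from groundx import GroundX, IngestRemoteDocument

client = GroundX(api_key="YOUR_API_KEY")

response = client.documents.ingest_remote(
    documents=[
        IngestRemoteDocument(
            bucket_id=1234,
            file_name="sample.pdf",
            file_type="pdf",
            source_url="https://example.com/sample.pdf",
        )
    ],
    # New in 2.3.8: processing event updates are POSTed here, with callback_data echoed back.
    callback_url="https://example.com/groundx-callback",
    callback_data="my-correlation-id",
)
print(response)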
src/groundx/documents/client.py (AsyncDocumentsClient)
@@ -490,6 +513,8 @@ class AsyncDocumentsClient:
         self,
         *,
         documents: typing.Sequence[IngestRemoteDocument],
+        callback_url: typing.Optional[str] = OMIT,
+        callback_data: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> IngestResponse:
         """
@@ -501,6 +526,12 @@ class AsyncDocumentsClient:
         ----------
         documents : typing.Sequence[IngestRemoteDocument]
 
+        callback_url : typing.Optional[str]
+            An endpoint that will receive processing event updates as POST.
+
+        callback_data : typing.Optional[str]
+            A string that is returned, along with processing event updates, to the callback URL.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -535,7 +566,9 @@ class AsyncDocumentsClient:
 
         asyncio.run(main())
         """
-        _response = await self._raw_client.ingest_remote(documents=documents, request_options=request_options)
+        _response = await self._raw_client.ingest_remote(
+            documents=documents, callback_url=callback_url, callback_data=callback_data, request_options=request_options
+        )
         return _response.data
 
     async def ingest_local(
@@ -594,7 +627,12 @@ class AsyncDocumentsClient:
         return _response.data
 
     async def crawl_website(
-        self, *, websites: typing.Sequence[WebsiteSource], request_options: typing.Optional[RequestOptions] = None
+        self,
+        *,
+        websites: typing.Sequence[WebsiteSource],
+        callback_url: typing.Optional[str] = OMIT,
+        callback_data: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
     ) -> IngestResponse:
         """
         Upload the content of a publicly accessible website for ingestion into a GroundX bucket. This is done by following links within a specified URL, recursively, up to a specified depth or number of pages.
@@ -608,6 +646,12 @@ class AsyncDocumentsClient:
         ----------
         websites : typing.Sequence[WebsiteSource]
 
+        callback_url : typing.Optional[str]
+            The URL that will receive processing event updates.
+
+        callback_data : typing.Optional[str]
+            A string that is returned, along with processing event updates, to the callback URL.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -643,7 +687,9 @@ class AsyncDocumentsClient:
 
         asyncio.run(main())
         """
-        _response = await self._raw_client.crawl_website(websites=websites, request_options=request_options)
+        _response = await self._raw_client.crawl_website(
+            websites=websites, callback_url=callback_url, callback_data=callback_data, request_options=request_options
+        )
         return _response.data
 
     async def list(
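
The AsyncDocumentsClient picks up the same keyword arguments, so the callback wiring also works from an event loop. A sketch of the async crawl_website call follows; it assumes WebsiteSource accepts bucket_id and source_url, and all other values are placeholders.

# Sketch only: the same callback arguments on the asynchronous client.
import asyncio

from groundx import AsyncGroundX, WebsiteSource


async def main() -> None:
    client = AsyncGroundX(api_key="YOUR_API_KEY")
    response = await client.documents.crawl_website(
        websites=[
            WebsiteSource(bucket_id=1234, source_url="https://example.com"),
        ],
        callback_url="https://example.com/groundx-callback",
        callback_data="crawl-job-42",
    )
    print(response)


asyncio.run(main())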
src/groundx/documents/raw_client.py
@@ -36,6 +36,8 @@ class RawDocumentsClient:
         self,
         *,
         documents: typing.Sequence[IngestRemoteDocument],
+        callback_url: typing.Optional[str] = OMIT,
+        callback_data: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> HttpResponse[IngestResponse]:
         """
@@ -47,6 +49,12 @@ class RawDocumentsClient:
         ----------
         documents : typing.Sequence[IngestRemoteDocument]
 
+        callback_url : typing.Optional[str]
+            An endpoint that will receive processing event updates as POST.
+
+        callback_data : typing.Optional[str]
+            A string that is returned, along with processing event updates, to the callback URL.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -62,6 +70,8 @@ class RawDocumentsClient:
                 "documents": convert_and_respect_annotation_metadata(
                     object_=documents, annotation=typing.Sequence[IngestRemoteDocument], direction="write"
                 ),
+                "callbackUrl": callback_url,
+                "callbackData": callback_data,
             },
             headers={
                 "content-type": "application/json",
@@ -176,7 +186,12 @@ class RawDocumentsClient:
             raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
 
     def crawl_website(
-        self, *, websites: typing.Sequence[WebsiteSource], request_options: typing.Optional[RequestOptions] = None
+        self,
+        *,
+        websites: typing.Sequence[WebsiteSource],
+        callback_url: typing.Optional[str] = OMIT,
+        callback_data: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
     ) -> HttpResponse[IngestResponse]:
         """
         Upload the content of a publicly accessible website for ingestion into a GroundX bucket. This is done by following links within a specified URL, recursively, up to a specified depth or number of pages.
@@ -190,6 +205,12 @@ class RawDocumentsClient:
         ----------
         websites : typing.Sequence[WebsiteSource]
 
+        callback_url : typing.Optional[str]
+            The URL that will receive processing event updates.
+
+        callback_data : typing.Optional[str]
+            A string that is returned, along with processing event updates, to the callback URL.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -205,6 +226,8 @@ class RawDocumentsClient:
                 "websites": convert_and_respect_annotation_metadata(
                     object_=websites, annotation=typing.Sequence[WebsiteSource], direction="write"
                 ),
+                "callbackUrl": callback_url,
+                "callbackData": callback_data,
             },
             headers={
                 "content-type": "application/json",
@@ -724,6 +747,8 @@ class AsyncRawDocumentsClient:
         self,
         *,
         documents: typing.Sequence[IngestRemoteDocument],
+        callback_url: typing.Optional[str] = OMIT,
+        callback_data: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> AsyncHttpResponse[IngestResponse]:
         """
@@ -735,6 +760,12 @@ class AsyncRawDocumentsClient:
         ----------
         documents : typing.Sequence[IngestRemoteDocument]
 
+        callback_url : typing.Optional[str]
+            An endpoint that will receive processing event updates as POST.
+
+        callback_data : typing.Optional[str]
+            A string that is returned, along with processing event updates, to the callback URL.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -750,6 +781,8 @@ class AsyncRawDocumentsClient:
                 "documents": convert_and_respect_annotation_metadata(
                     object_=documents, annotation=typing.Sequence[IngestRemoteDocument], direction="write"
                 ),
+                "callbackUrl": callback_url,
+                "callbackData": callback_data,
             },
             headers={
                 "content-type": "application/json",
@@ -864,7 +897,12 @@ class AsyncRawDocumentsClient:
             raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
 
     async def crawl_website(
-        self, *, websites: typing.Sequence[WebsiteSource], request_options: typing.Optional[RequestOptions] = None
+        self,
+        *,
+        websites: typing.Sequence[WebsiteSource],
+        callback_url: typing.Optional[str] = OMIT,
+        callback_data: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
     ) -> AsyncHttpResponse[IngestResponse]:
         """
         Upload the content of a publicly accessible website for ingestion into a GroundX bucket. This is done by following links within a specified URL, recursively, up to a specified depth or number of pages.
@@ -878,6 +916,12 @@ class AsyncRawDocumentsClient:
         ----------
         websites : typing.Sequence[WebsiteSource]
 
+        callback_url : typing.Optional[str]
+            The URL that will receive processing event updates.
+
+        callback_data : typing.Optional[str]
+            A string that is returned, along with processing event updates, to the callback URL.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -893,6 +937,8 @@ class AsyncRawDocumentsClient:
                 "websites": convert_and_respect_annotation_metadata(
                     object_=websites, annotation=typing.Sequence[WebsiteSource], direction="write"
                 ),
+                "callbackUrl": callback_url,
+                "callbackData": callback_data,
             },
             headers={
                 "content-type": "application/json",
src/groundx/ingest.py
@@ -111,6 +111,7 @@ def prep_documents(
                 bucket_id=document.bucket_id,
                 file_name=document.file_name,
                 file_type=document.file_type,
+                filter=document.filter,
                 process_level=document.process_level,
                 search_data=document.search_data,
                 source_url=document.file_path,
@@ -431,6 +432,7 @@ class GroundX(GroundXBase):
                 bucket_id=d.bucket_id,
                 file_name=fn,
                 file_type=ft,
+                filter=d.filter,
                 process_level=d.process_level,
                 search_data=d.search_data,
                 source_url=url,
@@ -610,6 +612,7 @@ class AsyncGroundX(AsyncGroundXBase):
                 bucket_id=d.bucket_id,
                 file_name=fn,
                 file_type=ft,
+                filter=d.filter,
                 process_level=d.process_level,
                 search_data=d.search_data,
                 source_url=url,
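
The ingest.py change is small but meaningful: a filter set on a Document is now copied onto the IngestRemoteDocument that prep_documents and the GroundX/AsyncGroundX ingest helpers build, instead of being dropped. A minimal sketch follows, assuming the high-level Document helper exposes the same fields read in this diff and that a plain string filter value is acceptable.

# Sketch only: as of 2.3.8, the filter on a Document is forwarded to the ingest request.
# All values below are placeholders.
from groundx import Document, GroundX

client = GroundX(api_key="YOUR_API_KEY")

response = client.ingest(
    documents=[
        Document(
            bucket_id=1234,
            file_name="sample.pdf",
            file_path="https://example.com/sample.pdf",
            file_type="pdf",
            filter="finance",  # previously not propagated by prep_documents
        )
    ],
)
print(response)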