unstructured-ingest 1.0.35__py3-none-any.whl → 1.0.40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of unstructured-ingest might be problematic; see the release's advisory page for more details.

@@ -1 +1 @@
1
- __version__ = "1.0.35" # pragma: no cover
1
+ __version__ = "1.0.40" # pragma: no cover
@@ -33,6 +33,8 @@ from unstructured_ingest.utils.string_and_date_utils import fix_unescaped_unicod
33
33
 
34
34
  if TYPE_CHECKING:
35
35
  from atlassian import Confluence
36
+ from bs4 import BeautifulSoup
37
+ from bs4.element import Tag
36
38
 
37
39
  CONNECTOR_TYPE = "confluence"
38
40
 
@@ -235,11 +237,28 @@ class ConfluenceIndexer(Indexer):
235
237
  yield file_data
236
238
 
237
239
 
238
- class ConfluenceDownloaderConfig(DownloaderConfig, HtmlMixin):
240
+ class ConfluenceDownloaderConfig(HtmlMixin, DownloaderConfig):
239
241
  max_num_metadata_permissions: int = Field(
240
242
  250, description="Approximate maximum number of permissions included in metadata"
241
243
  )
242
244
 
245
+ @requires_dependencies(["bs4"])
246
+ def _find_hyperlink_tags(self, html_soup: "BeautifulSoup") -> list["Tag"]:
247
+ from bs4.element import Tag
248
+
249
+ return [
250
+ element
251
+ for element in html_soup.find_all(
252
+ "a",
253
+ attrs={
254
+ "class": "confluence-embedded-file",
255
+ "data-linked-resource-type": "attachment",
256
+ "href": True,
257
+ },
258
+ )
259
+ if isinstance(element, Tag)
260
+ ]
261
+
243
262
 
244
263
  @dataclass
245
264
  class ConfluenceDownloader(Downloader):
@@ -343,10 +343,9 @@ class FsspecUploader(Uploader):
343
343
  raise self.wrap_error(e=e)
344
344
 
345
345
  def get_upload_path(self, file_data: FileData) -> Path:
346
- upload_path = (
347
- Path(self.upload_config.path_without_protocol)
348
- / file_data.source_identifiers.relative_path.lstrip("/")
349
- )
346
+ upload_path = Path(
347
+ self.upload_config.path_without_protocol
348
+ ) / file_data.source_identifiers.relative_path.lstrip("/")
350
349
  updated_upload_path = upload_path.parent / f"{upload_path.name}.json"
351
350
  return updated_upload_path
352
351
 
@@ -1,7 +1,7 @@
1
1
  import json
2
2
  from contextlib import contextmanager
3
3
  from dataclasses import dataclass, field
4
- from typing import TYPE_CHECKING, Any, Generator, Optional, Union
4
+ from typing import TYPE_CHECKING, Any, Generator, Optional
5
5
 
6
6
  from dateutil import parser
7
7
  from pydantic import Field, Secret
@@ -97,10 +97,16 @@ class MilvusUploadStager(UploadStager):
97
97
 
98
98
  def conform_dict(self, element_dict: dict, file_data: FileData) -> dict:
99
99
  working_data = element_dict.copy()
100
- if self.upload_stager_config.flatten_metadata and (
101
- metadata := working_data.pop("metadata", None)
102
- ):
103
- working_data.update(flatten_dict(metadata, keys_to_omit=["data_source_record_locator"]))
100
+
101
+ if self.upload_stager_config.flatten_metadata:
102
+ metadata: dict[str, Any] = working_data.pop("metadata", {})
103
+ flattened_metadata = flatten_dict(
104
+ metadata,
105
+ separator="_",
106
+ flatten_lists=False,
107
+ remove_none=True,
108
+ )
109
+ working_data.update(flattened_metadata)
104
110
 
105
111
  # TODO: milvus sdk doesn't seem to support defaults via the schema yet,
106
112
  # remove once that gets updated
@@ -154,6 +160,23 @@ class MilvusUploader(Uploader):
154
160
  upload_config: MilvusUploaderConfig
155
161
  connector_type: str = CONNECTOR_TYPE
156
162
 
163
+ def has_dynamic_fields_enabled(self) -> bool:
164
+ """Check if the target collection has dynamic fields enabled."""
165
+ try:
166
+ with self.get_client() as client:
167
+ collection_info = client.describe_collection(self.upload_config.collection_name)
168
+
169
+ # Check if dynamic field is enabled
170
+ # The schema info should contain enable_dynamic_field or enableDynamicField
171
+ schema_info = collection_info.get(
172
+ "enable_dynamic_field",
173
+ collection_info.get("enableDynamicField", False),
174
+ )
175
+ return bool(schema_info)
176
+ except Exception as e:
177
+ logger.warning(f"Could not determine if collection has dynamic fields enabled: {e}")
178
+ return False
179
+
157
180
  @DestinationConnectionError.wrap
158
181
  def precheck(self):
159
182
  from pymilvus import MilvusException
@@ -164,6 +187,7 @@ class MilvusUploader(Uploader):
164
187
  raise DestinationConnectionError(
165
188
  f"Collection '{self.upload_config.collection_name}' does not exist"
166
189
  )
190
+
167
191
  except MilvusException as milvus_exception:
168
192
  raise DestinationConnectionError(
169
193
  f"failed to precheck Milvus: {str(milvus_exception.message)}"
@@ -193,16 +217,66 @@ class MilvusUploader(Uploader):
193
217
  )
194
218
 
195
219
  @requires_dependencies(["pymilvus"], extras="milvus")
196
- def insert_results(self, data: Union[dict, list[dict]]):
220
+ def _prepare_data_for_insert(self, data: list[dict]) -> list[dict]:
221
+ """
222
+ Conforms the provided data to the schema of the target Milvus collection.
223
+ - If dynamic fields are enabled, it ensures JSON-stringified fields are decoded.
224
+ - If dynamic fields are disabled, it filters out any fields not present in the schema.
225
+ """
226
+
227
+ dynamic_fields_enabled = self.has_dynamic_fields_enabled()
228
+
229
+ # If dynamic fields are enabled, 'languages' field needs to be a list
230
+ if dynamic_fields_enabled:
231
+ logger.debug("Dynamic fields enabled, ensuring 'languages' field is a list.")
232
+ prepared_data = []
233
+ for item in data:
234
+ new_item = item.copy()
235
+ if "languages" in new_item and isinstance(new_item["languages"], str):
236
+ try:
237
+ new_item["languages"] = json.loads(new_item["languages"])
238
+ except (json.JSONDecodeError, TypeError):
239
+ logger.warning(
240
+ f"Could not JSON decode languages field: {new_item['languages']}. "
241
+ "Leaving as string.",
242
+ )
243
+ prepared_data.append(new_item)
244
+ return prepared_data
245
+
246
+ # If dynamic fields are not enabled, we need to filter out the metadata fields
247
+ # to avoid insertion errors for fields not defined in the schema
248
+ with self.get_client() as client:
249
+ collection_info = client.describe_collection(
250
+ self.upload_config.collection_name,
251
+ )
252
+ schema_fields = {
253
+ field["name"]
254
+ for field in collection_info.get("fields", [])
255
+ if not field.get("auto_id", False)
256
+ }
257
+ # Remove metadata fields that are not part of the base schema
258
+ filtered_data = []
259
+ for item in data:
260
+ filtered_item = {key: value for key, value in item.items() if key in schema_fields}
261
+ filtered_data.append(filtered_item)
262
+ return filtered_data
263
+
264
+ @requires_dependencies(["pymilvus"], extras="milvus")
265
+ def insert_results(self, data: list[dict]):
197
266
  from pymilvus import MilvusException
198
267
 
199
268
  logger.info(
200
269
  f"uploading {len(data)} entries to {self.connection_config.db_name} "
201
270
  f"db in collection {self.upload_config.collection_name}"
202
271
  )
272
+
273
+ prepared_data = self._prepare_data_for_insert(data=data)
274
+
203
275
  with self.get_client() as client:
204
276
  try:
205
- res = client.insert(collection_name=self.upload_config.collection_name, data=data)
277
+ res = client.insert(
278
+ collection_name=self.upload_config.collection_name, data=prepared_data
279
+ )
206
280
  except MilvusException as milvus_exception:
207
281
  raise WriteError(
208
282
  f"failed to upload records to Milvus: {str(milvus_exception.message)}"
@@ -18,6 +18,12 @@ class OriginalSyncedBlock(BlockBase):
18
18
 
19
19
  @classmethod
20
20
  def from_dict(cls, data: dict):
21
+ """Create OriginalSyncedBlock from dictionary data.
22
+
23
+ Original blocks contain children content.
24
+ """
25
+ if "children" not in data:
26
+ raise ValueError(f"OriginalSyncedBlock data missing 'children': {data}")
21
27
  return cls(children=data["children"])
22
28
 
23
29
  def get_html(self) -> Optional[HtmlTag]:
@@ -31,27 +37,73 @@ class DuplicateSyncedBlock(BlockBase):
31
37
 
32
38
  @staticmethod
33
39
  def can_have_children() -> bool:
40
+ """Check if duplicate synced blocks can have children.
41
+
42
+ Duplicate blocks themselves don't have children directly fetched here,
43
+ but they represent content that does, so Notion API might report has_children=True
44
+ on the parent block object. The actual children are fetched from the original block.
45
+ """
34
46
  return True
35
47
 
36
48
  @classmethod
37
49
  def from_dict(cls, data: dict):
38
- return cls(**data)
50
+ """Create DuplicateSyncedBlock from dictionary data.
51
+
52
+ Duplicate blocks contain a 'synced_from' reference.
53
+ """
54
+ synced_from_data = data.get("synced_from")
55
+ if not synced_from_data or not isinstance(synced_from_data, dict):
56
+ raise ValueError(f"Invalid data structure for DuplicateSyncedBlock: {data}")
57
+ # Ensure required keys are present in the nested dictionary
58
+ if "type" not in synced_from_data or "block_id" not in synced_from_data:
59
+ raise ValueError(
60
+ f"Missing 'type' or 'block_id' in synced_from data: {synced_from_data}"
61
+ )
62
+ return cls(type=synced_from_data["type"], block_id=synced_from_data["block_id"])
39
63
 
40
64
  def get_html(self) -> Optional[HtmlTag]:
65
+ """Get HTML representation of the duplicate synced block.
66
+
67
+ HTML representation might need fetching the original block's content,
68
+ which is outside the scope of this simple data class.
69
+ """
41
70
  return None
42
71
 
43
72
 
44
73
  class SyncBlock(BlockBase):
45
74
  @staticmethod
46
75
  def can_have_children() -> bool:
76
+ """Check if synced blocks can have children.
77
+
78
+ Synced blocks (both original and duplicate) can conceptually have children.
79
+ """
47
80
  return True
48
81
 
49
82
  @classmethod
50
83
  def from_dict(cls, data: dict):
51
- if "synced_from" in data:
84
+ """Create appropriate SyncedBlock subclass from dictionary data.
85
+
86
+ Determine if it's a duplicate (has 'synced_from') or original (has 'children').
87
+ """
88
+ if data.get("synced_from") is not None:
89
+ # It's a duplicate block containing a reference
90
+ return DuplicateSyncedBlock.from_dict(data)
91
+ elif "children" in data:
92
+ # It's an original block containing children
52
93
  return OriginalSyncedBlock.from_dict(data)
53
94
  else:
54
- return DuplicateSyncedBlock.from_dict(data)
95
+ # Handle cases where neither 'synced_from' nor 'children' are present.
96
+ # Notion API might return this for an empty original synced block.
97
+ # Let's treat it as an empty OriginalSyncedBlock.
98
+ # If this assumption is wrong, errors might occur later.
99
+ # Consider logging a warning here if strictness is needed.
100
+ return OriginalSyncedBlock(children=[])
55
101
 
56
102
  def get_html(self) -> Optional[HtmlTag]:
103
+ """Get HTML representation of the synced block.
104
+
105
+ The specific instance returned by from_dict (Original or Duplicate)
106
+ will handle its own get_html logic.
107
+ This method on the base SyncBlock might not be directly called.
108
+ """
57
109
  return None
@@ -240,7 +240,7 @@ class PineconeUploader(VectorDBUploader):
240
240
  destination_name: str = "unstructuredautocreated",
241
241
  destination_type: Literal["pod", "serverless"] = "serverless",
242
242
  serverless_cloud: str = "aws",
243
- serverless_region: str = "us-west-2",
243
+ serverless_region: str = "us-east-1",
244
244
  pod_environment: str = "us-east1-gcp",
245
245
  pod_type: str = "p1.x1",
246
246
  pod_count: int = 1,
@@ -29,6 +29,7 @@ from unstructured_ingest.utils.dep_check import requires_dependencies
29
29
 
30
30
  if TYPE_CHECKING:
31
31
  from office365.onedrive.driveitems.driveItem import DriveItem
32
+ from office365.onedrive.sites.site import Site
32
33
 
33
34
  CONNECTOR_TYPE = "sharepoint"
34
35
  LEGACY_DEFAULT_PATH = "Shared Documents"
@@ -51,6 +52,33 @@ class SharepointConnectionConfig(OnedriveConnectionConfig):
51
52
  https://[tenant]-admin.sharepoint.com.\
52
53
  This requires the app to be registered at a tenant level"
53
54
  )
55
+ library: Optional[str] = Field(
56
+ default=None,
57
+ description="Sharepoint library name. If not provided, the default \
58
+ drive will be used.",
59
+ )
60
+
61
+ def _get_drive_item(self, client_site: Site) -> DriveItem:
62
+ """Helper method to get the drive item for the specified library or default drive."""
63
+ site_drive_item = None
64
+ if self.library:
65
+ for drive in client_site.drives.get().execute_query():
66
+ if drive.name == self.library:
67
+ logger.info(f"Found the requested library: {self.library}")
68
+ site_drive_item = drive.get().execute_query().root
69
+ break
70
+
71
+ # If no specific library was found or requested, use the default drive
72
+ if not site_drive_item:
73
+ if self.library:
74
+ logger.warning(
75
+ f"Library '{self.library}' not found in site '{self.site}'. "
76
+ "Using the default drive instead."
77
+ )
78
+
79
+ site_drive_item = client_site.drive.get().execute_query().root
80
+
81
+ return site_drive_item
54
82
 
55
83
 
56
84
  class SharepointIndexerConfig(OnedriveIndexerConfig):
@@ -76,8 +104,8 @@ class SharepointIndexer(OnedriveIndexer):
76
104
 
77
105
  client = await asyncio.to_thread(self.connection_config.get_client)
78
106
  try:
79
- site = client.sites.get_by_url(self.connection_config.site).get().execute_query()
80
- site_drive_item = site.drive.get().execute_query().root
107
+ client_site = client.sites.get_by_url(self.connection_config.site).get().execute_query()
108
+ site_drive_item = self.connection_config._get_drive_item(client_site)
81
109
  except ClientRequestException:
82
110
  logger.info("Site not found")
83
111
 
@@ -118,8 +146,8 @@ class SharepointDownloader(OnedriveDownloader):
118
146
  client = self.connection_config.get_client()
119
147
 
120
148
  try:
121
- site = client.sites.get_by_url(self.connection_config.site).get().execute_query()
122
- site_drive_item = site.drive.get().execute_query().root
149
+ client_site = client.sites.get_by_url(self.connection_config.site).get().execute_query()
150
+ site_drive_item = self.connection_config._get_drive_item(client_site)
123
151
  except ClientRequestException:
124
152
  logger.info("Site not found")
125
153
  file = site_drive_item.get_by_path(server_relative_path).get().execute_query()
@@ -12,6 +12,7 @@ from unstructured_ingest.logger import logger
12
12
  from unstructured_ingest.utils.dep_check import requires_dependencies
13
13
 
14
14
  if TYPE_CHECKING:
15
+ from bs4 import BeautifulSoup
15
16
  from bs4.element import Tag
16
17
  from requests import Session
17
18
 
@@ -96,7 +97,7 @@ class HtmlMixin(BaseModel):
96
97
  from bs4 import BeautifulSoup
97
98
 
98
99
  soup = BeautifulSoup(html, "html.parser")
99
- tags = soup.find_all("a", href=True)
100
+ tags = self._find_hyperlink_tags(soup)
100
101
  hrefs = [
101
102
  tag["href"]
102
103
  for tag in tags
@@ -158,3 +159,15 @@ class HtmlMixin(BaseModel):
158
159
  )
159
160
  for url_to_download in urls_to_download
160
161
  ]
162
+
163
+ @requires_dependencies(["bs4"])
164
+ def _find_hyperlink_tags(self, html_soup: "BeautifulSoup") -> list["Tag"]:
165
+ """Find hyperlink tags in the HTML.
166
+
167
+ Overwrite this method to customize the tag search.
168
+ """
169
+ from bs4.element import Tag
170
+
171
+ return [
172
+ element for element in html_soup.find_all("a", href=True) if isinstance(element, Tag)
173
+ ]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: unstructured_ingest
3
- Version: 1.0.35
3
+ Version: 1.0.40
4
4
  Summary: Local ETL data pipeline to get data RAG ready
5
5
  Author-email: Unstructured Technologies <devops@unstructuredai.io>
6
6
  License-Expression: Apache-2.0
@@ -1,5 +1,5 @@
1
1
  unstructured_ingest/__init__.py,sha256=U4S_2y3zgLZVfMenHRaJFBW8yqh2mUBuI291LGQVOJ8,35
2
- unstructured_ingest/__version__.py,sha256=b3qTfBCIVt9b6BKAiKwjS-S1JYzG3JrNJz133p_CIH8,43
2
+ unstructured_ingest/__version__.py,sha256=Nh8AH5kdHKmvlr1cLANF544IyKeypbci9JhgVAy8F10,43
3
3
  unstructured_ingest/error.py,sha256=qDncnJgbf5ils956RcO2CGlAKYDT5OaEM9Clv1JVTNc,1448
4
4
  unstructured_ingest/errors_v2.py,sha256=9RuRCi7lbDxCguDz07y5RiHoQiFIOWwOD7xqzJ2B3Yw,436
5
5
  unstructured_ingest/logger.py,sha256=7e_7UeK6hVOd5BQ6i9NzRUAPCS_DF839Y8TjUDywraY,1428
@@ -66,7 +66,7 @@ unstructured_ingest/processes/connectors/airtable.py,sha256=smx5qBSUKwM8V6Xcc7ik
66
66
  unstructured_ingest/processes/connectors/astradb.py,sha256=Ob9wQgDxa6BXDPZBOqooNKQgvjIZcMwIe4fW3VlI7h8,18929
67
67
  unstructured_ingest/processes/connectors/azure_ai_search.py,sha256=szhSRXzUHk0DE2hGFfjGc_jNFzlUwiRlCtIkuu7tmnk,11524
68
68
  unstructured_ingest/processes/connectors/chroma.py,sha256=q5_Fu4xb6_W_NyrPxVa3-jVwZLqVdlBNlR4dFvbd7l0,7235
69
- unstructured_ingest/processes/connectors/confluence.py,sha256=C62LVwZYk7H8RfiPb0mbxig2osW5u7KvHIlz4qOJU-0,21954
69
+ unstructured_ingest/processes/connectors/confluence.py,sha256=SPPZzpNXbS5xyha9fxofFNBCB1irRMbogp8po9S1Z7k,22539
70
70
  unstructured_ingest/processes/connectors/couchbase.py,sha256=KCHoYDNya9B05NIB5D78zXoizFyfpJRepcYBe1nLSOs,12298
71
71
  unstructured_ingest/processes/connectors/delta_table.py,sha256=2DFox_Vzoopt_D3Jy3rCjrrTGMutG2INIrwCeoIohRY,7340
72
72
  unstructured_ingest/processes/connectors/discord.py,sha256=6yEJ_agfKUqsV43wFsbMkcd8lcLJC0uqbo4izjdZ3rU,5294
@@ -76,15 +76,15 @@ unstructured_ingest/processes/connectors/google_drive.py,sha256=jQb4_rKL_tJg7s7m
76
76
  unstructured_ingest/processes/connectors/jira.py,sha256=a7OuVi4RFfr22Tqgk60lwmtWTRBw2fI1m8KPqfA8Ffo,18504
77
77
  unstructured_ingest/processes/connectors/kdbai.py,sha256=XhxYpKSAoFPBsDQWwNuLX03DCxOVr7yquj9VYM55Rtc,5174
78
78
  unstructured_ingest/processes/connectors/local.py,sha256=LluTLKv4g7FbJb4A6vuSxI9VhzKZuuQUpDS-cVNAQ2g,7426
79
- unstructured_ingest/processes/connectors/milvus.py,sha256=Jr9cul7By03tGAPFnFBoqncnNWwbhKd-qbmkuqnin8U,8908
79
+ unstructured_ingest/processes/connectors/milvus.py,sha256=L-PM5osheNyNsLGYZmiF3rRmeulp7Ejk92JCoaQ_F9Y,12075
80
80
  unstructured_ingest/processes/connectors/mongodb.py,sha256=1g_5bfbS6lah3nsOXqLAanR3zNYJ47_Njw_uV-uj3_U,14324
81
81
  unstructured_ingest/processes/connectors/neo4j.py,sha256=ztxvI9KY8RF5kYUuMGSzzN5mz7Fu_4Ai9P7dqCpJLc0,20267
82
82
  unstructured_ingest/processes/connectors/onedrive.py,sha256=JIADpc31PI9Yzr0raF6bSqzes2jhfcniUzew1aKVWeI,19305
83
83
  unstructured_ingest/processes/connectors/outlook.py,sha256=zHM5frO7CqQG0-KcTyX49aZeSlsvVrl8kh_lR_ESgQw,9275
84
- unstructured_ingest/processes/connectors/pinecone.py,sha256=pSREUNsQqel6q1EFZsFWelg-uZgGubQY5m_6nVnBFKs,15090
84
+ unstructured_ingest/processes/connectors/pinecone.py,sha256=jCabAqKQyBFzaGjphxLMr57y7P0Z15Jd9Jj-JM40YnU,15090
85
85
  unstructured_ingest/processes/connectors/redisdb.py,sha256=rTihbfv0Mlk1eo5Izn-JXRu5Ad5C-KD58nSqeKsaZJ8,8024
86
86
  unstructured_ingest/processes/connectors/salesforce.py,sha256=OaKEWCqZrirHqFJ650K5jSPwYlWefPOapas8Y-4D9oc,11661
87
- unstructured_ingest/processes/connectors/sharepoint.py,sha256=jI-erp4YUfHxPeUTcfHSPEG3w0wjSBYfAnMg1WT6lfw,4996
87
+ unstructured_ingest/processes/connectors/sharepoint.py,sha256=vIfLIactYXcdetccHvKlYOay6NOzGj2X0CkXbY0KuRo,6213
88
88
  unstructured_ingest/processes/connectors/slack.py,sha256=EkFj9PcAu5_gF2xLogikKDADLbJYq-_jvchzYrTdLO4,9224
89
89
  unstructured_ingest/processes/connectors/utils.py,sha256=TAd0hb1f291N-q7-TUe6JKSCGkhqDyo7Ij8zmliBZUc,2071
90
90
  unstructured_ingest/processes/connectors/vectara.py,sha256=xrC6jkgW8BII4UjdzUelDu122xT484cpfMTK2wl-sko,12292
@@ -109,7 +109,7 @@ unstructured_ingest/processes/connectors/fsspec/__init__.py,sha256=3HTdw4L4mdN4W
109
109
  unstructured_ingest/processes/connectors/fsspec/azure.py,sha256=31VNiG5YnXfhrFX7QJ2O1ubeWHxbe1sYVIztefbscAQ,7148
110
110
  unstructured_ingest/processes/connectors/fsspec/box.py,sha256=1gLS7xR2vbjgKBrQ4ZpI1fKTsJuIDfXuAzx_a4FzxG4,5873
111
111
  unstructured_ingest/processes/connectors/fsspec/dropbox.py,sha256=HwwKjQmjM7yFk9Esh_F20xDisRPXGUkFduzaasByRDE,8355
112
- unstructured_ingest/processes/connectors/fsspec/fsspec.py,sha256=d_ig69_tuSWczwPxzZue1xTYMYqYqUe-dg1jMEjC8M0,14481
112
+ unstructured_ingest/processes/connectors/fsspec/fsspec.py,sha256=NbId5WMq6M5kF3fYAwSUuaL2e_gutgmTATrE_X8okGY,14467
113
113
  unstructured_ingest/processes/connectors/fsspec/gcs.py,sha256=ouxISCKpZTAj3T6pWGYbASu93wytJjl5WSICvQcrgfE,7172
114
114
  unstructured_ingest/processes/connectors/fsspec/s3.py,sha256=2ZV6b2E2pIsf_ab1Lty74FwpMnJZhpQUdamPgpwcKsQ,7141
115
115
  unstructured_ingest/processes/connectors/fsspec/sftp.py,sha256=pR_a2SgLjt8ffNkariHrPB1E0HVSTj5h3pt7KxTU3TI,6371
@@ -166,7 +166,7 @@ unstructured_ingest/processes/connectors/notion/types/blocks/numbered_list.py,sh
166
166
  unstructured_ingest/processes/connectors/notion/types/blocks/paragraph.py,sha256=qvc4orjP2XcbaeBWor-a3xAEglLkyb-epknm7SXgU1E,992
167
167
  unstructured_ingest/processes/connectors/notion/types/blocks/pdf.py,sha256=St43RmpefAzDwJKTwz2CdGVm-xeUwHkYgtQtLYQbnw0,1661
168
168
  unstructured_ingest/processes/connectors/notion/types/blocks/quote.py,sha256=yl7npmdcO6oFNgTNGVN_Ihvzexv12Xwg1r4NWAOjILQ,1176
169
- unstructured_ingest/processes/connectors/notion/types/blocks/synced_block.py,sha256=Rc3xyKtnOwovx-O-dzmS9pX0h4-s41YnWmmEz5TYxdU,1333
169
+ unstructured_ingest/processes/connectors/notion/types/blocks/synced_block.py,sha256=aSfFxJKYx1qylOJHwiS_ZAu5pQ-YQZqJM20KGHUvx48,3991
170
170
  unstructured_ingest/processes/connectors/notion/types/blocks/table.py,sha256=eYUlRp4uCwjy_eB0mLh7MGMe1qrr_hnOxXS5RfUM2DQ,1724
171
171
  unstructured_ingest/processes/connectors/notion/types/blocks/table_of_contents.py,sha256=bR5DdecXFz468okM5WOs10DK8_14Dj7OCLSRusMZzsk,534
172
172
  unstructured_ingest/processes/connectors/notion/types/blocks/template.py,sha256=bq2Vh2X7ptpofs9OZnATHySZe2DzbOLsNNfpEI70NgM,968
@@ -226,13 +226,13 @@ unstructured_ingest/utils/compression.py,sha256=_BkFREoa0fkJ6z-1lY76HCmy8mLymbPC
226
226
  unstructured_ingest/utils/constants.py,sha256=pDspTYz-nEojHBqrZNfssGEiujmVa02pIWL63PQP9sU,103
227
227
  unstructured_ingest/utils/data_prep.py,sha256=yqrv7x_nlj0y3uaN0m0Bnsekb7VIQnwABWPa24KU5QI,7426
228
228
  unstructured_ingest/utils/dep_check.py,sha256=SXXcUna2H0RtxA6j1S2NGkvQa9JP2DujWhmyBa7776Y,2400
229
- unstructured_ingest/utils/html.py,sha256=0WduP8tI5S3nHFQi6XHNPHgsIC9j3iWwyIayX9gDLiE,6386
229
+ unstructured_ingest/utils/html.py,sha256=78ou1vVZ0SJ3c6-Nmxg2iR5MoqubJTvwiuTNMtSFDh4,6816
230
230
  unstructured_ingest/utils/ndjson.py,sha256=nz8VUOPEgAFdhaDOpuveknvCU4x82fVwqE01qAbElH0,1201
231
231
  unstructured_ingest/utils/pydantic_models.py,sha256=BT_j15e4rX40wQbt8LUXbqfPhA3rJn1PHTI_G_A_EHY,1720
232
232
  unstructured_ingest/utils/string_and_date_utils.py,sha256=oXOI6rxXq-8ncbk7EoJK0WCcTXWj75EzKl8pfQMID3U,2522
233
233
  unstructured_ingest/utils/table.py,sha256=WZechczgVFvlodUWFcsnCGvBNh1xRm6hr0VbJTPxKAc,3669
234
- unstructured_ingest-1.0.35.dist-info/METADATA,sha256=rpeaT-RpY6IFgDR7tIIxylJL1t-geRuhk3QKQq6JSDY,8747
235
- unstructured_ingest-1.0.35.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
236
- unstructured_ingest-1.0.35.dist-info/entry_points.txt,sha256=gUAAFnjFPnBgThJSEbw0N5ZjxtaKlT1s9e05_arQrNw,70
237
- unstructured_ingest-1.0.35.dist-info/licenses/LICENSE.md,sha256=SxkKP_62uIAKb9mb1eH7FH4Kn2aYT09fgjKpJt5PyTk,11360
238
- unstructured_ingest-1.0.35.dist-info/RECORD,,
234
+ unstructured_ingest-1.0.40.dist-info/METADATA,sha256=URvUgQtqnRmftrPAeq9QAAWgvskHVwUwnPE0m07iE7M,8747
235
+ unstructured_ingest-1.0.40.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
236
+ unstructured_ingest-1.0.40.dist-info/entry_points.txt,sha256=gUAAFnjFPnBgThJSEbw0N5ZjxtaKlT1s9e05_arQrNw,70
237
+ unstructured_ingest-1.0.40.dist-info/licenses/LICENSE.md,sha256=SxkKP_62uIAKb9mb1eH7FH4Kn2aYT09fgjKpJt5PyTk,11360
238
+ unstructured_ingest-1.0.40.dist-info/RECORD,,