unstructured-ingest 1.2.13__py3-none-any.whl → 1.2.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of unstructured-ingest might be problematic.

@@ -1 +1 @@
-__version__ = "1.2.13" # pragma: no cover
+__version__ = "1.2.18" # pragma: no cover
@@ -27,12 +27,24 @@ if TYPE_CHECKING:
     from botocore.client import BaseClient

     class BedrockRuntimeClient(BaseClient):
-        def invoke_model(self, body: str, modelId: str, accept: str, contentType: str) -> dict:
+        def invoke_model(
+            self,
+            body: str,
+            modelId: str,
+            accept: str,
+            contentType: str,
+            inferenceProfileId: str = None,
+        ) -> dict:
             pass

     class AsyncBedrockRuntimeClient(BaseClient):
         async def invoke_model(
-            self, body: str, modelId: str, accept: str, contentType: str
+            self,
+            body: str,
+            modelId: str,
+            accept: str,
+            contentType: str,
+            inferenceProfileId: str = None,
         ) -> dict:
             pass

@@ -63,10 +75,10 @@ class BedrockEmbeddingConfig(EmbeddingConfig):
         description="aws secret access key", default=None
     )
     region_name: str = Field(
-        description="aws region name",
+        description="aws region name",
         default_factory=lambda: (
-            os.getenv("BEDROCK_REGION_NAME") or
-            os.getenv("AWS_DEFAULT_REGION") or
+            os.getenv("BEDROCK_REGION_NAME") or
+            os.getenv("AWS_DEFAULT_REGION") or
             "us-west-2"
         )
     )
@@ -79,6 +91,10 @@ class BedrockEmbeddingConfig(EmbeddingConfig):
         alias="model_name",
         description="AWS Bedrock model name",
     )
+    inference_profile_id: str | None = Field(
+        description="AWS Bedrock inference profile ID",
+        default_factory=lambda: os.getenv("BEDROCK_INFERENCE_PROFILE_ID"),
+    )

     def wrap_error(self, e: Exception) -> Exception:
         if is_internal_error(e=e):
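The new inference_profile_id setting can be passed explicitly or picked up from the BEDROCK_INFERENCE_PROFILE_ID environment variable via the default_factory. A minimal sketch of both routes, assuming the config class is imported from unstructured_ingest.embed.bedrock; the model name and profile ID below are placeholders:

    import os

    from unstructured_ingest.embed.bedrock import BedrockEmbeddingConfig

    # Route 1: let default_factory read the environment variable (placeholder value).
    os.environ["BEDROCK_INFERENCE_PROFILE_ID"] = "my-inference-profile"
    config_from_env = BedrockEmbeddingConfig(model_name="amazon.titan-embed-text-v2:0")

    # Route 2: pass the field explicitly; an explicit value bypasses the default_factory.
    config_explicit = BedrockEmbeddingConfig(
        model_name="amazon.titan-embed-text-v2:0",
        inference_profile_id="my-inference-profile",
    )

Either way, the value ends up as the inferenceProfileId keyword on invoke_model, as the encoder hunks below show.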
@@ -218,12 +234,18 @@ class BedrockEmbeddingEncoder(BaseEmbeddingEncoder):
         bedrock_client = self.config.get_client()
         # invoke bedrock API
         try:
-            response = bedrock_client.invoke_model(
-                body=json.dumps(body),
-                modelId=self.config.embedder_model_name,
-                accept="application/json",
-                contentType="application/json",
-            )
+            invoke_params = {
+                "body": json.dumps(body),
+                "modelId": self.config.embedder_model_name,
+                "accept": "application/json",
+                "contentType": "application/json",
+            }
+
+            # Add inference profile if configured
+            if self.config.inference_profile_id:
+                invoke_params["inferenceProfileId"] = self.config.inference_profile_id
+
+            response = bedrock_client.invoke_model(**invoke_params)
         except Exception as e:
             raise self.wrap_error(e=e)

@@ -264,12 +286,18 @@ class AsyncBedrockEmbeddingEncoder(AsyncBaseEmbeddingEncoder):
         async with self.config.get_async_client() as bedrock_client:
             # invoke bedrock API
             try:
-                response = await bedrock_client.invoke_model(
-                    body=json.dumps(body),
-                    modelId=self.config.embedder_model_name,
-                    accept="application/json",
-                    contentType="application/json",
-                )
+                invoke_params = {
+                    "body": json.dumps(body),
+                    "modelId": self.config.embedder_model_name,
+                    "accept": "application/json",
+                    "contentType": "application/json",
+                }
+
+                # Add inference profile if configured
+                if self.config.inference_profile_id:
+                    invoke_params["inferenceProfileId"] = self.config.inference_profile_id
+
+                response = await bedrock_client.invoke_model(**invoke_params)
             except Exception as e:
                 raise self.wrap_error(e=e)
             async with response.get("body") as client_response:
@@ -76,7 +76,9 @@ class DatabricksVolumesConnectionConfig(ConnectionConfig, ABC):
         if isinstance(e, ValueError):
             error_message = e.args[0]
             message_split = error_message.split(":")
-            if message_split[0].endswith("auth"):
+            if (message_split[0].endswith("auth")) or (
+                "Client authentication failed" in error_message
+            ):
                 return UserAuthError(e)
         if isinstance(e, DatabricksError):
             reverse_mapping = {v: k for k, v in STATUS_CODE_MAPPING.items()}
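The broadened check maps two shapes of ValueError to UserAuthError: messages whose first colon-separated segment ends in "auth", and messages containing "Client authentication failed". A standalone illustration of the predicate; the sample messages are illustrative, not taken verbatim from the Databricks SDK:

    def looks_like_auth_error(error_message: str) -> bool:
        # Same condition as the connector: prefix check plus substring match.
        message_split = error_message.split(":")
        return message_split[0].endswith("auth") or "Client authentication failed" in error_message

    print(looks_like_auth_error("default auth: cannot configure default credentials"))   # True
    print(looks_like_auth_error("Client authentication failed: invalid client secret"))  # True
    print(looks_like_auth_error("some unrelated configuration error"))                   # False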
@@ -189,6 +189,9 @@ class GoogleDriveIndexer(Indexer):
         """
         count = 0
         stack = [folder_id]
+        # Pre-compute lower-case extension set for O(1) lookup
+        valid_exts = set(e.lower() for e in extensions) if extensions else None
+
         while stack:
             current_folder = stack.pop()
             # Always list all items under the current folder.
@@ -212,7 +215,6 @@ class GoogleDriveIndexer(Indexer):
                 if extensions:
                     # Use a case-insensitive comparison for the file extension.
                     file_ext = (item.get("fileExtension") or "").lower()
-                    valid_exts = [e.lower() for e in extensions]
                     if file_ext in valid_exts:
                         count += 1
                 else:
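The indexer now normalizes the extension filter once, before walking the folder tree, and uses a set so each membership test is O(1) instead of rebuilding a lowercased list per file. A simplified, self-contained sketch of the same filtering; the file entries and extensions below are made up:

    extensions = ["PDF", "docx"]
    valid_exts = set(e.lower() for e in extensions) if extensions else None

    items = [
        {"name": "report.PDF", "fileExtension": "PDF"},
        {"name": "notes.txt", "fileExtension": "txt"},
        {"name": "subfolder", "fileExtension": None},
    ]

    count = 0
    for item in items:
        file_ext = (item.get("fileExtension") or "").lower()
        if valid_exts is None or file_ext in valid_exts:
            count += 1

    print(count)  # 1: only report.PDF matches the filter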
@@ -147,6 +147,10 @@ class IbmWatsonxConnectionConfig(ConnectionConfig):
             "s3.access-key-id": self.access_config.get_secret_value().access_key_id,
             "s3.secret-access-key": self.access_config.get_secret_value().secret_access_key,
             "s3.region": self.object_storage_region,
+            # By default this header is set to `vended-credentials`, and default bucket
+            # configuration doesn't allow vending credentials. We need to set it to `None`
+            # in order to use user-provided S3 credentials.
+            "header.X-Iceberg-Access-Delegation": None,
         }

     @requires_dependencies(["pyiceberg"], extras="ibm-watsonx-s3")
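pyiceberg's REST catalog forwards properties prefixed with header. as HTTP headers, so setting header.X-Iceberg-Access-Delegation to None keeps the default vended-credentials value from being sent and lets the user-supplied S3 keys take effect, as the inline comment explains. A rough sketch of how such a property dict is handed to pyiceberg; the catalog name, URI, endpoint, and credentials are placeholders, not values from the connector:

    from pyiceberg.catalog import load_catalog

    properties = {
        "uri": "https://example-lakehouse/mds/iceberg",       # placeholder catalog endpoint
        "token": "<bearer-token>",
        "s3.endpoint": "https://s3.example-object-storage",   # placeholder object storage endpoint
        "s3.access-key-id": "<access-key-id>",
        "s3.secret-access-key": "<secret-access-key>",
        "s3.region": "us-south",
        # Suppress the default access-delegation header so the S3 keys above are used.
        "header.X-Iceberg-Access-Delegation": None,
    }

    catalog = load_catalog("watsonx_catalog", **properties)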
@@ -1,6 +1,7 @@
 import json
 from contextlib import contextmanager
 from dataclasses import dataclass, field
+from datetime import datetime
 from typing import TYPE_CHECKING, Any, Generator, Optional

 from dateutil import parser
@@ -97,6 +98,13 @@ class MilvusUploadStager(UploadStager):
             return timestamp
         except ValueError:
             pass
+
+        try:
+            dt = datetime.fromisoformat(date_string.replace("Z", "+00:00"))
+            return dt.timestamp()
+        except ValueError:
+            pass
+
         return parser.parse(date_string).timestamp()

     def conform_dict(self, element_dict: dict, file_data: FileData) -> dict:
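The stager now tries strict ISO 8601 parsing, mapping a trailing "Z" to an explicit UTC offset, before falling back to dateutil. A tiny self-contained version of the same fallback chain; the helper name and sample inputs are illustrative:

    from datetime import datetime
    from dateutil import parser

    def to_timestamp(date_string: str) -> float:
        # Strict ISO 8601 first, with "Z" rewritten so fromisoformat accepts it.
        try:
            return datetime.fromisoformat(date_string.replace("Z", "+00:00")).timestamp()
        except ValueError:
            pass
        # Anything else goes through dateutil's permissive parser.
        return parser.parse(date_string).timestamp()

    print(to_timestamp("2024-05-01T12:30:00Z"))   # handled by fromisoformat
    print(to_timestamp("May 1, 2024 12:30 PM"))   # handled by dateutil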
@@ -1,5 +1,5 @@
 # https://developers.notion.com/reference/page
-from dataclasses import dataclass
+from dataclasses import dataclass, fields
 from typing import Optional

 from unstructured_ingest.processes.connectors.notion.interfaces import FromJSONMixin
@@ -28,18 +28,25 @@ class Page(FromJSONMixin):

     @classmethod
     def from_dict(cls, data: dict):
+        data = data.copy()  # Don't modify the original
         created_by = data.pop("created_by")
         last_edited_by = data.pop("last_edited_by")
         icon = data.pop("icon")
         cover = data.pop("cover")
         parent = data.pop("parent")
+
+        # Filter data to only include fields that exist in the dataclass
+        filtered_data = {
+            k: v for k, v in data.items() if k in {field.name for field in fields(cls)}
+        }
+
         page = cls(
             created_by=PartialUser.from_dict(created_by),
             last_edited_by=PartialUser.from_dict(last_edited_by),
             icon=FileObject.from_dict(icon) if icon else None,
             cover=FileObject.from_dict(cover) if cover else None,
             parent=Parent.from_dict(parent),
-            **data,
+            **filtered_data,
         )

         return page
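Copying the payload and filtering it against fields(cls) makes from_dict tolerant of keys the dataclass does not declare; previously an unknown key passed through **data would raise a TypeError in the generated __init__. A self-contained sketch of the pattern with a made-up dataclass and payload:

    from dataclasses import dataclass, fields

    @dataclass
    class Thing:
        id: str
        url: str

    payload = {"id": "abc", "url": "https://example.com", "brand_new_api_field": True}

    # Keep only keys that correspond to declared dataclass fields.
    known = {f.name for f in fields(Thing)}
    filtered = {k: v for k, v in payload.items() if k in known}

    thing = Thing(**filtered)  # no TypeError despite the extra key
    print(thing)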
@@ -233,18 +233,19 @@ class SnowflakeUploader(SQLUploader):
             output.append(tuple(parsed))
         return output

-    def _parse_values(self, columns: list[str]) -> str:
+    def _parse_select(self, columns: list[str]) -> str:
         embeddings_dimension = self.embeddings_dimension
         parsed_values = []
-        for col in columns:
+        for i, col in enumerate(columns):
+            argument_selector = f"${i + 1}"
             if col in _VECTOR_COLUMNS and embeddings_dimension:
                 parsed_values.append(
-                    f"PARSE_JSON({self.values_delimiter})::VECTOR(FLOAT,{embeddings_dimension})"
+                    f"PARSE_JSON({argument_selector})::VECTOR(FLOAT,{embeddings_dimension})"
                 )
             elif col in _ARRAY_COLUMNS or col in _VECTOR_COLUMNS:
-                parsed_values.append(f"PARSE_JSON({self.values_delimiter})")
+                parsed_values.append(f"PARSE_JSON({argument_selector})")
             else:
-                parsed_values.append(self.values_delimiter)
+                parsed_values.append(argument_selector)
         return ",".join(parsed_values)

     def upload_dataframe(self, df: "DataFrame", file_data: FileData) -> None:
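_parse_select now emits positional argument selectors ($1, $2, ...) that refer to columns of the VALUES clause, rather than repeating the bind placeholder. A rough illustration of its output for a hypothetical column list, assuming "languages" is classified as an array column and "embeddings" as a vector column with dimension 384; these names are examples, not taken from the diff:

    columns = ["id", "text", "languages", "embeddings"]
    # _parse_select(columns) would then return something like:
    expected_select = "$1,$2,PARSE_JSON($3),PARSE_JSON($4)::VECTOR(FLOAT,384)"
    # Plain columns become $N, array columns are wrapped in PARSE_JSON,
    # and vector columns are additionally cast to VECTOR(FLOAT, <dimension>).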
@@ -262,10 +263,11 @@ class SnowflakeUploader(SQLUploader):
         df.replace({np.nan: None}, inplace=True)

         columns = list(df.columns)
-        stmt = "INSERT INTO {table_name} ({columns}) SELECT {values}".format(
+        stmt = "INSERT INTO {table_name} ({columns}) SELECT {select} FROM VALUES ({values})".format(
             table_name=self.upload_config.table_name,
             columns=",".join(columns),
-            values=self._parse_values(columns),
+            select=self._parse_select(columns),
+            values=",".join([self.values_delimiter for _ in columns]),
         )
         logger.info(
             f"writing a total of {len(df)} elements via"
@@ -276,10 +278,7 @@ class SnowflakeUploader(SQLUploader):
         for rows in split_dataframe(df=df, chunk_size=self.upload_config.batch_size):
             with self.connection_config.get_cursor() as cursor:
                 values = self.prepare_data(columns, tuple(rows.itertuples(index=False, name=None)))
-                # TODO: executemany break on 'Binding data in type (list) is not supported'
-                for val in values:
-                    logger.debug(f"running query: {stmt}\nwith values: {val}")
-                    cursor.execute(stmt, val)
+                cursor.executemany(stmt, values)


 snowflake_source_entry = SourceRegistryEntry(
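Taken together, the uploader now builds one INSERT ... SELECT ... FROM VALUES statement per batch and binds every row through executemany instead of looping over execute. For a hypothetical three-column table the statement and call would look roughly like this; the table name, columns, and rows are placeholders, and %s stands in for the configured values_delimiter:

    stmt = (
        "INSERT INTO elements (id,text,embeddings) "
        "SELECT $1,$2,PARSE_JSON($3)::VECTOR(FLOAT,384) FROM VALUES (%s,%s,%s)"
    )
    rows = [
        ("a1", "first chunk", "[0.1, 0.2, 0.3]"),
        ("a2", "second chunk", "[0.4, 0.5, 0.6]"),
    ]
    # with self.connection_config.get_cursor() as cursor:
    #     cursor.executemany(stmt, rows)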
@@ -64,6 +64,12 @@ def parse_date_string(date_value: Union[str, int]) -> datetime:
         return datetime.fromtimestamp(timestamp)
     except Exception as e:
         logger.debug(f"date {date_value} string not a timestamp: {e}")
+
+    if isinstance(date_value, str):
+        try:
+            return datetime.fromisoformat(date_value)
+        except Exception:
+            pass
     return parser.parse(date_value)


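With this change parse_date_string tries, in order: interpreting the value as a numeric timestamp, strict ISO 8601 via datetime.fromisoformat for strings, and finally dateutil's permissive parser. Illustrative inputs and the branch that handles each:

    parse_date_string(1714567800)                   # numeric timestamp: datetime.fromtimestamp
    parse_date_string("2024-05-01T12:30:00+00:00")  # ISO 8601 string: datetime.fromisoformat
    parse_date_string("01 May 2024 12:30")          # anything else: dateutil.parser.parse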
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: unstructured_ingest
-Version: 1.2.13
+Version: 1.2.18
 Summary: Local ETL data pipeline to get data RAG ready
 Author-email: Unstructured Technologies <devops@unstructuredai.io>
 License-Expression: Apache-2.0
@@ -1,5 +1,5 @@
 unstructured_ingest/__init__.py,sha256=U4S_2y3zgLZVfMenHRaJFBW8yqh2mUBuI291LGQVOJ8,35
-unstructured_ingest/__version__.py,sha256=8OCyiWcSFAmgmIe4xxt0uHx-rAWATFP54TzUIE37hxE,43
+unstructured_ingest/__version__.py,sha256=2oXTzaWwDAqSzco9vnQ5X3trGI9q_sP5tVMO58snQ_4,43
 unstructured_ingest/error.py,sha256=chM7zQSTKjaKaQt_2_QkoZDUwY5XPNeACML7JqOWRLY,4036
 unstructured_ingest/errors_v2.py,sha256=chM7zQSTKjaKaQt_2_QkoZDUwY5XPNeACML7JqOWRLY,4036
 unstructured_ingest/logger.py,sha256=7e_7UeK6hVOd5BQ6i9NzRUAPCS_DF839Y8TjUDywraY,1428
@@ -23,7 +23,7 @@ unstructured_ingest/data_types/entities.py,sha256=ECc6EkZ5_ZUvK7uaALYOynfFmofIrH
 unstructured_ingest/data_types/file_data.py,sha256=J0RQa7YXhhxiLVzhPbF5Hl2nzSpxLFK9vrP6RTBWlSg,3833
 unstructured_ingest/embed/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 unstructured_ingest/embed/azure_openai.py,sha256=Q_buBkAcx9FBuTsAqKbRU8vd9vDh8JoDOEth4fFxHbg,2160
-unstructured_ingest/embed/bedrock.py,sha256=BnlKOuL1e4FfR4nV_Ro3A26Fqj3Pq-ZBaSNAgIzUQH0,10955
+unstructured_ingest/embed/bedrock.py,sha256=pUt0VctaxYMkCsyoYoZ-OAiVAM9LmyQN0mp5D5dU-ao,11867
 unstructured_ingest/embed/huggingface.py,sha256=6Gx9L3xa3cv9fX4AMuLsePJQF4T_jwkKjovfqF5X1NM,2435
 unstructured_ingest/embed/interfaces.py,sha256=VCrCSJiEfIxKB4NL4AHgKb-0vB_SEekb47zMUW6gWf0,5211
 unstructured_ingest/embed/mixedbreadai.py,sha256=uKTqzoi4M_WeYZu-qc_TSxwJONOESzxVbBLUbD1Wbns,3922
@@ -72,11 +72,11 @@ unstructured_ingest/processes/connectors/delta_table.py,sha256=SWf-ekUzwzU3TuLHp
 unstructured_ingest/processes/connectors/discord.py,sha256=Ou6b4WKhHqIb1NZBlvgN_4m5zv6OEMHyFE3j_YSqQaY,5333
 unstructured_ingest/processes/connectors/github.py,sha256=-zb9CZbMvbQdXcy7PYWbG-l_-MObxhk1OARGeWkl7FI,7867
 unstructured_ingest/processes/connectors/gitlab.py,sha256=3bFyn8chtAWkrDIXwlVhcZUKuwZLPHBtE7Dq5WTuTt4,10126
-unstructured_ingest/processes/connectors/google_drive.py,sha256=O6RtOQH9dHBUtwKsa9BiVQjBDvcSjwvJsp8cmaJXYSs,35325
+unstructured_ingest/processes/connectors/google_drive.py,sha256=0Ya7TrYOxZsOzAY_xHpwPUNjzskSahnaMGYfEn33VAA,35396
 unstructured_ingest/processes/connectors/jira.py,sha256=5BZeYHoWKzcwZKUbJDFsdCUcSHbLDZTErOdNbteDwI0,20290
 unstructured_ingest/processes/connectors/kdbai.py,sha256=XhxYpKSAoFPBsDQWwNuLX03DCxOVr7yquj9VYM55Rtc,5174
 unstructured_ingest/processes/connectors/local.py,sha256=Tsp9d9YSx2zPh4yl2U--P6cMIQSKMzsFAGyNXXtdS-4,7529
-unstructured_ingest/processes/connectors/milvus.py,sha256=Bkt4u1zzrKqpO0CZbmuFfbtd824ws5XouiTAnc4I4BM,12102
+unstructured_ingest/processes/connectors/milvus.py,sha256=YZaMHGD79tDX5-UpyZR1TFrL460D-JctMdNh-zpSU2o,12301
 unstructured_ingest/processes/connectors/mongodb.py,sha256=zhGWnEJYZnKzjuElyYAEJUT3M7J5m0e48TpVPdiKsBA,15412
 unstructured_ingest/processes/connectors/neo4j.py,sha256=jmnxQmi8EjS22mFKfcdXajZrxoKEkrzHRtrP6QeTuFI,20353
 unstructured_ingest/processes/connectors/onedrive.py,sha256=qhIeFWotFuIxt1Ehg-6IEWXaDu4p-Zhy0u14CfDcnZo,20142
@@ -92,7 +92,7 @@ unstructured_ingest/processes/connectors/assets/__init__.py,sha256=47DEQpj8HBSa-
 unstructured_ingest/processes/connectors/assets/databricks_delta_table_schema.sql,sha256=8a9HTcRWA6IuswSD632b_uZSO6Dax_0rUYnflqktcek,226
 unstructured_ingest/processes/connectors/assets/weaviate_collection_config.json,sha256=SJlIO0kXxy866tWQ8bEzvwLwflsoUMIS-OKlxMvHIuE,504
 unstructured_ingest/processes/connectors/databricks/__init__.py,sha256=RtKAPyNtXh6fzEsOQ08pA0-vC1uMr3KqYG6cqiBoo70,2133
-unstructured_ingest/processes/connectors/databricks/volumes.py,sha256=sX_6TNrcp5T0lGiMk-z58adyMCPIkoHOe1LyHAKZy6I,8369
+unstructured_ingest/processes/connectors/databricks/volumes.py,sha256=agcDtOweLkqSJwg4fdcZwFq8NUvGynTRVr_buOFUpGk,8454
 unstructured_ingest/processes/connectors/databricks/volumes_aws.py,sha256=WhGTp6aRTLSdc4GChCL4mz2b-IanderW8j1IqezX6YA,2958
 unstructured_ingest/processes/connectors/databricks/volumes_azure.py,sha256=pF2d6uAIbwJJUeOIG5xknUMCGc5d9Aztmc2776wp-a0,3740
 unstructured_ingest/processes/connectors/databricks/volumes_gcp.py,sha256=y9AvVl6PtnIxlTlrPj_wyHBDBRJNq3uoTOuZwTryNg8,2994
@@ -115,7 +115,7 @@ unstructured_ingest/processes/connectors/fsspec/s3.py,sha256=Zng-aV_Z0B52CFILAXf
 unstructured_ingest/processes/connectors/fsspec/sftp.py,sha256=pR_a2SgLjt8ffNkariHrPB1E0HVSTj5h3pt7KxTU3TI,6371
 unstructured_ingest/processes/connectors/fsspec/utils.py,sha256=jec_Qfe2hbfahBuY-u8FnvHuv933AI5HwPFjOL3kEEY,456
 unstructured_ingest/processes/connectors/ibm_watsonx/__init__.py,sha256=kf0UpgdAY2KK1R1FbAB6GEBBAIOeYQ8cZIr3bp660qM,374
-unstructured_ingest/processes/connectors/ibm_watsonx/ibm_watsonx_s3.py,sha256=k_c2PtKcaRA6B9ZFXYCk4-2BWxLJnD_Cfjvluk9hKzs,13876
+unstructured_ingest/processes/connectors/ibm_watsonx/ibm_watsonx_s3.py,sha256=TuQPpm9O7_3PZQC1s4S3HzybUWDKUeZDs-V3ZTzqdjA,14171
 unstructured_ingest/processes/connectors/kafka/__init__.py,sha256=pFN2cWwAStiGTAsQ616GIWKi_hDv0s74ZvNqhJEp1Pc,751
 unstructured_ingest/processes/connectors/kafka/cloud.py,sha256=Ki6iOLoZ86tYWdnLnMWYvb2hUCneKqo4mTJcfXh7YoQ,3432
 unstructured_ingest/processes/connectors/kafka/kafka.py,sha256=VI-e7WTzV48mmSwqhlDsNARSzkjauckbJEFvWjuqt7k,10301
@@ -141,7 +141,7 @@ unstructured_ingest/processes/connectors/notion/types/block.py,sha256=pEhy3fFCXS
 unstructured_ingest/processes/connectors/notion/types/database.py,sha256=4VNhpX06dGKmA-COLLzh-sfdSoxQzdUxl1Sk05_B258,2636
 unstructured_ingest/processes/connectors/notion/types/date.py,sha256=VNLs5nTAIGWKxkFUwE13Yoeo5kVQiI-bxUVhjW9SWhE,753
 unstructured_ingest/processes/connectors/notion/types/file.py,sha256=MpEWi7OE0mpA3efq11HJQJTlaVpMMM8cXVE_Pk0m0kg,1315
-unstructured_ingest/processes/connectors/notion/types/page.py,sha256=0fExZsJHXBzaRLwJAKpZwtnfQf_gZ7KnTIbyIyDYC4Q,1471
+unstructured_ingest/processes/connectors/notion/types/page.py,sha256=lfw5fu8UpkmHHv-d4pSTqPbwJt5rnw0eveHf6TFzxmU,1743
 unstructured_ingest/processes/connectors/notion/types/parent.py,sha256=l-EJBKU0HNpDg7p87cATqw0WlUSATD9btyVF7B2A2nI,1706
 unstructured_ingest/processes/connectors/notion/types/rich_text.py,sha256=LPeyFconK_-8Kl3DSLFiCmxwXH3LWthBiYSzj4FAJKY,5483
 unstructured_ingest/processes/connectors/notion/types/user.py,sha256=Bs9hqsMPsfXtMJq1pf-tSgoexVjx__jKdJdfcCyMggM,1964
@@ -206,8 +206,8 @@ unstructured_ingest/processes/connectors/sql/__init__.py,sha256=WNO7jSL1ABw7K5Ix
 unstructured_ingest/processes/connectors/sql/databricks_delta_tables.py,sha256=JFzU84OUIqnKNweH60GbAif6N22KwHWAWOslSGTC62g,9369
 unstructured_ingest/processes/connectors/sql/postgres.py,sha256=kDIL8Cj45EDpKqit1_araRpP4v3cb__QbYqoINg9f2k,5403
 unstructured_ingest/processes/connectors/sql/singlestore.py,sha256=B46lpvyAj1AArpACi9MXbXD1-52zF6Dsj3RJtD1g4r0,5955
-unstructured_ingest/processes/connectors/sql/snowflake.py,sha256=dkGIFz_VIVhew_FjbuO8r3cVluw7VIUdvV6VjkAItP8,11369
-unstructured_ingest/processes/connectors/sql/sql.py,sha256=jIwAck_vFlsMczH7BOyI-iZC_lrLAV-1eqmGtKkPNQc,16170
+unstructured_ingest/processes/connectors/sql/snowflake.py,sha256=zQ6XjLTORkh3bhE9i3u5byB5Myo9UEacUEFHnh3ymIQ,11306
+unstructured_ingest/processes/connectors/sql/sql.py,sha256=GQOlh2onsnJ3_tk-k6t5LWHlgDRCFp1mwH5VO-YDvU4,16317
 unstructured_ingest/processes/connectors/sql/sqlite.py,sha256=2SbwuTlVUztJeuVSEw_--cVP3THJRr2gFySLXF2xkMU,5598
 unstructured_ingest/processes/connectors/sql/vastdb.py,sha256=trhvUBumDmj2rLjmxFBKw9L9wF6ZpssF0wfmRaG97H0,9803
 unstructured_ingest/processes/connectors/weaviate/__init__.py,sha256=1Vnz8hm_Cf3NkQUTz5ZD4QkbLSVql4UvRoY2j2FnC9k,853
@@ -235,8 +235,8 @@ unstructured_ingest/utils/pydantic_models.py,sha256=BT_j15e4rX40wQbt8LUXbqfPhA3r
 unstructured_ingest/utils/string_and_date_utils.py,sha256=oXOI6rxXq-8ncbk7EoJK0WCcTXWj75EzKl8pfQMID3U,2522
 unstructured_ingest/utils/table.py,sha256=WZechczgVFvlodUWFcsnCGvBNh1xRm6hr0VbJTPxKAc,3669
 unstructured_ingest/utils/tls.py,sha256=Ra8Mii1F4VqErRreg76PBI0eAqPBC009l0sSHa8FdnA,448
-unstructured_ingest-1.2.13.dist-info/METADATA,sha256=lbr3UctsNzU6-t4pDfbBUCWJx__RZY_ncYTeIymQ2EY,8827
-unstructured_ingest-1.2.13.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-unstructured_ingest-1.2.13.dist-info/entry_points.txt,sha256=gUAAFnjFPnBgThJSEbw0N5ZjxtaKlT1s9e05_arQrNw,70
-unstructured_ingest-1.2.13.dist-info/licenses/LICENSE.md,sha256=SxkKP_62uIAKb9mb1eH7FH4Kn2aYT09fgjKpJt5PyTk,11360
-unstructured_ingest-1.2.13.dist-info/RECORD,,
+unstructured_ingest-1.2.18.dist-info/METADATA,sha256=EifLrammakugSnn80GasBgwUar3hrk4l-tj6Sd1b-DM,8827
+unstructured_ingest-1.2.18.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+unstructured_ingest-1.2.18.dist-info/entry_points.txt,sha256=gUAAFnjFPnBgThJSEbw0N5ZjxtaKlT1s9e05_arQrNw,70
+unstructured_ingest-1.2.18.dist-info/licenses/LICENSE.md,sha256=SxkKP_62uIAKb9mb1eH7FH4Kn2aYT09fgjKpJt5PyTk,11360
+unstructured_ingest-1.2.18.dist-info/RECORD,,