unstructured-ingest 0.2.2__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release: this version of unstructured-ingest has been flagged as potentially problematic.

Files changed (93)
  1. test/integration/connectors/elasticsearch/__init__.py +0 -0
  2. test/integration/connectors/elasticsearch/conftest.py +34 -0
  3. test/integration/connectors/elasticsearch/test_elasticsearch.py +308 -0
  4. test/integration/connectors/elasticsearch/test_opensearch.py +302 -0
  5. test/integration/connectors/sql/test_postgres.py +10 -4
  6. test/integration/connectors/sql/test_singlestore.py +8 -4
  7. test/integration/connectors/sql/test_snowflake.py +10 -6
  8. test/integration/connectors/sql/test_sqlite.py +4 -4
  9. test/integration/connectors/test_astradb.py +156 -0
  10. test/integration/connectors/test_azure_cog_search.py +233 -0
  11. test/integration/connectors/test_delta_table.py +46 -0
  12. test/integration/connectors/test_kafka.py +150 -16
  13. test/integration/connectors/test_lancedb.py +209 -0
  14. test/integration/connectors/test_milvus.py +141 -0
  15. test/integration/connectors/test_pinecone.py +213 -0
  16. test/integration/connectors/test_s3.py +23 -0
  17. test/integration/connectors/utils/docker.py +81 -15
  18. test/integration/connectors/utils/validation.py +10 -0
  19. test/integration/connectors/weaviate/__init__.py +0 -0
  20. test/integration/connectors/weaviate/conftest.py +15 -0
  21. test/integration/connectors/weaviate/test_local.py +131 -0
  22. test/unit/v2/__init__.py +0 -0
  23. test/unit/v2/chunkers/__init__.py +0 -0
  24. test/unit/v2/chunkers/test_chunkers.py +49 -0
  25. test/unit/v2/connectors/__init__.py +0 -0
  26. test/unit/v2/embedders/__init__.py +0 -0
  27. test/unit/v2/embedders/test_bedrock.py +36 -0
  28. test/unit/v2/embedders/test_huggingface.py +48 -0
  29. test/unit/v2/embedders/test_mixedbread.py +37 -0
  30. test/unit/v2/embedders/test_octoai.py +35 -0
  31. test/unit/v2/embedders/test_openai.py +35 -0
  32. test/unit/v2/embedders/test_togetherai.py +37 -0
  33. test/unit/v2/embedders/test_vertexai.py +37 -0
  34. test/unit/v2/embedders/test_voyageai.py +38 -0
  35. test/unit/v2/partitioners/__init__.py +0 -0
  36. test/unit/v2/partitioners/test_partitioner.py +63 -0
  37. test/unit/v2/utils/__init__.py +0 -0
  38. test/unit/v2/utils/data_generator.py +32 -0
  39. unstructured_ingest/__version__.py +1 -1
  40. unstructured_ingest/cli/cmds/__init__.py +2 -2
  41. unstructured_ingest/cli/cmds/{azure_cognitive_search.py → azure_ai_search.py} +9 -9
  42. unstructured_ingest/connector/{azure_cognitive_search.py → azure_ai_search.py} +9 -9
  43. unstructured_ingest/pipeline/reformat/embedding.py +1 -1
  44. unstructured_ingest/runner/writers/__init__.py +2 -2
  45. unstructured_ingest/runner/writers/azure_ai_search.py +24 -0
  46. unstructured_ingest/utils/data_prep.py +9 -1
  47. unstructured_ingest/v2/constants.py +2 -0
  48. unstructured_ingest/v2/processes/connectors/__init__.py +7 -20
  49. unstructured_ingest/v2/processes/connectors/airtable.py +2 -2
  50. unstructured_ingest/v2/processes/connectors/astradb.py +35 -23
  51. unstructured_ingest/v2/processes/connectors/{azure_cognitive_search.py → azure_ai_search.py} +116 -35
  52. unstructured_ingest/v2/processes/connectors/confluence.py +2 -2
  53. unstructured_ingest/v2/processes/connectors/couchbase.py +1 -0
  54. unstructured_ingest/v2/processes/connectors/delta_table.py +37 -9
  55. unstructured_ingest/v2/processes/connectors/elasticsearch/__init__.py +19 -0
  56. unstructured_ingest/v2/processes/connectors/{elasticsearch.py → elasticsearch/elasticsearch.py} +93 -46
  57. unstructured_ingest/v2/processes/connectors/{opensearch.py → elasticsearch/opensearch.py} +1 -1
  58. unstructured_ingest/v2/processes/connectors/fsspec/fsspec.py +27 -0
  59. unstructured_ingest/v2/processes/connectors/google_drive.py +3 -3
  60. unstructured_ingest/v2/processes/connectors/kafka/__init__.py +6 -2
  61. unstructured_ingest/v2/processes/connectors/kafka/cloud.py +38 -2
  62. unstructured_ingest/v2/processes/connectors/kafka/kafka.py +84 -23
  63. unstructured_ingest/v2/processes/connectors/kafka/local.py +32 -4
  64. unstructured_ingest/v2/processes/connectors/lancedb/__init__.py +17 -0
  65. unstructured_ingest/v2/processes/connectors/lancedb/aws.py +43 -0
  66. unstructured_ingest/v2/processes/connectors/lancedb/azure.py +43 -0
  67. unstructured_ingest/v2/processes/connectors/lancedb/gcp.py +44 -0
  68. unstructured_ingest/v2/processes/connectors/lancedb/lancedb.py +161 -0
  69. unstructured_ingest/v2/processes/connectors/lancedb/local.py +44 -0
  70. unstructured_ingest/v2/processes/connectors/milvus.py +72 -27
  71. unstructured_ingest/v2/processes/connectors/onedrive.py +2 -3
  72. unstructured_ingest/v2/processes/connectors/outlook.py +2 -2
  73. unstructured_ingest/v2/processes/connectors/pinecone.py +101 -13
  74. unstructured_ingest/v2/processes/connectors/sharepoint.py +3 -2
  75. unstructured_ingest/v2/processes/connectors/slack.py +2 -2
  76. unstructured_ingest/v2/processes/connectors/sql/postgres.py +16 -8
  77. unstructured_ingest/v2/processes/connectors/sql/sql.py +97 -26
  78. unstructured_ingest/v2/processes/connectors/weaviate/__init__.py +22 -0
  79. unstructured_ingest/v2/processes/connectors/weaviate/cloud.py +164 -0
  80. unstructured_ingest/v2/processes/connectors/weaviate/embedded.py +90 -0
  81. unstructured_ingest/v2/processes/connectors/weaviate/local.py +73 -0
  82. unstructured_ingest/v2/processes/connectors/weaviate/weaviate.py +289 -0
  83. {unstructured_ingest-0.2.2.dist-info → unstructured_ingest-0.3.1.dist-info}/METADATA +20 -19
  84. {unstructured_ingest-0.2.2.dist-info → unstructured_ingest-0.3.1.dist-info}/RECORD +91 -50
  85. unstructured_ingest/runner/writers/azure_cognitive_search.py +0 -24
  86. unstructured_ingest/v2/processes/connectors/weaviate.py +0 -242
  87. /test/integration/embedders/{togetherai.py → test_togetherai.py} +0 -0
  88. /test/unit/{test_interfaces_v2.py → v2/test_interfaces.py} +0 -0
  89. /test/unit/{test_utils_v2.py → v2/test_utils.py} +0 -0
  90. {unstructured_ingest-0.2.2.dist-info → unstructured_ingest-0.3.1.dist-info}/LICENSE.md +0 -0
  91. {unstructured_ingest-0.2.2.dist-info → unstructured_ingest-0.3.1.dist-info}/WHEEL +0 -0
  92. {unstructured_ingest-0.2.2.dist-info → unstructured_ingest-0.3.1.dist-info}/entry_points.txt +0 -0
  93. {unstructured_ingest-0.2.2.dist-info → unstructured_ingest-0.3.1.dist-info}/top_level.txt +0 -0
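The most sweeping change in the list above is the rename of the Azure Cognitive Search connector to Azure AI Search across the CLI, v1, and v2 code paths (entries 41, 42, 45, 51, and 85). A minimal caller-side compatibility shim, sketched under the assumption that only the module paths changed; the try/except pattern itself is illustrative and not part of this release:

# Hypothetical shim for the 0.2.2 -> 0.3.1 rename; only the module
# paths below are confirmed by the file list above.
try:
    # New module path in 0.3.1
    from unstructured_ingest.v2.processes.connectors import azure_ai_search
except ImportError:
    # Old module path in 0.2.2 and earlier
    from unstructured_ingest.v2.processes.connectors import (
        azure_cognitive_search as azure_ai_search,
    )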
test/integration/connectors/test_pinecone.py
@@ -0,0 +1,213 @@
+ import json
+ import os
+ import re
+ import time
+ from pathlib import Path
+ from typing import Generator
+ from uuid import uuid4
+
+ import pytest
+ from pinecone import Pinecone, ServerlessSpec
+ from pinecone.core.openapi.shared.exceptions import NotFoundException
+
+ from test.integration.connectors.utils.constants import (
+     DESTINATION_TAG,
+ )
+ from test.integration.utils import requires_env
+ from unstructured_ingest.error import DestinationConnectionError
+ from unstructured_ingest.v2.interfaces import FileData, SourceIdentifiers
+ from unstructured_ingest.v2.logger import logger
+ from unstructured_ingest.v2.processes.connectors.pinecone import (
+     CONNECTOR_TYPE,
+     PineconeAccessConfig,
+     PineconeConnectionConfig,
+     PineconeUploader,
+     PineconeUploaderConfig,
+     PineconeUploadStager,
+     PineconeUploadStagerConfig,
+ )
+
+ METADATA_BYTES_LIMIT = (
+     40960  # 40KB https://docs.pinecone.io/reference/quotas-and-limits#hard-limits
+ )
+ VECTOR_DIMENSION = 384
+ SPEC = {"serverless": {"cloud": "aws", "region": "us-east-1"}}
+ ALLOWED_METADATA_FIELD = "text"
+ API_KEY = "PINECONE_API_KEY"
+
+
+ def get_api_key() -> str:
+     api_key = os.getenv(API_KEY, None)
+     assert api_key
+     return api_key
+
+
+ def wait_for_delete(client: Pinecone, index_name: str, timeout=60, interval=1) -> None:
+     start = time.time()
+     while True and time.time() - start < timeout:
+         try:
+             description = client.describe_index(name=index_name)
+             logger.info(f"current index status: {description}")
+         except NotFoundException:
+             return
+         time.sleep(interval)
+
+     raise TimeoutError("time out waiting for index to delete")
+
+
+ def wait_for_ready(client: Pinecone, index_name: str, timeout=60, interval=1) -> None:
+     def is_ready_status():
+         description = client.describe_index(name=index_name)
+         status = description["status"]
+         return status["ready"]
+
+     start = time.time()
+     is_ready = is_ready_status()
+     while not is_ready and time.time() - start < timeout:
+         time.sleep(interval)
+         is_ready = is_ready_status()
+     if not is_ready:
+         raise TimeoutError("time out waiting for index to be ready")
+
+
+ @pytest.fixture
+ def pinecone_index() -> Generator[str, None, None]:
+     pinecone = Pinecone(api_key=get_api_key())
+     random_id = str(uuid4()).split("-")[0]
+     index_name = f"ingest-test-{random_id}"
+     assert len(index_name) < 45
+     logger.info(f"Creating index: {index_name}")
+     try:
+         pinecone.create_index(
+             name=index_name,
+             dimension=384,
+             metric="cosine",
+             spec=ServerlessSpec(
+                 cloud="aws",
+                 region="us-east-1",
+             ),
+             deletion_protection="disabled",
+         )
+         wait_for_ready(client=pinecone, index_name=index_name)
+         yield index_name
+     except Exception as e:
+         logger.error(f"failed to create index {index_name}: {e}")
+     finally:
+         try:
+             logger.info(f"deleting index: {index_name}")
+             pinecone.delete_index(name=index_name)
+             wait_for_delete(client=pinecone, index_name=index_name)
+         except NotFoundException:
+             return
+
+
+ def validate_pinecone_index(
+     index_name: str, expected_num_of_vectors: int, retries=30, interval=1
+ ) -> None:
+     # Because there's a delay for the index to catch up to the recent writes, add in a retry
+     pinecone = Pinecone(api_key=get_api_key())
+     index = pinecone.Index(name=index_name)
+     vector_count = -1
+     for i in range(retries):
+         index_stats = index.describe_index_stats()
+         vector_count = index_stats["total_vector_count"]
+         if vector_count == expected_num_of_vectors:
+             logger.info(f"expected {expected_num_of_vectors} == vector count {vector_count}")
+             break
+         logger.info(
+             f"retry attempt {i}: expected {expected_num_of_vectors} != vector count {vector_count}"
+         )
+         time.sleep(interval)
+     assert vector_count == expected_num_of_vectors
+
+
+ @requires_env(API_KEY)
+ @pytest.mark.asyncio
+ @pytest.mark.tags(CONNECTOR_TYPE, DESTINATION_TAG)
+ async def test_pinecone_destination(pinecone_index: str, upload_file: Path, temp_dir: Path):
+     file_data = FileData(
+         source_identifiers=SourceIdentifiers(fullpath=upload_file.name, filename=upload_file.name),
+         connector_type=CONNECTOR_TYPE,
+         identifier="pinecone_mock_id",
+     )
+     connection_config = PineconeConnectionConfig(
+         index_name=pinecone_index,
+         access_config=PineconeAccessConfig(api_key=get_api_key()),
+     )
+     stager_config = PineconeUploadStagerConfig()
+     stager = PineconeUploadStager(upload_stager_config=stager_config)
+     new_upload_file = stager.run(
+         elements_filepath=upload_file,
+         output_dir=temp_dir,
+         output_filename=upload_file.name,
+         file_data=file_data,
+     )
+
+     upload_config = PineconeUploaderConfig()
+     uploader = PineconeUploader(connection_config=connection_config, upload_config=upload_config)
+     uploader.precheck()
+
+     if uploader.is_async():
+         await uploader.run_async(path=new_upload_file, file_data=file_data)
+     else:
+         uploader.run(path=new_upload_file, file_data=file_data)
+     with new_upload_file.open() as f:
+         staged_content = json.load(f)
+     expected_num_of_vectors = len(staged_content)
+     logger.info("validating first upload")
+     validate_pinecone_index(
+         index_name=pinecone_index, expected_num_of_vectors=expected_num_of_vectors
+     )
+
+     # Rerun uploader and make sure no duplicates exist
+     if uploader.is_async():
+         await uploader.run_async(path=new_upload_file, file_data=file_data)
+     else:
+         uploader.run(path=new_upload_file, file_data=file_data)
+     logger.info("validating second upload")
+     validate_pinecone_index(
+         index_name=pinecone_index, expected_num_of_vectors=expected_num_of_vectors
+     )
+
+
+ @requires_env(API_KEY)
+ @pytest.mark.tags(CONNECTOR_TYPE, DESTINATION_TAG)
+ def test_large_metadata(pinecone_index: str, tmp_path: Path, upload_file: Path):
+     stager = PineconeUploadStager()
+     uploader = PineconeUploader(
+         connection_config=PineconeConnectionConfig(
+             access_config=PineconeAccessConfig(api_key=get_api_key()),
+             index_name=pinecone_index,
+         ),
+         upload_config=PineconeUploaderConfig(),
+     )
+     large_metadata_upload_file = tmp_path / "mock-upload-file.pdf.json"
+     large_metadata = {ALLOWED_METADATA_FIELD: "0" * 2 * METADATA_BYTES_LIMIT}
+
+     with open(upload_file) as file:
+         elements = json.load(file)
+
+     with open(large_metadata_upload_file, "w") as file:
+         mock_element = elements[0]
+         mock_element["metadata"] = large_metadata
+         json.dump([mock_element], file)
+
+     file_data = FileData(
+         source_identifiers=SourceIdentifiers(
+             fullpath=large_metadata_upload_file.name, filename=large_metadata_upload_file.name
+         ),
+         connector_type=CONNECTOR_TYPE,
+         identifier="mock-file-data",
+     )
+     staged_file = stager.run(
+         file_data, large_metadata_upload_file, tmp_path, large_metadata_upload_file.name
+     )
+     try:
+         uploader.run(staged_file, file_data)
+     except DestinationConnectionError as e:
+         error_line = r"Metadata size is \d+ bytes, which exceeds the limit of \d+ bytes per vector"
+         if re.search(re.compile(error_line), str(e)) is None:
+             raise e
+         raise pytest.fail("Upload request failed due to metadata exceeding limits.")
+
+     validate_pinecone_index(pinecone_index, 1, interval=5)
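The test_large_metadata case above leans on Pinecone's 40,960-byte per-vector metadata cap (METADATA_BYTES_LIMIT). A quick standalone check of the arithmetic, using only the constants from the test itself; no Pinecone calls, purely illustrative:

import json

METADATA_BYTES_LIMIT = 40960  # Pinecone's per-vector metadata hard limit

# The test writes 2x the limit into the single allowed field, so the
# serialized payload exceeds the cap regardless of JSON overhead.
metadata = {"text": "0" * 2 * METADATA_BYTES_LIMIT}
size = len(json.dumps(metadata).encode("utf-8"))
assert size > METADATA_BYTES_LIMIT  # roughly 82KB against the 40KB cap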
test/integration/connectors/test_s3.py
@@ -71,6 +71,29 @@ async def test_s3_source(anon_connection_config: S3ConnectionConfig):
          )


+ @pytest.mark.asyncio
+ @pytest.mark.tags(CONNECTOR_TYPE, SOURCE_TAG)
+ async def test_s3_source_special_char(anon_connection_config: S3ConnectionConfig):
+     indexer_config = S3IndexerConfig(remote_url="s3://utic-dev-tech-fixtures/special-characters/")
+     with tempfile.TemporaryDirectory() as tempdir:
+         tempdir_path = Path(tempdir)
+         download_config = S3DownloaderConfig(download_dir=tempdir_path)
+         indexer = S3Indexer(connection_config=anon_connection_config, index_config=indexer_config)
+         downloader = S3Downloader(
+             connection_config=anon_connection_config, download_config=download_config
+         )
+         await source_connector_validation(
+             indexer=indexer,
+             downloader=downloader,
+             configs=ValidationConfigs(
+                 test_id="s3-specialchar",
+                 predownload_file_data_check=validate_predownload_file_data,
+                 postdownload_file_data_check=validate_postdownload_file_data,
+                 expected_num_files=1,
+             ),
+         )
+
+
  @pytest.mark.asyncio
  @pytest.mark.tags(CONNECTOR_TYPE, SOURCE_TAG)
  async def test_s3_source_no_access(anon_connection_config: S3ConnectionConfig):
test/integration/connectors/utils/docker.py
@@ -1,9 +1,43 @@
  import time
  from contextlib import contextmanager
- from typing import Optional
+ from typing import Optional, Union

  import docker
  from docker.models.containers import Container
+ from pydantic import BaseModel, Field, field_serializer
+
+
+ class HealthCheck(BaseModel):
+     test: Union[str, list[str]]
+     interval: int = Field(
+         gt=0, default=30, description="The time to wait between checks in seconds."
+     )
+     timeout: int = Field(
+         gt=0, default=30, description="The time to wait before considering the check to have hung."
+     )
+     retries: int = Field(
+         gt=0,
+         default=3,
+         description="The number of consecutive failures needed "
+         "to consider a container as unhealthy.",
+     )
+     start_period: int = Field(
+         gt=0,
+         default=0,
+         description="Start period for the container to initialize before starting health-retries countdown in seconds.",  # noqa: E501
+     )
+
+     @field_serializer("interval")
+     def serialize_interval(self, interval: int) -> int:
+         return int(interval * 10e8)
+
+     @field_serializer("timeout")
+     def serialize_timeout(self, timeout: int) -> int:
+         return int(timeout * 10e8)
+
+     @field_serializer("start_period")
+     def serialize_start_period(self, start_period: int) -> int:
+         return int(start_period * 10e8)


  def get_container(
@@ -12,7 +46,7 @@ def get_container(
      ports: dict,
      environment: Optional[dict] = None,
      volumes: Optional[dict] = None,
-     healthcheck: Optional[dict] = None,
+     healthcheck: Optional[HealthCheck] = None,
  ) -> Container:
      run_kwargs = {
          "image": image,
@@ -24,25 +58,49 @@ def get_container(
      if volumes:
          run_kwargs["volumes"] = volumes
      if healthcheck:
-         run_kwargs["healthcheck"] = healthcheck
+         run_kwargs["healthcheck"] = healthcheck.model_dump()
      container: Container = docker_client.containers.run(**run_kwargs)
      return container


- def has_healthcheck(container: Container) -> bool:
-     return container.attrs.get("Config", {}).get("Healthcheck", None) is not None
+ def get_healthcheck(container: Container) -> Optional[HealthCheck]:
+     healthcheck_config = container.attrs.get("Config", {}).get("Healthcheck", None)
+     if not healthcheck_config:
+         return None
+     healthcheck_data = {
+         "test": healthcheck_config["Test"],
+     }
+     if interval := healthcheck_config.get("Interval"):
+         healthcheck_data["interval"] = interval / 10e8
+     if start_period := healthcheck_config.get("StartPeriod"):
+         healthcheck_data["start_period"] = start_period / 10e8
+     if retries := healthcheck_config.get("Retries"):
+         healthcheck_data["retries"] = retries
+     return HealthCheck.model_validate(healthcheck_data)


- def healthcheck_wait(container: Container, timeout: int = 10) -> None:
+ def healthcheck_wait(
+     container: Container, retries: int = 30, interval: int = 1, start_period: Optional[int] = None
+ ) -> None:
+     if start_period:
+         time.sleep(start_period)
      health = container.health
-     start = time.time()
-     while health != "healthy" and time.time() - start < timeout:
-         time.sleep(1)
+     tries = 0
+     while health != "healthy" and tries < retries:
+         tries += 1
+         logs = container.attrs.get("State", {}).get("Health", {}).get("Log")
+         latest_log = logs[-1] if logs else None
+         print(
+             f"attempt {tries} - waiting for docker container "
+             f"to be healthy: {health} latest log: {latest_log}"
+         )
+         time.sleep(interval)
          container.reload()
          health = container.health
      if health != "healthy":
-         health_dict = container.attrs.get("State", {}).get("Health", {})
-         raise TimeoutError(f"Docker container never came up healthy: {health_dict}")
+         logs = container.attrs.get("State", {}).get("Health", {}).get("Log")
+         latest_log = logs[-1] if logs else None
+         raise TimeoutError(f"Docker container never came up healthy: {latest_log}")


  @contextmanager
@@ -51,11 +109,13 @@ def container_context(
      ports: dict,
      environment: Optional[dict] = None,
      volumes: Optional[dict] = None,
-     healthcheck: Optional[dict] = None,
-     healthcheck_timeout: int = 10,
+     healthcheck: Optional[HealthCheck] = None,
+     healthcheck_retries: int = 30,
      docker_client: Optional[docker.DockerClient] = None,
  ):
      docker_client = docker_client or docker.from_env()
+     print(f"pulling image {image}")
+     docker_client.images.pull(image)
      container: Optional[Container] = None
      try:
          container = get_container(
@@ -66,8 +126,14 @@ def container_context(
              volumes=volumes,
              healthcheck=healthcheck,
          )
-         if has_healthcheck(container):
-             healthcheck_wait(container=container, timeout=healthcheck_timeout)
+         if healthcheck_data := get_healthcheck(container):
+             # Mirror whatever healthcheck config set on container
+             healthcheck_wait(
+                 container=container,
+                 retries=healthcheck_retries,
+                 start_period=healthcheck_data.start_period,
+                 interval=healthcheck_data.interval,
+             )
          yield container
      except AssertionError as e:
          if container:
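Taken together, the docker.py changes replace the raw healthcheck dict with a typed pydantic HealthCheck model whose field serializers convert seconds into the nanoseconds the Docker API expects (10e8 is 1e9, i.e. nanoseconds per second), and container_context now pulls the image up front and mirrors the container's own healthcheck settings while polling. A usage sketch under assumed values; the Postgres image, check command, and ports are illustrative, not taken from this diff:

from test.integration.connectors.utils.docker import HealthCheck, container_context

# Seconds in, nanoseconds out: the field serializers convert interval,
# timeout, and start_period when the model is dumped for the Docker API.
healthcheck = HealthCheck(
    test=["CMD-SHELL", "pg_isready -U postgres"],  # illustrative check command
    interval=2,
    timeout=5,
    retries=5,
)

with container_context(
    image="postgres:16",  # illustrative image
    ports={5432: 5432},
    environment={"POSTGRES_PASSWORD": "test"},
    healthcheck=healthcheck,
    healthcheck_retries=30,
) as container:
    # container_context only yields once healthcheck_wait has seen a
    # "healthy" status (or raises TimeoutError after the retries run out).
    print(container.status)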
test/integration/connectors/utils/validation.py
@@ -240,6 +240,10 @@ def update_fixtures(
      # Rewrite the current file data
      if save_filedata:
          file_data_output_path = output_dir / "file_data"
+         print(
+             f"Writing {len(all_file_data)} file data to "
+             f"saved fixture location {file_data_output_path}"
+         )
          file_data_output_path.mkdir(parents=True, exist_ok=True)
          for file_data in all_file_data:
              file_data_path = file_data_output_path / f"{file_data.identifier}.json"
@@ -256,6 +260,10 @@ def update_fixtures(
      # If applicable, save raw downloads
      if save_downloads:
          raw_download_output_path = output_dir / "downloads"
+         print(
+             f"Writing {len(download_files)} downloaded files to "
+             f"saved fixture location {raw_download_output_path}"
+         )
          shutil.copytree(download_dir, raw_download_output_path)


@@ -328,6 +336,7 @@ async def source_connector_validation(
          postdownload_file_data = replace(resp["file_data"])
          all_postdownload_file_data.append(postdownload_file_data)
      if not overwrite_fixtures:
+         print("Running validation")
          run_all_validations(
              configs=configs,
              predownload_file_data=all_predownload_file_data,
@@ -336,6 +345,7 @@ async def source_connector_validation(
              test_output_dir=test_output_dir,
          )
      else:
+         print("Running fixtures update")
          update_fixtures(
              output_dir=test_output_dir,
              download_dir=download_dir,
test/integration/connectors/weaviate/__init__.py (file without changes)
test/integration/connectors/weaviate/conftest.py
@@ -0,0 +1,15 @@
+ import json
+ from pathlib import Path
+
+ import pytest
+
+
+ @pytest.fixture
+ def collections_schema_config() -> dict:
+     int_test_dir = Path(__file__).parent
+     assets_dir = int_test_dir / "assets"
+     config_file = assets_dir / "elements.json"
+     assert config_file.exists()
+     assert config_file.is_file()
+     with config_file.open() as config_data:
+         return json.load(config_data)
test/integration/connectors/weaviate/test_local.py
@@ -0,0 +1,131 @@
+ import json
+ import time
+ from pathlib import Path
+
+ import pytest
+ import requests
+ import weaviate
+ from weaviate.client import WeaviateClient
+
+ from test.integration.connectors.utils.constants import DESTINATION_TAG
+ from test.integration.connectors.utils.docker import container_context
+ from unstructured_ingest.v2.interfaces import FileData, SourceIdentifiers
+ from unstructured_ingest.v2.processes.connectors.weaviate.local import (
+     CONNECTOR_TYPE,
+     LocalWeaviateConnectionConfig,
+     LocalWeaviateUploader,
+     LocalWeaviateUploaderConfig,
+     LocalWeaviateUploadStager,
+ )
+
+ COLLECTION_NAME = "elements"
+
+
+ def wait_for_container(timeout: int = 10, interval: int = 1) -> None:
+     start_time = time.time()
+     while time.time() - start_time < timeout:
+         try:
+             requests.get("http://localhost:8080/v1/.well-known/read")
+             return
+         except Exception as e:
+             print(f"Failed to validate container healthy, sleeping for {interval} seconds: {e}")
+             time.sleep(interval)
+     raise TimeoutError("Docker container never came up healthy")
+
+
+ @pytest.fixture
+ def collection(collections_schema_config: dict) -> str:
+     with container_context(
+         image="semitechnologies/weaviate:1.27.3",
+         ports={8080: 8080, 50051: 50051},
+     ):
+         wait_for_container()
+         with weaviate.connect_to_local() as weaviate_client:
+             weaviate_client.collections.create_from_dict(config=collections_schema_config)
+         yield COLLECTION_NAME
+
+
+ def get_count(client: WeaviateClient) -> int:
+     collection = client.collections.get(COLLECTION_NAME)
+     resp = collection.aggregate.over_all(total_count=True)
+     return resp.total_count
+
+
+ def validate_count(expected_count: int, retries: int = 10, interval: int = 1) -> None:
+     with weaviate.connect_to_local() as weaviate_client:
+         current_count = get_count(client=weaviate_client)
+         retry_count = 0
+         while current_count != expected_count and retry_count < retries:
+             retry_count += 1
+             time.sleep(interval)
+             current_count = get_count(client=weaviate_client)
+         assert current_count == expected_count, (
+             f"Expected count ({expected_count}) doesn't match how "
+             f"much came back from collection: {current_count}"
+         )
+
+
+ def run_uploader_and_validate(
+     uploader: LocalWeaviateUploader, path: Path, file_data: FileData, expected_count: int
+ ):
+     uploader.precheck()
+     uploader.run(path=path, file_data=file_data)
+     validate_count(expected_count=expected_count)
+
+
+ @pytest.mark.asyncio
+ @pytest.mark.tags(CONNECTOR_TYPE, DESTINATION_TAG)
+ def test_weaviate_local_destination(upload_file: Path, collection: str, tmp_path: Path):
+     file_data = FileData(
+         source_identifiers=SourceIdentifiers(fullpath=upload_file.name, filename=upload_file.name),
+         connector_type=CONNECTOR_TYPE,
+         identifier="mock file data",
+     )
+     stager = LocalWeaviateUploadStager()
+
+     staged_filepath = stager.run(
+         elements_filepath=upload_file,
+         file_data=file_data,
+         output_dir=tmp_path,
+         output_filename=upload_file.name,
+     )
+     dynamic_uploader = LocalWeaviateUploader(
+         upload_config=LocalWeaviateUploaderConfig(
+             collection=COLLECTION_NAME,
+         ),
+         connection_config=LocalWeaviateConnectionConfig(),
+     )
+     fixed_size_uploader = LocalWeaviateUploader(
+         upload_config=LocalWeaviateUploaderConfig(
+             collection=COLLECTION_NAME, batch_size=10, dynamic_batch=False
+         ),
+         connection_config=LocalWeaviateConnectionConfig(),
+     )
+     rate_limited_uploader = LocalWeaviateUploader(
+         upload_config=LocalWeaviateUploaderConfig(
+             collection=COLLECTION_NAME, requests_per_minute=50, dynamic_batch=False
+         ),
+         connection_config=LocalWeaviateConnectionConfig(),
+     )
+     with staged_filepath.open() as f:
+         staged_elements = json.load(f)
+     expected_count = len(staged_elements)
+
+     run_uploader_and_validate(
+         uploader=dynamic_uploader,
+         path=staged_filepath,
+         file_data=file_data,
+         expected_count=expected_count,
+     )
+     run_uploader_and_validate(
+         uploader=fixed_size_uploader,
+         path=staged_filepath,
+         file_data=file_data,
+         expected_count=expected_count,
+     )
+     run_uploader_and_validate(
+         uploader=rate_limited_uploader,
+         path=staged_filepath,
+         file_data=file_data,
+         expected_count=expected_count,
+     )
test/unit/v2/__init__.py (file without changes)
test/unit/v2/chunkers/__init__.py (file without changes)
test/unit/v2/chunkers/test_chunkers.py
@@ -0,0 +1,49 @@
+ import random
+
+ import faker
+ import pytest
+
+ from unstructured_ingest.v2.processes.chunker import Chunker, ChunkerConfig
+
+ fake = faker.Faker()
+
+
+ def generate_chunker_config_params() -> dict:
+     params = {}
+     random_val = random.random()
+     if random_val < 0.5:
+         params["chunking_strategy"] = fake.word() if random.random() < 0.5 else None
+         params["chunk_combine_text_under_n_chars"] = (
+             fake.random_int() if random.random() < 0.5 else None
+         )
+         params["chunk_include_orig_elements"] = fake.boolean() if random.random() < 0.5 else None
+         params["chunk_max_characters"] = fake.random_int()
+         params["chunk_multipage_sections"] = fake.boolean()
+         params["chunk_new_after_n_chars"] = fake.random_int() if random.random() < 0.5 else None
+         params["chunk_overlap"] = fake.random_int() if random.random() < 0.5 else None
+         params["chunk_overlap_all"] = fake.boolean() if random.random() < 0.5 else None
+     if random_val < 0.5:
+         params["chunk_by_api"] = True
+         params["chunking_endpoint"] = fake.url()
+         params["chunk_api_key"] = fake.password()
+     else:
+         params["chunk_by_api"] = False
+
+     return params
+
+
+ @pytest.mark.parametrize(
+     "partition_config_params", [generate_chunker_config_params() for i in range(10)]
+ )
+ def test_chunker_config(partition_config_params: dict):
+     chunker_config = ChunkerConfig.model_validate(partition_config_params)
+     assert chunker_config
+
+
+ @pytest.mark.parametrize(
+     "partition_config_params", [generate_chunker_config_params() for i in range(10)]
+ )
+ def test_chunker(partition_config_params: dict):
+     chunker_config = ChunkerConfig.model_validate(partition_config_params)
+     chunker = Chunker(config=chunker_config)
+     assert chunker
test/unit/v2/connectors/__init__.py (file without changes)
test/unit/v2/embedders/__init__.py (file without changes)
test/unit/v2/embedders/test_bedrock.py
@@ -0,0 +1,36 @@
+ import random
+
+ import faker
+ import pytest
+
+ from unstructured_ingest.embed.bedrock import BedrockEmbeddingConfig, BedrockEmbeddingEncoder
+
+ fake = faker.Faker()
+
+
+ def generate_embedder_config_params() -> dict:
+     params = {
+         "aws_access_key_id": fake.password(),
+         "aws_secret_access_key": fake.password(),
+         "region_name": fake.city(),
+     }
+     if random.random() < 0.5:
+         params["embed_model_name"] = fake.word()
+     return params
+
+
+ @pytest.mark.parametrize(
+     "embedder_config_params", [generate_embedder_config_params() for i in range(10)]
+ )
+ def test_embedder_config(embedder_config_params: dict):
+     embedder_config = BedrockEmbeddingConfig.model_validate(embedder_config_params)
+     assert embedder_config
+
+
+ @pytest.mark.parametrize(
+     "embedder_config_params", [generate_embedder_config_params() for i in range(10)]
+ )
+ def test_embedder(embedder_config_params: dict):
+     embedder_config = BedrockEmbeddingConfig.model_validate(embedder_config_params)
+     embedder = BedrockEmbeddingEncoder(config=embedder_config)
+     assert embedder