unstructured-ingest 1.2.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this release of unstructured-ingest has been flagged as potentially problematic.
- unstructured_ingest/__init__.py +1 -0
- unstructured_ingest/__version__.py +1 -0
- unstructured_ingest/cli/README.md +28 -0
- unstructured_ingest/cli/__init__.py +0 -0
- unstructured_ingest/cli/base/__init__.py +4 -0
- unstructured_ingest/cli/base/cmd.py +269 -0
- unstructured_ingest/cli/base/dest.py +84 -0
- unstructured_ingest/cli/base/importer.py +34 -0
- unstructured_ingest/cli/base/src.py +75 -0
- unstructured_ingest/cli/cli.py +24 -0
- unstructured_ingest/cli/cmds.py +14 -0
- unstructured_ingest/cli/utils/__init__.py +0 -0
- unstructured_ingest/cli/utils/click.py +237 -0
- unstructured_ingest/cli/utils/model_conversion.py +222 -0
- unstructured_ingest/data_types/__init__.py +0 -0
- unstructured_ingest/data_types/entities.py +17 -0
- unstructured_ingest/data_types/file_data.py +116 -0
- unstructured_ingest/embed/__init__.py +0 -0
- unstructured_ingest/embed/azure_openai.py +63 -0
- unstructured_ingest/embed/bedrock.py +323 -0
- unstructured_ingest/embed/huggingface.py +69 -0
- unstructured_ingest/embed/interfaces.py +146 -0
- unstructured_ingest/embed/mixedbreadai.py +134 -0
- unstructured_ingest/embed/octoai.py +133 -0
- unstructured_ingest/embed/openai.py +142 -0
- unstructured_ingest/embed/togetherai.py +116 -0
- unstructured_ingest/embed/vertexai.py +109 -0
- unstructured_ingest/embed/voyageai.py +130 -0
- unstructured_ingest/error.py +156 -0
- unstructured_ingest/errors_v2.py +156 -0
- unstructured_ingest/interfaces/__init__.py +27 -0
- unstructured_ingest/interfaces/connector.py +56 -0
- unstructured_ingest/interfaces/downloader.py +90 -0
- unstructured_ingest/interfaces/indexer.py +29 -0
- unstructured_ingest/interfaces/process.py +22 -0
- unstructured_ingest/interfaces/processor.py +88 -0
- unstructured_ingest/interfaces/upload_stager.py +89 -0
- unstructured_ingest/interfaces/uploader.py +67 -0
- unstructured_ingest/logger.py +39 -0
- unstructured_ingest/main.py +11 -0
- unstructured_ingest/otel.py +128 -0
- unstructured_ingest/pipeline/__init__.py +0 -0
- unstructured_ingest/pipeline/interfaces.py +211 -0
- unstructured_ingest/pipeline/otel.py +32 -0
- unstructured_ingest/pipeline/pipeline.py +408 -0
- unstructured_ingest/pipeline/steps/__init__.py +0 -0
- unstructured_ingest/pipeline/steps/chunk.py +78 -0
- unstructured_ingest/pipeline/steps/download.py +206 -0
- unstructured_ingest/pipeline/steps/embed.py +77 -0
- unstructured_ingest/pipeline/steps/filter.py +35 -0
- unstructured_ingest/pipeline/steps/index.py +86 -0
- unstructured_ingest/pipeline/steps/partition.py +77 -0
- unstructured_ingest/pipeline/steps/stage.py +65 -0
- unstructured_ingest/pipeline/steps/uncompress.py +50 -0
- unstructured_ingest/pipeline/steps/upload.py +58 -0
- unstructured_ingest/processes/__init__.py +18 -0
- unstructured_ingest/processes/chunker.py +131 -0
- unstructured_ingest/processes/connector_registry.py +69 -0
- unstructured_ingest/processes/connectors/__init__.py +129 -0
- unstructured_ingest/processes/connectors/airtable.py +238 -0
- unstructured_ingest/processes/connectors/assets/__init__.py +0 -0
- unstructured_ingest/processes/connectors/assets/databricks_delta_table_schema.sql +9 -0
- unstructured_ingest/processes/connectors/assets/weaviate_collection_config.json +23 -0
- unstructured_ingest/processes/connectors/astradb.py +592 -0
- unstructured_ingest/processes/connectors/azure_ai_search.py +275 -0
- unstructured_ingest/processes/connectors/chroma.py +193 -0
- unstructured_ingest/processes/connectors/confluence.py +527 -0
- unstructured_ingest/processes/connectors/couchbase.py +336 -0
- unstructured_ingest/processes/connectors/databricks/__init__.py +58 -0
- unstructured_ingest/processes/connectors/databricks/volumes.py +233 -0
- unstructured_ingest/processes/connectors/databricks/volumes_aws.py +93 -0
- unstructured_ingest/processes/connectors/databricks/volumes_azure.py +108 -0
- unstructured_ingest/processes/connectors/databricks/volumes_gcp.py +91 -0
- unstructured_ingest/processes/connectors/databricks/volumes_native.py +92 -0
- unstructured_ingest/processes/connectors/databricks/volumes_table.py +187 -0
- unstructured_ingest/processes/connectors/delta_table.py +310 -0
- unstructured_ingest/processes/connectors/discord.py +161 -0
- unstructured_ingest/processes/connectors/duckdb/__init__.py +15 -0
- unstructured_ingest/processes/connectors/duckdb/base.py +103 -0
- unstructured_ingest/processes/connectors/duckdb/duckdb.py +130 -0
- unstructured_ingest/processes/connectors/duckdb/motherduck.py +130 -0
- unstructured_ingest/processes/connectors/elasticsearch/__init__.py +19 -0
- unstructured_ingest/processes/connectors/elasticsearch/elasticsearch.py +478 -0
- unstructured_ingest/processes/connectors/elasticsearch/opensearch.py +523 -0
- unstructured_ingest/processes/connectors/fsspec/__init__.py +37 -0
- unstructured_ingest/processes/connectors/fsspec/azure.py +203 -0
- unstructured_ingest/processes/connectors/fsspec/box.py +176 -0
- unstructured_ingest/processes/connectors/fsspec/dropbox.py +238 -0
- unstructured_ingest/processes/connectors/fsspec/fsspec.py +475 -0
- unstructured_ingest/processes/connectors/fsspec/gcs.py +203 -0
- unstructured_ingest/processes/connectors/fsspec/s3.py +253 -0
- unstructured_ingest/processes/connectors/fsspec/sftp.py +177 -0
- unstructured_ingest/processes/connectors/fsspec/utils.py +17 -0
- unstructured_ingest/processes/connectors/github.py +226 -0
- unstructured_ingest/processes/connectors/gitlab.py +270 -0
- unstructured_ingest/processes/connectors/google_drive.py +848 -0
- unstructured_ingest/processes/connectors/ibm_watsonx/__init__.py +10 -0
- unstructured_ingest/processes/connectors/ibm_watsonx/ibm_watsonx_s3.py +367 -0
- unstructured_ingest/processes/connectors/jira.py +522 -0
- unstructured_ingest/processes/connectors/kafka/__init__.py +17 -0
- unstructured_ingest/processes/connectors/kafka/cloud.py +121 -0
- unstructured_ingest/processes/connectors/kafka/kafka.py +275 -0
- unstructured_ingest/processes/connectors/kafka/local.py +103 -0
- unstructured_ingest/processes/connectors/kdbai.py +156 -0
- unstructured_ingest/processes/connectors/lancedb/__init__.py +30 -0
- unstructured_ingest/processes/connectors/lancedb/aws.py +43 -0
- unstructured_ingest/processes/connectors/lancedb/azure.py +43 -0
- unstructured_ingest/processes/connectors/lancedb/cloud.py +42 -0
- unstructured_ingest/processes/connectors/lancedb/gcp.py +44 -0
- unstructured_ingest/processes/connectors/lancedb/lancedb.py +181 -0
- unstructured_ingest/processes/connectors/lancedb/local.py +44 -0
- unstructured_ingest/processes/connectors/local.py +227 -0
- unstructured_ingest/processes/connectors/milvus.py +311 -0
- unstructured_ingest/processes/connectors/mongodb.py +389 -0
- unstructured_ingest/processes/connectors/neo4j.py +534 -0
- unstructured_ingest/processes/connectors/notion/__init__.py +0 -0
- unstructured_ingest/processes/connectors/notion/client.py +349 -0
- unstructured_ingest/processes/connectors/notion/connector.py +350 -0
- unstructured_ingest/processes/connectors/notion/helpers.py +448 -0
- unstructured_ingest/processes/connectors/notion/ingest_backoff/__init__.py +3 -0
- unstructured_ingest/processes/connectors/notion/ingest_backoff/_common.py +102 -0
- unstructured_ingest/processes/connectors/notion/ingest_backoff/_wrapper.py +126 -0
- unstructured_ingest/processes/connectors/notion/ingest_backoff/types.py +24 -0
- unstructured_ingest/processes/connectors/notion/interfaces.py +32 -0
- unstructured_ingest/processes/connectors/notion/types/__init__.py +0 -0
- unstructured_ingest/processes/connectors/notion/types/block.py +96 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/__init__.py +63 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/bookmark.py +40 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/breadcrumb.py +21 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/bulleted_list_item.py +31 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/callout.py +131 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/child_database.py +23 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/child_page.py +23 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/code.py +43 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/column_list.py +35 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/divider.py +22 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/embed.py +36 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/equation.py +23 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/file.py +49 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/heading.py +37 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/image.py +21 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/link_preview.py +24 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/link_to_page.py +29 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/numbered_list.py +29 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/paragraph.py +31 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/pdf.py +49 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/quote.py +37 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/synced_block.py +109 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/table.py +60 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/table_of_contents.py +23 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/template.py +30 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/todo.py +42 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/toggle.py +37 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/unsupported.py +20 -0
- unstructured_ingest/processes/connectors/notion/types/blocks/video.py +22 -0
- unstructured_ingest/processes/connectors/notion/types/database.py +73 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/__init__.py +125 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/checkbox.py +39 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/created_by.py +36 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/created_time.py +35 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/date.py +42 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/email.py +37 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/files.py +38 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/formula.py +50 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/last_edited_by.py +34 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/last_edited_time.py +35 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/multiselect.py +74 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/number.py +50 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/people.py +42 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/phone_number.py +37 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/relation.py +68 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/rich_text.py +44 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/rollup.py +57 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/select.py +70 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/status.py +82 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/title.py +38 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/unique_id.py +51 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/url.py +38 -0
- unstructured_ingest/processes/connectors/notion/types/database_properties/verification.py +79 -0
- unstructured_ingest/processes/connectors/notion/types/date.py +29 -0
- unstructured_ingest/processes/connectors/notion/types/file.py +54 -0
- unstructured_ingest/processes/connectors/notion/types/page.py +52 -0
- unstructured_ingest/processes/connectors/notion/types/parent.py +66 -0
- unstructured_ingest/processes/connectors/notion/types/rich_text.py +189 -0
- unstructured_ingest/processes/connectors/notion/types/user.py +83 -0
- unstructured_ingest/processes/connectors/onedrive.py +485 -0
- unstructured_ingest/processes/connectors/outlook.py +242 -0
- unstructured_ingest/processes/connectors/pinecone.py +400 -0
- unstructured_ingest/processes/connectors/qdrant/__init__.py +16 -0
- unstructured_ingest/processes/connectors/qdrant/cloud.py +59 -0
- unstructured_ingest/processes/connectors/qdrant/local.py +58 -0
- unstructured_ingest/processes/connectors/qdrant/qdrant.py +163 -0
- unstructured_ingest/processes/connectors/qdrant/server.py +60 -0
- unstructured_ingest/processes/connectors/redisdb.py +214 -0
- unstructured_ingest/processes/connectors/salesforce.py +307 -0
- unstructured_ingest/processes/connectors/sharepoint.py +282 -0
- unstructured_ingest/processes/connectors/slack.py +249 -0
- unstructured_ingest/processes/connectors/sql/__init__.py +41 -0
- unstructured_ingest/processes/connectors/sql/databricks_delta_tables.py +228 -0
- unstructured_ingest/processes/connectors/sql/postgres.py +168 -0
- unstructured_ingest/processes/connectors/sql/singlestore.py +176 -0
- unstructured_ingest/processes/connectors/sql/snowflake.py +298 -0
- unstructured_ingest/processes/connectors/sql/sql.py +456 -0
- unstructured_ingest/processes/connectors/sql/sqlite.py +179 -0
- unstructured_ingest/processes/connectors/sql/teradata.py +254 -0
- unstructured_ingest/processes/connectors/sql/vastdb.py +263 -0
- unstructured_ingest/processes/connectors/utils.py +60 -0
- unstructured_ingest/processes/connectors/vectara.py +348 -0
- unstructured_ingest/processes/connectors/weaviate/__init__.py +22 -0
- unstructured_ingest/processes/connectors/weaviate/cloud.py +166 -0
- unstructured_ingest/processes/connectors/weaviate/embedded.py +90 -0
- unstructured_ingest/processes/connectors/weaviate/local.py +73 -0
- unstructured_ingest/processes/connectors/weaviate/weaviate.py +337 -0
- unstructured_ingest/processes/connectors/zendesk/__init__.py +0 -0
- unstructured_ingest/processes/connectors/zendesk/client.py +314 -0
- unstructured_ingest/processes/connectors/zendesk/zendesk.py +241 -0
- unstructured_ingest/processes/embedder.py +203 -0
- unstructured_ingest/processes/filter.py +60 -0
- unstructured_ingest/processes/partitioner.py +233 -0
- unstructured_ingest/processes/uncompress.py +61 -0
- unstructured_ingest/processes/utils/__init__.py +8 -0
- unstructured_ingest/processes/utils/blob_storage.py +32 -0
- unstructured_ingest/processes/utils/logging/connector.py +365 -0
- unstructured_ingest/processes/utils/logging/sanitizer.py +117 -0
- unstructured_ingest/unstructured_api.py +140 -0
- unstructured_ingest/utils/__init__.py +5 -0
- unstructured_ingest/utils/chunking.py +56 -0
- unstructured_ingest/utils/compression.py +72 -0
- unstructured_ingest/utils/constants.py +2 -0
- unstructured_ingest/utils/data_prep.py +216 -0
- unstructured_ingest/utils/dep_check.py +78 -0
- unstructured_ingest/utils/filesystem.py +27 -0
- unstructured_ingest/utils/html.py +174 -0
- unstructured_ingest/utils/ndjson.py +52 -0
- unstructured_ingest/utils/pydantic_models.py +52 -0
- unstructured_ingest/utils/string_and_date_utils.py +74 -0
- unstructured_ingest/utils/table.py +80 -0
- unstructured_ingest/utils/tls.py +15 -0
- unstructured_ingest-1.2.32.dist-info/METADATA +235 -0
- unstructured_ingest-1.2.32.dist-info/RECORD +243 -0
- unstructured_ingest-1.2.32.dist-info/WHEEL +4 -0
- unstructured_ingest-1.2.32.dist-info/entry_points.txt +2 -0
- unstructured_ingest-1.2.32.dist-info/licenses/LICENSE.md +201 -0
unstructured_ingest/pipeline/steps/embed.py
@@ -0,0 +1,77 @@
+import asyncio
+import hashlib
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable, Optional, TypedDict
+
+from unstructured_ingest.data_types.file_data import FileData, file_data_from_file
+from unstructured_ingest.logger import logger
+from unstructured_ingest.pipeline.interfaces import PipelineStep
+from unstructured_ingest.processes.embedder import Embedder
+from unstructured_ingest.utils.data_prep import write_data
+from unstructured_ingest.utils.pydantic_models import serialize_base_model_json
+
+STEP_ID = "embed"
+
+
+class EmbedStepResponse(TypedDict):
+    file_data_path: str
+    path: str
+
+
+@dataclass
+class EmbedStep(PipelineStep):
+    process: Embedder
+    identifier: str = STEP_ID
+
+    def __str__(self):
+        return f"{self.identifier} ({self.process.config.embedding_provider})"
+
+    def __post_init__(self):
+        config = self.process.config.model_dump_json() if self.process.config else None
+        logger.info(f"created {self.identifier} with configs: {config}")
+
+    def should_embed(self, filepath: Path, file_data: FileData) -> bool:
+        if self.context.reprocess or file_data.reprocess:
+            return True
+        return not filepath.exists()
+
+    def get_output_filepath(self, filename: Path) -> Path:
+        hashed_output_file = f"{self.get_hash(extras=[filename.name])}.ndjson"
+        filepath = (self.cache_dir / hashed_output_file).resolve()
+        filepath.parent.mkdir(parents=True, exist_ok=True)
+        return filepath
+
+    def _save_output(self, output_filepath: str, embedded_content: list[dict]):
+        logger.debug(f"writing embedded output to: {output_filepath}")
+        write_data(path=Path(output_filepath), data=embedded_content)
+
+    async def _run_async(self, fn: Callable, path: str, file_data_path: str) -> EmbedStepResponse:
+        path = Path(path)
+        file_data = file_data_from_file(path=file_data_path)
+        output_filepath = self.get_output_filepath(filename=path)
+        if not self.should_embed(filepath=output_filepath, file_data=file_data):
+            logger.debug(f"skipping embedding, output already exists: {output_filepath}")
+            return EmbedStepResponse(file_data_path=file_data_path, path=str(output_filepath))
+        fn_kwargs = {"elements_filepath": path}
+        if not asyncio.iscoroutinefunction(fn):
+            embed_content_raw = fn(**fn_kwargs)
+        elif semaphore := self.context.semaphore:
+            async with semaphore:
+                embed_content_raw = await fn(**fn_kwargs)
+        else:
+            embed_content_raw = await fn(**fn_kwargs)
+
+        self._save_output(
+            output_filepath=str(output_filepath),
+            embedded_content=embed_content_raw,
+        )
+        return EmbedStepResponse(file_data_path=file_data_path, path=str(output_filepath))
+
+    def get_hash(self, extras: Optional[list[str]]) -> str:
+        hashable_string = serialize_base_model_json(
+            model=self.process.config, sort_keys=True, ensure_ascii=True
+        )
+        if extras:
+            hashable_string += "".join(extras)
+        return hashlib.sha256(hashable_string.encode()).hexdigest()[:12]
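Each pipeline step routes its work through the same dispatch shape seen in `_run_async` above: call the implementation directly when it is a plain function, otherwise await it, taking the context semaphore first when one is configured. A minimal standalone sketch of that pattern, assuming nothing from the package (the `dispatch` and `work` names are illustrative):

```python
import asyncio
from typing import Any, Callable, Optional


async def dispatch(
    fn: Callable,
    semaphore: Optional[asyncio.Semaphore] = None,
    **fn_kwargs: Any,
) -> Any:
    # Synchronous implementations run inline on the event loop.
    if not asyncio.iscoroutinefunction(fn):
        return fn(**fn_kwargs)
    # Coroutines are awaited, optionally gated by a semaphore to bound concurrency.
    if semaphore:
        async with semaphore:
            return await fn(**fn_kwargs)
    return await fn(**fn_kwargs)


async def work(elements_filepath: str) -> list[dict]:
    return [{"source": elements_filepath}]


async def main() -> None:
    print(await dispatch(work, asyncio.Semaphore(2), elements_filepath="a.ndjson"))


asyncio.run(main())
```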
unstructured_ingest/pipeline/steps/filter.py
@@ -0,0 +1,35 @@
+import asyncio
+from dataclasses import dataclass
+from typing import Callable, Optional
+
+from unstructured_ingest.data_types.file_data import file_data_from_file
+from unstructured_ingest.logger import logger
+from unstructured_ingest.pipeline.interfaces import PipelineStep
+from unstructured_ingest.processes.filter import Filterer
+
+STEP_ID = "filter"
+
+
+@dataclass
+class FilterStep(PipelineStep):
+    process: Filterer
+    identifier: str = STEP_ID
+
+    def __post_init__(self):
+        config = self.process.config.model_dump_json() if self.process.config else None
+        logger.info(f"created {self.identifier} with configs: {config}")
+
+    async def _run_async(self, fn: Callable, file_data_path: str, **kwargs) -> Optional[dict]:
+        file_data = file_data_from_file(path=file_data_path)
+        fn_kwargs = {"file_data": file_data}
+        if not asyncio.iscoroutinefunction(fn):
+            resp = fn(**fn_kwargs)
+        elif semaphore := self.context.semaphore:
+            async with semaphore:
+                resp = await fn(**fn_kwargs)
+        else:
+            resp = await fn(**fn_kwargs)
+
+        if resp:
+            return {"file_data_path": file_data_path}
+        return None
unstructured_ingest/pipeline/steps/index.py
@@ -0,0 +1,86 @@
+import hashlib
+import json
+from dataclasses import dataclass
+from typing import AsyncGenerator, Generator, Optional, TypeVar
+
+from unstructured_ingest.interfaces.indexer import Indexer
+from unstructured_ingest.logger import logger
+from unstructured_ingest.pipeline.interfaces import PipelineStep
+from unstructured_ingest.pipeline.otel import instrument
+from unstructured_ingest.utils.pydantic_models import serialize_base_model_json
+
+IndexerT = TypeVar("IndexerT", bound=Indexer)
+
+STEP_ID = "indexer"
+
+
+@dataclass
+class IndexStep(PipelineStep):
+    process: IndexerT
+    identifier: str = STEP_ID
+
+    def __str__(self):
+        return f"{self.identifier} ({self.process.__class__.__name__})"
+
+    def __post_init__(self):
+        config = self.process.index_config.model_dump_json() if self.process.index_config else None
+        connection_config = (
+            self.process.connection_config.model_dump_json()
+            if self.process.connection_config
+            else None
+        )
+        logger.info(
+            f"created {self.identifier} with configs: {config}, "
+            f"connection configs: {connection_config}"
+        )
+
+    @instrument(span_name=STEP_ID)
+    def run(self) -> Generator[str, None, None]:
+        for file_data in self.process.run():
+            logger.debug(f"generated file data: {file_data.model_dump_json()}")
+            try:
+                record_hash = self.get_hash(extras=[file_data.identifier])
+                filename = f"{record_hash}.json"
+                filepath = (self.cache_dir / filename).resolve()
+                filepath.parent.mkdir(parents=True, exist_ok=True)
+                with open(str(filepath), "w") as f:
+                    f.write(file_data.model_dump_json(indent=2))
+                yield str(filepath)
+            except Exception as e:
+                logger.error(f"failed to create index for file data: {file_data}", exc_info=True)
+                if self.context.raise_on_error:
+                    raise e
+                continue
+
+    async def run_async(self) -> AsyncGenerator[str, None]:
+        async for file_data in self.process.run_async():
+            logger.debug(f"generated file data: {file_data.model_dump_json()}")
+            try:
+                record_hash = self.get_hash(extras=[file_data.identifier])
+                filename = f"{record_hash}.json"
+                filepath = (self.cache_dir / filename).resolve()
+                filepath.parent.mkdir(parents=True, exist_ok=True)
+                with open(str(filepath), "w") as f:
+                    f.write(file_data.model_dump_json(indent=2))
+                yield str(filepath)
+            except Exception as e:
+                logger.error(f"failed to create index for file data: {file_data}", exc_info=True)
+                if self.context.raise_on_error:
+                    raise e
+                continue
+
+    def get_hash(self, extras: Optional[list[str]]) -> str:
+        index_config_dict = json.loads(
+            serialize_base_model_json(model=self.process.index_config, sort_keys=True)
+        )
+        connection_config_dict = json.loads(
+            serialize_base_model_json(model=self.process.connection_config, sort_keys=True)
+        )
+        hashable_dict = {
+            "index_config": index_config_dict,
+            "connection_config": connection_config_dict,
+        }
+        hashable_string = json.dumps(hashable_dict, sort_keys=True)
+        if extras:
+            hashable_string += "".join(extras)
+        return hashlib.sha256(hashable_string.encode()).hexdigest()[:12]
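The indexer's cache key is fully deterministic: both configs are serialized with sorted keys, combined into one JSON string, suffixed with any extras (here, the record identifier), and reduced to the first 12 hex characters of a SHA-256 digest. A toy recomputation with made-up config values:

```python
import hashlib
import json

# Made-up stand-ins for the index_config / connection_config dumps.
hashable_dict = {
    "index_config": {"recursive": True},
    "connection_config": {"remote_url": "s3://example-bucket"},
}
hashable_string = json.dumps(hashable_dict, sort_keys=True)
hashable_string += "file-identifier-123"  # extras are appended verbatim
print(hashlib.sha256(hashable_string.encode()).hexdigest()[:12])
```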
unstructured_ingest/pipeline/steps/partition.py
@@ -0,0 +1,77 @@
+import asyncio
+import hashlib
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable, Optional, TypedDict
+
+from unstructured_ingest.data_types.file_data import FileData, file_data_from_file
+from unstructured_ingest.logger import logger
+from unstructured_ingest.pipeline.interfaces import PipelineStep
+from unstructured_ingest.processes.partitioner import Partitioner
+from unstructured_ingest.utils.data_prep import write_data
+from unstructured_ingest.utils.pydantic_models import serialize_base_model_json
+
+STEP_ID = "partition"
+
+
+class PartitionStepResponse(TypedDict):
+    file_data_path: str
+    path: str
+
+
+@dataclass
+class PartitionStep(PipelineStep):
+    process: Partitioner
+    identifier: str = STEP_ID
+
+    def __str__(self):
+        return f"{self.identifier} ({self.process.config.strategy})"
+
+    def __post_init__(self):
+        config = self.process.config.model_dump_json()
+        logger.info(f"created {self.identifier} with configs: {config}")
+
+    def should_partition(self, filepath: Path, file_data: FileData) -> bool:
+        if self.context.reprocess or file_data.reprocess:
+            return True
+        return not filepath.exists()
+
+    def get_output_filepath(self, filename: Path) -> Path:
+        hashed_output_file = f"{self.get_hash(extras=[filename.name])}.ndjson"
+        filepath = (self.cache_dir / hashed_output_file).resolve()
+        filepath.parent.mkdir(parents=True, exist_ok=True)
+        return filepath
+
+    def _save_output(self, output_filepath: str, partitioned_content: list[dict]):
+        logger.debug(f"writing partitioned output to: {output_filepath}")
+        write_data(path=Path(output_filepath), data=partitioned_content)
+
+    async def _run_async(
+        self, fn: Callable, path: str, file_data_path: str
+    ) -> Optional[PartitionStepResponse]:
+        path = Path(path)
+        file_data = file_data_from_file(path=file_data_path)
+        output_filepath = self.get_output_filepath(filename=Path(file_data_path))
+        if not self.should_partition(filepath=output_filepath, file_data=file_data):
+            logger.debug(f"skipping partitioning, output already exists: {output_filepath}")
+            return PartitionStepResponse(file_data_path=file_data_path, path=str(output_filepath))
+        fn_kwargs = {"filename": path, "metadata": file_data.metadata.model_dump()}
+        if not asyncio.iscoroutinefunction(fn):
+            partitioned_content = fn(**fn_kwargs)
+        elif semaphore := self.context.semaphore:
+            async with semaphore:
+                partitioned_content = await fn(**fn_kwargs)
+        else:
+            partitioned_content = await fn(**fn_kwargs)
+        self._save_output(
+            output_filepath=str(output_filepath), partitioned_content=partitioned_content
+        )
+        return PartitionStepResponse(file_data_path=file_data_path, path=str(output_filepath))
+
+    def get_hash(self, extras: Optional[list[str]]) -> str:
+        hashable_string = serialize_base_model_json(
+            model=self.process.config, sort_keys=True, ensure_ascii=True
+        )
+        if extras:
+            hashable_string += "".join(extras)
+        return hashlib.sha256(hashable_string.encode()).hexdigest()[:12]
unstructured_ingest/pipeline/steps/stage.py
@@ -0,0 +1,65 @@
+import asyncio
+import hashlib
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable, Optional, TypedDict
+
+from unstructured_ingest.data_types.file_data import file_data_from_file
+from unstructured_ingest.interfaces import UploadStager
+from unstructured_ingest.logger import logger
+from unstructured_ingest.pipeline.interfaces import PipelineStep
+from unstructured_ingest.utils.pydantic_models import serialize_base_model_json
+
+STEP_ID = "upload_stage"
+
+
+class UploadStageStepResponse(TypedDict):
+    file_data_path: str
+    path: str
+
+
+@dataclass
+class UploadStageStep(PipelineStep):
+    process: UploadStager
+    identifier: str = STEP_ID
+
+    def __str__(self):
+        return f"{self.identifier} ({self.process.__class__.__name__})"
+
+    def __post_init__(self):
+        config = (
+            self.process.upload_stager_config.model_dump_json()
+            if self.process.upload_stager_config
+            else None
+        )
+        self.cache_dir.mkdir(parents=True, exist_ok=True)
+        logger.info(f"created {self.identifier} with configs: {config}")
+
+    async def _run_async(
+        self, fn: Callable, path: str, file_data_path: str
+    ) -> UploadStageStepResponse:
+        path = Path(path)
+        # Maintain extension
+        output_filename = f"{self.get_hash(extras=[path.name])}{path.suffix}"
+        fn_kwargs = {
+            "elements_filepath": path,
+            "file_data": file_data_from_file(path=file_data_path),
+            "output_dir": self.cache_dir,
+            "output_filename": output_filename,
+        }
+        if not asyncio.iscoroutinefunction(fn):
+            staged_output_path = fn(**fn_kwargs)
+        elif semaphore := self.context.semaphore:
+            async with semaphore:
+                staged_output_path = await fn(**fn_kwargs)
+        else:
+            staged_output_path = await fn(**fn_kwargs)
+        return UploadStageStepResponse(file_data_path=file_data_path, path=str(staged_output_path))
+
+    def get_hash(self, extras: Optional[list[str]]) -> str:
+        hashable_string = serialize_base_model_json(
+            model=self.process.upload_stager_config, sort_keys=True, ensure_ascii=True
+        )
+        if extras:
+            hashable_string += "".join(extras)
+        return hashlib.sha256(hashable_string.encode()).hexdigest()[:12]
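The staged file keeps its original extension while taking a hashed basename, exactly as `_run_async` computes `output_filename` above; a toy illustration with a made-up hash value:

```python
from pathlib import Path

path = Path("cache/partition/ab12cd34ef56.ndjson")
fake_hash = "0011aabbccdd"  # stand-in for self.get_hash(extras=[path.name])
output_filename = f"{fake_hash}{path.suffix}"
print(output_filename)  # 0011aabbccdd.ndjson
```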
unstructured_ingest/pipeline/steps/uncompress.py
@@ -0,0 +1,50 @@
+import asyncio
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable, TypedDict
+
+from unstructured_ingest.data_types.file_data import file_data_from_file
+from unstructured_ingest.logger import logger
+from unstructured_ingest.pipeline.interfaces import PipelineStep
+from unstructured_ingest.processes.uncompress import Uncompressor
+
+STEP_ID = "uncompress"
+
+
+class UncompressStepResponse(TypedDict):
+    file_data_path: str
+    path: str
+
+
+@dataclass
+class UncompressStep(PipelineStep):
+    process: Uncompressor
+    identifier: str = STEP_ID
+
+    def __post_init__(self):
+        config = self.process.config.model_dump_json() if self.process.config else None
+        logger.info(f"created {self.identifier} with configs: {config}")
+
+    async def _run_async(
+        self, fn: Callable, path: str, file_data_path: str
+    ) -> list[UncompressStepResponse]:
+        file_data = file_data_from_file(path=file_data_path)
+        fn_kwargs = {"file_data": file_data}
+        if not asyncio.iscoroutinefunction(fn):
+            new_file_data = fn(**fn_kwargs)
+        elif semaphore := self.context.semaphore:
+            async with semaphore:
+                new_file_data = await fn(**fn_kwargs)
+        else:
+            new_file_data = await fn(**fn_kwargs)
+        responses = []
+        for new_file in new_file_data:
+            new_file_data_path = Path(file_data_path).parent / f"{new_file.identifier}.json"
+            new_file.to_file(path=str(new_file_data_path.resolve()))
+            responses.append(
+                UncompressStepResponse(
+                    path=new_file.local_download_path,
+                    file_data_path=str(new_file_data_path),
+                )
+            )
+        return responses
unstructured_ingest/pipeline/steps/upload.py
@@ -0,0 +1,58 @@
+import asyncio
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable, Optional, TypedDict
+
+from unstructured_ingest.data_types.file_data import file_data_from_file
+from unstructured_ingest.interfaces import UploadContent
+from unstructured_ingest.logger import logger
+from unstructured_ingest.pipeline.interfaces import BatchPipelineStep
+from unstructured_ingest.pipeline.otel import instrument
+
+STEP_ID = "upload"
+
+
+class UploadStepContent(TypedDict):
+    path: str
+    file_data_path: str
+
+
+@dataclass
+class UploadStep(BatchPipelineStep):
+    identifier: str = STEP_ID
+
+    def __str__(self):
+        return f"{self.identifier} ({self.process.__class__.__name__})"
+
+    def __post_init__(self):
+        config = (
+            self.process.upload_config.model_dump_json() if self.process.upload_config else None
+        )
+        connection_config = (
+            self.process.connection_config.model_dump_json()
+            if self.process.connection_config
+            else None
+        )
+        logger.info(
+            f"Created {self.identifier} with configs: {config}, "
+            f"connection configs: {connection_config}"
+        )
+
+    @instrument(span_name=STEP_ID)
+    def _run_batch(self, contents: list[UploadStepContent]) -> None:
+        upload_contents = [
+            UploadContent(path=Path(c["path"]), file_data=file_data_from_file(c["file_data_path"]))
+            for c in contents
+        ]
+        self.process.run_batch(contents=upload_contents)
+
+    async def _run_async(self, path: str, file_data_path: str, fn: Optional[Callable] = None):
+        fn = fn or self.process.run_async
+        fn_kwargs = {"path": Path(path), "file_data": file_data_from_file(path=file_data_path)}
+        if not asyncio.iscoroutinefunction(fn):
+            fn(**fn_kwargs)
+        elif semaphore := self.context.semaphore:
+            async with semaphore:
+                await fn(**fn_kwargs)
+        else:
+            await fn(**fn_kwargs)
unstructured_ingest/processes/__init__.py
@@ -0,0 +1,18 @@
+from .chunker import Chunker, ChunkerConfig
+from .embedder import Embedder, EmbedderConfig
+from .filter import Filterer, FiltererConfig
+from .partitioner import Partitioner, PartitionerConfig
+from .uncompress import UncompressConfig, Uncompressor
+
+__all__ = [
+    "Chunker",
+    "ChunkerConfig",
+    "Embedder",
+    "EmbedderConfig",
+    "Filterer",
+    "FiltererConfig",
+    "Partitioner",
+    "PartitionerConfig",
+    "Uncompressor",
+    "UncompressConfig",
+]
unstructured_ingest/processes/chunker.py
@@ -0,0 +1,131 @@
+from abc import ABC
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field, SecretStr
+
+from unstructured_ingest.interfaces.process import BaseProcess
+from unstructured_ingest.logger import logger
+from unstructured_ingest.unstructured_api import call_api_async
+from unstructured_ingest.utils.chunking import assign_and_map_hash_ids
+from unstructured_ingest.utils.data_prep import get_json_data
+from unstructured_ingest.utils.dep_check import requires_dependencies
+
+CHUNK_MAX_CHARS_DEFAULT: int = 500
+CHUNK_MULTI_PAGE_DEFAULT: bool = True
+
+
+class ChunkerConfig(BaseModel):
+    chunking_strategy: Optional[str] = Field(
+        default=None, description="The rule-set to use to form chunks. Omit to disable chunking."
+    )
+    chunking_endpoint: Optional[str] = Field(
+        default="https://api.unstructuredapp.io/general/v0/general",
+        description="If chunking via api, use the following host.",
+    )
+    chunk_api_timeout_ms: Optional[int] = Field(
+        default=None, description="Timeout in milliseconds for all API calls during chunking."
+    )
+    chunk_by_api: bool = Field(default=False, description="Flag to use api for chunking")
+    chunk_api_key: Optional[SecretStr] = Field(
+        default=None, description="API Key for chunking endpoint."
+    )
+
+    chunk_combine_text_under_n_chars: Optional[int] = Field(
+        default=None,
+        description="Combine consecutive chunks when the first does not exceed this length and"
+        " the second will fit without exceeding the hard-maximum length. Only"
+        " operative for 'by_title' chunking-strategy.",
+    )
+    chunk_include_orig_elements: Optional[bool] = Field(
+        default=None,
+        description="When chunking, add the original elements consolidated to form each chunk to"
+        " `.metadata.orig_elements` on that chunk.",
+    )
+    chunk_max_characters: int = Field(
+        default=CHUNK_MAX_CHARS_DEFAULT,
+        description="Hard maximum chunk length. No chunk will exceed this length. An oversized"
+        " element will be divided by text-splitting to fit this window.",
+    )
+    chunk_multipage_sections: bool = Field(
+        default=CHUNK_MULTI_PAGE_DEFAULT,
+        description="Ignore page boundaries when chunking such that elements from two different"
+        " pages can appear in the same chunk. Only operative for 'by_title'"
+        " chunking-strategy.",
+    )
+    chunk_new_after_n_chars: Optional[int] = Field(
+        default=None,
+        description="Soft-maximum chunk length. Another element will not be added to a chunk of"
+        " this length even when it would fit without exceeding the hard-maximum"
+        " length.",
+    )
+    chunk_overlap: Optional[int] = Field(
+        default=None,
+        description="Prefix chunk text with last overlap=N characters of prior chunk. Only"
+        " applies to oversized chunks divided by text-splitting. To apply overlap to"
+        " non-oversized chunks use the --overlap-all option.",
+    )
+    chunk_overlap_all: Optional[bool] = Field(
+        default=None,
+        description="Apply overlap to chunks formed from whole elements as well as those formed"
+        " by text-splitting oversized elements. Overlap length is taken from --overlap"
+        " option value.",
+    )
+
+    def to_chunking_kwargs(self) -> dict[str, Any]:
+        return {
+            "chunking_strategy": self.chunking_strategy,
+            "combine_under_n_chars": self.chunk_combine_text_under_n_chars,
+            "max_characters": self.chunk_max_characters,
+            "include_orig_elements": self.chunk_include_orig_elements,
+            "multipage_sections": self.chunk_multipage_sections,
+            "new_after_n_chars": self.chunk_new_after_n_chars,
+            "overlap": self.chunk_overlap,
+            "overlap_all": self.chunk_overlap_all,
+        }
+
+
+@dataclass
+class Chunker(BaseProcess, ABC):
+    config: ChunkerConfig
+
+    def is_async(self) -> bool:
+        return self.config.chunk_by_api
+
+    @requires_dependencies(dependencies=["unstructured"])
+    def run(self, elements_filepath: Path, **kwargs: Any) -> list[dict]:
+        from unstructured.chunking import dispatch
+        from unstructured.staging.base import elements_from_dicts
+
+        element_dicts = get_json_data(elements_filepath)
+
+        elements = elements_from_dicts(element_dicts=element_dicts)
+        if not elements:
+            return [e.to_dict() for e in elements]
+        local_chunking_strategies = ("basic", "by_title")
+        if self.config.chunking_strategy not in local_chunking_strategies:
+            logger.warning(
+                "chunking strategy not supported for local chunking: {}, must be one of: {}".format(
                    self.config.chunking_strategy, ", ".join(local_chunking_strategies)
+                )
+            )
+            return [e.to_dict() for e in elements]
+        chunked_elements = dispatch.chunk(elements=elements, **self.config.to_chunking_kwargs())
+        chunked_elements_dicts = [e.to_dict() for e in chunked_elements]
+        chunked_elements_dicts = assign_and_map_hash_ids(elements=chunked_elements_dicts)
+        return chunked_elements_dicts
+
+    @requires_dependencies(dependencies=["unstructured_client"], extras="remote")
+    async def run_async(self, elements_filepath: Path, **kwargs: Any) -> list[dict]:
+        elements = await call_api_async(
+            server_url=self.config.chunking_endpoint,
+            api_key=self.config.chunk_api_key.get_secret_value(),
+            filename=elements_filepath,
+            api_parameters=self.config.to_chunking_kwargs(),
+            timeout_ms=self.config.chunk_api_timeout_ms,
+        )
+
+        elements = assign_and_map_hash_ids(elements=elements)
+
+        return elements
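A short sketch of configuring the chunker; the option values are illustrative. `to_chunking_kwargs` maps the `chunk_*` fields onto the argument names that `unstructured`'s chunking dispatch expects:

```python
from unstructured_ingest.processes import Chunker, ChunkerConfig

config = ChunkerConfig(
    chunking_strategy="by_title",  # local strategies are "basic" and "by_title"
    chunk_max_characters=500,
    chunk_new_after_n_chars=400,
    chunk_overlap=50,
)
print(config.to_chunking_kwargs())
# Chunker(config=config).run(elements_filepath=...) would then chunk in-process;
# with chunk_by_api=True, run_async() calls the hosted endpoint instead.
```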
unstructured_ingest/processes/connector_registry.py
@@ -0,0 +1,69 @@
+from abc import ABC
+from dataclasses import dataclass
+from typing import Optional, Type, TypeVar
+
+from unstructured_ingest.interfaces import (
+    ConnectionConfig,
+    Downloader,
+    DownloaderConfig,
+    Indexer,
+    IndexerConfig,
+    Uploader,
+    UploaderConfig,
+    UploadStager,
+    UploadStagerConfig,
+)
+
+IndexerT = TypeVar("IndexerT", bound=Indexer)
+IndexerConfigT = TypeVar("IndexerConfigT", bound=IndexerConfig)
+DownloaderT = TypeVar("DownloaderT", bound=Downloader)
+DownloaderConfigT = TypeVar("DownloaderConfigT", bound=DownloaderConfig)
+ConnectionConfigT = TypeVar("ConnectionConfigT", bound=ConnectionConfig)
+UploadStagerConfigT = TypeVar("UploadStagerConfigT", bound=UploadStagerConfig)
+UploadStagerT = TypeVar("UploadStagerT", bound=UploadStager)
+UploaderConfigT = TypeVar("UploaderConfigT", bound=UploaderConfig)
+UploaderT = TypeVar("UploaderT", bound=Uploader)
+
+
+@dataclass
+class RegistryEntry(ABC):
+    pass
+
+
+@dataclass
+class SourceRegistryEntry(RegistryEntry):
+    indexer: Type[IndexerT]
+    downloader: Type[DownloaderT]
+
+    downloader_config: Optional[Type[DownloaderConfigT]] = None
+    indexer_config: Optional[Type[IndexerConfigT]] = None
+    connection_config: Optional[Type[ConnectionConfigT]] = None
+
+
+source_registry: dict[str, SourceRegistryEntry] = {}
+
+
+def add_source_entry(source_type: str, entry: SourceRegistryEntry):
+    if source_type in source_registry:
+        raise ValueError(f"source {source_type} has already been registered")
+    source_registry[source_type] = entry
+
+
+@dataclass
+class DestinationRegistryEntry(RegistryEntry):
+    uploader: Type[UploaderT]
+    upload_stager: Optional[Type[UploadStagerT]] = None
+
+    upload_stager_config: Optional[Type[UploadStagerConfigT]] = None
+    uploader_config: Optional[Type[UploaderConfigT]] = None
+
+    connection_config: Optional[Type[ConnectionConfigT]] = None
+
+
+destination_registry: dict[str, DestinationRegistryEntry] = {}
+
+
+def add_destination_entry(destination_type: str, entry: DestinationRegistryEntry):
+    if destination_type in destination_registry:
+        raise ValueError(f"destination {destination_type} has already been registered")
+    destination_registry[destination_type] = entry
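Finally, a sketch of how a source connector would register itself against this module; the `MySource*` classes are hypothetical placeholders, not connectors shipped with the package:

```python
from unstructured_ingest.interfaces import Downloader, Indexer
from unstructured_ingest.processes.connector_registry import (
    SourceRegistryEntry,
    add_source_entry,
    source_registry,
)


class MySourceIndexer(Indexer):  # hypothetical
    ...


class MySourceDownloader(Downloader):  # hypothetical
    ...


add_source_entry(
    source_type="my_source",
    entry=SourceRegistryEntry(indexer=MySourceIndexer, downloader=MySourceDownloader),
)
print("my_source" in source_registry)  # True; registering twice raises ValueError
```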