unstructured-ingest 0.7.2-py3-none-any.whl → 1.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of unstructured-ingest might be problematic.
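The flag above refers to the release under review (1.0.1). As a minimal sketch (not part of this diff), assuming the distribution is installed in the current environment under its project name, you can confirm whether that build is what you actually have:

from importlib.metadata import PackageNotFoundError, version

FLAGGED = "1.0.1"  # the release reviewed on this page
try:
    # stdlib lookup of the installed distribution's version string
    installed = version("unstructured-ingest")
except PackageNotFoundError:
    installed = None  # distribution not installed in this environment
print(installed == FLAGGED)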
- unstructured_ingest/__version__.py +1 -1
- unstructured_ingest/cli/README.md +28 -0
- unstructured_ingest/embed/mixedbreadai.py +0 -1
- unstructured_ingest/interfaces/upload_stager.py +2 -2
- unstructured_ingest/interfaces/uploader.py +3 -3
- unstructured_ingest/main.py +0 -0
- unstructured_ingest/pipeline/interfaces.py +1 -1
- unstructured_ingest/pipeline/pipeline.py +1 -1
- unstructured_ingest/processes/chunker.py +4 -0
- unstructured_ingest/processes/connectors/airtable.py +4 -2
- unstructured_ingest/processes/connectors/astradb.py +2 -2
- unstructured_ingest/processes/connectors/azure_ai_search.py +1 -1
- unstructured_ingest/processes/connectors/confluence.py +0 -1
- unstructured_ingest/processes/connectors/databricks/volumes_aws.py +1 -1
- unstructured_ingest/processes/connectors/databricks/volumes_azure.py +2 -2
- unstructured_ingest/processes/connectors/databricks/volumes_gcp.py +1 -1
- unstructured_ingest/processes/connectors/databricks/volumes_table.py +1 -2
- unstructured_ingest/processes/connectors/delta_table.py +1 -0
- unstructured_ingest/processes/connectors/duckdb/base.py +2 -2
- unstructured_ingest/processes/connectors/duckdb/duckdb.py +3 -3
- unstructured_ingest/processes/connectors/duckdb/motherduck.py +3 -3
- unstructured_ingest/processes/connectors/fsspec/s3.py +5 -3
- unstructured_ingest/processes/connectors/gitlab.py +1 -2
- unstructured_ingest/processes/connectors/google_drive.py +0 -2
- unstructured_ingest/processes/connectors/ibm_watsonx/ibm_watsonx_s3.py +9 -7
- unstructured_ingest/processes/connectors/kdbai.py +1 -0
- unstructured_ingest/processes/connectors/outlook.py +1 -2
- unstructured_ingest/processes/connectors/pinecone.py +0 -1
- unstructured_ingest/processes/connectors/redisdb.py +28 -24
- unstructured_ingest/processes/connectors/salesforce.py +1 -1
- unstructured_ingest/processes/connectors/slack.py +1 -2
- unstructured_ingest/processes/connectors/sql/databricks_delta_tables.py +5 -0
- unstructured_ingest/processes/connectors/sql/postgres.py +7 -1
- unstructured_ingest/processes/connectors/sql/singlestore.py +11 -6
- unstructured_ingest/processes/connectors/sql/snowflake.py +5 -0
- unstructured_ingest/processes/connectors/sql/sql.py +3 -4
- unstructured_ingest/processes/connectors/sql/sqlite.py +5 -0
- unstructured_ingest/processes/connectors/sql/vastdb.py +7 -3
- unstructured_ingest/processes/connectors/vectara.py +0 -2
- unstructured_ingest/processes/connectors/zendesk/zendesk.py +0 -2
- unstructured_ingest/processes/embedder.py +2 -2
- unstructured_ingest/processes/filter.py +1 -1
- unstructured_ingest/processes/partitioner.py +4 -0
- unstructured_ingest/processes/utils/blob_storage.py +2 -2
- unstructured_ingest/unstructured_api.py +13 -8
- unstructured_ingest/utils/data_prep.py +8 -32
- unstructured_ingest-1.0.1.dist-info/METADATA +226 -0
- {unstructured_ingest-0.7.2.dist-info → unstructured_ingest-1.0.1.dist-info}/RECORD +50 -184
- {unstructured_ingest-0.7.2.dist-info → unstructured_ingest-1.0.1.dist-info}/WHEEL +1 -2
- examples/__init__.py +0 -0
- examples/airtable.py +0 -44
- examples/azure_cognitive_search.py +0 -55
- examples/chroma.py +0 -54
- examples/couchbase.py +0 -55
- examples/databricks_volumes_dest.py +0 -55
- examples/databricks_volumes_source.py +0 -53
- examples/delta_table.py +0 -45
- examples/discord_example.py +0 -36
- examples/elasticsearch.py +0 -49
- examples/google_drive.py +0 -45
- examples/kdbai.py +0 -54
- examples/local.py +0 -36
- examples/milvus.py +0 -44
- examples/mongodb.py +0 -53
- examples/opensearch.py +0 -50
- examples/pinecone.py +0 -57
- examples/s3.py +0 -38
- examples/salesforce.py +0 -44
- examples/sharepoint.py +0 -47
- examples/singlestore.py +0 -49
- examples/sql.py +0 -90
- examples/vectara.py +0 -54
- examples/weaviate.py +0 -44
- test/__init__.py +0 -0
- test/integration/__init__.py +0 -0
- test/integration/chunkers/__init__.py +0 -0
- test/integration/chunkers/test_chunkers.py +0 -31
- test/integration/connectors/__init__.py +0 -0
- test/integration/connectors/conftest.py +0 -38
- test/integration/connectors/databricks/__init__.py +0 -0
- test/integration/connectors/databricks/test_volumes_native.py +0 -273
- test/integration/connectors/discord/__init__.py +0 -0
- test/integration/connectors/discord/test_discord.py +0 -90
- test/integration/connectors/duckdb/__init__.py +0 -0
- test/integration/connectors/duckdb/conftest.py +0 -14
- test/integration/connectors/duckdb/test_duckdb.py +0 -90
- test/integration/connectors/duckdb/test_motherduck.py +0 -95
- test/integration/connectors/elasticsearch/__init__.py +0 -0
- test/integration/connectors/elasticsearch/conftest.py +0 -34
- test/integration/connectors/elasticsearch/test_elasticsearch.py +0 -331
- test/integration/connectors/elasticsearch/test_opensearch.py +0 -326
- test/integration/connectors/sql/__init__.py +0 -0
- test/integration/connectors/sql/test_databricks_delta_tables.py +0 -170
- test/integration/connectors/sql/test_postgres.py +0 -201
- test/integration/connectors/sql/test_singlestore.py +0 -182
- test/integration/connectors/sql/test_snowflake.py +0 -244
- test/integration/connectors/sql/test_sqlite.py +0 -168
- test/integration/connectors/sql/test_vastdb.py +0 -34
- test/integration/connectors/test_astradb.py +0 -287
- test/integration/connectors/test_azure_ai_search.py +0 -254
- test/integration/connectors/test_chroma.py +0 -136
- test/integration/connectors/test_confluence.py +0 -111
- test/integration/connectors/test_delta_table.py +0 -183
- test/integration/connectors/test_dropbox.py +0 -151
- test/integration/connectors/test_github.py +0 -49
- test/integration/connectors/test_google_drive.py +0 -257
- test/integration/connectors/test_jira.py +0 -67
- test/integration/connectors/test_lancedb.py +0 -247
- test/integration/connectors/test_milvus.py +0 -208
- test/integration/connectors/test_mongodb.py +0 -335
- test/integration/connectors/test_neo4j.py +0 -244
- test/integration/connectors/test_notion.py +0 -152
- test/integration/connectors/test_onedrive.py +0 -163
- test/integration/connectors/test_pinecone.py +0 -387
- test/integration/connectors/test_qdrant.py +0 -216
- test/integration/connectors/test_redis.py +0 -143
- test/integration/connectors/test_s3.py +0 -184
- test/integration/connectors/test_sharepoint.py +0 -222
- test/integration/connectors/test_vectara.py +0 -282
- test/integration/connectors/test_zendesk.py +0 -120
- test/integration/connectors/utils/__init__.py +0 -0
- test/integration/connectors/utils/constants.py +0 -13
- test/integration/connectors/utils/docker.py +0 -151
- test/integration/connectors/utils/docker_compose.py +0 -59
- test/integration/connectors/utils/validation/__init__.py +0 -0
- test/integration/connectors/utils/validation/destination.py +0 -77
- test/integration/connectors/utils/validation/equality.py +0 -76
- test/integration/connectors/utils/validation/source.py +0 -331
- test/integration/connectors/utils/validation/utils.py +0 -36
- test/integration/connectors/weaviate/__init__.py +0 -0
- test/integration/connectors/weaviate/conftest.py +0 -15
- test/integration/connectors/weaviate/test_cloud.py +0 -39
- test/integration/connectors/weaviate/test_local.py +0 -152
- test/integration/embedders/__init__.py +0 -0
- test/integration/embedders/conftest.py +0 -13
- test/integration/embedders/test_azure_openai.py +0 -57
- test/integration/embedders/test_bedrock.py +0 -103
- test/integration/embedders/test_huggingface.py +0 -24
- test/integration/embedders/test_mixedbread.py +0 -71
- test/integration/embedders/test_octoai.py +0 -75
- test/integration/embedders/test_openai.py +0 -74
- test/integration/embedders/test_togetherai.py +0 -71
- test/integration/embedders/test_vertexai.py +0 -63
- test/integration/embedders/test_voyageai.py +0 -79
- test/integration/embedders/utils.py +0 -66
- test/integration/partitioners/__init__.py +0 -0
- test/integration/partitioners/test_partitioner.py +0 -76
- test/integration/utils.py +0 -15
- test/unit/__init__.py +0 -0
- test/unit/chunkers/__init__.py +0 -0
- test/unit/chunkers/test_chunkers.py +0 -49
- test/unit/connectors/__init__.py +0 -0
- test/unit/connectors/ibm_watsonx/__init__.py +0 -0
- test/unit/connectors/ibm_watsonx/test_ibm_watsonx_s3.py +0 -459
- test/unit/connectors/motherduck/__init__.py +0 -0
- test/unit/connectors/motherduck/test_base.py +0 -73
- test/unit/connectors/sql/__init__.py +0 -0
- test/unit/connectors/sql/test_sql.py +0 -152
- test/unit/connectors/test_confluence.py +0 -71
- test/unit/connectors/test_jira.py +0 -401
- test/unit/embed/__init__.py +0 -0
- test/unit/embed/test_mixedbreadai.py +0 -42
- test/unit/embed/test_octoai.py +0 -27
- test/unit/embed/test_openai.py +0 -28
- test/unit/embed/test_vertexai.py +0 -25
- test/unit/embed/test_voyageai.py +0 -24
- test/unit/embedders/__init__.py +0 -0
- test/unit/embedders/test_bedrock.py +0 -36
- test/unit/embedders/test_huggingface.py +0 -48
- test/unit/embedders/test_mixedbread.py +0 -37
- test/unit/embedders/test_octoai.py +0 -35
- test/unit/embedders/test_openai.py +0 -35
- test/unit/embedders/test_togetherai.py +0 -37
- test/unit/embedders/test_vertexai.py +0 -37
- test/unit/embedders/test_voyageai.py +0 -38
- test/unit/partitioners/__init__.py +0 -0
- test/unit/partitioners/test_partitioner.py +0 -63
- test/unit/test_error.py +0 -27
- test/unit/test_html.py +0 -112
- test/unit/test_interfaces.py +0 -26
- test/unit/test_utils.py +0 -220
- test/unit/utils/__init__.py +0 -0
- test/unit/utils/data_generator.py +0 -32
- unstructured_ingest-0.7.2.dist-info/METADATA +0 -383
- unstructured_ingest-0.7.2.dist-info/top_level.txt +0 -3
- {unstructured_ingest-0.7.2.dist-info → unstructured_ingest-1.0.1.dist-info}/entry_points.txt +0 -0
- {unstructured_ingest-0.7.2.dist-info → unstructured_ingest-1.0.1.dist-info/licenses}/LICENSE.md +0 -0
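The RECORD delta above (184 entries down to 50) corresponds to the examples/ and test/ trees whose deletions are listed below: they are no longer shipped in the 1.0.1 wheel. A minimal sketch, assuming the wheel is installed under the project name unstructured-ingest, for confirming what an installed build actually packages:

from importlib.metadata import files

# files() returns the paths recorded in the installed wheel's RECORD (or None).
shipped = [str(path) for path in (files("unstructured-ingest") or [])]
print(any(p.startswith("examples/") for p in shipped))  # expected False on 1.0.1, True on 0.7.2
print(any(p.startswith("test/") for p in shipped))      # likewise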
examples/databricks_volumes_source.py
DELETED
@@ -1,53 +0,0 @@
-import os
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.databricks.volumes_native import (
-    CONNECTOR_TYPE,
-    DatabricksNativeVolumesAccessConfig,
-    DatabricksNativeVolumesConnectionConfig,
-    DatabricksNativeVolumesDownloaderConfig,
-    DatabricksNativeVolumesIndexerConfig,
-)
-from unstructured_ingest.processes.connectors.local import (
-    LocalUploaderConfig,
-)
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        indexer_config=DatabricksNativeVolumesIndexerConfig(
-            host=os.environ["DATABRICKS_HOST"],
-            catalog=os.environ["DATABRICKS_CATALOG"],
-            volume=os.environ["DATABRICKS_VOLUME"],
-            volume_path=os.environ["DATABRICKS_VOLUME_PATH"],
-        ),
-        downloader_config=DatabricksNativeVolumesDownloaderConfig(download_dir=download_path),
-        source_connection_config=DatabricksNativeVolumesConnectionConfig(
-            access_config=DatabricksNativeVolumesAccessConfig(
-                client_id=os.environ["DATABRICKS_CLIENT_ID"],
-                client_secret=os.environ["DATABRICKS_CLIENT_SECRET"],
-            ),
-            host=os.environ["DATABRICKS_HOST"],
-            catalog=os.environ["DATABRICKS_CATALOG"],
-            volume=os.environ["DATABRICKS_VOLUME"],
-            volume_path=os.environ["DATABRICKS_VOLUME_PATH"],
-        ),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(
-            chunking_strategy="basic",
-        ),
-        embedder_config=None,
-        uploader_config=LocalUploaderConfig(output_dir=str(output_path.resolve())),
-    ).run()

examples/delta_table.py
DELETED
@@ -1,45 +0,0 @@
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.delta_table import (
-    CONNECTOR_TYPE,
-    DeltaTableAccessConfig,
-    DeltaTableConnectionConfig,
-    DeltaTableUploaderConfig,
-    DeltaTableUploadStagerConfig,
-)
-from unstructured_ingest.processes.connectors.local import (
-    LocalConnectionConfig,
-    LocalDownloaderConfig,
-    LocalIndexerConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        indexer_config=LocalIndexerConfig(
-            input_path=str(docs_path.resolve()) + "/book-war-and-peace-1p.txt",
-        ),
-        downloader_config=LocalDownloaderConfig(download_dir=download_path),
-        source_connection_config=LocalConnectionConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        destination_connection_config=DeltaTableConnectionConfig(
-            access_config=DeltaTableAccessConfig(), table_uri="example_uri"
-        ),
-        stager_config=DeltaTableUploadStagerConfig(),
-        uploader_config=DeltaTableUploaderConfig(),
-    ).run()

examples/discord_example.py
DELETED
@@ -1,36 +0,0 @@
-import os
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.connectors.discord import (
-    CONNECTOR_TYPE,
-    DiscordAccessConfig,
-    DiscordConnectionConfig,
-    DiscordDownloaderConfig,
-    DiscordIndexerConfig,
-)
-from unstructured_ingest.processes.connectors.local import LocalUploaderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve()), tqdm=True, verbose=True),
-        indexer_config=DiscordIndexerConfig(channels=os.environ["DISCORD_CHANNELS"].split(",")),
-        downloader_config=DiscordDownloaderConfig(limit=int(os.getenv("DISCORD_LIMIT", 100))),
-        source_connection_config=DiscordConnectionConfig(
-            access_config=DiscordAccessConfig(token=os.environ["DISCORD_TOKEN"])
-        ),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        # chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        # embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        uploader_config=LocalUploaderConfig(output_dir=str(output_path.resolve())),
-    ).run()

examples/elasticsearch.py
DELETED
@@ -1,49 +0,0 @@
-import os
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.elasticsearch import (
-    CONNECTOR_TYPE,
-    ElasticsearchAccessConfig,
-    ElasticsearchConnectionConfig,
-    ElasticsearchUploaderConfig,
-    ElasticsearchUploadStagerConfig,
-)
-from unstructured_ingest.processes.connectors.local import (
-    LocalConnectionConfig,
-    LocalDownloaderConfig,
-    LocalIndexerConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    index_name = "ingest-test-destination"
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        indexer_config=LocalIndexerConfig(
-            input_path=str(docs_path.resolve()) + "/book-war-and-peace-1p.txt"
-        ),
-        downloader_config=LocalDownloaderConfig(download_dir=download_path),
-        source_connection_config=LocalConnectionConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        destination_connection_config=ElasticsearchConnectionConfig(
-            access_config=ElasticsearchAccessConfig(password=os.getenv("ELASTIC_PASSWORD")),
-            username=os.getenv("ELASTIC_USERNAME"),
-            hosts=["http://localhost:9200"],
-        ),
-        uploader_config=ElasticsearchUploaderConfig(index_name=index_name),
-        stager_config=ElasticsearchUploadStagerConfig(index_name=index_name),
-    ).run()

examples/google_drive.py
DELETED
@@ -1,45 +0,0 @@
-import os
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.google_drive import (
-    CONNECTOR_TYPE,
-    GoogleDriveAccessConfig,
-    GoogleDriveConnectionConfig,
-    GoogleDriveDownloaderConfig,
-    GoogleDriveIndexerConfig,
-)
-from unstructured_ingest.processes.connectors.local import (
-    LocalUploaderConfig,
-)
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-
-
-if __name__ == "__main__":
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        # You'll need to set GOOGLE_DRIVE_SERVICE_KEY and GOOGLE_DRIVE_DRIVE_ID
-        # environment variable to run this example
-        source_connection_config=GoogleDriveConnectionConfig(
-            access_config=GoogleDriveAccessConfig(
-                service_account_key=os.environ.get("GOOGLE_DRIVE_SERVICE_KEY")
-            ),
-            drive_id=os.environ.get("GOOGLE_DRIVE_DRIVE_ID"),
-        ),
-        indexer_config=GoogleDriveIndexerConfig(
-            resursive=True,
-        ),
-        downloader_config=GoogleDriveDownloaderConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(
-            chunking_strategy="basic",
-        ),
-        embedder_config=None,
-        uploader_config=LocalUploaderConfig(output_dir=output_path),
-    ).run()

examples/kdbai.py
DELETED
@@ -1,54 +0,0 @@
-import os
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.kdbai import (
-    CONNECTOR_TYPE,
-    KdbaiConnectionConfig,
-    KdbaiUploaderConfig,
-    KdbaiUploadStagerConfig,
-)
-from unstructured_ingest.processes.connectors.local import (
-    LocalConnectionConfig,
-    LocalDownloaderConfig,
-    LocalIndexerConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-input_path = docs_path.resolve() / "pdf" / "fake-memo.pdf"
-
-os.environ["KDBAI_API_KEY"] = "key"
-os.environ["KDBAI_ENDPOINT"] = "http://localhost"
-os.environ["KDBAI_DATABASE"] = "default"
-os.environ["KDBAI_TABLE"] = "table"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    logger.info(f"processing file(s): {input_path.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve()), tqdm=True, verbose=True),
-        indexer_config=LocalIndexerConfig(
-            input_path=docs_path.resolve() / "book-war-and-peace-1p.txt"
-        ),
-        downloader_config=LocalDownloaderConfig(download_dir=download_path),
-        source_connection_config=LocalConnectionConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        destination_connection_config=KdbaiConnectionConfig(
-            endpoint=os.environ["KDBAI_ENDPOINT"],
-        ),
-        stager_config=KdbaiUploadStagerConfig(),
-        uploader_config=KdbaiUploaderConfig(
-            database_name=os.environ["KDBAI_DATABASE"], table_name=os.environ["KDBAI_TABLE"]
-        ),
-    ).run()

examples/local.py
DELETED
@@ -1,36 +0,0 @@
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.local import (
-    CONNECTOR_TYPE,
-    LocalConnectionConfig,
-    LocalDownloaderConfig,
-    LocalIndexerConfig,
-    LocalUploaderConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        indexer_config=LocalIndexerConfig(
-            input_path=str(docs_path.resolve()) + "/language-docs/UDHR_first_article_all.txt"
-        ),
-        downloader_config=LocalDownloaderConfig(download_dir=download_path),
-        source_connection_config=LocalConnectionConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        uploader_config=LocalUploaderConfig(output_dir=str(output_path.resolve())),
-    ).run()

examples/milvus.py
DELETED
@@ -1,44 +0,0 @@
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.local import (
-    LocalConnectionConfig,
-    LocalDownloaderConfig,
-    LocalIndexerConfig,
-)
-from unstructured_ingest.processes.connectors.milvus import (
-    CONNECTOR_TYPE,
-    MilvusConnectionConfig,
-    MilvusUploaderConfig,
-    MilvusUploadStagerConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve()), tqdm=True, verbose=True),
-        indexer_config=LocalIndexerConfig(
-            input_path=str(docs_path.resolve()) + "/book-war-and-peace-1p.txt"
-        ),
-        downloader_config=LocalDownloaderConfig(download_dir=download_path),
-        source_connection_config=LocalConnectionConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        destination_connection_config=MilvusConnectionConfig(
-            uri="http://localhost:19530", db_name="milvus"
-        ),
-        stager_config=MilvusUploadStagerConfig(),
-        uploader_config=MilvusUploaderConfig(collection_name="ingest_test"),
-    ).run()

examples/mongodb.py
DELETED
@@ -1,53 +0,0 @@
-import random
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.local import (
-    LocalConnectionConfig,
-    LocalDownloaderConfig,
-    LocalIndexerConfig,
-)
-from unstructured_ingest.processes.connectors.mongodb import (
-    CONNECTOR_TYPE,
-    MongoDBAccessConfig,
-    MongoDBConnectionConfig,
-    MongoDBUploaderConfig,
-    MongoDBUploadStagerConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        indexer_config=LocalIndexerConfig(input_path=str(docs_path.resolve()) + "/multisimple/"),
-        downloader_config=LocalDownloaderConfig(download_dir=download_path),
-        source_connection_config=LocalConnectionConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(
-            chunking_strategy="by_title",
-            chunk_include_orig_elements=False,
-            chunk_max_characters=1500,
-            chunk_multipage_sections=True,
-        ),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        destination_connection_config=MongoDBConnectionConfig(
-            access_config=MongoDBAccessConfig(uri=None),
-            host="localhost",
-            port=27017,
-            collection=f"test-collection-{random.randint(1000, 9999)}",
-            database="testDatabase",
-        ),
-        stager_config=MongoDBUploadStagerConfig(),
-        uploader_config=MongoDBUploaderConfig(batch_size=10),
-    ).run()

examples/opensearch.py
DELETED
@@ -1,50 +0,0 @@
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.local import (
-    LocalConnectionConfig,
-    LocalDownloaderConfig,
-    LocalIndexerConfig,
-)
-from unstructured_ingest.processes.connectors.opensearch import (
-    CONNECTOR_TYPE,
-    OpenSearchAccessConfig,
-    OpenSearchConnectionConfig,
-    OpenSearchUploaderConfig,
-    OpenSearchUploadStagerConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        indexer_config=LocalIndexerConfig(
-            input_path=str(docs_path.resolve()) + "/book-war-and-peace-1p.txt"
-        ),
-        downloader_config=LocalDownloaderConfig(download_dir=download_path),
-        source_connection_config=LocalConnectionConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        destination_connection_config=OpenSearchConnectionConfig(
-            hosts="http://localhost:9247",
-            username="admin",
-            use_ssl=True,
-            access_config=OpenSearchAccessConfig(password="admin"),
-        ),
-        stager_config=OpenSearchUploadStagerConfig(index_name="ingest-test-destination"),
-        uploader_config=OpenSearchUploaderConfig(
-            index_name="ingest-test-destination", batch_size_bytes=150
-        ),
-    ).run()

examples/pinecone.py
DELETED
@@ -1,57 +0,0 @@
-import os
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.local import (
-    LocalConnectionConfig,
-    LocalDownloaderConfig,
-    LocalIndexerConfig,
-)
-from unstructured_ingest.processes.connectors.pinecone import (
-    CONNECTOR_TYPE,
-    PineconeAccessConfig,
-    PineconeConnectionConfig,
-    PineconeUploaderConfig,
-    PineconeUploadStagerConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        indexer_config=LocalIndexerConfig(
-            input_path=str(docs_path.resolve()) + "/book-war-and-peace-1p.txt"
-        ),
-        downloader_config=LocalDownloaderConfig(download_dir=download_path),
-        source_connection_config=LocalConnectionConfig(),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        destination_connection_config=PineconeConnectionConfig(
-            # You'll need to set PINECONE_API_KEY environment variable to run this example
-            access_config=PineconeAccessConfig(pinecone_api_key=os.getenv("PINECONE_API_KEY")),
-            index_name=os.getenv(
-                "PINECONE_INDEX",
-                default="your index name here. e.g. my-index,"
-                "or define in environment variable PINECONE_INDEX",
-            ),
-            environment=os.getenv(
-                "PINECONE_ENVIRONMENT",
-                default="your environment name here. e.g. us-east-1,"
-                "or define in environment variable PINECONE_ENVIRONMENT",
-            ),
-        ),
-        stager_config=PineconeUploadStagerConfig(),
-        uploader_config=PineconeUploaderConfig(batch_size=10, num_processes=2),
-    ).run()

examples/s3.py
DELETED
@@ -1,38 +0,0 @@
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.fsspec.s3 import (
-    CONNECTOR_TYPE,
-    S3ConnectionConfig,
-    S3DownloaderConfig,
-    S3IndexerConfig,
-)
-from unstructured_ingest.processes.connectors.local import (
-    LocalUploaderConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.filter import FiltererConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve()), verbose=True, iter_delete=True),
-        indexer_config=S3IndexerConfig(remote_url="s3://utic-dev-tech-fixtures/small-pdf-set/"),
-        downloader_config=S3DownloaderConfig(download_dir=download_path),
-        source_connection_config=S3ConnectionConfig(anonymous=True),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        uploader_config=LocalUploaderConfig(output_dir=str(output_path.resolve())),
-        filterer_config=FiltererConfig(max_file_size=900000),
-    ).run()

examples/salesforce.py
DELETED
@@ -1,44 +0,0 @@
-import os
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.chunker import ChunkerConfig
-from unstructured_ingest.processes.connectors.local import (
-    LocalUploaderConfig,
-)
-from unstructured_ingest.processes.connectors.salesforce import (
-    CONNECTOR_TYPE,
-    SalesforceAccessConfig,
-    SalesforceConnectionConfig,
-    SalesforceDownloaderConfig,
-    SalesforceIndexerConfig,
-)
-from unstructured_ingest.processes.embedder import EmbedderConfig
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve())),
-        indexer_config=SalesforceIndexerConfig(categories=["Campaign", "EmailMessage"]),
-        downloader_config=SalesforceDownloaderConfig(download_dir=download_path),
-        source_connection_config=SalesforceConnectionConfig(
-            SalesforceAccessConfig(
-                consumer_key=os.getenv("SALESFORCE_CONSUMER_KEY"),
-                private_key=os.getenv("SALESFORCE_PRIVATE_KEY"),
-            ),
-            username=os.getenv("SALESFORCE_USERNAME"),
-        ),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        uploader_config=LocalUploaderConfig(output_dir=str(output_path.resolve())),
-    ).run()

examples/sharepoint.py
DELETED
@@ -1,47 +0,0 @@
-import os
-from pathlib import Path
-
-from unstructured_ingest.interfaces import ProcessorConfig
-from unstructured_ingest.logger import logger
-from unstructured_ingest.pipeline.pipeline import Pipeline
-from unstructured_ingest.processes.connectors.local import (
-    LocalUploaderConfig,
-)
-from unstructured_ingest.processes.connectors.sharepoint import (
-    CONNECTOR_TYPE,
-    SharepointAccessConfig,
-    SharepointConnectionConfig,
-    SharepointDownloaderConfig,
-    SharepointIndexerConfig,
-    SharepointPermissionsConfig,
-)
-from unstructured_ingest.processes.partitioner import PartitionerConfig
-
-base_path = Path(__file__).parent.parent.parent.parent
-docs_path = base_path / "example-docs"
-work_dir = base_path / "tmp_ingest" / CONNECTOR_TYPE
-output_path = work_dir / "output"
-download_path = work_dir / "download"
-
-
-if __name__ == "__main__":
-    logger.info(f"writing all content in: {work_dir.resolve()}")
-    Pipeline.from_configs(
-        context=ProcessorConfig(work_dir=str(work_dir.resolve()), tqdm=True, verbose=True),
-        indexer_config=SharepointIndexerConfig(),
-        downloader_config=SharepointDownloaderConfig(download_dir=download_path),
-        source_connection_config=SharepointConnectionConfig(
-            client_id=os.getenv("SHAREPOINT_CLIENT_ID"),
-            site=os.getenv("SHAREPOINT_SITE"),
-            access_config=SharepointAccessConfig(client_cred=os.getenv("SHAREPOINT_CRED")),
-            permissions_config=SharepointPermissionsConfig(
-                permissions_application_id=os.getenv("SHAREPOINT_PERMISSIONS_APP_ID"),
-                permissions_client_cred=os.getenv("SHAREPOINT_PERMISSIONS_APP_CRED"),
-                permissions_tenant=os.getenv("SHAREPOINT_PERMISSIONS_TENANT"),
-            ),
-        ),
-        partitioner_config=PartitionerConfig(strategy="fast"),
-        # chunker_config=ChunkerConfig(chunking_strategy="by_title"),
-        # embedder_config=EmbedderConfig(embedding_provider="huggingface"),
-        uploader_config=LocalUploaderConfig(output_dir=str(output_path.resolve())),
-    ).run()