lfx-nightly 0.1.13.dev0__py3-none-any.whl → 0.2.0.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- lfx/_assets/component_index.json +1 -1
- lfx/base/agents/agent.py +109 -29
- lfx/base/agents/events.py +102 -35
- lfx/base/agents/utils.py +15 -2
- lfx/base/composio/composio_base.py +24 -9
- lfx/base/datastax/__init__.py +5 -0
- lfx/{components/vectorstores/astradb.py → base/datastax/astradb_base.py} +84 -473
- lfx/base/io/chat.py +5 -4
- lfx/base/mcp/util.py +101 -15
- lfx/base/models/model_input_constants.py +74 -7
- lfx/base/models/ollama_constants.py +3 -0
- lfx/base/models/watsonx_constants.py +12 -0
- lfx/cli/commands.py +1 -1
- lfx/components/agents/__init__.py +3 -1
- lfx/components/agents/agent.py +47 -4
- lfx/components/agents/altk_agent.py +366 -0
- lfx/components/agents/cuga_agent.py +1 -1
- lfx/components/agents/mcp_component.py +32 -2
- lfx/components/amazon/amazon_bedrock_converse.py +1 -1
- lfx/components/apify/apify_actor.py +3 -3
- lfx/components/datastax/__init__.py +12 -6
- lfx/components/datastax/{astra_assistant_manager.py → astradb_assistant_manager.py} +1 -0
- lfx/components/datastax/astradb_chatmemory.py +40 -0
- lfx/components/datastax/astradb_cql.py +5 -31
- lfx/components/datastax/astradb_graph.py +9 -123
- lfx/components/datastax/astradb_tool.py +12 -52
- lfx/components/datastax/astradb_vectorstore.py +133 -976
- lfx/components/datastax/create_assistant.py +1 -0
- lfx/components/datastax/create_thread.py +1 -0
- lfx/components/datastax/dotenv.py +1 -0
- lfx/components/datastax/get_assistant.py +1 -0
- lfx/components/datastax/getenvvar.py +1 -0
- lfx/components/datastax/graph_rag.py +1 -1
- lfx/components/datastax/list_assistants.py +1 -0
- lfx/components/datastax/run.py +1 -0
- lfx/components/docling/__init__.py +3 -0
- lfx/components/docling/docling_remote_vlm.py +284 -0
- lfx/components/ibm/watsonx.py +25 -21
- lfx/components/input_output/chat.py +8 -0
- lfx/components/input_output/chat_output.py +8 -0
- lfx/components/knowledge_bases/ingestion.py +17 -9
- lfx/components/knowledge_bases/retrieval.py +16 -8
- lfx/components/logic/loop.py +4 -0
- lfx/components/mistral/mistral_embeddings.py +1 -1
- lfx/components/models/embedding_model.py +88 -7
- lfx/components/ollama/ollama.py +221 -14
- lfx/components/openrouter/openrouter.py +49 -147
- lfx/components/processing/parser.py +6 -1
- lfx/components/processing/structured_output.py +55 -17
- lfx/components/vectorstores/__init__.py +0 -6
- lfx/custom/custom_component/component.py +3 -2
- lfx/field_typing/constants.py +1 -0
- lfx/graph/edge/base.py +2 -2
- lfx/graph/graph/base.py +1 -1
- lfx/graph/graph/schema.py +3 -2
- lfx/graph/vertex/vertex_types.py +1 -1
- lfx/io/schema.py +6 -0
- lfx/schema/schema.py +5 -0
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev0.dist-info}/METADATA +1 -1
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev0.dist-info}/RECORD +63 -81
- lfx/components/datastax/astra_db.py +0 -77
- lfx/components/datastax/cassandra.py +0 -92
- lfx/components/vectorstores/astradb_graph.py +0 -326
- lfx/components/vectorstores/cassandra.py +0 -264
- lfx/components/vectorstores/cassandra_graph.py +0 -238
- lfx/components/vectorstores/chroma.py +0 -167
- lfx/components/vectorstores/clickhouse.py +0 -135
- lfx/components/vectorstores/couchbase.py +0 -102
- lfx/components/vectorstores/elasticsearch.py +0 -267
- lfx/components/vectorstores/faiss.py +0 -111
- lfx/components/vectorstores/graph_rag.py +0 -141
- lfx/components/vectorstores/hcd.py +0 -314
- lfx/components/vectorstores/milvus.py +0 -115
- lfx/components/vectorstores/mongodb_atlas.py +0 -213
- lfx/components/vectorstores/opensearch.py +0 -243
- lfx/components/vectorstores/pgvector.py +0 -72
- lfx/components/vectorstores/pinecone.py +0 -134
- lfx/components/vectorstores/qdrant.py +0 -109
- lfx/components/vectorstores/supabase.py +0 -76
- lfx/components/vectorstores/upstash.py +0 -124
- lfx/components/vectorstores/vectara.py +0 -97
- lfx/components/vectorstores/vectara_rag.py +0 -164
- lfx/components/vectorstores/weaviate.py +0 -89
- /lfx/components/datastax/{astra_vectorize.py → astradb_vectorize.py} +0 -0
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev0.dist-info}/WHEEL +0 -0
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev0.dist-info}/entry_points.txt +0 -0
lfx/components/datastax/astradb_graph.py

@@ -1,138 +1,36 @@
-import os
-
 import orjson
 
+from lfx.base.datastax.astradb_base import AstraDBBaseComponent
 from lfx.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store
 from lfx.helpers.data import docs_to_data
 from lfx.inputs.inputs import (
-    BoolInput,
     DictInput,
     DropdownInput,
     FloatInput,
-    HandleInput,
     IntInput,
-    SecretStrInput,
     StrInput,
 )
 from lfx.schema.data import Data
 
 
-class AstraDBGraphVectorStoreComponent(LCVectorStoreComponent):
+class AstraDBGraphVectorStoreComponent(AstraDBBaseComponent, LCVectorStoreComponent):
     display_name: str = "Astra DB Graph"
     description: str = "Implementation of Graph Vector Store using Astra DB"
     name = "AstraDBGraph"
     documentation: str = "https://docs.langflow.org/bundles-datastax#astra-db-graph"
     icon: str = "AstraDB"
+    legacy: bool = True
+    replacement = ["datastax.GraphRAG"]
 
     inputs = [
-        SecretStrInput(
-            name="token",
-            display_name="Astra DB Application Token",
-            info="Authentication token for accessing Astra DB.",
-            value="ASTRA_DB_APPLICATION_TOKEN",
-            required=True,
-            advanced=os.getenv("ASTRA_ENHANCED", "false").lower() == "true",
-        ),
-        SecretStrInput(
-            name="api_endpoint",
-            display_name="Database" if os.getenv("ASTRA_ENHANCED", "false").lower() == "true" else "API Endpoint",
-            info="API endpoint URL for the Astra DB service.",
-            value="ASTRA_DB_API_ENDPOINT",
-            required=True,
-        ),
-        StrInput(
-            name="collection_name",
-            display_name="Collection Name",
-            info="The name of the collection within Astra DB where the vectors will be stored.",
-            required=True,
-        ),
+        *AstraDBBaseComponent.inputs,
+        *LCVectorStoreComponent.inputs,
         StrInput(
             name="metadata_incoming_links_key",
             display_name="Metadata incoming links key",
             info="Metadata key used for incoming links.",
             advanced=True,
         ),
-        *LCVectorStoreComponent.inputs,
-        StrInput(
-            name="keyspace",
-            display_name="Keyspace",
-            info="Optional keyspace within Astra DB to use for the collection.",
-            advanced=True,
-        ),
-        HandleInput(
-            name="embedding_model",
-            display_name="Embedding Model",
-            input_types=["Embeddings"],
-            info="Allows an embedding model configuration.",
-        ),
-        DropdownInput(
-            name="metric",
-            display_name="Metric",
-            info="Optional distance metric for vector comparisons in the vector store.",
-            options=["cosine", "dot_product", "euclidean"],
-            value="cosine",
-            advanced=True,
-        ),
-        IntInput(
-            name="batch_size",
-            display_name="Batch Size",
-            info="Optional number of data to process in a single batch.",
-            advanced=True,
-        ),
-        IntInput(
-            name="bulk_insert_batch_concurrency",
-            display_name="Bulk Insert Batch Concurrency",
-            info="Optional concurrency level for bulk insert operations.",
-            advanced=True,
-        ),
-        IntInput(
-            name="bulk_insert_overwrite_concurrency",
-            display_name="Bulk Insert Overwrite Concurrency",
-            info="Optional concurrency level for bulk insert operations that overwrite existing data.",
-            advanced=True,
-        ),
-        IntInput(
-            name="bulk_delete_concurrency",
-            display_name="Bulk Delete Concurrency",
-            info="Optional concurrency level for bulk delete operations.",
-            advanced=True,
-        ),
-        DropdownInput(
-            name="setup_mode",
-            display_name="Setup Mode",
-            info="Configuration mode for setting up the vector store, with options like 'Sync', or 'Off'.",
-            options=["Sync", "Off"],
-            advanced=True,
-            value="Sync",
-        ),
-        BoolInput(
-            name="pre_delete_collection",
-            display_name="Pre Delete Collection",
-            info="Boolean flag to determine whether to delete the collection before creating a new one.",
-            advanced=True,
-            value=False,
-        ),
-        StrInput(
-            name="metadata_indexing_include",
-            display_name="Metadata Indexing Include",
-            info="Optional list of metadata fields to include in the indexing.",
-            advanced=True,
-            list=True,
-        ),
-        StrInput(
-            name="metadata_indexing_exclude",
-            display_name="Metadata Indexing Exclude",
-            info="Optional list of metadata fields to exclude from the indexing.",
-            advanced=True,
-            list=True,
-        ),
-        StrInput(
-            name="collection_indexing_policy",
-            display_name="Collection Indexing Policy",
-            info='Optional JSON string for the "indexing" field of the collection. '
-            "See https://docs.datastax.com/en/astra-db-serverless/api-reference/collections.html#the-indexing-option",
-            advanced=True,
-        ),
         IntInput(
             name="number_of_results",
             display_name="Number of Results",

@@ -174,7 +72,6 @@ class AstraDBGraphVectorStoreComponent(LCVectorStoreComponent):
     @check_cached_vector_store
     def build_vector_store(self):
         try:
-            from astrapy.admin import parse_api_endpoint
            from langchain_astradb import AstraDBGraphVectorStore
            from langchain_astradb.utils.astradb import SetupMode
        except ImportError as e:

@@ -196,25 +93,14 @@ class AstraDBGraphVectorStoreComponent(LCVectorStoreComponent):
         try:
             self.log(f"Initializing Graph Vector Store {self.collection_name}")
 
-            # Handle environment parsing with try-except to avoid circular import
-            environment = None
-            if self.api_endpoint:
-                try:
-                    from astrapy.admin import parse_api_endpoint
-
-                    environment = parse_api_endpoint(self.api_endpoint).environment
-                except ImportError:
-                    self.log("Warning: Could not import parse_api_endpoint, using None for environment")
-                    environment = None
-
             vector_store = AstraDBGraphVectorStore(
                 embedding=self.embedding_model,
                 collection_name=self.collection_name,
                 metadata_incoming_links_key=self.metadata_incoming_links_key or "incoming_links",
                 token=self.token,
-                api_endpoint=self.api_endpoint,
-                namespace=self.keyspace,
-                environment=environment,
+                api_endpoint=self.get_api_endpoint(),
+                namespace=self.get_keyspace(),
+                environment=self.environment,
                 metric=self.metric or None,
                 batch_size=self.batch_size or None,
                 bulk_insert_batch_concurrency=self.bulk_insert_batch_concurrency or None,
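The pattern running through these Astra DB changes is that each component stops declaring its own connection fields (token, API endpoint, keyspace) and instead inherits them from the new AstraDBBaseComponent, spreading *AstraDBBaseComponent.inputs into its own inputs list and resolving connection details through base-class helpers such as get_api_endpoint() and get_keyspace(). Below is a minimal, self-contained sketch of that composition pattern; SharedConnectionBase, GraphStoreComponent, and the stand-in StrInput dataclass are hypothetical names used only for illustration, not the lfx API.

# Minimal sketch of the "shared base inputs" pattern used in this refactor.
# All names below are stand-ins for illustration, not the real lfx classes.
from dataclasses import dataclass


@dataclass
class StrInput:  # stand-in for an lfx input field definition
    name: str
    display_name: str
    advanced: bool = False


class SharedConnectionBase:
    """Hypothetical analogue of AstraDBBaseComponent: owns the connection inputs."""

    inputs = [
        StrInput(name="token", display_name="Application Token"),
        StrInput(name="api_endpoint", display_name="API Endpoint"),
        StrInput(name="keyspace", display_name="Keyspace", advanced=True),
    ]

    def get_api_endpoint(self) -> str:
        # A real implementation would validate/normalize the endpoint here.
        return getattr(self, "api_endpoint", "")

    def get_keyspace(self) -> str:
        return getattr(self, "keyspace", "") or "default_keyspace"


class GraphStoreComponent(SharedConnectionBase):
    """Subclass composes the shared inputs instead of redeclaring them."""

    legacy: bool = True                  # flags the component as legacy
    replacement = ["datastax.GraphRAG"]  # suggested successor component

    inputs = [
        *SharedConnectionBase.inputs,    # connection fields come from the base
        StrInput(
            name="metadata_incoming_links_key",
            display_name="Metadata incoming links key",
            advanced=True,
        ),
    ]


if __name__ == "__main__":
    print([field.name for field in GraphStoreComponent.inputs])
    # ['token', 'api_endpoint', 'keyspace', 'metadata_incoming_links_key']

Because the subclass rebuilds its inputs list by unpacking the base list, the connection fields stay defined in exactly one place and every Astra DB component picks up future changes to them automatically.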
lfx/components/datastax/astradb_tool.py

@@ -1,4 +1,3 @@
-import os
 from datetime import datetime, timezone
 from typing import Any
 

@@ -6,20 +5,25 @@ from astrapy import Collection, DataAPIClient, Database
 from langchain_core.tools import StructuredTool, Tool
 from pydantic import BaseModel, Field, create_model
 
+from lfx.base.datastax.astradb_base import AstraDBBaseComponent
 from lfx.base.langchain_utilities.model import LCToolComponent
-from lfx.io import BoolInput, DictInput, HandleInput, IntInput, SecretStrInput, StrInput, TableInput
+from lfx.io import BoolInput, DictInput, IntInput, StrInput, TableInput
 from lfx.log.logger import logger
 from lfx.schema.data import Data
 from lfx.schema.table import EditMode
 
 
-class AstraDBToolComponent(LCToolComponent):
+class AstraDBToolComponent(AstraDBBaseComponent, LCToolComponent):
     display_name: str = "Astra DB Tool"
     description: str = "Tool to run hybrid vector and metadata search on DataStax Astra DB Collection"
     documentation: str = "https://docs.langflow.org/bundles-datastax#astra-db-tool"
     icon: str = "AstraDB"
+    legacy: bool = True
+    name = "AstraDBTool"
+    replacement = ["datastax.AstraDB"]
 
     inputs = [
+        *AstraDBBaseComponent.inputs,
         StrInput(
             name="tool_name",
             display_name="Tool Name",

@@ -32,33 +36,6 @@ class AstraDBToolComponent(LCToolComponent):
             info="Describe the tool to LLM. Add any information that can help the LLM to use the tool.",
             required=True,
         ),
-        StrInput(
-            name="keyspace",
-            display_name="Keyspace Name",
-            info="The name of the keyspace within Astra where the collection is stored.",
-            value="default_keyspace",
-            advanced=True,
-        ),
-        StrInput(
-            name="collection_name",
-            display_name="Collection Name",
-            info="The name of the collection within Astra DB where the vectors will be stored.",
-            required=True,
-        ),
-        SecretStrInput(
-            name="token",
-            display_name="Astra DB Application Token",
-            info="Authentication token for accessing Astra DB.",
-            value="ASTRA_DB_APPLICATION_TOKEN",
-            required=True,
-        ),
-        SecretStrInput(
-            name="api_endpoint",
-            display_name="Database" if os.getenv("ASTRA_ENHANCED", "false").lower() == "true" else "API Endpoint",
-            info="API endpoint URL for the Astra DB service.",
-            value="ASTRA_DB_API_ENDPOINT",
-            required=True,
-        ),
         StrInput(
             name="projection_attributes",
             display_name="Projection Attributes",

@@ -175,7 +152,6 @@ class AstraDBToolComponent(LCToolComponent):
             advanced=False,
             value=False,
         ),
-        HandleInput(name="embedding", display_name="Embedding Model", input_types=["Embeddings"]),
         StrInput(
             name="semantic_search_instruction",
             display_name="Semantic Search Instruction",

@@ -190,26 +166,6 @@ class AstraDBToolComponent(LCToolComponent):
     _cached_db: Database | None = None
     _cached_collection: Collection | None = None
 
-    def _build_collection(self):
-        try:
-            from astrapy.admin import parse_api_endpoint
-        except ImportError as e:
-            msg = "Could not import Astra DB integration package. Please install it with `uv pip install astrapy`."
-            raise ImportError(msg) from e
-        if self._cached_collection:
-            return self._cached_collection
-
-        try:
-            environment = parse_api_endpoint(self.api_endpoint).environment
-            cached_client = DataAPIClient(self.token, environment=environment)
-            cached_db = cached_client.get_database(self.api_endpoint, keyspace=self.keyspace)
-            self._cached_collection = cached_db.get_collection(self.collection_name)
-        except Exception as e:
-            msg = f"Error building collection: {e}"
-            raise ValueError(msg) from e
-        else:
-            return self._cached_collection
-
     def create_args_schema(self) -> dict[str, BaseModel]:
         """DEPRECATED: This method is deprecated. Please use create_args_schema_v2 instead.
 

@@ -375,7 +331,6 @@ class AstraDBToolComponent(LCToolComponent):
 
     def run_model(self, **args) -> Data | list[Data]:
         """Run the query to get the data from the Astra DB collection."""
-        collection = self._build_collection()
         sort = {}
 
         # Build filters using the new method

@@ -405,6 +360,11 @@ class AstraDBToolComponent(LCToolComponent):
             find_options["projection"] = projection
 
         try:
+            database = self.get_database_object(api_endpoint=self.get_api_endpoint())
+            collection = database.get_collection(
+                name=self.collection_name,
+                keyspace=self.get_keyspace(),
+            )
             results = collection.find(**find_options)
         except Exception as e:
             msg = f"Error on Astra DB Tool {self.tool_name} request: {e}"