lfx-nightly 0.1.12.dev11__py3-none-any.whl → 0.1.12.dev13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lfx/components/clickhouse/clickhouse.py +3 -3
- lfx/components/cohere/cohere_models.py +1 -1
- lfx/components/elastic/opensearch.py +693 -158
- lfx/components/nvidia/system_assist.py +1 -1
- lfx/components/vectorstores/clickhouse.py +3 -3
- lfx/logging/__init__.py +11 -0
- {lfx_nightly-0.1.12.dev11.dist-info → lfx_nightly-0.1.12.dev13.dist-info}/METADATA +1 -1
- {lfx_nightly-0.1.12.dev11.dist-info → lfx_nightly-0.1.12.dev13.dist-info}/RECORD +10 -9
- {lfx_nightly-0.1.12.dev11.dist-info → lfx_nightly-0.1.12.dev13.dist-info}/WHEEL +0 -0
- {lfx_nightly-0.1.12.dev11.dist-info → lfx_nightly-0.1.12.dev13.dist-info}/entry_points.txt +0 -0
lfx/components/clickhouse/clickhouse.py

@@ -15,8 +15,8 @@ from lfx.schema.data import Data
 
 
 class ClickhouseVectorStoreComponent(LCVectorStoreComponent):
-    display_name = "
-    description = "
+    display_name = "ClickHouse"
+    description = "ClickHouse Vector Store with search capabilities"
     name = "Clickhouse"
     icon = "Clickhouse"
 
@@ -69,7 +69,7 @@ class ClickhouseVectorStoreComponent(LCVectorStoreComponent):
             import clickhouse_connect
         except ImportError as e:
             msg = (
-                "Failed to import
+                "Failed to import ClickHouse dependencies. "
                 "Install it using `uv pip install langflow[clickhouse-connect] --pre`"
             )
            raise ImportError(msg) from e
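The reworded message above sits inside the component's usual lazy-import guard for an optional dependency. A minimal standalone sketch of that pattern (function name illustrative, not from the package):

    def require_clickhouse_connect():
        """Import clickhouse-connect lazily, failing with an actionable message."""
        try:
            import clickhouse_connect  # optional dependency
        except ImportError as e:
            msg = (
                "Failed to import ClickHouse dependencies. "
                "Install it using `uv pip install langflow[clickhouse-connect] --pre`"
            )
            raise ImportError(msg) from e
        return clickhouse_connect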
lfx/components/cohere/cohere_models.py

@@ -10,7 +10,7 @@ from lfx.io import SecretStrInput, SliderInput
 class CohereComponent(LCModelComponent):
     display_name = "Cohere Language Models"
     description = "Generate text using Cohere LLMs."
-    documentation = "https://python.langchain.com/docs/
+    documentation = "https://python.langchain.com/docs/integrations/llms/cohere/"
     icon = "Cohere"
     name = "CohereModel"
 
lfx/components/elastic/opensearch.py

@@ -1,243 +1,778 @@
+from __future__ import annotations
+
 import json
+import uuid
 from typing import Any
 
-from
+from opensearchpy import OpenSearch, helpers
 
 from lfx.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store
 from lfx.base.vectorstores.vector_store_connection_decorator import vector_store_connection
-from lfx.io import
-
-    DropdownInput,
-    FloatInput,
-    HandleInput,
-    IntInput,
-    MultilineInput,
-    SecretStrInput,
-    StrInput,
-)
+from lfx.io import BoolInput, DropdownInput, HandleInput, IntInput, MultilineInput, SecretStrInput, StrInput, TableInput
+from lfx.log import logger
 from lfx.schema.data import Data
 
 
 @vector_store_connection
 class OpenSearchVectorStoreComponent(LCVectorStoreComponent):
-    """OpenSearch Vector Store with
+    """OpenSearch Vector Store Component with Hybrid Search Capabilities.
+
+    This component provides vector storage and retrieval using OpenSearch, combining semantic
+    similarity search (KNN) with keyword-based search for optimal results. It supports document
+    ingestion, vector embeddings, and advanced filtering with authentication options.
+
+    Features:
+    - Vector storage with configurable engines (jvector, nmslib, faiss, lucene)
+    - Hybrid search combining KNN vector similarity and keyword matching
+    - Flexible authentication (Basic auth, JWT tokens)
+    - Advanced filtering and aggregations
+    - Metadata injection during document ingestion
+    """
 
     display_name: str = "OpenSearch"
-
-
-
+    icon: str = "OpenSearch"
+    description: str = (
+        "Store and search documents using OpenSearch with hybrid semantic and keyword search capabilities."
+    )
+
+    # Keys we consider baseline
+    default_keys: list[str] = [
+        "opensearch_url",
+        "index_name",
+        *[i.name for i in LCVectorStoreComponent.inputs],  # search_query, add_documents, etc.
+        "embedding",
+        "vector_field",
+        "number_of_results",
+        "auth_mode",
+        "username",
+        "password",
+        "jwt_token",
+        "jwt_header",
+        "bearer_prefix",
+        "use_ssl",
+        "verify_certs",
+        "filter_expression",
+        "engine",
+        "space_type",
+        "ef_construction",
+        "m",
+        "docs_metadata",
+    ]
 
     inputs = [
+        TableInput(
+            name="docs_metadata",
+            display_name="Document Metadata",
+            info=(
+                "Additional metadata key-value pairs to be added to all ingested documents. "
+                "Useful for tagging documents with source information, categories, or other custom attributes."
+            ),
+            table_schema=[
+                {
+                    "name": "key",
+                    "display_name": "Key",
+                    "type": "str",
+                    "description": "Key name",
+                },
+                {
+                    "name": "value",
+                    "display_name": "Value",
+                    "type": "str",
+                    "description": "Value of the metadata",
+                },
+            ],
+            value=[],
+            advanced=True,
+        ),
         StrInput(
             name="opensearch_url",
             display_name="OpenSearch URL",
             value="http://localhost:9200",
-            info=
+            info=(
+                "The connection URL for your OpenSearch cluster "
+                "(e.g., http://localhost:9200 for local development or your cloud endpoint)."
+            ),
         ),
         StrInput(
             name="index_name",
             display_name="Index Name",
             value="langflow",
-            info=
+            info=(
+                "The OpenSearch index name where documents will be stored and searched. "
+                "Will be created automatically if it doesn't exist."
+            ),
         ),
-        *LCVectorStoreComponent.inputs,
-        HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]),
         DropdownInput(
-            name="
-            display_name="
-            options=["
-            value="
+            name="engine",
+            display_name="Vector Engine",
+            options=["jvector", "nmslib", "faiss", "lucene"],
+            value="jvector",
+            info=(
+                "Vector search engine for similarity calculations. 'jvector' is recommended for most use cases. "
+                "Note: Amazon OpenSearch Serverless only supports 'nmslib' or 'faiss'."
+            ),
+            advanced=True,
+        ),
+        DropdownInput(
+            name="space_type",
+            display_name="Distance Metric",
+            options=["l2", "l1", "cosinesimil", "linf", "innerproduct"],
+            value="l2",
+            info=(
+                "Distance metric for calculating vector similarity. 'l2' (Euclidean) is most common, "
+                "'cosinesimil' for cosine similarity, 'innerproduct' for dot product."
+            ),
             advanced=True,
         ),
         IntInput(
-            name="
-            display_name="
-
+            name="ef_construction",
+            display_name="EF Construction",
+            value=512,
+            info=(
+                "Size of the dynamic candidate list during index construction. "
+                "Higher values improve recall but increase indexing time and memory usage."
+            ),
+            advanced=True,
+        ),
+        IntInput(
+            name="m",
+            display_name="M Parameter",
+            value=16,
+            info=(
+                "Number of bidirectional connections for each vector in the HNSW graph. "
+                "Higher values improve search quality but increase memory usage and indexing time."
+            ),
             advanced=True,
-            value=4,
         ),
-
-
-
-
-
+        *LCVectorStoreComponent.inputs,  # includes search_query, add_documents, etc.
+        HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]),
+        StrInput(
+            name="vector_field",
+            display_name="Vector Field Name",
+            value="chunk_embedding",
             advanced=True,
+            info="Name of the field in OpenSearch documents that stores the vector embeddings for similarity search.",
+        ),
+        IntInput(
+            name="number_of_results",
+            display_name="Default Result Limit",
+            value=10,
+            advanced=True,
+            info=(
+                "Default maximum number of search results to return when no limit is "
+                "specified in the filter expression."
+            ),
+        ),
+        MultilineInput(
+            name="filter_expression",
+            display_name="Search Filters (JSON)",
+            value="",
+            info=(
+                "Optional JSON configuration for search filtering, result limits, and score thresholds.\n\n"
+                "Format 1 - Explicit filters:\n"
+                '{"filter": [{"term": {"filename":"doc.pdf"}}, '
+                '{"terms":{"owner":["user1","user2"]}}], "limit": 10, "score_threshold": 1.6}\n\n'
+                "Format 2 - Context-style mapping:\n"
+                '{"data_sources":["file.pdf"], "document_types":["application/pdf"], "owners":["user123"]}\n\n'
+                "Use __IMPOSSIBLE_VALUE__ as placeholder to ignore specific filters."
+            ),
+        ),
+        # ----- Auth controls (dynamic) -----
+        DropdownInput(
+            name="auth_mode",
+            display_name="Authentication Mode",
+            value="basic",
+            options=["basic", "jwt"],
+            info=(
+                "Authentication method: 'basic' for username/password authentication, "
+                "or 'jwt' for JSON Web Token (Bearer) authentication."
+            ),
+            real_time_refresh=True,
+            advanced=False,
         ),
         StrInput(
             name="username",
             display_name="Username",
             value="admin",
-
+            show=False,
         ),
         SecretStrInput(
             name="password",
             display_name="Password",
             value="admin",
+            show=False,
+        ),
+        SecretStrInput(
+            name="jwt_token",
+            display_name="JWT Token",
+            value="JWT",
+            load_from_db=True,
+            show=True,
+            info=(
+                "Valid JSON Web Token for authentication. "
+                "Will be sent in the Authorization header (with optional 'Bearer ' prefix)."
+            ),
+        ),
+        StrInput(
+            name="jwt_header",
+            display_name="JWT Header Name",
+            value="Authorization",
+            show=False,
             advanced=True,
         ),
+        BoolInput(
+            name="bearer_prefix",
+            display_name="Prefix 'Bearer '",
+            value=True,
+            show=False,
+            advanced=True,
+        ),
+        # ----- TLS -----
         BoolInput(
             name="use_ssl",
-            display_name="Use SSL",
+            display_name="Use SSL/TLS",
             value=True,
             advanced=True,
+            info="Enable SSL/TLS encryption for secure connections to OpenSearch.",
         ),
         BoolInput(
             name="verify_certs",
-            display_name="Verify Certificates",
+            display_name="Verify SSL Certificates",
             value=False,
             advanced=True,
-        ),
-        MultilineInput(
-            name="hybrid_search_query",
-            display_name="Hybrid Search Query",
-            value="",
-            advanced=True,
             info=(
-                "
-                "
+                "Verify SSL certificates when connecting. "
+                "Disable for self-signed certificates in development environments."
             ),
         ),
     ]
 
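The new filter_expression input accepts the two JSON shapes described in its info text above. A minimal sketch of what a user would paste into that field, with illustrative file and user names:

    import json

    # Format 1 - explicit term/terms filters plus limit and score threshold
    explicit = {
        "filter": [
            {"term": {"filename": "doc.pdf"}},
            {"terms": {"owner": ["user1", "user2"]}},
        ],
        "limit": 10,
        "score_threshold": 1.6,
    }

    # Format 2 - context-style mapping; keys are translated to index fields
    context_style = {
        "data_sources": ["file.pdf"],
        "document_types": ["application/pdf"],
        "owners": ["user123"],
    }

    print(json.dumps(explicit))
    print(json.dumps(context_style))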
-
-    def
-
-
-
-
-
-
-
+    # ---------- helper functions for index management ----------
+    def _default_text_mapping(
+        self,
+        dim: int,
+        engine: str = "jvector",
+        space_type: str = "l2",
+        ef_search: int = 512,
+        ef_construction: int = 100,
+        m: int = 16,
+        vector_field: str = "vector_field",
+    ) -> dict[str, Any]:
+        """Create the default OpenSearch index mapping for vector search.
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        This method generates the index configuration with k-NN settings optimized
+        for approximate nearest neighbor search using the specified vector engine.
+
+        Args:
+            dim: Dimensionality of the vector embeddings
+            engine: Vector search engine (jvector, nmslib, faiss, lucene)
+            space_type: Distance metric for similarity calculation
+            ef_search: Size of dynamic list used during search
+            ef_construction: Size of dynamic list used during index construction
+            m: Number of bidirectional links for each vector
+            vector_field: Name of the field storing vector embeddings
+
+        Returns:
+            Dictionary containing OpenSearch index mapping configuration
+        """
+        return {
+            "settings": {"index": {"knn": True, "knn.algo_param.ef_search": ef_search}},
+            "mappings": {
+                "properties": {
+                    vector_field: {
+                        "type": "knn_vector",
+                        "dimension": dim,
+                        "method": {
+                            "name": "disk_ann",
+                            "space_type": space_type,
+                            "engine": engine,
+                            "parameters": {"ef_construction": ef_construction, "m": m},
+                        },
+                    }
+                }
+            },
+        }
+
+    def _validate_aoss_with_engines(self, *, is_aoss: bool, engine: str) -> None:
+        """Validate engine compatibility with Amazon OpenSearch Serverless (AOSS).
+
+        Amazon OpenSearch Serverless has restrictions on which vector engines
+        can be used. This method ensures the selected engine is compatible.
+
+        Args:
+            is_aoss: Whether the connection is to Amazon OpenSearch Serverless
+            engine: The selected vector search engine
+
+        Raises:
+            ValueError: If AOSS is used with an incompatible engine
+        """
+        if is_aoss and engine not in {"nmslib", "faiss"}:
+            msg = "Amazon OpenSearch Service Serverless only supports `nmslib` or `faiss` engines"
+            raise ValueError(msg)
 
-
-
+    def _is_aoss_enabled(self, http_auth: Any) -> bool:
+        """Determine if Amazon OpenSearch Serverless (AOSS) is being used.
 
-
+        Args:
+            http_auth: The HTTP authentication object
 
-
-
+        Returns:
+            True if AOSS is enabled, False otherwise
+        """
+        return http_auth is not None and hasattr(http_auth, "service") and http_auth.service == "aoss"
+
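Based on the return value above, calling _default_text_mapping(dim=768) with all defaults should produce an index body like the following. This is a sketch reproducing the diff's structure, not output captured from the package:

    expected_mapping = {
        "settings": {"index": {"knn": True, "knn.algo_param.ef_search": 512}},
        "mappings": {
            "properties": {
                "vector_field": {  # default vector_field name
                    "type": "knn_vector",
                    "dimension": 768,
                    "method": {
                        "name": "disk_ann",
                        "space_type": "l2",
                        "engine": "jvector",
                        "parameters": {"ef_construction": 100, "m": 16},
                    },
                }
            }
        },
    }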
+    def _bulk_ingest_embeddings(
+        self,
+        client: OpenSearch,
+        index_name: str,
+        embeddings: list[list[float]],
+        texts: list[str],
+        metadatas: list[dict] | None = None,
+        ids: list[str] | None = None,
+        vector_field: str = "vector_field",
+        text_field: str = "text",
+        mapping: dict | None = None,
+        max_chunk_bytes: int | None = 1 * 1024 * 1024,
+        *,
+        is_aoss: bool = False,
+    ) -> list[str]:
+        """Efficiently ingest multiple documents with embeddings into OpenSearch.
+
+        This method uses bulk operations to insert documents with their vector
+        embeddings and metadata into the specified OpenSearch index.
+
+        Args:
+            client: OpenSearch client instance
+            index_name: Target index for document storage
+            embeddings: List of vector embeddings for each document
+            texts: List of document texts
+            metadatas: Optional metadata dictionaries for each document
+            ids: Optional document IDs (UUIDs generated if not provided)
+            vector_field: Field name for storing vector embeddings
+            text_field: Field name for storing document text
+            mapping: Optional index mapping configuration
+            max_chunk_bytes: Maximum size per bulk request chunk
+            is_aoss: Whether using Amazon OpenSearch Serverless
+
+        Returns:
+            List of document IDs that were successfully ingested
+        """
+        if not mapping:
+            mapping = {}
+
+        requests = []
+        return_ids = []
+
+        for i, text in enumerate(texts):
+            metadata = metadatas[i] if metadatas else {}
+            _id = ids[i] if ids else str(uuid.uuid4())
+            request = {
+                "_op_type": "index",
+                "_index": index_name,
+                vector_field: embeddings[i],
+                text_field: text,
+                **metadata,
+            }
+            if is_aoss:
+                request["id"] = _id
+            else:
+                request["_id"] = _id
+            requests.append(request)
+            return_ids.append(_id)
+        if metadatas:
+            self.log(f"Sample metadata: {metadatas[0] if metadatas else {}}")
+        helpers.bulk(client, requests, max_chunk_bytes=max_chunk_bytes)
+        return return_ids
+
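Each bulk action built by the loop above is a flat dict: the vector and text sit next to the spread metadata keys, and the ID key is "id" on Serverless but "_id" otherwise. A sketch of one such action with illustrative values:

    action = {
        "_op_type": "index",
        "_index": "langflow",
        "chunk_embedding": [0.12, -0.34, 0.56],  # vector_field holds the embedding
        "text": "First chunk of the document.",  # text_field holds the raw text
        "filename": "doc.pdf",                   # **metadata is spread inline
        "owner": "user1",
        "_id": "f47ac10b-58cc-4372-a567-0e02b2c3d479",  # "id" instead when is_aoss=True
    }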
+    # ---------- auth / client ----------
+    def _build_auth_kwargs(self) -> dict[str, Any]:
+        """Build authentication configuration for OpenSearch client.
+
+        Constructs the appropriate authentication parameters based on the
+        selected auth mode (basic username/password or JWT token).
+
+        Returns:
+            Dictionary containing authentication configuration
+
+        Raises:
+            ValueError: If required authentication parameters are missing
+        """
+        mode = (self.auth_mode or "basic").strip().lower()
+        if mode == "jwt":
+            token = (self.jwt_token or "").strip()
+            if not token:
+                msg = "Auth Mode is 'jwt' but no jwt_token was provided."
+                raise ValueError(msg)
+            header_name = (self.jwt_header or "Authorization").strip()
+            header_value = f"Bearer {token}" if self.bearer_prefix else token
+            return {"headers": {header_name: header_value}}
+        user = (self.username or "").strip()
+        pwd = (self.password or "").strip()
+        if not user or not pwd:
+            msg = "Auth Mode is 'basic' but username/password are missing."
+            raise ValueError(msg)
+        return {"http_auth": (user, pwd)}
+
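The two auth modes produce differently shaped client kwargs, and build_client simply splats whichever one _build_auth_kwargs returns. A sketch of both shapes, with placeholder credentials:

    # auth_mode == "jwt" (bearer_prefix=True, default header name)
    jwt_kwargs = {"headers": {"Authorization": "Bearer <your-jwt>"}}

    # auth_mode == "basic"
    basic_kwargs = {"http_auth": ("admin", "admin")}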
+    def build_client(self) -> OpenSearch:
+        """Create and configure an OpenSearch client instance.
+
+        Returns:
+            Configured OpenSearch client ready for operations
+        """
+        auth_kwargs = self._build_auth_kwargs()
+        return OpenSearch(
+            hosts=[self.opensearch_url],
+            use_ssl=self.use_ssl,
+            verify_certs=self.verify_certs,
+            ssl_assert_hostname=False,
+            ssl_show_warn=False,
+            **auth_kwargs,
+        )
+
+    @check_cached_vector_store
+    def build_vector_store(self) -> OpenSearch:
+        # Return raw OpenSearch client as our “vector store.”
+        self.log(self.ingest_data)
+        client = self.build_client()
+        self._add_documents_to_vector_store(client=client)
+        return client
+
+    # ---------- ingest ----------
+    def _add_documents_to_vector_store(self, client: OpenSearch) -> None:
+        """Process and ingest documents into the OpenSearch vector store.
+
+        This method handles the complete document ingestion pipeline:
+        - Prepares document data and metadata
+        - Generates vector embeddings
+        - Creates appropriate index mappings
+        - Bulk inserts documents with vectors
+
+        Args:
+            client: OpenSearch client for performing operations
+        """
         # Convert DataFrame to Data if needed using parent's method
         self.ingest_data = self._prepare_ingest_data()
 
-
-
-
-
-
-
-
-
+        docs = self.ingest_data or []
+        if not docs:
+            self.log("No documents to ingest.")
+            return
+
+        # Extract texts and metadata from documents
+        texts = []
+        metadatas = []
+        # Process docs_metadata table input into a dict
+        additional_metadata = {}
+        if hasattr(self, "docs_metadata") and self.docs_metadata:
+            for item in self.docs_metadata:
+                if isinstance(item, dict) and "key" in item and "value" in item:
+                    additional_metadata[item["key"]] = item["value"]
+
+        for doc_obj in docs:
+            data_copy = json.loads(doc_obj.model_dump_json())
+            text = data_copy.pop(doc_obj.text_key, doc_obj.default_value)
+            texts.append(text)
+
+            # Merge additional metadata from table input
+            data_copy.update(additional_metadata)
+
+            metadatas.append(data_copy)
+        self.log(metadatas)
+        if not self.embedding:
+            msg = "Embedding handle is required to embed documents."
+            raise ValueError(msg)
+
+        # Generate embeddings
+        vectors = self.embedding.embed_documents(texts)
+
+        if not vectors:
+            self.log("No vectors generated from documents.")
+            return
+
+        # Get vector dimension for mapping
+        dim = len(vectors[0]) if vectors else 768  # default fallback
+
+        # Check for AOSS
+        auth_kwargs = self._build_auth_kwargs()
+        is_aoss = self._is_aoss_enabled(auth_kwargs.get("http_auth"))
+
+        # Validate engine with AOSS
+        engine = getattr(self, "engine", "jvector")
+        self._validate_aoss_with_engines(is_aoss=is_aoss, engine=engine)
+
+        # Create mapping with proper KNN settings
+        space_type = getattr(self, "space_type", "l2")
+        ef_construction = getattr(self, "ef_construction", 512)
+        m = getattr(self, "m", 16)
+
+        mapping = self._default_text_mapping(
+            dim=dim,
+            engine=engine,
+            space_type=space_type,
+            ef_construction=ef_construction,
+            m=m,
+            vector_field=self.vector_field,
+        )
+
+        self.log(f"Indexing {len(texts)} documents into '{self.index_name}' with proper KNN mapping...")
+
+        # Use the LangChain-style bulk ingestion
+        return_ids = self._bulk_ingest_embeddings(
+            client=client,
+            index_name=self.index_name,
+            embeddings=vectors,
+            texts=texts,
+            metadatas=metadatas,
+            vector_field=self.vector_field,
+            text_field="text",
+            mapping=mapping,
+            is_aoss=is_aoss,
+        )
+        self.log(metadatas)
 
-
-
+        self.log(f"Successfully indexed {len(return_ids)} documents.")
+
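The docs_metadata table rows are folded into a plain dict and merged into every document's metadata before embedding, as the loop above shows. A small sketch of that folding, with illustrative rows:

    docs_metadata = [
        {"key": "source", "value": "crawler"},
        {"key": "category", "value": "manual"},
    ]

    additional_metadata = {
        item["key"]: item["value"]
        for item in docs_metadata
        if isinstance(item, dict) and "key" in item and "value" in item
    }
    # -> {"source": "crawler", "category": "manual"}, merged into each data_copy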
+    # ---------- helpers for filters ----------
+    def _is_placeholder_term(self, term_obj: dict) -> bool:
+        # term_obj like {"filename": "__IMPOSSIBLE_VALUE__"}
+        return any(v == "__IMPOSSIBLE_VALUE__" for v in term_obj.values())
+
+    def _coerce_filter_clauses(self, filter_obj: dict | None) -> list[dict]:
+        """Convert filter expressions into OpenSearch-compatible filter clauses.
+
+        This method accepts two filter formats and converts them to standardized
+        OpenSearch query clauses:
+
+        Format A - Explicit filters:
+            {"filter": [{"term": {"field": "value"}}, {"terms": {"field": ["val1", "val2"]}}],
+             "limit": 10, "score_threshold": 1.5}
+
+        Format B - Context-style mapping:
+            {"data_sources": ["file1.pdf"], "document_types": ["pdf"], "owners": ["user1"]}
+
+        Args:
+            filter_obj: Filter configuration dictionary or None
+
+        Returns:
+            List of OpenSearch filter clauses (term/terms objects)
+            Placeholder values with "__IMPOSSIBLE_VALUE__" are ignored
+        """
+        if not filter_obj:
+            return []
+
+        # If it is a string, try to parse it once
+        if isinstance(filter_obj, str):
             try:
-
-            except
-
-
-
-
+                filter_obj = json.loads(filter_obj)
+            except json.JSONDecodeError:
+                # Not valid JSON - treat as no filters
+                return []
+
+        # Case A: already an explicit list/dict under "filter"
+        if "filter" in filter_obj:
+            raw = filter_obj["filter"]
+            if isinstance(raw, dict):
+                raw = [raw]
+            explicit_clauses: list[dict] = []
+            for f in raw or []:
+                if "term" in f and isinstance(f["term"], dict) and not self._is_placeholder_term(f["term"]):
+                    explicit_clauses.append(f)
+                elif "terms" in f and isinstance(f["terms"], dict):
+                    field, vals = next(iter(f["terms"].items()))
+                    if isinstance(vals, list) and len(vals) > 0:
+                        explicit_clauses.append(f)
+            return explicit_clauses
 
+        # Case B: convert context-style maps into clauses
+        field_mapping = {
+            "data_sources": "filename",
+            "document_types": "mimetype",
+            "owners": "owner",
+        }
+        context_clauses: list[dict] = []
+        for k, values in filter_obj.items():
+            if not isinstance(values, list):
+                continue
+            field = field_mapping.get(k, k)
+            if len(values) == 0:
+                # Match-nothing placeholder (kept to mirror your tool semantics)
+                context_clauses.append({"term": {field: "__IMPOSSIBLE_VALUE__"}})
+            elif len(values) == 1:
+                if values[0] != "__IMPOSSIBLE_VALUE__":
+                    context_clauses.append({"term": {field: values[0]}})
+            else:
+                context_clauses.append({"terms": {field: values}})
+        return context_clauses
+
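Putting the two branches of _coerce_filter_clauses side by side, with illustrative inputs and the clauses they should produce under the logic above:

    # Format A: explicit clauses pass through; placeholder terms are dropped
    a_in = {"filter": [{"term": {"filename": "doc.pdf"}},
                       {"term": {"owner": "__IMPOSSIBLE_VALUE__"}}]}
    # -> [{"term": {"filename": "doc.pdf"}}]

    # Format B: context keys are renamed via field_mapping, and list length
    # decides between term (one value) and terms (several values)
    b_in = {"data_sources": ["a.pdf", "b.pdf"], "owners": ["user1"]}
    # -> [{"terms": {"filename": ["a.pdf", "b.pdf"]}},
    #     {"term": {"owner": "user1"}}]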
+    # ---------- search (single hybrid path matching your tool) ----------
     def search(self, query: str | None = None) -> list[dict[str, Any]]:
-        """
-
-
+        """Perform hybrid search combining vector similarity and keyword matching.
+
+        This method executes a sophisticated search that combines:
+        - K-nearest neighbor (KNN) vector similarity search (70% weight)
+        - Multi-field keyword search with fuzzy matching (30% weight)
+        - Optional filtering and score thresholds
+        - Aggregations for faceted search results
+
+        Args:
+            query: Search query string (used for both vector embedding and keyword search)
 
-
+        Returns:
+            List of search results with page_content, metadata, and relevance scores
+
+        Raises:
+            ValueError: If embedding component is not provided or filter JSON is invalid
+        """
+        logger.info(self.ingest_data)
+        client = self.build_client()
+        q = (query or "").strip()
+
+        # Parse optional filter expression (can be either A or B shape; see _coerce_filter_clauses)
+        filter_obj = None
+        if getattr(self, "filter_expression", "") and self.filter_expression.strip():
+            try:
+                filter_obj = json.loads(self.filter_expression)
+            except json.JSONDecodeError as e:
+                msg = f"Invalid filter_expression JSON: {e}"
+                raise ValueError(msg) from e
 
-
-
-
-            except json.JSONDecodeError as e:
-                error_message = f"Invalid hybrid search query JSON: {e}"
-                self.log(error_message)
-                raise ValueError(error_message) from e
+        if not self.embedding:
+            msg = "Embedding is required to run hybrid search (KNN + keyword)."
+            raise ValueError(msg)
 
-
+        # Embed the query
+        vec = self.embedding.embed_query(q)
 
-
-
-            source = hit.get("_source", {})
-            text = source.get("text", "")
-            metadata = source.get("metadata", {})
+        # Build filter clauses (accept both shapes)
+        filter_clauses = self._coerce_filter_clauses(filter_obj)
 
-
-
+        # Respect the tool's limit/threshold defaults
+        limit = (filter_obj or {}).get("limit", self.number_of_results)
+        score_threshold = (filter_obj or {}).get("score_threshold", 0)
 
-
+        # Build the same hybrid body as your SearchService
+        body = {
+            "query": {
+                "bool": {
+                    "should": [
                         {
-                            "
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                            "knn": {
+                                self.vector_field: {
+                                    "vector": vec,
+                                    "k": 10,  # fixed to match the tool
+                                    "boost": 0.7,
+                                }
+                            }
+                        },
+                        {
+                            "multi_match": {
+                                "query": q,
+                                "fields": ["text^2", "filename^1.5"],
+                                "type": "best_fields",
+                                "fuzziness": "AUTO",
+                                "boost": 0.3,
+                            }
+                        },
+                    ],
+                    "minimum_should_match": 1,
+                }
+            },
+            "aggs": {
+                "data_sources": {"terms": {"field": "filename", "size": 20}},
+                "document_types": {"terms": {"field": "mimetype", "size": 10}},
+                "owners": {"terms": {"field": "owner", "size": 10}},
+            },
+            "_source": [
+                "filename",
+                "mimetype",
+                "page",
+                "text",
+                "source_url",
+                "owner",
+                "allowed_users",
+                "allowed_groups",
+            ],
+            "size": limit,
+        }
+        if filter_clauses:
+            body["query"]["bool"]["filter"] = filter_clauses
 
-
-
-
-            raise RuntimeError(error_message) from e
+        if isinstance(score_threshold, (int, float)) and score_threshold > 0:
+            # top-level min_score (matches your tool)
+            body["min_score"] = score_threshold
 
-
-
-
+        resp = client.search(index=self.index_name, body=body)
+        hits = resp.get("hits", {}).get("hits", [])
+        return [
+            {
+                "page_content": hit["_source"].get("text", ""),
+                "metadata": {k: v for k, v in hit["_source"].items() if k != "text"},
+                "score": hit.get("_score"),
+            }
+            for hit in hits
+        ]
 
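Downstream code sees search() results in the flattened shape built by the final list comprehension above; a sketch of one result, with illustrative field values:

    result = {
        "page_content": "First chunk of the document.",
        "metadata": {
            "filename": "doc.pdf",  # every _source field except "text"
            "mimetype": "application/pdf",
            "page": 1,
            "owner": "user1",
        },
        "score": 1.73,  # OpenSearch _score from the 0.7 KNN + 0.3 keyword boosts
    }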
     def search_documents(self) -> list[Data]:
-        """Search
+        """Search documents and return results as Data objects.
+
+        This is the main interface method that performs the search using the
+        configured search_query and returns results in Langflow's Data format.
+
+        Returns:
+            List of Data objects containing search results with text and metadata
 
-
+        Raises:
+            Exception: If search operation fails
         """
         try:
-
-
-
-                Data(
-                    file_path=result["metadata"].get("file_path", ""),
-                    text=result["page_content"],
-                )
-                for result in results
-            ]
+            raw = self.search(self.search_query or "")
+            return [Data(text=hit["page_content"], **hit["metadata"]) for hit in raw]
+            self.log(self.ingest_data)
         except Exception as e:
-
-
-
+            self.log(f"search_documents error: {e}")
+            raise
+
+    # -------- dynamic UI handling (auth switch) --------
+    async def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None) -> dict:
+        """Dynamically update component configuration based on field changes.
+
+        This method handles real-time UI updates, particularly for authentication
+        mode changes that show/hide relevant input fields.
+
+        Args:
+            build_config: Current component configuration
+            field_value: New value for the changed field
+            field_name: Name of the field that changed
+
+        Returns:
+            Updated build configuration with appropriate field visibility
+        """
+        try:
+            if field_name == "auth_mode":
+                mode = (field_value or "basic").strip().lower()
+                is_basic = mode == "basic"
+                is_jwt = mode == "jwt"
+
+                build_config["username"]["show"] = is_basic
+                build_config["password"]["show"] = is_basic
+
+                build_config["jwt_token"]["show"] = is_jwt
+                build_config["jwt_header"]["show"] = is_jwt
+                build_config["bearer_prefix"]["show"] = is_jwt
+
+                build_config["username"]["required"] = is_basic
+                build_config["password"]["required"] = is_basic
+
+                build_config["jwt_token"]["required"] = is_jwt
+                build_config["jwt_header"]["required"] = is_jwt
+                build_config["bearer_prefix"]["required"] = False
+
+                if is_basic:
+                    build_config["jwt_token"]["value"] = ""
+
+            return build_config
+
+        except (KeyError, ValueError) as e:
+            self.log(f"update_build_config error: {e}")
 
-
-        return retrieved_data
+            return build_config
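The net effect of the auth_mode branch above is a visibility flip between the two credential groups. A condensed sketch of that expectation (helper name illustrative, not part of the component):

    def visible_fields(mode: str) -> list[str]:
        """Which credential inputs the auth_mode switch leaves visible."""
        basic_fields = ["username", "password"]
        jwt_fields = ["jwt_token", "jwt_header", "bearer_prefix"]
        return basic_fields if mode == "basic" else jwt_fields

    assert visible_fields("basic") == ["username", "password"]
    assert visible_fields("jwt") == ["jwt_token", "jwt_header", "bearer_prefix"]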
lfx/components/nvidia/system_assist.py

@@ -15,7 +15,7 @@ class NvidiaSystemAssistComponent(ComponentWithCache):
         "The user may query GPU specifications, state, and ask the NV-API to perform "
         "several GPU-editing acations. The prompt must be human-readable language."
     )
-    documentation = "https://docs.langflow.org/
+    documentation = "https://docs.langflow.org/integrations-nvidia-g-assist"
     icon = "NVIDIA"
     rise_initialized = False
 
lfx/components/vectorstores/clickhouse.py

@@ -15,8 +15,8 @@ from lfx.schema.data import Data
 
 
 class ClickhouseVectorStoreComponent(LCVectorStoreComponent):
-    display_name = "
-    description = "
+    display_name = "ClickHouse"
+    description = "ClickHouse Vector Store with search capabilities"
     name = "Clickhouse"
     icon = "Clickhouse"
 
@@ -69,7 +69,7 @@ class ClickhouseVectorStoreComponent(LCVectorStoreComponent):
             import clickhouse_connect
         except ImportError as e:
             msg = (
-                "Failed to import
+                "Failed to import ClickHouse dependencies. "
                 "Install it using `uv pip install langflow[clickhouse-connect] --pre`"
             )
             raise ImportError(msg) from e
lfx/logging/__init__.py ADDED

@@ -0,0 +1,11 @@
+"""Backwards compatibility module for lfx.logging.
+
+This module provides backwards compatibility for code that imports from lfx.logging.
+All functionality has been moved to lfx.log.
+"""
+
+# Re-export everything from lfx.log for backwards compatibility
+from lfx.log.logger import configure, logger
+
+# Maintain the same __all__ exports
+__all__ = ["configure", "logger"]
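With this shim in place, the old and new import paths should resolve to the same objects; a quick sketch:

    # Old import path (kept working by the shim)
    from lfx.logging import configure, logger

    # New canonical path
    from lfx.log.logger import configure as configure_new, logger as logger_new

    assert logger is logger_new and configure is configure_new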
{lfx_nightly-0.1.12.dev11.dist-info → lfx_nightly-0.1.12.dev13.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lfx-nightly
-Version: 0.1.12.
+Version: 0.1.12.dev13
 Summary: Langflow Executor - A lightweight CLI tool for executing and serving Langflow AI flows
 Author-email: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
 Requires-Python: <3.14,>=3.10
{lfx_nightly-0.1.12.dev11.dist-info → lfx_nightly-0.1.12.dev13.dist-info}/RECORD

@@ -148,12 +148,12 @@ lfx/components/cleanlab/cleanlab_evaluator.py,sha256=BpY8gE_-HnhrnfqHFOmA-zup7NA
 lfx/components/cleanlab/cleanlab_rag_evaluator.py,sha256=9ItX_3yj_0gZuX3_vVVRXj2qd8N7Jtr7P0ZBXPk0imk,9834
 lfx/components/cleanlab/cleanlab_remediator.py,sha256=pbGdM4mRx8v3qMHX6-3CZ_lAbPw6TVUGbrGbRTkfUxU,5877
 lfx/components/clickhouse/__init__.py,sha256=wv5NE-8x0eFDIJDjMfNq7xXURXQN8EwT0Rbpz4rXvZc,965
-lfx/components/clickhouse/clickhouse.py,sha256=
+lfx/components/clickhouse/clickhouse.py,sha256=kQnoE9ycFPWDE0rNLxaTyjxq5b3aCeaf-f5H_Lx7iBs,5035
 lfx/components/cloudflare/__init__.py,sha256=hNoGPWCBAXbN5pjfMqexHGRpZlK0ldDZoHpejDnIpu8,1007
 lfx/components/cloudflare/cloudflare.py,sha256=HqbkhXwUYacol8GkHchZD1bM7tDNHuuPGnHF7M8X454,2994
 lfx/components/cohere/__init__.py,sha256=MSTeplsNIXTVm_dUcJETy6YGb-fw7-dplC9jzAodPpo,1198
 lfx/components/cohere/cohere_embeddings.py,sha256=nA9BOixk534yJZymJaukBrQYBj_uB2nyYvzJPd_3aUc,3083
-lfx/components/cohere/cohere_models.py,sha256=
+lfx/components/cohere/cohere_models.py,sha256=WUhS4dcG8FBcJm2dCfhiDuaxZX8S1lICMI_Mmd6kflo,1563
 lfx/components/cohere/cohere_rerank.py,sha256=qUoNEe6sjUnvkTHkCzwayBuLDoH957BBEgb-Qu_k9Yk,1554
 lfx/components/composio/__init__.py,sha256=DVYKV9JyGTAWYDpAV7TeEKp_efltoOiyblQmL-qq4Ok,5175
 lfx/components/composio/airtable_composio.py,sha256=5HrQEcM8bW7xv4AE5NIWyBzfgopxf9SIIpdcQHuy978,357

@@ -264,7 +264,7 @@ lfx/components/duckduckgo/__init__.py,sha256=Y4zaOLVOKsD_qwF7KRLek1pcaKKHa6lGUHO
 lfx/components/duckduckgo/duck_duck_go_search_run.py,sha256=LlIqWkOJPIde1zEzin6XArYLjkg4ZBNi_AEZLJkfOQo,3074
 lfx/components/elastic/__init__.py,sha256=tEqQ9UwUyeGttqGXOS2Or7Y50rQnNRWySfMx8u4fV8U,1126
 lfx/components/elastic/elasticsearch.py,sha256=WcBi8THcOzopZeYOQeEoHxsZkACHk4R3MKhSEYGxnfY,9773
-lfx/components/elastic/opensearch.py,sha256=
+lfx/components/elastic/opensearch.py,sha256=6uyJdHsG-IvfMTXgmRD8O7fCZc82zDguCUCC-na3kv4,29446
 lfx/components/embeddings/__init__.py,sha256=WP7MRGihB0vkSmqKlBhi2n-ZLMMbwboUbKjQRpIVVCQ,1136
 lfx/components/embeddings/similarity.py,sha256=EqL8p8g9fPTpMVnVNB3hBpHgZZZg3TbQN9B20vHDnRo,2932
 lfx/components/embeddings/text_embedder.py,sha256=oYriXXuYKU_kMW-pL0Cuk--4G5CVD0bMlfes4Ge4zIQ,2450

@@ -397,7 +397,7 @@ lfx/components/nvidia/nvidia.py,sha256=MpW5cgiUkWNYPSPR8mfJXbuajKb52J2p8Fzjc0HNl
 lfx/components/nvidia/nvidia_embedding.py,sha256=D97QOAgtZEzwHvBmDDShTmZhDAyN2SRbfb71515ib-g,2658
 lfx/components/nvidia/nvidia_ingest.py,sha256=_wxmYNmRQ2kBfAxaXLykBIlKFXVGXEsTY22spVeoCCI,12065
 lfx/components/nvidia/nvidia_rerank.py,sha256=zzl2skHxf2oXINDZBmG8-GbkTkc6EWtyMjyV8pVRAm4,2293
-lfx/components/nvidia/system_assist.py,sha256=
+lfx/components/nvidia/system_assist.py,sha256=G8cgsLQxRBBnUt49_Uzxt7cdTNplVAzUlDDwoqe57u0,2473
 lfx/components/olivya/__init__.py,sha256=ilZR88huL3vnQHO27g4jsUkyIYSgN7RPOq8Corbi6xA,67
 lfx/components/olivya/olivya.py,sha256=2A5QizyHByoIJLMM9RlyU64gYlqy0qtkw78S2RJU8NA,4185
 lfx/components/ollama/__init__.py,sha256=fau8QcWs_eHO2MmtQ4coiKj9CzFA9X4hqFf541ekgXk,1068

@@ -502,7 +502,7 @@ lfx/components/vectorstores/astradb_graph.py,sha256=qvh49NH5zTkAlA46HZOy7c-vucpj
 lfx/components/vectorstores/cassandra.py,sha256=gz8nfaq3ta6h_zVkHVn8uRKtcs9vDRuE7d1p5vI7LhE,9540
 lfx/components/vectorstores/cassandra_graph.py,sha256=JsY_gHRexI2p3U6uiQuygC7upu_HljquasDMId6PG6Y,8305
 lfx/components/vectorstores/chroma.py,sha256=DxvHJqhqx6CYdPoAG5Ly6jJsEOMR2R4lXv4UlvGQmQc,6171
-lfx/components/vectorstores/clickhouse.py,sha256=
+lfx/components/vectorstores/clickhouse.py,sha256=kQnoE9ycFPWDE0rNLxaTyjxq5b3aCeaf-f5H_Lx7iBs,5035
 lfx/components/vectorstores/couchbase.py,sha256=cO1HWm9IisWWVcxKjr7TuoOfqnY2Ndnq_QPP3L2yAoc,3965
 lfx/components/vectorstores/elasticsearch.py,sha256=WcBi8THcOzopZeYOQeEoHxsZkACHk4R3MKhSEYGxnfY,9773
 lfx/components/vectorstores/faiss.py,sha256=K9egZNckeHOrPqxuFia4VL4-mFphyEl6dQ_F-lXvax8,3893

@@ -620,6 +620,7 @@ lfx/load/load.py,sha256=mpQG2RV2ZOysShEOguWKdnQI9TUub1Ds5j89ZbwiQhA,10451
 lfx/load/utils.py,sha256=qa8aoMLW-X8FO8xVz3YVHQwjTSJYbYr_AOQAAp3smlc,3705
 lfx/log/__init__.py,sha256=UATLSm1Fp9rVclAXP00LKQzzYKcaboVSuWNujlRR6P4,119
 lfx/log/logger.py,sha256=_KqyTe1JHI6y4TFyy_WjH8J-vTG4qOhfKN_RWCnGvoA,13369
+lfx/logging/__init__.py,sha256=X5tXF5e1hc62adprRPLtKeaqm8-tpl6loXsxbh9IO-Q,367
 lfx/memory/__init__.py,sha256=XR7-FSeIxikpi6HSo1bYEXY4ua_1G6oHufD1OCRjynw,2531
 lfx/memory/stubs.py,sha256=kR6TRI2t6rPvA5Pja5XPC4yvKRBFBuJfdI0hJL8vfwU,9924
 lfx/processing/__init__.py,sha256=jERZg6it9mhOzrbTAt9YtakSNXPSjUXFh5MfKBN48wA,41

@@ -708,7 +709,7 @@ lfx/utils/schemas.py,sha256=NbOtVQBrn4d0BAu-0H_eCTZI2CXkKZlRY37XCSmuJwc,3865
 lfx/utils/util.py,sha256=xGR32XDRr_TtruhjnXfI7lEWmk-vgywHAy3kz5SBowc,15725
 lfx/utils/util_strings.py,sha256=nU_IcdphNaj6bAPbjeL-c1cInQPfTBit8mp5Y57lwQk,1686
 lfx/utils/version.py,sha256=cHpbO0OJD2JQAvVaTH_6ibYeFbHJV0QDHs_YXXZ-bT8,671
-lfx_nightly-0.1.12.
-lfx_nightly-0.1.12.
-lfx_nightly-0.1.12.
-lfx_nightly-0.1.12.
+lfx_nightly-0.1.12.dev13.dist-info/METADATA,sha256=OrTDiOi81zmo0ZarKMPRW4fEIYv2ZQgQgoQtUBM6d4g,8068
+lfx_nightly-0.1.12.dev13.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+lfx_nightly-0.1.12.dev13.dist-info/entry_points.txt,sha256=1724p3RHDQRT2CKx_QRzEIa7sFuSVO0Ux70YfXfoMT4,42
+lfx_nightly-0.1.12.dev13.dist-info/RECORD,,

{lfx_nightly-0.1.12.dev11.dist-info → lfx_nightly-0.1.12.dev13.dist-info}/WHEEL
File without changes

{lfx_nightly-0.1.12.dev11.dist-info → lfx_nightly-0.1.12.dev13.dist-info}/entry_points.txt
File without changes