lfx-nightly 0.1.13.dev2__py3-none-any.whl → 0.1.13.dev4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lfx-nightly might be problematic.
- lfx/_assets/component_index.json +1 -1
- lfx/base/agents/agent.py +17 -1
- lfx/base/agents/utils.py +15 -2
- lfx/base/composio/composio_base.py +24 -9
- lfx/base/datastax/__init__.py +5 -0
- lfx/{components/vectorstores/astradb.py → base/datastax/astradb_base.py} +84 -473
- lfx/base/io/chat.py +5 -4
- lfx/base/mcp/util.py +101 -15
- lfx/cli/commands.py +1 -1
- lfx/components/agents/agent.py +1 -1
- lfx/components/agents/cuga_agent.py +1 -1
- lfx/components/agents/mcp_component.py +16 -0
- lfx/components/amazon/amazon_bedrock_converse.py +1 -1
- lfx/components/apify/apify_actor.py +3 -3
- lfx/components/datastax/__init__.py +12 -6
- lfx/components/datastax/{astra_assistant_manager.py → astradb_assistant_manager.py} +1 -0
- lfx/components/datastax/astradb_chatmemory.py +40 -0
- lfx/components/datastax/astradb_cql.py +5 -31
- lfx/components/datastax/astradb_graph.py +9 -123
- lfx/components/datastax/astradb_tool.py +12 -52
- lfx/components/datastax/astradb_vectorstore.py +133 -976
- lfx/components/datastax/create_assistant.py +1 -0
- lfx/components/datastax/create_thread.py +1 -0
- lfx/components/datastax/dotenv.py +1 -0
- lfx/components/datastax/get_assistant.py +1 -0
- lfx/components/datastax/getenvvar.py +1 -0
- lfx/components/datastax/graph_rag.py +1 -1
- lfx/components/datastax/list_assistants.py +1 -0
- lfx/components/datastax/run.py +1 -0
- lfx/components/knowledge_bases/ingestion.py +17 -9
- lfx/components/knowledge_bases/retrieval.py +16 -8
- lfx/components/mistral/mistral_embeddings.py +1 -1
- lfx/components/openrouter/openrouter.py +49 -147
- lfx/components/vectorstores/__init__.py +0 -6
- lfx/custom/custom_component/component.py +3 -2
- lfx/graph/edge/base.py +2 -2
- lfx/graph/graph/base.py +1 -1
- lfx/graph/graph/schema.py +3 -2
- lfx/graph/vertex/vertex_types.py +1 -1
- {lfx_nightly-0.1.13.dev2.dist-info → lfx_nightly-0.1.13.dev4.dist-info}/METADATA +1 -1
- {lfx_nightly-0.1.13.dev2.dist-info → lfx_nightly-0.1.13.dev4.dist-info}/RECORD +44 -65
- lfx/components/datastax/astra_db.py +0 -77
- lfx/components/datastax/cassandra.py +0 -92
- lfx/components/vectorstores/astradb_graph.py +0 -326
- lfx/components/vectorstores/cassandra.py +0 -264
- lfx/components/vectorstores/cassandra_graph.py +0 -238
- lfx/components/vectorstores/chroma.py +0 -167
- lfx/components/vectorstores/clickhouse.py +0 -135
- lfx/components/vectorstores/couchbase.py +0 -102
- lfx/components/vectorstores/elasticsearch.py +0 -267
- lfx/components/vectorstores/faiss.py +0 -111
- lfx/components/vectorstores/graph_rag.py +0 -141
- lfx/components/vectorstores/hcd.py +0 -314
- lfx/components/vectorstores/milvus.py +0 -115
- lfx/components/vectorstores/mongodb_atlas.py +0 -213
- lfx/components/vectorstores/opensearch.py +0 -243
- lfx/components/vectorstores/pgvector.py +0 -72
- lfx/components/vectorstores/pinecone.py +0 -134
- lfx/components/vectorstores/qdrant.py +0 -109
- lfx/components/vectorstores/supabase.py +0 -76
- lfx/components/vectorstores/upstash.py +0 -124
- lfx/components/vectorstores/vectara.py +0 -97
- lfx/components/vectorstores/vectara_rag.py +0 -164
- lfx/components/vectorstores/weaviate.py +0 -89
- /lfx/components/datastax/{astra_vectorize.py → astradb_vectorize.py} +0 -0
- {lfx_nightly-0.1.13.dev2.dist-info → lfx_nightly-0.1.13.dev4.dist-info}/WHEEL +0 -0
- {lfx_nightly-0.1.13.dev2.dist-info → lfx_nightly-0.1.13.dev4.dist-info}/entry_points.txt +0 -0
lfx/components/datastax/list_assistants.py
CHANGED
@@ -8,6 +8,7 @@ class AssistantsListAssistants(ComponentWithCache):
     display_name = "List Assistants"
     description = "Returns a list of assistant id's"
     icon = "AstraDB"
+    legacy = True
     outputs = [
         Output(display_name="Assistants", name="assistants", method="process_inputs"),
     ]
lfx/components/datastax/run.py
CHANGED
lfx/components/knowledge_bases/ingestion.py
CHANGED
@@ -48,12 +48,20 @@ HUGGINGFACE_MODEL_NAMES = [
 ]
 COHERE_MODEL_NAMES = ["embed-english-v3.0", "embed-multilingual-v3.0"]
 
-
-
-
-
-
-
+_KNOWLEDGE_BASES_ROOT_PATH: Path | None = None
+
+
+def _get_knowledge_bases_root_path() -> Path:
+    """Lazy load the knowledge bases root path from settings."""
+    global _KNOWLEDGE_BASES_ROOT_PATH  # noqa: PLW0603
+    if _KNOWLEDGE_BASES_ROOT_PATH is None:
+        settings = get_settings_service().settings
+        knowledge_directory = settings.knowledge_bases_dir
+        if not knowledge_directory:
+            msg = "Knowledge bases directory is not set in the settings."
+            raise ValueError(msg)
+        _KNOWLEDGE_BASES_ROOT_PATH = Path(knowledge_directory).expanduser()
+    return _KNOWLEDGE_BASES_ROOT_PATH
 
 
 class KnowledgeIngestionComponent(Component):
@@ -203,7 +211,7 @@ class KnowledgeIngestionComponent(Component):
     # ------ Internal helpers ---------------------------------------------
     def _get_kb_root(self) -> Path:
         """Return the root directory for knowledge bases."""
-        return
+        return _get_knowledge_bases_root_path()
 
     def _validate_column_config(self, df_source: pd.DataFrame) -> list[dict[str, Any]]:
         """Validate column configuration using Structured Output patterns."""
@@ -662,7 +670,7 @@ class KnowledgeIngestionComponent(Component):
             raise ValueError(msg) from e
 
         # Create the new knowledge base directory
-        kb_path =
+        kb_path = _get_knowledge_bases_root_path() / kb_user / field_value["01_new_kb_name"]
         kb_path.mkdir(parents=True, exist_ok=True)
 
         # Save the embedding metadata
@@ -675,7 +683,7 @@ class KnowledgeIngestionComponent(Component):
 
             # Update the knowledge base options dynamically
             build_config["knowledge_base"]["options"] = await get_knowledge_bases(
-
+                _get_knowledge_bases_root_path(),
                 user_id=self.user_id,
             )
 
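The ingestion and retrieval changes replace an eagerly computed module constant with a lazily cached lookup. A minimal standalone sketch of the same pattern, using a plain dict in place of the lfx settings service:

```python
from pathlib import Path

_ROOT_CACHE: Path | None = None


def get_root(settings: dict) -> Path:
    """Resolve the knowledge-bases root once, then reuse the cached Path."""
    global _ROOT_CACHE
    if _ROOT_CACHE is None:
        directory = settings.get("knowledge_bases_dir")
        if not directory:
            msg = "Knowledge bases directory is not set in the settings."
            raise ValueError(msg)
        _ROOT_CACHE = Path(directory).expanduser()
    return _ROOT_CACHE


# First call reads settings; later calls skip the lookup entirely.
root = get_root({"knowledge_bases_dir": "~/knowledge"})
print(root / "some_user" / "some_kb")  # per-user KB path, as the components build it
```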
lfx/components/knowledge_bases/retrieval.py
CHANGED
@@ -16,12 +16,20 @@ from lfx.schema.data import Data
 from lfx.schema.dataframe import DataFrame
 from lfx.services.deps import get_settings_service, session_scope
 
-
-
-
-
-
-
+_KNOWLEDGE_BASES_ROOT_PATH: Path | None = None
+
+
+def _get_knowledge_bases_root_path() -> Path:
+    """Lazy load the knowledge bases root path from settings."""
+    global _KNOWLEDGE_BASES_ROOT_PATH  # noqa: PLW0603
+    if _KNOWLEDGE_BASES_ROOT_PATH is None:
+        settings = get_settings_service().settings
+        knowledge_directory = settings.knowledge_bases_dir
+        if not knowledge_directory:
+            msg = "Knowledge bases directory is not set in the settings."
+            raise ValueError(msg)
+        _KNOWLEDGE_BASES_ROOT_PATH = Path(knowledge_directory).expanduser()
+    return _KNOWLEDGE_BASES_ROOT_PATH
 
 
 class KnowledgeRetrievalComponent(Component):
@@ -90,7 +98,7 @@ class KnowledgeRetrievalComponent(Component):
         if field_name == "knowledge_base":
             # Update the knowledge base options dynamically
             build_config["knowledge_base"]["options"] = await get_knowledge_bases(
-
+                _get_knowledge_bases_root_path(),
                 user_id=self.user_id,  # Use the user_id from the component context
             )
 
@@ -186,7 +194,7 @@ class KnowledgeRetrievalComponent(Component):
                 msg = f"User with ID {self.user_id} not found."
                 raise ValueError(msg)
             kb_user = current_user.username
-        kb_path =
+        kb_path = _get_knowledge_bases_root_path() / kb_user / self.knowledge_base
 
         metadata = self._get_kb_metadata(kb_path)
         if not metadata:
lfx/components/openrouter/openrouter.py
CHANGED
@@ -1,6 +1,3 @@
-from collections import defaultdict
-from typing import Any
-
 import httpx
 from langchain_openai import ChatOpenAI
 from pydantic.v1 import SecretStr
@@ -8,13 +5,7 @@ from pydantic.v1 import SecretStr
 from lfx.base.models.model import LCModelComponent
 from lfx.field_typing import LanguageModel
 from lfx.field_typing.range_spec import RangeSpec
-from lfx.inputs.inputs import (
-    DropdownInput,
-    IntInput,
-    SecretStrInput,
-    SliderInput,
-    StrInput,
-)
+from lfx.inputs.inputs import DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput
 
 
 class OpenRouterComponent(LCModelComponent):
@@ -28,36 +19,13 @@ class OpenRouterComponent(LCModelComponent):
 
     inputs = [
         *LCModelComponent.get_base_inputs(),
-        SecretStrInput(
-            name="api_key", display_name="OpenRouter API Key", required=True, info="Your OpenRouter API key"
-        ),
-        StrInput(
-            name="site_url",
-            display_name="Site URL",
-            info="Your site URL for OpenRouter rankings",
-            advanced=True,
-        ),
-        StrInput(
-            name="app_name",
-            display_name="App Name",
-            info="Your app name for OpenRouter rankings",
-            advanced=True,
-        ),
-        DropdownInput(
-            name="provider",
-            display_name="Provider",
-            info="The AI model provider",
-            options=["Loading providers..."],
-            value="Loading providers...",
-            real_time_refresh=True,
-            required=True,
-        ),
+        SecretStrInput(name="api_key", display_name="API Key", required=True),
         DropdownInput(
             name="model_name",
             display_name="Model",
-
-
-
+            options=[],
+            value="",
+            refresh_button=True,
             real_time_refresh=True,
             required=True,
         ),
@@ -66,137 +34,71 @@ class OpenRouterComponent(LCModelComponent):
             display_name="Temperature",
             value=0.7,
             range_spec=RangeSpec(min=0, max=2, step=0.01),
-            info="Controls randomness. Lower values are more deterministic, higher values are more creative.",
-            advanced=True,
-        ),
-        IntInput(
-            name="max_tokens",
-            display_name="Max Tokens",
-            info="Maximum number of tokens to generate",
             advanced=True,
         ),
+        IntInput(name="max_tokens", display_name="Max Tokens", advanced=True),
+        StrInput(name="site_url", display_name="Site URL", advanced=True),
+        StrInput(name="app_name", display_name="App Name", advanced=True),
     ]
 
-    def fetch_models(self) -> dict
-        """Fetch available models from OpenRouter
-        url = "https://openrouter.ai/api/v1/models"
-
+    def fetch_models(self) -> list[dict]:
+        """Fetch available models from OpenRouter."""
         try:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            response = httpx.get("https://openrouter.ai/api/v1/models", timeout=10.0)
+            response.raise_for_status()
+            models = response.json().get("data", [])
+            return sorted(
+                [
+                    {
+                        "id": m["id"],
+                        "name": m.get("name", m["id"]),
+                        "context": m.get("context_length", 0),
+                    }
+                    for m in models
+                    if m.get("id")
+                ],
+                key=lambda x: x["name"],
+            )
+        except (httpx.RequestError, httpx.HTTPStatusError) as e:
+            self.log(f"Error fetching models: {e}")
+            return []
+
+    def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None) -> dict:  # noqa: ARG002
+        """Update model options."""
+        models = self.fetch_models()
+        if models:
+            build_config["model_name"]["options"] = [m["id"] for m in models]
+            build_config["model_name"]["tooltips"] = {m["id"]: f"{m['name']} ({m['context']:,} tokens)" for m in models}
+        else:
+            build_config["model_name"]["options"] = ["Failed to load models"]
+            build_config["model_name"]["value"] = "Failed to load models"
+        return build_config
 
     def build_model(self) -> LanguageModel:
-        """Build
-        model_not_selected = "Please select a model"
-        api_key_required = "API key is required"
-
-        if not self.model_name or self.model_name == "Select a provider first":
-            raise ValueError(model_not_selected)
-
+        """Build the OpenRouter model."""
         if not self.api_key:
-
-
-
+            msg = "API key is required"
+            raise ValueError(msg)
+        if not self.model_name or self.model_name == "Loading...":
+            msg = "Please select a model"
+            raise ValueError(msg)
 
-
-        kwargs: dict[str, Any] = {
+        kwargs = {
             "model": self.model_name,
-            "openai_api_key": api_key,
+            "openai_api_key": SecretStr(self.api_key).get_secret_value(),
             "openai_api_base": "https://openrouter.ai/api/v1",
             "temperature": self.temperature if self.temperature is not None else 0.7,
         }
 
-        # Add optional parameters
         if self.max_tokens:
-            kwargs["max_tokens"] = self.max_tokens
+            kwargs["max_tokens"] = int(self.max_tokens)
 
         headers = {}
         if self.site_url:
            headers["HTTP-Referer"] = self.site_url
         if self.app_name:
             headers["X-Title"] = self.app_name
-
         if headers:
             kwargs["default_headers"] = headers
 
-
-            return ChatOpenAI(**kwargs)
-        except (ValueError, httpx.HTTPError) as err:
-            error_msg = f"Failed to build model: {err!s}"
-            self.log(error_msg)
-            raise ValueError(error_msg) from err
-
-    def _get_exception_message(self, e: Exception) -> str | None:
-        """Get a message from an OpenRouter exception.
-
-        Args:
-            e (Exception): The exception to get the message from.
-
-        Returns:
-            str | None: The message from the exception, or None if no specific message can be extracted.
-        """
-        try:
-            from openai import BadRequestError
-
-            if isinstance(e, BadRequestError):
-                message = e.body.get("message")
-                if message:
-                    return message
-        except ImportError:
-            pass
-        return None
-
-    def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None) -> dict:
-        """Update build configuration based on field updates."""
-        try:
-            if field_name is None or field_name == "provider":
-                provider_models = self.fetch_models()
-                build_config["provider"]["options"] = sorted(provider_models.keys())
-                if build_config["provider"]["value"] not in provider_models:
-                    build_config["provider"]["value"] = build_config["provider"]["options"][0]
-
-            if field_name == "provider" and field_value in self.fetch_models():
-                provider_models = self.fetch_models()
-                models = provider_models[field_value]
-
-                build_config["model_name"]["options"] = [model["id"] for model in models]
-                if models:
-                    build_config["model_name"]["value"] = models[0]["id"]
-
-                tooltips = {
-                    model["id"]: (f"{model['name']}\nContext Length: {model['context_length']}\n{model['description']}")
-                    for model in models
-                }
-                build_config["model_name"]["tooltips"] = tooltips
-
-        except httpx.HTTPError as e:
-            self.log(f"Error updating build config: {e!s}")
-            build_config["provider"]["options"] = ["Error loading providers"]
-            build_config["provider"]["value"] = "Error loading providers"
-            build_config["model_name"]["options"] = ["Error loading models"]
-            build_config["model_name"]["value"] = "Error loading models"
-
-        return build_config
+        return ChatOpenAI(**kwargs)
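The rewritten component boils down to two calls: one httpx request against OpenRouter's public model list and one ChatOpenAI client pointed at the OpenRouter-compatible base URL. A hedged sketch outside the component, assuming an OPENROUTER_API_KEY environment variable and the endpoint shown in the diff:

```python
import os

import httpx
from langchain_openai import ChatOpenAI

# Fetch the model catalog that the component's refresh button relies on.
resp = httpx.get("https://openrouter.ai/api/v1/models", timeout=10.0)
resp.raise_for_status()
model_ids = sorted(m["id"] for m in resp.json().get("data", []) if m.get("id"))
print(f"{len(model_ids)} models available")

# Point a ChatOpenAI client at the OpenRouter base URL, as build_model() does;
# the extra headers are the optional ranking headers from the diff.
llm = ChatOpenAI(
    model=model_ids[0] if model_ids else "openai/gpt-4o-mini",  # placeholder fallback id
    openai_api_key=os.environ["OPENROUTER_API_KEY"],
    openai_api_base="https://openrouter.ai/api/v1",
    temperature=0.7,
    default_headers={"HTTP-Referer": "https://example.com", "X-Title": "My App"},
)
```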
lfx/components/vectorstores/__init__.py
CHANGED
@@ -5,20 +5,14 @@ from typing import TYPE_CHECKING, Any
 from lfx.components._importing import import_mod
 
 if TYPE_CHECKING:
-    from .astradb import AstraDBVectorStoreComponent
     from .local_db import LocalDBComponent
-    from .mongodb_atlas import MongoVectorStoreComponent
 
 _dynamic_imports = {
     "LocalDBComponent": "local_db",
-    "AstraDBVectorStoreComponent": "astradb",
-    "MongoVectorStoreComponent": "mongodb_atlas",
 }
 
 __all__ = [
-    "AstraDBVectorStoreComponent",
     "LocalDBComponent",
-    "MongoVectorStoreComponent",
 ]
 
 
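`_dynamic_imports` pairs each exported name with the submodule that defines it, so the package can expose components without importing every backend at import time. A generic sketch of that lazy re-export pattern using only the standard library (PEP 562 module `__getattr__`); the lfx `import_mod` helper itself is not shown in this diff, so this is an assumption about the mechanism, not its exact code:

```python
# mypackage/__init__.py -- lazy re-exports, loading submodules on first access
import importlib
from typing import TYPE_CHECKING

if TYPE_CHECKING:  # visible to type checkers only; no runtime import cost
    from .local_db import LocalDBComponent  # noqa: F401

_dynamic_imports = {"LocalDBComponent": "local_db"}

__all__ = ["LocalDBComponent"]


def __getattr__(name: str):
    """Import the owning submodule the first time a re-exported name is used."""
    if name in _dynamic_imports:
        module = importlib.import_module(f".{_dynamic_imports[name]}", __name__)
        value = getattr(module, name)
        globals()[name] = value  # cache so later lookups bypass __getattr__
        return value
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```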
lfx/custom/custom_component/component.py
CHANGED
@@ -154,7 +154,7 @@ class Component(CustomComponent):
         self.trace_type = "chain"
 
         # Setup inputs and outputs
-        self.
+        self.reset_all_output_values()
         if self.inputs is not None:
             self.map_inputs(self.inputs)
         self.map_outputs()
@@ -330,7 +330,8 @@ class Component(CustomComponent):
     def set_event_manager(self, event_manager: EventManager | None = None) -> None:
         self._event_manager = event_manager
 
-    def
+    def reset_all_output_values(self) -> None:
+        """Reset all output values to UNDEFINED."""
         if isinstance(self._outputs_map, dict):
             for output in self._outputs_map.values():
                 output.value = UNDEFINED
lfx/graph/edge/base.py
CHANGED
@@ -63,8 +63,8 @@ class Edge:
         # target_param is documents
         if isinstance(self._target_handle, str):
             self.target_param = self._target_handle.split("|")[1]
-            self.source_handle = None
-            self.target_handle = None
+            self.source_handle = None  # type: ignore[assignment]
+            self.target_handle = None  # type: ignore[assignment]
         else:
             msg = "Target handle is not a string"
             raise ValueError(msg)
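The added `# type: ignore[assignment]` comments acknowledge that the handle attributes are annotated with non-Optional types, so assigning `None` would otherwise fail a strict mypy run. A tiny illustration of that mismatch, with illustrative names:

```python
class Handle:
    ...


class EdgeLike:
    source_handle: Handle  # annotated as always present

    def clear(self) -> None:
        # Without the ignore, mypy reports:
        #   Incompatible types in assignment (expression has type "None",
        #   variable has type "Handle")  [assignment]
        self.source_handle = None  # type: ignore[assignment]
```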
lfx/graph/graph/base.py
CHANGED
lfx/graph/graph/schema.py
CHANGED
@@ -4,11 +4,12 @@ from typing import TYPE_CHECKING, NamedTuple, Protocol
 
 from typing_extensions import NotRequired, TypedDict
 
+from lfx.graph.edge.schema import EdgeData
+from lfx.graph.vertex.schema import NodeData
+
 if TYPE_CHECKING:
-    from lfx.graph.edge.schema import EdgeData
     from lfx.graph.schema import ResultData
     from lfx.graph.vertex.base import Vertex
-    from lfx.graph.vertex.schema import NodeData
     from lfx.schema.log import LoggableType
 
 
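Moving `EdgeData` and `NodeData` out of the `TYPE_CHECKING` block makes them importable at runtime, which matters whenever annotations in this module are resolved dynamically (for example with `typing.get_type_hints`). A small sketch of the failure mode, with hypothetical module names:

```python
from typing import TYPE_CHECKING, TypedDict, get_type_hints

if TYPE_CHECKING:
    # Resolved only by the type checker; the name does not exist at runtime.
    from mypkg.edge_schema import EdgeData


class GraphDump(TypedDict):
    edges: "list[EdgeData]"


# get_type_hints(GraphDump) raises NameError here, because EdgeData was never
# imported outside TYPE_CHECKING. Importing it unconditionally, as the diff
# does for EdgeData and NodeData, lets the hints resolve at runtime.
```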
lfx/graph/vertex/vertex_types.py
CHANGED
@@ -65,7 +65,7 @@ class ComponentVertex(Vertex):
             self.built_object, self.artifacts = result
         elif len(result) == 3:  # noqa: PLR2004
             self.custom_component, self.built_object, self.artifacts = result
-            self.logs = self.custom_component.
+            self.logs = self.custom_component.get_output_logs()
             for key in self.artifacts:
                 if self.artifacts_raw is None:
                     self.artifacts_raw = {}
{lfx_nightly-0.1.13.dev2.dist-info → lfx_nightly-0.1.13.dev4.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lfx-nightly
-Version: 0.1.13.dev2
+Version: 0.1.13.dev4
 Summary: Langflow Executor - A lightweight CLI tool for executing and serving Langflow AI flows
 Author-email: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
 Requires-Python: <3.14,>=3.10