lfx-nightly 0.2.0.dev0__py3-none-any.whl → 0.2.0.dev26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lfx/_assets/component_index.json +1 -1
- lfx/base/agents/agent.py +13 -1
- lfx/base/agents/altk_base_agent.py +380 -0
- lfx/base/agents/altk_tool_wrappers.py +565 -0
- lfx/base/agents/events.py +2 -1
- lfx/base/composio/composio_base.py +159 -224
- lfx/base/data/base_file.py +88 -21
- lfx/base/data/storage_utils.py +192 -0
- lfx/base/data/utils.py +178 -14
- lfx/base/embeddings/embeddings_class.py +113 -0
- lfx/base/models/groq_constants.py +74 -58
- lfx/base/models/groq_model_discovery.py +265 -0
- lfx/base/models/model.py +1 -1
- lfx/base/models/model_utils.py +100 -0
- lfx/base/models/openai_constants.py +7 -0
- lfx/base/models/watsonx_constants.py +32 -8
- lfx/base/tools/run_flow.py +601 -129
- lfx/cli/commands.py +6 -3
- lfx/cli/common.py +2 -2
- lfx/cli/run.py +1 -1
- lfx/cli/script_loader.py +53 -11
- lfx/components/Notion/create_page.py +1 -1
- lfx/components/Notion/list_database_properties.py +1 -1
- lfx/components/Notion/list_pages.py +1 -1
- lfx/components/Notion/list_users.py +1 -1
- lfx/components/Notion/page_content_viewer.py +1 -1
- lfx/components/Notion/search.py +1 -1
- lfx/components/Notion/update_page_property.py +1 -1
- lfx/components/__init__.py +19 -5
- lfx/components/{agents → altk}/__init__.py +5 -9
- lfx/components/altk/altk_agent.py +193 -0
- lfx/components/apify/apify_actor.py +1 -1
- lfx/components/composio/__init__.py +70 -18
- lfx/components/composio/apollo_composio.py +11 -0
- lfx/components/composio/bitbucket_composio.py +11 -0
- lfx/components/composio/canva_composio.py +11 -0
- lfx/components/composio/coda_composio.py +11 -0
- lfx/components/composio/composio_api.py +10 -0
- lfx/components/composio/discord_composio.py +1 -1
- lfx/components/composio/elevenlabs_composio.py +11 -0
- lfx/components/composio/exa_composio.py +11 -0
- lfx/components/composio/firecrawl_composio.py +11 -0
- lfx/components/composio/fireflies_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +1 -1
- lfx/components/composio/googlebigquery_composio.py +11 -0
- lfx/components/composio/googlecalendar_composio.py +1 -1
- lfx/components/composio/googledocs_composio.py +1 -1
- lfx/components/composio/googlemeet_composio.py +1 -1
- lfx/components/composio/googlesheets_composio.py +1 -1
- lfx/components/composio/googletasks_composio.py +1 -1
- lfx/components/composio/heygen_composio.py +11 -0
- lfx/components/composio/mem0_composio.py +11 -0
- lfx/components/composio/peopledatalabs_composio.py +11 -0
- lfx/components/composio/perplexityai_composio.py +11 -0
- lfx/components/composio/serpapi_composio.py +11 -0
- lfx/components/composio/slack_composio.py +3 -574
- lfx/components/composio/slackbot_composio.py +1 -1
- lfx/components/composio/snowflake_composio.py +11 -0
- lfx/components/composio/tavily_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +2 -2
- lfx/components/cuga/__init__.py +34 -0
- lfx/components/cuga/cuga_agent.py +730 -0
- lfx/components/data/__init__.py +78 -28
- lfx/components/data_source/__init__.py +58 -0
- lfx/components/{data → data_source}/api_request.py +26 -3
- lfx/components/{data → data_source}/csv_to_data.py +15 -10
- lfx/components/{data → data_source}/json_to_data.py +15 -8
- lfx/components/{data → data_source}/news_search.py +1 -1
- lfx/components/{data → data_source}/rss.py +1 -1
- lfx/components/{data → data_source}/sql_executor.py +1 -1
- lfx/components/{data → data_source}/url.py +1 -1
- lfx/components/{data → data_source}/web_search.py +1 -1
- lfx/components/datastax/astradb_cql.py +1 -1
- lfx/components/datastax/astradb_graph.py +1 -1
- lfx/components/datastax/astradb_tool.py +1 -1
- lfx/components/datastax/astradb_vectorstore.py +1 -1
- lfx/components/datastax/hcd.py +1 -1
- lfx/components/deactivated/json_document_builder.py +1 -1
- lfx/components/docling/__init__.py +0 -3
- lfx/components/elastic/elasticsearch.py +1 -1
- lfx/components/elastic/opensearch_multimodal.py +1575 -0
- lfx/components/files_and_knowledge/__init__.py +47 -0
- lfx/components/{data → files_and_knowledge}/directory.py +1 -1
- lfx/components/{data → files_and_knowledge}/file.py +246 -18
- lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +2 -2
- lfx/components/{data → files_and_knowledge}/save_file.py +142 -22
- lfx/components/flow_controls/__init__.py +58 -0
- lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
- lfx/components/{logic → flow_controls}/loop.py +43 -9
- lfx/components/flow_controls/run_flow.py +108 -0
- lfx/components/glean/glean_search_api.py +1 -1
- lfx/components/groq/groq.py +35 -28
- lfx/components/helpers/__init__.py +102 -0
- lfx/components/input_output/__init__.py +3 -1
- lfx/components/input_output/chat.py +4 -3
- lfx/components/input_output/chat_output.py +4 -4
- lfx/components/input_output/text.py +1 -1
- lfx/components/input_output/text_output.py +1 -1
- lfx/components/{data → input_output}/webhook.py +1 -1
- lfx/components/knowledge_bases/__init__.py +59 -4
- lfx/components/langchain_utilities/character.py +1 -1
- lfx/components/langchain_utilities/csv_agent.py +84 -16
- lfx/components/langchain_utilities/json_agent.py +67 -12
- lfx/components/langchain_utilities/language_recursive.py +1 -1
- lfx/components/llm_operations/__init__.py +46 -0
- lfx/components/{processing → llm_operations}/batch_run.py +1 -1
- lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
- lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
- lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
- lfx/components/{processing → llm_operations}/structured_output.py +1 -1
- lfx/components/logic/__init__.py +126 -0
- lfx/components/mem0/mem0_chat_memory.py +11 -0
- lfx/components/models/__init__.py +64 -9
- lfx/components/models_and_agents/__init__.py +49 -0
- lfx/components/{agents → models_and_agents}/agent.py +2 -2
- lfx/components/models_and_agents/embedding_model.py +423 -0
- lfx/components/models_and_agents/language_model.py +398 -0
- lfx/components/{agents → models_and_agents}/mcp_component.py +53 -44
- lfx/components/{helpers → models_and_agents}/memory.py +1 -1
- lfx/components/nvidia/system_assist.py +1 -1
- lfx/components/olivya/olivya.py +1 -1
- lfx/components/ollama/ollama.py +17 -3
- lfx/components/processing/__init__.py +9 -57
- lfx/components/processing/converter.py +1 -1
- lfx/components/processing/dataframe_operations.py +1 -1
- lfx/components/processing/parse_json_data.py +2 -2
- lfx/components/processing/parser.py +1 -1
- lfx/components/processing/split_text.py +1 -1
- lfx/components/qdrant/qdrant.py +1 -1
- lfx/components/redis/redis.py +1 -1
- lfx/components/twelvelabs/split_video.py +10 -0
- lfx/components/twelvelabs/video_file.py +12 -0
- lfx/components/utilities/__init__.py +43 -0
- lfx/components/{helpers → utilities}/calculator_core.py +1 -1
- lfx/components/{helpers → utilities}/current_date.py +1 -1
- lfx/components/{processing → utilities}/python_repl_core.py +1 -1
- lfx/components/vectorstores/local_db.py +9 -0
- lfx/components/youtube/youtube_transcripts.py +118 -30
- lfx/custom/custom_component/component.py +57 -1
- lfx/custom/custom_component/custom_component.py +68 -6
- lfx/graph/edge/base.py +43 -20
- lfx/graph/graph/base.py +4 -1
- lfx/graph/state/model.py +15 -2
- lfx/graph/utils.py +6 -0
- lfx/graph/vertex/base.py +4 -1
- lfx/graph/vertex/param_handler.py +10 -7
- lfx/helpers/__init__.py +12 -0
- lfx/helpers/flow.py +117 -0
- lfx/inputs/input_mixin.py +24 -1
- lfx/inputs/inputs.py +13 -1
- lfx/interface/components.py +161 -83
- lfx/log/logger.py +5 -3
- lfx/services/database/__init__.py +5 -0
- lfx/services/database/service.py +25 -0
- lfx/services/deps.py +87 -22
- lfx/services/manager.py +19 -6
- lfx/services/mcp_composer/service.py +998 -157
- lfx/services/session.py +5 -0
- lfx/services/settings/base.py +51 -7
- lfx/services/settings/constants.py +8 -0
- lfx/services/storage/local.py +76 -46
- lfx/services/storage/service.py +152 -29
- lfx/template/field/base.py +3 -0
- lfx/utils/ssrf_protection.py +384 -0
- lfx/utils/validate_cloud.py +26 -0
- {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/METADATA +38 -22
- {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/RECORD +182 -150
- {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/WHEEL +1 -1
- lfx/components/agents/altk_agent.py +0 -366
- lfx/components/agents/cuga_agent.py +0 -1013
- lfx/components/docling/docling_remote_vlm.py +0 -284
- lfx/components/logic/run_flow.py +0 -71
- lfx/components/models/embedding_model.py +0 -195
- lfx/components/models/language_model.py +0 -144
- /lfx/components/{data → data_source}/mock_data.py +0 -0
- /lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +0 -0
- /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
- /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
- /lfx/components/{logic → flow_controls}/listen.py +0 -0
- /lfx/components/{logic → flow_controls}/notify.py +0 -0
- /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
- /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
- /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
- /lfx/components/{helpers → processing}/create_list.py +0 -0
- /lfx/components/{helpers → processing}/output_parser.py +0 -0
- /lfx/components/{helpers → processing}/store_message.py +0 -0
- /lfx/components/{helpers → utilities}/id_generator.py +0 -0
- {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/entry_points.txt +0 -0

lfx/components/groq/groq.py CHANGED

@@ -1,7 +1,7 @@
-import requests
 from pydantic.v1 import SecretStr
 
-from lfx.base.models.groq_constants import GROQ_MODELS
+from lfx.base.models.groq_constants import GROQ_MODELS
+from lfx.base.models.groq_model_discovery import get_groq_models
 from lfx.base.models.model import LCModelComponent
 from lfx.field_typing import LanguageModel
 from lfx.field_typing.range_spec import RangeSpec

@@ -52,7 +52,7 @@ class GroqModel(LCModelComponent):
         DropdownInput(
             name="model_name",
             display_name="Model",
-            info="The name of the model to use.",
+            info="The name of the model to use. Add your Groq API key to access additional available models.",
             options=GROQ_MODELS,
             value=GROQ_MODELS[0],
             refresh_button=True,

@@ -71,35 +71,42 @@ class GroqModel(LCModelComponent):
     ]
 
     def get_models(self, *, tool_model_enabled: bool | None = None) -> list[str]:
+        """Get available Groq models using the dynamic discovery system.
+
+        This method uses the groq_model_discovery module which:
+        - Fetches models directly from Groq API
+        - Automatically tests tool calling support
+        - Caches results for 24 hours
+        - Falls back to hardcoded list if API fails
+
+        Args:
+            tool_model_enabled: If True, only return models that support tool calling
+
+        Returns:
+            List of available model IDs
+        """
         try:
-
-
+            # Get models with metadata from dynamic discovery system
+            api_key = self.api_key if hasattr(self, "api_key") and self.api_key else None
+            models_metadata = get_groq_models(api_key=api_key)
 
-
-            response.raise_for_status()
-            model_list = response.json()
+            # Filter out non-LLM models (audio, TTS, guards)
             model_ids = [
-
+                model_id for model_id, metadata in models_metadata.items() if not metadata.get("not_supported", False)
             ]
-
+
+            # Filter by tool calling support if requested
+            if tool_model_enabled:
+                model_ids = [model_id for model_id in model_ids if models_metadata[model_id].get("tool_calling", False)]
+                logger.info(f"Loaded {len(model_ids)} Groq models with tool calling support")
+            else:
+                logger.info(f"Loaded {len(model_ids)} Groq models")
+        except (ValueError, KeyError, TypeError, ImportError) as e:
             logger.exception(f"Error getting model names: {e}")
-
-
-
-            from langchain_groq import ChatGroq
-        except ImportError as e:
-            msg = "langchain_groq is not installed. Please install it with `pip install langchain_groq`."
-            raise ImportError(msg) from e
-        for model in model_ids:
-            model_with_tool = ChatGroq(
-                model=model,
-                api_key=self.api_key,
-                base_url=self.base_url,
-            )
-            if not self.supports_tool_calling(model_with_tool) or model in TOOL_CALLING_UNSUPPORTED_GROQ_MODELS:
-                model_ids.remove(model)
+            # Fallback to hardcoded list from groq_constants.py
+            return GROQ_MODELS
+        else:
             return model_ids
-        return model_ids
 
     def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
         if field_name in {"base_url", "model_name", "tool_model_enabled", "api_key"} and field_value:

@@ -107,13 +114,13 @@ class GroqModel(LCModelComponent):
                 if len(self.api_key) != 0:
                     try:
                         ids = self.get_models(tool_model_enabled=self.tool_model_enabled)
-                    except (
+                    except (ValueError, KeyError, TypeError, ImportError) as e:
                         logger.exception(f"Error getting model names: {e}")
                         ids = GROQ_MODELS
                     build_config.setdefault("model_name", {})
                     build_config["model_name"]["options"] = ids
                     build_config["model_name"].setdefault("value", ids[0])
-            except
+            except (ValueError, KeyError, TypeError, AttributeError) as e:
                 msg = f"Error getting model names: {e}"
                 raise ValueError(msg) from e
         return build_config
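
For context, the refactored get_models() consumes the discovery module roughly as sketched below. This is an illustrative helper, not code from the package: get_groq_models(api_key=...), the not_supported / tool_calling metadata keys, and the GROQ_MODELS fallback are taken from the diff above, while list_groq_model_ids is a hypothetical name.

    # Sketch only: mirrors the filtering logic in GroqModel.get_models() above.
    # Assumes get_groq_models() returns a {model_id: metadata} mapping with boolean
    # "not_supported" and "tool_calling" flags, as implied by the diff.
    from lfx.base.models.groq_constants import GROQ_MODELS
    from lfx.base.models.groq_model_discovery import get_groq_models

    def list_groq_model_ids(api_key: str | None = None, *, tools_only: bool = False) -> list[str]:
        try:
            metadata = get_groq_models(api_key=api_key)
        except (ValueError, KeyError, TypeError, ImportError):
            return GROQ_MODELS  # hardcoded fallback, as in the component
        ids = [m for m, meta in metadata.items() if not meta.get("not_supported", False)]
        if tools_only:
            ids = [m for m in ids if metadata[m].get("tool_calling", False)]
        return ids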

lfx/components/helpers/__init__.py CHANGED

@@ -1,5 +1,8 @@
+"""Helpers module - backwards compatibility for moved components."""
+
 from __future__ import annotations
 
+import sys
 from typing import TYPE_CHECKING, Any
 
 from lfx.components._importing import import_mod

@@ -33,12 +36,111 @@ __all__ = [
     "OutputParserComponent",
 ]
 
+# Register redirected submodules in sys.modules for direct importlib.import_module() calls
+# This allows imports like: import lfx.components.helpers.current_date
+_redirected_submodules = {
+    "lfx.components.helpers.current_date": "lfx.components.utilities.current_date",
+    "lfx.components.helpers.calculator_core": "lfx.components.utilities.calculator_core",
+    "lfx.components.helpers.id_generator": "lfx.components.utilities.id_generator",
+    "lfx.components.helpers.memory": "lfx.components.models_and_agents.memory",
+}
+
+for old_path, new_path in _redirected_submodules.items():
+    if old_path not in sys.modules:
+        # Use a lazy loader that imports the actual module when accessed
+        class _RedirectedModule:
+            def __init__(self, target_path: str, original_path: str):
+                self._target_path = target_path
+                self._original_path = original_path
+                self._module = None
+
+            def __getattr__(self, name: str) -> Any:
+                if self._module is None:
+                    from importlib import import_module
+
+                    self._module = import_module(self._target_path)
+                    # Also register under the original path for future imports
+                    sys.modules[self._original_path] = self._module
+                return getattr(self._module, name)
+
+            def __repr__(self) -> str:
+                return f"<redirected module '{self._original_path}' -> '{self._target_path}'>"
+
+        sys.modules[old_path] = _RedirectedModule(new_path, old_path)
+
 
 def __getattr__(attr_name: str) -> Any:
     """Lazily import helper components on attribute access."""
+    # Handle submodule access for backwards compatibility
+    # e.g., lfx.components.helpers.id_generator -> lfx.components.utilities.id_generator
+    if attr_name == "id_generator":
+        from importlib import import_module
+
+        result = import_module("lfx.components.utilities.id_generator")
+        globals()[attr_name] = result
+        return result
+    if attr_name == "calculator_core":
+        from importlib import import_module
+
+        result = import_module("lfx.components.utilities.calculator_core")
+        globals()[attr_name] = result
+        return result
+    if attr_name == "current_date":
+        from importlib import import_module
+
+        result = import_module("lfx.components.utilities.current_date")
+        globals()[attr_name] = result
+        return result
+    if attr_name == "memory":
+        from importlib import import_module
+
+        result = import_module("lfx.components.models_and_agents.memory")
+        globals()[attr_name] = result
+        return result
+
     if attr_name not in _dynamic_imports:
         msg = f"module '{__name__}' has no attribute '{attr_name}'"
         raise AttributeError(msg)
+
+    # CurrentDateComponent, CalculatorComponent, and IDGeneratorComponent were moved to utilities
+    # Forward them to utilities for backwards compatibility
+    if attr_name in ("CurrentDateComponent", "CalculatorComponent", "IDGeneratorComponent"):
+        from lfx.components import utilities
+
+        result = getattr(utilities, attr_name)
+        globals()[attr_name] = result
+        return result
+
+    # MemoryComponent was moved to models_and_agents
+    # Forward it to models_and_agents for backwards compatibility
+    if attr_name == "MemoryComponent":
+        from lfx.components import models_and_agents
+
+        result = getattr(models_and_agents, attr_name)
+        globals()[attr_name] = result
+        return result
+
+    # CreateListComponent, MessageStoreComponent, and OutputParserComponent were moved to processing
+    # Forward them to processing for backwards compatibility
+    if attr_name == "CreateListComponent":
+        from lfx.components import processing
+
+        result = getattr(processing, attr_name)
+        globals()[attr_name] = result
+        return result
+    if attr_name == "MessageStoreComponent":
+        from lfx.components import processing
+
+        result = processing.MessageStoreComponent
+        globals()[attr_name] = result
+        return result
+    if attr_name == "OutputParserComponent":
+        from lfx.components import processing
+
+        result = getattr(processing, attr_name)
+        globals()[attr_name] = result
+        return result
+
     try:
         result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
     except (ModuleNotFoundError, ImportError, AttributeError) as e:
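
The sys.modules redirects and __getattr__ forwarding above exist so that pre-reorganization import paths keep resolving. A minimal sketch of the intended effect, assuming the moved modules exist at their new locations (both imports below are illustrative, not shipped code):

    import importlib

    # Package-level attribute access is forwarded by __getattr__ to the new module.
    from lfx.components.helpers import CurrentDateComponent  # actually defined in lfx.components.utilities

    # Direct submodule imports hit the _RedirectedModule proxy registered in sys.modules,
    # which lazily loads lfx.components.utilities.current_date on first attribute access.
    current_date = importlib.import_module("lfx.components.helpers.current_date")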

lfx/components/input_output/__init__.py CHANGED

@@ -9,15 +9,17 @@ if TYPE_CHECKING:
     from lfx.components.input_output.chat_output import ChatOutput
     from lfx.components.input_output.text import TextInputComponent
     from lfx.components.input_output.text_output import TextOutputComponent
+    from lfx.components.input_output.webhook import WebhookComponent
 
 _dynamic_imports = {
     "ChatInput": "chat",
     "ChatOutput": "chat_output",
     "TextInputComponent": "text",
     "TextOutputComponent": "text_output",
+    "WebhookComponent": "webhook",
 }
 
-__all__ = ["ChatInput", "ChatOutput", "TextInputComponent", "TextOutputComponent"]
+__all__ = ["ChatInput", "ChatOutput", "TextInputComponent", "TextOutputComponent", "WebhookComponent"]
 
 
 def __getattr__(attr_name: str) -> Any:

lfx/components/input_output/chat.py CHANGED

@@ -19,7 +19,7 @@ from lfx.utils.constants import (
 class ChatInput(ChatComponent):
     display_name = "Chat Input"
     description = "Get chat inputs from the Playground."
-    documentation: str = "https://docs.langflow.org/
+    documentation: str = "https://docs.langflow.org/chat-input-and-output"
     icon = "MessagesSquare"
     name = "ChatInput"
     minimized = True

@@ -89,15 +89,16 @@ class ChatInput(ChatComponent):
         # Filter out None/empty values
         files = [f for f in files if f is not None and f != ""]
 
+        session_id = self.session_id or self.graph.session_id or ""
         message = await Message.create(
             text=self.input_value,
             sender=self.sender,
             sender_name=self.sender_name,
-            session_id=
+            session_id=session_id,
             context_id=self.context_id,
             files=files,
         )
-        if
+        if session_id and isinstance(message, Message) and self.should_store_message:
             stored_message = await self.send_message(
                 message,
             )

lfx/components/input_output/chat_output.py CHANGED

@@ -22,7 +22,7 @@ from lfx.utils.constants import (
 class ChatOutput(ChatComponent):
     display_name = "Chat Output"
     description = "Display a chat message in the Playground."
-    documentation: str = "https://docs.langflow.org/
+    documentation: str = "https://docs.langflow.org/chat-input-and-output"
     icon = "MessagesSquare"
     name = "ChatOutput"
     minimized = True

@@ -117,7 +117,7 @@ class ChatOutput(ChatComponent):
         source, _, display_name, source_id = self.get_properties_from_source_component()
 
         # Create or use existing Message object
-        if isinstance(self.input_value, Message):
+        if isinstance(self.input_value, Message) and not self.is_connected_to_chat_input():
             message = self.input_value
             # Update message properties
             message.text = text

@@ -127,13 +127,13 @@ class ChatOutput(ChatComponent):
         # Set message properties
         message.sender = self.sender
         message.sender_name = self.sender_name
-        message.session_id = self.session_id
+        message.session_id = self.session_id or self.graph.session_id or ""
         message.context_id = self.context_id
         message.flow_id = self.graph.flow_id if hasattr(self, "graph") else None
         message.properties.source = self._build_source(source_id, display_name, source)
 
         # Store message if needed
-        if
+        if message.session_id and self.should_store_message:
             stored_message = await self.send_message(message)
             self.message.value = stored_message
             message = stored_message

lfx/components/input_output/text.py CHANGED

@@ -6,7 +6,7 @@ from lfx.schema.message import Message
 class TextInputComponent(TextComponent):
     display_name = "Text Input"
     description = "Get user text inputs."
-    documentation: str = "https://docs.langflow.org/
+    documentation: str = "https://docs.langflow.org/text-input-and-output"
     icon = "type"
     name = "TextInput"
 

lfx/components/input_output/text_output.py CHANGED

@@ -6,7 +6,7 @@ from lfx.schema.message import Message
 class TextOutputComponent(TextComponent):
     display_name = "Text Output"
     description = "Sends text output via API."
-    documentation: str = "https://docs.langflow.org/
+    documentation: str = "https://docs.langflow.org/text-input-and-output"
     icon = "type"
     name = "TextOutput"
 

lfx/components/{data → input_output}/webhook.py CHANGED

@@ -7,7 +7,7 @@ from lfx.schema.data import Data
 
 class WebhookComponent(Component):
     display_name = "Webhook"
-    documentation: str = "https://docs.langflow.org/
+    documentation: str = "https://docs.langflow.org/component-webhook"
     name = "Webhook"
     icon = "webhook"
 

lfx/components/knowledge_bases/__init__.py CHANGED

@@ -1,12 +1,19 @@
+"""Knowledge bases module - backwards compatibility alias for files_and_knowledge.
+
+This module provides backwards compatibility by forwarding all imports
+to files_and_knowledge where the actual knowledge base components are located.
+"""
+
 from __future__ import annotations
 
+import sys
 from typing import TYPE_CHECKING, Any
 
 from lfx.components._importing import import_mod
 
 if TYPE_CHECKING:
-    from lfx.components.
-    from lfx.components.
+    from lfx.components.files_and_knowledge.ingestion import KnowledgeIngestionComponent
+    from lfx.components.files_and_knowledge.retrieval import KnowledgeRetrievalComponent
 
 _dynamic_imports = {
     "KnowledgeIngestionComponent": "ingestion",

@@ -15,14 +22,61 @@ _dynamic_imports = {
 
 __all__ = ["KnowledgeIngestionComponent", "KnowledgeRetrievalComponent"]
 
+# Register redirected submodules in sys.modules for direct importlib.import_module() calls
+# This allows imports like: import lfx.components.knowledge_bases.ingestion
+_redirected_submodules = {
+    "lfx.components.knowledge_bases.ingestion": "lfx.components.files_and_knowledge.ingestion",
+    "lfx.components.knowledge_bases.retrieval": "lfx.components.files_and_knowledge.retrieval",
+}
+
+for old_path, new_path in _redirected_submodules.items():
+    if old_path not in sys.modules:
+        # Use a lazy loader that imports the actual module when accessed
+        class _RedirectedModule:
+            def __init__(self, target_path: str, original_path: str):
+                self._target_path = target_path
+                self._original_path = original_path
+                self._module = None
+
+            def __getattr__(self, name: str) -> Any:
+                if self._module is None:
+                    from importlib import import_module
+
+                    self._module = import_module(self._target_path)
+                    # Also register under the original path for future imports
+                    sys.modules[self._original_path] = self._module
+                return getattr(self._module, name)
+
+            def __repr__(self) -> str:
+                return f"<redirected module '{self._original_path}' -> '{self._target_path}'>"
+
+        sys.modules[old_path] = _RedirectedModule(new_path, old_path)
+
 
 def __getattr__(attr_name: str) -> Any:
-    """
+    """Forward attribute access to files_and_knowledge components."""
+    # Handle submodule access for backwards compatibility
+    if attr_name == "ingestion":
+        from importlib import import_module
+
+        result = import_module("lfx.components.files_and_knowledge.ingestion")
+        globals()[attr_name] = result
+        return result
+    if attr_name == "retrieval":
+        from importlib import import_module
+
+        result = import_module("lfx.components.files_and_knowledge.retrieval")
+        globals()[attr_name] = result
+        return result
+
     if attr_name not in _dynamic_imports:
         msg = f"module '{__name__}' has no attribute '{attr_name}'"
         raise AttributeError(msg)
+
+    # Import from files_and_knowledge using the correct package path
+    package = "lfx.components.files_and_knowledge"
     try:
-        result = import_mod(attr_name, _dynamic_imports[attr_name],
+        result = import_mod(attr_name, _dynamic_imports[attr_name], package)
     except (ModuleNotFoundError, ImportError, AttributeError) as e:
         msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
         raise AttributeError(msg) from e

@@ -31,4 +85,5 @@ def __getattr__(attr_name: str) -> Any:
 
 
 def __dir__() -> list[str]:
+    """Return directory of available components."""
     return list(__all__)
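
Under the alias module above, the old and new import paths should resolve to the same objects. A small illustrative check (not shipped code; the identity assertion is an expectation, not something the diff states):

    # Old path, forwarded via __getattr__ and import_mod(..., "lfx.components.files_and_knowledge")
    from lfx.components.knowledge_bases import KnowledgeRetrievalComponent as ViaAlias
    # New canonical path
    from lfx.components.files_and_knowledge.retrieval import KnowledgeRetrievalComponent as Direct

    assert ViaAlias is Direct  # both should resolve to the same class object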

lfx/components/langchain_utilities/character.py CHANGED

@@ -10,7 +10,7 @@ from lfx.utils.util import unescape_string
 class CharacterTextSplitterComponent(LCTextSplitterComponent):
     display_name = "Character Text Splitter"
     description = "Split text by number of characters."
-    documentation = "https://docs.langflow.org/
+    documentation = "https://docs.langflow.org/bundles-langchain"
     name = "CharacterTextSplitter"
     icon = "LangChain"
 

lfx/components/langchain_utilities/csv_agent.py CHANGED

@@ -1,6 +1,9 @@
-
+import contextlib
+import tempfile
+from pathlib import Path
 
 from lfx.base.agents.agent import LCAgentComponent
+from lfx.base.data.storage_utils import read_file_bytes
 from lfx.field_typing import AgentExecutor
 from lfx.inputs.inputs import (
     DictInput,

@@ -10,7 +13,9 @@ from lfx.inputs.inputs import (
     MessageTextInput,
 )
 from lfx.schema.message import Message
+from lfx.services.deps import get_settings_service
 from lfx.template.field.base import Output
+from lfx.utils.async_helpers import run_until_complete
 
 
 class CSVAgentComponent(LCAgentComponent):

@@ -70,32 +75,60 @@ class CSVAgentComponent(LCAgentComponent):
         return self.path
 
     def build_agent_response(self) -> Message:
-
-
-
-
+        """Build and execute the CSV agent, returning the response."""
+        try:
+            from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
+        except ImportError as e:
+            msg = (
+                "langchain-experimental is not installed. Please install it with `pip install langchain-experimental`."
+            )
+            raise ImportError(msg) from e
 
-
-
-
-
-
-
-
-
+        try:
+            agent_kwargs = {
+                "verbose": self.verbose,
+                "allow_dangerous_code": True,
+            }
+
+            # Get local path (downloads from S3 if needed)
+            local_path = self._get_local_path()
 
-
-
+            agent_csv = create_csv_agent(
+                llm=self.llm,
+                path=local_path,
+                agent_type=self.agent_type,
+                handle_parsing_errors=self.handle_parsing_errors,
+                pandas_kwargs=self.pandas_kwargs,
+                **agent_kwargs,
+            )
+
+            result = agent_csv.invoke({"input": self.input_value})
+            return Message(text=str(result["output"]))
+
+        finally:
+            # Clean up temp file if created
+            self._cleanup_temp_file()
 
     def build_agent(self) -> AgentExecutor:
+        try:
+            from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
+        except ImportError as e:
+            msg = (
+                "langchain-experimental is not installed. Please install it with `pip install langchain-experimental`."
+            )
+            raise ImportError(msg) from e
+
         agent_kwargs = {
             "verbose": self.verbose,
             "allow_dangerous_code": True,
         }
 
+        # Get local path (downloads from S3 if needed)
+        local_path = self._get_local_path()
+
         agent_csv = create_csv_agent(
             llm=self.llm,
-            path=
+            path=local_path,
             agent_type=self.agent_type,
             handle_parsing_errors=self.handle_parsing_errors,
             pandas_kwargs=self.pandas_kwargs,

@@ -104,4 +137,39 @@ class CSVAgentComponent(LCAgentComponent):
 
         self.status = Message(text=str(agent_csv))
 
+        # Note: Temp file will be cleaned up when the component is destroyed or
+        # when build_agent_response is called
         return agent_csv
+
+    def _get_local_path(self) -> str:
+        """Get a local file path, downloading from S3 storage if necessary.
+
+        Returns:
+            str: Local file path that can be used by LangChain
+        """
+        file_path = self._path()
+        settings = get_settings_service().settings
+
+        # If using S3 storage, download the file to temp
+        if settings.storage_type == "s3":
+            # Download from S3 to temp file
+            csv_bytes = run_until_complete(read_file_bytes(file_path))
+
+            # Create temp file with .csv extension
+            suffix = Path(file_path.split("/")[-1]).suffix or ".csv"
+            with tempfile.NamedTemporaryFile(mode="wb", suffix=suffix, delete=False) as tmp_file:
+                tmp_file.write(csv_bytes)
+                temp_path = tmp_file.name
+
+            # Store temp path for cleanup
+            self._temp_file_path = temp_path
+            return temp_path
+
+        # Local storage - return path as-is
+        return file_path
+
+    def _cleanup_temp_file(self) -> None:
+        """Clean up temporary file if one was created."""
+        if hasattr(self, "_temp_file_path"):
+            with contextlib.suppress(Exception):
+                Path(self._temp_file_path).unlink()  # Ignore cleanup errors
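
The new _get_local_path() / _cleanup_temp_file() pair implements a download-to-temp pattern for S3-backed storage. The standalone sketch below restates that pattern; to_local_path is a hypothetical helper, while read_file_bytes, run_until_complete, get_settings_service, and the storage_type == "s3" check are the ones used in the diff above:

    import tempfile
    from pathlib import Path

    from lfx.base.data.storage_utils import read_file_bytes
    from lfx.services.deps import get_settings_service
    from lfx.utils.async_helpers import run_until_complete

    def to_local_path(file_path: str, default_suffix: str = ".csv") -> str:
        """Hypothetical helper: return a path LangChain can open, downloading from S3 first if needed."""
        if get_settings_service().settings.storage_type != "s3":
            return file_path  # local storage: the path is already usable
        data = run_until_complete(read_file_bytes(file_path))
        suffix = Path(file_path.split("/")[-1]).suffix or default_suffix
        with tempfile.NamedTemporaryFile(mode="wb", suffix=suffix, delete=False) as tmp:
            tmp.write(data)
        return tmp.name  # caller is responsible for deleting this temp file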

lfx/components/langchain_utilities/json_agent.py CHANGED

@@ -1,13 +1,15 @@
+import contextlib
+import tempfile
 from pathlib import Path
 
 import yaml
 from langchain.agents import AgentExecutor
-from langchain_community.agent_toolkits import create_json_agent
-from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
-from langchain_community.tools.json.tool import JsonSpec
 
 from lfx.base.agents.agent import LCAgentComponent
+from lfx.base.data.storage_utils import read_file_bytes
 from lfx.inputs.inputs import FileInput, HandleInput
+from lfx.services.deps import get_settings_service
+from lfx.utils.async_helpers import run_until_complete
 
 
 class JsonAgentComponent(LCAgentComponent):

@@ -32,14 +34,67 @@ class JsonAgentComponent(LCAgentComponent):
         ),
     ]
 
+    def _get_local_path(self) -> Path:
+        """Get a local file path, downloading from S3 storage if necessary.
+
+        Returns:
+            Path: Local file path that can be used by LangChain
+        """
+        file_path = self.path
+        settings = get_settings_service().settings
+
+        # If using S3 storage, download the file to temp
+        if settings.storage_type == "s3":
+            # Download from S3 to temp file
+            file_bytes = run_until_complete(read_file_bytes(file_path))
+
+            # Create temp file with appropriate extension
+            suffix = Path(file_path.split("/")[-1]).suffix or ".json"
+            with tempfile.NamedTemporaryFile(mode="wb", suffix=suffix, delete=False) as tmp_file:
+                tmp_file.write(file_bytes)
+                temp_path = tmp_file.name
+
+            # Store temp path for cleanup
+            self._temp_file_path = temp_path
+            return Path(temp_path)
+
+        # Local storage - return as Path
+        return Path(file_path)
+
+    def _cleanup_temp_file(self) -> None:
+        """Clean up temporary file if one was created."""
+        if hasattr(self, "_temp_file_path"):
+            with contextlib.suppress(Exception):
+                Path(self._temp_file_path).unlink()  # Ignore cleanup errors
+
     def build_agent(self) -> AgentExecutor:
-
-
-
-
-
-
-
-
+        """Build the JSON agent executor."""
+        try:
+            from langchain_community.agent_toolkits import create_json_agent
+            from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
+            from langchain_community.tools.json.tool import JsonSpec
+        except ImportError as e:
+            msg = "langchain-community is not installed. Please install it with `pip install langchain-community`."
+            raise ImportError(msg) from e
+
+        try:
+            # Get local path (downloads from S3 if needed)
+            path = self._get_local_path()
 
-
+            if path.suffix in {".yaml", ".yml"}:
+                with path.open(encoding="utf-8") as file:
+                    yaml_dict = yaml.safe_load(file)
+                spec = JsonSpec(dict_=yaml_dict)
+            else:
+                spec = JsonSpec.from_file(str(path))
+            toolkit = JsonToolkit(spec=spec)
+
+            agent = create_json_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs())
+        except Exception:
+            # Make sure to clean up temp file on error
+            self._cleanup_temp_file()
+            raise
+        else:
+            # Clean up temp file after agent is created
+            self._cleanup_temp_file()
+            return agent