lfx-nightly 0.1.13.dev0__py3-none-any.whl → 0.2.0.dev26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lfx/_assets/component_index.json +1 -1
- lfx/base/agents/agent.py +121 -29
- lfx/base/agents/altk_base_agent.py +380 -0
- lfx/base/agents/altk_tool_wrappers.py +565 -0
- lfx/base/agents/events.py +103 -35
- lfx/base/agents/utils.py +15 -2
- lfx/base/composio/composio_base.py +183 -233
- lfx/base/data/base_file.py +88 -21
- lfx/base/data/storage_utils.py +192 -0
- lfx/base/data/utils.py +178 -14
- lfx/base/datastax/__init__.py +5 -0
- lfx/{components/vectorstores/astradb.py → base/datastax/astradb_base.py} +84 -473
- lfx/base/embeddings/embeddings_class.py +113 -0
- lfx/base/io/chat.py +5 -4
- lfx/base/mcp/util.py +101 -15
- lfx/base/models/groq_constants.py +74 -58
- lfx/base/models/groq_model_discovery.py +265 -0
- lfx/base/models/model.py +1 -1
- lfx/base/models/model_input_constants.py +74 -7
- lfx/base/models/model_utils.py +100 -0
- lfx/base/models/ollama_constants.py +3 -0
- lfx/base/models/openai_constants.py +7 -0
- lfx/base/models/watsonx_constants.py +36 -0
- lfx/base/tools/run_flow.py +601 -129
- lfx/cli/commands.py +7 -4
- lfx/cli/common.py +2 -2
- lfx/cli/run.py +1 -1
- lfx/cli/script_loader.py +53 -11
- lfx/components/Notion/create_page.py +1 -1
- lfx/components/Notion/list_database_properties.py +1 -1
- lfx/components/Notion/list_pages.py +1 -1
- lfx/components/Notion/list_users.py +1 -1
- lfx/components/Notion/page_content_viewer.py +1 -1
- lfx/components/Notion/search.py +1 -1
- lfx/components/Notion/update_page_property.py +1 -1
- lfx/components/__init__.py +19 -5
- lfx/components/altk/__init__.py +34 -0
- lfx/components/altk/altk_agent.py +193 -0
- lfx/components/amazon/amazon_bedrock_converse.py +1 -1
- lfx/components/apify/apify_actor.py +4 -4
- lfx/components/composio/__init__.py +70 -18
- lfx/components/composio/apollo_composio.py +11 -0
- lfx/components/composio/bitbucket_composio.py +11 -0
- lfx/components/composio/canva_composio.py +11 -0
- lfx/components/composio/coda_composio.py +11 -0
- lfx/components/composio/composio_api.py +10 -0
- lfx/components/composio/discord_composio.py +1 -1
- lfx/components/composio/elevenlabs_composio.py +11 -0
- lfx/components/composio/exa_composio.py +11 -0
- lfx/components/composio/firecrawl_composio.py +11 -0
- lfx/components/composio/fireflies_composio.py +11 -0
- lfx/components/composio/gmail_composio.py +1 -1
- lfx/components/composio/googlebigquery_composio.py +11 -0
- lfx/components/composio/googlecalendar_composio.py +1 -1
- lfx/components/composio/googledocs_composio.py +1 -1
- lfx/components/composio/googlemeet_composio.py +1 -1
- lfx/components/composio/googlesheets_composio.py +1 -1
- lfx/components/composio/googletasks_composio.py +1 -1
- lfx/components/composio/heygen_composio.py +11 -0
- lfx/components/composio/mem0_composio.py +11 -0
- lfx/components/composio/peopledatalabs_composio.py +11 -0
- lfx/components/composio/perplexityai_composio.py +11 -0
- lfx/components/composio/serpapi_composio.py +11 -0
- lfx/components/composio/slack_composio.py +3 -574
- lfx/components/composio/slackbot_composio.py +1 -1
- lfx/components/composio/snowflake_composio.py +11 -0
- lfx/components/composio/tavily_composio.py +11 -0
- lfx/components/composio/youtube_composio.py +2 -2
- lfx/components/{agents → cuga}/__init__.py +5 -7
- lfx/components/cuga/cuga_agent.py +730 -0
- lfx/components/data/__init__.py +78 -28
- lfx/components/data_source/__init__.py +58 -0
- lfx/components/{data → data_source}/api_request.py +26 -3
- lfx/components/{data → data_source}/csv_to_data.py +15 -10
- lfx/components/{data → data_source}/json_to_data.py +15 -8
- lfx/components/{data → data_source}/news_search.py +1 -1
- lfx/components/{data → data_source}/rss.py +1 -1
- lfx/components/{data → data_source}/sql_executor.py +1 -1
- lfx/components/{data → data_source}/url.py +1 -1
- lfx/components/{data → data_source}/web_search.py +1 -1
- lfx/components/datastax/__init__.py +12 -6
- lfx/components/datastax/{astra_assistant_manager.py → astradb_assistant_manager.py} +1 -0
- lfx/components/datastax/astradb_chatmemory.py +40 -0
- lfx/components/datastax/astradb_cql.py +6 -32
- lfx/components/datastax/astradb_graph.py +10 -124
- lfx/components/datastax/astradb_tool.py +13 -53
- lfx/components/datastax/astradb_vectorstore.py +134 -977
- lfx/components/datastax/create_assistant.py +1 -0
- lfx/components/datastax/create_thread.py +1 -0
- lfx/components/datastax/dotenv.py +1 -0
- lfx/components/datastax/get_assistant.py +1 -0
- lfx/components/datastax/getenvvar.py +1 -0
- lfx/components/datastax/graph_rag.py +1 -1
- lfx/components/datastax/hcd.py +1 -1
- lfx/components/datastax/list_assistants.py +1 -0
- lfx/components/datastax/run.py +1 -0
- lfx/components/deactivated/json_document_builder.py +1 -1
- lfx/components/elastic/elasticsearch.py +1 -1
- lfx/components/elastic/opensearch_multimodal.py +1575 -0
- lfx/components/files_and_knowledge/__init__.py +47 -0
- lfx/components/{data → files_and_knowledge}/directory.py +1 -1
- lfx/components/{data → files_and_knowledge}/file.py +246 -18
- lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +17 -9
- lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +18 -10
- lfx/components/{data → files_and_knowledge}/save_file.py +142 -22
- lfx/components/flow_controls/__init__.py +58 -0
- lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
- lfx/components/{logic → flow_controls}/loop.py +47 -9
- lfx/components/flow_controls/run_flow.py +108 -0
- lfx/components/glean/glean_search_api.py +1 -1
- lfx/components/groq/groq.py +35 -28
- lfx/components/helpers/__init__.py +102 -0
- lfx/components/ibm/watsonx.py +25 -21
- lfx/components/input_output/__init__.py +3 -1
- lfx/components/input_output/chat.py +12 -3
- lfx/components/input_output/chat_output.py +12 -4
- lfx/components/input_output/text.py +1 -1
- lfx/components/input_output/text_output.py +1 -1
- lfx/components/{data → input_output}/webhook.py +1 -1
- lfx/components/knowledge_bases/__init__.py +59 -4
- lfx/components/langchain_utilities/character.py +1 -1
- lfx/components/langchain_utilities/csv_agent.py +84 -16
- lfx/components/langchain_utilities/json_agent.py +67 -12
- lfx/components/langchain_utilities/language_recursive.py +1 -1
- lfx/components/llm_operations/__init__.py +46 -0
- lfx/components/{processing → llm_operations}/batch_run.py +1 -1
- lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
- lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
- lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
- lfx/components/{processing → llm_operations}/structured_output.py +56 -18
- lfx/components/logic/__init__.py +126 -0
- lfx/components/mem0/mem0_chat_memory.py +11 -0
- lfx/components/mistral/mistral_embeddings.py +1 -1
- lfx/components/models/__init__.py +64 -9
- lfx/components/models_and_agents/__init__.py +49 -0
- lfx/components/{agents → models_and_agents}/agent.py +49 -6
- lfx/components/models_and_agents/embedding_model.py +423 -0
- lfx/components/models_and_agents/language_model.py +398 -0
- lfx/components/{agents → models_and_agents}/mcp_component.py +84 -45
- lfx/components/{helpers → models_and_agents}/memory.py +1 -1
- lfx/components/nvidia/system_assist.py +1 -1
- lfx/components/olivya/olivya.py +1 -1
- lfx/components/ollama/ollama.py +235 -14
- lfx/components/openrouter/openrouter.py +49 -147
- lfx/components/processing/__init__.py +9 -57
- lfx/components/processing/converter.py +1 -1
- lfx/components/processing/dataframe_operations.py +1 -1
- lfx/components/processing/parse_json_data.py +2 -2
- lfx/components/processing/parser.py +7 -2
- lfx/components/processing/split_text.py +1 -1
- lfx/components/qdrant/qdrant.py +1 -1
- lfx/components/redis/redis.py +1 -1
- lfx/components/twelvelabs/split_video.py +10 -0
- lfx/components/twelvelabs/video_file.py +12 -0
- lfx/components/utilities/__init__.py +43 -0
- lfx/components/{helpers → utilities}/calculator_core.py +1 -1
- lfx/components/{helpers → utilities}/current_date.py +1 -1
- lfx/components/{processing → utilities}/python_repl_core.py +1 -1
- lfx/components/vectorstores/__init__.py +0 -6
- lfx/components/vectorstores/local_db.py +9 -0
- lfx/components/youtube/youtube_transcripts.py +118 -30
- lfx/custom/custom_component/component.py +60 -3
- lfx/custom/custom_component/custom_component.py +68 -6
- lfx/field_typing/constants.py +1 -0
- lfx/graph/edge/base.py +45 -22
- lfx/graph/graph/base.py +5 -2
- lfx/graph/graph/schema.py +3 -2
- lfx/graph/state/model.py +15 -2
- lfx/graph/utils.py +6 -0
- lfx/graph/vertex/base.py +4 -1
- lfx/graph/vertex/param_handler.py +10 -7
- lfx/graph/vertex/vertex_types.py +1 -1
- lfx/helpers/__init__.py +12 -0
- lfx/helpers/flow.py +117 -0
- lfx/inputs/input_mixin.py +24 -1
- lfx/inputs/inputs.py +13 -1
- lfx/interface/components.py +161 -83
- lfx/io/schema.py +6 -0
- lfx/log/logger.py +5 -3
- lfx/schema/schema.py +5 -0
- lfx/services/database/__init__.py +5 -0
- lfx/services/database/service.py +25 -0
- lfx/services/deps.py +87 -22
- lfx/services/manager.py +19 -6
- lfx/services/mcp_composer/service.py +998 -157
- lfx/services/session.py +5 -0
- lfx/services/settings/base.py +51 -7
- lfx/services/settings/constants.py +8 -0
- lfx/services/storage/local.py +76 -46
- lfx/services/storage/service.py +152 -29
- lfx/template/field/base.py +3 -0
- lfx/utils/ssrf_protection.py +384 -0
- lfx/utils/validate_cloud.py +26 -0
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/METADATA +38 -22
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/RECORD +210 -196
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/WHEEL +1 -1
- lfx/components/agents/cuga_agent.py +0 -1013
- lfx/components/datastax/astra_db.py +0 -77
- lfx/components/datastax/cassandra.py +0 -92
- lfx/components/logic/run_flow.py +0 -71
- lfx/components/models/embedding_model.py +0 -114
- lfx/components/models/language_model.py +0 -144
- lfx/components/vectorstores/astradb_graph.py +0 -326
- lfx/components/vectorstores/cassandra.py +0 -264
- lfx/components/vectorstores/cassandra_graph.py +0 -238
- lfx/components/vectorstores/chroma.py +0 -167
- lfx/components/vectorstores/clickhouse.py +0 -135
- lfx/components/vectorstores/couchbase.py +0 -102
- lfx/components/vectorstores/elasticsearch.py +0 -267
- lfx/components/vectorstores/faiss.py +0 -111
- lfx/components/vectorstores/graph_rag.py +0 -141
- lfx/components/vectorstores/hcd.py +0 -314
- lfx/components/vectorstores/milvus.py +0 -115
- lfx/components/vectorstores/mongodb_atlas.py +0 -213
- lfx/components/vectorstores/opensearch.py +0 -243
- lfx/components/vectorstores/pgvector.py +0 -72
- lfx/components/vectorstores/pinecone.py +0 -134
- lfx/components/vectorstores/qdrant.py +0 -109
- lfx/components/vectorstores/supabase.py +0 -76
- lfx/components/vectorstores/upstash.py +0 -124
- lfx/components/vectorstores/vectara.py +0 -97
- lfx/components/vectorstores/vectara_rag.py +0 -164
- lfx/components/vectorstores/weaviate.py +0 -89
- /lfx/components/{data → data_source}/mock_data.py +0 -0
- /lfx/components/datastax/{astra_vectorize.py → astradb_vectorize.py} +0 -0
- /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
- /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
- /lfx/components/{logic → flow_controls}/listen.py +0 -0
- /lfx/components/{logic → flow_controls}/notify.py +0 -0
- /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
- /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
- /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
- /lfx/components/{helpers → processing}/create_list.py +0 -0
- /lfx/components/{helpers → processing}/output_parser.py +0 -0
- /lfx/components/{helpers → processing}/store_message.py +0 -0
- /lfx/components/{helpers → utilities}/id_generator.py +0 -0
- {lfx_nightly-0.1.13.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/entry_points.txt +0 -0
lfx/base/tools/run_flow.py
CHANGED
|
@@ -1,27 +1,59 @@
|
|
|
1
|
-
from
|
|
2
|
-
from
|
|
1
|
+
from collections import Counter
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from types import MethodType # near the imports
|
|
4
|
+
from typing import TYPE_CHECKING, Any
|
|
3
5
|
|
|
6
|
+
from langflow.helpers.flow import get_flow_by_id_or_name
|
|
7
|
+
from langflow.processing.process import process_tweaks_on_graph
|
|
8
|
+
|
|
9
|
+
from lfx.base.tools.constants import TOOL_OUTPUT_NAME
|
|
4
10
|
from lfx.custom.custom_component.component import Component, get_component_toolkit
|
|
5
11
|
from lfx.field_typing import Tool
|
|
6
12
|
from lfx.graph.graph.base import Graph
|
|
7
13
|
from lfx.graph.vertex.base import Vertex
|
|
8
|
-
|
|
9
|
-
|
|
14
|
+
|
|
15
|
+
# TODO: switch to lfx
|
|
16
|
+
from lfx.helpers import get_flow_inputs, run_flow
|
|
17
|
+
from lfx.inputs.inputs import BoolInput, DropdownInput, InputTypes, MessageTextInput, StrInput
|
|
10
18
|
from lfx.log.logger import logger
|
|
11
19
|
from lfx.schema.data import Data
|
|
12
|
-
from lfx.schema.dataframe import DataFrame
|
|
13
20
|
from lfx.schema.dotdict import dotdict
|
|
14
|
-
from lfx.
|
|
21
|
+
from lfx.services.cache.utils import CacheMiss
|
|
22
|
+
from lfx.services.deps import get_shared_component_cache_service
|
|
15
23
|
from lfx.template.field.base import Output
|
|
16
24
|
|
|
17
25
|
if TYPE_CHECKING:
|
|
26
|
+
from collections.abc import Callable
|
|
27
|
+
|
|
18
28
|
from lfx.base.tools.component_tool import ComponentToolkit
|
|
19
29
|
|
|
20
30
|
|
|
21
31
|
class RunFlowBaseComponent(Component):
|
|
22
32
|
def __init__(self, *args, **kwargs):
|
|
33
|
+
self._flow_output_methods: set[str] = set()
|
|
23
34
|
super().__init__(*args, **kwargs)
|
|
24
35
|
self.add_tool_output = True
|
|
36
|
+
################################################################
|
|
37
|
+
# cache the selected flow's graph in the shared component cache
|
|
38
|
+
# if cache_flow is enabled.
|
|
39
|
+
################################################################
|
|
40
|
+
self._shared_component_cache = get_shared_component_cache_service()
|
|
41
|
+
# add all the flow cache related methods to the dispatcher.
|
|
42
|
+
# these are used internally among the cache related methods.
|
|
43
|
+
# the _flow_cache_call method is meant to be user-facing
|
|
44
|
+
# for cache operations as it handles validation.
|
|
45
|
+
self._cache_flow_dispatcher: dict[str, Callable[..., Any]] = {
|
|
46
|
+
"get": self._get_cached_flow,
|
|
47
|
+
"set": self._set_cached_flow,
|
|
48
|
+
"delete": self._delete_cached_flow,
|
|
49
|
+
"_build_key": self._build_flow_cache_key,
|
|
50
|
+
"_build_graph": self._build_graph_from_dict,
|
|
51
|
+
}
|
|
52
|
+
# save the run's outputs to avoid re-executing
|
|
53
|
+
# the flow if it has multiple outputs.
|
|
54
|
+
self._last_run_outputs: list[Data] | None = None
|
|
55
|
+
# save the updated_at of the user's selected flow
|
|
56
|
+
self._cached_flow_updated_at: str | None = None
|
|
25
57
|
|
|
26
58
|
_base_inputs: list[InputTypes] = [
|
|
27
59
|
DropdownInput(
|
|
@@ -29,120 +61,119 @@ class RunFlowBaseComponent(Component):
|
|
|
29
61
|
display_name="Flow Name",
|
|
30
62
|
info="The name of the flow to run.",
|
|
31
63
|
options=[],
|
|
64
|
+
options_metadata=[],
|
|
32
65
|
real_time_refresh=True,
|
|
66
|
+
refresh_button=True,
|
|
33
67
|
value=None,
|
|
34
68
|
),
|
|
35
|
-
|
|
69
|
+
StrInput(
|
|
70
|
+
name="flow_id_selected",
|
|
71
|
+
display_name="Flow ID",
|
|
72
|
+
info="The ID of the flow to run.",
|
|
73
|
+
value=None,
|
|
74
|
+
show=False,
|
|
75
|
+
override_skip=True, # persist to runtime
|
|
76
|
+
),
|
|
77
|
+
MessageTextInput(
|
|
36
78
|
name="session_id",
|
|
37
79
|
display_name="Session ID",
|
|
38
80
|
info="The session ID to run the flow in.",
|
|
39
81
|
advanced=True,
|
|
40
82
|
),
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
Output(
|
|
52
|
-
name="flow_outputs_dataframe",
|
|
53
|
-
display_name="Flow Dataframe Output",
|
|
54
|
-
method="dataframe_output",
|
|
55
|
-
hidden=True,
|
|
56
|
-
group_outputs=True,
|
|
57
|
-
tool_mode=False, # This output is not intended to be used as a tool, so tool_mode is disabled.
|
|
58
|
-
),
|
|
59
|
-
Output(
|
|
60
|
-
name="flow_outputs_message",
|
|
61
|
-
group_outputs=True,
|
|
62
|
-
display_name="Flow Message Output",
|
|
63
|
-
method="message_output",
|
|
83
|
+
# bool dropdown to select if the flow should be cached
|
|
84
|
+
# Note: the user's selected flow is automatically updated when
|
|
85
|
+
# when the flow_name_selected dropdown is refreshed.
|
|
86
|
+
# TODO: find a more explicit way to update the cached flow.
|
|
87
|
+
BoolInput(
|
|
88
|
+
name="cache_flow",
|
|
89
|
+
display_name="Cache Flow",
|
|
90
|
+
info="Whether to cache the selected flow.",
|
|
91
|
+
value=False,
|
|
92
|
+
advanced=True,
|
|
64
93
|
),
|
|
65
94
|
]
|
|
66
|
-
|
|
95
|
+
_base_outputs: list[Output] = []
|
|
96
|
+
default_keys = ["code", "_type", "flow_name_selected", "flow_id_selected", "session_id", "cache_flow"]
|
|
67
97
|
FLOW_INPUTS: list[dotdict] = []
|
|
68
98
|
flow_tweak_data: dict = {}
|
|
99
|
+
IOPUT_SEP = "~" # separator for joining a vertex id and input/output name to form a unique input/output name
|
|
69
100
|
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
run_outputs = await self.run_flow_with_tweaks()
|
|
77
|
-
first_output = run_outputs[0]
|
|
78
|
-
|
|
79
|
-
if isinstance(first_output, Data):
|
|
80
|
-
return first_output
|
|
101
|
+
################################################################
|
|
102
|
+
# set and register the selected flow's output methods
|
|
103
|
+
################################################################
|
|
104
|
+
def map_outputs(self) -> None: # Note: overrides the base map_outputs method
|
|
105
|
+
super().map_outputs()
|
|
106
|
+
self._ensure_flow_output_methods()
|
|
81
107
|
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
108
|
+
def _ensure_flow_output_methods(self) -> None:
|
|
109
|
+
self._clear_dynamic_flow_output_methods()
|
|
110
|
+
for output in self._outputs_map.values():
|
|
111
|
+
if not output or not output.name or output.name == TOOL_OUTPUT_NAME or self.IOPUT_SEP not in output.name:
|
|
112
|
+
continue
|
|
113
|
+
vertex_id, output_name = output.name.split(self.IOPUT_SEP, 1)
|
|
114
|
+
output.method = self._register_flow_output_method(
|
|
115
|
+
vertex_id=vertex_id,
|
|
116
|
+
output_name=output_name,
|
|
117
|
+
)
|
|
86
118
|
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
119
|
+
################################################################
|
|
120
|
+
# Flow retrieval
|
|
121
|
+
################################################################
|
|
122
|
+
async def get_flow(self, flow_name_selected: str | None = None, flow_id_selected: str | None = None) -> Data:
|
|
123
|
+
"""Get a flow's data by name or id."""
|
|
124
|
+
flow = await get_flow_by_id_or_name(
|
|
125
|
+
user_id=self.user_id,
|
|
126
|
+
flow_id=flow_id_selected,
|
|
127
|
+
flow_name=flow_name_selected,
|
|
128
|
+
)
|
|
129
|
+
return flow or Data(data={})
|
|
91
130
|
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
if isinstance(message_result, str):
|
|
107
|
-
return Message(text=message_result)
|
|
108
|
-
return Message(text=message_result.data["text"])
|
|
109
|
-
|
|
110
|
-
async def get_flow_names(self) -> list[str]:
|
|
111
|
-
# TODO: get flfow ID with flow name
|
|
112
|
-
flow_data = await self.alist_flows()
|
|
113
|
-
return [flow_data.data["name"] for flow_data in flow_data]
|
|
114
|
-
|
|
115
|
-
async def get_flow(self, flow_name_selected: str) -> Data | None:
|
|
116
|
-
# get flow from flow id
|
|
117
|
-
flow_datas = await self.alist_flows()
|
|
118
|
-
for flow_data in flow_datas:
|
|
119
|
-
if flow_data.data["name"] == flow_name_selected:
|
|
120
|
-
return flow_data
|
|
121
|
-
return None
|
|
131
|
+
async def get_graph(
|
|
132
|
+
self,
|
|
133
|
+
flow_name_selected: str | None = None,
|
|
134
|
+
flow_id_selected: str | None = None,
|
|
135
|
+
updated_at: str | None = None,
|
|
136
|
+
) -> Graph | None:
|
|
137
|
+
"""Get a flow's graph by name or id."""
|
|
138
|
+
if not (flow_name_selected or flow_id_selected):
|
|
139
|
+
msg = "Flow name or id is required"
|
|
140
|
+
raise ValueError(msg)
|
|
141
|
+
if flow_id_selected and (flow := self._flow_cache_call("get", flow_id=flow_id_selected)):
|
|
142
|
+
if self._is_cached_flow_up_to_date(flow, updated_at):
|
|
143
|
+
return flow
|
|
144
|
+
self._flow_cache_call("delete", flow_id=flow_id_selected) # stale, delete it
|
|
122
145
|
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
if flow_data:
|
|
127
|
-
return Graph.from_payload(flow_data.data["data"])
|
|
146
|
+
# TODO: use flow id only
|
|
147
|
+
flow = await self.get_flow(flow_name_selected=flow_name_selected, flow_id_selected=flow_id_selected)
|
|
148
|
+
if not flow:
|
|
128
149
|
msg = "Flow not found"
|
|
129
150
|
raise ValueError(msg)
|
|
130
|
-
# Ensure a Graph is always returned or an exception is raised
|
|
131
|
-
msg = "No valid flow JSON or flow name selected."
|
|
132
|
-
raise ValueError(msg)
|
|
133
151
|
|
|
152
|
+
graph = Graph.from_payload(
|
|
153
|
+
payload=flow.data.get("data", {}),
|
|
154
|
+
flow_id=flow_id_selected,
|
|
155
|
+
flow_name=flow_name_selected,
|
|
156
|
+
)
|
|
157
|
+
graph.description = flow.data.get("description", None)
|
|
158
|
+
graph.updated_at = flow.data.get("updated_at", None)
|
|
159
|
+
|
|
160
|
+
self._flow_cache_call("set", flow=graph)
|
|
161
|
+
|
|
162
|
+
return graph
|
|
163
|
+
|
|
164
|
+
################################################################
|
|
165
|
+
# Flow inputs/config
|
|
166
|
+
################################################################
|
|
134
167
|
def get_new_fields_from_graph(self, graph: Graph) -> list[dotdict]:
|
|
135
168
|
inputs = get_flow_inputs(graph)
|
|
136
169
|
return self.get_new_fields(inputs)
|
|
137
170
|
|
|
138
171
|
def update_build_config_from_graph(self, build_config: dotdict, graph: Graph):
|
|
139
172
|
try:
|
|
140
|
-
# Get all inputs from the graph
|
|
141
173
|
new_fields = self.get_new_fields_from_graph(graph)
|
|
142
|
-
|
|
143
|
-
self.delete_fields(build_config,
|
|
144
|
-
build_config
|
|
145
|
-
|
|
174
|
+
keep_fields: set[str] = set([new_field["name"] for new_field in new_fields] + self.default_keys)
|
|
175
|
+
self.delete_fields(build_config, [key for key in build_config if key not in keep_fields])
|
|
176
|
+
build_config.update((field["name"], field) for field in new_fields)
|
|
146
177
|
except Exception as e:
|
|
147
178
|
msg = "Error updating build config from graph"
|
|
148
179
|
logger.exception(msg)
|
|
@@ -150,24 +181,34 @@ class RunFlowBaseComponent(Component):
|
|
|
150
181
|
|
|
151
182
|
def get_new_fields(self, inputs_vertex: list[Vertex]) -> list[dotdict]:
|
|
152
183
|
new_fields: list[dotdict] = []
|
|
184
|
+
vdisp_cts = Counter(v.display_name for v in inputs_vertex)
|
|
153
185
|
|
|
154
186
|
for vertex in inputs_vertex:
|
|
155
187
|
field_template = vertex.data.get("node", {}).get("template", {})
|
|
156
188
|
field_order = vertex.data.get("node", {}).get("field_order", [])
|
|
157
|
-
if field_order and field_template:
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
189
|
+
if not (field_order and field_template):
|
|
190
|
+
continue
|
|
191
|
+
new_vertex_inputs = [
|
|
192
|
+
dotdict(
|
|
193
|
+
{
|
|
194
|
+
**field_template[input_name],
|
|
195
|
+
"name": self._get_ioput_name(vertex.id, input_name),
|
|
196
|
+
"display_name": (
|
|
197
|
+
f"{field_template[input_name]['display_name']} ({vertex.display_name})"
|
|
198
|
+
if vdisp_cts[vertex.display_name] == 1
|
|
199
|
+
else (
|
|
200
|
+
f"{field_template[input_name]['display_name']}"
|
|
201
|
+
f"({vertex.display_name}-{vertex.id.split('-')[-1]})"
|
|
202
|
+
)
|
|
203
|
+
),
|
|
204
|
+
# TODO: make this more robust?
|
|
205
|
+
"tool_mode": not (field_template[input_name].get("advanced", False)),
|
|
206
|
+
}
|
|
207
|
+
)
|
|
208
|
+
for input_name in field_order
|
|
209
|
+
if input_name in field_template
|
|
210
|
+
]
|
|
211
|
+
new_fields += new_vertex_inputs
|
|
171
212
|
return new_fields
|
|
172
213
|
|
|
173
214
|
def add_new_fields(self, build_config: dotdict, new_fields: list[dotdict]) -> dotdict:
|
|
@@ -177,44 +218,67 @@ class RunFlowBaseComponent(Component):
|
|
|
177
218
|
return build_config
|
|
178
219
|
|
|
179
220
|
def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:
|
|
180
|
-
"""Delete specified fields from build_config.
|
|
221
|
+
"""Delete specified fields from build_config.
|
|
222
|
+
|
|
223
|
+
Args:
|
|
224
|
+
build_config: The build_config to delete the fields from.
|
|
225
|
+
fields: The fields to delete from the build_config.
|
|
226
|
+
"""
|
|
181
227
|
if isinstance(fields, dict):
|
|
182
228
|
fields = list(fields.keys())
|
|
183
229
|
for field in fields:
|
|
184
230
|
build_config.pop(field, None)
|
|
185
231
|
|
|
186
|
-
def
|
|
187
|
-
"""
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
232
|
+
async def get_required_data(self) -> tuple[str, list[dotdict]] | None:
|
|
233
|
+
"""Retrieve flow description and tool-mode input fields for the selected flow.
|
|
234
|
+
|
|
235
|
+
Fetches the graph for the given flow, extracts its input fields, and filters
|
|
236
|
+
for only those inputs that are eligible for tool mode (non-advanced fields).
|
|
237
|
+
|
|
238
|
+
Args:
|
|
239
|
+
flow_name_selected: The name of the flow to retrieve data for. If None,
|
|
240
|
+
returns None.
|
|
241
|
+
|
|
242
|
+
Returns:
|
|
243
|
+
A tuple of (flow_description, tool_mode_fields) where:
|
|
244
|
+
- flow_description (str): The human-readable description of the flow
|
|
245
|
+
- tool_mode_fields (list[dotdict]): Input fields marked for tool mode
|
|
246
|
+
Returns None if the flow cannot be found or loaded.
|
|
247
|
+
"""
|
|
248
|
+
graph = await self.get_graph(self.flow_name_selected, self.flow_id_selected, self._cached_flow_updated_at)
|
|
249
|
+
formatted_outputs = self._format_flow_outputs(graph)
|
|
250
|
+
self._sync_flow_outputs(formatted_outputs)
|
|
251
|
+
new_fields = self.get_new_fields_from_graph(graph)
|
|
252
|
+
new_fields = self.update_input_types(new_fields)
|
|
253
|
+
|
|
254
|
+
return (graph.description, [field for field in new_fields if field.get("tool_mode") is True])
|
|
204
255
|
|
|
205
256
|
def update_input_types(self, fields: list[dotdict]) -> list[dotdict]:
|
|
257
|
+
"""Update the input_types of the fields.
|
|
258
|
+
|
|
259
|
+
If a field's input_types is None, it will be set to an empty list.
|
|
260
|
+
|
|
261
|
+
Args:
|
|
262
|
+
fields: The fields to update the input_types for.
|
|
263
|
+
|
|
264
|
+
Returns:
|
|
265
|
+
The updated fields.
|
|
266
|
+
"""
|
|
206
267
|
for field in fields:
|
|
207
268
|
if isinstance(field, dict):
|
|
208
|
-
if field.get("input_types") is None:
|
|
269
|
+
if field.get("input_types", None) is None:
|
|
209
270
|
field["input_types"] = []
|
|
210
271
|
elif hasattr(field, "input_types") and field.input_types is None:
|
|
211
272
|
field.input_types = []
|
|
212
273
|
return fields
|
|
213
274
|
|
|
214
275
|
async def _get_tools(self) -> list[Tool]:
|
|
276
|
+
"""Expose flow as a tool."""
|
|
215
277
|
component_toolkit: type[ComponentToolkit] = get_component_toolkit()
|
|
216
|
-
flow_description, tool_mode_inputs = await self.get_required_data(
|
|
217
|
-
|
|
278
|
+
flow_description, tool_mode_inputs = await self.get_required_data()
|
|
279
|
+
if not tool_mode_inputs:
|
|
280
|
+
return []
|
|
281
|
+
# convert list of dicts to list of dotdicts
|
|
218
282
|
tool_mode_inputs = [dotdict(field) for field in tool_mode_inputs]
|
|
219
283
|
return component_toolkit(component=self).get_tools(
|
|
220
284
|
tool_name=f"{self.flow_name_selected}_tool",
|
|
@@ -224,3 +288,411 @@ class RunFlowBaseComponent(Component):
|
|
|
224
288
|
callbacks=self.get_langchain_callbacks(),
|
|
225
289
|
flow_mode_inputs=tool_mode_inputs,
|
|
226
290
|
)
|
|
291
|
+
|
|
292
|
+
################################################################
|
|
293
|
+
# Flow output resolution
|
|
294
|
+
################################################################
|
|
295
|
+
async def _get_cached_run_outputs(
|
|
296
|
+
self,
|
|
297
|
+
*,
|
|
298
|
+
user_id: str | None = None,
|
|
299
|
+
tweaks: dict | None,
|
|
300
|
+
inputs: dict | list[dict] | None,
|
|
301
|
+
output_type: str,
|
|
302
|
+
):
|
|
303
|
+
if self._last_run_outputs is not None:
|
|
304
|
+
return self._last_run_outputs
|
|
305
|
+
resolved_tweaks = tweaks or self.flow_tweak_data or {}
|
|
306
|
+
resolved_inputs = (inputs or self._flow_run_inputs or self._build_inputs_from_tweaks(resolved_tweaks)) or None
|
|
307
|
+
self._last_run_outputs = await self._run_flow_with_cached_graph(
|
|
308
|
+
user_id=user_id,
|
|
309
|
+
tweaks=resolved_tweaks,
|
|
310
|
+
inputs=resolved_inputs,
|
|
311
|
+
output_type=output_type,
|
|
312
|
+
)
|
|
313
|
+
return self._last_run_outputs
|
|
314
|
+
|
|
315
|
+
async def _resolve_flow_output(self, *, vertex_id: str, output_name: str):
|
|
316
|
+
"""Resolve the value of a given vertex's output.
|
|
317
|
+
|
|
318
|
+
Given a vertex_id and output_name, it will resolve the value of the output
|
|
319
|
+
belonging to the vertex with the given vertex_id.
|
|
320
|
+
|
|
321
|
+
Args:
|
|
322
|
+
vertex_id: The ID of the vertex to resolve the output for.
|
|
323
|
+
output_name: The name of the output to resolve.
|
|
324
|
+
|
|
325
|
+
Returns:
|
|
326
|
+
The resolved output.
|
|
327
|
+
"""
|
|
328
|
+
run_outputs = await self._get_cached_run_outputs(
|
|
329
|
+
user_id=self.user_id,
|
|
330
|
+
tweaks=self.flow_tweak_data,
|
|
331
|
+
inputs=None,
|
|
332
|
+
output_type="any",
|
|
333
|
+
)
|
|
334
|
+
|
|
335
|
+
if not run_outputs:
|
|
336
|
+
return None
|
|
337
|
+
first_output = run_outputs[0]
|
|
338
|
+
if not first_output.outputs:
|
|
339
|
+
return None
|
|
340
|
+
for result in first_output.outputs:
|
|
341
|
+
if not (result and result.component_id == vertex_id):
|
|
342
|
+
continue
|
|
343
|
+
if isinstance(result.results, dict) and output_name in result.results:
|
|
344
|
+
return result.results[output_name]
|
|
345
|
+
if result.artifacts and output_name in result.artifacts:
|
|
346
|
+
return result.artifacts[output_name]
|
|
347
|
+
return result.results or result.artifacts or result.outputs
|
|
348
|
+
|
|
349
|
+
return None
|
|
350
|
+
|
|
351
|
+
def _clear_dynamic_flow_output_methods(self) -> None:
|
|
352
|
+
for method_name in self._flow_output_methods:
|
|
353
|
+
if hasattr(self, method_name):
|
|
354
|
+
delattr(self, method_name)
|
|
355
|
+
self._flow_output_methods.clear()
|
|
356
|
+
|
|
357
|
+
def _register_flow_output_method(self, *, vertex_id: str, output_name: str) -> str:
|
|
358
|
+
safe_vertex = vertex_id.replace("-", "_")
|
|
359
|
+
safe_output = output_name.replace("-", "_").replace(self.IOPUT_SEP, "_")
|
|
360
|
+
method_name = f"_resolve_flow_output__{safe_vertex}__{safe_output}"
|
|
361
|
+
|
|
362
|
+
async def _dynamic_resolver(_self):
|
|
363
|
+
return await _self._resolve_flow_output( # noqa: SLF001
|
|
364
|
+
vertex_id=vertex_id,
|
|
365
|
+
output_name=output_name,
|
|
366
|
+
)
|
|
367
|
+
|
|
368
|
+
setattr(self, method_name, MethodType(_dynamic_resolver, self))
|
|
369
|
+
self._flow_output_methods.add(method_name)
|
|
370
|
+
return method_name
|
|
371
|
+
|
|
372
|
+
################################################################
|
|
373
|
+
# Dynamic flow output synchronization
|
|
374
|
+
################################################################
|
|
375
|
+
def _sync_flow_outputs(self, outputs: list[Output]) -> None:
    """Persist dynamic flow outputs in the component.

    The tool-mode output (``TOOL_OUTPUT_NAME``) is preserved across syncs:
    it is taken from the existing outputs map when present, otherwise
    looked up in the incoming ``outputs`` list.

    Args:
        outputs: The list of Output objects to persist.

    Returns:
        None
    """
    # Prefer the already-registered tool output; fall back to the new list.
    if TOOL_OUTPUT_NAME in self._outputs_map:
        tool_output = self._outputs_map[TOOL_OUTPUT_NAME]
    else:
        tool_output = next(
            (out for out in outputs if out and out.name == TOOL_OUTPUT_NAME),
            None,
        )

    self.outputs = outputs
    self._outputs_map = {out.name: out for out in outputs if out}
    # Fix: only re-register the tool output when one actually exists. The
    # previous code stored None under TOOL_OUTPUT_NAME unconditionally,
    # which made later `TOOL_OUTPUT_NAME in self._outputs_map` checks
    # succeed while holding a useless None value.
    if tool_output is not None:
        self._outputs_map[TOOL_OUTPUT_NAME] = tool_output
|
|
396
|
+
|
|
397
|
+
async def update_outputs(self, frontend_node: dict, field_name: str, field_value: Any) -> dict:
    """Update the outputs of the frontend node.

    Reacts only to changes of the ``flow_name_selected`` field: loads the
    selected flow's graph, derives Output objects from its output nodes,
    registers them on the component, and writes their dumps back onto the
    frontend node.

    Args:
        frontend_node: The frontend node to update the outputs for.
        field_name: The name of the field that was updated.
        field_value: The value of the field that was updated.

    Returns:
        The updated frontend node.
    """
    # Ignore unrelated fields and empty selections.
    if field_name != "flow_name_selected" or not field_value:
        return frontend_node

    selected_meta = (
        frontend_node.get("template", {}).get("flow_name_selected", {}).get("selected_metadata", {})
    )
    graph = await self.get_graph(
        flow_name_selected=field_value,
        flow_id_selected=selected_meta.get("id"),
        updated_at=selected_meta.get("updated_at"),
    )
    # Generate Output objects from the flow's output nodes and persist them.
    flow_outputs = self._format_flow_outputs(graph)
    self._sync_flow_outputs(flow_outputs)
    frontend_node["outputs"] = [out.model_dump() for out in flow_outputs]
    return frontend_node
|
|
426
|
+
|
|
427
|
+
################################################################
|
|
428
|
+
# Tool mode + formatting
|
|
429
|
+
################################################################
|
|
430
|
+
def _format_flow_outputs(self, graph: Graph) -> list[Output]:
    """Generate Output objects from the graph's output vertices.

    Each Output is renamed by prepending its vertex id (making the name
    unique) and bound to a dynamically registered resolver method that
    looks up the value produced during flow execution. Display names are
    disambiguated: a vertex with a single output and a unique display
    name keeps the vertex display name; otherwise the output display name
    is suffixed with the vertex display name or, when several vertices
    share a display name, with the vertex id.

    Args:
        graph: The graph to generate outputs for.

    Returns:
        A list of Output objects.
    """
    out_vertices: list[Vertex] = [v for v in graph.vertices if v.is_output]
    display_counts = Counter(v.display_name for v in out_vertices)

    formatted: list[Output] = []
    for vertex in out_vertices:
        single_output = len(vertex.outputs) == 1
        for raw_output in vertex.outputs:
            unique_name = self._get_ioput_name(vertex.id, raw_output.get("name"))
            out = Output(**raw_output)
            out.name = unique_name
            out.method = self._register_flow_output_method(
                vertex_id=vertex.id,
                output_name=raw_output.get("name"),
            )
            vertex_dn = vertex.display_name
            if single_output and display_counts[vertex_dn] == 1:
                out.display_name = vertex_dn
            elif display_counts[vertex_dn] == 1:
                # output.display_name potentially collides w/ those of other vertices
                out.display_name = f"{out.display_name} ({vertex_dn})"
            else:
                # output.display_name collides w/ those of duplicate vertices
                out.display_name = f"{out.display_name}-{vertex.id}"
            formatted.append(out)

    return formatted
|
|
475
|
+
|
|
476
|
+
def _get_ioput_name(
|
|
477
|
+
self,
|
|
478
|
+
vertex_id: str,
|
|
479
|
+
ioput_name: str,
|
|
480
|
+
) -> str:
|
|
481
|
+
"""Helper for joining a vertex id and input/output name to form a unique input/output name.
|
|
482
|
+
|
|
483
|
+
Args:
|
|
484
|
+
vertex_id: The ID of the vertex who's input/output name is being generated.
|
|
485
|
+
ioput_name: The name of the input/output to get the name for.
|
|
486
|
+
|
|
487
|
+
Returns:
|
|
488
|
+
A unique output name for the given vertex's output.
|
|
489
|
+
"""
|
|
490
|
+
if not vertex_id or not ioput_name:
|
|
491
|
+
msg = "Vertex ID and input/output name are required"
|
|
492
|
+
raise ValueError(msg)
|
|
493
|
+
return f"{vertex_id}{self.IOPUT_SEP}{ioput_name}"
|
|
494
|
+
|
|
495
|
+
################################################################
|
|
496
|
+
# Flow execution
|
|
497
|
+
################################################################
|
|
498
|
+
async def _run_flow_with_cached_graph(
    self,
    *,
    user_id: str | None = None,
    tweaks: dict | None = None,
    inputs: dict | list[dict] | None = None,
    output_type: str = "any",  # "any" is used to return all outputs
):
    """Execute the currently selected flow on its (possibly cached) graph.

    Loads the graph for the selected flow, applies any tweaks to it, and
    delegates execution to ``run_flow``.

    Args:
        user_id: The ID of the user running the flow.
        tweaks: Optional per-node tweaks applied to the graph before running.
        inputs: Optional run inputs forwarded to ``run_flow``.
        output_type: The output type filter; "any" returns all outputs.

    Returns:
        The result of ``run_flow``.
    """
    flow_graph = await self.get_graph(
        flow_name_selected=self.flow_name_selected,
        flow_id_selected=self.flow_id_selected,
        updated_at=self._cached_flow_updated_at,
    )
    if tweaks:
        flow_graph = process_tweaks_on_graph(flow_graph, tweaks)

    return await run_flow(
        inputs=inputs,
        flow_id=self.flow_id_selected,
        flow_name=self.flow_name_selected,
        user_id=user_id,
        session_id=self.session_id,
        output_type=output_type,
        graph=flow_graph,
    )
|
|
523
|
+
|
|
524
|
+
################################################################
|
|
525
|
+
# Flow cache utils
|
|
526
|
+
################################################################
|
|
527
|
+
def _flow_cache_call(self, action: str, *args, **kwargs):
|
|
528
|
+
"""Call a flow cache related method."""
|
|
529
|
+
if not self.cache_flow:
|
|
530
|
+
msg = "Cache flow is disabled"
|
|
531
|
+
logger.warning(msg)
|
|
532
|
+
return None
|
|
533
|
+
if self._shared_component_cache is None:
|
|
534
|
+
logger.warning("Shared component cache is not available")
|
|
535
|
+
return None
|
|
536
|
+
|
|
537
|
+
handler = self._cache_flow_dispatcher.get(action)
|
|
538
|
+
if handler is None:
|
|
539
|
+
msg = f"Unknown cache action: {action}"
|
|
540
|
+
raise ValueError(msg)
|
|
541
|
+
try:
|
|
542
|
+
return handler(*args, **kwargs)
|
|
543
|
+
except Exception as exc: # noqa: BLE001
|
|
544
|
+
key = kwargs.get("cache_key") or kwargs.get("flow_name") or kwargs.get("flow_name_selected")
|
|
545
|
+
if not key and args:
|
|
546
|
+
key = args[0]
|
|
547
|
+
logger.warning("Cache %s failed for key %s: %s", action, key or "[missing key]", exc)
|
|
548
|
+
return None
|
|
549
|
+
|
|
550
|
+
def _get_cached_flow(self, *, flow_id: str | None = None) -> Graph | None:
    """Return the cached graph for ``flow_id``, or None when unavailable.

    Distinguishes an explicit cache miss (debug log) from a present but
    empty entry (warning log); both yield None.
    """
    key = self._build_flow_cache_key(flow_id=flow_id)
    entry = self._shared_component_cache.get(key)
    if isinstance(entry, CacheMiss):
        logger.debug(f"{entry} for key {key}")
        return None
    if not entry:
        logger.warning(f"None or empty cache entry ({entry}) for key {key}")
        return None
    return self._build_graph_from_dict(cache_entry=entry)
|
|
560
|
+
|
|
561
|
+
def _set_cached_flow(self, *, flow: Graph) -> None:
|
|
562
|
+
graph_dump = flow.dump()
|
|
563
|
+
payload = {
|
|
564
|
+
"graph_dump": graph_dump,
|
|
565
|
+
"flow_id": flow.flow_id,
|
|
566
|
+
"user_id": self.user_id,
|
|
567
|
+
"description": flow.description or graph_dump.get("description"),
|
|
568
|
+
"updated_at": flow.updated_at or graph_dump.get("updated_at"),
|
|
569
|
+
}
|
|
570
|
+
cache_key = self._build_flow_cache_key(flow_id=flow.flow_id)
|
|
571
|
+
self._shared_component_cache.set(cache_key, payload)
|
|
572
|
+
|
|
573
|
+
def _build_flow_cache_key(self, *, flow_id: str | None = None) -> str | None:
|
|
574
|
+
"""Build a cache key for a flow.
|
|
575
|
+
|
|
576
|
+
Raises a ValueError if the user or flow ID is not provided.
|
|
577
|
+
|
|
578
|
+
Args:
|
|
579
|
+
flow_id: The ID of the flow to build the cache key for.
|
|
580
|
+
|
|
581
|
+
Returns:
|
|
582
|
+
The cache key for the flow.
|
|
583
|
+
"""
|
|
584
|
+
if not (self.user_id and flow_id):
|
|
585
|
+
msg = "Failed to build cache key: Flow ID and user ID are required"
|
|
586
|
+
raise ValueError(msg)
|
|
587
|
+
return f"run_flow:{self.user_id}:{flow_id or 'missing_id'}"
|
|
588
|
+
|
|
589
|
+
def _build_graph_from_dict(self, *, cache_entry: dict[str, Any]) -> Graph | None:
    """Reconstruct a Graph from a cached payload dict.

    Returns None when the entry carries no graph dump. Description and
    updated_at are restored from the entry, falling back to the dump.
    """
    graph_dump = cache_entry.get("graph_dump")
    if not graph_dump:
        return None
    graph = Graph.from_payload(
        payload=graph_dump.get("data", {}),
        flow_id=cache_entry.get("flow_id"),
        flow_name=cache_entry.get("flow_name"),
        user_id=cache_entry.get("user_id"),
    )
    graph.description = cache_entry.get("description") or graph_dump.get("description")
    graph.updated_at = cache_entry.get("updated_at") or graph_dump.get("updated_at")
    return graph
|
|
601
|
+
|
|
602
|
+
def _is_cached_flow_up_to_date(self, cached_flow: Graph, updated_at: str | None) -> bool:
|
|
603
|
+
if not updated_at or not (cached_ts := getattr(cached_flow, "updated_at", None)):
|
|
604
|
+
return False # both timetamps must be present
|
|
605
|
+
return self._parse_timestamp(cached_ts) >= self._parse_timestamp(updated_at)
|
|
606
|
+
|
|
607
|
+
@staticmethod
|
|
608
|
+
def _parse_timestamp(value: str | None) -> datetime | None:
|
|
609
|
+
from datetime import timezone
|
|
610
|
+
|
|
611
|
+
if not value:
|
|
612
|
+
return None
|
|
613
|
+
try:
|
|
614
|
+
dt = datetime.fromisoformat(value.replace("Z", "+00:00"))
|
|
615
|
+
return dt.replace(tzinfo=timezone.utc, microsecond=0)
|
|
616
|
+
except ValueError:
|
|
617
|
+
logger.warning("Invalid updated_at value: %s", value)
|
|
618
|
+
return None
|
|
619
|
+
|
|
620
|
+
def _delete_cached_flow(self, flow_id: str | None) -> None:
|
|
621
|
+
"""Remove the flow with the given ID or name from cache.
|
|
622
|
+
|
|
623
|
+
Args:
|
|
624
|
+
flow_id: The ID of the flow to delete from cache.
|
|
625
|
+
flow_name: The name of the flow to delete from cache.
|
|
626
|
+
|
|
627
|
+
Returns:
|
|
628
|
+
None
|
|
629
|
+
"""
|
|
630
|
+
err_msg_prefix = "Failed to delete user flow from cache"
|
|
631
|
+
if self._shared_component_cache is None:
|
|
632
|
+
msg = f"{err_msg_prefix}: Shared component cache is not available"
|
|
633
|
+
raise ValueError(msg)
|
|
634
|
+
if not self.user_id:
|
|
635
|
+
msg = f"{err_msg_prefix}: Please provide your user ID"
|
|
636
|
+
raise ValueError(msg)
|
|
637
|
+
if not flow_id or not flow_id.strip():
|
|
638
|
+
msg = f"{err_msg_prefix}: Please provide a valid flow ID"
|
|
639
|
+
raise ValueError(msg)
|
|
640
|
+
|
|
641
|
+
self._shared_component_cache.delete(self._build_flow_cache_key(flow_id=flow_id))
|
|
642
|
+
|
|
643
|
+
################################################################
|
|
644
|
+
# Build inputs and flow tweak data
|
|
645
|
+
################################################################
|
|
646
|
+
def _extract_tweaks_from_keyed_values(
|
|
647
|
+
self,
|
|
648
|
+
values: dict[str, Any] | None,
|
|
649
|
+
) -> dict[str, dict[str, Any]]:
|
|
650
|
+
tweaks: dict[str, dict[str, Any]] = {}
|
|
651
|
+
if not values:
|
|
652
|
+
return tweaks
|
|
653
|
+
for field_name, field_value in values.items():
|
|
654
|
+
if self.IOPUT_SEP not in field_name:
|
|
655
|
+
continue
|
|
656
|
+
node_id, param_name = field_name.split(self.IOPUT_SEP, 1)
|
|
657
|
+
tweaks.setdefault(node_id, {})[param_name] = field_value
|
|
658
|
+
return tweaks
|
|
659
|
+
|
|
660
|
+
def _build_inputs_from_tweaks(
|
|
661
|
+
self,
|
|
662
|
+
tweaks: dict[str, dict[str, Any]],
|
|
663
|
+
) -> list[dict[str, Any]]:
|
|
664
|
+
inputs: list[dict[str, Any]] = []
|
|
665
|
+
for vertex_id, params in tweaks.items():
|
|
666
|
+
if "input_value" not in params:
|
|
667
|
+
continue
|
|
668
|
+
payload: dict[str, Any] = {
|
|
669
|
+
"components": [vertex_id],
|
|
670
|
+
"input_value": params["input_value"],
|
|
671
|
+
}
|
|
672
|
+
if params.get("type"):
|
|
673
|
+
payload["type"] = params["type"]
|
|
674
|
+
inputs.append(payload)
|
|
675
|
+
return inputs
|
|
676
|
+
|
|
677
|
+
def _get_selected_flow_updated_at(self) -> str | None:
|
|
678
|
+
updated_at = (
|
|
679
|
+
getattr(self, "_vertex", {})
|
|
680
|
+
.data.get("node", {})
|
|
681
|
+
.get("template", {})
|
|
682
|
+
.get("flow_name_selected", {})
|
|
683
|
+
.get("selected_metadata", {})
|
|
684
|
+
.get("updated_at", None)
|
|
685
|
+
)
|
|
686
|
+
if updated_at:
|
|
687
|
+
return updated_at
|
|
688
|
+
return self._attributes.get("flow_name_selected_updated_at")
|
|
689
|
+
|
|
690
|
+
def _pre_run_setup(self) -> None: # Note: overrides the base pre_run_setup method
|
|
691
|
+
"""Reset the last run's outputs upon new flow execution."""
|
|
692
|
+
self._last_run_outputs = None
|
|
693
|
+
self._cached_flow_updated_at = self._get_selected_flow_updated_at()
|
|
694
|
+
if self._cached_flow_updated_at:
|
|
695
|
+
self._attributes["flow_name_selected_updated_at"] = self._cached_flow_updated_at
|
|
696
|
+
self._attributes["flow_tweak_data"] = {}
|
|
697
|
+
self.flow_tweak_data = self._extract_tweaks_from_keyed_values(self._attributes)
|
|
698
|
+
self._flow_run_inputs = self._build_inputs_from_tweaks(self.flow_tweak_data)
|