lfx-nightly 0.2.0.dev25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lfx-nightly might be problematic; see the registry's advisory page for more details.

Files changed (769)
  1. lfx/__init__.py +0 -0
  2. lfx/__main__.py +25 -0
  3. lfx/_assets/component_index.json +1 -0
  4. lfx/base/__init__.py +0 -0
  5. lfx/base/agents/__init__.py +0 -0
  6. lfx/base/agents/agent.py +375 -0
  7. lfx/base/agents/altk_base_agent.py +380 -0
  8. lfx/base/agents/altk_tool_wrappers.py +565 -0
  9. lfx/base/agents/callback.py +130 -0
  10. lfx/base/agents/context.py +109 -0
  11. lfx/base/agents/crewai/__init__.py +0 -0
  12. lfx/base/agents/crewai/crew.py +231 -0
  13. lfx/base/agents/crewai/tasks.py +12 -0
  14. lfx/base/agents/default_prompts.py +23 -0
  15. lfx/base/agents/errors.py +15 -0
  16. lfx/base/agents/events.py +430 -0
  17. lfx/base/agents/utils.py +237 -0
  18. lfx/base/astra_assistants/__init__.py +0 -0
  19. lfx/base/astra_assistants/util.py +171 -0
  20. lfx/base/chains/__init__.py +0 -0
  21. lfx/base/chains/model.py +19 -0
  22. lfx/base/composio/__init__.py +0 -0
  23. lfx/base/composio/composio_base.py +2584 -0
  24. lfx/base/compressors/__init__.py +0 -0
  25. lfx/base/compressors/model.py +60 -0
  26. lfx/base/constants.py +46 -0
  27. lfx/base/curl/__init__.py +0 -0
  28. lfx/base/curl/parse.py +188 -0
  29. lfx/base/data/__init__.py +5 -0
  30. lfx/base/data/base_file.py +810 -0
  31. lfx/base/data/docling_utils.py +338 -0
  32. lfx/base/data/storage_utils.py +192 -0
  33. lfx/base/data/utils.py +362 -0
  34. lfx/base/datastax/__init__.py +5 -0
  35. lfx/base/datastax/astradb_base.py +896 -0
  36. lfx/base/document_transformers/__init__.py +0 -0
  37. lfx/base/document_transformers/model.py +43 -0
  38. lfx/base/embeddings/__init__.py +0 -0
  39. lfx/base/embeddings/aiml_embeddings.py +62 -0
  40. lfx/base/embeddings/embeddings_class.py +113 -0
  41. lfx/base/embeddings/model.py +26 -0
  42. lfx/base/flow_processing/__init__.py +0 -0
  43. lfx/base/flow_processing/utils.py +86 -0
  44. lfx/base/huggingface/__init__.py +0 -0
  45. lfx/base/huggingface/model_bridge.py +133 -0
  46. lfx/base/io/__init__.py +0 -0
  47. lfx/base/io/chat.py +21 -0
  48. lfx/base/io/text.py +22 -0
  49. lfx/base/knowledge_bases/__init__.py +3 -0
  50. lfx/base/knowledge_bases/knowledge_base_utils.py +137 -0
  51. lfx/base/langchain_utilities/__init__.py +0 -0
  52. lfx/base/langchain_utilities/model.py +35 -0
  53. lfx/base/langchain_utilities/spider_constants.py +1 -0
  54. lfx/base/langwatch/__init__.py +0 -0
  55. lfx/base/langwatch/utils.py +18 -0
  56. lfx/base/mcp/__init__.py +0 -0
  57. lfx/base/mcp/constants.py +2 -0
  58. lfx/base/mcp/util.py +1659 -0
  59. lfx/base/memory/__init__.py +0 -0
  60. lfx/base/memory/memory.py +49 -0
  61. lfx/base/memory/model.py +38 -0
  62. lfx/base/models/__init__.py +3 -0
  63. lfx/base/models/aiml_constants.py +51 -0
  64. lfx/base/models/anthropic_constants.py +51 -0
  65. lfx/base/models/aws_constants.py +151 -0
  66. lfx/base/models/chat_result.py +76 -0
  67. lfx/base/models/cometapi_constants.py +54 -0
  68. lfx/base/models/google_generative_ai_constants.py +70 -0
  69. lfx/base/models/google_generative_ai_model.py +38 -0
  70. lfx/base/models/groq_constants.py +150 -0
  71. lfx/base/models/groq_model_discovery.py +265 -0
  72. lfx/base/models/model.py +375 -0
  73. lfx/base/models/model_input_constants.py +378 -0
  74. lfx/base/models/model_metadata.py +41 -0
  75. lfx/base/models/model_utils.py +108 -0
  76. lfx/base/models/novita_constants.py +35 -0
  77. lfx/base/models/ollama_constants.py +52 -0
  78. lfx/base/models/openai_constants.py +129 -0
  79. lfx/base/models/sambanova_constants.py +18 -0
  80. lfx/base/models/watsonx_constants.py +36 -0
  81. lfx/base/processing/__init__.py +0 -0
  82. lfx/base/prompts/__init__.py +0 -0
  83. lfx/base/prompts/api_utils.py +224 -0
  84. lfx/base/prompts/utils.py +61 -0
  85. lfx/base/textsplitters/__init__.py +0 -0
  86. lfx/base/textsplitters/model.py +28 -0
  87. lfx/base/tools/__init__.py +0 -0
  88. lfx/base/tools/base.py +26 -0
  89. lfx/base/tools/component_tool.py +325 -0
  90. lfx/base/tools/constants.py +49 -0
  91. lfx/base/tools/flow_tool.py +132 -0
  92. lfx/base/tools/run_flow.py +698 -0
  93. lfx/base/vectorstores/__init__.py +0 -0
  94. lfx/base/vectorstores/model.py +193 -0
  95. lfx/base/vectorstores/utils.py +22 -0
  96. lfx/base/vectorstores/vector_store_connection_decorator.py +52 -0
  97. lfx/cli/__init__.py +5 -0
  98. lfx/cli/commands.py +327 -0
  99. lfx/cli/common.py +650 -0
  100. lfx/cli/run.py +506 -0
  101. lfx/cli/script_loader.py +289 -0
  102. lfx/cli/serve_app.py +546 -0
  103. lfx/cli/validation.py +69 -0
  104. lfx/components/FAISS/__init__.py +34 -0
  105. lfx/components/FAISS/faiss.py +111 -0
  106. lfx/components/Notion/__init__.py +19 -0
  107. lfx/components/Notion/add_content_to_page.py +269 -0
  108. lfx/components/Notion/create_page.py +94 -0
  109. lfx/components/Notion/list_database_properties.py +68 -0
  110. lfx/components/Notion/list_pages.py +122 -0
  111. lfx/components/Notion/list_users.py +77 -0
  112. lfx/components/Notion/page_content_viewer.py +93 -0
  113. lfx/components/Notion/search.py +111 -0
  114. lfx/components/Notion/update_page_property.py +114 -0
  115. lfx/components/__init__.py +428 -0
  116. lfx/components/_importing.py +42 -0
  117. lfx/components/agentql/__init__.py +3 -0
  118. lfx/components/agentql/agentql_api.py +151 -0
  119. lfx/components/aiml/__init__.py +37 -0
  120. lfx/components/aiml/aiml.py +115 -0
  121. lfx/components/aiml/aiml_embeddings.py +37 -0
  122. lfx/components/altk/__init__.py +34 -0
  123. lfx/components/altk/altk_agent.py +193 -0
  124. lfx/components/amazon/__init__.py +36 -0
  125. lfx/components/amazon/amazon_bedrock_converse.py +195 -0
  126. lfx/components/amazon/amazon_bedrock_embedding.py +109 -0
  127. lfx/components/amazon/amazon_bedrock_model.py +130 -0
  128. lfx/components/amazon/s3_bucket_uploader.py +211 -0
  129. lfx/components/anthropic/__init__.py +34 -0
  130. lfx/components/anthropic/anthropic.py +187 -0
  131. lfx/components/apify/__init__.py +5 -0
  132. lfx/components/apify/apify_actor.py +325 -0
  133. lfx/components/arxiv/__init__.py +3 -0
  134. lfx/components/arxiv/arxiv.py +169 -0
  135. lfx/components/assemblyai/__init__.py +46 -0
  136. lfx/components/assemblyai/assemblyai_get_subtitles.py +83 -0
  137. lfx/components/assemblyai/assemblyai_lemur.py +183 -0
  138. lfx/components/assemblyai/assemblyai_list_transcripts.py +95 -0
  139. lfx/components/assemblyai/assemblyai_poll_transcript.py +72 -0
  140. lfx/components/assemblyai/assemblyai_start_transcript.py +188 -0
  141. lfx/components/azure/__init__.py +37 -0
  142. lfx/components/azure/azure_openai.py +95 -0
  143. lfx/components/azure/azure_openai_embeddings.py +83 -0
  144. lfx/components/baidu/__init__.py +32 -0
  145. lfx/components/baidu/baidu_qianfan_chat.py +113 -0
  146. lfx/components/bing/__init__.py +3 -0
  147. lfx/components/bing/bing_search_api.py +61 -0
  148. lfx/components/cassandra/__init__.py +40 -0
  149. lfx/components/cassandra/cassandra.py +264 -0
  150. lfx/components/cassandra/cassandra_chat.py +92 -0
  151. lfx/components/cassandra/cassandra_graph.py +238 -0
  152. lfx/components/chains/__init__.py +3 -0
  153. lfx/components/chroma/__init__.py +34 -0
  154. lfx/components/chroma/chroma.py +169 -0
  155. lfx/components/cleanlab/__init__.py +40 -0
  156. lfx/components/cleanlab/cleanlab_evaluator.py +155 -0
  157. lfx/components/cleanlab/cleanlab_rag_evaluator.py +254 -0
  158. lfx/components/cleanlab/cleanlab_remediator.py +131 -0
  159. lfx/components/clickhouse/__init__.py +34 -0
  160. lfx/components/clickhouse/clickhouse.py +135 -0
  161. lfx/components/cloudflare/__init__.py +32 -0
  162. lfx/components/cloudflare/cloudflare.py +81 -0
  163. lfx/components/cohere/__init__.py +40 -0
  164. lfx/components/cohere/cohere_embeddings.py +81 -0
  165. lfx/components/cohere/cohere_models.py +46 -0
  166. lfx/components/cohere/cohere_rerank.py +51 -0
  167. lfx/components/cometapi/__init__.py +32 -0
  168. lfx/components/cometapi/cometapi.py +166 -0
  169. lfx/components/composio/__init__.py +222 -0
  170. lfx/components/composio/agentql_composio.py +11 -0
  171. lfx/components/composio/agiled_composio.py +11 -0
  172. lfx/components/composio/airtable_composio.py +11 -0
  173. lfx/components/composio/apollo_composio.py +11 -0
  174. lfx/components/composio/asana_composio.py +11 -0
  175. lfx/components/composio/attio_composio.py +11 -0
  176. lfx/components/composio/bitbucket_composio.py +11 -0
  177. lfx/components/composio/bolna_composio.py +11 -0
  178. lfx/components/composio/brightdata_composio.py +11 -0
  179. lfx/components/composio/calendly_composio.py +11 -0
  180. lfx/components/composio/canva_composio.py +11 -0
  181. lfx/components/composio/canvas_composio.py +11 -0
  182. lfx/components/composio/coda_composio.py +11 -0
  183. lfx/components/composio/composio_api.py +278 -0
  184. lfx/components/composio/contentful_composio.py +11 -0
  185. lfx/components/composio/digicert_composio.py +11 -0
  186. lfx/components/composio/discord_composio.py +11 -0
  187. lfx/components/composio/dropbox_compnent.py +11 -0
  188. lfx/components/composio/elevenlabs_composio.py +11 -0
  189. lfx/components/composio/exa_composio.py +11 -0
  190. lfx/components/composio/figma_composio.py +11 -0
  191. lfx/components/composio/finage_composio.py +11 -0
  192. lfx/components/composio/firecrawl_composio.py +11 -0
  193. lfx/components/composio/fireflies_composio.py +11 -0
  194. lfx/components/composio/fixer_composio.py +11 -0
  195. lfx/components/composio/flexisign_composio.py +11 -0
  196. lfx/components/composio/freshdesk_composio.py +11 -0
  197. lfx/components/composio/github_composio.py +11 -0
  198. lfx/components/composio/gmail_composio.py +38 -0
  199. lfx/components/composio/googlebigquery_composio.py +11 -0
  200. lfx/components/composio/googlecalendar_composio.py +11 -0
  201. lfx/components/composio/googleclassroom_composio.py +11 -0
  202. lfx/components/composio/googledocs_composio.py +11 -0
  203. lfx/components/composio/googlemeet_composio.py +11 -0
  204. lfx/components/composio/googlesheets_composio.py +11 -0
  205. lfx/components/composio/googletasks_composio.py +8 -0
  206. lfx/components/composio/heygen_composio.py +11 -0
  207. lfx/components/composio/instagram_composio.py +11 -0
  208. lfx/components/composio/jira_composio.py +11 -0
  209. lfx/components/composio/jotform_composio.py +11 -0
  210. lfx/components/composio/klaviyo_composio.py +11 -0
  211. lfx/components/composio/linear_composio.py +11 -0
  212. lfx/components/composio/listennotes_composio.py +11 -0
  213. lfx/components/composio/mem0_composio.py +11 -0
  214. lfx/components/composio/miro_composio.py +11 -0
  215. lfx/components/composio/missive_composio.py +11 -0
  216. lfx/components/composio/notion_composio.py +11 -0
  217. lfx/components/composio/onedrive_composio.py +11 -0
  218. lfx/components/composio/outlook_composio.py +11 -0
  219. lfx/components/composio/pandadoc_composio.py +11 -0
  220. lfx/components/composio/peopledatalabs_composio.py +11 -0
  221. lfx/components/composio/perplexityai_composio.py +11 -0
  222. lfx/components/composio/reddit_composio.py +11 -0
  223. lfx/components/composio/serpapi_composio.py +11 -0
  224. lfx/components/composio/slack_composio.py +11 -0
  225. lfx/components/composio/slackbot_composio.py +11 -0
  226. lfx/components/composio/snowflake_composio.py +11 -0
  227. lfx/components/composio/supabase_composio.py +11 -0
  228. lfx/components/composio/tavily_composio.py +11 -0
  229. lfx/components/composio/timelinesai_composio.py +11 -0
  230. lfx/components/composio/todoist_composio.py +11 -0
  231. lfx/components/composio/wrike_composio.py +11 -0
  232. lfx/components/composio/youtube_composio.py +11 -0
  233. lfx/components/confluence/__init__.py +3 -0
  234. lfx/components/confluence/confluence.py +84 -0
  235. lfx/components/couchbase/__init__.py +34 -0
  236. lfx/components/couchbase/couchbase.py +102 -0
  237. lfx/components/crewai/__init__.py +49 -0
  238. lfx/components/crewai/crewai.py +108 -0
  239. lfx/components/crewai/hierarchical_crew.py +47 -0
  240. lfx/components/crewai/hierarchical_task.py +45 -0
  241. lfx/components/crewai/sequential_crew.py +53 -0
  242. lfx/components/crewai/sequential_task.py +74 -0
  243. lfx/components/crewai/sequential_task_agent.py +144 -0
  244. lfx/components/cuga/__init__.py +34 -0
  245. lfx/components/cuga/cuga_agent.py +730 -0
  246. lfx/components/custom_component/__init__.py +34 -0
  247. lfx/components/custom_component/custom_component.py +31 -0
  248. lfx/components/data/__init__.py +114 -0
  249. lfx/components/data_source/__init__.py +58 -0
  250. lfx/components/data_source/api_request.py +577 -0
  251. lfx/components/data_source/csv_to_data.py +101 -0
  252. lfx/components/data_source/json_to_data.py +106 -0
  253. lfx/components/data_source/mock_data.py +398 -0
  254. lfx/components/data_source/news_search.py +166 -0
  255. lfx/components/data_source/rss.py +71 -0
  256. lfx/components/data_source/sql_executor.py +101 -0
  257. lfx/components/data_source/url.py +311 -0
  258. lfx/components/data_source/web_search.py +326 -0
  259. lfx/components/datastax/__init__.py +76 -0
  260. lfx/components/datastax/astradb_assistant_manager.py +307 -0
  261. lfx/components/datastax/astradb_chatmemory.py +40 -0
  262. lfx/components/datastax/astradb_cql.py +288 -0
  263. lfx/components/datastax/astradb_graph.py +217 -0
  264. lfx/components/datastax/astradb_tool.py +378 -0
  265. lfx/components/datastax/astradb_vectorize.py +122 -0
  266. lfx/components/datastax/astradb_vectorstore.py +449 -0
  267. lfx/components/datastax/create_assistant.py +59 -0
  268. lfx/components/datastax/create_thread.py +33 -0
  269. lfx/components/datastax/dotenv.py +36 -0
  270. lfx/components/datastax/get_assistant.py +38 -0
  271. lfx/components/datastax/getenvvar.py +31 -0
  272. lfx/components/datastax/graph_rag.py +141 -0
  273. lfx/components/datastax/hcd.py +315 -0
  274. lfx/components/datastax/list_assistants.py +26 -0
  275. lfx/components/datastax/run.py +90 -0
  276. lfx/components/deactivated/__init__.py +15 -0
  277. lfx/components/deactivated/amazon_kendra.py +66 -0
  278. lfx/components/deactivated/chat_litellm_model.py +158 -0
  279. lfx/components/deactivated/code_block_extractor.py +26 -0
  280. lfx/components/deactivated/documents_to_data.py +22 -0
  281. lfx/components/deactivated/embed.py +16 -0
  282. lfx/components/deactivated/extract_key_from_data.py +46 -0
  283. lfx/components/deactivated/json_document_builder.py +57 -0
  284. lfx/components/deactivated/list_flows.py +20 -0
  285. lfx/components/deactivated/mcp_sse.py +61 -0
  286. lfx/components/deactivated/mcp_stdio.py +62 -0
  287. lfx/components/deactivated/merge_data.py +93 -0
  288. lfx/components/deactivated/message.py +37 -0
  289. lfx/components/deactivated/metal.py +54 -0
  290. lfx/components/deactivated/multi_query.py +59 -0
  291. lfx/components/deactivated/retriever.py +43 -0
  292. lfx/components/deactivated/selective_passthrough.py +77 -0
  293. lfx/components/deactivated/should_run_next.py +40 -0
  294. lfx/components/deactivated/split_text.py +63 -0
  295. lfx/components/deactivated/store_message.py +24 -0
  296. lfx/components/deactivated/sub_flow.py +124 -0
  297. lfx/components/deactivated/vectara_self_query.py +76 -0
  298. lfx/components/deactivated/vector_store.py +24 -0
  299. lfx/components/deepseek/__init__.py +34 -0
  300. lfx/components/deepseek/deepseek.py +136 -0
  301. lfx/components/docling/__init__.py +43 -0
  302. lfx/components/docling/chunk_docling_document.py +186 -0
  303. lfx/components/docling/docling_inline.py +238 -0
  304. lfx/components/docling/docling_remote.py +195 -0
  305. lfx/components/docling/export_docling_document.py +117 -0
  306. lfx/components/documentloaders/__init__.py +3 -0
  307. lfx/components/duckduckgo/__init__.py +3 -0
  308. lfx/components/duckduckgo/duck_duck_go_search_run.py +92 -0
  309. lfx/components/elastic/__init__.py +37 -0
  310. lfx/components/elastic/elasticsearch.py +267 -0
  311. lfx/components/elastic/opensearch.py +789 -0
  312. lfx/components/elastic/opensearch_multimodal.py +1575 -0
  313. lfx/components/embeddings/__init__.py +37 -0
  314. lfx/components/embeddings/similarity.py +77 -0
  315. lfx/components/embeddings/text_embedder.py +65 -0
  316. lfx/components/exa/__init__.py +3 -0
  317. lfx/components/exa/exa_search.py +68 -0
  318. lfx/components/files_and_knowledge/__init__.py +47 -0
  319. lfx/components/files_and_knowledge/directory.py +113 -0
  320. lfx/components/files_and_knowledge/file.py +841 -0
  321. lfx/components/files_and_knowledge/ingestion.py +694 -0
  322. lfx/components/files_and_knowledge/retrieval.py +264 -0
  323. lfx/components/files_and_knowledge/save_file.py +746 -0
  324. lfx/components/firecrawl/__init__.py +43 -0
  325. lfx/components/firecrawl/firecrawl_crawl_api.py +88 -0
  326. lfx/components/firecrawl/firecrawl_extract_api.py +136 -0
  327. lfx/components/firecrawl/firecrawl_map_api.py +89 -0
  328. lfx/components/firecrawl/firecrawl_scrape_api.py +73 -0
  329. lfx/components/flow_controls/__init__.py +58 -0
  330. lfx/components/flow_controls/conditional_router.py +208 -0
  331. lfx/components/flow_controls/data_conditional_router.py +126 -0
  332. lfx/components/flow_controls/flow_tool.py +111 -0
  333. lfx/components/flow_controls/listen.py +29 -0
  334. lfx/components/flow_controls/loop.py +163 -0
  335. lfx/components/flow_controls/notify.py +88 -0
  336. lfx/components/flow_controls/pass_message.py +36 -0
  337. lfx/components/flow_controls/run_flow.py +108 -0
  338. lfx/components/flow_controls/sub_flow.py +115 -0
  339. lfx/components/git/__init__.py +4 -0
  340. lfx/components/git/git.py +262 -0
  341. lfx/components/git/gitextractor.py +196 -0
  342. lfx/components/glean/__init__.py +3 -0
  343. lfx/components/glean/glean_search_api.py +173 -0
  344. lfx/components/google/__init__.py +17 -0
  345. lfx/components/google/gmail.py +193 -0
  346. lfx/components/google/google_bq_sql_executor.py +157 -0
  347. lfx/components/google/google_drive.py +92 -0
  348. lfx/components/google/google_drive_search.py +152 -0
  349. lfx/components/google/google_generative_ai.py +144 -0
  350. lfx/components/google/google_generative_ai_embeddings.py +141 -0
  351. lfx/components/google/google_oauth_token.py +89 -0
  352. lfx/components/google/google_search_api_core.py +68 -0
  353. lfx/components/google/google_serper_api_core.py +74 -0
  354. lfx/components/groq/__init__.py +34 -0
  355. lfx/components/groq/groq.py +143 -0
  356. lfx/components/helpers/__init__.py +154 -0
  357. lfx/components/homeassistant/__init__.py +7 -0
  358. lfx/components/homeassistant/home_assistant_control.py +152 -0
  359. lfx/components/homeassistant/list_home_assistant_states.py +137 -0
  360. lfx/components/huggingface/__init__.py +37 -0
  361. lfx/components/huggingface/huggingface.py +199 -0
  362. lfx/components/huggingface/huggingface_inference_api.py +106 -0
  363. lfx/components/ibm/__init__.py +34 -0
  364. lfx/components/ibm/watsonx.py +207 -0
  365. lfx/components/ibm/watsonx_embeddings.py +135 -0
  366. lfx/components/icosacomputing/__init__.py +5 -0
  367. lfx/components/icosacomputing/combinatorial_reasoner.py +84 -0
  368. lfx/components/input_output/__init__.py +40 -0
  369. lfx/components/input_output/chat.py +109 -0
  370. lfx/components/input_output/chat_output.py +184 -0
  371. lfx/components/input_output/text.py +27 -0
  372. lfx/components/input_output/text_output.py +29 -0
  373. lfx/components/input_output/webhook.py +56 -0
  374. lfx/components/jigsawstack/__init__.py +23 -0
  375. lfx/components/jigsawstack/ai_scrape.py +126 -0
  376. lfx/components/jigsawstack/ai_web_search.py +136 -0
  377. lfx/components/jigsawstack/file_read.py +115 -0
  378. lfx/components/jigsawstack/file_upload.py +94 -0
  379. lfx/components/jigsawstack/image_generation.py +205 -0
  380. lfx/components/jigsawstack/nsfw.py +60 -0
  381. lfx/components/jigsawstack/object_detection.py +124 -0
  382. lfx/components/jigsawstack/sentiment.py +112 -0
  383. lfx/components/jigsawstack/text_to_sql.py +90 -0
  384. lfx/components/jigsawstack/text_translate.py +77 -0
  385. lfx/components/jigsawstack/vocr.py +107 -0
  386. lfx/components/knowledge_bases/__init__.py +89 -0
  387. lfx/components/langchain_utilities/__init__.py +109 -0
  388. lfx/components/langchain_utilities/character.py +53 -0
  389. lfx/components/langchain_utilities/conversation.py +59 -0
  390. lfx/components/langchain_utilities/csv_agent.py +175 -0
  391. lfx/components/langchain_utilities/fake_embeddings.py +26 -0
  392. lfx/components/langchain_utilities/html_link_extractor.py +35 -0
  393. lfx/components/langchain_utilities/json_agent.py +100 -0
  394. lfx/components/langchain_utilities/langchain_hub.py +126 -0
  395. lfx/components/langchain_utilities/language_recursive.py +49 -0
  396. lfx/components/langchain_utilities/language_semantic.py +138 -0
  397. lfx/components/langchain_utilities/llm_checker.py +39 -0
  398. lfx/components/langchain_utilities/llm_math.py +42 -0
  399. lfx/components/langchain_utilities/natural_language.py +61 -0
  400. lfx/components/langchain_utilities/openai_tools.py +53 -0
  401. lfx/components/langchain_utilities/openapi.py +48 -0
  402. lfx/components/langchain_utilities/recursive_character.py +60 -0
  403. lfx/components/langchain_utilities/retrieval_qa.py +83 -0
  404. lfx/components/langchain_utilities/runnable_executor.py +137 -0
  405. lfx/components/langchain_utilities/self_query.py +80 -0
  406. lfx/components/langchain_utilities/spider.py +142 -0
  407. lfx/components/langchain_utilities/sql.py +40 -0
  408. lfx/components/langchain_utilities/sql_database.py +35 -0
  409. lfx/components/langchain_utilities/sql_generator.py +78 -0
  410. lfx/components/langchain_utilities/tool_calling.py +59 -0
  411. lfx/components/langchain_utilities/vector_store_info.py +49 -0
  412. lfx/components/langchain_utilities/vector_store_router.py +33 -0
  413. lfx/components/langchain_utilities/xml_agent.py +71 -0
  414. lfx/components/langwatch/__init__.py +3 -0
  415. lfx/components/langwatch/langwatch.py +278 -0
  416. lfx/components/link_extractors/__init__.py +3 -0
  417. lfx/components/llm_operations/__init__.py +46 -0
  418. lfx/components/llm_operations/batch_run.py +205 -0
  419. lfx/components/llm_operations/lambda_filter.py +218 -0
  420. lfx/components/llm_operations/llm_conditional_router.py +421 -0
  421. lfx/components/llm_operations/llm_selector.py +499 -0
  422. lfx/components/llm_operations/structured_output.py +244 -0
  423. lfx/components/lmstudio/__init__.py +34 -0
  424. lfx/components/lmstudio/lmstudioembeddings.py +89 -0
  425. lfx/components/lmstudio/lmstudiomodel.py +133 -0
  426. lfx/components/logic/__init__.py +181 -0
  427. lfx/components/maritalk/__init__.py +32 -0
  428. lfx/components/maritalk/maritalk.py +52 -0
  429. lfx/components/mem0/__init__.py +3 -0
  430. lfx/components/mem0/mem0_chat_memory.py +147 -0
  431. lfx/components/milvus/__init__.py +34 -0
  432. lfx/components/milvus/milvus.py +115 -0
  433. lfx/components/mistral/__init__.py +37 -0
  434. lfx/components/mistral/mistral.py +114 -0
  435. lfx/components/mistral/mistral_embeddings.py +58 -0
  436. lfx/components/models/__init__.py +89 -0
  437. lfx/components/models_and_agents/__init__.py +49 -0
  438. lfx/components/models_and_agents/agent.py +644 -0
  439. lfx/components/models_and_agents/embedding_model.py +423 -0
  440. lfx/components/models_and_agents/language_model.py +398 -0
  441. lfx/components/models_and_agents/mcp_component.py +594 -0
  442. lfx/components/models_and_agents/memory.py +268 -0
  443. lfx/components/models_and_agents/prompt.py +67 -0
  444. lfx/components/mongodb/__init__.py +34 -0
  445. lfx/components/mongodb/mongodb_atlas.py +213 -0
  446. lfx/components/needle/__init__.py +3 -0
  447. lfx/components/needle/needle.py +104 -0
  448. lfx/components/notdiamond/__init__.py +34 -0
  449. lfx/components/notdiamond/notdiamond.py +228 -0
  450. lfx/components/novita/__init__.py +32 -0
  451. lfx/components/novita/novita.py +130 -0
  452. lfx/components/nvidia/__init__.py +57 -0
  453. lfx/components/nvidia/nvidia.py +151 -0
  454. lfx/components/nvidia/nvidia_embedding.py +77 -0
  455. lfx/components/nvidia/nvidia_ingest.py +317 -0
  456. lfx/components/nvidia/nvidia_rerank.py +63 -0
  457. lfx/components/nvidia/system_assist.py +65 -0
  458. lfx/components/olivya/__init__.py +3 -0
  459. lfx/components/olivya/olivya.py +116 -0
  460. lfx/components/ollama/__init__.py +37 -0
  461. lfx/components/ollama/ollama.py +548 -0
  462. lfx/components/ollama/ollama_embeddings.py +103 -0
  463. lfx/components/openai/__init__.py +37 -0
  464. lfx/components/openai/openai.py +100 -0
  465. lfx/components/openai/openai_chat_model.py +176 -0
  466. lfx/components/openrouter/__init__.py +32 -0
  467. lfx/components/openrouter/openrouter.py +104 -0
  468. lfx/components/output_parsers/__init__.py +3 -0
  469. lfx/components/perplexity/__init__.py +34 -0
  470. lfx/components/perplexity/perplexity.py +75 -0
  471. lfx/components/pgvector/__init__.py +34 -0
  472. lfx/components/pgvector/pgvector.py +72 -0
  473. lfx/components/pinecone/__init__.py +34 -0
  474. lfx/components/pinecone/pinecone.py +134 -0
  475. lfx/components/processing/__init__.py +72 -0
  476. lfx/components/processing/alter_metadata.py +109 -0
  477. lfx/components/processing/combine_text.py +40 -0
  478. lfx/components/processing/converter.py +248 -0
  479. lfx/components/processing/create_data.py +111 -0
  480. lfx/components/processing/create_list.py +40 -0
  481. lfx/components/processing/data_operations.py +528 -0
  482. lfx/components/processing/data_to_dataframe.py +71 -0
  483. lfx/components/processing/dataframe_operations.py +313 -0
  484. lfx/components/processing/dataframe_to_toolset.py +259 -0
  485. lfx/components/processing/dynamic_create_data.py +357 -0
  486. lfx/components/processing/extract_key.py +54 -0
  487. lfx/components/processing/filter_data.py +43 -0
  488. lfx/components/processing/filter_data_values.py +89 -0
  489. lfx/components/processing/json_cleaner.py +104 -0
  490. lfx/components/processing/merge_data.py +91 -0
  491. lfx/components/processing/message_to_data.py +37 -0
  492. lfx/components/processing/output_parser.py +46 -0
  493. lfx/components/processing/parse_data.py +71 -0
  494. lfx/components/processing/parse_dataframe.py +69 -0
  495. lfx/components/processing/parse_json_data.py +91 -0
  496. lfx/components/processing/parser.py +148 -0
  497. lfx/components/processing/regex.py +83 -0
  498. lfx/components/processing/select_data.py +49 -0
  499. lfx/components/processing/split_text.py +141 -0
  500. lfx/components/processing/store_message.py +91 -0
  501. lfx/components/processing/update_data.py +161 -0
  502. lfx/components/prototypes/__init__.py +35 -0
  503. lfx/components/prototypes/python_function.py +73 -0
  504. lfx/components/qdrant/__init__.py +34 -0
  505. lfx/components/qdrant/qdrant.py +109 -0
  506. lfx/components/redis/__init__.py +37 -0
  507. lfx/components/redis/redis.py +89 -0
  508. lfx/components/redis/redis_chat.py +43 -0
  509. lfx/components/sambanova/__init__.py +32 -0
  510. lfx/components/sambanova/sambanova.py +84 -0
  511. lfx/components/scrapegraph/__init__.py +40 -0
  512. lfx/components/scrapegraph/scrapegraph_markdownify_api.py +64 -0
  513. lfx/components/scrapegraph/scrapegraph_search_api.py +64 -0
  514. lfx/components/scrapegraph/scrapegraph_smart_scraper_api.py +71 -0
  515. lfx/components/searchapi/__init__.py +34 -0
  516. lfx/components/searchapi/search.py +79 -0
  517. lfx/components/serpapi/__init__.py +3 -0
  518. lfx/components/serpapi/serp.py +115 -0
  519. lfx/components/supabase/__init__.py +34 -0
  520. lfx/components/supabase/supabase.py +76 -0
  521. lfx/components/tavily/__init__.py +4 -0
  522. lfx/components/tavily/tavily_extract.py +117 -0
  523. lfx/components/tavily/tavily_search.py +212 -0
  524. lfx/components/textsplitters/__init__.py +3 -0
  525. lfx/components/toolkits/__init__.py +3 -0
  526. lfx/components/tools/__init__.py +66 -0
  527. lfx/components/tools/calculator.py +109 -0
  528. lfx/components/tools/google_search_api.py +45 -0
  529. lfx/components/tools/google_serper_api.py +115 -0
  530. lfx/components/tools/python_code_structured_tool.py +328 -0
  531. lfx/components/tools/python_repl.py +98 -0
  532. lfx/components/tools/search_api.py +88 -0
  533. lfx/components/tools/searxng.py +145 -0
  534. lfx/components/tools/serp_api.py +120 -0
  535. lfx/components/tools/tavily_search_tool.py +345 -0
  536. lfx/components/tools/wikidata_api.py +103 -0
  537. lfx/components/tools/wikipedia_api.py +50 -0
  538. lfx/components/tools/yahoo_finance.py +130 -0
  539. lfx/components/twelvelabs/__init__.py +52 -0
  540. lfx/components/twelvelabs/convert_astra_results.py +84 -0
  541. lfx/components/twelvelabs/pegasus_index.py +311 -0
  542. lfx/components/twelvelabs/split_video.py +301 -0
  543. lfx/components/twelvelabs/text_embeddings.py +57 -0
  544. lfx/components/twelvelabs/twelvelabs_pegasus.py +408 -0
  545. lfx/components/twelvelabs/video_embeddings.py +100 -0
  546. lfx/components/twelvelabs/video_file.py +191 -0
  547. lfx/components/unstructured/__init__.py +3 -0
  548. lfx/components/unstructured/unstructured.py +121 -0
  549. lfx/components/upstash/__init__.py +34 -0
  550. lfx/components/upstash/upstash.py +124 -0
  551. lfx/components/utilities/__init__.py +43 -0
  552. lfx/components/utilities/calculator_core.py +89 -0
  553. lfx/components/utilities/current_date.py +42 -0
  554. lfx/components/utilities/id_generator.py +42 -0
  555. lfx/components/utilities/python_repl_core.py +98 -0
  556. lfx/components/vectara/__init__.py +37 -0
  557. lfx/components/vectara/vectara.py +97 -0
  558. lfx/components/vectara/vectara_rag.py +164 -0
  559. lfx/components/vectorstores/__init__.py +34 -0
  560. lfx/components/vectorstores/local_db.py +270 -0
  561. lfx/components/vertexai/__init__.py +37 -0
  562. lfx/components/vertexai/vertexai.py +71 -0
  563. lfx/components/vertexai/vertexai_embeddings.py +67 -0
  564. lfx/components/vlmrun/__init__.py +34 -0
  565. lfx/components/vlmrun/vlmrun_transcription.py +224 -0
  566. lfx/components/weaviate/__init__.py +34 -0
  567. lfx/components/weaviate/weaviate.py +89 -0
  568. lfx/components/wikipedia/__init__.py +4 -0
  569. lfx/components/wikipedia/wikidata.py +86 -0
  570. lfx/components/wikipedia/wikipedia.py +53 -0
  571. lfx/components/wolframalpha/__init__.py +3 -0
  572. lfx/components/wolframalpha/wolfram_alpha_api.py +54 -0
  573. lfx/components/xai/__init__.py +32 -0
  574. lfx/components/xai/xai.py +167 -0
  575. lfx/components/yahoosearch/__init__.py +3 -0
  576. lfx/components/yahoosearch/yahoo.py +137 -0
  577. lfx/components/youtube/__init__.py +52 -0
  578. lfx/components/youtube/channel.py +227 -0
  579. lfx/components/youtube/comments.py +231 -0
  580. lfx/components/youtube/playlist.py +33 -0
  581. lfx/components/youtube/search.py +120 -0
  582. lfx/components/youtube/trending.py +285 -0
  583. lfx/components/youtube/video_details.py +263 -0
  584. lfx/components/youtube/youtube_transcripts.py +206 -0
  585. lfx/components/zep/__init__.py +3 -0
  586. lfx/components/zep/zep.py +45 -0
  587. lfx/constants.py +6 -0
  588. lfx/custom/__init__.py +7 -0
  589. lfx/custom/attributes.py +87 -0
  590. lfx/custom/code_parser/__init__.py +3 -0
  591. lfx/custom/code_parser/code_parser.py +361 -0
  592. lfx/custom/custom_component/__init__.py +0 -0
  593. lfx/custom/custom_component/base_component.py +128 -0
  594. lfx/custom/custom_component/component.py +1890 -0
  595. lfx/custom/custom_component/component_with_cache.py +8 -0
  596. lfx/custom/custom_component/custom_component.py +650 -0
  597. lfx/custom/dependency_analyzer.py +165 -0
  598. lfx/custom/directory_reader/__init__.py +3 -0
  599. lfx/custom/directory_reader/directory_reader.py +359 -0
  600. lfx/custom/directory_reader/utils.py +171 -0
  601. lfx/custom/eval.py +12 -0
  602. lfx/custom/schema.py +32 -0
  603. lfx/custom/tree_visitor.py +21 -0
  604. lfx/custom/utils.py +877 -0
  605. lfx/custom/validate.py +523 -0
  606. lfx/events/__init__.py +1 -0
  607. lfx/events/event_manager.py +110 -0
  608. lfx/exceptions/__init__.py +0 -0
  609. lfx/exceptions/component.py +15 -0
  610. lfx/field_typing/__init__.py +91 -0
  611. lfx/field_typing/constants.py +216 -0
  612. lfx/field_typing/range_spec.py +35 -0
  613. lfx/graph/__init__.py +6 -0
  614. lfx/graph/edge/__init__.py +0 -0
  615. lfx/graph/edge/base.py +300 -0
  616. lfx/graph/edge/schema.py +119 -0
  617. lfx/graph/edge/utils.py +0 -0
  618. lfx/graph/graph/__init__.py +0 -0
  619. lfx/graph/graph/ascii.py +202 -0
  620. lfx/graph/graph/base.py +2298 -0
  621. lfx/graph/graph/constants.py +63 -0
  622. lfx/graph/graph/runnable_vertices_manager.py +133 -0
  623. lfx/graph/graph/schema.py +53 -0
  624. lfx/graph/graph/state_model.py +66 -0
  625. lfx/graph/graph/utils.py +1024 -0
  626. lfx/graph/schema.py +75 -0
  627. lfx/graph/state/__init__.py +0 -0
  628. lfx/graph/state/model.py +250 -0
  629. lfx/graph/utils.py +206 -0
  630. lfx/graph/vertex/__init__.py +0 -0
  631. lfx/graph/vertex/base.py +826 -0
  632. lfx/graph/vertex/constants.py +0 -0
  633. lfx/graph/vertex/exceptions.py +4 -0
  634. lfx/graph/vertex/param_handler.py +316 -0
  635. lfx/graph/vertex/schema.py +26 -0
  636. lfx/graph/vertex/utils.py +19 -0
  637. lfx/graph/vertex/vertex_types.py +489 -0
  638. lfx/helpers/__init__.py +141 -0
  639. lfx/helpers/base_model.py +71 -0
  640. lfx/helpers/custom.py +13 -0
  641. lfx/helpers/data.py +167 -0
  642. lfx/helpers/flow.py +308 -0
  643. lfx/inputs/__init__.py +68 -0
  644. lfx/inputs/constants.py +2 -0
  645. lfx/inputs/input_mixin.py +352 -0
  646. lfx/inputs/inputs.py +718 -0
  647. lfx/inputs/validators.py +19 -0
  648. lfx/interface/__init__.py +6 -0
  649. lfx/interface/components.py +897 -0
  650. lfx/interface/importing/__init__.py +5 -0
  651. lfx/interface/importing/utils.py +39 -0
  652. lfx/interface/initialize/__init__.py +3 -0
  653. lfx/interface/initialize/loading.py +317 -0
  654. lfx/interface/listing.py +26 -0
  655. lfx/interface/run.py +16 -0
  656. lfx/interface/utils.py +111 -0
  657. lfx/io/__init__.py +63 -0
  658. lfx/io/schema.py +295 -0
  659. lfx/load/__init__.py +8 -0
  660. lfx/load/load.py +256 -0
  661. lfx/load/utils.py +99 -0
  662. lfx/log/__init__.py +5 -0
  663. lfx/log/logger.py +411 -0
  664. lfx/logging/__init__.py +11 -0
  665. lfx/logging/logger.py +24 -0
  666. lfx/memory/__init__.py +70 -0
  667. lfx/memory/stubs.py +302 -0
  668. lfx/processing/__init__.py +1 -0
  669. lfx/processing/process.py +238 -0
  670. lfx/processing/utils.py +25 -0
  671. lfx/py.typed +0 -0
  672. lfx/schema/__init__.py +66 -0
  673. lfx/schema/artifact.py +83 -0
  674. lfx/schema/content_block.py +62 -0
  675. lfx/schema/content_types.py +91 -0
  676. lfx/schema/cross_module.py +80 -0
  677. lfx/schema/data.py +309 -0
  678. lfx/schema/dataframe.py +210 -0
  679. lfx/schema/dotdict.py +74 -0
  680. lfx/schema/encoders.py +13 -0
  681. lfx/schema/graph.py +47 -0
  682. lfx/schema/image.py +184 -0
  683. lfx/schema/json_schema.py +186 -0
  684. lfx/schema/log.py +62 -0
  685. lfx/schema/message.py +493 -0
  686. lfx/schema/openai_responses_schemas.py +74 -0
  687. lfx/schema/properties.py +41 -0
  688. lfx/schema/schema.py +180 -0
  689. lfx/schema/serialize.py +13 -0
  690. lfx/schema/table.py +142 -0
  691. lfx/schema/validators.py +114 -0
  692. lfx/serialization/__init__.py +5 -0
  693. lfx/serialization/constants.py +2 -0
  694. lfx/serialization/serialization.py +314 -0
  695. lfx/services/__init__.py +26 -0
  696. lfx/services/base.py +28 -0
  697. lfx/services/cache/__init__.py +6 -0
  698. lfx/services/cache/base.py +183 -0
  699. lfx/services/cache/service.py +166 -0
  700. lfx/services/cache/utils.py +169 -0
  701. lfx/services/chat/__init__.py +1 -0
  702. lfx/services/chat/config.py +2 -0
  703. lfx/services/chat/schema.py +10 -0
  704. lfx/services/database/__init__.py +5 -0
  705. lfx/services/database/service.py +25 -0
  706. lfx/services/deps.py +194 -0
  707. lfx/services/factory.py +19 -0
  708. lfx/services/initialize.py +19 -0
  709. lfx/services/interfaces.py +103 -0
  710. lfx/services/manager.py +185 -0
  711. lfx/services/mcp_composer/__init__.py +6 -0
  712. lfx/services/mcp_composer/factory.py +16 -0
  713. lfx/services/mcp_composer/service.py +1441 -0
  714. lfx/services/schema.py +21 -0
  715. lfx/services/session.py +87 -0
  716. lfx/services/settings/__init__.py +3 -0
  717. lfx/services/settings/auth.py +133 -0
  718. lfx/services/settings/base.py +668 -0
  719. lfx/services/settings/constants.py +43 -0
  720. lfx/services/settings/factory.py +23 -0
  721. lfx/services/settings/feature_flags.py +11 -0
  722. lfx/services/settings/service.py +35 -0
  723. lfx/services/settings/utils.py +40 -0
  724. lfx/services/shared_component_cache/__init__.py +1 -0
  725. lfx/services/shared_component_cache/factory.py +30 -0
  726. lfx/services/shared_component_cache/service.py +9 -0
  727. lfx/services/storage/__init__.py +5 -0
  728. lfx/services/storage/local.py +185 -0
  729. lfx/services/storage/service.py +177 -0
  730. lfx/services/tracing/__init__.py +1 -0
  731. lfx/services/tracing/service.py +21 -0
  732. lfx/settings.py +6 -0
  733. lfx/template/__init__.py +6 -0
  734. lfx/template/field/__init__.py +0 -0
  735. lfx/template/field/base.py +260 -0
  736. lfx/template/field/prompt.py +15 -0
  737. lfx/template/frontend_node/__init__.py +6 -0
  738. lfx/template/frontend_node/base.py +214 -0
  739. lfx/template/frontend_node/constants.py +65 -0
  740. lfx/template/frontend_node/custom_components.py +79 -0
  741. lfx/template/template/__init__.py +0 -0
  742. lfx/template/template/base.py +100 -0
  743. lfx/template/utils.py +217 -0
  744. lfx/type_extraction/__init__.py +19 -0
  745. lfx/type_extraction/type_extraction.py +75 -0
  746. lfx/type_extraction.py +80 -0
  747. lfx/utils/__init__.py +1 -0
  748. lfx/utils/async_helpers.py +42 -0
  749. lfx/utils/component_utils.py +154 -0
  750. lfx/utils/concurrency.py +60 -0
  751. lfx/utils/connection_string_parser.py +11 -0
  752. lfx/utils/constants.py +233 -0
  753. lfx/utils/data_structure.py +212 -0
  754. lfx/utils/exceptions.py +22 -0
  755. lfx/utils/helpers.py +34 -0
  756. lfx/utils/image.py +79 -0
  757. lfx/utils/langflow_utils.py +52 -0
  758. lfx/utils/lazy_load.py +15 -0
  759. lfx/utils/request_utils.py +18 -0
  760. lfx/utils/schemas.py +139 -0
  761. lfx/utils/ssrf_protection.py +384 -0
  762. lfx/utils/util.py +626 -0
  763. lfx/utils/util_strings.py +56 -0
  764. lfx/utils/validate_cloud.py +26 -0
  765. lfx/utils/version.py +24 -0
  766. lfx_nightly-0.2.0.dev25.dist-info/METADATA +312 -0
  767. lfx_nightly-0.2.0.dev25.dist-info/RECORD +769 -0
  768. lfx_nightly-0.2.0.dev25.dist-info/WHEEL +4 -0
  769. lfx_nightly-0.2.0.dev25.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,2298 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import contextlib
5
+ import contextvars
6
+ import copy
7
+ import json
8
+ import queue
9
+ import threading
10
+ import traceback
11
+ import uuid
12
+ from collections import defaultdict, deque
13
+ from datetime import datetime, timezone
14
+ from functools import partial
15
+ from itertools import chain
16
+ from typing import TYPE_CHECKING, Any, cast
17
+
18
+ from lfx.exceptions.component import ComponentBuildError
19
+ from lfx.graph.edge.base import CycleEdge, Edge
20
+ from lfx.graph.graph.constants import Finish, lazy_load_vertex_dict
21
+ from lfx.graph.graph.runnable_vertices_manager import RunnableVerticesManager
22
+ from lfx.graph.graph.schema import GraphData, GraphDump, StartConfigDict, VertexBuildResult
23
+ from lfx.graph.graph.state_model import create_state_model_from_graph
24
+ from lfx.graph.graph.utils import (
25
+ find_all_cycle_edges,
26
+ find_cycle_vertices,
27
+ find_start_component_id,
28
+ get_sorted_vertices,
29
+ process_flow,
30
+ should_continue,
31
+ )
32
+ from lfx.graph.schema import InterfaceComponentTypes, RunOutputs
33
+ from lfx.graph.utils import log_vertex_build
34
+ from lfx.graph.vertex.base import Vertex, VertexStates
35
+ from lfx.graph.vertex.schema import NodeData, NodeTypeEnum
36
+ from lfx.graph.vertex.vertex_types import ComponentVertex, InterfaceVertex, StateVertex
37
+ from lfx.log.logger import LogConfig, configure, logger
38
+ from lfx.schema.dotdict import dotdict
39
+ from lfx.schema.schema import INPUT_FIELD_NAME, InputType, OutputValue
40
+ from lfx.services.cache.utils import CacheMiss
41
+ from lfx.services.deps import get_chat_service, get_tracing_service
42
+ from lfx.utils.async_helpers import run_until_complete
43
+
44
+ if TYPE_CHECKING:
45
+ from collections.abc import Callable, Generator, Iterable
46
+ from typing import Any
47
+
48
+ from lfx.custom.custom_component.component import Component
49
+ from lfx.events.event_manager import EventManager
50
+ from lfx.graph.edge.schema import EdgeData
51
+ from lfx.graph.schema import ResultData
52
+ from lfx.schema.schema import InputValueRequest
53
+ from lfx.services.chat.schema import GetCache, SetCache
54
+ from lfx.services.tracing.service import TracingService
55
+
56
+
57
+ class Graph:
58
+ """A class representing a graph of vertices and edges."""
59
+
60
    def __init__(
        self,
        start: Component | None = None,
        end: Component | None = None,
        flow_id: str | None = None,
        flow_name: str | None = None,
        description: str | None = None,
        user_id: str | None = None,
        log_config: LogConfig | None = None,
        context: dict[str, Any] | None = None,
    ) -> None:
        """Initializes a new Graph instance.

        If both start and end components are provided, the graph is initialized and prepared for execution.
        If only one is provided, a ValueError is raised. The context must be a dictionary if specified,
        otherwise a TypeError is raised. Internal data structures for vertices, edges, state management,
        run management, and tracing are set up during initialization.

        Args:
            start: Optional entry component; must be given together with ``end``.
            end: Optional exit component; must be given together with ``start``.
            flow_id: Identifier of the flow this graph belongs to.
            flow_name: Human-readable flow name.
            description: Flow description.
            user_id: Owner of the flow/run.
            log_config: If given, passed to ``configure`` to set up logging.
            context: Optional initial context mapping, wrapped in a dotdict.

        Raises:
            TypeError: If ``context`` is provided but is not a dictionary.
            ValueError: If exactly one of ``start``/``end`` is provided.
        """
        if log_config:
            configure(**log_config)

        self._start = start
        self._state_model = None
        self._end = end
        self._prepared = False
        self._runs = 0
        self._updates = 0
        self.flow_id = flow_id
        self.flow_name = flow_name
        self.description = description
        self.user_id = user_id
        # Classification lists populated by define_vertices_lists().
        self._is_input_vertices: list[str] = []
        self._is_output_vertices: list[str] = []
        self._is_state_vertices: list[str] | None = None
        self.has_session_id_vertices: list[str] = []
        self._sorted_vertices_layers: list[list[str]] = []
        self._run_id = ""
        self._session_id = ""
        self._start_time = datetime.now(timezone.utc)
        self.inactivated_vertices: set = set()
        self.activated_vertices: list[str] = []
        self.vertices_layers: list[list[str]] = []
        self.vertices_to_run: set[str] = set()
        self.stop_vertex: str | None = None
        self.inactive_vertices: set = set()
        # Conditional routing system (separate from ACTIVE/INACTIVE cycle management)
        self.conditionally_excluded_vertices: set = set()  # Vertices excluded by conditional routing
        self.conditional_exclusion_sources: dict[str, set[str]] = {}  # Maps source vertex -> excluded vertices
        self.edges: list[CycleEdge] = []
        self.vertices: list[Vertex] = []
        self.run_manager = RunnableVerticesManager()
        # Raw node/edge payloads (dicts) before being built into objects.
        self._vertices: list[NodeData] = []
        self._edges: list[EdgeData] = []

        self.top_level_vertices: list[str] = []
        self.vertex_map: dict[str, Vertex] = {}
        # Adjacency bookkeeping maintained by _add_edge().
        self.predecessor_map: dict[str, list[str]] = defaultdict(list)
        self.successor_map: dict[str, list[str]] = defaultdict(list)
        self.in_degree_map: dict[str, int] = defaultdict(int)
        self.parent_child_map: dict[str, list[str]] = defaultdict(list)
        self._run_queue: deque[str] = deque()
        self._first_layer: list[str] = []
        # Created lazily by the `lock` property to avoid event-loop binding issues.
        self._lock: asyncio.Lock | None = None
        self.raw_graph_data: GraphData = {"nodes": [], "edges": []}
        self._is_cyclic: bool | None = None
        self._cycles: list[tuple[str, str]] | None = None
        self._cycle_vertices: set[str] | None = None
        self._call_order: list[str] = []
        self._snapshots: list[dict[str, Any]] = []
        # Strong references to fire-and-forget trace-ending tasks.
        self._end_trace_tasks: set[asyncio.Task] = set()

        if context and not isinstance(context, dict):
            msg = "Context must be a dictionary"
            raise TypeError(msg)
        self._context = dotdict(context or {})
        # Lazy initialization - only get tracing service when needed
        self._tracing_service: TracingService | None = None
        self._tracing_service_initialized = False
        if start is not None and end is not None:
            self._set_start_and_end(start, end)
            self.prepare(start_component_id=start.get_id())
        # NOTE(review): this XOR validation runs after prepare() above; when
        # both are set, prepare() happens first by design of this ordering.
        if (start is not None and end is None) or (start is None and end is not None):
            msg = "You must provide both input and output components"
            raise ValueError(msg)
144
+
145
+ @property
146
+ def lock(self):
147
+ """Lazy initialization of asyncio.Lock to avoid event loop binding issues."""
148
+ if self._lock is None:
149
+ self._lock = asyncio.Lock()
150
+ return self._lock
151
+
152
+ @property
153
+ def context(self) -> dotdict:
154
+ if isinstance(self._context, dotdict):
155
+ return self._context
156
+ return dotdict(self._context)
157
+
158
+ @context.setter
159
+ def context(self, value: dict[str, Any]):
160
+ if not isinstance(value, dict):
161
+ msg = "Context must be a dictionary"
162
+ raise TypeError(msg)
163
+ if isinstance(value, dict):
164
+ value = dotdict(value)
165
+ self._context = value
166
+
167
+ @property
168
+ def session_id(self):
169
+ return self._session_id
170
+
171
+ @session_id.setter
172
+ def session_id(self, value: str):
173
+ self._session_id = value
174
+
175
+ @property
176
+ def state_model(self):
177
+ if not self._state_model:
178
+ self._state_model = create_state_model_from_graph(self)
179
+ return self._state_model
180
+
181
+ def __add__(self, other):
182
+ if not isinstance(other, Graph):
183
+ msg = "Can only add Graph objects"
184
+ raise TypeError(msg)
185
+ # Add the vertices and edges from the other graph to this graph
186
+ new_instance = copy.deepcopy(self)
187
+ for vertex in other.vertices:
188
+ # This updates the edges as well
189
+ new_instance.add_vertex(vertex)
190
+ new_instance.build_graph_maps(new_instance.edges)
191
+ new_instance.define_vertices_lists()
192
+ return new_instance
193
+
194
+ def __iadd__(self, other):
195
+ if not isinstance(other, Graph):
196
+ msg = "Can only add Graph objects"
197
+ raise TypeError(msg)
198
+ # Add the vertices and edges from the other graph to this graph
199
+ for vertex in other.vertices:
200
+ # This updates the edges as well
201
+ self.add_vertex(vertex)
202
+ self.build_graph_maps(self.edges)
203
+ self.define_vertices_lists()
204
+ return self
205
+
206
    @property
    def tracing_service(self) -> TracingService | None:
        """Lazily initialize tracing service only when accessed.

        Tracing is best-effort: on failure the error is logged once and None
        is cached, so subsequent accesses do not retry or re-log.
        """
        if not self._tracing_service_initialized:
            try:
                self._tracing_service = get_tracing_service()
            except Exception:  # noqa: BLE001
                # Deliberately broad: a broken tracing backend must never
                # prevent the graph from running.
                logger.exception("Error getting tracing service")
                self._tracing_service = None
            # Mark initialized regardless of outcome so we only try once.
            self._tracing_service_initialized = True
        return self._tracing_service
217
+
218
+ def dumps(
219
+ self,
220
+ name: str | None = None,
221
+ description: str | None = None,
222
+ endpoint_name: str | None = None,
223
+ ) -> str:
224
+ graph_dict = self.dump(name, description, endpoint_name)
225
+ return json.dumps(graph_dict, indent=4, sort_keys=True)
226
+
227
+ def dump(
228
+ self, name: str | None = None, description: str | None = None, endpoint_name: str | None = None
229
+ ) -> GraphDump:
230
+ if self.raw_graph_data != {"nodes": [], "edges": []}:
231
+ data_dict = self.raw_graph_data
232
+ else:
233
+ # we need to convert the vertices and edges to json
234
+ nodes = [node.to_data() for node in self.vertices]
235
+ edges = [edge.to_data() for edge in self.edges]
236
+ self.raw_graph_data = {"nodes": nodes, "edges": edges}
237
+ data_dict = self.raw_graph_data
238
+ graph_dict: GraphDump = {
239
+ "data": data_dict,
240
+ "is_component": len(data_dict.get("nodes", [])) == 1 and data_dict["edges"] == [],
241
+ }
242
+ if name:
243
+ graph_dict["name"] = name
244
+ elif name is None and self.flow_name:
245
+ graph_dict["name"] = self.flow_name
246
+ if description:
247
+ graph_dict["description"] = description
248
+ elif description is None and self.description:
249
+ graph_dict["description"] = self.description
250
+ graph_dict["endpoint_name"] = str(endpoint_name)
251
+ return graph_dict
252
+
253
    def add_nodes_and_edges(self, nodes: list[NodeData], edges: list[EdgeData]) -> None:
        """Load raw node/edge payloads into the graph and initialize it.

        Stores the raw data, records top-level vertex IDs (registering cycle
        members with the run manager), expands the flow via ``process_flow``,
        and finally builds the vertex/edge objects.
        """
        self._vertices = nodes
        self._edges = edges
        self.raw_graph_data = {"nodes": nodes, "edges": edges}
        self.top_level_vertices = []
        for vertex in self._vertices:
            if vertex_id := vertex.get("id"):
                self.top_level_vertices.append(vertex_id)
                # Vertices on a cycle need special scheduling by the run manager.
                if vertex_id in self.cycle_vertices:
                    self.run_manager.add_to_cycle_vertices(vertex_id)
        # process_flow may rewrite nodes/edges (e.g. expand grouped nodes);
        # use its output as the definitive payloads from here on.
        self._graph_data = process_flow(self.raw_graph_data)

        self._vertices = self._graph_data["nodes"]
        self._edges = self._graph_data["edges"]
        self.initialize()
268
+
269
+ def add_component(self, component: Component, component_id: str | None = None) -> str:
270
+ component_id = component_id or component.get_id()
271
+ if component_id in self.vertex_map:
272
+ return component_id
273
+ component.set_id(component_id)
274
+ if component_id in self.vertex_map:
275
+ msg = f"Component ID {component_id} already exists"
276
+ raise ValueError(msg)
277
+ frontend_node = component.to_frontend_node()
278
+ self._vertices.append(frontend_node)
279
+ vertex = self._create_vertex(frontend_node)
280
+ vertex.add_component_instance(component)
281
+ self._add_vertex(vertex)
282
+ if component.get_edges():
283
+ for edge in component.get_edges():
284
+ self._add_edge(edge)
285
+
286
+ if component.get_components():
287
+ for _component in component.get_components():
288
+ self.add_component(_component)
289
+
290
+ return component_id
291
+
292
+ def _set_start_and_end(self, start: Component, end: Component) -> None:
293
+ if not hasattr(start, "to_frontend_node"):
294
+ msg = f"start must be a Component. Got {type(start)}"
295
+ raise TypeError(msg)
296
+ if not hasattr(end, "to_frontend_node"):
297
+ msg = f"end must be a Component. Got {type(end)}"
298
+ raise TypeError(msg)
299
+ self.add_component(start, start.get_id())
300
+ self.add_component(end, end.get_id())
301
+
302
    def add_component_edge(self, source_id: str, output_input_tuple: tuple[str, str], target_id: str) -> None:
        """Create an edge between two component vertices.

        Args:
            source_id: ID of the source component vertex.
            output_input_tuple: ``(output_name, input_name)`` pair naming the
                source output and target input to connect.
            target_id: ID of the target component vertex.

        Raises:
            TypeError: If either vertex is not a ComponentVertex.
            ValueError: If either vertex lacks a custom component, or the
                named input field cannot be found on the target.
        """
        source_vertex = self.get_vertex(source_id)
        if not isinstance(source_vertex, ComponentVertex):
            msg = f"Source vertex {source_id} is not a component vertex."
            raise TypeError(msg)
        target_vertex = self.get_vertex(target_id)
        if not isinstance(target_vertex, ComponentVertex):
            msg = f"Target vertex {target_id} is not a component vertex."
            raise TypeError(msg)
        output_name, input_name = output_input_tuple
        if source_vertex.custom_component is None:
            msg = f"Source vertex {source_id} does not have a custom component."
            raise ValueError(msg)
        if target_vertex.custom_component is None:
            msg = f"Target vertex {target_id} does not have a custom component."
            raise ValueError(msg)

        # Prefer the structured input object; fall back to the raw template
        # dict when get_input raises (e.g. for template-only fields).
        try:
            input_field = target_vertex.get_input(input_name)
            input_types = input_field.input_types
            input_field_type = str(input_field.field_type)
        except ValueError as e:
            input_field = target_vertex.data.get("node", {}).get("template", {}).get(input_name)
            if not input_field:
                msg = f"Input field {input_name} not found in target vertex {target_id}"
                raise ValueError(msg) from e
            input_types = input_field.get("input_types", [])
            input_field_type = input_field.get("type", "")

        # Assemble the frontend-shaped edge payload expected by _add_edge.
        edge_data: EdgeData = {
            "source": source_id,
            "target": target_id,
            "data": {
                "sourceHandle": {
                    "dataType": source_vertex.custom_component.name
                    or source_vertex.custom_component.__class__.__name__,
                    "id": source_vertex.id,
                    "name": output_name,
                    "output_types": source_vertex.get_output(output_name).types,
                },
                "targetHandle": {
                    "fieldName": input_name,
                    "id": target_vertex.id,
                    "inputTypes": input_types,
                    "type": input_field_type,
                },
            },
        }
        self._add_edge(edge_data)
351
+
352
    async def async_start(
        self,
        inputs: list[dict] | None = None,
        max_iterations: int | None = None,
        config: StartConfigDict | None = None,
        event_manager: EventManager | None = None,
        *,
        reset_output_values: bool = True,
    ):
        """Async generator that runs the graph step by step.

        Yields the result of each ``astep`` call and finishes after yielding
        a ``Finish`` sentinel.

        Args:
            inputs: Optional list of input payloads forwarded to each step.
            max_iterations: Per-vertex yield budget; exceeding it raises.
            config: Optional output configuration applied before running.
            event_manager: Forwarded to each build step.
            reset_output_values: When True, clear cached component outputs first.

        Raises:
            ValueError: If any vertex is yielded more than ``max_iterations`` times.
        """
        self.prepare()
        if reset_output_values:
            self._reset_all_output_values()

        # The idea is for this to return a generator that yields the result of
        # each step call and raise StopIteration when the graph is done
        if config is not None:
            self.__apply_config(config)
        # Track how many times each result.vertex.id has been yielded so
        # should_continue can enforce the iteration budget on cyclic graphs.
        yielded_counts: dict[str, int] = defaultdict(int)

        while should_continue(yielded_counts, max_iterations):
            result = await self.astep(event_manager=event_manager, inputs=inputs)
            yield result
            if isinstance(result, Finish):
                return
            if hasattr(result, "vertex"):
                yielded_counts[result.vertex.id] += 1

        msg = "Max iterations reached"
        raise ValueError(msg)
383
+
384
+ def _snapshot(self):
385
+ return {
386
+ "_run_queue": self._run_queue.copy(),
387
+ "_first_layer": self._first_layer.copy(),
388
+ "vertices_layers": copy.deepcopy(self.vertices_layers),
389
+ "vertices_to_run": copy.deepcopy(self.vertices_to_run),
390
+ "run_manager": copy.deepcopy(self.run_manager.to_dict()),
391
+ }
392
+
393
+ def __apply_config(self, config: StartConfigDict) -> None:
394
+ for vertex in self.vertices:
395
+ if vertex.custom_component is None:
396
+ continue
397
+ for output in vertex.custom_component.get_outputs_map().values():
398
+ for key, value in config["output"].items():
399
+ setattr(output, key, value)
400
+
401
+ def _reset_all_output_values(self) -> None:
402
+ for vertex in self.vertices:
403
+ if vertex.custom_component is None:
404
+ continue
405
+ vertex.custom_component.reset_all_output_values()
406
+
407
+ def start(
408
+ self,
409
+ inputs: list[dict] | None = None,
410
+ max_iterations: int | None = None,
411
+ config: StartConfigDict | None = None,
412
+ event_manager: EventManager | None = None,
413
+ ) -> Generator:
414
+ """Starts the graph execution synchronously by creating a new event loop in a separate thread.
415
+
416
+ Args:
417
+ inputs: Optional list of input dictionaries
418
+ max_iterations: Optional maximum number of iterations
419
+ config: Optional configuration dictionary
420
+ event_manager: Optional event manager
421
+
422
+ Returns:
423
+ Generator yielding results from graph execution
424
+ """
425
+ if self.is_cyclic and max_iterations is None:
426
+ msg = "You must specify a max_iterations if the graph is cyclic"
427
+ raise ValueError(msg)
428
+
429
+ if config is not None:
430
+ self.__apply_config(config)
431
+
432
+ # Create a queue for passing results and errors between threads
433
+ result_queue: queue.Queue[VertexBuildResult | Exception | None] = queue.Queue()
434
+
435
+ # Function to run async code in separate thread
436
+ def run_async_code():
437
+ # Create new event loop for this thread
438
+ loop = asyncio.new_event_loop()
439
+ asyncio.set_event_loop(loop)
440
+
441
+ try:
442
+ # Run the async generator
443
+ async_gen = self.async_start(inputs, max_iterations, event_manager)
444
+
445
+ while True:
446
+ try:
447
+ # Get next result from async generator
448
+ result = loop.run_until_complete(anext(async_gen))
449
+ result_queue.put(result)
450
+
451
+ if isinstance(result, Finish):
452
+ break
453
+
454
+ except StopAsyncIteration:
455
+ break
456
+ except ValueError as e:
457
+ # Put the exception in the queue
458
+ result_queue.put(e)
459
+ break
460
+
461
+ finally:
462
+ # Ensure all pending tasks are completed
463
+ pending = asyncio.all_tasks(loop)
464
+ if pending:
465
+ # Create a future to gather all pending tasks
466
+ cleanup_future = asyncio.gather(*pending, return_exceptions=True)
467
+ loop.run_until_complete(cleanup_future)
468
+
469
+ # Close the loop
470
+ loop.close()
471
+ # Signal completion
472
+ result_queue.put(None)
473
+
474
+ # Start thread for async execution
475
+ thread = threading.Thread(target=run_async_code)
476
+ thread.start()
477
+
478
+ # Yield results from queue
479
+ while True:
480
+ result = result_queue.get()
481
+ if result is None:
482
+ break
483
+ if isinstance(result, Exception):
484
+ raise result
485
+ yield result
486
+
487
+ # Wait for thread to complete
488
+ thread.join()
489
+
490
+ def _add_edge(self, edge: EdgeData) -> None:
491
+ self.add_edge(edge)
492
+ source_id = edge["data"]["sourceHandle"]["id"]
493
+ target_id = edge["data"]["targetHandle"]["id"]
494
+ self.predecessor_map[target_id].append(source_id)
495
+ self.successor_map[source_id].append(target_id)
496
+ self.in_degree_map[target_id] += 1
497
+ self.parent_child_map[source_id].append(target_id)
498
+
499
+ def add_node(self, node: NodeData) -> None:
500
+ self._vertices.append(node)
501
+
502
+ def add_edge(self, edge: EdgeData) -> None:
503
+ # Check if the edge already exists
504
+ if edge in self._edges:
505
+ return
506
+ self._edges.append(edge)
507
+
508
    def initialize(self) -> None:
        """Build vertex/edge objects from the raw payloads, then derive
        adjacency maps and the input/output/state classification lists."""
        self._build_graph()
        self.build_graph_maps(self.edges)
        self.define_vertices_lists()
512
+
513
+ @property
514
+ def is_state_vertices(self) -> list[str]:
515
+ """Returns a cached list of vertex IDs for vertices marked as state vertices.
516
+
517
+ The list is computed on first access by filtering vertices with `is_state` set to True and is
518
+ cached for future calls.
519
+ """
520
+ if self._is_state_vertices is None:
521
+ self._is_state_vertices = [vertex.id for vertex in self.vertices if vertex.is_state]
522
+ return self._is_state_vertices
523
+
524
    def activate_state_vertices(self, name: str, caller: str) -> None:
        """Activates vertices associated with a given state name.

        Marks state vertices whose ``context_key`` contains ``name`` (skipping
        the caller and any vertex sharing the caller's display name), together
        with their successors and those successors' predecessors. The run
        manager is then updated with the rebuilt predecessor map so the
        activated subgraph can be scheduled again.
        """
        vertices_ids = set()
        new_predecessor_map = {}
        activated_vertices = []
        for vertex_id in self.is_state_vertices:
            caller_vertex = self.get_vertex(caller)
            vertex = self.get_vertex(vertex_id)
            # Never re-activate the caller (or its display-name twins).
            if vertex_id == caller or vertex.display_name == caller_vertex.display_name:
                continue
            ctx_key = vertex.raw_params.get("context_key")
            if isinstance(ctx_key, str) and name in ctx_key and vertex_id != caller and isinstance(vertex, StateVertex):
                activated_vertices.append(vertex_id)
                vertices_ids.add(vertex_id)
                successors = self.get_all_successors(vertex, flat=True)
                # Update run_manager.run_predecessors because we are activating vertices.
                # run_predecessors is the predecessor map of the vertices;
                # we remove the vertex_id from the predecessor map whenever we run a vertex,
                # so we need to gather all edges of the vertex and its successors
                # and run self.build_adjacency_maps(edges) to get a new predecessor map.
                # That map is not complete but is enough to update run_predecessors.
                successors_predecessors = set()
                for sucessor in successors:
                    successors_predecessors.update(self.get_all_predecessors(sucessor))

                edges_set = set()
                for _vertex in [vertex, *successors, *successors_predecessors]:
                    edges_set.update(_vertex.edges)
                    # Re-activate any vertex the cycle management had switched off.
                    if _vertex.state == VertexStates.INACTIVE:
                        _vertex.set_state("ACTIVE")

                    vertices_ids.add(_vertex.id)
                edges = list(edges_set)
                predecessor_map, _ = self.build_adjacency_maps(edges)
                new_predecessor_map.update(predecessor_map)

        # Include every vertex mentioned anywhere in the rebuilt predecessor map.
        vertices_ids.update(new_predecessor_map.keys())
        vertices_ids.update(v_id for value_list in new_predecessor_map.values() for v_id in value_list)

        self.activated_vertices = activated_vertices
        self.vertices_to_run.update(vertices_ids)
        self.run_manager.update_run_state(
            run_predecessors=new_predecessor_map,
            vertices_to_run=self.vertices_to_run,
        )
573
+
574
+ def reset_activated_vertices(self) -> None:
575
+ """Resets the activated vertices in the graph."""
576
+ self.activated_vertices = []
577
+
578
+ def validate_stream(self) -> None:
579
+ """Validates the stream configuration of the graph.
580
+
581
+ If there are two vertices in the same graph (connected by edges)
582
+ that have `stream=True` or `streaming=True`, raises a `ValueError`.
583
+
584
+ Raises:
585
+ ValueError: If two connected vertices have `stream=True` or `streaming=True`.
586
+ """
587
+ for vertex in self.vertices:
588
+ if vertex.params.get("stream") or vertex.params.get("streaming"):
589
+ successors = self.get_all_successors(vertex)
590
+ for successor in successors:
591
+ if successor.params.get("stream") or successor.params.get("streaming"):
592
+ msg = (
593
+ f"Components {vertex.display_name} and {successor.display_name} "
594
+ "are connected and both have stream or streaming set to True"
595
+ )
596
+ raise ValueError(msg)
597
+
598
+ @property
599
+ def first_layer(self):
600
+ if self._first_layer is None:
601
+ msg = "Graph not prepared. Call prepare() first."
602
+ raise ValueError(msg)
603
+ return self._first_layer
604
+
605
+ @property
606
+ def is_cyclic(self):
607
+ """Check if the graph has any cycles.
608
+
609
+ Returns:
610
+ bool: True if the graph has any cycles, False otherwise.
611
+ """
612
+ if self._is_cyclic is None:
613
+ self._is_cyclic = bool(self.cycle_vertices)
614
+ return self._is_cyclic
615
+
616
+ @property
617
+ def run_id(self):
618
+ """The ID of the current run.
619
+
620
+ Returns:
621
+ str: The run ID.
622
+
623
+ Raises:
624
+ ValueError: If the run ID is not set.
625
+ """
626
+ if not self._run_id:
627
+ msg = "Run ID not set"
628
+ raise ValueError(msg)
629
+ return self._run_id
630
+
631
+ def set_run_id(self, run_id: uuid.UUID | str | None = None) -> None:
632
+ """Sets the ID of the current run.
633
+
634
+ Args:
635
+ run_id (str): The run ID.
636
+ """
637
+ if run_id is None:
638
+ run_id = uuid.uuid4()
639
+
640
+ self._run_id = str(run_id)
641
+
642
    async def initialize_run(self) -> None:
        """Ensure a run ID exists and, if tracing is available, start tracers.

        No-op for tracing when the tracing service could not be obtained.
        """
        if not self._run_id:
            self.set_run_id()
        if self.tracing_service:
            run_name = f"{self.flow_name} - {self.flow_id}"
            await self.tracing_service.start_tracers(
                run_id=uuid.UUID(self._run_id),
                run_name=run_name,
                user_id=self.user_id,
                session_id=self.session_id,
            )
653
+
654
    def _end_all_traces_async(self, outputs: dict[str, Any] | None = None, error: Exception | None = None) -> None:
        """Schedule ``end_all_traces`` as a fire-and-forget task on the running loop.

        The task is held in ``self._end_trace_tasks`` so it is not garbage-collected
        before finishing; the done-callback removes it from the set afterwards.
        """
        task = asyncio.create_task(self.end_all_traces(outputs, error))
        self._end_trace_tasks.add(task)
        task.add_done_callback(self._end_trace_tasks.discard)
658
+
659
    def end_all_traces_in_context(
        self,
        outputs: dict[str, Any] | None = None,
        error: Exception | None = None,
    ) -> Callable:
        """Return an async callable that ends all traces within a copy of the current context.

        Intended for background-task frameworks: copying ``contextvars`` here
        preserves the tracing context that exists at the time this method is called.
        """
        # BackgroundTasks run in different context, so we need to copy the context
        context = contextvars.copy_context()

        async def async_end_traces_func():
            # Run the coroutine as a task bound to the captured context.
            await asyncio.create_task(self.end_all_traces(outputs, error), context=context)

        return async_end_traces_func
671
+
672
+ async def end_all_traces(self, outputs: dict[str, Any] | None = None, error: Exception | None = None) -> None:
673
+ if not self.tracing_service:
674
+ return
675
+ self._end_time = datetime.now(timezone.utc)
676
+ if outputs is None:
677
+ outputs = {}
678
+ outputs |= self.metadata
679
+ await self.tracing_service.end_tracers(outputs, error)
680
+
681
+ @property
682
+ def sorted_vertices_layers(self) -> list[list[str]]:
683
+ """Returns the sorted layers of vertex IDs by type.
684
+
685
+ Each layer in the returned list contains vertex IDs grouped by their classification,
686
+ such as input, output, session, or state vertices. Sorting is performed if not already done.
687
+ """
688
+ if not self._sorted_vertices_layers:
689
+ self.sort_vertices()
690
+ return self._sorted_vertices_layers
691
+
692
+ def define_vertices_lists(self) -> None:
693
+ """Populates internal lists of input, output, session ID, and state vertex IDs.
694
+
695
+ Iterates over all vertices and appends their IDs to the corresponding internal lists
696
+ based on their classification.
697
+ """
698
+ for vertex in self.vertices:
699
+ if vertex.is_input:
700
+ self._is_input_vertices.append(vertex.id)
701
+ if vertex.is_output:
702
+ self._is_output_vertices.append(vertex.id)
703
+ if vertex.has_session_id:
704
+ self.has_session_id_vertices.append(vertex.id)
705
+ if vertex.is_state:
706
+ if self._is_state_vertices is None:
707
+ self._is_state_vertices = []
708
+ self._is_state_vertices.append(vertex.id)
709
+
710
+ def _set_inputs(self, input_components: list[str], inputs: dict[str, str], input_type: InputType | None) -> None:
711
+ """Updates input vertices' parameters with the provided inputs, filtering by component list and input type.
712
+
713
+ Only vertices whose IDs or display names match the specified input components and whose IDs contain
714
+ the input type (unless input type is 'any' or None) are updated. Raises a ValueError if a specified
715
+ vertex is not found.
716
+ """
717
+ for vertex_id in self._is_input_vertices:
718
+ vertex = self.get_vertex(vertex_id)
719
+ # If the vertex is not in the input_components list
720
+ if input_components and (vertex_id not in input_components and vertex.display_name not in input_components):
721
+ continue
722
+ # If the input_type is not any and the input_type is not in the vertex id
723
+ # Example: input_type = "chat" and vertex.id = "OpenAI-19ddn"
724
+ if input_type is not None and input_type != "any" and input_type not in vertex.id.lower():
725
+ continue
726
+ if vertex is None:
727
+ msg = f"Vertex {vertex_id} not found"
728
+ raise ValueError(msg)
729
+ vertex.update_raw_params(inputs, overwrite=True)
730
+
731
+ async def _run(
732
+ self,
733
+ *,
734
+ inputs: dict[str, str],
735
+ input_components: list[str],
736
+ input_type: InputType | None,
737
+ outputs: list[str],
738
+ stream: bool,
739
+ session_id: str,
740
+ fallback_to_env_vars: bool,
741
+ event_manager: EventManager | None = None,
742
+ ) -> list[ResultData | None]:
743
+ """Runs the graph with the given inputs.
744
+
745
+ Args:
746
+ inputs (Dict[str, str]): The input values for the graph.
747
+ input_components (list[str]): The components to run for the inputs.
748
+ input_type: (Optional[InputType]): The input type.
749
+ outputs (list[str]): The outputs to retrieve from the graph.
750
+ stream (bool): Whether to stream the results or not.
751
+ session_id (str): The session ID for the graph.
752
+ fallback_to_env_vars (bool): Whether to fallback to environment variables.
753
+ event_manager (EventManager | None): The event manager for the graph.
754
+
755
+ Returns:
756
+ List[Optional["ResultData"]]: The outputs of the graph.
757
+ """
758
+ if input_components and not isinstance(input_components, list):
759
+ msg = f"Invalid components value: {input_components}. Expected list"
760
+ raise ValueError(msg)
761
+ if input_components is None:
762
+ input_components = []
763
+
764
+ if not isinstance(inputs.get(INPUT_FIELD_NAME, ""), str):
765
+ msg = f"Invalid input value: {inputs.get(INPUT_FIELD_NAME)}. Expected string"
766
+ raise TypeError(msg)
767
+ if inputs:
768
+ self._set_inputs(input_components, inputs, input_type)
769
+ # Update all the vertices with the session_id
770
+ for vertex_id in self.has_session_id_vertices:
771
+ vertex = self.get_vertex(vertex_id)
772
+ if vertex is None:
773
+ msg = f"Vertex {vertex_id} not found"
774
+ raise ValueError(msg)
775
+ vertex.update_raw_params({"session_id": session_id})
776
+ # Process the graph
777
+ try:
778
+ cache_service = get_chat_service()
779
+ if cache_service and self.flow_id:
780
+ await cache_service.set_cache(self.flow_id, self)
781
+ except Exception: # noqa: BLE001
782
+ logger.exception("Error setting cache")
783
+
784
+ try:
785
+ # Prioritize the webhook component if it exists
786
+ start_component_id = find_start_component_id(self._is_input_vertices)
787
+ await self.process(
788
+ start_component_id=start_component_id,
789
+ fallback_to_env_vars=fallback_to_env_vars,
790
+ event_manager=event_manager,
791
+ )
792
+ self.increment_run_count()
793
+ except Exception as exc:
794
+ self._end_all_traces_async(error=exc)
795
+ msg = f"Error running graph: {exc}"
796
+ raise ValueError(msg) from exc
797
+
798
+ self._end_all_traces_async()
799
+ # Get the outputs
800
+ vertex_outputs = []
801
+ for vertex in self.vertices:
802
+ if not vertex.built:
803
+ continue
804
+ if vertex is None:
805
+ msg = f"Vertex {vertex_id} not found"
806
+ raise ValueError(msg)
807
+
808
+ if not vertex.result and not stream and hasattr(vertex, "consume_async_generator"):
809
+ await vertex.consume_async_generator()
810
+ if (not outputs and vertex.is_output) or (vertex.display_name in outputs or vertex.id in outputs):
811
+ vertex_outputs.append(vertex.result)
812
+
813
+ return vertex_outputs
814
+
815
    async def arun(
        self,
        inputs: list[dict[str, str]],
        *,
        inputs_components: list[list[str]] | None = None,
        types: list[InputType | None] | None = None,
        outputs: list[str] | None = None,
        session_id: str | None = None,
        stream: bool = False,
        fallback_to_env_vars: bool = False,
        event_manager: EventManager | None = None,
    ) -> list[RunOutputs]:
        """Runs the graph with the given inputs.

        Args:
            inputs (list[Dict[str, str]]): The input values for the graph.
            inputs_components (Optional[list[list[str]]], optional): Components to run for the inputs. Defaults to None.
            types (Optional[list[Optional[InputType]]], optional): The types of the inputs. Defaults to None.
            outputs (Optional[list[str]], optional): The outputs to retrieve from the graph. Defaults to None.
            session_id (Optional[str], optional): The session ID for the graph. Defaults to None.
            stream (bool, optional): Whether to stream the results or not. Defaults to False.
            fallback_to_env_vars (bool, optional): Whether to fallback to environment variables. Defaults to False.
            event_manager (EventManager | None): The event manager for the graph.

        Returns:
            List[RunOutputs]: The outputs of the graph.
        """
        # inputs is {"message": "Hello, world!"}
        # we need to go through self.inputs and update the self.raw_params
        # of the vertices that are inputs
        # if the value is a list, we need to run multiple times
        vertex_outputs = []
        if not isinstance(inputs, list):
            inputs = [inputs]
        elif not inputs:
            inputs = [{}]
        # Length of all should be the as inputs length
        # just add empty lists to complete the length
        # (this padding makes the strict zip below safe)
        if inputs_components is None:
            inputs_components = []
        for _ in range(len(inputs) - len(inputs_components)):
            inputs_components.append([])
        if types is None:
            types = []
        if session_id:
            self.session_id = session_id
        for _ in range(len(inputs) - len(types)):
            types.append("chat")  # default to chat
        # One _run per input set; all three lists were padded to equal length above.
        for run_inputs, components, input_type in zip(inputs, inputs_components, types, strict=True):
            run_outputs = await self._run(
                inputs=run_inputs,
                input_components=components,
                input_type=input_type,
                outputs=outputs or [],
                stream=stream,
                session_id=session_id or "",
                fallback_to_env_vars=fallback_to_env_vars,
                event_manager=event_manager,
            )
            run_output_object = RunOutputs(inputs=run_inputs, outputs=run_outputs)
            await logger.adebug(f"Run outputs: {run_output_object}")
            vertex_outputs.append(run_output_object)
        return vertex_outputs
878
+
879
+ def next_vertex_to_build(self):
880
+ """Returns the next vertex to be built.
881
+
882
+ Yields:
883
+ str: The ID of the next vertex to be built.
884
+ """
885
+ yield from chain.from_iterable(self.vertices_layers)
886
+
887
+ @property
888
+ def metadata(self):
889
+ """The metadata of the graph.
890
+
891
+ Returns:
892
+ dict: The metadata of the graph.
893
+ """
894
+ time_format = "%Y-%m-%d %H:%M:%S %Z"
895
+ return {
896
+ "start_time": self._start_time.strftime(time_format),
897
+ "end_time": self._end_time.strftime(time_format),
898
+ "time_elapsed": f"{(self._end_time - self._start_time).total_seconds()} seconds",
899
+ "flow_id": self.flow_id,
900
+ "flow_name": self.flow_name,
901
+ }
902
+
903
+ def build_graph_maps(self, edges: list[CycleEdge] | None = None, vertices: list[Vertex] | None = None) -> None:
904
+ """Builds the adjacency maps for the graph."""
905
+ if edges is None:
906
+ edges = self.edges
907
+
908
+ if vertices is None:
909
+ vertices = self.vertices
910
+
911
+ self.predecessor_map, self.successor_map = self.build_adjacency_maps(edges)
912
+
913
+ self.in_degree_map = self.build_in_degree(edges)
914
+ self.parent_child_map = self.build_parent_child_map(vertices)
915
+
916
+ def reset_inactivated_vertices(self) -> None:
917
+ """Resets the inactivated vertices in the graph."""
918
+ for vertex_id in self.inactivated_vertices.copy():
919
+ self.mark_vertex(vertex_id, "ACTIVE")
920
+ self.inactivated_vertices = set()
921
+ self.inactivated_vertices = set()
922
+
923
+ def mark_all_vertices(self, state: str) -> None:
924
+ """Marks all vertices in the graph."""
925
+ for vertex in self.vertices:
926
+ vertex.set_state(state)
927
+
928
+ def mark_vertex(self, vertex_id: str, state: str) -> None:
929
+ """Marks a vertex in the graph."""
930
+ vertex = self.get_vertex(vertex_id)
931
+ vertex.set_state(state)
932
+ if state == VertexStates.INACTIVE:
933
+ self.run_manager.remove_from_predecessors(vertex_id)
934
+
935
    def _mark_branch(
        self, vertex_id: str, state: str, visited: set | None = None, output_name: str | None = None
    ) -> set:
        """Recursively mark a branch of the graph with ``state``.

        Note: on the initial call (``visited is None``) the root vertex itself is
        NOT marked — only descendants are; every recursive call marks its vertex.

        Args:
            vertex_id: The vertex at the root of the branch.
            state: The state to apply to descendants.
            visited: Accumulator of already-visited vertex IDs; created on first call.
            output_name: When set, only first-level children connected through this
                output are followed.

        Returns:
            set: The IDs of the vertices visited.
        """
        if visited is None:
            visited = set()
        else:
            self.mark_vertex(vertex_id, state)
        if vertex_id in visited:
            return visited
        visited.add(vertex_id)

        for child_id in self.parent_child_map[vertex_id]:
            # Only child_id that have an edge with the vertex_id through the output_name
            # should be marked
            if output_name:
                edge = self.get_edge(vertex_id, child_id)
                if edge and edge.source_handle.name != output_name:
                    continue
            # output_name deliberately filters only the first level; deeper
            # descendants are marked unconditionally (output_name is not passed on).
            self._mark_branch(child_id, state, visited)
        return visited
956
+
957
    def mark_branch(self, vertex_id: str, state: str, output_name: str | None = None) -> None:
        """Mark the branch rooted at ``vertex_id`` with ``state`` and refresh run state.

        Rebuilds the predecessor map restricted to the visited vertices. For a
        vertex inside a cycle, only dependencies that are also in the cycle and
        have already run at least once are kept, so the cycle can keep iterating.
        """
        visited = self._mark_branch(vertex_id=vertex_id, state=state, output_name=output_name)
        new_predecessor_map, _ = self.build_adjacency_maps(self.edges)
        # Keep only predecessors for vertices that are part of the marked branch.
        new_predecessor_map = {k: v for k, v in new_predecessor_map.items() if k in visited}
        if vertex_id in self.cycle_vertices:
            # Remove dependencies that are not in the cycle and have run at least once
            new_predecessor_map = {
                k: [dep for dep in v if dep in self.cycle_vertices and dep in self.run_manager.ran_at_least_once]
                for k, v in new_predecessor_map.items()
            }
        self.run_manager.update_run_state(
            run_predecessors=new_predecessor_map,
            vertices_to_run=self.vertices_to_run,
        )
971
+
972
+ def exclude_branch_conditionally(self, vertex_id: str, output_name: str | None = None) -> None:
973
+ """Marks a branch as conditionally excluded (for conditional routing).
974
+
975
+ This system is separate from the ACTIVE/INACTIVE state used for cycle management:
976
+ - ACTIVE/INACTIVE: Reset after each cycle iteration to allow cycles to continue
977
+ - Conditional exclusion: Persists until explicitly cleared by the same source vertex
978
+
979
+ Used by ConditionalRouter to ensure only one branch executes per condition evaluation.
980
+ If this vertex has previously excluded branches, they are cleared first to allow
981
+ re-evaluation on subsequent iterations (e.g., in cycles where condition may change).
982
+
983
+ Args:
984
+ vertex_id: The source vertex making the exclusion decision
985
+ output_name: The output name to follow when excluding downstream vertices
986
+ """
987
+ # Clear any previous exclusions from this source vertex
988
+ if vertex_id in self.conditional_exclusion_sources:
989
+ previous_exclusions = self.conditional_exclusion_sources[vertex_id]
990
+ self.conditionally_excluded_vertices -= previous_exclusions
991
+ del self.conditional_exclusion_sources[vertex_id]
992
+
993
+ # Now exclude the new branch
994
+ visited: set[str] = set()
995
+ excluded: set[str] = set()
996
+ self._exclude_branch_conditionally(vertex_id, visited, excluded, output_name, skip_first=True)
997
+
998
+ # Track which vertices this source excluded
999
+ if excluded:
1000
+ self.conditional_exclusion_sources[vertex_id] = excluded
1001
+
1002
    def _exclude_branch_conditionally(
        self, vertex_id: str, visited: set, excluded: set, output_name: str | None = None, *, skip_first: bool = False
    ) -> None:
        """Recursively excludes vertices in a branch for conditional routing.

        Args:
            vertex_id: The current vertex in the walk.
            visited: Accumulator of visited vertex IDs (cycle guard).
            excluded: Accumulator of vertex IDs excluded by this walk.
            output_name: Output to follow at the router level only.
            skip_first: True for the root call so the router itself is not excluded.
        """
        if vertex_id in visited:
            return
        visited.add(vertex_id)

        # Don't exclude the first vertex (the router itself)
        if not skip_first:
            self.conditionally_excluded_vertices.add(vertex_id)
            excluded.add(vertex_id)

        for child_id in self.parent_child_map[vertex_id]:
            # If we're at the router (skip_first=True) and have an output_name,
            # only follow edges from that specific output
            if skip_first and output_name:
                edge = self.get_edge(vertex_id, child_id)
                if edge and edge.source_handle.name != output_name:
                    continue
            # After the first level, exclude all descendants
            self._exclude_branch_conditionally(child_id, visited, excluded, output_name=None, skip_first=False)
1024
+
1025
+ def get_edge(self, source_id: str, target_id: str) -> CycleEdge | None:
1026
+ """Returns the edge between two vertices."""
1027
+ for edge in self.edges:
1028
+ if edge.source_id == source_id and edge.target_id == target_id:
1029
+ return edge
1030
+ return None
1031
+
1032
+ def build_parent_child_map(self, vertices: list[Vertex]):
1033
+ parent_child_map = defaultdict(list)
1034
+ for vertex in vertices:
1035
+ parent_child_map[vertex.id] = [child.id for child in self.get_successors(vertex)]
1036
+ return parent_child_map
1037
+
1038
+ def increment_run_count(self) -> None:
1039
+ self._runs += 1
1040
+
1041
+ def increment_update_count(self) -> None:
1042
+ self._updates += 1
1043
+
1044
    def __getstate__(self):
        """Return the picklable run state of the graph.

        Services (state manager, tracing) are singletons and are deliberately
        omitted; they are re-acquired lazily after unpickling (see __setstate__).
        """
        # Get all attributes that are useful in runs.
        # We don't need to save the state_manager because it is
        # a singleton and it is not necessary to save it
        return {
            "vertices": self.vertices,
            "edges": self.edges,
            "flow_id": self.flow_id,
            "flow_name": self.flow_name,
            "description": self.description,
            "user_id": self.user_id,
            "raw_graph_data": self.raw_graph_data,
            "top_level_vertices": self.top_level_vertices,
            "inactivated_vertices": self.inactivated_vertices,
            # run_manager is serialized to a dict; __setstate__ accepts either form.
            "run_manager": self.run_manager.to_dict(),
            "_run_id": self._run_id,
            "in_degree_map": self.in_degree_map,
            "parent_child_map": self.parent_child_map,
            "predecessor_map": self.predecessor_map,
            "successor_map": self.successor_map,
            "activated_vertices": self.activated_vertices,
            "vertices_layers": self.vertices_layers,
            "vertices_to_run": self.vertices_to_run,
            "stop_vertex": self.stop_vertex,
            "_run_queue": self._run_queue,
            "_first_layer": self._first_layer,
            "_vertices": self._vertices,
            "_edges": self._edges,
            "_is_input_vertices": self._is_input_vertices,
            "_is_output_vertices": self._is_output_vertices,
            "has_session_id_vertices": self.has_session_id_vertices,
            "_sorted_vertices_layers": self._sorted_vertices_layers,
        }
1077
+
1078
    def __deepcopy__(self, memo):
        """Deep-copy the graph, preserving start/end components when both are set."""
        # Check if we've already copied this instance
        if id(self) in memo:
            return memo[id(self)]

        if self._start is not None and self._end is not None:
            # Deep copy start and end components
            start_copy = copy.deepcopy(self._start, memo)
            end_copy = copy.deepcopy(self._end, memo)
            new_graph = type(self)(
                start_copy,
                end_copy,
                copy.deepcopy(self.flow_id, memo),
                copy.deepcopy(self.flow_name, memo),
                copy.deepcopy(self.user_id, memo),
            )
        else:
            # Create a new graph without start and end, but copy flow_id, flow_name, and user_id
            new_graph = type(self)(
                None,
                None,
                copy.deepcopy(self.flow_id, memo),
                copy.deepcopy(self.flow_name, memo),
                copy.deepcopy(self.user_id, memo),
            )
            # Deep copy vertices and edges
            new_graph.add_nodes_and_edges(copy.deepcopy(self._vertices, memo), copy.deepcopy(self._edges, memo))

        # Store the newly created object in memo
        memo[id(self)] = new_graph

        return new_graph
1110
+
1111
    def __setstate__(self, state):
        """Restore graph state from pickle, rebuilding the vertex map and run ID.

        Accepts ``run_manager`` either as an object or as its serialized dict form
        (matching what __getstate__ produces).
        """
        run_manager = state["run_manager"]
        if isinstance(run_manager, RunnableVerticesManager):
            state["run_manager"] = run_manager
        else:
            state["run_manager"] = RunnableVerticesManager.from_dict(run_manager)
        self.__dict__.update(state)
        # vertex_map is derived state, so it is rebuilt rather than pickled.
        self.vertex_map = {vertex.id: vertex for vertex in self.vertices}
        # Tracing service will be lazily initialized via property when needed
        self.set_run_id(self._run_id)
1121
+
1122
    @classmethod
    def from_payload(
        cls,
        payload: dict,
        flow_id: str | None = None,
        flow_name: str | None = None,
        user_id: str | None = None,
        context: dict | None = None,
    ) -> Graph:
        """Creates a graph from a payload.

        Args:
            payload: The payload to create the graph from.
            flow_id: The ID of the flow.
            flow_name: The flow name.
            user_id: The user ID.
            context: Optional context dictionary for request-specific data.

        Returns:
            Graph: The created graph.

        Raises:
            ValueError: If the payload is missing the 'nodes'/'edges' keys or
                graph construction fails.
        """
        # Unwrap the API envelope: payloads may nest nodes/edges under "data".
        if "data" in payload:
            payload = payload["data"]
        try:
            vertices = payload["nodes"]
            edges = payload["edges"]
            graph = cls(flow_id=flow_id, flow_name=flow_name, user_id=user_id, context=context)
            graph.add_nodes_and_edges(vertices, edges)
        except KeyError as exc:
            logger.exception(exc)
            if "nodes" not in payload and "edges" not in payload:
                msg = f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}"
                raise ValueError(msg) from exc

            msg = f"Error while creating graph from payload: {exc}"
            raise ValueError(msg) from exc
        else:
            return graph
1160
+
1161
+ def __eq__(self, /, other: object) -> bool:
1162
+ if not isinstance(other, Graph):
1163
+ return False
1164
+ return self.__repr__() == other.__repr__()
1165
+
1166
    # To update this graph from another graph: compare the __repr__ of each vertex,
    # and when a vertex's __repr__ differs from its counterpart's, copy the
    # counterpart's .data into this graph's vertex. Both graphs have the same
    # vertices and edges, but the data of the vertices might differ.
1171
+
1172
+ def update_edges_from_vertex(self, other_vertex: Vertex) -> None:
1173
+ """Updates the edges of a vertex in the Graph."""
1174
+ new_edges = []
1175
+ for edge in self.edges:
1176
+ if other_vertex.id in {edge.source_id, edge.target_id}:
1177
+ continue
1178
+ new_edges.append(edge)
1179
+ new_edges += other_vertex.edges
1180
+ self.edges = new_edges
1181
+
1182
+ def vertex_data_is_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool:
1183
+ data_is_equivalent = vertex == other_vertex
1184
+ if not data_is_equivalent:
1185
+ return False
1186
+ return self.vertex_edges_are_identical(vertex, other_vertex)
1187
+
1188
+ @staticmethod
1189
+ def vertex_edges_are_identical(vertex: Vertex, other_vertex: Vertex) -> bool:
1190
+ same_length = len(vertex.edges) == len(other_vertex.edges)
1191
+ if not same_length:
1192
+ return False
1193
+ return all(edge in other_vertex.edges for edge in vertex.edges)
1194
+
1195
    def update(self, other: Graph) -> Graph:
        """Synchronize this graph with ``other``: remove stale vertices, add new
        ones (then their edges), and refresh any vertex whose data changed.

        Returns:
            Graph: This graph, updated in place.
        """
        # Existing vertices in self graph
        existing_vertex_ids = {vertex.id for vertex in self.vertices}
        # Vertex IDs in the other graph
        other_vertex_ids = set(other.vertex_map.keys())

        # Find vertices that are in other but not in self (new vertices)
        new_vertex_ids = other_vertex_ids - existing_vertex_ids

        # Find vertices that are in self but not in other (removed vertices)
        removed_vertex_ids = existing_vertex_ids - other_vertex_ids

        # Remove vertices that are not in the other graph
        for vertex_id in removed_vertex_ids:
            with contextlib.suppress(ValueError):
                self.remove_vertex(vertex_id)

        # The order here matters because adding the vertex is required
        # if any of them have edges that point to any of the new vertices
        # By adding them first, then adding the edges, we ensure that the
        # edges have valid vertices to point to

        # Add new vertices
        for vertex_id in new_vertex_ids:
            new_vertex = other.get_vertex(vertex_id)
            self._add_vertex(new_vertex)

        # Now update the edges
        for vertex_id in new_vertex_ids:
            new_vertex = other.get_vertex(vertex_id)
            self._update_edges(new_vertex)
            # Graph is set at the end because the edges come from the graph
            # and the other graph is where the new edges and vertices come from
            new_vertex.graph = self

        # Update existing vertices that have changed
        for vertex_id in existing_vertex_ids.intersection(other_vertex_ids):
            self_vertex = self.get_vertex(vertex_id)
            other_vertex = other.get_vertex(vertex_id)
            # If the vertices are not identical, update the vertex
            if not self.vertex_data_is_identical(self_vertex, other_vertex):
                self.update_vertex_from_another(self_vertex, other_vertex)

        self.build_graph_maps()
        self.define_vertices_lists()
        self.increment_update_count()
        return self
1242
+
1243
    def update_vertex_from_another(self, vertex: Vertex, other_vertex: Vertex) -> None:
        """Updates a vertex from another vertex.

        Args:
            vertex (Vertex): The vertex to be updated.
            other_vertex (Vertex): The vertex to update from.
        """
        vertex.full_data = other_vertex.full_data
        vertex.parse_data()
        # Now we update the edges of the vertex
        self.update_edges_from_vertex(other_vertex)
        # Params are rebuilt from scratch from the new data.
        vertex.params = {}
        vertex.build_params()
        vertex.graph = self
        # If the vertex is frozen, we don't want
        # to reset the results nor the built attribute
        if not vertex.frozen:
            vertex.built = False
            vertex.result = None
            vertex.artifacts = {}
            vertex.set_top_level(self.top_level_vertices)
        self.reset_all_edges_of_vertex(vertex)
1265
+
1266
+ def reset_all_edges_of_vertex(self, vertex: Vertex) -> None:
1267
+ """Resets all the edges of a vertex."""
1268
+ for edge in vertex.edges:
1269
+ for vid in [edge.source_id, edge.target_id]:
1270
+ if vid in self.vertex_map:
1271
+ vertex_ = self.vertex_map[vid]
1272
+ if not vertex_.frozen:
1273
+ vertex_.build_params()
1274
+
1275
+ def _add_vertex(self, vertex: Vertex) -> None:
1276
+ """Adds a vertex to the graph."""
1277
+ self.vertices.append(vertex)
1278
+ self.vertex_map[vertex.id] = vertex
1279
+
1280
+ def add_vertex(self, vertex: Vertex) -> None:
1281
+ """Adds a new vertex to the graph."""
1282
+ self._add_vertex(vertex)
1283
+ self._update_edges(vertex)
1284
+
1285
+ def _update_edges(self, vertex: Vertex) -> None:
1286
+ """Updates the edges of a vertex."""
1287
+ # Vertex has edges, so we need to update the edges
1288
+ for edge in vertex.edges:
1289
+ if edge not in self.edges and edge.source_id in self.vertex_map and edge.target_id in self.vertex_map:
1290
+ self.edges.append(edge)
1291
+
1292
    def _build_graph(self) -> None:
        """Builds the graph from the vertices and edges.

        Order matters: vertices and edges are materialized first, then params are
        built, components instantiated, caching adjusted for cycles and
        Listen/Notify components, and finally cycle membership is registered.
        """
        self.vertices = self._build_vertices()
        self.vertex_map = {vertex.id: vertex for vertex in self.vertices}
        self.edges = self._build_edges()

        # This is a hack to make sure that the LLM vertex is sent to
        # the toolkit vertex
        self._build_vertex_params()
        self._instantiate_components_in_vertices()
        self._set_cache_to_vertices_in_cycle()
        self._set_cache_if_listen_notify_components()
        # Let the run manager know which vertices participate in cycles.
        for vertex in self.vertices:
            if vertex.id in self.cycle_vertices:
                self.run_manager.add_to_cycle_vertices(vertex.id)
1307
+
1308
+ def _get_edges_as_list_of_tuples(self) -> list[tuple[str, str]]:
1309
+ """Returns the edges of the graph as a list of tuples.
1310
+
1311
+ Each tuple contains the source and target handle IDs from the edge data.
1312
+
1313
+ Returns:
1314
+ list[tuple[str, str]]: List of (source_id, target_id) tuples representing graph edges.
1315
+ """
1316
+ return [(e["data"]["sourceHandle"]["id"], e["data"]["targetHandle"]["id"]) for e in self._edges]
1317
+
1318
+ def _set_cache_if_listen_notify_components(self) -> None:
1319
+ """Disables caching for all vertices if Listen/Notify components are present.
1320
+
1321
+ If the graph contains any Listen or Notify components, caching is disabled for all vertices
1322
+ by setting cache=False on their outputs. This ensures proper handling of real-time
1323
+ communication between components.
1324
+ """
1325
+ has_listen_or_notify_component = any(
1326
+ vertex.id.split("-")[0] in {"Listen", "Notify"} for vertex in self.vertices
1327
+ )
1328
+ if has_listen_or_notify_component:
1329
+ for vertex in self.vertices:
1330
+ vertex.apply_on_outputs(lambda output_object: setattr(output_object, "cache", False))
1331
+
1332
+ def _set_cache_to_vertices_in_cycle(self) -> None:
1333
+ """Sets the cache to the vertices in cycle."""
1334
+ edges = self._get_edges_as_list_of_tuples()
1335
+ cycle_vertices = set(find_cycle_vertices(edges))
1336
+ for vertex in self.vertices:
1337
+ if vertex.id in cycle_vertices:
1338
+ vertex.apply_on_outputs(lambda output_object: setattr(output_object, "cache", False))
1339
+
1340
+ def _instantiate_components_in_vertices(self) -> None:
1341
+ """Instantiates the components in the vertices."""
1342
+ for vertex in self.vertices:
1343
+ vertex.instantiate_component(self.user_id)
1344
+
1345
+ def remove_vertex(self, vertex_id: str) -> None:
1346
+ """Removes a vertex from the graph."""
1347
+ vertex = self.get_vertex(vertex_id)
1348
+ if vertex is None:
1349
+ return
1350
+ self.vertices.remove(vertex)
1351
+ self.vertex_map.pop(vertex_id)
1352
+ self.edges = [edge for edge in self.edges if vertex_id not in {edge.source_id, edge.target_id}]
1353
+
1354
+ def _build_vertex_params(self) -> None:
1355
+ """Identifies and handles the LLM vertex within the graph."""
1356
+ for vertex in self.vertices:
1357
+ vertex.build_params()
1358
+
1359
+ def _validate_vertex(self, vertex: Vertex) -> bool:
1360
+ """Validates a vertex."""
1361
+ # All vertices that do not have edges are invalid
1362
+ return len(self.get_vertex_edges(vertex.id)) > 0
1363
+
1364
+ def get_vertex(self, vertex_id: str) -> Vertex:
1365
+ """Returns a vertex by id."""
1366
+ try:
1367
+ return self.vertex_map[vertex_id]
1368
+ except KeyError as e:
1369
+ msg = f"Vertex {vertex_id} not found"
1370
+ raise ValueError(msg) from e
1371
+
1372
    def get_root_of_group_node(self, vertex_id: str) -> Vertex:
        """Returns the root of a group node.

        The root is the member vertex of the group none of whose direct
        successors also belongs to the group.

        Raises:
            ValueError: If ``vertex_id`` is not a top-level vertex or no root is found.
        """
        if vertex_id in self.top_level_vertices:
            # Get all vertices with vertex_id as .parent_node_id
            # then get the one at the top
            vertices = [vertex for vertex in self.vertices if vertex.parent_node_id == vertex_id]
            # Now go through successors of the vertices
            # and get the one that none of its successors is in vertices
            for vertex in vertices:
                # recursive=False: only direct successors matter for root detection.
                successors = self.get_all_successors(vertex, recursive=False)
                if not any(successor in vertices for successor in successors):
                    return vertex
        msg = f"Vertex {vertex_id} is not a top level vertex or no root vertex found"
        raise ValueError(msg)
1386
+
1387
+ def get_next_in_queue(self):
1388
+ if not self._run_queue:
1389
+ return None
1390
+ return self._run_queue.popleft()
1391
+
1392
+ def extend_run_queue(self, vertices: list[str]) -> None:
1393
+ self._run_queue.extend(vertices)
1394
+
1395
    async def astep(
        self,
        inputs: InputValueRequest | None = None,
        files: list[str] | None = None,
        user_id: str | None = None,
        event_manager: EventManager | None = None,
    ):
        """Build the next vertex in the run queue and advance the run state.

        Returns a ``Finish`` sentinel when the queue is empty (ending all traces);
        otherwise returns the vertex build result.

        Raises:
            ValueError: If the graph is not prepared or the queue yields no vertex.
        """
        if not self._prepared:
            msg = "Graph not prepared. Call prepare() first."
            raise ValueError(msg)
        if not self._run_queue:
            self._end_all_traces_async()
            return Finish()
        vertex_id = self.get_next_in_queue()
        if not vertex_id:
            msg = "No vertex to run"
            raise ValueError(msg)
        chat_service = get_chat_service()

        # Provide fallback cache functions if chat service is unavailable
        if chat_service is not None:
            get_cache_func = chat_service.get_cache
            set_cache_func = chat_service.set_cache
        else:
            # Fallback no-op cache functions for tests or when service unavailable
            async def get_cache_func(*args, **kwargs):  # noqa: ARG001
                return None

            async def set_cache_func(*args, **kwargs) -> bool:  # noqa: ARG001
                return True

        vertex_build_result = await self.build_vertex(
            vertex_id=vertex_id,
            user_id=user_id,
            inputs_dict=inputs.model_dump() if inputs and hasattr(inputs, "model_dump") else {},
            files=files,
            get_cache=get_cache_func,
            set_cache=set_cache_func,
            event_manager=event_manager,
        )

        next_runnable_vertices = await self.get_next_runnable_vertices(
            self.lock, vertex=vertex_build_result.vertex, cache=False
        )
        # A stop vertex short-circuits the remaining runnable set.
        if self.stop_vertex and self.stop_vertex in next_runnable_vertices:
            next_runnable_vertices = [self.stop_vertex]
        self.extend_run_queue(next_runnable_vertices)
        self.reset_inactivated_vertices()
        self.reset_activated_vertices()

        if chat_service is not None:
            await chat_service.set_cache(str(self.flow_id or self._run_id), self)
        self._record_snapshot(vertex_id)
        return vertex_build_result
1449
+
1450
+ def get_snapshot(self):
1451
+ return copy.deepcopy(
1452
+ {
1453
+ "run_manager": self.run_manager.to_dict(),
1454
+ "run_queue": self._run_queue,
1455
+ "vertices_layers": self.vertices_layers,
1456
+ "first_layer": self.first_layer,
1457
+ "inactive_vertices": self.inactive_vertices,
1458
+ "activated_vertices": self.activated_vertices,
1459
+ }
1460
+ )
1461
+
1462
+ def _record_snapshot(self, vertex_id: str | None = None) -> None:
1463
+ self._snapshots.append(self.get_snapshot())
1464
+ if vertex_id:
1465
+ self._call_order.append(vertex_id)
1466
+
1467
+ def step(
1468
+ self,
1469
+ inputs: InputValueRequest | None = None,
1470
+ files: list[str] | None = None,
1471
+ user_id: str | None = None,
1472
+ ):
1473
+ """Runs the next vertex in the graph.
1474
+
1475
+ Note:
1476
+ This function is a synchronous wrapper around `astep`.
1477
+ It creates an event loop if one does not exist.
1478
+
1479
+ Args:
1480
+ inputs: The inputs for the vertex. Defaults to None.
1481
+ files: The files for the vertex. Defaults to None.
1482
+ user_id: The user ID. Defaults to None.
1483
+ """
1484
+ return run_until_complete(self.astep(inputs, files, user_id))
1485
+
1486
    async def build_vertex(
        self,
        vertex_id: str,
        *,
        get_cache: GetCache | None = None,
        set_cache: SetCache | None = None,
        inputs_dict: dict[str, str] | None = None,
        files: list[str] | None = None,
        user_id: str | None = None,
        fallback_to_env_vars: bool = False,
        event_manager: EventManager | None = None,
    ) -> VertexBuildResult:
        """Builds a vertex in the graph.

        Frozen vertices are restored from cache when possible; anything else
        (including Loop components, which must iterate fresh data) is rebuilt.

        Args:
            vertex_id (str): The ID of the vertex to build.
            get_cache (GetCache): A coroutine to get the cache.
            set_cache (SetCache): A coroutine to set the cache.
            inputs_dict (Optional[Dict[str, str]]): Optional dictionary of inputs for the vertex. Defaults to None.
            files: (Optional[List[str]]): Optional list of files. Defaults to None.
            user_id (Optional[str]): Optional user ID. Defaults to None.
            fallback_to_env_vars (bool): Whether to fallback to environment variables. Defaults to False.
            event_manager (Optional[EventManager]): Optional event manager. Defaults to None.

        Returns:
            VertexBuildResult: The result dictionary, params string, validity flag,
            artifacts, and the built vertex.

        Raises:
            ValueError: If no result is found for the vertex.
        """
        vertex = self.get_vertex(vertex_id)
        self.run_manager.add_to_vertices_being_run(vertex_id)
        try:
            params = ""
            should_build = False
            # Loop components must always build, even when frozen,
            # because they need to iterate through their data
            is_loop_component = vertex.display_name == "Loop" or vertex.is_loop
            if not vertex.frozen or is_loop_component:
                should_build = True
            else:
                # Check the cache for the vertex
                if get_cache is not None:
                    cached_result = await get_cache(key=vertex.id)
                else:
                    # No cache accessor available: treat as a miss so the vertex rebuilds.
                    cached_result = CacheMiss()
                if isinstance(cached_result, CacheMiss):
                    should_build = True
                else:
                    try:
                        cached_vertex_dict = cached_result["result"]
                        # Restore the vertex's build outputs from the cached entry.
                        vertex.built = cached_vertex_dict["built"]
                        vertex.artifacts = cached_vertex_dict["artifacts"]
                        vertex.built_object = cached_vertex_dict["built_object"]
                        vertex.built_result = cached_vertex_dict["built_result"]
                        vertex.full_data = cached_vertex_dict["full_data"]
                        vertex.results = cached_vertex_dict["results"]
                        try:
                            vertex.finalize_build()

                            if vertex.result is not None:
                                # Mark the result so consumers know it came from the frozen cache.
                                vertex.result.used_frozen_result = True
                        except Exception:  # noqa: BLE001
                            # Restoring from cache failed; fall back to a fresh build.
                            logger.debug("Error finalizing build", exc_info=True)
                            should_build = True
                    except KeyError:
                        # Cached entry is missing expected keys; rebuild from scratch.
                        should_build = True

            if should_build:
                await vertex.build(
                    user_id=user_id,
                    inputs=inputs_dict,
                    fallback_to_env_vars=fallback_to_env_vars,
                    files=files,
                    event_manager=event_manager,
                )
                if set_cache is not None:
                    vertex_dict = {
                        "built": vertex.built,
                        "results": vertex.results,
                        "artifacts": vertex.artifacts,
                        "built_object": vertex.built_object,
                        "built_result": vertex.built_result,
                        "full_data": vertex.full_data,
                    }

                    await set_cache(key=vertex.id, data=vertex_dict)

        except Exception as exc:
            # ComponentBuildError already carries a formatted message; avoid double-logging.
            if not isinstance(exc, ComponentBuildError):
                await logger.aexception("Error building Component")
            raise

        if vertex.result is not None:
            params = f"{vertex.built_object_repr()}{params}"
            valid = True
            result_dict = vertex.result
            artifacts = vertex.artifacts
        else:
            msg = f"Error building Component: no result found for vertex {vertex_id}"
            raise ValueError(msg)

        return VertexBuildResult(
            result_dict=result_dict, params=params, valid=valid, artifacts=artifacts, vertex=vertex
        )
1593
+
1594
+ def get_vertex_edges(
1595
+ self,
1596
+ vertex_id: str,
1597
+ *,
1598
+ is_target: bool | None = None,
1599
+ is_source: bool | None = None,
1600
+ ) -> list[CycleEdge]:
1601
+ """Returns a list of edges for a given vertex."""
1602
+ # The idea here is to return the edges that have the vertex_id as source or target
1603
+ # or both
1604
+ return [
1605
+ edge
1606
+ for edge in self.edges
1607
+ if (edge.source_id == vertex_id and is_source is not False)
1608
+ or (edge.target_id == vertex_id and is_target is not False)
1609
+ ]
1610
+
1611
+ def get_vertices_with_target(self, vertex_id: str) -> list[Vertex]:
1612
+ """Returns the vertices connected to a vertex."""
1613
+ vertices: list[Vertex] = []
1614
+ for edge in self.edges:
1615
+ if edge.target_id == vertex_id:
1616
+ vertex = self.get_vertex(edge.source_id)
1617
+ if vertex is None:
1618
+ continue
1619
+ vertices.append(vertex)
1620
+ return vertices
1621
+
1622
    async def process(
        self,
        *,
        fallback_to_env_vars: bool,
        start_component_id: str | None = None,
        event_manager: EventManager | None = None,
    ) -> Graph:
        """Processes the graph, running the vertices of each layer in parallel.

        Sorts the vertices starting from `start_component_id` (when given), then
        repeatedly builds every vertex of the current layer as concurrent tasks
        and collects the next runnable layer until nothing is left to run.

        Args:
            fallback_to_env_vars: Whether vertex builds may fall back to environment variables.
            start_component_id: Optional component id to start sorting from.
            event_manager: Optional event manager forwarded to vertex builds.

        Returns:
            Graph: self, after all layers have been processed.
        """
        has_webhook_component = "webhook" in start_component_id.lower() if start_component_id else False
        first_layer = self.sort_vertices(start_component_id=start_component_id)
        # Tracks how many times each vertex has been scheduled (used only for task names).
        vertex_task_run_count: dict[str, int] = {}
        to_process = deque(first_layer)
        layer_index = 0
        chat_service = get_chat_service()

        # Provide fallback cache functions if chat service is unavailable
        if chat_service is not None:
            get_cache_func = chat_service.get_cache
            set_cache_func = chat_service.set_cache
        else:
            # Fallback no-op cache functions for tests or when service unavailable
            async def get_cache_func(*args, **kwargs):  # noqa: ARG001
                return None

            # NOTE(review): returns None here, while the astep() fallback returns True — confirm
            # callers ignore set_cache's return value.
            async def set_cache_func(*args, **kwargs):
                pass

        await self.initialize_run()
        lock = asyncio.Lock()
        while to_process:
            current_batch = list(to_process)  # Copy current deque items to a list
            to_process.clear()  # Clear the deque for new items
            tasks = []
            for vertex_id in current_batch:
                vertex = self.get_vertex(vertex_id)
                task = asyncio.create_task(
                    self.build_vertex(
                        vertex_id=vertex_id,
                        user_id=self.user_id,
                        inputs_dict={},
                        fallback_to_env_vars=fallback_to_env_vars,
                        get_cache=get_cache_func,
                        set_cache=set_cache_func,
                        event_manager=event_manager,
                    ),
                    # Task name encodes the vertex id; _execute_tasks parses it back out.
                    name=f"{vertex.id} Run {vertex_task_run_count.get(vertex_id, 0)}",
                )
                tasks.append(task)
                vertex_task_run_count[vertex_id] = vertex_task_run_count.get(vertex_id, 0) + 1

            await logger.adebug(f"Running layer {layer_index} with {len(tasks)} tasks, {current_batch}")
            try:
                next_runnable_vertices = await self._execute_tasks(
                    tasks, lock=lock, has_webhook_component=has_webhook_component
                )
            except Exception:
                await logger.aexception(f"Error executing tasks in layer {layer_index}")
                raise
            if not next_runnable_vertices:
                break
            to_process.extend(next_runnable_vertices)
            layer_index += 1

        await logger.adebug("Graph processing complete")
        return self
1687
+
1688
+ def find_next_runnable_vertices(self, vertex_successors_ids: list[str]) -> list[str]:
1689
+ """Determines the next set of runnable vertices from a list of successor vertex IDs.
1690
+
1691
+ For each successor, if it is not runnable, recursively finds its runnable
1692
+ predecessors; otherwise, includes the successor itself. Returns a sorted list of all such vertex IDs.
1693
+ """
1694
+ next_runnable_vertices = set()
1695
+ for v_id in sorted(vertex_successors_ids):
1696
+ if not self.is_vertex_runnable(v_id):
1697
+ next_runnable_vertices.update(self.find_runnable_predecessors_for_successor(v_id))
1698
+ else:
1699
+ next_runnable_vertices.add(v_id)
1700
+
1701
+ return sorted(next_runnable_vertices)
1702
+
1703
    async def get_next_runnable_vertices(self, lock: asyncio.Lock, vertex: Vertex, *, cache: bool = True) -> list[str]:
        """Determines the next set of runnable vertex IDs after a vertex completes execution.

        If the completed vertex is a state vertex, any recently activated state vertices are also included.
        Updates the run manager to reflect the new runnable state and optionally caches the updated graph state.

        Args:
            lock: An asyncio lock for thread-safe updates.
            vertex: The vertex that has just finished execution.
            cache: If True, caches the updated graph state.

        Returns:
            A list of vertex IDs that are ready to be executed next.
        """
        v_id = vertex.id
        v_successors_ids = vertex.successors_ids
        self.run_manager.ran_at_least_once.add(v_id)
        async with lock:
            # The finished vertex must not be scheduled again.
            self.run_manager.remove_vertex_from_runnables(v_id)
            next_runnable_vertices = self.find_next_runnable_vertices(v_successors_ids)

            for next_v_id in set(next_runnable_vertices):  # Use set to avoid duplicates
                if next_v_id == v_id:
                    # Never re-run the vertex that just finished.
                    next_runnable_vertices.remove(v_id)
                else:
                    self.run_manager.add_to_vertices_being_run(next_v_id)
            if cache and self.flow_id is not None:
                # Persist the whole graph under the flow id while still holding the lock.
                set_cache_coro = partial(get_chat_service().set_cache, key=self.flow_id)
                await set_cache_coro(data=self, lock=lock)
        if vertex.is_state:
            # State changes may have activated listeners; schedule them too.
            next_runnable_vertices.extend(self.activated_vertices)
        return next_runnable_vertices
1735
+
1736
    async def _log_vertex_build_from_exception(self, vertex_id: str, result: Exception) -> None:
        """Logs detailed information about a vertex build exception.

        Formats the exception message and stack trace, constructs an error output,
        and records the failure using the vertex build logging system.
        """
        if isinstance(result, ComponentBuildError):
            # ComponentBuildError already carries a formatted message and traceback.
            params = result.message
            tb = result.formatted_traceback
        else:
            from lfx.utils.exceptions import format_exception_message

            # NOTE(review): traceback.format_exc() formats the exception *currently being
            # handled*; when this coroutine is called outside an except block it yields
            # "NoneType: None" — confirm callers invoke it while handling `result`.
            tb = traceback.format_exc()
            await logger.aexception("Error building Component")

            params = format_exception_message(result)
        message = {"errorMessage": params, "stackTrace": tb}
        vertex = self.get_vertex(vertex_id)
        # Attach the error to the vertex's first declared output, or a generic label.
        output_label = vertex.outputs[0]["name"] if vertex.outputs else "output"
        outputs = {output_label: OutputValue(message=message, type="error")}
        # Shape mirrors a normal build's result payload, with empty/None fields.
        result_data_response = {
            "results": {},
            "outputs": outputs,
            "logs": {},
            "message": {},
            "artifacts": {},
            "timedelta": None,
            "duration": None,
            "used_frozen_result": False,
        }

        await log_vertex_build(
            flow_id=self.flow_id or "",
            vertex_id=vertex_id or "errors",
            valid=False,
            params=params,
            data=result_data_response,
            artifacts={},
        )
1775
+
1776
    async def _execute_tasks(
        self, tasks: list[asyncio.Task], lock: asyncio.Lock, *, has_webhook_component: bool = False
    ) -> list[str]:
        """Executes tasks in parallel, handling exceptions for each task.

        Args:
            tasks: List of tasks to execute
            lock: Async lock for synchronization
            has_webhook_component: Whether the graph has a webhook component

        Returns:
            De-duplicated list of vertex ids runnable after this batch.

        Raises:
            TypeError: If a task returns something other than a VertexBuildResult.
            Exception: Re-raises the first task exception encountered.
        """
        results = []
        completed_tasks = await asyncio.gather(*tasks, return_exceptions=True)
        vertices: list[Vertex] = []

        for i, result in enumerate(completed_tasks):
            task_name = tasks[i].get_name()
            # Task names are "<vertex_id> Run <n>" (set in process()); recover the id.
            vertex_id = tasks[i].get_name().split(" ")[0]

            if isinstance(result, Exception):
                await logger.aerror(f"Task {task_name} failed with exception: {result}")
                if has_webhook_component:
                    # Webhook flows persist the failure so the caller can inspect it.
                    await self._log_vertex_build_from_exception(vertex_id, result)

                # Cancel all remaining tasks
                for t in tasks[i + 1 :]:
                    t.cancel()
                raise result
            if isinstance(result, VertexBuildResult):
                if self.flow_id is not None:
                    await log_vertex_build(
                        flow_id=self.flow_id,
                        vertex_id=result.vertex.id,
                        valid=result.valid,
                        params=result.params,
                        data=result.result_dict,
                        artifacts=result.artifacts,
                    )

                vertices.append(result.vertex)
            else:
                msg = f"Invalid result from task {task_name}: {result}"
                raise TypeError(msg)

        for v in vertices:
            # set all executed vertices as non-runnable to not run them again.
            # they could be calculated as predecessor or successors of parallel vertices
            # This could usually happen with input vertices like ChatInput
            self.run_manager.remove_vertex_from_runnables(v.id)

            await logger.adebug(f"Vertex {v.id}, result: {v.built_result}, object: {v.built_object}")

        # Only after the whole batch is marked done, compute the next layer.
        for v in vertices:
            next_runnable_vertices = await self.get_next_runnable_vertices(lock, vertex=v, cache=False)
            results.extend(next_runnable_vertices)
        return list(set(results))
1831
+
1832
+ def topological_sort(self) -> list[Vertex]:
1833
+ """Performs a topological sort of the vertices in the graph.
1834
+
1835
+ Returns:
1836
+ List[Vertex]: A list of vertices in topological order.
1837
+
1838
+ Raises:
1839
+ ValueError: If the graph contains a cycle.
1840
+ """
1841
+ # States: 0 = unvisited, 1 = visiting, 2 = visited
1842
+ state = dict.fromkeys(self.vertices, 0)
1843
+ sorted_vertices = []
1844
+
1845
+ def dfs(vertex) -> None:
1846
+ if state[vertex] == 1:
1847
+ # We have a cycle
1848
+ msg = "Graph contains a cycle, cannot perform topological sort"
1849
+ raise ValueError(msg)
1850
+ if state[vertex] == 0:
1851
+ state[vertex] = 1
1852
+ for edge in vertex.edges:
1853
+ if edge.source_id == vertex.id:
1854
+ dfs(self.get_vertex(edge.target_id))
1855
+ state[vertex] = 2
1856
+ sorted_vertices.append(vertex)
1857
+
1858
+ # Visit each vertex
1859
+ for vertex in self.vertices:
1860
+ if state[vertex] == 0:
1861
+ dfs(vertex)
1862
+
1863
+ return list(reversed(sorted_vertices))
1864
+
1865
+ def generator_build(self) -> Generator[Vertex, None, None]:
1866
+ """Builds each vertex in the graph and yields it."""
1867
+ sorted_vertices = self.topological_sort()
1868
+ logger.debug("There are %s vertices in the graph", len(sorted_vertices))
1869
+ yield from sorted_vertices
1870
+
1871
+ def get_predecessors(self, vertex):
1872
+ """Returns the predecessors of a vertex."""
1873
+ return [self.get_vertex(source_id) for source_id in self.predecessor_map.get(vertex.id, [])]
1874
+
1875
+ def get_all_successors(self, vertex: Vertex, *, recursive=True, flat=True, visited=None):
1876
+ """Returns all successors of a given vertex, optionally recursively and as a flat or nested list.
1877
+
1878
+ Args:
1879
+ vertex: The vertex whose successors are to be retrieved.
1880
+ recursive: If True, retrieves successors recursively; otherwise, only immediate successors.
1881
+ flat: If True, returns a flat list of successors; if False, returns a nested list structure.
1882
+ visited: Internal set used to track visited vertices and prevent cycles.
1883
+
1884
+ Returns:
1885
+ A list of successor vertices, either flat or nested depending on the `flat` parameter.
1886
+ """
1887
+ if visited is None:
1888
+ visited = set()
1889
+
1890
+ # Prevent revisiting vertices to avoid infinite loops in cyclic graphs
1891
+ if vertex in visited:
1892
+ return []
1893
+
1894
+ visited.add(vertex)
1895
+
1896
+ successors = vertex.successors
1897
+ if not successors:
1898
+ return []
1899
+
1900
+ successors_result = []
1901
+
1902
+ for successor in successors:
1903
+ if recursive:
1904
+ next_successors = self.get_all_successors(successor, recursive=recursive, flat=flat, visited=visited)
1905
+ if flat:
1906
+ successors_result.extend(next_successors)
1907
+ else:
1908
+ successors_result.append(next_successors)
1909
+ if flat:
1910
+ successors_result.append(successor)
1911
+ else:
1912
+ successors_result.append([successor])
1913
+
1914
+ if not flat and successors_result:
1915
+ return [successors, *successors_result]
1916
+
1917
+ return successors_result
1918
+
1919
+ def get_successors(self, vertex: Vertex) -> list[Vertex]:
1920
+ """Returns the immediate successor vertices of the given vertex.
1921
+
1922
+ Args:
1923
+ vertex: The vertex whose successors are to be retrieved.
1924
+
1925
+ Returns:
1926
+ A list of vertices that are direct successors of the specified vertex.
1927
+ """
1928
+ return [self.get_vertex(target_id) for target_id in self.successor_map.get(vertex.id, set())]
1929
+
1930
+ def get_all_predecessors(self, vertex: Vertex, *, recursive: bool = True) -> list[Vertex]:
1931
+ """Retrieves all predecessor vertices of a given vertex.
1932
+
1933
+ If `recursive` is True, returns both direct and indirect predecessors by
1934
+ traversing the graph recursively. If False, returns only the immediate predecessors.
1935
+ """
1936
+ _predecessors = self.predecessor_map.get(vertex.id, [])
1937
+ predecessors = [self.get_vertex(v_id) for v_id in _predecessors]
1938
+ if recursive:
1939
+ for predecessor in _predecessors:
1940
+ predecessors.extend(self.get_all_predecessors(self.get_vertex(predecessor), recursive=recursive))
1941
+ else:
1942
+ predecessors.extend([self.get_vertex(predecessor) for predecessor in _predecessors])
1943
+ return predecessors
1944
+
1945
+ def get_vertex_neighbors(self, vertex: Vertex) -> dict[Vertex, int]:
1946
+ """Returns a dictionary mapping each direct neighbor of a vertex to the count of connecting edges.
1947
+
1948
+ A neighbor is any vertex directly connected to the input vertex, either as a source or target.
1949
+ The count reflects the number of edges between the input vertex and each neighbor.
1950
+ """
1951
+ neighbors: dict[Vertex, int] = {}
1952
+ for edge in self.edges:
1953
+ if edge.source_id == vertex.id:
1954
+ neighbor = self.get_vertex(edge.target_id)
1955
+ if neighbor is None:
1956
+ continue
1957
+ if neighbor not in neighbors:
1958
+ neighbors[neighbor] = 0
1959
+ neighbors[neighbor] += 1
1960
+ elif edge.target_id == vertex.id:
1961
+ neighbor = self.get_vertex(edge.source_id)
1962
+ if neighbor is None:
1963
+ continue
1964
+ if neighbor not in neighbors:
1965
+ neighbors[neighbor] = 0
1966
+ neighbors[neighbor] += 1
1967
+ return neighbors
1968
+
1969
+ @property
1970
+ def cycles(self):
1971
+ if self._cycles is None:
1972
+ if self._start is None:
1973
+ self._cycles = []
1974
+ else:
1975
+ entry_vertex = self._start.get_id()
1976
+ edges = [(e["data"]["sourceHandle"]["id"], e["data"]["targetHandle"]["id"]) for e in self._edges]
1977
+ self._cycles = find_all_cycle_edges(entry_vertex, edges)
1978
+ return self._cycles
1979
+
1980
+ @property
1981
+ def cycle_vertices(self):
1982
+ if self._cycle_vertices is None:
1983
+ edges = self._get_edges_as_list_of_tuples()
1984
+ self._cycle_vertices = set(find_cycle_vertices(edges))
1985
+ return self._cycle_vertices
1986
+
1987
+ def _build_edges(self) -> list[CycleEdge]:
1988
+ """Builds the edges of the graph."""
1989
+ # Edge takes two vertices as arguments, so we need to build the vertices first
1990
+ # and then build the edges
1991
+ # if we can't find a vertex, we raise an error
1992
+ edges: set[CycleEdge | Edge] = set()
1993
+ for edge in self._edges:
1994
+ new_edge = self.build_edge(edge)
1995
+ edges.add(new_edge)
1996
+ if self.vertices and not edges:
1997
+ logger.warning("Graph has vertices but no edges")
1998
+ return list(cast("Iterable[CycleEdge]", edges))
1999
+
2000
+ def build_edge(self, edge: EdgeData) -> CycleEdge | Edge:
2001
+ source = self.get_vertex(edge["source"])
2002
+ target = self.get_vertex(edge["target"])
2003
+
2004
+ if source is None:
2005
+ msg = f"Source vertex {edge['source']} not found"
2006
+ raise ValueError(msg)
2007
+ if target is None:
2008
+ msg = f"Target vertex {edge['target']} not found"
2009
+ raise ValueError(msg)
2010
+ if any(v in self.cycle_vertices for v in [source.id, target.id]):
2011
+ new_edge: CycleEdge | Edge = CycleEdge(source, target, edge)
2012
+ else:
2013
+ new_edge = Edge(source, target, edge)
2014
+ return new_edge
2015
+
2016
+ @staticmethod
2017
+ def _get_vertex_class(node_type: str, node_base_type: str, node_id: str) -> type[Vertex]:
2018
+ """Returns the node class based on the node type."""
2019
+ # First we check for the node_base_type
2020
+ node_name = node_id.split("-")[0]
2021
+ if node_name in InterfaceComponentTypes or node_type in InterfaceComponentTypes:
2022
+ return InterfaceVertex
2023
+ if node_name in {"SharedState", "Notify", "Listen"}:
2024
+ return StateVertex
2025
+ if node_base_type in lazy_load_vertex_dict.vertex_type_map:
2026
+ return lazy_load_vertex_dict.vertex_type_map[node_base_type]
2027
+ if node_name in lazy_load_vertex_dict.vertex_type_map:
2028
+ return lazy_load_vertex_dict.vertex_type_map[node_name]
2029
+
2030
+ if node_type in lazy_load_vertex_dict.vertex_type_map:
2031
+ return lazy_load_vertex_dict.vertex_type_map[node_type]
2032
+ return Vertex
2033
+
2034
+ def _build_vertices(self) -> list[Vertex]:
2035
+ """Builds the vertices of the graph."""
2036
+ vertices: list[Vertex] = []
2037
+ for frontend_data in self._vertices:
2038
+ if frontend_data.get("type") == NodeTypeEnum.NoteNode:
2039
+ continue
2040
+ try:
2041
+ vertex_instance = self.get_vertex(frontend_data["id"])
2042
+ except ValueError:
2043
+ vertex_instance = self._create_vertex(frontend_data)
2044
+ vertices.append(vertex_instance)
2045
+
2046
+ return vertices
2047
+
2048
+ def _create_vertex(self, frontend_data: NodeData):
2049
+ vertex_data = frontend_data["data"]
2050
+ vertex_type: str = vertex_data["type"]
2051
+ vertex_base_type: str = vertex_data["node"]["template"]["_type"]
2052
+ if "id" not in vertex_data:
2053
+ msg = f"Vertex data for {vertex_data['display_name']} does not contain an id"
2054
+ raise ValueError(msg)
2055
+
2056
+ vertex_class = self._get_vertex_class(vertex_type, vertex_base_type, vertex_data["id"])
2057
+
2058
+ vertex_instance = vertex_class(frontend_data, graph=self)
2059
+ vertex_instance.set_top_level(self.top_level_vertices)
2060
+ return vertex_instance
2061
+
2062
+ def prepare(self, stop_component_id: str | None = None, start_component_id: str | None = None):
2063
+ self.initialize()
2064
+ if stop_component_id and start_component_id:
2065
+ msg = "You can only provide one of stop_component_id or start_component_id"
2066
+ raise ValueError(msg)
2067
+
2068
+ if stop_component_id or start_component_id:
2069
+ try:
2070
+ first_layer = self.sort_vertices(stop_component_id, start_component_id)
2071
+ except Exception: # noqa: BLE001
2072
+ logger.exception("Error sorting vertices")
2073
+ first_layer = self.sort_vertices()
2074
+ else:
2075
+ first_layer = self.sort_vertices()
2076
+
2077
+ for vertex_id in first_layer:
2078
+ self.run_manager.add_to_vertices_being_run(vertex_id)
2079
+ if vertex_id in self.cycle_vertices:
2080
+ self.run_manager.add_to_cycle_vertices(vertex_id)
2081
+ self._first_layer = sorted(first_layer)
2082
+ self._run_queue = deque(self._first_layer)
2083
+ self._prepared = True
2084
+ self._record_snapshot()
2085
+ return self
2086
+
2087
+ @staticmethod
2088
+ def get_children_by_vertex_type(vertex: Vertex, vertex_type: str) -> list[Vertex]:
2089
+ """Returns the children of a vertex based on the vertex type."""
2090
+ children = []
2091
+ vertex_types = [vertex.data["type"]]
2092
+ if "node" in vertex.data:
2093
+ vertex_types += vertex.data["node"]["base_classes"]
2094
+ if vertex_type in vertex_types:
2095
+ children.append(vertex)
2096
+ return children
2097
+
2098
    def __repr__(self) -> str:
        """Return a multi-line summary listing the graph's vertex ids and edges."""
        vertex_ids = [vertex.id for vertex in self.vertices]
        # One "source --> target" line per edge.
        edges_repr = "\n".join([f"  {edge.source_id} --> {edge.target_id}" for edge in self.edges])

        return (
            f"Graph Representation:\n"
            f"----------------------\n"
            f"Vertices ({len(vertex_ids)}):\n"
            f"  {', '.join(map(str, vertex_ids))}\n\n"
            f"Edges ({len(self.edges)}):\n"
            f"{edges_repr}"
        )
2110
+
2111
+ def __hash__(self) -> int:
2112
+ """Return hash of the graph based on its string representation."""
2113
+ return hash(self.__repr__())
2114
+
2115
+ def get_vertex_predecessors_ids(self, vertex_id: str) -> list[str]:
2116
+ """Get the predecessor IDs of a vertex."""
2117
+ return [v.id for v in self.get_predecessors(self.get_vertex(vertex_id))]
2118
+
2119
+ def get_vertex_successors_ids(self, vertex_id: str) -> list[str]:
2120
+ """Get the successor IDs of a vertex."""
2121
+ return [v.id for v in self.get_vertex(vertex_id).successors]
2122
+
2123
+ def get_vertex_input_status(self, vertex_id: str) -> bool:
2124
+ """Check if a vertex is an input vertex."""
2125
+ return self.get_vertex(vertex_id).is_input
2126
+
2127
+ def get_parent_map(self) -> dict[str, str | None]:
2128
+ """Get the parent node map for all vertices."""
2129
+ return {vertex.id: vertex.parent_node_id for vertex in self.vertices}
2130
+
2131
+ def get_vertex_ids(self) -> list[str]:
2132
+ """Get all vertex IDs in the graph."""
2133
+ return [vertex.id for vertex in self.vertices]
2134
+
2135
+ def sort_vertices(
2136
+ self,
2137
+ stop_component_id: str | None = None,
2138
+ start_component_id: str | None = None,
2139
+ ) -> list[str]:
2140
+ """Sorts the vertices in the graph."""
2141
+ self.mark_all_vertices("ACTIVE")
2142
+
2143
+ first_layer, remaining_layers = get_sorted_vertices(
2144
+ vertices_ids=self.get_vertex_ids(),
2145
+ cycle_vertices=self.cycle_vertices,
2146
+ stop_component_id=stop_component_id,
2147
+ start_component_id=start_component_id,
2148
+ graph_dict=self.__to_dict(),
2149
+ in_degree_map=self.in_degree_map,
2150
+ successor_map=self.successor_map,
2151
+ predecessor_map=self.predecessor_map,
2152
+ is_input_vertex=self.get_vertex_input_status,
2153
+ get_vertex_predecessors=self.get_vertex_predecessors_ids,
2154
+ get_vertex_successors=self.get_vertex_successors_ids,
2155
+ is_cyclic=self.is_cyclic,
2156
+ )
2157
+
2158
+ self.increment_run_count()
2159
+ self._sorted_vertices_layers = [first_layer, *remaining_layers]
2160
+ self.vertices_layers = remaining_layers
2161
+ self.vertices_to_run = set(chain.from_iterable([first_layer, *remaining_layers]))
2162
+ self.build_run_map()
2163
+ self._first_layer = first_layer
2164
+ return first_layer
2165
+
2166
+ @staticmethod
2167
+ def sort_interface_components_first(vertices_layers: list[list[str]]) -> list[list[str]]:
2168
+ """Sorts the vertices in the graph so that vertices containing ChatInput or ChatOutput come first."""
2169
+
2170
+ def contains_interface_component(vertex):
2171
+ return any(component.value in vertex for component in InterfaceComponentTypes)
2172
+
2173
+ # Sort each inner list so that vertices containing ChatInput or ChatOutput come first
2174
+ return [
2175
+ sorted(
2176
+ inner_list,
2177
+ key=lambda vertex: not contains_interface_component(vertex),
2178
+ )
2179
+ for inner_list in vertices_layers
2180
+ ]
2181
+
2182
+ def sort_by_avg_build_time(self, vertices_layers: list[list[str]]) -> list[list[str]]:
2183
+ """Sorts the vertices in the graph so that vertices with the lowest average build time come first."""
2184
+
2185
+ def sort_layer_by_avg_build_time(vertices_ids: list[str]) -> list[str]:
2186
+ """Sorts the vertices in the graph so that vertices with the lowest average build time come first."""
2187
+ if len(vertices_ids) == 1:
2188
+ return vertices_ids
2189
+ vertices_ids.sort(key=lambda vertex_id: self.get_vertex(vertex_id).avg_build_time)
2190
+
2191
+ return vertices_ids
2192
+
2193
+ return [sort_layer_by_avg_build_time(layer) for layer in vertices_layers]
2194
+
2195
+ def is_vertex_runnable(self, vertex_id: str) -> bool:
2196
+ """Returns whether a vertex is runnable."""
2197
+ # Check if vertex is conditionally excluded (for conditional routing)
2198
+ if vertex_id in self.conditionally_excluded_vertices:
2199
+ return False
2200
+ is_active = self.get_vertex(vertex_id).is_active()
2201
+ is_loop = self.get_vertex(vertex_id).is_loop
2202
+ return self.run_manager.is_vertex_runnable(vertex_id, is_active=is_active, is_loop=is_loop)
2203
+
2204
+ def build_run_map(self) -> None:
2205
+ """Builds the run map for the graph.
2206
+
2207
+ This method is responsible for building the run map for the graph,
2208
+ which maps each node in the graph to its corresponding run function.
2209
+ """
2210
+ self.run_manager.build_run_map(predecessor_map=self.predecessor_map, vertices_to_run=self.vertices_to_run)
2211
+
2212
+ def find_runnable_predecessors_for_successors(self, vertex_id: str) -> list[str]:
2213
+ """For each successor of the current vertex, find runnable predecessors if any.
2214
+
2215
+ This checks the direct predecessors of each successor to identify any that are
2216
+ immediately runnable, expanding the search to ensure progress can be made.
2217
+ """
2218
+ runnable_vertices = []
2219
+ for successor_id in self.run_manager.run_map.get(vertex_id, []):
2220
+ runnable_vertices.extend(self.find_runnable_predecessors_for_successor(successor_id))
2221
+
2222
+ return sorted(runnable_vertices)
2223
+
2224
+ def find_runnable_predecessors_for_successor(self, vertex_id: str) -> list[str]:
2225
+ runnable_vertices = []
2226
+ visited = set()
2227
+
2228
+ def find_runnable_predecessors(predecessor_id: str) -> None:
2229
+ if predecessor_id in visited:
2230
+ return
2231
+ visited.add(predecessor_id)
2232
+
2233
+ if self.is_vertex_runnable(predecessor_id):
2234
+ runnable_vertices.append(predecessor_id)
2235
+ else:
2236
+ for pred_pred_id in self.run_manager.run_predecessors.get(predecessor_id, []):
2237
+ find_runnable_predecessors(pred_pred_id)
2238
+
2239
+ for predecessor_id in self.run_manager.run_predecessors.get(vertex_id, []):
2240
+ find_runnable_predecessors(predecessor_id)
2241
+ return runnable_vertices
2242
+
2243
+ def remove_from_predecessors(self, vertex_id: str) -> None:
2244
+ self.run_manager.remove_from_predecessors(vertex_id)
2245
+
2246
+ def remove_vertex_from_runnables(self, vertex_id: str) -> None:
2247
+ self.run_manager.remove_vertex_from_runnables(vertex_id)
2248
+
2249
+ def get_top_level_vertices(self, vertices_ids):
2250
+ """Retrieves the top-level vertices from the given graph based on the provided vertex IDs.
2251
+
2252
+ Args:
2253
+ vertices_ids (list): A list of vertex IDs.
2254
+
2255
+ Returns:
2256
+ list: A list of top-level vertex IDs.
2257
+
2258
+ """
2259
+ top_level_vertices = []
2260
+ for vertex_id in vertices_ids:
2261
+ vertex = self.get_vertex(vertex_id)
2262
+ if vertex.parent_is_top_level:
2263
+ top_level_vertices.append(vertex.parent_node_id)
2264
+ else:
2265
+ top_level_vertices.append(vertex_id)
2266
+ return top_level_vertices
2267
+
2268
+ def build_in_degree(self, edges: list[CycleEdge]) -> dict[str, int]:
2269
+ in_degree: dict[str, int] = defaultdict(int)
2270
+
2271
+ for edge in edges:
2272
+ # We don't need to count if a Component connects more than one
2273
+ # time to the same vertex.
2274
+ in_degree[edge.target_id] += 1
2275
+ for vertex in self.vertices:
2276
+ if vertex.id not in in_degree:
2277
+ in_degree[vertex.id] = 0
2278
+ return in_degree
2279
+
2280
+ @staticmethod
2281
+ def build_adjacency_maps(edges: list[CycleEdge]) -> tuple[dict[str, list[str]], dict[str, list[str]]]:
2282
+ """Returns the adjacency maps for the graph."""
2283
+ predecessor_map: dict[str, list[str]] = defaultdict(list)
2284
+ successor_map: dict[str, list[str]] = defaultdict(list)
2285
+ for edge in edges:
2286
+ predecessor_map[edge.target_id].append(edge.source_id)
2287
+ successor_map[edge.source_id].append(edge.target_id)
2288
+ return predecessor_map, successor_map
2289
+
2290
+ def __to_dict(self) -> dict[str, dict[str, list[str]]]:
2291
+ """Converts the graph to a dictionary."""
2292
+ result: dict = {}
2293
+ for vertex in self.vertices:
2294
+ vertex_id = vertex.id
2295
+ sucessors = [i.id for i in self.get_all_successors(vertex)]
2296
+ predecessors = [i.id for i in self.get_predecessors(vertex)]
2297
+ result |= {vertex_id: {"successors": sucessors, "predecessors": predecessors}}
2298
+ return result