lfx-nightly 0.2.0.dev0__py3-none-any.whl → 0.2.0.dev26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188)
  1. lfx/_assets/component_index.json +1 -1
  2. lfx/base/agents/agent.py +13 -1
  3. lfx/base/agents/altk_base_agent.py +380 -0
  4. lfx/base/agents/altk_tool_wrappers.py +565 -0
  5. lfx/base/agents/events.py +2 -1
  6. lfx/base/composio/composio_base.py +159 -224
  7. lfx/base/data/base_file.py +88 -21
  8. lfx/base/data/storage_utils.py +192 -0
  9. lfx/base/data/utils.py +178 -14
  10. lfx/base/embeddings/embeddings_class.py +113 -0
  11. lfx/base/models/groq_constants.py +74 -58
  12. lfx/base/models/groq_model_discovery.py +265 -0
  13. lfx/base/models/model.py +1 -1
  14. lfx/base/models/model_utils.py +100 -0
  15. lfx/base/models/openai_constants.py +7 -0
  16. lfx/base/models/watsonx_constants.py +32 -8
  17. lfx/base/tools/run_flow.py +601 -129
  18. lfx/cli/commands.py +6 -3
  19. lfx/cli/common.py +2 -2
  20. lfx/cli/run.py +1 -1
  21. lfx/cli/script_loader.py +53 -11
  22. lfx/components/Notion/create_page.py +1 -1
  23. lfx/components/Notion/list_database_properties.py +1 -1
  24. lfx/components/Notion/list_pages.py +1 -1
  25. lfx/components/Notion/list_users.py +1 -1
  26. lfx/components/Notion/page_content_viewer.py +1 -1
  27. lfx/components/Notion/search.py +1 -1
  28. lfx/components/Notion/update_page_property.py +1 -1
  29. lfx/components/__init__.py +19 -5
  30. lfx/components/{agents → altk}/__init__.py +5 -9
  31. lfx/components/altk/altk_agent.py +193 -0
  32. lfx/components/apify/apify_actor.py +1 -1
  33. lfx/components/composio/__init__.py +70 -18
  34. lfx/components/composio/apollo_composio.py +11 -0
  35. lfx/components/composio/bitbucket_composio.py +11 -0
  36. lfx/components/composio/canva_composio.py +11 -0
  37. lfx/components/composio/coda_composio.py +11 -0
  38. lfx/components/composio/composio_api.py +10 -0
  39. lfx/components/composio/discord_composio.py +1 -1
  40. lfx/components/composio/elevenlabs_composio.py +11 -0
  41. lfx/components/composio/exa_composio.py +11 -0
  42. lfx/components/composio/firecrawl_composio.py +11 -0
  43. lfx/components/composio/fireflies_composio.py +11 -0
  44. lfx/components/composio/gmail_composio.py +1 -1
  45. lfx/components/composio/googlebigquery_composio.py +11 -0
  46. lfx/components/composio/googlecalendar_composio.py +1 -1
  47. lfx/components/composio/googledocs_composio.py +1 -1
  48. lfx/components/composio/googlemeet_composio.py +1 -1
  49. lfx/components/composio/googlesheets_composio.py +1 -1
  50. lfx/components/composio/googletasks_composio.py +1 -1
  51. lfx/components/composio/heygen_composio.py +11 -0
  52. lfx/components/composio/mem0_composio.py +11 -0
  53. lfx/components/composio/peopledatalabs_composio.py +11 -0
  54. lfx/components/composio/perplexityai_composio.py +11 -0
  55. lfx/components/composio/serpapi_composio.py +11 -0
  56. lfx/components/composio/slack_composio.py +3 -574
  57. lfx/components/composio/slackbot_composio.py +1 -1
  58. lfx/components/composio/snowflake_composio.py +11 -0
  59. lfx/components/composio/tavily_composio.py +11 -0
  60. lfx/components/composio/youtube_composio.py +2 -2
  61. lfx/components/cuga/__init__.py +34 -0
  62. lfx/components/cuga/cuga_agent.py +730 -0
  63. lfx/components/data/__init__.py +78 -28
  64. lfx/components/data_source/__init__.py +58 -0
  65. lfx/components/{data → data_source}/api_request.py +26 -3
  66. lfx/components/{data → data_source}/csv_to_data.py +15 -10
  67. lfx/components/{data → data_source}/json_to_data.py +15 -8
  68. lfx/components/{data → data_source}/news_search.py +1 -1
  69. lfx/components/{data → data_source}/rss.py +1 -1
  70. lfx/components/{data → data_source}/sql_executor.py +1 -1
  71. lfx/components/{data → data_source}/url.py +1 -1
  72. lfx/components/{data → data_source}/web_search.py +1 -1
  73. lfx/components/datastax/astradb_cql.py +1 -1
  74. lfx/components/datastax/astradb_graph.py +1 -1
  75. lfx/components/datastax/astradb_tool.py +1 -1
  76. lfx/components/datastax/astradb_vectorstore.py +1 -1
  77. lfx/components/datastax/hcd.py +1 -1
  78. lfx/components/deactivated/json_document_builder.py +1 -1
  79. lfx/components/docling/__init__.py +0 -3
  80. lfx/components/elastic/elasticsearch.py +1 -1
  81. lfx/components/elastic/opensearch_multimodal.py +1575 -0
  82. lfx/components/files_and_knowledge/__init__.py +47 -0
  83. lfx/components/{data → files_and_knowledge}/directory.py +1 -1
  84. lfx/components/{data → files_and_knowledge}/file.py +246 -18
  85. lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +2 -2
  86. lfx/components/{data → files_and_knowledge}/save_file.py +142 -22
  87. lfx/components/flow_controls/__init__.py +58 -0
  88. lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
  89. lfx/components/{logic → flow_controls}/loop.py +43 -9
  90. lfx/components/flow_controls/run_flow.py +108 -0
  91. lfx/components/glean/glean_search_api.py +1 -1
  92. lfx/components/groq/groq.py +35 -28
  93. lfx/components/helpers/__init__.py +102 -0
  94. lfx/components/input_output/__init__.py +3 -1
  95. lfx/components/input_output/chat.py +4 -3
  96. lfx/components/input_output/chat_output.py +4 -4
  97. lfx/components/input_output/text.py +1 -1
  98. lfx/components/input_output/text_output.py +1 -1
  99. lfx/components/{data → input_output}/webhook.py +1 -1
  100. lfx/components/knowledge_bases/__init__.py +59 -4
  101. lfx/components/langchain_utilities/character.py +1 -1
  102. lfx/components/langchain_utilities/csv_agent.py +84 -16
  103. lfx/components/langchain_utilities/json_agent.py +67 -12
  104. lfx/components/langchain_utilities/language_recursive.py +1 -1
  105. lfx/components/llm_operations/__init__.py +46 -0
  106. lfx/components/{processing → llm_operations}/batch_run.py +1 -1
  107. lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
  108. lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
  109. lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
  110. lfx/components/{processing → llm_operations}/structured_output.py +1 -1
  111. lfx/components/logic/__init__.py +126 -0
  112. lfx/components/mem0/mem0_chat_memory.py +11 -0
  113. lfx/components/models/__init__.py +64 -9
  114. lfx/components/models_and_agents/__init__.py +49 -0
  115. lfx/components/{agents → models_and_agents}/agent.py +2 -2
  116. lfx/components/models_and_agents/embedding_model.py +423 -0
  117. lfx/components/models_and_agents/language_model.py +398 -0
  118. lfx/components/{agents → models_and_agents}/mcp_component.py +53 -44
  119. lfx/components/{helpers → models_and_agents}/memory.py +1 -1
  120. lfx/components/nvidia/system_assist.py +1 -1
  121. lfx/components/olivya/olivya.py +1 -1
  122. lfx/components/ollama/ollama.py +17 -3
  123. lfx/components/processing/__init__.py +9 -57
  124. lfx/components/processing/converter.py +1 -1
  125. lfx/components/processing/dataframe_operations.py +1 -1
  126. lfx/components/processing/parse_json_data.py +2 -2
  127. lfx/components/processing/parser.py +1 -1
  128. lfx/components/processing/split_text.py +1 -1
  129. lfx/components/qdrant/qdrant.py +1 -1
  130. lfx/components/redis/redis.py +1 -1
  131. lfx/components/twelvelabs/split_video.py +10 -0
  132. lfx/components/twelvelabs/video_file.py +12 -0
  133. lfx/components/utilities/__init__.py +43 -0
  134. lfx/components/{helpers → utilities}/calculator_core.py +1 -1
  135. lfx/components/{helpers → utilities}/current_date.py +1 -1
  136. lfx/components/{processing → utilities}/python_repl_core.py +1 -1
  137. lfx/components/vectorstores/local_db.py +9 -0
  138. lfx/components/youtube/youtube_transcripts.py +118 -30
  139. lfx/custom/custom_component/component.py +57 -1
  140. lfx/custom/custom_component/custom_component.py +68 -6
  141. lfx/graph/edge/base.py +43 -20
  142. lfx/graph/graph/base.py +4 -1
  143. lfx/graph/state/model.py +15 -2
  144. lfx/graph/utils.py +6 -0
  145. lfx/graph/vertex/base.py +4 -1
  146. lfx/graph/vertex/param_handler.py +10 -7
  147. lfx/helpers/__init__.py +12 -0
  148. lfx/helpers/flow.py +117 -0
  149. lfx/inputs/input_mixin.py +24 -1
  150. lfx/inputs/inputs.py +13 -1
  151. lfx/interface/components.py +161 -83
  152. lfx/log/logger.py +5 -3
  153. lfx/services/database/__init__.py +5 -0
  154. lfx/services/database/service.py +25 -0
  155. lfx/services/deps.py +87 -22
  156. lfx/services/manager.py +19 -6
  157. lfx/services/mcp_composer/service.py +998 -157
  158. lfx/services/session.py +5 -0
  159. lfx/services/settings/base.py +51 -7
  160. lfx/services/settings/constants.py +8 -0
  161. lfx/services/storage/local.py +76 -46
  162. lfx/services/storage/service.py +152 -29
  163. lfx/template/field/base.py +3 -0
  164. lfx/utils/ssrf_protection.py +384 -0
  165. lfx/utils/validate_cloud.py +26 -0
  166. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/METADATA +38 -22
  167. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/RECORD +182 -150
  168. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/WHEEL +1 -1
  169. lfx/components/agents/altk_agent.py +0 -366
  170. lfx/components/agents/cuga_agent.py +0 -1013
  171. lfx/components/docling/docling_remote_vlm.py +0 -284
  172. lfx/components/logic/run_flow.py +0 -71
  173. lfx/components/models/embedding_model.py +0 -195
  174. lfx/components/models/language_model.py +0 -144
  175. /lfx/components/{data → data_source}/mock_data.py +0 -0
  176. /lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +0 -0
  177. /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
  178. /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
  179. /lfx/components/{logic → flow_controls}/listen.py +0 -0
  180. /lfx/components/{logic → flow_controls}/notify.py +0 -0
  181. /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
  182. /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
  183. /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
  184. /lfx/components/{helpers → processing}/create_list.py +0 -0
  185. /lfx/components/{helpers → processing}/output_parser.py +0 -0
  186. /lfx/components/{helpers → processing}/store_message.py +0 -0
  187. /lfx/components/{helpers → utilities}/id_generator.py +0 -0
  188. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev26.dist-info}/entry_points.txt +0 -0
lfx/components/{data → files_and_knowledge}/save_file.py
@@ -9,7 +9,7 @@ from fastapi.encoders import jsonable_encoder
 
 from lfx.custom import Component
 from lfx.inputs import SortableListInput
-from lfx.io import DropdownInput, HandleInput, SecretStrInput, StrInput
+from lfx.io import BoolInput, DropdownInput, HandleInput, SecretStrInput, StrInput
 from lfx.schema import Data, DataFrame, Message
 from lfx.services.deps import get_settings_service, get_storage_service, session_scope
 from lfx.template.field.base import Output
@@ -18,7 +18,7 @@ from lfx.template.field.base import Output
 class SaveToFileComponent(Component):
     display_name = "Write File"
     description = "Save data to local file, AWS S3, or Google Drive in the selected format."
-    documentation: str = "https://docs.langflow.org/components-processing#save-file"
+    documentation: str = "https://docs.langflow.org/write-file"
     icon = "file-text"
     name = "SaveToFile"
 
@@ -74,6 +74,13 @@
             show=False,
             tool_mode=True,
         ),
+        BoolInput(
+            name="append_mode",
+            display_name="Append",
+            info="Append to file if it exists (only for plain text formats). Disabled for binary formats like Excel.",
+            value=False,
+            show=False,
+        ),
         # Format inputs (dynamic based on storage location)
         DropdownInput(
             name="local_format",
@@ -168,6 +175,7 @@
         # Hide all dynamic fields first
         dynamic_fields = [
             "file_name",  # Common fields (input is always visible)
+            "append_mode",
             "local_format",
             "aws_format",
             "gdrive_format",
@@ -188,9 +196,11 @@
         if len(selected) == 1:
             location = selected[0]
 
-            # Show file_name when any storage location is selected (input is always visible)
+            # Show file_name and append_mode when any storage location is selected
            if "file_name" in build_config:
                build_config["file_name"]["show"] = True
+            if "append_mode" in build_config:
+                build_config["append_mode"]["show"] = True
 
            if location == "Local":
                if "local_format" in build_config:
@@ -274,6 +284,11 @@
             return Path(f"{path}.xlsx").expanduser() if file_extension not in ["xlsx", "xls"] else path
         return Path(f"{path}.{fmt}").expanduser() if file_extension != fmt else path
 
+    def _is_plain_text_format(self, fmt: str) -> bool:
+        """Check if a file format is plain text (supports appending)."""
+        plain_text_formats = ["txt", "json", "markdown", "md", "csv", "xml", "html", "yaml", "log", "tsv", "jsonl"]
+        return fmt.lower() in plain_text_formats
+
     async def _upload_file(self, file_path: Path) -> None:
         """Upload the saved file using the upload_user_file service."""
         from langflow.api.v2.files import upload_user_file
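The helper just added is one half of the append gate that the save methods later combine with the user's Append toggle and a path.exists() check. A minimal standalone sketch of the resulting CSV behavior, with illustrative names (save_csv, PLAIN_TEXT_FORMATS) rather than the component's own:

from pathlib import Path

import pandas as pd

PLAIN_TEXT_FORMATS = {"txt", "json", "markdown", "md", "csv", "xml", "html", "yaml", "log", "tsv", "jsonl"}


def save_csv(df: pd.DataFrame, path: Path, *, append_mode: bool = False) -> None:
    # Append only when requested, when the file already exists, and when the
    # format is plain text ("csv" here); binary formats like Excel never append.
    should_append = append_mode and path.exists() and "csv" in PLAIN_TEXT_FORMATS
    # header=not should_append keeps a second header row out of an existing file.
    df.to_csv(path, index=False, mode="a" if should_append else "w", header=not should_append)


if __name__ == "__main__":
    out = Path("rows.csv")
    save_csv(pd.DataFrame({"a": [1]}), out)
    save_csv(pd.DataFrame({"a": [2]}), out, append_mode=True)
    print(out.read_text())  # one header, two data rows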
@@ -284,7 +299,8 @@
             msg = f"File not found: {file_path}"
             raise FileNotFoundError(msg)
 
-        # Upload the file
+        # Upload the file - always use append=False because the local file already contains
+        # the correct content (either new or appended locally)
         with file_path.open("rb") as f:
             async with session_scope() as db:
                 if not self.user_id:
@@ -298,39 +314,109 @@
                     current_user=current_user,
                     storage_service=get_storage_service(),
                     settings_service=get_settings_service(),
+                    append=False,
                 )
 
     def _save_dataframe(self, dataframe: DataFrame, path: Path, fmt: str) -> str:
         """Save a DataFrame to the specified file format."""
+        append_mode = getattr(self, "append_mode", False)
+        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)
+
         if fmt == "csv":
-            dataframe.to_csv(path, index=False)
+            dataframe.to_csv(path, index=False, mode="a" if should_append else "w", header=not should_append)
         elif fmt == "excel":
             dataframe.to_excel(path, index=False, engine="openpyxl")
         elif fmt == "json":
-            dataframe.to_json(path, orient="records", indent=2)
+            if should_append:
+                # Read and parse existing JSON
+                existing_data = []
+                try:
+                    existing_content = path.read_text(encoding="utf-8").strip()
+                    if existing_content:
+                        parsed = json.loads(existing_content)
+                        # Handle case where existing content is a single object
+                        if isinstance(parsed, dict):
+                            existing_data = [parsed]
+                        elif isinstance(parsed, list):
+                            existing_data = parsed
+                except (json.JSONDecodeError, FileNotFoundError):
+                    # Treat parse errors or missing file as empty array
+                    existing_data = []
+
+                # Append new data
+                new_records = json.loads(dataframe.to_json(orient="records"))
+                existing_data.extend(new_records)
+
+                # Write back as a single JSON array
+                path.write_text(json.dumps(existing_data, indent=2), encoding="utf-8")
+            else:
+                dataframe.to_json(path, orient="records", indent=2)
         elif fmt == "markdown":
-            path.write_text(dataframe.to_markdown(index=False), encoding="utf-8")
+            content = dataframe.to_markdown(index=False)
+            if should_append:
+                path.write_text(path.read_text(encoding="utf-8") + "\n\n" + content, encoding="utf-8")
+            else:
+                path.write_text(content, encoding="utf-8")
         else:
             msg = f"Unsupported DataFrame format: {fmt}"
             raise ValueError(msg)
-        return f"DataFrame saved successfully as '{path}'"
+        action = "appended to" if should_append else "saved successfully as"
+        return f"DataFrame {action} '{path}'"
 
     def _save_data(self, data: Data, path: Path, fmt: str) -> str:
         """Save a Data object to the specified file format."""
+        append_mode = getattr(self, "append_mode", False)
+        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)
+
         if fmt == "csv":
-            pd.DataFrame(data.data).to_csv(path, index=False)
+            pd.DataFrame(data.data).to_csv(
+                path,
+                index=False,
+                mode="a" if should_append else "w",
+                header=not should_append,
+            )
         elif fmt == "excel":
             pd.DataFrame(data.data).to_excel(path, index=False, engine="openpyxl")
         elif fmt == "json":
-            path.write_text(
-                orjson.dumps(jsonable_encoder(data.data), option=orjson.OPT_INDENT_2).decode("utf-8"), encoding="utf-8"
-            )
+            new_data = jsonable_encoder(data.data)
+            if should_append:
+                # Read and parse existing JSON
+                existing_data = []
+                try:
+                    existing_content = path.read_text(encoding="utf-8").strip()
+                    if existing_content:
+                        parsed = json.loads(existing_content)
+                        # Handle case where existing content is a single object
+                        if isinstance(parsed, dict):
+                            existing_data = [parsed]
+                        elif isinstance(parsed, list):
+                            existing_data = parsed
+                except (json.JSONDecodeError, FileNotFoundError):
+                    # Treat parse errors or missing file as empty array
+                    existing_data = []
+
+                # Append new data
+                if isinstance(new_data, list):
+                    existing_data.extend(new_data)
+                else:
+                    existing_data.append(new_data)
+
+                # Write back as a single JSON array
+                path.write_text(json.dumps(existing_data, indent=2), encoding="utf-8")
+            else:
+                content = orjson.dumps(new_data, option=orjson.OPT_INDENT_2).decode("utf-8")
+                path.write_text(content, encoding="utf-8")
        elif fmt == "markdown":
-            path.write_text(pd.DataFrame(data.data).to_markdown(index=False), encoding="utf-8")
+            content = pd.DataFrame(data.data).to_markdown(index=False)
+            if should_append:
+                path.write_text(path.read_text(encoding="utf-8") + "\n\n" + content, encoding="utf-8")
+            else:
+                path.write_text(content, encoding="utf-8")
        else:
            msg = f"Unsupported Data format: {fmt}"
            raise ValueError(msg)
-        return f"Data saved successfully as '{path}'"
+        action = "appended to" if should_append else "saved successfully as"
+        return f"Data {action} '{path}'"
 
     async def _save_message(self, message: Message, path: Path, fmt: str) -> str:
         """Save a Message to the specified file format, handling async iterators."""
@@ -346,16 +432,50 @@
         else:
             content = str(message.text)
 
+        append_mode = getattr(self, "append_mode", False)
+        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)
+
         if fmt == "txt":
-            path.write_text(content, encoding="utf-8")
+            if should_append:
+                path.write_text(path.read_text(encoding="utf-8") + "\n" + content, encoding="utf-8")
+            else:
+                path.write_text(content, encoding="utf-8")
         elif fmt == "json":
-            path.write_text(json.dumps({"message": content}, indent=2), encoding="utf-8")
+            new_message = {"message": content}
+            if should_append:
+                # Read and parse existing JSON
+                existing_data = []
+                try:
+                    existing_content = path.read_text(encoding="utf-8").strip()
+                    if existing_content:
+                        parsed = json.loads(existing_content)
+                        # Handle case where existing content is a single object
+                        if isinstance(parsed, dict):
+                            existing_data = [parsed]
+                        elif isinstance(parsed, list):
+                            existing_data = parsed
+                except (json.JSONDecodeError, FileNotFoundError):
+                    # Treat parse errors or missing file as empty array
+                    existing_data = []
+
+                # Append new message
+                existing_data.append(new_message)
+
+                # Write back as a single JSON array
+                path.write_text(json.dumps(existing_data, indent=2), encoding="utf-8")
+            else:
+                path.write_text(json.dumps(new_message, indent=2), encoding="utf-8")
         elif fmt == "markdown":
-            path.write_text(f"**Message:**\n\n{content}", encoding="utf-8")
+            md_content = f"**Message:**\n\n{content}"
+            if should_append:
+                path.write_text(path.read_text(encoding="utf-8") + "\n\n" + md_content, encoding="utf-8")
+            else:
+                path.write_text(md_content, encoding="utf-8")
         else:
             msg = f"Unsupported Message format: {fmt}"
             raise ValueError(msg)
-        return f"Message saved successfully as '{path}'"
+        action = "appended to" if should_append else "saved successfully as"
+        return f"Message {action} '{path}'"
 
     def _get_selected_storage_location(self) -> str:
         """Get the selected storage location from the SortableListInput."""
@@ -510,7 +630,7 @@
 
         # Handle special Google Drive formats
         if file_format in ["slides", "docs"]:
-            return await self._save_to_google_apps(drive_service, content, file_format)
+            return await self._save_to_google_apps(drive_service, credentials, content, file_format)
 
         # Create temporary file
         file_path = f"{self.file_name}.{file_format}"
@@ -533,14 +653,14 @@
         if Path(temp_file_path).exists():
             Path(temp_file_path).unlink()
 
-    async def _save_to_google_apps(self, drive_service, content: str, app_type: str) -> Message:
+    async def _save_to_google_apps(self, drive_service, credentials, content: str, app_type: str) -> Message:
         """Save content to Google Apps (Slides or Docs)."""
         import time
 
         if app_type == "slides":
             from googleapiclient.discovery import build
 
-            slides_service = build("slides", "v1", credentials=drive_service._http.credentials)
+            slides_service = build("slides", "v1", credentials=credentials)
 
             file_metadata = {
                 "name": self.file_name,
@@ -589,7 +709,7 @@
         elif app_type == "docs":
             from googleapiclient.discovery import build
 
-            docs_service = build("docs", "v1", credentials=drive_service._http.credentials)
+            docs_service = build("docs", "v1", credentials=credentials)
 
             file_metadata = {
                 "name": self.file_name,
lfx/components/flow_controls/__init__.py (new file)
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from lfx.components._importing import import_mod
+
+if TYPE_CHECKING:
+    from lfx.components.flow_controls.conditional_router import ConditionalRouterComponent
+    from lfx.components.flow_controls.data_conditional_router import DataConditionalRouterComponent
+    from lfx.components.flow_controls.flow_tool import FlowToolComponent
+    from lfx.components.flow_controls.listen import ListenComponent
+    from lfx.components.flow_controls.loop import LoopComponent
+    from lfx.components.flow_controls.notify import NotifyComponent
+    from lfx.components.flow_controls.pass_message import PassMessageComponent
+    from lfx.components.flow_controls.run_flow import RunFlowComponent
+    from lfx.components.flow_controls.sub_flow import SubFlowComponent
+
+_dynamic_imports = {
+    "ConditionalRouterComponent": "conditional_router",
+    "DataConditionalRouterComponent": "data_conditional_router",
+    "FlowToolComponent": "flow_tool",
+    "ListenComponent": "listen",
+    "LoopComponent": "loop",
+    "NotifyComponent": "notify",
+    "PassMessageComponent": "pass_message",
+    "RunFlowComponent": "run_flow",
+    "SubFlowComponent": "sub_flow",
+}
+
+__all__ = [
+    "ConditionalRouterComponent",
+    "DataConditionalRouterComponent",
+    "FlowToolComponent",
+    "ListenComponent",
+    "LoopComponent",
+    "NotifyComponent",
+    "PassMessageComponent",
+    "RunFlowComponent",
+    "SubFlowComponent",
+]
+
+
+def __getattr__(attr_name: str) -> Any:
+    """Lazily import flow control components on attribute access."""
+    if attr_name not in _dynamic_imports:
+        msg = f"module '{__name__}' has no attribute '{attr_name}'"
+        raise AttributeError(msg)
+    try:
+        result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
+    except (ModuleNotFoundError, ImportError, AttributeError) as e:
+        msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
+        raise AttributeError(msg) from e
+    globals()[attr_name] = result
+    return result
+
+
+def __dir__() -> list[str]:
+    return list(__all__)
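This new package initializer uses the module-level __getattr__ hook from PEP 562: each name in __all__ is imported from its submodule on first access and then cached in globals(), keeping import lfx.components.flow_controls cheap. A generic sketch of the same pattern, using stdlib importlib in place of the package's import_mod helper and assuming the listed submodules exist beside the __init__:

# lazy_pkg/__init__.py - generic sketch of the lazy-import pattern above.
from importlib import import_module
from typing import Any

# Maps a public attribute name to the submodule that defines it.
_dynamic_imports = {"LoopComponent": "loop", "RunFlowComponent": "run_flow"}
__all__ = list(_dynamic_imports)


def __getattr__(attr_name: str) -> Any:
    """Import the defining submodule only when the attribute is first used."""
    if attr_name not in _dynamic_imports:
        msg = f"module '{__name__}' has no attribute '{attr_name}'"
        raise AttributeError(msg)
    module = import_module(f".{_dynamic_imports[attr_name]}", __package__)
    result = getattr(module, attr_name)
    globals()[attr_name] = result  # cache: later lookups skip __getattr__
    return result


def __dir__() -> list[str]:
    return list(__all__)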
lfx/components/{logic → flow_controls}/conditional_router.py
@@ -8,7 +8,7 @@ from lfx.schema.message import Message
 class ConditionalRouterComponent(Component):
     display_name = "If-Else"
     description = "Routes an input message to a corresponding output based on text comparison."
-    documentation: str = "https://docs.langflow.org/components-logic#conditional-router-if-else-component"
+    documentation: str = "https://docs.langflow.org/if-else"
     icon = "split"
     name = "ConditionalRouter"
 
lfx/components/{logic → flow_controls}/loop.py
@@ -1,29 +1,40 @@
+from lfx.components.processing.converter import convert_to_data
 from lfx.custom.custom_component.component import Component
 from lfx.inputs.inputs import HandleInput
 from lfx.schema.data import Data
 from lfx.schema.dataframe import DataFrame
+from lfx.schema.message import Message
 from lfx.template.field.base import Output
 
 
 class LoopComponent(Component):
     display_name = "Loop"
     description = (
-        "Iterates over a list of Data objects, outputting one item at a time and aggregating results from loop inputs."
+        "Iterates over a list of Data or Message objects, outputting one item at a time and "
+        "aggregating results from loop inputs. Message objects are automatically converted to "
+        "Data objects for consistent processing."
     )
-    documentation: str = "https://docs.langflow.org/components-logic#loop"
+    documentation: str = "https://docs.langflow.org/loop"
     icon = "infinity"
 
     inputs = [
         HandleInput(
             name="data",
             display_name="Inputs",
-            info="The initial list of Data objects or DataFrame to iterate over.",
+            info="The initial DataFrame to iterate over.",
             input_types=["DataFrame"],
         ),
     ]
 
     outputs = [
-        Output(display_name="Item", name="item", method="item_output", allows_loop=True, group_outputs=True),
+        Output(
+            display_name="Item",
+            name="item",
+            method="item_output",
+            allows_loop=True,
+            loop_types=["Message"],
+            group_outputs=True,
+        ),
         Output(display_name="Done", name="done", method="done_output", group_outputs=True),
     ]
 
@@ -45,15 +56,30 @@
             }
         )
 
+    def _convert_message_to_data(self, message: Message) -> Data:
+        """Convert a Message object to a Data object using Type Convert logic."""
+        return convert_to_data(message, auto_parse=False)
+
     def _validate_data(self, data):
-        """Validate and return a list of Data objects."""
+        """Validate and return a list of Data objects. Message objects are auto-converted to Data."""
         if isinstance(data, DataFrame):
             return data.to_data_list()
         if isinstance(data, Data):
             return [data]
-        if isinstance(data, list) and all(isinstance(item, Data) for item in data):
-            return data
-        msg = "The 'data' input must be a DataFrame, a list of Data objects, or a single Data object."
+        if isinstance(data, Message):
+            # Auto-convert Message to Data
+            converted_data = self._convert_message_to_data(data)
+            return [converted_data]
+        if isinstance(data, list) and all(isinstance(item, (Data, Message)) for item in data):
+            # Convert any Message objects in the list to Data objects
+            converted_list = []
+            for item in data:
+                if isinstance(item, Message):
+                    converted_list.append(self._convert_message_to_data(item))
+                else:
+                    converted_list.append(item)
+            return converted_list
+        msg = "The 'data' input must be a DataFrame, a list of Data/Message objects, or a single Data/Message object."
         raise TypeError(msg)
 
     def evaluate_stop_loop(self) -> bool:
@@ -116,14 +142,22 @@
         )
 
     def aggregated_output(self) -> list[Data]:
-        """Return the aggregated list once all items are processed."""
+        """Return the aggregated list once all items are processed.
+
+        Returns Data or Message objects depending on loop input types.
+        """
         self.initialize_data()
 
         # Get data list and aggregated list
         data_list = self.ctx.get(f"{self._id}_data", [])
         aggregated = self.ctx.get(f"{self._id}_aggregated", [])
         loop_input = self.item
+
+        # Append the current loop input to aggregated if it's not already included
         if loop_input is not None and not isinstance(loop_input, str) and len(aggregated) <= len(data_list):
+            # If the loop input is a Message, convert it to Data for consistency
+            if isinstance(loop_input, Message):
+                loop_input = self._convert_message_to_data(loop_input)
             aggregated.append(loop_input)
             self.update_ctx({f"{self._id}_aggregated": aggregated})
         return aggregated
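The net effect of the _validate_data changes is a normalization funnel: DataFrame, single Data, single Message, or a mixed list all come out as a flat list of Data, with Messages converted along the way. A type-only sketch of that funnel, with stand-in classes instead of the lfx schema types:

from dataclasses import dataclass, field


@dataclass
class Data:  # stand-in for lfx.schema.data.Data
    data: dict = field(default_factory=dict)


@dataclass
class Message:  # stand-in for lfx.schema.message.Message
    text: str = ""


def to_data(message: Message) -> Data:
    # Stand-in for convert_to_data(message, auto_parse=False).
    return Data(data={"text": message.text})


def normalize(value) -> list[Data]:
    """Normalize any accepted input shape to a flat list of Data objects."""
    if isinstance(value, Data):
        return [value]
    if isinstance(value, Message):
        return [to_data(value)]
    if isinstance(value, list) and all(isinstance(i, (Data, Message)) for i in value):
        return [to_data(i) if isinstance(i, Message) else i for i in value]
    msg = "expected Data, Message, or a list of them"
    raise TypeError(msg)


print(normalize([Data({"a": 1}), Message("hi")]))  # both items end up as Data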
lfx/components/flow_controls/run_flow.py (new file)
@@ -0,0 +1,108 @@
+from datetime import datetime
+from typing import Any
+
+from lfx.base.tools.run_flow import RunFlowBaseComponent
+from lfx.log.logger import logger
+from lfx.schema.data import Data
+from lfx.schema.dotdict import dotdict
+
+
+class RunFlowComponent(RunFlowBaseComponent):
+    display_name = "Run Flow"
+    description = (
+        "Executes another flow from within the same project. Can also be used as a tool for agents."
+        " \n **Select a Flow to use the tool mode**"
+    )
+    documentation: str = "https://docs.langflow.org/run-flow"
+    beta = True
+    name = "RunFlow"
+    icon = "Workflow"
+
+    inputs = RunFlowBaseComponent.get_base_inputs()
+    outputs = RunFlowBaseComponent.get_base_outputs()
+
+    async def update_build_config(
+        self,
+        build_config: dotdict,
+        field_value: Any,
+        field_name: str | None = None,
+    ):
+        missing_keys = [key for key in self.default_keys if key not in build_config]
+        for key in missing_keys:
+            if key == "flow_name_selected":
+                build_config[key] = {"options": [], "options_metadata": [], "value": None}
+            elif key == "flow_id_selected":
+                build_config[key] = {"value": None}
+            elif key == "cache_flow":
+                build_config[key] = {"value": False}
+            else:
+                build_config[key] = {}
+        if field_name == "flow_name_selected" and (build_config.get("is_refresh", False) or field_value is None):
+            # refresh button was clicked or componented was initialized, so list the flows
+            options: list[str] = await self.alist_flows_by_flow_folder()
+            build_config["flow_name_selected"]["options"] = [flow.data["name"] for flow in options]
+            build_config["flow_name_selected"]["options_metadata"] = []
+            for flow in options:
+                # populate options_metadata
+                build_config["flow_name_selected"]["options_metadata"].append(
+                    {"id": flow.data["id"], "updated_at": flow.data["updated_at"]}
+                )
+                # update selected flow if it is stale
+                if str(flow.data["id"]) == self.flow_id_selected:
+                    await self.check_and_update_stale_flow(flow, build_config)
+        elif field_name in {"flow_name_selected", "flow_id_selected"} and field_value is not None:
+            # flow was selected by name or id, so get the flow and update the bcfg
+            try:
+                # derive flow id if the field_name is flow_name_selected
+                build_config["flow_id_selected"]["value"] = (
+                    self.get_selected_flow_meta(build_config, "id") or build_config["flow_id_selected"]["value"]
+                )
+                updated_at = self.get_selected_flow_meta(build_config, "updated_at")
+                await self.load_graph_and_update_cfg(
+                    build_config, build_config["flow_id_selected"]["value"], updated_at
+                )
+            except Exception as e:
+                msg = f"Error building graph for flow {field_value}"
+                await logger.aexception(msg)
+                raise RuntimeError(msg) from e
+
+        return build_config
+
+    def get_selected_flow_meta(self, build_config: dotdict, field: str) -> dict:
+        """Get the selected flow's metadata from the build config."""
+        return build_config.get("flow_name_selected", {}).get("selected_metadata", {}).get(field)
+
+    async def load_graph_and_update_cfg(
+        self,
+        build_config: dotdict,
+        flow_id: str,
+        updated_at: str | datetime,
+    ) -> None:
+        """Load a flow's graph and update the build config."""
+        graph = await self.get_graph(
+            flow_id_selected=flow_id,
+            updated_at=self.get_str_isots(updated_at),
+        )
+        self.update_build_config_from_graph(build_config, graph)
+
+    def should_update_stale_flow(self, flow: Data, build_config: dotdict) -> bool:
+        """Check if the flow should be updated."""
+        return (
+            (updated_at := self.get_str_isots(flow.data["updated_at"]))  # true updated_at date just fetched from db
+            and (stale_at := self.get_selected_flow_meta(build_config, "updated_at"))  # outdated date in bcfg
+            and self._parse_timestamp(updated_at) > self._parse_timestamp(stale_at)  # stale flow condition
+        )
+
+    async def check_and_update_stale_flow(self, flow: Data, build_config: dotdict) -> None:
+        """Check if the flow should be updated and update it if necessary."""
+        # TODO: improve contract/return value
+        if self.should_update_stale_flow(flow, build_config):
+            await self.load_graph_and_update_cfg(
+                build_config,
+                flow.data["id"],
+                flow.data["updated_at"],
+            )
+
+    def get_str_isots(self, date: datetime | str) -> str:
+        """Get a string timestamp from a datetime or string."""
+        return date.isoformat() if hasattr(date, "isoformat") else date
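should_update_stale_flow reloads a flow's graph only when the timestamp freshly fetched from the database is strictly newer than the one cached in the build config. A small sketch of that comparison, using datetime.fromisoformat as a stand-in for _parse_timestamp, whose implementation is not shown in this diff:

from datetime import datetime


def parse_ts(value: str | datetime) -> datetime:
    # Stand-in for _parse_timestamp: accept either a datetime or an ISO string.
    return value if isinstance(value, datetime) else datetime.fromisoformat(value)


def is_stale(db_updated_at: str | datetime, cached_updated_at: str | datetime | None) -> bool:
    """True when the database copy is strictly newer than the cached metadata."""
    if not cached_updated_at:
        return False  # nothing cached yet; first selection loads the graph anyway
    return parse_ts(db_updated_at) > parse_ts(cached_updated_at)


print(is_stale("2025-06-02T10:00:00", "2025-06-01T09:30:00"))  # True
print(is_stale("2025-06-01T09:30:00", "2025-06-01T09:30:00"))  # False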
lfx/components/glean/glean_search_api.py
@@ -101,7 +101,7 @@ class GleanAPIWrapper(BaseModel):
 class GleanSearchAPIComponent(LCToolComponent):
     display_name: str = "Glean Search API"
     description: str = "Search using Glean's API."
-    documentation: str = "https://docs.langflow.org/Components/components-tools#glean-search-api"
+    documentation: str = "https://docs.langflow.org/bundles-glean"
     icon: str = "Glean"
 
     outputs = [