lfx-nightly 0.2.0.dev0__py3-none-any.whl → 0.2.0.dev41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196) hide show
  1. lfx/_assets/component_index.json +1 -1
  2. lfx/base/agents/agent.py +21 -4
  3. lfx/base/agents/altk_base_agent.py +393 -0
  4. lfx/base/agents/altk_tool_wrappers.py +565 -0
  5. lfx/base/agents/events.py +2 -1
  6. lfx/base/composio/composio_base.py +159 -224
  7. lfx/base/data/base_file.py +97 -20
  8. lfx/base/data/docling_utils.py +61 -10
  9. lfx/base/data/storage_utils.py +301 -0
  10. lfx/base/data/utils.py +178 -14
  11. lfx/base/mcp/util.py +2 -2
  12. lfx/base/models/anthropic_constants.py +21 -12
  13. lfx/base/models/groq_constants.py +74 -58
  14. lfx/base/models/groq_model_discovery.py +265 -0
  15. lfx/base/models/model.py +1 -1
  16. lfx/base/models/model_utils.py +100 -0
  17. lfx/base/models/openai_constants.py +7 -0
  18. lfx/base/models/watsonx_constants.py +32 -8
  19. lfx/base/tools/run_flow.py +601 -129
  20. lfx/cli/commands.py +9 -4
  21. lfx/cli/common.py +2 -2
  22. lfx/cli/run.py +1 -1
  23. lfx/cli/script_loader.py +53 -11
  24. lfx/components/Notion/create_page.py +1 -1
  25. lfx/components/Notion/list_database_properties.py +1 -1
  26. lfx/components/Notion/list_pages.py +1 -1
  27. lfx/components/Notion/list_users.py +1 -1
  28. lfx/components/Notion/page_content_viewer.py +1 -1
  29. lfx/components/Notion/search.py +1 -1
  30. lfx/components/Notion/update_page_property.py +1 -1
  31. lfx/components/__init__.py +19 -5
  32. lfx/components/{agents → altk}/__init__.py +5 -9
  33. lfx/components/altk/altk_agent.py +193 -0
  34. lfx/components/apify/apify_actor.py +1 -1
  35. lfx/components/composio/__init__.py +70 -18
  36. lfx/components/composio/apollo_composio.py +11 -0
  37. lfx/components/composio/bitbucket_composio.py +11 -0
  38. lfx/components/composio/canva_composio.py +11 -0
  39. lfx/components/composio/coda_composio.py +11 -0
  40. lfx/components/composio/composio_api.py +10 -0
  41. lfx/components/composio/discord_composio.py +1 -1
  42. lfx/components/composio/elevenlabs_composio.py +11 -0
  43. lfx/components/composio/exa_composio.py +11 -0
  44. lfx/components/composio/firecrawl_composio.py +11 -0
  45. lfx/components/composio/fireflies_composio.py +11 -0
  46. lfx/components/composio/gmail_composio.py +1 -1
  47. lfx/components/composio/googlebigquery_composio.py +11 -0
  48. lfx/components/composio/googlecalendar_composio.py +1 -1
  49. lfx/components/composio/googledocs_composio.py +1 -1
  50. lfx/components/composio/googlemeet_composio.py +1 -1
  51. lfx/components/composio/googlesheets_composio.py +1 -1
  52. lfx/components/composio/googletasks_composio.py +1 -1
  53. lfx/components/composio/heygen_composio.py +11 -0
  54. lfx/components/composio/mem0_composio.py +11 -0
  55. lfx/components/composio/peopledatalabs_composio.py +11 -0
  56. lfx/components/composio/perplexityai_composio.py +11 -0
  57. lfx/components/composio/serpapi_composio.py +11 -0
  58. lfx/components/composio/slack_composio.py +3 -574
  59. lfx/components/composio/slackbot_composio.py +1 -1
  60. lfx/components/composio/snowflake_composio.py +11 -0
  61. lfx/components/composio/tavily_composio.py +11 -0
  62. lfx/components/composio/youtube_composio.py +2 -2
  63. lfx/components/cuga/__init__.py +34 -0
  64. lfx/components/cuga/cuga_agent.py +730 -0
  65. lfx/components/data/__init__.py +78 -28
  66. lfx/components/data_source/__init__.py +58 -0
  67. lfx/components/{data → data_source}/api_request.py +26 -3
  68. lfx/components/{data → data_source}/csv_to_data.py +15 -10
  69. lfx/components/{data → data_source}/json_to_data.py +15 -8
  70. lfx/components/{data → data_source}/news_search.py +1 -1
  71. lfx/components/{data → data_source}/rss.py +1 -1
  72. lfx/components/{data → data_source}/sql_executor.py +1 -1
  73. lfx/components/{data → data_source}/url.py +1 -1
  74. lfx/components/{data → data_source}/web_search.py +1 -1
  75. lfx/components/datastax/astradb_cql.py +1 -1
  76. lfx/components/datastax/astradb_graph.py +1 -1
  77. lfx/components/datastax/astradb_tool.py +1 -1
  78. lfx/components/datastax/astradb_vectorstore.py +1 -1
  79. lfx/components/datastax/hcd.py +1 -1
  80. lfx/components/deactivated/json_document_builder.py +1 -1
  81. lfx/components/docling/__init__.py +0 -3
  82. lfx/components/docling/chunk_docling_document.py +3 -1
  83. lfx/components/docling/export_docling_document.py +3 -1
  84. lfx/components/elastic/elasticsearch.py +1 -1
  85. lfx/components/files_and_knowledge/__init__.py +47 -0
  86. lfx/components/{data → files_and_knowledge}/directory.py +1 -1
  87. lfx/components/{data → files_and_knowledge}/file.py +304 -24
  88. lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +2 -2
  89. lfx/components/{data → files_and_knowledge}/save_file.py +218 -31
  90. lfx/components/flow_controls/__init__.py +58 -0
  91. lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
  92. lfx/components/{logic → flow_controls}/loop.py +43 -9
  93. lfx/components/flow_controls/run_flow.py +108 -0
  94. lfx/components/glean/glean_search_api.py +1 -1
  95. lfx/components/groq/groq.py +35 -28
  96. lfx/components/helpers/__init__.py +102 -0
  97. lfx/components/ibm/watsonx.py +7 -1
  98. lfx/components/input_output/__init__.py +3 -1
  99. lfx/components/input_output/chat.py +4 -3
  100. lfx/components/input_output/chat_output.py +10 -4
  101. lfx/components/input_output/text.py +1 -1
  102. lfx/components/input_output/text_output.py +1 -1
  103. lfx/components/{data → input_output}/webhook.py +1 -1
  104. lfx/components/knowledge_bases/__init__.py +59 -4
  105. lfx/components/langchain_utilities/character.py +1 -1
  106. lfx/components/langchain_utilities/csv_agent.py +84 -16
  107. lfx/components/langchain_utilities/json_agent.py +67 -12
  108. lfx/components/langchain_utilities/language_recursive.py +1 -1
  109. lfx/components/llm_operations/__init__.py +46 -0
  110. lfx/components/{processing → llm_operations}/batch_run.py +17 -8
  111. lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
  112. lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
  113. lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
  114. lfx/components/{processing → llm_operations}/structured_output.py +1 -1
  115. lfx/components/logic/__init__.py +126 -0
  116. lfx/components/mem0/mem0_chat_memory.py +11 -0
  117. lfx/components/models/__init__.py +64 -9
  118. lfx/components/models_and_agents/__init__.py +49 -0
  119. lfx/components/{agents → models_and_agents}/agent.py +6 -4
  120. lfx/components/models_and_agents/embedding_model.py +353 -0
  121. lfx/components/models_and_agents/language_model.py +398 -0
  122. lfx/components/{agents → models_and_agents}/mcp_component.py +53 -44
  123. lfx/components/{helpers → models_and_agents}/memory.py +1 -1
  124. lfx/components/nvidia/system_assist.py +1 -1
  125. lfx/components/olivya/olivya.py +1 -1
  126. lfx/components/ollama/ollama.py +24 -5
  127. lfx/components/processing/__init__.py +9 -60
  128. lfx/components/processing/converter.py +1 -1
  129. lfx/components/processing/dataframe_operations.py +1 -1
  130. lfx/components/processing/parse_json_data.py +2 -2
  131. lfx/components/processing/parser.py +1 -1
  132. lfx/components/processing/split_text.py +1 -1
  133. lfx/components/qdrant/qdrant.py +1 -1
  134. lfx/components/redis/redis.py +1 -1
  135. lfx/components/twelvelabs/split_video.py +10 -0
  136. lfx/components/twelvelabs/video_file.py +12 -0
  137. lfx/components/utilities/__init__.py +43 -0
  138. lfx/components/{helpers → utilities}/calculator_core.py +1 -1
  139. lfx/components/{helpers → utilities}/current_date.py +1 -1
  140. lfx/components/{processing → utilities}/python_repl_core.py +1 -1
  141. lfx/components/vectorstores/local_db.py +9 -0
  142. lfx/components/youtube/youtube_transcripts.py +118 -30
  143. lfx/custom/custom_component/component.py +57 -1
  144. lfx/custom/custom_component/custom_component.py +68 -6
  145. lfx/custom/directory_reader/directory_reader.py +5 -2
  146. lfx/graph/edge/base.py +43 -20
  147. lfx/graph/state/model.py +15 -2
  148. lfx/graph/utils.py +6 -0
  149. lfx/graph/vertex/param_handler.py +10 -7
  150. lfx/helpers/__init__.py +12 -0
  151. lfx/helpers/flow.py +117 -0
  152. lfx/inputs/input_mixin.py +24 -1
  153. lfx/inputs/inputs.py +13 -1
  154. lfx/interface/components.py +161 -83
  155. lfx/log/logger.py +5 -3
  156. lfx/schema/image.py +2 -12
  157. lfx/services/database/__init__.py +5 -0
  158. lfx/services/database/service.py +25 -0
  159. lfx/services/deps.py +87 -22
  160. lfx/services/interfaces.py +5 -0
  161. lfx/services/manager.py +24 -10
  162. lfx/services/mcp_composer/service.py +1029 -162
  163. lfx/services/session.py +5 -0
  164. lfx/services/settings/auth.py +18 -11
  165. lfx/services/settings/base.py +56 -30
  166. lfx/services/settings/constants.py +8 -0
  167. lfx/services/storage/local.py +108 -46
  168. lfx/services/storage/service.py +171 -29
  169. lfx/template/field/base.py +3 -0
  170. lfx/utils/image.py +29 -11
  171. lfx/utils/ssrf_protection.py +384 -0
  172. lfx/utils/validate_cloud.py +26 -0
  173. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/METADATA +38 -22
  174. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/RECORD +189 -160
  175. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/WHEEL +1 -1
  176. lfx/components/agents/altk_agent.py +0 -366
  177. lfx/components/agents/cuga_agent.py +0 -1013
  178. lfx/components/docling/docling_remote_vlm.py +0 -284
  179. lfx/components/logic/run_flow.py +0 -71
  180. lfx/components/models/embedding_model.py +0 -195
  181. lfx/components/models/language_model.py +0 -144
  182. lfx/components/processing/dataframe_to_toolset.py +0 -259
  183. /lfx/components/{data → data_source}/mock_data.py +0 -0
  184. /lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +0 -0
  185. /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
  186. /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
  187. /lfx/components/{logic → flow_controls}/listen.py +0 -0
  188. /lfx/components/{logic → flow_controls}/notify.py +0 -0
  189. /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
  190. /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
  191. /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
  192. /lfx/components/{helpers → processing}/create_list.py +0 -0
  193. /lfx/components/{helpers → processing}/output_parser.py +0 -0
  194. /lfx/components/{helpers → processing}/store_message.py +0 -0
  195. /lfx/components/{helpers → utilities}/id_generator.py +0 -0
  196. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/entry_points.txt +0 -0
@@ -9,7 +9,7 @@ from fastapi.encoders import jsonable_encoder
9
9
 
10
10
  from lfx.custom import Component
11
11
  from lfx.inputs import SortableListInput
12
- from lfx.io import DropdownInput, HandleInput, SecretStrInput, StrInput
12
+ from lfx.io import BoolInput, DropdownInput, HandleInput, SecretStrInput, StrInput
13
13
  from lfx.schema import Data, DataFrame, Message
14
14
  from lfx.services.deps import get_settings_service, get_storage_service, session_scope
15
15
  from lfx.template.field.base import Output
@@ -18,7 +18,7 @@ from lfx.template.field.base import Output
18
18
  class SaveToFileComponent(Component):
19
19
  display_name = "Write File"
20
20
  description = "Save data to local file, AWS S3, or Google Drive in the selected format."
21
- documentation: str = "https://docs.langflow.org/components-processing#save-file"
21
+ documentation: str = "https://docs.langflow.org/write-file"
22
22
  icon = "file-text"
23
23
  name = "SaveToFile"
24
24
 
@@ -74,6 +74,16 @@ class SaveToFileComponent(Component):
74
74
  show=False,
75
75
  tool_mode=True,
76
76
  ),
77
+ BoolInput(
78
+ name="append_mode",
79
+ display_name="Append",
80
+ info=(
81
+ "Append to file if it exists (only for Local storage with plain text formats). "
82
+ "Not supported for cloud storage (AWS/Google Drive)."
83
+ ),
84
+ value=False,
85
+ show=False,
86
+ ),
77
87
  # Format inputs (dynamic based on storage location)
78
88
  DropdownInput(
79
89
  name="local_format",
@@ -150,6 +160,7 @@ class SaveToFileComponent(Component):
150
160
  "The Google Drive folder ID where the file will be uploaded. "
151
161
  "The folder must be shared with the service account email."
152
162
  ),
163
+ required=True,
153
164
  show=False,
154
165
  advanced=True,
155
166
  ),
@@ -168,6 +179,7 @@ class SaveToFileComponent(Component):
168
179
  # Hide all dynamic fields first
169
180
  dynamic_fields = [
170
181
  "file_name", # Common fields (input is always visible)
182
+ "append_mode",
171
183
  "local_format",
172
184
  "aws_format",
173
185
  "gdrive_format",
@@ -188,10 +200,14 @@ class SaveToFileComponent(Component):
188
200
  if len(selected) == 1:
189
201
  location = selected[0]
190
202
 
191
- # Show file_name when any storage location is selected (input is always visible)
203
+ # Show file_name when any storage location is selected
192
204
  if "file_name" in build_config:
193
205
  build_config["file_name"]["show"] = True
194
206
 
207
+ # Show append_mode only for Local storage (not supported for cloud storage)
208
+ if "append_mode" in build_config:
209
+ build_config["append_mode"]["show"] = location == "Local"
210
+
195
211
  if location == "Local":
196
212
  if "local_format" in build_config:
197
213
  build_config["local_format"]["show"] = True
@@ -274,6 +290,11 @@ class SaveToFileComponent(Component):
274
290
  return Path(f"{path}.xlsx").expanduser() if file_extension not in ["xlsx", "xls"] else path
275
291
  return Path(f"{path}.{fmt}").expanduser() if file_extension != fmt else path
276
292
 
293
+ def _is_plain_text_format(self, fmt: str) -> bool:
294
+ """Check if a file format is plain text (supports appending)."""
295
+ plain_text_formats = ["txt", "json", "markdown", "md", "csv", "xml", "html", "yaml", "log", "tsv", "jsonl"]
296
+ return fmt.lower() in plain_text_formats
297
+
277
298
  async def _upload_file(self, file_path: Path) -> None:
278
299
  """Upload the saved file using the upload_user_file service."""
279
300
  from langflow.api.v2.files import upload_user_file
@@ -284,7 +305,8 @@ class SaveToFileComponent(Component):
284
305
  msg = f"File not found: {file_path}"
285
306
  raise FileNotFoundError(msg)
286
307
 
287
- # Upload the file
308
+ # Upload the file - always use append=False because the local file already contains
309
+ # the correct content (either new or appended locally)
288
310
  with file_path.open("rb") as f:
289
311
  async with session_scope() as db:
290
312
  if not self.user_id:
@@ -298,39 +320,109 @@ class SaveToFileComponent(Component):
298
320
  current_user=current_user,
299
321
  storage_service=get_storage_service(),
300
322
  settings_service=get_settings_service(),
323
+ append=False,
301
324
  )
302
325
 
303
326
  def _save_dataframe(self, dataframe: DataFrame, path: Path, fmt: str) -> str:
304
327
  """Save a DataFrame to the specified file format."""
328
+ append_mode = getattr(self, "append_mode", False)
329
+ should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)
330
+
305
331
  if fmt == "csv":
306
- dataframe.to_csv(path, index=False)
332
+ dataframe.to_csv(path, index=False, mode="a" if should_append else "w", header=not should_append)
307
333
  elif fmt == "excel":
308
334
  dataframe.to_excel(path, index=False, engine="openpyxl")
309
335
  elif fmt == "json":
310
- dataframe.to_json(path, orient="records", indent=2)
336
+ if should_append:
337
+ # Read and parse existing JSON
338
+ existing_data = []
339
+ try:
340
+ existing_content = path.read_text(encoding="utf-8").strip()
341
+ if existing_content:
342
+ parsed = json.loads(existing_content)
343
+ # Handle case where existing content is a single object
344
+ if isinstance(parsed, dict):
345
+ existing_data = [parsed]
346
+ elif isinstance(parsed, list):
347
+ existing_data = parsed
348
+ except (json.JSONDecodeError, FileNotFoundError):
349
+ # Treat parse errors or missing file as empty array
350
+ existing_data = []
351
+
352
+ # Append new data
353
+ new_records = json.loads(dataframe.to_json(orient="records"))
354
+ existing_data.extend(new_records)
355
+
356
+ # Write back as a single JSON array
357
+ path.write_text(json.dumps(existing_data, indent=2), encoding="utf-8")
358
+ else:
359
+ dataframe.to_json(path, orient="records", indent=2)
311
360
  elif fmt == "markdown":
312
- path.write_text(dataframe.to_markdown(index=False), encoding="utf-8")
361
+ content = dataframe.to_markdown(index=False)
362
+ if should_append:
363
+ path.write_text(path.read_text(encoding="utf-8") + "\n\n" + content, encoding="utf-8")
364
+ else:
365
+ path.write_text(content, encoding="utf-8")
313
366
  else:
314
367
  msg = f"Unsupported DataFrame format: {fmt}"
315
368
  raise ValueError(msg)
316
- return f"DataFrame saved successfully as '{path}'"
369
+ action = "appended to" if should_append else "saved successfully as"
370
+ return f"DataFrame {action} '{path}'"
317
371
 
318
372
  def _save_data(self, data: Data, path: Path, fmt: str) -> str:
319
373
  """Save a Data object to the specified file format."""
374
+ append_mode = getattr(self, "append_mode", False)
375
+ should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)
376
+
320
377
  if fmt == "csv":
321
- pd.DataFrame(data.data).to_csv(path, index=False)
378
+ pd.DataFrame(data.data).to_csv(
379
+ path,
380
+ index=False,
381
+ mode="a" if should_append else "w",
382
+ header=not should_append,
383
+ )
322
384
  elif fmt == "excel":
323
385
  pd.DataFrame(data.data).to_excel(path, index=False, engine="openpyxl")
324
386
  elif fmt == "json":
325
- path.write_text(
326
- orjson.dumps(jsonable_encoder(data.data), option=orjson.OPT_INDENT_2).decode("utf-8"), encoding="utf-8"
327
- )
387
+ new_data = jsonable_encoder(data.data)
388
+ if should_append:
389
+ # Read and parse existing JSON
390
+ existing_data = []
391
+ try:
392
+ existing_content = path.read_text(encoding="utf-8").strip()
393
+ if existing_content:
394
+ parsed = json.loads(existing_content)
395
+ # Handle case where existing content is a single object
396
+ if isinstance(parsed, dict):
397
+ existing_data = [parsed]
398
+ elif isinstance(parsed, list):
399
+ existing_data = parsed
400
+ except (json.JSONDecodeError, FileNotFoundError):
401
+ # Treat parse errors or missing file as empty array
402
+ existing_data = []
403
+
404
+ # Append new data
405
+ if isinstance(new_data, list):
406
+ existing_data.extend(new_data)
407
+ else:
408
+ existing_data.append(new_data)
409
+
410
+ # Write back as a single JSON array
411
+ path.write_text(json.dumps(existing_data, indent=2), encoding="utf-8")
412
+ else:
413
+ content = orjson.dumps(new_data, option=orjson.OPT_INDENT_2).decode("utf-8")
414
+ path.write_text(content, encoding="utf-8")
328
415
  elif fmt == "markdown":
329
- path.write_text(pd.DataFrame(data.data).to_markdown(index=False), encoding="utf-8")
416
+ content = pd.DataFrame(data.data).to_markdown(index=False)
417
+ if should_append:
418
+ path.write_text(path.read_text(encoding="utf-8") + "\n\n" + content, encoding="utf-8")
419
+ else:
420
+ path.write_text(content, encoding="utf-8")
330
421
  else:
331
422
  msg = f"Unsupported Data format: {fmt}"
332
423
  raise ValueError(msg)
333
- return f"Data saved successfully as '{path}'"
424
+ action = "appended to" if should_append else "saved successfully as"
425
+ return f"Data {action} '{path}'"
334
426
 
335
427
  async def _save_message(self, message: Message, path: Path, fmt: str) -> str:
336
428
  """Save a Message to the specified file format, handling async iterators."""
@@ -346,16 +438,50 @@ class SaveToFileComponent(Component):
346
438
  else:
347
439
  content = str(message.text)
348
440
 
441
+ append_mode = getattr(self, "append_mode", False)
442
+ should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)
443
+
349
444
  if fmt == "txt":
350
- path.write_text(content, encoding="utf-8")
445
+ if should_append:
446
+ path.write_text(path.read_text(encoding="utf-8") + "\n" + content, encoding="utf-8")
447
+ else:
448
+ path.write_text(content, encoding="utf-8")
351
449
  elif fmt == "json":
352
- path.write_text(json.dumps({"message": content}, indent=2), encoding="utf-8")
450
+ new_message = {"message": content}
451
+ if should_append:
452
+ # Read and parse existing JSON
453
+ existing_data = []
454
+ try:
455
+ existing_content = path.read_text(encoding="utf-8").strip()
456
+ if existing_content:
457
+ parsed = json.loads(existing_content)
458
+ # Handle case where existing content is a single object
459
+ if isinstance(parsed, dict):
460
+ existing_data = [parsed]
461
+ elif isinstance(parsed, list):
462
+ existing_data = parsed
463
+ except (json.JSONDecodeError, FileNotFoundError):
464
+ # Treat parse errors or missing file as empty array
465
+ existing_data = []
466
+
467
+ # Append new message
468
+ existing_data.append(new_message)
469
+
470
+ # Write back as a single JSON array
471
+ path.write_text(json.dumps(existing_data, indent=2), encoding="utf-8")
472
+ else:
473
+ path.write_text(json.dumps(new_message, indent=2), encoding="utf-8")
353
474
  elif fmt == "markdown":
354
- path.write_text(f"**Message:**\n\n{content}", encoding="utf-8")
475
+ md_content = f"**Message:**\n\n{content}"
476
+ if should_append:
477
+ path.write_text(path.read_text(encoding="utf-8") + "\n\n" + md_content, encoding="utf-8")
478
+ else:
479
+ path.write_text(md_content, encoding="utf-8")
355
480
  else:
356
481
  msg = f"Unsupported Message format: {fmt}"
357
482
  raise ValueError(msg)
358
- return f"Message saved successfully as '{path}'"
483
+ action = "appended to" if should_append else "saved successfully as"
484
+ return f"Message {action} '{path}'"
359
485
 
360
486
  def _get_selected_storage_location(self) -> str:
361
487
  """Get the selected storage location from the SortableListInput."""
@@ -455,7 +581,9 @@ class SaveToFileComponent(Component):
455
581
  # Create temporary file
456
582
  import tempfile
457
583
 
458
- with tempfile.NamedTemporaryFile(mode="w", suffix=f".{file_format}", delete=False) as temp_file:
584
+ with tempfile.NamedTemporaryFile(
585
+ mode="w", encoding="utf-8", suffix=f".{file_format}", delete=False
586
+ ) as temp_file:
459
587
  temp_file.write(content)
460
588
  temp_file_path = temp_file.name
461
589
 
@@ -491,16 +619,57 @@ class SaveToFileComponent(Component):
491
619
  msg = "Google API client libraries are not installed. Please install them."
492
620
  raise ImportError(msg) from e
493
621
 
494
- # Parse credentials
622
+ # Parse credentials with multiple fallback strategies
623
+ credentials_dict = None
624
+ parse_errors = []
625
+
626
+ # Strategy 1: Parse as-is with strict=False to allow control characters
495
627
  try:
496
- credentials_dict = json.loads(self.service_account_key)
628
+ credentials_dict = json.loads(self.service_account_key, strict=False)
497
629
  except json.JSONDecodeError as e:
498
- msg = f"Invalid JSON in service account key: {e!s}"
499
- raise ValueError(msg) from e
630
+ parse_errors.append(f"Standard parse: {e!s}")
631
+
632
+ # Strategy 2: Strip whitespace and try again
633
+ if credentials_dict is None:
634
+ try:
635
+ cleaned_key = self.service_account_key.strip()
636
+ credentials_dict = json.loads(cleaned_key, strict=False)
637
+ except json.JSONDecodeError as e:
638
+ parse_errors.append(f"Stripped parse: {e!s}")
639
+
640
+ # Strategy 3: Check if it's double-encoded (JSON string of a JSON string)
641
+ if credentials_dict is None:
642
+ try:
643
+ decoded_once = json.loads(self.service_account_key, strict=False)
644
+ if isinstance(decoded_once, str):
645
+ credentials_dict = json.loads(decoded_once, strict=False)
646
+ else:
647
+ credentials_dict = decoded_once
648
+ except json.JSONDecodeError as e:
649
+ parse_errors.append(f"Double-encoded parse: {e!s}")
650
+
651
+ # Strategy 4: Try to fix common issues with newlines in the private_key field
652
+ if credentials_dict is None:
653
+ try:
654
+ # Replace literal \n with actual newlines which is common in pasted JSON
655
+ fixed_key = self.service_account_key.replace("\\n", "\n")
656
+ credentials_dict = json.loads(fixed_key, strict=False)
657
+ except json.JSONDecodeError as e:
658
+ parse_errors.append(f"Newline-fixed parse: {e!s}")
659
+
660
+ if credentials_dict is None:
661
+ error_details = "; ".join(parse_errors)
662
+ msg = (
663
+ f"Unable to parse service account key JSON. Tried multiple strategies: {error_details}. "
664
+ "Please ensure you've copied the entire JSON content from your service account key file. "
665
+ "The JSON should start with '{' and contain fields like 'type', 'project_id', 'private_key', etc."
666
+ )
667
+ raise ValueError(msg)
500
668
 
501
- # Create Google Drive service
669
+ # Create Google Drive service with appropriate scopes
670
+ # Use drive scope for folder access, file scope is too restrictive for folder verification
502
671
  credentials = service_account.Credentials.from_service_account_info(
503
- credentials_dict, scopes=["https://www.googleapis.com/auth/drive.file"]
672
+ credentials_dict, scopes=["https://www.googleapis.com/auth/drive"]
504
673
  )
505
674
  drive_service = build("drive", "v3", credentials=credentials)
506
675
 
@@ -510,20 +679,38 @@ class SaveToFileComponent(Component):
510
679
 
511
680
  # Handle special Google Drive formats
512
681
  if file_format in ["slides", "docs"]:
513
- return await self._save_to_google_apps(drive_service, content, file_format)
682
+ return await self._save_to_google_apps(drive_service, credentials, content, file_format)
514
683
 
515
684
  # Create temporary file
516
685
  file_path = f"{self.file_name}.{file_format}"
517
- with tempfile.NamedTemporaryFile(mode="w", suffix=f".{file_format}", delete=False) as temp_file:
686
+ with tempfile.NamedTemporaryFile(
687
+ mode="w",
688
+ encoding="utf-8",
689
+ suffix=f".{file_format}",
690
+ delete=False,
691
+ ) as temp_file:
518
692
  temp_file.write(content)
519
693
  temp_file_path = temp_file.name
520
694
 
521
695
  try:
522
696
  # Upload to Google Drive
697
+ # Note: We skip explicit folder verification since it requires broader permissions.
698
+ # If the folder doesn't exist or isn't accessible, the create() call will fail with a clear error.
523
699
  file_metadata = {"name": file_path, "parents": [self.folder_id]}
524
700
  media = MediaFileUpload(temp_file_path, resumable=True)
525
701
 
526
- uploaded_file = drive_service.files().create(body=file_metadata, media_body=media, fields="id").execute()
702
+ try:
703
+ uploaded_file = (
704
+ drive_service.files().create(body=file_metadata, media_body=media, fields="id").execute()
705
+ )
706
+ except Exception as e:
707
+ msg = (
708
+ f"Unable to upload file to Google Drive folder '{self.folder_id}'. "
709
+ f"Error: {e!s}. "
710
+ "Please ensure: 1) The folder ID is correct, 2) The folder exists, "
711
+ "3) The service account has been granted access to this folder."
712
+ )
713
+ raise ValueError(msg) from e
527
714
 
528
715
  file_id = uploaded_file.get("id")
529
716
  file_url = f"https://drive.google.com/file/d/{file_id}/view"
@@ -533,14 +720,14 @@ class SaveToFileComponent(Component):
533
720
  if Path(temp_file_path).exists():
534
721
  Path(temp_file_path).unlink()
535
722
 
536
- async def _save_to_google_apps(self, drive_service, content: str, app_type: str) -> Message:
723
+ async def _save_to_google_apps(self, drive_service, credentials, content: str, app_type: str) -> Message:
537
724
  """Save content to Google Apps (Slides or Docs)."""
538
725
  import time
539
726
 
540
727
  if app_type == "slides":
541
728
  from googleapiclient.discovery import build
542
729
 
543
- slides_service = build("slides", "v1", credentials=drive_service._http.credentials)
730
+ slides_service = build("slides", "v1", credentials=credentials)
544
731
 
545
732
  file_metadata = {
546
733
  "name": self.file_name,
@@ -589,7 +776,7 @@ class SaveToFileComponent(Component):
589
776
  elif app_type == "docs":
590
777
  from googleapiclient.discovery import build
591
778
 
592
- docs_service = build("docs", "v1", credentials=drive_service._http.credentials)
779
+ docs_service = build("docs", "v1", credentials=credentials)
593
780
 
594
781
  file_metadata = {
595
782
  "name": self.file_name,
@@ -0,0 +1,58 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING, Any
4
+
5
+ from lfx.components._importing import import_mod
6
+
7
+ if TYPE_CHECKING:
8
+ from lfx.components.flow_controls.conditional_router import ConditionalRouterComponent
9
+ from lfx.components.flow_controls.data_conditional_router import DataConditionalRouterComponent
10
+ from lfx.components.flow_controls.flow_tool import FlowToolComponent
11
+ from lfx.components.flow_controls.listen import ListenComponent
12
+ from lfx.components.flow_controls.loop import LoopComponent
13
+ from lfx.components.flow_controls.notify import NotifyComponent
14
+ from lfx.components.flow_controls.pass_message import PassMessageComponent
15
+ from lfx.components.flow_controls.run_flow import RunFlowComponent
16
+ from lfx.components.flow_controls.sub_flow import SubFlowComponent
17
+
18
+ _dynamic_imports = {
19
+ "ConditionalRouterComponent": "conditional_router",
20
+ "DataConditionalRouterComponent": "data_conditional_router",
21
+ "FlowToolComponent": "flow_tool",
22
+ "ListenComponent": "listen",
23
+ "LoopComponent": "loop",
24
+ "NotifyComponent": "notify",
25
+ "PassMessageComponent": "pass_message",
26
+ "RunFlowComponent": "run_flow",
27
+ "SubFlowComponent": "sub_flow",
28
+ }
29
+
30
+ __all__ = [
31
+ "ConditionalRouterComponent",
32
+ "DataConditionalRouterComponent",
33
+ "FlowToolComponent",
34
+ "ListenComponent",
35
+ "LoopComponent",
36
+ "NotifyComponent",
37
+ "PassMessageComponent",
38
+ "RunFlowComponent",
39
+ "SubFlowComponent",
40
+ ]
41
+
42
+
43
+ def __getattr__(attr_name: str) -> Any:
44
+ """Lazily import flow control components on attribute access."""
45
+ if attr_name not in _dynamic_imports:
46
+ msg = f"module '{__name__}' has no attribute '{attr_name}'"
47
+ raise AttributeError(msg)
48
+ try:
49
+ result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
50
+ except (ModuleNotFoundError, ImportError, AttributeError) as e:
51
+ msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
52
+ raise AttributeError(msg) from e
53
+ globals()[attr_name] = result
54
+ return result
55
+
56
+
57
+ def __dir__() -> list[str]:
58
+ return list(__all__)
@@ -8,7 +8,7 @@ from lfx.schema.message import Message
8
8
  class ConditionalRouterComponent(Component):
9
9
  display_name = "If-Else"
10
10
  description = "Routes an input message to a corresponding output based on text comparison."
11
- documentation: str = "https://docs.langflow.org/components-logic#conditional-router-if-else-component"
11
+ documentation: str = "https://docs.langflow.org/if-else"
12
12
  icon = "split"
13
13
  name = "ConditionalRouter"
14
14
 
@@ -1,29 +1,40 @@
1
+ from lfx.components.processing.converter import convert_to_data
1
2
  from lfx.custom.custom_component.component import Component
2
3
  from lfx.inputs.inputs import HandleInput
3
4
  from lfx.schema.data import Data
4
5
  from lfx.schema.dataframe import DataFrame
6
+ from lfx.schema.message import Message
5
7
  from lfx.template.field.base import Output
6
8
 
7
9
 
8
10
class LoopComponent(Component):
    # UI metadata for the component.
    display_name = "Loop"
    description = (
        "Iterates over a list of Data or Message objects, outputting one item at a time and "
        "aggregating results from loop inputs. Message objects are automatically converted to "
        "Data objects for consistent processing."
    )
    documentation: str = "https://docs.langflow.org/loop"
    icon = "infinity"

    inputs = [
        # Source collection to iterate; handle accepts DataFrame connections.
        HandleInput(
            name="data",
            display_name="Inputs",
            info="The initial DataFrame to iterate over.",
            input_types=["DataFrame"],
        ),
    ]

    outputs = [
        # Per-iteration output; allows_loop marks it as the loop-back edge.
        Output(
            display_name="Item",
            name="item",
            method="item_output",
            allows_loop=True,
            loop_types=["Message"],
            group_outputs=True,
        ),
        # Emitted when iteration is finished (see done_output later in the class).
        Output(display_name="Done", name="done", method="done_output", group_outputs=True),
    ]
29
40
 
@@ -45,15 +56,30 @@ class LoopComponent(Component):
45
56
  }
46
57
  )
47
58
 
59
+ def _convert_message_to_data(self, message: Message) -> Data:
60
+ """Convert a Message object to a Data object using Type Convert logic."""
61
+ return convert_to_data(message, auto_parse=False)
62
+
48
63
  def _validate_data(self, data):
49
- """Validate and return a list of Data objects."""
64
+ """Validate and return a list of Data objects. Message objects are auto-converted to Data."""
50
65
  if isinstance(data, DataFrame):
51
66
  return data.to_data_list()
52
67
  if isinstance(data, Data):
53
68
  return [data]
54
- if isinstance(data, list) and all(isinstance(item, Data) for item in data):
55
- return data
56
- msg = "The 'data' input must be a DataFrame, a list of Data objects, or a single Data object."
69
+ if isinstance(data, Message):
70
+ # Auto-convert Message to Data
71
+ converted_data = self._convert_message_to_data(data)
72
+ return [converted_data]
73
+ if isinstance(data, list) and all(isinstance(item, (Data, Message)) for item in data):
74
+ # Convert any Message objects in the list to Data objects
75
+ converted_list = []
76
+ for item in data:
77
+ if isinstance(item, Message):
78
+ converted_list.append(self._convert_message_to_data(item))
79
+ else:
80
+ converted_list.append(item)
81
+ return converted_list
82
+ msg = "The 'data' input must be a DataFrame, a list of Data/Message objects, or a single Data/Message object."
57
83
  raise TypeError(msg)
58
84
 
59
85
  def evaluate_stop_loop(self) -> bool:
@@ -116,14 +142,22 @@ class LoopComponent(Component):
116
142
  )
117
143
 
118
144
  def aggregated_output(self) -> list[Data]:
119
- """Return the aggregated list once all items are processed."""
145
+ """Return the aggregated list once all items are processed.
146
+
147
+ Returns Data or Message objects depending on loop input types.
148
+ """
120
149
  self.initialize_data()
121
150
 
122
151
  # Get data list and aggregated list
123
152
  data_list = self.ctx.get(f"{self._id}_data", [])
124
153
  aggregated = self.ctx.get(f"{self._id}_aggregated", [])
125
154
  loop_input = self.item
155
+
156
+ # Append the current loop input to aggregated if it's not already included
126
157
  if loop_input is not None and not isinstance(loop_input, str) and len(aggregated) <= len(data_list):
158
+ # If the loop input is a Message, convert it to Data for consistency
159
+ if isinstance(loop_input, Message):
160
+ loop_input = self._convert_message_to_data(loop_input)
127
161
  aggregated.append(loop_input)
128
162
  self.update_ctx({f"{self._id}_aggregated": aggregated})
129
163
  return aggregated