lfx-nightly 0.2.0.dev0__py3-none-any.whl → 0.2.0.dev41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196)
  1. lfx/_assets/component_index.json +1 -1
  2. lfx/base/agents/agent.py +21 -4
  3. lfx/base/agents/altk_base_agent.py +393 -0
  4. lfx/base/agents/altk_tool_wrappers.py +565 -0
  5. lfx/base/agents/events.py +2 -1
  6. lfx/base/composio/composio_base.py +159 -224
  7. lfx/base/data/base_file.py +97 -20
  8. lfx/base/data/docling_utils.py +61 -10
  9. lfx/base/data/storage_utils.py +301 -0
  10. lfx/base/data/utils.py +178 -14
  11. lfx/base/mcp/util.py +2 -2
  12. lfx/base/models/anthropic_constants.py +21 -12
  13. lfx/base/models/groq_constants.py +74 -58
  14. lfx/base/models/groq_model_discovery.py +265 -0
  15. lfx/base/models/model.py +1 -1
  16. lfx/base/models/model_utils.py +100 -0
  17. lfx/base/models/openai_constants.py +7 -0
  18. lfx/base/models/watsonx_constants.py +32 -8
  19. lfx/base/tools/run_flow.py +601 -129
  20. lfx/cli/commands.py +9 -4
  21. lfx/cli/common.py +2 -2
  22. lfx/cli/run.py +1 -1
  23. lfx/cli/script_loader.py +53 -11
  24. lfx/components/Notion/create_page.py +1 -1
  25. lfx/components/Notion/list_database_properties.py +1 -1
  26. lfx/components/Notion/list_pages.py +1 -1
  27. lfx/components/Notion/list_users.py +1 -1
  28. lfx/components/Notion/page_content_viewer.py +1 -1
  29. lfx/components/Notion/search.py +1 -1
  30. lfx/components/Notion/update_page_property.py +1 -1
  31. lfx/components/__init__.py +19 -5
  32. lfx/components/{agents → altk}/__init__.py +5 -9
  33. lfx/components/altk/altk_agent.py +193 -0
  34. lfx/components/apify/apify_actor.py +1 -1
  35. lfx/components/composio/__init__.py +70 -18
  36. lfx/components/composio/apollo_composio.py +11 -0
  37. lfx/components/composio/bitbucket_composio.py +11 -0
  38. lfx/components/composio/canva_composio.py +11 -0
  39. lfx/components/composio/coda_composio.py +11 -0
  40. lfx/components/composio/composio_api.py +10 -0
  41. lfx/components/composio/discord_composio.py +1 -1
  42. lfx/components/composio/elevenlabs_composio.py +11 -0
  43. lfx/components/composio/exa_composio.py +11 -0
  44. lfx/components/composio/firecrawl_composio.py +11 -0
  45. lfx/components/composio/fireflies_composio.py +11 -0
  46. lfx/components/composio/gmail_composio.py +1 -1
  47. lfx/components/composio/googlebigquery_composio.py +11 -0
  48. lfx/components/composio/googlecalendar_composio.py +1 -1
  49. lfx/components/composio/googledocs_composio.py +1 -1
  50. lfx/components/composio/googlemeet_composio.py +1 -1
  51. lfx/components/composio/googlesheets_composio.py +1 -1
  52. lfx/components/composio/googletasks_composio.py +1 -1
  53. lfx/components/composio/heygen_composio.py +11 -0
  54. lfx/components/composio/mem0_composio.py +11 -0
  55. lfx/components/composio/peopledatalabs_composio.py +11 -0
  56. lfx/components/composio/perplexityai_composio.py +11 -0
  57. lfx/components/composio/serpapi_composio.py +11 -0
  58. lfx/components/composio/slack_composio.py +3 -574
  59. lfx/components/composio/slackbot_composio.py +1 -1
  60. lfx/components/composio/snowflake_composio.py +11 -0
  61. lfx/components/composio/tavily_composio.py +11 -0
  62. lfx/components/composio/youtube_composio.py +2 -2
  63. lfx/components/cuga/__init__.py +34 -0
  64. lfx/components/cuga/cuga_agent.py +730 -0
  65. lfx/components/data/__init__.py +78 -28
  66. lfx/components/data_source/__init__.py +58 -0
  67. lfx/components/{data → data_source}/api_request.py +26 -3
  68. lfx/components/{data → data_source}/csv_to_data.py +15 -10
  69. lfx/components/{data → data_source}/json_to_data.py +15 -8
  70. lfx/components/{data → data_source}/news_search.py +1 -1
  71. lfx/components/{data → data_source}/rss.py +1 -1
  72. lfx/components/{data → data_source}/sql_executor.py +1 -1
  73. lfx/components/{data → data_source}/url.py +1 -1
  74. lfx/components/{data → data_source}/web_search.py +1 -1
  75. lfx/components/datastax/astradb_cql.py +1 -1
  76. lfx/components/datastax/astradb_graph.py +1 -1
  77. lfx/components/datastax/astradb_tool.py +1 -1
  78. lfx/components/datastax/astradb_vectorstore.py +1 -1
  79. lfx/components/datastax/hcd.py +1 -1
  80. lfx/components/deactivated/json_document_builder.py +1 -1
  81. lfx/components/docling/__init__.py +0 -3
  82. lfx/components/docling/chunk_docling_document.py +3 -1
  83. lfx/components/docling/export_docling_document.py +3 -1
  84. lfx/components/elastic/elasticsearch.py +1 -1
  85. lfx/components/files_and_knowledge/__init__.py +47 -0
  86. lfx/components/{data → files_and_knowledge}/directory.py +1 -1
  87. lfx/components/{data → files_and_knowledge}/file.py +304 -24
  88. lfx/components/{knowledge_bases → files_and_knowledge}/retrieval.py +2 -2
  89. lfx/components/{data → files_and_knowledge}/save_file.py +218 -31
  90. lfx/components/flow_controls/__init__.py +58 -0
  91. lfx/components/{logic → flow_controls}/conditional_router.py +1 -1
  92. lfx/components/{logic → flow_controls}/loop.py +43 -9
  93. lfx/components/flow_controls/run_flow.py +108 -0
  94. lfx/components/glean/glean_search_api.py +1 -1
  95. lfx/components/groq/groq.py +35 -28
  96. lfx/components/helpers/__init__.py +102 -0
  97. lfx/components/ibm/watsonx.py +7 -1
  98. lfx/components/input_output/__init__.py +3 -1
  99. lfx/components/input_output/chat.py +4 -3
  100. lfx/components/input_output/chat_output.py +10 -4
  101. lfx/components/input_output/text.py +1 -1
  102. lfx/components/input_output/text_output.py +1 -1
  103. lfx/components/{data → input_output}/webhook.py +1 -1
  104. lfx/components/knowledge_bases/__init__.py +59 -4
  105. lfx/components/langchain_utilities/character.py +1 -1
  106. lfx/components/langchain_utilities/csv_agent.py +84 -16
  107. lfx/components/langchain_utilities/json_agent.py +67 -12
  108. lfx/components/langchain_utilities/language_recursive.py +1 -1
  109. lfx/components/llm_operations/__init__.py +46 -0
  110. lfx/components/{processing → llm_operations}/batch_run.py +17 -8
  111. lfx/components/{processing → llm_operations}/lambda_filter.py +1 -1
  112. lfx/components/{logic → llm_operations}/llm_conditional_router.py +1 -1
  113. lfx/components/{processing/llm_router.py → llm_operations/llm_selector.py} +3 -3
  114. lfx/components/{processing → llm_operations}/structured_output.py +1 -1
  115. lfx/components/logic/__init__.py +126 -0
  116. lfx/components/mem0/mem0_chat_memory.py +11 -0
  117. lfx/components/models/__init__.py +64 -9
  118. lfx/components/models_and_agents/__init__.py +49 -0
  119. lfx/components/{agents → models_and_agents}/agent.py +6 -4
  120. lfx/components/models_and_agents/embedding_model.py +353 -0
  121. lfx/components/models_and_agents/language_model.py +398 -0
  122. lfx/components/{agents → models_and_agents}/mcp_component.py +53 -44
  123. lfx/components/{helpers → models_and_agents}/memory.py +1 -1
  124. lfx/components/nvidia/system_assist.py +1 -1
  125. lfx/components/olivya/olivya.py +1 -1
  126. lfx/components/ollama/ollama.py +24 -5
  127. lfx/components/processing/__init__.py +9 -60
  128. lfx/components/processing/converter.py +1 -1
  129. lfx/components/processing/dataframe_operations.py +1 -1
  130. lfx/components/processing/parse_json_data.py +2 -2
  131. lfx/components/processing/parser.py +1 -1
  132. lfx/components/processing/split_text.py +1 -1
  133. lfx/components/qdrant/qdrant.py +1 -1
  134. lfx/components/redis/redis.py +1 -1
  135. lfx/components/twelvelabs/split_video.py +10 -0
  136. lfx/components/twelvelabs/video_file.py +12 -0
  137. lfx/components/utilities/__init__.py +43 -0
  138. lfx/components/{helpers → utilities}/calculator_core.py +1 -1
  139. lfx/components/{helpers → utilities}/current_date.py +1 -1
  140. lfx/components/{processing → utilities}/python_repl_core.py +1 -1
  141. lfx/components/vectorstores/local_db.py +9 -0
  142. lfx/components/youtube/youtube_transcripts.py +118 -30
  143. lfx/custom/custom_component/component.py +57 -1
  144. lfx/custom/custom_component/custom_component.py +68 -6
  145. lfx/custom/directory_reader/directory_reader.py +5 -2
  146. lfx/graph/edge/base.py +43 -20
  147. lfx/graph/state/model.py +15 -2
  148. lfx/graph/utils.py +6 -0
  149. lfx/graph/vertex/param_handler.py +10 -7
  150. lfx/helpers/__init__.py +12 -0
  151. lfx/helpers/flow.py +117 -0
  152. lfx/inputs/input_mixin.py +24 -1
  153. lfx/inputs/inputs.py +13 -1
  154. lfx/interface/components.py +161 -83
  155. lfx/log/logger.py +5 -3
  156. lfx/schema/image.py +2 -12
  157. lfx/services/database/__init__.py +5 -0
  158. lfx/services/database/service.py +25 -0
  159. lfx/services/deps.py +87 -22
  160. lfx/services/interfaces.py +5 -0
  161. lfx/services/manager.py +24 -10
  162. lfx/services/mcp_composer/service.py +1029 -162
  163. lfx/services/session.py +5 -0
  164. lfx/services/settings/auth.py +18 -11
  165. lfx/services/settings/base.py +56 -30
  166. lfx/services/settings/constants.py +8 -0
  167. lfx/services/storage/local.py +108 -46
  168. lfx/services/storage/service.py +171 -29
  169. lfx/template/field/base.py +3 -0
  170. lfx/utils/image.py +29 -11
  171. lfx/utils/ssrf_protection.py +384 -0
  172. lfx/utils/validate_cloud.py +26 -0
  173. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/METADATA +38 -22
  174. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/RECORD +189 -160
  175. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/WHEEL +1 -1
  176. lfx/components/agents/altk_agent.py +0 -366
  177. lfx/components/agents/cuga_agent.py +0 -1013
  178. lfx/components/docling/docling_remote_vlm.py +0 -284
  179. lfx/components/logic/run_flow.py +0 -71
  180. lfx/components/models/embedding_model.py +0 -195
  181. lfx/components/models/language_model.py +0 -144
  182. lfx/components/processing/dataframe_to_toolset.py +0 -259
  183. /lfx/components/{data → data_source}/mock_data.py +0 -0
  184. /lfx/components/{knowledge_bases → files_and_knowledge}/ingestion.py +0 -0
  185. /lfx/components/{logic → flow_controls}/data_conditional_router.py +0 -0
  186. /lfx/components/{logic → flow_controls}/flow_tool.py +0 -0
  187. /lfx/components/{logic → flow_controls}/listen.py +0 -0
  188. /lfx/components/{logic → flow_controls}/notify.py +0 -0
  189. /lfx/components/{logic → flow_controls}/pass_message.py +0 -0
  190. /lfx/components/{logic → flow_controls}/sub_flow.py +0 -0
  191. /lfx/components/{processing → models_and_agents}/prompt.py +0 -0
  192. /lfx/components/{helpers → processing}/create_list.py +0 -0
  193. /lfx/components/{helpers → processing}/output_parser.py +0 -0
  194. /lfx/components/{helpers → processing}/store_message.py +0 -0
  195. /lfx/components/{helpers → utilities}/id_generator.py +0 -0
  196. {lfx_nightly-0.2.0.dev0.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/entry_points.txt +0 -0
lfx/inputs/inputs.py CHANGED
@@ -10,6 +10,7 @@ from lfx.schema.data import Data
 from lfx.schema.message import Message
 
 from .input_mixin import (
+    AIMixin,
     AuthMixin,
     BaseInputMixin,
     ConnectionMixin,
@@ -257,7 +258,7 @@ class MessageTextInput(StrInput, MetadataTraceMixin, InputTraceMixin, ToolModeMi
         return value
 
 
-class MultilineInput(MessageTextInput, MultilineMixin, InputTraceMixin, ToolModeMixin):
+class MultilineInput(MessageTextInput, AIMixin, MultilineMixin, InputTraceMixin, ToolModeMixin):
     """Represents a multiline input field.
 
     Attributes:
@@ -281,6 +282,7 @@ class MultilineSecretInput(MessageTextInput, MultilineMixin, InputTraceMixin):
     field_type: SerializableFieldTypes = FieldTypes.PASSWORD
     multiline: CoalesceBool = True
     password: CoalesceBool = Field(default=True)
+    track_in_telemetry: CoalesceBool = False  # Never track secret inputs
 
 
 class SecretStrInput(BaseInputMixin, DatabaseLoadMixin):
@@ -298,6 +300,7 @@ class SecretStrInput(BaseInputMixin, DatabaseLoadMixin):
     password: CoalesceBool = Field(default=True)
     input_types: list[str] = []
     load_from_db: CoalesceBool = True
+    track_in_telemetry: CoalesceBool = False  # Never track passwords
 
     @field_validator("value")
     @classmethod
@@ -352,6 +355,7 @@ class IntInput(BaseInputMixin, ListableInputMixin, RangeMixin, MetadataTraceMixi
     """
 
     field_type: SerializableFieldTypes = FieldTypes.INTEGER
+    track_in_telemetry: CoalesceBool = True  # Safe numeric parameter
 
     @field_validator("value")
     @classmethod
@@ -387,6 +391,7 @@ class FloatInput(BaseInputMixin, ListableInputMixin, RangeMixin, MetadataTraceMi
     """
 
     field_type: SerializableFieldTypes = FieldTypes.FLOAT
+    track_in_telemetry: CoalesceBool = True  # Safe numeric parameter
 
     @field_validator("value")
     @classmethod
@@ -424,6 +429,7 @@ class BoolInput(BaseInputMixin, ListableInputMixin, MetadataTraceMixin, ToolMode
 
     field_type: SerializableFieldTypes = FieldTypes.BOOLEAN
     value: CoalesceBool = False
+    track_in_telemetry: CoalesceBool = True  # Safe boolean flag
 
 
 class NestedDictInput(
@@ -488,6 +494,7 @@ class DropdownInput(BaseInputMixin, DropDownMixin, MetadataTraceMixin, ToolModeM
     toggle: bool = False
     toggle_disable: bool | None = None
     toggle_value: bool | None = None
+    track_in_telemetry: CoalesceBool = True  # Safe predefined choices
 
 
 class ConnectionInput(BaseInputMixin, ConnectionMixin, MetadataTraceMixin, ToolModeMixin):
@@ -499,6 +506,7 @@ class ConnectionInput(BaseInputMixin, ConnectionMixin, MetadataTraceMixin, ToolM
     """
 
     field_type: SerializableFieldTypes = FieldTypes.CONNECTION
+    track_in_telemetry: CoalesceBool = False  # Never track connection strings (may contain credentials)
 
 
 class AuthInput(BaseInputMixin, AuthMixin, MetadataTraceMixin):
@@ -513,6 +521,7 @@ class AuthInput(BaseInputMixin, AuthMixin, MetadataTraceMixin):
 
     field_type: SerializableFieldTypes = FieldTypes.AUTH
     show: bool = False
+    track_in_telemetry: CoalesceBool = False  # Never track auth credentials
 
 
 class QueryInput(MessageTextInput, QueryMixin):
@@ -558,6 +567,7 @@ class TabInput(BaseInputMixin, TabMixin, MetadataTraceMixin, ToolModeMixin):
 
     field_type: SerializableFieldTypes = FieldTypes.TAB
     options: list[str] = Field(default_factory=list)
+    track_in_telemetry: CoalesceBool = True  # Safe UI tab selection
 
     @model_validator(mode="after")
     @classmethod
@@ -619,6 +629,7 @@ class FileInput(BaseInputMixin, ListableInputMixin, FileMixin, MetadataTraceMixi
     """
 
     field_type: SerializableFieldTypes = FieldTypes.FILE
+    track_in_telemetry: CoalesceBool = False  # Never track file paths (may contain PII)
 
 
 class McpInput(BaseInputMixin, MetadataTraceMixin):
@@ -633,6 +644,7 @@ class McpInput(BaseInputMixin, MetadataTraceMixin):
 
     field_type: SerializableFieldTypes = FieldTypes.MCP
     value: dict[str, Any] = Field(default_factory=dict)
+    track_in_telemetry: CoalesceBool = False  # Never track MCP config (may contain sensitive data)
 
 
 class LinkInput(BaseInputMixin, LinkMixin):
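The new `track_in_telemetry` flag marks which input types are safe to report in telemetry (numeric, boolean, dropdown, and tab inputs) and which must never be reported (secrets, passwords, connections, auth, file paths, MCP config). A minimal, self-contained sketch of how a collector could respect the flag; the `TelemetryField` stand-in and `collect_safe_values` helper are illustrative and are not part of this release:

# Hypothetical sketch: only the track_in_telemetry flag comes from the diff above;
# the stand-in class and helper below are illustrative.
from dataclasses import dataclass
from typing import Any


@dataclass
class TelemetryField:
    name: str
    value: Any
    track_in_telemetry: bool = False  # default to "never report"


def collect_safe_values(fields: list[TelemetryField]) -> dict[str, Any]:
    """Keep only values whose input type opted into telemetry."""
    return {f.name: f.value for f in fields if f.track_in_telemetry}


fields = [
    TelemetryField("batch_size", 8, track_in_telemetry=True),       # IntInput-like
    TelemetryField("stream", True, track_in_telemetry=True),        # BoolInput-like
    TelemetryField("api_key", "sk-...", track_in_telemetry=False),  # SecretStrInput-like
]
print(collect_safe_values(fields))  # {'batch_size': 8, 'stream': True}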
lfx/interface/components.py CHANGED
@@ -256,93 +256,77 @@ async def _send_telemetry(
         await logger.adebug(f"Failed to send component index telemetry: {e}")
 
 
-async def import_langflow_components(
-    settings_service: Optional["SettingsService"] = None, telemetry_service: Any | None = None
-):
-    """Asynchronously discovers and loads all built-in Langflow components with module-level parallelization.
-
-    In production mode (non-dev), attempts to load components from a prebuilt static index for instant startup.
-    Falls back to dynamic module scanning if index is unavailable or invalid. When dynamic loading is used,
-    the generated index is cached for future use.
-
-    Scans the `lfx.components` package and its submodules in parallel, instantiates classes that are subclasses
-    of `Component` or `CustomComponent`, and generates their templates. Components are grouped by their
-    top-level subpackage name.
+async def _load_from_index_or_cache(
+    settings_service: Optional["SettingsService"] = None,
+) -> tuple[dict[str, Any], str | None]:
+    """Load components from prebuilt index or cache.
 
     Args:
         settings_service: Optional settings service to get custom index path
-        telemetry_service: Optional telemetry service to log component loading metrics
 
     Returns:
-        A dictionary with a "components" key mapping top-level package names to their component templates.
+        Tuple of (modules_dict, index_source) where index_source is "builtin", "cache", or None if failed
     """
-    # Start timer for telemetry
-    start_time_ms = int(time.time() * 1000)
-    index_source = None
-
-
-    # Track if we need to save the index after building
-    should_save_index = False
-
-    # Fast path: load from prebuilt index if not in dev mode
-    dev_mode_enabled, target_modules = _parse_dev_mode()
-    if not dev_mode_enabled:
-        # Get custom index path from settings if available
-        custom_index_path = None
-        if settings_service and settings_service.settings.components_index_path:
-            custom_index_path = settings_service.settings.components_index_path
-            await logger.adebug(f"Using custom component index: {custom_index_path}")
-
-        index = _read_component_index(custom_index_path)
-        if index and "entries" in index:
-            source = custom_index_path or "built-in index"
-            await logger.adebug(f"Loading components from {source}")
-            index_source = "builtin"
-            # Reconstruct modules_dict from index entries
-            modules_dict = {}
-            for top_level, components in index["entries"]:
-                if top_level not in modules_dict:
-                    modules_dict[top_level] = {}
-                modules_dict[top_level].update(components)
-            await logger.adebug(f"Loaded {len(modules_dict)} component categories from index")
-            await _send_telemetry(
-                telemetry_service, index_source, modules_dict, dev_mode_enabled, target_modules, start_time_ms
-            )
-            return {"components": modules_dict}
+    modules_dict: dict[str, Any] = {}
+
+    # Try to load from prebuilt index first
+    custom_index_path = None
+    if settings_service and settings_service.settings.components_index_path:
+        custom_index_path = settings_service.settings.components_index_path
+        await logger.adebug(f"Using custom component index: {custom_index_path}")
+
+    index = _read_component_index(custom_index_path)
+    if index and "entries" in index:
+        source = custom_index_path or "built-in index"
+        await logger.adebug(f"Loading components from {source}")
+        # Reconstruct modules_dict from index entries
+        for top_level, components in index["entries"]:
+            if top_level not in modules_dict:
+                modules_dict[top_level] = {}
+            modules_dict[top_level].update(components)
+        await logger.adebug(f"Loaded {len(modules_dict)} component categories from index")
+        return modules_dict, "builtin"
+
+    # Index failed to load - try cache
+    await logger.adebug("Prebuilt index not available, checking cache")
+    try:
+        cache_path = _get_cache_path()
+    except Exception as e:  # noqa: BLE001
+        await logger.adebug(f"Cache load failed: {e}")
+    else:
+        if cache_path.exists():
+            await logger.adebug(f"Attempting to load from cache: {cache_path}")
+            index = _read_component_index(str(cache_path))
+            if index and "entries" in index:
+                await logger.adebug("Loading components from cached index")
+                for top_level, components in index["entries"]:
+                    if top_level not in modules_dict:
+                        modules_dict[top_level] = {}
+                    modules_dict[top_level].update(components)
+                await logger.adebug(f"Loaded {len(modules_dict)} component categories from cache")
+                return modules_dict, "cache"
+
+    return modules_dict, None
+
+
+async def _load_components_dynamically(
+    target_modules: list[str] | None = None,
+) -> dict[str, Any]:
+    """Load components dynamically by scanning and importing modules.
 
-    # Index failed to load in production - try cache before building
-    await logger.adebug("Prebuilt index not available, checking cache")
-    try:
-        cache_path = _get_cache_path()
-        if cache_path.exists():
-            await logger.adebug(f"Attempting to load from cache: {cache_path}")
-            index = _read_component_index(str(cache_path))
-            if index and "entries" in index:
-                await logger.adebug("Loading components from cached index")
-                index_source = "cache"
-                modules_dict = {}
-                for top_level, components in index["entries"]:
-                    if top_level not in modules_dict:
-                        modules_dict[top_level] = {}
-                    modules_dict[top_level].update(components)
-                await logger.adebug(f"Loaded {len(modules_dict)} component categories from cache")
-                await _send_telemetry(
-                    telemetry_service, index_source, modules_dict, dev_mode_enabled, target_modules, start_time_ms
-                )
-                return {"components": modules_dict}
-    except Exception as e:  # noqa: BLE001
-        await logger.adebug(f"Cache load failed: {e}")
+    Args:
+        target_modules: Optional list of specific module names to load (e.g., ["mistral", "openai"])
 
-    # No cache available, will build and save
-    await logger.adebug("Falling back to dynamic loading")
-    should_save_index = True
+    Returns:
+        Dictionary mapping top-level module names to their components
+    """
+    modules_dict: dict[str, Any] = {}
 
-    # Fallback: dynamic loading (dev mode or index unavailable)
-    modules_dict = {}
     try:
         import lfx.components as components_pkg
     except ImportError as e:
         await logger.aerror(f"Failed to import langflow.components package: {e}", exc_info=True)
-        return {"components": modules_dict}
+        return modules_dict
 
     # Collect all module names to process
     module_names = []
@@ -361,11 +345,10 @@ async def import_langflow_components
         module_names.append(modname)
 
     if target_modules:
-        await logger.adebug(f"LFX_DEV module filter active: loading only {target_modules}")
         await logger.adebug(f"Found {len(module_names)} modules matching filter")
 
     if not module_names:
-        return {"components": modules_dict}
+        return modules_dict
 
     # Create tasks for parallel module processing
     tasks = [asyncio.to_thread(_process_single_module, modname) for modname in module_names]
@@ -375,7 +358,7 @@
         module_results = await asyncio.gather(*tasks, return_exceptions=True)
     except Exception as e:  # noqa: BLE001
         await logger.aerror(f"Error during parallel module processing: {e}", exc_info=True)
-        return {"components": modules_dict}
+        return modules_dict
 
     # Merge results from all modules
     for result in module_results:
@@ -390,13 +373,108 @@
                 modules_dict[top_level] = {}
             modules_dict[top_level].update(components)
 
-    # Save the generated index to cache if needed (production mode with missing index)
-    if should_save_index and modules_dict:
-        await logger.adebug("Saving generated component index to cache")
-        _save_generated_index(modules_dict)
+    return modules_dict
+
+
+async def _load_full_dev_mode() -> tuple[dict[str, Any], str]:
+    """Load all components dynamically in full dev mode.
+
+    Returns:
+        Tuple of (modules_dict, index_source)
+    """
+    await logger.adebug("LFX_DEV full mode: loading all modules dynamically")
+    modules_dict = await _load_components_dynamically(target_modules=None)
+    return modules_dict, "dynamic"
+
+
+async def _load_selective_dev_mode(
+    settings_service: Optional["SettingsService"],
+    target_modules: list[str],
+) -> tuple[dict[str, Any], str]:
+    """Load index and selectively reload specific modules.
+
+    Args:
+        settings_service: Settings service for custom index path
+        target_modules: List of module names to reload
+
+    Returns:
+        Tuple of (modules_dict, index_source)
+    """
+    await logger.adebug(f"LFX_DEV selective mode: reloading {target_modules}")
+    modules_dict, _ = await _load_from_index_or_cache(settings_service)
+
+    # Reload specific modules dynamically
+    dynamic_modules = await _load_components_dynamically(target_modules=target_modules)
+
+    # Merge/replace the targeted modules
+    for top_level, components in dynamic_modules.items():
+        if top_level not in modules_dict:
+            modules_dict[top_level] = {}
+        modules_dict[top_level].update(components)
+
+    await logger.adebug(f"Reloaded {len(target_modules)} module(s), kept others from index")
+    return modules_dict, "dynamic"
+
+
+async def _load_production_mode(
+    settings_service: Optional["SettingsService"],
+) -> tuple[dict[str, Any], str]:
+    """Load components in production mode with fallback chain.
+
+    Tries: index -> cache -> dynamic build (with caching)
+
+    Args:
+        settings_service: Settings service for custom index path
+
+    Returns:
+        Tuple of (modules_dict, index_source)
+    """
+    modules_dict, index_source = await _load_from_index_or_cache(settings_service)
+
+    if not index_source:
+        # No index or cache available - build dynamically and save
+        await logger.adebug("Falling back to dynamic loading")
+        modules_dict = await _load_components_dynamically(target_modules=None)
+        index_source = "dynamic"
+
+        # Save to cache for future use
+        if modules_dict:
+            await logger.adebug("Saving generated component index to cache")
+            _save_generated_index(modules_dict)
+
+    return modules_dict, index_source
+
+
+async def import_langflow_components(
+    settings_service: Optional["SettingsService"] = None,
+    telemetry_service: Any | None = None,
+) -> dict[str, dict[str, Any]]:
+    """Asynchronously discovers and loads all built-in Langflow components.
+
+    Loading Strategy:
+        - Production mode: Load from prebuilt index -> cache -> build dynamically (with caching)
+        - Dev mode (full): Build all components dynamically
+        - Dev mode (selective): Load index + replace specific modules dynamically
+
+    Args:
+        settings_service: Optional settings service to get custom index path
+        telemetry_service: Optional telemetry service to log component loading metrics
+
+    Returns:
+        A dictionary with a "components" key mapping top-level package names to their component templates.
+    """
+    start_time_ms: int = int(time.time() * 1000)
+    dev_mode_enabled, target_modules = _parse_dev_mode()
+
+    # Strategy pattern: map dev mode state to loading function
+    if dev_mode_enabled and not target_modules:
+        modules_dict, index_source = await _load_full_dev_mode()
+    elif dev_mode_enabled and target_modules:
+        modules_dict, index_source = await _load_selective_dev_mode(settings_service, target_modules)
+    else:
+        modules_dict, index_source = await _load_production_mode(settings_service)
 
-    # Send telemetry for dynamic loading
-    index_source = "dynamic"
+    # Send telemetry
     await _send_telemetry(
         telemetry_service, index_source, modules_dict, dev_mode_enabled, target_modules, start_time_ms
     )
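The refactor above splits the old monolithic loader into per-strategy helpers while keeping the entry point's return shape (a dict with a "components" key). A usage sketch, assuming the function lives in lfx/interface/components.py as the file list indicates:

# Usage sketch for the refactored loader; the signature and return shape are taken
# from the diff above, the import path is assumed from the file list.
import asyncio

from lfx.interface.components import import_langflow_components


async def main() -> None:
    # With no settings/telemetry services, the production fallback chain
    # (prebuilt index -> cache -> dynamic build) decides how components load.
    result = await import_langflow_components()
    components = result["components"]
    for category, templates in sorted(components.items()):
        print(f"{category}: {len(templates)} component template(s)")


asyncio.run(main())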
lfx/log/logger.py CHANGED
@@ -184,9 +184,11 @@ def remove_exception_in_production(_logger: Any, _method_name: str, event_dict:
 
 def buffer_writer(_logger: Any, _method_name: str, event_dict: dict[str, Any]) -> dict[str, Any]:
     """Write to log buffer if enabled."""
-    if log_buffer.enabled():
-        # Create a JSON representation for the buffer
-        log_buffer.write(json.dumps(event_dict))
+    if log_buffer.enabled() and "serialized" in event_dict:
+        # Use the already-serialized version prepared by add_serialized()
+        # This avoids duplicate serialization and ensures consistency
+        serialized_bytes = event_dict["serialized"]
+        log_buffer.write(serialized_bytes.decode("utf-8"))
     return event_dict
 
 
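The patched buffer_writer assumes an earlier add_serialized() processor has already placed a JSON-encoded bytes payload under the "serialized" key, so the buffer reuses it instead of calling json.dumps a second time. A self-contained sketch of that serialize-once pattern; the processor names and buffer below are illustrative, not the actual lfx implementations:

# Illustrative processor chain: serialize the event once, then let later
# processors (such as a buffer writer) reuse the bytes instead of re-encoding.
import json
from typing import Any

buffer: list[str] = []  # stand-in for lfx's log buffer


def add_serialized(event_dict: dict[str, Any]) -> dict[str, Any]:
    # Encode once; downstream consumers read event_dict["serialized"].
    event_dict["serialized"] = json.dumps(
        {k: v for k, v in event_dict.items() if k != "serialized"}
    ).encode("utf-8")
    return event_dict


def buffer_writer(event_dict: dict[str, Any]) -> dict[str, Any]:
    # Mirror of the patched logic: only write when the pre-serialized form exists.
    if "serialized" in event_dict:
        buffer.append(event_dict["serialized"].decode("utf-8"))
    return event_dict


event = {"event": "component_loaded", "level": "debug"}
buffer_writer(add_serialized(event))
print(buffer)  # ['{"event": "component_loaded", "level": "debug"}']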
lfx/schema/image.py CHANGED
@@ -74,12 +74,7 @@ def get_file_paths(files: list[str | dict]):
         if not file_path_str:  # Skip empty paths
             continue
 
-        file_path = Path(file_path_str)
-        # Handle edge case where path might be just a filename without parent
-        if file_path.parent == Path():
-            flow_id, file_name = "", file_path.name
-        else:
-            flow_id, file_name = str(file_path.parent), file_path.name
+        flow_id, file_name = storage_service.parse_file_path(file_path_str)
 
         if not file_name:  # Skip if no filename
             continue
@@ -129,12 +124,7 @@ async def get_files(
         if not file:  # Skip empty file paths
             continue
 
-        file_path = Path(file)
-        # Handle edge case where path might be just a filename without parent
-        if file_path.parent == Path():
-            flow_id, file_name = "", file_path.name
-        else:
-            flow_id, file_name = str(file_path.parent), file_path.name
+        flow_id, file_name = storage_service.parse_file_path(file)
 
         if not file_name:  # Skip if no filename
             continue
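Both call sites now delegate path splitting to the storage service instead of duplicating pathlib logic. A self-contained sketch of the rule the removed code implemented (flow_id is the parent segment, empty for bare filenames); the real parse_file_path may differ per storage backend:

# Sketch mirroring the removed inline logic; not the actual storage-service code.
from pathlib import PurePosixPath


def parse_file_path(full_path: str) -> tuple[str, str]:
    path = PurePosixPath(full_path)
    if str(path.parent) in ("", "."):
        return "", path.name  # bare filename: no flow_id
    return str(path.parent), path.name


assert parse_file_path("my-flow-id/avatar.png") == ("my-flow-id", "avatar.png")
assert parse_file_path("avatar.png") == ("", "avatar.png")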
lfx/services/database/__init__.py ADDED
@@ -0,0 +1,5 @@
+"""Database service implementations for lfx package."""
+
+from lfx.services.database.service import NoopDatabaseService
+
+__all__ = ["NoopDatabaseService"]
lfx/services/database/service.py ADDED
@@ -0,0 +1,25 @@
+"""Database service implementations for lfx package."""
+
+from __future__ import annotations
+
+from contextlib import asynccontextmanager
+
+
+class NoopDatabaseService:
+    """No-operation database service for standalone lfx usage.
+
+    This provides a database service interface that always returns NoopSession,
+    allowing lfx to work without a real database connection.
+    """
+
+    @asynccontextmanager
+    async def _with_session(self):
+        """Internal method to create a session. DO NOT USE DIRECTLY.
+
+        Use session_scope() for write operations or session_scope_readonly() for read operations.
+        This method does not handle commits - it only provides a raw session.
+        """
+        from lfx.services.session import NoopSession
+
+        async with NoopSession() as session:
+            yield session
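NoopDatabaseService gives standalone lfx installs a database service whose sessions are harmless no-ops. A sketch of the contract it satisfies; ordinary code should reach sessions through session_scope() rather than the private _with_session() used here purely for illustration:

# Sketch only: demonstrates the _with_session() contract that the session
# helpers in lfx/services/deps.py rely on internally.
import asyncio

from lfx.services.database import NoopDatabaseService


async def main() -> None:
    db_service = NoopDatabaseService()
    async with db_service._with_session() as session:  # noqa: SLF001
        # In standalone mode this is a NoopSession whose operations do nothing;
        # real deployments register an actual database service instead.
        print(type(session).__name__)


asyncio.run(main())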
lfx/services/deps.py CHANGED
@@ -2,14 +2,21 @@
 
 from __future__ import annotations
 
-import inspect
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager, suppress
+from http import HTTPStatus
 from typing import TYPE_CHECKING
 
+from fastapi import HTTPException
+from sqlalchemy.exc import InvalidRequestError
+
 from lfx.log.logger import logger
 from lfx.services.schema import ServiceType
 
 if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from sqlalchemy.ext.asyncio import AsyncSession
+
     from lfx.services.interfaces import (
         CacheServiceProtocol,
         ChatServiceProtocol,
@@ -52,11 +59,21 @@ def get_service(service_type: ServiceType, default=None):
         return None
 
 
-def get_db_service() -> DatabaseServiceProtocol | None:
-    """Retrieves the database service instance."""
+def get_db_service() -> DatabaseServiceProtocol:
+    """Retrieves the database service instance.
+
+    Returns a NoopDatabaseService if no real database service is available,
+    ensuring that session_scope() always has a valid database service to work with.
+    """
+    from lfx.services.database.service import NoopDatabaseService
     from lfx.services.schema import ServiceType
 
-    return get_service(ServiceType.DATABASE_SERVICE)
+    db_service = get_service(ServiceType.DATABASE_SERVICE)
+    if db_service is None:
+        # Return noop database service when no real database service is available
+        # This allows lfx to work in standalone mode without requiring database setup
+        return NoopDatabaseService()
+    return db_service
 
 
 def get_storage_service() -> StorageServiceProtocol | None:
@@ -101,29 +118,77 @@ def get_tracing_service() -> TracingServiceProtocol | None:
     return get_service(ServiceType.TRACING_SERVICE)
 
 
+async def get_session():
+    msg = "get_session is deprecated, use session_scope instead"
+    logger.warning(msg)
+    raise NotImplementedError(msg)
+
+
+async def injectable_session_scope():
+    async with session_scope() as session:
+        yield session
+
+
 @asynccontextmanager
-async def session_scope():
-    """Session scope context manager.
+async def session_scope() -> AsyncGenerator[AsyncSession, None]:
+    """Context manager for managing an async session scope with auto-commit for write operations.
 
-    Returns a real session if database service is available, otherwise a NoopSession.
-    This ensures code can always call session methods without None checking.
-    """
-    db_service = get_db_service()
-    if db_service is None or inspect.isabstract(type(db_service)):
-        from lfx.services.session import NoopSession
+    This is used with `async with session_scope() as session:` for direct session management.
+    It ensures that the session is properly committed if no exceptions occur,
+    and rolled back if an exception is raised.
+    Use session_scope_readonly() for read-only operations to avoid unnecessary commits and locks.
 
-        yield NoopSession()
-        return
+    Yields:
+        AsyncSession: The async session object.
 
-    async with db_service.with_session() as session:
+    Raises:
+        Exception: If an error occurs during the session scope.
+    """
+    db_service = get_db_service()
+    async with db_service._with_session() as session:  # noqa: SLF001
+        try:
+            yield session
+            await session.commit()
+        except Exception as e:
+            # Log at appropriate level based on error type
+            if isinstance(e, HTTPException):
+                if HTTPStatus.BAD_REQUEST.value <= e.status_code < HTTPStatus.INTERNAL_SERVER_ERROR.value:
+                    # Client errors (4xx) - log at info level
+                    await logger.ainfo(f"Client error during session scope: {e.status_code}: {e.detail}")
+                else:
+                    # Server errors (5xx) or other - log at error level
+                    await logger.aexception("An error occurred during the session scope.", exception=e)
+            else:
+                # Non-HTTP exceptions - log at error level
+                await logger.aexception("An error occurred during the session scope.", exception=e)
+
+            # Only rollback if session is still in a valid state
+            if session.is_active:
+                with suppress(InvalidRequestError):
+                    # Session was already rolled back by SQLAlchemy
+                    await session.rollback()
+            raise
+    # No explicit close needed - _with_session() handles it
+
+
+async def injectable_session_scope_readonly():
+    async with session_scope_readonly() as session:
         yield session
 
 
-def get_session():
-    """Get database session.
+@asynccontextmanager
+async def session_scope_readonly() -> AsyncGenerator[AsyncSession, None]:
+    """Context manager for managing a read-only async session scope.
+
+    This is used with `async with session_scope_readonly() as session:` for direct session management
+    when only reading data. No auto-commit or rollback - the session is simply closed after use.
 
-    Returns a session from the database service if available, otherwise NoopSession.
+    Yields:
+        AsyncSession: The async session object.
     """
-    msg = "get_session is deprecated, use session_scope instead"
-    logger.warning(msg)
-    raise NotImplementedError(msg)
+    db_service = get_db_service()
+    async with db_service._with_session() as session:  # noqa: SLF001
+        yield session
+        # No commit - read-only
+        # No clean up - client is responsible (plus, read only sessions are not committed)
+        # No explicit close needed - _with_session() handles it
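The two context managers split write and read paths: session_scope() commits on clean exit and rolls back on errors, while session_scope_readonly() never commits. A usage sketch; the bodies are placeholders and the concrete session type depends on which database service is registered:

# Usage sketch grounded in the deps.py diff above.
import asyncio

from lfx.services.deps import session_scope, session_scope_readonly


async def write_path() -> None:
    async with session_scope() as session:
        # Add or update ORM objects here; the commit happens automatically when
        # the block exits without raising, and errors trigger a rollback.
        ...


async def read_path() -> None:
    async with session_scope_readonly() as session:
        # Run queries here; nothing is committed for read-only work.
        ...


asyncio.run(write_path())
asyncio.run(read_path())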
lfx/services/interfaces.py CHANGED
@@ -41,6 +41,11 @@ class StorageServiceProtocol(Protocol):
         """Build the full path of a file in the storage."""
         ...
 
+    @abstractmethod
+    def parse_file_path(self, full_path: str) -> tuple[str, str]:
+        """Parse a full storage path to extract flow_id and file_name."""
+        ...
+
 
 class SettingsServiceProtocol(Protocol):
     """Protocol for settings service."""