lfx-nightly 0.1.12.dev38__py3-none-any.whl → 0.1.12.dev40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lfx-nightly might be problematic; see the registry's advisory page for more details.

Files changed (38)
  1. lfx/_assets/component_index.json +1 -0
  2. lfx/cli/run.py +7 -3
  3. lfx/components/agents/agent.py +1 -0
  4. lfx/components/arxiv/arxiv.py +8 -2
  5. lfx/components/composio/__init__.py +71 -17
  6. lfx/components/composio/agentql_composio.py +11 -0
  7. lfx/components/composio/agiled_composio.py +11 -0
  8. lfx/components/composio/bolna_composio.py +11 -0
  9. lfx/components/composio/brightdata_composio.py +11 -0
  10. lfx/components/composio/canvas_composio.py +11 -0
  11. lfx/components/composio/digicert_composio.py +11 -0
  12. lfx/components/composio/finage_composio.py +11 -0
  13. lfx/components/composio/fixer_composio.py +11 -0
  14. lfx/components/composio/flexisign_composio.py +11 -0
  15. lfx/components/composio/freshdesk_composio.py +11 -0
  16. lfx/components/composio/googleclassroom_composio.py +11 -0
  17. lfx/components/composio/instagram_composio.py +11 -0
  18. lfx/components/composio/jira_composio.py +11 -0
  19. lfx/components/composio/jotform_composio.py +11 -0
  20. lfx/components/composio/listennotes_composio.py +11 -0
  21. lfx/components/composio/missive_composio.py +11 -0
  22. lfx/components/composio/pandadoc_composio.py +11 -0
  23. lfx/components/composio/timelinesai_composio.py +11 -0
  24. lfx/components/helpers/current_date.py +1 -1
  25. lfx/components/lmstudio/lmstudiomodel.py +9 -5
  26. lfx/components/logic/__init__.py +3 -0
  27. lfx/components/logic/llm_conditional_router.py +65 -21
  28. lfx/components/nvidia/nvidia.py +3 -3
  29. lfx/components/processing/__init__.py +3 -0
  30. lfx/components/processing/dynamic_create_data.py +357 -0
  31. lfx/components/processing/lambda_filter.py +82 -18
  32. lfx/custom/validate.py +12 -3
  33. lfx/interface/components.py +336 -8
  34. lfx/services/settings/base.py +7 -0
  35. {lfx_nightly-0.1.12.dev38.dist-info → lfx_nightly-0.1.12.dev40.dist-info}/METADATA +1 -1
  36. {lfx_nightly-0.1.12.dev38.dist-info → lfx_nightly-0.1.12.dev40.dist-info}/RECORD +38 -18
  37. {lfx_nightly-0.1.12.dev38.dist-info → lfx_nightly-0.1.12.dev40.dist-info}/WHEEL +0 -0
  38. {lfx_nightly-0.1.12.dev38.dist-info → lfx_nightly-0.1.12.dev40.dist-info}/entry_points.txt +0 -0
@@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Any
7
7
  from lfx.custom.custom_component.component import Component
8
8
  from lfx.io import DataInput, HandleInput, IntInput, MultilineInput, Output
9
9
  from lfx.schema.data import Data
10
- from lfx.utils.data_structure import get_data_structure
10
+ from lfx.schema.dataframe import DataFrame
11
11
 
12
12
  if TYPE_CHECKING:
13
13
  from collections.abc import Callable
@@ -25,6 +25,7 @@ class LambdaFilterComponent(Component):
25
25
  name="data",
26
26
  display_name="Data",
27
27
  info="The structured data to filter or transform using a lambda function.",
28
+ input_types=["Data", "DataFrame"],
28
29
  is_list=True,
29
30
  required=True,
30
31
  ),
@@ -63,24 +64,67 @@ class LambdaFilterComponent(Component):
63
64
 
64
65
  outputs = [
65
66
  Output(
66
- display_name="Filtered Data",
67
- name="filtered_data",
68
- method="filter_data",
67
+ display_name="Output",
68
+ name="data_output",
69
+ method="process_as_data",
70
+ ),
71
+ Output(
72
+ display_name="Output",
73
+ name="dataframe_output",
74
+ method="process_as_dataframe",
69
75
  ),
70
76
  ]
71
77
 
72
78
  def get_data_structure(self, data):
73
- """Extract the structure of a dictionary, replacing values with their types."""
74
- return {k: get_data_structure(v) for k, v in data.items()}
79
+ """Extract the structure of data, replacing values with their types."""
80
+ if isinstance(data, list):
81
+ # For lists, get structure of first item if available
82
+ if data:
83
+ return [self.get_data_structure(data[0])]
84
+ return []
85
+ if isinstance(data, dict):
86
+ return {k: self.get_data_structure(v) for k, v in data.items()}
87
+ # For primitive types, return the type name
88
+ return type(data).__name__
75
89
 
76
90
  def _validate_lambda(self, lambda_text: str) -> bool:
77
91
  """Validate the provided lambda function text."""
78
92
  # Return False if the lambda function does not start with 'lambda' or does not contain a colon
79
93
  return lambda_text.strip().startswith("lambda") and ":" in lambda_text
80
94
 
81
- async def filter_data(self) -> list[Data]:
95
+ async def _execute_lambda(self) -> Any:
82
96
  self.log(str(self.data))
83
- data = self.data[0].data if isinstance(self.data, list) else self.data.data
97
+
98
+ # Convert input to a unified format
99
+ if isinstance(self.data, list):
100
+ # Handle list of Data or DataFrame objects
101
+ combined_data = []
102
+ for item in self.data:
103
+ if isinstance(item, DataFrame):
104
+ # DataFrame to list of dicts
105
+ combined_data.extend(item.to_dict(orient="records"))
106
+ elif hasattr(item, "data"):
107
+ # Data object
108
+ if isinstance(item.data, dict):
109
+ combined_data.append(item.data)
110
+ elif isinstance(item.data, list):
111
+ combined_data.extend(item.data)
112
+
113
+ # If we have a single dict, unwrap it so lambdas can access it directly
114
+ if len(combined_data) == 1 and isinstance(combined_data[0], dict):
115
+ data = combined_data[0]
116
+ elif len(combined_data) == 0:
117
+ data = {}
118
+ else:
119
+ data = combined_data # type: ignore[assignment]
120
+ elif isinstance(self.data, DataFrame):
121
+ # Single DataFrame to list of dicts
122
+ data = self.data.to_dict(orient="records")
123
+ elif hasattr(self.data, "data"):
124
+ # Single Data object
125
+ data = self.data.data
126
+ else:
127
+ data = self.data
84
128
 
85
129
  dump = json.dumps(data)
86
130
  self.log(str(data))
@@ -142,13 +186,33 @@ class LambdaFilterComponent(Component):
142
186
  fn: Callable[[Any], Any] = eval(lambda_text) # noqa: S307
143
187
 
144
188
  # Apply the lambda function to the data
145
- processed_data = fn(data)
146
-
147
- # If it's a dict, wrap it in a Data object
148
- if isinstance(processed_data, dict):
149
- return [Data(**processed_data)]
150
- # If it's a list, convert each item to a Data object
151
- if isinstance(processed_data, list):
152
- return [Data(**item) if isinstance(item, dict) else Data(text=str(item)) for item in processed_data]
153
- # If it's anything else, convert to string and wrap in a Data object
154
- return [Data(text=str(processed_data))]
189
+ return fn(data)
190
+
191
+ async def process_as_data(self) -> Data:
192
+ """Process the data and return as a Data object."""
193
+ result = await self._execute_lambda()
194
+
195
+ # Convert result to Data based on type
196
+ if isinstance(result, dict):
197
+ return Data(data=result)
198
+ if isinstance(result, list):
199
+ return Data(data={"_results": result})
200
+ # For other types, convert to string
201
+ return Data(data={"text": str(result)})
202
+
203
+ async def process_as_dataframe(self) -> DataFrame:
204
+ """Process the data and return as a DataFrame."""
205
+ result = await self._execute_lambda()
206
+
207
+ # Convert result to DataFrame based on type
208
+ if isinstance(result, list):
209
+ # Check if it's a list of dicts
210
+ if all(isinstance(item, dict) for item in result):
211
+ return DataFrame(result)
212
+ # List of non-dicts: wrap each value
213
+ return DataFrame([{"value": item} for item in result])
214
+ if isinstance(result, dict):
215
+ # Single dict becomes single-row DataFrame
216
+ return DataFrame([result])
217
+ # Other types: convert to string and wrap
218
+ return DataFrame([{"value": str(result)}])
lfx/custom/validate.py CHANGED
@@ -348,9 +348,18 @@ def prepare_global_scope(module):
348
348
  for node in imports:
349
349
  for alias in node.names:
350
350
  module_name = alias.name
351
- variable_name = alias.asname or alias.name
352
- # Let importlib.import_module raise its own ModuleNotFoundError with the actual missing module
353
- exec_globals[variable_name] = importlib.import_module(module_name)
351
+ # Import the full module path to ensure submodules are loaded
352
+ module_obj = importlib.import_module(module_name)
353
+
354
+ # Determine the variable name
355
+ if alias.asname:
356
+ # For aliased imports like "import yfinance as yf", use the imported module directly
357
+ variable_name = alias.asname
358
+ exec_globals[variable_name] = module_obj
359
+ else:
360
+ # For dotted imports like "urllib.request", set the variable to the top-level package
361
+ variable_name = module_name.split(".")[0]
362
+ exec_globals[variable_name] = importlib.import_module(variable_name)
354
363
 
355
364
  for node in import_froms:
356
365
  module_names_to_try = [node.module]
@@ -1,10 +1,16 @@
1
1
  import asyncio
2
+ import hashlib
2
3
  import importlib
4
+ import inspect
3
5
  import json
6
+ import os
4
7
  import pkgutil
8
+ import time
5
9
  from pathlib import Path
6
10
  from typing import TYPE_CHECKING, Any, Optional
7
11
 
12
+ import orjson
13
+
8
14
  from lfx.constants import BASE_COMPONENTS_PATH
9
15
  from lfx.custom.utils import abuild_custom_components, create_component_template
10
16
  from lfx.log.logger import logger
@@ -31,16 +37,306 @@ class ComponentCache:
31
37
  component_cache = ComponentCache()
32
38
 
33
39
 
34
- async def import_langflow_components():
40
+ def _parse_dev_mode() -> tuple[bool, list[str] | None]:
41
+ """Parse LFX_DEV to determine dev mode and which modules to load.
42
+
43
+ Development mode must be explicitly enabled via the LFX_DEV environment variable.
44
+ When enabled, components are always rebuilt dynamically to reflect code changes.
45
+ When disabled or not set, the prebuilt index is used for fast startup.
46
+
47
+ Supports two modes:
48
+ - Boolean mode: LFX_DEV=1/true/yes loads all modules dynamically
49
+ - List mode: LFX_DEV=mistral,openai,anthropic loads only specified modules
50
+
51
+ Returns:
52
+ Tuple of (dev_mode_enabled, module_list)
53
+ - If module_list is None, load all modules
54
+ - If module_list is a list, only load those specific modules
55
+ """
56
+ lfx_dev = os.getenv("LFX_DEV", "").strip()
57
+ if not lfx_dev:
58
+ return (False, None)
59
+
60
+ # Boolean mode: "1", "true", "yes" enables dev mode
61
+ if lfx_dev.lower() in {"1", "true", "yes"}:
62
+ return (True, None) # Load all modules
63
+
64
+ # Boolean mode: "0", "false", "no" explicitly disables dev mode
65
+ if lfx_dev.lower() in {"0", "false", "no"}:
66
+ return (False, None)
67
+
68
+ # List mode: comma-separated values
69
+ modules = [m.strip().lower() for m in lfx_dev.split(",") if m.strip()]
70
+ if modules:
71
+ return (True, modules)
72
+
73
+ return (False, None)
74
+
75
+
76
+ def _read_component_index(custom_path: str | None = None) -> dict | None:
77
+ """Read and validate the prebuilt component index.
78
+
79
+ Args:
80
+ custom_path: Optional custom path or URL to index file. If None, uses built-in index.
81
+
82
+ Returns:
83
+ The index dictionary if valid, None otherwise
84
+ """
85
+ try:
86
+ import lfx
87
+
88
+ # Determine index location
89
+ if custom_path:
90
+ # Check if it's a URL
91
+ if custom_path.startswith(("http://", "https://")):
92
+ # Fetch from URL
93
+ import httpx
94
+
95
+ try:
96
+ response = httpx.get(custom_path, timeout=10.0)
97
+ response.raise_for_status()
98
+ blob = orjson.loads(response.content)
99
+ except httpx.HTTPError as e:
100
+ logger.warning(f"Failed to fetch component index from {custom_path}: {e}")
101
+ return None
102
+ except orjson.JSONDecodeError as e:
103
+ logger.warning(f"Component index from {custom_path} is corrupted or invalid JSON: {e}")
104
+ return None
105
+ else:
106
+ # Load from file path
107
+ index_path = Path(custom_path)
108
+ if not index_path.exists():
109
+ logger.warning(f"Custom component index not found at {custom_path}")
110
+ return None
111
+ try:
112
+ blob = orjson.loads(index_path.read_bytes())
113
+ except orjson.JSONDecodeError as e:
114
+ logger.warning(f"Component index at {custom_path} is corrupted or invalid JSON: {e}")
115
+ return None
116
+ else:
117
+ # Use built-in index
118
+ pkg_dir = Path(inspect.getfile(lfx)).parent
119
+ index_path = pkg_dir / "_assets" / "component_index.json"
120
+
121
+ if not index_path.exists():
122
+ return None
123
+
124
+ try:
125
+ blob = orjson.loads(index_path.read_bytes())
126
+ except orjson.JSONDecodeError as e:
127
+ logger.warning(f"Built-in component index is corrupted or invalid JSON: {e}")
128
+ return None
129
+
130
+ # Integrity check: verify SHA256
131
+ tmp = dict(blob)
132
+ sha = tmp.pop("sha256", None)
133
+ if not sha:
134
+ logger.warning("Component index missing SHA256 hash - index may be tampered")
135
+ return None
136
+
137
+ # Use orjson for hash calculation to match build script
138
+ calc = hashlib.sha256(orjson.dumps(tmp, option=orjson.OPT_SORT_KEYS)).hexdigest()
139
+ if sha != calc:
140
+ logger.warning(
141
+ "Component index integrity check failed - SHA256 mismatch (file may be corrupted or tampered)"
142
+ )
143
+ return None
144
+
145
+ # Version check: ensure index matches installed langflow version
146
+ from importlib.metadata import version
147
+
148
+ installed_version = version("langflow")
149
+ if blob.get("version") != installed_version:
150
+ logger.debug(
151
+ f"Component index version mismatch: index={blob.get('version')}, installed={installed_version}"
152
+ )
153
+ return None
154
+ except Exception as e: # noqa: BLE001
155
+ logger.warning(f"Unexpected error reading component index: {type(e).__name__}: {e}")
156
+ return None
157
+ return blob
158
+
159
+
160
+ def _get_cache_path() -> Path:
161
+ """Get the path for the cached component index in the user's cache directory."""
162
+ from platformdirs import user_cache_dir
163
+
164
+ cache_dir = Path(user_cache_dir("lfx", "langflow"))
165
+ cache_dir.mkdir(parents=True, exist_ok=True)
166
+ return cache_dir / "component_index.json"
167
+
168
+
169
+ def _save_generated_index(modules_dict: dict) -> None:
170
+ """Save a dynamically generated component index to cache for future use.
171
+
172
+ Args:
173
+ modules_dict: Dictionary of components by category
174
+ """
175
+ try:
176
+ cache_path = _get_cache_path()
177
+
178
+ # Convert modules_dict to entries format
179
+ entries = [[top_level, components] for top_level, components in modules_dict.items()]
180
+
181
+ # Calculate metadata
182
+ num_modules = len(modules_dict)
183
+ num_components = sum(len(components) for components in modules_dict.values())
184
+
185
+ # Get version
186
+ from importlib.metadata import version
187
+
188
+ langflow_version = version("langflow")
189
+
190
+ # Build index structure
191
+ index = {
192
+ "version": langflow_version,
193
+ "metadata": {
194
+ "num_modules": num_modules,
195
+ "num_components": num_components,
196
+ },
197
+ "entries": entries,
198
+ }
199
+
200
+ # Calculate hash
201
+ payload = orjson.dumps(index, option=orjson.OPT_SORT_KEYS)
202
+ index["sha256"] = hashlib.sha256(payload).hexdigest()
203
+
204
+ # Write to cache
205
+ json_bytes = orjson.dumps(index, option=orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2)
206
+ cache_path.write_bytes(json_bytes)
207
+
208
+ logger.debug(f"Saved generated component index to cache: {cache_path}")
209
+ except Exception as e: # noqa: BLE001
210
+ logger.debug(f"Failed to save generated index to cache: {e}")
211
+
212
+
213
+ async def _send_telemetry(
214
+ telemetry_service: Any,
215
+ index_source: str,
216
+ modules_dict: dict,
217
+ dev_mode: bool, # noqa: FBT001
218
+ target_modules: list[str] | None,
219
+ start_time_ms: int,
220
+ ) -> None:
221
+ """Send telemetry about component index loading.
222
+
223
+ Args:
224
+ telemetry_service: Telemetry service instance (optional)
225
+ index_source: Source of the index ("builtin", "cache", or "dynamic")
226
+ modules_dict: Dictionary of loaded components
227
+ dev_mode: Whether dev mode is enabled
228
+ target_modules: List of filtered modules if any
229
+ start_time_ms: Start time in milliseconds
230
+ """
231
+ if not telemetry_service:
232
+ return
233
+
234
+ try:
235
+ # Calculate metrics
236
+ num_modules = len(modules_dict)
237
+ num_components = sum(len(components) for components in modules_dict.values())
238
+ load_time_ms = int(time.time() * 1000) - start_time_ms
239
+ filtered_modules = ",".join(target_modules) if target_modules else None
240
+
241
+ # Import the payload class dynamically to avoid circular imports
242
+ from langflow.services.telemetry.schema import ComponentIndexPayload
243
+
244
+ payload = ComponentIndexPayload(
245
+ index_source=index_source,
246
+ num_modules=num_modules,
247
+ num_components=num_components,
248
+ dev_mode=dev_mode,
249
+ filtered_modules=filtered_modules,
250
+ load_time_ms=load_time_ms,
251
+ )
252
+
253
+ await telemetry_service.log_component_index(payload)
254
+ except Exception as e: # noqa: BLE001
255
+ # Don't fail component loading if telemetry fails
256
+ await logger.adebug(f"Failed to send component index telemetry: {e}")
257
+
258
+
259
+ async def import_langflow_components(
260
+ settings_service: Optional["SettingsService"] = None, telemetry_service: Any | None = None
261
+ ):
35
262
  """Asynchronously discovers and loads all built-in Langflow components with module-level parallelization.
36
263
 
264
+ In production mode (non-dev), attempts to load components from a prebuilt static index for instant startup.
265
+ Falls back to dynamic module scanning if index is unavailable or invalid. When dynamic loading is used,
266
+ the generated index is cached for future use.
267
+
37
268
  Scans the `lfx.components` package and its submodules in parallel, instantiates classes that are subclasses
38
269
  of `Component` or `CustomComponent`, and generates their templates. Components are grouped by their
39
270
  top-level subpackage name.
40
271
 
272
+ Args:
273
+ settings_service: Optional settings service to get custom index path
274
+ telemetry_service: Optional telemetry service to log component loading metrics
275
+
41
276
  Returns:
42
277
  A dictionary with a "components" key mapping top-level package names to their component templates.
43
278
  """
279
+ # Start timer for telemetry
280
+ start_time_ms = int(time.time() * 1000)
281
+ index_source = None
282
+
283
+ # Track if we need to save the index after building
284
+ should_save_index = False
285
+
286
+ # Fast path: load from prebuilt index if not in dev mode
287
+ dev_mode_enabled, target_modules = _parse_dev_mode()
288
+ if not dev_mode_enabled:
289
+ # Get custom index path from settings if available
290
+ custom_index_path = None
291
+ if settings_service and settings_service.settings.components_index_path:
292
+ custom_index_path = settings_service.settings.components_index_path
293
+ await logger.adebug(f"Using custom component index: {custom_index_path}")
294
+
295
+ index = _read_component_index(custom_index_path)
296
+ if index and "entries" in index:
297
+ source = custom_index_path or "built-in index"
298
+ await logger.adebug(f"Loading components from {source}")
299
+ index_source = "builtin"
300
+ # Reconstruct modules_dict from index entries
301
+ modules_dict = {}
302
+ for top_level, components in index["entries"]:
303
+ if top_level not in modules_dict:
304
+ modules_dict[top_level] = {}
305
+ modules_dict[top_level].update(components)
306
+ await logger.adebug(f"Loaded {len(modules_dict)} component categories from index")
307
+ await _send_telemetry(
308
+ telemetry_service, index_source, modules_dict, dev_mode_enabled, target_modules, start_time_ms
309
+ )
310
+ return {"components": modules_dict}
311
+
312
+ # Index failed to load in production - try cache before building
313
+ await logger.adebug("Prebuilt index not available, checking cache")
314
+ try:
315
+ cache_path = _get_cache_path()
316
+ if cache_path.exists():
317
+ await logger.adebug(f"Attempting to load from cache: {cache_path}")
318
+ index = _read_component_index(str(cache_path))
319
+ if index and "entries" in index:
320
+ await logger.adebug("Loading components from cached index")
321
+ index_source = "cache"
322
+ modules_dict = {}
323
+ for top_level, components in index["entries"]:
324
+ if top_level not in modules_dict:
325
+ modules_dict[top_level] = {}
326
+ modules_dict[top_level].update(components)
327
+ await logger.adebug(f"Loaded {len(modules_dict)} component categories from cache")
328
+ await _send_telemetry(
329
+ telemetry_service, index_source, modules_dict, dev_mode_enabled, target_modules, start_time_ms
330
+ )
331
+ return {"components": modules_dict}
332
+ except Exception as e: # noqa: BLE001
333
+ await logger.adebug(f"Cache load failed: {e}")
334
+
335
+ # No cache available, will build and save
336
+ await logger.adebug("Falling back to dynamic loading")
337
+ should_save_index = True
338
+
339
+ # Fallback: dynamic loading (dev mode or index unavailable)
44
340
  modules_dict = {}
45
341
  try:
46
342
  import lfx.components as components_pkg
@@ -52,8 +348,21 @@ async def import_langflow_components():
52
348
  module_names = []
53
349
  for _, modname, _ in pkgutil.walk_packages(components_pkg.__path__, prefix=components_pkg.__name__ + "."):
54
350
  # Skip if the module is in the deactivated folder
55
- if "deactivated" not in modname:
56
- module_names.append(modname)
351
+ if "deactivated" in modname:
352
+ continue
353
+
354
+ # If specific modules requested, filter by top-level module name
355
+ if target_modules:
356
+ # Extract top-level: "lfx.components.mistral.xyz" -> "mistral"
357
+ parts = modname.split(".")
358
+ if len(parts) > MIN_MODULE_PARTS and parts[2].lower() not in target_modules:
359
+ continue
360
+
361
+ module_names.append(modname)
362
+
363
+ if target_modules:
364
+ await logger.adebug(f"LFX_DEV module filter active: loading only {target_modules}")
365
+ await logger.adebug(f"Found {len(module_names)} modules matching filter")
57
366
 
58
367
  if not module_names:
59
368
  return {"components": modules_dict}
@@ -81,6 +390,17 @@ async def import_langflow_components():
81
390
  modules_dict[top_level] = {}
82
391
  modules_dict[top_level].update(components)
83
392
 
393
+ # Save the generated index to cache if needed (production mode with missing index)
394
+ if should_save_index and modules_dict:
395
+ await logger.adebug("Saving generated component index to cache")
396
+ _save_generated_index(modules_dict)
397
+
398
+ # Send telemetry for dynamic loading
399
+ index_source = "dynamic"
400
+ await _send_telemetry(
401
+ telemetry_service, index_source, modules_dict, dev_mode_enabled, target_modules, start_time_ms
402
+ )
403
+
84
404
  return {"components": modules_dict}
85
405
 
86
406
 
@@ -100,8 +420,8 @@ def _process_single_module(modname: str) -> tuple[str, dict] | None:
100
420
  # TODO: Surface these errors to the UI in a friendly manner
101
421
  logger.error(f"Failed to import module {modname}: {e}", exc_info=True)
102
422
  return None
103
- # Extract the top-level subpackage name after "langflow.components."
104
- # e.g., "langflow.components.Notion.add_content_to_page" -> "Notion"
423
+ # Extract the top-level subpackage name after "lfx.components."
424
+ # e.g., "lfx.components.Notion.add_content_to_page" -> "Notion"
105
425
  mod_parts = modname.split(".")
106
426
  if len(mod_parts) <= MIN_MODULE_PARTS:
107
427
  return None
@@ -184,6 +504,7 @@ async def _determine_loading_strategy(settings_service: "SettingsService") -> di
184
504
 
185
505
  async def get_and_cache_all_types_dict(
186
506
  settings_service: "SettingsService",
507
+ telemetry_service: Any | None = None,
187
508
  ):
188
509
  """Retrieves and caches the complete dictionary of component types and templates.
189
510
 
@@ -191,17 +512,24 @@ async def get_and_cache_all_types_dict(
191
512
  components and either fully loads all components or loads only their metadata, depending on the
192
513
  lazy loading setting. Merges built-in and custom components into the cache and returns the
193
514
  resulting dictionary.
515
+
516
+ Args:
517
+ settings_service: Settings service instance
518
+ telemetry_service: Optional telemetry service for tracking component loading metrics
194
519
  """
195
520
  if component_cache.all_types_dict is None:
196
521
  await logger.adebug("Building components cache")
197
522
 
198
- langflow_components = await import_langflow_components()
523
+ langflow_components = await import_langflow_components(settings_service, telemetry_service)
199
524
  custom_components_dict = await _determine_loading_strategy(settings_service)
200
525
 
201
- # merge the dicts
526
+ # Flatten custom dict if it has a "components" wrapper
527
+ custom_flat = custom_components_dict.get("components", custom_components_dict) or {}
528
+
529
+ # Merge built-in and custom components (no wrapper at cache level)
202
530
  component_cache.all_types_dict = {
203
531
  **langflow_components["components"],
204
- **custom_components_dict,
532
+ **custom_flat,
205
533
  }
206
534
  component_count = sum(len(comps) for comps in component_cache.all_types_dict.values())
207
535
  await logger.adebug(f"Loaded {component_count} components")
@@ -158,6 +158,13 @@ class Settings(BaseSettings):
158
158
  disable_track_apikey_usage: bool = False
159
159
  remove_api_keys: bool = False
160
160
  components_path: list[str] = []
161
+ components_index_path: str | None = None
162
+ """Path or URL to a prebuilt component index JSON file.
163
+
164
+ If None, uses the built-in index at lfx/_assets/component_index.json.
165
+ Set to a file path (e.g., '/path/to/index.json') or URL (e.g., 'https://example.com/index.json')
166
+ to use a custom index.
167
+ """
161
168
  langchain_cache: str = "InMemoryCache"
162
169
  load_flows_path: str | None = None
163
170
  bundle_urls: list[str] = []
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lfx-nightly
3
- Version: 0.1.12.dev38
3
+ Version: 0.1.12.dev40
4
4
  Summary: Langflow Executor - A lightweight CLI tool for executing and serving Langflow AI flows
5
5
  Author-email: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
6
6
  Requires-Python: <3.14,>=3.10