lfx-nightly 0.2.0.dev26__py3-none-any.whl → 0.2.0.dev41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. lfx/_assets/component_index.json +1 -1
  2. lfx/base/agents/agent.py +8 -3
  3. lfx/base/agents/altk_base_agent.py +16 -3
  4. lfx/base/data/base_file.py +14 -4
  5. lfx/base/data/docling_utils.py +61 -10
  6. lfx/base/data/storage_utils.py +109 -0
  7. lfx/base/mcp/util.py +2 -2
  8. lfx/base/models/anthropic_constants.py +21 -12
  9. lfx/cli/commands.py +3 -1
  10. lfx/components/docling/chunk_docling_document.py +3 -1
  11. lfx/components/docling/export_docling_document.py +3 -1
  12. lfx/components/files_and_knowledge/file.py +59 -7
  13. lfx/components/files_and_knowledge/save_file.py +79 -12
  14. lfx/components/ibm/watsonx.py +7 -1
  15. lfx/components/input_output/chat_output.py +7 -1
  16. lfx/components/llm_operations/batch_run.py +16 -7
  17. lfx/components/models_and_agents/agent.py +4 -2
  18. lfx/components/models_and_agents/embedding_model.py +6 -76
  19. lfx/components/ollama/ollama.py +9 -4
  20. lfx/components/processing/__init__.py +0 -3
  21. lfx/custom/directory_reader/directory_reader.py +5 -2
  22. lfx/graph/graph/base.py +1 -4
  23. lfx/graph/vertex/base.py +1 -4
  24. lfx/schema/image.py +2 -12
  25. lfx/services/interfaces.py +5 -0
  26. lfx/services/manager.py +5 -4
  27. lfx/services/mcp_composer/service.py +38 -12
  28. lfx/services/settings/auth.py +18 -11
  29. lfx/services/settings/base.py +5 -23
  30. lfx/services/storage/local.py +32 -0
  31. lfx/services/storage/service.py +19 -0
  32. lfx/utils/image.py +29 -11
  33. {lfx_nightly-0.2.0.dev26.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/METADATA +1 -1
  34. {lfx_nightly-0.2.0.dev26.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/RECORD +36 -39
  35. lfx/base/embeddings/embeddings_class.py +0 -113
  36. lfx/components/elastic/opensearch_multimodal.py +0 -1575
  37. lfx/components/processing/dataframe_to_toolset.py +0 -259
  38. {lfx_nightly-0.2.0.dev26.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/WHEEL +0 -0
  39. {lfx_nightly-0.2.0.dev26.dist-info → lfx_nightly-0.2.0.dev41.dist-info}/entry_points.txt +0 -0
@@ -73,7 +73,9 @@ class MCPComposerService(Service):
73
73
 
74
74
  def __init__(self):
75
75
  super().__init__()
76
- self.project_composers: dict[str, dict] = {} # project_id -> {process, host, port, sse_url, auth_config}
76
+ self.project_composers: dict[
77
+ str, dict
78
+ ] = {} # project_id -> {process, host, port, streamable_http_url, auth_config}
77
79
  self._start_locks: dict[
78
80
  str, asyncio.Lock
79
81
  ] = {} # Lock to prevent concurrent start operations for the same project
@@ -949,21 +951,24 @@ class MCPComposerService(Service):
949
951
  async def start_project_composer(
950
952
  self,
951
953
  project_id: str,
952
- sse_url: str,
954
+ streamable_http_url: str,
953
955
  auth_config: dict[str, Any] | None,
954
956
  max_retries: int = 3,
955
957
  max_startup_checks: int = 40,
956
958
  startup_delay: float = 2.0,
959
+ *,
960
+ legacy_sse_url: str | None = None,
957
961
  ) -> None:
958
962
  """Start an MCP Composer instance for a specific project.
959
963
 
960
964
  Args:
961
965
  project_id: The project ID
962
- sse_url: The SSE URL to connect to
966
+ streamable_http_url: Streamable HTTP endpoint for the remote Langflow MCP server
963
967
  auth_config: Authentication configuration
964
968
  max_retries: Maximum number of retry attempts (default: 3)
965
969
  max_startup_checks: Number of checks per retry attempt (default: 40)
966
970
  startup_delay: Delay between checks in seconds (default: 2.0)
971
+ legacy_sse_url: Optional legacy SSE URL used for backward compatibility
967
972
 
968
973
  Raises:
969
974
  MCPComposerError: Various specific errors if startup fails
@@ -994,7 +999,13 @@ class MCPComposerService(Service):
994
999
 
995
1000
  try:
996
1001
  await self._do_start_project_composer(
997
- project_id, sse_url, auth_config, max_retries, max_startup_checks, startup_delay
1002
+ project_id,
1003
+ streamable_http_url,
1004
+ auth_config,
1005
+ max_retries,
1006
+ max_startup_checks,
1007
+ startup_delay,
1008
+ legacy_sse_url=legacy_sse_url,
998
1009
  )
999
1010
  finally:
1000
1011
  # Clean up the task reference when done
@@ -1004,25 +1015,29 @@ class MCPComposerService(Service):
1004
1015
  async def _do_start_project_composer(
1005
1016
  self,
1006
1017
  project_id: str,
1007
- sse_url: str,
1018
+ streamable_http_url: str,
1008
1019
  auth_config: dict[str, Any] | None,
1009
1020
  max_retries: int = 3,
1010
1021
  max_startup_checks: int = 40,
1011
1022
  startup_delay: float = 2.0,
1023
+ *,
1024
+ legacy_sse_url: str | None = None,
1012
1025
  ) -> None:
1013
1026
  """Internal method to start an MCP Composer instance.
1014
1027
 
1015
1028
  Args:
1016
1029
  project_id: The project ID
1017
- sse_url: The SSE URL to connect to
1030
+ streamable_http_url: Streamable HTTP endpoint for the remote Langflow MCP server
1018
1031
  auth_config: Authentication configuration
1019
1032
  max_retries: Maximum number of retry attempts (default: 3)
1020
1033
  max_startup_checks: Number of checks per retry attempt (default: 40)
1021
1034
  startup_delay: Delay between checks in seconds (default: 2.0)
1035
+ legacy_sse_url: Optional legacy SSE URL used for backward compatibility
1022
1036
 
1023
1037
  Raises:
1024
1038
  MCPComposerError: Various specific errors if startup fails
1025
1039
  """
1040
+ legacy_sse_url = legacy_sse_url or f"{streamable_http_url.rstrip('/')}/sse"
1026
1041
  if not auth_config:
1027
1042
  no_auth_error_msg = "No auth settings provided"
1028
1043
  raise MCPComposerConfigError(no_auth_error_msg, project_id)
@@ -1126,10 +1141,11 @@ class MCPComposerService(Service):
1126
1141
  project_id,
1127
1142
  project_host,
1128
1143
  project_port,
1129
- sse_url,
1144
+ streamable_http_url,
1130
1145
  auth_config,
1131
1146
  max_startup_checks,
1132
1147
  startup_delay,
1148
+ legacy_sse_url=legacy_sse_url,
1133
1149
  )
1134
1150
 
1135
1151
  except MCPComposerError as e:
@@ -1174,7 +1190,9 @@ class MCPComposerService(Service):
1174
1190
  "process": process,
1175
1191
  "host": project_host,
1176
1192
  "port": project_port,
1177
- "sse_url": sse_url,
1193
+ "streamable_http_url": streamable_http_url,
1194
+ "legacy_sse_url": legacy_sse_url,
1195
+ "sse_url": legacy_sse_url,
1178
1196
  "auth_config": auth_config,
1179
1197
  }
1180
1198
  self._port_to_project[project_port] = project_id
@@ -1209,10 +1227,12 @@ class MCPComposerService(Service):
1209
1227
  project_id: str,
1210
1228
  host: str,
1211
1229
  port: int,
1212
- sse_url: str,
1230
+ streamable_http_url: str,
1213
1231
  auth_config: dict[str, Any] | None = None,
1214
1232
  max_startup_checks: int = 40,
1215
1233
  startup_delay: float = 2.0,
1234
+ *,
1235
+ legacy_sse_url: str | None = None,
1216
1236
  ) -> subprocess.Popen:
1217
1237
  """Start the MCP Composer subprocess for a specific project.
1218
1238
 
@@ -1220,10 +1240,11 @@ class MCPComposerService(Service):
1220
1240
  project_id: The project ID
1221
1241
  host: Host to bind to
1222
1242
  port: Port to bind to
1223
- sse_url: SSE URL to connect to
1243
+ streamable_http_url: Streamable HTTP endpoint to connect to
1224
1244
  auth_config: Authentication configuration
1225
1245
  max_startup_checks: Number of port binding checks (default: 40)
1226
1246
  startup_delay: Delay between checks in seconds (default: 2.0)
1247
+ legacy_sse_url: Optional legacy SSE URL used for backward compatibility when required by tooling
1227
1248
 
1228
1249
  Returns:
1229
1250
  The started subprocess
@@ -1232,6 +1253,9 @@ class MCPComposerService(Service):
1232
1253
  MCPComposerStartupError: If startup fails
1233
1254
  """
1234
1255
  settings = get_settings_service().settings
1256
+ # Some composer tooling still uses the --sse-url flag for backwards compatibility even in HTTP mode.
1257
+ effective_legacy_sse_url = legacy_sse_url or f"{streamable_http_url.rstrip('/')}/sse"
1258
+
1235
1259
  cmd = [
1236
1260
  "uvx",
1237
1261
  f"mcp-composer{settings.mcp_composer_version}",
@@ -1240,9 +1264,11 @@ class MCPComposerService(Service):
1240
1264
  "--host",
1241
1265
  host,
1242
1266
  "--mode",
1243
- "sse",
1267
+ "http",
1268
+ "--endpoint",
1269
+ streamable_http_url,
1244
1270
  "--sse-url",
1245
- sse_url,
1271
+ effective_legacy_sse_url,
1246
1272
  "--disable-composer-tools",
1247
1273
  ]
1248
1274
 
@@ -27,6 +27,16 @@ class AuthSettings(BaseSettings):
27
27
  API_KEY_ALGORITHM: str = "HS256"
28
28
  API_V1_STR: str = "/api/v1"
29
29
 
30
+ # API Key Source Configuration
31
+ API_KEY_SOURCE: Literal["db", "env"] = Field(
32
+ default="db",
33
+ description=(
34
+ "Source for API key validation. "
35
+ "'db' validates against database-stored API keys (default behavior). "
36
+ "'env' validates against the LANGFLOW_API_KEY environment variable."
37
+ ),
38
+ )
39
+
30
40
  AUTO_LOGIN: bool = Field(
31
41
  default=True, # TODO: Set to False in v2.0
32
42
  description=(
@@ -115,19 +125,16 @@ class AuthSettings(BaseSettings):
115
125
  logger.debug("Secret key provided")
116
126
  secret_value = value.get_secret_value() if isinstance(value, SecretStr) else value
117
127
  write_secret_to_file(secret_key_path, secret_value)
118
- else:
119
- logger.debug("No secret key provided, generating a random one")
120
-
121
- if secret_key_path.exists():
122
- value = read_secret_from_file(secret_key_path)
123
- logger.debug("Loaded secret key")
124
- if not value:
125
- value = secrets.token_urlsafe(32)
126
- write_secret_to_file(secret_key_path, value)
127
- logger.debug("Saved secret key")
128
- else:
128
+ elif secret_key_path.exists():
129
+ value = read_secret_from_file(secret_key_path)
130
+ logger.debug("Loaded secret key")
131
+ if not value:
129
132
  value = secrets.token_urlsafe(32)
130
133
  write_secret_to_file(secret_key_path, value)
131
134
  logger.debug("Saved secret key")
135
+ else:
136
+ value = secrets.token_urlsafe(32)
137
+ write_secret_to_file(secret_key_path, value)
138
+ logger.debug("Saved secret key")
132
139
 
133
140
  return value if isinstance(value, SecretStr) else SecretStr(value).get_secret_value()
@@ -477,12 +477,10 @@ class Settings(BaseSettings):
477
477
  msg = f"Invalid database_url provided: '{value}'"
478
478
  raise ValueError(msg)
479
479
 
480
- logger.debug("No database_url provided, trying LANGFLOW_DATABASE_URL env variable")
481
480
  if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"):
482
481
  value = langflow_database_url
483
- logger.debug("Using LANGFLOW_DATABASE_URL env variable.")
482
+ logger.debug("Using LANGFLOW_DATABASE_URL env variable")
484
483
  else:
485
- logger.debug("No database_url env variable, using sqlite database")
486
484
  # Originally, we used sqlite:///./langflow.db
487
485
  # so we need to migrate to the new format
488
486
  # if there is a database in that location
@@ -498,10 +496,8 @@ class Settings(BaseSettings):
498
496
 
499
497
  if info.data["save_db_in_config_dir"]:
500
498
  database_dir = info.data["config_dir"]
501
- logger.debug(f"Saving database to config_dir: {database_dir}")
502
499
  else:
503
500
  database_dir = Path(__file__).parent.parent.parent.resolve()
504
- logger.debug(f"Saving database to langflow directory: {database_dir}")
505
501
 
506
502
  pre_db_file_name = "langflow-pre.db"
507
503
  db_file_name = "langflow.db"
@@ -524,7 +520,6 @@ class Settings(BaseSettings):
524
520
  logger.debug(f"Creating new database at {new_pre_path}")
525
521
  final_path = new_pre_path
526
522
  elif Path(new_path).exists():
527
- logger.debug(f"Database already exists at {new_path}, using it")
528
523
  final_path = new_path
529
524
  elif Path(f"./{db_file_name}").exists():
530
525
  try:
@@ -568,15 +563,10 @@ class Settings(BaseSettings):
568
563
 
569
564
  if not value:
570
565
  value = [BASE_COMPONENTS_PATH]
571
- logger.debug("Setting default components path to components_path")
572
- else:
573
- if isinstance(value, Path):
574
- value = [str(value)]
575
- elif isinstance(value, list):
576
- value = [str(p) if isinstance(p, Path) else p for p in value]
577
- logger.debug("Adding default components path to components_path")
578
-
579
- logger.debug(f"Components path: {value}")
566
+ elif isinstance(value, Path):
567
+ value = [str(value)]
568
+ elif isinstance(value, list):
569
+ value = [str(p) if isinstance(p, Path) else p for p in value]
580
570
  return value
581
571
 
582
572
  model_config = SettingsConfigDict(validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_")
@@ -587,13 +577,10 @@ class Settings(BaseSettings):
587
577
  self.dev = dev
588
578
 
589
579
  def update_settings(self, **kwargs) -> None:
590
- logger.debug("Updating settings")
591
580
  for key, value in kwargs.items():
592
581
  # value may contain sensitive information, so we don't want to log it
593
582
  if not hasattr(self, key):
594
- logger.debug(f"Key {key} not found in settings")
595
583
  continue
596
- logger.debug(f"Updating {key}")
597
584
  if isinstance(getattr(self, key), list):
598
585
  # value might be a '[something]' string
599
586
  value_ = value
@@ -604,17 +591,12 @@ class Settings(BaseSettings):
604
591
  item_ = str(item) if isinstance(item, Path) else item
605
592
  if item_ not in getattr(self, key):
606
593
  getattr(self, key).append(item_)
607
- logger.debug(f"Extended {key}")
608
594
  else:
609
595
  value_ = str(value_) if isinstance(value_, Path) else value_
610
596
  if value_ not in getattr(self, key):
611
597
  getattr(self, key).append(value_)
612
- logger.debug(f"Appended {key}")
613
-
614
598
  else:
615
599
  setattr(self, key, value)
616
- logger.debug(f"Updated {key}")
617
- logger.debug(f"{key}: {getattr(self, key)}")
618
600
 
619
601
  @property
620
602
  def voice_mode_available(self) -> bool:
@@ -57,6 +57,38 @@ class LocalStorageService(StorageService):
57
57
  """Build the full path of a file in the local storage."""
58
58
  return str(self.data_dir / flow_id / file_name)
59
59
 
60
+ def parse_file_path(self, full_path: str) -> tuple[str, str]:
61
+ """Parse a full local storage path to extract flow_id and file_name.
62
+
63
+ Args:
64
+ full_path: Filesystem path, may or may not include data_dir
65
+ e.g., "/data/user_123/image.png" or "user_123/image.png"
66
+
67
+ Returns:
68
+ tuple[str, str]: A tuple of (flow_id, file_name)
69
+
70
+ Examples:
71
+ >>> parse_file_path("/data/user_123/image.png") # with data_dir
72
+ ("user_123", "image.png")
73
+ >>> parse_file_path("user_123/image.png") # without data_dir
74
+ ("user_123", "image.png")
75
+ """
76
+ data_dir_str = str(self.data_dir)
77
+
78
+ # Remove data_dir if present (but don't require it)
79
+ path_without_prefix = full_path
80
+ if full_path.startswith(data_dir_str):
81
+ path_without_prefix = full_path[len(data_dir_str) :].lstrip("/")
82
+
83
+ # Split from the right to get the filename
84
+ # Everything before the last "/" is the flow_id
85
+ if "/" not in path_without_prefix:
86
+ return "", path_without_prefix
87
+
88
+ # Use rsplit to split from the right, limiting to 1 split
89
+ flow_id, file_name = path_without_prefix.rsplit("/", 1)
90
+ return flow_id, file_name
91
+
60
92
  async def save_file(self, flow_id: str, file_name: str, data: bytes, *, append: bool = False) -> None:
61
93
  """Save a file in the local storage.
62
94
 
@@ -37,6 +37,7 @@ class StorageService(Service):
37
37
  self.data_dir: anyio.Path = anyio.Path(settings_service.settings.config_dir)
38
38
  self.set_ready()
39
39
 
40
+ @abstractmethod
40
41
  def build_full_path(self, flow_id: str, file_name: str) -> str:
41
42
  """Build the full path/key for a file.
42
43
 
@@ -49,6 +50,24 @@ class StorageService(Service):
49
50
  """
50
51
  raise NotImplementedError
51
52
 
53
+ @abstractmethod
54
+ def parse_file_path(self, full_path: str) -> tuple[str, str]:
55
+ """Parse a full storage path to extract flow_id and file_name.
56
+
57
+ This reverses the build_full_path operation.
58
+
59
+ Args:
60
+ full_path: Full path as returned by build_full_path
61
+
62
+ Returns:
63
+ tuple[str, str]: A tuple of (flow_id, file_name)
64
+
65
+ Raises:
66
+ ValueError: If the path format is invalid or doesn't match expected structure
67
+ """
68
+ raise NotImplementedError
69
+
70
+ @abstractmethod
52
71
  def resolve_component_path(self, logical_path: str) -> str:
53
72
  """Convert a logical path to a format that components can use directly.
54
73
 
lfx/utils/image.py CHANGED
@@ -6,14 +6,19 @@ import base64
6
6
  from functools import lru_cache
7
7
  from pathlib import Path
8
8
 
9
+ from lfx.log import logger
10
+ from lfx.services.deps import get_storage_service
11
+ from lfx.utils.async_helpers import run_until_complete
9
12
  from lfx.utils.helpers import get_mime_type
10
13
 
11
14
 
12
15
  def convert_image_to_base64(image_path: str | Path) -> str:
13
16
  """Convert an image file to a base64 encoded string.
14
17
 
18
+ Handles both local files and S3 storage paths.
19
+
15
20
  Args:
16
- image_path: Path to the image file
21
+ image_path: Path to the image file (local or S3 path like "flow_id/filename")
17
22
 
18
23
  Returns:
19
24
  Base64 encoded string of the image
@@ -22,6 +27,20 @@ def convert_image_to_base64(image_path: str | Path) -> str:
22
27
  FileNotFoundError: If the image file doesn't exist
23
28
  """
24
29
  image_path = Path(image_path)
30
+
31
+ storage_service = get_storage_service()
32
+ if storage_service:
33
+ flow_id, file_name = storage_service.parse_file_path(str(image_path))
34
+ try:
35
+ file_content = run_until_complete(
36
+ storage_service.get_file(flow_id=flow_id, file_name=file_name) # type: ignore[call-arg]
37
+ )
38
+ return base64.b64encode(file_content).decode("utf-8")
39
+ except Exception as e:
40
+ logger.error(f"Error reading image file: {e}")
41
+ raise
42
+
43
+ # Fall back to local file access
25
44
  if not image_path.exists():
26
45
  msg = f"Image file not found: {image_path}"
27
46
  raise FileNotFoundError(msg)
@@ -34,7 +53,7 @@ def create_data_url(image_path: str | Path, mime_type: str | None = None) -> str
34
53
  """Create a data URL from an image file.
35
54
 
36
55
  Args:
37
- image_path: Path to the image file
56
+ image_path: Path to the image file (local or S3 path like "flow_id/filename")
38
57
  mime_type: MIME type of the image. If None, will be auto-detected
39
58
 
40
59
  Returns:
@@ -44,9 +63,6 @@ def create_data_url(image_path: str | Path, mime_type: str | None = None) -> str
44
63
  FileNotFoundError: If the image file doesn't exist
45
64
  """
46
65
  image_path = Path(image_path)
47
- if not image_path.exists():
48
- msg = f"Image file not found: {image_path}"
49
- raise FileNotFoundError(msg)
50
66
 
51
67
  if mime_type is None:
52
68
  mime_type = get_mime_type(image_path)
@@ -57,14 +73,16 @@ def create_data_url(image_path: str | Path, mime_type: str | None = None) -> str
57
73
 
58
74
  @lru_cache(maxsize=50)
59
75
  def create_image_content_dict(
60
- image_path: str | Path, mime_type: str | None = None, model_name: str | None = None
76
+ image_path: str | Path,
77
+ mime_type: str | None = None,
78
+ model_name: str | None = None, # noqa: ARG001
61
79
  ) -> dict:
62
80
  """Create a content dictionary for multimodal inputs from an image file.
63
81
 
64
82
  Args:
65
- image_path: Path to the image file
83
+ image_path: Path to the image file (local or S3 path like "flow_id/filename")
66
84
  mime_type: MIME type of the image. If None, will be auto-detected
67
- model_name: Optional model parameter to determine content dict structure
85
+ model_name: Optional model parameter (kept for backward compatibility, no longer used)
68
86
 
69
87
  Returns:
70
88
  Content dictionary with type and image_url fields
@@ -74,6 +92,6 @@ def create_image_content_dict(
74
92
  """
75
93
  data_url = create_data_url(image_path, mime_type)
76
94
 
77
- if model_name == "OllamaModel":
78
- return {"type": "image_url", "source_type": "url", "image_url": data_url}
79
- return {"type": "image", "source_type": "url", "url": data_url}
95
+ # Standard format for OpenAI, Anthropic, Gemini, and most providers
96
+ # Format: {"type": "image_url", "image_url": {"url": "data:..."}}
97
+ return {"type": "image_url", "image_url": {"url": data_url}}
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lfx-nightly
3
- Version: 0.2.0.dev26
3
+ Version: 0.2.0.dev41
4
4
  Summary: Langflow Executor - A lightweight CLI tool for executing and serving Langflow AI flows
5
5
  Author-email: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
6
6
  Requires-Python: <3.14,>=3.10