lfx-nightly 0.1.12.dev35__py3-none-any.whl → 0.1.12.dev37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of lfx-nightly has been flagged as a potentially problematic release.

lfx/base/composio/composio_base.py
@@ -1,6 +1,7 @@
  import copy
  import json
  import re
+ from contextlib import suppress
  from typing import Any

  from composio import Composio
@@ -381,9 +382,9 @@ class ComposioBaseComponent(Component):
  if clean_field == "user_id":
  clean_field = f"{self.app_name}_user_id"

- # Handle reserved attribute name conflicts (e.g., 'status') by prefixing with app name
- # This prevents clashes with component attributes like self.status
- if clean_field in {"status"}:
+ # Handle reserved attribute name conflicts (e.g., 'status', 'name')
+ # Prefix with app name to prevent clashes with component attributes
+ if clean_field in {"status", "name"}:
  clean_field = f"{self.app_name}_{clean_field}"

  action_fields.append(clean_field)
@@ -549,6 +550,13 @@ class ComposioBaseComponent(Component):
  field_schema_copy["description"] = f"Status for {self.app_name.title()}: " + field_schema.get(
  "description", ""
  )
+ elif clean_field_name == "name":
+ clean_field_name = f"{self.app_name}_name"
+ # Update the field schema description to reflect the name change
+ field_schema_copy = field_schema.copy()
+ field_schema_copy["description"] = f"Name for {self.app_name.title()}: " + field_schema.get(
+ "description", ""
+ )
  else:
  # Use the original field schema for all other fields
  field_schema_copy = field_schema
@@ -571,7 +579,17 @@ class ComposioBaseComponent(Component):

  # Also update required fields to match cleaned names
  if flat_schema.get("required"):
- cleaned_required = [field.replace("[0]", "") for field in flat_schema["required"]]
+ cleaned_required = []
+ for field in flat_schema["required"]:
+ base = field.replace("[0]", "")
+ if base == "user_id":
+ cleaned_required.append(f"{self.app_name}_user_id")
+ elif base == "status":
+ cleaned_required.append(f"{self.app_name}_status")
+ elif base == "name":
+ cleaned_required.append(f"{self.app_name}_name")
+ else:
+ cleaned_required.append(base)
  flat_schema["required"] = cleaned_required

  input_schema = create_input_schema_from_json_schema(flat_schema)
@@ -756,6 +774,9 @@ class ComposioBaseComponent(Component):
  # Ensure _all_fields includes new ones
  self._all_fields.update({i.name for i in lf_inputs if i.name is not None})

+ # Normalize input_types to prevent None values
+ self.update_input_types(build_config)
+
  def _is_tool_mode_enabled(self) -> bool:
  """Check if tool_mode is currently enabled."""
  return getattr(self, "tool_mode", False)
@@ -1094,6 +1115,20 @@ class ComposioBaseComponent(Component):
  # Also clear any tracked dynamic fields
  self._clear_auth_dynamic_fields(build_config)

+ def update_input_types(self, build_config: dict) -> dict:
+ """Normalize input_types to [] wherever None appears in the build_config template."""
+ try:
+ for key, value in list(build_config.items()):
+ if isinstance(value, dict):
+ if value.get("input_types") is None:
+ build_config[key]["input_types"] = []
+ elif hasattr(value, "input_types") and value.input_types is None:
+ with suppress(AttributeError, TypeError):
+ value.input_types = []
+ except (RuntimeError, KeyError):
+ pass
+ return build_config
+
  def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None) -> dict:
  """Update build config for auth and action selection."""
  # Avoid normalizing legacy input_types here; rely on upstream fixes
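A minimal sketch of what the new update_input_types normalization does to dict-style template entries (the field names below are illustrative, not taken from the diff):

    # Mirrors only the dict-entry branch of update_input_types shown above.
    build_config = {
        "api_key": {"input_types": None, "show": True},
        "action_button": {"input_types": ["Message"], "show": True},
    }
    for key, value in list(build_config.items()):
        if isinstance(value, dict) and value.get("input_types") is None:
            build_config[key]["input_types"] = []

    assert build_config["api_key"]["input_types"] == []                  # None normalized to []
    assert build_config["action_button"]["input_types"] == ["Message"]   # untouched

Every early return in update_build_config is now routed through this helper, which is why the remaining hunks in this file replace "return build_config" with "return self.update_input_types(build_config)".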
@@ -1233,7 +1268,7 @@ class ComposioBaseComponent(Component):
  build_config["auth_link"].pop("connection_id", None)
  build_config["action_button"]["helper_text"] = "Please connect before selecting actions."
  build_config["action_button"]["helper_text_metadata"] = {"variant": "destructive"}
- return build_config
+ return self.update_input_types(build_config)

  # Handle auth mode change -> render appropriate fields based on schema
  if field_name == "auth_mode":
@@ -1290,7 +1325,7 @@ class ComposioBaseComponent(Component):
  }
  build_config["action_button"]["helper_text"] = ""
  build_config["action_button"]["helper_text_metadata"] = {}
- return build_config
+ return self.update_input_types(build_config)
  if mode:
  managed = schema.get("composio_managed_auth_schemes") or []
  # Always hide the Create Auth Config control (used internally only)
@@ -1306,7 +1341,7 @@ class ComposioBaseComponent(Component):
  else:
  # Custom → render only required fields based on the toolkit schema
  self._render_custom_auth_fields(build_config, schema, mode)
- return build_config
+ return self.update_input_types(build_config)

  # Handle connection initiation when tool mode is enabled
  if field_name == "auth_link" and isinstance(field_value, dict):
@@ -1323,7 +1358,7 @@ class ComposioBaseComponent(Component):
  build_config["action_button"]["helper_text"] = ""
  build_config["action_button"]["helper_text_metadata"] = {}
  logger.info(f"Using existing ACTIVE connection {connection_id} for {toolkit_slug}")
- return build_config
+ return self.update_input_types(build_config)

  # Only reuse ACTIVE connections; otherwise create a new connection
  stored_connection_id = None
@@ -1342,11 +1377,11 @@ class ComposioBaseComponent(Component):
  redirect_url, connection_id = self._initiate_connection(toolkit_slug)
  build_config["auth_link"]["value"] = redirect_url
  logger.info(f"New OAuth URL created for {toolkit_slug}: {redirect_url}")
- return build_config
+ return self.update_input_types(build_config)
  if not mode:
  build_config["auth_link"]["value"] = "connect"
  build_config["auth_link"]["auth_tooltip"] = "Select Auth Mode"
- return build_config
+ return self.update_input_types(build_config)
  # Custom modes: create auth config and/or initiate with config
  # Validate required fields before creating any auth config
  required_missing = []
@@ -1403,7 +1438,7 @@ class ComposioBaseComponent(Component):
  build_config["auth_mode"]["helper_text_metadata"] = {"variant": "destructive"}
  build_config["auth_link"]["value"] = "connect"
  build_config["auth_link"]["auth_tooltip"] = f"Missing: {missing_joined}"
- return build_config
+ return self.update_input_types(build_config)
  composio = self._build_wrapper()
  if mode == "OAUTH2":
  # If an auth_config was already created via the button, use it and include initiation fields
@@ -1437,7 +1472,7 @@ class ComposioBaseComponent(Component):
  # Clear action blocker text on successful initiation
  build_config["action_button"]["helper_text"] = ""
  build_config["action_button"]["helper_text_metadata"] = {}
- return build_config
+ return self.update_input_types(build_config)
  # Otherwise, create custom OAuth2 auth config using schema-declared required fields
  credentials = {}
  missing = []
@@ -1487,7 +1522,7 @@ class ComposioBaseComponent(Component):
  build_config["auth_link"]["auth_config_id"] = auth_config_id
  build_config["auth_link"]["value"] = "connect"
  build_config["auth_link"]["auth_tooltip"] = "Connect"
- return build_config
+ return self.update_input_types(build_config)
  # Otherwise initiate immediately
  redirect = composio.connected_accounts.initiate(
  user_id=self.entity_id,
@@ -1504,7 +1539,7 @@ class ComposioBaseComponent(Component):
  self._clear_auth_fields_from_schema(build_config, schema)
  build_config["action_button"]["helper_text"] = ""
  build_config["action_button"]["helper_text_metadata"] = {}
- return build_config
+ return self.update_input_types(build_config)
  if mode == "API_KEY":
  ac = composio.auth_configs.create(
  toolkit=toolkit_slug,
@@ -1550,7 +1585,7 @@ class ComposioBaseComponent(Component):
  self._clear_auth_fields_from_schema(build_config, schema)
  build_config["action_button"]["helper_text"] = ""
  build_config["action_button"]["helper_text_metadata"] = {}
- return build_config
+ return self.update_input_types(build_config)
  # Generic custom auth flow for any other mode (treat like API_KEY)
  ac = composio.auth_configs.create(
  toolkit=toolkit_slug,
@@ -1584,13 +1619,13 @@ class ComposioBaseComponent(Component):
  else:
  build_config["auth_link"]["value"] = "validated"
  build_config["auth_link"]["auth_tooltip"] = "Disconnect"
- return build_config
+ return self.update_input_types(build_config)
  except (ValueError, ConnectionError, TypeError) as e:
  logger.error(f"Error creating connection: {e}")
  build_config["auth_link"]["value"] = "connect"
  build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
  else:
- return build_config
+ return self.update_input_types(build_config)
  else:
  # We already have a usable connection; no new OAuth request
  build_config["auth_link"]["auth_tooltip"] = "Disconnect"
@@ -1732,7 +1767,7 @@ class ComposioBaseComponent(Component):
  build_config["action_button"]["show"] = True # Show action field when tool mode is disabled
  for field in self._all_fields:
  build_config[field]["show"] = True # Update show status for all fields based on tool mode
- return build_config
+ return self.update_input_types(build_config)

  if field_name == "action_button":
  # If selection is cancelled/cleared, remove generated fields
@@ -1748,12 +1783,12 @@ class ComposioBaseComponent(Component):

  if _is_cleared(field_value):
  self._hide_all_action_fields(build_config)
- return build_config
+ return self.update_input_types(build_config)

  self._update_action_config(build_config, field_value)
  # Keep the existing show/hide behaviour
  self.show_hide_fields(build_config, field_value)
- return build_config
+ return self.update_input_types(build_config)

  # Handle auth config button click
  if field_name == "create_auth_config" and field_value == "create":
@@ -1798,7 +1833,7 @@ class ComposioBaseComponent(Component):
  build_config["auth_link"]["auth_config_id"] = auth_config_id
  build_config["auth_link"]["value"] = "connect"
  build_config["auth_link"]["auth_tooltip"] = "Connect"
- return build_config
+ return self.update_input_types(build_config)
  # If no initiation fields required, initiate immediately
  connection_request = composio.connected_accounts.initiate(
  user_id=self.entity_id, auth_config_id=auth_config_id
@@ -1824,7 +1859,7 @@ class ComposioBaseComponent(Component):
  logger.error(f"Error creating new auth config: {e}")
  build_config["auth_link"]["value"] = "error"
  build_config["auth_link"]["auth_tooltip"] = f"Error: {e!s}"
- return build_config
+ return self.update_input_types(build_config)

  # Handle API key removal
  if field_name == "api_key" and len(field_value) == 0:
@@ -1861,16 +1896,16 @@ class ComposioBaseComponent(Component):
  self._hide_all_action_fields(build_config)
  except (TypeError, ValueError, AttributeError):
  pass
- return build_config
+ return self.update_input_types(build_config)

  # Only proceed with connection logic if we have an API key
  if not hasattr(self, "api_key") or not self.api_key:
- return build_config
+ return self.update_input_types(build_config)

  # CRITICAL: If tool_mode is enabled (check both instance and build_config), skip all connection logic
  if current_tool_mode:
  build_config["action_button"]["show"] = False
- return build_config
+ return self.update_input_types(build_config)

  # Update action options only if tool_mode is disabled
  self._build_action_maps()
@@ -1931,7 +1966,7 @@ class ComposioBaseComponent(Component):
  if self._is_tool_mode_enabled():
  build_config["action_button"]["show"] = False

- return build_config
+ return self.update_input_types(build_config)

  def configure_tools(self, composio: Composio, limit: int | None = None) -> list[Tool]:
  if limit is None:
@@ -2039,6 +2074,10 @@ class ComposioBaseComponent(Component):
  final_field_name = field
  if field.endswith("_user_id") and field.startswith(self.app_name):
  final_field_name = "user_id"
+ elif field == f"{self.app_name}_status":
+ final_field_name = "status"
+ elif field == f"{self.app_name}_name":
+ final_field_name = "name"

  arguments[final_field_name] = value

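Taken together, the composio_base.py changes prefix reserved field names on the way into the UI and strip the prefix again before the action executes. A rough sketch of the round trip, assuming a toolkit whose app_name is "github" (the app name is illustrative):

    app_name = "github"
    clean_field = "status"
    if clean_field in {"status", "name"}:          # UI-facing name gets prefixed
        clean_field = f"{app_name}_{clean_field}"  # -> "github_status"

    field = clean_field                            # later, when building the action arguments
    if field == f"{app_name}_status":
        final_field_name = "status"                # prefix stripped before calling the tool
    elif field == f"{app_name}_name":
        final_field_name = "name"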
lfx/base/data/base_file.py
@@ -304,11 +304,12 @@ class BaseFileComponent(Component, ABC):
  parts.append(str(data_text))
  elif isinstance(d.data, dict):
  # convert the data dict to a readable string
- parts.append(orjson.dumps(d.data, default=str).decode())
+ parts.append(orjson.dumps(d.data, option=orjson.OPT_INDENT_2, default=str).decode())
  else:
  parts.append(str(d))
- except (AttributeError, TypeError, ValueError):
+ except Exception: # noqa: BLE001
  # Final fallback - just try to convert to string
+ # TODO: Consider downstream error case more. Should this raise an error?
  parts.append(str(d))

  return Message(text=sep.join(parts), **metadata)
@@ -671,6 +672,9 @@ class BaseFileComponent(Component, ABC):
  def _safe_extract_zip(bundle: ZipFile, output_dir: Path):
  """Safely extract ZIP files."""
  for member in bundle.namelist():
+ # Filter out resource fork information for automatic production of mac
+ if Path(member).name.startswith("._"):
+ continue
  member_path = output_dir / member
  # Ensure no path traversal outside `output_dir`
  if not member_path.resolve().is_relative_to(output_dir.resolve()):
@@ -681,6 +685,9 @@ class BaseFileComponent(Component, ABC):
  def _safe_extract_tar(bundle: tarfile.TarFile, output_dir: Path):
  """Safely extract TAR files."""
  for member in bundle.getmembers():
+ # Filter out resource fork information for automatic production of mac
+ if Path(member.name).name.startswith("._"):
+ continue
  member_path = output_dir / member.name
  # Ensure no path traversal outside `output_dir`
  if not member_path.resolve().is_relative_to(output_dir.resolve()):
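Both extractors now skip macOS AppleDouble entries (member names beginning with "._", as produced when archives are created on macOS). A small illustration of the filter, using made-up member names:

    from pathlib import Path

    members = ["report.pdf", "__MACOSX/._report.pdf", "data/._notes.txt"]
    kept = [m for m in members if not Path(m).name.startswith("._")]
    # kept == ["report.pdf"]; the "._" resource-fork entries are never extracted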
lfx/components/__init__.py
@@ -96,6 +96,7 @@ if TYPE_CHECKING:
  vectara,
  vectorstores,
  vertexai,
+ vlmrun,
  weaviate,
  wikipedia,
  wolframalpha,
@@ -198,6 +199,7 @@ _dynamic_imports = {
  "vectara": "__module__",
  "vectorstores": "__module__",
  "vertexai": "__module__",
+ "vlmrun": "__module__",
  "weaviate": "__module__",
  "wikipedia": "__module__",
  "wolframalpha": "__module__",
@@ -326,6 +328,7 @@ __all__ = [
  "vectara",
  "vectorstores",
  "vertexai",
+ "vlmrun",
  "weaviate",
  "wikipedia",
  "wolframalpha",
lfx/components/vectorstores/local_db.py
@@ -22,7 +22,6 @@ class LocalDBComponent(LCVectorStoreComponent):
  name = "LocalDB"
  icon = "database"
  legacy = True
- replacement = ["knowledgebases.KnowledgeRetrieval", "knowledgebases.KnowledgeIngestion"]

  inputs = [
  TabInput(
lfx/components/vlmrun/__init__.py ADDED
@@ -0,0 +1,34 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any
+
+ from langflow.components._importing import import_mod
+
+ if TYPE_CHECKING:
+ from .vlmrun_transcription import VLMRunTranscription
+
+ _dynamic_imports = {
+ "VLMRunTranscription": "vlmrun_transcription",
+ }
+
+ __all__ = [
+ "VLMRunTranscription",
+ ]
+
+
+ def __getattr__(attr_name: str) -> Any:
+ """Lazily import VLMRun components on attribute access."""
+ if attr_name not in _dynamic_imports:
+ msg = f"module '{__name__}' has no attribute '{attr_name}'"
+ raise AttributeError(msg)
+ try:
+ result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
+ except (ModuleNotFoundError, ImportError, AttributeError) as e:
+ msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
+ raise AttributeError(msg) from e
+ globals()[attr_name] = result
+ return result
+
+
+ def __dir__() -> list[str]:
+ return list(__all__)
lfx/components/vlmrun/vlmrun_transcription.py ADDED
@@ -0,0 +1,224 @@
+ from pathlib import Path
+ from urllib.parse import urlparse
+
+ from langflow.custom.custom_component.component import Component
+ from langflow.io import (
+ DropdownInput,
+ FileInput,
+ IntInput,
+ MessageTextInput,
+ Output,
+ SecretStrInput,
+ )
+ from langflow.schema.data import Data
+ from loguru import logger
+
+
+ class VLMRunTranscription(Component):
+ display_name = "VLM Run Transcription"
+ description = "Extract structured data from audio and video using [VLM Run AI](https://app.vlm.run)"
+ documentation = "https://docs.vlm.run"
+ icon = "VLMRun"
+ beta = True
+
+ inputs = [
+ SecretStrInput(
+ name="api_key",
+ display_name="VLM Run API Key",
+ info="Get your API key from https://app.vlm.run",
+ required=True,
+ ),
+ DropdownInput(
+ name="media_type",
+ display_name="Media Type",
+ options=["audio", "video"],
+ value="audio",
+ info="Select the type of media to process",
+ ),
+ FileInput(
+ name="media_files",
+ display_name="Media Files",
+ file_types=[
+ "mp3",
+ "wav",
+ "m4a",
+ "flac",
+ "ogg",
+ "opus",
+ "webm",
+ "aac",
+ "mp4",
+ "mov",
+ "avi",
+ "mkv",
+ "flv",
+ "wmv",
+ "m4v",
+ ],
+ info="Upload one or more audio/video files",
+ required=False,
+ is_list=True,
+ ),
+ MessageTextInput(
+ name="media_url",
+ display_name="Media URL",
+ info="URL to media file (alternative to file upload)",
+ required=False,
+ advanced=True,
+ ),
+ IntInput(
+ name="timeout_seconds",
+ display_name="Timeout (seconds)",
+ value=600,
+ info="Maximum time to wait for processing completion",
+ advanced=True,
+ ),
+ DropdownInput(
+ name="domain",
+ display_name="Processing Domain",
+ options=["transcription"],
+ value="transcription",
+ info="Select the processing domain",
+ advanced=True,
+ ),
+ ]
+
+ outputs = [
+ Output(
+ display_name="Result",
+ name="result",
+ method="process_media",
+ ),
+ ]
+
+ def _check_inputs(self) -> str | None:
+ """Validate that either media files or URL is provided."""
+ if not self.media_files and not self.media_url:
+ return "Either media files or media URL must be provided"
+ return None
+
+ def _import_vlmrun(self):
+ """Import and return VLMRun client class."""
+ try:
+ from vlmrun.client import VLMRun
+ except ImportError as e:
+ error_msg = "VLM Run SDK not installed. Run: pip install 'vlmrun[all]'"
+ raise ImportError(error_msg) from e
+ else:
+ return VLMRun
+
+ def _generate_media_response(self, client, media_source):
+ """Generate response for audio or video media."""
+ domain_str = f"{self.media_type}.{self.domain}"
+
+ if self.media_type == "audio":
+ if isinstance(media_source, Path):
+ return client.audio.generate(file=media_source, domain=domain_str, batch=True)
+ return client.audio.generate(url=media_source, domain=domain_str, batch=True)
+ # video
+ if isinstance(media_source, Path):
+ return client.video.generate(file=media_source, domain=domain_str, batch=True)
+ return client.video.generate(url=media_source, domain=domain_str, batch=True)
+
+ def _wait_for_response(self, client, response):
+ """Wait for batch processing to complete if needed."""
+ if hasattr(response, "id"):
+ return client.predictions.wait(response.id, timeout=self.timeout_seconds)
+ return response
+
+ def _extract_transcription(self, segments: list) -> list[str]:
+ """Extract transcription parts from segments."""
+ transcription_parts = []
+ for segment in segments:
+ if self.media_type == "audio" and "audio" in segment:
+ transcription_parts.append(segment["audio"].get("content", ""))
+ elif self.media_type == "video" and "video" in segment:
+ transcription_parts.append(segment["video"].get("content", ""))
+ # Also include audio if available for video
+ if "audio" in segment:
+ audio_content = segment["audio"].get("content", "")
+ if audio_content and audio_content.strip():
+ transcription_parts.append(f"[Audio: {audio_content}]")
+ return transcription_parts
+
+ def _create_result_dict(self, response, transcription_parts: list, source_name: str) -> dict:
+ """Create a standardized result dictionary."""
+ response_data = response.response if hasattr(response, "response") else {}
+ result = {
+ "prediction_id": response.id if hasattr(response, "id") else None,
+ "transcription": " ".join(transcription_parts),
+ "full_response": response_data,
+ "metadata": {
+ "media_type": self.media_type,
+ "duration": response_data.get("metadata", {}).get("duration", 0),
+ },
+ "usage": response.usage if hasattr(response, "usage") else None,
+ "status": response.status if hasattr(response, "status") else "completed",
+ }
+
+ # Add source-specific field
+ parsed_url = urlparse(source_name)
+ if parsed_url.scheme in ["http", "https", "s3", "gs", "ftp", "ftps"]:
+ result["source"] = source_name
+ else:
+ result["filename"] = source_name
+
+ return result
+
+ def _process_single_media(self, client, media_source, source_name: str) -> dict:
+ """Process a single media file or URL."""
+ response = self._generate_media_response(client, media_source)
+ response = self._wait_for_response(client, response)
+ response_data = response.response if hasattr(response, "response") else {}
+ segments = response_data.get("segments", [])
+ transcription_parts = self._extract_transcription(segments)
+ return self._create_result_dict(response, transcription_parts, source_name)
+
+ def process_media(self) -> Data:
+ """Process audio or video file and extract structured data."""
+ # Validate inputs
+ error_msg = self._check_inputs()
+ if error_msg:
+ self.status = error_msg
+ return Data(data={"error": error_msg})
+
+ try:
+ # Import and initialize client
+ vlmrun_class = self._import_vlmrun()
+ client = vlmrun_class(api_key=self.api_key)
+ all_results = []
+
+ # Handle multiple files
+ if self.media_files:
+ files_to_process = self.media_files if isinstance(self.media_files, list) else [self.media_files]
+ for idx, media_file in enumerate(files_to_process):
+ self.status = f"Processing file {idx + 1} of {len(files_to_process)}..."
+ result = self._process_single_media(client, Path(media_file), Path(media_file).name)
+ all_results.append(result)
+
+ # Handle URL
+ elif self.media_url:
+ result = self._process_single_media(client, self.media_url, self.media_url)
+ all_results.append(result)
+
+ # Return clean, flexible output structure
+ output_data = {
+ "results": all_results,
+ "total_files": len(all_results),
+ }
+ self.status = f"Successfully processed {len(all_results)} file(s)"
+ return Data(data=output_data)
+
+ except ImportError as e:
+ self.status = str(e)
+ return Data(data={"error": str(e)})
+ except (ValueError, ConnectionError, TimeoutError) as e:
+ logger.opt(exception=True).debug("Error processing media with VLM Run")
+ error_msg = f"Processing failed: {e!s}"
+ self.status = error_msg
+ return Data(data={"error": error_msg})
+ except (AttributeError, KeyError, OSError) as e:
+ logger.opt(exception=True).debug("Unexpected error processing media with VLM Run")
+ error_msg = f"Unexpected error: {e!s}"
+ self.status = error_msg
+ return Data(data={"error": error_msg})
lfx/logging/logger.py ADDED
@@ -0,0 +1,24 @@
+ """Backwards compatibility module for lfx.logging.logger.
+
+ This module provides backwards compatibility for code that imports from lfx.logging.logger.
+ All functionality has been moved to lfx.log.logger.
+ """
+
+ # Ensure we maintain all the original exports
+ from lfx.log.logger import (
+ InterceptHandler,
+ LogConfig,
+ configure,
+ logger,
+ setup_gunicorn_logger,
+ setup_uvicorn_logger,
+ )
+
+ __all__ = [
+ "InterceptHandler",
+ "LogConfig",
+ "configure",
+ "logger",
+ "setup_gunicorn_logger",
+ "setup_uvicorn_logger",
+ ]
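With this shim in place, the old import path keeps working and should resolve to the relocated objects, e.g.:

    # Both paths now yield the same logger object re-exported from lfx.log.logger.
    from lfx.log.logger import logger as new_logger
    from lfx.logging.logger import logger as old_logger
    assert old_logger is new_logger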
lfx/services/mcp_composer/service.py
@@ -417,9 +417,10 @@ class MCPComposerService(Service):
  startup_delay: float = 2.0,
  ) -> subprocess.Popen:
  """Start the MCP Composer subprocess for a specific project."""
+ settings = get_settings_service().settings
  cmd = [
  "uvx",
- "mcp-composer",
+ f"mcp-composer{settings.mcp_composer_version}",
  "--mode",
  "sse",
  "--sse-url",
@@ -447,7 +448,7 @@ class MCPComposerService(Service):
  "oauth_server_url": "OAUTH_SERVER_URL",
  "oauth_callback_path": "OAUTH_CALLBACK_PATH",
  "oauth_client_id": "OAUTH_CLIENT_ID",
- "oauth_client_secret": "OAUTH_CLIENT_SECRET",
+ "oauth_client_secret": "OAUTH_CLIENT_SECRET", # pragma: allowlist secret
  "oauth_auth_url": "OAUTH_AUTH_URL",
  "oauth_token_url": "OAUTH_TOKEN_URL",
  "oauth_mcp_scope": "OAUTH_MCP_SCOPE",
lfx/services/settings/base.py
@@ -285,6 +285,9 @@ class Settings(BaseSettings):
  # MCP Composer
  mcp_composer_enabled: bool = True
  """If set to False, Langflow will not start the MCP Composer service."""
+ mcp_composer_version: str = "~=0.1.0.7"
+ """Version constraint for mcp-composer when using uvx. Uses PEP 440 syntax.
+ ~=0.1.0.7 allows patch updates (0.1.0.x) but prevents minor/major version changes."""

  # Public Flow Settings
  public_flow_cleanup_interval: int = Field(default=3600, gt=600)
@@ -348,6 +351,34 @@ class Settings(BaseSettings):
  logger.debug(f"Setting user agent to {value}")
  return value

+ @field_validator("mcp_composer_version", mode="before")
+ @classmethod
+ def validate_mcp_composer_version(cls, value):
+ """Ensure the version string has a version specifier prefix.
+
+ If a bare version like '0.1.0.7' is provided, prepend '~=' to allow patch updates.
+ Supports PEP 440 specifiers: ==, !=, <=, >=, <, >, ~=, ===
+ """
+ if not value:
+ return "~=0.1.0.7" # Default
+
+ # Check if it already has a version specifier
+ # Order matters: check longer specifiers first to avoid false matches
+ specifiers = ["===", "==", "!=", "<=", ">=", "~=", "<", ">"]
+ if any(value.startswith(spec) for spec in specifiers):
+ return value
+
+ # If it's a bare version number, add ~= prefix
+ # This regex matches version numbers like 0.1.0.7, 1.2.3, etc.
+ import re
+
+ if re.match(r"^\d+(\.\d+)*", value):
+ logger.debug(f"Adding ~= prefix to bare version '{value}' -> '~={value}'")
+ return f"~={value}"
+
+ # If we can't determine, return as-is and let uvx handle it
+ return value
+
  @field_validator("variables_to_get_from_environment", mode="before")
  @classmethod
  def set_variables_to_get_from_environment(cls, value):
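A small, self-contained sketch of the normalization this validator performs and how it feeds the uvx command built in service.py (the helper below is an illustrative mirror of the validator, not the validator itself):

    import re

    def normalize(value: str) -> str:
        specifiers = ["===", "==", "!=", "<=", ">=", "~=", "<", ">"]
        if not value:
            return "~=0.1.0.7"                      # default constraint
        if any(value.startswith(spec) for spec in specifiers):
            return value                            # explicit PEP 440 specifier kept as-is
        if re.match(r"^\d+(\.\d+)*", value):
            return f"~={value}"                     # bare version gains a ~= prefix
        return value

    assert normalize("0.1.0.7") == "~=0.1.0.7"
    assert normalize(">=0.1.0") == ">=0.1.0"
    # The service then runs roughly: uvx mcp-composer~=0.1.0.7 --mode sse --sse-url ...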
lfx_nightly-0.1.12.dev35.dist-info/METADATA → lfx_nightly-0.1.12.dev37.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lfx-nightly
- Version: 0.1.12.dev35
+ Version: 0.1.12.dev37
  Summary: Langflow Executor - A lightweight CLI tool for executing and serving Langflow AI flows
  Author-email: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
  Requires-Python: <3.14,>=3.10
lfx_nightly-0.1.12.dev35.dist-info/RECORD → lfx_nightly-0.1.12.dev37.dist-info/RECORD
@@ -22,13 +22,13 @@ lfx/base/astra_assistants/util.py,sha256=T_W44VFoOXBF3m-0eCSrHvzbKx1gdyBF9IAWKMX
  lfx/base/chains/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lfx/base/chains/model.py,sha256=QSYJBc0Ygpx2Ko273u1idL_gPK2xpvRQgJb4oTx8x8s,766
  lfx/base/composio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lfx/base/composio/composio_base.py,sha256=orFVSLWDCmvxarzCHAJdXlMhT7dr4MxGZNEeNmx76hc,113150
+ lfx/base/composio/composio_base.py,sha256=pltCXF0eQVfGbetksE2sXMTPMji8lR1jSryeMNrYNZM,115607
  lfx/base/compressors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lfx/base/compressors/model.py,sha256=-FFBAPAy9bAgvklIo7x_uwShZR5NoMHakF6f_hNnLHg,2098
  lfx/base/curl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lfx/base/curl/parse.py,sha256=Yw6mMbGg7e-ffrBItEUJeTiljneCXlNyt5afzEP9eUI,6094
  lfx/base/data/__init__.py,sha256=lQsYYMyAg_jA9ZF7oc-LNZsRE2uMGT6g16WzsUByHqs,81
- lfx/base/data/base_file.py,sha256=ZYAEThTcRI7Oy4aEZPMYCC0UDljXQSDVypdPRpDJEAs,27599
+ lfx/base/data/base_file.py,sha256=UKpF9BsNHgg-cdB1uVB8F00czvCTLfks320aLg3F_kM,28049
  lfx/base/data/docling_utils.py,sha256=gVDxOZghSJEo5n-UNkVGBQYqkvfNqkNkltBhAnoaJd4,13048
  lfx/base/data/utils.py,sha256=dGqEO4zE5s_V2Cs4j0EEeyLjYLX6Zex-EGzIOznK76o,5960
  lfx/base/document_transformers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -94,7 +94,7 @@ lfx/cli/run.py,sha256=_7JX6YpoO3XLCfD2BtZl3FsghMyo18B9SriLyO0EaoQ,21411
  lfx/cli/script_loader.py,sha256=xWSpx57cBeX0UHmUgAk97aye9-hhD2Y6nKh68A-xaTA,8997
  lfx/cli/serve_app.py,sha256=3U0QsoCkf-1DxSpxfNOr8ap7Giaxm_MfuLrii5GpIHM,22485
  lfx/cli/validation.py,sha256=xZfL-rKt_Y-Lek19GMZffyxhPIyYMQHBIpR0Hwa_Ji8,2615
- lfx/components/__init__.py,sha256=3oqczcZDZEpgm8p3QLotXwgmAvBJDYUW8ME5CrDb2Ac,10994
+ lfx/components/__init__.py,sha256=aZTGgShKOfNK2G6MPCYMWYQB7PX-achWa1ZqGD0a51E,11052
  lfx/components/_importing.py,sha256=XYMV7cnw6QAw9hdY5dOybc3RZ7esDVYd8pOV822xdsU,1625
  lfx/components/FAISS/__init__.py,sha256=gbdyinU7MBtv4PRUfUcPuR08_Ixx0W95LdIXHEgmrfg,935
  lfx/components/FAISS/faiss.py,sha256=K9egZNckeHOrPqxuFia4VL4-mFphyEl6dQ_F-lXvax8,3893
@@ -515,7 +515,7 @@ lfx/components/vectorstores/elasticsearch.py,sha256=WcBi8THcOzopZeYOQeEoHxsZkACH
  lfx/components/vectorstores/faiss.py,sha256=K9egZNckeHOrPqxuFia4VL4-mFphyEl6dQ_F-lXvax8,3893
  lfx/components/vectorstores/graph_rag.py,sha256=4NmYQkjSru_zaDhJfxdaYtap-RMGJfv2AYN2NEYSdds,5163
  lfx/components/vectorstores/hcd.py,sha256=Fo7Zj4U-A1ZcbsdhlTxheMJDy8EzbYWlo85iY6vASnQ,12379
- lfx/components/vectorstores/local_db.py,sha256=i2GxQ7SwtcCED0bqXWkc7lMtfT0vzsKWUC0NLtEhCXk,10741
+ lfx/components/vectorstores/local_db.py,sha256=bruzpEk6PQc1DeGqSWJblsVplJ5eC1JsSlncxRc4yuQ,10648
  lfx/components/vectorstores/milvus.py,sha256=_jMyO4l5y6O2Lkx9eHyuhgcHtXcApkI-ktuqa1YxnGI,4395
  lfx/components/vectorstores/mongodb_atlas.py,sha256=OlAstNMToHuvGI-8djkiGr7kdGBr927O0SE5cnVd0O0,8594
  lfx/components/vectorstores/opensearch.py,sha256=P8Eq4KsjHT8b7iOUOKMFRwOLwgRfIWfxIHLD0GJsw24,9080
@@ -530,6 +530,8 @@ lfx/components/vectorstores/weaviate.py,sha256=ZrqR1qRA3IZsJB-TB07C7Qyqt5ysh-eEi
  lfx/components/vertexai/__init__.py,sha256=bIcDPTzHyUujS75CWtFxYCvdY7eygvH3UDmWVAcX8I0,1090
  lfx/components/vertexai/vertexai.py,sha256=2fdSgdP6lfVYZElxWvuwi5wylpwFEtlSUlT1zaCGtgE,2992
  lfx/components/vertexai/vertexai_embeddings.py,sha256=Or1cFSZKKUp063yZVny7oqKiWgEqonhhCHPbRjY4CMA,3135
+ lfx/components/vlmrun/__init__.py,sha256=RTywYzmYq35aiPiJL0_01KhphJewLTDNBSUVmsM9y-E,953
+ lfx/components/vlmrun/vlmrun_transcription.py,sha256=yR4jSaPfAhkh5-Xcc5artMbuBD_Tcsl5gwxCjPOdshg,8667
  lfx/components/weaviate/__init__.py,sha256=IJPCeA67vEeqHcsf_DqbKV5DXKmHEKBVTxPgBHlrzuU,953
  lfx/components/weaviate/weaviate.py,sha256=zMnx4BSdt7HmXj4r_CHLEq4g93kW7sGFDstJT1n89n0,3271
  lfx/components/wikipedia/__init__.py,sha256=sOp1c_gIOcIL-XZFqFExa_DTklDOCIdzVl_OUVV9-yE,137
@@ -628,6 +630,7 @@ lfx/load/utils.py,sha256=qa8aoMLW-X8FO8xVz3YVHQwjTSJYbYr_AOQAAp3smlc,3705
  lfx/log/__init__.py,sha256=UATLSm1Fp9rVclAXP00LKQzzYKcaboVSuWNujlRR6P4,119
  lfx/log/logger.py,sha256=UaUlWEwws7SVa24_9ZuPwRgefoatzRV7nnZV7YQZjwU,14238
  lfx/logging/__init__.py,sha256=X5tXF5e1hc62adprRPLtKeaqm8-tpl6loXsxbh9IO-Q,367
+ lfx/logging/logger.py,sha256=y7ophyWX5-r8RCxHJeAmGKyGeEhR-7imR-D8YBXU7CE,546
  lfx/memory/__init__.py,sha256=s7nCNKlcwLfT6Z_cXbiYjvoXQXZ-H2GqK1qsAuKBV08,1815
  lfx/memory/stubs.py,sha256=kR6TRI2t6rPvA5Pja5XPC4yvKRBFBuJfdI0hJL8vfwU,9924
  lfx/processing/__init__.py,sha256=jERZg6it9mhOzrbTAt9YtakSNXPSjUXFh5MfKBN48wA,41
@@ -674,10 +677,10 @@ lfx/services/chat/config.py,sha256=l2btsQ3xp-CYD5XiatZC8y23gkUn0Qr4_TzVjPpzwJo,4
  lfx/services/chat/schema.py,sha256=MHq5o3adAiLZIpR8OxM2vEkxRmEtZURIvvB9S4-pXoc,286
  lfx/services/mcp_composer/__init__.py,sha256=Y5IahKX0siDJuRCvUF_KrpAQq6UmHzQtXu8rXvdThqM,242
  lfx/services/mcp_composer/factory.py,sha256=f8Bj0ZR9A_o1c3Kw5JKyR6SbtbCEPNWOy8b0OK990Z8,530
- lfx/services/mcp_composer/service.py,sha256=Binv29dXSRscUPOa40714w_NYmebZB3gwBp68KnaSFc,25765
+ lfx/services/mcp_composer/service.py,sha256=TdQoQ1VV_aATRGCYNm9MZRj_WEb45LLP4ACub_ChCXg,25876
  lfx/services/settings/__init__.py,sha256=UISBvOQIqoA3a8opwJrTQp4PSTqpReY6GQ_7O6WuqJQ,65
  lfx/services/settings/auth.py,sha256=_18KZipq0udCJPq-4xCD_juhqSwAEvoCqxOTSYsNv5w,5720
- lfx/services/settings/base.py,sha256=8_eiUe90Yi_YKw-abRurGXgbmVmOAbSXEbUDTabXbas,26221
+ lfx/services/settings/base.py,sha256=xtqqE4f8Bo-Dt0D2DlB83-BHss8BACYioaQ08y5ETYY,27558
  lfx/services/settings/constants.py,sha256=ZBJolZ4kx0ZoYp2BDyHkgDFgaXEQAH-ZcLqgunv_MqQ,908
  lfx/services/settings/factory.py,sha256=NezZ6TE_xP955B9l9pI6ONNyoylrHPfUZN8arvLVRXg,615
  lfx/services/settings/feature_flags.py,sha256=HGuDGgfOBIDtuEiEVTgoWHxKqX2vuVBRgsqdX_4D9kg,205
@@ -721,7 +724,7 @@ lfx/utils/schemas.py,sha256=NbOtVQBrn4d0BAu-0H_eCTZI2CXkKZlRY37XCSmuJwc,3865
  lfx/utils/util.py,sha256=Ww85wbr1-vjh2pXVtmTqoUVr6MXAW8S7eDx_Ys6HpE8,20696
  lfx/utils/util_strings.py,sha256=nU_IcdphNaj6bAPbjeL-c1cInQPfTBit8mp5Y57lwQk,1686
  lfx/utils/version.py,sha256=cHpbO0OJD2JQAvVaTH_6ibYeFbHJV0QDHs_YXXZ-bT8,671
- lfx_nightly-0.1.12.dev35.dist-info/METADATA,sha256=15fUNw16xD_P0VF15znV4v47_8SRS1cEOAY2R05g-Fg,8290
- lfx_nightly-0.1.12.dev35.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- lfx_nightly-0.1.12.dev35.dist-info/entry_points.txt,sha256=1724p3RHDQRT2CKx_QRzEIa7sFuSVO0Ux70YfXfoMT4,42
- lfx_nightly-0.1.12.dev35.dist-info/RECORD,,
+ lfx_nightly-0.1.12.dev37.dist-info/METADATA,sha256=d776iFhCA-JXSNc7oP-rmjUilfF7E-LU99hI434WnDs,8290
+ lfx_nightly-0.1.12.dev37.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ lfx_nightly-0.1.12.dev37.dist-info/entry_points.txt,sha256=1724p3RHDQRT2CKx_QRzEIa7sFuSVO0Ux70YfXfoMT4,42
+ lfx_nightly-0.1.12.dev37.dist-info/RECORD,,