lfx-nightly 0.2.0.dev41__py3-none-any.whl → 0.3.0.dev3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. lfx/__main__.py +137 -6
  2. lfx/_assets/component_index.json +1 -1
  3. lfx/base/agents/agent.py +10 -6
  4. lfx/base/agents/altk_base_agent.py +5 -3
  5. lfx/base/agents/altk_tool_wrappers.py +1 -1
  6. lfx/base/agents/events.py +1 -1
  7. lfx/base/agents/utils.py +4 -0
  8. lfx/base/composio/composio_base.py +78 -41
  9. lfx/base/data/cloud_storage_utils.py +156 -0
  10. lfx/base/data/docling_utils.py +130 -55
  11. lfx/base/datastax/astradb_base.py +75 -64
  12. lfx/base/embeddings/embeddings_class.py +113 -0
  13. lfx/base/models/__init__.py +11 -1
  14. lfx/base/models/google_generative_ai_constants.py +33 -9
  15. lfx/base/models/model_metadata.py +6 -0
  16. lfx/base/models/ollama_constants.py +196 -30
  17. lfx/base/models/openai_constants.py +37 -10
  18. lfx/base/models/unified_models.py +1123 -0
  19. lfx/base/models/watsonx_constants.py +43 -4
  20. lfx/base/prompts/api_utils.py +40 -5
  21. lfx/base/tools/component_tool.py +2 -9
  22. lfx/cli/__init__.py +10 -2
  23. lfx/cli/commands.py +3 -0
  24. lfx/cli/run.py +65 -409
  25. lfx/cli/script_loader.py +18 -7
  26. lfx/cli/validation.py +6 -3
  27. lfx/components/__init__.py +0 -3
  28. lfx/components/composio/github_composio.py +1 -1
  29. lfx/components/cuga/cuga_agent.py +39 -27
  30. lfx/components/data_source/api_request.py +4 -2
  31. lfx/components/datastax/astradb_assistant_manager.py +4 -2
  32. lfx/components/docling/__init__.py +45 -11
  33. lfx/components/docling/docling_inline.py +39 -49
  34. lfx/components/docling/docling_remote.py +1 -0
  35. lfx/components/elastic/opensearch_multimodal.py +1733 -0
  36. lfx/components/files_and_knowledge/file.py +384 -36
  37. lfx/components/files_and_knowledge/ingestion.py +8 -0
  38. lfx/components/files_and_knowledge/retrieval.py +10 -0
  39. lfx/components/files_and_knowledge/save_file.py +91 -88
  40. lfx/components/langchain_utilities/ibm_granite_handler.py +211 -0
  41. lfx/components/langchain_utilities/tool_calling.py +37 -6
  42. lfx/components/llm_operations/batch_run.py +64 -18
  43. lfx/components/llm_operations/lambda_filter.py +213 -101
  44. lfx/components/llm_operations/llm_conditional_router.py +39 -7
  45. lfx/components/llm_operations/structured_output.py +38 -12
  46. lfx/components/models/__init__.py +16 -74
  47. lfx/components/models_and_agents/agent.py +51 -203
  48. lfx/components/models_and_agents/embedding_model.py +171 -255
  49. lfx/components/models_and_agents/language_model.py +54 -318
  50. lfx/components/models_and_agents/mcp_component.py +96 -10
  51. lfx/components/models_and_agents/prompt.py +105 -18
  52. lfx/components/ollama/ollama_embeddings.py +111 -29
  53. lfx/components/openai/openai_chat_model.py +1 -1
  54. lfx/components/processing/text_operations.py +580 -0
  55. lfx/components/vllm/__init__.py +37 -0
  56. lfx/components/vllm/vllm.py +141 -0
  57. lfx/components/vllm/vllm_embeddings.py +110 -0
  58. lfx/custom/custom_component/component.py +65 -10
  59. lfx/custom/custom_component/custom_component.py +8 -6
  60. lfx/events/observability/__init__.py +0 -0
  61. lfx/events/observability/lifecycle_events.py +111 -0
  62. lfx/field_typing/__init__.py +57 -58
  63. lfx/graph/graph/base.py +40 -1
  64. lfx/graph/utils.py +109 -30
  65. lfx/graph/vertex/base.py +75 -23
  66. lfx/graph/vertex/vertex_types.py +0 -5
  67. lfx/inputs/__init__.py +2 -0
  68. lfx/inputs/input_mixin.py +55 -0
  69. lfx/inputs/inputs.py +120 -0
  70. lfx/interface/components.py +24 -7
  71. lfx/interface/initialize/loading.py +42 -12
  72. lfx/io/__init__.py +2 -0
  73. lfx/run/__init__.py +5 -0
  74. lfx/run/base.py +464 -0
  75. lfx/schema/__init__.py +50 -0
  76. lfx/schema/data.py +1 -1
  77. lfx/schema/image.py +26 -7
  78. lfx/schema/message.py +104 -11
  79. lfx/schema/workflow.py +171 -0
  80. lfx/services/deps.py +12 -0
  81. lfx/services/interfaces.py +43 -1
  82. lfx/services/mcp_composer/service.py +7 -1
  83. lfx/services/schema.py +1 -0
  84. lfx/services/settings/auth.py +95 -4
  85. lfx/services/settings/base.py +11 -1
  86. lfx/services/settings/constants.py +2 -0
  87. lfx/services/settings/utils.py +82 -0
  88. lfx/services/storage/local.py +13 -8
  89. lfx/services/transaction/__init__.py +5 -0
  90. lfx/services/transaction/service.py +35 -0
  91. lfx/tests/unit/components/__init__.py +0 -0
  92. lfx/utils/constants.py +2 -0
  93. lfx/utils/mustache_security.py +79 -0
  94. lfx/utils/validate_cloud.py +81 -3
  95. {lfx_nightly-0.2.0.dev41.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/METADATA +7 -2
  96. {lfx_nightly-0.2.0.dev41.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/RECORD +98 -80
  97. {lfx_nightly-0.2.0.dev41.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/WHEEL +0 -0
  98. {lfx_nightly-0.2.0.dev41.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/entry_points.txt +0 -0
lfx/schema/message.py CHANGED
@@ -12,10 +12,11 @@ from uuid import UUID
 from fastapi.encoders import jsonable_encoder
 from langchain_core.load import load
 from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage, ToolMessage
-from langchain_core.prompts.chat import BaseChatPromptTemplate, ChatPromptTemplate
-from langchain_core.prompts.prompt import PromptTemplate
 from pydantic import BaseModel, ConfigDict, Field, ValidationError, field_serializer, field_validator

+if TYPE_CHECKING:
+    from langchain_core.prompts.chat import BaseChatPromptTemplate
+
 from lfx.base.prompts.utils import dict_values_to_string
 from lfx.log.logger import logger
 from lfx.schema.content_block import ContentBlock
@@ -26,12 +27,33 @@ from lfx.schema.properties import Properties, Source
 from lfx.schema.validators import timestamp_to_str, timestamp_to_str_validator
 from lfx.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER
 from lfx.utils.image import create_image_content_dict
+from lfx.utils.mustache_security import safe_mustache_render

 if TYPE_CHECKING:
     from lfx.schema.dataframe import DataFrame


 class Message(Data):
+    """Message schema for Langflow.
+
+    Message ID Semantics:
+    - Messages only have an ID after being stored in the database
+    - Messages that are skipped (via Component._should_skip_message) will NOT have an ID
+    - Always use get_id(), has_id(), or require_id() methods to safely access the ID
+    - Never access message.id directly without checking if it exists first
+
+    Safe ID Access Patterns:
+    - Use get_id() when ID may or may not exist (returns None if missing)
+    - Use has_id() to check if ID exists before operations that require it
+    - Use require_id() when ID is required (raises ValueError if missing)
+
+    Example:
+        message_id = message.get_id()  # Safe: returns None if no ID
+        if message.has_id():
+            # Safe to use message_id
+            do_something_with_id(message_id)
+    """
+
     model_config = ConfigDict(arbitrary_types_allowed=True)
     # Helper class to deal with image data
     text_key: str = "text"
@@ -109,7 +131,22 @@ class Message(Data):
     def model_post_init(self, /, _context: Any) -> None:
         new_files: list[Any] = []
         for file in self.files or []:
-            if is_image_file(file):
+            # Skip if already an Image instance
+            if isinstance(file, Image):
+                new_files.append(file)
+            # Get the path string if file is a dict or has path attribute
+            elif isinstance(file, dict) and "path" in file:
+                file_path = file["path"]
+                if file_path and is_image_file(file_path):
+                    new_files.append(Image(path=file_path))
+                else:
+                    new_files.append(file_path if file_path else file)
+            elif hasattr(file, "path") and file.path:
+                if is_image_file(file.path):
+                    new_files.append(Image(path=file.path))
+                else:
+                    new_files.append(file.path)
+            elif isinstance(file, str) and is_image_file(file):
                 new_files.append(Image(path=file))
             else:
                 new_files.append(file)
@@ -213,7 +250,8 @@ class Message(Data):

         for file in files:
             if isinstance(file, Image):
-                content_dicts.append(file.to_content_dict())
+                # Pass the message's flow_id to the Image for proper path resolution
+                content_dicts.append(file.to_content_dict(flow_id=self.flow_id))
             else:
                 content_dicts.append(create_image_content_dict(file, None, model_name))
         return content_dicts
@@ -250,24 +288,35 @@ class Message(Data):
         prompt_json = prompt.to_json()
         return cls(prompt=prompt_json)

-    def format_text(self):
-        prompt_template = PromptTemplate.from_template(self.template)
+    def format_text(self, template_format="f-string"):
+        if template_format == "mustache":
+            # Use our secure mustache renderer
+            variables_with_str_values = dict_values_to_string(self.variables)
+            formatted_prompt = safe_mustache_render(self.template, variables_with_str_values)
+            self.text = formatted_prompt
+            return formatted_prompt
+        # Use langchain's template for other formats
+        from langchain_core.prompts.prompt import PromptTemplate
+
+        prompt_template = PromptTemplate.from_template(self.template, template_format=template_format)
         variables_with_str_values = dict_values_to_string(self.variables)
         formatted_prompt = prompt_template.format(**variables_with_str_values)
         self.text = formatted_prompt
         return formatted_prompt

     @classmethod
-    async def from_template_and_variables(cls, template: str, **variables):
+    async def from_template_and_variables(cls, template: str, template_format: str = "f-string", **variables):
         # This method has to be async for backwards compatibility with versions
         # >1.0.15, <1.1
-        return cls.from_template(template, **variables)
+        return cls.from_template(template, template_format=template_format, **variables)

     # Define a sync version for backwards compatibility with versions >1.0.15, <1.1
     @classmethod
-    def from_template(cls, template: str, **variables):
+    def from_template(cls, template: str, template_format: str = "f-string", **variables):
+        from langchain_core.prompts.chat import ChatPromptTemplate
+
         instance = cls(template=template, variables=variables)
-        text = instance.format_text()
+        text = instance.format_text(template_format=template_format)
         message = HumanMessage(content=text)
         contents = []
         for value in variables.values():
@@ -286,7 +335,7 @@ class Message(Data):
     @classmethod
     async def create(cls, **kwargs):
         """If files are present, create the message in a separate thread as is_image_file is blocking."""
-        if "files" in kwargs:
+        if kwargs.get("files"):
             return await asyncio.to_thread(cls, **kwargs)
         return cls(**kwargs)

@@ -298,6 +347,50 @@ class Message(Data):

         return DataFrame(data=[self])

+    def get_id(self) -> str | UUID | None:
+        """Safely get the message ID.
+
+        Returns:
+            The message ID if it exists, None otherwise.
+
+        Note:
+            A message only has an ID if it has been stored in the database.
+            Messages that are skipped (via _should_skip_message) will not have an ID.
+        """
+        return getattr(self, "id", None)
+
+    def has_id(self) -> bool:
+        """Check if the message has an ID.
+
+        Returns:
+            True if the message has an ID, False otherwise.
+
+        Note:
+            A message only has an ID if it has been stored in the database.
+            Messages that are skipped (via _should_skip_message) will not have an ID.
+        """
+        message_id = getattr(self, "id", None)
+        return message_id is not None
+
+    def require_id(self) -> str | UUID:
+        """Get the message ID, raising an error if it doesn't exist.
+
+        Returns:
+            The message ID.
+
+        Raises:
+            ValueError: If the message does not have an ID.
+
+        Note:
+            Use this method when an ID is required for the operation.
+            For optional ID access, use get_id() instead.
+        """
+        message_id = getattr(self, "id", None)
+        if message_id is None:
+            msg = "Message does not have an ID. Messages only have IDs after being stored in the database."
+            raise ValueError(msg)
+        return message_id
+

 class DefaultModel(BaseModel):
     model_config = ConfigDict(
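Taken together, these hunks mean Message.format_text() and the from_template helpers now accept a template_format argument (with "mustache" routed through the sandboxed safe_mustache_render), and the new get_id()/has_id()/require_id() accessors replace direct message.id access. A minimal usage sketch; the template and variable values are illustrative and store_reference is a hypothetical caller-side helper:

from lfx.schema.message import Message

msg = Message(template="Hello {{name}}!", variables={"name": "Ada"})
text = msg.format_text(template_format="mustache")  # rendered via safe_mustache_render

# IDs exist only after the message has been stored in the database.
if msg.has_id():
    store_reference(msg.require_id())  # hypothetical downstream helper
else:
    print(msg.get_id())  # -> None; never read .id directly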
lfx/schema/workflow.py ADDED
@@ -0,0 +1,171 @@
+"""Workflow execution schemas for V2 API."""
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Literal
+
+from pydantic import BaseModel, ConfigDict, Field
+
+
+class JobStatus(str, Enum):
+    """Job execution status."""
+
+    QUEUED = "queued"
+    IN_PROGRESS = "in_progress"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    ERROR = "error"
+
+
+class ErrorDetail(BaseModel):
+    """Error detail schema."""
+
+    error: str
+    code: str | None = None
+    details: dict[str, Any] | None = None
+
+
+class ComponentOutput(BaseModel):
+    """Component output schema."""
+
+    type: str = Field(..., description="Type of the component output (e.g., 'message', 'data', 'tool', 'text')")
+    component_id: str
+    status: JobStatus
+    content: Any | None = None
+    metadata: dict[str, Any] | None = None
+
+
+class GlobalInputs(BaseModel):
+    """Global inputs that apply to all input components in the workflow."""
+
+    input_value: str | None = Field(None, description="The input value to send to input components")
+    input_type: str = Field("chat", description="The type of input (chat, text, etc.)")
+    session_id: str | None = Field(None, description="Session ID for conversation continuity")
+
+
+class WorkflowExecutionRequest(BaseModel):
+    """Request schema for workflow execution."""
+
+    background: bool = False
+    stream: bool = False
+    flow_id: str
+    inputs: dict[str, Any] | None = Field(
+        None, description="Inputs with 'global' key for global inputs and component IDs for component-specific tweaks"
+    )
+
+    model_config = ConfigDict(
+        json_schema_extra={
+            "examples": [
+                {
+                    "background": False,
+                    "stream": False,
+                    "flow_id": "flow_67ccd2be17f0819081ff3bb2cf6508e60bb6a6b452d3795b",
+                    "inputs": {
+                        "global": {
+                            "input_value": "Hello, how can you help me today?",
+                            "input_type": "chat",
+                            "session_id": "session-123",
+                        },
+                        "llm_component": {"temperature": 0.7, "max_tokens": 100},
+                        "opensearch_component": {"opensearch_url": "https://opensearch:9200"},
+                    },
+                },
+                {
+                    "background": True,
+                    "stream": False,
+                    "flow_id": "flow_67ccd2be17f0819081ff3bb2cf6508e60bb6a6b452d3795b",
+                    "inputs": {"global": {"input_value": "Process this in the background", "input_type": "text"}},
+                },
+                {
+                    "background": False,
+                    "stream": True,
+                    "flow_id": "flow_67ccd2be17f0819081ff3bb2cf6508e60bb6a6b452d3795b",
+                    "inputs": {"chat_component": {"text": "Stream this conversation"}},
+                },
+            ]
+        },
+        extra="forbid",
+    )
+
+
+class WorkflowExecutionResponse(BaseModel):
+    """Synchronous workflow execution response."""
+
+    flow_id: str
+    job_id: str
+    object: Literal["response"] = "response"
+    created_timestamp: str
+    status: JobStatus
+    errors: list[ErrorDetail] = []
+    inputs: dict[str, Any] = {}
+    outputs: dict[str, ComponentOutput] = {}
+    metadata: dict[str, Any] = {}
+
+
+class WorkflowJobResponse(BaseModel):
+    """Background job response."""
+
+    job_id: str
+    created_timestamp: str
+    status: JobStatus
+    errors: list[ErrorDetail] = []
+
+
+class WorkflowStreamEvent(BaseModel):
+    """Streaming event response."""
+
+    type: str
+    run_id: str
+    timestamp: int
+    raw_event: dict[str, Any]
+
+
+class WorkflowStopRequest(BaseModel):
+    """Request schema for stopping workflow."""
+
+    job_id: str
+    force: bool = Field(default=False, description="Force stop the workflow")
+
+
+class WorkflowStopResponse(BaseModel):
+    """Response schema for stopping workflow."""
+
+    job_id: str
+    status: Literal["stopped", "stopping", "not_found", "error"]
+    message: str
+
+
+# OpenAPI response definitions
+WORKFLOW_EXECUTION_RESPONSES = {
+    200: {
+        "description": "Workflow execution response",
+        "content": {
+            "application/json": {
+                "schema": {
+                    "oneOf": [
+                        WorkflowExecutionResponse.model_json_schema(),
+                        WorkflowJobResponse.model_json_schema(),
+                    ]
+                }
+            },
+            "text/event-stream": {
+                "schema": WorkflowStreamEvent.model_json_schema(),
+                "description": "Server-sent events for streaming execution",
+            },
+        },
+    }
+}
+
+WORKFLOW_STATUS_RESPONSES = {
+    200: {
+        "description": "Workflow status response",
+        "content": {
+            "application/json": {"schema": WorkflowExecutionResponse.model_json_schema()},
+            "text/event-stream": {
+                "schema": WorkflowStreamEvent.model_json_schema(),
+                "description": "Server-sent events for streaming status",
+            },
+        },
+    }
+}
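Since the whole module is new, a short sketch of how the V2 schemas compose; the flow ID is copied from the json_schema_extra example above, while the job ID and timestamp are illustrative:

from lfx.schema.workflow import JobStatus, WorkflowExecutionRequest, WorkflowExecutionResponse

# extra="forbid" means unknown top-level request fields raise a validation error.
request = WorkflowExecutionRequest(
    flow_id="flow_67ccd2be17f0819081ff3bb2cf6508e60bb6a6b452d3795b",
    inputs={
        "global": {"input_value": "Hello", "input_type": "chat", "session_id": "session-123"},
        "llm_component": {"temperature": 0.7},
    },
)

response = WorkflowExecutionResponse(
    flow_id=request.flow_id,
    job_id="job-123",                          # illustrative
    created_timestamp="2025-01-01T00:00:00Z",  # illustrative
    status=JobStatus.COMPLETED,
)
print(response.model_dump_json(indent=2))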
lfx/services/deps.py CHANGED
@@ -24,6 +24,7 @@ if TYPE_CHECKING:
         SettingsServiceProtocol,
         StorageServiceProtocol,
         TracingServiceProtocol,
+        TransactionServiceProtocol,
         VariableServiceProtocol,
     )

@@ -118,6 +119,17 @@ def get_tracing_service() -> TracingServiceProtocol | None:
     return get_service(ServiceType.TRACING_SERVICE)


+def get_transaction_service() -> TransactionServiceProtocol | None:
+    """Retrieves the transaction service instance.
+
+    Returns the transaction service for logging component executions.
+    Returns None if no transaction service is registered.
+    """
+    from lfx.services.schema import ServiceType
+
+    return get_service(ServiceType.TRANSACTION_SERVICE)
+
+
 async def get_session():
     msg = "get_session is deprecated, use session_scope instead"
     logger.warning(msg)
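A sketch of how a caller might combine the new accessor with the TransactionServiceProtocol added in lfx/services/interfaces.py below; it assumes a transaction service has been registered under ServiceType.TRANSACTION_SERVICE, and the payloads are illustrative:

from lfx.services.deps import get_transaction_service

async def log_vertex_run(flow_id: str, vertex_id: str) -> None:
    # get_transaction_service() returns None when nothing is registered, so guard first.
    service = get_transaction_service()
    if service is None or not service.is_enabled():
        return
    await service.log_transaction(
        flow_id=flow_id,
        vertex_id=vertex_id,
        inputs={"query": "hello"},    # illustrative
        outputs={"result": "world"},  # illustrative
        status="success",
    )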
lfx/services/interfaces.py CHANGED
@@ -3,7 +3,7 @@
 from __future__ import annotations

 from abc import abstractmethod
-from typing import TYPE_CHECKING, Any, Protocol
+from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable

 if TYPE_CHECKING:
     import asyncio
@@ -106,3 +106,45 @@ class TracingServiceProtocol(Protocol):
     def log(self, message: str, **kwargs) -> None:
         """Log tracing information."""
         ...
+
+
+@runtime_checkable
+class TransactionServiceProtocol(Protocol):
+    """Protocol for transaction logging service.
+
+    This service handles logging of component execution transactions,
+    tracking inputs, outputs, and status of each vertex build.
+    """
+
+    @abstractmethod
+    async def log_transaction(
+        self,
+        flow_id: str,
+        vertex_id: str,
+        inputs: dict[str, Any] | None,
+        outputs: dict[str, Any] | None,
+        status: str,
+        target_id: str | None = None,
+        error: str | None = None,
+    ) -> None:
+        """Log a transaction record for a vertex execution.
+
+        Args:
+            flow_id: The flow ID (as string)
+            vertex_id: The vertex/component ID
+            inputs: Input parameters for the component
+            outputs: Output results from the component
+            status: Execution status (success/error)
+            target_id: Optional target vertex ID
+            error: Optional error message
+        """
+        ...
+
+    @abstractmethod
+    def is_enabled(self) -> bool:
+        """Check if transaction logging is enabled.
+
+        Returns:
+            True if transaction logging is enabled, False otherwise.
+        """
+        ...
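Because the protocol is decorated with @runtime_checkable, any object with matching method names passes an isinstance check (method presence only; signatures are not verified). A minimal, purely illustrative structural implementation, not part of the package:

from typing import Any

from lfx.services.interfaces import TransactionServiceProtocol


class InMemoryTransactionService:
    """Illustrative only; collects transaction records in a list."""

    def __init__(self) -> None:
        self.records: list[dict[str, Any]] = []

    def is_enabled(self) -> bool:
        return True

    async def log_transaction(
        self,
        flow_id: str,
        vertex_id: str,
        inputs: dict[str, Any] | None,
        outputs: dict[str, Any] | None,
        status: str,
        target_id: str | None = None,
        error: str | None = None,
    ) -> None:
        self.records.append({"flow_id": flow_id, "vertex_id": vertex_id, "status": status, "error": error})


# Structural conformance check enabled by @runtime_checkable.
assert isinstance(InMemoryTransactionService(), TransactionServiceProtocol)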
lfx/services/mcp_composer/service.py CHANGED
@@ -1292,7 +1292,7 @@ class MCPComposerService(Service):
             "oauth_host": "OAUTH_HOST",
             "oauth_port": "OAUTH_PORT",
             "oauth_server_url": "OAUTH_SERVER_URL",
-            "oauth_callback_path": "OAUTH_CALLBACK_PATH",
+            "oauth_callback_url": "OAUTH_CALLBACK_URL",
             "oauth_client_id": "OAUTH_CLIENT_ID",
             "oauth_client_secret": "OAUTH_CLIENT_SECRET",  # pragma: allowlist secret
             "oauth_auth_url": "OAUTH_AUTH_URL",
@@ -1301,6 +1301,12 @@ class MCPComposerService(Service):
             "oauth_provider_scope": "OAUTH_PROVIDER_SCOPE",
         }

+        # Backwards compatibility: if oauth_callback_url not set, try oauth_callback_path
+        if ("oauth_callback_url" not in auth_config or not auth_config.get("oauth_callback_url")) and (
+            "oauth_callback_path" in auth_config and auth_config.get("oauth_callback_path")
+        ):
+            auth_config["oauth_callback_url"] = auth_config["oauth_callback_path"]
+
         # Add environment variables as command line arguments
         # Only set non-empty values to avoid Pydantic validation errors
         for config_key, env_key in oauth_env_mapping.items():
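The rename to OAUTH_CALLBACK_URL still honors old configurations; the compatibility shim above behaves like this simplified sketch (the callback URL is illustrative):

# Old-style config that only sets the deprecated key...
auth_config = {"oauth_callback_path": "https://example.com/oauth/callback"}

# ...is treated as if it had set the new key.
if not auth_config.get("oauth_callback_url") and auth_config.get("oauth_callback_path"):
    auth_config["oauth_callback_url"] = auth_config["oauth_callback_path"]

assert auth_config["oauth_callback_url"] == auth_config["oauth_callback_path"]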
lfx/services/schema.py CHANGED
@@ -19,3 +19,4 @@ class ServiceType(str, Enum):
     JOB_QUEUE_SERVICE = "job_queue_service"
     SHARED_COMPONENT_CACHE_SERVICE = "shared_component_cache_service"
     MCP_COMPOSER_SERVICE = "mcp_composer_service"
+    TRANSACTION_SERVICE = "transaction_service"
lfx/services/settings/auth.py CHANGED
@@ -1,14 +1,33 @@
 import secrets
+from enum import Enum
 from pathlib import Path
 from typing import Literal

 from passlib.context import CryptContext
-from pydantic import Field, SecretStr, field_validator
+from pydantic import Field, SecretStr, field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict

 from lfx.log.logger import logger
 from lfx.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD
-from lfx.services.settings.utils import read_secret_from_file, write_secret_to_file
+from lfx.services.settings.utils import (
+    derive_public_key_from_private,
+    generate_rsa_key_pair,
+    read_secret_from_file,
+    write_public_key_to_file,
+    write_secret_to_file,
+)
+
+
+class JWTAlgorithm(str, Enum):
+    """JWT signing algorithm options."""
+
+    HS256 = "HS256"
+    RS256 = "RS256"
+    RS512 = "RS512"
+
+    def is_asymmetric(self) -> bool:
+        """Return True if this algorithm uses asymmetric (public/private key) cryptography."""
+        return self in (JWTAlgorithm.RS256, JWTAlgorithm.RS512)


 class AuthSettings(BaseSettings):
@@ -16,10 +35,22 @@ class AuthSettings(BaseSettings):
     CONFIG_DIR: str
     SECRET_KEY: SecretStr = Field(
         default=SecretStr(""),
-        description="Secret key for JWT. If not provided, a random one will be generated.",
+        description="Secret key for JWT (used with HS256). If not provided, a random one will be generated.",
+        frozen=False,
+    )
+    PRIVATE_KEY: SecretStr = Field(
+        default=SecretStr(""),
+        description="RSA private key for JWT signing (RS256/RS512). Auto-generated if not provided.",
         frozen=False,
     )
-    ALGORITHM: str = "HS256"
+    PUBLIC_KEY: str = Field(
+        default="",
+        description="RSA public key for JWT verification (RS256/RS512). Derived from private key if not provided.",
+    )
+    ALGORITHM: JWTAlgorithm = Field(
+        default=JWTAlgorithm.HS256,
+        description="JWT signing algorithm. Use RS256 or RS512 for asymmetric signing (recommended for production).",
+    )
     ACCESS_TOKEN_EXPIRE_SECONDS: int = 60 * 60  # 1 hour
     REFRESH_TOKEN_EXPIRE_SECONDS: int = 60 * 60 * 24 * 7  # 7 days

@@ -138,3 +169,63 @@ class AuthSettings(BaseSettings):
             logger.debug("Saved secret key")

         return value if isinstance(value, SecretStr) else SecretStr(value).get_secret_value()
+
+    @model_validator(mode="after")
+    def setup_rsa_keys(self):
+        """Generate or load RSA keys when using RS256/RS512 algorithm."""
+        if not self.ALGORITHM.is_asymmetric():
+            return self
+
+        config_dir = self.CONFIG_DIR
+        private_key_value = self.PRIVATE_KEY.get_secret_value() if self.PRIVATE_KEY else ""
+
+        if not config_dir:
+            # No config dir - generate keys in memory if not provided
+            if not private_key_value:
+                logger.debug("No CONFIG_DIR provided, generating RSA keys in memory")
+                private_key_pem, public_key_pem = generate_rsa_key_pair()
+                object.__setattr__(self, "PRIVATE_KEY", SecretStr(private_key_pem))
+                object.__setattr__(self, "PUBLIC_KEY", public_key_pem)
+            elif not self.PUBLIC_KEY:
+                # Derive public key from private key
+                public_key_pem = derive_public_key_from_private(private_key_value)
+                object.__setattr__(self, "PUBLIC_KEY", public_key_pem)
+            return self
+
+        private_key_path = Path(config_dir) / "private_key.pem"
+        public_key_path = Path(config_dir) / "public_key.pem"
+
+        if private_key_value:
+            # Private key provided via env var - save it and derive public key
+            logger.debug("RSA private key provided")
+            write_secret_to_file(private_key_path, private_key_value)
+
+            if not self.PUBLIC_KEY:
+                public_key_pem = derive_public_key_from_private(private_key_value)
+                object.__setattr__(self, "PUBLIC_KEY", public_key_pem)
+                write_public_key_to_file(public_key_path, public_key_pem)
+        # No private key provided - load from file or generate
+        elif private_key_path.exists():
+            logger.debug("Loading RSA keys from files")
+            private_key_pem = read_secret_from_file(private_key_path)
+            object.__setattr__(self, "PRIVATE_KEY", SecretStr(private_key_pem))
+
+            if public_key_path.exists():
+                public_key_pem = public_key_path.read_text(encoding="utf-8")
+                object.__setattr__(self, "PUBLIC_KEY", public_key_pem)
+            else:
+                # Derive public key from private key
+                public_key_pem = derive_public_key_from_private(private_key_pem)
+                object.__setattr__(self, "PUBLIC_KEY", public_key_pem)
+                write_public_key_to_file(public_key_path, public_key_pem)
+        else:
+            # Generate new RSA key pair
+            logger.debug("Generating new RSA key pair")
+            private_key_pem, public_key_pem = generate_rsa_key_pair()
+            write_secret_to_file(private_key_path, private_key_pem)
+            write_public_key_to_file(public_key_path, public_key_pem)
+            object.__setattr__(self, "PRIVATE_KEY", SecretStr(private_key_pem))
+            object.__setattr__(self, "PUBLIC_KEY", public_key_pem)
+            logger.debug("RSA key pair generated and saved")
+
+        return self
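A sketch of how a caller could pick signing and verification material based on the configured algorithm. This assumes CONFIG_DIR is the only required field on AuthSettings beyond what is shown here, that the RSA helpers in lfx.services.settings.utils are importable, and the directory path is illustrative:

from lfx.services.settings.auth import AuthSettings, JWTAlgorithm

# With an asymmetric algorithm, setup_rsa_keys() generates or loads a key pair
# under CONFIG_DIR during validation.
auth = AuthSettings(CONFIG_DIR="/tmp/lfx-config", ALGORITHM=JWTAlgorithm.RS256)

if auth.ALGORITHM.is_asymmetric():
    signing_key = auth.PRIVATE_KEY.get_secret_value()  # PEM-encoded private key
    verification_key = auth.PUBLIC_KEY                 # PEM-encoded public key
else:
    signing_key = verification_key = auth.SECRET_KEY.get_secret_value()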
lfx/services/settings/base.py CHANGED
@@ -313,6 +313,10 @@ class Settings(BaseSettings):
     """If set to True, Langflow will start the agentic MCP server that provides tools for
     flow/component operations, template search, and graph visualization."""

+    # Developer API
+    developer_api_enabled: bool = False
+    """If set to True, Langflow will enable developer API endpoints for advanced debugging and introspection."""
+
     # Public Flow Settings
     public_flow_cleanup_interval: int = Field(default=3600, gt=600)
     """The interval in seconds at which public temporary flows will be cleaned up.
@@ -497,7 +501,13 @@ class Settings(BaseSettings):
         if info.data["save_db_in_config_dir"]:
             database_dir = info.data["config_dir"]
         else:
-            database_dir = Path(__file__).parent.parent.parent.resolve()
+            # Use langflow package path, not lfx, for backwards compatibility
+            try:
+                import langflow
+
+                database_dir = Path(langflow.__file__).parent.resolve()
+            except ImportError:
+                database_dir = Path(__file__).parent.parent.parent.resolve()

         pre_db_file_name = "langflow-pre.db"
         db_file_name = "langflow.db"
lfx/services/settings/constants.py CHANGED
@@ -41,3 +41,5 @@ AGENTIC_VARIABLES = [
     "FIELD_NAME",
     "ASTRA_TOKEN",
 ]
+
+DEFAULT_AGENTIC_VARIABLE_VALUE = ""