lfx-nightly 0.2.1.dev7__py3-none-any.whl → 0.3.0.dev3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. lfx/__main__.py +137 -6
  2. lfx/_assets/component_index.json +1 -1
  3. lfx/base/agents/agent.py +9 -5
  4. lfx/base/agents/altk_base_agent.py +5 -3
  5. lfx/base/agents/events.py +1 -1
  6. lfx/base/models/unified_models.py +1 -1
  7. lfx/base/models/watsonx_constants.py +10 -7
  8. lfx/base/prompts/api_utils.py +40 -5
  9. lfx/cli/__init__.py +10 -2
  10. lfx/cli/script_loader.py +5 -4
  11. lfx/cli/validation.py +6 -3
  12. lfx/components/datastax/astradb_assistant_manager.py +4 -2
  13. lfx/components/docling/docling_remote.py +1 -0
  14. lfx/components/langchain_utilities/ibm_granite_handler.py +211 -0
  15. lfx/components/langchain_utilities/tool_calling.py +24 -1
  16. lfx/components/llm_operations/lambda_filter.py +182 -97
  17. lfx/components/models_and_agents/mcp_component.py +38 -1
  18. lfx/components/models_and_agents/prompt.py +105 -18
  19. lfx/components/ollama/ollama_embeddings.py +109 -28
  20. lfx/components/processing/text_operations.py +580 -0
  21. lfx/custom/custom_component/component.py +65 -10
  22. lfx/events/observability/__init__.py +0 -0
  23. lfx/events/observability/lifecycle_events.py +111 -0
  24. lfx/field_typing/__init__.py +57 -58
  25. lfx/graph/graph/base.py +36 -0
  26. lfx/graph/utils.py +45 -12
  27. lfx/graph/vertex/base.py +71 -22
  28. lfx/graph/vertex/vertex_types.py +0 -5
  29. lfx/inputs/input_mixin.py +1 -0
  30. lfx/inputs/inputs.py +5 -0
  31. lfx/interface/components.py +24 -7
  32. lfx/run/base.py +47 -77
  33. lfx/schema/__init__.py +50 -0
  34. lfx/schema/message.py +85 -8
  35. lfx/schema/workflow.py +171 -0
  36. lfx/services/deps.py +12 -0
  37. lfx/services/interfaces.py +43 -1
  38. lfx/services/schema.py +1 -0
  39. lfx/services/settings/auth.py +95 -4
  40. lfx/services/settings/base.py +4 -0
  41. lfx/services/settings/utils.py +82 -0
  42. lfx/services/transaction/__init__.py +5 -0
  43. lfx/services/transaction/service.py +35 -0
  44. lfx/tests/unit/components/__init__.py +0 -0
  45. lfx/utils/constants.py +1 -0
  46. lfx/utils/mustache_security.py +79 -0
  47. lfx/utils/validate_cloud.py +67 -0
  48. {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/METADATA +3 -1
  49. {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/RECORD +51 -42
  50. {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/WHEEL +0 -0
  51. {lfx_nightly-0.2.1.dev7.dist-info → lfx_nightly-0.3.0.dev3.dist-info}/entry_points.txt +0 -0
lfx/components/llm_operations/lambda_filter.py
@@ -2,7 +2,8 @@ from __future__ import annotations
 
 import json
 import re
-from typing import TYPE_CHECKING, Any
+from collections.abc import Callable  # noqa: TC003 - required at runtime for dynamic exec()
+from typing import Any
 
 from lfx.base.models.unified_models import (
     get_language_model_options,
@@ -13,18 +14,34 @@ from lfx.custom.custom_component.component import Component
 from lfx.io import DataInput, IntInput, ModelInput, MultilineInput, Output, SecretStrInput
 from lfx.schema.data import Data
 from lfx.schema.dataframe import DataFrame
+from lfx.schema.message import Message
+from lfx.utils.constants import MESSAGE_SENDER_AI
+
+TEXT_TRANSFORM_PROMPT = (
+    "Given this text, create a Python lambda function that transforms it "
+    "according to the instruction.\n"
+    "The lambda should take a string parameter and return the transformed string.\n\n"
+    "Text Preview:\n{text_preview}\n\n"
+    "Instruction: {instruction}\n\n"
+    "Return ONLY the lambda function and nothing else. No need for ```python or whatever.\n"
+    "Just a string starting with lambda.\n"
+    "Example: lambda text: text.upper()"
+)
 
-if TYPE_CHECKING:
-    from collections.abc import Callable
-
-# # Compute model options once at module level
-# _MODEL_OPTIONS = get_language_model_options()
-# _PROVIDERS = [provider["provider"] for provider in _MODEL_OPTIONS]
+DATA_TRANSFORM_PROMPT = (
+    "Given this data structure and examples, create a Python lambda function "
+    "that implements the following instruction:\n\n"
+    "Data Structure:\n{dump_structure}\n\n"
+    "Example Items:\n{data_sample}\n\n"
+    "Instruction: {instruction}\n\n"
+    "Return ONLY the lambda function and nothing else. No need for ```python or whatever.\n"
+    "Just a string starting with lambda."
+)
 
 
 class LambdaFilterComponent(Component):
     display_name = "Smart Transform"
-    description = "Uses an LLM to generate a function for filtering or transforming structured data."
+    description = "Uses an LLM to generate a function for filtering or transforming structured data and messages."
     documentation: str = "https://docs.langflow.org/smart-transform"
     icon = "square-function"
     name = "Smart Transform"
@@ -33,8 +50,8 @@ class LambdaFilterComponent(Component):
         DataInput(
             name="data",
             display_name="Data",
-            info="The structured data to filter or transform using a lambda function.",
-            input_types=["Data", "DataFrame"],
+            info="The structured data or text messages to filter or transform using a lambda function.",
+            input_types=["Data", "DataFrame", "Message"],
             is_list=True,
             required=True,
         ),
@@ -57,9 +74,10 @@ class LambdaFilterComponent(Component):
             display_name="Instructions",
             info=(
                 "Natural language instructions for how to filter or transform the data using a lambda function. "
-                "Example: Filter the data to only include items where the 'status' is 'active'."
+                "Examples: 'Filter the data to only include items where status is active', "
+                "'Convert the text to uppercase', 'Keep only first 100 characters'"
             ),
-            value="Filter the data to...",
+            value="Transform the data to...",
             required=True,
         ),
         IntInput(
@@ -89,6 +107,11 @@ class LambdaFilterComponent(Component):
             name="dataframe_output",
             method="process_as_dataframe",
         ),
+        Output(
+            display_name="Output",
+            name="message_output",
+            method="process_as_message",
+        ),
     ]
 
     def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
@@ -119,127 +142,189 @@ class LambdaFilterComponent(Component):
         # Return False if the lambda function does not start with 'lambda' or does not contain a colon
         return lambda_text.strip().startswith("lambda") and ":" in lambda_text
 
-    async def _execute_lambda(self) -> Any:
-        self.log(str(self.data))
-
-        # Convert input to a unified format
-        if isinstance(self.data, list):
-            # Handle list of Data or DataFrame objects
-            combined_data = []
-            for item in self.data:
-                if isinstance(item, DataFrame):
-                    # DataFrame to list of dicts
-                    combined_data.extend(item.to_dict(orient="records"))
-                elif hasattr(item, "data"):
-                    # Data object
-                    if isinstance(item.data, dict):
-                        combined_data.append(item.data)
-                    elif isinstance(item.data, list):
-                        combined_data.extend(item.data)
-
-            # If we have a single dict, unwrap it so lambdas can access it directly
-            if len(combined_data) == 1 and isinstance(combined_data[0], dict):
-                data = combined_data[0]
-            elif len(combined_data) == 0:
-                data = {}
-            else:
-                data = combined_data  # type: ignore[assignment]
-        elif isinstance(self.data, DataFrame):
-            # Single DataFrame to list of dicts
-            data = self.data.to_dict(orient="records")
-        elif hasattr(self.data, "data"):
-            # Single Data object
-            data = self.data.data
+    def _get_input_type_name(self) -> str:
+        """Detect and return the input type name for error messages."""
+        if isinstance(self.data, Message):
+            return "Message"
+        if isinstance(self.data, DataFrame):
+            return "DataFrame"
+        if isinstance(self.data, Data):
+            return "Data"
+        if isinstance(self.data, list) and len(self.data) > 0:
+            first = self.data[0]
+            if isinstance(first, Message):
+                return "Message"
+            if isinstance(first, DataFrame):
+                return "DataFrame"
+            if isinstance(first, Data):
+                return "Data"
+        return "unknown"
+
+    def _extract_message_text(self) -> str:
+        """Extract text content from Message input(s)."""
+        if isinstance(self.data, Message):
+            return self.data.text or ""
+
+        texts = [msg.text or "" for msg in self.data if isinstance(msg, Message)]
+        return "\n\n".join(texts) if len(texts) > 1 else (texts[0] if texts else "")
+
+    def _extract_structured_data(self) -> dict | list:
+        """Extract structured data from Data or DataFrame input(s)."""
+        if isinstance(self.data, DataFrame):
+            return self.data.to_dict(orient="records")
+
+        if hasattr(self.data, "data"):
+            return self.data.data
+
+        if not isinstance(self.data, list):
+            return self.data
+
+        combined_data: list[dict] = []
+        for item in self.data:
+            if isinstance(item, DataFrame):
+                combined_data.extend(item.to_dict(orient="records"))
+            elif hasattr(item, "data"):
+                if isinstance(item.data, dict):
+                    combined_data.append(item.data)
+                elif isinstance(item.data, list):
+                    combined_data.extend(item.data)
+
+        if len(combined_data) == 1 and isinstance(combined_data[0], dict):
+            return combined_data[0]
+        if len(combined_data) == 0:
+            return {}
+        return combined_data
+
+    def _is_message_input(self) -> bool:
+        """Check if input is Message type."""
+        if isinstance(self.data, Message):
+            return True
+        return isinstance(self.data, list) and len(self.data) > 0 and isinstance(self.data[0], Message)
+
+    def _build_text_prompt(self, text: str) -> str:
+        """Build prompt for text/Message transformation."""
+        text_length = len(text)
+        if text_length > self.max_size:
+            text_preview = (
+                f"Text length: {text_length} characters\n\n"
+                f"First {self.sample_size} characters:\n{text[: self.sample_size]}\n\n"
+                f"Last {self.sample_size} characters:\n{text[-self.sample_size :]}"
+            )
         else:
-            data = self.data
+            text_preview = text
 
-        dump = json.dumps(data)
-        self.log(str(data))
-
-        llm = get_llm(model=self.model, user_id=self.user_id, api_key=self.api_key)
-        instruction = self.filter_instruction
-        sample_size = self.sample_size
+        return TEXT_TRANSFORM_PROMPT.format(text_preview=text_preview, instruction=self.filter_instruction)
 
-        # Get data structure and samples
-        data_structure = self.get_data_structure(data)
-        dump_structure = json.dumps(data_structure)
-        self.log(dump_structure)
+    def _build_data_prompt(self, data: dict | list) -> str:
+        """Build prompt for structured data transformation."""
+        dump = json.dumps(data)
+        dump_structure = json.dumps(self.get_data_structure(data))
 
-        # For large datasets, sample from head and tail
         if len(dump) > self.max_size:
             data_sample = (
-                f"Data is too long to display... \n\n First lines (head): {dump[:sample_size]} \n\n"
-                f" Last lines (tail): {dump[-sample_size:]})"
+                f"Data is too long to display...\n\nFirst lines (head): {dump[: self.sample_size]}\n\n"
+                f"Last lines (tail): {dump[-self.sample_size :]}"
             )
         else:
             data_sample = dump
 
-        self.log(data_sample)
-
-        prompt = f"""Given this data structure and examples, create a Python lambda function that
-        implements the following instruction:
-
-        Data Structure:
-        {dump_structure}
-
-        Example Items:
-        {data_sample}
-
-        Instruction: {instruction}
-
-        Return ONLY the lambda function and nothing else. No need for ```python or whatever.
-        Just a string starting with lambda.
-        """
-
-        response = await llm.ainvoke(prompt)
-        response_text = response.content if hasattr(response, "content") else str(response)
-        self.log(response_text)
+        return DATA_TRANSFORM_PROMPT.format(
+            dump_structure=dump_structure, data_sample=data_sample, instruction=self.filter_instruction
+        )
 
-        # Extract lambda using regex
+    def _parse_lambda_from_response(self, response_text: str) -> Callable[[Any], Any]:
+        """Extract and validate lambda function from LLM response."""
         lambda_match = re.search(r"lambda\s+\w+\s*:.*?(?=\n|$)", response_text)
         if not lambda_match:
             msg = f"Could not find lambda in response: {response_text}"
             raise ValueError(msg)
 
         lambda_text = lambda_match.group().strip()
-        self.log(lambda_text)
+        self.log(f"Generated lambda: {lambda_text}")
 
-        # Validation is commented out as requested
         if not self._validate_lambda(lambda_text):
            msg = f"Invalid lambda format: {lambda_text}"
            raise ValueError(msg)
 
-        # Create and apply the function
-        fn: Callable[[Any], Any] = eval(lambda_text)  # noqa: S307
+        return eval(lambda_text)  # noqa: S307
 
-        # Apply the lambda function to the data
+    async def _execute_lambda(self) -> Any:
+        """Generate and execute a lambda function based on input type."""
+        if self._is_message_input():
+            data: Any = self._extract_message_text()
+            prompt = self._build_text_prompt(data)
+        else:
+            data = self._extract_structured_data()
+            prompt = self._build_data_prompt(data)
+
+        llm = get_llm(model=self.model, user_id=self.user_id, api_key=self.api_key)
+        response = await llm.ainvoke(prompt)
+        response_text = response.content if hasattr(response, "content") else str(response)
+
+        fn = self._parse_lambda_from_response(response_text)
         return fn(data)
 
-    async def process_as_data(self) -> Data:
-        """Process the data and return as a Data object."""
-        result = await self._execute_lambda()
+    def _handle_process_error(self, error: Exception, output_type: str) -> None:
+        """Handle errors from process methods with context-aware messages."""
+        input_type = self._get_input_type_name()
+        error_msg = (
+            f"Failed to convert result to {output_type} output. "
+            f"Error: {error}. "
+            f"Input type was {input_type}. "
+            f"Try using the same output type as the input."
+        )
+        raise ValueError(error_msg) from error
 
-        # Convert result to Data based on type
+    def _convert_result_to_data(self, result: Any) -> Data:
+        """Convert lambda result to Data object."""
         if isinstance(result, dict):
             return Data(data=result)
         if isinstance(result, list):
             return Data(data={"_results": result})
-        # For other types, convert to string
         return Data(data={"text": str(result)})
 
-    async def process_as_dataframe(self) -> DataFrame:
-        """Process the data and return as a DataFrame."""
-        result = await self._execute_lambda()
-
-        # Convert result to DataFrame based on type
+    def _convert_result_to_dataframe(self, result: Any) -> DataFrame:
+        """Convert lambda result to DataFrame object."""
        if isinstance(result, list):
-            # Check if it's a list of dicts
            if all(isinstance(item, dict) for item in result):
                return DataFrame(result)
-            # List of non-dicts: wrap each value
            return DataFrame([{"value": item} for item in result])
        if isinstance(result, dict):
-            # Single dict becomes single-row DataFrame
            return DataFrame([result])
-        # Other types: convert to string and wrap
        return DataFrame([{"value": str(result)}])
+
+    def _convert_result_to_message(self, result: Any) -> Message:
+        """Convert lambda result to Message object."""
+        if isinstance(result, str):
+            return Message(text=result, sender=MESSAGE_SENDER_AI)
+        if isinstance(result, list):
+            text = "\n".join(str(item) for item in result)
+            return Message(text=text, sender=MESSAGE_SENDER_AI)
+        if isinstance(result, dict):
+            text = json.dumps(result, indent=2)
+            return Message(text=text, sender=MESSAGE_SENDER_AI)
+        return Message(text=str(result), sender=MESSAGE_SENDER_AI)
+
+    async def process_as_data(self) -> Data:
+        """Process the data and return as a Data object."""
+        try:
+            result = await self._execute_lambda()
+            return self._convert_result_to_data(result)
+        except Exception as e:  # noqa: BLE001 - dynamic lambda can raise any exception
+            self._handle_process_error(e, "Data")
+
+    async def process_as_dataframe(self) -> DataFrame:
+        """Process the data and return as a DataFrame."""
+        try:
+            result = await self._execute_lambda()
+            return self._convert_result_to_dataframe(result)
+        except Exception as e:  # noqa: BLE001 - dynamic lambda can raise any exception
+            self._handle_process_error(e, "DataFrame")
+
+    async def process_as_message(self) -> Message:
+        """Process the data and return as a Message."""
+        try:
+            result = await self._execute_lambda()
+            return self._convert_result_to_message(result)
+        except Exception as e:  # noqa: BLE001 - dynamic lambda can raise any exception
+            self._handle_process_error(e, "Message")
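
The refactor above splits the old monolithic _execute_lambda into prompt-building, parsing, and conversion helpers, but the core mechanism is unchanged: ask the model for a lambda string, extract it with a regex, validate it, eval() it, and apply it to the input. The following standalone sketch (hypothetical parse_lambda helper and stubbed LLM reply, no lfx imports) reproduces only that extract/validate/eval step to make the flow easier to follow; it is an illustration, not code from the package.

    # Standalone sketch: extract-validate-eval pattern mirrored from the diff above.
    import re
    from collections.abc import Callable
    from typing import Any

    def parse_lambda(response_text: str) -> Callable[[Any], Any]:
        """Extract a lambda expression from an LLM reply and compile it with eval()."""
        match = re.search(r"lambda\s+\w+\s*:.*?(?=\n|$)", response_text)
        if not match:
            raise ValueError(f"Could not find lambda in response: {response_text}")
        lambda_text = match.group().strip()
        if not (lambda_text.startswith("lambda") and ":" in lambda_text):
            raise ValueError(f"Invalid lambda format: {lambda_text}")
        return eval(lambda_text)  # noqa: S307 - acceptable only in this illustration

    # A reply of the kind TEXT_TRANSFORM_PROMPT asks for:
    fake_llm_reply = "lambda text: text.upper()"
    fn = parse_lambda(fake_llm_reply)
    print(fn("keep only the signal"))  # -> "KEEP ONLY THE SIGNAL"
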
lfx/components/models_and_agents/mcp_component.py
@@ -15,7 +15,7 @@ from lfx.base.mcp.util import (
 )
 from lfx.custom.custom_component.component_with_cache import ComponentWithCache
 from lfx.inputs.inputs import InputTypes  # noqa: TC001
-from lfx.io import BoolInput, DropdownInput, McpInput, MessageTextInput, Output
+from lfx.io import BoolInput, DictInput, DropdownInput, McpInput, MessageTextInput, Output
 from lfx.io.schema import flatten_schema, schema_to_langflow_inputs
 from lfx.log.logger import logger
 from lfx.schema.dataframe import DataFrame
@@ -87,6 +87,7 @@ class MCPToolsComponent(ComponentWithCache):
         "tool",
         "use_cache",
         "verify_ssl",
+        "headers",
     ]
 
     display_name = "MCP Tools"
@@ -122,6 +123,17 @@ class MCPToolsComponent(ComponentWithCache):
             value=True,
             advanced=True,
         ),
+        DictInput(
+            name="headers",
+            display_name="Headers",
+            info=(
+                "HTTP headers to include with MCP server requests. "
+                "Useful for authentication (e.g., Authorization header). "
+                "These headers override any headers configured in the MCP server settings."
+            ),
+            advanced=True,
+            is_list=True,
+        ),
         DropdownInput(
             name="tool",
             display_name="Tool",
@@ -258,6 +270,31 @@ class MCPToolsComponent(ComponentWithCache):
         verify_ssl = getattr(self, "verify_ssl", True)
         server_config["verify_ssl"] = verify_ssl
 
+        # Merge headers from component input with server config headers
+        # Component headers take precedence over server config headers
+        component_headers = getattr(self, "headers", None) or []
+        if component_headers:
+            # Convert list of {"key": k, "value": v} to dict
+            component_headers_dict = {}
+            if isinstance(component_headers, list):
+                for item in component_headers:
+                    if isinstance(item, dict) and "key" in item and "value" in item:
+                        component_headers_dict[item["key"]] = item["value"]
+            elif isinstance(component_headers, dict):
+                component_headers_dict = component_headers
+
+            if component_headers_dict:
+                existing_headers = server_config.get("headers", {}) or {}
+                # Ensure existing_headers is a dict (convert from list if needed)
+                if isinstance(existing_headers, list):
+                    existing_dict = {}
+                    for item in existing_headers:
+                        if isinstance(item, dict) and "key" in item and "value" in item:
+                            existing_dict[item["key"]] = item["value"]
+                    existing_headers = existing_dict
+                merged_headers = {**existing_headers, **component_headers_dict}
+                server_config["headers"] = merged_headers
+
         _, tool_list, tool_cache = await update_tools(
             server_name=server_name,
             server_config=server_config,
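
The merge logic in the last hunk accepts headers in either of two shapes (a plain dict, or the DictInput list form of {"key": ..., "value": ...} items) and lets component-level headers win over server-config headers. Below is a minimal standalone sketch of that rule, using a hypothetical merge_headers helper that is not part of the package:

    # Standalone sketch of the header-merge rule: normalize both inputs to dicts,
    # then let component-level headers override server-config headers.
    def merge_headers(server_headers, component_headers):
        def to_dict(headers):
            if isinstance(headers, dict):
                return dict(headers)
            result = {}
            for item in headers or []:
                if isinstance(item, dict) and "key" in item and "value" in item:
                    result[item["key"]] = item["value"]
            return result

        merged = to_dict(server_headers)
        merged.update(to_dict(component_headers))  # component values win
        return merged

    print(merge_headers(
        [{"key": "Authorization", "value": "Bearer old"}],
        {"Authorization": "Bearer new", "X-Trace": "abc"},
    ))
    # {'Authorization': 'Bearer new', 'X-Trace': 'abc'}
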
lfx/components/models_and_agents/prompt.py
@@ -1,22 +1,36 @@
+from typing import Any
+
 from lfx.base.prompts.api_utils import process_prompt_template
 from lfx.custom.custom_component.component import Component
+from lfx.inputs.input_mixin import FieldTypes
 from lfx.inputs.inputs import DefaultPromptField
-from lfx.io import MessageTextInput, Output, PromptInput
+from lfx.io import BoolInput, MessageTextInput, Output, PromptInput
+from lfx.log.logger import logger
+from lfx.schema.dotdict import dotdict
 from lfx.schema.message import Message
 from lfx.template.utils import update_template_values
+from lfx.utils.mustache_security import validate_mustache_template
 
 
 class PromptComponent(Component):
     display_name: str = "Prompt Template"
     description: str = "Create a prompt template with dynamic variables."
     documentation: str = "https://docs.langflow.org/components-prompts"
-    icon = "braces"
+    icon = "prompts"
     trace_type = "prompt"
     name = "Prompt Template"
     priority = 0  # Set priority to 0 to make it appear first
 
     inputs = [
         PromptInput(name="template", display_name="Template"),
+        BoolInput(
+            name="use_double_brackets",
+            display_name="Use Double Brackets",
+            value=False,
+            advanced=True,
+            info="Use {{variable}} syntax instead of {variable}.",
+            real_time_refresh=True,
+        ),
         MessageTextInput(
             name="tool_placeholder",
             display_name="Tool Placeholder",
@@ -30,34 +44,107 @@ class PromptComponent(Component):
         Output(display_name="Prompt", name="prompt", method="build_prompt"),
     ]
 
+    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:
+        """Update the template field type based on the selected mode."""
+        if field_name == "use_double_brackets":
+            # Change the template field type based on mode
+            is_mustache = field_value is True
+            if is_mustache:
+                build_config["template"]["type"] = FieldTypes.MUSTACHE_PROMPT.value
+            else:
+                build_config["template"]["type"] = FieldTypes.PROMPT.value
+
+            # Re-process the template to update variables when mode changes
+            template_value = build_config.get("template", {}).get("value", "")
+            if template_value:
+                # Ensure custom_fields is properly initialized
+                if "custom_fields" not in build_config:
+                    build_config["custom_fields"] = {}
+
+                # Clean up fields from the OLD mode before processing with NEW mode
+                # This ensures we don't keep fields with wrong syntax even if validation fails
+                old_custom_fields = build_config["custom_fields"].get("template", [])
+                for old_field in list(old_custom_fields):
+                    # Remove the field from custom_fields and template
+                    if old_field in old_custom_fields:
+                        old_custom_fields.remove(old_field)
+                    build_config.pop(old_field, None)
+
+                # Try to process template with new mode to add new variables
+                # If validation fails, at least we cleaned up old fields
+                try:
+                    # Validate mustache templates for security
+                    if is_mustache:
+                        validate_mustache_template(template_value)
+
+                    # Re-process template with new mode to add new variables
+                    _ = process_prompt_template(
+                        template=template_value,
+                        name="template",
+                        custom_fields=build_config["custom_fields"],
+                        frontend_node_template=build_config,
+                        is_mustache=is_mustache,
+                    )
+                except ValueError as e:
+                    # If validation fails, we still updated the mode and cleaned old fields
+                    # User will see error when they try to save
+                    logger.debug(f"Template validation failed during mode switch: {e}")
+        return build_config
+
     async def build_prompt(self) -> Message:
-        prompt = Message.from_template(**self._attributes)
+        use_double_brackets = self.use_double_brackets if hasattr(self, "use_double_brackets") else False
+        template_format = "mustache" if use_double_brackets else "f-string"
+        prompt = await Message.from_template_and_variables(template_format=template_format, **self._attributes)
         self.status = prompt.text
         return prompt
 
     def _update_template(self, frontend_node: dict):
         prompt_template = frontend_node["template"]["template"]["value"]
-        custom_fields = frontend_node["custom_fields"]
-        frontend_node_template = frontend_node["template"]
-        _ = process_prompt_template(
-            template=prompt_template,
-            name="template",
-            custom_fields=custom_fields,
-            frontend_node_template=frontend_node_template,
-        )
+        use_double_brackets = frontend_node["template"].get("use_double_brackets", {}).get("value", False)
+        is_mustache = use_double_brackets is True
+
+        try:
+            # Validate mustache templates for security
+            if is_mustache:
+                validate_mustache_template(prompt_template)
+
+            custom_fields = frontend_node["custom_fields"]
+            frontend_node_template = frontend_node["template"]
+            _ = process_prompt_template(
+                template=prompt_template,
+                name="template",
+                custom_fields=custom_fields,
+                frontend_node_template=frontend_node_template,
+                is_mustache=is_mustache,
+            )
+        except ValueError as e:
+            # If validation fails, don't add variables but allow component to be created
+            logger.debug(f"Template validation failed in _update_template: {e}")
         return frontend_node
 
     async def update_frontend_node(self, new_frontend_node: dict, current_frontend_node: dict):
         """This function is called after the code validation is done."""
         frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)
         template = frontend_node["template"]["template"]["value"]
-        # Kept it duplicated for backwards compatibility
-        _ = process_prompt_template(
-            template=template,
-            name="template",
-            custom_fields=frontend_node["custom_fields"],
-            frontend_node_template=frontend_node["template"],
-        )
+        use_double_brackets = frontend_node["template"].get("use_double_brackets", {}).get("value", False)
+        is_mustache = use_double_brackets is True
+
+        try:
+            # Validate mustache templates for security
+            if is_mustache:
+                validate_mustache_template(template)
+
+            # Kept it duplicated for backwards compatibility
+            _ = process_prompt_template(
+                template=template,
+                name="template",
+                custom_fields=frontend_node["custom_fields"],
+                frontend_node_template=frontend_node["template"],
+                is_mustache=is_mustache,
+            )
+        except ValueError as e:
+            # If validation fails, don't add variables but allow component to be updated
+            logger.debug(f"Template validation failed in update_frontend_node: {e}")
         # Now that template is updated, we need to grab any values that were set in the current_frontend_node
         # and update the frontend_node with those values
         update_template_values(new_template=frontend_node, previous_template=current_frontend_node["template"])
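
The effect of the new use_double_brackets toggle is easiest to see side by side. The rough, self-contained illustration below approximates the two template syntaxes the toggle switches between; it is not the package's renderer (the component delegates to Message.from_template_and_variables), and render_fstring / render_mustache are hypothetical names used only here.

    # Rough illustration of f-string-style {var} templates vs mustache-style {{var}} templates.
    import re

    def render_fstring(template: str, **variables) -> str:
        # Default mode: single-bracket placeholders handled like str.format.
        return template.format(**variables)

    def render_mustache(template: str, **variables) -> str:
        # Double-bracket mode: replace {{name}} with the matching variable.
        return re.sub(r"\{\{\s*(\w+)\s*\}\}", lambda m: str(variables.get(m.group(1), "")), template)

    print(render_fstring("Hello {name}", name="Ada"))    # f-string mode (default)
    print(render_mustache("Hello {{name}}", name="Ada")) # double-bracket mode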