lfx-nightly 0.1.12.dev41__py3-none-any.whl → 0.1.13.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lfx/base/models/cometapi_constants.py ADDED
@@ -0,0 +1,54 @@
+ """CometAPI model constants and configuration.
+
+ This module contains the default model names available through CometAPI.
+ These models are used as fallbacks when the API is unavailable or when
+ no API key is provided.
+ """
+
+ from typing import Final
+
+ # CometAPI available model list based on actual API offerings
+ COMETAPI_MODELS: Final[list[str]] = [
+     # GPT series
+     "gpt-5-chat-latest",
+     "chatgpt-4o-latest",
+     "gpt-5-mini",
+     "gpt-5-nano",
+     "gpt-5",
+     "gpt-4.1-mini",
+     "gpt-4.1-nano",
+     "gpt-4.1",
+     "gpt-4o-mini",
+     "o4-mini-2025-04-16",
+     "o3-pro-2025-06-10",
+     # Claude series
+     "claude-sonnet-4-5-20250929",
+     "claude-opus-4-1-20250805",
+     "claude-opus-4-1-20250805-thinking",
+     "claude-sonnet-4-20250514",
+     "claude-sonnet-4-20250514-thinking",
+     "claude-3-7-sonnet-latest",
+     "claude-3-5-haiku-latest",
+     # Gemini series
+     "gemini-2.5-pro",
+     "gemini-2.5-flash",
+     "gemini-2.5-flash-lite",
+     "gemini-2.0-flash",
+     # Grok series
+     "grok-4-0709",
+     "grok-3",
+     "grok-3-mini",
+     "grok-2-image-1212",
+     # DeepSeek series
+     "deepseek-v3.1",
+     "deepseek-v3",
+     "deepseek-r1-0528",
+     "deepseek-chat",
+     "deepseek-reasoner",
+     # Qwen series
+     "qwen3-30b-a3b",
+     "qwen3-coder-plus-2025-07-22",
+ ]
+
+ # Backward compatibility alias
+ MODEL_NAMES: Final[list[str]] = COMETAPI_MODELS
lfx/components/agents/cuga_agent.py CHANGED
@@ -254,14 +254,32 @@ class CugaComponent(ToolCallingAgentComponent):
          }
          logger.debug(f"LLM MODEL TYPE: {type(llm)}")
          if current_input:
-             os.environ["DYNACONF_ADVANCED_FEATURES__REGISTRY"] = "false"
-             if self.browser_enabled:
-                 logger.info("browser_enabled is true, setting env to hybrid")
-                 os.environ["DYNACONF_ADVANCED_FEATURES__MODE"] = "hybrid"
-                 os.environ["DYNACONF_ADVANCED_FEATURES__USE_VISION"] = "false"
-             else:
-                 logger.info("browser_enabled is false, setting env to api")
-                 os.environ["DYNACONF_ADVANCED_FEATURES__MODE"] = "api"
+             try:
+                 from cuga.config import settings as cuga_settings
+
+                 logger.info("Updating cuga settings programmatically")
+                 cuga_settings.set("advanced_features.registry", False)  # noqa: FBT003
+
+                 if self.browser_enabled:
+                     logger.info("browser_enabled is true, setting mode to hybrid")
+                     cuga_settings.set("advanced_features.mode", "hybrid")
+                     cuga_settings.set("advanced_features.use_vision", False)  # noqa: FBT003
+                 else:
+                     logger.info("browser_enabled is false, setting mode to api")
+                     cuga_settings.set("advanced_features.mode", "api")
+
+                 logger.info(f"Cuga settings updated - MODE: {cuga_settings.get('advanced_features.mode')}")
+             except (ImportError, AttributeError) as e:
+                 logger.warning(f"Could not update cuga settings: {e}")
+                 os.environ["DYNACONF_ADVANCED_FEATURES__REGISTRY"] = "false"
+                 if self.browser_enabled:
+                     logger.info("browser_enabled is true, setting env to hybrid")
+                     os.environ["DYNACONF_ADVANCED_FEATURES__MODE"] = "hybrid"
+                     os.environ["DYNACONF_ADVANCED_FEATURES__USE_VISION"] = "false"
+                 else:
+                     logger.info("browser_enabled is false, setting env to api")
+                     os.environ["DYNACONF_ADVANCED_FEATURES__MODE"] = "api"
+
          from cuga.backend.activity_tracker.tracker import ActivityTracker
          from cuga.backend.cuga_graph.utils.agent_loop import StreamEvent
          from cuga.backend.cuga_graph.utils.controller import (
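Note: the change prefers updating `cuga.config.settings` in place (Dynaconf-style dotted keys) and keeps the `DYNACONF_*` environment variables only as a fallback when the programmatic path is unavailable. A minimal sketch of the same pattern in isolation (the function name is illustrative):

    import os

    def configure_cuga(browser_enabled: bool) -> None:
        """Prefer in-process settings; fall back to Dynaconf env-var overrides."""
        mode = "hybrid" if browser_enabled else "api"
        try:
            from cuga.config import settings  # assumed to expose a Dynaconf settings object

            settings.set("advanced_features.registry", False)
            settings.set("advanced_features.mode", mode)
        except (ImportError, AttributeError):
            # Fallback mirrors the component's previous behavior.
            os.environ["DYNACONF_ADVANCED_FEATURES__REGISTRY"] = "false"
            os.environ["DYNACONF_ADVANCED_FEATURES__MODE"] = mode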
lfx/components/cometapi/__init__.py ADDED
@@ -0,0 +1,32 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any
+
+ from lfx.components._importing import import_mod
+
+ if TYPE_CHECKING:
+     from lfx.components.cometapi.cometapi import CometAPIComponent
+
+ _dynamic_imports = {
+     "CometAPIComponent": "cometapi",
+ }
+
+ __all__ = ["CometAPIComponent"]
+
+
+ def __getattr__(attr_name: str) -> Any:
+     """Lazily import cometapi components on attribute access."""
+     if attr_name not in _dynamic_imports:
+         msg = f"module '{__name__}' has no attribute '{attr_name}'"
+         raise AttributeError(msg)
+     try:
+         result = import_mod(attr_name, _dynamic_imports[attr_name], __spec__.parent)
+     except (ModuleNotFoundError, ImportError, AttributeError) as e:
+         msg = f"Could not import '{attr_name}' from '{__name__}': {e}"
+         raise AttributeError(msg) from e
+     globals()[attr_name] = result
+     return result
+
+
+ def __dir__() -> list[str]:
+     return list(__all__)
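Note: the new package `__init__` relies on a module-level `__getattr__` (PEP 562), so the component module and its `langchain_openai` dependency are imported only on first attribute access and then cached in the module globals. A minimal usage sketch:

    # The first access below triggers __getattr__, which calls import_mod
    # for lfx.components.cometapi.cometapi and caches CometAPIComponent.
    from lfx.components.cometapi import CometAPIComponent

    component = CometAPIComponent()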
lfx/components/cometapi/cometapi.py ADDED
@@ -0,0 +1,166 @@
+ import json
+
+ import requests
+ from langchain_openai import ChatOpenAI
+ from pydantic.v1 import SecretStr
+ from typing_extensions import override
+
+ from lfx.base.models.cometapi_constants import MODEL_NAMES
+ from lfx.base.models.model import LCModelComponent
+ from lfx.field_typing import LanguageModel
+ from lfx.field_typing.range_spec import RangeSpec
+ from lfx.inputs.inputs import (
+     BoolInput,
+     DictInput,
+     DropdownInput,
+     IntInput,
+     SecretStrInput,
+     SliderInput,
+     StrInput,
+ )
+
+
+ class CometAPIComponent(LCModelComponent):
+     """CometAPI component for language models."""
+
+     display_name = "CometAPI"
+     description = "All AI Models in One API 500+ AI Models"
+     icon = "CometAPI"
+     name = "CometAPIModel"
+
+     inputs = [
+         *LCModelComponent.get_base_inputs(),
+         SecretStrInput(
+             name="api_key",
+             display_name="CometAPI Key",
+             required=True,
+             info="Your CometAPI key",
+             real_time_refresh=True,
+         ),
+         StrInput(
+             name="app_name",
+             display_name="App Name",
+             info="Your app name for CometAPI rankings",
+             advanced=True,
+         ),
+         DropdownInput(
+             name="model_name",
+             display_name="Model",
+             info="The model to use for chat completion",
+             options=["Select a model"],
+             value="Select a model",
+             real_time_refresh=True,
+             required=True,
+         ),
+         DictInput(
+             name="model_kwargs",
+             display_name="Model Kwargs",
+             info="Additional keyword arguments to pass to the model.",
+             advanced=True,
+         ),
+         SliderInput(
+             name="temperature",
+             display_name="Temperature",
+             value=0.7,
+             range_spec=RangeSpec(min=0, max=2, step=0.01),
+             info="Controls randomness. Lower values are more deterministic, higher values are more creative.",
+             advanced=True,
+         ),
+         IntInput(
+             name="max_tokens",
+             display_name="Max Tokens",
+             info="Maximum number of tokens to generate",
+             advanced=True,
+         ),
+         IntInput(
+             name="seed",
+             display_name="Seed",
+             info="Seed for reproducible outputs.",
+             value=1,
+             advanced=True,
+         ),
+         BoolInput(
+             name="json_mode",
+             display_name="JSON Mode",
+             info="If enabled, the model will be asked to return a JSON object.",
+             advanced=True,
+         ),
+     ]
+
+     def get_models(self, token_override: str | None = None) -> list[str]:
+         base_url = "https://api.cometapi.com/v1"
+         url = f"{base_url}/models"
+
+         headers = {"Content-Type": "application/json"}
+         # Add Bearer Authorization when API key is available
+         api_key_source = token_override if token_override else getattr(self, "api_key", None)
+         if api_key_source:
+             token = api_key_source.get_secret_value() if isinstance(api_key_source, SecretStr) else str(api_key_source)
+             headers["Authorization"] = f"Bearer {token}"
+
+         try:
+             response = requests.get(url, headers=headers, timeout=10)
+             response.raise_for_status()
+             # Safely parse JSON; fallback to defaults on failure
+             try:
+                 model_list = response.json()
+             except (json.JSONDecodeError, ValueError) as e:
+                 self.status = f"Error decoding models response: {e}"
+                 return MODEL_NAMES
+             return [model["id"] for model in model_list.get("data", [])]
+         except requests.RequestException as e:
+             self.status = f"Error fetching models: {e}"
+             return MODEL_NAMES
+
+     @override
+     def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):
+         if field_name == "api_key":
+             models = self.get_models(field_value)
+             model_cfg = build_config.get("model_name", {})
+             # Preserve placeholder (fallback to existing value or a generic prompt)
+             placeholder = model_cfg.get("placeholder", model_cfg.get("value", "Select a model"))
+             current_value = model_cfg.get("value")
+
+             options = list(models) if models else []
+             # Ensure current value stays visible even if not present in fetched options
+             if current_value and current_value not in options:
+                 options = [current_value, *options]
+
+             model_cfg["options"] = options
+             model_cfg["placeholder"] = placeholder
+             build_config["model_name"] = model_cfg
+         return build_config
+
+     def build_model(self) -> LanguageModel:  # type: ignore[type-var]
+         api_key = self.api_key
+         temperature = self.temperature
+         model_name: str = self.model_name
+         max_tokens = self.max_tokens
+         model_kwargs = getattr(self, "model_kwargs", {}) or {}
+         json_mode = self.json_mode
+         seed = self.seed
+         # Ensure a valid model was selected
+         if not model_name or model_name == "Select a model":
+             msg = "Please select a valid CometAPI model."
+             raise ValueError(msg)
+         try:
+             # Extract raw API key safely
+             _api_key = api_key.get_secret_value() if isinstance(api_key, SecretStr) else api_key
+             output = ChatOpenAI(
+                 model=model_name,
+                 api_key=_api_key or None,
+                 max_tokens=max_tokens or None,
+                 temperature=temperature,
+                 model_kwargs=model_kwargs,
+                 streaming=bool(self.stream),
+                 seed=seed,
+                 base_url="https://api.cometapi.com/v1",
+             )
+         except (TypeError, ValueError) as e:
+             msg = "Could not connect to CometAPI."
+             raise ValueError(msg) from e
+
+         if json_mode:
+             output = output.bind(response_format={"type": "json_object"})
+
+         return output
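Note: `build_model` is a thin wrapper around `ChatOpenAI` pointed at CometAPI's OpenAI-compatible endpoint, and `get_models` falls back to the bundled `MODEL_NAMES` list when the `/v1/models` request fails. A rough sketch of the equivalent direct client call, assuming a valid key (values are illustrative):

    from langchain_openai import ChatOpenAI

    # CometAPI exposes an OpenAI-compatible /v1 API, so the stock client works
    # once base_url and api_key are overridden.
    llm = ChatOpenAI(
        model="gpt-4o-mini",          # any id returned by get_models()
        api_key="YOUR_COMETAPI_KEY",  # assumption: normally read from the COMETAPI_KEY env var
        base_url="https://api.cometapi.com/v1",
        temperature=0.7,
    )
    # llm.invoke("Hello") would route the request through CometAPI.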
lfx/components/helpers/memory.py CHANGED
@@ -19,10 +19,10 @@ class MemoryComponent(Component):
      documentation: str = "https://docs.langflow.org/components-helpers#message-history"
      icon = "message-square-more"
      name = "Memory"
-     default_keys = ["mode", "memory", "session_id"]
+     default_keys = ["mode", "memory", "session_id", "context_id"]
      mode_config = {
-         "Store": ["message", "memory", "sender", "sender_name", "session_id"],
-         "Retrieve": ["n_messages", "order", "template", "memory", "session_id"],
+         "Store": ["message", "memory", "sender", "sender_name", "session_id", "context_id"],
+         "Retrieve": ["n_messages", "order", "template", "memory", "session_id", "context_id"],
      }

      inputs = [
@@ -86,6 +86,13 @@ class MemoryComponent(Component):
              value="",
              advanced=True,
          ),
+         MessageTextInput(
+             name="context_id",
+             display_name="Context ID",
+             info="The context ID of the chat. Adds an extra layer to the local memory.",
+             value="",
+             advanced=True,
+         ),
          DropdownInput(
              name="order",
              display_name="Order",
@@ -141,6 +148,7 @@ class MemoryComponent(Component):
      async def store_message(self) -> Message:
          message = Message(text=self.message) if isinstance(self.message, str) else self.message

+         message.context_id = self.context_id or message.context_id
          message.session_id = self.session_id or message.session_id
          message.sender = self.sender or message.sender or MESSAGE_SENDER_AI
          message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI
@@ -148,6 +156,7 @@ class MemoryComponent(Component):
          stored_messages: list[Message] = []

          if self.memory:
+             self.memory.context_id = message.context_id
              self.memory.session_id = message.session_id
              lc_message = message.to_lc_message()
              await self.memory.aadd_messages([lc_message])
@@ -162,7 +171,10 @@ class MemoryComponent(Component):
              await astore_message(message, flow_id=self.graph.flow_id)
              stored_messages = (
                  await aget_messages(
-                     session_id=message.session_id, sender_name=message.sender_name, sender=message.sender
+                     session_id=message.session_id,
+                     context_id=message.context_id,
+                     sender_name=message.sender_name,
+                     sender=message.sender,
                  )
                  or []
              )
@@ -179,6 +191,7 @@ class MemoryComponent(Component):
          sender_type = self.sender_type
          sender_name = self.sender_name
          session_id = self.session_id
+         context_id = self.context_id
          n_messages = self.n_messages
          order = "DESC" if self.order == "Descending" else "ASC"

@@ -195,6 +208,7 @@ class MemoryComponent(Component):
          elif self.memory:
              # override session_id
              self.memory.session_id = session_id
+             self.memory.context_id = context_id

              stored = await self.memory.aget_messages()
              # langchain memories are supposed to return messages in ascending order
@@ -215,6 +229,7 @@ class MemoryComponent(Component):
              sender=sender_type,
              sender_name=sender_name,
              session_id=session_id,
+             context_id=context_id,
              limit=10000,
              order=order,
          )
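Note: `context_id` is now threaded through both the Store and Retrieve paths of the Memory component, so two flows can share a `session_id` while keeping separate histories. A minimal retrieval sketch against the updated `aget_messages` signature (IDs are illustrative):

    from lfx.memory.stubs import aget_messages

    async def fetch_history() -> list:
        # context_id narrows the lookup within a session; omit it for the old behavior.
        return await aget_messages(
            session_id="session-123",
            context_id="support-thread-1",
            order="ASC",
            limit=100,
        ) or []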
lfx/memory/stubs.py CHANGED
@@ -165,6 +165,7 @@ async def aget_messages(
      sender: str | None = None,  # noqa: ARG001
      sender_name: str | None = None,  # noqa: ARG001
      session_id: str | UUID | None = None,  # noqa: ARG001
+     context_id: str | UUID | None = None,  # noqa: ARG001
      order_by: str | None = "timestamp",  # noqa: ARG001
      order: str | None = "DESC",  # noqa: ARG001
      flow_id: UUID | None = None,  # noqa: ARG001
@@ -176,6 +177,7 @@ async def aget_messages(
          sender (Optional[str]): The sender of the messages (e.g., "Machine" or "User")
          sender_name (Optional[str]): The name of the sender.
          session_id (Optional[str]): The session ID associated with the messages.
+         context_id (Optional[str]): The context ID associated with the messages.
          order_by (Optional[str]): The field to order the messages by. Defaults to "timestamp".
          order (Optional[str]): The order in which to retrieve the messages. Defaults to "DESC".
          flow_id (Optional[UUID]): The flow ID associated with the messages.
@@ -200,6 +202,7 @@ def get_messages(
      sender: str | None = None,
      sender_name: str | None = None,
      session_id: str | UUID | None = None,
+     context_id: str | UUID | None = None,
      order_by: str | None = "timestamp",
      order: str | None = "DESC",
      flow_id: UUID | None = None,
@@ -209,33 +212,49 @@ def get_messages(

      DEPRECATED: Use `aget_messages` instead.
      """
-     return run_until_complete(aget_messages(sender, sender_name, session_id, order_by, order, flow_id, limit))
+     return run_until_complete(
+         aget_messages(
+             sender,
+             sender_name,
+             session_id,
+             context_id,
+             order_by,
+             order,
+             flow_id,
+             limit,
+         )
+     )


- async def adelete_messages(session_id: str) -> None:
-     """Delete messages from the memory based on the provided session ID.
+ async def adelete_messages(session_id: str | None = None, context_id: str | None = None) -> None:
+     """Delete messages from the memory based on the provided session or context ID.

      Args:
          session_id (str): The session ID associated with the messages to delete.
+         context_id (str): The context ID associated with the messages to delete.
      """
+     if not session_id and not context_id:
+         msg = "Either session_id or context_id must be provided to delete messages."
+         raise ValueError(msg)
+
      async with session_scope() as session:
          try:
              # In a real implementation, this would delete from database
              # For now, this is a no-op since we're using NoopSession
-             await session.delete(session_id)
+             await session.delete(session_id or context_id)  # type: ignore # noqa: PGH003
              await session.commit()
-             logger.debug(f"Messages deleted for session: {session_id}")
+             logger.debug(f"Messages deleted for session: {session_id or context_id}")
          except Exception as e:
              logger.exception(f"Error deleting messages: {e}")
              raise


- def delete_messages(session_id: str) -> None:
+ def delete_messages(session_id: str | None = None, context_id: str | None = None) -> None:
      """DEPRECATED - Delete messages based on the provided session ID.

      DEPRECATED: Use `adelete_messages` instead.
      """
-     return run_until_complete(adelete_messages(session_id))
+     return run_until_complete(adelete_messages(session_id, context_id))


  async def aadd_messages(messages: Message | list[Message]) -> list[Message]:
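Note: `adelete_messages` now accepts either identifier and raises `ValueError` when both are missing, and the synchronous `delete_messages` wrapper forwards both. A minimal call sketch (the ID is illustrative):

    from lfx.memory.stubs import adelete_messages

    async def clear_context() -> None:
        # Deleting by context_id alone is now allowed; passing neither raises ValueError.
        await adelete_messages(context_id="support-thread-1")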
lfx/schema/message.py CHANGED
@@ -40,6 +40,7 @@ class Message(Data):
      sender_name: str | None = None
      files: list[str | Image] | None = Field(default=[])
      session_id: str | UUID | None = Field(default="")
+     context_id: str | UUID | None = Field(default="")
      timestamp: Annotated[str, timestamp_to_str_validator] = Field(
          default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z")
      )
@@ -188,6 +189,7 @@ class Message(Data):
              sender_name=data.sender_name,
              files=data.files,
              session_id=data.session_id,
+             context_id=data.context_id,
              timestamp=data.timestamp,
              flow_id=data.flow_id,
              error=data.error,
@@ -326,6 +328,7 @@ class MessageResponse(DefaultModel):
      sender: str
      sender_name: str
      session_id: str
+     context_id: str | None = None
      text: str
      files: list[str] = []
      edit: bool
@@ -383,6 +386,7 @@ class MessageResponse(DefaultModel):
              sender_name=message.sender_name,
              text=message.text,
              session_id=message.session_id,
+             context_id=message.context_id,
              files=message.files or [],
              timestamp=message.timestamp,
              flow_id=flow_id,
@@ -433,6 +437,7 @@ class ErrorMessage(Message):
          self,
          exception: BaseException,
          session_id: str | None = None,
+         context_id: str | None = None,
          source: Source | None = None,
          trace_name: str | None = None,
          flow_id: UUID | str | None = None,
@@ -451,6 +456,7 @@ class ErrorMessage(Message):

          super().__init__(
              session_id=session_id,
+             context_id=context_id,
              sender=source.display_name if source else None,
              sender_name=source.display_name if source else None,
              text=plain_reason,
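Note: `context_id` joins `session_id` on the `Message` schema with the same empty-string default, and it is optional on `MessageResponse`, so existing payloads without it stay valid. A minimal construction sketch (field values are illustrative):

    from lfx.schema.message import Message

    # context_id defaults to "" like session_id, so older callers are unaffected.
    msg = Message(
        text="Hello",
        session_id="session-123",
        context_id="support-thread-1",
    )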
lfx/services/settings/constants.py CHANGED
@@ -31,4 +31,5 @@ VARIABLES_TO_GET_FROM_ENVIRONMENT = [
      "AWS_SECRET_ACCESS_KEY",
      "NOVITA_API_KEY",
      "TAVILY_API_KEY",
+     "COMETAPI_KEY",
  ]
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lfx-nightly
- Version: 0.1.12.dev41
+ Version: 0.1.13.dev0
  Summary: Langflow Executor - A lightweight CLI tool for executing and serving Langflow AI flows
  Author-email: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
  Requires-Python: <3.14,>=3.10
@@ -4,7 +4,7 @@ lfx/constants.py,sha256=Ert_SpwXhutgcTKEvtDArtkONXgyE5x68opMoQfukMA,203
  lfx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lfx/settings.py,sha256=wnx4zkOLQ8mvampYsnnvVV9GvEnRUuWQpKFSbFTCIp4,181
  lfx/type_extraction.py,sha256=eCZNl9nAQivKdaPv_9BK71N0JV9Rtr--veAht0dnQ4A,2921
- lfx/_assets/component_index.json,sha256=y4N7fkmU7ashl8Xwy7vz8Eb68MdF6r0rWSFLy2hNiTs,3894751
+ lfx/_assets/component_index.json,sha256=CbTH5uOq4p9-EKQLfGluol1YlRDIETIfxi374uvyalw,3910724
  lfx/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lfx/base/constants.py,sha256=v9vo0Ifg8RxDu__XqgGzIXHlsnUFyWM-SSux0uHHoz8,1187
  lfx/base/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -62,6 +62,7 @@ lfx/base/models/aiml_constants.py,sha256=7r_wG6MklHqPJgerDgXXrfL6cW4lJavXjyDaVpn
  lfx/base/models/anthropic_constants.py,sha256=51_fghjdfBRyLSNj3qa-ogyWmeP518HrdsTnV-C-j-Y,2751
  lfx/base/models/aws_constants.py,sha256=-Fa7T3wJqBaZhs80ATRgZP6yZ0Nsd1YYdZv9SfqT-Hs,6327
  lfx/base/models/chat_result.py,sha256=-MypS6_GKXOqWevtk0xwtrsEO4mIgpPAt7-EML5n0vA,2756
+ lfx/base/models/cometapi_constants.py,sha256=VDqpyBAXbI79N7YAacK9AGVI7sK7ojVzvxJY-3by-uw,1353
  lfx/base/models/google_generative_ai_constants.py,sha256=EuFd77ZrrSr6YtSKtmEaq0Nfa4y45AbDe_cz_18nReE,2564
  lfx/base/models/google_generative_ai_model.py,sha256=wEIyBkTZcZD3akUiAKTGxazTJnOQeh80WHMKiHdK1wo,1839
  lfx/base/models/groq_constants.py,sha256=WOMpYRwJVrZavsi7zGJwRHJX8ZBvdtILUOmBFv0QIPQ,5536
@@ -113,7 +114,7 @@ lfx/components/agentql/__init__.py,sha256=Erl669Dzsk-SegsDPWTtkKbprMXVuv8UTCo5RE
  lfx/components/agentql/agentql_api.py,sha256=N94yEK7ZuQCIsFBlr_8dqrJY-K1-KNb6QEEYfDIsDME,5569
  lfx/components/agents/__init__.py,sha256=UBu5kO9hp8yFyxTU-u9KHN9zTSoHhJSYdKtRuT5ig9c,1164
  lfx/components/agents/agent.py,sha256=cv8CqEvLKpTsR9YAg09rqjxEXbW0_GzW8oUxeWc4pHY,26681
- lfx/components/agents/cuga_agent.py,sha256=ynMAF96ANB9qxGck92urkFmqmAeJRUgc6dKulpuaeTM,43518
+ lfx/components/agents/cuga_agent.py,sha256=r_342LveLdERtvE84UJJOdKTEwWSIjvASRYke9JV2Ik,44494
  lfx/components/agents/mcp_component.py,sha256=mE2HvbHcdkuWWylxmaNNZobbtgBRktOOakeGwUYs7Qs,25586
  lfx/components/aiml/__init__.py,sha256=DNKB-HMFGFYmsdkON-s8557ttgBXVXADmS-BcuSQiIQ,1087
  lfx/components/aiml/aiml.py,sha256=23Ineg1ajlCoqXgWgp50I20OnQbaleRNsw1c6IzPu3A,3877
@@ -161,6 +162,8 @@ lfx/components/cohere/__init__.py,sha256=MSTeplsNIXTVm_dUcJETy6YGb-fw7-dplC9jzAo
  lfx/components/cohere/cohere_embeddings.py,sha256=nA9BOixk534yJZymJaukBrQYBj_uB2nyYvzJPd_3aUc,3083
  lfx/components/cohere/cohere_models.py,sha256=WUhS4dcG8FBcJm2dCfhiDuaxZX8S1lICMI_Mmd6kflo,1563
  lfx/components/cohere/cohere_rerank.py,sha256=qUoNEe6sjUnvkTHkCzwayBuLDoH957BBEgb-Qu_k9Yk,1554
+ lfx/components/cometapi/__init__.py,sha256=2wKkp6iJyQhCIe5wi5amkbAZZNdrH11REcJqWnptd-k,936
+ lfx/components/cometapi/cometapi.py,sha256=TsSohS6cx8XuPNYq85XUOm85Q4LQn3FMd5qipRBZlKk,6028
  lfx/components/composio/__init__.py,sha256=Ycwax7TUDTez6O9Q9cv0I9GT0eB1hquDIOMJo51R_64,7996
  lfx/components/composio/agentql_composio.py,sha256=zKcIoQq2WmY_if3b7e6N0S5Z-k1aDAoQSoeKePiRIwI,352
  lfx/components/composio/agiled_composio.py,sha256=MpSpUaGo0t2Lu-KzHpv4NT0LZNbvLwkdZ3gJ0gf9cdk,347
@@ -322,7 +325,7 @@ lfx/components/helpers/calculator_core.py,sha256=X8-ia-d91DDFnTgomG-CvReheMve_y0
  lfx/components/helpers/create_list.py,sha256=nsdw0DMQ6ZLyvJ0mQasB0ACkYE6I8avCbXCIv34Ba14,1146
  lfx/components/helpers/current_date.py,sha256=hznwtkoFTMy-HpHWEAC6FdVlf52oaFXCYLFh_5ud13o,1561
  lfx/components/helpers/id_generator.py,sha256=zduLTtvDX9WfHISGhSvY5sCTGfqomIVe5gu6KGQ_q9k,1203
- lfx/components/helpers/memory.py,sha256=79EfrCQWyroMaNcQN3kuNSDBKGKLq4FmwhrgmaHgDG4,9756
+ lfx/components/helpers/memory.py,sha256=In4FO0cEJG-xqqR0nVHXGnyxz5LGYAulUuXPSvHilVI,10382
  lfx/components/helpers/output_parser.py,sha256=m-tio-j7M2Ipjmgb37wy5JPIQBROTxku0QaHLAs7vUY,1574
  lfx/components/helpers/store_message.py,sha256=mtGqCLWXuB2RnHufqj1FbiGAnTQOURSZMCvvo3gNLtc,3489
  lfx/components/homeassistant/__init__.py,sha256=qTAvZrtw8Mf4F_9ZjHBTc1pAB-Pu5sLaUB46iR24E_c,195
@@ -654,7 +657,7 @@ lfx/log/logger.py,sha256=UaUlWEwws7SVa24_9ZuPwRgefoatzRV7nnZV7YQZjwU,14238
  lfx/logging/__init__.py,sha256=X5tXF5e1hc62adprRPLtKeaqm8-tpl6loXsxbh9IO-Q,367
  lfx/logging/logger.py,sha256=y7ophyWX5-r8RCxHJeAmGKyGeEhR-7imR-D8YBXU7CE,546
  lfx/memory/__init__.py,sha256=s7nCNKlcwLfT6Z_cXbiYjvoXQXZ-H2GqK1qsAuKBV08,1815
- lfx/memory/stubs.py,sha256=kR6TRI2t6rPvA5Pja5XPC4yvKRBFBuJfdI0hJL8vfwU,9924
+ lfx/memory/stubs.py,sha256=QMWBqJ_2WPiRWAkVi5gH6p5BqUD8VEiqXi96EZPSfvI,10650
  lfx/processing/__init__.py,sha256=jERZg6it9mhOzrbTAt9YtakSNXPSjUXFh5MfKBN48wA,41
  lfx/processing/process.py,sha256=FSYjseEWEgfBxP4GDkfRVVSyrvXwyIb7U0pTVc1gV_w,9252
  lfx/processing/utils.py,sha256=ptX2AHbhoPkmZ5O7BXITCo58jBZ_u1OuK59VlQRQsYU,754
@@ -671,7 +674,7 @@ lfx/schema/graph.py,sha256=o7qXhHZT4lEwjJZtlg4k9SNPgmMVZsZsclBbe8v_y6Y,1313
  lfx/schema/image.py,sha256=WdaOT3bjkJaG28RpgmurtfcnOG7Hr2phZ27YXH25uHA,5970
  lfx/schema/json_schema.py,sha256=UzMRSSAiLewJpf7B0XY4jPnPt0iskf61QUBxPdyiYys,6871
  lfx/schema/log.py,sha256=TISQa44D4pL_-AOw9p0nOPV-7s6Phl-0yrpuZihhEsU,1981
- lfx/schema/message.py,sha256=mHHTX9OCHCGpA4goniQXF7I2UqEOy744ZFS4LMzNrYk,18261
+ lfx/schema/message.py,sha256=U4vtgkC6lNciJbfwtrIquyB3-UdPieHAjuegGk8416E,18506
  lfx/schema/openai_responses_schemas.py,sha256=drMCAlliefHfGRojBTMepPwk4DyEGh67naWvMPD10Sw,2596
  lfx/schema/properties.py,sha256=ZRY6FUDfqpc5wQ-bi-ZuUUrusF9t-pt9fQa_FNPpia0,1356
  lfx/schema/schema.py,sha256=XbIuvD64EdVljP1V32tsEL-ETXOQSFipMDaiMGzYttM,5079
@@ -703,7 +706,7 @@ lfx/services/mcp_composer/service.py,sha256=TdQoQ1VV_aATRGCYNm9MZRj_WEb45LLP4ACu
  lfx/services/settings/__init__.py,sha256=UISBvOQIqoA3a8opwJrTQp4PSTqpReY6GQ_7O6WuqJQ,65
  lfx/services/settings/auth.py,sha256=_18KZipq0udCJPq-4xCD_juhqSwAEvoCqxOTSYsNv5w,5720
  lfx/services/settings/base.py,sha256=Cohox-JLOcrGXr1-hvJXtm8K1I3Fw9PH9JufuT5u4rA,27874
- lfx/services/settings/constants.py,sha256=ZBJolZ4kx0ZoYp2BDyHkgDFgaXEQAH-ZcLqgunv_MqQ,908
+ lfx/services/settings/constants.py,sha256=QC3FwiNVBNzwHCFRkjrNDAaTT0gRIbYJsVZyAJAjyFk,928
  lfx/services/settings/factory.py,sha256=NezZ6TE_xP955B9l9pI6ONNyoylrHPfUZN8arvLVRXg,615
  lfx/services/settings/feature_flags.py,sha256=HGuDGgfOBIDtuEiEVTgoWHxKqX2vuVBRgsqdX_4D9kg,205
  lfx/services/settings/service.py,sha256=af2L45QAfp2YWh5T59FJfGhw1wF_bVniNRRKeFwy2Xs,1001
@@ -746,7 +749,7 @@ lfx/utils/schemas.py,sha256=NbOtVQBrn4d0BAu-0H_eCTZI2CXkKZlRY37XCSmuJwc,3865
  lfx/utils/util.py,sha256=Ww85wbr1-vjh2pXVtmTqoUVr6MXAW8S7eDx_Ys6HpE8,20696
  lfx/utils/util_strings.py,sha256=nU_IcdphNaj6bAPbjeL-c1cInQPfTBit8mp5Y57lwQk,1686
  lfx/utils/version.py,sha256=cHpbO0OJD2JQAvVaTH_6ibYeFbHJV0QDHs_YXXZ-bT8,671
- lfx_nightly-0.1.12.dev41.dist-info/METADATA,sha256=3os2O6_b3b1S4rRuNOmExc3S6GRCHg3VSaIiUBwS6Xs,8290
- lfx_nightly-0.1.12.dev41.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- lfx_nightly-0.1.12.dev41.dist-info/entry_points.txt,sha256=1724p3RHDQRT2CKx_QRzEIa7sFuSVO0Ux70YfXfoMT4,42
- lfx_nightly-0.1.12.dev41.dist-info/RECORD,,
+ lfx_nightly-0.1.13.dev0.dist-info/METADATA,sha256=GpRyvjKkhriS_eIdZ8qp5c--og7GrrXlWDIddu9ajnI,8289
+ lfx_nightly-0.1.13.dev0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ lfx_nightly-0.1.13.dev0.dist-info/entry_points.txt,sha256=1724p3RHDQRT2CKx_QRzEIa7sFuSVO0Ux70YfXfoMT4,42
+ lfx_nightly-0.1.13.dev0.dist-info/RECORD,,