letta-nightly 0.13.0.dev20251030104218__py3-none-any.whl → 0.13.1.dev20251031234110__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of letta-nightly might be problematic.

Files changed (101)
  1. letta/__init__.py +1 -1
  2. letta/adapters/simple_llm_stream_adapter.py +1 -0
  3. letta/agents/letta_agent_v2.py +8 -0
  4. letta/agents/letta_agent_v3.py +120 -27
  5. letta/agents/temporal/activities/__init__.py +25 -0
  6. letta/agents/temporal/activities/create_messages.py +26 -0
  7. letta/agents/temporal/activities/create_step.py +57 -0
  8. letta/agents/temporal/activities/example_activity.py +9 -0
  9. letta/agents/temporal/activities/execute_tool.py +130 -0
  10. letta/agents/temporal/activities/llm_request.py +114 -0
  11. letta/agents/temporal/activities/prepare_messages.py +27 -0
  12. letta/agents/temporal/activities/refresh_context.py +160 -0
  13. letta/agents/temporal/activities/summarize_conversation_history.py +77 -0
  14. letta/agents/temporal/activities/update_message_ids.py +25 -0
  15. letta/agents/temporal/activities/update_run.py +43 -0
  16. letta/agents/temporal/constants.py +59 -0
  17. letta/agents/temporal/temporal_agent_workflow.py +704 -0
  18. letta/agents/temporal/types.py +275 -0
  19. letta/constants.py +8 -0
  20. letta/errors.py +4 -0
  21. letta/functions/function_sets/base.py +0 -11
  22. letta/groups/helpers.py +7 -1
  23. letta/groups/sleeptime_multi_agent_v4.py +4 -3
  24. letta/interfaces/anthropic_streaming_interface.py +0 -1
  25. letta/interfaces/openai_streaming_interface.py +103 -100
  26. letta/llm_api/anthropic_client.py +57 -12
  27. letta/llm_api/bedrock_client.py +1 -0
  28. letta/llm_api/deepseek_client.py +3 -2
  29. letta/llm_api/google_vertex_client.py +1 -0
  30. letta/llm_api/groq_client.py +1 -0
  31. letta/llm_api/llm_client_base.py +15 -1
  32. letta/llm_api/openai.py +2 -2
  33. letta/llm_api/openai_client.py +17 -3
  34. letta/llm_api/xai_client.py +1 -0
  35. letta/orm/organization.py +4 -0
  36. letta/orm/sqlalchemy_base.py +7 -0
  37. letta/otel/tracing.py +131 -4
  38. letta/schemas/agent_file.py +10 -10
  39. letta/schemas/block.py +22 -3
  40. letta/schemas/enums.py +21 -0
  41. letta/schemas/environment_variables.py +3 -2
  42. letta/schemas/group.py +3 -3
  43. letta/schemas/letta_response.py +36 -4
  44. letta/schemas/llm_batch_job.py +3 -3
  45. letta/schemas/llm_config.py +27 -3
  46. letta/schemas/mcp.py +3 -2
  47. letta/schemas/mcp_server.py +3 -2
  48. letta/schemas/message.py +167 -49
  49. letta/schemas/organization.py +2 -1
  50. letta/schemas/passage.py +2 -1
  51. letta/schemas/provider_trace.py +2 -1
  52. letta/schemas/providers/openrouter.py +1 -2
  53. letta/schemas/run_metrics.py +2 -1
  54. letta/schemas/sandbox_config.py +3 -1
  55. letta/schemas/step_metrics.py +2 -1
  56. letta/schemas/tool_rule.py +2 -2
  57. letta/schemas/user.py +2 -1
  58. letta/server/rest_api/app.py +5 -1
  59. letta/server/rest_api/routers/v1/__init__.py +4 -0
  60. letta/server/rest_api/routers/v1/agents.py +71 -9
  61. letta/server/rest_api/routers/v1/blocks.py +7 -7
  62. letta/server/rest_api/routers/v1/groups.py +40 -0
  63. letta/server/rest_api/routers/v1/identities.py +2 -2
  64. letta/server/rest_api/routers/v1/internal_agents.py +31 -0
  65. letta/server/rest_api/routers/v1/internal_blocks.py +177 -0
  66. letta/server/rest_api/routers/v1/internal_runs.py +25 -1
  67. letta/server/rest_api/routers/v1/runs.py +2 -22
  68. letta/server/rest_api/routers/v1/tools.py +10 -0
  69. letta/server/server.py +5 -2
  70. letta/services/agent_manager.py +4 -4
  71. letta/services/archive_manager.py +16 -0
  72. letta/services/group_manager.py +44 -0
  73. letta/services/helpers/run_manager_helper.py +2 -2
  74. letta/services/lettuce/lettuce_client.py +148 -0
  75. letta/services/mcp/base_client.py +9 -3
  76. letta/services/run_manager.py +148 -37
  77. letta/services/source_manager.py +91 -3
  78. letta/services/step_manager.py +2 -3
  79. letta/services/streaming_service.py +52 -13
  80. letta/services/summarizer/summarizer.py +28 -2
  81. letta/services/tool_executor/builtin_tool_executor.py +1 -1
  82. letta/services/tool_executor/core_tool_executor.py +2 -117
  83. letta/services/tool_schema_generator.py +2 -2
  84. letta/validators.py +21 -0
  85. {letta_nightly-0.13.0.dev20251030104218.dist-info → letta_nightly-0.13.1.dev20251031234110.dist-info}/METADATA +1 -1
  86. {letta_nightly-0.13.0.dev20251030104218.dist-info → letta_nightly-0.13.1.dev20251031234110.dist-info}/RECORD +89 -84
  87. letta/agent.py +0 -1758
  88. letta/cli/cli_load.py +0 -16
  89. letta/client/__init__.py +0 -0
  90. letta/client/streaming.py +0 -95
  91. letta/client/utils.py +0 -78
  92. letta/functions/async_composio_toolset.py +0 -109
  93. letta/functions/composio_helpers.py +0 -96
  94. letta/helpers/composio_helpers.py +0 -38
  95. letta/orm/job_messages.py +0 -33
  96. letta/schemas/providers.py +0 -1617
  97. letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -132
  98. letta/services/tool_executor/composio_tool_executor.py +0 -57
  99. {letta_nightly-0.13.0.dev20251030104218.dist-info → letta_nightly-0.13.1.dev20251031234110.dist-info}/WHEEL +0 -0
  100. {letta_nightly-0.13.0.dev20251030104218.dist-info → letta_nightly-0.13.1.dev20251031234110.dist-info}/entry_points.txt +0 -0
  101. {letta_nightly-0.13.0.dev20251030104218.dist-info → letta_nightly-0.13.1.dev20251031234110.dist-info}/licenses/LICENSE +0 -0

letta/server/rest_api/routers/v1/internal_runs.py CHANGED
@@ -1,3 +1,4 @@
+from datetime import datetime
 from typing import List, Literal, Optional
 
 from fastapi import APIRouter, Depends, Query
@@ -55,13 +56,25 @@ async def list_runs(
     order: Literal["asc", "desc"] = Query(
         "desc", description="Sort order for runs by creation time. 'asc' for oldest first, 'desc' for newest first"
     ),
-    order_by: Literal["created_at"] = Query("created_at", description="Field to sort by"),
+    order_by: Literal["created_at", "duration"] = Query("created_at", description="Field to sort by"),
     active: bool = Query(False, description="Filter for active runs."),
     ascending: bool = Query(
         False,
         description="Whether to sort agents oldest to newest (True) or newest to oldest (False, default). Deprecated in favor of order field.",
         deprecated=True,
     ),
+    project_id: Optional[str] = Query(None, description="Filter runs by project ID."),
+    duration_percentile: Optional[int] = Query(
+        None, description="Filter runs by duration percentile (1-100). Returns runs slower than this percentile."
+    ),
+    duration_value: Optional[int] = Query(
+        None, description="Duration value in nanoseconds for filtering. Must be used with duration_operator."
+    ),
+    duration_operator: Optional[Literal["gt", "lt", "eq"]] = Query(
+        None, description="Comparison operator for duration filter: 'gt' (greater than), 'lt' (less than), 'eq' (equals)."
+    ),
+    start_date: Optional[datetime] = Query(None, description="Filter runs created on or after this date (ISO 8601 format)."),
+    end_date: Optional[datetime] = Query(None, description="Filter runs created on or before this date (ISO 8601 format)."),
     headers: HeaderParams = Depends(get_headers),
 ):
     """
@@ -89,6 +102,11 @@ async def list_runs(
     # Convert string statuses to RunStatus enum
     parsed_statuses = convert_statuses_to_enum(statuses)
 
+    # Create duration filter dict if both parameters provided
+    duration_filter = None
+    if duration_value is not None and duration_operator is not None:
+        duration_filter = {"value": duration_value, "operator": duration_operator}
+
     runs = await runs_manager.list_runs(
         actor=actor,
         agent_ids=agent_ids,
@@ -103,5 +121,11 @@ async def list_runs(
         step_count=step_count,
         step_count_operator=step_count_operator,
         tools_used=tools_used,
+        project_id=project_id,
+        order_by=order_by,
+        duration_percentile=duration_percentile,
+        duration_filter=duration_filter,
+        start_date=start_date,
+        end_date=end_date,
     )
     return runs
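
Note: the duration filter only takes effect when both duration_value and duration_operator are supplied; the endpoint then packs them into a {"value", "operator"} dict, as shown above. A minimal, illustrative sketch of how such a filter could be evaluated against a run's duration in nanoseconds follows (the helper is hypothetical; the actual filtering inside RunManager.list_runs is not part of this diff):

    import operator

    _OPS = {"gt": operator.gt, "lt": operator.lt, "eq": operator.eq}

    def matches_duration(run_duration_ns: int, duration_filter: dict | None) -> bool:
        """Illustrative check: does a run's duration (ns) pass the filter dict built above?"""
        if duration_filter is None:
            return True  # no filter supplied, keep the run
        compare = _OPS[duration_filter["operator"]]
        return compare(run_duration_ns, duration_filter["value"])

    # e.g. keep only runs slower than 2 seconds (2_000_000_000 ns)
    assert matches_duration(3_000_000_000, {"value": 2_000_000_000, "operator": "gt"})
    assert not matches_duration(1_500_000_000, {"value": 2_000_000_000, "operator": "gt"})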

letta/server/rest_api/routers/v1/runs.py CHANGED
@@ -23,7 +23,6 @@ from letta.server.rest_api.streaming_response import (
     cancellation_aware_stream_wrapper,
 )
 from letta.server.server import SyncServer
-from letta.services.lettuce import LettuceClient
 from letta.services.run_manager import RunManager
 from letta.settings import settings
 
@@ -150,26 +149,7 @@ async def retrieve_run(
     """
     actor = await server.user_manager.get_actor_or_default_async(actor_id=headers.actor_id)
     runs_manager = RunManager()
-
-    run = await runs_manager.get_run_by_id(run_id=run_id, actor=actor)
-
-    use_lettuce = run.metadata and run.metadata.get("lettuce")
-    if use_lettuce and run.status not in [RunStatus.completed, RunStatus.failed, RunStatus.cancelled]:
-        lettuce_client = await LettuceClient.create()
-        status = await lettuce_client.get_status(run_id=run_id)
-
-        # Map the status to our enum
-        run_status = run.status
-        if status == "RUNNING":
-            run_status = RunStatus.running
-        elif status == "COMPLETED":
-            run_status = RunStatus.completed
-        elif status == "FAILED":
-            run_status = RunStatus.failed
-        elif status == "CANCELLED":
-            run_status = RunStatus.cancelled
-        run.status = run_status
-    return run
+    return await runs_manager.get_run_with_status(run_id=run_id, actor=actor)
 
 
 RunMessagesResponse = Annotated[
@@ -276,7 +256,7 @@ async def delete_run(
     """
     actor = await server.user_manager.get_actor_or_default_async(actor_id=headers.actor_id)
     runs_manager = RunManager()
-    return await runs_manager.delete_run_by_id(run_id=run_id, actor=actor)
+    return await runs_manager.delete_run(run_id=run_id, actor=actor)
 
 
 @router.post(

letta/server/rest_api/routers/v1/tools.py CHANGED
@@ -867,12 +867,22 @@ async def generate_tool_from_prompt(
     response = llm_client.convert_response_to_chat_completion(response_data, input_messages, llm_config)
     output = json.loads(response.choices[0].message.tool_calls[0].function.arguments)
     pip_requirements = [PipRequirement(name=k, version=v or None) for k, v in json.loads(output["pip_requirements_json"]).items()]
+
+    # Derive JSON schema from the generated source code
+    try:
+        json_schema = derive_openai_json_schema(source_code=output["raw_source_code"])
+    except Exception as e:
+        raise LettaInvalidArgumentError(
+            message=f"Failed to generate JSON schema for tool '{request.tool_name}': {e}", argument_name="tool_name"
+        )
+
     return GenerateToolOutput(
         tool=Tool(
             name=request.tool_name,
             source_type="python",
             source_code=output["raw_source_code"],
             pip_requirements=pip_requirements,
+            json_schema=json_schema,
        ),
        sample_args=json.loads(output["sample_args_json"]),
        response=response.choices[0].message.content,
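
Note: the generated raw_source_code is now run through derive_openai_json_schema and the result is attached to the returned Tool as json_schema. The schema format itself is not shown in this diff; for a simple generated function, an OpenAI-style function schema along the following lines would be expected (the function and field values below are illustrative assumptions):

    # Hypothetical generated source:
    #     def get_weather(city: str) -> str:
    #         """Return the current weather for a city."""
    #         ...
    # A derived OpenAI-style function schema would look roughly like:
    expected_json_schema = {
        "name": "get_weather",
        "description": "Return the current weather for a city.",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    }
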
letta/server/server.py CHANGED
@@ -304,9 +304,8 @@ class SyncServer(object):
         if model_settings.openrouter_api_key:
             self._enabled_providers.append(
                 OpenRouterProvider(
-                    name="openrouter",
+                    name=model_settings.openrouter_handle_base if model_settings.openrouter_handle_base else "openrouter",
                     api_key=model_settings.openrouter_api_key,
-                    handle_base=model_settings.openrouter_handle_base,
                 )
             )
 
@@ -430,6 +429,10 @@ class SyncServer(object):
         log_event(name="start get_cached_llm_config", attributes=config_params)
         request.llm_config = await self.get_cached_llm_config_async(actor=actor, **config_params)
         log_event(name="end get_cached_llm_config", attributes=config_params)
+        if request.model and isinstance(request.model, str):
+            assert request.llm_config.handle == request.model, (
+                f"LLM config handle {request.llm_config.handle} does not match request handle {request.model}"
+            )
 
         if request.reasoning is None:
             request.reasoning = request.llm_config.enable_reasoner or request.llm_config.put_inner_thoughts_in_kwargs

letta/services/agent_manager.py CHANGED
@@ -411,9 +411,6 @@ class AgentManager:
         if agent_create.include_multi_agent_tools:
             tool_names |= calculate_multi_agent_tools()
 
-        # take out the deprecated tool names
-        tool_names.difference_update(set(DEPRECATED_LETTA_TOOLS))
-
         supplied_ids = set(agent_create.tool_ids or [])
 
         source_ids = agent_create.source_ids or []
@@ -1568,7 +1565,7 @@ class AgentManager:
            actor: User performing the action
 
        Raises:
-            ValueError: If either agent or source doesn't exist
+            NoResultFound: If either agent or source doesn't exist or actor lacks permission to access them
            IntegrityError: If the source is already attached to the agent
        """
 
@@ -1576,6 +1573,9 @@ class AgentManager:
            # Verify both agent and source exist and user has permission to access them
            agent = await AgentModel.read_async(db_session=session, identifier=agent_id, actor=actor)
 
+            # Verify the actor has permission to access the source
+            await SourceModel.read_async(db_session=session, identifier=source_id, actor=actor)
+
            # The _process_relationship helper already handles duplicate checking via unique constraint
            await _process_relationship_async(
                session=session,

letta/services/archive_manager.py CHANGED
@@ -14,6 +14,7 @@ from letta.schemas.embedding_config import EmbeddingConfig
 from letta.schemas.enums import PrimitiveType, VectorDBProvider
 from letta.schemas.user import User as PydanticUser
 from letta.server.db import db_registry
+from letta.services.helpers.agent_manager_helper import validate_agent_exists_async
 from letta.settings import DatabaseChoice, settings
 from letta.utils import enforce_types
 from letta.validators import raise_on_invalid_id
@@ -130,6 +131,9 @@ class ArchiveManager:
        ]
 
        async with db_registry.async_session() as session:
+            if agent_id:
+                await validate_agent_exists_async(session, agent_id, actor)
+
            archives = await ArchiveModel.list_async(
                db_session=session,
                before=before,
@@ -157,6 +161,12 @@ class ArchiveManager:
    ) -> None:
        """Attach an agent to an archive."""
        async with db_registry.async_session() as session:
+            # Verify agent exists and user has access to it
+            await validate_agent_exists_async(session, agent_id, actor)
+
+            # Verify archive exists and user has access to it
+            await ArchiveModel.read_async(db_session=session, identifier=archive_id, actor=actor)
+
            # Check if relationship already exists
            existing = await session.execute(
                select(ArchivesAgents).where(
@@ -194,6 +204,12 @@ class ArchiveManager:
    ) -> None:
        """Detach an agent from an archive."""
        async with db_registry.async_session() as session:
+            # Verify agent exists and user has access to it
+            await validate_agent_exists_async(session, agent_id, actor)
+
+            # Verify archive exists and user has access to it
+            await ArchiveModel.read_async(db_session=session, identifier=archive_id, actor=actor)
+
            # Delete the relationship directly
            result = await session.execute(
                delete(ArchivesAgents).where(

letta/services/group_manager.py CHANGED
@@ -5,8 +5,10 @@ from sqlalchemy import and_, asc, delete, desc, or_, select
 from sqlalchemy.orm import Session
 
 from letta.orm.agent import Agent as AgentModel
+from letta.orm.block import Block
 from letta.orm.errors import NoResultFound
 from letta.orm.group import Group as GroupModel
+from letta.orm.groups_blocks import GroupsBlocks
 from letta.orm.message import Message as MessageModel
 from letta.otel.tracing import trace_method
 from letta.schemas.enums import PrimitiveType
@@ -410,6 +412,48 @@ class GroupManager:
            for block in blocks:
                session.add(BlocksAgents(agent_id=manager_agent.id, block_id=block.id, block_label=block.label))
 
+    @enforce_types
+    @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
+    async def attach_block_async(self, group_id: str, block_id: str, actor: PydanticUser) -> None:
+        """Attach a block to a group."""
+        async with db_registry.async_session() as session:
+            # Verify group exists and user has access
+            await GroupModel.read_async(db_session=session, identifier=group_id, actor=actor)
+
+            # Verify block exists AND user has access to it
+            await Block.read_async(db_session=session, identifier=block_id, actor=actor)
+
+            # Check if block is already attached to the group
+            check_query = select(GroupsBlocks).where(and_(GroupsBlocks.group_id == group_id, GroupsBlocks.block_id == block_id))
+            result = await session.execute(check_query)
+            if result.scalar_one_or_none():
+                # Block already attached, no-op
+                return
+
+            # Add block to group
+            session.add(GroupsBlocks(group_id=group_id, block_id=block_id))
+            await session.commit()
+
+    @enforce_types
+    @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
+    async def detach_block_async(self, group_id: str, block_id: str, actor: PydanticUser) -> None:
+        """Detach a block from a group."""
+        async with db_registry.async_session() as session:
+            # Verify group exists and user has access
+            await GroupModel.read_async(db_session=session, identifier=group_id, actor=actor)
+
+            # Verify block exists AND user has access to it
+            await Block.read_async(db_session=session, identifier=block_id, actor=actor)
+
+            # Remove block from group
+            delete_group_block = delete(GroupsBlocks).where(and_(GroupsBlocks.group_id == group_id, GroupsBlocks.block_id == block_id))
+            await session.execute(delete_group_block)
+            await session.commit()
+
    @staticmethod
    def ensure_buffer_length_range_valid(
        max_value: Optional[int],
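
Note: a minimal usage sketch for the two new GroupManager methods above. The ID values are placeholders (the raise_on_invalid_id decorators appear to expect group-/block-prefixed IDs), and the surrounding async context and actor are assumed to be provided by the caller:

    from letta.services.group_manager import GroupManager

    async def share_block(actor, group_id: str, block_id: str) -> None:
        manager = GroupManager()
        # Attaching twice is a no-op: the method returns early if the pair already exists.
        await manager.attach_block_async(group_id=group_id, block_id=block_id, actor=actor)
        # Detaching simply deletes the GroupsBlocks row if present.
        await manager.detach_block_async(group_id=group_id, block_id=block_id, actor=actor)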

letta/services/helpers/run_manager_helper.py CHANGED
@@ -37,7 +37,7 @@ async def _apply_pagination_async(
                RunModel.id,
                after_sort_value,
                after_id,
-                forward=ascending,
+                forward=not ascending,
                nulls_last=sort_nulls_last,
            )
        )
@@ -55,7 +55,7 @@ async def _apply_pagination_async(
                RunModel.id,
                before_sort_value,
                before_id,
-                forward=not ascending,
+                forward=ascending,
                nulls_last=sort_nulls_last,
            )
        )
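
Note: the fix above swaps the direction flag used for the "after" and "before" cursors. Assuming the forward argument selects whether the keyset comparison follows or opposes the listing's sort order, the underlying idea is that in a descending listing the page "after" a cursor contains the rows with smaller sort values. A standalone sketch of that principle (not Letta's helper):

    def page_after(sorted_rows: list[int], cursor: int, ascending: bool) -> list[int]:
        """Rows that come after the cursor in the listing's own sort order."""
        if ascending:
            return [r for r in sorted_rows if r > cursor]
        return [r for r in sorted_rows if r < cursor]

    # Descending listing: "after" cursor 4 means the smaller values that follow it.
    assert page_after([5, 4, 3, 2, 1], cursor=4, ascending=False) == [3, 2, 1]
    assert page_after([1, 2, 3, 4, 5], cursor=4, ascending=True) == [5]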

letta/services/lettuce/lettuce_client.py ADDED
@@ -0,0 +1,148 @@
+import os
+
+from temporalio.client import Client
+from temporalio.common import SearchAttributeKey, SearchAttributePair, TypedSearchAttributes
+
+from letta.agents.temporal.temporal_agent_workflow import TemporalAgentWorkflow
+from letta.agents.temporal.types import WorkflowInputParams
+from letta.constants import DEFAULT_MAX_STEPS
+from letta.schemas.agent import AgentState
+from letta.schemas.letta_message import MessageType
+from letta.schemas.message import MessageCreate
+from letta.schemas.user import User
+
+
+class LettuceClient:
+    """Client class for Lettuce service."""
+
+    def __init__(self):
+        """Initialize the LettuceClient."""
+        self.temporal_tls = os.getenv("LETTA_TEMPORAL_TLS", "true").lower() in (
+            "true",
+            "1",
+        )
+        self.temporal_api_key = os.getenv("LETTA_TEMPORAL_API_KEY")
+        self.temporal_namespace = os.getenv("LETTA_TEMPORAL_NAMESPACE")
+        self.temporal_endpoint = os.getenv("LETTA_TEMPORAL_ENDPOINT")
+        self.temporal_task_queue = os.getenv("LETTA_TEMPORAL_TASK_QUEUE", "agent_loop_async_task_queue")
+        self.temporal_skip_custom_sa = os.getenv("LETTA_TEMPORAL_SKIP_CUSTOM_SA")
+        self.client: Client | None = None
+
+    @classmethod
+    async def create(cls) -> "LettuceClient":
+        """
+        Asynchronously creates and connects the temporal client.
+
+        Returns:
+            LettuceClient: The created LettuceClient instance.
+        """
+        instance = cls()
+        if instance.temporal_api_key and instance.temporal_endpoint:
+            instance.client = await Client.connect(
+                instance.temporal_endpoint,
+                namespace=instance.temporal_namespace,
+                api_key=instance.temporal_api_key,
+                tls=instance.temporal_tls,
+            )
+        return instance
+
+    def get_client(self) -> Client | None:
+        """
+        Get the temporal client, if connected.
+
+        Returns:
+            Client | None: The temporal client, if connected.
+        """
+        return self.client
+
+    async def get_status(self, run_id: str) -> str | None:
+        """
+        Get the status of a run.
+
+        Args:
+            run_id (str): The ID of the run.
+
+        Returns:
+            str | None: The status of the run or None if not available.
+        """
+        if not self.client:
+            return None
+
+        handle = self.client.get_workflow_handle(run_id)
+        desc = await handle.describe()
+        return desc.status.name
+
+    async def cancel(self, run_id: str) -> str | None:
+        """
+        Cancel a run.
+
+        Args:
+            run_id (str): The ID of the run to cancel.
+
+        Returns:
+            str | None: The ID of the canceled run or None if not available.
+        """
+        if not self.client:
+            return None
+
+        await self.client.cancel_workflow(run_id)
+
+    async def step(
+        self,
+        agent_state: AgentState,
+        actor: User,
+        input_messages: list[MessageCreate],
+        max_steps: int = DEFAULT_MAX_STEPS,
+        run_id: str | None = None,
+        use_assistant_message: bool = True,
+        include_return_message_types: list[MessageType] | None = None,
+        request_start_timestamp_ns: int | None = None,
+    ) -> str | None:
+        """
+        Execute the agent loop on temporal.
+
+        Args:
+            agent_state (AgentState): The state of the agent.
+            actor (User): The actor.
+            input_messages (list[MessageCreate]): The input messages.
+            max_steps (int, optional): The maximum number of steps. Defaults to DEFAULT_MAX_STEPS.
+            run_id (str | None, optional): The ID of the run. Defaults to None.
+            use_assistant_message (bool, optional): Whether to use the assistant message. Defaults to True.
+            include_return_message_types (list[MessageType] | None, optional): The message types to include in the return. Defaults to None.
+            request_start_timestamp_ns (int | None, optional): The start timestamp of the request. Defaults to None.
+
+        Returns:
+            str | None: The ID of the run or None if client is not available.
+        """
+        if not self.client:
+            return None
+
+        workflow_input = WorkflowInputParams(
+            agent_state=agent_state,
+            messages=input_messages,
+            actor=actor,
+            max_steps=max_steps,
+            run_id=run_id,
+        )
+
+        #
+        custom_search_attributes = []
+        if not self.temporal_skip_custom_sa:
+            organization_id = actor.organization_id
+            user_id = actor.id
+            agent_id = agent_state.id
+            custom_search_attributes = [
+                SearchAttributePair(SearchAttributeKey.for_keyword("OrganizationId"), organization_id),
+                SearchAttributePair(SearchAttributeKey.for_keyword("AgentId"), agent_id),
+                SearchAttributePair(SearchAttributeKey.for_keyword("UserId"), user_id),
+            ]
+
+        await self.client.start_workflow(
+            TemporalAgentWorkflow.run,
+            workflow_input,
+            id=run_id,
+            task_queue=self.temporal_task_queue,
+            search_attributes=TypedSearchAttributes(custom_search_attributes),
+        )
+
+        return run_id
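
Note: a minimal usage sketch for the new LettuceClient, assuming the LETTA_TEMPORAL_* environment variables read above are configured; agent_state, actor, and run_id come from the caller, and the MessageCreate payload is illustrative:

    from letta.schemas.message import MessageCreate
    from letta.services.lettuce.lettuce_client import LettuceClient

    async def start_remote_run(agent_state, actor, run_id: str) -> str | None:
        client = await LettuceClient.create()
        if client.get_client() is None:
            return None  # Temporal not configured; the methods above also return None in this case
        started_run_id = await client.step(
            agent_state=agent_state,
            actor=actor,
            input_messages=[MessageCreate(role="user", content="hello")],
            run_id=run_id,
        )
        # The workflow can then be polled, e.g. get_status() returning "RUNNING" or "COMPLETED".
        print(await client.get_status(run_id))
        return started_run_id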

letta/services/mcp/base_client.py CHANGED
@@ -38,8 +38,8 @@ class AsyncBaseMCPClient:
            raise e
        except Exception as e:
            # MCP connection failures are often due to user misconfiguration, not system errors
-            # Log at info level to help with debugging without triggering Sentry alerts
-            logger.info(
+            # Log as warning for visibility in monitoring
+            logger.warning(
                f"Connecting to MCP server failed. Please review your server config: {self.server_config.model_dump_json(indent=4)}. Error: {str(e)}"
            )
            if hasattr(self.server_config, "server_url") and self.server_config.server_url:
@@ -78,7 +78,13 @@ class AsyncBaseMCPClient:
 
    async def execute_tool(self, tool_name: str, tool_args: dict) -> Tuple[str, bool]:
        self._check_initialized()
-        result = await self.session.call_tool(tool_name, tool_args)
+        try:
+            result = await self.session.call_tool(tool_name, tool_args)
+        except Exception as e:
+            if e.__class__.__name__ == "McpError":
+                logger.warning(f"MCP tool '{tool_name}' execution failed: {str(e)}")
+            raise
+
        parsed_content = []
        for content_piece in result.content:
            if isinstance(content_piece, TextContent):