openhands-sdk 1.5.0__py3-none-any.whl → 1.7.2__py3-none-any.whl

This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in the public registry.
Files changed (56)
  1. openhands/sdk/__init__.py +9 -1
  2. openhands/sdk/agent/agent.py +35 -12
  3. openhands/sdk/agent/base.py +53 -7
  4. openhands/sdk/agent/prompts/model_specific/anthropic_claude.j2 +3 -0
  5. openhands/sdk/agent/prompts/model_specific/google_gemini.j2 +1 -0
  6. openhands/sdk/agent/prompts/model_specific/openai_gpt/gpt-5-codex.j2 +2 -0
  7. openhands/sdk/agent/prompts/model_specific/openai_gpt/gpt-5.j2 +3 -0
  8. openhands/sdk/agent/prompts/self_documentation.j2 +15 -0
  9. openhands/sdk/agent/prompts/system_prompt.j2 +29 -1
  10. openhands/sdk/agent/utils.py +18 -4
  11. openhands/sdk/context/__init__.py +2 -0
  12. openhands/sdk/context/agent_context.py +42 -10
  13. openhands/sdk/context/condenser/base.py +11 -6
  14. openhands/sdk/context/condenser/llm_summarizing_condenser.py +169 -20
  15. openhands/sdk/context/condenser/no_op_condenser.py +2 -1
  16. openhands/sdk/context/condenser/pipeline_condenser.py +10 -9
  17. openhands/sdk/context/condenser/utils.py +149 -0
  18. openhands/sdk/context/prompts/prompt.py +40 -2
  19. openhands/sdk/context/prompts/templates/system_message_suffix.j2 +3 -3
  20. openhands/sdk/context/skills/__init__.py +2 -0
  21. openhands/sdk/context/skills/skill.py +152 -1
  22. openhands/sdk/context/view.py +287 -27
  23. openhands/sdk/conversation/base.py +17 -0
  24. openhands/sdk/conversation/conversation.py +19 -0
  25. openhands/sdk/conversation/exceptions.py +29 -4
  26. openhands/sdk/conversation/impl/local_conversation.py +126 -9
  27. openhands/sdk/conversation/impl/remote_conversation.py +152 -3
  28. openhands/sdk/conversation/state.py +42 -1
  29. openhands/sdk/conversation/stuck_detector.py +81 -45
  30. openhands/sdk/conversation/types.py +30 -0
  31. openhands/sdk/event/llm_convertible/system.py +16 -20
  32. openhands/sdk/hooks/__init__.py +30 -0
  33. openhands/sdk/hooks/config.py +180 -0
  34. openhands/sdk/hooks/conversation_hooks.py +227 -0
  35. openhands/sdk/hooks/executor.py +155 -0
  36. openhands/sdk/hooks/manager.py +170 -0
  37. openhands/sdk/hooks/types.py +40 -0
  38. openhands/sdk/io/cache.py +85 -0
  39. openhands/sdk/io/local.py +39 -2
  40. openhands/sdk/llm/llm.py +3 -2
  41. openhands/sdk/llm/message.py +4 -3
  42. openhands/sdk/llm/mixins/fn_call_converter.py +61 -16
  43. openhands/sdk/llm/mixins/non_native_fc.py +5 -1
  44. openhands/sdk/llm/utils/model_features.py +64 -24
  45. openhands/sdk/llm/utils/model_prompt_spec.py +98 -0
  46. openhands/sdk/llm/utils/verified_models.py +6 -4
  47. openhands/sdk/logger/logger.py +1 -1
  48. openhands/sdk/tool/schema.py +10 -0
  49. openhands/sdk/tool/tool.py +2 -2
  50. openhands/sdk/utils/async_executor.py +76 -67
  51. openhands/sdk/utils/models.py +1 -1
  52. openhands/sdk/utils/paging.py +63 -0
  53. {openhands_sdk-1.5.0.dist-info → openhands_sdk-1.7.2.dist-info}/METADATA +3 -3
  54. {openhands_sdk-1.5.0.dist-info → openhands_sdk-1.7.2.dist-info}/RECORD +56 -41
  55. {openhands_sdk-1.5.0.dist-info → openhands_sdk-1.7.2.dist-info}/WHEEL +0 -0
  56. {openhands_sdk-1.5.0.dist-info → openhands_sdk-1.7.2.dist-info}/top_level.txt +0 -0
openhands/sdk/conversation/conversation.py

@@ -1,3 +1,4 @@
+ from collections.abc import Mapping
  from pathlib import Path
  from typing import TYPE_CHECKING, Self, overload

@@ -7,11 +8,13 @@ from openhands.sdk.conversation.types import (
  ConversationCallbackType,
  ConversationID,
  ConversationTokenCallbackType,
+ StuckDetectionThresholds,
  )
  from openhands.sdk.conversation.visualizer import (
  ConversationVisualizerBase,
  DefaultConversationVisualizer,
  )
+ from openhands.sdk.hooks import HookConfig
  from openhands.sdk.logger import get_logger
  from openhands.sdk.secret import SecretValue
  from openhands.sdk.workspace import LocalWorkspace, RemoteWorkspace
@@ -54,8 +57,12 @@ class Conversation:
  conversation_id: ConversationID | None = None,
  callbacks: list[ConversationCallbackType] | None = None,
  token_callbacks: list[ConversationTokenCallbackType] | None = None,
+ hook_config: HookConfig | None = None,
  max_iteration_per_run: int = 500,
  stuck_detection: bool = True,
+ stuck_detection_thresholds: (
+ StuckDetectionThresholds | Mapping[str, int] | None
+ ) = None,
  visualizer: (
  type[ConversationVisualizerBase] | ConversationVisualizerBase | None
  ) = DefaultConversationVisualizer,
@@ -71,8 +78,12 @@ class Conversation:
  conversation_id: ConversationID | None = None,
  callbacks: list[ConversationCallbackType] | None = None,
  token_callbacks: list[ConversationTokenCallbackType] | None = None,
+ hook_config: HookConfig | None = None,
  max_iteration_per_run: int = 500,
  stuck_detection: bool = True,
+ stuck_detection_thresholds: (
+ StuckDetectionThresholds | Mapping[str, int] | None
+ ) = None,
  visualizer: (
  type[ConversationVisualizerBase] | ConversationVisualizerBase | None
  ) = DefaultConversationVisualizer,
@@ -88,8 +99,12 @@ class Conversation:
  conversation_id: ConversationID | None = None,
  callbacks: list[ConversationCallbackType] | None = None,
  token_callbacks: list[ConversationTokenCallbackType] | None = None,
+ hook_config: HookConfig | None = None,
  max_iteration_per_run: int = 500,
  stuck_detection: bool = True,
+ stuck_detection_thresholds: (
+ StuckDetectionThresholds | Mapping[str, int] | None
+ ) = None,
  visualizer: (
  type[ConversationVisualizerBase] | ConversationVisualizerBase | None
  ) = DefaultConversationVisualizer,
@@ -112,8 +127,10 @@ class Conversation:
  conversation_id=conversation_id,
  callbacks=callbacks,
  token_callbacks=token_callbacks,
+ hook_config=hook_config,
  max_iteration_per_run=max_iteration_per_run,
  stuck_detection=stuck_detection,
+ stuck_detection_thresholds=stuck_detection_thresholds,
  visualizer=visualizer,
  workspace=workspace,
  secrets=secrets,
@@ -124,8 +141,10 @@ class Conversation:
  conversation_id=conversation_id,
  callbacks=callbacks,
  token_callbacks=token_callbacks,
+ hook_config=hook_config,
  max_iteration_per_run=max_iteration_per_run,
  stuck_detection=stuck_detection,
+ stuck_detection_thresholds=stuck_detection_thresholds,
  visualizer=visualizer,
  workspace=workspace,
  persistence_dir=persistence_dir,
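
The Conversation overloads above all gain the same two keyword arguments: hook_config and stuck_detection_thresholds. A minimal usage sketch (not from the package itself), assuming `agent` and `hook_config` are constructed elsewhere and that a plain path is accepted for `workspace`; the threshold values are illustrative placeholders:

from openhands.sdk import Conversation

conversation = Conversation(
    agent=agent,                  # assumed: an already-configured Agent
    workspace="./workspace",      # assumed: a local working directory
    hook_config=hook_config,      # new: optional HookConfig for session hooks
    stuck_detection=True,
    stuck_detection_thresholds={  # new: plain dict or StuckDetectionThresholds
        "action_observation": 4,
        "action_error": 4,
        "monologue": 10,
        "alternating_pattern": 6,
    },
    max_iteration_per_run=500,
)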
openhands/sdk/conversation/exceptions.py

@@ -1,25 +1,50 @@
  from openhands.sdk.conversation.types import ConversationID


+ ISSUE_URL = "https://github.com/OpenHands/software-agent-sdk/issues/new"
+
+
  class ConversationRunError(RuntimeError):
  """Raised when a conversation run fails.

- Carries the conversation_id to make resuming/debugging easier while
- preserving the original exception via exception chaining.
+ Carries the conversation_id and persistence_dir to make resuming/debugging
+ easier while preserving the original exception via exception chaining.
  """

  conversation_id: ConversationID
+ persistence_dir: str | None
  original_exception: BaseException

  def __init__(
  self,
  conversation_id: ConversationID,
  original_exception: BaseException,
+ persistence_dir: str | None = None,
  message: str | None = None,
  ) -> None:
  self.conversation_id = conversation_id
+ self.persistence_dir = persistence_dir
  self.original_exception = original_exception
- default_msg = (
- f"Conversation run failed for id={conversation_id}: {original_exception}"
+ default_msg = self._build_error_message(
+ conversation_id, original_exception, persistence_dir
  )
  super().__init__(message or default_msg)
+
+ @staticmethod
+ def _build_error_message(
+ conversation_id: ConversationID,
+ original_exception: BaseException,
+ persistence_dir: str | None,
+ ) -> str:
+ """Build a detailed error message with debugging information."""
+ lines = [
+ f"Conversation run failed for id={conversation_id}: {original_exception}",
+ ]
+
+ if persistence_dir:
+ lines.append(f"\nConversation logs are stored at: {persistence_dir}")
+ lines.append("\nTo help debug this issue, please file a bug report at:")
+ lines.append(f" {ISSUE_URL}")
+ lines.append("and attach the conversation logs from the directory above.")
+
+ return "\n".join(lines)
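
ConversationRunError now carries persistence_dir in addition to conversation_id, so failures can point at the on-disk logs. A rough consumer-side sketch, assuming `conversation` was created with a persistence_dir:

from openhands.sdk.conversation.exceptions import ConversationRunError

try:
    conversation.run()
except ConversationRunError as err:
    print(f"Run failed for conversation {err.conversation_id}")
    if err.persistence_dir:
        print(f"Attach the logs under {err.persistence_dir} to the bug report")
    raise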
openhands/sdk/conversation/impl/local_conversation.py

@@ -18,17 +18,20 @@ from openhands.sdk.conversation.types import (
  ConversationCallbackType,
  ConversationID,
  ConversationTokenCallbackType,
+ StuckDetectionThresholds,
  )
  from openhands.sdk.conversation.visualizer import (
  ConversationVisualizerBase,
  DefaultConversationVisualizer,
  )
  from openhands.sdk.event import (
+ CondensationRequest,
  MessageEvent,
  PauseEvent,
  UserRejectObservation,
  )
  from openhands.sdk.event.conversation_error import ConversationErrorEvent
+ from openhands.sdk.hooks import HookConfig, HookEventProcessor, create_hook_callback
  from openhands.sdk.llm import LLM, Message, TextContent
  from openhands.sdk.llm.llm_registry import LLMRegistry
  from openhands.sdk.logger import get_logger
@@ -54,6 +57,7 @@ class LocalConversation(BaseConversation):
  _stuck_detector: StuckDetector | None
  llm_registry: LLMRegistry
  _cleanup_initiated: bool
+ _hook_processor: HookEventProcessor | None

  def __init__(
  self,
@@ -63,8 +67,12 @@ class LocalConversation(BaseConversation):
  conversation_id: ConversationID | None = None,
  callbacks: list[ConversationCallbackType] | None = None,
  token_callbacks: list[ConversationTokenCallbackType] | None = None,
+ hook_config: HookConfig | None = None,
  max_iteration_per_run: int = 500,
  stuck_detection: bool = True,
+ stuck_detection_thresholds: (
+ StuckDetectionThresholds | Mapping[str, int] | None
+ ) = None,
  visualizer: (
  type[ConversationVisualizerBase] | ConversationVisualizerBase | None
  ) = DefaultConversationVisualizer,
@@ -84,6 +92,7 @@ class LocalConversation(BaseConversation):
  suffix their persistent filestore with this ID.
  callbacks: Optional list of callback functions to handle events
  token_callbacks: Optional list of callbacks invoked for streaming deltas
+ hook_config: Optional hook configuration to auto-wire session hooks
  max_iteration_per_run: Maximum number of iterations per run
  visualizer: Visualization configuration. Can be:
  - ConversationVisualizerBase subclass: Class to instantiate
@@ -91,6 +100,11 @@ class LocalConversation(BaseConversation):
  - ConversationVisualizerBase instance: Use custom visualizer
  - None: No visualization
  stuck_detection: Whether to enable stuck detection
+ stuck_detection_thresholds: Optional configuration for stuck detection
+ thresholds. Can be a StuckDetectionThresholds instance or
+ a dict with keys: 'action_observation', 'action_error',
+ 'monologue', 'alternating_pattern'. Values are integers
+ representing the number of repetitions before triggering.
  """
  super().__init__() # Initialize with span tracking
  # Mark cleanup as initiated as early as possible to avoid races or partially
@@ -126,7 +140,20 @@ class LocalConversation(BaseConversation):
  def _default_callback(e):
  self._state.events.append(e)

- composed_list = (callbacks if callbacks else []) + [_default_callback]
+ self._hook_processor = None
+ hook_callback = None
+ if hook_config is not None:
+ self._hook_processor, hook_callback = create_hook_callback(
+ hook_config=hook_config,
+ working_dir=str(self.workspace.working_dir),
+ session_id=str(desired_id),
+ )
+
+ callback_list = list(callbacks) if callbacks else []
+ if hook_callback is not None:
+ callback_list.insert(0, hook_callback)
+
+ composed_list = callback_list + [_default_callback]
  # Handle visualization configuration
  if isinstance(visualizer, ConversationVisualizerBase):
  # Use custom visualizer instance
@@ -158,7 +185,24 @@ class LocalConversation(BaseConversation):
  self.max_iteration_per_run = max_iteration_per_run

  # Initialize stuck detector
- self._stuck_detector = StuckDetector(self._state) if stuck_detection else None
+ if stuck_detection:
+ # Convert dict to StuckDetectionThresholds if needed
+ if isinstance(stuck_detection_thresholds, Mapping):
+ threshold_config = StuckDetectionThresholds(
+ **stuck_detection_thresholds
+ )
+ else:
+ threshold_config = stuck_detection_thresholds
+ self._stuck_detector = StuckDetector(
+ self._state,
+ thresholds=threshold_config,
+ )
+ else:
+ self._stuck_detector = None
+
+ if self._hook_processor is not None:
+ self._hook_processor.set_conversation_state(self._state)
+ self._hook_processor.run_session_start()

  with self._state:
  self.agent.init_state(self._state, on_event=self._on_event)
@@ -333,9 +377,24 @@ class LocalConversation(BaseConversation):
  if (
  self.state.execution_status
  == ConversationExecutionStatus.WAITING_FOR_CONFIRMATION
- or iteration >= self.max_iteration_per_run
  ):
  break
+
+ if iteration >= self.max_iteration_per_run:
+ error_msg = (
+ f"Agent reached maximum iterations limit "
+ f"({self.max_iteration_per_run})."
+ )
+ logger.error(error_msg)
+ self._state.execution_status = ConversationExecutionStatus.ERROR
+ self._on_event(
+ ConversationErrorEvent(
+ source="environment",
+ code="MaxIterationsReached",
+ detail=error_msg,
+ )
+ )
+ break
  except Exception as e:
  self._state.execution_status = ConversationExecutionStatus.ERROR
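
With this change, exhausting max_iteration_per_run no longer silently ends the run loop: the state is set to ERROR and a ConversationErrorEvent with code "MaxIterationsReached" is emitted. A hedged sketch of catching that from an event callback, assuming the event exposes the fields it is constructed with and that `agent` exists:

from openhands.sdk import Conversation
from openhands.sdk.event.conversation_error import ConversationErrorEvent

def on_event(event) -> None:
    # code/detail mirror the arguments passed to ConversationErrorEvent above
    if isinstance(event, ConversationErrorEvent) and event.code == "MaxIterationsReached":
        print(f"Run stopped early: {event.detail}")

conversation = Conversation(agent=agent, callbacks=[on_event], max_iteration_per_run=50)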
 
@@ -348,10 +407,10 @@ class LocalConversation(BaseConversation):
  )
  )

- # Re-raise with conversation id for better UX; include original traceback
- raise ConversationRunError(self._state.id, e) from e
- finally:
- self._end_observability_span()
+ # Re-raise with conversation id and persistence dir for better UX
+ raise ConversationRunError(
+ self._state.id, e, persistence_dir=self._state.persistence_dir
+ ) from e

  def set_confirmation_policy(self, policy: ConfirmationPolicyBase) -> None:
  """Set the confirmation policy and store it in conversation state."""
@@ -435,16 +494,25 @@ class LocalConversation(BaseConversation):

  def close(self) -> None:
  """Close the conversation and clean up all tool executors."""
- if self._cleanup_initiated:
+ # Use getattr for safety - object may be partially constructed
+ if getattr(self, "_cleanup_initiated", False):
  return
  self._cleanup_initiated = True
  logger.debug("Closing conversation and cleaning up tool executors")
+ hook_processor = getattr(self, "_hook_processor", None)
+ if hook_processor is not None:
+ hook_processor.run_session_end()
  try:
  self._end_observability_span()
  except AttributeError:
  # Object may be partially constructed; span fields may be missing.
  pass
- for tool in self.agent.tools_map.values():
+ try:
+ tools_map = self.agent.tools_map
+ except (AttributeError, RuntimeError):
+ # Agent not initialized or partially constructed
+ return
+ for tool in tools_map.values():
  try:
  executable_tool = tool.as_executable()
  executable_tool.executor.close()
@@ -540,6 +608,55 @@ class LocalConversation(BaseConversation):
  events=self._state.events, llm=llm_to_use, max_length=max_length
  )

+ def condense(self) -> None:
+ """Synchronously force condense the conversation history.
+
+ If the agent is currently running, `condense()` will wait for the
+ ongoing step to finish before proceeding.
+
+ Raises ValueError if no compatible condenser exists.
+ """
+
+ # Check if condenser is configured and handles condensation requests
+ if (
+ self.agent.condenser is None
+ or not self.agent.condenser.handles_condensation_requests()
+ ):
+ condenser_info = (
+ "No condenser configured"
+ if self.agent.condenser is None
+ else (
+ f"Condenser {type(self.agent.condenser).__name__} does not handle "
+ "condensation requests"
+ )
+ )
+ raise ValueError(
+ f"Cannot condense conversation: {condenser_info}. "
+ "To enable manual condensation, configure an "
+ "LLMSummarizingCondenser:\n\n"
+ "from openhands.sdk.context.condenser import LLMSummarizingCondenser\n"
+ "agent = Agent(\n"
+ " llm=your_llm,\n"
+ " condenser=LLMSummarizingCondenser(\n"
+ " llm=your_llm,\n"
+ " max_size=120,\n"
+ " keep_first=4\n"
+ " )\n"
+ ")"
+ )
+
+ # Add a condensation request event
+ condensation_request = CondensationRequest()
+ self._on_event(condensation_request)
+
+ # Force the agent to take a single step to process the condensation request
+ # This will trigger the condenser if it handles condensation requests
+ with self._state:
+ # Take a single step to process the condensation request
+ self.agent.step(self, on_event=self._on_event, on_token=self._on_token)
+
+ logger.info("Condensation request processed")
+
  def __del__(self) -> None:
  """Ensure cleanup happens when conversation is destroyed."""
  try:
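
LocalConversation.condense() refuses to run unless a condenser that handles condensation requests is configured, and its error message spells out the expected setup. A sketch of the happy path following that setup, with `llm` assumed to be an already-constructed LLM:

from openhands.sdk import Agent, Conversation
from openhands.sdk.context.condenser import LLMSummarizingCondenser

agent = Agent(
    llm=llm,
    condenser=LLMSummarizingCondenser(llm=llm, max_size=120, keep_first=4),
)
conversation = Conversation(agent=agent, workspace="./workspace")
conversation.send_message("Do some summarizable work...")  # assumed existing API
conversation.run()
conversation.condense()  # force a summarization step instead of waiting for max_size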
openhands/sdk/conversation/impl/remote_conversation.py

@@ -2,6 +2,7 @@ import asyncio
  import json
  import os
  import threading
+ import time
  import uuid
  from collections.abc import Mapping
  from typing import SupportsIndex, overload
@@ -17,7 +18,11 @@ from openhands.sdk.conversation.events_list_base import EventsListBase
  from openhands.sdk.conversation.exceptions import ConversationRunError
  from openhands.sdk.conversation.secret_registry import SecretValue
  from openhands.sdk.conversation.state import ConversationExecutionStatus
- from openhands.sdk.conversation.types import ConversationCallbackType, ConversationID
+ from openhands.sdk.conversation.types import (
+ ConversationCallbackType,
+ ConversationID,
+ StuckDetectionThresholds,
+ )
  from openhands.sdk.conversation.visualizer import (
  ConversationVisualizerBase,
  DefaultConversationVisualizer,
@@ -28,6 +33,12 @@ from openhands.sdk.event.conversation_state import (
  ConversationStateUpdateEvent,
  )
  from openhands.sdk.event.llm_completion_log import LLMCompletionLogEvent
+ from openhands.sdk.hooks import (
+ HookConfig,
+ HookEventProcessor,
+ HookEventType,
+ HookManager,
+ )
  from openhands.sdk.llm import LLM, Message, TextContent
  from openhands.sdk.logger import DEBUG, get_logger
  from openhands.sdk.observability.laminar import observe
@@ -425,6 +436,8 @@ class RemoteConversation(BaseConversation):
  max_iteration_per_run: int
  workspace: RemoteWorkspace
  _client: httpx.Client
+ _hook_processor: HookEventProcessor | None
+ _cleanup_initiated: bool

  def __init__(
  self,
@@ -434,6 +447,10 @@ class RemoteConversation(BaseConversation):
  callbacks: list[ConversationCallbackType] | None = None,
  max_iteration_per_run: int = 500,
  stuck_detection: bool = True,
+ stuck_detection_thresholds: (
+ StuckDetectionThresholds | Mapping[str, int] | None
+ ) = None,
+ hook_config: HookConfig | None = None,
  visualizer: (
  type[ConversationVisualizerBase] | ConversationVisualizerBase | None
  ) = DefaultConversationVisualizer,
@@ -449,6 +466,12 @@ class RemoteConversation(BaseConversation):
  callbacks: Optional callbacks to receive events (not yet streamed)
  max_iteration_per_run: Max iterations configured on server
  stuck_detection: Whether to enable stuck detection on server
+ stuck_detection_thresholds: Optional configuration for stuck detection
+ thresholds. Can be a StuckDetectionThresholds instance or
+ a dict with keys: 'action_observation', 'action_error',
+ 'monologue', 'alternating_pattern'. Values are integers
+ representing the number of repetitions before triggering.
+ hook_config: Optional hook configuration for session hooks
  visualizer: Visualization configuration. Can be:
  - ConversationVisualizerBase subclass: Class to instantiate
  (default: ConversationVisualizer)
@@ -462,6 +485,8 @@ class RemoteConversation(BaseConversation):
  self.max_iteration_per_run = max_iteration_per_run
  self.workspace = workspace
  self._client = workspace.client
+ self._hook_processor = None
+ self._cleanup_initiated = False

  if conversation_id is None:
  payload = {
@@ -476,6 +501,15 @@ class RemoteConversation(BaseConversation):
  working_dir=self.workspace.working_dir
  ).model_dump(),
  }
+ if stuck_detection_thresholds is not None:
+ # Convert to StuckDetectionThresholds if dict, then serialize
+ if isinstance(stuck_detection_thresholds, Mapping):
+ threshold_config = StuckDetectionThresholds(
+ **stuck_detection_thresholds
+ )
+ else:
+ threshold_config = stuck_detection_thresholds
+ payload["stuck_detection_thresholds"] = threshold_config.model_dump()
  resp = _send_request(
  self._client, "POST", "/api/conversations", json=payload
  )
@@ -548,6 +582,25 @@ class RemoteConversation(BaseConversation):
  self.update_secrets(secret_values)

  self._start_observability_span(str(self._id))
+ if hook_config is not None:
+ unsupported = (
+ HookEventType.PRE_TOOL_USE,
+ HookEventType.POST_TOOL_USE,
+ HookEventType.USER_PROMPT_SUBMIT,
+ HookEventType.STOP,
+ )
+ if any(hook_config.has_hooks_for_event(t) for t in unsupported):
+ logger.warning(
+ "RemoteConversation only supports SessionStart/SessionEnd hooks; "
+ "other hook types will not be enforced."
+ )
+ hook_manager = HookManager(
+ config=hook_config,
+ working_dir=os.getcwd(),
+ session_id=str(self._id),
+ )
+ self._hook_processor = HookEventProcessor(hook_manager=hook_manager)
+ self._hook_processor.run_session_start()

  def _create_llm_completion_log_callback(self) -> ConversationCallbackType:
  """Create a callback that writes LLM completion logs to client filesystem."""
@@ -623,7 +676,25 @@ class RemoteConversation(BaseConversation):
  )

  @observe(name="conversation.run")
- def run(self) -> None:
+ def run(
+ self,
+ blocking: bool = True,
+ poll_interval: float = 1.0,
+ timeout: float = 3600.0,
+ ) -> None:
+ """Trigger a run on the server.
+
+ Args:
+ blocking: If True (default), wait for the run to complete by polling
+ the server. If False, return immediately after triggering the run.
+ poll_interval: Time in seconds between status polls (only used when
+ blocking=True). Default is 1.0 second.
+ timeout: Maximum time in seconds to wait for the run to complete
+ (only used when blocking=True). Default is 3600 seconds.
+
+ Raises:
+ ConversationRunError: If the run fails or times out.
+ """
  # Trigger a run on the server using the dedicated run endpoint.
  # Let the server tell us if it's already running (409), avoiding an extra GET.
  try:
@@ -632,16 +703,74 @@
  "POST",
  f"/api/conversations/{self._id}/run",
  acceptable_status_codes={200, 201, 204, 409},
- timeout=1800,
+ timeout=30, # Short timeout for trigger request
  )
  except Exception as e: # httpx errors already logged by _send_request
  # Surface conversation id to help resuming
  raise ConversationRunError(self._id, e) from e
+
  if resp.status_code == 409:
  logger.info("Conversation is already running; skipping run trigger")
+ if blocking:
+ # Still wait for the existing run to complete
+ self._wait_for_run_completion(poll_interval, timeout)
  return
+
  logger.info(f"run() triggered successfully: {resp}")

+ if blocking:
+ self._wait_for_run_completion(poll_interval, timeout)
+
+ def _wait_for_run_completion(
+ self,
+ poll_interval: float = 1.0,
+ timeout: float = 1800.0,
+ ) -> None:
+ """Poll the server until the conversation is no longer running.
+
+ Args:
+ poll_interval: Time in seconds between status polls.
+ timeout: Maximum time in seconds to wait.
+
+ Raises:
+ ConversationRunError: If the wait times out.
+ """
+ start_time = time.monotonic()
+
+ while True:
+ elapsed = time.monotonic() - start_time
+ if elapsed > timeout:
+ raise ConversationRunError(
+ self._id,
+ TimeoutError(
+ f"Run timed out after {timeout} seconds. "
+ "The conversation may still be running on the server."
+ ),
+ )
+
+ try:
+ resp = _send_request(
+ self._client,
+ "GET",
+ f"/api/conversations/{self._id}",
+ timeout=30,
+ )
+ info = resp.json()
+ status = info.get("execution_status")
+
+ if status != ConversationExecutionStatus.RUNNING.value:
+ logger.info(
+ f"Run completed with status: {status} (elapsed: {elapsed:.1f}s)"
+ )
+ return
+
+ except Exception as e:
+ # Log but continue polling - transient network errors shouldn't
+ # stop us from waiting for the run to complete
+ logger.warning(f"Error polling status (will retry): {e}")
+
+ time.sleep(poll_interval)
+
  def set_confirmation_policy(self, policy: ConfirmationPolicyBase) -> None:
  payload = {"policy": policy.model_dump()}
  _send_request(
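
RemoteConversation.run() now blocks by default and polls the server for completion instead of holding one long HTTP request open. A sketch of both modes, assuming `conversation` is a RemoteConversation attached to a RemoteWorkspace:

from openhands.sdk.conversation.exceptions import ConversationRunError

# Default: trigger the run, then poll until the server reports it is no longer running.
try:
    conversation.run(poll_interval=2.0, timeout=600.0)
except ConversationRunError as err:
    print(f"Remote run failed or timed out: {err}")

# Fire-and-forget: return immediately after the trigger request; inspect state later.
conversation.run(blocking=False)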
@@ -746,6 +875,21 @@ class RemoteConversation(BaseConversation):
  data = resp.json()
  return data["title"]

+ def condense(self) -> None:
+ """Force condensation of the conversation history.
+
+ This method sends a condensation request to the remote agent server.
+ The server will use the existing condensation request pattern to trigger
+ condensation if a condenser is configured and handles condensation requests.
+
+ The condensation will be applied on the server side and will modify the
+ conversation state by adding a condensation event to the history.
+
+ Raises:
+ HTTPError: If the server returns an error (e.g., no condenser configured).
+ """
+ _send_request(self._client, "POST", f"/api/conversations/{self._id}/condense")
+
  def close(self) -> None:
  """Close the conversation and clean up resources.

@@ -753,6 +897,11 @@ class RemoteConversation(BaseConversation):
  The workspace owns the client and will close it during its own cleanup.
  Closing it here would prevent the workspace from making cleanup API calls.
  """
+ if self._cleanup_initiated:
+ return
+ self._cleanup_initiated = True
+ if self._hook_processor is not None:
+ self._hook_processor.run_session_end()
  try:
  # Stop WebSocket client if it exists
  if self._ws_client:
openhands/sdk/conversation/state.py

@@ -42,6 +42,7 @@ class ConversationExecutionStatus(str, Enum):
  FINISHED = "finished" # Conversation has completed the current task
  ERROR = "error" # Conversation encountered an error (optional for future use)
  STUCK = "stuck" # Conversation is stuck in a loop or unable to proceed
+ DELETING = "deleting" # Conversation is in the process of being deleted


  class ConversationState(OpenHandsModel):
@@ -93,6 +94,18 @@ class ConversationState(OpenHandsModel):
  description="List of activated knowledge skills name",
  )

+ # Hook-blocked actions: action_id -> blocking reason
+ blocked_actions: dict[str, str] = Field(
+ default_factory=dict,
+ description="Actions blocked by PreToolUse hooks, keyed by action ID",
+ )
+
+ # Hook-blocked messages: message_id -> blocking reason
+ blocked_messages: dict[str, str] = Field(
+ default_factory=dict,
+ description="Messages blocked by UserPromptSubmit hooks, keyed by message ID",
+ )
+
  # Conversation statistics for LLM usage tracking
  stats: ConversationStats = Field(
  default_factory=ConversationStats,
@@ -166,7 +179,9 @@ class ConversationState(OpenHandsModel):
  Else: create fresh (agent required), persist base, and return.
  """
  file_store = (
- LocalFileStore(persistence_dir) if persistence_dir else InMemoryFileStore()
+ LocalFileStore(persistence_dir, cache_limit_size=max_iterations)
+ if persistence_dir
+ else InMemoryFileStore()
  )

  try:
@@ -275,6 +290,32 @@ class ConversationState(OpenHandsModel):
  f"State change callback failed for field {name}", exc_info=True
  )

+ def block_action(self, action_id: str, reason: str) -> None:
+ """Persistently record a hook-blocked action."""
+ self.blocked_actions = {**self.blocked_actions, action_id: reason}
+
+ def pop_blocked_action(self, action_id: str) -> str | None:
+ """Remove and return a hook-blocked action reason, if present."""
+ if action_id not in self.blocked_actions:
+ return None
+ updated = dict(self.blocked_actions)
+ reason = updated.pop(action_id)
+ self.blocked_actions = updated
+ return reason
+
+ def block_message(self, message_id: str, reason: str) -> None:
+ """Persistently record a hook-blocked user message."""
+ self.blocked_messages = {**self.blocked_messages, message_id: reason}
+
+ def pop_blocked_message(self, message_id: str) -> str | None:
+ """Remove and return a hook-blocked message reason, if present."""
+ if message_id not in self.blocked_messages:
+ return None
+ updated = dict(self.blocked_messages)
+ reason = updated.pop(message_id)
+ self.blocked_messages = updated
+ return reason
+
  @staticmethod
  def get_unmatched_actions(events: Sequence[Event]) -> list[ActionEvent]:
  """Find actions in the event history that don't have matching observations.