openhands_agent_server-1.8.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. openhands/agent_server/__init__.py +0 -0
  2. openhands/agent_server/__main__.py +118 -0
  3. openhands/agent_server/api.py +331 -0
  4. openhands/agent_server/bash_router.py +105 -0
  5. openhands/agent_server/bash_service.py +379 -0
  6. openhands/agent_server/config.py +187 -0
  7. openhands/agent_server/conversation_router.py +321 -0
  8. openhands/agent_server/conversation_service.py +692 -0
  9. openhands/agent_server/dependencies.py +72 -0
  10. openhands/agent_server/desktop_router.py +47 -0
  11. openhands/agent_server/desktop_service.py +212 -0
  12. openhands/agent_server/docker/Dockerfile +244 -0
  13. openhands/agent_server/docker/build.py +825 -0
  14. openhands/agent_server/docker/wallpaper.svg +22 -0
  15. openhands/agent_server/env_parser.py +460 -0
  16. openhands/agent_server/event_router.py +204 -0
  17. openhands/agent_server/event_service.py +648 -0
  18. openhands/agent_server/file_router.py +121 -0
  19. openhands/agent_server/git_router.py +34 -0
  20. openhands/agent_server/logging_config.py +56 -0
  21. openhands/agent_server/middleware.py +32 -0
  22. openhands/agent_server/models.py +307 -0
  23. openhands/agent_server/openapi.py +21 -0
  24. openhands/agent_server/pub_sub.py +80 -0
  25. openhands/agent_server/py.typed +0 -0
  26. openhands/agent_server/server_details_router.py +43 -0
  27. openhands/agent_server/sockets.py +173 -0
  28. openhands/agent_server/tool_preload_service.py +76 -0
  29. openhands/agent_server/tool_router.py +22 -0
  30. openhands/agent_server/utils.py +63 -0
  31. openhands/agent_server/vscode_extensions/openhands-settings/extension.js +22 -0
  32. openhands/agent_server/vscode_extensions/openhands-settings/package.json +12 -0
  33. openhands/agent_server/vscode_router.py +70 -0
  34. openhands/agent_server/vscode_service.py +232 -0
  35. openhands_agent_server-1.8.2.dist-info/METADATA +15 -0
  36. openhands_agent_server-1.8.2.dist-info/RECORD +39 -0
  37. openhands_agent_server-1.8.2.dist-info/WHEEL +5 -0
  38. openhands_agent_server-1.8.2.dist-info/entry_points.txt +2 -0
  39. openhands_agent_server-1.8.2.dist-info/top_level.txt +1 -0
openhands/agent_server/conversation_service.py
@@ -0,0 +1,692 @@
+import asyncio
+import importlib
+import logging
+from dataclasses import dataclass, field
+from pathlib import Path
+from uuid import UUID, uuid4
+
+import httpx
+
+from openhands.agent_server.config import Config, WebhookSpec
+from openhands.agent_server.event_service import EventService
+from openhands.agent_server.models import (
+    ConversationInfo,
+    ConversationPage,
+    ConversationSortOrder,
+    StartConversationRequest,
+    StoredConversation,
+    UpdateConversationRequest,
+)
+from openhands.agent_server.pub_sub import Subscriber
+from openhands.agent_server.server_details_router import update_last_execution_time
+from openhands.agent_server.utils import safe_rmtree, utc_now
+from openhands.sdk import LLM, Event, Message
+from openhands.sdk.conversation.state import (
+    ConversationExecutionStatus,
+    ConversationState,
+)
+from openhands.sdk.utils.cipher import Cipher
+
+
+logger = logging.getLogger(__name__)
+
+
+def _compose_conversation_info(
+    stored: StoredConversation, state: ConversationState
+) -> ConversationInfo:
+    return ConversationInfo(
+        **state.model_dump(),
+        title=stored.title,
+        metrics=stored.metrics,
+        created_at=stored.created_at,
+        updated_at=stored.updated_at,
+    )
+
+
+@dataclass
+class ConversationService:
+    """
+    Conversation service which stores to a local file store. When the context starts
+    all event_services are loaded into memory, and stored when it stops.
+    """
+
+    conversations_dir: Path = field()
+    webhook_specs: list[WebhookSpec] = field(default_factory=list)
+    session_api_key: str | None = field(default=None)
+    cipher: Cipher | None = None
+    _event_services: dict[UUID, EventService] | None = field(default=None, init=False)
+    _conversation_webhook_subscribers: list["ConversationWebhookSubscriber"] = field(
+        default_factory=list, init=False
+    )
+
+    async def get_conversation(self, conversation_id: UUID) -> ConversationInfo | None:
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.get(conversation_id)
+        if event_service is None:
+            return None
+        state = await event_service.get_state()
+        return _compose_conversation_info(event_service.stored, state)
+
+    async def search_conversations(
+        self,
+        page_id: str | None = None,
+        limit: int = 100,
+        execution_status: ConversationExecutionStatus | None = None,
+        sort_order: ConversationSortOrder = ConversationSortOrder.CREATED_AT_DESC,
+    ) -> ConversationPage:
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+
+        # Collect all conversations with their info
+        all_conversations = []
+        for id, event_service in self._event_services.items():
+            state = await event_service.get_state()
+            conversation_info = _compose_conversation_info(event_service.stored, state)
+            # Apply status filter if provided
+            if (
+                execution_status is not None
+                and conversation_info.execution_status != execution_status
+            ):
+                continue
+
+            all_conversations.append((id, conversation_info))
+
+        # Sort conversations based on sort_order
+        if sort_order == ConversationSortOrder.CREATED_AT:
+            all_conversations.sort(key=lambda x: x[1].created_at)
+        elif sort_order == ConversationSortOrder.CREATED_AT_DESC:
+            all_conversations.sort(key=lambda x: x[1].created_at, reverse=True)
+        elif sort_order == ConversationSortOrder.UPDATED_AT:
+            all_conversations.sort(key=lambda x: x[1].updated_at)
+        elif sort_order == ConversationSortOrder.UPDATED_AT_DESC:
+            all_conversations.sort(key=lambda x: x[1].updated_at, reverse=True)
+
+        # Handle pagination
+        items = []
+        start_index = 0
+
+        # Find the starting point if page_id is provided
+        if page_id:
+            for i, (id, _) in enumerate(all_conversations):
+                if id.hex == page_id:
+                    start_index = i
+                    break
+
+        # Collect items for this page
+        next_page_id = None
+        for i in range(start_index, len(all_conversations)):
+            if len(items) >= limit:
+                # We have more items, set next_page_id
+                if i < len(all_conversations):
+                    next_page_id = all_conversations[i][0].hex
+                break
+            items.append(all_conversations[i][1])
+
+        return ConversationPage(items=items, next_page_id=next_page_id)
+
+    async def count_conversations(
+        self,
+        execution_status: ConversationExecutionStatus | None = None,
+    ) -> int:
+        """Count conversations matching the given filters."""
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+
+        count = 0
+        for event_service in self._event_services.values():
+            state = await event_service.get_state()
+
+            # Apply status filter if provided
+            if (
+                execution_status is not None
+                and state.execution_status != execution_status
+            ):
+                continue
+
+            count += 1
+
+        return count
+
+    async def batch_get_conversations(
+        self, conversation_ids: list[UUID]
+    ) -> list[ConversationInfo | None]:
+        """Given a list of ids, get a batch of conversation info, returning
+        None for any that were not found."""
+        results = await asyncio.gather(
+            *[
+                self.get_conversation(conversation_id)
+                for conversation_id in conversation_ids
+            ]
+        )
+        return results
+
+    async def _notify_conversation_webhooks(self, conversation_info: ConversationInfo):
+        """Notify all conversation webhook subscribers about conversation changes."""
+        if not self._conversation_webhook_subscribers:
+            return
+
+        # Send notifications to all conversation webhook subscribers in the background
+        async def _notify_and_log_errors():
+            results = await asyncio.gather(
+                *[
+                    subscriber.post_conversation_info(conversation_info)
+                    for subscriber in self._conversation_webhook_subscribers
+                ],
+                return_exceptions=True,  # Don't fail if one webhook fails
+            )
+
+            # Log any exceptions that occurred
+            for i, result in enumerate(results):
+                if isinstance(result, Exception):
+                    subscriber = self._conversation_webhook_subscribers[i]
+                    logger.error(
+                        (
+                            f"Failed to notify conversation webhook "
+                            f"{subscriber.spec.base_url}: {result}"
+                        ),
+                        exc_info=result,
+                    )
+
+        # Create task to run in background without awaiting
+        asyncio.create_task(_notify_and_log_errors())
+
+    # Write Methods
+
+    async def start_conversation(
+        self, request: StartConversationRequest
+    ) -> tuple[ConversationInfo, bool]:
+        """Start a local event_service and return its id."""
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        conversation_id = request.conversation_id or uuid4()
+
+        existing_event_service = self._event_services.get(conversation_id)
+        if existing_event_service and existing_event_service.is_open():
+            state = await existing_event_service.get_state()
+            conversation_info = _compose_conversation_info(
+                existing_event_service.stored, state
+            )
+            return conversation_info, False
+
+        # Dynamically register tools from client's registry
+        if request.tool_module_qualnames:
+            import importlib
+
+            for tool_name, module_qualname in request.tool_module_qualnames.items():
+                try:
+                    # Import the module to trigger tool auto-registration
+                    importlib.import_module(module_qualname)
+                    logger.debug(
+                        f"Tool '{tool_name}' registered via module '{module_qualname}'"
+                    )
+                except ImportError as e:
+                    logger.warning(
+                        f"Failed to import module '{module_qualname}' for tool "
+                        f"'{tool_name}': {e}. Tool will not be available."
+                    )
+                    # Continue even if some tools fail to register
+                    # The agent will fail gracefully if it tries to use unregistered
+                    # tools
+            if request.tool_module_qualnames:
+                logger.info(
+                    f"Dynamically registered {len(request.tool_module_qualnames)} "
+                    f"tools for conversation {conversation_id}: "
+                    f"{list(request.tool_module_qualnames.keys())}"
+                )
+
+        stored = StoredConversation(id=conversation_id, **request.model_dump())
+        event_service = await self._start_event_service(stored)
+        initial_message = request.initial_message
+        if initial_message:
+            message = Message(
+                role=initial_message.role, content=initial_message.content
+            )
+            await event_service.send_message(message, True)
+
+        state = await event_service.get_state()
+        conversation_info = _compose_conversation_info(event_service.stored, state)
+
+        # Notify conversation webhooks about the started conversation
+        await self._notify_conversation_webhooks(conversation_info)
+
+        return conversation_info, True
+
+    async def pause_conversation(self, conversation_id: UUID) -> bool:
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.get(conversation_id)
+        if event_service:
+            await event_service.pause()
+            # Notify conversation webhooks about the paused conversation
+            state = await event_service.get_state()
+            conversation_info = _compose_conversation_info(event_service.stored, state)
+            await self._notify_conversation_webhooks(conversation_info)
+        return bool(event_service)
+
+    async def resume_conversation(self, conversation_id: UUID) -> bool:
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.get(conversation_id)
+        if event_service:
+            await event_service.start()
+        return bool(event_service)
+
+    async def delete_conversation(self, conversation_id: UUID) -> bool:
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.pop(conversation_id, None)
+        if event_service:
+            # Notify conversation webhooks about the stopped conversation before closing
+            try:
+                state = await event_service.get_state()
+                conversation_info = _compose_conversation_info(
+                    event_service.stored, state
+                )
+                conversation_info.execution_status = (
+                    ConversationExecutionStatus.DELETING
+                )
+                await self._notify_conversation_webhooks(conversation_info)
+            except Exception as e:
+                logger.warning(
+                    f"Failed to notify webhooks for conversation {conversation_id}: {e}"
+                )
+
+            # Close the event service
+            try:
+                await event_service.close()
+            except Exception as e:
+                logger.warning(
+                    f"Failed to close event service for conversation "
+                    f"{conversation_id}: {e}"
+                )
+
+            # Safely remove only the conversation directory (workspace is preserved).
+            # This operation may fail due to permission issues, but we don't want that
+            # to prevent the conversation from being marked as deleted.
+            safe_rmtree(
+                event_service.conversation_dir,
+                f"conversation directory for {conversation_id}",
+            )
+
+            logger.info(f"Successfully deleted conversation {conversation_id}")
+            return True
+        return False
+
+    async def update_conversation(
+        self, conversation_id: UUID, request: UpdateConversationRequest
+    ) -> bool:
+        """Update conversation metadata.
+
+        Args:
+            conversation_id: The ID of the conversation to update
+            request: Request object containing fields to update (e.g., title)
+
+        Returns:
+            bool: True if the conversation was updated successfully, False if not found
+        """
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.get(conversation_id)
+        if event_service is None:
+            return False
+
+        # Update the title in stored conversation
+        event_service.stored.title = request.title.strip()
+        # Save the updated metadata to disk
+        await event_service.save_meta()
+
+        # Notify conversation webhooks about the updated conversation
+        state = await event_service.get_state()
+        conversation_info = _compose_conversation_info(event_service.stored, state)
+        await self._notify_conversation_webhooks(conversation_info)
+
+        logger.info(
+            f"Successfully updated conversation {conversation_id} "
+            f"with title: {request.title}"
+        )
+        return True
+
+    async def get_event_service(self, conversation_id: UUID) -> EventService | None:
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        return self._event_services.get(conversation_id)
+
+    async def generate_conversation_title(
+        self, conversation_id: UUID, max_length: int = 50, llm: LLM | None = None
+    ) -> str | None:
+        """Generate a title for the conversation using LLM."""
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.get(conversation_id)
+        if event_service is None:
+            return None
+
+        # Delegate to EventService to avoid accessing private conversation internals
+        title = await event_service.generate_title(llm=llm, max_length=max_length)
+        return title
+
+    async def ask_agent(self, conversation_id: UUID, question: str) -> str | None:
+        """Ask the agent a simple question without affecting conversation state."""
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.get(conversation_id)
+        if event_service is None:
+            return None
+
+        # Delegate to EventService to avoid accessing private conversation internals
+        response = await event_service.ask_agent(question)
+        return response
+
+    async def condense(self, conversation_id: UUID) -> bool:
+        """Force condensation of the conversation history."""
+        if self._event_services is None:
+            raise ValueError("inactive_service")
+        event_service = self._event_services.get(conversation_id)
+        if event_service is None:
+            return False
+
+        # Delegate to EventService to avoid accessing private conversation internals
+        await event_service.condense()
+        return True
+
+    async def __aenter__(self):
+        self.conversations_dir.mkdir(parents=True, exist_ok=True)
+        self._event_services = {}
+        for conversation_dir in self.conversations_dir.iterdir():
+            try:
+                meta_file = conversation_dir / "meta.json"
+                if not meta_file.exists():
+                    continue
+                json_str = meta_file.read_text()
+                stored = StoredConversation.model_validate_json(
+                    json_str,
+                    context={
+                        "cipher": self.cipher,
+                    },
+                )
+                # Dynamically register tools when resuming persisted conversations
+                if stored.tool_module_qualnames:
+                    for (
+                        tool_name,
+                        module_qualname,
+                    ) in stored.tool_module_qualnames.items():
+                        try:
+                            # Import the module to trigger tool auto-registration
+                            importlib.import_module(module_qualname)
+                            logger.debug(
+                                f"Tool '{tool_name}' registered via module "
+                                f"'{module_qualname}' when resuming conversation "
+                                f"{stored.id}"
+                            )
+                        except ImportError as e:
+                            logger.warning(
+                                f"Failed to import module '{module_qualname}' for "
+                                f"tool '{tool_name}' when resuming conversation "
+                                f"{stored.id}: {e}. Tool will not be available."
+                            )
+                            # Continue even if some tools fail to register
+                    if stored.tool_module_qualnames:
+                        logger.info(
+                            f"Dynamically registered "
+                            f"{len(stored.tool_module_qualnames)} tools when "
+                            f"resuming conversation {stored.id}: "
+                            f"{list(stored.tool_module_qualnames.keys())}"
+                        )
+                await self._start_event_service(stored)
+            except Exception:
+                logger.exception(
+                    f"error_loading_event_service:{conversation_dir}", stack_info=True
+                )
+
+        # Initialize conversation webhook subscribers
+        self._conversation_webhook_subscribers = [
+            ConversationWebhookSubscriber(
+                spec=webhook_spec,
+                session_api_key=self.session_api_key,
+            )
+            for webhook_spec in self.webhook_specs
+        ]
+
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        event_services = self._event_services
+        if event_services is None:
+            return
+        self._event_services = None
+        # This stops conversations and saves meta
+        await asyncio.gather(
+            *[
+                event_service.__aexit__(exc_type, exc_value, traceback)
+                for event_service in event_services.values()
+            ]
+        )
+
+    @classmethod
+    def get_instance(cls, config: Config) -> "ConversationService":
+        return ConversationService(
+            conversations_dir=config.conversations_path,
+            webhook_specs=config.webhooks,
+            session_api_key=(
+                config.session_api_keys[0] if config.session_api_keys else None
+            ),
+            cipher=config.cipher,
+        )
+
+    async def _start_event_service(self, stored: StoredConversation) -> EventService:
+        event_services = self._event_services
+        if event_services is None:
+            raise ValueError("inactive_service")
+
+        event_service = EventService(
+            stored=stored,
+            conversations_dir=self.conversations_dir,
+            cipher=self.cipher,
+        )
+        # Create subscribers...
+        await event_service.subscribe_to_events(_EventSubscriber(service=event_service))
+        asyncio.gather(
+            *[
+                event_service.subscribe_to_events(
+                    WebhookSubscriber(
+                        conversation_id=stored.id,
+                        service=event_service,
+                        spec=webhook_spec,
+                        session_api_key=self.session_api_key,
+                    )
+                )
+                for webhook_spec in self.webhook_specs
+            ]
+        )
+
+        try:
+            await event_service.start()
+            # Save metadata immediately after successful start to ensure persistence
+            # even if the system is not shut down gracefully
+            await event_service.save_meta()
+        except Exception:
+            # Clean up the event service if startup fails
+            await event_service.close()
+            raise
+
+        event_services[stored.id] = event_service
+        return event_service
+
+
+@dataclass
+class _EventSubscriber(Subscriber):
+    service: EventService
+
+    async def __call__(self, _event: Event):
+        self.service.stored.updated_at = utc_now()
+        update_last_execution_time()
+
+
+@dataclass
+class WebhookSubscriber(Subscriber):
+    conversation_id: UUID
+    service: EventService
+    spec: WebhookSpec
+    session_api_key: str | None = None
+    queue: list[Event] = field(default_factory=list)
+    _flush_timer: asyncio.Task | None = field(default=None, init=False)
+
+    async def __call__(self, event: Event):
+        """Add event to queue and post to webhook when buffer size is reached."""
+        self.queue.append(event)
+
+        if len(self.queue) >= self.spec.event_buffer_size:
+            # Cancel timer since we're flushing due to buffer size
+            self._cancel_flush_timer()
+            await self._post_events()
+        elif not self._flush_timer:
+            self._flush_timer = asyncio.create_task(self._flush_after_delay())
+
+    async def close(self):
+        """Post any remaining items in the queue to the webhook."""
+        # Cancel any pending flush timer
+        self._cancel_flush_timer()
+
+        if self.queue:
+            await self._post_events()
+
+    async def _post_events(self):
+        """Post queued events to the webhook with retry logic."""
+        if not self.queue:
+            return
+
+        events_to_post = self.queue.copy()
+        self.queue.clear()
+
+        # Prepare headers
+        headers = self.spec.headers.copy()
+        if self.session_api_key:
+            headers["X-Session-API-Key"] = self.session_api_key
+
+        # Convert events to serializable format
+        event_data = [
+            event.model_dump() if hasattr(event, "model_dump") else event.__dict__
+            for event in events_to_post
+        ]
+
+        # Construct events URL
+        events_url = (
+            f"{self.spec.base_url.rstrip('/')}/events/{self.conversation_id.hex}"
+        )
+
+        # Retry logic
+        for attempt in range(self.spec.num_retries + 1):
+            try:
+                async with httpx.AsyncClient() as client:
+                    response = await client.request(
+                        method="POST",
+                        url=events_url,
+                        json=event_data,
+                        headers=headers,
+                        timeout=30.0,
+                    )
+                    response.raise_for_status()
+                    logger.debug(
+                        f"Successfully posted {len(event_data)} events "
+                        f"to webhook {events_url}"
+                    )
+                    return
+            except Exception as e:
+                logger.warning(f"Webhook post attempt {attempt + 1} failed: {e}")
+                if attempt < self.spec.num_retries:
+                    await asyncio.sleep(self.spec.retry_delay)
+                else:
+                    logger.error(
+                        f"Failed to post events to webhook {events_url} "
+                        f"after {self.spec.num_retries + 1} attempts"
+                    )
+                    # Re-queue events for potential retry later
+                    self.queue.extend(events_to_post)
+
+    def _cancel_flush_timer(self):
+        """Cancel the current flush timer if it exists."""
+        if self._flush_timer and not self._flush_timer.done():
+            self._flush_timer.cancel()
+        self._flush_timer = None
+
+    async def _flush_after_delay(self):
+        """Wait for flush_delay seconds then flush events if any exist."""
+        try:
+            await asyncio.sleep(self.spec.flush_delay)
+            # Only flush if there are events in the queue
+            if self.queue:
+                await self._post_events()
+        except asyncio.CancelledError:
+            # Timer was cancelled, which is expected behavior
+            pass
+        finally:
+            self._flush_timer = None
+
+
+@dataclass
+class ConversationWebhookSubscriber:
+    """Webhook subscriber for conversation lifecycle events (start, pause, stop)."""
+
+    spec: WebhookSpec
+    session_api_key: str | None = None
+
+    async def post_conversation_info(self, conversation_info: ConversationInfo):
+        """Post conversation info to the webhook immediately (no batching)."""
+        # Prepare headers
+        headers = self.spec.headers.copy()
+        if self.session_api_key:
+            headers["X-Session-API-Key"] = self.session_api_key
+
+        # Construct conversations URL
+        conversations_url = f"{self.spec.base_url.rstrip('/')}/conversations"
+
+        # Convert conversation info to serializable format
+        conversation_data = conversation_info.model_dump(mode="json")
+
+        # Retry logic
+        for attempt in range(self.spec.num_retries + 1):
+            try:
+                async with httpx.AsyncClient() as client:
+                    response = await client.request(
+                        method="POST",
+                        url=conversations_url,
+                        json=conversation_data,
+                        headers=headers,
+                        timeout=30.0,
+                    )
+                    response.raise_for_status()
+                    logger.debug(
+                        f"Successfully posted conversation info "
+                        f"to webhook {conversations_url}"
+                    )
+                    return
+            except Exception as e:
+                logger.warning(
+                    f"Conversation webhook post attempt {attempt + 1} failed: {e}"
+                )
+                if attempt < self.spec.num_retries:
+                    await asyncio.sleep(self.spec.retry_delay)
+                else:
+                    logger.error(
+                        f"Failed to post conversation info to webhook "
+                        f"{conversations_url} after {self.spec.num_retries + 1} "
+                        "attempts"
+                    )
+
+
+_conversation_service: ConversationService | None = None
+
+
+def get_default_conversation_service() -> ConversationService:
+    global _conversation_service
+    if _conversation_service:
+        return _conversation_service
+
+    from openhands.agent_server.config import (
+        get_default_config,
+    )
+
+    config = get_default_config()
+    _conversation_service = ConversationService.get_instance(config)
+    return _conversation_service
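
For reference, a minimal sketch of driving the service added above. It is illustrative only (not part of the packaged code) and uses just the entry points visible in this file: get_default_config, ConversationService.get_instance, the async context manager, and the read methods.

import asyncio

from openhands.agent_server.config import get_default_config
from openhands.agent_server.conversation_service import ConversationService


async def main() -> None:
    # Build the service the same way get_default_conversation_service() does.
    config = get_default_config()
    service = ConversationService.get_instance(config)
    # Entering the context loads persisted conversations from conversations_dir;
    # exiting stops them and saves their metadata.
    async with service:
        total = await service.count_conversations()
        page = await service.search_conversations(limit=10)
        print(f"{total} conversation(s); first page returned {len(page.items)}")


asyncio.run(main())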