gohumanloop 0.0.1__py3-none-any.whl → 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,819 @@
1
+ from typing import Dict, Any, Optional, Callable, Awaitable, TypeVar, Union, List
2
+ from functools import wraps
3
+ import asyncio
4
+ import uuid
5
+ from inspect import iscoroutinefunction
6
+ from contextlib import asynccontextmanager, contextmanager
7
+
8
+ from gohumanloop.utils import run_async_safely
9
+ from gohumanloop.core.interface import (
10
+ HumanLoopManager, HumanLoopResult, HumanLoopStatus, HumanLoopType, HumanLoopCallback, HumanLoopProvider
11
+ )
12
+
13
+ # Define TypeVars for input and output types
14
+ T = TypeVar("T")
15
+ R = TypeVar("R")
16
+
17
+ # Check LangGraph version
18
+ def _check_langgraph_version():
19
+ """Check LangGraph version to determine if interrupt feature is supported"""
20
+ try:
21
+ import importlib.metadata
22
+ version = importlib.metadata.version("langgraph")
23
+ version_parts = version.split('.')
24
+ major, minor, patch = int(version_parts[0]), int(version_parts[1]), int(version_parts[2])
25
+
26
+ # Interrupt support starts from version 0.2.57
27
+ return (major > 0 or (major == 0 and (minor > 2 or (minor == 2 and patch >= 57))))
28
+ except (importlib.metadata.PackageNotFoundError, ValueError, IndexError):
29
+ # If version cannot be determined, assume no support
30
+ return False
31
+
32
+ # Import corresponding features based on version
33
+ _SUPPORTS_INTERRUPT = _check_langgraph_version()
34
+ if _SUPPORTS_INTERRUPT:
35
+ try:
36
+ from langgraph.types import interrupt as _lg_interrupt
37
+ from langgraph.types import Command as _lg_Command
38
+ except ImportError:
39
+ _SUPPORTS_INTERRUPT = False
40
+
41
+ class HumanLoopWrapper:
42
+ def __init__(
43
+ self,
44
+ decorator: Callable[[Any], Callable],
45
+ ) -> None:
46
+ self.decorator = decorator
47
+
48
+ def wrap(self, fn: Callable) -> Callable:
49
+ return self.decorator(fn)
50
+
51
+ def __call__(self, fn: Callable) -> Callable:
52
+ return self.decorator(fn)
53
+
54
+ class LangGraphAdapter:
55
+ """LangGraph adapter for simplifying human-in-the-loop integration
56
+
57
+ Provides decorators for three scenarios:
58
+ - require_approval: Requires human approval
59
+ - require_info: Requires human input information
60
+ - require_conversation: Requires multi-turn conversation
61
+ """
62
+
63
+ def __init__(
64
+ self,
65
+ manager: HumanLoopManager,
66
+ default_timeout: Optional[int] = None
67
+ ):
68
+ self.manager = manager
69
+ self.default_timeout = default_timeout
70
+
71
+ async def __aenter__(self):
72
+ """Implements async context manager protocol, automatically manages manager lifecycle"""
73
+ if hasattr(self.manager, '__aenter__'):
74
+ await self.manager.__aenter__()
75
+ return self
76
+
77
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
78
+ """Implements async context manager protocol, automatically manages manager lifecycle"""
79
+ if hasattr(self.manager, '__aexit__'):
80
+ await self.manager.__aexit__(exc_type, exc_val, exc_tb)
81
+
82
+ def __enter__(self):
83
+ """Implements sync context manager protocol, automatically manages manager lifecycle"""
84
+ if hasattr(self.manager, '__enter__'):
85
+ self.manager.__enter__()
86
+ return self
87
+
88
+ def __exit__(self, exc_type, exc_val, exc_tb):
89
+ """Implements sync context manager protocol, automatically manages manager lifecycle"""
90
+ if hasattr(self.manager, '__exit__'):
91
+ self.manager.__exit__(exc_type, exc_val, exc_tb)
92
+
93
+ @asynccontextmanager
94
+ async def asession(self):
95
+ """Provides async context manager for managing session lifecycle
96
+
97
+ Example:
98
+ async with adapter.asession():
99
+ # Use adapter here
100
+ """
101
+ try:
102
+ if hasattr(self.manager, '__aenter__'):
103
+ await self.manager.__aenter__()
104
+ yield self
105
+ finally:
106
+ if hasattr(self.manager, '__aexit__'):
107
+ await self.manager.__aexit__(None, None, None)
108
+
109
+ @contextmanager
110
+ def session(self):
111
+ """Provides a synchronous context manager for managing session lifecycle
112
+
113
+ Example:
114
+ with adapter.session():
115
+ # Use adapter here
116
+ """
117
+ try:
118
+ if hasattr(self.manager, '__enter__'):
119
+ self.manager.__enter__()
120
+ yield self
121
+ finally:
122
+ if hasattr(self.manager, '__exit__'):
123
+ self.manager.__exit__(None, None, None)
124
+
125
+ def require_approval(
126
+ self,
127
+ task_id: Optional[str] = None,
128
+ conversation_id: Optional[str] = None,
129
+ ret_key: str = "approval_result",
130
+ additional: Optional[str] = "",
131
+ metadata: Optional[Dict[str, Any]] = None,
132
+ provider_id: Optional[str] = None,
133
+ timeout: Optional[int] = None,
134
+ execute_on_reject: bool = False,
135
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
136
+ ) -> HumanLoopWrapper:
137
+ """Decorator for approval scenario"""
138
+ if task_id is None:
139
+ task_id = str(uuid.uuid4())
140
+ if conversation_id is None:
141
+ conversation_id = str(uuid.uuid4())
142
+
143
+ def decorator(fn):
144
+ return self._approve_cli(fn, task_id, conversation_id, ret_key, additional, metadata, provider_id, timeout, execute_on_reject, callback)
145
+ return HumanLoopWrapper(decorator)
146
+
147
+ def _approve_cli(
148
+ self,
149
+ fn: Callable[[T], R],
150
+ task_id: str,
151
+ conversation_id: str,
152
+ ret_key: str = "approval_result",
153
+ additional: Optional[str] = "",
154
+ metadata: Optional[Dict[str, Any]] = None,
155
+ provider_id: Optional[str] = None,
156
+ timeout: Optional[int] = None,
157
+ execute_on_reject: bool = False,
158
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
159
+ ) -> Callable[[T], R | None]:
160
+ """
161
+ Converts function type from Callable[[T], R] to Callable[[T], R | None]
162
+
163
+ Passes approval results through keyword arguments while maintaining original function signature
164
+
165
+ Benefits of this approach:
166
+ 1. Maintains original function return type, keeping compatibility with LangGraph workflow
167
+ 2. Decorated function can optionally use approval result information
168
+ 3. Can pass richer approval context information
169
+
170
+ Parameters:
171
+ - fn: Target function to be decorated
172
+ - task_id: Unique task identifier for tracking approval requests
173
+ - conversation_id: Unique conversation identifier for tracking approval sessions
174
+ - ret_key: Parameter name used to inject approval results into function kwargs
175
+ - additional: Additional context information to show to approvers
176
+ - metadata: Optional metadata dictionary passed with request
177
+ - provider_id: Optional provider identifier to route requests
178
+ - timeout: Timeout in seconds for approval response
179
+ - execute_on_reject: Whether to execute function on rejection
180
+ - callback: Optional callback object or factory function for approval events
181
+
182
+ Returns:
183
+ - Decorated function maintaining original signature
184
+ - Raises ValueError if approval fails or is rejected
185
+
186
+ Notes:
187
+ - Decorated function must accept ret_key parameter to receive approval results
188
+ - If approval is rejected, execution depends on execute_on_reject parameter
189
+ - Approval results contain complete context including:
190
+ - conversation_id: Unique conversation identifier
191
+ - request_id: Unique request identifier
192
+ - loop_type: Type of human loop (APPROVAL)
193
+ - status: Current approval status
194
+ - response: Approver's response
195
+ - feedback: Optional approver feedback
196
+ - responded_by: Approver identity
197
+ - responded_at: Response timestamp
198
+ - error: Error information if any
199
+ """
200
+
201
+ @wraps(fn)
202
+ async def async_wrapper(*args, **kwargs) -> R | None:
203
+ # Determine if callback is instance or factory function
204
+ cb = None
205
+ if callable(callback) and not isinstance(callback, HumanLoopCallback):
206
+ # Factory function, pass state
207
+ state = args[0] if args else None
208
+ cb = callback(state)
209
+ else:
210
+ cb = callback
211
+
212
+ result = await self.manager.request_humanloop(
213
+ task_id=task_id,
214
+ conversation_id=conversation_id,
215
+ loop_type=HumanLoopType.APPROVAL,
216
+ context={
217
+ "message": {
218
+ "function_name": fn.__name__,
219
+ "function_signature": str(fn.__code__.co_varnames),
220
+ "arguments": str(args),
221
+ "keyword_arguments": str(kwargs),
222
+ "documentation": fn.__doc__ or "No documentation available"
223
+ },
224
+ "question": "Please review and approve/reject this human loop execution.",
225
+ "additional": additional
226
+ },
227
+ callback=cb,
228
+ metadata=metadata,
229
+ provider_id=provider_id,
230
+ timeout=timeout or self.default_timeout,
231
+ blocking=True
232
+ )
233
+
234
+ # Initialize approval result object as None
235
+ approval_info = None
236
+
237
+ if isinstance(result, HumanLoopResult):
238
+ # If result is HumanLoopResult type, build complete approval info
239
+ approval_info = {
240
+ 'conversation_id': result.conversation_id,
241
+ 'request_id': result.request_id,
242
+ 'loop_type': result.loop_type,
243
+ 'status': result.status,
244
+ 'response': result.response,
245
+ 'feedback': result.feedback,
246
+ 'responded_by': result.responded_by,
247
+ 'responded_at': result.responded_at,
248
+ 'error': result.error
249
+ }
250
+
251
+ kwargs[ret_key] = approval_info
252
+ # Check approval result
253
+ if isinstance(result, HumanLoopResult):
254
+ # Handle based on approval status
255
+ if result.status == HumanLoopStatus.APPROVED:
256
+ if iscoroutinefunction(fn):
257
+ return await fn(*args, **kwargs)
258
+ return fn(*args, **kwargs)
259
+ elif result.status == HumanLoopStatus.REJECTED:
260
+ # If execute on reject is set, run the function
261
+ if execute_on_reject:
262
+ if iscoroutinefunction(fn):
263
+ return await fn(*args, **kwargs)
264
+ return fn(*args, **kwargs)
265
+ # Otherwise raise with the rejection reason
266
+ reason = result.response
267
+ raise ValueError(f"Function {fn.__name__} execution not approved: {reason}")
268
+ else:
269
+ raise ValueError(f"Approval error for {fn.__name__}: approval status: {result.status} and {result.error}")
270
+ else:
271
+ raise ValueError(f"Unknown approval error: {fn.__name__}")
272
+
273
+ @wraps(fn)
274
+ def sync_wrapper(*args, **kwargs) -> R | None:
275
+ return run_async_safely(async_wrapper(*args, **kwargs))
276
+
277
+ # Return corresponding wrapper based on decorated function type
278
+ if iscoroutinefunction(fn):
279
+ return async_wrapper # type: ignore
280
+ return sync_wrapper
281
+
282
+ def require_conversation(
283
+ self,
284
+ task_id: Optional[str] = None,
285
+ conversation_id: Optional[str] = None,
286
+ state_key: str = "conv_info",
287
+ ret_key: str = "conv_result",
288
+ additional: Optional[str] = "",
289
+ provider_id: Optional[str] = None,
290
+ metadata: Optional[Dict[str, Any]] = None,
291
+ timeout: Optional[int] = None,
292
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
293
+ ) -> HumanLoopWrapper:
294
+ """Decorator for multi-turn conversation scenario"""
295
+
296
+ if task_id is None:
297
+ task_id = str(uuid.uuid4())
298
+ if conversation_id is None:
299
+ conversation_id = str(uuid.uuid4())
300
+
301
+ def decorator(fn):
302
+ return self._conversation_cli(fn, task_id, conversation_id, state_key, ret_key, additional, provider_id, metadata, timeout, callback)
303
+ return HumanLoopWrapper(decorator)
304
+
305
+ def _conversation_cli(
306
+ self,
307
+ fn: Callable[[T], R],
308
+ task_id: str,
309
+ conversation_id: str,
310
+ state_key: str = "conv_info",
311
+ ret_key: str = "conv_result",
312
+ additional: Optional[str] = "",
313
+ metadata: Optional[Dict[str, Any]] = None,
314
+ provider_id: Optional[str] = None,
315
+ timeout: Optional[int] = None,
316
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
317
+ ) -> Callable[[T], R | None]:
318
+ """Internal decorator implementation for multi-turn conversation scenario
319
+
320
+ Converts function type from Callable[[T], R] to Callable[[T], R | None]
321
+
322
+ Main features:
323
+ 1. Conduct multi-turn conversations through human-machine interaction
324
+ 2. Inject conversation results into function parameters via ret_key
325
+ 3. Support both synchronous and asynchronous function calls
326
+
327
+ Parameters:
328
+ - fn: Target function to be decorated
329
+ - task_id: Unique task identifier for tracking human interaction requests
330
+ - conversation_id: Unique conversation identifier for tracking interaction sessions
331
+ - state_key: Key name used to get conversation input info from state
332
+ - ret_key: Parameter name used to inject human interaction results into function kwargs
333
+ - additional: Additional context information to show to users
334
+ - metadata: Optional metadata dictionary passed along with request
335
+ - provider_id: Optional provider identifier to route requests to specific provider
336
+ - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
337
+ - callback: Optional callback object or factory function for handling human interaction events
338
+
339
+ Returns:
340
+ - Decorated function maintaining original signature
341
+ - Raises ValueError if human interaction fails
342
+
343
+ Notes:
344
+ - Decorated function must accept ret_key parameter to receive interaction results
345
+ - Interaction results contain complete context information including:
346
+ - conversation_id: Unique conversation identifier
347
+ - request_id: Unique request identifier
348
+ - loop_type: Human interaction type (CONVERSATION)
349
+ - status: Current request status
350
+ - response: Human provided response
351
+ - feedback: Optional human feedback
352
+ - responded_by: Responder identity
353
+ - responded_at: Response timestamp
354
+ - error: Error information if any
355
+ - Automatically adapts to async and sync functions
356
+ """
357
+
358
+ @wraps(fn)
359
+ async def async_wrapper(*args, **kwargs) -> R | None:
360
+ # Determine if callback is instance or factory function
361
+ cb = None
362
+ state = args[0] if args else None
363
+ if callable(callback) and not isinstance(callback, HumanLoopCallback):
364
+ cb = callback(state)
365
+ else:
366
+ cb = callback
367
+
368
+ node_input = None
369
+ if state:
370
+ # Get input information from key fields in State
371
+ node_input = state.get(state_key, {})
372
+
373
+ # Compose question content
374
+ question_content = f"Please respond to the following information:\n{node_input}"
375
+
376
+ # Check if conversation exists to determine whether to use request_humanloop or continue_humanloop
377
+ conversation_requests = await self.manager.check_conversation_exist(task_id, conversation_id)
378
+
379
+ result = None
380
+ if conversation_requests:
381
+ # Existing conversation, use continue_humanloop
382
+ result = await self.manager.continue_humanloop(
383
+ conversation_id=conversation_id,
384
+ context={
385
+ "message": {
386
+ "function_name": fn.__name__,
387
+ "function_signature": str(fn.__code__.co_varnames),
388
+ "arguments": str(args),
389
+ "keyword_arguments": str(kwargs),
390
+ "documentation": fn.__doc__ or "No documentation available"
391
+ },
392
+ "question": question_content,
393
+ "additional": additional
394
+ },
395
+ timeout=timeout or self.default_timeout,
396
+ callback=cb,
397
+ metadata=metadata,
398
+ provider_id=provider_id,
399
+ blocking=True
400
+ )
401
+ else:
402
+ # New conversation, use request_humanloop
403
+ result = await self.manager.request_humanloop(
404
+ task_id=task_id,
405
+ conversation_id=conversation_id,
406
+ loop_type=HumanLoopType.CONVERSATION,
407
+ context={
408
+ "message": {
409
+ "function_name": fn.__name__,
410
+ "function_signature": str(fn.__code__.co_varnames),
411
+ "arguments": str(args),
412
+ "keyword_arguments": str(kwargs),
413
+ "documentation": fn.__doc__ or "No documentation available"
414
+ },
415
+ "question": question_content,
416
+ "additional": additional
417
+ },
418
+ timeout=timeout or self.default_timeout,
419
+ callback=cb,
420
+ metadata=metadata,
421
+ provider_id=provider_id,
422
+ blocking=True
423
+ )
424
+
425
+ # Initialize conversation result object as None
426
+ conversation_info = None
427
+
428
+ if isinstance(result, HumanLoopResult):
429
+ conversation_info = {
430
+ 'conversation_id': result.conversation_id,
431
+ 'request_id': result.request_id,
432
+ 'loop_type': result.loop_type,
433
+ 'status': result.status,
434
+ 'response': result.response,
435
+ 'feedback': result.feedback,
436
+ 'responded_by': result.responded_by,
437
+ 'responded_at': result.responded_at,
438
+ 'error': result.error
439
+ }
440
+
441
+ kwargs[ret_key] = conversation_info
442
+
443
+ if isinstance(result, HumanLoopResult):
444
+ if iscoroutinefunction(fn):
445
+ return await fn(*args, **kwargs)
446
+ return fn(*args, **kwargs)
447
+ else:
448
+ raise ValueError(f"Conversation request timeout or error for {fn.__name__}")
449
+
450
+ @wraps(fn)
451
+ def sync_wrapper(*args, **kwargs) -> R | None:
452
+ return run_async_safely(async_wrapper(*args, **kwargs))
453
+
454
+ if iscoroutinefunction(fn):
455
+ return async_wrapper # type: ignore
456
+ return sync_wrapper
457
+
458
+ def require_info(
459
+ self,
460
+ task_id: Optional[str] = None,
461
+ conversation_id: Optional[str] = None,
462
+ ret_key: str = "info_result",
463
+ additional: Optional[str] = "",
464
+ metadata: Optional[Dict[str, Any]] = None,
465
+ provider_id: Optional[str] = None,
466
+ timeout: Optional[int] = None,
467
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
468
+ ) -> HumanLoopWrapper:
469
+ """Decorator for information gathering scenario"""
470
+
471
+ if task_id is None:
472
+ task_id = str(uuid.uuid4())
473
+ if conversation_id is None:
474
+ conversation_id = str(uuid.uuid4())
475
+
476
+ def decorator(fn):
477
+ return self._get_info_cli(fn, task_id, conversation_id, ret_key, additional, metadata, provider_id, timeout, callback)
478
+ return HumanLoopWrapper(decorator)
479
+
480
+ def _get_info_cli(
481
+ self,
482
+ fn: Callable[[T], R],
483
+ task_id: str,
484
+ conversation_id: str,
485
+ ret_key: str = "info_result",
486
+ additional: Optional[str] = "",
487
+ metadata: Optional[Dict[str, Any]] = None,
488
+ provider_id: Optional[str] = None,
489
+ timeout: Optional[int] = None,
490
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
491
+ ) -> Callable[[T], R | None]:
492
+ """Internal decorator implementation for information gathering scenario
493
+ Converts function type from Callable[[T], R] to Callable[[T], R | None]
494
+
495
+ Main features:
496
+ 1. Get required information through human-machine interaction
497
+ 2. Inject obtained information into function parameters via ret_key
498
+ 3. Support both synchronous and asynchronous function calls
499
+
500
+ Parameters:
501
+ - fn: Target function to be decorated
502
+ - task_id: Unique task identifier for tracking the human loop request
503
+ - conversation_id: Unique conversation identifier for tracking the interaction session
504
+ - ret_key: Parameter name used to inject the human loop result into function kwargs
505
+ - additional: Additional context information to be shown to human user
506
+ - metadata: Optional metadata dictionary to be passed with the request
507
+ - provider_id: Optional provider identifier to route request to specific provider
508
+ - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
509
+ - callback: Optional callback object or factory function for handling human loop events
510
+
511
+ Returns:
512
+ - Decorated function maintaining original signature
513
+ - Raises ValueError if human interaction fails
514
+
515
+ Notes:
516
+ - Decorated function must accept ret_key parameter to receive interaction results
517
+ - Interaction results contain complete context information including:
518
+ - conversation_id: Unique conversation identifier
519
+ - request_id: Unique request identifier
520
+ - loop_type: Type of human loop (INFORMATION)
521
+ - status: Current status of the request
522
+ - response: Human provided response
523
+ - feedback: Optional feedback from human
524
+ - responded_by: Identity of responder
525
+ - responded_at: Response timestamp
526
+ - error: Error information if any
527
+ - Automatically adapts to async and sync functions
528
+ """
529
+
530
+ @wraps(fn)
531
+ async def async_wrapper(*args, **kwargs) -> R | None:
532
+
533
+ # Determine if callback is an instance or factory function
534
+ # callback: can be HumanLoopCallback instance or factory function
535
+ # - If factory function: accepts state parameter and returns HumanLoopCallback instance
536
+ # - If HumanLoopCallback instance: use directly
537
+ cb = None
538
+ if callable(callback) and not isinstance(callback, HumanLoopCallback):
539
+ # Factory function mode: get state from args and create callback instance
540
+ # state is typically the first argument, None if args is empty
541
+ state = args[0] if args else None
542
+ cb = callback(state)
543
+ else:
544
+ cb = callback
545
+
546
+ result = await self.manager.request_humanloop(
547
+ task_id=task_id,
548
+ conversation_id=conversation_id,
549
+ loop_type=HumanLoopType.INFORMATION,
550
+ context={
551
+ "message": {
552
+ "function_name": fn.__name__,
553
+ "function_signature": str(fn.__code__.co_varnames),
554
+ "arguments": str(args),
555
+ "keyword_arguments": str(kwargs),
556
+ "documentation": fn.__doc__ or "No documentation available"
557
+ },
558
+ "question": "Please provide the required information for the human loop",
559
+ "additional": additional
560
+ },
561
+ timeout=timeout or self.default_timeout,
562
+ callback=cb,
563
+ metadata=metadata,
564
+ provider_id=provider_id,
565
+ blocking=True
566
+ )
567
+
568
+ # Initialize the response info object as None
569
+ resp_info = None
570
+
571
+ if isinstance(result, HumanLoopResult):
572
+ # If the result is a HumanLoopResult, build the complete response info
573
+ resp_info = {
574
+ 'conversation_id': result.conversation_id,
575
+ 'request_id': result.request_id,
576
+ 'loop_type': result.loop_type,
577
+ 'status': result.status,
578
+ 'response': result.response,
579
+ 'feedback': result.feedback,
580
+ 'responded_by': result.responded_by,
581
+ 'responded_at': result.responded_at,
582
+ 'error': result.error
583
+ }
584
+
585
+ kwargs[ret_key] = resp_info
586
+
587
+ # Check whether the result is valid
588
+ if isinstance(result, HumanLoopResult):
589
+ # Return the gathered information and let the caller decide how to use it
590
+ if iscoroutinefunction(fn):
591
+ return await fn(*args, **kwargs)
592
+ return fn(*args, **kwargs)
593
+ else:
594
+ raise ValueError(f"Info request timeout or error for {fn.__name__}")
595
+
596
+ @wraps(fn)
597
+ def sync_wrapper(*args, **kwargs) -> R | None:
598
+ return run_async_safely(async_wrapper(*args, **kwargs))
599
+
600
+ # Return the corresponding wrapper based on the decorated function type
601
+ if iscoroutinefunction(fn):
602
+ return async_wrapper # type: ignore
603
+ return sync_wrapper
604
+
605
+ class LangGraphHumanLoopCallback(HumanLoopCallback):
606
+ """LangGraph-specific human loop callback, compatible with TypedDict or Pydantic BaseModel State"""
607
+
608
+ def __init__(
609
+ self,
610
+ state: Any,
611
+ on_update: Optional[Callable[[Any, HumanLoopProvider, HumanLoopResult], Awaitable[None]]] = None,
612
+ on_timeout: Optional[Callable[[Any, HumanLoopProvider], Awaitable[None]]] = None,
613
+ on_error: Optional[Callable[[Any, HumanLoopProvider, Exception], Awaitable[None]]] = None
614
+ ):
615
+ self.state = state
616
+ self.on_update = on_update
617
+ self.on_timeout = on_timeout
618
+ self.on_error = on_error
619
+
620
+ async def on_humanloop_update(
621
+ self,
622
+ provider: HumanLoopProvider,
623
+ result: HumanLoopResult
624
+ ):
625
+ if self.on_update:
626
+ await self.on_update(self.state, provider, result)
627
+
628
+ async def on_humanloop_timeout(
629
+ self,
630
+ provider: HumanLoopProvider,
631
+ ):
632
+ if self.on_timeout:
633
+ await self.on_timeout(self.state, provider)
634
+
635
+ async def on_humanloop_error(
636
+ self,
637
+ provider: HumanLoopProvider,
638
+ error: Exception
639
+ ):
640
+ if self.on_error:
641
+ await self.on_error(self.state, provider, error)
642
+
643
+
644
+ def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback:
645
+ """Default human-loop callback factory for LangGraph framework
646
+
647
+ This callback focuses on:
648
+ 1. Logging human interaction events
649
+ 2. Providing debug information
650
+ 3. Collecting performance metrics
651
+
652
+ Note: This callback does not modify state to maintain clear state management
653
+
654
+ Args:
655
+ state: LangGraph state object, only used for log correlation
656
+
657
+ Returns:
658
+ Configured LangGraphHumanLoopCallback instance
659
+ """
660
+ import logging
661
+
662
+ logger = logging.getLogger("gohumanloop.langgraph")
663
+
664
+ async def on_update(state, provider: HumanLoopProvider, result: HumanLoopResult):
665
+ """Log human interaction update events"""
666
+ logger.info(f"Provider ID: {provider.name}")
667
+ logger.info(
668
+ f"Human interaction update "
669
+ f"status={result.status}, "
670
+ f"response={result.response}, "
671
+ f"responded_by={result.responded_by}, "
672
+ f"responded_at={result.responded_at}, "
673
+ f"feedback={result.feedback}"
674
+ )
675
+
676
+
677
+ async def on_timeout(state, provider: HumanLoopProvider):
678
+ """Log human interaction timeout events"""
679
+
680
+ logger.info(f"Provider ID: {provider.name}")
681
+ from datetime import datetime
682
+ current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
683
+ logger.warning(f"Human interaction timeout - Time: {current_time}")
684
+
685
+
686
+ # Alert logic can be added here, such as sending notifications
687
+
688
+ async def on_error(state, provider: HumanLoopProvider, error: Exception):
689
+ """Log human interaction error events"""
690
+
691
+ logger.info(f"Provider ID: {provider.name}")
692
+ from datetime import datetime
693
+ current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
694
+ logger.error(f"Human interaction error - Time: {current_time} Error: {error}")
695
+
696
+ return LangGraphHumanLoopCallback(
697
+ state=state,
698
+ on_update=on_update,
699
+ on_timeout=on_timeout,
700
+ on_error=on_error
701
+ )
702
+
703
+ from gohumanloop.core.manager import DefaultHumanLoopManager
704
+ from gohumanloop.providers.terminal_provider import TerminalProvider
705
+
706
+ # Create HumanLoopManager instance
707
+ manager = DefaultHumanLoopManager(initial_providers=TerminalProvider(name="LGDefaultProvider"))
708
+
709
+ # Create LangGraphAdapter instance
710
+ default_adapter = LangGraphAdapter(manager, default_timeout=60)
711
+
712
+ default_conversation_id = str(uuid.uuid4())
713
+
714
+ def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
715
+ """
716
+ Wraps LangGraph's interrupt functionality to pause graph execution and wait for human input
717
+
718
+ Raises RuntimeError if LangGraph version doesn't support interrupt
719
+
720
+ Args:
721
+ value: Any JSON-serializable value that will be shown to human user
722
+ lg_humanloop: LangGraphAdapter instance, defaults to global instance
723
+
724
+ Returns:
725
+ Input value provided by human user
726
+ """
727
+ if not _SUPPORTS_INTERRUPT:
728
+ raise RuntimeError(
729
+ "LangGraph version too low, interrupt not supported. Please upgrade to version 0.2.57 or higher."
730
+ "You can use: pip install --upgrade langgraph>=0.2.57"
731
+ )
732
+
733
+ # Get current event loop or create new one
734
+ try:
735
+ loop = asyncio.get_event_loop()
736
+ except RuntimeError:
737
+ # If no event loop exists, create a new one
738
+ loop = asyncio.new_event_loop()
739
+ asyncio.set_event_loop(loop)
740
+
741
+ loop.create_task(lg_humanloop.manager.request_humanloop(
742
+ task_id="lg_interrupt",
743
+ conversation_id=default_conversation_id,
744
+ loop_type=HumanLoopType.INFORMATION,
745
+ context={
746
+ "message": f"{value}",
747
+ "question": "The execution has been interrupted. Please review the above information and provide your input to continue.",
748
+ },
749
+ blocking=False,
750
+ ))
751
+
752
+ # Return LangGraph's interrupt
753
+ return _lg_interrupt(value)
754
+ def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
755
+ """
756
+ Create a Command object to resume interrupted graph execution
757
+
758
+ Will raise RuntimeError if LangGraph version doesn't support Command
759
+
760
+ Args:
761
+ lg_humanloop: LangGraphAdapter instance, defaults to global instance
762
+
763
+ Returns:
764
+ Command object that can be used with graph.stream method
765
+ """
766
+ if not _SUPPORTS_INTERRUPT:
767
+ raise RuntimeError(
768
+ "LangGraph version too low, Command feature not supported. Please upgrade to 0.2.57 or higher."
769
+ "You can use: pip install --upgrade langgraph>=0.2.57"
770
+ )
771
+
772
+ # Define async polling function
773
+ async def poll_for_result():
774
+ poll_interval = 1.0 # Polling interval (seconds)
775
+ while True:
776
+ result = await lg_humanloop.manager.check_conversation_status(default_conversation_id)
777
+ # If status is final state (not PENDING), return result
778
+ if result.status != HumanLoopStatus.PENDING:
779
+ return result.response
780
+ # Wait before polling again
781
+ await asyncio.sleep(poll_interval)
782
+
783
+ # Wait for async result synchronously
784
+ response = run_async_safely(poll_for_result())
785
+ return _lg_Command(resume=response)
786
+
787
+ async def acreate_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
788
+ """
789
+ Create an async version of Command object to resume interrupted graph execution
790
+
791
+ Will raise RuntimeError if LangGraph version doesn't support Command
792
+
793
+ Args:
794
+ lg_humanloop: LangGraphAdapter instance, defaults to global instance
795
+
796
+ Returns:
797
+ Command object that can be used with graph.astream method
798
+ """
799
+ if not _SUPPORTS_INTERRUPT:
800
+ raise RuntimeError(
801
+ "LangGraph version too low, Command feature not supported. Please upgrade to 0.2.57 or higher."
802
+ "You can use: pip install --upgrade langgraph>=0.2.57"
803
+ )
804
+
805
+ # Define async polling function
806
+ async def poll_for_result():
807
+ poll_interval = 1.0 # Polling interval (seconds)
808
+ while True:
809
+ result = await lg_humanloop.manager.check_conversation_status(default_conversation_id)
810
+ # If status is final state (not PENDING), return result
811
+ if result.status != HumanLoopStatus.PENDING:
812
+ return result.response
813
+ # Wait before polling again
814
+ await asyncio.sleep(poll_interval)
815
+
816
+ # Wait for async result directly
817
+ response = await poll_for_result()
818
+ return _lg_Command(resume=response)
819
+
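
The decorators in this file inject the human response into the wrapped function through `ret_key`, so the node signature stays LangGraph-compatible. A minimal usage sketch follows; it is not part of the released file, and the import path, state shape, and node function are assumptions made for illustration.

# Illustrative sketch only; not part of the released file. The import path
# below is an assumption -- adjust it to wherever this module is installed.
from typing import TypedDict

from gohumanloop.adapters.langgraph_adapter import (  # assumed path
    default_adapter,
    default_langgraph_callback_factory,
)

class RefundState(TypedDict):
    # Hypothetical LangGraph state for a refund workflow
    amount: float

@default_adapter.require_approval(
    ret_key="approval_result",      # injected into kwargs by the decorator
    additional="Refunds above $100 need a human sign-off.",
    timeout=120,
    callback=default_langgraph_callback_factory,
)
def issue_refund(state: RefundState, approval_result=None) -> RefundState:
    # Runs only after a human approves; approval_result carries the full
    # result fields (status, response, responded_by, responded_at, ...).
    print("Approved by:", approval_result["responded_by"])
    return state

Used as a graph node, the wrapper blocks on the TerminalProvider until an approver responds and raises ValueError on rejection unless execute_on_reject=True; require_info and require_conversation follow the same injection pattern via their own ret_key.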
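
The `interrupt` and `create_resume_command` wrappers pair LangGraph's native interrupt with a gohumanloop information request and a polling resume. A rough sketch of that flow follows, assuming langgraph >= 0.2.57; the graph wiring, state schema, and thread id are placeholders, and the import path is again assumed.

# Illustrative sketch only; assumes langgraph >= 0.2.57 is installed.
# interrupt and create_resume_command are the wrappers defined above;
# DraftState and "demo-thread" are placeholders.
from typing import TypedDict

from langgraph.graph import StateGraph, START, END
from langgraph.checkpoint.memory import MemorySaver

from gohumanloop.adapters.langgraph_adapter import (  # assumed path
    interrupt,
    create_resume_command,
)

class DraftState(TypedDict):
    draft: str

def review(state: DraftState) -> DraftState:
    # Files a non-blocking gohumanloop information request, then raises
    # LangGraph's interrupt to suspend execution until resumed.
    human_text = interrupt(state["draft"])
    return {"draft": human_text}

builder = StateGraph(DraftState)
builder.add_node("review", review)
builder.add_edge(START, "review")
builder.add_edge("review", END)
graph = builder.compile(checkpointer=MemorySaver())

config = {"configurable": {"thread_id": "demo-thread"}}
for event in graph.stream({"draft": "first draft"}, config):
    print(event)

# Once the human has replied through the provider, poll for the response
# and resume from the interrupt with a Command object:
for event in graph.stream(create_resume_command(), config):
    print(event)

Because create_resume_command polls check_conversation_status synchronously until the conversation leaves PENDING, it should be called once a human response is expected; acreate_resume_command is the awaitable variant for async drivers.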