gohumanloop 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,817 @@
1
+ from typing import Dict, Any, Optional, Callable, Awaitable, TypeVar, Union, List
2
+ from functools import wraps
3
+ import asyncio
4
+ import uuid
5
+ import time
6
+ from inspect import iscoroutinefunction
7
+ from contextlib import asynccontextmanager, contextmanager
8
+
9
+ from gohumanloop.utils import run_async_safely
10
+ from gohumanloop.core.interface import (
11
+ HumanLoopManager, HumanLoopResult, HumanLoopStatus, HumanLoopType, HumanLoopCallback, HumanLoopProvider
12
+ )
13
+
14
+ # Define TypeVars for input and output types
15
+ T = TypeVar("T")
16
+ R = TypeVar('R')
17
+
18
+ # Check LangGraph version
19
+ def _check_langgraph_version():
20
+ """Check LangGraph version to determine if interrupt feature is supported"""
21
+ try:
22
+ import importlib.metadata
23
+ version = importlib.metadata.version("langgraph")
24
+ version_parts = version.split('.')
25
+ major, minor, patch = int(version_parts[0]), int(version_parts[1]), int(version_parts[2])
26
+
27
+ # Interrupt support starts from version 0.2.57
28
+ return (major > 0 or (major == 0 and (minor > 2 or (minor == 2 and patch >= 57))))
29
+ except (importlib.metadata.PackageNotFoundError, ValueError, IndexError):
30
+ # If version cannot be determined, assume no support
31
+ return False
32
+
33
+ # Import corresponding features based on version
34
+ _SUPPORTS_INTERRUPT = _check_langgraph_version()
35
+ if _SUPPORTS_INTERRUPT:
36
+ try:
37
+ from langgraph.types import interrupt as _lg_interrupt
38
+ from langgraph.types import Command as _lg_Command
39
+ except ImportError:
40
+ _SUPPORTS_INTERRUPT = False
41
+
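
For reference, the check above treats 0.2.57 as the first LangGraph release with interrupt support. A minimal sketch of the same comparison (the helper name and the assertions are illustrative only, not part of this file):

# Hypothetical mirror of _check_langgraph_version's comparison, for illustration.
def _supports_interrupt(version: str) -> bool:
    major, minor, patch = (int(p) for p in version.split(".")[:3])
    return major > 0 or (major == 0 and (minor > 2 or (minor == 2 and patch >= 57)))

assert _supports_interrupt("0.2.56") is False  # just below the cut-off
assert _supports_interrupt("0.2.57") is True   # first supported release
assert _supports_interrupt("0.3.0") is True    # any later minor/major also passes
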
42
+ class HumanLoopWrapper:
43
+ def __init__(
44
+ self,
45
+ decorator: Callable[[Any], Callable],
46
+ ) -> None:
47
+ self.decorator = decorator
48
+
49
+ def wrap(self, fn: Callable) -> Callable:
50
+ return self.decorator(fn)
51
+
52
+ def __call__(self, fn: Callable) -> Callable:
53
+ return self.decorator(fn)
54
+
55
+ class LangGraphAdapter:
56
+ """LangGraph adapter for simplifying human-in-the-loop integration
57
+
58
+ Provides decorators for three scenarios:
59
+ - require_approval: Requires human approval
60
+ - require_info: Requires human input information
61
+ - require_conversation: Requires multi-turn conversation
62
+ """
63
+
64
+ def __init__(
65
+ self,
66
+ manager: HumanLoopManager,
67
+ default_timeout: Optional[int] = None
68
+ ):
69
+ self.manager = manager
70
+ self.default_timeout = default_timeout
71
+
72
+ async def __aenter__(self):
73
+ """Implements async context manager protocol, automatically manages manager lifecycle"""
74
+ if hasattr(self.manager, '__aenter__'):
75
+ await self.manager.__aenter__()
76
+ return self
77
+
78
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
79
+ """Implements async context manager protocol, automatically manages manager lifecycle"""
80
+ if hasattr(self.manager, '__aexit__'):
81
+ await self.manager.__aexit__(exc_type, exc_val, exc_tb)
82
+
83
+ def __enter__(self):
84
+ """Implements sync context manager protocol, automatically manages manager lifecycle"""
85
+ if hasattr(self.manager, '__enter__'):
86
+ self.manager.__enter__()
87
+ return self
88
+
89
+ def __exit__(self, exc_type, exc_val, exc_tb):
90
+ """Implements sync context manager protocol, automatically manages manager lifecycle"""
91
+ if hasattr(self.manager, '__exit__'):
92
+ self.manager.__exit__(exc_type, exc_val, exc_tb)
93
+
94
+ @asynccontextmanager
95
+ async def asession(self):
96
+ """Provides async context manager for managing session lifecycle
97
+
98
+ Example:
99
+             async with adapter.asession():
100
+ # Use adapter here
101
+ """
102
+ try:
103
+ if hasattr(self.manager, '__aenter__'):
104
+ await self.manager.__aenter__()
105
+ yield self
106
+ finally:
107
+ if hasattr(self.manager, '__aexit__'):
108
+ await self.manager.__aexit__(None, None, None)
109
+
110
+ @contextmanager
111
+ def session(self):
112
+ """Provides a synchronous context manager for managing session lifecycle
113
+
114
+ Example:
115
+             with adapter.session():
116
+ # Use adapter here
117
+ """
118
+ try:
119
+ if hasattr(self.manager, '__enter__'):
120
+ self.manager.__enter__()
121
+ yield self
122
+ finally:
123
+ if hasattr(self.manager, '__exit__'):
124
+ self.manager.__exit__(None, None, None)
125
+
126
+ def require_approval(
127
+ self,
128
+ task_id: Optional[str] = None,
129
+ conversation_id: Optional[str] = None,
130
+ ret_key: str = "approval_result",
131
+ additional: Optional[str] = "",
132
+ metadata: Optional[Dict[str, Any]] = None,
133
+ provider_id: Optional[str] = None,
134
+ timeout: Optional[int] = None,
135
+ execute_on_reject: bool = False,
136
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
137
+ ) -> HumanLoopWrapper:
138
+ """Decorator for approval scenario"""
139
+ if task_id is None:
140
+ task_id = str(uuid.uuid4())
141
+ if conversation_id is None:
142
+ conversation_id = str(uuid.uuid4())
143
+
144
+ def decorator(fn):
145
+ return self._approve_cli(fn, task_id, conversation_id, ret_key, additional, metadata, provider_id, timeout, execute_on_reject, callback)
146
+ return HumanLoopWrapper(decorator)
147
+
148
+ def _approve_cli(
149
+ self,
150
+ fn: Callable[[T], R],
151
+ task_id: str,
152
+ conversation_id: str,
153
+ ret_key: str = "approval_result",
154
+ additional: Optional[str] = "",
155
+ metadata: Optional[Dict[str, Any]] = None,
156
+ provider_id: Optional[str] = None,
157
+ timeout: Optional[int] = None,
158
+ execute_on_reject: bool = False,
159
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
160
+ ) -> Callable[[T], R | None]:
161
+ """
162
+ Converts function type from Callable[[T], R] to Callable[[T], R | None]
163
+
164
+ Passes approval results through keyword arguments while maintaining original function signature
165
+
166
+ Benefits of this approach:
167
+ 1. Maintains original function return type, keeping compatibility with LangGraph workflow
168
+ 2. Decorated function can optionally use approval result information
169
+ 3. Can pass richer approval context information
170
+
171
+ Parameters:
172
+ - fn: Target function to be decorated
173
+ - task_id: Unique task identifier for tracking approval requests
174
+ - conversation_id: Unique conversation identifier for tracking approval sessions
175
+ - ret_key: Parameter name used to inject approval results into function kwargs
176
+ - additional: Additional context information to show to approvers
177
+ - metadata: Optional metadata dictionary passed with request
178
+ - provider_id: Optional provider identifier to route requests
179
+ - timeout: Timeout in seconds for approval response
180
+ - execute_on_reject: Whether to execute function on rejection
181
+ - callback: Optional callback object or factory function for approval events
182
+
183
+ Returns:
184
+ - Decorated function maintaining original signature
185
+ - Raises ValueError if approval fails or is rejected
186
+
187
+ Notes:
188
+ - Decorated function must accept ret_key parameter to receive approval results
189
+ - If approval is rejected, execution depends on execute_on_reject parameter
190
+ - Approval results contain complete context including:
191
+ - conversation_id: Unique conversation identifier
192
+ - request_id: Unique request identifier
193
+ - loop_type: Type of human loop (APPROVAL)
194
+ - status: Current approval status
195
+ - response: Approver's response
196
+ - feedback: Optional approver feedback
197
+ - responded_by: Approver identity
198
+ - responded_at: Response timestamp
199
+ - error: Error information if any
200
+ """
201
+
202
+ @wraps(fn)
203
+ async def async_wrapper(*args, **kwargs) -> R | None:
204
+ # Determine if callback is instance or factory function
205
+ cb = None
206
+ if callable(callback) and not isinstance(callback, HumanLoopCallback):
207
+ # Factory function, pass state
208
+ state = args[0] if args else None
209
+ cb = callback(state)
210
+ else:
211
+ cb = callback
212
+
213
+ result = await self.manager.async_request_humanloop(
214
+ task_id=task_id,
215
+ conversation_id=conversation_id,
216
+ loop_type=HumanLoopType.APPROVAL,
217
+ context={
218
+ "message": {
219
+ "function_name": fn.__name__,
220
+ "function_signature": str(fn.__code__.co_varnames),
221
+ "arguments": str(args),
222
+ "keyword_arguments": str(kwargs),
223
+ "documentation": fn.__doc__ or "No documentation available"
224
+ },
225
+ "question": "Please review and approve/reject this human loop execution.",
226
+ "additional": additional
227
+ },
228
+ callback=cb,
229
+ metadata=metadata,
230
+ provider_id=provider_id,
231
+ timeout=timeout or self.default_timeout,
232
+ blocking=True
233
+ )
234
+
235
+ # Initialize approval result object as None
236
+ approval_info = None
237
+
238
+ if isinstance(result, HumanLoopResult):
239
+ # If result is HumanLoopResult type, build complete approval info
240
+ approval_info = {
241
+ 'conversation_id': result.conversation_id,
242
+ 'request_id': result.request_id,
243
+ 'loop_type': result.loop_type,
244
+ 'status': result.status,
245
+ 'response': result.response,
246
+ 'feedback': result.feedback,
247
+ 'responded_by': result.responded_by,
248
+ 'responded_at': result.responded_at,
249
+ 'error': result.error
250
+ }
251
+
252
+ kwargs[ret_key] = approval_info
253
+ # Check approval result
254
+ if isinstance(result, HumanLoopResult):
255
+ # Handle based on approval status
256
+ if result.status == HumanLoopStatus.APPROVED:
257
+ if iscoroutinefunction(fn):
258
+ return await fn(*args, **kwargs)
259
+ return fn(*args, **kwargs)
260
+ elif result.status == HumanLoopStatus.REJECTED:
261
+ # If execute on reject is set, run the function
262
+ if execute_on_reject:
263
+ if iscoroutinefunction(fn):
264
+ return await fn(*args, **kwargs)
265
+ return fn(*args, **kwargs)
266
+                     # Otherwise raise an error with the rejection reason
267
+ reason = result.response
268
+ raise ValueError(f"Function {fn.__name__} execution not approved: {reason}")
269
+ else:
270
+                     raise ValueError(f"Approval error for {fn.__name__}: status={result.status}, error={result.error}")
271
+ else:
272
+                 raise ValueError(f"Unexpected approval result for {fn.__name__}")
273
+
274
+ @wraps(fn)
275
+ def sync_wrapper(*args, **kwargs) -> R | None:
276
+ return run_async_safely(async_wrapper(*args, **kwargs))
277
+
278
+ # Return corresponding wrapper based on decorated function type
279
+ if iscoroutinefunction(fn):
280
+ return async_wrapper # type: ignore
281
+ return sync_wrapper
282
+
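
A minimal usage sketch for the approval decorator (the provider wiring, import path, state shape, and node name below are assumptions for illustration, not part of this file):

from typing import TypedDict

from gohumanloop.core.manager import DefaultHumanLoopManager
from gohumanloop.providers.terminal_provider import TerminalProvider
# The import path for this adapter module is assumed here.
from gohumanloop.adapters.langgraph_adapter import LangGraphAdapter

class PlanState(TypedDict):
    plan: str

manager = DefaultHumanLoopManager(initial_providers=TerminalProvider(name="ApprovalProvider"))
adapter = LangGraphAdapter(manager, default_timeout=120)

@adapter.require_approval(ret_key="approval_result", additional="This step modifies production data")
def apply_plan(state: PlanState, approval_result=None) -> PlanState:
    # approval_result is injected by the decorator and carries conversation_id,
    # request_id, status, response, feedback, responded_by, responded_at, error.
    # The body only runs after approval (or on rejection when execute_on_reject=True).
    return {"plan": state["plan"] + " [approved]"}
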
283
+ def require_conversation(
284
+ self,
285
+ task_id: Optional[str] = None,
286
+ conversation_id: Optional[str] = None,
287
+ state_key: str = "conv_info",
288
+ ret_key: str = "conv_result",
289
+ additional: Optional[str] = "",
290
+ provider_id: Optional[str] = None,
291
+ metadata: Optional[Dict[str, Any]] = None,
292
+ timeout: Optional[int] = None,
293
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
294
+ ) -> HumanLoopWrapper:
295
+ """Decorator for multi-turn conversation scenario"""
296
+
297
+ if task_id is None:
298
+ task_id = str(uuid.uuid4())
299
+ if conversation_id is None:
300
+ conversation_id = str(uuid.uuid4())
301
+
302
+ def decorator(fn):
303
+ return self._conversation_cli(fn, task_id, conversation_id, state_key, ret_key, additional, provider_id, metadata, timeout, callback)
304
+ return HumanLoopWrapper(decorator)
305
+
306
+ def _conversation_cli(
307
+ self,
308
+ fn: Callable[[T], R],
309
+ task_id: str,
310
+ conversation_id: str,
311
+ state_key: str = "conv_info",
312
+ ret_key: str = "conv_result",
313
+ additional: Optional[str] = "",
314
+ metadata: Optional[Dict[str, Any]] = None,
315
+ provider_id: Optional[str] = None,
316
+ timeout: Optional[int] = None,
317
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
318
+ ) -> Callable[[T], R | None]:
319
+ """Internal decorator implementation for multi-turn conversation scenario
320
+
321
+ Converts function type from Callable[[T], R] to Callable[[T], R | None]
322
+
323
+ Main features:
324
+ 1. Conduct multi-turn conversations through human-machine interaction
325
+ 2. Inject conversation results into function parameters via ret_key
326
+ 3. Support both synchronous and asynchronous function calls
327
+
328
+ Parameters:
329
+ - fn: Target function to be decorated
330
+ - task_id: Unique task identifier for tracking human interaction requests
331
+ - conversation_id: Unique conversation identifier for tracking interaction sessions
332
+ - state_key: Key name used to get conversation input info from state
333
+ - ret_key: Parameter name used to inject human interaction results into function kwargs
334
+ - additional: Additional context information to show to users
335
+ - metadata: Optional metadata dictionary passed along with request
336
+ - provider_id: Optional provider identifier to route requests to specific provider
337
+ - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
338
+ - callback: Optional callback object or factory function for handling human interaction events
339
+
340
+ Returns:
341
+ - Decorated function maintaining original signature
342
+ - Raises ValueError if human interaction fails
343
+
344
+ Notes:
345
+ - Decorated function must accept ret_key parameter to receive interaction results
346
+ - Interaction results contain complete context information including:
347
+ - conversation_id: Unique conversation identifier
348
+ - request_id: Unique request identifier
349
+ - loop_type: Human interaction type (CONVERSATION)
350
+ - status: Current request status
351
+ - response: Human provided response
352
+ - feedback: Optional human feedback
353
+ - responded_by: Responder identity
354
+ - responded_at: Response timestamp
355
+ - error: Error information if any
356
+ - Automatically adapts to async and sync functions
357
+ """
358
+
359
+ @wraps(fn)
360
+ async def async_wrapper(*args, **kwargs) -> R | None:
361
+ # Determine if callback is instance or factory function
362
+ cb = None
363
+ state = args[0] if args else None
364
+ if callable(callback) and not isinstance(callback, HumanLoopCallback):
365
+ cb = callback(state)
366
+ else:
367
+ cb = callback
368
+
369
+ node_input = None
370
+ if state:
371
+ # Get input information from key fields in State
372
+ node_input = state.get(state_key, {})
373
+
374
+ # Compose question content
375
+ question_content = f"Please respond to the following information:\n{node_input}"
376
+
377
+ # Check if conversation exists to determine whether to use request_humanloop or continue_humanloop
378
+ conversation_requests = await self.manager.async_check_conversation_exist(task_id, conversation_id)
379
+
380
+ result = None
381
+ if conversation_requests:
382
+ # Existing conversation, use continue_humanloop
383
+ result = await self.manager.async_continue_humanloop(
384
+ conversation_id=conversation_id,
385
+ context={
386
+ "message": {
387
+ "function_name": fn.__name__,
388
+ "function_signature": str(fn.__code__.co_varnames),
389
+ "arguments": str(args),
390
+ "keyword_arguments": str(kwargs),
391
+ "documentation": fn.__doc__ or "No documentation available"
392
+ },
393
+ "question": question_content,
394
+ "additional": additional
395
+ },
396
+ timeout=timeout or self.default_timeout,
397
+ callback=cb,
398
+ metadata=metadata,
399
+ provider_id=provider_id,
400
+ blocking=True
401
+ )
402
+ else:
403
+ # New conversation, use request_humanloop
404
+ result = await self.manager.async_request_humanloop(
405
+ task_id=task_id,
406
+ conversation_id=conversation_id,
407
+ loop_type=HumanLoopType.CONVERSATION,
408
+ context={
409
+ "message": {
410
+ "function_name": fn.__name__,
411
+ "function_signature": str(fn.__code__.co_varnames),
412
+ "arguments": str(args),
413
+ "keyword_arguments": str(kwargs),
414
+ "documentation": fn.__doc__ or "No documentation available"
415
+ },
416
+ "question": question_content,
417
+ "additional": additional
418
+ },
419
+ timeout=timeout or self.default_timeout,
420
+ callback=cb,
421
+ metadata=metadata,
422
+ provider_id=provider_id,
423
+ blocking=True
424
+ )
425
+
426
+ # Initialize conversation result object as None
427
+ conversation_info = None
428
+
429
+ if isinstance(result, HumanLoopResult):
430
+ conversation_info = {
431
+ 'conversation_id': result.conversation_id,
432
+ 'request_id': result.request_id,
433
+ 'loop_type': result.loop_type,
434
+ 'status': result.status,
435
+ 'response': result.response,
436
+ 'feedback': result.feedback,
437
+ 'responded_by': result.responded_by,
438
+ 'responded_at': result.responded_at,
439
+ 'error': result.error
440
+ }
441
+
442
+ kwargs[ret_key] = conversation_info
443
+
444
+ if isinstance(result, HumanLoopResult):
445
+ if iscoroutinefunction(fn):
446
+ return await fn(*args, **kwargs)
447
+ return fn(*args, **kwargs)
448
+ else:
449
+ raise ValueError(f"Conversation request timeout or error for {fn.__name__}")
450
+
451
+ @wraps(fn)
452
+ def sync_wrapper(*args, **kwargs) -> R | None:
453
+ return run_async_safely(async_wrapper(*args, **kwargs))
454
+
455
+ if iscoroutinefunction(fn):
456
+ return async_wrapper # type: ignore
457
+ return sync_wrapper
458
+
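
A usage sketch for the conversation decorator (reuses the hypothetical `adapter` from the approval example above; the state fields are assumptions):

@adapter.require_conversation(state_key="conv_info", ret_key="conv_result",
                              additional="Ask follow-up questions if requirements are unclear")
def clarify_requirements(state, conv_result=None):
    # The question shown to the human is built from state["conv_info"];
    # the human's reply arrives in conv_result["response"].
    reply = conv_result["response"] if conv_result else None
    return {"requirements": reply}
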
459
+ def require_info(
460
+ self,
461
+ task_id: Optional[str] = None,
462
+ conversation_id: Optional[str] = None,
463
+ ret_key: str = "info_result",
464
+ additional: Optional[str] = "",
465
+ metadata: Optional[Dict[str, Any]] = None,
466
+ provider_id: Optional[str] = None,
467
+ timeout: Optional[int] = None,
468
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
469
+ ) -> HumanLoopWrapper:
470
+ """Decorator for information gathering scenario"""
471
+
472
+ if task_id is None:
473
+ task_id = str(uuid.uuid4())
474
+ if conversation_id is None:
475
+ conversation_id = str(uuid.uuid4())
476
+
477
+ def decorator(fn):
478
+ return self._get_info_cli(fn, task_id, conversation_id, ret_key, additional, metadata, provider_id, timeout, callback)
479
+ return HumanLoopWrapper(decorator)
480
+
481
+ def _get_info_cli(
482
+ self,
483
+ fn: Callable[[T], R],
484
+ task_id: str,
485
+ conversation_id: str,
486
+ ret_key: str = "info_result",
487
+ additional: Optional[str] = "",
488
+ metadata: Optional[Dict[str, Any]] = None,
489
+ provider_id: Optional[str] = None,
490
+ timeout: Optional[int] = None,
491
+ callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
492
+ ) -> Callable[[T], R | None]:
493
+ """Internal decorator implementation for information gathering scenario
494
+ Converts function type from Callable[[T], R] to Callable[[T], R | None]
495
+
496
+ Main features:
497
+ 1. Get required information through human-machine interaction
498
+ 2. Inject obtained information into function parameters via ret_key
499
+ 3. Support both synchronous and asynchronous function calls
500
+
501
+ Parameters:
502
+ - fn: Target function to be decorated
503
+ - task_id: Unique task identifier for tracking the human loop request
504
+ - conversation_id: Unique conversation identifier for tracking the interaction session
505
+ - ret_key: Parameter name used to inject the human loop result into function kwargs
506
+ - additional: Additional context information to be shown to human user
507
+ - metadata: Optional metadata dictionary to be passed with the request
508
+ - provider_id: Optional provider identifier to route request to specific provider
509
+ - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
510
+ - callback: Optional callback object or factory function for handling human loop events
511
+
512
+ Returns:
513
+ - Decorated function maintaining original signature
514
+ - Raises ValueError if human interaction fails
515
+
516
+ Notes:
517
+ - Decorated function must accept ret_key parameter to receive interaction results
518
+ - Interaction results contain complete context information including:
519
+ - conversation_id: Unique conversation identifier
520
+ - request_id: Unique request identifier
521
+ - loop_type: Type of human loop (INFORMATION)
522
+ - status: Current status of the request
523
+ - response: Human provided response
524
+ - feedback: Optional feedback from human
525
+ - responded_by: Identity of responder
526
+ - responded_at: Response timestamp
527
+ - error: Error information if any
528
+ - Automatically adapts to async and sync functions
529
+ """
530
+
531
+ @wraps(fn)
532
+ async def async_wrapper(*args, **kwargs) -> R | None:
533
+
534
+ # Determine if callback is an instance or factory function
535
+ # callback: can be HumanLoopCallback instance or factory function
536
+ # - If factory function: accepts state parameter and returns HumanLoopCallback instance
537
+ # - If HumanLoopCallback instance: use directly
538
+ cb = None
539
+ if callable(callback) and not isinstance(callback, HumanLoopCallback):
540
+ # Factory function mode: get state from args and create callback instance
541
+ # state is typically the first argument, None if args is empty
542
+ state = args[0] if args else None
543
+ cb = callback(state)
544
+ else:
545
+ cb = callback
546
+
547
+ result = await self.manager.async_request_humanloop(
548
+ task_id=task_id,
549
+ conversation_id=conversation_id,
550
+ loop_type=HumanLoopType.INFORMATION,
551
+ context={
552
+ "message": {
553
+ "function_name": fn.__name__,
554
+ "function_signature": str(fn.__code__.co_varnames),
555
+ "arguments": str(args),
556
+ "keyword_arguments": str(kwargs),
557
+ "documentation": fn.__doc__ or "No documentation available"
558
+ },
559
+ "question": "Please provide the required information for the human loop",
560
+ "additional": additional
561
+ },
562
+ timeout=timeout or self.default_timeout,
563
+ callback=cb,
564
+ metadata=metadata,
565
+ provider_id=provider_id,
566
+ blocking=True
567
+ )
568
+
569
+             # Initialize the response info object as None
570
+ resp_info = None
571
+
572
+ if isinstance(result, HumanLoopResult):
573
+                 # If the result is a HumanLoopResult, build the complete response info
574
+ resp_info = {
575
+ 'conversation_id': result.conversation_id,
576
+ 'request_id': result.request_id,
577
+ 'loop_type': result.loop_type,
578
+ 'status': result.status,
579
+ 'response': result.response,
580
+ 'feedback': result.feedback,
581
+ 'responded_by': result.responded_by,
582
+ 'responded_at': result.responded_at,
583
+ 'error': result.error
584
+ }
585
+
586
+ kwargs[ret_key] = resp_info
587
+
588
+             # Check whether the result is valid
589
+ if isinstance(result, HumanLoopResult):
590
+                 # Return the gathered information; the caller decides whether to use it
591
+ if iscoroutinefunction(fn):
592
+ return await fn(*args, **kwargs)
593
+ return fn(*args, **kwargs)
594
+ else:
595
+ raise ValueError(f"Info request timeout or error for {fn.__name__}")
596
+
597
+ @wraps(fn)
598
+ def sync_wrapper(*args, **kwargs) -> R | None:
599
+ return run_async_safely(async_wrapper(*args, **kwargs))
600
+
601
+         # Return the corresponding wrapper based on the decorated function type
602
+ if iscoroutinefunction(fn):
603
+ return async_wrapper # type: ignore
604
+ return sync_wrapper
605
+
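
And a usage sketch for the information-gathering decorator (again illustrative; the field names and the `adapter` instance are assumptions carried over from the earlier sketches):

@adapter.require_info(ret_key="info_result", additional="Please provide the customer's account id")
def collect_account_id(state, info_result=None):
    # info_result["response"] holds whatever the human typed in.
    account_id = info_result["response"] if info_result else None
    return {"account_id": account_id}
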
606
+ class LangGraphHumanLoopCallback(HumanLoopCallback):
607
+ """LangGraph-specific human loop callback, compatible with TypedDict or Pydantic BaseModel State"""
608
+
609
+ def __init__(
610
+ self,
611
+ state: Any,
612
+ async_on_update: Optional[Callable[[Any, HumanLoopProvider, HumanLoopResult], Awaitable[None]]] = None,
613
+ async_on_timeout: Optional[Callable[[Any, HumanLoopProvider], Awaitable[None]]] = None,
614
+ async_on_error: Optional[Callable[[Any, HumanLoopProvider, Exception], Awaitable[None]]] = None,
615
+ ):
616
+ self.state = state
617
+ self.async_on_update = async_on_update
618
+ self.async_on_timeout = async_on_timeout
619
+ self.async_on_error = async_on_error
620
+
621
+ async def async_on_humanloop_update(
622
+ self,
623
+ provider: HumanLoopProvider,
624
+ result: HumanLoopResult
625
+ ):
626
+ if self.async_on_update:
627
+ await self.async_on_update(self.state, provider, result)
628
+
629
+ async def async_on_humanloop_timeout(
630
+ self,
631
+ provider: HumanLoopProvider,
632
+ ):
633
+ if self.async_on_timeout:
634
+ await self.async_on_timeout(self.state, provider)
635
+
636
+ async def async_humanloop_on_error(
637
+ self,
638
+ provider: HumanLoopProvider,
639
+ error: Exception
640
+ ):
641
+ if self.async_on_error:
642
+ await self.async_on_error(self.state, provider, error)
643
+
644
+
645
+ def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback:
646
+ """Default human-loop callback factory for LangGraph framework
647
+
648
+ This callback focuses on:
649
+ 1. Logging human interaction events
650
+ 2. Providing debug information
651
+ 3. Collecting performance metrics
652
+
653
+ Note: This callback does not modify state to maintain clear state management
654
+
655
+ Args:
656
+ state: LangGraph state object, only used for log correlation
657
+
658
+ Returns:
659
+ Configured LangGraphHumanLoopCallback instance
660
+ """
661
+ import logging
662
+
663
+ logger = logging.getLogger("gohumanloop.langgraph")
664
+
665
+ async def async_on_update(state, provider: HumanLoopProvider, result: HumanLoopResult):
666
+ """Log human interaction update events"""
667
+ logger.info(f"Provider ID: {provider.name}")
668
+ logger.info(
669
+ f"Human interaction update "
670
+ f"status={result.status}, "
671
+ f"response={result.response}, "
672
+ f"responded_by={result.responded_by}, "
673
+ f"responded_at={result.responded_at}, "
674
+ f"feedback={result.feedback}"
675
+ )
676
+
677
+
678
+
679
+ async def async_on_timeout(state, provider: HumanLoopProvider):
680
+ """Log human interaction timeout events"""
681
+
682
+ logger.info(f"Provider ID: {provider.name}")
683
+ from datetime import datetime
684
+ current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
685
+ logger.warning(f"Human interaction timeout - Time: {current_time}")
686
+
687
+
688
+ # Alert logic can be added here, such as sending notifications
689
+
690
+ async def async_on_error(state, provider: HumanLoopProvider, error: Exception):
691
+ """Log human interaction error events"""
692
+
693
+ logger.info(f"Provider ID: {provider.name}")
694
+ from datetime import datetime
695
+ current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
696
+ logger.error(f"Human interaction error - Time: {current_time} Error: {error}")
697
+
698
+ return LangGraphHumanLoopCallback(
699
+ state=state,
700
+ async_on_update=async_on_update,
701
+ async_on_timeout=async_on_timeout,
702
+ async_on_error=async_on_error
703
+ )
704
+
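
A sketch of wiring this factory into a decorator via the callback parameter (illustrative; it relies on the hypothetical `adapter` from the earlier examples). Because the factory is passed uncalled, the decorator invokes it with the node's state and receives a LangGraphHumanLoopCallback back:

@adapter.require_approval(callback=default_langgraph_callback_factory)
def risky_step(state, approval_result=None):
    return state
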
705
+ from gohumanloop.core.manager import DefaultHumanLoopManager
706
+ from gohumanloop.providers.terminal_provider import TerminalProvider
707
+
708
+ # Create HumanLoopManager instance
709
+ manager = DefaultHumanLoopManager(initial_providers=TerminalProvider(name="LGDefaultProvider"))
710
+
711
+ # Create LangGraphAdapter instance
712
+ default_adapter = LangGraphAdapter(manager, default_timeout=60)
713
+
714
+ default_conversation_id = str(uuid.uuid4())
715
+
716
+ def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
717
+ """
718
+ Wraps LangGraph's interrupt functionality to pause graph execution and wait for human input
719
+
720
+ Raises RuntimeError if LangGraph version doesn't support interrupt
721
+
722
+ Args:
723
+ value: Any JSON-serializable value that will be shown to human user
724
+ lg_humanloop: LangGraphAdapter instance, defaults to global instance
725
+
726
+ Returns:
727
+ Input value provided by human user
728
+ """
729
+ if not _SUPPORTS_INTERRUPT:
730
+ raise RuntimeError(
731
+             "LangGraph version too low, interrupt not supported. Please upgrade to version 0.2.57 or higher. "
732
+ "You can use: pip install --upgrade langgraph>=0.2.57"
733
+ )
734
+
735
+     # Send a non-blocking human-loop request so the provider is notified of the interrupt
736
+ lg_humanloop.manager.request_humanloop(
737
+ task_id="lg_interrupt",
738
+ conversation_id=default_conversation_id,
739
+ loop_type=HumanLoopType.INFORMATION,
740
+ context={
741
+ "message": f"{value}",
742
+ "question": "The execution has been interrupted. Please review the above information and provide your input to continue.",
743
+ },
744
+ blocking=False,
745
+ )
746
+
747
+ # Return LangGraph's interrupt
748
+ return _lg_interrupt(value)
749
+ def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
750
+ """
751
+ Create a Command object to resume interrupted graph execution
752
+
753
+ Will raise RuntimeError if LangGraph version doesn't support Command
754
+
755
+ Args:
756
+ lg_humanloop: LangGraphAdapter instance, defaults to global instance
757
+
758
+ Returns:
759
+ Command object that can be used with graph.stream method
760
+ """
761
+ if not _SUPPORTS_INTERRUPT:
762
+ raise RuntimeError(
763
+             "LangGraph version too low, Command feature not supported. Please upgrade to 0.2.57 or higher. "
764
+ "You can use: pip install --upgrade langgraph>=0.2.57"
765
+ )
766
+
767
+     # Define synchronous polling function
768
+ def poll_for_result():
769
+ poll_interval = 1.0 # Polling interval (seconds)
770
+ while True:
771
+ result = lg_humanloop.manager.check_conversation_status(default_conversation_id)
772
+ print(result)
773
+ # If status is final state (not PENDING), return result
774
+ if result.status != HumanLoopStatus.PENDING:
775
+ return result.response
776
+ # Wait before polling again
777
+ time.sleep(poll_interval)
778
+
779
+ # Wait for async result synchronously
780
+ # loop = asyncio.get_event_loop() # In synchronous environment
781
+
782
+ response = poll_for_result()
783
+ return _lg_Command(resume=response)
784
+
785
+ async def acreate_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
786
+ """
787
+ Create an async version of Command object to resume interrupted graph execution
788
+
789
+ Will raise RuntimeError if LangGraph version doesn't support Command
790
+
791
+ Args:
792
+ lg_humanloop: LangGraphAdapter instance, defaults to global instance
793
+
794
+ Returns:
795
+ Command object that can be used with graph.astream method
796
+ """
797
+ if not _SUPPORTS_INTERRUPT:
798
+ raise RuntimeError(
799
+             "LangGraph version too low, Command feature not supported. Please upgrade to 0.2.57 or higher. "
800
+ "You can use: pip install --upgrade langgraph>=0.2.57"
801
+ )
802
+
803
+ # Define async polling function
804
+ async def poll_for_result():
805
+ poll_interval = 1.0 # Polling interval (seconds)
806
+ while True:
807
+ result = await lg_humanloop.manager.async_check_conversation_status(default_conversation_id)
808
+ # If status is final state (not PENDING), return result
809
+ if result.status != HumanLoopStatus.PENDING:
810
+ return result.response
811
+ # Wait before polling again
812
+ await asyncio.sleep(poll_interval)
813
+
814
+ # Wait for async result directly
815
+ response = await poll_for_result()
816
+ return _lg_Command(resume=response)
817
+
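
To show how the helpers above fit together, a minimal end-to-end sketch (assumes langgraph>=0.2.57; the graph shape, state, and thread id are illustrative only):

from typing import TypedDict

from langgraph.graph import StateGraph, START, END
from langgraph.checkpoint.memory import MemorySaver

class DraftState(TypedDict):
    draft: str

def review_node(state: DraftState) -> DraftState:
    # Pauses the graph here and also notifies the human-loop provider.
    human_edit = interrupt({"draft": state["draft"]})
    return {"draft": human_edit}

builder = StateGraph(DraftState)
builder.add_node("review", review_node)
builder.add_edge(START, "review")
builder.add_edge("review", END)
graph = builder.compile(checkpointer=MemorySaver())

config = {"configurable": {"thread_id": "demo-thread"}}
graph.invoke({"draft": "v1"}, config)           # runs until the interrupt pauses execution
graph.invoke(create_resume_command(), config)   # polls for the human reply, then resumes

The resume Command works with either graph.invoke or graph.stream; in a fully async pipeline, awaiting acreate_resume_command() and passing its result to graph.astream follows the same pattern.
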