gohumanloop 0.0.5__py3-none-any.whl → 0.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,38 +1,57 @@
- from typing import Dict, Any, Optional, Callable, Awaitable, TypeVar, Union, List
- from functools import wraps
+ from typing import (
+     Dict,
+     Any,
+     Optional,
+     Callable,
+     Awaitable,
+     TypeVar,
+     Union,
+ )
  import asyncio
  import uuid
  import time
- from inspect import iscoroutinefunction
- from contextlib import asynccontextmanager, contextmanager
  import logging
 
- from gohumanloop.utils import run_async_safely
  from gohumanloop.core.interface import (
-     HumanLoopManager, HumanLoopResult, HumanLoopStatus, HumanLoopType, HumanLoopCallback, HumanLoopProvider
+     HumanLoopRequest,
+     HumanLoopResult,
+     HumanLoopStatus,
+     HumanLoopType,
+     HumanLoopCallback,
+     HumanLoopProvider,
  )
+ from gohumanloop.core.manager import DefaultHumanLoopManager
+ from gohumanloop.providers.terminal_provider import TerminalProvider
+ from gohumanloop.adapters.base_adapter import HumanloopAdapter
 
  logger = logging.getLogger(__name__)
 
  # Define TypeVars for input and output types
  T = TypeVar("T")
- R = TypeVar('R')
+ R = TypeVar("R", bound=Union[Any, None])
+
 
  # Check LangGraph version
- def _check_langgraph_version():
+ def _check_langgraph_version() -> bool:
      """Check LangGraph version to determine if interrupt feature is supported"""
      try:
          import importlib.metadata
+
          version = importlib.metadata.version("langgraph")
-         version_parts = version.split('.')
-         major, minor, patch = int(version_parts[0]), int(version_parts[1]), int(version_parts[2])
-
+         version_parts = version.split(".")
+         major, minor, patch = (
+             int(version_parts[0]),
+             int(version_parts[1]),
+             int(version_parts[2]),
+         )
+
          # Interrupt support starts from version 0.2.57
-         return (major > 0 or (major == 0 and (minor > 2 or (minor == 2 and patch >= 57))))
+         return major > 0 or (major == 0 and (minor > 2 or (minor == 2 and patch >= 57)))
      except (importlib.metadata.PackageNotFoundError, ValueError, IndexError):
          # If version cannot be determined, assume no support
          return False
 
+
  # Import corresponding features based on version
  _SUPPORTS_INTERRUPT = _check_langgraph_version()
  if _SUPPORTS_INTERRUPT:
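For readers skimming the hunk above: the hand-rolled major/minor/patch comparison in `_check_langgraph_version` is equivalent to a single `Version` comparison. A minimal sketch of that alternative, assuming the third-party `packaging` distribution is available (it is not a dependency of this package):

```python
# Illustrative alternative only -- not part of gohumanloop.
# Assumes the third-party `packaging` distribution is installed.
import importlib.metadata

from packaging.version import Version


def supports_interrupt() -> bool:
    """True if the installed langgraph is at least 0.2.57 (interrupt support)."""
    try:
        return Version(importlib.metadata.version("langgraph")) >= Version("0.2.57")
    except importlib.metadata.PackageNotFoundError:
        return False
```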
@@ -42,690 +61,172 @@ if _SUPPORTS_INTERRUPT:
42
61
  except ImportError:
43
62
  _SUPPORTS_INTERRUPT = False
44
63
 
45
- class HumanLoopWrapper:
46
- def __init__(
47
- self,
48
- decorator: Callable[[Any], Callable],
49
- ) -> None:
50
- self.decorator = decorator
51
64
 
52
- def wrap(self, fn: Callable) -> Callable:
53
- return self.decorator(fn)
54
-
55
- def __call__(self, fn: Callable) -> Callable:
56
- return self.decorator(fn)
57
-
58
- class LangGraphAdapter:
59
- """LangGraph adapter for simplifying human-in-the-loop integration
60
-
61
- Provides decorators for three scenarios:
62
- - require_approval: Requires human approval
63
- - require_info: Requires human input information
64
- - require_conversation: Requires multi-turn conversation
65
- """
66
-
67
- def __init__(
68
- self,
69
- manager: HumanLoopManager,
70
- default_timeout: Optional[int] = None
71
- ):
72
- self.manager = manager
73
- self.default_timeout = default_timeout
74
-
75
- async def __aenter__(self):
76
- """Implements async context manager protocol, automatically manages manager lifecycle"""
77
- if hasattr(self.manager, '__aenter__'):
78
- await self.manager.__aenter__()
79
- return self
80
-
81
- async def __aexit__(self, exc_type, exc_val, exc_tb):
82
- """Implements async context manager protocol, automatically manages manager lifecycle"""
83
- if hasattr(self.manager, '__aexit__'):
84
- await self.manager.__aexit__(exc_type, exc_val, exc_tb)
85
-
86
- def __enter__(self):
87
- """Implements sync context manager protocol, automatically manages manager lifecycle"""
88
- if hasattr(self.manager, '__enter__'):
89
- self.manager.__enter__()
90
- return self
91
-
92
- def __exit__(self, exc_type, exc_val, exc_tb):
93
- """Implements sync context manager protocol, automatically manages manager lifecycle"""
94
- if hasattr(self.manager, '__exit__'):
95
- self.manager.__exit__(exc_type, exc_val, exc_tb)
96
-
97
- @asynccontextmanager
98
- async def asession(self):
99
- """Provides async context manager for managing session lifecycle
100
-
101
- Example:
102
- async with adapter.session():
103
- # Use adapter here
104
- """
105
- try:
106
- if hasattr(self.manager, '__aenter__'):
107
- await self.manager.__aenter__()
108
- yield self
109
- finally:
110
- if hasattr(self.manager, '__aexit__'):
111
- await self.manager.__aexit__(None, None, None)
112
-
113
- @contextmanager
114
- def session(self):
115
- """Provides a synchronous context manager for managing session lifecycle
116
-
117
- Example:
118
- with adapter.sync_session():
119
- # Use adapter here
120
- """
121
- try:
122
- if hasattr(self.manager, '__enter__'):
123
- self.manager.__enter__()
124
- yield self
125
- finally:
126
- if hasattr(self.manager, '__exit__'):
127
- self.manager.__exit__(None, None, None)
128
-
129
- def require_approval(
130
- self,
131
- task_id: Optional[str] = None,
132
- conversation_id: Optional[str] = None,
133
- ret_key: str = "approval_result",
134
- additional: Optional[str] = "",
135
- metadata: Optional[Dict[str, Any]] = None,
136
- provider_id: Optional[str] = None,
137
- timeout: Optional[int] = None,
138
- execute_on_reject: bool = False,
139
- callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
140
- ) -> HumanLoopWrapper:
141
- """Decorator for approval scenario"""
142
- if task_id is None:
143
- task_id = str(uuid.uuid4())
144
- if conversation_id is None:
145
- conversation_id = str(uuid.uuid4())
146
-
147
- def decorator(fn):
148
- return self._approve_cli(fn, task_id, conversation_id, ret_key, additional, metadata, provider_id, timeout, execute_on_reject, callback)
149
- return HumanLoopWrapper(decorator)
150
-
151
- def _approve_cli(
152
- self,
153
- fn: Callable[[T], R],
154
- task_id: str,
155
- conversation_id: str,
156
- ret_key: str = "approval_result",
157
- additional: Optional[str] = "",
158
- metadata: Optional[Dict[str, Any]] = None,
159
- provider_id: Optional[str] = None,
160
- timeout: Optional[int] = None,
161
- execute_on_reject: bool = False,
162
- callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
163
- ) -> Callable[[T], R | None]:
164
- """
165
- Converts function type from Callable[[T], R] to Callable[[T], R | None]
166
-
167
- Passes approval results through keyword arguments while maintaining original function signature
168
-
169
- Benefits of this approach:
170
- 1. Maintains original function return type, keeping compatibility with LangGraph workflow
171
- 2. Decorated function can optionally use approval result information
172
- 3. Can pass richer approval context information
173
-
174
- Parameters:
175
- - fn: Target function to be decorated
176
- - task_id: Unique task identifier for tracking approval requests
177
- - conversation_id: Unique conversation identifier for tracking approval sessions
178
- - ret_key: Parameter name used to inject approval results into function kwargs
179
- - additional: Additional context information to show to approvers
180
- - metadata: Optional metadata dictionary passed with request
181
- - provider_id: Optional provider identifier to route requests
182
- - timeout: Timeout in seconds for approval response
183
- - execute_on_reject: Whether to execute function on rejection
184
- - callback: Optional callback object or factory function for approval events
185
-
186
- Returns:
187
- - Decorated function maintaining original signature
188
- - Raises ValueError if approval fails or is rejected
189
-
190
- Notes:
191
- - Decorated function must accept ret_key parameter to receive approval results
192
- - If approval is rejected, execution depends on execute_on_reject parameter
193
- - Approval results contain complete context including:
194
- - conversation_id: Unique conversation identifier
195
- - request_id: Unique request identifier
196
- - loop_type: Type of human loop (APPROVAL)
197
- - status: Current approval status
198
- - response: Approver's response
199
- - feedback: Optional approver feedback
200
- - responded_by: Approver identity
201
- - responded_at: Response timestamp
202
- - error: Error information if any
203
- """
204
-
205
- @wraps(fn)
206
- async def async_wrapper(*args, **kwargs) -> R | None:
207
- # Determine if callback is instance or factory function
208
- cb = None
209
- if callable(callback) and not isinstance(callback, HumanLoopCallback):
210
- # Factory function, pass state
211
- state = args[0] if args else None
212
- cb = callback(state)
213
- else:
214
- cb = callback
215
-
216
- result = await self.manager.async_request_humanloop(
217
- task_id=task_id,
218
- conversation_id=conversation_id,
219
- loop_type=HumanLoopType.APPROVAL,
220
- context={
221
- "message": {
222
- "function_name": fn.__name__,
223
- "function_signature": str(fn.__code__.co_varnames),
224
- "arguments": str(args),
225
- "keyword_arguments": str(kwargs),
226
- "documentation": fn.__doc__ or "No documentation available"
227
- },
228
- "question": "Please review and approve/reject this human loop execution.",
229
- "additional": additional
230
- },
231
- callback=cb,
232
- metadata=metadata,
233
- provider_id=provider_id,
234
- timeout=timeout or self.default_timeout,
235
- blocking=True
236
- )
237
-
238
- # Initialize approval result object as None
239
- approval_info = None
240
-
241
- if isinstance(result, HumanLoopResult):
242
- # If result is HumanLoopResult type, build complete approval info
243
- approval_info = {
244
- 'conversation_id': result.conversation_id,
245
- 'request_id': result.request_id,
246
- 'loop_type': result.loop_type,
247
- 'status': result.status,
248
- 'response': result.response,
249
- 'feedback': result.feedback,
250
- 'responded_by': result.responded_by,
251
- 'responded_at': result.responded_at,
252
- 'error': result.error
253
- }
254
-
255
- kwargs[ret_key] = approval_info
256
- # Check approval result
257
- if isinstance(result, HumanLoopResult):
258
- # Handle based on approval status
259
- if result.status == HumanLoopStatus.APPROVED:
260
- if iscoroutinefunction(fn):
261
- return await fn(*args, **kwargs)
262
- return fn(*args, **kwargs)
263
- elif result.status == HumanLoopStatus.REJECTED:
264
- # If execute on reject is set, run the function
265
- if execute_on_reject:
266
- if iscoroutinefunction(fn):
267
- return await fn(*args, **kwargs)
268
- return fn(*args, **kwargs)
269
- # Otherwise return rejection info
270
- reason = result.response
271
- raise ValueError(f"Function {fn.__name__} execution not approved: {reason}")
272
- else:
273
- raise ValueError(f"Approval error for {fn.__name__}: approval status: {result.status} and {result.error}")
274
- else:
275
- raise ValueError(f"Unknown approval error: {fn.__name__}")
276
-
277
- @wraps(fn)
278
- def sync_wrapper(*args, **kwargs) -> R | None:
279
- return run_async_safely(async_wrapper(*args, **kwargs))
280
-
281
- # Return corresponding wrapper based on decorated function type
282
- if iscoroutinefunction(fn):
283
- return async_wrapper # type: ignore
284
- return sync_wrapper
285
-
286
- def require_conversation(
287
- self,
288
- task_id: Optional[str] = None,
289
- conversation_id: Optional[str] = None,
290
- state_key: str = "conv_info",
291
- ret_key: str = "conv_result",
292
- additional: Optional[str] = "",
293
- provider_id: Optional[str] = None,
294
- metadata: Optional[Dict[str, Any]] = None,
295
- timeout: Optional[int] = None,
296
- callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
297
- ) -> HumanLoopWrapper:
298
- """Decorator for multi-turn conversation scenario"""
299
-
300
- if task_id is None:
301
- task_id = str(uuid.uuid4())
302
- if conversation_id is None:
303
- conversation_id = str(uuid.uuid4())
304
-
305
- def decorator(fn):
306
- return self._conversation_cli(fn, task_id, conversation_id, state_key, ret_key, additional, provider_id, metadata, timeout, callback)
307
- return HumanLoopWrapper(decorator)
308
-
309
- def _conversation_cli(
310
- self,
311
- fn: Callable[[T], R],
312
- task_id: str,
313
- conversation_id: str,
314
- state_key: str = "conv_info",
315
- ret_key: str = "conv_result",
316
- additional: Optional[str] = "",
317
- metadata: Optional[Dict[str, Any]] = None,
318
- provider_id: Optional[str] = None,
319
- timeout: Optional[int] = None,
320
- callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
321
- ) -> Callable[[T], R | None]:
322
- """Internal decorator implementation for multi-turn conversation scenario
323
-
324
- Converts function type from Callable[[T], R] to Callable[[T], R | None]
325
-
326
- Main features:
327
- 1. Conduct multi-turn conversations through human-machine interaction
328
- 2. Inject conversation results into function parameters via ret_key
329
- 3. Support both synchronous and asynchronous function calls
330
-
331
- Parameters:
332
- - fn: Target function to be decorated
333
- - task_id: Unique task identifier for tracking human interaction requests
334
- - conversation_id: Unique conversation identifier for tracking interaction sessions
335
- - state_key: Key name used to get conversation input info from state
336
- - ret_key: Parameter name used to inject human interaction results into function kwargs
337
- - additional: Additional context information to show to users
338
- - metadata: Optional metadata dictionary passed along with request
339
- - provider_id: Optional provider identifier to route requests to specific provider
340
- - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
341
- - callback: Optional callback object or factory function for handling human interaction events
342
-
343
- Returns:
344
- - Decorated function maintaining original signature
345
- - Raises ValueError if human interaction fails
346
-
347
- Notes:
348
- - Decorated function must accept ret_key parameter to receive interaction results
349
- - Interaction results contain complete context information including:
350
- - conversation_id: Unique conversation identifier
351
- - request_id: Unique request identifier
352
- - loop_type: Human interaction type (CONVERSATION)
353
- - status: Current request status
354
- - response: Human provided response
355
- - feedback: Optional human feedback
356
- - responded_by: Responder identity
357
- - responded_at: Response timestamp
358
- - error: Error information if any
359
- - Automatically adapts to async and sync functions
360
- """
361
-
362
- @wraps(fn)
363
- async def async_wrapper(*args, **kwargs) -> R | None:
364
- # Determine if callback is instance or factory function
365
- cb = None
366
- state = args[0] if args else None
367
- if callable(callback) and not isinstance(callback, HumanLoopCallback):
368
- cb = callback(state)
369
- else:
370
- cb = callback
371
-
372
- node_input = None
373
- if state:
374
- # Get input information from key fields in State
375
- node_input = state.get(state_key, {})
376
-
377
- # Compose question content
378
- question_content = f"Please respond to the following information:\n{node_input}"
379
-
380
- # Check if conversation exists to determine whether to use request_humanloop or continue_humanloop
381
- conversation_requests = await self.manager.async_check_conversation_exist(task_id, conversation_id)
382
-
383
- result = None
384
- if conversation_requests:
385
- # Existing conversation, use continue_humanloop
386
- result = await self.manager.async_continue_humanloop(
387
- conversation_id=conversation_id,
388
- context={
389
- "message": {
390
- "function_name": fn.__name__,
391
- "function_signature": str(fn.__code__.co_varnames),
392
- "arguments": str(args),
393
- "keyword_arguments": str(kwargs),
394
- "documentation": fn.__doc__ or "No documentation available"
395
- },
396
- "question": question_content,
397
- "additional": additional
398
- },
399
- timeout=timeout or self.default_timeout,
400
- callback=cb,
401
- metadata=metadata,
402
- provider_id=provider_id,
403
- blocking=True
404
- )
405
- else:
406
- # New conversation, use request_humanloop
407
- result = await self.manager.async_request_humanloop(
408
- task_id=task_id,
409
- conversation_id=conversation_id,
410
- loop_type=HumanLoopType.CONVERSATION,
411
- context={
412
- "message": {
413
- "function_name": fn.__name__,
414
- "function_signature": str(fn.__code__.co_varnames),
415
- "arguments": str(args),
416
- "keyword_arguments": str(kwargs),
417
- "documentation": fn.__doc__ or "No documentation available"
418
- },
419
- "question": question_content,
420
- "additional": additional
421
- },
422
- timeout=timeout or self.default_timeout,
423
- callback=cb,
424
- metadata=metadata,
425
- provider_id=provider_id,
426
- blocking=True
427
- )
428
-
429
- # Initialize conversation result object as None
430
- conversation_info = None
431
-
432
- if isinstance(result, HumanLoopResult):
433
- conversation_info = {
434
- 'conversation_id': result.conversation_id,
435
- 'request_id': result.request_id,
436
- 'loop_type': result.loop_type,
437
- 'status': result.status,
438
- 'response': result.response,
439
- 'feedback': result.feedback,
440
- 'responded_by': result.responded_by,
441
- 'responded_at': result.responded_at,
442
- 'error': result.error
443
- }
444
-
445
- kwargs[ret_key] = conversation_info
446
-
447
- if isinstance(result, HumanLoopResult):
448
- if iscoroutinefunction(fn):
449
- return await fn(*args, **kwargs)
450
- return fn(*args, **kwargs)
451
- else:
452
- raise ValueError(f"Conversation request timeout or error for {fn.__name__}")
453
-
454
- @wraps(fn)
455
- def sync_wrapper(*args, **kwargs) -> R | None:
456
- return run_async_safely(async_wrapper(*args, **kwargs))
457
-
458
- if iscoroutinefunction(fn):
459
- return async_wrapper # type: ignore
460
- return sync_wrapper
461
-
462
- def require_info(
463
- self,
464
- task_id: Optional[str] = None,
465
- conversation_id: Optional[str] = None,
466
- ret_key: str = "info_result",
467
- additional: Optional[str] = "",
468
- metadata: Optional[Dict[str, Any]] = None,
469
- provider_id: Optional[str] = None,
470
- timeout: Optional[int] = None,
471
- callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
472
- ) -> HumanLoopWrapper:
473
- """Decorator for information gathering scenario"""
474
-
475
- if task_id is None:
476
- task_id = str(uuid.uuid4())
477
- if conversation_id is None:
478
- conversation_id = str(uuid.uuid4())
479
-
480
- def decorator(fn):
481
- return self._get_info_cli(fn, task_id, conversation_id, ret_key, additional, metadata, provider_id, timeout, callback)
482
- return HumanLoopWrapper(decorator)
483
-
484
- def _get_info_cli(
485
- self,
486
- fn: Callable[[T], R],
487
- task_id: str,
488
- conversation_id: str,
489
- ret_key: str = "info_result",
490
- additional: Optional[str] = "",
491
- metadata: Optional[Dict[str, Any]] = None,
492
- provider_id: Optional[str] = None,
493
- timeout: Optional[int] = None,
494
- callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
495
- ) -> Callable[[T], R | None]:
496
- """Internal decorator implementation for information gathering scenario
497
- Converts function type from Callable[[T], R] to Callable[[T], R | None]
498
-
499
- Main features:
500
- 1. Get required information through human-machine interaction
501
- 2. Inject obtained information into function parameters via ret_key
502
- 3. Support both synchronous and asynchronous function calls
503
-
504
- Parameters:
505
- - fn: Target function to be decorated
506
- - task_id: Unique task identifier for tracking the human loop request
507
- - conversation_id: Unique conversation identifier for tracking the interaction session
508
- - ret_key: Parameter name used to inject the human loop result into function kwargs
509
- - additional: Additional context information to be shown to human user
510
- - metadata: Optional metadata dictionary to be passed with the request
511
- - provider_id: Optional provider identifier to route request to specific provider
512
- - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
513
- - callback: Optional callback object or factory function for handling human loop events
514
-
515
- Returns:
516
- - Decorated function maintaining original signature
517
- - Raises ValueError if human interaction fails
518
-
519
- Notes:
520
- - Decorated function must accept ret_key parameter to receive interaction results
521
- - Interaction results contain complete context information including:
522
- - conversation_id: Unique conversation identifier
523
- - request_id: Unique request identifier
524
- - loop_type: Type of human loop (INFORMATION)
525
- - status: Current status of the request
526
- - response: Human provided response
527
- - feedback: Optional feedback from human
528
- - responded_by: Identity of responder
529
- - responded_at: Response timestamp
530
- - error: Error information if any
531
- - Automatically adapts to async and sync functions
532
- """
533
-
534
- @wraps(fn)
535
- async def async_wrapper(*args, **kwargs) -> R | None:
536
-
537
- # Determine if callback is an instance or factory function
538
- # callback: can be HumanLoopCallback instance or factory function
539
- # - If factory function: accepts state parameter and returns HumanLoopCallback instance
540
- # - If HumanLoopCallback instance: use directly
541
- cb = None
542
- if callable(callback) and not isinstance(callback, HumanLoopCallback):
543
- # Factory function mode: get state from args and create callback instance
544
- # state is typically the first argument, None if args is empty
545
- state = args[0] if args else None
546
- cb = callback(state)
547
- else:
548
- cb = callback
549
-
550
- result = await self.manager.async_request_humanloop(
551
- task_id=task_id,
552
- conversation_id=conversation_id,
553
- loop_type=HumanLoopType.INFORMATION,
554
- context={
555
- "message": {
556
- "function_name": fn.__name__,
557
- "function_signature": str(fn.__code__.co_varnames),
558
- "arguments": str(args),
559
- "keyword_arguments": str(kwargs),
560
- "documentation": fn.__doc__ or "No documentation available"
561
- },
562
- "question": "Please provide the required information for the human loop",
563
- "additional": additional
564
- },
565
- timeout=timeout or self.default_timeout,
566
- callback=cb,
567
- metadata=metadata,
568
- provider_id=provider_id,
569
- blocking=True
570
- )
571
-
572
- # Initialize response info object as None
573
- resp_info = None
574
-
575
- if isinstance(result, HumanLoopResult):
576
- # If result is HumanLoopResult type, build complete response info
577
- resp_info = {
578
- 'conversation_id': result.conversation_id,
579
- 'request_id': result.request_id,
580
- 'loop_type': result.loop_type,
581
- 'status': result.status,
582
- 'response': result.response,
583
- 'feedback': result.feedback,
584
- 'responded_by': result.responded_by,
585
- 'responded_at': result.responded_at,
586
- 'error': result.error
587
- }
588
-
589
- kwargs[ret_key] = resp_info
590
-
591
- # Check whether the result is valid
592
- if isinstance(result, HumanLoopResult):
593
- # Return the collected info result; the caller decides whether to use it
594
- if iscoroutinefunction(fn):
595
- return await fn(*args, **kwargs)
596
- return fn(*args, **kwargs)
597
- else:
598
- raise ValueError(f"Info request timeout or error for {fn.__name__}")
599
-
600
- @wraps(fn)
601
- def sync_wrapper(*args, **kwargs) -> R | None:
602
- return run_async_safely(async_wrapper(*args, **kwargs))
603
-
604
- # Return corresponding wrapper based on decorated function type
605
- if iscoroutinefunction(fn):
606
- return async_wrapper # type: ignore
607
- return sync_wrapper
608
-
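The removal above drops the old `HumanLoopWrapper` and `LangGraphAdapter` classes; per the new import block in the first hunk, their decorator API is now provided by `HumanloopAdapter` from `gohumanloop.adapters.base_adapter`, and a module-level `default_adapter` instance is still created further down in this file. A minimal usage sketch, assuming the relocated adapter keeps the `require_approval`/`ret_key` contract documented in the removed code (the node name and state keys below are invented):

```python
# Illustrative sketch only -- assumes HumanloopAdapter keeps the
# require_approval / ret_key contract documented in the removed code.
# The node name and returned state keys are invented for the example.
from typing import Any, Dict, Optional


@default_adapter.require_approval(ret_key="approval_result")
def deploy_node(
    state: Dict[str, Any],
    approval_result: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """LangGraph node that only runs once a human approves it."""
    # Per the removed docstring, approval_result carries conversation_id,
    # request_id, loop_type, status, response, feedback, responded_by,
    # responded_at and error.
    responded_by = approval_result["responded_by"] if approval_result else None
    return {"approved_by": responded_by}
```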
  class LangGraphHumanLoopCallback(HumanLoopCallback):
      """LangGraph-specific human loop callback, compatible with TypedDict or Pydantic BaseModel State"""
-
+
      def __init__(
          self,
          state: Any,
-         async_on_update: Optional[Callable[[Any, HumanLoopProvider, HumanLoopResult], Awaitable[None]]] = None,
-         async_on_timeout: Optional[Callable[[Any, HumanLoopProvider], Awaitable[None]]] = None,
-         async_on_error: Optional[Callable[[Any, HumanLoopProvider, Exception], Awaitable[None]]] = None,
-     ):
+         async_on_request: Optional[
+             Callable[[Any, HumanLoopProvider, HumanLoopRequest], Awaitable[Any]]
+         ] = None,
+         async_on_update: Optional[
+             Callable[[Any, HumanLoopProvider, HumanLoopResult], Awaitable[Any]]
+         ] = None,
+         async_on_timeout: Optional[
+             Callable[[Any, HumanLoopProvider, HumanLoopResult], Awaitable[Any]]
+         ] = None,
+         async_on_error: Optional[
+             Callable[[Any, HumanLoopProvider, Exception], Awaitable[Any]]
+         ] = None,
+     ) -> None:
          self.state = state
+         self.async_on_request = async_on_request
          self.async_on_update = async_on_update
          self.async_on_timeout = async_on_timeout
          self.async_on_error = async_on_error
 
+     async def async_on_humanloop_request(
+         self, provider: HumanLoopProvider, request: HumanLoopRequest
+     ) -> Any:
+         if self.async_on_request:
+             await self.async_on_request(self.state, provider, request)
+
      async def async_on_humanloop_update(
-         self,
-         provider: HumanLoopProvider,
-         result: HumanLoopResult
-     ):
+         self, provider: HumanLoopProvider, result: HumanLoopResult
+     ) -> Any:
          if self.async_on_update:
              await self.async_on_update(self.state, provider, result)
 
      async def async_on_humanloop_timeout(
-         self,
-         provider: HumanLoopProvider,
-     ):
+         self, provider: HumanLoopProvider, result: HumanLoopResult
+     ) -> Any:
          if self.async_on_timeout:
-             await self.async_on_timeout(self.state, provider)
+             await self.async_on_timeout(self.state, provider, result)
 
      async def async_on_humanloop_error(
-         self,
-         provider: HumanLoopProvider,
-         error: Exception
-     ):
+         self, provider: HumanLoopProvider, error: Exception
+     ) -> Any:
          if self.async_on_error:
              await self.async_on_error(self.state, provider, error)
 
 
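A small sketch of wiring a custom handler into the callback class above; the handler only logs, and the state payload is an arbitrary example value:

```python
# Illustrative sketch: a custom async_on_update handler for the class above.
# The state payload is an arbitrary example value.
from typing import Any


async def log_update(
    state: Any, provider: HumanLoopProvider, result: HumanLoopResult
) -> Any:
    # Invoked whenever the provider reports progress on a pending request
    logger.info(f"{provider.name}: request {result.request_id} -> {result.status}")


callback = LangGraphHumanLoopCallback(
    state={"step": "review"},
    async_on_update=log_update,
)
```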
  def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback:
      """Default human-loop callback factory for LangGraph framework
-
+
      This callback focuses on:
      1. Logging human interaction events
-     2. Providing debug information
+     2. Providing debug information
      3. Collecting performance metrics
-
+
      Note: This callback does not modify state to maintain clear state management
-
+
      Args:
          state: LangGraph state object, only used for log correlation
-
+
      Returns:
          Configured LangGraphHumanLoopCallback instance
      """
 
-
-     async def async_on_update(state, provider: HumanLoopProvider, result: HumanLoopResult):
+     async def async_on_request(
+         state: Any, provider: HumanLoopProvider, request: HumanLoopRequest
+     ) -> Any:
+         """Log human interaction request events"""
+         logger.info(f"Provider ID: {provider.name}")
+         logger.info(
+             f"Human interaction request "
+             f"task_id={request.task_id}, "
+             f"conversation_id={request.conversation_id}, "
+             f"loop_type={request.loop_type}, "
+             f"context={request.context}, "
+             f"metadata={request.metadata}, "
+             f"timeout={request.timeout}, "
+             f"created_at={request.created_at}"
+         )
+
+     async def async_on_update(
+         state: Any, provider: HumanLoopProvider, result: HumanLoopResult
+     ) -> Any:
          """Log human interaction update events"""
          logger.info(f"Provider ID: {provider.name}")
          logger.info(
              f"Human interaction update "
+             f"conversation_id={result.conversation_id}, "
+             f"request_id={result.request_id},"
              f"status={result.status}, "
              f"response={result.response}, "
              f"responded_by={result.responded_by}, "
              f"responded_at={result.responded_at}, "
              f"feedback={result.feedback}"
          )
-
-
 
-     async def async_on_timeout(state, provider: HumanLoopProvider):
+     async def async_on_timeout(
+         state: Any, provider: HumanLoopProvider, result: HumanLoopResult
+     ) -> Any:
          """Log human interaction timeout events"""
-
+
          logger.info(f"Provider ID: {provider.name}")
+         logger.info(
+             f"Human interaction timeout "
+             f"conversation_id={result.conversation_id}, "
+             f"request_id={result.request_id},"
+             f"status={result.status}, "
+             f"response={result.response}, "
+             f"responded_by={result.responded_by}, "
+             f"responded_at={result.responded_at}, "
+             f"feedback={result.feedback}"
+         )
          from datetime import datetime
+
          current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
          logger.warning(f"Human interaction timeout - Time: {current_time}")
-
-
+
          # Alert logic can be added here, such as sending notifications
 
-     async def async_on_error(state, provider: HumanLoopProvider, error: Exception):
+     async def async_on_error(
+         state: Any, provider: HumanLoopProvider, error: Exception
+     ) -> Any:
          """Log human interaction error events"""
-
+
          logger.info(f"Provider ID: {provider.name}")
          from datetime import datetime
+
          current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
          logger.error(f"Human interaction error - Time: {current_time} Error: {error}")
 
      return LangGraphHumanLoopCallback(
          state=state,
+         async_on_request=async_on_request,
          async_on_update=async_on_update,
          async_on_timeout=async_on_timeout,
-         async_on_error=async_on_error
+         async_on_error=async_on_error,
      )
 
- from gohumanloop.core.manager import DefaultHumanLoopManager
- from gohumanloop.providers.terminal_provider import TerminalProvider
 
  # Create HumanLoopManager instance
- manager = DefaultHumanLoopManager(initial_providers=TerminalProvider(name="LGDefaultProvider"))
+ manager = DefaultHumanLoopManager(
+     initial_providers=TerminalProvider(name="LGDefaultProvider")
+ )
 
  # Create LangGraphAdapter instance
- default_adapter = LangGraphAdapter(manager, default_timeout=60)
+ default_adapter = HumanloopAdapter(manager, default_timeout=60)
 
  default_conversation_id = str(uuid.uuid4())
 
  _SKIP_NEXT_HUMANLOOP = False
 
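The module-level wiring above builds one default manager/adapter pair. The same calls shown in this hunk can be used to build a dedicated pair, for example with a differently named terminal provider and a longer timeout (the values below are arbitrary):

```python
# Illustrative sketch using only APIs shown in this file; the provider name
# and timeout are arbitrary example values.
review_manager = DefaultHumanLoopManager(
    initial_providers=TerminalProvider(name="ReviewProvider")
)
review_adapter = HumanloopAdapter(review_manager, default_timeout=300)
```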
- def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
+
+ def interrupt(value: Any, lg_humanloop: HumanloopAdapter = default_adapter) -> Any:
      """
      Wraps LangGraph's interrupt functionality to pause graph execution and wait for human input
-
+
      Raises RuntimeError if LangGraph version doesn't support interrupt
-
+
      Args:
          value: Any JSON-serializable value that will be shown to human user
          lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
      Returns:
          Input value provided by human user
      """
@@ -737,7 +238,7 @@ def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> A
              "LangGraph version too low, interrupt not supported. Please upgrade to version 0.2.57 or higher."
              "You can use: pip install --upgrade langgraph>=0.2.57"
          )
-
+
      if not _SKIP_NEXT_HUMANLOOP:
          # Get current event loop or create new one
          try:
@@ -757,18 +258,19 @@ def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> A
          # Reset flag to allow normal human intervention trigger next time
          _SKIP_NEXT_HUMANLOOP = False
 
-
      # Return LangGraph's interrupt
      return _lg_interrupt(value)
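A sketch of calling the wrapped `interrupt()` from inside a LangGraph node; the node, state keys, and surrounding graph wiring are assumptions, not part of this module:

```python
# Illustrative sketch only. The node, state keys and graph wiring are
# assumptions; interrupt() requires langgraph >= 0.2.57.
from typing import Any, Dict


def ask_human_node(state: Dict[str, Any]) -> Dict[str, Any]:
    # Pauses graph execution and also opens a GoHumanLoop conversation
    answer = interrupt({"question": "Deploy to production?"})
    return {"human_answer": answer}
```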
- def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
+
+
+ def create_resume_command(lg_humanloop: HumanloopAdapter = default_adapter) -> Any:
      """
      Create a Command object to resume interrupted graph execution
-
+
      Will raise RuntimeError if LangGraph version doesn't support Command
-
+
      Args:
          lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
      Returns:
          Command object that can be used with graph.stream method
      """
@@ -782,10 +284,12 @@ def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> A
          )
 
      # Define async polling function
-     def poll_for_result():
+     def poll_for_result() -> Optional[Dict[str, Any]]:
          poll_interval = 1.0  # Polling interval (seconds)
          while True:
-             result = lg_humanloop.manager.check_conversation_status(default_conversation_id)
+             result = lg_humanloop.manager.check_conversation_status(
+                 default_conversation_id
+             )
              # If status is final state (not PENDING), return result
              if result.status != HumanLoopStatus.PENDING:
                  return result.response
@@ -797,15 +301,18 @@ create_resume_command(lg_humanloop: LangGraphAdapter = default_adapte
      response = poll_for_result()
      return _lg_Command(resume=response)
 
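A sketch of resuming an interrupted run with the helper above; `graph` is assumed to be an already-compiled LangGraph graph with a checkpointer, and the thread id is an example value:

```python
# Illustrative sketch: `graph` is an already-compiled LangGraph graph with a
# checkpointer; the thread_id is an example value.
config = {"configurable": {"thread_id": "demo-thread"}}

for event in graph.stream(create_resume_command(), config=config):
    print(event)
```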
- async def acreate_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
+
+ async def acreate_resume_command(
+     lg_humanloop: HumanloopAdapter = default_adapter
+ ) -> Any:
      """
      Create an async version of Command object to resume interrupted graph execution
-
+
      Will raise RuntimeError if LangGraph version doesn't support Command
-
+
      Args:
          lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
      Returns:
          Command object that can be used with graph.astream method
      """
@@ -818,19 +325,20 @@ async def acreate_resume_command(lg_humanloop: LangGraphAdapter = default_adapte
          )
 
      # Define async polling function
-     async def poll_for_result():
+     async def poll_for_result() -> Optional[Dict[str, Any]]:
          poll_interval = 1.0  # Polling interval (seconds)
          while True:
-             result = await lg_humanloop.manager.async_check_conversation_status(default_conversation_id)
+             result = await lg_humanloop.manager.async_check_conversation_status(
+                 default_conversation_id
+             )
              # If status is final state (not PENDING), return result
              if result.status != HumanLoopStatus.PENDING:
                  return result.response
              # Wait before polling again
              await asyncio.sleep(poll_interval)
-
+
      _SKIP_NEXT_HUMANLOOP = True
 
      # Wait for async result directly
      response = await poll_for_result()
      return _lg_Command(resume=response)
-
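And the async variant, under the same assumptions about `graph` and the thread configuration:

```python
# Illustrative sketch of the async variant; `graph` and the thread config are
# application-side assumptions, as above.
import asyncio


async def resume_run() -> None:
    config = {"configurable": {"thread_id": "demo-thread"}}
    command = await acreate_resume_command()
    async for event in graph.astream(command, config=config):
        print(event)


asyncio.run(resume_run())
```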