gohumanloop 0.0.5__py3-none-any.whl → 0.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,18 @@
- from typing import Dict, Any, Optional, Callable, Awaitable, TypeVar, Union, List
+ from typing import (
+     cast,
+     Dict,
+     Any,
+     Optional,
+     Callable,
+     Awaitable,
+     TypeVar,
+     Union,
+     Type,
+     AsyncIterator,
+     Iterator,
+     Coroutine,
+ )
+ from types import TracebackType
  from functools import wraps
  import asyncio
  import uuid
@@ -9,30 +23,44 @@ import logging

  from gohumanloop.utils import run_async_safely
  from gohumanloop.core.interface import (
-     HumanLoopManager, HumanLoopResult, HumanLoopStatus, HumanLoopType, HumanLoopCallback, HumanLoopProvider
+     HumanLoopManager,
+     HumanLoopResult,
+     HumanLoopStatus,
+     HumanLoopType,
+     HumanLoopCallback,
+     HumanLoopProvider,
  )
+ from gohumanloop.core.manager import DefaultHumanLoopManager
+ from gohumanloop.providers.terminal_provider import TerminalProvider

  logger = logging.getLogger(__name__)

  # Define TypeVars for input and output types
  T = TypeVar("T")
- R = TypeVar('R')
+ R = TypeVar("R", bound=Union[Any, None])
+

  # Check LangGraph version
- def _check_langgraph_version():
+ def _check_langgraph_version() -> bool:
      """Check LangGraph version to determine if interrupt feature is supported"""
      try:
          import importlib.metadata
+
          version = importlib.metadata.version("langgraph")
-         version_parts = version.split('.')
-         major, minor, patch = int(version_parts[0]), int(version_parts[1]), int(version_parts[2])
-
+         version_parts = version.split(".")
+         major, minor, patch = (
+             int(version_parts[0]),
+             int(version_parts[1]),
+             int(version_parts[2]),
+         )
+
          # Interrupt support starts from version 0.2.57
-         return (major > 0 or (major == 0 and (minor > 2 or (minor == 2 and patch >= 57))))
+         return major > 0 or (major == 0 and (minor > 2 or (minor == 2 and patch >= 57)))
      except (importlib.metadata.PackageNotFoundError, ValueError, IndexError):
          # If version cannot be determined, assume no support
          return False

+
  # Import corresponding features based on version
  _SUPPORTS_INTERRUPT = _check_langgraph_version()
  if _SUPPORTS_INTERRUPT:
@@ -42,6 +70,7 @@ if _SUPPORTS_INTERRUPT:
      except ImportError:
          _SUPPORTS_INTERRUPT = False

+
  class HumanLoopWrapper:
      def __init__(
          self,
@@ -55,9 +84,10 @@ class HumanLoopWrapper:
      def __call__(self, fn: Callable) -> Callable:
          return self.decorator(fn)

+
  class LangGraphAdapter:
      """LangGraph adapter for simplifying human-in-the-loop integration
-
+
      Provides decorators for three scenarios:
      - require_approval: Requires human approval
      - require_info: Requires human input information
@@ -65,66 +95,88 @@ class LangGraphAdapter:
      """

      def __init__(
-         self,
-         manager: HumanLoopManager,
-         default_timeout: Optional[int] = None
+         self, manager: HumanLoopManager, default_timeout: Optional[int] = None
      ):
          self.manager = manager
          self.default_timeout = default_timeout

-     async def __aenter__(self):
+     async def __aenter__(self) -> "LangGraphAdapter":
          """Implements async context manager protocol, automatically manages manager lifecycle"""
-         if hasattr(self.manager, '__aenter__'):
-             await self.manager.__aenter__()
+
+         manager = cast(Any, self.manager)
+         if hasattr(manager, "__aenter__"):
+             await manager.__aenter__()
          return self
-
-     async def __aexit__(self, exc_type, exc_val, exc_tb):
+
+     async def __aexit__(
+         self,
+         exc_type: Optional[Type[BaseException]],
+         exc_val: Optional[BaseException],
+         exc_tb: Optional[TracebackType],
+     ) -> Optional[bool]:
          """Implements async context manager protocol, automatically manages manager lifecycle"""
-         if hasattr(self.manager, '__aexit__'):
-             await self.manager.__aexit__(exc_type, exc_val, exc_tb)
-
-     def __enter__(self):
+
+         manager = cast(Any, self.manager)
+         if hasattr(manager, "__aexit__"):
+             await manager.__aexit__(exc_type, exc_val, exc_tb)
+
+         return None
+
+     def __enter__(self) -> "LangGraphAdapter":
          """Implements sync context manager protocol, automatically manages manager lifecycle"""
-         if hasattr(self.manager, '__enter__'):
-             self.manager.__enter__()
+
+         manager = cast(Any, self.manager)
+         if hasattr(manager, "__enter__"):
+             manager.__enter__()
          return self
-
-     def __exit__(self, exc_type, exc_val, exc_tb):
+
+     def __exit__(
+         self,
+         exc_type: Optional[Type[BaseException]],
+         exc_val: Optional[BaseException],
+         exc_tb: Optional[TracebackType],
+     ) -> Optional[bool]:
          """Implements sync context manager protocol, automatically manages manager lifecycle"""
-         if hasattr(self.manager, '__exit__'):
-             self.manager.__exit__(exc_type, exc_val, exc_tb)
-
+
+         manager = cast(Any, self.manager)
+         if hasattr(manager, "__exit__"):
+             manager.__exit__(exc_type, exc_val, exc_tb)
+
+         return None
+
      @asynccontextmanager
-     async def asession(self):
+     async def asession(self) -> AsyncIterator["LangGraphAdapter"]:
          """Provides async context manager for managing session lifecycle
-
+
          Example:
              async with adapter.session():
                  # Use adapter here
          """
          try:
-             if hasattr(self.manager, '__aenter__'):
-                 await self.manager.__aenter__()
+             manager = cast(Any, self.manager)
+             if hasattr(manager, "__aenter__"):
+                 await manager.__aenter__()
              yield self
          finally:
-             if hasattr(self.manager, '__aexit__'):
-                 await self.manager.__aexit__(None, None, None)
-
+             if hasattr(manager, "__aexit__"):
+                 await manager.__aexit__(None, None, None)
+
      @contextmanager
-     def session(self):
+     def session(self) -> Iterator["LangGraphAdapter"]:
          """Provides a synchronous context manager for managing session lifecycle
-
+
          Example:
              with adapter.sync_session():
                  # Use adapter here
          """
          try:
-             if hasattr(self.manager, '__enter__'):
-                 self.manager.__enter__()
+             manager = cast(Any, self.manager)
+             if hasattr(manager, "__enter__"):
+                 manager.__enter__()
              yield self
          finally:
-             if hasattr(self.manager, '__exit__'):
-                 self.manager.__exit__(None, None, None)
+             if hasattr(manager, "__exit__"):
+                 manager.__exit__(None, None, None)

      def require_approval(
          self,
@@ -136,16 +188,30 @@ class LangGraphAdapter:
          provider_id: Optional[str] = None,
          timeout: Optional[int] = None,
          execute_on_reject: bool = False,
-         callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
+         callback: Optional[
+             Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+         ] = None,
      ) -> HumanLoopWrapper:
          """Decorator for approval scenario"""
          if task_id is None:
              task_id = str(uuid.uuid4())
          if conversation_id is None:
              conversation_id = str(uuid.uuid4())
-
-         def decorator(fn):
-             return self._approve_cli(fn, task_id, conversation_id, ret_key, additional, metadata, provider_id, timeout, execute_on_reject, callback)
+
+         def decorator(fn: Callable) -> Callable:
+             return self._approve_cli(
+                 fn,
+                 task_id,
+                 conversation_id,
+                 ret_key,
+                 additional,
+                 metadata,
+                 provider_id,
+                 timeout,
+                 execute_on_reject,
+                 callback,
+             )
+
          return HumanLoopWrapper(decorator)

      def _approve_cli(
@@ -159,22 +225,27 @@ class LangGraphAdapter:
          provider_id: Optional[str] = None,
          timeout: Optional[int] = None,
          execute_on_reject: bool = False,
-         callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
-     ) -> Callable[[T], R | None]:
+         callback: Optional[
+             Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+         ] = None,
+     ) -> Union[
+         Callable[[T], Coroutine[Any, Any, R]],  # For async functions
+         Callable[[T], R],  # For sync functions
+     ]:
          """
-         Converts function type from Callable[[T], R] to Callable[[T], R | None]
-
+         Converts function type from Callable[[T], R] to Callable[[T], R]
+
          Passes approval results through keyword arguments while maintaining original function signature
-
+
          Benefits of this approach:
          1. Maintains original function return type, keeping compatibility with LangGraph workflow
          2. Decorated function can optionally use approval result information
          3. Can pass richer approval context information
-
+
          Parameters:
          - fn: Target function to be decorated
          - task_id: Unique task identifier for tracking approval requests
-         - conversation_id: Unique conversation identifier for tracking approval sessions
+         - conversation_id: Unique conversation identifier for tracking approval sessions
          - ret_key: Parameter name used to inject approval results into function kwargs
          - additional: Additional context information to show to approvers
          - metadata: Optional metadata dictionary passed with request
@@ -182,11 +253,11 @@ class LangGraphAdapter:
          - timeout: Timeout in seconds for approval response
          - execute_on_reject: Whether to execute function on rejection
          - callback: Optional callback object or factory function for approval events
-
+
          Returns:
          - Decorated function maintaining original signature
          - Raises ValueError if approval fails or is rejected
-
+
          Notes:
          - Decorated function must accept ret_key parameter to receive approval results
          - If approval is rejected, execution depends on execute_on_reject parameter
@@ -203,7 +274,7 @@ class LangGraphAdapter:
          """

          @wraps(fn)
-         async def async_wrapper(*args, **kwargs) -> R | None:
+         async def async_wrapper(*args: Any, **kwargs: Any) -> R:
              # Determine if callback is instance or factory function
              cb = None
              if callable(callback) and not isinstance(callback, HumanLoopCallback):
@@ -223,33 +294,33 @@ class LangGraphAdapter:
                      "function_signature": str(fn.__code__.co_varnames),
                      "arguments": str(args),
                      "keyword_arguments": str(kwargs),
-                     "documentation": fn.__doc__ or "No documentation available"
+                     "documentation": fn.__doc__ or "No documentation available",
                  },
                  "question": "Please review and approve/reject this human loop execution.",
-                 "additional": additional
+                 "additional": additional,
              },
              callback=cb,
              metadata=metadata,
              provider_id=provider_id,
              timeout=timeout or self.default_timeout,
-             blocking=True
+             blocking=True,
          )

          # Initialize approval result object as None
          approval_info = None
-
+
          if isinstance(result, HumanLoopResult):
              # If result is HumanLoopResult type, build complete approval info
              approval_info = {
-                 'conversation_id': result.conversation_id,
-                 'request_id': result.request_id,
-                 'loop_type': result.loop_type,
-                 'status': result.status,
-                 'response': result.response,
-                 'feedback': result.feedback,
-                 'responded_by': result.responded_by,
-                 'responded_at': result.responded_at,
-                 'error': result.error
+                 "conversation_id": result.conversation_id,
+                 "request_id": result.request_id,
+                 "loop_type": result.loop_type,
+                 "status": result.status,
+                 "response": result.response,
+                 "feedback": result.feedback,
+                 "responded_by": result.responded_by,
+                 "responded_at": result.responded_at,
+                 "error": result.error,
              }

              kwargs[ret_key] = approval_info
@@ -258,29 +329,38 @@ class LangGraphAdapter:
              # Handle based on approval status
              if result.status == HumanLoopStatus.APPROVED:
                  if iscoroutinefunction(fn):
-                     return await fn(*args, **kwargs)
-                 return fn(*args, **kwargs)
+                     ret = await fn(*args, **kwargs)
+                 else:
+                     ret = fn(*args, **kwargs)
+                 return cast(R, ret)
              elif result.status == HumanLoopStatus.REJECTED:
-                 # If execute on reject is set, run the function
+                 # If execute on reject is set, run the function
                  if execute_on_reject:
                      if iscoroutinefunction(fn):
-                         return await fn(*args, **kwargs)
-                     return fn(*args, **kwargs)
+                         ret = await fn(*args, **kwargs)
+                     else:
+                         ret = fn(*args, **kwargs)
+                     return cast(R, ret)
                  # Otherwise return rejection info
                  reason = result.response
-                 raise ValueError(f"Function {fn.__name__} execution not approved: {reason}")
+                 raise ValueError(
+                     f"Function {fn.__name__} execution not approved: {reason}"
+                 )
              else:
-                 raise ValueError(f"Approval error for {fn.__name__}: approval status: {result.status} and {result.error}")
+                 raise ValueError(
+                     f"Approval error for {fn.__name__}: approval status: {result.status} and {result.error}"
+                 )
          else:
              raise ValueError(f"Unknown approval error: {fn.__name__}")

          @wraps(fn)
-         def sync_wrapper(*args, **kwargs) -> R | None:
-             return run_async_safely(async_wrapper(*args, **kwargs))
+         def sync_wrapper(*args: Any, **kwargs: Any) -> R:
+             ret = run_async_safely(async_wrapper(*args, **kwargs))
+             return cast(R, ret)

          # Return corresponding wrapper based on decorated function type
          if iscoroutinefunction(fn):
-             return async_wrapper # type: ignore
+             return async_wrapper
          return sync_wrapper

      def require_conversation(
@@ -293,7 +373,9 @@ class LangGraphAdapter:
          provider_id: Optional[str] = None,
          metadata: Optional[Dict[str, Any]] = None,
          timeout: Optional[int] = None,
-         callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
+         callback: Optional[
+             Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+         ] = None,
      ) -> HumanLoopWrapper:
          """Decorator for multi-turn conversation scenario"""

@@ -302,8 +384,20 @@ class LangGraphAdapter:
          if conversation_id is None:
              conversation_id = str(uuid.uuid4())

-         def decorator(fn):
-             return self._conversation_cli(fn, task_id, conversation_id, state_key, ret_key, additional, provider_id, metadata, timeout, callback)
+         def decorator(fn: Callable) -> Callable:
+             return self._conversation_cli(
+                 fn,
+                 task_id,
+                 conversation_id,
+                 state_key,
+                 ret_key,
+                 additional,
+                 metadata,
+                 provider_id,
+                 timeout,
+                 callback,
+             )
+
          return HumanLoopWrapper(decorator)

      def _conversation_cli(
@@ -312,22 +406,27 @@ class LangGraphAdapter:
          task_id: str,
          conversation_id: str,
          state_key: str = "conv_info",
-         ret_key: str = "conv_result",
+         ret_key: str = "conv_result",
          additional: Optional[str] = "",
          metadata: Optional[Dict[str, Any]] = None,
          provider_id: Optional[str] = None,
          timeout: Optional[int] = None,
-         callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
-     ) -> Callable[[T], R | None]:
+         callback: Optional[
+             Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+         ] = None,
+     ) -> Union[
+         Callable[[T], Coroutine[Any, Any, R]],  # For async functions
+         Callable[[T], R],  # For sync functions
+     ]:
          """Internal decorator implementation for multi-turn conversation scenario
-
-         Converts function type from Callable[[T], R] to Callable[[T], R | None]
-
+
+         Converts function type from Callable[[T], R] to Callable[[T], R]
+
          Main features:
          1. Conduct multi-turn conversations through human-machine interaction
          2. Inject conversation results into function parameters via ret_key
          3. Support both synchronous and asynchronous function calls
-
+
          Parameters:
          - fn: Target function to be decorated
          - task_id: Unique task identifier for tracking human interaction requests
@@ -339,16 +438,16 @@ class LangGraphAdapter:
          - provider_id: Optional provider identifier to route requests to specific provider
          - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
          - callback: Optional callback object or factory function for handling human interaction events
-
+
          Returns:
          - Decorated function maintaining original signature
          - Raises ValueError if human interaction fails
-
+
          Notes:
          - Decorated function must accept ret_key parameter to receive interaction results
          - Interaction results contain complete context information including:
              - conversation_id: Unique conversation identifier
-             - request_id: Unique request identifier
+             - request_id: Unique request identifier
              - loop_type: Human interaction type (CONVERSATION)
              - status: Current request status
              - response: Human provided response
@@ -360,7 +459,7 @@ class LangGraphAdapter:
          """

          @wraps(fn)
-         async def async_wrapper(*args, **kwargs) -> R | None:
+         async def async_wrapper(*args: Any, **kwargs: Any) -> R:
              # Determine if callback is instance or factory function
              cb = None
              state = args[0] if args else None
@@ -375,11 +474,15 @@ class LangGraphAdapter:
              node_input = state.get(state_key, {})

              # Compose question content
-             question_content = f"Please respond to the following information:\n{node_input}"
-
+             question_content = (
+                 f"Please respond to the following information:\n{node_input}"
+             )
+
              # Check if conversation exists to determine whether to use request_humanloop or continue_humanloop
-             conversation_requests = await self.manager.async_check_conversation_exist(task_id, conversation_id)
-
+             conversation_requests = await self.manager.async_check_conversation_exist(
+                 task_id, conversation_id
+             )
+
              result = None
              if conversation_requests:
                  # Existing conversation, use continue_humanloop
@@ -387,20 +490,20 @@ class LangGraphAdapter:
                      conversation_id=conversation_id,
                      context={
                          "message": {
-                             "function_name": fn.__name__,
-                             "function_signature": str(fn.__code__.co_varnames),
-                             "arguments": str(args),
-                             "keyword_arguments": str(kwargs),
-                             "documentation": fn.__doc__ or "No documentation available"
+                             "function_name": fn.__name__,
+                             "function_signature": str(fn.__code__.co_varnames),
+                             "arguments": str(args),
+                             "keyword_arguments": str(kwargs),
+                             "documentation": fn.__doc__ or "No documentation available",
                          },
                          "question": question_content,
-                         "additional": additional
+                         "additional": additional,
                      },
                      timeout=timeout or self.default_timeout,
                      callback=cb,
                      metadata=metadata,
                      provider_id=provider_id,
-                     blocking=True
+                     blocking=True,
                  )
              else:
                  # New conversation, use request_humanloop
@@ -410,20 +513,20 @@ class LangGraphAdapter:
                      loop_type=HumanLoopType.CONVERSATION,
                      context={
                          "message": {
-                             "function_name": fn.__name__,
-                             "function_signature": str(fn.__code__.co_varnames),
-                             "arguments": str(args),
-                             "keyword_arguments": str(kwargs),
-                             "documentation": fn.__doc__ or "No documentation available"
+                             "function_name": fn.__name__,
+                             "function_signature": str(fn.__code__.co_varnames),
+                             "arguments": str(args),
+                             "keyword_arguments": str(kwargs),
+                             "documentation": fn.__doc__ or "No documentation available",
                          },
                          "question": question_content,
-                         "additional": additional
+                         "additional": additional,
                      },
                      timeout=timeout or self.default_timeout,
                      callback=cb,
                      metadata=metadata,
                      provider_id=provider_id,
-                     blocking=True
+                     blocking=True,
                  )

              # Initialize conversation result object as None
@@ -431,32 +534,37 @@ class LangGraphAdapter:

              if isinstance(result, HumanLoopResult):
                  conversation_info = {
-                     'conversation_id': result.conversation_id,
-                     'request_id': result.request_id,
-                     'loop_type': result.loop_type,
-                     'status': result.status,
-                     'response': result.response,
-                     'feedback': result.feedback,
-                     'responded_by': result.responded_by,
-                     'responded_at': result.responded_at,
-                     'error': result.error
+                     "conversation_id": result.conversation_id,
+                     "request_id": result.request_id,
+                     "loop_type": result.loop_type,
+                     "status": result.status,
+                     "response": result.response,
+                     "feedback": result.feedback,
+                     "responded_by": result.responded_by,
+                     "responded_at": result.responded_at,
+                     "error": result.error,
                  }

                  kwargs[ret_key] = conversation_info

              if isinstance(result, HumanLoopResult):
                  if iscoroutinefunction(fn):
-                     return await fn(*args, **kwargs)
-                 return fn(*args, **kwargs)
+                     ret = await fn(*args, **kwargs)
+                 else:
+                     ret = fn(*args, **kwargs)
+                 return cast(R, ret)
              else:
-                 raise ValueError(f"Conversation request timeout or error for {fn.__name__}")
+                 raise ValueError(
+                     f"Conversation request timeout or error for {fn.__name__}"
+                 )

          @wraps(fn)
-         def sync_wrapper(*args, **kwargs) -> R | None:
-             return run_async_safely(async_wrapper(*args, **kwargs))
+         def sync_wrapper(*args: Any, **kwargs: Any) -> R:
+             ret = run_async_safely(async_wrapper(*args, **kwargs))
+             return cast(R, ret)

          if iscoroutinefunction(fn):
-             return async_wrapper # type: ignore
+             return async_wrapper
          return sync_wrapper

      def require_info(
@@ -468,7 +576,9 @@ class LangGraphAdapter:
          metadata: Optional[Dict[str, Any]] = None,
          provider_id: Optional[str] = None,
          timeout: Optional[int] = None,
-         callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
+         callback: Optional[
+             Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+         ] = None,
      ) -> HumanLoopWrapper:
          """Decorator for information gathering scenario"""

@@ -477,8 +587,19 @@ class LangGraphAdapter:
          if conversation_id is None:
              conversation_id = str(uuid.uuid4())

-         def decorator(fn):
-             return self._get_info_cli(fn, task_id, conversation_id, ret_key, additional, metadata, provider_id, timeout, callback)
+         def decorator(fn: Callable) -> Callable:
+             return self._get_info_cli(
+                 fn,
+                 task_id,
+                 conversation_id,
+                 ret_key,
+                 additional,
+                 metadata,
+                 provider_id,
+                 timeout,
+                 callback,
+             )
+
          return HumanLoopWrapper(decorator)

      def _get_info_cli(
@@ -491,16 +612,21 @@ class LangGraphAdapter:
          metadata: Optional[Dict[str, Any]] = None,
          provider_id: Optional[str] = None,
          timeout: Optional[int] = None,
-         callback: Optional[Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]] = None,
-     ) -> Callable[[T], R | None]:
+         callback: Optional[
+             Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+         ] = None,
+     ) -> Union[
+         Callable[[T], Coroutine[Any, Any, R]],  # For async functions
+         Callable[[T], R],  # For sync functions
+     ]:
          """Internal decorator implementation for information gathering scenario
-         Converts function type from Callable[[T], R] to Callable[[T], R | None]
-
+         Converts function type from Callable[[T], R] to Callable[[T], R]
+
          Main features:
          1. Get required information through human-machine interaction
          2. Inject obtained information into function parameters via ret_key
          3. Support both synchronous and asynchronous function calls
-
+
          Parameters:
          - fn: Target function to be decorated
          - task_id: Unique task identifier for tracking the human loop request
@@ -511,11 +637,11 @@ class LangGraphAdapter:
          - provider_id: Optional provider identifier to route request to specific provider
          - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
          - callback: Optional callback object or factory function for handling human loop events
-
+
          Returns:
          - Decorated function maintaining original signature
          - Raises ValueError if human interaction fails
-
+
          Notes:
          - Decorated function must accept ret_key parameter to receive interaction results
          - Interaction results contain complete context information including:
@@ -532,8 +658,7 @@ class LangGraphAdapter:
          """

          @wraps(fn)
-         async def async_wrapper(*args, **kwargs) -> R | None:
-
+         async def async_wrapper(*args: Any, **kwargs: Any) -> R:
              # Determine if callback is an instance or factory function
              # callback: can be HumanLoopCallback instance or factory function
              # - If factory function: accepts state parameter and returns HumanLoopCallback instance
@@ -546,7 +671,7 @@ class LangGraphAdapter:
                  cb = callback(state)
              else:
                  cb = callback
-
+
              result = await self.manager.async_request_humanloop(
                  task_id=task_id,
                  conversation_id=conversation_id,
@@ -557,16 +682,16 @@ class LangGraphAdapter:
                      "function_signature": str(fn.__code__.co_varnames),
                      "arguments": str(args),
                      "keyword_arguments": str(kwargs),
-                     "documentation": fn.__doc__ or "No documentation available"
+                     "documentation": fn.__doc__ or "No documentation available",
                  },
                  "question": "Please provide the required information for the human loop",
-                 "additional": additional
+                 "additional": additional,
              },
              timeout=timeout or self.default_timeout,
              callback=cb,
              metadata=metadata,
              provider_id=provider_id,
-             blocking=True
+             blocking=True,
          )

          # Initialize the approval result object as None
@@ -575,15 +700,15 @@ class LangGraphAdapter:
              if isinstance(result, HumanLoopResult):
                  # If the result is a HumanLoopResult, build the complete approval info
                  resp_info = {
-                     'conversation_id': result.conversation_id,
-                     'request_id': result.request_id,
-                     'loop_type': result.loop_type,
-                     'status': result.status,
-                     'response': result.response,
-                     'feedback': result.feedback,
-                     'responded_by': result.responded_by,
-                     'responded_at': result.responded_at,
-                     'error': result.error
+                     "conversation_id": result.conversation_id,
+                     "request_id": result.request_id,
+                     "loop_type": result.loop_type,
+                     "status": result.status,
+                     "response": result.response,
+                     "feedback": result.feedback,
+                     "responded_by": result.responded_by,
+                     "responded_at": result.responded_at,
+                     "error": result.error,
                  }

                  kwargs[ret_key] = resp_info
@@ -592,78 +717,85 @@ class LangGraphAdapter:
              if isinstance(result, HumanLoopResult):
                  # Return the obtained info result; the user decides whether to use it
                  if iscoroutinefunction(fn):
-                     return await fn(*args, **kwargs)
-                 return fn(*args, **kwargs)
+                     ret = await fn(*args, **kwargs)
+                 else:
+                     ret = fn(*args, **kwargs)
+                 return cast(R, ret)
              else:
                  raise ValueError(f"Info request timeout or error for {fn.__name__}")

          @wraps(fn)
-         def sync_wrapper(*args, **kwargs) -> R | None:
-             return run_async_safely(async_wrapper(*args, **kwargs))
+         def sync_wrapper(*args: Any, **kwargs: Any) -> R:
+             ret = run_async_safely(async_wrapper(*args, **kwargs))
+             return cast(R, ret)

          # Return the corresponding wrapper based on the decorated function type
          if iscoroutinefunction(fn):
-             return async_wrapper # type: ignore
+             return async_wrapper
          return sync_wrapper
-
+
+
  class LangGraphHumanLoopCallback(HumanLoopCallback):
      """LangGraph-specific human loop callback, compatible with TypedDict or Pydantic BaseModel State"""
-
+
      def __init__(
          self,
          state: Any,
-         async_on_update: Optional[Callable[[Any, HumanLoopProvider, HumanLoopResult], Awaitable[None]]] = None,
-         async_on_timeout: Optional[Callable[[Any, HumanLoopProvider], Awaitable[None]]] = None,
-         async_on_error: Optional[Callable[[Any, HumanLoopProvider, Exception], Awaitable[None]]] = None,
-     ):
+         async_on_update: Optional[
+             Callable[[Any, HumanLoopProvider, HumanLoopResult], Awaitable[None]]
+         ] = None,
+         async_on_timeout: Optional[
+             Callable[[Any, HumanLoopProvider], Awaitable[None]]
+         ] = None,
+         async_on_error: Optional[
+             Callable[[Any, HumanLoopProvider, Exception], Awaitable[None]]
+         ] = None,
+     ) -> None:
          self.state = state
          self.async_on_update = async_on_update
          self.async_on_timeout = async_on_timeout
          self.async_on_error = async_on_error

      async def async_on_humanloop_update(
-         self,
-         provider: HumanLoopProvider,
-         result: HumanLoopResult
-     ):
+         self, provider: HumanLoopProvider, result: HumanLoopResult
+     ) -> None:
          if self.async_on_update:
              await self.async_on_update(self.state, provider, result)

      async def async_on_humanloop_timeout(
          self,
          provider: HumanLoopProvider,
-     ):
+     ) -> None:
          if self.async_on_timeout:
              await self.async_on_timeout(self.state, provider)

      async def async_on_humanloop_error(
-         self,
-         provider: HumanLoopProvider,
-         error: Exception
-     ):
+         self, provider: HumanLoopProvider, error: Exception
+     ) -> None:
          if self.async_on_error:
              await self.async_on_error(self.state, provider, error)


  def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback:
      """Default human-loop callback factory for LangGraph framework
-
+
      This callback focuses on:
      1. Logging human interaction events
-     2. Providing debug information
+     2. Providing debug information
      3. Collecting performance metrics
-
+
      Note: This callback does not modify state to maintain clear state management
-
+
      Args:
          state: LangGraph state object, only used for log correlation
-
+
      Returns:
          Configured LangGraphHumanLoopCallback instance
      """

-
-     async def async_on_update(state, provider: HumanLoopProvider, result: HumanLoopResult):
+     async def async_on_update(
+         state: Any, provider: HumanLoopProvider, result: HumanLoopResult
+     ) -> None:
          """Log human interaction update events"""
          logger.info(f"Provider ID: {provider.name}")
          logger.info(
@@ -674,25 +806,26 @@ def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback
              f"responded_at={result.responded_at}, "
              f"feedback={result.feedback}"
          )
-
-

-     async def async_on_timeout(state, provider: HumanLoopProvider):
+     async def async_on_timeout(state: Any, provider: HumanLoopProvider) -> None:
          """Log human interaction timeout events"""
-
+
          logger.info(f"Provider ID: {provider.name}")
          from datetime import datetime
+
          current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
          logger.warning(f"Human interaction timeout - Time: {current_time}")
-
-
+
          # Alert logic can be added here, such as sending notifications

-     async def async_on_error(state, provider: HumanLoopProvider, error: Exception):
+     async def async_on_error(
+         state: Any, provider: HumanLoopProvider, error: Exception
+     ) -> None:
          """Log human interaction error events"""
-
+
          logger.info(f"Provider ID: {provider.name}")
          from datetime import datetime
+
          current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
          logger.error(f"Human interaction error - Time: {current_time} Error: {error}")

@@ -700,14 +833,14 @@ def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback
          state=state,
          async_on_update=async_on_update,
          async_on_timeout=async_on_timeout,
-         async_on_error=async_on_error
+         async_on_error=async_on_error,
      )

- from gohumanloop.core.manager import DefaultHumanLoopManager
- from gohumanloop.providers.terminal_provider import TerminalProvider

  # Create HumanLoopManager instance
- manager = DefaultHumanLoopManager(initial_providers=TerminalProvider(name="LGDefaultProvider"))
+ manager = DefaultHumanLoopManager(
+     initial_providers=TerminalProvider(name="LGDefaultProvider")
+ )

  # Create LangGraphAdapter instance
  default_adapter = LangGraphAdapter(manager, default_timeout=60)
@@ -716,16 +849,17 @@ default_conversation_id = str(uuid.uuid4())

  _SKIP_NEXT_HUMANLOOP = False

+
  def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
      """
      Wraps LangGraph's interrupt functionality to pause graph execution and wait for human input
-
+
      Raises RuntimeError if LangGraph version doesn't support interrupt
-
+
      Args:
          value: Any JSON-serializable value that will be shown to human user
          lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
      Returns:
          Input value provided by human user
      """
@@ -737,7 +871,7 @@ def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> A
              "LangGraph version too low, interrupt not supported. Please upgrade to version 0.2.57 or higher."
              "You can use: pip install --upgrade langgraph>=0.2.57"
          )
-
+
      if not _SKIP_NEXT_HUMANLOOP:
          # Get current event loop or create new one
          try:
@@ -757,18 +891,19 @@ def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> A
          # Reset flag to allow normal human intervention trigger next time
          _SKIP_NEXT_HUMANLOOP = False

-
      # Return LangGraph's interrupt
      return _lg_interrupt(value)
+
+
  def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
      """
      Create a Command object to resume interrupted graph execution
-
+
      Will raise RuntimeError if LangGraph version doesn't support Command
-
+
      Args:
          lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
      Returns:
          Command object that can be used with graph.stream method
      """
@@ -782,10 +917,12 @@ def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> A
          )

      # Define async polling function
-     def poll_for_result():
+     def poll_for_result() -> Optional[Dict[str, Any]]:
          poll_interval = 1.0  # Polling interval (seconds)
          while True:
-             result = lg_humanloop.manager.check_conversation_status(default_conversation_id)
+             result = lg_humanloop.manager.check_conversation_status(
+                 default_conversation_id
+             )
              # If status is final state (not PENDING), return result
              if result.status != HumanLoopStatus.PENDING:
                  return result.response
@@ -797,15 +934,18 @@ def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> A
      response = poll_for_result()
      return _lg_Command(resume=response)

- async def acreate_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
+
+ async def acreate_resume_command(
+     lg_humanloop: LangGraphAdapter = default_adapter
+ ) -> Any:
      """
      Create an async version of Command object to resume interrupted graph execution
-
+
      Will raise RuntimeError if LangGraph version doesn't support Command
-
+
      Args:
          lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
      Returns:
          Command object that can be used with graph.astream method
      """
@@ -818,19 +958,20 @@ async def acreate_resume_command(lg_humanloop: LangGraphAdapte
          )

      # Define async polling function
-     async def poll_for_result():
+     async def poll_for_result() -> Optional[Dict[str, Any]]:
          poll_interval = 1.0  # Polling interval (seconds)
          while True:
-             result = await lg_humanloop.manager.async_check_conversation_status(default_conversation_id)
+             result = await lg_humanloop.manager.async_check_conversation_status(
+                 default_conversation_id
+             )
              # If status is final state (not PENDING), return result
              if result.status != HumanLoopStatus.PENDING:
                  return result.response
              # Wait before polling again
              await asyncio.sleep(poll_interval)
-
+
      _SKIP_NEXT_HUMANLOOP = True

      # Wait for async result directly
      response = await poll_for_result()
      return _lg_Command(resume=response)
-
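
Usage sketch (minimal, assumption-laden): the decorator API above can be driven through the module-level default_adapter. The import path gohumanloop.adapters.langgraph_adapter and the node function review_node below are illustrative assumptions; the diff does not name the file being changed, and only default_adapter, require_approval, and the injected ret_key dictionary are taken from the code shown above.

    # Minimal sketch, assuming the module path and node function below;
    # only default_adapter, require_approval, and the injected ret_key dict
    # come from the code shown in the diff.
    from typing import Any, Dict, Optional

    from gohumanloop.adapters.langgraph_adapter import default_adapter  # assumed path


    @default_adapter.require_approval(ret_key="approval_info")
    def review_node(
        state: Dict[str, Any], approval_info: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        # approval_info is injected by the decorator and carries conversation_id,
        # status, response, feedback, and related fields (see approval_info above).
        if approval_info is not None:
            state["approval_status"] = str(approval_info["status"])
        return state

When the wrapped node runs, the adapter blocks on the configured provider (the TerminalProvider by default) and only calls the function once the request is approved, raising ValueError on rejection unless execute_on_reject is set.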