gohumanloop 0.0.4__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
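For readers who want to verify a hunk independently, a comparable per-file comparison can be reproduced locally with the Python standard library once both wheels have been downloaded (for example via `pip download --no-deps gohumanloop==0.0.4 gohumanloop==0.0.6`). A minimal sketch, assuming the two wheel files sit in the current directory under their default names; the member path is taken from the file list below:

import difflib
import zipfile

# Assumed local wheel filenames (download them first; nothing is fetched here).
OLD_WHEEL = "gohumanloop-0.0.4-py3-none-any.whl"
NEW_WHEEL = "gohumanloop-0.0.6-py3-none-any.whl"
MEMBER = "gohumanloop/adapters/langgraph_adapter.py"


def read_member(wheel_path: str, member: str) -> list:
    """Read one file out of a wheel; wheels are ordinary zip archives."""
    with zipfile.ZipFile(wheel_path) as wheel:
        return wheel.read(member).decode("utf-8").splitlines(keepends=True)


diff = difflib.unified_diff(
    read_member(OLD_WHEEL, MEMBER),
    read_member(NEW_WHEEL, MEMBER),
    fromfile=f"0.0.4/{MEMBER}",
    tofile=f"0.0.6/{MEMBER}",
)
print("".join(diff))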
- gohumanloop/__init__.py +15 -9
- gohumanloop/adapters/__init__.py +4 -4
- gohumanloop/adapters/langgraph_adapter.py +365 -220
- gohumanloop/cli/main.py +4 -1
- gohumanloop/core/interface.py +181 -215
- gohumanloop/core/manager.py +341 -361
- gohumanloop/manager/ghl_manager.py +223 -185
- gohumanloop/models/api_model.py +32 -7
- gohumanloop/models/glh_model.py +15 -11
- gohumanloop/providers/api_provider.py +233 -189
- gohumanloop/providers/base.py +179 -172
- gohumanloop/providers/email_provider.py +386 -325
- gohumanloop/providers/ghl_provider.py +19 -17
- gohumanloop/providers/terminal_provider.py +111 -92
- gohumanloop/utils/__init__.py +7 -1
- gohumanloop/utils/context_formatter.py +20 -15
- gohumanloop/utils/threadsafedict.py +64 -56
- gohumanloop/utils/utils.py +28 -28
- gohumanloop-0.0.6.dist-info/METADATA +259 -0
- gohumanloop-0.0.6.dist-info/RECORD +30 -0
- {gohumanloop-0.0.4.dist-info → gohumanloop-0.0.6.dist-info}/WHEEL +1 -1
- gohumanloop-0.0.4.dist-info/METADATA +0 -35
- gohumanloop-0.0.4.dist-info/RECORD +0 -30
- {gohumanloop-0.0.4.dist-info → gohumanloop-0.0.6.dist-info}/entry_points.txt +0 -0
- {gohumanloop-0.0.4.dist-info → gohumanloop-0.0.6.dist-info}/licenses/LICENSE +0 -0
- {gohumanloop-0.0.4.dist-info → gohumanloop-0.0.6.dist-info}/top_level.txt +0 -0
--- gohumanloop-0.0.4/gohumanloop/adapters/langgraph_adapter.py
+++ gohumanloop-0.0.6/gohumanloop/adapters/langgraph_adapter.py
@@ -1,35 +1,66 @@
-from typing import
+from typing import (
+    cast,
+    Dict,
+    Any,
+    Optional,
+    Callable,
+    Awaitable,
+    TypeVar,
+    Union,
+    Type,
+    AsyncIterator,
+    Iterator,
+    Coroutine,
+)
+from types import TracebackType
 from functools import wraps
 import asyncio
 import uuid
 import time
 from inspect import iscoroutinefunction
 from contextlib import asynccontextmanager, contextmanager
+import logging
 
 from gohumanloop.utils import run_async_safely
 from gohumanloop.core.interface import (
-    HumanLoopManager,
+    HumanLoopManager,
+    HumanLoopResult,
+    HumanLoopStatus,
+    HumanLoopType,
+    HumanLoopCallback,
+    HumanLoopProvider,
 )
+from gohumanloop.core.manager import DefaultHumanLoopManager
+from gohumanloop.providers.terminal_provider import TerminalProvider
+
+logger = logging.getLogger(__name__)
 
 # Define TypeVars for input and output types
 T = TypeVar("T")
-R = TypeVar(
+R = TypeVar("R", bound=Union[Any, None])
+
 
 # Check LangGraph version
-def _check_langgraph_version():
+def _check_langgraph_version() -> bool:
     """Check LangGraph version to determine if interrupt feature is supported"""
     try:
         import importlib.metadata
+
         version = importlib.metadata.version("langgraph")
-        version_parts = version.split(
-        major, minor, patch =
-
+        version_parts = version.split(".")
+        major, minor, patch = (
+            int(version_parts[0]),
+            int(version_parts[1]),
+            int(version_parts[2]),
+        )
+
         # Interrupt support starts from version 0.2.57
-        return
+        return major > 0 or (major == 0 and (minor > 2 or (minor == 2 and patch >= 57)))
     except (importlib.metadata.PackageNotFoundError, ValueError, IndexError):
         # If version cannot be determined, assume no support
         return False
 
+
 # Import corresponding features based on version
 _SUPPORTS_INTERRUPT = _check_langgraph_version()
 if _SUPPORTS_INTERRUPT:
@@ -39,6 +70,7 @@ if _SUPPORTS_INTERRUPT:
     except ImportError:
         _SUPPORTS_INTERRUPT = False
 
+
 class HumanLoopWrapper:
     def __init__(
         self,
@@ -52,9 +84,10 @@ class HumanLoopWrapper:
     def __call__(self, fn: Callable) -> Callable:
         return self.decorator(fn)
 
+
 class LangGraphAdapter:
     """LangGraph adapter for simplifying human-in-the-loop integration
-
+
     Provides decorators for three scenarios:
     - require_approval: Requires human approval
     - require_info: Requires human input information
@@ -62,66 +95,88 @@ class LangGraphAdapter:
     """
 
     def __init__(
-        self,
-        manager: HumanLoopManager,
-        default_timeout: Optional[int] = None
+        self, manager: HumanLoopManager, default_timeout: Optional[int] = None
     ):
         self.manager = manager
         self.default_timeout = default_timeout
 
-    async def __aenter__(self):
+    async def __aenter__(self) -> "LangGraphAdapter":
         """Implements async context manager protocol, automatically manages manager lifecycle"""
-
-
+
+        manager = cast(Any, self.manager)
+        if hasattr(manager, "__aenter__"):
+            await manager.__aenter__()
         return self
-
-    async def __aexit__(
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
         """Implements async context manager protocol, automatically manages manager lifecycle"""
-
-
-
-
+
+        manager = cast(Any, self.manager)
+        if hasattr(manager, "__aexit__"):
+            await manager.__aexit__(exc_type, exc_val, exc_tb)
+
+        return None
+
+    def __enter__(self) -> "LangGraphAdapter":
         """Implements sync context manager protocol, automatically manages manager lifecycle"""
-
-
+
+        manager = cast(Any, self.manager)
+        if hasattr(manager, "__enter__"):
+            manager.__enter__()
         return self
-
-    def __exit__(
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
         """Implements sync context manager protocol, automatically manages manager lifecycle"""
-
-
-
+
+        manager = cast(Any, self.manager)
+        if hasattr(manager, "__exit__"):
+            manager.__exit__(exc_type, exc_val, exc_tb)
+
+        return None
+
     @asynccontextmanager
-    async def asession(self):
+    async def asession(self) -> AsyncIterator["LangGraphAdapter"]:
         """Provides async context manager for managing session lifecycle
-
+
         Example:
             async with adapter.session():
                 # Use adapter here
         """
         try:
-
-
+            manager = cast(Any, self.manager)
+            if hasattr(manager, "__aenter__"):
+                await manager.__aenter__()
             yield self
         finally:
-            if hasattr(
-            await
-
+            if hasattr(manager, "__aexit__"):
+                await manager.__aexit__(None, None, None)
+
     @contextmanager
-    def session(self):
+    def session(self) -> Iterator["LangGraphAdapter"]:
         """Provides a synchronous context manager for managing session lifecycle
-
+
         Example:
             with adapter.sync_session():
                 # Use adapter here
         """
         try:
-
-
+            manager = cast(Any, self.manager)
+            if hasattr(manager, "__enter__"):
+                manager.__enter__()
            yield self
         finally:
-            if hasattr(
-
+            if hasattr(manager, "__exit__"):
+                manager.__exit__(None, None, None)
 
     def require_approval(
         self,
@@ -133,16 +188,30 @@ class LangGraphAdapter:
         provider_id: Optional[str] = None,
         timeout: Optional[int] = None,
         execute_on_reject: bool = False,
-        callback: Optional[
+        callback: Optional[
+            Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+        ] = None,
     ) -> HumanLoopWrapper:
         """Decorator for approval scenario"""
         if task_id is None:
             task_id = str(uuid.uuid4())
         if conversation_id is None:
             conversation_id = str(uuid.uuid4())
-
-        def decorator(fn):
-            return self._approve_cli(
+
+        def decorator(fn: Callable) -> Callable:
+            return self._approve_cli(
+                fn,
+                task_id,
+                conversation_id,
+                ret_key,
+                additional,
+                metadata,
+                provider_id,
+                timeout,
+                execute_on_reject,
+                callback,
+            )
+
         return HumanLoopWrapper(decorator)
 
     def _approve_cli(
@@ -156,22 +225,27 @@ class LangGraphAdapter:
         provider_id: Optional[str] = None,
         timeout: Optional[int] = None,
         execute_on_reject: bool = False,
-        callback: Optional[
-
+        callback: Optional[
+            Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+        ] = None,
+    ) -> Union[
+        Callable[[T], Coroutine[Any, Any, R]],  # For async functions
+        Callable[[T], R],  # For sync functions
+    ]:
         """
-        Converts function type from Callable[[T], R] to Callable[[T], R
-
+        Converts function type from Callable[[T], R] to Callable[[T], R]
+
         Passes approval results through keyword arguments while maintaining original function signature
-
+
         Benefits of this approach:
         1. Maintains original function return type, keeping compatibility with LangGraph workflow
         2. Decorated function can optionally use approval result information
         3. Can pass richer approval context information
-
+
         Parameters:
         - fn: Target function to be decorated
         - task_id: Unique task identifier for tracking approval requests
-        - conversation_id: Unique conversation identifier for tracking approval sessions
+        - conversation_id: Unique conversation identifier for tracking approval sessions
         - ret_key: Parameter name used to inject approval results into function kwargs
         - additional: Additional context information to show to approvers
         - metadata: Optional metadata dictionary passed with request
@@ -179,11 +253,11 @@ class LangGraphAdapter:
         - timeout: Timeout in seconds for approval response
         - execute_on_reject: Whether to execute function on rejection
         - callback: Optional callback object or factory function for approval events
-
+
         Returns:
         - Decorated function maintaining original signature
        - Raises ValueError if approval fails or is rejected
-
+
         Notes:
         - Decorated function must accept ret_key parameter to receive approval results
         - If approval is rejected, execution depends on execute_on_reject parameter
@@ -200,7 +274,7 @@ class LangGraphAdapter:
         """
 
         @wraps(fn)
-        async def async_wrapper(*args, **kwargs) -> R
+        async def async_wrapper(*args: Any, **kwargs: Any) -> R:
             # Determine if callback is instance or factory function
             cb = None
             if callable(callback) and not isinstance(callback, HumanLoopCallback):
@@ -220,33 +294,33 @@ class LangGraphAdapter:
                         "function_signature": str(fn.__code__.co_varnames),
                         "arguments": str(args),
                         "keyword_arguments": str(kwargs),
-                        "documentation": fn.__doc__ or "No documentation available"
+                        "documentation": fn.__doc__ or "No documentation available",
                     },
                     "question": "Please review and approve/reject this human loop execution.",
-                    "additional": additional
+                    "additional": additional,
                 },
                 callback=cb,
                 metadata=metadata,
                 provider_id=provider_id,
                 timeout=timeout or self.default_timeout,
-                blocking=True
+                blocking=True,
             )
 
             # Initialize approval result object as None
             approval_info = None
-
+
             if isinstance(result, HumanLoopResult):
                 # If result is HumanLoopResult type, build complete approval info
                 approval_info = {
-
-
-
-
-
-
-
-
-
+                    "conversation_id": result.conversation_id,
+                    "request_id": result.request_id,
+                    "loop_type": result.loop_type,
+                    "status": result.status,
+                    "response": result.response,
+                    "feedback": result.feedback,
+                    "responded_by": result.responded_by,
+                    "responded_at": result.responded_at,
+                    "error": result.error,
                 }
 
                 kwargs[ret_key] = approval_info
@@ -255,29 +329,38 @@ class LangGraphAdapter:
                 # Handle based on approval status
                 if result.status == HumanLoopStatus.APPROVED:
                     if iscoroutinefunction(fn):
-
-
+                        ret = await fn(*args, **kwargs)
+                    else:
+                        ret = fn(*args, **kwargs)
+                    return cast(R, ret)
                 elif result.status == HumanLoopStatus.REJECTED:
-
+                    # If execute on reject is set, run the function
                    if execute_on_reject:
                         if iscoroutinefunction(fn):
-
-
+                            ret = await fn(*args, **kwargs)
+                        else:
+                            ret = fn(*args, **kwargs)
+                        return cast(R, ret)
                     # Otherwise return rejection info
                     reason = result.response
-                    raise ValueError(
+                    raise ValueError(
+                        f"Function {fn.__name__} execution not approved: {reason}"
+                    )
                 else:
-                    raise ValueError(
+                    raise ValueError(
+                        f"Approval error for {fn.__name__}: approval status: {result.status} and {result.error}"
+                    )
             else:
                 raise ValueError(f"Unknown approval error: {fn.__name__}")
 
         @wraps(fn)
-        def sync_wrapper(*args, **kwargs) -> R
-
+        def sync_wrapper(*args: Any, **kwargs: Any) -> R:
+            ret = run_async_safely(async_wrapper(*args, **kwargs))
+            return cast(R, ret)
 
         # Return corresponding wrapper based on decorated function type
         if iscoroutinefunction(fn):
-            return async_wrapper
+            return async_wrapper
         return sync_wrapper
 
     def require_conversation(
@@ -290,7 +373,9 @@ class LangGraphAdapter:
         provider_id: Optional[str] = None,
         metadata: Optional[Dict[str, Any]] = None,
         timeout: Optional[int] = None,
-        callback: Optional[
+        callback: Optional[
+            Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+        ] = None,
     ) -> HumanLoopWrapper:
         """Decorator for multi-turn conversation scenario"""
 
@@ -299,8 +384,20 @@ class LangGraphAdapter:
         if conversation_id is None:
             conversation_id = str(uuid.uuid4())
 
-        def decorator(fn):
-            return self._conversation_cli(
+        def decorator(fn: Callable) -> Callable:
+            return self._conversation_cli(
+                fn,
+                task_id,
+                conversation_id,
+                state_key,
+                ret_key,
+                additional,
+                metadata,
+                provider_id,
+                timeout,
+                callback,
+            )
+
         return HumanLoopWrapper(decorator)
 
     def _conversation_cli(
@@ -309,22 +406,27 @@ class LangGraphAdapter:
         task_id: str,
         conversation_id: str,
         state_key: str = "conv_info",
-        ret_key: str = "conv_result",
+        ret_key: str = "conv_result",
         additional: Optional[str] = "",
         metadata: Optional[Dict[str, Any]] = None,
         provider_id: Optional[str] = None,
         timeout: Optional[int] = None,
-        callback: Optional[
-
+        callback: Optional[
+            Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+        ] = None,
+    ) -> Union[
+        Callable[[T], Coroutine[Any, Any, R]],  # For async functions
+        Callable[[T], R],  # For sync functions
+    ]:
         """Internal decorator implementation for multi-turn conversation scenario
-
-        Converts function type from Callable[[T], R] to Callable[[T], R
-
+
+        Converts function type from Callable[[T], R] to Callable[[T], R]
+
         Main features:
         1. Conduct multi-turn conversations through human-machine interaction
         2. Inject conversation results into function parameters via ret_key
         3. Support both synchronous and asynchronous function calls
-
+
         Parameters:
         - fn: Target function to be decorated
         - task_id: Unique task identifier for tracking human interaction requests
@@ -336,16 +438,16 @@ class LangGraphAdapter:
         - provider_id: Optional provider identifier to route requests to specific provider
         - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
         - callback: Optional callback object or factory function for handling human interaction events
-
+
         Returns:
         - Decorated function maintaining original signature
         - Raises ValueError if human interaction fails
-
+
         Notes:
         - Decorated function must accept ret_key parameter to receive interaction results
         - Interaction results contain complete context information including:
            - conversation_id: Unique conversation identifier
-            - request_id: Unique request identifier
+            - request_id: Unique request identifier
            - loop_type: Human interaction type (CONVERSATION)
            - status: Current request status
            - response: Human provided response
@@ -357,7 +459,7 @@ class LangGraphAdapter:
         """
 
         @wraps(fn)
-        async def async_wrapper(*args, **kwargs) -> R
+        async def async_wrapper(*args: Any, **kwargs: Any) -> R:
             # Determine if callback is instance or factory function
             cb = None
             state = args[0] if args else None
@@ -372,11 +474,15 @@ class LangGraphAdapter:
             node_input = state.get(state_key, {})
 
             # Compose question content
-            question_content =
-
+            question_content = (
+                f"Please respond to the following information:\n{node_input}"
+            )
+
             # Check if conversation exists to determine whether to use request_humanloop or continue_humanloop
-            conversation_requests = await self.manager.async_check_conversation_exist(
-
+            conversation_requests = await self.manager.async_check_conversation_exist(
+                task_id, conversation_id
+            )
+
             result = None
             if conversation_requests:
                 # Existing conversation, use continue_humanloop
@@ -384,20 +490,20 @@ class LangGraphAdapter:
                     conversation_id=conversation_id,
                     context={
                         "message": {
-
-
-
-
-
+                            "function_name": fn.__name__,
+                            "function_signature": str(fn.__code__.co_varnames),
+                            "arguments": str(args),
+                            "keyword_arguments": str(kwargs),
+                            "documentation": fn.__doc__ or "No documentation available",
                         },
                         "question": question_content,
-                        "additional": additional
+                        "additional": additional,
                     },
                     timeout=timeout or self.default_timeout,
                    callback=cb,
                    metadata=metadata,
                    provider_id=provider_id,
-                    blocking=True
+                    blocking=True,
                 )
             else:
                 # New conversation, use request_humanloop
@@ -407,20 +513,20 @@ class LangGraphAdapter:
                     loop_type=HumanLoopType.CONVERSATION,
                     context={
                         "message": {
-
-
-
-
-
+                            "function_name": fn.__name__,
+                            "function_signature": str(fn.__code__.co_varnames),
+                            "arguments": str(args),
+                            "keyword_arguments": str(kwargs),
+                            "documentation": fn.__doc__ or "No documentation available",
                        },
                        "question": question_content,
-                        "additional": additional
+                        "additional": additional,
                    },
                    timeout=timeout or self.default_timeout,
                    callback=cb,
                    metadata=metadata,
                    provider_id=provider_id,
-                    blocking=True
+                    blocking=True,
                )
 
            # Initialize conversation result object as None
@@ -428,32 +534,37 @@ class LangGraphAdapter:
 
            if isinstance(result, HumanLoopResult):
                conversation_info = {
-
-
-
-
-
-
-
-
-
+                    "conversation_id": result.conversation_id,
+                    "request_id": result.request_id,
+                    "loop_type": result.loop_type,
+                    "status": result.status,
+                    "response": result.response,
+                    "feedback": result.feedback,
+                    "responded_by": result.responded_by,
+                    "responded_at": result.responded_at,
+                    "error": result.error,
                }
 
                kwargs[ret_key] = conversation_info
 
            if isinstance(result, HumanLoopResult):
                if iscoroutinefunction(fn):
-
-
+                    ret = await fn(*args, **kwargs)
+                else:
+                    ret = fn(*args, **kwargs)
+                return cast(R, ret)
            else:
-                raise ValueError(
+                raise ValueError(
+                    f"Conversation request timeout or error for {fn.__name__}"
+                )
 
        @wraps(fn)
-        def sync_wrapper(*args, **kwargs) -> R
-
+        def sync_wrapper(*args: Any, **kwargs: Any) -> R:
+            ret = run_async_safely(async_wrapper(*args, **kwargs))
+            return cast(R, ret)
 
        if iscoroutinefunction(fn):
-            return async_wrapper
+            return async_wrapper
        return sync_wrapper
 
    def require_info(
@@ -465,7 +576,9 @@ class LangGraphAdapter:
        metadata: Optional[Dict[str, Any]] = None,
        provider_id: Optional[str] = None,
        timeout: Optional[int] = None,
-        callback: Optional[
+        callback: Optional[
+            Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+        ] = None,
    ) -> HumanLoopWrapper:
        """Decorator for information gathering scenario"""
 
@@ -474,8 +587,19 @@ class LangGraphAdapter:
        if conversation_id is None:
            conversation_id = str(uuid.uuid4())
 
-        def decorator(fn):
-            return self._get_info_cli(
+        def decorator(fn: Callable) -> Callable:
+            return self._get_info_cli(
+                fn,
+                task_id,
+                conversation_id,
+                ret_key,
+                additional,
+                metadata,
+                provider_id,
+                timeout,
+                callback,
+            )
+
        return HumanLoopWrapper(decorator)
 
    def _get_info_cli(
@@ -488,16 +612,21 @@ class LangGraphAdapter:
        metadata: Optional[Dict[str, Any]] = None,
        provider_id: Optional[str] = None,
        timeout: Optional[int] = None,
-        callback: Optional[
-
+        callback: Optional[
+            Union[HumanLoopCallback, Callable[[Any], HumanLoopCallback]]
+        ] = None,
+    ) -> Union[
+        Callable[[T], Coroutine[Any, Any, R]],  # For async functions
+        Callable[[T], R],  # For sync functions
+    ]:
        """Internal decorator implementation for information gathering scenario
-        Converts function type from Callable[[T], R] to Callable[[T], R
-
+        Converts function type from Callable[[T], R] to Callable[[T], R]
+
        Main features:
        1. Get required information through human-machine interaction
        2. Inject obtained information into function parameters via ret_key
        3. Support both synchronous and asynchronous function calls
-
+
        Parameters:
        - fn: Target function to be decorated
        - task_id: Unique task identifier for tracking the human loop request
@@ -508,11 +637,11 @@ class LangGraphAdapter:
        - provider_id: Optional provider identifier to route request to specific provider
        - timeout: Timeout in seconds for human response, defaults to adapter's default_timeout
        - callback: Optional callback object or factory function for handling human loop events
-
+
        Returns:
        - Decorated function maintaining original signature
        - Raises ValueError if human interaction fails
-
+
        Notes:
        - Decorated function must accept ret_key parameter to receive interaction results
        - Interaction results contain complete context information including:
@@ -529,8 +658,7 @@ class LangGraphAdapter:
        """
 
        @wraps(fn)
-        async def async_wrapper(*args, **kwargs) -> R
-
+        async def async_wrapper(*args: Any, **kwargs: Any) -> R:
            # Determine if callback is an instance or factory function
            # callback: can be HumanLoopCallback instance or factory function
            # - If factory function: accepts state parameter and returns HumanLoopCallback instance
@@ -543,7 +671,7 @@ class LangGraphAdapter:
                cb = callback(state)
            else:
                cb = callback
-
+
            result = await self.manager.async_request_humanloop(
                task_id=task_id,
                conversation_id=conversation_id,
@@ -554,16 +682,16 @@ class LangGraphAdapter:
                        "function_signature": str(fn.__code__.co_varnames),
                        "arguments": str(args),
                        "keyword_arguments": str(kwargs),
-                        "documentation": fn.__doc__ or "No documentation available"
+                        "documentation": fn.__doc__ or "No documentation available",
                    },
                    "question": "Please provide the required information for the human loop",
-                    "additional": additional
+                    "additional": additional,
                },
                timeout=timeout or self.default_timeout,
                callback=cb,
                metadata=metadata,
                provider_id=provider_id,
-                blocking=True
+                blocking=True,
            )
 
            # 初始化审批结果对象为None
@@ -572,15 +700,15 @@ class LangGraphAdapter:
            if isinstance(result, HumanLoopResult):
                # 如果结果是HumanLoopResult类型,则构建完整的审批信息
                resp_info = {
-
-
-
-
-
-
-
-
-
+                    "conversation_id": result.conversation_id,
+                    "request_id": result.request_id,
+                    "loop_type": result.loop_type,
+                    "status": result.status,
+                    "response": result.response,
+                    "feedback": result.feedback,
+                    "responded_by": result.responded_by,
+                    "responded_at": result.responded_at,
+                    "error": result.error,
                }
 
                kwargs[ret_key] = resp_info
@@ -589,80 +717,85 @@ class LangGraphAdapter:
            if isinstance(result, HumanLoopResult):
                # 返回获取信息结果,由用户去判断是否使用
                if iscoroutinefunction(fn):
-
-
+                    ret = await fn(*args, **kwargs)
+                else:
+                    ret = fn(*args, **kwargs)
+                return cast(R, ret)
            else:
                raise ValueError(f"Info request timeout or error for {fn.__name__}")
 
        @wraps(fn)
-        def sync_wrapper(*args, **kwargs) -> R
-
+        def sync_wrapper(*args: Any, **kwargs: Any) -> R:
+            ret = run_async_safely(async_wrapper(*args, **kwargs))
+            return cast(R, ret)
 
        # 根据被装饰函数类型返回对应的wrapper
        if iscoroutinefunction(fn):
-            return async_wrapper
+            return async_wrapper
        return sync_wrapper
-
+
+
 class LangGraphHumanLoopCallback(HumanLoopCallback):
    """LangGraph-specific human loop callback, compatible with TypedDict or Pydantic BaseModel State"""
-
+
    def __init__(
        self,
        state: Any,
-        async_on_update: Optional[
-
-
-
+        async_on_update: Optional[
+            Callable[[Any, HumanLoopProvider, HumanLoopResult], Awaitable[None]]
+        ] = None,
+        async_on_timeout: Optional[
+            Callable[[Any, HumanLoopProvider], Awaitable[None]]
+        ] = None,
+        async_on_error: Optional[
+            Callable[[Any, HumanLoopProvider, Exception], Awaitable[None]]
+        ] = None,
+    ) -> None:
        self.state = state
        self.async_on_update = async_on_update
        self.async_on_timeout = async_on_timeout
        self.async_on_error = async_on_error
 
    async def async_on_humanloop_update(
-        self,
-
-        result: HumanLoopResult
-    ):
+        self, provider: HumanLoopProvider, result: HumanLoopResult
+    ) -> None:
        if self.async_on_update:
            await self.async_on_update(self.state, provider, result)
 
    async def async_on_humanloop_timeout(
        self,
        provider: HumanLoopProvider,
-    ):
+    ) -> None:
        if self.async_on_timeout:
            await self.async_on_timeout(self.state, provider)
 
    async def async_on_humanloop_error(
-        self,
-
-        error: Exception
-    ):
+        self, provider: HumanLoopProvider, error: Exception
+    ) -> None:
        if self.async_on_error:
            await self.async_on_error(self.state, provider, error)
 
 
 def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback:
    """Default human-loop callback factory for LangGraph framework
-
+
    This callback focuses on:
    1. Logging human interaction events
-    2. Providing debug information
+    2. Providing debug information
    3. Collecting performance metrics
-
+
    Note: This callback does not modify state to maintain clear state management
-
+
    Args:
        state: LangGraph state object, only used for log correlation
-
+
    Returns:
        Configured LangGraphHumanLoopCallback instance
    """
-
-
-
-
-    async def async_on_update(state, provider: HumanLoopProvider, result: HumanLoopResult):
+
+    async def async_on_update(
+        state: Any, provider: HumanLoopProvider, result: HumanLoopResult
+    ) -> None:
        """Log human interaction update events"""
        logger.info(f"Provider ID: {provider.name}")
        logger.info(
@@ -673,25 +806,26 @@ def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback
            f"responded_at={result.responded_at}, "
            f"feedback={result.feedback}"
        )
-
-
 
-    async def async_on_timeout(state, provider: HumanLoopProvider):
+    async def async_on_timeout(state: Any, provider: HumanLoopProvider) -> None:
        """Log human interaction timeout events"""
-
+
        logger.info(f"Provider ID: {provider.name}")
        from datetime import datetime
+
        current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        logger.warning(f"Human interaction timeout - Time: {current_time}")
-
-
+
        # Alert logic can be added here, such as sending notifications
 
-    async def async_on_error(
+    async def async_on_error(
+        state: Any, provider: HumanLoopProvider, error: Exception
+    ) -> None:
        """Log human interaction error events"""
-
+
        logger.info(f"Provider ID: {provider.name}")
        from datetime import datetime
+
        current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        logger.error(f"Human interaction error - Time: {current_time} Error: {error}")
 
@@ -699,14 +833,14 @@ def default_langgraph_callback_factory(state: Any) -> LangGraphHumanLoopCallback
        state=state,
        async_on_update=async_on_update,
        async_on_timeout=async_on_timeout,
-        async_on_error=async_on_error
+        async_on_error=async_on_error,
    )
 
-from gohumanloop.core.manager import DefaultHumanLoopManager
-from gohumanloop.providers.terminal_provider import TerminalProvider
 
 # Create HumanLoopManager instance
-manager = DefaultHumanLoopManager(
+manager = DefaultHumanLoopManager(
+    initial_providers=TerminalProvider(name="LGDefaultProvider")
+)
 
 # Create LangGraphAdapter instance
 default_adapter = LangGraphAdapter(manager, default_timeout=60)
@@ -715,16 +849,17 @@ default_conversation_id = str(uuid.uuid4())
 
 _SKIP_NEXT_HUMANLOOP = False
 
+
 def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
    """
    Wraps LangGraph's interrupt functionality to pause graph execution and wait for human input
-
+
    Raises RuntimeError if LangGraph version doesn't support interrupt
-
+
    Args:
        value: Any JSON-serializable value that will be shown to human user
        lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
    Returns:
        Input value provided by human user
    """
@@ -736,35 +871,39 @@ def interrupt(value: Any, lg_humanloop: LangGraphAdapter = default_adapter) -> A
            "LangGraph version too low, interrupt not supported. Please upgrade to version 0.2.57 or higher."
            "You can use: pip install --upgrade langgraph>=0.2.57"
        )
-
+
    if not _SKIP_NEXT_HUMANLOOP:
        # Get current event loop or create new one
-
-
-
-
-
-
-
-
-
-
+        try:
+            lg_humanloop.manager.request_humanloop(
+                task_id="lg_interrupt",
+                conversation_id=default_conversation_id,
+                loop_type=HumanLoopType.INFORMATION,
+                context={
+                    "message": f"{value}",
+                    "question": "The execution has been interrupted. Please review the above information and provide your input to continue.",
+                },
+                blocking=False,
+            )
+        except Exception as e:
+            logger.exception(f"Error in interrupt: {e}")
    else:
        # Reset flag to allow normal human intervention trigger next time
        _SKIP_NEXT_HUMANLOOP = False
 
-
    # Return LangGraph's interrupt
    return _lg_interrupt(value)
+
+
 def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> Any:
    """
    Create a Command object to resume interrupted graph execution
-
+
    Will raise RuntimeError if LangGraph version doesn't support Command
-
+
    Args:
        lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
    Returns:
        Command object that can be used with graph.stream method
    """
@@ -778,10 +917,12 @@ def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> A
        )
 
    # Define async polling function
-    def poll_for_result():
+    def poll_for_result() -> Optional[Dict[str, Any]]:
        poll_interval = 1.0  # Polling interval (seconds)
        while True:
-            result = lg_humanloop.manager.check_conversation_status(
+            result = lg_humanloop.manager.check_conversation_status(
+                default_conversation_id
+            )
            # If status is final state (not PENDING), return result
            if result.status != HumanLoopStatus.PENDING:
                return result.response
@@ -793,15 +934,18 @@ def create_resume_command(lg_humanloop: LangGraphAdapter = default_adapter) -> A
    response = poll_for_result()
    return _lg_Command(resume=response)
 
-
+
+
+async def acreate_resume_command(
+    lg_humanloop: LangGraphAdapter = default_adapter
+) -> Any:
    """
    Create an async version of Command object to resume interrupted graph execution
-
+
    Will raise RuntimeError if LangGraph version doesn't support Command
-
+
    Args:
        lg_humanloop: LangGraphAdapter instance, defaults to global instance
-
+
    Returns:
        Command object that can be used with graph.astream method
    """
@@ -814,19 +958,20 @@ async def acreate_resume_command(lg_humanloop: LangGraphAdapter = default_adapte
    )
 
    # Define async polling function
-    async def poll_for_result():
+    async def poll_for_result() -> Optional[Dict[str, Any]]:
        poll_interval = 1.0  # Polling interval (seconds)
        while True:
-            result = await lg_humanloop.manager.async_check_conversation_status(
+            result = await lg_humanloop.manager.async_check_conversation_status(
+                default_conversation_id
+            )
            # If status is final state (not PENDING), return result
            if result.status != HumanLoopStatus.PENDING:
                return result.response
            # Wait before polling again
            await asyncio.sleep(poll_interval)
-
+
    _SKIP_NEXT_HUMANLOOP = True
 
    # Wait for async result directly
    response = await poll_for_result()
    return _lg_Command(resume=response)
-
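Taken together, the rewritten adapter module exports a module-level `default_adapter` (backed by a `DefaultHumanLoopManager` with a `TerminalProvider`) alongside the `interrupt`/`create_resume_command` helpers. A minimal usage sketch, assuming the module-level names shown in the diff above are importable as-is in 0.0.6; the node function, state shape, and graph wiring are hypothetical:

from typing import Any, Dict, Optional

from gohumanloop.adapters.langgraph_adapter import (
    default_adapter,
    default_langgraph_callback_factory,
)


@default_adapter.require_approval(
    ret_key="approval_result",        # approval info is injected under this kwarg
    execute_on_reject=False,          # a rejection raises instead of running the node
    timeout=120,
    callback=default_langgraph_callback_factory,  # factory receiving the node state
)
def deploy_node(
    state: Dict[str, Any], approval_result: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
    # approval_result carries conversation_id, request_id, status, response, ...
    approved_by = (approval_result or {}).get("responded_by")
    return {**state, "approved_by": approved_by}


# Inside a LangGraph node, `interrupt({...})` pauses the graph (LangGraph >= 0.2.57)
# and files a non-blocking human-loop request; on the driver side the graph might
# then be resumed with something like `graph.stream(create_resume_command(), config=...)`.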