pybotchi 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pybotchi/__init__.py +22 -0
- pybotchi/action.py +399 -0
- pybotchi/constants.py +133 -0
- pybotchi/context.py +157 -0
- pybotchi/llm.py +46 -0
- pybotchi/mcp.py +408 -0
- pybotchi/tools.py +81 -0
- pybotchi/utils.py +18 -0
- pybotchi-1.0.0.dist-info/LICENSE +201 -0
- pybotchi-1.0.0.dist-info/METADATA +567 -0
- pybotchi-1.0.0.dist-info/RECORD +12 -0
- pybotchi-1.0.0.dist-info/WHEEL +4 -0
pybotchi/__init__.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""Pybotchi."""
|
|
2
|
+
|
|
3
|
+
from .action import DEFAULT_ACTION
|
|
4
|
+
from .constants import ChatRole
|
|
5
|
+
from .context import Action, ActionReturn, Context
|
|
6
|
+
from .llm import LLM
|
|
7
|
+
from .mcp import MCPAction, MCPConnection, MCPToolAction, start_mcp_servers
|
|
8
|
+
from .tools import graph
|
|
9
|
+
|
|
10
|
+
__all__ = [
|
|
11
|
+
"DEFAULT_ACTION",
|
|
12
|
+
"ChatRole",
|
|
13
|
+
"Action",
|
|
14
|
+
"ActionReturn",
|
|
15
|
+
"Context",
|
|
16
|
+
"LLM",
|
|
17
|
+
"MCPAction",
|
|
18
|
+
"MCPConnection",
|
|
19
|
+
"MCPToolAction",
|
|
20
|
+
"start_mcp_servers",
|
|
21
|
+
"graph",
|
|
22
|
+
]
|
pybotchi/action.py
ADDED
|
@@ -0,0 +1,399 @@
|
|
|
1
|
+
"""Pybotchi Action."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from asyncio import TaskGroup
|
|
6
|
+
from collections import OrderedDict
|
|
7
|
+
from inspect import getmembers
|
|
8
|
+
from itertools import islice
|
|
9
|
+
from os import getenv
|
|
10
|
+
from typing import Any, TYPE_CHECKING, TypeAlias, TypeVar, cast
|
|
11
|
+
from uuid import uuid4
|
|
12
|
+
|
|
13
|
+
from mcp.server.fastmcp import FastMCP
|
|
14
|
+
|
|
15
|
+
from openai.types.chat.chat_completion_message_tool_call_param import (
|
|
16
|
+
ChatCompletionMessageToolCallParam,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
from pydantic import BaseModel, PrivateAttr
|
|
20
|
+
|
|
21
|
+
from .constants import ActionEntry, ActionReturn, Usage
|
|
22
|
+
from .utils import apply_placeholders
|
|
23
|
+
|
|
24
|
+
if TYPE_CHECKING:
|
|
25
|
+
from .context import Context
|
|
26
|
+
|
|
27
|
+
try:
|
|
28
|
+
from uuid6 import uuid7 # type: ignore[import-not-found]
|
|
29
|
+
|
|
30
|
+
gen_uuid = uuid7
|
|
31
|
+
except Exception:
|
|
32
|
+
gen_uuid = uuid4
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
DEFAULT_ACTION = getenv("DEFAULT_ACTION", "DefaultAction")
|
|
36
|
+
DEFAULT_TOOL_CALL_PROMPT = getenv(
|
|
37
|
+
"DEFAULT_TOOL_CALL_PROMPT",
|
|
38
|
+
"""
|
|
39
|
+
You are an AI assistant expert in function calling.
|
|
40
|
+
Your primary responsibility is to select and invoke the most suitable function(s) to accurately fulfill the user's request, following the guidelines below.
|
|
41
|
+
|
|
42
|
+
# `tool_choice` is set to "${tool_choice}"
|
|
43
|
+
|
|
44
|
+
# Function Calling Guidelines:
|
|
45
|
+
- You may call one or more functions as needed, including repeated calls to the same function, to ensure the user's request is fully addressed.
|
|
46
|
+
- Always invoke functions in a logical and sequential order to ensure comprehensive and accurate responses.
|
|
47
|
+
- If `${default}` function is provided and `Initial Task` doesn't have rules over it, prioritize invoking it whenever no other relevant or suitable function is available.
|
|
48
|
+
- If `tool_choice` is set to `auto` and no suitable function can be identified, respond directly to the user based on the provided `Initial Task`.
|
|
49
|
+
|
|
50
|
+
# Initial Task:
|
|
51
|
+
${system}
|
|
52
|
+
|
|
53
|
+
${addons}
|
|
54
|
+
""".strip(),
|
|
55
|
+
)
|
|
56
|
+
|
|
57
|
+
TAction = TypeVar("TAction", bound="Action")
|
|
58
|
+
T = TypeVar("T")
|
|
59
|
+
|
|
60
|
+
ChildActions: TypeAlias = OrderedDict[str, type["Action"]]
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class Action(BaseModel):
    """Base Agent Action.

    Pydantic fields declared on a subclass become the tool-call arguments
    exposed to the LLM; nested ``Action`` subclasses declared as class
    attributes are auto-discovered and become selectable child tools.
    """

    # MCP servers shared across the whole Action hierarchy.
    __mcp_servers__: dict[str, FastMCP] = {}

    ##############################################################
    #                      CLASS VARIABLES                       #
    ##############################################################

    __enabled__: bool = True  # disabled actions are excluded from selection
    __system_prompt__: str | None = None  # overrides context.prompts[0] content
    __temperature__: float | None = None  # per-action LLM temperature override
    __max_tool_prompts__: int | None = None  # cap on history sent to selection
    __default_tool__ = DEFAULT_ACTION  # fallback tool named in the prompt
    __first_tool_only__ = False  # only run the first selected tool
    __concurrent__ = False  # schedule this action as a concurrent task
    __mcp_hosts__: list[str] | None = None

    # --------------------- not inheritable -------------------- #

    __agent__: bool = False
    __max_iteration__: int | None
    __display_name__: str
    __has_pre__: bool  # True when subclass overrides pre()
    __has_fallback__: bool  # True when subclass overrides fallback()
    __has_post__: bool  # True when subclass overrides post()
    __detached__: bool  # runs on a detached context, committed afterwards
    __max_child_iteration__: int | None
    __child_actions__: ChildActions
    __mcp_tool_actions__: ChildActions
    __mcp_groups__: list[str] | None

    # ---------------------------------------------------------- #

    ##############################################################
    #                     INSTANCE VARIABLES                     #
    ##############################################################

    _usage: list[Usage] = PrivateAttr(default_factory=list)
    _actions: list["Action"] = PrivateAttr(default_factory=list)

    # ---------------------------------------------------------- #

    @classmethod
    def __pydantic_init_subclass__(cls, **kwargs: Any) -> None:
        """Recompute per-subclass metadata and discover nested child actions.

        Runs once per subclass definition; reads only the subclass' own
        ``__dict__`` for the "not inheritable" flags so parents' values do
        not leak down.
        """
        src = cls.__dict__
        cls.__agent__ = src.get("__agent__", False)
        cls.__display_name__ = src.get("__display_name__", cls.__name__)
        cls.__has_pre__ = cls.pre is not Action.pre
        cls.__has_fallback__ = cls.fallback is not Action.fallback
        cls.__has_post__ = cls.post is not Action.post
        # Overriding commit_context implies the action runs detached.
        cls.__detached__ = src.get(
            "__detached__", cls.commit_context is not Action.commit_context
        )
        cls.__max_iteration__ = src.get("__max_iteration__")
        cls.__max_child_iteration__ = src.get("__max_child_iteration__")
        cls.__mcp_groups__ = src.get("__mcp_groups__")

        # Discover nested classes: MCP tool actions vs regular child actions.
        cls.__mcp_tool_actions__ = OrderedDict()
        cls.__child_actions__ = OrderedDict()
        for _name, attr in getmembers(cls):
            if isinstance(attr, type):
                if getattr(attr, "__mcp_tool__", False):
                    cls.__mcp_tool_actions__[attr.__name__] = attr
                elif issubclass(attr, Action):
                    cls.__child_actions__[attr.__name__] = attr

    async def get_child_actions(self, context: Context) -> ChildActions:
        """Retrieve child Actions enabled for this context.

        ``context.allowed_actions`` overrides each child's ``__enabled__``
        default when the child's name is present.
        """
        return OrderedDict(
            item
            for item in self.__child_actions__.items()
            if context.allowed_actions.get(item[0], item[1].__enabled__)
        )

    @property
    def _tool_call(self) -> ChatCompletionMessageToolCallParam:
        """Build an OpenAI tool-call payload representing this action.

        The arguments are this model's serialized fields; the id is a fresh
        ``call_<uuid>`` token.
        """
        tool_id = f"call_{gen_uuid().hex}"
        return {
            "id": tool_id,
            "function": {
                "name": self.__class__.__name__,
                "arguments": self.model_dump_json(),
            },
            "type": "function",
        }

    async def execute(self, context: Context) -> ActionReturn:
        """Execute main process: pre -> execution (possibly looped) -> post.

        Detached actions run on a copy of the context which is committed back
        to the parent on success (never on exception).
        """
        parent = context
        to_commit = True
        try:
            if self.__detached__:
                context = await context.detach_context()

            if context.check_self_recursion(self):
                return ActionReturn.END

            if self.__has_pre__ and (result := await self.pre(context)).is_break:
                return result

            if self.__max_child_iteration__:
                # Re-run execution until a break result or the iteration cap.
                iteration = 0
                while iteration <= self.__max_child_iteration__:
                    if (result := await self.execution(context)).is_break:
                        break
                    iteration += 1
                if result.is_end:
                    return result
            elif (result := await self.execution(context)).is_break:
                return result

            if self.__has_post__ and (result := await self.post(context)).is_break:
                return result

            return ActionReturn.GO
        except Exception:
            # Never commit a detached context after a failure.
            to_commit = False
            raise
        finally:
            if to_commit and self.__detached__:
                await self.commit_context(parent, context)

    async def pre(self, context: Context) -> ActionReturn:
        """Execute pre process hook (no-op by default)."""
        return ActionReturn.GO

    async def fallback(self, context: Context, content: str) -> ActionReturn:
        """Execute fallback process hook (no-op by default)."""
        return ActionReturn.GO

    async def child_selection(
        self,
        context: Context,
        child_actions: ChildActions | None = None,
    ) -> tuple[list["Action"], str]:
        """Execute tool selection process.

        Asks the LLM to choose among ``child_actions`` (defaults to this
        action's enabled children) and instantiates one Action per tool call.
        Returns the instantiated actions plus the raw message content.
        """
        tool_choice = "auto" if self.__has_fallback__ else "required"

        if child_actions is None:
            child_actions = await self.get_child_actions(context)
        llm = context.llm.bind_tools([*child_actions.values()], tool_choice=tool_choice)
        if self.__temperature__ is not None:
            llm = llm.with_config(
                configurable={"llm_temperature": self.__temperature__}
            )

        # Window of history prompts forwarded to the selection call; index 0
        # (the original system prompt) is always replaced by the tool prompt.
        # (Renamed from min/max: don't shadow the builtins.)
        stop = len(context.prompts)
        if self.__max_tool_prompts__:
            start = stop - self.__max_tool_prompts__
            start = 1 if start < 1 else start
        else:
            start = 1

        message = await llm.ainvoke(
            [
                {
                    "content": apply_placeholders(
                        DEFAULT_TOOL_CALL_PROMPT,
                        tool_choice=tool_choice,
                        default=self.__default_tool__,
                        system=self.__system_prompt__
                        or context.prompts[0]["content"]
                        or "Not defined",
                    ),
                    "role": "system",
                },
                *islice(context.prompts, start, stop),
            ]
        )
        context.add_usage(
            self,
            context.llm,
            message.usage_metadata,  # type: ignore[attr-defined]
            "$tool",
        )

        next_actions = [
            child_actions[call["name"]](**call["args"]) for call in message.tool_calls  # type: ignore[attr-defined]
        ]

        return next_actions, cast(str, message.content)

    async def execution(self, context: Context) -> ActionReturn:
        """Execute core process.

        Shortcuts to the single argument-less child when selection would be
        pointless; otherwise runs LLM tool selection, executes the selected
        actions (concurrently when flagged), and falls back to a plain LLM
        answer when a fallback override exists and nothing was selected.
        """
        child_actions = await self.get_child_actions(context)
        if (
            len(child_actions) == 1
            and not (action := next(iter(child_actions.values()))).model_fields
            and not self.__has_fallback__
        ):
            # Single no-argument child: execute it directly, no LLM round-trip.
            self._actions.append(next_action := action())  # type: ignore[call-arg]
            if (result := await next_action.execute(context)).is_break:
                return result
        elif child_actions:
            await context.notify(
                {
                    "event": "tool",
                    "type": "selection",
                    "status": "started",
                    "data": [n.__display_name__ for n in child_actions.values()],
                }
            )

            next_actions, content = await self.child_selection(context, child_actions)

            await context.notify(
                {
                    "event": "tool",
                    "type": "selection",
                    "status": "completed",
                    "data": [n.__display_name__ for n in next_actions],
                }
            )

            if next_actions:
                async with TaskGroup() as tg:
                    for next_action in (
                        islice(next_actions, 1)
                        if self.__first_tool_only__
                        else next_actions
                    ):
                        self._actions.append(next_action)
                        if next_action.__concurrent__:
                            tg.create_task(next_action.execute(context))
                        elif (result := await next_action.execute(context)).is_break:
                            return result
            elif (
                self.__has_fallback__
                and (result := await self.fallback(context, content)).is_break
            ):
                return result
        elif self.__has_fallback__:
            # No children at all: answer directly, then hand off to fallback.
            llm = (
                context.llm.with_config(
                    configurable={"llm_temperature": self.__temperature__}
                )
                if self.__temperature__ is not None
                else context.llm
            )

            await context.notify(
                {
                    "event": "tool",
                    "type": "fallback",
                    "status": "started",
                    "data": self.__display_name__,
                }
            )

            message = await llm.ainvoke(context.prompts)

            context.add_usage(
                self,
                context.llm,
                message.usage_metadata,  # type: ignore[attr-defined]
                "$fallback",
            )

            await context.notify(
                {
                    "event": "tool",
                    "type": "fallback",
                    "status": "completed",
                    "data": self.__display_name__,
                }
            )

            if (result := await self.fallback(context, message.content)).is_break:  # type: ignore[arg-type]
                return result

        return ActionReturn.GO

    async def post(self, context: Context) -> ActionReturn:
        """Execute post process hook (no-op by default)."""
        return ActionReturn.GO

    async def commit_context(self, parent: Context, child: Context) -> None:
        """Merge a detached child context back into its parent.

        Default implementation only folds token usage; detached subclasses
        override this to commit whatever else they need.
        """
        usage = parent.usage
        for model, token in child.usage.items():
            if model not in usage:
                usage[model] = token
            else:
                usage[model] += token

    def serialize(self) -> ActionEntry:
        """Serialize this action, its usages and executed children."""
        return {
            "name": self.__class__.__name__,
            "args": self.model_dump(),
            "usages": [u.model_dump() for u in self._usage],  # type: ignore[misc]
            "actions": [a.serialize() for a in self._actions],
        }

    ####################################################################################################
    #                                           ACTION TOOLS                                           #
    # ------------------------------------------------------------------------------------------------ #

    @classmethod
    def add_child(
        cls,
        action: type["Action"],
        name: str | None = None,
        override: bool = False,
        extended: bool = True,
    ) -> None:
        """Add child action at runtime.

        Args:
            action: Action subclass to attach.
            name: attribute/tool name (defaults to the class name).
            override: allow replacing an existing attribute.
            extended: attach a fresh subclass instead of the class itself so
                later mutations don't leak into the original.
        """
        name = name or action.__name__
        if not override and hasattr(cls, name):
            raise ValueError(f"Attribute {name} already exists!")

        if not issubclass(action, Action):
            raise ValueError(f"{action.__name__} is not a valid action!")

        if extended:
            action = type(name, (action,), {"__module__": action.__module__})

        if getattr(action, "__mcp_tool__", False):
            cls.__mcp_tool_actions__[name] = action
        else:
            cls.__child_actions__[name] = action
        setattr(cls, name, action)

    @classmethod
    def add_grand_child(
        cls,
        action: type["Action"],
        name: str | None = None,
        override: bool = False,
        extended: bool = True,
    ) -> None:
        """Add ``action`` as a child of every existing child action."""
        for ccls in cls.__child_actions__.values():
            ccls.add_child(action, name, override, extended)
|
pybotchi/constants.py
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
"""Pybotchi Constants."""
|
|
2
|
+
|
|
3
|
+
from enum import StrEnum
|
|
4
|
+
from functools import cached_property
|
|
5
|
+
from typing import Annotated, Any, ClassVar, NotRequired, TypedDict
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, Field, SkipValidation
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class ChatRole(StrEnum):
|
|
11
|
+
"""Chat Role Enum."""
|
|
12
|
+
|
|
13
|
+
USER = "user"
|
|
14
|
+
SYSTEM = "system"
|
|
15
|
+
ASSISTANT = "assistant"
|
|
16
|
+
TOOL = "tool"
|
|
17
|
+
DEVELOPER = "developer"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class InputTokenDetails(TypedDict, total=False):
|
|
21
|
+
"""Input Token Details."""
|
|
22
|
+
|
|
23
|
+
audio: int
|
|
24
|
+
cache_creation: int
|
|
25
|
+
cache_read: int
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class OutputTokenDetails(TypedDict, total=False):
|
|
29
|
+
"""Output Token Details."""
|
|
30
|
+
|
|
31
|
+
audio: int
|
|
32
|
+
reasoning: int
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class UsageMetadata(TypedDict):
|
|
36
|
+
"""Usage Metadata."""
|
|
37
|
+
|
|
38
|
+
input_tokens: int
|
|
39
|
+
output_tokens: int
|
|
40
|
+
total_tokens: int
|
|
41
|
+
input_token_details: NotRequired[InputTokenDetails]
|
|
42
|
+
output_token_details: NotRequired[OutputTokenDetails]
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class Usage(BaseModel):
    """Token Usage record attributed to a named step and model."""

    # Step label, e.g. "$tool" / "$fallback"; None when unattributed.
    name: str | None
    # Model identifier that produced this usage.
    model: str
    # Raw token counters reported by the provider.
    usage: UsageMetadata
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class UsageResponse(TypedDict):
    """Usage Response (plain-dict mirror of the ``Usage`` model)."""

    name: str | None
    model: str
    usage: UsageMetadata


class ActionItem(TypedDict):
    """Action Item: one serialized action without its children."""

    name: str
    args: dict[str, Any]
    usages: list[UsageResponse]


class ActionEntry(ActionItem):
    """Action Entry: an ActionItem plus its recursively serialized children."""

    actions: list["ActionEntry"]
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class Graph(BaseModel):
    """Action graph: node names and directed edges.

    (Docstring fixed: it previously read "Action Result Class.", a
    copy-paste of ``ActionReturn``'s docstring.)
    """

    # Node identifiers.
    nodes: set[str] = Field(default_factory=set)
    # (source, target, flag) triples; the bool's meaning is assigned by the
    # graph builder in pybotchi.tools — not determinable from this module.
    edges: set[tuple[str, str, bool]] = Field(default_factory=set)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
class ActionReturn(BaseModel):
    """Action Result Class.

    Control-flow signal returned by action hooks: ``Go`` continues,
    ``Break`` stops the current iteration, ``End`` stops everything.
    Shared singletons ``GO``/``BREAK``/``END`` carry no value; use
    :meth:`go` / :meth:`end` to attach one.
    """

    # Optional payload carried alongside the control-flow signal.
    value: Annotated[Any, SkipValidation()] = None

    GO: ClassVar["Go"]
    BREAK: ClassVar["Break"]
    END: ClassVar["End"]

    class Config:
        """Model Config."""

        arbitrary_types_allowed = True

    @staticmethod
    def end(value: Any) -> "End":
        """Return ActionReturn.END with value."""
        return End(value=value)

    @staticmethod
    def go(value: Any) -> "Go":
        """Return ActionReturn.GO with value."""
        return Go(value=value)

    @cached_property
    def is_break(self) -> bool:
        """Check if instance of Break (fixed: docstring previously said End)."""
        return isinstance(self, Break)

    @cached_property
    def is_end(self) -> bool:
        """Check if instance of End."""
        return isinstance(self, End)
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
class Go(ActionReturn):
    """Continue Action."""


class Break(ActionReturn):
    """Break Action Iteration."""


class End(Break):
    """End Action."""


# Shared, value-less singletons; use ActionReturn.go(...) / .end(...) when a
# payload is needed.
ActionReturn.GO = Go()
ActionReturn.END = End()
ActionReturn.BREAK = Break()

# Sentinel string for intentionally unspecified values.
UNSPECIFIED = "UNSPECIFIED"
|