fabricatio 0.2.0.dev4__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fabricatio/__init__.py +32 -0
- fabricatio/_rust.cp312-win_amd64.pyd +0 -0
- fabricatio/_rust.pyi +1 -0
- fabricatio/actions/__init__.py +5 -0
- fabricatio/actions/communication.py +13 -0
- fabricatio/actions/transmission.py +32 -0
- fabricatio/config.py +206 -0
- fabricatio/core.py +167 -0
- fabricatio/decorators.py +56 -0
- fabricatio/fs/__init__.py +5 -0
- fabricatio/fs/readers.py +5 -0
- fabricatio/journal.py +23 -0
- fabricatio/models/action.py +128 -0
- fabricatio/models/events.py +80 -0
- fabricatio/models/generic.py +388 -0
- fabricatio/models/role.py +26 -0
- fabricatio/models/task.py +283 -0
- fabricatio/models/tool.py +100 -0
- fabricatio/models/utils.py +78 -0
- fabricatio/parser.py +69 -0
- fabricatio/py.typed +0 -0
- fabricatio/templates.py +41 -0
- fabricatio/toolboxes/__init__.py +7 -0
- fabricatio/toolboxes/task.py +4 -0
- fabricatio-0.2.0.dev4.data/scripts/tdown.exe +0 -0
- fabricatio-0.2.0.dev4.dist-info/METADATA +224 -0
- fabricatio-0.2.0.dev4.dist-info/RECORD +29 -0
- fabricatio-0.2.0.dev4.dist-info/WHEEL +4 -0
- fabricatio-0.2.0.dev4.dist-info/licenses/LICENSE +21 -0
fabricatio/__init__.py
ADDED
@@ -0,0 +1,32 @@
+"""Fabricatio is a Python library for building llm app using event-based agent structure."""
+
+from fabricatio.core import env
+from fabricatio.fs import magika
+from fabricatio.journal import logger
+from fabricatio.models.action import Action, WorkFlow
+from fabricatio.models.events import Event
+from fabricatio.models.role import Role
+from fabricatio.models.task import Task
+from fabricatio.models.tool import ToolBox
+from fabricatio.models.utils import Message, Messages
+from fabricatio.parser import Capture, CodeBlockCapture, JsonCapture, PythonCapture
+from fabricatio.templates import templates_manager
+
+__all__ = [
+    "Action",
+    "Capture",
+    "CodeBlockCapture",
+    "Event",
+    "JsonCapture",
+    "Message",
+    "Messages",
+    "PythonCapture",
+    "Role",
+    "Task",
+    "ToolBox",
+    "WorkFlow",
+    "env",
+    "logger",
+    "magika",
+    "templates_manager",
+]
fabricatio/_rust.cp312-win_amd64.pyd
ADDED
Binary file
fabricatio/_rust.pyi
ADDED
@@ -0,0 +1 @@
+def download_templates() -> None: ...
fabricatio/actions/communication.py
ADDED
@@ -0,0 +1,13 @@
+from fabricatio.models.action import Action
+from fabricatio.models.task import Task
+
+
+class Talk(Action):
+    """Action that says hello to the world."""
+
+    name: str = "talk"
+    output_key: str = "talk_response"
+
+    async def _execute(self, task_input: Task[str], **_) -> str:
+        """Execute the action."""
+        return await self.aask(task_input.briefing, system_message=task_input.generate_prompt())
fabricatio/actions/transmission.py
ADDED
@@ -0,0 +1,32 @@
+from typing import List
+
+from fabricatio.journal import logger
+from fabricatio.models.action import Action
+from fabricatio.models.events import EventLike
+from fabricatio.models.task import Task
+
+
+class PublishTask(Action):
+    """An action that publishes a task to a list of targets."""
+
+    name: str = "publish_task"
+    """The name of the action."""
+    description: str = "Publish a task to a list of targets."
+    """The description of the action."""
+
+    async def _execute(self, send_targets: List[EventLike], send_task: Task, **_) -> None:
+        """Execute the action by sending the task to the specified targets."""
+        logger.info(f"Sending task {send_task.name} to {send_targets}")
+        for target in send_targets:
+            await send_task.move_to(target).publish()
+
+
+class CycleTask(Action):
+    """An action that cycles a task through a list of targets."""
+
+    name: str = "cycle_task"
+    """The name of the action."""
+    description: str = "Cycle a task through a list of targets"
+
+    async def _execute(self, task_input: Task, **_) -> None:
+        """Execute the action by cycling the task through the specified targets."""
fabricatio/config.py
ADDED
@@ -0,0 +1,206 @@
+"""Configuration module for the Fabricatio application."""
+
+from pathlib import Path
+from typing import List, Literal, Optional
+
+from appdirs import user_config_dir
+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    DirectoryPath,
+    Field,
+    FilePath,
+    HttpUrl,
+    NonNegativeFloat,
+    PositiveInt,
+    SecretStr,
+)
+from pydantic_settings import (
+    BaseSettings,
+    DotEnvSettingsSource,
+    EnvSettingsSource,
+    PydanticBaseSettingsSource,
+    PyprojectTomlConfigSettingsSource,
+    SettingsConfigDict,
+    TomlConfigSettingsSource,
+)
+
+ROAMING_DIR = user_config_dir("fabricatio", "", roaming=True)
+
+
+class LLMConfig(BaseModel):
+    """LLM configuration class.
+
+    Attributes:
+        api_endpoint (HttpUrl): OpenAI API Endpoint.
+        api_key (SecretStr): OpenAI API key. Empty by default for security reasons, should be set before use.
+        timeout (PositiveInt): The timeout of the LLM model in seconds. Default is 300 seconds as per request.
+        max_retries (PositiveInt): The maximum number of retries. Default is 3 retries.
+        model (str): The LLM model name. Set to 'gpt-3.5-turbo' as per request.
+        temperature (NonNegativeFloat): The temperature of the LLM model. Controls randomness in generation. Set to 1.0 as per request.
+        stop_sign (str): The stop sign of the LLM model. No default stop sign specified.
+        top_p (NonNegativeFloat): The top p of the LLM model. Controls diversity via nucleus sampling. Set to 0.35 as per request.
+        generation_count (PositiveInt): The number of generations to generate. Default is 1.
+        stream (bool): Whether to stream the LLM model's response. Default is False.
+        max_tokens (PositiveInt): The maximum number of tokens to generate. Set to 8192 as per request.
+    """
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+    api_endpoint: HttpUrl = Field(default=HttpUrl("https://api.openai.com"))
+    """OpenAI API Endpoint."""
+
+    api_key: SecretStr = Field(default=SecretStr(""))
+    """OpenAI API key. Empty by default for security reasons, should be set before use."""
+
+    timeout: PositiveInt = Field(default=300)
+    """The timeout of the LLM model in seconds. Default is 300 seconds as per request."""
+
+    max_retries: PositiveInt = Field(default=3)
+    """The maximum number of retries. Default is 3 retries."""
+
+    model: str = Field(default="gpt-3.5-turbo")
+    """The LLM model name. Set to 'gpt-3.5-turbo' as per request."""
+
+    temperature: NonNegativeFloat = Field(default=1.0)
+    """The temperature of the LLM model. Controls randomness in generation. Set to 1.0 as per request."""
+
+    stop_sign: str | List[str] = Field(default=("\n\n", "User:"))
+    """The stop sign of the LLM model. No default stop sign specified."""
+
+    top_p: NonNegativeFloat = Field(default=0.35)
+    """The top p of the LLM model. Controls diversity via nucleus sampling. Set to 0.35 as per request."""
+
+    generation_count: PositiveInt = Field(default=1)
+    """The number of generations to generate. Default is 1."""
+
+    stream: bool = Field(default=False)
+    """Whether to stream the LLM model's response. Default is False."""
+
+    max_tokens: PositiveInt = Field(default=8192)
+    """The maximum number of tokens to generate. Set to 8192 as per request."""
+
+
+class PymitterConfig(BaseModel):
+    """Pymitter configuration class.
+
+    Attributes:
+        delimiter (str): The delimiter used to separate the event name into segments.
+        new_listener_event (bool): If set, a newListener event is emitted when a new listener is added.
+        max_listeners (int): The maximum number of listeners per event.
+    """
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+    delimiter: str = Field(default=".", frozen=True)
+    """The delimiter used to separate the event name into segments."""
+
+    new_listener_event: bool = Field(default=False, frozen=True)
+    """If set, a newListener event is emitted when a new listener is added."""
+
+    max_listeners: int = Field(default=-1, frozen=True)
+    """The maximum number of listeners per event."""
+
+
+class DebugConfig(BaseModel):
+    """Debug configuration class.
+
+    Attributes:
+        log_level (Literal["DEBUG", "INFO", "SUCCESS", "WARNING", "ERROR", "CRITICAL"]): The log level of the application.
+        log_file (FilePath): The log file of the application.
+    """
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+
+    log_level: Literal["DEBUG", "INFO", "SUCCESS", "WARNING", "ERROR", "CRITICAL"] = Field(default="INFO")
+    """The log level of the application."""
+
+    log_file: FilePath = Field(default=rf"{ROAMING_DIR}\fabricatio.log")
+    """The log file of the application."""
+
+
+class Code2PromptConfig(BaseModel):
+    """Code2Prompt configuration class."""
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+    template_dir: List[DirectoryPath] = Field(
+        default_factory=lambda: [Path(r".\templates"), Path(rf"{ROAMING_DIR}\templates")]
+    )
+    """The directory containing the templates for code2prompt."""
+
+    template_suffix: str = Field(default=".hbs", frozen=True)
+    """The suffix of the template files for code2prompt."""
+
+
+class MagikaConfig(BaseModel):
+    """Magika configuration class."""
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+    model_dir: Optional[DirectoryPath] = Field(default=None)
+    """The directory containing the models for magika."""
+
+
+class Settings(BaseSettings):
+    """Application settings class.
+
+    Attributes:
+        llm (LLMConfig): LLM Configuration
+        debug (DebugConfig): Debug Configuration
+        pymitter (PymitterConfig): Pymitter Configuration
+        code2prompt (Code2PromptConfig): Code2Prompt Configuration
+        magika (MagikaConfig): Magika Configuration
+    """
+
+    model_config = SettingsConfigDict(
+        env_prefix="FABRIK_",
+        env_nested_delimiter="__",
+        pyproject_toml_depth=1,
+        pyproject_toml_table_header=("tool", "fabricatio"),
+        toml_file=["fabricatio.toml", rf"{ROAMING_DIR}\fabricatio.toml"],
+        env_file=[".env", ".envrc"],
+        use_attribute_docstrings=True,
+    )
+
+    llm: LLMConfig = Field(default_factory=LLMConfig)
+    """LLM Configuration"""
+
+    debug: DebugConfig = Field(default_factory=DebugConfig)
+    """Debug Configuration"""
+
+    pymitter: PymitterConfig = Field(default_factory=PymitterConfig)
+    """Pymitter Configuration"""
+
+    code2prompt: Code2PromptConfig = Field(default_factory=Code2PromptConfig)
+    """Code2Prompt Configuration"""
+
+    magika: MagikaConfig = Field(default_factory=MagikaConfig)
+    """Magika Configuration"""
+
+    @classmethod
+    def settings_customise_sources(
+        cls,
+        settings_cls: type[BaseSettings],
+        init_settings: PydanticBaseSettingsSource,
+        env_settings: PydanticBaseSettingsSource,
+        dotenv_settings: PydanticBaseSettingsSource,
+        file_secret_settings: PydanticBaseSettingsSource,
+    ) -> tuple[PydanticBaseSettingsSource, ...]:
+        """Customize settings sources.
+
+        Args:
+            settings_cls (type[BaseSettings]): The settings class.
+            init_settings (PydanticBaseSettingsSource): Initial settings source.
+            env_settings (PydanticBaseSettingsSource): Environment settings source.
+            dotenv_settings (PydanticBaseSettingsSource): Dotenv settings source.
+            file_secret_settings (PydanticBaseSettingsSource): File secret settings source.
+
+        Returns:
+            tuple[PydanticBaseSettingsSource, ...]: A tuple of settings sources.
+        """
+        return (
+            DotEnvSettingsSource(settings_cls),
+            EnvSettingsSource(settings_cls),
+            TomlConfigSettingsSource(settings_cls),
+            PyprojectTomlConfigSettingsSource(settings_cls),
+        )
+
+
+configs: Settings = Settings()
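Editor's note (not part of the diff): the SettingsConfigDict above wires environment variables (prefix FABRIK_, nested delimiter __), .env/.envrc files, fabricatio.toml, and the [tool.fabricatio] table of pyproject.toml into Settings. A minimal, hypothetical sketch of overriding nested fields through the environment, assuming the variable names follow the prefix and delimiter shown above (all values are placeholders):

import os

# FABRIK_<section>__<field> maps onto Settings.<section>.<field> via
# env_prefix="FABRIK_" and env_nested_delimiter="__"; values are placeholders.
os.environ["FABRIK_LLM__API_KEY"] = "sk-placeholder"
os.environ["FABRIK_LLM__MODEL"] = "gpt-4o-mini"
os.environ["FABRIK_DEBUG__LOG_LEVEL"] = "DEBUG"

from fabricatio.config import Settings

settings = Settings()  # re-reads the dotenv, env and TOML sources registered above
print(settings.llm.model, settings.debug.log_level)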
fabricatio/core.py
ADDED
@@ -0,0 +1,167 @@
+"""Core module that contains the Env class for managing event handling."""
+
+from typing import Callable, Optional, Self, overload
+
+from pydantic import BaseModel, ConfigDict, PrivateAttr
+from pymitter import EventEmitter
+
+from fabricatio.config import configs
+from fabricatio.models.events import Event
+
+
+class Env(BaseModel):
+    """Environment class that manages event handling using EventEmitter."""
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+    _ee: EventEmitter = PrivateAttr(
+        default_factory=lambda: EventEmitter(
+            delimiter=configs.pymitter.delimiter,
+            new_listener=configs.pymitter.new_listener_event,
+            max_listeners=configs.pymitter.max_listeners,
+            wildcard=True,
+        )
+    )
+
+    @overload
+    def on(self, event: str | Event, /, ttl: int = -1) -> Self:
+        """
+        Registers an event listener that listens indefinitely or for a specified number of times.
+
+        Args:
+            event (str | Event): The event to listen for.
+            ttl (int): Time-to-live for the listener. If -1, the listener will listen indefinitely.
+
+        Returns:
+            Self: The current instance of Env.
+        """
+        ...
+
+    @overload
+    def on[**P, R](
+        self,
+        event: str | Event,
+        func: Optional[Callable[P, R]] = None,
+        /,
+        ttl: int = -1,
+    ) -> Callable[[Callable[P, R]], Callable[P, R]]:
+        """
+        Registers an event listener with a specific function that listens indefinitely or for a specified number of times.
+
+        Args:
+            event (str | Event): The event to listen for.
+            func (Callable[P, R]): The function to be called when the event is emitted.
+            ttl (int): Time-to-live for the listener. If -1, the listener will listen indefinitely.
+
+        Returns:
+            Callable[[Callable[P, R]], Callable[P, R]]: A decorator that registers the function as an event listener.
+        """
+        ...
+
+    def on[**P, R](
+        self,
+        event: str | Event,
+        func: Optional[Callable[P, R]] = None,
+        /,
+        ttl=-1,
+    ) -> Callable[[Callable[P, R]], Callable[P, R]] | Self:
+        """Registers an event listener with a specific function that listens indefinitely or for a specified number of times.
+
+        Args:
+            event (str | Event): The event to listen for.
+            func (Callable[P, R]): The function to be called when the event is emitted.
+            ttl (int): Time-to-live for the listener. If -1, the listener will listen indefinitely.
+
+        Returns:
+            Callable[[Callable[P, R]], Callable[P, R]] | Self: A decorator that registers the function as an event listener or the current instance of Env.
+        """
+        if isinstance(event, Event):
+            event = event.collapse()
+        if func is None:
+            return self._ee.on(event, ttl=ttl)
+
+        self._ee.on(event, func, ttl=ttl)
+        return self
+
+    @overload
+    def once[**P, R](
+        self,
+        event: str | Event,
+    ) -> Callable[[Callable[P, R]], Callable[P, R]]:
+        """
+        Registers an event listener that listens only once.
+
+        Args:
+            event (str | Event): The event to listen for.
+
+        Returns:
+            Callable[[Callable[P, R]], Callable[P, R]]: A decorator that registers the function as an event listener.
+        """
+        ...
+
+    @overload
+    def once[**P, R](
+        self,
+        event: str | Event,
+        func: Callable[[Callable[P, R]], Callable[P, R]],
+    ) -> Self:
+        """
+        Registers an event listener with a specific function that listens only once.
+
+        Args:
+            event (str | Event): The event to listen for.
+            func (Callable[P, R]): The function to be called when the event is emitted.
+
+        Returns:
+            Self: The current instance of Env.
+        """
+        ...
+
+    def once[**P, R](
+        self,
+        event: str | Event,
+        func: Optional[Callable[P, R]] = None,
+    ) -> Callable[[Callable[P, R]], Callable[P, R]] | Self:
+        """Registers an event listener with a specific function that listens only once.
+
+        Args:
+            event (str | Event): The event to listen for.
+            func (Callable[P, R]): The function to be called when the event is emitted.
+
+        Returns:
+            Callable[[Callable[P, R]], Callable[P, R]] | Self: A decorator that registers the function as an event listener or the current instance
+        """
+        if isinstance(event, Event):
+            event = event.collapse()
+        if func is None:
+            return self._ee.once(event)
+
+        self._ee.once(event, func)
+        return self
+
+    def emit[**P](self, event: str | Event, *args: P.args, **kwargs: P.kwargs) -> None:
+        """Emits an event to all registered listeners.
+
+        Args:
+            event (str | Event): The event to emit.
+            *args: Positional arguments to pass to the listeners.
+            **kwargs: Keyword arguments to pass to the listeners.
+        """
+        if isinstance(event, Event):
+            event = event.collapse()
+
+        self._ee.emit(event, *args, **kwargs)
+
+    async def emit_async[**P](self, event: str | Event, *args: P.args, **kwargs: P.kwargs) -> None:
+        """Asynchronously emits an event to all registered listeners.
+
+        Args:
+            event (str | Event): The event to emit.
+            *args: Positional arguments to pass to the listeners.
+            **kwargs: Keyword arguments to pass to the listeners.
+        """
+        if isinstance(event, Event):
+            event = event.collapse()
+        return await self._ee.emit_async(event, *args, **kwargs)
+
+
+env = Env()
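Editor's note (not part of the diff): Env wraps a wildcard-enabled pymitter EventEmitter, so on/once work either as decorators (when func is omitted) or fluently (when func is passed). A short illustrative sketch using the module-level env singleton; the event name is made up:

import asyncio

from fabricatio.core import env


@env.on("task.demo.finished")  # decorator form: func omitted, pymitter returns a decorator
def on_finished(result: str) -> None:
    print(f"demo finished: {result}")


async def main() -> None:
    # emit_async awaits coroutine listeners; emit() is the synchronous counterpart
    await env.emit_async("task.demo.finished", "ok")


asyncio.run(main())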
fabricatio/decorators.py
ADDED
@@ -0,0 +1,56 @@
+from functools import wraps
+from shutil import which
+from typing import Callable
+
+from fabricatio.journal import logger
+
+
+def depend_on_external_cmd[**P, R](bin_name: str, install_tip: str) -> Callable[[Callable[P, R]], Callable[P, R]]:
+    """Decorator to check for the presence of an external command.
+
+    Args:
+        bin_name (str): The name of the required binary.
+        install_tip (str): Installation instructions for the required binary.
+
+    Returns:
+        Callable[[Callable[P, R]], Callable[P, R]]: A decorator that wraps the function to check for the binary.
+
+    Raises:
+        RuntimeError: If the required binary is not found.
+    """
+
+    def decorator(func: Callable[P, R]) -> Callable[P, R]:
+        """Decorator to wrap the function with binary presence check.
+
+        Args:
+            func (Callable[P, R]): The function to be decorated.
+
+        Returns:
+            Callable[P, R]: The wrapped function.
+        """
+
+        @wraps(func)
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+            """Wrapper function to check for the presence of the required binary.
+
+            Args:
+                *args: Positional arguments for the function.
+                **kwargs: Keyword arguments for the function.
+
+            Returns:
+                R: The result of the function call.
+
+            Raises:
+                RuntimeError: If the required binary is not found.
+            """
+            if which(bin_name) is None:
+                err = (
+                    f"{bin_name} is required to run function: {func.__name__}, please install it first.\n{install_tip}"
+                )
+                logger.critical(err)
+                raise RuntimeError(err)
+            return func(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
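Editor's note (not part of the diff): depend_on_external_cmd fails fast with a RuntimeError (and a critical log entry) when the named binary is not on PATH. An illustrative use; the choice of "git" and the wrapped function are hypothetical:

import subprocess

from fabricatio.decorators import depend_on_external_cmd


@depend_on_external_cmd("git", "Install git from https://git-scm.com/downloads")
def current_commit() -> str:
    # Only reached if `git` is found by shutil.which; otherwise RuntimeError is raised first.
    return subprocess.check_output(["git", "rev-parse", "HEAD"], text=True).strip()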
fabricatio/fs/readers.py
ADDED
fabricatio/journal.py
ADDED
@@ -0,0 +1,23 @@
+"""Logging setup for the project."""
+
+import sys
+
+from loguru import logger
+from rich import traceback
+
+from fabricatio.config import configs
+
+traceback.install()
+logger.remove()
+logger.add(
+    configs.debug.log_file, level=configs.debug.log_level, rotation="1 weeks", retention="1 month", compression="zip"
+)
+logger.add(sys.stderr, level=configs.debug.log_level)
+
+if __name__ == "__main__":
+    logger.debug("This is a trace message.")
+    logger.info("This is an information message.")
+    logger.success("This is a success message.")
+    logger.warning("This is a warning message.")
+    logger.error("This is an error message.")
+    logger.critical("This is a critical message.")
fabricatio/models/action.py
ADDED
@@ -0,0 +1,128 @@
+"""Module that contains the classes for actions and workflows."""
+
+import traceback
+from abc import abstractmethod
+from asyncio import Queue
+from typing import Any, Dict, Self, Tuple, Type, Unpack
+
+from fabricatio.journal import logger
+from fabricatio.models.generic import LLMUsage, WithBriefing
+from fabricatio.models.task import ProposeTask, Task
+from pydantic import Field, PrivateAttr
+
+
+class Action(ProposeTask):
+    """Class that represents an action to be executed in a workflow."""
+
+    personality: str = Field(default="")
+    """The personality of whom the action belongs to."""
+    output_key: str = Field(default="")
+    """The key of the output data."""
+
+    @abstractmethod
+    async def _execute(self, **cxt: Unpack) -> Any:
+        """Execute the action with the provided arguments.
+
+        Args:
+            **cxt: The context dictionary containing input and output data.
+
+        Returns:
+            The result of the action execution.
+        """
+        pass
+
+    async def act(self, cxt: Dict[str, Any]) -> Dict[str, Any]:
+        """Perform the action by executing it and setting the output data.
+
+        Args:
+            cxt: The context dictionary containing input and output data.
+        """
+        ret = await self._execute(**cxt)
+        if self.output_key:
+            logger.debug(f"Setting output: {self.output_key}")
+            cxt[self.output_key] = ret
+        return cxt
+
+    def briefing(self) -> str:
+        """Return a brief description of the action."""
+        if self.personality:
+            return f"## Your personality: \n{self.personality}\n# The action you are going to perform: \n{super().briefing}"
+        return f"# The action you are going to perform: \n{super().briefing}"
+
+
+class WorkFlow[A: Type[Action] | Action](WithBriefing, LLMUsage):
+    """Class that represents a workflow to be executed in a task."""
+
+    _context: Queue[Dict[str, Any]] = PrivateAttr(default_factory=lambda: Queue(maxsize=1))
+    """ The context dictionary to be used for workflow execution."""
+
+    _instances: Tuple[Action, ...] = PrivateAttr(...)
+    """ The instances of the workflow steps."""
+
+    steps: Tuple[A, ...] = Field(...)
+    """ The steps to be executed in the workflow, actions or action classes."""
+    task_input_key: str = Field(default="task_input")
+    """ The key of the task input data."""
+    task_output_key: str = Field(default="task_output")
+    """ The key of the task output data."""
+    extra_init_context: Dict[str, Any] = Field(default_factory=dict, frozen=True)
+    """ The extra context dictionary to be used for workflow initialization."""
+
+    def model_post_init(self, __context: Any) -> None:
+        """Initialize the workflow by setting fallbacks for each step.
+
+        Args:
+            __context: The context to be used for initialization.
+        """
+        temp = []
+        for step in self.steps:
+            temp.append(step if isinstance(step, Action) else step())
+        self._instances = tuple(temp)
+
+    def inject_personality(self, personality: str) -> Self:
+        """Inject the personality of the workflow.
+
+        Args:
+            personality: The personality to be injected.
+
+        Returns:
+            Self: The instance of the workflow with the injected personality.
+        """
+        for a in self._instances:
+            if not a.personality:
+                a.personality = personality
+        return self
+
+    async def serve(self, task: Task) -> None:
+        """Serve the task by executing the workflow steps.
+
+        Args:
+            task: The task to be served.
+        """
+        await task.start()
+        await self._init_context()
+        current_action = None
+        try:
+            for step in self._instances:
+                logger.debug(f"Executing step: {step.name}")
+                cxt = await self._context.get()
+                modified_ctx = await step.act(cxt)
+                await self._context.put(modified_ctx)
+                current_action = step.name
+            logger.info(f"Finished executing workflow: {self.name}")
+            await task.finish((await self._context.get()).get(self.task_output_key, None))
+        except RuntimeError as e:
+            logger.error(f"Error during task: {current_action} execution: {e}")  # Log the exception
+            logger.error(traceback.format_exc())  # Add this line to log the traceback
+            await task.fail()  # Mark the task as failed
+
+    async def _init_context(self) -> None:
+        """Initialize the context dictionary for workflow execution."""
+        logger.debug(f"Initializing context for workflow: {self.name}")
+        await self._context.put({self.task_input_key: None, **dict(self.extra_init_context)})
+
+    def fallback_to_self(self) -> Self:
+        """Set the fallback for each step to the workflow itself."""
+        for step in self._instances:
+            step.fallback_to(self)
+        return self
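Editor's note (not part of the diff): an Action subclass implements _execute and can name an output_key, and a WorkFlow chains steps (classes or instances) while shuttling a shared context dict between them via its internal queue. A hedged construction sketch only, with the class, name, and key chosen for illustration and no assumption about other fields WorkFlow may require:

from fabricatio import Action, WorkFlow
from fabricatio.models.task import Task


class UpperCase(Action):
    """Uppercase the briefing of the incoming task (illustrative)."""

    name: str = "upper_case"
    output_key: str = "task_output"  # matches WorkFlow.task_output_key so serve() can finish the task

    async def _execute(self, task_input: Task[str], **_) -> str:
        return task_input.briefing.upper()


# Steps may be Action classes; model_post_init instantiates them.
workflow = WorkFlow(name="upper_case_flow", steps=(UpperCase,))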