fabricatio 0.2.1.dev0__cp313-cp313-win_amd64.whl → 0.3.14.dev4__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. fabricatio/__init__.py +12 -20
  2. fabricatio/actions/__init__.py +1 -5
  3. fabricatio/actions/article.py +319 -0
  4. fabricatio/actions/article_rag.py +416 -0
  5. fabricatio/actions/fs.py +25 -0
  6. fabricatio/actions/output.py +248 -0
  7. fabricatio/actions/rag.py +96 -0
  8. fabricatio/actions/rules.py +83 -0
  9. fabricatio/capabilities/__init__.py +1 -0
  10. fabricatio/capabilities/advanced_judge.py +20 -0
  11. fabricatio/capabilities/advanced_rag.py +61 -0
  12. fabricatio/capabilities/censor.py +105 -0
  13. fabricatio/capabilities/check.py +212 -0
  14. fabricatio/capabilities/correct.py +228 -0
  15. fabricatio/capabilities/extract.py +74 -0
  16. fabricatio/capabilities/persist.py +103 -0
  17. fabricatio/capabilities/propose.py +65 -0
  18. fabricatio/capabilities/rag.py +263 -0
  19. fabricatio/capabilities/rating.py +404 -0
  20. fabricatio/capabilities/review.py +114 -0
  21. fabricatio/capabilities/task.py +113 -0
  22. fabricatio/decorators.py +251 -179
  23. fabricatio/{core.py → emitter.py} +31 -21
  24. fabricatio/fs/__init__.py +32 -2
  25. fabricatio/fs/curd.py +32 -9
  26. fabricatio/fs/readers.py +44 -7
  27. fabricatio/journal.py +3 -19
  28. fabricatio/models/action.py +185 -61
  29. fabricatio/models/adv_kwargs_types.py +63 -0
  30. fabricatio/models/extra/__init__.py +1 -0
  31. fabricatio/models/extra/advanced_judge.py +32 -0
  32. fabricatio/models/extra/aricle_rag.py +284 -0
  33. fabricatio/models/extra/article_base.py +422 -0
  34. fabricatio/models/extra/article_essence.py +101 -0
  35. fabricatio/models/extra/article_main.py +285 -0
  36. fabricatio/models/extra/article_outline.py +46 -0
  37. fabricatio/models/extra/article_proposal.py +52 -0
  38. fabricatio/models/extra/patches.py +20 -0
  39. fabricatio/models/extra/problem.py +165 -0
  40. fabricatio/models/extra/rag.py +98 -0
  41. fabricatio/models/extra/rule.py +52 -0
  42. fabricatio/models/generic.py +704 -36
  43. fabricatio/models/kwargs_types.py +112 -17
  44. fabricatio/models/role.py +74 -27
  45. fabricatio/models/task.py +94 -60
  46. fabricatio/models/tool.py +328 -188
  47. fabricatio/models/usages.py +791 -515
  48. fabricatio/parser.py +81 -60
  49. fabricatio/rust.cp313-win_amd64.pyd +0 -0
  50. fabricatio/rust.pyi +846 -0
  51. fabricatio/toolboxes/__init__.py +1 -3
  52. fabricatio/toolboxes/fs.py +17 -1
  53. fabricatio/utils.py +156 -0
  54. fabricatio/workflows/__init__.py +1 -0
  55. fabricatio/workflows/articles.py +24 -0
  56. fabricatio/workflows/rag.py +11 -0
  57. fabricatio-0.3.14.dev4.data/scripts/tdown.exe +0 -0
  58. fabricatio-0.3.14.dev4.data/scripts/ttm.exe +0 -0
  59. fabricatio-0.3.14.dev4.dist-info/METADATA +188 -0
  60. fabricatio-0.3.14.dev4.dist-info/RECORD +64 -0
  61. {fabricatio-0.2.1.dev0.dist-info → fabricatio-0.3.14.dev4.dist-info}/WHEEL +1 -1
  62. fabricatio/_rust.cp313-win_amd64.pyd +0 -0
  63. fabricatio/_rust.pyi +0 -53
  64. fabricatio/_rust_instances.py +0 -8
  65. fabricatio/actions/communication.py +0 -15
  66. fabricatio/actions/transmission.py +0 -23
  67. fabricatio/config.py +0 -263
  68. fabricatio/models/advanced.py +0 -128
  69. fabricatio/models/events.py +0 -82
  70. fabricatio/models/utils.py +0 -78
  71. fabricatio/toolboxes/task.py +0 -6
  72. fabricatio-0.2.1.dev0.data/scripts/tdown.exe +0 -0
  73. fabricatio-0.2.1.dev0.dist-info/METADATA +0 -420
  74. fabricatio-0.2.1.dev0.dist-info/RECORD +0 -35
  75. {fabricatio-0.2.1.dev0.dist-info → fabricatio-0.3.14.dev4.dist-info}/licenses/LICENSE +0 -0
fabricatio/{core.py → emitter.py} RENAMED
@@ -1,25 +1,21 @@
 """Core module that contains the Env class for managing event handling."""
+from dataclasses import dataclass
+from typing import Callable, ClassVar, Optional, Self, overload
 
-from typing import Callable, Optional, Self, overload
-
-from pydantic import BaseModel, ConfigDict, PrivateAttr
 from pymitter import EventEmitter
 
-from fabricatio.config import configs
-from fabricatio.models.events import Event
+from fabricatio.rust import CONFIG, Event
 
 
-class Env(BaseModel):
+@dataclass
+class Env:
     """Environment class that manages event handling using EventEmitter."""
 
-    model_config = ConfigDict(use_attribute_docstrings=True)
-    _ee: EventEmitter = PrivateAttr(
-        default_factory=lambda: EventEmitter(
-            delimiter=configs.pymitter.delimiter,
-            new_listener=configs.pymitter.new_listener_event,
-            max_listeners=configs.pymitter.max_listeners,
-            wildcard=True,
-        )
+    ee: ClassVar[EventEmitter] = EventEmitter(
+        delimiter=CONFIG.pymitter.delimiter,
+        new_listener=CONFIG.pymitter.new_listener_event,
+        max_listeners=CONFIG.pymitter.max_listeners,
+        wildcard=True,
     )
 
     @overload
@@ -77,9 +73,8 @@ class Env(BaseModel):
         if isinstance(event, Event):
             event = event.collapse()
         if func is None:
-            return self._ee.on(event, ttl=ttl)
-
-        self._ee.on(event, func, ttl=ttl)
+            return self.ee.on(event, ttl=ttl)
+        self.ee.on(event, func, ttl=ttl)
         return self
 
     @overload
@@ -133,9 +128,9 @@ class Env(BaseModel):
         if isinstance(event, Event):
            event = event.collapse()
         if func is None:
-            return self._ee.once(event)
+            return self.ee.once(event)
 
-        self._ee.once(event, func)
+        self.ee.once(event, func)
         return self
 
     def emit[**P](self, event: str | Event, *args: P.args, **kwargs: P.kwargs) -> None:
@@ -149,7 +144,7 @@
         if isinstance(event, Event):
             event = event.collapse()
 
-        self._ee.emit(event, *args, **kwargs)
+        self.ee.emit(event, *args, **kwargs)
 
     async def emit_async[**P](self, event: str | Event, *args: P.args, **kwargs: P.kwargs) -> None:
         """Asynchronously emits an event to all registered listeners.
@@ -161,7 +156,22 @@
         """
         if isinstance(event, Event):
             event = event.collapse()
-        return await self._ee.emit_async(event, *args, **kwargs)
+        return await self.ee.emit_async(event, *args, **kwargs)
+
+    def emit_future[**P](self, event: str | Event, *args: P.args, **kwargs: P.kwargs) -> None:
+        """Emits an event to all registered listeners and returns a future object.
+
+        Args:
+            event (str | Event): The event to emit.
+            *args: Positional arguments to pass to the listeners.
+            **kwargs: Keyword arguments to pass to the listeners.
+
+        Returns:
+            None: The future object.
+        """
+        if isinstance(event, Event):
+            event = event.collapse()
+        return self.ee.emit_future(event, *args, **kwargs)
 
 
 env = Env()
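Note: a minimal usage sketch of the reworked Env API, assuming the module keeps the fabricatio.emitter path from the rename above and that ttl defaults as in pymitter; the event name and handler are made up for illustration.

    from fabricatio.emitter import env

    def on_task_done(payload) -> None:
        # Plain callable invoked by pymitter when the event fires.
        print(f"done: {payload}")

    env.on("demo.task.done", on_task_done)        # register a listener; returns env for chaining
    env.emit("demo.task.done", {"status": "ok"})  # synchronous dispatch; emit_async/emit_future also exist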
fabricatio/fs/__init__.py CHANGED
@@ -1,5 +1,35 @@
 """FileSystem manipulation module for Fabricatio."""
+from importlib.util import find_spec
 
-from fabricatio.fs.readers import magika
+from fabricatio.fs.curd import (
+    absolute_path,
+    copy_file,
+    create_directory,
+    delete_directory,
+    delete_file,
+    dump_text,
+    gather_files,
+    move_file,
+    tree,
+)
+from fabricatio.fs.readers import safe_json_read, safe_text_read
 
-__all__ = ["magika"]
+__all__ = [
+    "absolute_path",
+    "copy_file",
+    "create_directory",
+    "delete_directory",
+    "delete_file",
+    "dump_text",
+    "gather_files",
+    "move_file",
+    "safe_json_read",
+    "safe_text_read",
+    "tree",
+]
+
+if find_spec("magika"):
+    from magika import Magika
+
+    MAGIKA = Magika()
+    __all__ += ["MAGIKA"]
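Note: a consumer-side sketch of the guarded optional export above; MAGIKA only exists when the optional magika dependency is installed, and the file path here is illustrative.

    import fabricatio.fs as fs

    text = fs.safe_text_read("README.md")   # returns "" instead of raising on failure
    if hasattr(fs, "MAGIKA"):
        print("content-type detection available via", fs.MAGIKA)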
fabricatio/fs/curd.py CHANGED
@@ -2,14 +2,14 @@
 
 import shutil
 import subprocess
+from os import PathLike
 from pathlib import Path
 from typing import Union
 
-from fabricatio.decorators import depend_on_external_cmd, logging_execution_info
+from fabricatio.decorators import depend_on_external_cmd
 from fabricatio.journal import logger
 
 
-@logging_execution_info
 def dump_text(path: Union[str, Path], text: str) -> None:
     """Dump text to a file. you need to make sure the file's parent directory exists.
 
@@ -20,10 +20,9 @@ def dump_text(path: Union[str, Path], text: str) -> None:
     Returns:
         None
     """
-    Path(path).write_text(text, encoding="utf-8", errors="ignore")
+    Path(path).write_text(text, encoding="utf-8", errors="ignore", newline="\n")
 
 
-@logging_execution_info
 def copy_file(src: Union[str, Path], dst: Union[str, Path]) -> None:
     """Copy a file from source to destination.
 
@@ -43,7 +42,6 @@ def copy_file(src: Union[str, Path], dst: Union[str, Path]) -> None:
         raise
 
 
-@logging_execution_info
 def move_file(src: Union[str, Path], dst: Union[str, Path]) -> None:
     """Move a file from source to destination.
 
@@ -63,7 +61,6 @@ def move_file(src: Union[str, Path], dst: Union[str, Path]) -> None:
         raise
 
 
-@logging_execution_info
 def delete_file(file_path: Union[str, Path]) -> None:
     """Delete a file.
 
@@ -82,7 +79,6 @@ def delete_file(file_path: Union[str, Path]) -> None:
         raise
 
 
-@logging_execution_info
 def create_directory(dir_path: Union[str, Path], parents: bool = True, exist_ok: bool = True) -> None:
     """Create a directory.
 
@@ -99,7 +95,6 @@ def create_directory(dir_path: Union[str, Path], parents: bool = True, exist_ok:
         raise
 
 
-@logging_execution_info
 @depend_on_external_cmd(
     "erd",
     "Please install `erd` using `cargo install erdtree` or `scoop install erdtree`.",
@@ -111,7 +106,6 @@ def tree(dir_path: Union[str, Path]) -> str:
     return subprocess.check_output(("erd", dir_path.as_posix()), encoding="utf-8")  # noqa: S603
 
 
-@logging_execution_info
 def delete_directory(dir_path: Union[str, Path]) -> None:
     """Delete a directory and its contents.
 
@@ -128,3 +122,32 @@ def delete_directory(dir_path: Union[str, Path]) -> None:
     except OSError as e:
         logger.error(f"Failed to delete directory {dir_path}: {e!s}")
         raise
+
+
+def absolute_path(path: str | Path | PathLike) -> str:
+    """Get the absolute path of a file or directory.
+
+    Args:
+        path (str, Path, PathLike): The path to the file or directory.
+
+    Returns:
+        str: The absolute path of the file or directory.
+    """
+    return Path(path).expanduser().resolve().as_posix()
+
+
+def gather_files(directory: str | Path | PathLike, extension: str) -> list[str]:
+    """Gather all files with a specific extension in a directory.
+
+    Args:
+        directory (str, Path, PathLike): The directory to search in.
+        extension (str): The file extension to look for.
+
+    Returns:
+        list[str]: A list of file paths with the specified extension.
+
+    Example:
+        >>> gather_files('/path/to/directory', 'txt')
+        ['/path/to/directory/file1.txt', '/path/to/directory/file2.txt']
+    """
+    return [file.as_posix() for file in Path(directory).rglob(f"*.{extension}")]
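Note: a short sketch of how the newly added helpers compose, assuming a writable working directory; the paths are made up.

    from fabricatio.fs.curd import absolute_path, create_directory, dump_text, gather_files

    create_directory("notes/drafts")                  # parents=True, exist_ok=True by default
    dump_text("notes/drafts/a.txt", "first draft\n")
    print(absolute_path("notes"))                     # e.g. /home/user/notes
    print(gather_files("notes", "txt"))               # ['notes/drafts/a.txt']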
fabricatio/fs/readers.py CHANGED
@@ -1,24 +1,61 @@
 """Filesystem readers for Fabricatio."""
 
+import re
 from pathlib import Path
+from typing import Dict, List, Tuple
 
-from magika import Magika
+import ujson
 
-from fabricatio.config import configs
+from fabricatio.journal import logger
 
-magika = Magika(model_dir=configs.magika.model_dir)
 
-
-def safe_text_read(path: Path) -> str:
+def safe_text_read(path: Path | str) -> str:
     """Safely read the text from a file.
 
     Args:
-        path (Path): The path to the file.
+        path (Path|str): The path to the file.
 
     Returns:
         str: The text from the file.
     """
+    path = Path(path)
     try:
         return path.read_text(encoding="utf-8")
-    except (UnicodeDecodeError, IsADirectoryError, FileNotFoundError):
+    except (UnicodeDecodeError, IsADirectoryError, FileNotFoundError) as e:
+        logger.error(f"Failed to read file {path}: {e!s}")
         return ""
+
+
+def safe_json_read(path: Path | str) -> Dict:
+    """Safely read the JSON from a file.
+
+    Args:
+        path (Path|str): The path to the file.
+
+    Returns:
+        dict: The JSON from the file.
+    """
+    path = Path(path)
+    try:
+        return ujson.loads(path.read_text(encoding="utf-8"))
+    except (ujson.JSONDecodeError, IsADirectoryError, FileNotFoundError) as e:
+        logger.error(f"Failed to read file {path}: {e!s}")
+        return {}
+
+
+def extract_sections(string: str, level: int, section_char: str = "#") -> List[Tuple[str, str]]:
+    """Extract sections from markdown-style text by header level.
+
+    Args:
+        string (str): Input text to parse
+        level (int): Header level (e.g., 1 for '#', 2 for '##')
+        section_char (str, optional): The character used for headers (default: '#')
+
+    Returns:
+        List[Tuple[str, str]]: List of (header_text, section_content) tuples
+    """
+    return re.findall(
+        r"^%s{%d}\s+(.+?)\n((?:(?!^%s{%d}\s).|\n)*)" % (section_char, level, section_char, level),
+        string,
+        re.MULTILINE,
+    )
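Note: the header-splitting regex in extract_sections is easy to misread, so here is a small worked example with made-up input.

    from fabricatio.fs.readers import extract_sections

    doc = "# Title\n## Intro\nhello\n## Usage\nrun it\n"
    print(extract_sections(doc, 2))
    # [('Intro', 'hello\n'), ('Usage', 'run it\n')]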
fabricatio/journal.py CHANGED
@@ -3,26 +3,10 @@
 import sys
 
 from loguru import logger
-from rich import pretty, traceback
 
-from fabricatio.config import configs
+from fabricatio.rust import CONFIG
 
-pretty.install()
-traceback.install()
 logger.remove()
-logger.add(
-    configs.debug.log_file,
-    level=configs.debug.log_level,
-    rotation=f"{configs.debug.rotation} weeks",
-    retention=f"{configs.debug.retention} weeks",
-)
-logger.add(sys.stderr, level=configs.debug.log_level)
+logger.add(sys.stderr, level=CONFIG.debug.log_level)
 
-
-if __name__ == "__main__":
-    logger.debug("This is a trace message.")
-    logger.info("This is an information message.")
-    logger.success("This is a success message.")
-    logger.warning("This is a warning message.")
-    logger.error("This is an error message.")
-    logger.critical("This is a critical message.")
+__all__ = ["logger"]
fabricatio/models/action.py CHANGED
@@ -1,139 +1,263 @@
-"""Module that contains the classes for actions and workflows."""
+"""Module that contains the classes for defining and executing task workflows.
+
+This module provides the Action and WorkFlow classes for creating structured
+task execution pipelines. Actions represent atomic operations, while WorkFlows
+orchestrate sequences of actions with shared context and error handling.
+
+Classes:
+    Action: Base class for defining executable actions with context management.
+    WorkFlow: Manages action sequences, context propagation, and task lifecycle.
+"""
 
 import traceback
 from abc import abstractmethod
-from asyncio import Queue
-from typing import Any, Dict, Self, Tuple, Type, Union, Unpack
+from asyncio import Queue, create_task
+from typing import Any, ClassVar, Dict, Generator, Self, Sequence, Tuple, Type, Union, final
 
 from fabricatio.journal import logger
-from fabricatio.models.advanced import HandleTask, ProposeTask
 from fabricatio.models.generic import WithBriefing
 from fabricatio.models.task import Task
-from fabricatio.models.usages import ToolBoxUsage
+from fabricatio.utils import override_kwargs
 from pydantic import Field, PrivateAttr
 
+OUTPUT_KEY = "task_output"
+
+INPUT_KEY = "task_input"
+
+
+class Action(WithBriefing):
+    """Class that represents an action to be executed in a workflow.
+
+    Actions are the atomic units of work in a workflow. Each action performs
+    a specific operation and can modify the shared context data.
+    """
+
+    ctx_override: ClassVar[bool] = False
+    """Whether to override the instance attr by the context variable."""
 
-class Action(HandleTask, ProposeTask):
-    """Class that represents an action to be executed in a workflow."""
+    name: str = Field(default="")
+    """The name of the action."""
+
+    description: str = Field(default="")
+    """The description of the action."""
 
     personality: str = Field(default="")
-    """The personality of whom the action belongs to."""
+    """The personality traits or context for the action executor."""
+
     output_key: str = Field(default="")
-    """The key of the output data."""
+    """The key used to store this action's output in the context dictionary."""
+
+    @final
+    def model_post_init(self, __context: Any) -> None:
+        """Initialize the action by setting default name and description if not provided.
+
+        Args:
+            __context: The context to be used for initialization.
+        """
+        self.name = self.name or self.__class__.__name__
+        self.description = self.description or self.__class__.__doc__ or ""
 
     @abstractmethod
-    async def _execute(self, **cxt: Unpack) -> Any:
-        """Execute the action with the provided arguments.
+    async def _execute(self, *_: Any, **cxt) -> Any:
+        """Implement the core logic of the action.
 
         Args:
-            **cxt: The context dictionary containing input and output data.
+            **cxt: Context dictionary containing input/output data.
 
         Returns:
-            The result of the action execution.
+            Result of the action execution to be stored in context.
         """
         pass
 
+    @final
     async def act(self, cxt: Dict[str, Any]) -> Dict[str, Any]:
-        """Perform the action by executing it and setting the output data.
+        """Execute action and update context.
 
         Args:
-            cxt: The context dictionary containing input and output data.
+            cxt (Dict[str, Any]): Shared context dictionary.
+
+        Returns:
+            Updated context dictionary with new/modified entries.
         """
         ret = await self._execute(**cxt)
+
         if self.output_key:
             logger.debug(f"Setting output: {self.output_key}")
             cxt[self.output_key] = ret
+
         return cxt
 
+    @property
     def briefing(self) -> str:
-        """Return a brief description of the action."""
+        """Generate formatted action description with personality context.
+
+        Returns:
+            Briefing text combining personality and action description.
+        """
         if self.personality:
             return f"## Your personality: \n{self.personality}\n# The action you are going to perform: \n{super().briefing}"
         return f"# The action you are going to perform: \n{super().briefing}"
 
+    def to_task_output(self, to: Union[str, "WorkFlow"] = OUTPUT_KEY) -> Self:
+        """Set the output key to OUTPUT_KEY and return the action instance."""
+        self.output_key = to.task_output_key if isinstance(to, WorkFlow) else to
+        return self
+
 
-class WorkFlow(WithBriefing, ToolBoxUsage):
-    """Class that represents a workflow to be executed in a task."""
+class WorkFlow(WithBriefing):
+    """Manages sequences of actions to fulfill tasks.
+
+    Handles context propagation between actions, error handling, and task lifecycle
+    events like cancellation and completion.
+    """
+
+    name: str = "WorkFlow"
+    """The name of the workflow, which is used to identify and describe the workflow."""
+    description: str = ""
+    """The description of the workflow, which describes the workflow's purpose and requirements."""
 
     _context: Queue[Dict[str, Any]] = PrivateAttr(default_factory=lambda: Queue(maxsize=1))
-    """ The context dictionary to be used for workflow execution."""
+    """Queue for storing the workflow execution context."""
+
+    _instances: Tuple[Action, ...] = PrivateAttr(default_factory=tuple)
+    """Instantiated action objects to be executed in this workflow."""
 
-    _instances: Tuple[Action, ...] = PrivateAttr(...)
-    """ The instances of the workflow steps."""
+    steps: Sequence[Union[Type[Action], Action]] = Field(frozen=True)
+    """The sequence of actions to be executed, can be action classes or instances."""
+
+    task_input_key: ClassVar[str] = INPUT_KEY
+    """Key used to store the input task in the context dictionary."""
+
+    task_output_key: ClassVar[str] = OUTPUT_KEY
+    """Key used to extract the final result from the context dictionary."""
 
-    steps: Tuple[Union[Type[Action], Action], ...] = Field(...)
-    """ The steps to be executed in the workflow, actions or action classes."""
-    task_input_key: str = Field(default="task_input")
-    """ The key of the task input data."""
-    task_output_key: str = Field(default="task_output")
-    """ The key of the task output data."""
     extra_init_context: Dict[str, Any] = Field(default_factory=dict, frozen=True)
-    """ The extra context dictionary to be used for workflow initialization."""
+    """Additional initial context values to be included at workflow start."""
 
     def model_post_init(self, __context: Any) -> None:
-        """Initialize the workflow by setting fallbacks for each step.
+        """Initialize the workflow by instantiating any action classes.
 
         Args:
             __context: The context to be used for initialization.
+
         """
-        temp = []
-        for step in self.steps:
-            temp.append(step if isinstance(step, Action) else step())
-        self._instances = tuple(temp)
+        self.name = self.name or self.__class__.__name__
+        # Convert any action classes to instances
+        self._instances = tuple(step if isinstance(step, Action) else step() for step in self.steps)
+
+    def iter_actions(self) -> Generator[Action, None, None]:
+        """Iterate over action instances."""
+        yield from self._instances
 
     def inject_personality(self, personality: str) -> Self:
-        """Inject the personality of the workflow.
+        """Set personality for actions without existing personality.
 
         Args:
-            personality: The personality to be injected.
+            personality (str): Shared personality context
 
         Returns:
-            Self: The instance of the workflow with the injected personality.
+            Workflow instance with updated actions
         """
-        for a in self._instances:
-            if not a.personality:
-                a.personality = personality
+        for action in filter(lambda a: not a.personality, self._instances):
+            action.personality = personality
+        return self
+
+    def override_action_variable(self, action: Action, ctx: Dict[str, Any]) -> Self:
+        """Override action variable with context values."""
+        if action.ctx_override:
+            for k, v in ctx.items():
+                if hasattr(action, k):
+                    setattr(action, k, v)
+
         return self
 
     async def serve(self, task: Task) -> None:
-        """Serve the task by executing the workflow steps.
+        """Execute workflow to complete given task.
 
         Args:
-            task: The task to be served.
+            task (Task): Task instance to be processed.
+
+        Steps:
+            1. Initialize context with task instance and extra data
+            2. Execute each action sequentially
+            3. Handle task cancellation and exceptions
+            4. Extract final result from context
         """
+        logger.info(f"Start execute workflow: {self.name}")
+
         await task.start()
         await self._init_context(task)
+
         current_action = None
        try:
-            for step in self._instances:
-                logger.debug(f"Executing step: {step.name}")
-                modified_ctx = await step.act(await self._context.get())
+            # Process each action in sequence
+            for i, step in enumerate(self._instances):
+                logger.info(f"Executing step [{i}] >> {(current_action := step.name)}")
+
+                # Get current context and execute action
+                context = await self._context.get()
+
+                self.override_action_variable(step, context)
+                act_task = create_task(step.act(context))
+                # Handle task cancellation
+                if task.is_cancelled():
+                    logger.warning(f"Workflow cancelled by task: {task.name}")
+                    act_task.cancel(f"Cancelled by task: {task.name}")
+                    break
+
+                # Update context with modified values
+                modified_ctx = await act_task
+                logger.success(f"Step [{i}] `{current_action}` execution finished.")
+                if step.output_key:
+                    logger.success(f"Setting action `{current_action}` output to `{step.output_key}`")
                 await self._context.put(modified_ctx)
-                current_action = step.name
-            logger.info(f"Finished executing workflow: {self.name}")
+
+            logger.success(f"Workflow `{self.name}` execution finished.")
+
+            # Get final context and extract result
             final_ctx = await self._context.get()
+            result = final_ctx.get(self.task_output_key)
+
             if self.task_output_key not in final_ctx:
                 logger.warning(
-                    f"Task output key: {self.task_output_key} not found in the context, None will be returned. You can check if `Action.output_key` is set the same as `WorkFlow.task_output_key`."
+                    f"Task output key: `{self.task_output_key}` not found in the context, None will be returned. "
+                    f"You can check if `Action.output_key` is set the same as `WorkFlow.task_output_key`."
                 )
 
-            await task.finish(final_ctx.get(self.task_output_key, None))
-        except RuntimeError as e:
-            logger.error(f"Error during task: {current_action} execution: {e}")  # Log the exception
-            logger.error(traceback.format_exc())  # Add this line to log the traceback
-            await task.fail()  # Mark the task as failed
+            await task.finish(result)
+
+        except Exception as e:  # noqa: BLE001
+            logger.critical(f"Error during task: {current_action} execution: {e}")
+            logger.critical(traceback.format_exc())
+            await task.fail()
 
     async def _init_context[T](self, task: Task[T]) -> None:
-        """Initialize the context dictionary for workflow execution."""
+        """Initialize workflow execution context.
+
+        Args:
+            task (Task[T]): Task being processed
+
+        Context includes:
+            - Task instance stored under task_input_key
+            - Any extra_init_context values
+        """
         logger.debug(f"Initializing context for workflow: {self.name}")
-        await self._context.put({self.task_input_key: task, **dict(self.extra_init_context)})
+        ctx = override_kwargs(self.extra_init_context, **task.extra_init_context)
+        if self.task_input_key in ctx:
+            raise ValueError(
+                f"Task input key: `{self.task_input_key}`, which is reserved, is already set in the init context"
+            )
 
-    def steps_fallback_to_self(self) -> Self:
-        """Set the fallback for each step to the workflow itself."""
-        self.hold_to(self._instances)
-        return self
+        await self._context.put({self.task_input_key: task, **ctx})
+
+    def update_init_context(self, /, **kwargs) -> Self:
+        """Update the initial context with additional key-value pairs.
+
+        Args:
+            **kwargs: Key-value pairs to add to the initial context.
 
-    def steps_supply_tools_from_self(self) -> Self:
-        """Supply the tools from the workflow to each step."""
-        self.provide_tools_to(self._instances)
+        Returns:
+            Self: The workflow instance for method chaining.
+        """
+        self.extra_init_context.update(kwargs)
         return self
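Note: a hedged sketch of how the reworked Action/WorkFlow API reads from this diff; the class name, context argument, and workflow name below are illustrative, not taken from the package.

    from fabricatio.models.action import Action, WorkFlow

    class Greet(Action):
        """Produce a greeting string."""

        async def _execute(self, task_input=None, **cxt) -> str:
            # `task_input` receives the Task stored under WorkFlow.task_input_key.
            return "hello"

    # to_task_output() points the action's output_key at WorkFlow.task_output_key,
    # so serve() can pass the result to task.finish().
    flow = WorkFlow(name="demo", steps=(Greet().to_task_output(),))
    # await flow.serve(some_task)  # runs the pipeline against a Task instance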