fabricatio 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fabricatio/actions/__init__.py ADDED
@@ -0,0 +1,5 @@
+"""module for actions."""
+
+from fabricatio.actions.transmission import SendTask
+
+__all__ = ["SendTask"]
fabricatio/actions/transmission.py ADDED
@@ -0,0 +1,16 @@
+from typing import List
+
+from fabricatio.journal import logger
+from fabricatio.models.action import Action
+from fabricatio.models.task import Task
+
+
+class SendTask(Action):
+    """Action that sends a task to a user."""
+
+    name: str = "send_task"
+
+    async def _execute(self, send_targets: List[str], send_task: Task, **_) -> None:
+        logger.info(f"Sending task {send_task.name} to {send_targets}")
+        for target in send_targets:
+            await send_task.publish(target)
+
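The new SendTask action forwards one task to several event targets by awaiting `Task.publish` once per target. A minimal usage sketch, assuming an `Action` subclass can be instantiated without further required fields and that calling the private `_execute` hook directly is acceptable for demonstration purposes:

```python
import asyncio

from fabricatio.actions.transmission import SendTask
from fabricatio.models.task import Task


async def main() -> None:
    # Build a task with the classmethod defined in fabricatio/models/task.py.
    task = Task.simple_task(
        name="summarize",
        goal="Summarize the latest report",
        description="Produce a one-paragraph summary.",
    )
    # "alice.inbox" and "bob.inbox" are hypothetical event namespaces.
    await SendTask()._execute(send_targets=["alice.inbox", "bob.inbox"], send_task=task)


asyncio.run(main())
```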
fabricatio/config.py CHANGED
@@ -1,4 +1,4 @@
-from typing import Literal
+from typing import List, Literal
 
 from appdirs import user_config_dir
 from pydantic import BaseModel, ConfigDict, Field, FilePath, HttpUrl, NonNegativeFloat, PositiveInt, SecretStr
@@ -29,6 +29,7 @@ class LLMConfig(BaseModel):
         stream (bool): Whether to stream the LLM model's response. Default is False.
         max_tokens (PositiveInt): The maximum number of tokens to generate. Set to 8192 as per request.
     """
+
     model_config = ConfigDict(use_attribute_docstrings=True)
     api_endpoint: HttpUrl = Field(default=HttpUrl("https://api.openai.com"))
     """
@@ -60,7 +61,7 @@ class LLMConfig(BaseModel):
     The temperature of the LLM model. Controls randomness in generation. Set to 1.0 as per request.
     """
 
-    stop_sign: str = Field(default="")
+    stop_sign: str | List[str] = Field(default=("\n\n", "User:"))
     """
    The stop sign of the LLM model. No default stop sign specified.
    """
@@ -94,6 +95,7 @@ class PymitterConfig(BaseModel):
         new_listener_event (bool): If set, a newListener event is emitted when a new listener is added.
         max_listeners (int): The maximum number of listeners per event.
     """
+
     model_config = ConfigDict(use_attribute_docstrings=True)
     delimiter: str = Field(default=".", frozen=True)
     """
@@ -118,6 +120,7 @@ class DebugConfig(BaseModel):
         log_level (Literal["DEBUG", "INFO", "SUCCESS", "WARNING", "ERROR", "CRITICAL"]): The log level of the application.
         log_file (FilePath): The log file of the application.
     """
+
     model_config = ConfigDict(use_attribute_docstrings=True)
 
     log_level: Literal["DEBUG", "INFO", "SUCCESS", "WARNING", "ERROR", "CRITICAL"] = Field(default="INFO")
@@ -139,6 +142,7 @@ class Settings(BaseSettings):
         debug (DebugConfig): Debug Configuration
         pymitter (PymitterConfig): Pymitter Configuration
     """
+
     model_config = SettingsConfigDict(
         env_prefix="FABRIK_",
         env_nested_delimiter="__",
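With the settings shown above (env_prefix "FABRIK_", nested delimiter "__"), the new `stop_sign` field can be overridden from the environment without touching any config file. A minimal sketch, assuming `Settings` can be constructed directly and that the nested LLM section is exposed as `llm`:

```python
import os

# The variable name follows the FABRIK_ prefix and "__" nested delimiter
# declared in SettingsConfigDict above; set it before importing the config
# module in case a Settings instance is built at import time.
os.environ["FABRIK_LLM__STOP_SIGN"] = "User:"

from fabricatio.config import Settings

settings = Settings()
print(settings.llm.stop_sign)  # expected: "User:"
```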
fabricatio/models/events.py CHANGED
@@ -50,8 +50,10 @@ class Event(BaseModel):
         self.segments.clear()
         return self
 
-    def concat(self, event: Self) -> Self:
+    def concat(self, event: Self | str) -> Self:
         """Concatenate another event to this event."""
+        if isinstance(event, str):
+            event = Event.from_string(event)
         self.segments.extend(event.segments)
         return self
 
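`Event.concat` now accepts either another Event or a plain string, converting the latter with `Event.from_string` before extending the segment list. A small sketch of the intended call pattern, assuming `collapse()` joins the segments with the configured delimiter:

```python
from fabricatio.models.events import Event

# Build a namespace event, then append a string-form suffix directly.
ns = Event.from_string("fabricatio.task")
ns.concat("pending")   # a str is converted via Event.from_string internally
print(ns.collapse())   # expected: "fabricatio.task.pending" with the default "." delimiter
```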
fabricatio/models/generic.py CHANGED
@@ -1,7 +1,8 @@
 from asyncio import Queue
-from typing import Any, Dict, Iterable, List, Optional, Self
+from typing import Callable, Dict, Iterable, List, Optional, Self
 
 import litellm
+import orjson
 from litellm.types.utils import Choices, ModelResponse, StreamingChoices
 from pydantic import (
     BaseModel,
@@ -190,7 +191,7 @@ class LLMUsage(Base):
     The temperature of the LLM model.
     """
 
-    llm_stop_sign: Optional[str] = None
+    llm_stop_sign: Optional[str | List[str]] = None
     """
     The stop sign of the LLM model.
     """
@@ -215,21 +216,12 @@ class LLMUsage(Base):
     The maximum number of tokens to generate.
     """
 
-    def model_post_init(self, __context: Any) -> None:
-        """Initialize the LLM model with API key and endpoint.
-
-        Args:
-            __context (Any): The context passed during model initialization.
-        """
-        litellm.api_key = self.llm_api_key.get_secret_value() if self.llm_api_key else configs.llm.api_key
-        litellm.api_base = self.llm_api_endpoint.unicode_string() if self.llm_api_endpoint else configs.llm.api_endpoint
-
     async def aquery(
         self,
         messages: List[Dict[str, str]],
         model: str | None = None,
         temperature: NonNegativeFloat | None = None,
-        stop: str | None = None,
+        stop: str | List[str] | None = None,
         top_p: NonNegativeFloat | None = None,
         max_tokens: PositiveInt | None = None,
         n: PositiveInt | None = None,
@@ -266,6 +258,10 @@ class LLMUsage(Base):
             stream=stream or self.llm_stream or configs.llm.stream,
             timeout=timeout or self.llm_timeout or configs.llm.timeout,
             max_retries=max_retries or self.llm_max_retries or configs.llm.max_retries,
+            api_key=self.llm_api_key.get_secret_value() if self.llm_api_key else configs.llm.api_key.get_secret_value(),
+            base_url=self.llm_api_endpoint.unicode_string()
+            if self.llm_api_endpoint
+            else configs.llm.api_endpoint.unicode_string(),
         )
 
     async def ainvoke(
@@ -274,7 +270,7 @@ class LLMUsage(Base):
         system_message: str = "",
         model: str | None = None,
         temperature: NonNegativeFloat | None = None,
-        stop: str | None = None,
+        stop: str | List[str] | None = None,
         top_p: NonNegativeFloat | None = None,
         max_tokens: PositiveInt | None = None,
         n: PositiveInt | None = None,
@@ -321,7 +317,7 @@ class LLMUsage(Base):
         system_message: str = "",
         model: str | None = None,
         temperature: NonNegativeFloat | None = None,
-        stop: str | None = None,
+        stop: str | List[str] | None = None,
         top_p: NonNegativeFloat | None = None,
         max_tokens: PositiveInt | None = None,
         stream: bool | None = None,
@@ -365,6 +361,61 @@ class LLMUsage(Base):
             .message.content
         )
 
+    async def aask_validate[T](
+        self,
+        question: str,
+        validator: Callable[[str], T | None],
+        max_validations: PositiveInt = 2,
+        system_message: str = "",
+        model: str | None = None,
+        temperature: NonNegativeFloat | None = None,
+        stop: str | List[str] | None = None,
+        top_p: NonNegativeFloat | None = None,
+        max_tokens: PositiveInt | None = None,
+        stream: bool | None = None,
+        timeout: PositiveInt | None = None,
+        max_retries: PositiveInt | None = None,
+    ) -> T:
+        """Asynchronously ask a question and validate the response using a given validator.
+
+        Args:
+            question (str): The question to ask.
+            validator (Callable[[str], T | None]): A function to validate the response.
+            max_validations (PositiveInt): Maximum number of validation attempts.
+            system_message (str): System message to include in the request.
+            model (str | None): The model to use for the request.
+            temperature (NonNegativeFloat | None): Temperature setting for the request.
+            stop (str | None): Stop sequence for the request.
+            top_p (NonNegativeFloat | None): Top-p sampling parameter.
+            max_tokens (PositiveInt | None): Maximum number of tokens in the response.
+            stream (bool | None): Whether to stream the response.
+            timeout (PositiveInt | None): Timeout for the request.
+            max_retries (PositiveInt | None): Maximum number of retries for the request.
+
+        Returns:
+            T: The validated response.
+
+        Raises:
+            ValueError: If the response fails to validate after the maximum number of attempts.
+        """
+        for _ in range(max_validations):
+            if (
+                response := await self.aask(
+                    question,
+                    system_message,
+                    model,
+                    temperature,
+                    stop,
+                    top_p,
+                    max_tokens,
+                    stream,
+                    timeout,
+                    max_retries,
+                )
+            ) and (validated := validator(response)):
+                return validated
+        raise ValueError("Failed to validate the response.")
+
     def fallback_to(self, other: "LLMUsage") -> Self:
         """Fallback to another instance's attribute values if the current instance's attributes are None.
 
@@ -397,3 +448,19 @@ class LLMUsage(Base):
 
         # Return the current instance to allow for method chaining
         return self
+
+
+class WithJsonExample(Base):
+    """Class that provides a JSON schema for the model."""
+
+    @classmethod
+    def json_example(cls) -> str:
+        """Return a JSON example for the model.
+
+        Returns:
+            str: A JSON example for the model.
+        """
+        return orjson.dumps(
+            {field_name: field_info.description for field_name, field_info in cls.model_fields.items()},
+            option=orjson.OPT_INDENT_2 | orjson.OPT_SORT_KEYS,
+        ).decode()
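The new `aask_validate` helper retries `aask` up to `max_validations` times and returns the first response a caller-supplied validator accepts, while `WithJsonExample.json_example` serializes a model's field descriptions as indented JSON. A hedged sketch of how the two combine, assuming `role` is any already-configured `LLMUsage` subclass instance (for example a `Role` from fabricatio/models/role.py):

```python
from pydantic import ValidationError

from fabricatio.models.task import Task


def task_validator(response: str) -> Task | None:
    """Return a Task when the raw response is valid Task JSON, otherwise None."""
    try:
        return Task.model_validate_json(response)
    except ValidationError:
        return None


async def propose_with_retries(role) -> Task:
    # `role` is assumed to exist purely for illustration; Task.json_example()
    # comes from the WithJsonExample mixin that Task now inherits.
    return await role.aask_validate(
        question=f"Fill this JSON schema with a concrete task:\n{Task.json_example()}",
        validator=task_validator,
        max_validations=3,
    )
```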
fabricatio/models/role.py CHANGED
@@ -1,6 +1,6 @@
 from typing import Any
 
-from pydantic import Field
+from pydantic import Field, ValidationError
 
 from fabricatio.core import env
 from fabricatio.journal import logger
@@ -8,6 +8,7 @@ from fabricatio.models.action import WorkFlow
 from fabricatio.models.events import Event
 from fabricatio.models.generic import LLMUsage, Memorable, WithBriefing, WithToDo
 from fabricatio.models.task import Task
+from fabricatio.parser import JsonCapture
 
 
 class Role(Memorable, WithBriefing, WithToDo, LLMUsage):
@@ -26,4 +27,24 @@ class Role(Memorable, WithBriefing, WithToDo, LLMUsage):
             env.on(event, workflow.serve)
 
     async def propose(self, prompt: str) -> Task:
-        """Propose a task to the role."""
+        """Propose a task based on the provided prompt."""
+        assert prompt, "Prompt must be provided."
+
+        def _validate_json(response: str) -> None | Task:
+            try:
+                cap = JsonCapture.capture(response)
+                logger.debug(f"Response: \n{response}")
+                logger.info(f"Captured JSON: \n{cap[0]}")
+                return Task.model_validate_json(cap[0] if cap else response)
+            except ValidationError as e:
+                logger.error(f"Failed to parse task from JSON: {e}")
+                return None
+
+        return await self.aask_validate(
+            f"{prompt} \n\nBased on requirement above, "
+            f"you need to construct a task to satisfy that requirement in JSON format "
+            f"written like this: \n\n```json\n{Task.json_example()}\n```\n\n"
+            f"No extra explanation needed. ",
+            _validate_json,
+            system_message=f"# your personal briefing: \n{self.briefing}",
+        )
fabricatio/models/task.py CHANGED
@@ -1,4 +1,4 @@
-"""This module defines the Task class, which represents a task with a status and output.
+"""This module defines the `Task` class, which represents a task with a status and output.
 
 It includes methods to manage the task's lifecycle, such as starting, finishing, cancelling, and failing the task.
 """
@@ -12,11 +12,20 @@ from pydantic import Field, PrivateAttr
 from fabricatio.config import configs
 from fabricatio.core import env
 from fabricatio.journal import logger
-from fabricatio.models.generic import WithBriefing
+from fabricatio.models.events import Event
+from fabricatio.models.generic import WithBriefing, WithJsonExample
 
 
 class TaskStatus(Enum):
-    """Enum that represents the status of a task."""
+    """An enumeration representing the status of a task.
+
+    Attributes:
+        Pending: The task is pending.
+        Running: The task is currently running.
+        Finished: The task has been successfully completed.
+        Failed: The task has failed.
+        Cancelled: The task has been cancelled.
+    """
 
     Pending = "pending"
     Running = "running"
@@ -25,27 +34,30 @@ class TaskStatus(Enum):
     Cancelled = "cancelled"
 
 
-class Task[T](WithBriefing):
-    """Class that represents a task with a status and output.
+class Task[T](WithBriefing, WithJsonExample):
+    """A class representing a task with a status and output.
 
     Attributes:
         name (str): The name of the task.
         description (str): The description of the task.
-        _output (Queue): The output queue of the task.
-        status (TaskStatus): The status of the task.
         goal (str): The goal of the task.
     """
 
     name: str = Field(...)
     """The name of the task."""
+
     description: str = Field(default="")
     """The description of the task."""
-    _output: Queue = PrivateAttr(default_factory=lambda: Queue(maxsize=1))
-    status: TaskStatus = Field(default=TaskStatus.Pending)
-    """The status of the task."""
+
     goal: str = Field(default="")
     """The goal of the task."""
 
+    _output: Queue = PrivateAttr(default_factory=lambda: Queue(maxsize=1))
+    """The output queue of the task."""
+
+    _status: TaskStatus = PrivateAttr(default=TaskStatus.Pending)
+    """The status of the task."""
+
     @classmethod
     def simple_task(cls, name: str, goal: str, description: str) -> Self:
         """Create a simple task with a name, goal, and description.
@@ -56,7 +68,7 @@ class Task[T](WithBriefing):
             description (str): The description of the task.
 
         Returns:
-            Self: A new instance of the Task class.
+            Task: A new instance of the `Task` class.
         """
         return cls(name=name, goal=goal, description=description)
 
@@ -64,11 +76,11 @@ class Task[T](WithBriefing):
         """Update the goal and description of the task.
 
         Args:
-            goal (Optional[str]): The new goal of the task.
-            description (Optional[str]): The new description of the task.
+            goal (str, optional): The new goal of the task.
+            description (str, optional): The new description of the task.
 
         Returns:
-            Self: The updated instance of the Task class.
+            Task: The updated instance of the `Task` class.
         """
         if goal:
             self.goal = goal
@@ -148,10 +160,10 @@ class Task[T](WithBriefing):
             output (T): The output of the task.
 
         Returns:
-            Self: The finished instance of the Task class.
+            Task: The finished instance of the `Task` class.
         """
         logger.info(f"Finishing task {self.name}")
-        self.status = TaskStatus.Finished
+        self._status = TaskStatus.Finished
         await self._output.put(output)
         logger.debug(f"Output set for task {self.name}")
         await env.emit_async(self.finished_label, self)
@@ -162,10 +174,10 @@ class Task[T](WithBriefing):
         """Mark the task as running.
 
         Returns:
-            Self: The running instance of the Task class.
+            Task: The running instance of the `Task` class.
         """
         logger.info(f"Starting task {self.name}")
-        self.status = TaskStatus.Running
+        self._status = TaskStatus.Running
         await env.emit_async(self.running_label, self)
         return self
 
@@ -173,9 +185,9 @@ class Task[T](WithBriefing):
         """Mark the task as cancelled.
 
         Returns:
-            Self: The cancelled instance of the Task class.
+            Task: The cancelled instance of the `Task` class.
         """
-        self.status = TaskStatus.Cancelled
+        self._status = TaskStatus.Cancelled
         await env.emit_async(self.cancelled_label, self)
         return self
 
@@ -183,31 +195,43 @@ class Task[T](WithBriefing):
         """Mark the task as failed.
 
         Returns:
-            Self: The failed instance of the Task class.
+            Task: The failed instance of the `Task` class.
         """
         logger.error(f"Task {self.name} failed")
-        self.status = TaskStatus.Failed
+        self._status = TaskStatus.Failed
         await env.emit_async(self.failed_label, self)
         return self
 
-    async def publish(self) -> Self:
-        """Publish the task to the environment.
+    async def publish(self, event_namespace: Event | str = "") -> Self:
+        """Publish the task with an optional event namespace.
+
+        Args:
+            event_namespace (Event | str, optional): The event namespace to use. Defaults to an empty string.
 
         Returns:
-            Self: The published instance of the Task class.
+            Task: The published instance of the `Task` class.
         """
+        if isinstance(event_namespace, str):
+            event_namespace = Event.from_string(event_namespace)
+
         logger.info(f"Publishing task {self.name}")
-        await env.emit_async(self.pending_label, self)
+        await env.emit_async(event_namespace.concat(self.pending_label).collapse(), self)
         return self
 
-    async def delegate(self) -> T:
-        """Delegate the task to the environment.
+    async def delegate(self, event_namespace: Event | str = "") -> T:
+        """Delegate the task with an optional event namespace and return the output.
+
+        Args:
+            event_namespace (Event | str, optional): The event namespace to use. Defaults to an empty string.
 
         Returns:
             T: The output of the task.
         """
+        if isinstance(event_namespace, str):
+            event_namespace = Event.from_string(event_namespace)
+
         logger.info(f"Delegating task {self.name}")
-        await env.emit_async(self.pending_label, self)
+        await env.emit_async(event_namespace.concat(self.pending_label).collapse(), self)
         return await self.get_output()
 
     @property
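`publish` and `delegate` now accept an optional event namespace (an `Event` or a string) that is prepended to the task's pending label before the event is emitted. A minimal sketch, assuming some role or workflow elsewhere is subscribed to the resulting event and eventually calls `finish` on the task:

```python
from fabricatio.models.task import Task


async def dispatch():
    task = Task.simple_task(
        name="translate",
        goal="Translate the README to English",
        description="Keep code identifiers untouched.",
    )
    # The emitted label becomes roughly "<namespace>.<pending label>" via
    # Event.concat(...).collapse(); "fabricatio.demo" is a made-up namespace.
    await task.publish("fabricatio.demo")

    # delegate() emits the same pending event, then awaits the task's output
    # queue, so it returns only after a subscribed handler finishes the task.
    return await task.delegate("fabricatio.demo")
```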
fabricatio/parser.py CHANGED
@@ -1,6 +1,7 @@
 from typing import Any, Self, Tuple
 
-from pydantic import Field, PrivateAttr
+import regex
+from pydantic import Field, PositiveInt, PrivateAttr
 from regex import Pattern, compile
 
 from fabricatio.models.generic import Base
@@ -18,6 +19,8 @@ class Capture(Base):
     """The target groups to capture from the pattern."""
     pattern: str = Field(frozen=True)
     """The regular expression pattern to search for."""
+    flags: PositiveInt = Field(default=regex.DOTALL | regex.MULTILINE | regex.IGNORECASE, frozen=True)
+    """The flags to use when compiling the regular expression pattern."""
     _compiled: Pattern = PrivateAttr()
 
     def model_post_init(self, __context: Any) -> None:
@@ -26,7 +29,7 @@ class Capture(Base):
 
         Args:
             __context (Any): The context in which the model is initialized.
         """
-        self._compiled = compile(self.pattern)
+        self._compiled = compile(self.pattern, self.flags)
 
     def capture(self, text: str) -> Tuple[str, ...] | None:
         """Capture the first occurrence of the pattern in the given text.
fabricatio-0.1.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fabricatio
-Version: 0.1.1
+Version: 0.1.3
 Summary: A LLM multi-agent framework.
 Author-email: Whth <zettainspector@foxmail.com>
 License: MIT License
@@ -40,6 +40,7 @@ Requires-Dist: asyncio>=3.4.3
 Requires-Dist: gitpython>=3.1.44
 Requires-Dist: litellm>=1.60.0
 Requires-Dist: loguru>=0.7.3
+Requires-Dist: orjson>=3.10.15
 Requires-Dist: pydantic-settings>=2.7.1
 Requires-Dist: pydantic>=2.10.6
 Requires-Dist: pymitter>=1.0.0
fabricatio-0.1.3.dist-info/RECORD ADDED
@@ -0,0 +1,21 @@
+fabricatio/__init__.py,sha256=nFPtohqceECRYzU-WlVT6o4oSaKN0vGok-w9JIaiJfs,644
+fabricatio/config.py,sha256=EOlVkuEBAHESAlrGtolGwEG2YrTaJPhEGPKS7QDxrx0,6995
+fabricatio/core.py,sha256=B6KBIfBRF023HF0UUaUprEkQd6sT7G_pexGXQ9btJnE,5788
+fabricatio/journal.py,sha256=CW9HePtgTiboOyPTExq9GjG5BseZcbc-S6lxDXrpmv0,667
+fabricatio/parser.py,sha256=On_YUCvOuA0FA_NtDVNJqKp7KEO_sUE89oO_WnkEhQ4,2314
+fabricatio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+fabricatio/actions/__init__.py,sha256=n3lwq9FPNtvfLu2L1pX4UkwiPITU7luk-b4aMJyjIC8,109
+fabricatio/actions/transmission.py,sha256=Azog4ItVk7aASdYzTwzyckzYG2hDFSXctnA7qp-Qlq0,502
+fabricatio/models/action.py,sha256=M-12dc-nQiNJU6Y9j-dr4Ef3642vRvzHlzxekBepzaU,3358
+fabricatio/models/events.py,sha256=PlavKOD94Q9Q9iPoQPkf9HJdznBtzuSUrPtUoVtkQxU,2143
+fabricatio/models/generic.py,sha256=Sxpx0BO0t85YF5Lwks6F165N6TJsDe7xym28dQG5Mqs,17681
+fabricatio/models/role.py,sha256=jdabuYRXwgvpYoNwvazygDiZHGGQApUIIKltniu78O8,2151
+fabricatio/models/task.py,sha256=8IXT192t5EXVHAdHj45DJNdBhdA2xmh36S_vKyEw6y0,7757
+fabricatio/models/tool.py,sha256=UkEp1Nzbl5wZX21q_Z2VkpiJmVDSdoGDzINQniO8hSY,3536
+fabricatio/models/utils.py,sha256=2mgXla9_K3dnRrz6hIKzmltTYPmvDk0MBjjEBkCXTdg,2474
+fabricatio/toolboxes/__init__.py,sha256=bjefmPd7wBaWhbZzdMPXvrjMTeRzlUh_Dev2PUAc124,158
+fabricatio/toolboxes/task.py,sha256=xgyPetm2R_HlQwpzE8YPnBN7QOYLd0-T8E6QPZG1PPQ,204
+fabricatio-0.1.3.dist-info/METADATA,sha256=pDZPikewIva_s6riDaxhlOmvd92ls_JoS_VqETPwDdM,3797
+fabricatio-0.1.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+fabricatio-0.1.3.dist-info/licenses/LICENSE,sha256=do7J7EiCGbq0QPbMAL_FqLYufXpHnCnXBOuqVPwSV8Y,1088
+fabricatio-0.1.3.dist-info/RECORD,,
@@ -1,19 +0,0 @@
1
- fabricatio/__init__.py,sha256=nFPtohqceECRYzU-WlVT6o4oSaKN0vGok-w9JIaiJfs,644
2
- fabricatio/config.py,sha256=Y-XPY23p6DrNk9YUMMpu1ECNON-hRUMUDePqeW6Qx0w,6954
3
- fabricatio/core.py,sha256=B6KBIfBRF023HF0UUaUprEkQd6sT7G_pexGXQ9btJnE,5788
4
- fabricatio/journal.py,sha256=CW9HePtgTiboOyPTExq9GjG5BseZcbc-S6lxDXrpmv0,667
5
- fabricatio/parser.py,sha256=Nxrfw-m0DVaI0wIE1b6LK67zXiYbQdXtv8ZqiZd3hbw,2096
6
- fabricatio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- fabricatio/models/action.py,sha256=M-12dc-nQiNJU6Y9j-dr4Ef3642vRvzHlzxekBepzaU,3358
8
- fabricatio/models/events.py,sha256=0p42QmNDzmC76DhMwW1H_Mlg15MQ_XjEqkCJc8UkIB8,2055
9
- fabricatio/models/generic.py,sha256=_1HKN3tdU-SI1uEnMZnAaUFH1_7IvEOfyE0I8JP9TX8,14929
10
- fabricatio/models/role.py,sha256=0P8Ys84cHNYk1uhPfkJdIOl8-GiqO5y3gAE2IZX_MbQ,1123
11
- fabricatio/models/task.py,sha256=1ZTMktQGuCe7LGWfNFQEL-7M4adoKA_esmZgVS23CnU,6772
12
- fabricatio/models/tool.py,sha256=UkEp1Nzbl5wZX21q_Z2VkpiJmVDSdoGDzINQniO8hSY,3536
13
- fabricatio/models/utils.py,sha256=2mgXla9_K3dnRrz6hIKzmltTYPmvDk0MBjjEBkCXTdg,2474
14
- fabricatio/toolboxes/__init__.py,sha256=bjefmPd7wBaWhbZzdMPXvrjMTeRzlUh_Dev2PUAc124,158
15
- fabricatio/toolboxes/task.py,sha256=xgyPetm2R_HlQwpzE8YPnBN7QOYLd0-T8E6QPZG1PPQ,204
16
- fabricatio-0.1.1.dist-info/METADATA,sha256=2y5f2Z9wK_sOyKJpLHO6-CBRXZq9Z4JeMKnL7LFafD8,3766
17
- fabricatio-0.1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
18
- fabricatio-0.1.1.dist-info/licenses/LICENSE,sha256=do7J7EiCGbq0QPbMAL_FqLYufXpHnCnXBOuqVPwSV8Y,1088
19
- fabricatio-0.1.1.dist-info/RECORD,,