lionagi 0.0.316__py3-none-any.whl → 0.1.0__py3-none-any.whl

Files changed (103)
  1. lionagi/core/__init__.py +19 -8
  2. lionagi/core/agent/__init__.py +0 -3
  3. lionagi/core/agent/base_agent.py +26 -30
  4. lionagi/core/branch/__init__.py +0 -4
  5. lionagi/core/branch/{base_branch.py → base.py} +13 -14
  6. lionagi/core/branch/branch.py +22 -20
  7. lionagi/core/branch/executable_branch.py +0 -347
  8. lionagi/core/branch/{branch_flow_mixin.py → flow_mixin.py} +6 -6
  9. lionagi/core/branch/util.py +1 -1
  10. lionagi/core/direct/__init__.py +10 -1
  11. lionagi/core/direct/cot.py +61 -26
  12. lionagi/core/direct/plan.py +10 -8
  13. lionagi/core/direct/predict.py +5 -5
  14. lionagi/core/direct/react.py +8 -8
  15. lionagi/core/direct/score.py +4 -4
  16. lionagi/core/direct/select.py +4 -4
  17. lionagi/core/direct/utils.py +7 -4
  18. lionagi/core/direct/vote.py +2 -2
  19. lionagi/core/execute/base_executor.py +50 -0
  20. lionagi/core/execute/branch_executor.py +233 -0
  21. lionagi/core/execute/instruction_map_executor.py +131 -0
  22. lionagi/core/execute/structure_executor.py +218 -0
  23. lionagi/core/flow/monoflow/ReAct.py +4 -4
  24. lionagi/core/flow/monoflow/chat.py +6 -6
  25. lionagi/core/flow/monoflow/chat_mixin.py +24 -34
  26. lionagi/core/flow/monoflow/followup.py +4 -4
  27. lionagi/core/flow/polyflow/__init__.py +1 -1
  28. lionagi/core/flow/polyflow/chat.py +15 -12
  29. lionagi/core/{prompt/action_template.py → form/action_form.py} +2 -2
  30. lionagi/core/{prompt → form}/field_validator.py +40 -31
  31. lionagi/core/form/form.py +302 -0
  32. lionagi/core/form/mixin.py +214 -0
  33. lionagi/core/{prompt/scored_template.py → form/scored_form.py} +2 -2
  34. lionagi/core/generic/__init__.py +37 -0
  35. lionagi/core/generic/action.py +26 -0
  36. lionagi/core/generic/component.py +457 -0
  37. lionagi/core/generic/condition.py +44 -0
  38. lionagi/core/generic/data_logger.py +305 -0
  39. lionagi/core/generic/edge.py +110 -0
  40. lionagi/core/generic/mail.py +90 -0
  41. lionagi/core/generic/mailbox.py +36 -0
  42. lionagi/core/generic/node.py +285 -0
  43. lionagi/core/generic/relation.py +70 -0
  44. lionagi/core/generic/signal.py +22 -0
  45. lionagi/core/generic/structure.py +362 -0
  46. lionagi/core/generic/transfer.py +20 -0
  47. lionagi/core/generic/work.py +40 -0
  48. lionagi/core/graph/graph.py +126 -0
  49. lionagi/core/graph/tree.py +190 -0
  50. lionagi/core/mail/__init__.py +0 -8
  51. lionagi/core/mail/mail_manager.py +12 -10
  52. lionagi/core/mail/schema.py +9 -2
  53. lionagi/core/messages/__init__.py +0 -3
  54. lionagi/core/messages/schema.py +17 -225
  55. lionagi/core/session/__init__.py +0 -3
  56. lionagi/core/session/session.py +25 -23
  57. lionagi/core/tool/__init__.py +3 -1
  58. lionagi/core/tool/tool.py +28 -0
  59. lionagi/core/tool/tool_manager.py +75 -75
  60. lionagi/integrations/chunker/chunk.py +7 -7
  61. lionagi/integrations/config/oai_configs.py +4 -4
  62. lionagi/integrations/loader/load.py +6 -6
  63. lionagi/integrations/loader/load_util.py +8 -8
  64. lionagi/libs/ln_api.py +3 -3
  65. lionagi/libs/ln_parse.py +43 -6
  66. lionagi/libs/ln_validate.py +288 -0
  67. lionagi/libs/sys_util.py +28 -6
  68. lionagi/tests/libs/test_async.py +0 -0
  69. lionagi/tests/libs/test_field_validators.py +353 -0
  70. lionagi/tests/test_core/test_base_branch.py +0 -1
  71. lionagi/tests/test_core/test_branch.py +3 -0
  72. lionagi/tests/test_core/test_session_base_util.py +1 -0
  73. lionagi/version.py +1 -1
  74. {lionagi-0.0.316.dist-info → lionagi-0.1.0.dist-info}/METADATA +1 -1
  75. lionagi-0.1.0.dist-info/RECORD +136 -0
  76. lionagi/core/prompt/prompt_template.py +0 -312
  77. lionagi/core/schema/__init__.py +0 -22
  78. lionagi/core/schema/action_node.py +0 -29
  79. lionagi/core/schema/base_mixin.py +0 -296
  80. lionagi/core/schema/base_node.py +0 -199
  81. lionagi/core/schema/condition.py +0 -24
  82. lionagi/core/schema/data_logger.py +0 -354
  83. lionagi/core/schema/data_node.py +0 -93
  84. lionagi/core/schema/prompt_template.py +0 -67
  85. lionagi/core/schema/structure.py +0 -912
  86. lionagi/core/tool/manual.py +0 -1
  87. lionagi-0.0.316.dist-info/RECORD +0 -121
  88. /lionagi/core/{branch/base → execute}/__init__.py +0 -0
  89. /lionagi/core/flow/{base/baseflow.py → baseflow.py} +0 -0
  90. /lionagi/core/flow/{base/__init__.py → mono_chat_mixin.py} +0 -0
  91. /lionagi/core/{prompt → form}/__init__.py +0 -0
  92. /lionagi/{tests/test_integrations → core/graph}/__init__.py +0 -0
  93. /lionagi/tests/{test_libs → integrations}/__init__.py +0 -0
  94. /lionagi/tests/{test_libs/test_async.py → libs/__init__.py} +0 -0
  95. /lionagi/tests/{test_libs → libs}/test_api.py +0 -0
  96. /lionagi/tests/{test_libs → libs}/test_convert.py +0 -0
  97. /lionagi/tests/{test_libs → libs}/test_func_call.py +0 -0
  98. /lionagi/tests/{test_libs → libs}/test_nested.py +0 -0
  99. /lionagi/tests/{test_libs → libs}/test_parse.py +0 -0
  100. /lionagi/tests/{test_libs → libs}/test_sys_util.py +0 -0
  101. {lionagi-0.0.316.dist-info → lionagi-0.1.0.dist-info}/LICENSE +0 -0
  102. {lionagi-0.0.316.dist-info → lionagi-0.1.0.dist-info}/WHEEL +0 -0
  103. {lionagi-0.0.316.dist-info → lionagi-0.1.0.dist-info}/top_level.txt +0 -0
lionagi/core/generic/data_logger.py (new)
@@ -0,0 +1,305 @@
+ import atexit
+ import contextlib
+ import logging
+ from collections import deque
+ from dataclasses import dataclass
+ from pathlib import Path
+ from typing import Any, Dict, List
+
+ from lionagi.libs import SysUtil, convert, nested
+
+
+ # TODO: there should be a global data logger, under setting
+
+
+ @dataclass
+ class DLog:
+     """
+     Defines a log entry structure for data processing operations.
+
+     This class encapsulates both the input to and output from a data processing
+     operation, along with an automatically generated timestamp indicating when the
+     log entry was created. It aims to standardize logging across applications for
+     easier analysis and debugging.
+
+     Attributes:
+         input_data: The data input to the operation. Can be of any type.
+         output_data: The data output by the operation. Can be of any type.
+     """
+
+     input_data: Any
+     output_data: Any
+
+     def serialize(self, *, flatten_: bool = True, sep: str = "[^_^]") -> dict[str, Any]:
+         """Serializes the DLog instance into a dictionary with an added timestamp.
+
+         Args:
+             flatten_ (bool): If True, flattens dictionary inputs for serialization.
+             sep (str): Separator used in flattening nested dictionaries.
+
+         Returns:
+             A dictionary representation of the DLog instance, including 'input_data',
+             'output_data', and 'timestamp'.
+         """
+         log_dict = {}
+
+         def _process_data(data, field):
+             try:
+                 data = convert.to_str(data)
+                 if "{" not in data:
+                     log_dict[field] = convert.to_str(data)
+                 else:
+                     with contextlib.suppress(Exception):
+                         data = convert.to_dict(data)
+
+                     if isinstance(data, dict) and flatten_:
+                         log_dict[field] = convert.to_str(nested.flatten(data, sep=sep))
+                     else:
+                         log_dict[field] = convert.to_str(data)
+             except Exception as e:
+                 log_dict[field] = data
+                 logging.error(f"Error in processing {field} to str: {e}")
+
+         _process_data(self.input_data, "input_data")
+         _process_data(self.output_data, "output_data")
+
+         log_dict["timestamp"] = SysUtil.get_timestamp()
+
+         return log_dict
+
+     @classmethod
+     def deserialize(
+         cls,
+         *,
+         input_str: str,
+         output_str: str,
+         unflatten_: bool = True,
+         sep: str = "[^_^]",
+     ) -> "DLog":
+         """Deserializes log entries from string representations of input and output data.
+
+         This method reconstructs a DLog instance from serialized string data, optionally
+         unflattening nested dictionary structures if they were flattened during the
+         serialization process. The method is particularly useful for reading logs from
+         storage formats like JSON or CSV where data is represented as strings.
+
+         Note:
+             The separator '[^_^]' is reserved and should not be used within dictionary
+             keys to ensure proper structure preservation during unflattening.
+
+         Args:
+             input_str: String representation of the input data.
+             output_str: String representation of the output data.
+             unflatten_ (bool): Indicates whether to unflatten the string data back into
+                 nested dictionaries.
+             sep (str): Separator used if unflattening is performed.
+
+         Returns:
+             An instance of DLog reconstructed from the provided string data.
+
+         Raises:
+             ValueError: If deserialization or unflattening fails due to incorrect data
+                 format or separator issues.
+         """
+
+         def _process_data(data):
+             if unflatten_:
+                 try:
+                     return nested.unflatten(convert.to_dict(data), sep=sep)
+                 except Exception:
+                     return data
+             else:
+                 return data
+
+         input_data = _process_data(input_str)
+         output_data = _process_data(output_str)
+
+         return cls(input_data=input_data, output_data=output_data)
+
+
+ class DataLogger:
+     """
+     Manages logging for data processing activities within an application.
+
+     This class handles the accumulation, structuring, and persistence of log
+     entries. It supports exporting logs to disk in both CSV and JSON formats,
+     with automatic log saving at program exit and customizable file naming.
+
+     Attributes:
+         persist_path: Path to the directory for saving log files.
+         log: Container for log entries.
+         filename: Base name for log files.
+     """
+
+     def __init__(
+         self,
+         persist_path: str | Path | None = None,
+         log: List[Dict] | None = None,
+         filename: str | None = None,
+     ) -> None:
+         """
+         Initializes the DataLogger with optional custom settings for log storage.
+
+         Args:
+             persist_path: The file system path for storing log files. Defaults to 'data/logs/'.
+             log: Initial log entries.
+             filename: Base name for exported log files.
+         """
+         self.persist_path = Path(persist_path) if persist_path else Path("data/logs/")
+         self.log = deque(log) if log else deque()
+         self.filename = filename or "log"
+         atexit.register(self.save_at_exit)
+
+     def extend(self, logs) -> None:
+         """
+         Extends the log deque with multiple log entries.
+
+         This method allows for bulk addition of log entries, which can be useful for
+         importing logs from external sources or consolidating logs from different parts
+         of an application.
+
+         Args:
+             logs: A list of log entries, each as a dictionary conforming to the log
+                 structure (e.g., containing 'input_data', 'output_data', etc.).
+         """
+         if len(logs) > 0:
+             log1 = convert.to_list(self.log)
+             log1.extend(convert.to_list(logs))
+             self.log = deque(log1)
+
+     def append(self, *, input_data: Any, output_data: Any) -> None:
+         """
+         Appends a new log entry from provided input and output data.
+
+         Args:
+             input_data: Input data to the operation.
+             output_data: Output data from the operation.
+         """
+         log_entry = DLog(input_data=input_data, output_data=output_data)
+         self.log.append(log_entry)
+
+     def to_csv_file(
+         self,
+         filename: str = "log.csv",
+         *,
+         dir_exist_ok: bool = True,
+         timestamp: bool = True,
+         time_prefix: bool = False,
+         verbose: bool = True,
+         clear: bool = True,
+         flatten_=True,
+         sep="[^_^]",
+         index=False,
+         random_hash_digits=3,
+         **kwargs,
+     ) -> None:
+         """Exports log entries to a CSV file with customizable options.
+
+         Args:
+             filename: Filename for the exported CSV. Defaults to 'log.csv'.
+             dir_exist_ok: If True, allows writing to an existing directory.
+             timestamp: If True, appends a timestamp to the filename.
+             time_prefix: If True, places the timestamp prefix before the filename.
+             verbose: If True, prints a message upon successful save.
+             clear: If True, clears the log deque after saving.
+             flatten_: If True, flattens dictionary data for serialization.
+             sep: Separator for flattening nested dictionaries.
+             index: If True, includes an index column in the CSV.
+             random_hash_digits: Number of random hash digits appended to the filename.
+             **kwargs: Additional arguments for DataFrame.to_csv().
+         """
+         if not filename.endswith(".csv"):
+             filename += ".csv"
+
+         filepath = SysUtil.create_path(
+             self.persist_path,
+             filename,
+             timestamp=timestamp,
+             dir_exist_ok=dir_exist_ok,
+             time_prefix=time_prefix,
+             random_hash_digits=random_hash_digits,
+         )
+         try:
+             logs = [log.serialize(flatten_=flatten_, sep=sep) for log in self.log]
+             df = convert.to_df(convert.to_list(logs, flatten=True))
+             df.to_csv(filepath, index=index, **kwargs)
+             if verbose:
+                 print(f"{len(self.log)} logs saved to {filepath}")
+             if clear:
+                 self.log.clear()
+         except Exception as e:
+             raise ValueError(f"Error in saving to csv: {e}") from e
+
+     def to_json_file(
+         self,
+         filename: str = "log.json",
+         *,
+         dir_exist_ok: bool = True,
+         timestamp: bool = True,
+         time_prefix: bool = False,
+         verbose: bool = True,
+         clear: bool = True,
+         flatten_=True,
+         sep="[^_^]",
+         index=False,
+         random_hash_digits=3,
+         **kwargs,
+     ) -> None:
+         """Exports log entries to a JSON file with customizable options.
+
+         Args:
+             filename: Filename for the exported JSON. Defaults to 'log.json'.
+             dir_exist_ok: If True, allows writing to an existing directory.
+             timestamp: If True, appends a timestamp to the filename.
+             time_prefix: If True, places the timestamp prefix before the filename.
+             verbose: If True, prints a message upon successful save.
+             clear: If True, clears the log deque after saving.
+             flatten_: If True, flattens dictionary data for serialization.
+             sep: Separator for flattening nested dictionaries.
+             index: If True, includes an index in the JSON.
+             random_hash_digits: Number of random hash digits appended to the filename.
+             **kwargs: Additional arguments for DataFrame.to_json().
+         """
+         if not filename.endswith(".json"):
+             filename += ".json"
+
+         filepath = SysUtil.create_path(
+             self.persist_path,
+             filename,
+             timestamp=timestamp,
+             dir_exist_ok=dir_exist_ok,
+             time_prefix=time_prefix,
+             random_hash_digits=random_hash_digits,
+         )
+
+         try:
+             logs = [log.serialize(flatten_=flatten_, sep=sep) for log in self.log]
+             df = convert.to_df(convert.to_list(logs, flatten=True))
+             df.to_json(filepath, index=index, **kwargs)
+             if verbose:
+                 print(f"{len(self.log)} logs saved to {filepath}")
+             if clear:
+                 self.log.clear()
+         except Exception as e:
+             raise ValueError(f"Error in saving to json: {e}") from e
+
+     def save_at_exit(self):
+         """
+         Saves any unsaved logs to a file upon program termination.
+
+         Registered as an at-exit handler during initialization, this safeguard helps
+         prevent the loss of log data due to unexpected shutdowns or program exits.
+         Logs are written to a CSV file named 'unsaved_logs.csv' in the designated
+         persist directory, and only if there are unsaved logs present at the time of
+         program exit.
+
+         Note: This method does not clear the logs after saving, allowing for manual
+         review or recovery after the program has terminated.
+         """
+         if self.log:
+             self.to_csv_file("unsaved_logs.csv", clear=False)
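
For orientation, here is a minimal usage sketch of the new DataLogger and DLog, using only the API shown in the hunk above; the module path follows the new file location, and the data values and filenames are purely illustrative:

    from lionagi.core.generic.data_logger import DataLogger, DLog

    # Collect a few input/output pairs; each append stores a DLog entry in a deque.
    logger = DataLogger(persist_path="data/logs/", filename="demo")
    logger.append(input_data={"prompt": "hi"}, output_data={"reply": "hello"})
    logger.append(input_data="raw text in", output_data="raw text out")

    # serialize() stringifies payloads, flattens dict data, and adds a timestamp.
    rows = [entry.serialize(flatten_=True, sep="[^_^]") for entry in logger.log]

    # Round-trip a single row through DLog.deserialize (as read back from disk).
    restored = DLog.deserialize(input_str=rows[0]["input_data"],
                                output_str=rows[0]["output_data"])

    # Persist to CSV without clearing the in-memory deque.
    logger.to_csv_file("demo_logs.csv", clear=False, verbose=True)
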
lionagi/core/generic/edge.py (new)
@@ -0,0 +1,110 @@
+ """
+ Module for representing conditions and edges between nodes in a graph structure.
+
+ This module provides the base for creating and managing edges that connect nodes
+ within a graph. It includes support for conditional edges, allowing the dynamic
+ evaluation of connections based on custom logic.
+ """
+
+ from typing import Any
+ from pydantic import Field, field_validator
+ from lionagi.core.generic.component import BaseComponent, BaseNode
+ from lionagi.core.generic.condition import Condition
+
+
+ class Edge(BaseComponent):
+     """
+     Represents an edge between two nodes, potentially with a condition.
+
+     Attributes:
+         head (str): The identifier of the head node of the edge.
+         tail (str): The identifier of the tail node of the edge.
+         condition (Condition | None): An optional condition that must be met
+             for the edge to be considered active.
+         label (str | None): An optional label for the edge.
+         bundle (bool): A flag indicating if the edge is bundled.
+
+     Methods:
+         check_condition: Evaluates if the condition associated with the edge is met.
+     """
+
+     head: str = Field(
+         title="Head",
+         description="The identifier of the head node of the edge.",
+     )
+     tail: str = Field(
+         title="Tail",
+         description="The identifier of the tail node of the edge.",
+     )
+     condition: Condition | None = Field(
+         default=None,
+         description="An optional condition that must be met for the edge to be considered active.",
+     )
+     label: str | None = Field(
+         default=None,
+         description="An optional label for the edge.",
+     )
+     bundle: bool = Field(
+         default=False,
+         description="A flag indicating if the edge is bundled.",
+     )
+
+     @field_validator("head", "tail", mode="before")
+     def _validate_head_tail(cls, value):
+         """
+         Validates the head and tail fields to ensure they are valid node identifiers.
+
+         Args:
+             value: The value of the field being validated; a BaseNode is reduced
+                 to its identifier.
+
+         Returns:
+             The validated value, ensuring it is a valid identifier.
+         """
+         if isinstance(value, BaseNode):
+             return value.id_
+         return value
+
+     def check_condition(self, obj: dict[str, Any]) -> bool:
+         """
+         Evaluates if the condition associated with the edge is met.
+
+         Args:
+             obj (dict[str, Any]): The context object used for condition evaluation.
+
+         Returns:
+             bool: True if the condition is met, False otherwise.
+
+         Raises:
+             ValueError: If the condition is not set.
+         """
+         if not self.condition:
+             raise ValueError("The condition for the edge is not set.")
+         return self.condition(obj)
+
+     def __str__(self) -> str:
+         """Returns a simple string representation of the Edge."""
+         return (
+             f"Edge (id_={self.id_}, from={self.head}, to={self.tail}, "
+             f"label={self.label})"
+         )
+
+     def __repr__(self) -> str:
+         """
+         Returns a detailed string representation of the Edge.
+
+         Examples:
+             >>> edge = Edge(head="node1", tail="node2")
+             >>> repr(edge)
+             'Edge(id_=..., from=node1, to=node2, label=None)'
+         """
+         return (
+             f"Edge(id_={self.id_}, from={self.head}, to={self.tail}, "
+             f"label={self.label})"
+         )
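
A quick sketch of how the new Edge can be constructed and inspected, based only on the fields and methods in this hunk; the node identifiers are illustrative, and attaching a concrete Condition subclass (defined in lionagi.core.generic.condition) is left out:

    from lionagi.core.generic.edge import Edge

    # head/tail accept plain IDs; BaseNode instances are reduced to their id_.
    edge = Edge(head="node1", tail="node2", label="next")
    print(edge)   # Edge (id_=..., from=node1, to=node2, label=next)

    # check_condition requires a Condition to be attached; otherwise it raises.
    try:
        edge.check_condition({"state": "ready"})
    except ValueError as e:
        print(e)   # The condition for the edge is not set.
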
lionagi/core/generic/mail.py (new)
@@ -0,0 +1,90 @@
+ """
+ This module defines classes for representing mail packages in a messaging system.
+
+ The module includes the following classes:
+ - MailPackageCategory: An enumeration of categories for mail packages.
+ - Package: Wraps a payload together with its category for delivery.
+ - Mail: Represents a mail message sent from one component to another.
+ """
+
+ from typing import Any
+ from enum import Enum
+
+ from pydantic import Field, field_validator
+
+ from lionagi.core.generic.component import BaseComponent
+
+
+ class MailPackageCategory(str, Enum):
+     """
+     Defines categories for mail packages in a messaging system.
+
+     Attributes:
+         MESSAGES: Represents general messages.
+         TOOL: Represents tools.
+         SERVICE: Represents services.
+         MODEL: Represents models.
+         NODE: Represents nodes.
+         NODE_LIST: Represents a list of nodes.
+         NODE_ID: Represents a node's ID.
+         START: Represents a start signal or value.
+         END: Represents an end signal or value.
+         CONDITION: Represents a condition.
+     """
+
+     MESSAGES = "messages"
+     TOOL = "tool"
+     SERVICE = "service"
+     MODEL = "model"
+     NODE = "node"
+     NODE_LIST = "node_list"
+     NODE_ID = "node_id"
+     START = "start"
+     END = "end"
+     CONDITION = "condition"
+
+
+ class Package(BaseComponent):
+     category: MailPackageCategory = Field(
+         ..., title="Category", description="The category of the mail package."
+     )
+
+     package: Any = Field(
+         ..., title="Package", description="The package to send in the mail."
+     )
+
+
+ class Mail(BaseComponent):
+     """
+     Represents a mail message sent from one component to another within
+     the system.
+
+     Attributes:
+         sender (str): The ID of the sender node.
+         recipient (str): The ID of the recipient node.
+         packages (dict[str, Package]): The packages to send in the mail.
+     """
+
+     sender: str = Field(
+         title="Sender",
+         description="The id of the sender node.",
+     )
+
+     recipient: str = Field(
+         title="Recipient",
+         description="The id of the recipient node.",
+     )
+
+     packages: dict[str, Package] = Field(
+         title="Packages",
+         default_factory=dict,
+         description="The packages to send in the mail.",
+     )
+
+     @field_validator("sender", "recipient", mode="before")
+     def _validate_sender_recipient(cls, value):
+         if isinstance(value, BaseComponent):
+             return value.id_
+         return value
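
To illustrate the new Mail/Package shapes, a minimal sketch using only the fields above; sender and recipient IDs are illustrative strings (a BaseComponent may also be passed and is reduced to its id_ by the validator), and keying the packages dict by the package's own id_ is an assumption, since the hunk does not specify the expected key:

    from lionagi.core.generic.mail import Mail, MailPackageCategory, Package

    pkg = Package(category=MailPackageCategory.MESSAGES, package={"text": "hello"})
    mail = Mail(sender="branch_1", recipient="branch_2", packages={pkg.id_: pkg})
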
lionagi/core/generic/mailbox.py (new)
@@ -0,0 +1,36 @@
+ from collections import deque
+ from pydantic import Field
+ from pydantic.dataclasses import dataclass
+
+ from lionagi.core.generic.mail import Mail
+
+
+ @dataclass
+ class MailBox:
+
+     pile: dict[str, Mail] = Field(
+         default_factory=dict, description="The pile of all mails - {mail_id: Mail}"
+     )
+
+     sequence_in: dict[str, deque] = Field(
+         default_factory=dict,
+         description="The sequence of all incoming mails - {sender_id: deque[mail_id]}",
+     )
+
+     sequence_out: deque = Field(
+         default_factory=deque,
+         description="The sequence of all outgoing mails - deque[mail_id]",
+     )
+
+     def __str__(self) -> str:
+         """
+         Returns a string representation of the MailBox instance.
+
+         Returns:
+             str: A string describing the number of pending incoming and
+             outgoing mails in the MailBox.
+         """
+         incoming = sum(len(seq) for seq in self.sequence_in.values())
+         return (
+             f"MailBox with {incoming} pending incoming mails and "
+             f"{len(self.sequence_out)} pending outgoing mails."
+         )
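
And a small sketch of the MailBox container, again restricted to the fields defined above; the mail construction mirrors the previous sketch and the IDs are illustrative, so treat this as an assumption about intended usage rather than the library's documented API:

    from collections import deque
    from lionagi.core.generic.mail import Mail, MailPackageCategory, Package
    from lionagi.core.generic.mailbox import MailBox

    pkg = Package(category=MailPackageCategory.NODE_ID, package="node_42")
    mail = Mail(sender="branch_1", recipient="branch_2", packages={pkg.id_: pkg})

    box = MailBox()
    box.pile[mail.id_] = mail                                   # register the mail itself
    box.sequence_in.setdefault(mail.sender, deque()).append(mail.id_)
    print(box)   # MailBox with 1 pending incoming mails and 0 pending outgoing mails.
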