lionagi 0.0.316__py3-none-any.whl → 0.1.1__py3-none-any.whl

Files changed (157)
  1. lionagi/core/__init__.py +19 -8
  2. lionagi/core/agent/__init__.py +0 -3
  3. lionagi/core/agent/base_agent.py +25 -30
  4. lionagi/core/branch/__init__.py +0 -4
  5. lionagi/core/branch/{base_branch.py → base.py} +12 -13
  6. lionagi/core/branch/branch.py +22 -19
  7. lionagi/core/branch/executable_branch.py +0 -347
  8. lionagi/core/branch/{branch_flow_mixin.py → flow_mixin.py} +5 -5
  9. lionagi/core/direct/__init__.py +10 -1
  10. lionagi/core/direct/cot.py +61 -26
  11. lionagi/core/direct/plan.py +10 -8
  12. lionagi/core/direct/predict.py +5 -5
  13. lionagi/core/direct/react.py +8 -8
  14. lionagi/core/direct/score.py +4 -4
  15. lionagi/core/direct/select.py +4 -4
  16. lionagi/core/direct/utils.py +7 -4
  17. lionagi/core/direct/vote.py +2 -2
  18. lionagi/core/execute/base_executor.py +47 -0
  19. lionagi/core/execute/branch_executor.py +296 -0
  20. lionagi/core/execute/instruction_map_executor.py +179 -0
  21. lionagi/core/execute/neo4j_executor.py +381 -0
  22. lionagi/core/execute/structure_executor.py +314 -0
  23. lionagi/core/flow/monoflow/ReAct.py +20 -20
  24. lionagi/core/flow/monoflow/chat.py +6 -6
  25. lionagi/core/flow/monoflow/chat_mixin.py +23 -33
  26. lionagi/core/flow/monoflow/followup.py +14 -15
  27. lionagi/core/flow/polyflow/chat.py +15 -12
  28. lionagi/core/{prompt/action_template.py → form/action_form.py} +2 -2
  29. lionagi/core/{prompt → form}/field_validator.py +40 -31
  30. lionagi/core/form/form.py +302 -0
  31. lionagi/core/form/mixin.py +214 -0
  32. lionagi/core/{prompt/scored_template.py → form/scored_form.py} +2 -2
  33. lionagi/core/generic/__init__.py +37 -0
  34. lionagi/core/generic/action.py +26 -0
  35. lionagi/core/generic/component.py +455 -0
  36. lionagi/core/generic/condition.py +44 -0
  37. lionagi/core/generic/data_logger.py +305 -0
  38. lionagi/core/generic/edge.py +162 -0
  39. lionagi/core/generic/mail.py +90 -0
  40. lionagi/core/generic/mailbox.py +36 -0
  41. lionagi/core/generic/node.py +285 -0
  42. lionagi/core/generic/relation.py +70 -0
  43. lionagi/core/generic/signal.py +22 -0
  44. lionagi/core/generic/structure.py +362 -0
  45. lionagi/core/generic/transfer.py +20 -0
  46. lionagi/core/generic/work.py +40 -0
  47. lionagi/core/graph/graph.py +126 -0
  48. lionagi/core/graph/tree.py +190 -0
  49. lionagi/core/mail/__init__.py +0 -8
  50. lionagi/core/mail/mail_manager.py +15 -12
  51. lionagi/core/mail/schema.py +9 -2
  52. lionagi/core/messages/__init__.py +0 -3
  53. lionagi/core/messages/schema.py +17 -225
  54. lionagi/core/session/__init__.py +0 -3
  55. lionagi/core/session/session.py +24 -22
  56. lionagi/core/tool/__init__.py +3 -1
  57. lionagi/core/tool/tool.py +28 -0
  58. lionagi/core/tool/tool_manager.py +75 -75
  59. lionagi/experimental/directive/evaluator/__init__.py +0 -0
  60. lionagi/experimental/directive/evaluator/ast_evaluator.py +115 -0
  61. lionagi/experimental/directive/evaluator/base_evaluator.py +202 -0
  62. lionagi/experimental/directive/evaluator/sandbox_.py +14 -0
  63. lionagi/experimental/directive/evaluator/script_engine.py +83 -0
  64. lionagi/experimental/directive/parser/__init__.py +0 -0
  65. lionagi/experimental/directive/parser/base_parser.py +215 -0
  66. lionagi/experimental/directive/schema.py +36 -0
  67. lionagi/experimental/directive/template_/__init__.py +0 -0
  68. lionagi/experimental/directive/template_/base_template.py +63 -0
  69. lionagi/experimental/tool/__init__.py +0 -0
  70. lionagi/experimental/tool/function_calling.py +43 -0
  71. lionagi/experimental/tool/manual.py +66 -0
  72. lionagi/experimental/tool/schema.py +59 -0
  73. lionagi/experimental/tool/tool_manager.py +138 -0
  74. lionagi/experimental/tool/util.py +16 -0
  75. lionagi/experimental/work/__init__.py +0 -0
  76. lionagi/experimental/work/_logger.py +25 -0
  77. lionagi/experimental/work/exchange.py +0 -0
  78. lionagi/experimental/work/schema.py +30 -0
  79. lionagi/experimental/work/tests.py +72 -0
  80. lionagi/experimental/work/util.py +0 -0
  81. lionagi/experimental/work/work_function.py +89 -0
  82. lionagi/experimental/work/worker.py +12 -0
  83. lionagi/integrations/bridge/autogen_/__init__.py +0 -0
  84. lionagi/integrations/bridge/autogen_/autogen_.py +124 -0
  85. lionagi/integrations/bridge/llamaindex_/get_index.py +294 -0
  86. lionagi/integrations/bridge/llamaindex_/llama_pack.py +227 -0
  87. lionagi/integrations/bridge/transformers_/__init__.py +0 -0
  88. lionagi/integrations/bridge/transformers_/install_.py +36 -0
  89. lionagi/integrations/chunker/chunk.py +7 -7
  90. lionagi/integrations/config/oai_configs.py +5 -5
  91. lionagi/integrations/config/ollama_configs.py +1 -1
  92. lionagi/integrations/config/openrouter_configs.py +1 -1
  93. lionagi/integrations/loader/load.py +6 -6
  94. lionagi/integrations/loader/load_util.py +8 -8
  95. lionagi/integrations/storage/__init__.py +3 -0
  96. lionagi/integrations/storage/neo4j.py +673 -0
  97. lionagi/integrations/storage/storage_util.py +289 -0
  98. lionagi/integrations/storage/to_csv.py +63 -0
  99. lionagi/integrations/storage/to_excel.py +67 -0
  100. lionagi/libs/ln_api.py +3 -3
  101. lionagi/libs/ln_knowledge_graph.py +405 -0
  102. lionagi/libs/ln_parse.py +43 -6
  103. lionagi/libs/ln_queue.py +101 -0
  104. lionagi/libs/ln_tokenizer.py +57 -0
  105. lionagi/libs/ln_validate.py +288 -0
  106. lionagi/libs/sys_util.py +29 -7
  107. lionagi/lions/__init__.py +0 -0
  108. lionagi/lions/coder/__init__.py +0 -0
  109. lionagi/lions/coder/add_feature.py +20 -0
  110. lionagi/lions/coder/base_prompts.py +22 -0
  111. lionagi/lions/coder/coder.py +121 -0
  112. lionagi/lions/coder/util.py +91 -0
  113. lionagi/lions/researcher/__init__.py +0 -0
  114. lionagi/lions/researcher/data_source/__init__.py +0 -0
  115. lionagi/lions/researcher/data_source/finhub_.py +191 -0
  116. lionagi/lions/researcher/data_source/google_.py +199 -0
  117. lionagi/lions/researcher/data_source/wiki_.py +96 -0
  118. lionagi/lions/researcher/data_source/yfinance_.py +21 -0
  119. lionagi/tests/integrations/__init__.py +0 -0
  120. lionagi/tests/libs/__init__.py +0 -0
  121. lionagi/tests/libs/test_async.py +0 -0
  122. lionagi/tests/libs/test_field_validators.py +353 -0
  123. lionagi/tests/libs/test_queue.py +67 -0
  124. lionagi/tests/test_core/test_base_branch.py +0 -1
  125. lionagi/tests/test_core/test_branch.py +2 -0
  126. lionagi/tests/test_core/test_session_base_util.py +1 -0
  127. lionagi/version.py +1 -1
  128. {lionagi-0.0.316.dist-info → lionagi-0.1.1.dist-info}/METADATA +1 -1
  129. lionagi-0.1.1.dist-info/RECORD +190 -0
  130. lionagi/core/prompt/prompt_template.py +0 -312
  131. lionagi/core/schema/__init__.py +0 -22
  132. lionagi/core/schema/action_node.py +0 -29
  133. lionagi/core/schema/base_mixin.py +0 -296
  134. lionagi/core/schema/base_node.py +0 -199
  135. lionagi/core/schema/condition.py +0 -24
  136. lionagi/core/schema/data_logger.py +0 -354
  137. lionagi/core/schema/data_node.py +0 -93
  138. lionagi/core/schema/prompt_template.py +0 -67
  139. lionagi/core/schema/structure.py +0 -912
  140. lionagi/core/tool/manual.py +0 -1
  141. lionagi-0.0.316.dist-info/RECORD +0 -121
  142. /lionagi/core/{branch/base → execute}/__init__.py +0 -0
  143. /lionagi/core/flow/{base/baseflow.py → baseflow.py} +0 -0
  144. /lionagi/core/flow/{base/__init__.py → mono_chat_mixin.py} +0 -0
  145. /lionagi/core/{prompt → form}/__init__.py +0 -0
  146. /lionagi/{tests/test_integrations → core/graph}/__init__.py +0 -0
  147. /lionagi/{tests/test_libs → experimental}/__init__.py +0 -0
  148. /lionagi/{tests/test_libs/test_async.py → experimental/directive/__init__.py} +0 -0
  149. /lionagi/tests/{test_libs → libs}/test_api.py +0 -0
  150. /lionagi/tests/{test_libs → libs}/test_convert.py +0 -0
  151. /lionagi/tests/{test_libs → libs}/test_func_call.py +0 -0
  152. /lionagi/tests/{test_libs → libs}/test_nested.py +0 -0
  153. /lionagi/tests/{test_libs → libs}/test_parse.py +0 -0
  154. /lionagi/tests/{test_libs → libs}/test_sys_util.py +0 -0
  155. {lionagi-0.0.316.dist-info → lionagi-0.1.1.dist-info}/LICENSE +0 -0
  156. {lionagi-0.0.316.dist-info → lionagi-0.1.1.dist-info}/WHEEL +0 -0
  157. {lionagi-0.0.316.dist-info → lionagi-0.1.1.dist-info}/top_level.txt +0 -0
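
The rename map above implies matching import-path moves. A hedged sketch of the migration, with module paths inferred only from the file renames listed here (these are assumptions drawn from the paths, not from release notes):

    # 0.1.1 import paths inferred from the renames above; the 0.0.316
    # equivalents are shown in comments. Paths are assumptions, not docs.
    import lionagi.core.branch.base            # was lionagi.core.branch.base_branch
    import lionagi.core.branch.flow_mixin      # was lionagi.core.branch.branch_flow_mixin
    import lionagi.core.form.field_validator   # was lionagi.core.prompt.field_validator
    import lionagi.core.form.action_form       # was lionagi.core.prompt.action_template
    import lionagi.tests.libs.test_api         # was lionagi.tests.test_libs.test_api

The hunks that follow show the four modules removed outright in this release: lionagi/core/schema/base_node.py, condition.py, data_logger.py, and data_node.py.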
lionagi/core/schema/base_node.py
@@ -1,199 +0,0 @@
- """
- Module for base component model definition using Pydantic.
- """
-
- from abc import ABC
- from typing import Any, TypeVar
-
- from pydantic import Field, field_serializer, AliasChoices
- from lionagi.libs import SysUtil, convert
-
- from .base_mixin import BaseComponentMixin
-
- T = TypeVar("T", bound="BaseComponent")
-
-
- class BaseComponent(BaseComponentMixin, ABC):
-     """
-     A base component model that provides common attributes and utility methods
-     for metadata management. It includes functionality to interact with metadata
-     in various ways, such as retrieving, modifying, and validating metadata keys
-     and values.
-
-     Attributes:
-         id_ (str): Unique identifier, defaulted using SysUtil.create_id.
-         timestamp (str | None): Timestamp of creation or modification.
-         metadata (dict[str, Any]): Metadata associated with the component.
-     """
-
-     id_: str = Field(default_factory=SysUtil.create_id, alias="node_id")
-     timestamp: str | None = Field(default_factory=SysUtil.get_timestamp)
-     metadata: dict[str, Any] = Field(default_factory=dict, alias="meta")
-
-     class Config:
-         """Model configuration settings."""
-
-         extra = "allow"
-         arbitrary_types_allowed = True
-         populate_by_name = True
-         validate_assignment = True
-         validate_return = True
-         str_strip_whitespace = True
-
-     @classmethod
-     def class_name(cls) -> str:
-         """Retrieves the name of the class."""
-         return cls.__name__
-
-     @property
-     def property_schema(self):
-         return self.model_json_schema()["properties"]
-
-     @property
-     def property_keys(self):
-         return list(self.model_json_schema()["properties"].keys())
-
-     def copy(self, *args, **kwargs) -> T:
-         """
-         Creates a deep copy of the instance, with an option to update specific fields.
-
-         Args:
-             *args: Variable length argument list for additional options.
-             **kwargs: Arbitrary keyword arguments specifying updates to the instance.
-
-         Returns:
-             BaseComponent: A new instance as a deep copy of the original, with
-                 updates applied.
-         """
-         return self.model_copy(*args, **kwargs)
-
-     def __repr__(self):
-         return f"{self.__class__.__name__}({self.to_dict()})"
-
-
- class BaseNode(BaseComponent):
-     """
-     A base class for nodes, representing a fundamental unit in a graph or tree
-     structure, extending BaseComponent with content handling capabilities.
-
-     Attributes:
-         content: The content of the node, which can be a string, a dictionary
-             with any structure, None, or any other type, flexible enough to
-             accommodate various kinds of content. This attribute also supports
-             aliasing through validation_alias for compatibility with different
-             naming conventions such as "text", "page_content", or "chunk_content".
-     """
-
-     content: str | dict[str, Any] | None | Any = Field(
-         default=None,
-         validation_alias=AliasChoices("text", "page_content", "chunk_content"),
-     )
-
-     @property
-     def content_str(self):
-         """
-         Attempts to serialize the node's content to a string.
-
-         Returns:
-             str: The serialized content string. If serialization fails, returns
-                 "null" and logs an error message indicating the content is not
-                 serializable.
-         """
-         try:
-             return convert.to_str(self.content)
-         except ValueError:
-             print(
-                 f"Content is not serializable for Node: {self.id_}, defaulting to 'null'"
-             )
-             return "null"
-
-     def __str__(self):
-         """
-         Provides a string representation of the BaseNode instance, including a
-         content preview, a metadata preview, and the timestamp if present.
-
-         Returns:
-             str: A string representation of the instance.
-         """
-         timestamp = f" ({self.timestamp})" if self.timestamp else ""
-         if self.content:
-             content_preview = (
-                 f"{str(self.content)[:50]}..."
-                 if len(str(self.content)) > 50
-                 else str(self.content)
-             )
-         else:
-             content_preview = ""
-         meta_preview = (
-             f"{str(self.metadata)[:50]}..."
-             if len(str(self.metadata)) > 50
-             else str(self.metadata)
-         )
-         return (
-             f"{self.class_name()}({self.id_}, {content_preview}, {meta_preview},"
-             f"{timestamp})"
-         )
-
-
- class BaseRelatableNode(BaseNode):
-     """
-     Extends BaseNode with functionality to manage relationships with other nodes.
-
-     Attributes:
-         related_nodes: A list of identifiers (str) for nodes related to this node.
-         label: An optional label for the node, providing additional context or
-             classification.
-     """
-
-     related_nodes: list[str] = Field(default_factory=list)
-     label: str | None = None
-
-     def add_related_node(self, node_id: str) -> bool:
-         """
-         Adds a node to the list of related nodes if it is not already present.
-
-         Args:
-             node_id: The identifier of the node to add.
-
-         Returns:
-             bool: True if the node was added, False if it was already in the list.
-         """
-         if node_id not in self.related_nodes:
-             self.related_nodes.append(node_id)
-             return True
-         return False
-
-     def remove_related_node(self, node_id: str) -> bool:
-         """
-         Removes a node from the list of related nodes if it is present.
-
-         Args:
-             node_id: The identifier of the node to remove.
-
-         Returns:
-             bool: True if the node was removed, False if it was not found in the list.
-         """
-         if node_id in self.related_nodes:
-             self.related_nodes.remove(node_id)
-             return True
-         return False
-
-
- class Tool(BaseRelatableNode):
-     """
-     Represents a tool, extending BaseRelatableNode with specific functionalities
-     and configurations.
-
-     Attributes:
-         func: The main function or capability of the tool.
-         schema_: An optional schema defining the structure and constraints of
-             the data the tool works with.
-         manual: Optional documentation or manual for using the tool.
-         parser: An optional parser associated with the tool for data processing
-             or interpretation.
-     """
-
-     func: Any
-     schema_: dict | None = None
-     manual: Any | None = None
-     parser: Any | None = None
-
-     @field_serializer("func")
-     def serialize_func(self, func):
-         return func.__name__
-
-
- TOOL_TYPE = bool | Tool | str | list[Tool | str | dict] | dict
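
For reference, a minimal sketch of how these removed classes were typically used (pydantic v2 behavior assumed; `to_dict` comes from the also-removed BaseComponentMixin, and the assertions reflect our reading of the field definitions above):

    from lionagi.core.schema.base_node import BaseNode, Tool  # 0.0.316 path, gone in 0.1.1

    # "text" and "meta" are validation aliases for content and metadata.
    node = BaseNode(text="hello world", meta={"source": "demo"})
    assert node.content_str == "hello world"

    def add(x: int, y: int) -> int:
        return x + y

    # The field_serializer on `func` dumps only the function's name.
    tool = Tool(func=add, schema_={"name": "add"})
    assert tool.model_dump()["func"] == "add"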
lionagi/core/schema/condition.py
@@ -1,24 +0,0 @@
- from abc import ABC, abstractmethod
- from enum import Enum
-
-
- class SourceType(str, Enum):
-     STRUCTURE = "structure"
-     EXECUTABLE = "executable"
-
-
- class Condition(ABC):
-     def __init__(self, source_type):
-         try:
-             if isinstance(source_type, str):
-                 source_type = SourceType(source_type)
-             if isinstance(source_type, SourceType):
-                 self.source_type = source_type
-             else:
-                 raise ValueError
-         except ValueError:
-             raise ValueError(
-                 f"Invalid source_type. Valid source types are {list(SourceType)}"
-             )
-
-     @abstractmethod
-     def __call__(self, source_obj):
-         pass
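
A hypothetical subclass illustrating the protocol this ABC defined (the class name and predicate here are our own, not from the codebase):

    class SourceHasLabel(Condition):
        """Example condition: passes when the source object carries a label."""

        def __init__(self):
            super().__init__("structure")  # equivalently SourceType.STRUCTURE

        def __call__(self, source_obj) -> bool:
            # Conditions are callables evaluated against a source object.
            return getattr(source_obj, "label", None) is not None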
lionagi/core/schema/data_logger.py
@@ -1,354 +0,0 @@
- import atexit
- from collections import deque
- from dataclasses import dataclass
- from pathlib import Path
- from typing import Any, Dict, List
-
- from lionagi.libs import SysUtil, convert, nested
-
-
- # TODO: there should be a global data logger, under setting
-
-
- @dataclass
- class DLog:
-     """
-     Represents a log entry in a structured logging system, encapsulating both
-     the input to and output from a data processing operation, along with an
-     automatically generated timestamp indicating when the log entry was created.
-
-     The primary use of this class is to provide a standardized format for
-     logging data transformations or interactions within an application,
-     facilitating analysis and debugging by maintaining a clear record of data
-     states before and after specific operations.
-
-     Attributes:
-         input_data (Any): The data received by the operation. This attribute
-             can be of any type, reflecting the flexible nature of input data
-             to various processes.
-         output_data (Any): The data produced by the operation. Like
-             `input_data`, this attribute supports any type, accommodating the
-             diverse outputs that different operations may generate.
-
-     Methods:
-         serialize: Converts the instance into a dictionary suitable for
-             serialization, appending a timestamp reflecting the current time.
-     """
-
-     input_data: Any
-     output_data: Any
-
-     def serialize(self, *, flatten_=True, sep="[^_^]") -> Dict[str, Any]:
-         """
-         Converts the DLog instance to a dictionary suitable for serialization.
-         This method is particularly useful for exporting log entries to formats
-         like JSON or CSV. In addition to the data attributes, it appends a
-         timestamp capturing the exact time the log entry was serialized.
-
-         Returns:
-             Dict[str, Any]: A dictionary representation of the DLog instance,
-                 including 'input_data', 'output_data', and 'timestamp' keys.
-         """
-         log_dict = {}
-
-         if flatten_:
-             if isinstance(self.input_data, dict):
-                 log_dict["input_data"] = convert.to_str(
-                     nested.flatten(self.input_data, sep=sep)
-                 )
-             if isinstance(self.output_data, dict):
-                 log_dict["output_data"] = convert.to_str(
-                     nested.flatten(self.output_data, sep=sep)
-                 )
-
-         else:
-             log_dict["input_data"] = self.input_data
-             log_dict["output_data"] = self.output_data
-
-         log_dict["timestamp"] = SysUtil.get_timestamp()
-
-         return log_dict
-
-     @classmethod
-     def deserialize(
-         cls, *, input_str, output_str, unflatten_=True, sep="[^_^]"
-     ) -> "DLog":
-         """
-         Reconstructs a DLog instance from serialized input and output strings,
-         unflattening nested structures that were flattened during serialization.
-
-         Note: "[^_^]" is a reserved separator; do not use it in dictionary
-         keys, otherwise the structure will not be preserved.
-
-         Returns:
-             DLog: A DLog instance rebuilt from the serialized data.
-         """
-         input_data = ""
-         output_data = ""
-
-         if unflatten_:
-             input_data = nested.unflatten(convert.to_dict(input_str), sep=sep)
-             output_data = nested.unflatten(convert.to_dict(output_str), sep=sep)
-
-         else:
-             input_data = input_str
-             output_data = output_str
-
-         return cls(input_data=input_data, output_data=output_data)
-
-
- class DataLogger:
-     """
-     Manages the accumulation, structuring, and persistence of log entries
-     pertaining to data processing activities within an application. The
-     datalogger is designed to capture input-output data pairs across
-     operations, offering functionality for exporting these logs to disk in
-     both CSV and JSON formats.
-
-     The class ensures that log entries are stored in an orderly fashion and
-     can be easily retrieved or persisted for analysis, debugging, or
-     record-keeping purposes. It supports customizable file naming, directory
-     management, and automatic log saving at program exit, among other features.
-
-     Attributes:
-         persist_path (Path): The filesystem path to the directory where log
-             files will be saved. Defaults to a subdirectory 'data/logs/'
-             within the current working directory.
-         log (Deque[Dict]): A deque object that acts as the container for log
-             entries. Each log entry is stored as a dictionary, facilitating
-             easy conversion to various data formats.
-         filename (str): The base name used for log files when saved. The
-             actual filepath may include a timestamp or other modifiers based
-             on the class's configuration.
-
-     Methods:
-         append: Adds a new log entry to the datalogger.
-         to_csv_file: Exports accumulated log entries to a CSV file.
-         to_json_file: Exports accumulated log entries to a JSON file.
-         save_at_exit: Ensures that unsaved log entries are persisted to a CSV
-             file when the program terminates.
-
-     Usage Example:
-         >>> datalogger = DataLogger(persist_path='my/logs/directory', filename='process_logs')
-         >>> datalogger.append(input_data="Example input", output_data="Example output")
-         >>> datalogger.to_csv_file('finalized_logs.csv', clear=True)
-
-     This example demonstrates initializing a `DataLogger` with a custom
-     directory and filename, appending a log entry, and then exporting the log
-     to a CSV file.
-     """
-
-     def __init__(
-         self,
-         persist_path: str | Path | None = None,
-         log: List[Dict] | None = None,
-         filename: str | None = None,
-     ) -> None:
-         """
-         Initializes a DataLogger instance, preparing it for structured logging
-         of data processing activities. Allows customization of the storage
-         directory, initial logs, and base filename for exports.
-
-         Args:
-             persist_path (str | Path | None, optional):
-                 The filesystem path to the directory where log files will be
-                 persisted. If not provided, defaults to 'data/logs/' within
-                 the current working directory. This path is used for all
-                 subsequent log export operations.
-             log (list[Dict[str, Any]] | None, optional):
-                 An initial collection of log entries to populate the
-                 datalogger. Each entry should be a dictionary reflecting the
-                 structure used by the datalogger (input, output, timestamp).
-                 If omitted, the datalogger starts empty.
-             filename (str | None, optional):
-                 The base name for exported log files. This name may be
-                 augmented with timestamps and format-specific extensions
-                 during export operations. Defaults to 'log'.
-
-         Registers an at-exit handler to ensure unsaved logs are automatically
-         persisted to a CSV file upon program termination.
-         """
-         self.persist_path = Path(persist_path) if persist_path else Path("data/logs/")
-         self.log = deque(log) if log else deque()
-         self.filename = filename or "log"
-         atexit.register(self.save_at_exit)
-
-     def extend(self, logs) -> None:
-         """Extends the internal log deque with a collection of log entries."""
-         if len(logs) > 0:
-             log1 = convert.to_list(self.log)
-             log1.extend(convert.to_list(logs))
-             self.log = deque(log1)
-
-     def append(self, *, input_data: Any, output_data: Any) -> None:
-         """
-         Appends a new log entry, encapsulating input and output data, to the
-         datalogger's record deque.
-
-         Args:
-             input_data (Any):
-                 Data provided as input to a tracked operation or process.
-             output_data (Any):
-                 Data resulting from the operation, recorded as the output.
-
-         Constructs a log entry from the provided data; a timestamp is added
-         automatically upon serialization.
-         """
-         log_entry = DLog(input_data=input_data, output_data=output_data)
-         self.log.append(log_entry)
-
-     def to_csv_file(
-         self,
-         filename: str = "log.csv",
-         *,
-         dir_exist_ok: bool = True,
-         timestamp: bool = True,
-         time_prefix: bool = False,
-         verbose: bool = True,
-         clear: bool = True,
-         flatten_=True,
-         sep="[^_^]",
-         index=False,
-         **kwargs,
-     ) -> None:
-         """
-         Exports accumulated log entries to a CSV file, with customizable file
-         naming and timestamping options.
-
-         Args:
-             filename (str, optional):
-                 Filename for the CSV output, appended with '.csv' if not
-                 included, saved within the specified persist directory.
-             dir_exist_ok (bool, optional):
-                 If False, raises an error if the directory already exists;
-                 otherwise, writes without error.
-             timestamp (bool, optional):
-                 If True, appends a current timestamp to the filename for
-                 uniqueness.
-             time_prefix (bool, optional):
-                 If True, places the timestamp before the filename; otherwise,
-                 it is suffixed.
-             verbose (bool, optional):
-                 If True, prints a message upon successful file save, detailing
-                 the file path and number of logs saved.
-             clear (bool, optional):
-                 If True, empties the internal log record after saving.
-             **kwargs:
-                 Additional keyword arguments for pandas.DataFrame.to_csv(),
-                 allowing customization of the CSV output, such as excluding
-                 the index.
-
-         Raises a ValueError with an explanatory message if an error occurs
-         during file writing or DataFrame conversion.
-         """
-
-         if not filename.endswith(".csv"):
-             filename += ".csv"
-
-         filepath = SysUtil.create_path(
-             self.persist_path,
-             filename,
-             timestamp=timestamp,
-             dir_exist_ok=dir_exist_ok,
-             time_prefix=time_prefix,
-         )
-         try:
-             logs = [log.serialize(flatten_=flatten_, sep=sep) for log in self.log]
-             df = convert.to_df(convert.to_list(logs, flatten=True))
-             df.to_csv(filepath, index=index, **kwargs)
-             if verbose:
-                 print(f"{len(self.log)} logs saved to {filepath}")
-             if clear:
-                 self.log.clear()
-         except Exception as e:
-             raise ValueError(f"Error in saving to csv: {e}") from e
-
-     def to_json_file(
-         self,
-         filename: str = "log.json",
-         *,
-         dir_exist_ok: bool = True,
-         timestamp: bool = True,
-         time_prefix: bool = False,
-         verbose: bool = True,
-         clear: bool = True,
-         flatten_=True,
-         sep="[^_^]",
-         index=False,
-         **kwargs,
-     ) -> None:
-         """
-         Exports the log entries to a JSON file within the specified persist
-         directory, offering customization for file naming and timestamping.
-
-         Args:
-             filename (str, optional):
-                 The filename for the JSON output; '.json' is appended if not
-                 specified. The file is saved within the designated persist
-                 directory.
-             timestamp (bool, optional):
-                 If True, adds a timestamp to the filename to ensure uniqueness.
-             time_prefix (bool, optional):
-                 Determines the placement of the timestamp in the filename: a
-                 prefix if True, otherwise a suffix.
-             dir_exist_ok (bool, optional):
-                 Allows writing to an existing directory without raising an
-                 error. If False, an error is raised when attempting to write
-                 to an existing directory.
-             verbose (bool, optional):
-                 Prints a message upon successful save, indicating the file
-                 path and number of logs saved.
-             clear (bool, optional):
-                 Clears the log deque after saving, aiding in memory management.
-             **kwargs:
-                 Additional arguments passed to pandas.DataFrame.to_json(),
-                 enabling customization of the JSON output.
-
-         Raises:
-             ValueError: When an error occurs during file writing or DataFrame
-                 conversion, encapsulating the exception with a descriptive
-                 message.
-
-         Examples:
-             Default usage, saving logs to 'log.json' within the persist
-             directory:
-             >>> datalogger.to_json_file()
-             # Save path: 'data/logs/log.json'
-
-             Custom filename without a timestamp, using additional pandas
-             options:
-             >>> datalogger.to_json_file(filename='detailed_log.json', orient='records')
-             # Save path: 'data/logs/detailed_log.json'
-         """
-         if not filename.endswith(".json"):
-             filename += ".json"
-
-         filepath = SysUtil.create_path(
-             self.persist_path,
-             filename,
-             timestamp=timestamp,
-             dir_exist_ok=dir_exist_ok,
-             time_prefix=time_prefix,
-         )
-
-         try:
-             logs = [log.serialize(flatten_=flatten_, sep=sep) for log in self.log]
-             df = convert.to_df(convert.to_list(logs, flatten=True))
-             df.to_json(filepath, index=index, **kwargs)
-             if verbose:
-                 print(f"{len(self.log)} logs saved to {filepath}")
-             if clear:
-                 self.log.clear()
-         except Exception as e:
-             raise ValueError(f"Error in saving to json: {e}") from e
-
-     def save_at_exit(self):
-         """
-         At-exit handler ensuring that any unsaved logs are automatically
-         persisted to a file upon program termination. This safeguard helps
-         prevent the loss of log data due to unexpected shutdowns or program
-         exits.
-
-         The method saves the logs to a CSV file named 'unsaved_logs.csv' in
-         the designated persist directory. This automatic save operation is
-         triggered only if there are unsaved logs present at the time of
-         program exit.
-
-         Note: This method does not clear the logs after saving, allowing for
-         manual review or recovery after the program has terminated.
-         """
-         if self.log:
-             self.to_csv_file("unsaved_logs.csv", clear=False)
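
A minimal usage sketch of the removed datalogger (file and directory names here are illustrative, and the commented values reflect our reading of serialize/deserialize above):

    logger = DataLogger(persist_path="data/logs/", filename="demo")
    logger.append(input_data={"q": "2 + 2"}, output_data={"a": 4})
    logger.to_csv_file("demo.csv", timestamp=False, clear=False)

    # DLog round-trips nested dicts: they are flattened with the reserved
    # "[^_^]" separator on serialize and restored on deserialize.
    entry = DLog(input_data={"a": {"b": 1}}, output_data={"ok": True})
    row = entry.serialize()  # keys: 'input_data', 'output_data', 'timestamp'
    back = DLog.deserialize(input_str=row["input_data"], output_str=row["output_data"])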
lionagi/core/schema/data_node.py
@@ -1,93 +0,0 @@
- from typing import Any
- from .base_node import BaseNode
-
- from lionagi.integrations.bridge import LlamaIndexBridge, LangchainBridge
-
-
- class DataNode(BaseNode):
-     """
-     Represents a data node with extended functionality for integration with
-     llama index and langchain formats.
-
-     This class extends `BaseNode` with methods for converting between DataNode
-     instances and the specific formats used by llama index and langchain,
-     facilitating interoperability with these systems.
-
-     The methods provided allow for serialization to and deserialization from
-     these formats, supporting a variety of use cases.
-     """
-
-     def to_llama_index(self, node_type=None, **kwargs) -> Any:
-         """
-         Converts the node to a format compatible with llama index.
-
-         This method serializes the DataNode into a format recognized by the
-         llama index system, allowing for integration and usage within that
-         ecosystem.
-
-         Args:
-             node_type: The llama index node type to convert to; defaults to None.
-             **kwargs: Additional keyword arguments for customization.
-
-         Returns:
-             Any: The llama index format representation of the node.
-
-         Examples:
-             >>> node = DataNode(content="Example content")
-             >>> llama_index = node.to_llama_index()
-         """
-         return LlamaIndexBridge.to_llama_index_node(self, node_type=node_type, **kwargs)
-
-     def to_langchain(self, **kwargs) -> Any:
-         """
-         Converts the node to a langchain document format.
-
-         This method serializes the DataNode into the document format used by
-         langchain, enabling the node's use within langchain applications and
-         workflows.
-
-         Args:
-             **kwargs: Additional keyword arguments for customization.
-
-         Returns:
-             Any: The langchain document representation of the node.
-
-         Examples:
-             >>> node = DataNode(content="Example content")
-             >>> langchain_doc = node.to_langchain()
-         """
-         return LangchainBridge.to_langchain_document(self, **kwargs)
-
-     @classmethod
-     def from_llama_index(cls, llama_node: Any, **kwargs) -> "DataNode":
-         """
-         Creates a DataNode instance from a llama index node.
-
-         Args:
-             llama_node: The llama index node object.
-             **kwargs: Additional keyword arguments passed to the node's to_dict.
-
-         Returns:
-             An instance of DataNode.
-
-         Examples:
-             >>> llama_node = SomeLlamaIndexNode()
-             >>> data_node = DataNode.from_llama_index(llama_node)
-         """
-         llama_dict = llama_node.to_dict(**kwargs)
-         return cls.from_obj(llama_dict)
-
-     @classmethod
-     def from_langchain(cls, lc_doc: Any) -> "DataNode":
-         """
-         Creates a DataNode instance from a langchain document.
-
-         Args:
-             lc_doc: The langchain document object.
-
-         Returns:
-             An instance of DataNode.
-
-         Examples:
-             >>> lc_doc = SomeLangChainDocument()
-             >>> data_node = DataNode.from_langchain(lc_doc)
-         """
-         info_json = lc_doc.to_json()
-         info_node = {"lc_id": info_json["id"]}
-         info_node = {**info_node, **info_json["kwargs"]}
-         return cls(**info_node)
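
Finally, a round-trip sketch for the removed DataNode bridges. This assumes the optional llama-index and langchain integrations are installed; the concrete types returned by the bridges are not pinned down by the code above:

    node = DataNode(content="Example content")
    li_node = node.to_llama_index()         # llama index node object (bridge-dependent type)
    lc_doc = node.to_langchain()            # langchain document object
    restored = DataNode.from_langchain(lc_doc)  # rebuilt from the document's to_json() payload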