lionagi 0.0.305__py3-none-any.whl → 0.0.307__py3-none-any.whl
- lionagi/__init__.py +2 -5
- lionagi/core/__init__.py +7 -4
- lionagi/core/agent/__init__.py +3 -0
- lionagi/core/agent/base_agent.py +46 -0
- lionagi/core/branch/__init__.py +4 -0
- lionagi/core/branch/base/__init__.py +0 -0
- lionagi/core/branch/base_branch.py +100 -78
- lionagi/core/branch/branch.py +22 -34
- lionagi/core/branch/branch_flow_mixin.py +3 -7
- lionagi/core/branch/executable_branch.py +192 -0
- lionagi/core/branch/util.py +77 -162
- lionagi/core/direct/__init__.py +13 -0
- lionagi/core/direct/parallel_predict.py +127 -0
- lionagi/core/direct/parallel_react.py +0 -0
- lionagi/core/direct/parallel_score.py +0 -0
- lionagi/core/direct/parallel_select.py +0 -0
- lionagi/core/direct/parallel_sentiment.py +0 -0
- lionagi/core/direct/predict.py +174 -0
- lionagi/core/direct/react.py +33 -0
- lionagi/core/direct/score.py +163 -0
- lionagi/core/direct/select.py +144 -0
- lionagi/core/direct/sentiment.py +51 -0
- lionagi/core/direct/utils.py +83 -0
- lionagi/core/flow/__init__.py +0 -3
- lionagi/core/flow/monoflow/{mono_react.py → ReAct.py} +52 -9
- lionagi/core/flow/monoflow/__init__.py +9 -0
- lionagi/core/flow/monoflow/{mono_chat.py → chat.py} +11 -11
- lionagi/core/flow/monoflow/{mono_chat_mixin.py → chat_mixin.py} +33 -27
- lionagi/core/flow/monoflow/{mono_followup.py → followup.py} +7 -6
- lionagi/core/flow/polyflow/__init__.py +1 -0
- lionagi/core/flow/polyflow/{polychat.py → chat.py} +15 -3
- lionagi/core/mail/__init__.py +8 -0
- lionagi/core/mail/mail_manager.py +88 -40
- lionagi/core/mail/schema.py +32 -6
- lionagi/core/messages/__init__.py +3 -0
- lionagi/core/messages/schema.py +56 -25
- lionagi/core/prompt/__init__.py +0 -0
- lionagi/core/prompt/prompt_template.py +0 -0
- lionagi/core/schema/__init__.py +7 -5
- lionagi/core/schema/action_node.py +29 -0
- lionagi/core/schema/base_mixin.py +56 -59
- lionagi/core/schema/base_node.py +35 -38
- lionagi/core/schema/condition.py +24 -0
- lionagi/core/schema/data_logger.py +98 -98
- lionagi/core/schema/data_node.py +19 -19
- lionagi/core/schema/prompt_template.py +0 -0
- lionagi/core/schema/structure.py +293 -190
- lionagi/core/session/__init__.py +1 -3
- lionagi/core/session/session.py +196 -214
- lionagi/core/tool/tool_manager.py +95 -103
- lionagi/integrations/__init__.py +1 -3
- lionagi/integrations/bridge/langchain_/documents.py +17 -18
- lionagi/integrations/bridge/langchain_/langchain_bridge.py +14 -14
- lionagi/integrations/bridge/llamaindex_/llama_index_bridge.py +22 -22
- lionagi/integrations/bridge/llamaindex_/node_parser.py +12 -12
- lionagi/integrations/bridge/llamaindex_/reader.py +11 -11
- lionagi/integrations/bridge/llamaindex_/textnode.py +7 -7
- lionagi/integrations/config/openrouter_configs.py +0 -1
- lionagi/integrations/provider/oai.py +26 -26
- lionagi/integrations/provider/services.py +38 -38
- lionagi/libs/__init__.py +34 -1
- lionagi/libs/ln_api.py +211 -221
- lionagi/libs/ln_async.py +53 -60
- lionagi/libs/ln_convert.py +118 -120
- lionagi/libs/ln_dataframe.py +32 -33
- lionagi/libs/ln_func_call.py +334 -342
- lionagi/libs/ln_nested.py +99 -107
- lionagi/libs/ln_parse.py +175 -158
- lionagi/libs/sys_util.py +52 -52
- lionagi/tests/test_core/test_base_branch.py +427 -427
- lionagi/tests/test_core/test_branch.py +292 -292
- lionagi/tests/test_core/test_mail_manager.py +57 -57
- lionagi/tests/test_core/test_session.py +254 -266
- lionagi/tests/test_core/test_session_base_util.py +299 -300
- lionagi/tests/test_core/test_tool_manager.py +70 -74
- lionagi/tests/test_libs/test_nested.py +2 -7
- lionagi/tests/test_libs/test_parse.py +2 -2
- lionagi/version.py +1 -1
- {lionagi-0.0.305.dist-info → lionagi-0.0.307.dist-info}/METADATA +4 -2
- lionagi-0.0.307.dist-info/RECORD +115 -0
- lionagi-0.0.305.dist-info/RECORD +0 -94
- {lionagi-0.0.305.dist-info → lionagi-0.0.307.dist-info}/LICENSE +0 -0
- {lionagi-0.0.305.dist-info → lionagi-0.0.307.dist-info}/WHEEL +0 -0
- {lionagi-0.0.305.dist-info → lionagi-0.0.307.dist-info}/top_level.txt +0 -0
lionagi/core/schema/base_node.py
CHANGED
@@ -5,14 +5,11 @@ Module for base component model definition using Pydantic.
 from abc import ABC
 from typing import Any, TypeVar
 
-import
-from lionagi.libs
-
-from lionagi.libs import ln_convert as convert
+from pydantic import Field, field_serializer, AliasChoices
+from lionagi.libs import SysUtil, convert
 
 from .base_mixin import BaseComponentMixin
 
-
 T = TypeVar("T", bound="BaseComponent")
 
 
@@ -23,16 +23,16 @@ class BaseComponent(BaseComponentMixin, ABC):
     and validating metadata keys and values.
 
     Attributes:
-
-
-
+        id_ (str): Unique identifier, defaulted using SysUtil.create_id.
+        timestamp (str | None): Timestamp of creation or modification.
+        metadata (dict[str, Any]): Metadata associated with the component.
     """
 
-    id_: str =
-    timestamp: str | None =
-    metadata: dict[str, Any] =
+    id_: str = Field(default_factory=SysUtil.create_id, alias="node_id")
+    timestamp: str | None = Field(default_factory=SysUtil.get_timestamp)
+    metadata: dict[str, Any] = Field(default_factory=dict, alias="meta")
 
-    class
+    class ConfigDict:
        """Model configuration settings."""

        extra = "allow"
@@ -46,11 +46,11 @@ class BaseComponent(BaseComponentMixin, ABC):
         Creates a deep copy of the instance, with an option to update specific fields.
 
         Args:
-
-
+            *args: Variable length argument list for additional options.
+            **kwargs: Arbitrary keyword arguments specifying updates to the instance.
 
         Returns:
-
+            BaseComponent: A new instance of BaseComponent as a deep copy of the original, with updates applied.
         """
         return self.model_copy(*args, **kwargs)
 
@@ -64,15 +64,15 @@ class BaseNode(BaseComponent):
     extending BaseComponent with content handling capabilities.
 
     Attributes:
-
-
-
-
+        content: The content of the node, which can be a string, a dictionary with any structure,
+            None, or any other type. It is flexible to accommodate various types of content.
+            This attribute also supports aliasing through validation_alias for compatibility with
+            different naming conventions like "text", "page_content", or "chunk_content".
     """
 
-    content: str | dict[str, Any] | None | Any =
+    content: str | dict[str, Any] | None | Any = Field(
         default=None,
-        validation_alias=
+        validation_alias=AliasChoices("text", "page_content", "chunk_content"),
     )
 
     @property
@@ -81,8 +78,8 @@ class BaseNode(BaseComponent):
         Attempts to serialize the node's content to a string.
 
         Returns:
-
-
+            str: The serialized content string. If serialization fails, returns "null" and
+                logs an error message indicating the content is not serializable.
         """
         try:
             return convert.to_str(self.content)
@@ -98,17 +95,17 @@ class BaseNode(BaseComponent):
         metadata preview, and optionally the timestamp if present.
 
         Returns:
-
+            str: A string representation of the instance.
         """
         timestamp = f" ({self.timestamp})" if self.timestamp else ""
         if self.content:
             content_preview = (
-                self.content[:50]
+                f"{self.content[:50]}..." if len(self.content) > 50 else self.content
             )
         else:
             content_preview = ""
         meta_preview = (
-            str(self.metadata)[:50]
+            f"{str(self.metadata)[:50]}..."
             if len(str(self.metadata)) > 50
             else str(self.metadata)
         )
@@ -123,11 +120,11 @@ class BaseRelatableNode(BaseNode):
     Extends BaseNode with functionality to manage relationships with other nodes.
 
     Attributes:
-
-
+        related_nodes: A list of identifiers (str) for nodes that are related to this node.
+        label: An optional label for the node, providing additional context or classification.
     """
 
-    related_nodes: list[str] =
+    related_nodes: list[str] = Field(default_factory=list)
     label: str | None = None
 
     def add_related_node(self, node_id: str) -> bool:
@@ -135,10 +132,10 @@ class BaseRelatableNode(BaseNode):
         Adds a node to the list of related nodes if it's not already present.
 
         Args:
-
+            node_id: The identifier of the node to add.
 
         Returns:
-
+            bool: True if the node was added, False if it was already in the list.
         """
         if node_id not in self.related_nodes:
             self.related_nodes.append(node_id)
@@ -150,10 +147,10 @@ class BaseRelatableNode(BaseNode):
         Removes a node from the list of related nodes if it's present.
 
         Args:
-
+            node_id: The identifier of the node to remove.
 
         Returns:
-
+            bool: True if the node was removed, False if it was not found in the list.
         """
 
         if node_id in self.related_nodes:
@@ -167,10 +164,10 @@ class Tool(BaseRelatableNode):
     Represents a tool, extending BaseRelatableNode with specific functionalities and configurations.
 
     Attributes:
-
-
-
-
+        func: The main function or capability of the tool.
+        schema_: An optional schema defining the structure and constraints of data the tool works with.
+        manual: Optional documentation or manual for using the tool.
+        parser: An optional parser associated with the tool for data processing or interpretation.
     """
 
     func: Any
@@ -178,7 +175,7 @@ class Tool(BaseRelatableNode):
     manual: Any | None = None
     parser: Any | None = None
 
-    @
+    @field_serializer("func")
     def serialize_func(self, func):
         return func.__name__
 
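The rewritten fields above move BaseComponent onto explicit Pydantic Field definitions with aliases and default factories, and Tool now serializes its func field to the function's name. A minimal sketch of how that surface could be exercised, assuming only the class definitions shown in this diff (the search_web function is a hypothetical placeholder, not part of the package):

from lionagi.core.schema.base_node import BaseNode, Tool

# "page_content" is one of the validation aliases declared on BaseNode.content,
# so document-style payloads can populate the node directly.
node = BaseNode(page_content="hello world")
print(node.content)        # "hello world"
print(bool(node.id_))      # True; id_ defaults via SysUtil.create_id

# Tool's @field_serializer("func") dumps the callable as its __name__.
def search_web(query: str) -> str:    # hypothetical example function
    return f"results for {query}"

tool = Tool(func=search_web)
print(tool.model_dump()["func"])       # "search_web"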
lionagi/core/schema/condition.py
ADDED
@@ -0,0 +1,24 @@
+from abc import ABC, abstractmethod
+from enum import Enum
+
+
+class SourceType(str, Enum):
+    STRUCTURE = "structure"
+    EXECUTABLE = "executable"
+
+
+class Condition(ABC):
+    def __init__(self, source_type):
+        try:
+            if isinstance(source_type, str):
+                source_type = SourceType(source_type)
+            if isinstance(source_type, SourceType):
+                self.source_type = source_type
+        except:
+            raise ValueError(
+                f"Invalid source_type. Valid source types are {list(SourceType)}"
+            )
+
+    @abstractmethod
+    def __call__(self, source_obj):
+        pass
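Condition is an abstract base: a concrete condition picks a SourceType and implements __call__. A minimal subclass sketch, assuming only the condition.py code added above (the HasContent class and its check are illustrative, not part of the package):

from lionagi.core.schema.condition import Condition, SourceType

class HasContent(Condition):
    """Illustrative condition: true when the source object carries non-empty content."""

    def __init__(self):
        super().__init__(source_type=SourceType.STRUCTURE)

    def __call__(self, source_obj):
        return bool(getattr(source_obj, "content", None))

cond = HasContent()
print(cond.source_type)   # SourceType.STRUCTURE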
lionagi/core/schema/data_logger.py
CHANGED
@@ -4,10 +4,10 @@ from dataclasses import dataclass
 from pathlib import Path
 from typing import Any, Dict, List
 
-from lionagi.libs
+from lionagi.libs import SysUtil, convert, nested
 
-
-
+
+# TODO: there should be a global data logger, under setting
 
 
 @dataclass
@@ -23,12 +23,12 @@ class DLog:
     operations.
 
     Attributes:
-
-
-
-
-
-
+        input_data (Any): The data received by the operation. This attribute can be of
+                          any type, reflecting the flexible nature of input data to
+                          various processes.
+        output_data (Any): The data produced by the operation. Similar to `input_data`,
+                           this attribute supports any type, accommodating the diverse
+                           outputs that different operations may generate.
 
     Methods: serialize: Converts the instance into a dictionary, suitable for
     serialization, and appends a timestamp to this dictionary, reflecting the current
@@ -46,8 +46,8 @@ class DLog:
         dictionary, capturing the exact time the log entry was serialized.
 
         Returns:
-
-
+            Dict[str, Any]: A dictionary representation of the DLog instance, including
+                            'input_data', 'output_data', and 'timestamp' keys.
         """
         log_dict = {}
 
@@ -83,8 +83,8 @@ class DLog:
         dictionary, capturing the exact time the log entry was serialized.
 
         Returns:
-
-
+            Dict[str, Any]: A dictionary representation of the DLog instance, including
+                            'input_data', 'output_data', and 'timestamp' keys.
         """
         input_data = ""
         output_data = ""
@@ -113,27 +113,27 @@ class DataLogger:
     at program exit, among other features.
 
     Attributes:
-
-
-
-
-
-
-
-
-
+        persist_path (Path): The filesystem path to the directory where log files will
+                             be saved. Defaults to a subdirectory 'data/logs/' within
+                             the current working directory.
+        log (Deque[Dict]): A deque object that acts as the container for log entries.
+                           Each log entry is stored as a dictionary, facilitating easy
+                           conversion to various data formats.
+        filename (str): The base name used for log files when saved. The actual filepath
+                        may include a timestamp or other modifiers based on the class's
+                        configuration.
 
     Methods:
-
-
-
-
-
+        append: Adds a new log entry to the datalogger.
+        to_csv_file: Exports accumulated log entries to a CSV file.
+        to_json_file: Exports accumulated log entries to a JSON file.
+        save_at_exit: Ensures that unsaved log entries are persisted to a CSV file when
+                      the program terminates.
 
     Usage Example:
-
-
-
+        >>> datalogger = DataLogger(persist_path='my/logs/directory', filepath='process_logs')
+        >>> datalogger.append(input_data="Example input", output_data="Example output")
+        >>> datalogger.to_csv_file('finalized_logs.csv', clear=True)
 
     This example demonstrates initializing a `DataLogger` with a custom directory and
     filepath, appending a log entry, and then exporting the log to a CSV file.
@@ -151,18 +151,18 @@ class DataLogger:
         logs, and base filepath for exports.
 
         Args:
-
-
-
-
-
-
-
-
-
-
-
-
+            persist_path (str | Path | None, optional):
+                The file system path to the directory where log files will be persisted.
+                if not provided, defaults to 'data/logs/' within the current working
+                directory. this path is used for all subsequent log export operations.
+            log (list[Dict[str, Any]] | None, optional):
+                An initial collection of log entries to populate the datalogger. each entry
+                should be a dictionary reflecting the structure used by the datalogger
+                (input, output, timestamp). if omitted, the datalogger starts empty.
+            filename (str | None, optional):
+                The base name for exported log files. this name may be augmented with
+                timestamps and format-specific extensions during export operations.
+                defaults to 'log'.
 
         register an at-exit handler to ensure unsaved logs are automatically persisted to
         a CSV file upon program termination.
@@ -184,10 +184,10 @@ class DataLogger:
         record deque.
 
         Args:
-
-
-
-
+            input_data (Any):
+                Data provided as input to a tracked operation or process.
+            output_data (Any):
+                Data resulting from the operation, recorded as the output.
 
         constructs a log entry from the provided data and automatically includes a
         timestamp upon serialization.
@@ -214,25 +214,25 @@ class DataLogger:
         and timestamping options.
 
         Args:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            filename (str, optional):
+                Filename for the CSV output, appended with '.csv' if not included, saved
+                within the specified persisting directory.
+            dir_exist_ok (bool, optional):
+                If False, raises an error if the directory already exists; otherwise,
+                writes without an error.
+            timestamp (bool, optional):
+                If True, appends a current timestamp to the filepath for uniqueness.
+            time_prefix (bool, optional):
+                If True, place the timestamp prefix before the filepath; otherwise,
+                it's suffixed.
+            verbose (bool, optional):
+                If True, print a message upon successful file save, detailing the file
+                path and number of logs saved.
+            clear (bool, optional):
+                If True, empties the internal log record after saving.
+            **kwargs:
+                Additional keyword arguments for pandas.DataFrame.to_csv(), allowing
+                customization of the CSV output, such as excluding the index.
 
         raises a ValueError with an explanatory message if an error occurs during the file
         writing or DataFrame conversion process.
@@ -257,7 +257,7 @@ class DataLogger:
             if clear:
                 self.log.clear()
         except Exception as e:
-            raise ValueError(f"Error in saving to csv: {e}")
+            raise ValueError(f"Error in saving to csv: {e}") from e
 
     def to_json_file(
         self,
@@ -278,41 +278,41 @@ class DataLogger:
         offering customization for file naming and timestamping.
 
         Args:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            filename (str, optional):
+                The filepath for the JSON output. '.json' is appended if not specified.
+                The file is saved within the designated persisting directory.
+            timestamp (bool, optional):
+                If True, adds a timestamp to the filepath to ensure uniqueness.
+            time_prefix (bool, optional):
+                Determines the placement of the timestamp in the filepath. A prefix if
+                True; otherwise, a suffix.
+            dir_exist_ok (bool, optional):
+                Allows writing to an existing directory without raising an error.
+                If False, an error is raised when attempting to write to an existing
+                directory.
+            verbose (bool, optional):
+                Print a message upon successful save, indicating the file path and
+                number of logs saved.
+            clear (bool, optional):
+                Clears the log deque after saving, aiding in memory management.
+            **kwargs:
+                Additional arguments passed to pandas.DataFrame.to_json(),
+                enabling customization of the JSON output.
 
         Raises:
-
-
-
+            ValueError: When an error occurs during file writing or DataFrame conversion,
+                        encapsulating
+                        the exception with a descriptive message.
 
         Examples:
-
-
-
-
-
-
-
-
+            Default usage saving logs to 'log.json' within the specified persisting
+            directory:
+            >>> datalogger.to_json_file()
+            # Save path: 'data/logs/log.json'
+
+            Custom filepath without a timestamp, using additional pandas options:
+            >>> datalogger.to_json_file(filepath='detailed_log.json', orient='records')
+            # Save a path: 'data/logs/detailed_log.json'
         """
         if not filename.endswith(".json"):
             filename += ".json"
@@ -334,7 +334,7 @@ class DataLogger:
             if clear:
                 self.log.clear()
         except Exception as e:
-            raise ValueError(f"Error in saving to csv: {e}")
+            raise ValueError(f"Error in saving to csv: {e}") from e
 
     def save_at_exit(self):
         """
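The restored DataLogger docstrings double as usage documentation; a condensed sketch of the intended flow, following those docstring examples (the paths and data are placeholders):

from lionagi.core.schema.data_logger import DataLogger

# Collect input/output pairs, then persist them; a timestamp is added on serialization.
datalogger = DataLogger(persist_path="data/logs/")
datalogger.append(input_data="Example input", output_data="Example output")
datalogger.to_csv_file("finalized_logs.csv", clear=True)   # clears the deque after saving
datalogger.to_json_file("detailed_log.json")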
lionagi/core/schema/data_node.py
CHANGED
@@ -1,5 +1,5 @@
 from typing import Any
-from
+from .base_node import BaseNode
 
 from lionagi.integrations.bridge import LlamaIndexBridge, LangchainBridge
 
@@ -22,15 +22,15 @@ class DataNode(BaseNode):
         integration and usage within that ecosystem.
 
         Args:
-
-
+            node_type:
+            **kwargs: Additional keyword arguments for customization.
 
         Returns:
-
+            Any: The llama index format representation of the node.
 
         Examples:
-
-
+            >>> node = DataNode(content="Example content")
+            >>> llama_index = node.to_llama_index()
         """
         return LlamaIndexBridge.to_llama_index_node(self, node_type=node_type, **kwargs)
 
@@ -42,14 +42,14 @@ class DataNode(BaseNode):
         use within langchain_ applications and workflows.
 
         Args:
-
+            **kwargs: Additional keyword arguments for customization.
 
         Returns:
-
+            Any: The langchain_ document representation of the node.
 
         Examples:
-
-
+            >>> node = DataNode(content="Example content")
+            >>> langchain_doc = node.to_langchain()
         """
         return LangchainBridge.to_langchain_document(self, **kwargs)
 
@@ -59,15 +59,15 @@ class DataNode(BaseNode):
         Creates a DataNode instance from a llama index node.
 
         Args:
-
-
+            llama_node: The llama index node object.
+            **kwargs: Variable length argument list.
 
         Returns:
-
+            An instance of DataNode.
 
         Examples:
-
-
+            llama_node = SomeLlamaIndexNode()
+            data_node = DataNode.from_llama_index(llama_node)
         """
         llama_dict = llama_node.to_dict(**kwargs)
         return cls.from_obj(llama_dict)
@@ -78,14 +78,14 @@ class DataNode(BaseNode):
         Creates a DataNode instance from a langchain_ document.
 
         Args:
-
+            lc_doc: The langchain_ document object.
 
         Returns:
-
+            An instance of DataNode.
 
         Examples:
-
-
+            lc_doc = SomeLangChainDocument()
+            data_node = DataNode.from_langchain(lc_doc)
         """
         info_json = lc_doc.to_json()
         info_node = {"lc_id": info_json["id"]}
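The DataNode docstrings above describe the bridge round trip between lionagi nodes, llama-index nodes, and langchain documents; a condensed sketch based on those docstring examples, assuming the optional llama-index and langchain dependencies are installed (the content string is a placeholder):

from lionagi.core.schema.data_node import DataNode

node = DataNode(content="Example content")

# Convert to the external representations via the bridges.
llama_node = node.to_llama_index()
langchain_doc = node.to_langchain()

# And back again, per the from_* classmethods documented above.
roundtrip = DataNode.from_llama_index(llama_node)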