lionagi 0.0.209__py3-none-any.whl → 0.0.211__py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
Files changed (86)
  1. lionagi/__init__.py +2 -4
  2. lionagi/api_service/base_endpoint.py +65 -0
  3. lionagi/api_service/base_rate_limiter.py +121 -0
  4. lionagi/api_service/base_service.py +146 -0
  5. lionagi/api_service/chat_completion.py +6 -0
  6. lionagi/api_service/embeddings.py +6 -0
  7. lionagi/api_service/payload_package.py +47 -0
  8. lionagi/api_service/status_tracker.py +29 -0
  9. lionagi/core/__init__.py +3 -3
  10. lionagi/core/branch.py +22 -3
  11. lionagi/core/session.py +14 -2
  12. lionagi/schema/__init__.py +5 -8
  13. lionagi/schema/base_schema.py +821 -0
  14. lionagi/structures/graph.py +1 -1
  15. lionagi/structures/relationship.py +1 -1
  16. lionagi/structures/structure.py +1 -1
  17. lionagi/tools/tool_manager.py +0 -163
  18. lionagi/tools/tool_util.py +2 -1
  19. lionagi/utils/__init__.py +5 -6
  20. lionagi/utils/api_util.py +6 -1
  21. lionagi/version.py +1 -1
  22. {lionagi-0.0.209.dist-info → lionagi-0.0.211.dist-info}/METADATA +3 -18
  23. lionagi-0.0.211.dist-info/RECORD +56 -0
  24. lionagi/agents/planner.py +0 -1
  25. lionagi/agents/prompter.py +0 -1
  26. lionagi/agents/scorer.py +0 -1
  27. lionagi/agents/summarizer.py +0 -1
  28. lionagi/agents/validator.py +0 -1
  29. lionagi/bridge/__init__.py +0 -22
  30. lionagi/bridge/langchain.py +0 -195
  31. lionagi/bridge/llama_index.py +0 -266
  32. lionagi/datastores/__init__.py +0 -1
  33. lionagi/datastores/chroma.py +0 -1
  34. lionagi/datastores/deeplake.py +0 -1
  35. lionagi/datastores/elasticsearch.py +0 -1
  36. lionagi/datastores/lantern.py +0 -1
  37. lionagi/datastores/pinecone.py +0 -1
  38. lionagi/datastores/postgres.py +0 -1
  39. lionagi/datastores/qdrant.py +0 -1
  40. lionagi/iservices/anthropic.py +0 -79
  41. lionagi/iservices/anyscale.py +0 -0
  42. lionagi/iservices/azure.py +0 -1
  43. lionagi/iservices/bedrock.py +0 -0
  44. lionagi/iservices/everlyai.py +0 -0
  45. lionagi/iservices/gemini.py +0 -0
  46. lionagi/iservices/gpt4all.py +0 -0
  47. lionagi/iservices/huggingface.py +0 -0
  48. lionagi/iservices/litellm.py +0 -33
  49. lionagi/iservices/localai.py +0 -0
  50. lionagi/iservices/openllm.py +0 -0
  51. lionagi/iservices/openrouter.py +0 -44
  52. lionagi/iservices/perplexity.py +0 -0
  53. lionagi/iservices/predibase.py +0 -0
  54. lionagi/iservices/rungpt.py +0 -0
  55. lionagi/iservices/vllm.py +0 -0
  56. lionagi/iservices/xinference.py +0 -0
  57. lionagi/loaders/__init__.py +0 -18
  58. lionagi/loaders/chunker.py +0 -166
  59. lionagi/loaders/load_util.py +0 -240
  60. lionagi/loaders/reader.py +0 -122
  61. lionagi/models/__init__.py +0 -0
  62. lionagi/models/base_model.py +0 -0
  63. lionagi/models/imodel.py +0 -53
  64. lionagi/parsers/__init__.py +0 -1
  65. lionagi/schema/async_queue.py +0 -158
  66. lionagi/schema/base_condition.py +0 -1
  67. lionagi/schema/base_node.py +0 -422
  68. lionagi/schema/base_tool.py +0 -44
  69. lionagi/schema/data_logger.py +0 -131
  70. lionagi/schema/data_node.py +0 -88
  71. lionagi/schema/status_tracker.py +0 -37
  72. lionagi/tests/test_utils/test_encrypt_util.py +0 -323
  73. lionagi/utils/encrypt_util.py +0 -283
  74. lionagi-0.0.209.dist-info/RECORD +0 -98
  75. /lionagi/{agents → api_service}/__init__.py +0 -0
  76. /lionagi/{iservices → services}/__init__.py +0 -0
  77. /lionagi/{iservices → services}/base_service.py +0 -0
  78. /lionagi/{iservices → services}/mistralai.py +0 -0
  79. /lionagi/{iservices → services}/mlx_service.py +0 -0
  80. /lionagi/{iservices → services}/oai.py +0 -0
  81. /lionagi/{iservices → services}/ollama.py +0 -0
  82. /lionagi/{iservices → services}/services.py +0 -0
  83. /lionagi/{iservices → services}/transformers.py +0 -0
  84. {lionagi-0.0.209.dist-info → lionagi-0.0.211.dist-info}/LICENSE +0 -0
  85. {lionagi-0.0.209.dist-info → lionagi-0.0.211.dist-info}/WHEEL +0 -0
  86. {lionagi-0.0.209.dist-info → lionagi-0.0.211.dist-info}/top_level.txt +0 -0
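The file moves above indicate that the service client modules were renamed from lionagi.iservices to lionagi.services and that a new lionagi.api_service package was added in 0.0.211. A minimal sketch for checking which layout an installed copy provides; the module names below are inferred from the file list, not from a documented API, so treat them as assumptions:

import importlib.util

def has_module(name: str) -> bool:
    """Return True if `name` is importable in the current environment."""
    try:
        return importlib.util.find_spec(name) is not None
    except ModuleNotFoundError:
        return False

# Inferred from the moves above: 0.0.209 shipped lionagi.iservices.*,
# 0.0.211 ships lionagi.services.* plus a new lionagi.api_service package.
for candidate in ("lionagi.services.oai", "lionagi.iservices.oai", "lionagi.api_service.base_service"):
    print(candidate, "->", "available" if has_module(candidate) else "missing")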
lionagi/schema/base_node.py
@@ -1,422 +0,0 @@
- import json
- import xml.etree.ElementTree as ET
- from typing import Any, Dict, Optional, TypeVar, Type, List, Callable, Union
- from pydantic import BaseModel, Field, AliasChoices
-
- from ..utils.sys_util import create_id, change_dict_key, is_schema
- from ..utils.encrypt_util import EncrytionUtil
-
- T = TypeVar('T', bound='BaseNode')
-
-
- class BaseNode(BaseModel):
-     """
-     The base class for nodes containing general information and metadata.
-
-     Attributes:
-         id_ (str): The unique identifier for the node.
-         metadata (Dict[str, Any]): Additional metadata for the node.
-         label (Optional[str]): An optional label for the node.
-         related_nodes (List[str]): List of related node IDs.
-         content (Union[str, Dict[str, Any], None, Any]): The content of the node.
-
-     Examples:
-         >>> node = BaseNode(content="Example content")
-         >>> node_dict = node.to_dict()
-         >>> json_str = node.to_json()
-         >>> same_node = BaseNode.from_json(json_str)
-     """
-
-     id_: str = Field(default_factory=lambda: str(create_id()), alias="node_id")
-     metadata: Dict[str, Any] = Field(default_factory=dict)
-     label: Optional[str] = None
-     related_nodes: List[str] = Field(default_factory=list)
-     content: Union[str, Dict[str, Any], None, Any] = Field(
-         default=None, validation_alias=AliasChoices('text', 'page_content', 'chunk_content')
-     )
-
-     class Config:
-         extra = 'allow'
-         populate_by_name = True
-         validate_assignment = True
-         validate_return = True
-         str_strip_whitespace = True
-
-     @classmethod
-     def from_dict(cls, data: Dict[str, Any]) -> T:
-         """
-         Creates an instance of the class from a dictionary.
-
-         Args:
-             data: A dictionary containing the node's data.
-
-         Returns:
-             An instance of the class.
-
-         Examples:
-             >>> data = {"content": "Example content"}
-             >>> node = BaseNode.from_dict(data)
-         """
-         return cls(**data)
-
-     @classmethod
-     def from_json(cls: Type[T], json_str: str, **kwargs) -> T:
-         """
-         Creates an instance of the class from a JSON string.
-
-         Args:
-             json_str: A JSON string containing the node's data.
-             **kwargs: Additional keyword arguments to pass to json.loads.
-
-         Returns:
-             An instance of the class.
-
-         Examples:
-             >>> json_str = '{"content": "Example content"}'
-             >>> node = BaseNode.from_json(json_str)
-         """
-         try:
-             data = json.loads(json_str, **kwargs)
-             return cls(**data)
-         except json.JSONDecodeError as e:
-             raise ValueError("Invalid JSON string provided for deserialization.") from e
-
-     @classmethod
-     def from_xml(cls, xml_str: str) -> T:
-         """
-         Creates an instance of the class from an XML string.
-
-         Args:
-             xml_str: An XML string containing the node's data.
-
-         Returns:
-             An instance of the class.
-
-         Examples:
-             >>> xml_str = "<BaseNode><content>Example content</content></BaseNode>"
-             >>> node = BaseNode.from_xml(xml_str)
-         """
-         root = ET.fromstring(xml_str)
-         data = cls._xml_to_dict(root)
-         return cls(**data)
-
-     def to_json(self) -> str:
-         """
-         Converts the instance to a JSON string.
-
-         Returns:
-             A JSON string representing the node.
-
-         Examples:
-             >>> node = BaseNode(content="Example content")
-             >>> json_str = node.to_json()
-         """
-         return self.model_dump_json(by_alias=True)
-
-     def to_dict(self) -> Dict[str, Any]:
-         """
-         Converts the instance to a dictionary.
-
-         Returns:
-             A dictionary representing the node.
-
-         Examples:
-             >>> node = BaseNode(content="Example content")
-             >>> node_dict = node.to_dict()
-         """
-         return self.model_dump(by_alias=True)
-
-     def to_xml(self) -> str:
-         """
-         Converts the instance to an XML string.
-
-         Returns:
-             An XML string representing the node.
-
-         Examples:
-             >>> node = BaseNode(content="Example content")
-             >>> xml_str = node.to_xml()
-         """
-         root = ET.Element(self.__class__.__name__)
-         for attr, value in self.to_dict().items():
-             child = ET.SubElement(root, attr)
-             child.text = str(value)
-         return ET.tostring(root, encoding='unicode')
-
-     def validate_content(self, schema: Dict[str, type]) -> bool:
-         """
-         Validates the node's content against a schema.
-
-         Args:
-             schema: The schema to validate against.
-
-         Returns:
-             True if the content matches the schema, False otherwise.
-
-         Examples:
-             >>> schema = {"title": str, "body": str}
-             >>> node = BaseNode(content={"title": "Example", "body": "Content"})
-             >>> node.validate_content(schema)
-         """
-         if not isinstance(self.content, dict):
-             return False
-         return is_schema(self.content, schema)
-
-     @property
-     def meta_keys(self) -> List[str]:
-         """
-         List of metadata keys.
-
-         Returns:
-             A list of keys in the metadata dictionary.
-
-         Examples:
-             >>> node = BaseNode(metadata={"author": "John Doe"})
-             >>> node.meta_keys
-         """
-         return list(self.metadata.keys())
-
-     def has_meta_key(self, key: str) -> bool:
-         """
-         Checks if a metadata key exists.
-
-         Args:
-             key: The metadata key to check for.
-
-         Returns:
-             True if the key exists, False otherwise.
-
-         Examples:
-             >>> node = BaseNode(metadata={"author": "John Doe"})
-             >>> node.has_meta_key("author")
-         """
-         return key in self.metadata
-
-     def get_meta_key(self, key: str) -> Any:
-         """
-         Retrieves a value from the metadata dictionary.
-
-         Args:
-             key: The key for the value to retrieve.
-
-         Returns:
-             The value associated with the key, if it exists.
-
-         Examples:
-             >>> node = BaseNode(metadata={"author": "John Doe"})
-             >>> node.get_meta_key("author")
-         """
-         return self.metadata.get(key)
-
-     def change_meta_key(self, old_key: str, new_key: str) -> bool:
-         """
-         Changes a key in the metadata dictionary.
-
-         Args:
-             old_key: The old key name.
-             new_key: The new key name.
-
-         Returns:
-             True if the key was changed successfully, False otherwise.
-
-         Examples:
-             >>> node = BaseNode(metadata={"author": "John Doe"})
-             >>> node.change_meta_key("author", "creator")
-         """
-         if old_key in self.metadata:
-             change_dict_key(self.metadata, old_key=old_key, new_key=new_key)
-             return True
-         return False
-
-     def delete_meta_key(self, key: str) -> bool:
-         """
-         Deletes a key from the metadata dictionary.
-
-         Args:
-             key: The key to delete.
-
-         Returns:
-             True if the key was deleted, False otherwise.
-
-         Examples:
-             >>> node = BaseNode(metadata={"author": "John Doe"})
-             >>> node.delete_meta_key("author")
-         """
-         if key in self.metadata:
-             del self.metadata[key]
-             return True
-         return False
-
-     def merge_meta(self, other_metadata: Dict[str, Any], overwrite: bool = False) -> None:
-         """
-         Merges another metadata dictionary into the current metadata.
-
-         Args:
-             other_metadata: The metadata dictionary to merge.
-             overwrite: If True, existing keys will be overwritten.
-
-         Examples:
-             >>> node = BaseNode(metadata={"author": "John Doe"})
-             >>> new_meta = {"editor": "Jane Smith"}
-             >>> node.merge_meta(new_meta)
-         """
-         if not overwrite:
-             other_metadata = ({
-                 k: v for k, v in other_metadata.items()
-                 if k not in self.metadata
-             })
-         self.metadata.update(other_metadata)
-
-     def clear_meta(self) -> None:
-         """
-         Clears the metadata dictionary.
-
-         Examples:
-             >>> node = BaseNode(metadata={"author": "John Doe"})
-             >>> node.clear_meta()
-         """
-
-         self.metadata.clear()
-
-     def filter_meta(self, filter_func: Callable[[Any], bool]) -> Dict[str, Any]:
-         """
-         Filters the metadata dictionary based on a filter function.
-
-         Args:
-             filter_func: The function to filter metadata items.
-
-         Returns:
-             A dictionary containing the filtered metadata items.
-
-         Examples:
-             >>> node = BaseNode(metadata={"author": "John Doe", "year": 2020})
-             >>> filtered_meta = node.filter_meta(lambda x: isinstance(x, str))
-         """
-         return {k: v for k, v in self.metadata.items() if filter_func(v)}
-
-     def validate_meta(self, schema: Dict[str, type]) -> bool:
-         """
-         Validates the metadata against a schema.
-
-         Args:
-             schema: The schema to validate against.
-
-         Returns:
-             True if the metadata matches the schema, False otherwise.
-
-         Examples:
-             >>> schema = {"author": str, "year": int}
-             >>> node = BaseNode(metadata={"author": "John Doe", "year": 2020})
-             >>> node.validate_meta(schema)
-         """
-         return is_schema(dict_=self.metadata, schema=schema)
-
-     def encrypt_content(self, key: str) -> None:
-         """
-         Encrypts the node's content.
-
-         Args:
-             key: The encryption key.
-
-         Examples:
-             >>> node = BaseNode(content="Sensitive information")
-             >>> node.encrypt_content("my_secret_key")
-         """
-         self.content = EncrytionUtil.encrypt(self.content, key)
-
-     def decrypt_content(self, key: str) -> None:
-         """
-         Decrypts the node's content.
-
-         Args:
-             key: The decryption key.
-
-         Examples:
-             >>> node = BaseNode(content="Encrypted content")
-             >>> node.decrypt_content("my_secret_key")
-         """
-         self.content = EncrytionUtil.decrypt(self.content, key)
-
-     def add_related_node(self, node_id: str) -> bool:
-         """
-         Adds a related node ID to the list of related nodes.
-
-         Args:
-             node_id: The ID of the related node to add.
-
-         Returns:
-             True if the ID was added, False if it was already in the list.
-
-         Examples:
-             >>> node = BaseNode()
-             >>> related_node_id = "123456"
-             >>> node.add_related_node(related_node_id)
-         """
-         if node_id not in self.related_nodes:
-             self.related_nodes.append(node_id)
-             return True
-         return False
-
-     def remove_related_node(self, node_id: str) -> bool:
-         """
-         Removes a related node ID from the list of related nodes.
-
-         Args:
-             node_id: The ID of the related node to remove.
-
-         Returns:
-             True if the ID was removed, False if it was not in the list.
-
-         Examples:
-             >>> node = BaseNode(related_nodes=["123456"])
-             >>> related_node_id = "123456"
-             >>> node.remove_related_node(related_node_id)
-         """
-         if node_id in self.related_nodes:
-             self.related_nodes.remove(node_id)
-             return True
-         return False
-
-     def __str__(self) -> str:
-         """String representation of the BaseNode instance."""
-         content_preview = (str(self.content)[:75] + '...') if len(str(self.content)) > 75 else str(self.content)
-         metadata_preview = str(self.metadata)[:75] + '...' if len(str(self.metadata)) > 75 else str(self.metadata)
-         related_nodes_preview = ', '.join(self.related_nodes[:3]) + ('...' if len(self.related_nodes) > 3 else '')
-         return (f"{self.__class__.__name__}(id={self.id_}, label={self.label}, "
-                 f"content='{content_preview}', metadata='{metadata_preview}', "
-                 f"related_nodes=[{related_nodes_preview}])")
-
-     def __repr__(self):
-         """Machine-readable representation of the BaseNode instance."""
-         return f"{self.__class__.__name__}({self.to_json()})"
-
-     @staticmethod
-     def _xml_to_dict(root: ET.Element) -> Dict[str, Any]:
-         data = {}
-         for child in root:
-             data[child.tag] = child.text
-         return data
-
-
-     # def is_empty(self) -> bool:
-     #     return not self.content and not self.metadata
-
-     # def copy(self, n: int = 1) -> Union[List[T], T]:
-     #     return create_copy(self, n)
-
-     # def data_equals(self, other: 'BaseNode') -> bool:
-     #     return (
-     #         self.content == other.content and
-     #         self.metadata == other.metadata and
-     #         self.related_nodes == other.related_nodes
-     #     )
-
-     # def is_copy_of(self, other: 'BaseNode') -> bool:
-     #     return (
-     #         self.data_equals(other) and
-     #         self is not other
-     #     )
-
-     # def __eq__(self, other: 'BaseNode') -> bool:
-     #     # return (self.id_ == other.id_ and self.data_equals(other))
-     #     return self.id_ == other.id_
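For reference, the removed BaseNode round-trips between dict, JSON and XML representations. A minimal sketch based on the docstring examples in the hunk above; it assumes lionagi 0.0.209, where lionagi/schema/base_node.py still exists:

# Round-trip sketch for the removed BaseNode (lionagi 0.0.209 assumed).
from lionagi.schema.base_node import BaseNode

node = BaseNode(content="Example content", metadata={"author": "John Doe"})

# dict / JSON round trip via the pydantic-backed helpers shown above
node_dict = node.to_dict()
json_str = node.to_json()
same_node = BaseNode.from_json(json_str)

# metadata helpers
node.change_meta_key("author", "creator")
print(same_node.content, node.meta_keys)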
lionagi/schema/base_tool.py
@@ -1,44 +0,0 @@
- from typing import Any
- from pydantic import field_serializer
- from .base_node import BaseNode
-
- class Tool(BaseNode):
-     """
-     A class representing a tool with a function, content, parser, and schema.
-
-     Attributes:
-         func (Callable): The function associated with the tool.
-         content (Any, optional): The content to be processed by the tool. Defaults to None.
-         parser (Any, optional): The parser to be used with the tool. Defaults to None.
-         schema_ (Dict): The schema definition for the tool.
-
-     Examples:
-         >>> tool = Tool(func=my_function, schema_={'type': 'string'})
-         >>> serialized_func = tool.serialize_func()
-         >>> print(serialized_func)
-         'my_function'
-     """
-
-     func: Any
-     content: Any = None
-     parser: Any = None
-     schema_: dict
-
-     @field_serializer('func')
-     def serialize_func(self, func):
-         """
-         Serialize the function to its name.
-
-         Args:
-             func (Callable): The function to serialize.
-
-         Returns:
-             str: The name of the function.
-
-         Examples:
-             >>> def my_function():
-             ... pass
-             >>> Tool.serialize_func(my_function)
-             'my_function'
-         """
-         return func.__name__
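The removed Tool node serializes its callable to a bare function name when dumped. A small sketch of that behaviour, assuming lionagi 0.0.209; the schema_ payload here is an arbitrary illustrative example:

# Serialization sketch for the removed Tool class (lionagi 0.0.209 assumed).
from lionagi.schema.base_tool import Tool

def multiply(x: int, y: int) -> int:
    return x * y

# schema_ contents are illustrative only
tool = Tool(func=multiply, schema_={"name": "multiply", "parameters": {"type": "object"}})

# field_serializer('func') replaces the callable with its __name__ when dumping
print(tool.to_dict()["func"])  # -> 'multiply'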
lionagi/schema/data_logger.py
@@ -1,131 +0,0 @@
- from collections import deque
- from typing import Dict, Any
- from ..utils.sys_util import get_timestamp, create_path, as_dict, to_df
-
-
- class DataLogger:
-     """
-     A class for logging data entries and exporting them as CSV files.
-
-     This class provides functionality to log data entries in a deque and
-     supports exporting the logged data to a CSV file. The DataLogger can
-     be configured to use a specific directory for saving files.
-
-     Attributes:
-         dir (Optional[str]):
-             The default directory where CSV files will be saved.
-         log (deque):
-             A deque object that stores the logged data entries.
-
-     Methods:
-         __call__:
-             Adds an entry to the log.
-         to_csv:
-             Exports the logged data to a CSV file and clears the log.
-         set_dir:
-             Sets the default directory for saving CSV files.
-     """
-
-     def __init__(self, dir= None, log: list = None) -> None:
-         """
-         Initializes the DataLogger with an optional directory and initial log.
-
-         Parameters:
-             dir (Optional[str]): The directory where CSV files will be saved. Defaults to None.
-
-             log (Optional[List]): An initial list of log entries. Defaults to an empty list.
-         """
-         self.dir = dir or 'data/logs/'
-         self.log = deque(log) if log else deque()
-
-     def add_entry(self, entry: Dict[str, Any], level: str = "INFO") -> None:
-         """
-         Adds a new entry to the log with a timestamp and a log level.
-
-         Args:
-             entry (Dict[str, Any]): The data entry to be added to the log.
-             level (str): The log level for the entry (e.g., "INFO", "ERROR"). Defaults to "INFO".
-         """
-         self.log.append({
-             "timestamp": get_timestamp(), "level": level, **as_dict(entry)
-         })
-
-     def set_dir(self, dir: str) -> None:
-         """
-         Sets the default directory for saving CSV files.
-
-         Parameters:
-             dir (str): The directory to be set as the default for saving files.
-         """
-         self.dir = dir
-
-     def to_csv(
-         self, filename: str ='log.csv',
-         file_exist_ok: bool = False,
-         timestamp = True,
-         time_prefix: bool = False,
-         verbose: bool = True,
-         clear = True, **kwargs
-     ) -> None:
-         """
-         Exports the logged data to a CSV file, using the provided utilities for path creation and timestamping.
-
-         Args:
-             filename (str): The name of the CSV file.
-             file_exist_ok (bool): If True, creates the directory for the file if it does not exist. Defaults to False.
-             verbose (bool): If True, prints a message upon completion. Defaults to True.
-             time_prefix (bool): If True, adds the timestamp as a prefix to the filename. Defaults to False.
-         """
-         if not filename.endswith('.csv'):
-             filename += '.csv'
-
-         filepath = create_path(
-             self.dir, filename, timestamp=timestamp,
-             dir_exist_ok=file_exist_ok, time_prefix=time_prefix
-         )
-
-         try:
-             df = to_df(list(self.log))
-             df.to_csv(filepath, **kwargs)
-             if verbose:
-                 print(f"{len(self.log)} logs saved to {filepath}")
-             if clear:
-                 self.log.clear()
-         except Exception as e:
-             raise ValueError(f"Error in saving to csv: {e}")
-
-
-     def to_json(
-         self, filename: str = 'log.json',
-         timestamp = False,
-         time_prefix=False,
-         file_exist_ok: bool = False,
-         verbose: bool = True,
-         clear = True,
-         **kwargs
-     ) -> None:
-         """
-         Exports the logged data to a JSONL file and optionally clears the log.
-
-         Parameters:
-             filename (str): The name of the JSONL file.
-             file_exist_ok (bool): If True, creates the directory for the file if it does not exist. Defaults to False.
-             verbose (bool): If True, prints a message upon completion. Defaults to True.
-         """
-         if not filename.endswith('.json'):
-             filename += '.json'
-
-         filepath = create_path(
-             self.dir, filename, timestamp=timestamp,
-             dir_exist_ok=file_exist_ok, time_prefix=time_prefix
-         )
-
-         try:
-             df = to_df(list(self.log))
-             df.to_json(filepath, **kwargs)
-             if verbose:
-                 print(f"{len(self.log)} logs saved to {filepath}")
-             if clear:
-                 self.log.clear()
-         except Exception as e:
-             raise ValueError(f"Error in saving to csv: {e}")
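A minimal usage sketch for the removed DataLogger, assuming lionagi 0.0.209 and a writable working directory; the directory, filename and log entries are illustrative only:

# Usage sketch for the removed DataLogger (lionagi 0.0.209 assumed).
from lionagi.schema.data_logger import DataLogger

logger = DataLogger(dir="data/logs/")
logger.add_entry({"event": "chat_completion", "tokens": 42}, level="INFO")
logger.add_entry({"event": "rate_limited"}, level="ERROR")

# writes a timestamped CSV under data/logs/ and clears the internal deque
logger.to_csv("log.csv", file_exist_ok=True, verbose=True)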
lionagi/schema/data_node.py
@@ -1,88 +0,0 @@
- from .base_node import BaseNode
- from typing import Any
-
-
- class DataNode(BaseNode):
-
-     def to_llama_index(self, **kwargs) -> Any:
-         """
-         Converts node to llama index format.
-
-         Args:
-             **kwargs: Variable length argument list.
-
-         Returns:
-             The llama index representation of the node.
-
-         Examples:
-             node = DataNode()
-             llama_index = node.to_llama_index()
-         """
-         from lionagi.bridge.llama_index import to_llama_index_textnode
-         return to_llama_index_textnode(self, **kwargs)
-
-     def to_langchain(self, **kwargs) -> Any:
-         """
-         Converts node to langchain document format.
-
-         Args:
-             **kwargs: Variable length argument list.
-
-         Returns:
-             The langchain document representation of the node.
-
-         Examples:
-             node = DataNode()
-             langchain_doc = node.to_langchain()
-         """
-         from lionagi.bridge.langchain import to_langchain_document
-         return to_langchain_document(self, **kwargs)
-
-     @classmethod
-     def from_llama_index(cls, llama_node: Any, **kwargs) -> "DataNode":
-         """
-         Creates a DataNode instance from a llama index node.
-
-         Args:
-             llama_node: The llama index node object.
-             **kwargs: Variable length argument list.
-
-         Returns:
-             An instance of DataNode.
-
-         Examples:
-             llama_node = SomeLlamaIndexNode()
-             data_node = DataNode.from_llama_index(llama_node)
-         """
-         llama_dict = llama_node.to_dict(**kwargs)
-         return cls.from_dict(llama_dict)
-
-     @classmethod
-     def from_langchain(cls, lc_doc: Any) -> "DataNode":
-         """
-         Creates a DataNode instance from a langchain document.
-
-         Args:
-             lc_doc: The langchain document object.
-
-         Returns:
-             An instance of DataNode.
-
-         Examples:
-             lc_doc = SomeLangChainDocument()
-             data_node = DataNode.from_langchain(lc_doc)
-         """
-         info_json = lc_doc.to_json()
-         info_node = {'lc_id': info_json['id']}
-         info_node = {**info_node, **info_json['kwargs']}
-         return cls(**info_node)
-
-
- class File(DataNode):
-
-     ...
-
-
- class Chunk(DataNode):
-
-     ...
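The removed DataNode delegates to the bridge modules (also deleted in this release) for llama-index and LangChain interop. A sketch of the llama-index round trip, assuming lionagi 0.0.209 with llama-index installed; the content and metadata values are illustrative only:

# Bridge sketch for the removed DataNode (lionagi 0.0.209 with llama-index assumed).
from lionagi.schema.data_node import DataNode

node = DataNode(content="Example chunk", metadata={"source": "example.txt"})
llama_node = node.to_llama_index()                # delegates to lionagi.bridge.llama_index
restored = DataNode.from_llama_index(llama_node)  # content comes back via the 'text' alias
print(restored.content)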