lionagi 0.0.209__py3-none-any.whl → 0.0.211__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. lionagi/__init__.py +2 -4
  2. lionagi/api_service/base_endpoint.py +65 -0
  3. lionagi/api_service/base_rate_limiter.py +121 -0
  4. lionagi/api_service/base_service.py +146 -0
  5. lionagi/api_service/chat_completion.py +6 -0
  6. lionagi/api_service/embeddings.py +6 -0
  7. lionagi/api_service/payload_package.py +47 -0
  8. lionagi/api_service/status_tracker.py +29 -0
  9. lionagi/core/__init__.py +3 -3
  10. lionagi/core/branch.py +22 -3
  11. lionagi/core/session.py +14 -2
  12. lionagi/schema/__init__.py +5 -8
  13. lionagi/schema/base_schema.py +821 -0
  14. lionagi/structures/graph.py +1 -1
  15. lionagi/structures/relationship.py +1 -1
  16. lionagi/structures/structure.py +1 -1
  17. lionagi/tools/tool_manager.py +0 -163
  18. lionagi/tools/tool_util.py +2 -1
  19. lionagi/utils/__init__.py +5 -6
  20. lionagi/utils/api_util.py +6 -1
  21. lionagi/version.py +1 -1
  22. {lionagi-0.0.209.dist-info → lionagi-0.0.211.dist-info}/METADATA +3 -18
  23. lionagi-0.0.211.dist-info/RECORD +56 -0
  24. lionagi/agents/planner.py +0 -1
  25. lionagi/agents/prompter.py +0 -1
  26. lionagi/agents/scorer.py +0 -1
  27. lionagi/agents/summarizer.py +0 -1
  28. lionagi/agents/validator.py +0 -1
  29. lionagi/bridge/__init__.py +0 -22
  30. lionagi/bridge/langchain.py +0 -195
  31. lionagi/bridge/llama_index.py +0 -266
  32. lionagi/datastores/__init__.py +0 -1
  33. lionagi/datastores/chroma.py +0 -1
  34. lionagi/datastores/deeplake.py +0 -1
  35. lionagi/datastores/elasticsearch.py +0 -1
  36. lionagi/datastores/lantern.py +0 -1
  37. lionagi/datastores/pinecone.py +0 -1
  38. lionagi/datastores/postgres.py +0 -1
  39. lionagi/datastores/qdrant.py +0 -1
  40. lionagi/iservices/anthropic.py +0 -79
  41. lionagi/iservices/anyscale.py +0 -0
  42. lionagi/iservices/azure.py +0 -1
  43. lionagi/iservices/bedrock.py +0 -0
  44. lionagi/iservices/everlyai.py +0 -0
  45. lionagi/iservices/gemini.py +0 -0
  46. lionagi/iservices/gpt4all.py +0 -0
  47. lionagi/iservices/huggingface.py +0 -0
  48. lionagi/iservices/litellm.py +0 -33
  49. lionagi/iservices/localai.py +0 -0
  50. lionagi/iservices/openllm.py +0 -0
  51. lionagi/iservices/openrouter.py +0 -44
  52. lionagi/iservices/perplexity.py +0 -0
  53. lionagi/iservices/predibase.py +0 -0
  54. lionagi/iservices/rungpt.py +0 -0
  55. lionagi/iservices/vllm.py +0 -0
  56. lionagi/iservices/xinference.py +0 -0
  57. lionagi/loaders/__init__.py +0 -18
  58. lionagi/loaders/chunker.py +0 -166
  59. lionagi/loaders/load_util.py +0 -240
  60. lionagi/loaders/reader.py +0 -122
  61. lionagi/models/__init__.py +0 -0
  62. lionagi/models/base_model.py +0 -0
  63. lionagi/models/imodel.py +0 -53
  64. lionagi/parsers/__init__.py +0 -1
  65. lionagi/schema/async_queue.py +0 -158
  66. lionagi/schema/base_condition.py +0 -1
  67. lionagi/schema/base_node.py +0 -422
  68. lionagi/schema/base_tool.py +0 -44
  69. lionagi/schema/data_logger.py +0 -131
  70. lionagi/schema/data_node.py +0 -88
  71. lionagi/schema/status_tracker.py +0 -37
  72. lionagi/tests/test_utils/test_encrypt_util.py +0 -323
  73. lionagi/utils/encrypt_util.py +0 -283
  74. lionagi-0.0.209.dist-info/RECORD +0 -98
  75. /lionagi/{agents → api_service}/__init__.py +0 -0
  76. /lionagi/{iservices → services}/__init__.py +0 -0
  77. /lionagi/{iservices → services}/base_service.py +0 -0
  78. /lionagi/{iservices → services}/mistralai.py +0 -0
  79. /lionagi/{iservices → services}/mlx_service.py +0 -0
  80. /lionagi/{iservices → services}/oai.py +0 -0
  81. /lionagi/{iservices → services}/ollama.py +0 -0
  82. /lionagi/{iservices → services}/services.py +0 -0
  83. /lionagi/{iservices → services}/transformers.py +0 -0
  84. {lionagi-0.0.209.dist-info → lionagi-0.0.211.dist-info}/LICENSE +0 -0
  85. {lionagi-0.0.209.dist-info → lionagi-0.0.211.dist-info}/WHEEL +0 -0
  86. {lionagi-0.0.209.dist-info → lionagi-0.0.211.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,821 @@
1
+ from collections import deque
2
+ from typing import Any, Dict, List, Optional, Type, TypeVar, Union, Callable
3
+ from ..utils.sys_util import get_timestamp, create_path, as_dict, to_df, create_id, change_dict_key, is_schema
4
+ import json
5
+ import xml.etree.ElementTree as ET
6
+
7
+ from pydantic import BaseModel, Field, AliasChoices, field_serializer
8
+
9
+ T = TypeVar('T', bound='BaseNode')
10
+
11
+
12
class BaseNode(BaseModel):
    """
    The base class for nodes containing general information and metadata.

    Attributes:
        id_ (str): The unique identifier for the node (serialized as "node_id").
        metadata (Dict[str, Any]): Additional metadata for the node.
        label (Optional[str]): An optional label for the node.
        related_nodes (List[str]): List of related node IDs.
        content (Union[str, Dict[str, Any], None, Any]): The content of the node.
            May also be populated from the input aliases 'text', 'page_content'
            or 'chunk_content'.

    Examples:
        >>> node = BaseNode(content="Example content")
        >>> node_dict = node.to_dict()
        >>> json_str = node.to_json()
        >>> same_node = BaseNode.from_json(json_str)
    """

    id_: str = Field(default_factory=lambda: str(create_id()), alias="node_id")
    metadata: Dict[str, Any] = Field(default_factory=dict)
    label: Optional[str] = None
    related_nodes: List[str] = Field(default_factory=list)
    content: Union[str, Dict[str, Any], None, Any] = Field(
        default=None, validation_alias=AliasChoices('text', 'page_content', 'chunk_content')
    )

    class Config:
        # Keep unknown input fields; accept either field names or aliases.
        extra = 'allow'
        populate_by_name = True
        validate_assignment = True
        validate_return = True
        str_strip_whitespace = True

    @classmethod
    def from_dict(cls: Type[T], data: Dict[str, Any]) -> T:
        """
        Creates an instance of the class from a dictionary.

        Args:
            data: A dictionary containing the node's data.

        Returns:
            An instance of the class.

        Examples:
            >>> data = {"content": "Example content"}
            >>> node = BaseNode.from_dict(data)
        """
        return cls(**data)

    @classmethod
    def from_json(cls: Type[T], json_str: str, **kwargs) -> T:
        """
        Creates an instance of the class from a JSON string.

        Args:
            json_str: A JSON string containing the node's data.
            **kwargs: Additional keyword arguments to pass to json.loads.

        Returns:
            An instance of the class.

        Raises:
            ValueError: If `json_str` is not valid JSON.

        Examples:
            >>> json_str = '{"content": "Example content"}'
            >>> node = BaseNode.from_json(json_str)
        """
        try:
            data = json.loads(json_str, **kwargs)
            return cls(**data)
        except json.JSONDecodeError as e:
            raise ValueError("Invalid JSON string provided for deserialization.") from e

    @classmethod
    def from_xml(cls: Type[T], xml_str: str) -> T:
        """
        Creates an instance of the class from an XML string.

        Args:
            xml_str: An XML string containing the node's data as child elements.

        Returns:
            An instance of the class.

        Examples:
            >>> xml_str = "<BaseNode><content>Example content</content></BaseNode>"
            >>> node = BaseNode.from_xml(xml_str)
        """
        root = ET.fromstring(xml_str)
        data = cls._xml_to_dict(root)
        return cls(**data)

    def to_json(self) -> str:
        """
        Converts the instance to a JSON string (using field aliases).

        Returns:
            A JSON string representing the node.
        """
        return self.model_dump_json(by_alias=True)

    def to_dict(self) -> Dict[str, Any]:
        """
        Converts the instance to a dictionary (using field aliases).

        Returns:
            A dictionary representing the node.
        """
        return self.model_dump(by_alias=True)

    def to_xml(self) -> str:
        """
        Converts the instance to an XML string.

        Each field becomes a child element whose text is str(value).

        Returns:
            An XML string representing the node.
        """
        root = ET.Element(self.__class__.__name__)
        for attr, value in self.to_dict().items():
            child = ET.SubElement(root, attr)
            child.text = str(value)
        return ET.tostring(root, encoding='unicode')

    def validate_content(self, schema: Dict[str, type]) -> bool:
        """
        Validates the node's content against a schema.

        Args:
            schema: Mapping of expected keys to expected value types.

        Returns:
            True if the content is a dict matching the schema, False otherwise
            (including when the content is not a dict at all).
        """
        if not isinstance(self.content, dict):
            return False
        return is_schema(self.content, schema)

    @property
    def meta_keys(self) -> List[str]:
        """
        List of metadata keys.

        Returns:
            A list of keys in the metadata dictionary.
        """
        return list(self.metadata.keys())

    def has_meta_key(self, key: str) -> bool:
        """
        Checks if a metadata key exists.

        Args:
            key: The metadata key to check for.

        Returns:
            True if the key exists, False otherwise.
        """
        return key in self.metadata

    def get_meta_key(self, key: str) -> Any:
        """
        Retrieves a value from the metadata dictionary.

        Args:
            key: The key for the value to retrieve.

        Returns:
            The value associated with the key, or None if it does not exist.
        """
        return self.metadata.get(key)

    def change_meta_key(self, old_key: str, new_key: str) -> bool:
        """
        Renames a key in the metadata dictionary.

        Args:
            old_key: The old key name.
            new_key: The new key name.

        Returns:
            True if the key was changed, False if `old_key` was absent.
        """
        if old_key in self.metadata:
            change_dict_key(self.metadata, old_key=old_key, new_key=new_key)
            return True
        return False

    def delete_meta_key(self, key: str) -> bool:
        """
        Deletes a key from the metadata dictionary.

        Args:
            key: The key to delete.

        Returns:
            True if the key was deleted, False if it was absent.
        """
        if key in self.metadata:
            del self.metadata[key]
            return True
        return False

    def merge_meta(self, other_metadata: Dict[str, Any], overwrite: bool = False) -> None:
        """
        Merges another metadata dictionary into the current metadata.

        Args:
            other_metadata: The metadata dictionary to merge.
            overwrite: If True, existing keys are overwritten; otherwise only
                keys not already present are added.
        """
        if not overwrite:
            # Drop incoming keys that would clobber existing ones.
            other_metadata = {
                k: v for k, v in other_metadata.items()
                if k not in self.metadata
            }
        self.metadata.update(other_metadata)

    def clear_meta(self) -> None:
        """Clears the metadata dictionary in place."""
        self.metadata.clear()

    def filter_meta(self, filter_func: Callable[[Any], bool]) -> Dict[str, Any]:
        """
        Filters the metadata dictionary based on a predicate over values.

        Args:
            filter_func: Predicate applied to each metadata value.

        Returns:
            A new dictionary containing the entries whose values pass the
            predicate; the node's metadata is not modified.
        """
        return {k: v for k, v in self.metadata.items() if filter_func(v)}

    def validate_meta(self, schema: Dict[str, type]) -> bool:
        """
        Validates the metadata against a schema.

        Args:
            schema: Mapping of expected keys to expected value types.

        Returns:
            True if the metadata matches the schema, False otherwise.
        """
        return is_schema(dict_=self.metadata, schema=schema)

    def add_related_node(self, node_id: str) -> bool:
        """
        Adds a related node ID to the list of related nodes.

        Args:
            node_id: The ID of the related node to add.

        Returns:
            True if the ID was added, False if it was already in the list.
        """
        if node_id not in self.related_nodes:
            self.related_nodes.append(node_id)
            return True
        return False

    def remove_related_node(self, node_id: str) -> bool:
        """
        Removes a related node ID from the list of related nodes.

        Args:
            node_id: The ID of the related node to remove.

        Returns:
            True if the ID was removed, False if it was not in the list.
        """
        if node_id in self.related_nodes:
            self.related_nodes.remove(node_id)
            return True
        return False

    def __str__(self) -> str:
        """String representation with 75-char previews of content/metadata."""
        # Convert once instead of re-stringifying in both the test and the slice.
        content_str = str(self.content)
        metadata_str = str(self.metadata)
        content_preview = content_str[:75] + '...' if len(content_str) > 75 else content_str
        metadata_preview = metadata_str[:75] + '...' if len(metadata_str) > 75 else metadata_str
        related_nodes_preview = ', '.join(self.related_nodes[:3]) + ('...' if len(self.related_nodes) > 3 else '')
        return (f"{self.__class__.__name__}(id={self.id_}, label={self.label}, "
                f"content='{content_preview}', metadata='{metadata_preview}', "
                f"related_nodes=[{related_nodes_preview}])")

    def __repr__(self):
        """Machine-readable representation of the BaseNode instance."""
        return f"{self.__class__.__name__}({self.to_json()})"

    @staticmethod
    def _xml_to_dict(root: ET.Element) -> Dict[str, Any]:
        # Flat conversion: one level of child elements -> {tag: text}.
        data = {}
        for child in root:
            data[child.tag] = child.text
        return data
374
+
375
class Tool(BaseNode):
    """
    A node wrapping a callable together with its content, parser and schema.

    Attributes:
        func (Any): The callable this tool executes.
        content (Any): Optional payload the tool operates on. Defaults to None.
        parser (Any): Optional parser used with the tool. Defaults to None.
        schema_ (dict): Schema definition describing the tool.

    Examples:
        >>> tool = Tool(func=my_function, schema_={'type': 'string'})
        >>> tool.serialize_func(my_function)
        'my_function'
    """

    func: Any
    content: Any = None
    parser: Any = None
    schema_: dict

    @field_serializer('func')
    def serialize_func(self, func):
        """
        Serialize the wrapped callable as its bare name.

        Args:
            func: The callable being serialized.

        Returns:
            str: The callable's ``__name__``.
        """
        return func.__name__
415
+
416
+
417
class DataLogger:
    """
    A class for logging data entries and exporting them as CSV/JSON files.

    Entries are held in a deque and can be flushed to disk via pandas.
    The logger can be configured with a default directory for saved files.

    Attributes:
        dir (Optional[str]):
            The default directory where exported files will be saved.
        log (deque):
            A deque object that stores the logged data entries.
    """

    def __init__(self, dir=None, log: list = None) -> None:
        """
        Initializes the DataLogger with an optional directory and initial log.

        Parameters:
            dir (Optional[str]): Directory where files will be saved.
                Defaults to 'data/logs/'.
            log (Optional[List]): Initial list of log entries. Defaults to empty.
        """
        self.dir = dir or 'data/logs/'
        self.log = deque(log) if log else deque()

    def add_entry(self, entry: Dict[str, Any], level: str = "INFO") -> None:
        """
        Adds a new entry to the log with a timestamp and a log level.

        Args:
            entry (Dict[str, Any]): The data entry to be added to the log.
            level (str): Log level for the entry (e.g., "INFO", "ERROR").
                Defaults to "INFO".
        """
        self.log.append({
            "timestamp": get_timestamp(), "level": level, **as_dict(entry)
        })

    def set_dir(self, dir: str) -> None:
        """
        Sets the default directory for saved files.

        Parameters:
            dir (str): The directory to be set as the default.
        """
        self.dir = dir

    def to_csv(
        self, filename: str = 'log.csv',
        file_exist_ok: bool = False,
        timestamp=True,
        time_prefix: bool = False,
        verbose: bool = True,
        clear=True, **kwargs
    ) -> None:
        """
        Exports the logged data to a CSV file and optionally clears the log.

        Args:
            filename (str): The name of the CSV file ('.csv' appended if missing).
            file_exist_ok (bool): If True, creates the directory if it does not
                exist. Defaults to False.
            timestamp (bool): If True, embeds a timestamp in the path.
            time_prefix (bool): If True, puts the timestamp as a filename prefix.
            verbose (bool): If True, prints a message upon completion.
            clear (bool): If True, clears the log after a successful save.
            **kwargs: Forwarded to ``DataFrame.to_csv``.

        Raises:
            ValueError: If the export fails for any reason.
        """
        if not filename.endswith('.csv'):
            filename += '.csv'
        filepath = create_path(
            self.dir, filename, timestamp=timestamp,
            dir_exist_ok=file_exist_ok, time_prefix=time_prefix
        )
        self._export(filepath, 'to_csv', verbose, clear, **kwargs)

    def to_json(
        self, filename: str = 'log.json',
        timestamp=False,
        time_prefix=False,
        file_exist_ok: bool = False,
        verbose: bool = True,
        clear=True,
        **kwargs
    ) -> None:
        """
        Exports the logged data to a JSON file and optionally clears the log.

        Parameters mirror :meth:`to_csv`; note the different defaults
        (``timestamp=False``) and that ``**kwargs`` are forwarded to
        ``DataFrame.to_json``.

        Raises:
            ValueError: If the export fails for any reason.
        """
        if not filename.endswith('.json'):
            filename += '.json'
        filepath = create_path(
            self.dir, filename, timestamp=timestamp,
            dir_exist_ok=file_exist_ok, time_prefix=time_prefix
        )
        self._export(filepath, 'to_json', verbose, clear, **kwargs)

    def _export(self, filepath, method: str, verbose: bool, clear: bool, **kwargs) -> None:
        """Shared export path: build a DataFrame, write it, report, and clear.

        `method` is the DataFrame writer name ('to_csv' or 'to_json'); the
        format name in the error message is derived from it.
        """
        try:
            df = to_df(list(self.log))
            getattr(df, method)(filepath, **kwargs)
            if verbose:
                print(f"{len(self.log)} logs saved to {filepath}")
            if clear:
                self.log.clear()
        except Exception as e:
            # 'to_csv' -> 'csv', 'to_json' -> 'json' (the original to_json
            # wrongly reported "csv" on failure).
            raise ValueError(f"Error in saving to {method[3:]}: {e}") from e
541
+
542
class DataNode(BaseNode):
    """A BaseNode with converters to/from llama-index and langchain formats."""

    def to_llama_index(self, **kwargs) -> Any:
        """
        Converts this node to llama-index TextNode format.

        Args:
            **kwargs: Forwarded to the bridge converter.

        Returns:
            The llama-index representation of the node.
        """
        # NOTE(review): lionagi.bridge appears to be removed in this release —
        # confirm this lazy import still resolves.
        from lionagi.bridge.llama_index import to_llama_index_textnode
        return to_llama_index_textnode(self, **kwargs)

    def to_langchain(self, **kwargs) -> Any:
        """
        Converts this node to langchain Document format.

        Args:
            **kwargs: Forwarded to the bridge converter.

        Returns:
            The langchain document representation of the node.
        """
        from lionagi.bridge.langchain import to_langchain_document
        return to_langchain_document(self, **kwargs)

    @classmethod
    def from_llama_index(cls, llama_node: Any, **kwargs) -> "DataNode":
        """
        Builds a DataNode from a llama-index node.

        Args:
            llama_node: The llama-index node object.
            **kwargs: Forwarded to the node's ``to_dict``.

        Returns:
            An instance of DataNode.
        """
        return cls.from_dict(llama_node.to_dict(**kwargs))

    @classmethod
    def from_langchain(cls, lc_doc: Any) -> "DataNode":
        """
        Builds a DataNode from a langchain document.

        Args:
            lc_doc: The langchain document object.

        Returns:
            An instance of DataNode.
        """
        doc_json = lc_doc.to_json()
        fields = {'lc_id': doc_json['id'], **doc_json['kwargs']}
        return cls(**fields)
616
+
617
+
618
class File(DataNode):
    """Marker subclass of DataNode; adds no behavior of its own."""
621
+
622
+
623
class Chunk(DataNode):
    """Marker subclass of DataNode; adds no behavior of its own."""
626
+
627
+ from dataclasses import dataclass
628
+
629
+
630
# credit to OpenAI for the following object
@dataclass
class StatusTracker:
    """
    Mutable record of task-outcome counters.

    Counts how many tasks were started, are still in flight, finished
    successfully, or failed — with failures broken down into rate-limit
    errors, API errors, and everything else. All counters start at zero.

    Attributes:
        num_tasks_started: Tasks that have been initiated.
        num_tasks_in_progress: Tasks currently being processed.
        num_tasks_succeeded: Tasks that completed successfully.
        num_tasks_failed: Tasks that failed.
        num_rate_limit_errors: Failures caused by rate limiting.
        num_api_errors: Failures caused by API errors.
        num_other_errors: Failures from any other cause.
    """
    num_tasks_started: int = 0
    num_tasks_in_progress: int = 0
    num_tasks_succeeded: int = 0
    num_tasks_failed: int = 0
    num_rate_limit_errors: int = 0
    num_api_errors: int = 0
    num_other_errors: int = 0
663
+
664
+ # import asyncio
665
+ # from typing import Any, Callable
666
+ # from ..utils.call_util import tcall
667
+
668
+ # class AsyncQueue:
669
+ # """
670
+ # A queue class that handles asynchronous operations using asyncio.
671
+
672
+ # This class provides an asynchronous queue that can enqueue items, process them
673
+ # asynchronously, and support graceful shutdowns. It is designed to facilitate
674
+ # concurrent task processing in an orderly and controlled manner.
675
+
676
+ # Attributes:
677
+ # queue (asyncio.Queue):
678
+ # A queue to hold items for asynchronous processing.
679
+ # _stop_event (asyncio.Event):
680
+ # An event to signal when the queue should stop processing.
681
+
682
+ # Methods:
683
+ # enqueue(item):
684
+ # Add an item to the queue for processing.
685
+ # dequeue():
686
+ # Remove and return an item from the queue.
687
+ # join():
688
+ # Wait until all items in the queue have been processed.
689
+ # stop():
690
+ # Signal to stop processing new items in the queue.
691
+ # stopped():
692
+ # Check if the queue has been signaled to stop.
693
+ # process_requests(func):
694
+ # Process items using a provided function.
695
+ # """
696
+
697
+ # def __init__(self) -> None:
698
+ # """
699
+ # Initializes an AsyncQueue object with an empty asyncio Queue and a stop event.
700
+ # """
701
+ # self.queue = asyncio.Queue()
702
+ # self._stop_event = asyncio.Event()
703
+
704
+ # async def enqueue(self, item: Any) -> None:
705
+ # """
706
+ # Asynchronously add an item to the queue for processing.
707
+
708
+ # Parameters:
709
+ # item (Any): The item to be added to the queue.
710
+
711
+ # Example:
712
+ # >>> async_queue = AsyncQueue()
713
+ # >>> asyncio.run(async_queue.enqueue('Task 1'))
714
+ # """
715
+ # await self.queue.put(item)
716
+
717
+ # async def dequeue(self) -> Any:
718
+ # """
719
+ # Asynchronously remove and return an item from the queue.
720
+
721
+ # If the queue is empty, this method will wait until an item is available.
722
+
723
+ # Returns:
724
+ # Any: The next item from the queue.
725
+
726
+ # Example:
727
+ # >>> async_queue = AsyncQueue()
728
+ # >>> asyncio.run(async_queue.enqueue('Task 1'))
729
+ # >>> asyncio.run(async_queue.dequeue())
730
+ # 'Task 1'
731
+ # """
732
+ # return await self.queue.get()
733
+
734
+ # async def join(self) -> None:
735
+ # """
736
+ # Asynchronously wait until all items in the queue have been processed.
737
+
738
+ # This method blocks until every item that has been enqueued is processed,
739
+ # ensuring that all tasks are completed.
740
+
741
+ # Example:
742
+ # >>> async_queue = AsyncQueue()
743
+ # >>> asyncio.run(async_queue.enqueue('Task 1'))
744
+ # >>> asyncio.run(async_queue.join()) # This will block until 'Task 1' is processed.
745
+ # """
746
+ # await self.queue.join()
747
+
748
+ # async def stop(self) -> None:
749
+ # """
750
+ # Signal the queue to stop processing new items.
751
+
752
+ # Once called, the queue will not process any new items after the current ones
753
+ # are completed, allowing for a graceful shutdown.
754
+
755
+ # Example:
756
+ # >>> async_queue = AsyncQueue()
757
+ # >>> asyncio.run(async_queue.stop()) # This signals the queue to stop processing.
758
+ # """
759
+ # self._stop_event.set()
760
+
761
+ # def stopped(self) -> bool:
762
+ # """
763
+ # Check if the queue has been signaled to stop processing.
764
+
765
+ # Returns:
766
+ # bool: True if a stop has been signaled, False otherwise.
767
+
768
+ # Example:
769
+ # >>> async_queue = AsyncQueue()
770
+ # >>> asyncio.run(async_queue.stop())
771
+ # >>> async_queue.stopped()
772
+ # True
773
+ # """
774
+ # return self._stop_event.is_set()
775
+
776
+ # # async def process_requests(self, func: Callable[[Any], Any]) -> None:
777
+ # # """
778
+ # # Asynchronously process items from the queue using the provided function.
779
+
780
+ # # Continuously dequeues items and applies the given function to each.
781
+ # # The processing stops when the queue is signaled to stop or a sentinel value (`None`) is dequeued.
782
+
783
+ # # Parameters:
784
+ # # func (Callable[[Any], Any]): A coroutine function to process items from the queue.
785
+
786
+ # # Example:
787
+ # # >>> async def sample_processing(task):
788
+ # # ... print("Processing:", task)
789
+ # # >>> async_queue = AsyncQueue()
790
+ # # >>> asyncio.run(async_queue.enqueue('Task 1'))
791
+ # # >>> asyncio.run(async_queue.process_requests(sample_processing))
792
+ # # Processing: Task 1
793
+ # # """
794
+ # # while not self.stopped():
795
+ # # item = await self.dequeue()
796
+ # # if item is None: # Using `None` as a sentinel value to cease processing.
797
+ # # await self.stop()
798
+ # # break
799
+ # # await func(item)
800
+
801
+ # async def process_requests(self, func, timeout=None):
802
+ # """
803
+ # Process items with timeout management for each task.
804
+ # """
805
+ # tasks = set()
806
+ # while not self.stopped():
807
+ # if len(tasks) >= self.max_concurrent_tasks:
808
+ # done, tasks = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
809
+
810
+ # item = await self.dequeue()
811
+ # if item is None:
812
+ # await self.stop()
813
+ # break
814
+
815
+ # # Wrap the function call with tcall for timeout handling
816
+ # task = asyncio.create_task(tcall(item, func, timeout=timeout))
817
+ # tasks.add(task)
818
+
819
+ # if tasks:
820
+ # await asyncio.wait(tasks)
821
+