lionagi 0.0.306__py3-none-any.whl → 0.0.307__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. lionagi/__init__.py +2 -5
  2. lionagi/core/__init__.py +7 -5
  3. lionagi/core/agent/__init__.py +3 -0
  4. lionagi/core/agent/base_agent.py +10 -12
  5. lionagi/core/branch/__init__.py +4 -0
  6. lionagi/core/branch/base_branch.py +81 -81
  7. lionagi/core/branch/branch.py +16 -28
  8. lionagi/core/branch/branch_flow_mixin.py +3 -7
  9. lionagi/core/branch/executable_branch.py +86 -56
  10. lionagi/core/branch/util.py +77 -162
  11. lionagi/core/{flow/direct → direct}/__init__.py +1 -1
  12. lionagi/core/{flow/direct/predict.py → direct/parallel_predict.py} +39 -17
  13. lionagi/core/direct/parallel_react.py +0 -0
  14. lionagi/core/direct/parallel_score.py +0 -0
  15. lionagi/core/direct/parallel_select.py +0 -0
  16. lionagi/core/direct/parallel_sentiment.py +0 -0
  17. lionagi/core/direct/predict.py +174 -0
  18. lionagi/core/{flow/direct → direct}/react.py +2 -2
  19. lionagi/core/{flow/direct → direct}/score.py +28 -23
  20. lionagi/core/{flow/direct → direct}/select.py +48 -45
  21. lionagi/core/direct/utils.py +83 -0
  22. lionagi/core/flow/monoflow/ReAct.py +6 -5
  23. lionagi/core/flow/monoflow/__init__.py +9 -0
  24. lionagi/core/flow/monoflow/chat.py +10 -10
  25. lionagi/core/flow/monoflow/chat_mixin.py +11 -10
  26. lionagi/core/flow/monoflow/followup.py +6 -5
  27. lionagi/core/flow/polyflow/__init__.py +1 -0
  28. lionagi/core/flow/polyflow/chat.py +15 -3
  29. lionagi/core/mail/mail_manager.py +18 -19
  30. lionagi/core/mail/schema.py +5 -4
  31. lionagi/core/messages/schema.py +18 -20
  32. lionagi/core/prompt/__init__.py +0 -0
  33. lionagi/core/prompt/prompt_template.py +0 -0
  34. lionagi/core/schema/__init__.py +2 -2
  35. lionagi/core/schema/action_node.py +11 -3
  36. lionagi/core/schema/base_mixin.py +56 -59
  37. lionagi/core/schema/base_node.py +35 -38
  38. lionagi/core/schema/condition.py +24 -0
  39. lionagi/core/schema/data_logger.py +96 -99
  40. lionagi/core/schema/data_node.py +19 -19
  41. lionagi/core/schema/prompt_template.py +0 -0
  42. lionagi/core/schema/structure.py +171 -169
  43. lionagi/core/session/__init__.py +1 -3
  44. lionagi/core/session/session.py +196 -214
  45. lionagi/core/tool/tool_manager.py +95 -103
  46. lionagi/integrations/__init__.py +1 -3
  47. lionagi/integrations/bridge/langchain_/documents.py +17 -18
  48. lionagi/integrations/bridge/langchain_/langchain_bridge.py +14 -14
  49. lionagi/integrations/bridge/llamaindex_/llama_index_bridge.py +22 -22
  50. lionagi/integrations/bridge/llamaindex_/node_parser.py +12 -12
  51. lionagi/integrations/bridge/llamaindex_/reader.py +11 -11
  52. lionagi/integrations/bridge/llamaindex_/textnode.py +7 -7
  53. lionagi/integrations/config/openrouter_configs.py +0 -1
  54. lionagi/integrations/provider/oai.py +26 -26
  55. lionagi/integrations/provider/services.py +38 -38
  56. lionagi/libs/__init__.py +34 -1
  57. lionagi/libs/ln_api.py +211 -221
  58. lionagi/libs/ln_async.py +53 -60
  59. lionagi/libs/ln_convert.py +118 -120
  60. lionagi/libs/ln_dataframe.py +32 -33
  61. lionagi/libs/ln_func_call.py +334 -342
  62. lionagi/libs/ln_nested.py +99 -107
  63. lionagi/libs/ln_parse.py +161 -165
  64. lionagi/libs/sys_util.py +52 -52
  65. lionagi/tests/test_core/test_session.py +254 -266
  66. lionagi/tests/test_core/test_session_base_util.py +299 -300
  67. lionagi/tests/test_core/test_tool_manager.py +70 -74
  68. lionagi/tests/test_libs/test_nested.py +2 -7
  69. lionagi/tests/test_libs/test_parse.py +2 -2
  70. lionagi/version.py +1 -1
  71. {lionagi-0.0.306.dist-info → lionagi-0.0.307.dist-info}/METADATA +4 -2
  72. lionagi-0.0.307.dist-info/RECORD +115 -0
  73. lionagi/core/flow/direct/utils.py +0 -43
  74. lionagi-0.0.306.dist-info/RECORD +0 -106
  75. /lionagi/core/{flow/direct → direct}/sentiment.py +0 -0
  76. {lionagi-0.0.306.dist-info → lionagi-0.0.307.dist-info}/LICENSE +0 -0
  77. {lionagi-0.0.306.dist-info → lionagi-0.0.307.dist-info}/WHEEL +0 -0
  78. {lionagi-0.0.306.dist-info → lionagi-0.0.307.dist-info}/top_level.txt +0 -0
lionagi/core/schema/data_logger.py
@@ -4,10 +4,7 @@ from dataclasses import dataclass
  from pathlib import Path
  from typing import Any, Dict, List
 
- from lionagi.libs.sys_util import SysUtil
-
- from lionagi.libs import ln_convert as convert
- from lionagi.libs import ln_nested as nested
+ from lionagi.libs import SysUtil, convert, nested
 
 
  # TODO: there should be a global data logger, under setting
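Note on the import change: 0.0.307 collapses three separate imports into a single `from lionagi.libs import SysUtil, convert, nested`, which only resolves if `lionagi/libs/__init__.py` re-exports those names (consistent with the `lionagi/libs/__init__.py +34 -1` entry in the file list above). A minimal sketch of the kind of re-export that would make the new form work; the actual contents of that `__init__.py` are not shown in this diff:

    # hypothetical excerpt of lionagi/libs/__init__.py (assumed shape, not part of this diff)
    from lionagi.libs.sys_util import SysUtil        # utility class re-exported at package level
    from lionagi.libs import ln_convert as convert   # conversion helpers under a short alias
    from lionagi.libs import ln_nested as nested     # nested-structure helpers under a short alias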
@@ -26,12 +23,12 @@ class DLog:
  operations.
 
  Attributes:
- input_data (Any): The data received by the operation. This attribute can be of
- any type, reflecting the flexible nature of input data to
- various processes.
- output_data (Any): The data produced by the operation. Similar to `input_data`,
- this attribute supports any type, accommodating the diverse
- outputs that different operations may generate.
+ input_data (Any): The data received by the operation. This attribute can be of
+ any type, reflecting the flexible nature of input data to
+ various processes.
+ output_data (Any): The data produced by the operation. Similar to `input_data`,
+ this attribute supports any type, accommodating the diverse
+ outputs that different operations may generate.
 
  Methods: serialize: Converts the instance into a dictionary, suitable for
  serialization, and appends a timestamp to this dictionary, reflecting the current
@@ -49,8 +46,8 @@ class DLog:
  dictionary, capturing the exact time the log entry was serialized.
 
  Returns:
- Dict[str, Any]: A dictionary representation of the DLog instance, including
- 'input_data', 'output_data', and 'timestamp' keys.
+ Dict[str, Any]: A dictionary representation of the DLog instance, including
+ 'input_data', 'output_data', and 'timestamp' keys.
  """
  log_dict = {}
 
@@ -86,8 +83,8 @@ class DLog:
  dictionary, capturing the exact time the log entry was serialized.
 
  Returns:
- Dict[str, Any]: A dictionary representation of the DLog instance, including
- 'input_data', 'output_data', and 'timestamp' keys.
+ Dict[str, Any]: A dictionary representation of the DLog instance, including
+ 'input_data', 'output_data', and 'timestamp' keys.
  """
  input_data = ""
  output_data = ""
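The two serialization hunks above only re-indent the docstrings, but they do pin down the return shape. A small sketch of what a serialized entry should look like, assuming `DLog` is the dataclass its docstring describes and that `serialize` adds only the documented keys (the exact timestamp format is not shown in this diff):

    >>> from lionagi.core.schema.data_logger import DLog
    >>> log = DLog(input_data="prompt text", output_data="model reply")
    >>> entry = log.serialize()
    >>> sorted(entry.keys())
    ['input_data', 'output_data', 'timestamp']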
@@ -116,27 +113,27 @@ class DataLogger:
  at program exit, among other features.
 
  Attributes:
- persist_path (Path): The filesystem path to the directory where log files will
- be saved. Defaults to a subdirectory 'data/logs/' within
- the current working directory.
- log (Deque[Dict]): A deque object that acts as the container for log entries.
- Each log entry is stored as a dictionary, facilitating easy
- conversion to various data formats.
- filename (str): The base name used for log files when saved. The actual filepath
- may include a timestamp or other modifiers based on the class's
- configuration.
+ persist_path (Path): The filesystem path to the directory where log files will
+ be saved. Defaults to a subdirectory 'data/logs/' within
+ the current working directory.
+ log (Deque[Dict]): A deque object that acts as the container for log entries.
+ Each log entry is stored as a dictionary, facilitating easy
+ conversion to various data formats.
+ filename (str): The base name used for log files when saved. The actual filepath
+ may include a timestamp or other modifiers based on the class's
+ configuration.
 
  Methods:
- append: Adds a new log entry to the datalogger.
- to_csv_file: Exports accumulated log entries to a CSV file.
- to_json_file: Exports accumulated log entries to a JSON file.
- save_at_exit: Ensures that unsaved log entries are persisted to a CSV file when
- the program terminates.
+ append: Adds a new log entry to the datalogger.
+ to_csv_file: Exports accumulated log entries to a CSV file.
+ to_json_file: Exports accumulated log entries to a JSON file.
+ save_at_exit: Ensures that unsaved log entries are persisted to a CSV file when
+ the program terminates.
 
  Usage Example:
- >>> datalogger = DataLogger(persist_path='my/logs/directory', filepath='process_logs')
- >>> datalogger.append(input_data="Example input", output_data="Example output")
- >>> datalogger.to_csv_file('finalized_logs.csv', clear=True)
+ >>> datalogger = DataLogger(persist_path='my/logs/directory', filepath='process_logs')
+ >>> datalogger.append(input_data="Example input", output_data="Example output")
+ >>> datalogger.to_csv_file('finalized_logs.csv', clear=True)
 
  This example demonstrates initializing a `DataLogger` with a custom directory and
  filepath, appending a log entry, and then exporting the log to a CSV file.
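The usage example above is only re-indented in this release, and it passes `filepath=` while the constructor documented in the next hunk takes `filename=`. Restated as a runnable sketch using the constructor's documented parameter name (treat the parameter names as best guesses from the docstrings, not a verified signature):

    >>> from lionagi.core.schema.data_logger import DataLogger
    >>> datalogger = DataLogger(persist_path='my/logs/directory', filename='process_logs')
    >>> datalogger.append(input_data="Example input", output_data="Example output")
    >>> datalogger.to_csv_file('finalized_logs.csv', clear=True)   # writes under persist_path, then empties the deque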
@@ -154,18 +151,18 @@ class DataLogger:
  logs, and base filepath for exports.
 
  Args:
- persist_path (str | Path | None, optional):
- The file system path to the directory where log files will be persisted.
- if not provided, defaults to 'data/logs/' within the current working
- directory. this path is used for all subsequent log export operations.
- log (list[Dict[str, Any]] | None, optional):
- An initial collection of log entries to populate the datalogger. each entry
- should be a dictionary reflecting the structure used by the datalogger
- (input, output, timestamp). if omitted, the datalogger starts empty.
- filename (str | None, optional):
- The base name for exported log files. this name may be augmented with
- timestamps and format-specific extensions during export operations.
- defaults to 'log'.
+ persist_path (str | Path | None, optional):
+ The file system path to the directory where log files will be persisted.
+ if not provided, defaults to 'data/logs/' within the current working
+ directory. this path is used for all subsequent log export operations.
+ log (list[Dict[str, Any]] | None, optional):
+ An initial collection of log entries to populate the datalogger. each entry
+ should be a dictionary reflecting the structure used by the datalogger
+ (input, output, timestamp). if omitted, the datalogger starts empty.
+ filename (str | None, optional):
+ The base name for exported log files. this name may be augmented with
+ timestamps and format-specific extensions during export operations.
+ defaults to 'log'.
 
  register an at-exit handler to ensure unsaved logs are automatically persisted to
  a CSV file upon program termination.
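A short sketch of the constructor options documented above, including seeding the datalogger with previously collected entries. The dict shape follows the (input, output, timestamp) structure the docstring describes, the timestamp value is illustrative, and the sketch assumes the constructor keeps seeded entries as-is in the internal deque:

    >>> earlier = [{"input_data": "a", "output_data": "b", "timestamp": "2024-03-01T00:00:00"}]
    >>> datalogger = DataLogger(persist_path='data/logs/', log=earlier, filename='log')
    >>> len(datalogger.log)   # the seeded entry is retained
    1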
@@ -187,10 +184,10 @@ class DataLogger:
  record deque.
 
  Args:
- input_data (Any):
- Data provided as input to a tracked operation or process.
- output_data (Any):
- Data resulting from the operation, recorded as the output.
+ input_data (Any):
+ Data provided as input to a tracked operation or process.
+ output_data (Any):
+ Data resulting from the operation, recorded as the output.
 
  constructs a log entry from the provided data and automatically includes a
  timestamp upon serialization.
@@ -217,25 +214,25 @@ class DataLogger:
  and timestamping options.
 
  Args:
- filename (str, optional):
- Filename for the CSV output, appended with '.csv' if not included, saved
- within the specified persisting directory.
- dir_exist_ok (bool, optional):
- If False, raises an error if the directory already exists; otherwise,
- writes without an error.
- timestamp (bool, optional):
- If True, appends a current timestamp to the filepath for uniqueness.
- time_prefix (bool, optional):
- If True, place the timestamp prefix before the filepath; otherwise,
- it's suffixed.
- verbose (bool, optional):
- If True, print a message upon successful file save, detailing the file
- path and number of logs saved.
- clear (bool, optional):
- If True, empties the internal log record after saving.
- **kwargs:
- Additional keyword arguments for pandas.DataFrame.to_csv(), allowing
- customization of the CSV output, such as excluding the index.
+ filename (str, optional):
+ Filename for the CSV output, appended with '.csv' if not included, saved
+ within the specified persisting directory.
+ dir_exist_ok (bool, optional):
+ If False, raises an error if the directory already exists; otherwise,
+ writes without an error.
+ timestamp (bool, optional):
+ If True, appends a current timestamp to the filepath for uniqueness.
+ time_prefix (bool, optional):
+ If True, place the timestamp prefix before the filepath; otherwise,
+ it's suffixed.
+ verbose (bool, optional):
+ If True, print a message upon successful file save, detailing the file
+ path and number of logs saved.
+ clear (bool, optional):
+ If True, empties the internal log record after saving.
+ **kwargs:
+ Additional keyword arguments for pandas.DataFrame.to_csv(), allowing
+ customization of the CSV output, such as excluding the index.
 
  raises a ValueError with an explanatory message if an error occurs during the file
  writing or DataFrame conversion process.
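A sketch of the to_csv_file options documented above; `index=False` is a standard `pandas.DataFrame.to_csv()` argument passed through via `**kwargs`, per the docstring, and the keyword defaults are not shown in this diff:

    >>> datalogger.to_csv_file(
    ...     'run_logs.csv',
    ...     timestamp=True,     # add a timestamp to the filepath for uniqueness
    ...     time_prefix=False,  # place the timestamp as a suffix rather than a prefix
    ...     verbose=True,       # print the save path and number of logs saved
    ...     clear=True,         # empty the internal log deque after saving
    ...     index=False,        # forwarded to pandas.DataFrame.to_csv()
    ... )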
@@ -260,7 +257,7 @@ class DataLogger:
  if clear:
  self.log.clear()
  except Exception as e:
- raise ValueError(f"Error in saving to csv: {e}")
+ raise ValueError(f"Error in saving to csv: {e}") from e
 
  def to_json_file(
  self,
@@ -281,41 +278,41 @@ class DataLogger:
  offering customization for file naming and timestamping.
 
  Args:
- filename (str, optional):
- The filepath for the JSON output.'.json' is appended if not specified.
- The file is saved within the designated persisting directory.
- timestamp (bool, optional):
- If True, adds a timestamp to the filepath to ensure uniqueness.
- time_prefix (bool, optional):
- Determines the placement of the timestamp in the filepath. A prefix if
- True; otherwise, a suffix.
- dir_exist_ok (bool, optional):
- Allows writing to an existing directory without raising an error.
- If False, an error is raised when attempting to write to an existing
- directory.
- verbose (bool, optional):
- Print a message upon successful save, indicating the file path and
- number of logs saved.
- clear (bool, optional):
- Clears the log deque after saving, aiding in memory management.
- **kwargs:
- Additional arguments passed to pandas.DataFrame.to_json(),
- enabling customization of the JSON output.
+ filename (str, optional):
+ The filepath for the JSON output.'.json' is appended if not specified.
+ The file is saved within the designated persisting directory.
+ timestamp (bool, optional):
+ If True, adds a timestamp to the filepath to ensure uniqueness.
+ time_prefix (bool, optional):
+ Determines the placement of the timestamp in the filepath. A prefix if
+ True; otherwise, a suffix.
+ dir_exist_ok (bool, optional):
+ Allows writing to an existing directory without raising an error.
+ If False, an error is raised when attempting to write to an existing
+ directory.
+ verbose (bool, optional):
+ Print a message upon successful save, indicating the file path and
+ number of logs saved.
+ clear (bool, optional):
+ Clears the log deque after saving, aiding in memory management.
+ **kwargs:
+ Additional arguments passed to pandas.DataFrame.to_json(),
+ enabling customization of the JSON output.
 
  Raises:
- ValueError: When an error occurs during file writing or DataFrame conversion,
- encapsulating
- the exception with a descriptive message.
+ ValueError: When an error occurs during file writing or DataFrame conversion,
+ encapsulating
+ the exception with a descriptive message.
 
  Examples:
- Default usage saving logs to 'log.json' within the specified persisting
- directory:
- >>> datalogger.to_json_file()
- # Save path: 'data/logs/log.json'
-
- Custom filepath without a timestamp, using additional pandas options:
- >>> datalogger.to_json_file(filepath='detailed_log.json', orient='records')
- # Save a path: 'data/logs/detailed_log.json'
+ Default usage saving logs to 'log.json' within the specified persisting
+ directory:
+ >>> datalogger.to_json_file()
+ # Save path: 'data/logs/log.json'
+
+ Custom filepath without a timestamp, using additional pandas options:
+ >>> datalogger.to_json_file(filepath='detailed_log.json', orient='records')
+ # Save a path: 'data/logs/detailed_log.json'
  """
  if not filename.endswith(".json"):
  filename += ".json"
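The docstring's own examples, restated together as a sketch. The second docstring example passes `filepath=`, but since the surrounding code appends '.json' to `filename`, the name is given positionally here to sidestep the naming mismatch; `orient='records'` is a standard `pandas.DataFrame.to_json()` argument passed through via `**kwargs`:

    >>> datalogger.to_json_file()            # documented save path: 'data/logs/log.json'
    >>> datalogger.to_json_file('detailed_log.json', timestamp=False, orient='records')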
@@ -337,7 +334,7 @@ class DataLogger:
  if clear:
  self.log.clear()
  except Exception as e:
- raise ValueError(f"Error in saving to csv: {e}")
+ raise ValueError(f"Error in saving to csv: {e}") from e
 
  def save_at_exit(self):
  """
lionagi/core/schema/data_node.py
@@ -1,5 +1,5 @@
  from typing import Any
- from lionagi.core.schema.base_node import BaseNode
+ from .base_node import BaseNode
 
  from lionagi.integrations.bridge import LlamaIndexBridge, LangchainBridge
 
@@ -22,15 +22,15 @@ class DataNode(BaseNode):
  integration and usage within that ecosystem.
 
  Args:
- node_type:
- **kwargs: Additional keyword arguments for customization.
+ node_type:
+ **kwargs: Additional keyword arguments for customization.
 
  Returns:
- Any: The llama index format representation of the node.
+ Any: The llama index format representation of the node.
 
  Examples:
- >>> node = DataNode(content="Example content")
- >>> llama_index = node.to_llama_index()
+ >>> node = DataNode(content="Example content")
+ >>> llama_index = node.to_llama_index()
  """
  return LlamaIndexBridge.to_llama_index_node(self, node_type=node_type, **kwargs)
 
@@ -42,14 +42,14 @@ class DataNode(BaseNode):
  use within langchain_ applications and workflows.
 
  Args:
- **kwargs: Additional keyword arguments for customization.
+ **kwargs: Additional keyword arguments for customization.
 
  Returns:
- Any: The langchain_ document representation of the node.
+ Any: The langchain_ document representation of the node.
 
  Examples:
- >>> node = DataNode(content="Example content")
- >>> langchain_doc = node.to_langchain()
+ >>> node = DataNode(content="Example content")
+ >>> langchain_doc = node.to_langchain()
  """
  return LangchainBridge.to_langchain_document(self, **kwargs)
 
@@ -59,15 +59,15 @@ class DataNode(BaseNode):
  Creates a DataNode instance from a llama index node.
 
  Args:
- llama_node: The llama index node object.
- **kwargs: Variable length argument list.
+ llama_node: The llama index node object.
+ **kwargs: Variable length argument list.
 
  Returns:
- An instance of DataNode.
+ An instance of DataNode.
 
  Examples:
- llama_node = SomeLlamaIndexNode()
- data_node = DataNode.from_llama_index(llama_node)
+ llama_node = SomeLlamaIndexNode()
+ data_node = DataNode.from_llama_index(llama_node)
  """
  llama_dict = llama_node.to_dict(**kwargs)
  return cls.from_obj(llama_dict)
@@ -78,14 +78,14 @@ class DataNode(BaseNode):
  Creates a DataNode instance from a langchain_ document.
 
  Args:
- lc_doc: The langchain_ document object.
+ lc_doc: The langchain_ document object.
 
  Returns:
- An instance of DataNode.
+ An instance of DataNode.
 
  Examples:
- lc_doc = SomeLangChainDocument()
- data_node = DataNode.from_langchain(lc_doc)
+ lc_doc = SomeLangChainDocument()
+ data_node = DataNode.from_langchain(lc_doc)
  """
  info_json = lc_doc.to_json()
  info_node = {"lc_id": info_json["id"]}
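The four DataNode hunks above only re-indent docstrings, but together they document the bridge conversions in both directions. A consolidated sketch of the documented calls; `SomeLlamaIndexNode` and `SomeLangChainDocument` are placeholders carried over from the docstrings, standing in for real llama-index / langchain objects, and the corresponding libraries must be installed for the bridges to work:

    >>> from lionagi.core.schema.data_node import DataNode
    >>> node = DataNode(content="Example content")
    >>> llama_index_node = node.to_llama_index()                      # llama-index representation of the node
    >>> langchain_doc = node.to_langchain()                           # langchain document representation of the node
    >>> node2 = DataNode.from_llama_index(SomeLlamaIndexNode())       # placeholder llama-index node
    >>> node3 = DataNode.from_langchain(SomeLangChainDocument())      # placeholder langchain document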