lionagi 0.0.111__py3-none-any.whl → 0.0.113__py3-none-any.whl
- lionagi/__init__.py +7 -2
- lionagi/bridge/__init__.py +7 -0
- lionagi/bridge/langchain.py +131 -0
- lionagi/bridge/llama_index.py +157 -0
- lionagi/configs/__init__.py +7 -0
- lionagi/configs/oai_configs.py +49 -0
- lionagi/configs/openrouter_config.py +49 -0
- lionagi/core/__init__.py +15 -0
- lionagi/{session/conversation.py → core/conversations.py} +10 -17
- lionagi/core/flows.py +1 -0
- lionagi/core/instruction_sets.py +1 -0
- lionagi/{session/message.py → core/messages.py} +5 -5
- lionagi/core/sessions.py +262 -0
- lionagi/datastore/__init__.py +1 -0
- lionagi/datastore/chroma.py +1 -0
- lionagi/datastore/deeplake.py +1 -0
- lionagi/datastore/elasticsearch.py +1 -0
- lionagi/datastore/lantern.py +1 -0
- lionagi/datastore/pinecone.py +1 -0
- lionagi/datastore/postgres.py +1 -0
- lionagi/datastore/qdrant.py +1 -0
- lionagi/loader/__init__.py +12 -0
- lionagi/loader/chunker.py +157 -0
- lionagi/loader/reader.py +124 -0
- lionagi/objs/__init__.py +7 -0
- lionagi/objs/messenger.py +163 -0
- lionagi/objs/tool_registry.py +247 -0
- lionagi/schema/__init__.py +11 -0
- lionagi/schema/base_condition.py +1 -0
- lionagi/schema/base_schema.py +239 -0
- lionagi/schema/base_tool.py +9 -0
- lionagi/schema/data_logger.py +94 -0
- lionagi/services/__init__.py +14 -0
- lionagi/services/anthropic.py +1 -0
- lionagi/services/anyscale.py +0 -0
- lionagi/services/azure.py +1 -0
- lionagi/{api/oai_service.py → services/base_api_service.py} +74 -148
- lionagi/services/bedrock.py +0 -0
- lionagi/services/chatcompletion.py +48 -0
- lionagi/services/everlyai.py +0 -0
- lionagi/services/gemini.py +0 -0
- lionagi/services/gpt4all.py +0 -0
- lionagi/services/huggingface.py +0 -0
- lionagi/services/litellm.py +1 -0
- lionagi/services/localai.py +0 -0
- lionagi/services/mistralai.py +0 -0
- lionagi/services/oai.py +34 -0
- lionagi/services/ollama.py +1 -0
- lionagi/services/openllm.py +0 -0
- lionagi/services/openrouter.py +32 -0
- lionagi/services/perplexity.py +0 -0
- lionagi/services/predibase.py +0 -0
- lionagi/services/rungpt.py +0 -0
- lionagi/services/service_objs.py +282 -0
- lionagi/services/vllm.py +0 -0
- lionagi/services/xinference.py +0 -0
- lionagi/structure/__init__.py +7 -0
- lionagi/structure/relationship.py +128 -0
- lionagi/structure/structure.py +160 -0
- lionagi/tests/__init__.py +0 -0
- lionagi/tests/test_flatten_util.py +426 -0
- lionagi/tools/__init__.py +0 -0
- lionagi/tools/coder.py +1 -0
- lionagi/tools/planner.py +1 -0
- lionagi/tools/prompter.py +1 -0
- lionagi/tools/sandbox.py +1 -0
- lionagi/tools/scorer.py +1 -0
- lionagi/tools/summarizer.py +1 -0
- lionagi/tools/validator.py +1 -0
- lionagi/utils/__init__.py +46 -8
- lionagi/utils/api_util.py +63 -416
- lionagi/utils/call_util.py +347 -0
- lionagi/utils/flat_util.py +540 -0
- lionagi/utils/io_util.py +102 -0
- lionagi/utils/load_utils.py +190 -0
- lionagi/utils/sys_util.py +85 -660
- lionagi/utils/tool_util.py +82 -199
- lionagi/utils/type_util.py +81 -0
- lionagi/version.py +1 -1
- {lionagi-0.0.111.dist-info → lionagi-0.0.113.dist-info}/METADATA +44 -15
- lionagi-0.0.113.dist-info/RECORD +84 -0
- lionagi/api/__init__.py +0 -8
- lionagi/api/oai_config.py +0 -16
- lionagi/session/__init__.py +0 -7
- lionagi/session/session.py +0 -380
- lionagi/utils/doc_util.py +0 -331
- lionagi/utils/log_util.py +0 -86
- lionagi-0.0.111.dist-info/RECORD +0 -20
- {lionagi-0.0.111.dist-info → lionagi-0.0.113.dist-info}/LICENSE +0 -0
- {lionagi-0.0.111.dist-info → lionagi-0.0.113.dist-info}/WHEEL +0 -0
- {lionagi-0.0.111.dist-info → lionagi-0.0.113.dist-info}/top_level.txt +0 -0
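The renamed modules in the listing above imply straightforward import-path updates for downstream code. The sketch below is derived only from the rename entries in this diff; whether lionagi 0.0.113 also re-exports these modules' contents at the package level is not shown here, so treat the right-hand column as module locations only.

# Import-path changes implied by the renames in this listing (old -> new):
#   lionagi.session.conversation  ->  lionagi.core.conversations
#   lionagi.session.message       ->  lionagi.core.messages
#   lionagi.api.oai_service       ->  lionagi.services.base_api_service
# The names exported by each module are not part of this diff and may differ
# between versions; check the 0.0.113 sources before relying on them.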
lionagi/utils/doc_util.py
DELETED
@@ -1,331 +0,0 @@
import math
from pathlib import Path
from typing import Any, Dict, List, Union, Callable, Optional

from .sys_util import to_list, l_call
from .log_util import DataLogger


def dir_to_path(dir: str, ext, recursive: bool = False, flat: bool = True):
    """
    Retrieves a list of file paths in the specified directory with the given extension.

    Parameters:
        dir (str): The directory path where to search for files.

        ext (str): The file extension to filter by.

        recursive (bool, optional): If True, search for files recursively in subdirectories. Defaults to False.

        flat (bool, optional): If True, return a flat list of file paths. Defaults to True.

    Returns:
        List[str]: A list of file paths that match the given extension within the specified directory.

    Example:
        >>> files = dir_to_path(dir='my_directory', ext='.txt', recursive=True, flat=True)
    """

    def _dir_to_path(ext, recursive=recursive):
        tem = '**/*' if recursive else '*'
        return list(Path(dir).glob(tem + ext))

    try:
        return to_list(l_call(ext, _dir_to_path, flat=True), flat=flat)
    except:
        raise ValueError("Invalid directory or extension, please check the path")

def read_text(filepath: str, clean: bool = True) -> str:
    """
    Reads the content of a text file and optionally cleans it by removing specified characters.

    Parameters:
        filepath (str): The path to the text file to be read.

        clean (bool, optional): If True, clean the content by removing specific unwanted characters. Defaults to True.

    Returns:
        str: The cleaned (if 'clean' is True) or raw content of the text file.

    Example:
        >>> content = read_text(filepath='example.txt', clean=True)
    """

    with open(filepath, 'r') as f:
        content = f.read()
        if clean:
            # Define characters to replace and their replacements
            replacements = {'\\': ' ', '\n': ' ', '\t': ' ', '  ': ' ', '\'': ' '}
            for old, new in replacements.items():
                content = content.replace(old, new)
        return content

def dir_to_files(dir: str, ext: str, recursive: bool = False,
                 reader: Callable = read_text, clean: bool = True,
                 to_csv: bool = False, project: str = 'project',
                 output_dir: str = 'data/logs/sources/', filename: Optional[str] = None,
                 verbose: bool = True, timestamp: bool = True, logger: Optional[DataLogger] = None):
    """
    Reads and processes files in a specified directory with the given extension.

    Parameters:
        dir (str): The directory path where files are located.

        ext (str): The file extension to filter by.

        recursive (bool, optional): If True, search files recursively in subdirectories. Defaults to False.

        reader (Callable, optional): Function used to read and process the content of each file. Defaults to read_text.

        clean (bool, optional): If True, cleans the content by removing specified characters. Defaults to True.

        to_csv (bool, optional): If True, export the processed data to a CSV file. Defaults to False.

        project (str, optional): The name of the project. Defaults to 'project'.

        output_dir (str, optional): Directory path for exporting the CSV file. Defaults to 'data/logs/sources/'.

        filename (Optional[str], optional): Name of the CSV file, if not provided, a default will be used. Defaults to None.

        verbose (bool, optional): If True, print a message upon CSV export. Defaults to True.

        timestamp (bool, optional): If True, include a timestamp in the file name. Defaults to True.

        logger (Optional[DataLogger], optional): An instance of DataLogger for logging, if not provided, a new one will be created. Defaults to None.

    Returns:
        List[Dict[str, Union[str, Path]]]: A list of dictionaries containing file information and content.

    Examples:
        >>> logs = dir_to_files(dir='my_directory', ext='.txt', to_csv=True)
    """

    sources = dir_to_path(dir, ext, recursive)

    def _split_path(path: Path) -> tuple:
        folder_name = path.parent.name
        file_name = path.name
        return (folder_name, file_name)

    def _to_dict(path_: Path) -> Dict[str, Union[str, Path]]:
        folder, file = _split_path(path_)
        content = reader(str(path_), clean=clean)
        return {
            'project': project,
            'folder': folder,
            'file': file,
            "file_size": len(str(content)),
            'content': content
        } if content else None

    logs = to_list(l_call(sources, _to_dict, flat=True), dropna=True)

    if to_csv:
        filename = filename or f"{project}_sources.csv"
        logger = DataLogger(dir=output_dir, log=logs) if not logger else logger
        logger.to_csv(dir=output_dir, filename=filename, verbose=verbose, timestamp=timestamp)

    return logs

def chunk_text(input: str, chunk_size: int, overlap: float,
               threshold: int) -> List[Union[str, None]]:
    """
    Splits a string into chunks of a specified size, allowing for optional overlap between chunks.

    Parameters:
        input (str): The text to be split into chunks.

        chunk_size (int): The size of each chunk in characters.

        overlap (float): A value between [0, 1] specifying the percentage of overlap between adjacent chunks.

        threshold (int): The minimum size for the last chunk. If the last chunk is smaller than this, it will be merged with the previous chunk.

    Raises:
        TypeError: If input text cannot be converted to a string.

        ValueError: If any error occurs during the chunking process.

    Returns:
        List[Union[str, None]]: List of text chunks.
    """

    try:
        # Ensure text is a string
        if not isinstance(input, str):
            input = str(input)

        chunks = []
        n_chunks = math.ceil(len(input) / chunk_size)
        overlap_size = int(chunk_size * overlap / 2)

        if n_chunks == 1:
            return [input]

        elif n_chunks == 2:
            chunks.append(input[:chunk_size + overlap_size])
            if len(input) - chunk_size > threshold:
                chunks.append(input[chunk_size - overlap_size:])
            else:
                return [input]
            return chunks

        elif n_chunks > 2:
            chunks.append(input[:chunk_size + overlap_size])
            for i in range(1, n_chunks - 1):
                start_idx = chunk_size * i - overlap_size
                end_idx = chunk_size * (i + 1) + overlap_size
                chunks.append(input[start_idx:end_idx])

            if len(input) - chunk_size * (n_chunks - 1) > threshold:
                chunks.append(input[chunk_size * (n_chunks - 1) - overlap_size:])
            else:
                chunks[-1] += input[chunk_size * (n_chunks - 1) + overlap_size:]

            return chunks

    except Exception as e:
        raise ValueError(f"An error occurred while chunking the text. {e}")

def _file_to_chunks(input: Dict[str, Any],
                    field: str = 'content',
                    chunk_size: int = 1500,
                    overlap: float = 0.2,
                    threshold: int = 200) -> List[Dict[str, Any]]:
    """
    Splits text from a specified dictionary field into chunks and returns a list of dictionaries.

    Parameters:
        input (Dict[str, Any]): The input dictionary containing the text field to be chunked.

        field (str, optional): The dictionary key corresponding to the text field. Defaults to 'content'.

        chunk_size (int, optional): Size of each text chunk in characters. Defaults to 1500.

        overlap (float, optional): Percentage of overlap between adjacent chunks, in the range [0, 1]. Defaults to 0.2.

        threshold (int, optional): Minimum size for the last chunk. If smaller, it will be merged with the previous chunk. Defaults to 200.

    Raises:
        ValueError: If any error occurs during the chunking process.

    Returns:
        List[Dict[str, Any]]: A list of dictionaries, each containing a separate chunk along with original key-value pairs from the input dictionary.

    Example:
        >>> d = {'content': 'This is a test string.', 'other_field': 1}
        >>> file_to_chunks(d)
        [{'chunk_overlap': 0.2, 'chunk_threshold': 200, 'file_chunks': 2, 'chunk_id': 1, 'chunk_size': 14, 'chunk_content': 'This is a test', 'other_field': 1}, ...]
    """

    try:
        out = {key: value for key, value in input.items() if key != field}
        out.update({"chunk_overlap": overlap, "chunk_threshold": threshold})

        chunks = chunk_text(input[field], chunk_size=chunk_size, overlap=overlap, threshold=threshold)
        logs = []
        for i, chunk in enumerate(chunks):
            chunk_dict = out.copy()
            chunk_dict.update({
                'file_chunks': len(chunks),
                'chunk_id': i + 1,
                'chunk_size': len(chunk),
                f'chunk_{field}': chunk
            })
            logs.append(chunk_dict)

        return logs

    except Exception as e:
        raise ValueError(f"An error occurred while chunking the file. {e}")

def file_to_chunks(input,
                   field: str = 'content',
                   chunk_size: int = 1500,
                   overlap: float = 0.2,
                   threshold: int = 200,
                   to_csv=False,
                   project='project',
                   output_dir='data/logs/sources/',
                   chunk_func=_file_to_chunks,
                   filename=None,
                   verbose=True,
                   timestamp=True,
                   logger=None):
    """
    Splits text from a specified dictionary field into chunks and returns a list of dictionaries.

    Parameters:
        input (List[Dict[str, Any]]): The input dictionaries containing the text field to be chunked.

        field (str, optional): The dictionary key corresponding to the text field. Defaults to 'content'.

        chunk_size (int, optional): Size of each text chunk in characters. Defaults to 1500.

        overlap (float, optional): Percentage of overlap between adjacent chunks, in the range [0, 1]. Defaults to 0.2.

        threshold (int, optional): Minimum size for the last chunk. If smaller, it will be merged with the previous chunk. Defaults to 200.

        to_csv (bool, optional): If True, export the processed data to a CSV file.

        project (str, optional): The name of the project.

        output_dir (str, optional): The directory path for exporting the CSV file.

        chunk_func (function, optional): The function to be used for chunking. Defaults to _file_to_chunks.

        filename (str, optional): The name of the CSV file.

        verbose (bool, optional): If True, print a verbose message after export.

        timestamp (bool, optional): If True, include a timestamp in the exported file name.

        logger (DataLogger, optional): An optional DataLogger instance for logging.

    Returns:
        List[Dict[str, Any]]: A list of dictionaries representing the processed text chunks.
    """

    _f = lambda x: chunk_func(x, field=field, chunk_size=chunk_size, overlap=overlap, threshold=threshold)
    logs = to_list(l_call(input, _f), flat=True)

    if to_csv:
        filename = filename if filename else f"{project}_sources.csv"
        logger = DataLogger(log=logs) if not logger else logger
        logger.to_csv(dir=output_dir, filename=filename, verbose=verbose, timestamp=timestamp)

    return logs

def get_bins(input: List[str], upper: int = 7500) -> List[List[int]]:
    """
    Get index of elements in a list based on their consecutive cumulative sum of length,
    according to some upper threshold. Return lists of indices as bins.

    Parameters:
        input (List[str]): List of items to be binned.

        upper (int, optional): Upper threshold for the cumulative sum of the length of items in a bin. Default is 7500.

    Returns:
        List[List[int]]: List of lists, where each inner list contains the indices of the items that form a bin.

    Example:
        >>> items = ['apple', 'a', 'b', 'banana', 'cheery', 'c', 'd', 'e']
        >>> upper = 10
        >>> get_bins(items, upper)
        [[0, 1, 2], [3], [4, 5, 6, 7]]
    """
    current = 0
    bins = []
    bin = []
    for idx, item in enumerate(input):
        if current + len(item) < upper:
            bin.append(idx)
            current += len(item)
        elif current + len(item) >= upper:
            bins.append(bin)
            bin = [idx]
            current = len(item)
        if idx == len(input) - 1 and len(bin) > 0:
            bins.append(bin)
    return bins
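For readers upgrading from 0.0.111, here is a minimal sketch of how the deleted chunking helpers behaved. It assumes lionagi 0.0.111 is still installed so that lionagi.utils.doc_util is importable; the listing above suggests the corresponding functionality in 0.0.113 moved under lionagi/loader/ and lionagi/utils/load_utils.py, but that mapping is not spelled out in this diff.

# Sketch of the removed helpers' behavior, assuming lionagi 0.0.111 is installed.
from lionagi.utils.doc_util import chunk_text, get_bins

text = "abcdefghijklmnopqrst"        # 20 characters
# chunk_size=10 -> n_chunks = ceil(20 / 10) = 2
# overlap=0.2   -> overlap_size = int(10 * 0.2 / 2) = 1
chunks = chunk_text(text, chunk_size=10, overlap=0.2, threshold=3)
print(chunks)
# ['abcdefghijk', 'jklmnopqrst']  -- each chunk extends 1 char past the
# boundary, so adjacent chunks share a 2-character overlap ('jk').

# get_bins groups list indices so each bin's total character length stays under `upper`.
items = ['apple', 'a', 'b', 'banana', 'cheery', 'c', 'd', 'e']
print(get_bins(items, upper=10))
# [[0, 1, 2], [3], [4, 5, 6, 7]]  -- matches the docstring example above.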
lionagi/utils/log_util.py
DELETED
@@ -1,86 +0,0 @@
from collections import deque
from .sys_util import to_csv, create_path


class DataLogger:
    """
    Logs data entries and outputs them to a CSV file.

    This class provides a logging mechanism for data entries that can be saved to a
    CSV file. It offers methods for appending new log entries, saving the log to a CSV file,
    and setting the directory where the logs should be saved.

    Attributes:
        dir (str):
            The directory where the log files are to be saved.
        log (deque):
            A deque that stores log entries.

    Methods:
        __call__(entry):
            Appends a new entry to the log.
        to_csv(dir: str, filename: str, verbose: bool, timestamp: bool, dir_exist_ok: bool, file_exist_ok: bool):
            Converts the log to a CSV format and saves it to a file.
        set_dir(dir: str):
            Sets the directory for saving log files.
    """

    def __init__(self, dir=None, log: list = None) -> None:
        """
        Initializes a new instance of the DataLogger class.

        Parameters:
            dir (str, optional): The directory where the log files will be saved. Defaults to None.

            log (list, optional): An initial list of log entries. Defaults to an empty deque.
        """
        self.dir = dir
        self.log = deque(log) if log else deque()

    def __call__(self, entry):
        """
        Appends a new entry to the log.

        Parameters:
            entry: The entry to append to the log. The entry can be of any datatype.
        """
        self.log.append(entry)

    def to_csv(self, dir: str, filename: str, verbose: bool = True, timestamp: bool = True, dir_exist_ok=True, file_exist_ok=False):
        """
        Converts the log to a CSV format and saves it to a file.

        Parameters:
            dir (str): The directory where the CSV file will be saved.

            filename (str): The name of the CSV file.

            verbose (bool, optional): If True, prints a message after saving the log. Defaults to True.

            timestamp (bool, optional): If True, appends a timestamp to the filename. Defaults to True.

            dir_exist_ok (bool, optional): If True, overrides the existing directory if needed. Defaults to True.

            file_exist_ok (bool, optional): If True, overrides the existing file if needed. Defaults to False.

        Postconditions:
            Saves the log entries to a CSV file and clears the `log` attribute.

            Optionally prints a message with the number of log entries saved and the file path.
        """
        filepath = create_path(dir=dir, filename=filename, timestamp=timestamp, dir_exist_ok=dir_exist_ok)
        to_csv(list(self.log), filepath, file_exist_ok=file_exist_ok)
        n_logs = len(list(self.log))
        self.log = deque()
        if verbose:
            print(f"{n_logs} logs saved to {filepath}")

    def set_dir(self, dir: str):
        """
        Sets the directory where log files will be saved.

        Parameters:
            dir (str): The directory to set for saving log files.
        """
        self.dir = dir
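A minimal usage sketch of the removed DataLogger, again assuming lionagi 0.0.111 is installed; the listing above adds lionagi/schema/data_logger.py in 0.0.113, which appears to supersede this class, though this diff does not show its API.

# Sketch of the removed DataLogger, assuming lionagi 0.0.111 is installed.
from lionagi.utils.log_util import DataLogger

logger = DataLogger(dir='data/logs/')       # __init__ only stores the directory
logger({'event': 'started', 'ok': True})    # __call__ appends an entry to the deque
logger({'event': 'finished', 'ok': True})

# to_csv builds the path via create_path (timestamp=True by default), writes the
# entries out with the sys_util to_csv helper, then clears the deque.
logger.to_csv(dir='data/logs/', filename='run.csv', verbose=True)
# prints "2 logs saved to <filepath>", where <filepath> is whatever create_path
# (not shown in this diff) produced from the directory, filename, and timestamp.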
lionagi-0.0.111.dist-info/RECORD
DELETED
@@ -1,20 +0,0 @@
lionagi/__init__.py,sha256=2Rko3tw94ZFVN_GSvcxAY1O77FxswcaMxNHKH5Bj7jc,788
lionagi/version.py,sha256=BtvlzlO7UNG5nD9ftqoP0J2wHDhVCXpzuMsjYBw0Wfg,24
lionagi/api/__init__.py,sha256=PF-fWsB0axACGDbm8FluZgDQMyQf3PUUJ1tIXW535TQ,178
lionagi/api/oai_config.py,sha256=yhyZ4aEaF6r3XBbhxI47r8CL2-amc-4IKJhbXv2W9CM,356
lionagi/api/oai_service.py,sha256=9FBmw_UTBsgnPBP7f8KOX4PwS5TXeknnUw6-kAYJxqk,11919
lionagi/session/__init__.py,sha256=qAf0IAA2D1ZhwvRopSgi8X13DH4Y0E5HaA2DVr6FxG0,152
lionagi/session/conversation.py,sha256=5EePoSVIcyaZJEtkNxv1yCFby_3_SLx5AmaSxS9pwI8,4058
lionagi/session/message.py,sha256=L4QhTvay-xqL_PWWzr_lTig70cr7zEc5YjONILmAoWU,6504
lionagi/session/session.py,sha256=wmAwNxk6pm1MTV3kqjG8i5a89xssDP0Ln8m2_e_zdek,14748
lionagi/utils/__init__.py,sha256=e5aEzyHofUYZ8olcyHxq7wqTGRsRZwag3vyZ0T4_ByQ,842
lionagi/utils/api_util.py,sha256=xPWwQkWMnkGOUAKpwOkGkMv2SWCSCHjBsggnrSfHJhs,15257
lionagi/utils/doc_util.py,sha256=ZEuLKzc3EH692FW1LXRXBHgextMfb1OaLE3z_NhBBT0,12882
lionagi/utils/log_util.py,sha256=mfLmvjv4hvTYMel46tpKJyqLbj1PZimCgKigz48osZY,3158
lionagi/utils/sys_util.py,sha256=q4I_d61Zwe-WvukNoa53Gd8ycDcTpfOhw3yER8ZoiCg,28449
lionagi/utils/tool_util.py,sha256=0mWGW_rfUPTay_L05dckGzEXdg4ZdhFyGA1lve9tnj8,7410
lionagi-0.0.111.dist-info/LICENSE,sha256=TBnSyG8fs_tMRtK805GzA1cIyExleKyzoN_kuVxT9IY,11358
lionagi-0.0.111.dist-info/METADATA,sha256=drb1EbWKei2M7WjH6Md8Qq_vOMRc8Sgz1oyxawPZQEY,17414
lionagi-0.0.111.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
lionagi-0.0.111.dist-info/top_level.txt,sha256=szvch_d2jE1Lu9ZIKsl26Ll6BGfYfbOgt5lm-UpFSo4,8
lionagi-0.0.111.dist-info/RECORD,,