lionagi 0.17.4__py3-none-any.whl → 0.17.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lionagi/__init__.py +45 -7
- lionagi/config.py +26 -0
- lionagi/fields/action.py +5 -3
- lionagi/libs/file/chunk.py +3 -14
- lionagi/libs/file/process.py +10 -92
- lionagi/libs/schema/breakdown_pydantic_annotation.py +45 -0
- lionagi/ln/_async_call.py +19 -8
- lionagi/ln/_hash.py +12 -2
- lionagi/ln/_to_list.py +23 -12
- lionagi/ln/fuzzy/_fuzzy_match.py +3 -6
- lionagi/ln/fuzzy/_fuzzy_validate.py +9 -8
- lionagi/ln/fuzzy/_string_similarity.py +11 -5
- lionagi/ln/fuzzy/_to_dict.py +19 -19
- lionagi/ln/types.py +15 -0
- lionagi/operations/operate/operate.py +7 -11
- lionagi/operations/parse/parse.py +5 -3
- lionagi/protocols/generic/element.py +3 -6
- lionagi/protocols/generic/event.py +1 -1
- lionagi/protocols/mail/package.py +2 -2
- lionagi/protocols/messages/instruction.py +9 -1
- lionagi/protocols/operatives/operative.py +4 -3
- lionagi/service/broadcaster.py +61 -0
- lionagi/service/connections/api_calling.py +22 -145
- lionagi/service/connections/mcp/wrapper.py +8 -15
- lionagi/service/hooks/__init__.py +2 -10
- lionagi/service/hooks/_types.py +1 -0
- lionagi/service/hooks/hooked_event.py +142 -0
- lionagi/service/imodel.py +2 -2
- lionagi/session/branch.py +46 -169
- lionagi/session/session.py +1 -44
- lionagi/tools/file/reader.py +6 -4
- lionagi/utils.py +3 -342
- lionagi/version.py +1 -1
- {lionagi-0.17.4.dist-info → lionagi-0.17.6.dist-info}/METADATA +4 -4
- {lionagi-0.17.4.dist-info → lionagi-0.17.6.dist-info}/RECORD +37 -41
- lionagi/libs/file/_utils.py +0 -10
- lionagi/libs/file/concat.py +0 -121
- lionagi/libs/file/concat_files.py +0 -85
- lionagi/libs/file/file_ops.py +0 -118
- lionagi/libs/file/save.py +0 -103
- lionagi/ln/concurrency/throttle.py +0 -83
- lionagi/settings.py +0 -71
- {lionagi-0.17.4.dist-info → lionagi-0.17.6.dist-info}/WHEEL +0 -0
- {lionagi-0.17.4.dist-info → lionagi-0.17.6.dist-info}/licenses/LICENSE +0 -0
lionagi/libs/file/concat.py
DELETED
@@ -1,121 +0,0 @@
-from pathlib import Path
-from typing import Any
-
-from lionagi.utils import create_path, lcall
-
-from .process import dir_to_files
-
-
-def concat(
-    data_path: str | Path | list,
-    file_types: list[str],
-    output_dir: str | Path = None,
-    output_filename: str = None,
-    file_exist_ok: bool = True,
-    recursive: bool = True,
-    verbose: bool = True,
-    threshold: int = 0,
-    return_fps: bool = False,
-    return_files: bool = False,
-    exclude_patterns: list[str] = None,
-    **kwargs,
-) -> dict[str, Any]:
-    """
-    data_path: str or Path or list of str or Path, the directory or file paths to concatenate.
-    file_types: list of str, the file types to concatenate. [e.g. ['.txt', '.md']]
-    output_dir: str or Path, the directory to save the concatenated file. If provided, will save the file.
-    output_filename: str, the filename to save the concatenated file.
-    file_exist_ok: bool, if True, overwrite the existing file. Default is True.
-    recursive: bool, if True, search files recursively. Default is True.
-    verbose: bool, if True, print the output path. Default is True.
-    threshold: int, the minimum number of chars for the file to be considered valid to concatenate.
-    exclude_patterns: list of str, patterns to exclude files from concatenation (e.g. ['log', 'temp', '.venv']).
-    kwargs: additional keyword arguments to pass to create_path.
-    """
-    persist_path = None
-    if output_dir:
-        if not output_filename:
-            output_filename = "concatenated_text.txt"
-            kwargs["timestamp"] = kwargs.get("timestamp", True)
-            kwargs["random_hash_digits"] = kwargs.get("random_hash_digits", 6)
-        output_filename = output_filename or "concatenated_text.txt"
-        persist_path = create_path(
-            output_dir, output_filename, file_exist_ok=file_exist_ok, **kwargs
-        )
-
-    texts = []
-
-    def _check_existence(_p: str) -> Path | list[Path] | None:
-        if exclude_patterns:
-            _str_p = str(_p)
-            for pattern in exclude_patterns:
-                if pattern in _str_p:
-                    return None
-
-        if not Path(_p).exists():
-            # if the path doesn't exist, return None
-            if verbose:
-                print(f"Path {_p} does not exist, skipping...")
-            return None
-
-        p = Path(_p)
-        if p.is_dir():
-            return dir_to_files(
-                p,
-                recursive=recursive,
-                file_types=file_types,
-                ignore_errors=True,
-                max_workers=5,
-            )
-        if p.is_file():
-            return p
-
-    data_path: list[Path] = lcall(
-        data_path,
-        _check_existence,
-        sanitize_input=True,
-        unique_input=True,
-        flatten=True,
-        dropna=True,
-        unique_output=True,
-        flatten_tuple_set=True,
-    )
-
-    contents = {}
-    fps = []
-    for dp in data_path:
-        try:
-            text = dp.read_text(encoding="utf-8")
-
-        except Exception:
-            # if we cannot read the file, skip it
-            print(f"Could not read file: {dp}. Skipping...")
-            continue
-
-        if threshold > 0 and len(text) < threshold:
-            continue
-
-        fps.append(dp)
-        contents[str(dp)] = text
-
-    for k, text in sorted(contents.items(), key=lambda x: x[0]):
-        texts.extend(["---", k, "---\n", text])
-
-    text = "\n".join(texts)
-    if persist_path:
-        persist_path.write_text(text, encoding="utf-8")
-        if verbose:
-            print(
-                f"Concatenated {len(fps)} files to {persist_path}."
-                f" The file contains {len(text)} characters."
-            )
-
-    out = {"text": text}  # default output
-    if persist_path:
-        out["persist_path"] = persist_path
-    if return_files:
-        out["texts"] = texts
-    if return_fps:
-        out["fps"] = fps
-
-    return out
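
For reference, a minimal sketch of how the removed concat helper could be called under 0.17.4 (the import path follows the deleted module above; the input paths and option values here are hypothetical):

    # Sketch only: assumes lionagi <= 0.17.4, where lionagi.libs.file.concat still exists.
    from lionagi.libs.file.concat import concat

    # Concatenate all .py and .md files under a (hypothetical) ./src directory,
    # skipping anything inside a virtualenv and files shorter than 100 characters.
    result = concat(
        data_path=["./src"],
        file_types=[".py", ".md"],
        output_dir="./data/concat",   # optional; when set, the result is also written to disk
        threshold=100,
        exclude_patterns=[".venv"],
        return_fps=True,
    )
    print(len(result["text"]))         # the concatenated text is always returned
    print(result.get("persist_path"))  # present only because output_dir was given
    print(result["fps"])               # included paths, present because return_fps=True
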
lionagi/libs/file/concat_files.py
DELETED
@@ -1,85 +0,0 @@
-from pathlib import Path
-
-from lionagi.utils import create_path
-
-from .process import dir_to_files
-
-
-def concat_files(
-    data_path: str | Path | list,
-    file_types: list[str],
-    output_dir: str | Path = None,
-    output_filename: str = None,
-    file_exist_ok: bool = True,
-    recursive: bool = True,
-    verbose: bool = True,
-    threshold: int = 0,
-    return_fps: bool = False,
-    return_files: bool = False,
-    **kwargs,
-) -> list[str] | str | tuple[list[str], list[Path]] | tuple[str, list[Path]]:
-    """
-    data_path: str or Path or list of str or Path, the directory or file paths to concatenate.
-    file_types: list of str, the file types to concatenate. [e.g. ['.txt', '.md']]
-    output_dir: str or Path, the directory to save the concatenated file. If provided, will save the file.
-    output_filename: str, the filename to save the concatenated file.
-    file_exist_ok: bool, if True, overwrite the existing file. Default is True.
-    recursive: bool, if True, search files recursively. Default is True.
-    verbose: bool, if True, print the output path. Default is True.
-    threshold: int, the minimum number of chars for the file to be considered valid to concatenate.
-    kwargs: additional keyword arguments to pass to create_path.
-    """
-    persist_path = None
-    if output_dir:
-        if not output_filename:
-            output_filename = "concatenated_text.txt"
-            kwargs["timestamp"] = kwargs.get("timestamp", True)
-            kwargs["random_hash_digits"] = kwargs.get("random_hash_digits", 6)
-        output_filename = output_filename or "concatenated_text.txt"
-        persist_path = create_path(
-            output_dir, output_filename, file_exist_ok=file_exist_ok, **kwargs
-        )
-
-    texts = []
-    data_path = (
-        [str(data_path)] if not isinstance(data_path, list) else data_path
-    )
-    data_path = sorted(data_path)
-    data_path = [Path(dp) for dp in data_path if Path(dp).exists()]
-
-    fps = []
-    for dp in data_path:
-        _fps = dir_to_files(dp, recursive=recursive, file_types=file_types)
-
-        data_path = sorted([str(i) for i in _fps])
-        data_path: list[Path] = [
-            Path(dp) for dp in data_path if Path(dp).exists()
-        ]
-
-        for fp in data_path:
-            text = fp.read_text(encoding="utf-8")
-            if len(text) >= threshold:
-                fp_text = (
-                    "\n----------------------------------------------------\n"
-                    f"{str(fp)}"
-                    "\n----------------------------------------------------\n"
                )
-                text = fp_text + text
-                texts.append(text)
-        fps.extend(data_path)
-
-    text = "\n".join(texts)
-    if persist_path:
-        persist_path.write_text(text, encoding="utf-8")
-        if verbose:
-            print(f"Concatenated {len(fps)} files to {persist_path}")
-            print(f"The file contains {len(text)} characters.")
-
-    if return_files:
-        if return_fps:
-            return texts, fps
-        return texts
-
-    if return_fps:
-        return text, fps
-    return text
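
Likewise, a small sketch of the older concat_files variant removed here; unlike concat it returns a string, a list, or a tuple depending on the flags rather than a dict. Paths are hypothetical:

    # Sketch only: assumes lionagi <= 0.17.4; concat_files is gone in 0.17.6.
    from lionagi.libs.file.concat_files import concat_files

    # With return_fps=True (and return_files left False) the call returns
    # the joined text plus the list of file paths that were included.
    text, fps = concat_files(
        data_path="./docs",
        file_types=[".md"],
        verbose=False,
        return_fps=True,
    )
    print(f"{len(fps)} files, {len(text)} characters")
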
lionagi/libs/file/file_ops.py
DELETED
@@ -1,118 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
-import logging
-from pathlib import Path
-from shutil import copy2
-
-
-def copy_file(src: Path | str, dest: Path | str) -> None:
-    """
-    Copy a file from a source path to a destination path.
-
-    Args:
-        src: The source file path.
-        dest: The destination file path.
-
-    Raises:
-        FileNotFoundError: If the source file does not exist or is not
-            a file.
-        PermissionError: If there are insufficient permissions to copy
-            the file.
-        OSError: If there's an OS-level error during the copy operation.
-    """
-    src_path, dest_path = Path(src), Path(dest)
-    if not src_path.is_file():
-        raise FileNotFoundError(f"{src_path} does not exist or is not a file.")
-
-    try:
-        dest_path.parent.mkdir(parents=True, exist_ok=True)
-        copy2(src_path, dest_path)
-    except PermissionError as e:
-        raise PermissionError(
-            f"Permission denied when copying {src_path} to {dest_path}"
-        ) from e
-    except OSError as e:
-        raise OSError(f"Failed to copy {src_path} to {dest_path}: {e}") from e
-
-
-def get_file_size(path: Path | str) -> int:
-    """
-    Get the size of a file or total size of files in a directory.
-
-    Args:
-        path: The file or directory path.
-
-    Returns:
-        The size in bytes.
-
-    Raises:
-        FileNotFoundError: If the path does not exist.
-        PermissionError: If there are insufficient permissions
-            to access the path.
-    """
-    path = Path(path)
-    try:
-        if path.is_file():
-            return path.stat().st_size
-        elif path.is_dir():
-            return sum(
-                f.stat().st_size for f in path.rglob("*") if f.is_file()
-            )
-        else:
-            raise FileNotFoundError(f"{path} does not exist.")
-    except PermissionError as e:
-        raise PermissionError(
-            f"Permission denied when accessing {path}"
-        ) from e
-
-
-def list_files(
-    dir_path: Path | str, extension: str | None = None
-) -> list[Path]:
-    """
-    List all files in a specified directory with an optional extension
-    filter, including files in subdirectories.
-
-    Args:
-        dir_path: The directory path where files are listed.
-        extension: Filter files by extension.
-
-    Returns:
-        A list of Path objects representing files in the directory.
-
-    Raises:
-        NotADirectoryError: If the provided dir_path is not a directory.
-    """
-    dir_path = Path(dir_path)
-    if not dir_path.is_dir():
-        raise NotADirectoryError(f"{dir_path} is not a directory.")
-
-    pattern = f"*.{extension}" if extension else "*"
-    return [f for f in dir_path.rglob(pattern) if f.is_file()]
-
-
-def read_file(path: Path | str, /) -> str:
-    """
-    Read the contents of a file.
-
-    Args:
-        path: The path to the file to read.
-
-    Returns:
-        str: The contents of the file.
-
-    Raises:
-        FileNotFoundError: If the file does not exist.
-        PermissionError: If there are insufficient permissions to read
-            the file.
-    """
-    try:
-        return Path(path).read_text(encoding="utf-8")
-    except FileNotFoundError as e:
-        logging.error(f"File not found: {path}: {e}")
-        raise
-    except PermissionError as e:
-        logging.error(f"Permission denied when reading file: {path}: {e}")
-        raise
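
A short sketch of the four helpers removed with this module, using hypothetical paths; the calls follow the deleted signatures above:

    # Sketch only: these helpers lived in lionagi.libs.file.file_ops through 0.17.4.
    from lionagi.libs.file.file_ops import (
        copy_file,
        get_file_size,
        list_files,
        read_file,
    )

    copy_file("notes.txt", "backup/notes.txt")     # parent directory is created if missing
    total = get_file_size("backup")                # bytes; recursive sum for a directory
    md_files = list_files("docs", extension="md")  # recursive *.md listing as Path objects
    content = read_file("notes.txt")               # UTF-8 text; errors are logged, then re-raised
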
lionagi/libs/file/save.py
DELETED
@@ -1,103 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
-import logging
-from pathlib import Path
-from typing import Any
-
-from lionagi.utils import create_path
-
-
-def save_to_file(
-    text: str,
-    directory: Path | str,
-    filename: str,
-    extension: str = None,
-    timestamp: bool = False,
-    dir_exist_ok: bool = True,
-    file_exist_ok: bool = False,
-    time_prefix: bool = False,
-    timestamp_format: str | None = None,
-    random_hash_digits: int = 0,
-    verbose: bool = True,
-) -> Path:
-    """
-    Save text to a file within a specified directory, optionally adding a
-    timestamp, hash, and verbose logging.
-
-    Args:
-        text: The text to save.
-        directory: The directory path to save the file.
-        filename: The filename for the saved text.
-        timestamp: If True, append a timestamp to the filename.
-        dir_exist_ok: If True, creates the directory if it does not exist.
-        time_prefix: If True, prepend the timestamp instead of appending.
-        timestamp_format: A custom format for the timestamp.
-        random_hash_digits: Number of random hash digits to append
-            to filename.
-        verbose: If True, logs the file path after saving.
-
-    Returns:
-        Path: The path to the saved file.
-
-    Raises:
-        OSError: If there's an error creating the directory or
-            writing the file.
-    """
-    try:
-        file_path = create_path(
-            directory=directory,
-            filename=filename,
-            extension=extension,
-            timestamp=timestamp,
-            dir_exist_ok=dir_exist_ok,
-            file_exist_ok=file_exist_ok,
-            time_prefix=time_prefix,
-            timestamp_format=timestamp_format,
-            random_hash_digits=random_hash_digits,
-        )
-        with file_path.open("w", encoding="utf-8") as file:
-            file.write(text)
-            file.close()
-        if verbose:
-            logging.info(f"Text saved to: {file_path}")
-        return file_path
-
-    except OSError as e:
-        logging.error(f"Failed to save file {filename}: {e}")
-        raise
-
-
-def save_chunks(
-    chunks: list[dict[str, Any]],
-    output_dir: str | Path,
-    verbose: bool,
-    timestamp: bool,
-    random_hash_digits: int,
-) -> None:
-    """Helper function to save chunks to files."""
-    from lionagi import ln
-
-    output_path = Path(output_dir)
-    for i, chunk in enumerate(chunks):
-        file_path = create_path(
-            directory=output_path,
-            filename=f"chunk_{i + 1}",
-            extension="json",
-            timestamp=timestamp,
-            random_hash_digits=random_hash_digits,
-        )
-        save_to_file(
-            ln.json_dumps(
-                chunk,
-                pretty=True,
-                sort_keys=True,
-                append_newline=True,
-                deterministic_sets=True,
-                decimal_as_float=True,
-            ),
-            directory=file_path.parent,
-            filename=file_path.name,
-            verbose=verbose,
-        )
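
A minimal sketch of the removed save_to_file helper, with a hypothetical directory and filename; the keyword arguments mirror the deleted signature above:

    # Sketch only: lionagi.libs.file.save is removed in 0.17.6.
    from lionagi.libs.file.save import save_to_file

    path = save_to_file(
        "hello world",
        directory="./data/out",
        filename="greeting",
        extension="txt",
        timestamp=True,           # append a timestamp to the filename
        random_hash_digits=4,     # plus a short random suffix
        file_exist_ok=True,
    )
    print(path)                   # the Path returned by create_path
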
lionagi/ln/concurrency/throttle.py
DELETED
@@ -1,83 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-import functools
-from collections.abc import Callable
-from time import sleep, time
-from typing import Any, TypeVar
-
-from typing_extensions import deprecated
-
-T = TypeVar("T")
-
-
-__all__ = ("Throttle",)
-
-
-@deprecated("Throttle is deprecated and will be removed in a future release.")
-class Throttle:
-    """
-    Provide a throttling mechanism for function calls.
-
-    When used as a decorator, it ensures that the decorated function can only
-    be called once per specified period. Subsequent calls within this period
-    are delayed to enforce this constraint.
-
-    Attributes:
-        period: The minimum time period (in seconds) between successive calls.
-    """
-
-    def __init__(self, period: float) -> None:
-        """
-        Initialize a new instance of Throttle.
-
-        Args:
-            period: The minimum time period (in seconds) between
-                successive calls.
-        """
-        self.period = period
-        self.last_called = 0
-
-    def __call__(self, func: Callable[..., T]) -> Callable[..., T]:
-        """
-        Decorate a synchronous function with the throttling mechanism.
-
-        Args:
-            func: The synchronous function to be throttled.
-
-        Returns:
-            The throttled synchronous function.
-        """
-
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs) -> Any:
-            elapsed = time() - self.last_called
-            if elapsed < self.period:
-                sleep(self.period - elapsed)
-            self.last_called = time()
-            return func(*args, **kwargs)
-
-        return wrapper
-
-    def __call_async__(
-        self, func: Callable[..., Callable[..., T]]
-    ) -> Callable[..., Callable[..., T]]:
-        """
-        Decorate an asynchronous function with the throttling mechanism.
-
-        Args:
-            func: The asynchronous function to be throttled.
-
-        Returns:
-            The throttled asynchronous function.
-        """
-
-        @functools.wraps(func)
-        async def wrapper(*args, **kwargs) -> Any:
-            elapsed = time() - self.last_called
-            if elapsed < self.period:
-                await asyncio.sleep(self.period - elapsed)
-            self.last_called = time()
-            return await func(*args, **kwargs)
-
-        return wrapper
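
For context, a sketch of the already-deprecated Throttle decorator that this release drops, applied to a trivial synchronous function:

    # Sketch only: assumes lionagi <= 0.17.4, where the deprecated Throttle still shipped.
    import time

    from lionagi.ln.concurrency.throttle import Throttle

    @Throttle(1.0)  # allow at most one call per second
    def ping() -> float:
        return time.time()

    t1 = ping()
    t2 = ping()                    # sleeps until roughly one second has elapsed
    print(f"gap: {t2 - t1:.2f}s")  # approximately the throttle period
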
lionagi/settings.py
DELETED
@@ -1,71 +0,0 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
-from datetime import timezone
-
-CACHED_CONFIG = {
-    "ttl": 300,
-    "key": None,
-    "namespace": None,
-    "key_builder": None,
-    "skip_cache_func": lambda x: False,
-    "serializer": None,
-    "plugins": None,
-    "alias": None,
-    "noself": lambda x: False,
-}
-
-CHAT_IMODEL_CONFIG = {
-    "provider": "openai",
-    "model": "gpt-4.1-nano",
-    "base_url": "https://api.openai.com/v1",
-    "endpoint": "chat/completions",
-    "api_key": "OPENAI_API_KEY",
-    "queue_capacity": 100,
-    "capacity_refresh_time": 60,
-    "interval": None,
-    "limit_requests": None,
-    "limit_tokens": None,
-}
-
-PARSE_IMODEL_CONFIG = {
-    "provider": "openai",
-    "model": "gpt-4.1-nano",
-    "base_url": "https://api.openai.com/v1",
-    "endpoint": "chat/completions",
-    "api_key": "OPENAI_API_KEY",
-    "queue_capacity": 100,
-    "capacity_refresh_time": 60,
-    "interval": None,
-    "limit_requests": None,
-    "limit_tokens": None,
-}
-
-LOG_CONFIG = {
-    "persist_dir": "./data/logs",
-    "subfolder": None,
-    "capacity": 50,
-    "extension": ".json",
-    "use_timestamp": True,
-    "hash_digits": 5,
-    "file_prefix": "log",
-    "auto_save_on_exit": True,
-    "clear_after_dump": True,
-}
-
-
-class Settings:
-    class Config:
-        TIMEZONE: timezone = timezone.utc
-        LOG: dict = LOG_CONFIG
-
-    class Action:
-        CACHED_CONFIG: dict = CACHED_CONFIG
-
-    class API:
-        CACHED_CONFIG: dict = CACHED_CONFIG
-
-    class iModel:
-        CHAT: dict = CHAT_IMODEL_CONFIG
-        PARSE: dict = PARSE_IMODEL_CONFIG
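
Finally, a sketch of how the removed Settings defaults were read as plain class attributes under 0.17.4:

    # Sketch only: lionagi.settings no longer exists in 0.17.6.
    from lionagi.settings import Settings

    print(Settings.Config.TIMEZONE)            # timezone.utc
    print(Settings.iModel.CHAT["model"])       # "gpt-4.1-nano"
    print(Settings.Config.LOG["persist_dir"])  # "./data/logs"
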
{lionagi-0.17.4.dist-info → lionagi-0.17.6.dist-info}/WHEEL
File without changes

{lionagi-0.17.4.dist-info → lionagi-0.17.6.dist-info}/licenses/LICENSE
File without changes