lionagi 0.12.2__py3-none-any.whl → 0.12.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lionagi/config.py +123 -0
- lionagi/fields/file.py +1 -1
- lionagi/fields/reason.py +1 -1
- lionagi/libs/file/concat.py +1 -6
- lionagi/libs/file/concat_files.py +1 -5
- lionagi/libs/file/save.py +1 -1
- lionagi/libs/package/imports.py +8 -177
- lionagi/libs/parse.py +30 -0
- lionagi/libs/schema/load_pydantic_model_from_schema.py +259 -0
- lionagi/libs/token_transform/perplexity.py +2 -4
- lionagi/libs/token_transform/synthlang_/resources/frameworks/framework_options.json +46 -46
- lionagi/libs/token_transform/synthlang_/translate_to_synthlang.py +1 -1
- lionagi/operations/chat/chat.py +2 -2
- lionagi/operations/communicate/communicate.py +20 -5
- lionagi/operations/parse/parse.py +131 -43
- lionagi/protocols/generic/log.py +1 -2
- lionagi/protocols/generic/pile.py +18 -4
- lionagi/protocols/messages/assistant_response.py +20 -1
- lionagi/protocols/messages/templates/README.md +6 -10
- lionagi/service/connections/__init__.py +15 -0
- lionagi/service/connections/api_calling.py +230 -0
- lionagi/service/connections/endpoint.py +410 -0
- lionagi/service/connections/endpoint_config.py +137 -0
- lionagi/service/connections/header_factory.py +56 -0
- lionagi/service/connections/match_endpoint.py +49 -0
- lionagi/service/connections/providers/__init__.py +3 -0
- lionagi/service/connections/providers/anthropic_.py +87 -0
- lionagi/service/connections/providers/exa_.py +33 -0
- lionagi/service/connections/providers/oai_.py +166 -0
- lionagi/service/connections/providers/ollama_.py +122 -0
- lionagi/service/connections/providers/perplexity_.py +29 -0
- lionagi/service/imodel.py +36 -144
- lionagi/service/manager.py +1 -7
- lionagi/service/{endpoints/rate_limited_processor.py → rate_limited_processor.py} +4 -2
- lionagi/service/resilience.py +545 -0
- lionagi/service/third_party/README.md +71 -0
- lionagi/service/third_party/__init__.py +0 -0
- lionagi/service/third_party/anthropic_models.py +159 -0
- lionagi/service/third_party/exa_models.py +165 -0
- lionagi/service/third_party/openai_models.py +18241 -0
- lionagi/service/third_party/pplx_models.py +156 -0
- lionagi/service/types.py +5 -4
- lionagi/session/branch.py +12 -7
- lionagi/tools/file/reader.py +1 -1
- lionagi/tools/memory/tools.py +497 -0
- lionagi/utils.py +921 -123
- lionagi/version.py +1 -1
- {lionagi-0.12.2.dist-info → lionagi-0.12.4.dist-info}/METADATA +33 -16
- {lionagi-0.12.2.dist-info → lionagi-0.12.4.dist-info}/RECORD +53 -63
- lionagi/libs/file/create_path.py +0 -80
- lionagi/libs/file/file_util.py +0 -358
- lionagi/libs/parse/__init__.py +0 -3
- lionagi/libs/parse/fuzzy_parse_json.py +0 -117
- lionagi/libs/parse/to_dict.py +0 -336
- lionagi/libs/parse/to_json.py +0 -61
- lionagi/libs/parse/to_num.py +0 -378
- lionagi/libs/parse/to_xml.py +0 -57
- lionagi/libs/parse/xml_parser.py +0 -148
- lionagi/libs/schema/breakdown_pydantic_annotation.py +0 -48
- lionagi/service/endpoints/__init__.py +0 -3
- lionagi/service/endpoints/base.py +0 -706
- lionagi/service/endpoints/chat_completion.py +0 -116
- lionagi/service/endpoints/match_endpoint.py +0 -72
- lionagi/service/providers/__init__.py +0 -3
- lionagi/service/providers/anthropic_/__init__.py +0 -3
- lionagi/service/providers/anthropic_/messages.py +0 -99
- lionagi/service/providers/exa_/models.py +0 -3
- lionagi/service/providers/exa_/search.py +0 -80
- lionagi/service/providers/exa_/types.py +0 -7
- lionagi/service/providers/groq_/__init__.py +0 -3
- lionagi/service/providers/groq_/chat_completions.py +0 -56
- lionagi/service/providers/ollama_/__init__.py +0 -3
- lionagi/service/providers/ollama_/chat_completions.py +0 -134
- lionagi/service/providers/openai_/__init__.py +0 -3
- lionagi/service/providers/openai_/chat_completions.py +0 -101
- lionagi/service/providers/openai_/spec.py +0 -14
- lionagi/service/providers/openrouter_/__init__.py +0 -3
- lionagi/service/providers/openrouter_/chat_completions.py +0 -62
- lionagi/service/providers/perplexity_/__init__.py +0 -3
- lionagi/service/providers/perplexity_/chat_completions.py +0 -44
- lionagi/service/providers/perplexity_/models.py +0 -5
- lionagi/service/providers/types.py +0 -17
- /lionagi/{service/providers/exa_/__init__.py → py.typed} +0 -0
- /lionagi/service/{endpoints/token_calculator.py → token_calculator.py} +0 -0
- {lionagi-0.12.2.dist-info → lionagi-0.12.4.dist-info}/WHEEL +0 -0
- {lionagi-0.12.2.dist-info → lionagi-0.12.4.dist-info}/licenses/LICENSE +0 -0
lionagi/config.py
ADDED
@@ -0,0 +1,123 @@
+# Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from typing import Any, ClassVar
+
+from pydantic import BaseModel, Field, SecretStr
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class CacheConfig(BaseModel):
+    """Configuration for aiocache."""
+
+    ttl: int = 300
+    key: str | None = None
+    namespace: str | None = None
+    key_builder: Any = None
+    skip_cache_func: Any = lambda _: False
+    serializer: dict[str, Any] | None = None
+    plugins: Any = None
+    alias: str | None = None
+    noself: Any = lambda _: False
+
+    def as_kwargs(self) -> dict[str, Any]:
+        """Convert config to kwargs dict for @cached decorator.
+
+        Removes unserialisable callables that aiocache can't pickle.
+        """
+        raw = self.model_dump(exclude_none=True)
+        # Remove all unserialisable callables
+        unserialisable_keys = (
+            "key_builder",
+            "skip_cache_func",
+            "noself",
+            "serializer",
+            "plugins",
+        )
+        for key in unserialisable_keys:
+            raw.pop(key, None)
+        return raw
+
+
+class AppSettings(BaseSettings, frozen=True):
+    """Application settings with environment variable support."""
+
+    model_config = SettingsConfigDict(
+        env_file=(".env", ".env.local", ".secrets.env"),
+        env_file_encoding="utf-8",
+        case_sensitive=False,
+        extra="ignore",
+    )
+
+    aiocache_config: CacheConfig = Field(
+        default_factory=CacheConfig, description="Cache settings for aiocache"
+    )
+
+    # secrets
+    OPENAI_API_KEY: SecretStr | None = None
+    OPENROUTER_API_KEY: SecretStr | None = None
+    OLLAMA_API_KEY: SecretStr | None = None
+    EXA_API_KEY: SecretStr | None = None
+    PERPLEXITY_API_KEY: SecretStr | None = None
+    GROQ_API_KEY: SecretStr | None = None
+    ANTHROPIC_API_KEY: SecretStr | None = None
+
+    # defaults models
+    LIONAGI_EMBEDDING_PROVIDER: str = "openai"
+    LIONAGI_EMBEDDING_MODEL: str = "text-embedding-3-small"
+
+    LIONAGI_CHAT_PROVIDER: str = "openai"
+    LIONAGI_CHAT_MODEL: str = "gpt-4o"
+
+    # default storage
+    LIONAGI_AUTO_STORE_EVENT: bool = False
+    LIONAGI_STORAGE_PROVIDER: str = "async_qdrant"
+
+    LIONAGI_AUTO_EMBED_LOG: bool = False
+
+    # specific storage
+    LIONAGI_QDRANT_URL: str = "http://localhost:6333"
+    LIONAGI_DEFAULT_QDRANT_COLLECTION: str = "event_logs"
+
+    # Class variable to store the singleton instance
+    _instance: ClassVar[Any] = None
+
+    def get_secret(self, key_name: str) -> str:
+        """
+        Get the secret value for a given key name.
+
+        Args:
+            key_name: The name of the secret key (e.g., "OPENAI_API_KEY")
+
+        Returns:
+            The secret value as a string
+
+        Raises:
+            AttributeError: If the key doesn't exist
+            ValueError: If the key exists but is None
+        """
+        if not hasattr(self, key_name):
+            if "ollama" in key_name.lower():
+                return "ollama"
+            raise AttributeError(
+                f"Secret key '{key_name}' not found in settings"
+            )
+
+        secret = getattr(self, key_name)
+        if secret is None:
+            # Special case for Ollama - return "ollama" even if key exists but is None
+            if "ollama" in key_name.lower():
+                return "ollama"
+            raise ValueError(f"Secret key '{key_name}' is not set")
+
+        if isinstance(secret, SecretStr):
+            return secret.get_secret_value()
+
+        return str(secret)
+
+
+# Create a singleton instance
+settings = AppSettings()
+# Store the instance in the class variable for singleton pattern
+AppSettings._instance = settings
lionagi/fields/file.py
CHANGED
lionagi/fields/reason.py
CHANGED
lionagi/libs/file/concat.py
CHANGED
@@ -1,12 +1,7 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
 from pathlib import Path
 from typing import Any

-from lionagi.
-from lionagi.utils import lcall
+from lionagi.utils import create_path, lcall

 from .process import dir_to_files

lionagi/libs/file/concat_files.py
CHANGED
@@ -1,10 +1,6 @@
-# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
-#
-# SPDX-License-Identifier: Apache-2.0
-
 from pathlib import Path

-from lionagi.
+from lionagi.utils import create_path

 from .process import dir_to_files

lionagi/libs/file/save.py
CHANGED
lionagi/libs/package/imports.py
CHANGED
@@ -2,184 +2,15 @@
 #
 # SPDX-License-Identifier: Apache-2.0

-import
-
-
-
-
-
-
-from lionagi.utils import is_import_installed
-
-
-def run_package_manager_command(
-    args: Sequence[str],
-) -> subprocess.CompletedProcess[bytes]:
-    """Run a package manager command, using uv if available, otherwise falling back to pip."""
-    # Check if uv is available in PATH
-    uv_path = shutil.which("uv")
-
-    if uv_path:
-        # Use uv if available
-        try:
-            return subprocess.run(
-                [uv_path] + list(args),
-                check=True,
-                capture_output=True,
-            )
-        except subprocess.CalledProcessError:
-            # If uv fails, fall back to pip
-            print("uv command failed, falling back to pip...")
-
-    # Fall back to pip
-    return subprocess.run(
-        [sys.executable, "-m", "pip"] + list(args),
-        check=True,
-        capture_output=True,
-    )
-
-
-def check_import(
-    package_name: str,
-    module_name: str | None = None,
-    import_name: str | None = None,
-    pip_name: str | None = None,
-    attempt_install: bool = True,
-    error_message: str = "",
-):
-    """
-    Check if a package is installed, attempt to install if not.
-
-    Args:
-        package_name: The name of the package to check.
-        module_name: The specific module to import (if any).
-        import_name: The specific name to import from the module (if any).
-        pip_name: The name to use for pip installation (if different).
-        attempt_install: Whether to attempt installation if not found.
-        error_message: Custom error message to use if package not found.
-
-    Raises:
-        ImportError: If the package is not found and not installed.
-        ValueError: If the import fails after installation attempt.
-    """
-    if not is_import_installed(package_name):
-        if attempt_install:
-            logging.info(
-                f"Package {package_name} not found. Attempting " "to install.",
-            )
-            try:
-                return install_import(
-                    package_name=package_name,
-                    module_name=module_name,
-                    import_name=import_name,
-                    pip_name=pip_name,
-                )
-            except ImportError as e:
-                raise ValueError(
-                    f"Failed to install {package_name}: {e}"
-                ) from e
-        else:
-            logging.info(
-                f"Package {package_name} not found. {error_message}",
-            )
-            raise ImportError(
-                f"Package {package_name} not found. {error_message}",
-            )
-
-    return import_module(
-        package_name=package_name,
-        module_name=module_name,
-        import_name=import_name,
-    )
-
-
-def import_module(
-    package_name: str,
-    module_name: str = None,
-    import_name: str | list = None,
-) -> Any:
-    """
-    Import a module by its path.
-
-    Args:
-        module_path: The path of the module to import.
-
-    Returns:
-        The imported module.
-
-    Raises:
-        ImportError: If the module cannot be imported.
-    """
-    try:
-        full_import_path = (
-            f"{package_name}.{module_name}" if module_name else package_name
-        )
-
-        if import_name:
-            import_name = (
-                [import_name]
-                if not isinstance(import_name, list)
-                else import_name
-            )
-            a = __import__(
-                full_import_path,
-                fromlist=import_name,
-            )
-            if len(import_name) == 1:
-                return getattr(a, import_name[0])
-            return [getattr(a, name) for name in import_name]
-        else:
-            return __import__(full_import_path)
-
-    except ImportError as e:
-        raise ImportError(
-            f"Failed to import module {full_import_path}: {e}"
-        ) from e
-
-
-def install_import(
-    package_name: str,
-    module_name: str | None = None,
-    import_name: str | None = None,
-    pip_name: str | None = None,
-):
-    """
-    Attempt to import a package, installing it if not found.
-
-    Args:
-        package_name: The name of the package to import.
-        module_name: The specific module to import (if any).
-        import_name: The specific name to import from the module (if any).
-        pip_name: The name to use for pip installation (if different).
-
-    Raises:
-        ImportError: If the package cannot be imported or installed.
-        subprocess.CalledProcessError: If pip installation fails.
-    """
-    pip_name = pip_name or package_name
-
-    try:
-        return import_module(
-            package_name=package_name,
-            module_name=module_name,
-            import_name=import_name,
-        )
-    except ImportError:
-        logging.info(f"Installing {pip_name}...")
-        try:
-            run_package_manager_command(["install", pip_name])
-            return import_module(
-                package_name=package_name,
-                module_name=module_name,
-                import_name=import_name,
-            )
-        except subprocess.CalledProcessError as e:
-            raise ImportError(f"Failed to install {pip_name}: {e}") from e
-        except ImportError as e:
-            raise ImportError(
-                f"Failed to import {pip_name} after installation: {e}"
-            ) from e
+from lionagi.utils import (
+    check_import,
+    import_module,
+    install_import,
+    is_import_installed,
+    run_package_manager_command,
+)

+# backward compatibility

 __all__ = (
     "run_package_manager_command",
lionagi/libs/parse.py
ADDED
@@ -0,0 +1,30 @@
+# Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from lionagi.libs.schema.as_readable import as_readable
+from lionagi.libs.schema.extract_code_block import extract_code_block
+from lionagi.libs.schema.function_to_schema import function_to_schema
+from lionagi.libs.validate.fuzzy_match_keys import fuzzy_match_keys
+from lionagi.libs.validate.fuzzy_validate_mapping import fuzzy_validate_mapping
+from lionagi.libs.validate.string_similarity import string_similarity
+from lionagi.utils import fuzzy_parse_json, to_dict, to_json, to_num
+
+validate_keys = fuzzy_match_keys  # for backward compatibility
+validate_mapping = fuzzy_validate_mapping  # for backward compatibility
+
+
+__all__ = (
+    "as_readable",
+    "extract_code_block",
+    "function_to_schema",
+    "fuzzy_match_keys",
+    "fuzzy_validate_mapping",
+    "string_similarity",
+    "validate_keys",
+    "validate_mapping",
+    "to_dict",
+    "to_json",
+    "to_num",
+    "fuzzy_parse_json",
+)
lionagi/libs/schema/load_pydantic_model_from_schema.py
ADDED
@@ -0,0 +1,259 @@
+# Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import importlib.util
+import json
+import string
+import tempfile
+from pathlib import Path
+from typing import Any, TypeVar
+
+from pydantic import BaseModel, PydanticUserError
+
+from lionagi.utils import is_import_installed
+
+_HAS_DATAMODEL_CODE_GENERATOR = is_import_installed("datamodel-code-generator")
+
+# Import at module level for easier mocking in tests
+if _HAS_DATAMODEL_CODE_GENERATOR:
+    from datamodel_code_generator import (
+        DataModelType,
+        InputFileType,
+        PythonVersion,
+        generate,
+    )
+else:
+    # Create dummy objects for when package is not installed
+    DataModelType = None
+    InputFileType = None
+    PythonVersion = None
+    generate = None
+
+
+B = TypeVar("B", bound=BaseModel)
+
+
+def load_pydantic_model_from_schema(
+    schema: str | dict[str, Any],
+    model_name: str = "DynamicModel",
+    /,
+    pydantic_version=None,
+    python_version=None,
+) -> type[BaseModel]:
+    """
+    Generates a Pydantic model class dynamically from a JSON schema string or dict,
+    and ensures it's fully resolved using model_rebuild() with the correct namespace.
+
+    Args:
+        schema: The JSON schema as a string or a Python dictionary.
+        model_name: The desired base name for the generated Pydantic model.
+            If the schema has a 'title', that will likely be used.
+        pydantic_version: The Pydantic model type to generate.
+        python_version: The target Python version for generated code syntax.
+
+    Returns:
+        The dynamically created and resolved Pydantic BaseModel class.
+
+    Raises:
+        ValueError: If the schema is invalid.
+        FileNotFoundError: If the generated model file is not found.
+        AttributeError: If the expected model class cannot be found.
+        RuntimeError: For errors during generation, loading, or rebuilding.
+        Exception: For other potential errors.
+    """
+    if not _HAS_DATAMODEL_CODE_GENERATOR:
+        error_msg = "`datamodel-code-generator` is not installed. Please install with `pip install datamodel-code-generator`."
+        raise ImportError(error_msg)
+
+    if DataModelType is not None:
+        pydantic_version = (
+            pydantic_version or DataModelType.PydanticV2BaseModel
+        )
+        python_version = python_version or PythonVersion.PY_312
+    else:
+        # These won't be used since we'll raise ImportError above
+        pydantic_version = None
+        python_version = None
+
+    schema_input_data: str
+    schema_dict: dict[str, Any]
+    resolved_model_name = (
+        model_name  # Keep track of the potentially updated name
+    )
+
+    # --- 1. Prepare Schema Input ---
+    if isinstance(schema, dict):
+        try:
+            model_name_from_title = schema.get("title")
+            if model_name_from_title and isinstance(
+                model_name_from_title, str
+            ):
+                valid_chars = string.ascii_letters + string.digits + "_"
+                sanitized_title = "".join(
+                    c
+                    for c in model_name_from_title.replace(" ", "")
+                    if c in valid_chars
+                )
+                if sanitized_title and sanitized_title[0].isalpha():
+                    resolved_model_name = (
+                        sanitized_title  # Update the name to use
+                    )
+            schema_dict = schema
+            schema_input_data = json.dumps(schema)
+        except TypeError as e:
+            error_msg = "Invalid dictionary provided for schema"
+            raise ValueError(error_msg) from e
+    elif isinstance(schema, str):
+        try:
+            schema_dict = json.loads(schema)
+            model_name_from_title = schema_dict.get("title")
+            if model_name_from_title and isinstance(
+                model_name_from_title, str
+            ):
+                valid_chars = string.ascii_letters + string.digits + "_"
+                sanitized_title = "".join(
+                    c
+                    for c in model_name_from_title.replace(" ", "")
+                    if c in valid_chars
+                )
+                if sanitized_title and sanitized_title[0].isalpha():
+                    resolved_model_name = (
+                        sanitized_title  # Update the name to use
+                    )
+            schema_input_data = schema
+        except json.JSONDecodeError as e:
+            error_msg = "Invalid JSON schema string provided"
+            raise ValueError(error_msg) from e
+    else:
+        error_msg = "Schema must be a JSON string or a dictionary."
+        raise TypeError(error_msg)
+
+    # --- 2. Generate Code to Temporary File ---
+    with tempfile.TemporaryDirectory() as temporary_directory_name:
+        temporary_directory = Path(temporary_directory_name)
+        # Use a predictable but unique-ish filename
+        output_file = (
+            temporary_directory
+            / f"{resolved_model_name.lower()}_model_{hash(schema_input_data)}.py"
+        )
+        module_name = output_file.stem  # e.g., "userprofile_model_12345"
+
+        try:
+            generate(
+                schema_input_data,
+                input_file_type=InputFileType.JsonSchema,
+                input_filename="schema.json",
+                output=output_file,
+                output_model_type=pydantic_version,
+                target_python_version=python_version,
+                # Ensure necessary base models are imported in the generated code
+                base_class="pydantic.BaseModel",
+            )
+        except Exception as e:
+            # Optional: Print generated code on failure for debugging
+            # if output_file.exists():
+            #     print(f"--- Generated Code (Error) ---\n{output_file.read_text()}\n--------------------------")
+            error_msg = "Failed to generate model code"
+            raise RuntimeError(error_msg) from e
+
+        if not output_file.exists():
+            error_msg = f"Generated model file was not created: {output_file}"
+            raise FileNotFoundError(error_msg)
+
+        def get_modules():
+            spec = importlib.util.spec_from_file_location(
+                module_name, str(output_file)
+            )
+
+            if spec is None or spec.loader is None:
+                error_msg = f"Could not create module spec for {output_file}"
+                raise ImportError(error_msg)
+
+            return spec, importlib.util.module_from_spec(spec)
+
+        # --- 3. Import the Generated Module Dynamically ---
+        try:
+            spec, generated_module = get_modules()
+            # Important: Make pydantic available within the executed module's globals
+            # if it's not explicitly imported by the generated code for some reason.
+            # Usually, datamodel-code-generator handles imports well.
+            # generated_module.__dict__['BaseModel'] = BaseModel
+            spec.loader.exec_module(generated_module)
+
+        except Exception as e:
+            # Optional: Print generated code on failure for debugging
+            # print(f"--- Generated Code (Import Error) ---\n{output_file.read_text()}\n--------------------------")
+            error_msg = f"Failed to load generated module ({output_file})"
+            raise RuntimeError(error_msg) from e
+
+        def validate_base_model_class(m):
+            if not isinstance(m, type) or not issubclass(m, BaseModel):
+                error_msg = f"Found attribute '{resolved_model_name}' is not a Pydantic BaseModel class."
+                raise TypeError(error_msg)
+
+        # --- 4. Find the Model Class ---
+        model_class: type[BaseModel]
+        try:
+            # Use the name potentially derived from the schema title
+            model_class = getattr(generated_module, resolved_model_name)
+            validate_base_model_class(model_class)
+
+        except AttributeError:
+            # Fallback attempt (less likely now with title extraction)
+            try:
+                model_class = generated_module.Model  # Default fallback name
+                validate_base_model_class(model_class)
+                print(
+                    f"Warning: Model name '{resolved_model_name}' not found, falling back to 'Model'."
+                )
+            except AttributeError as e:
+                # List available Pydantic models found in the module for debugging
+                available_attrs = [
+                    attr
+                    for attr in dir(generated_module)
+                    if isinstance(getattr(generated_module, attr, None), type)
+                    and issubclass(
+                        getattr(generated_module, attr, object), BaseModel
+                    )  # Check inheritance safely
+                    and getattr(generated_module, attr, None)
+                    is not BaseModel  # Exclude BaseModel itself
+                ]
+                # Optional: Print generated code on failure for debugging
+                # print(f"--- Generated Code (AttributeError) ---\n{output_file.read_text()}\n--------------------------")
+                error_msg = (
+                    f"Could not find expected model class '{resolved_model_name}' or fallback 'Model' "
+                    f"in the generated module {output_file}. "
+                    f"Found Pydantic models: {available_attrs}"
+                )
+                raise AttributeError(error_msg) from e
+        except TypeError as e:
+            error_msg = (
+                f"Error validating found model class '{resolved_model_name}'"
+            )
+            raise TypeError(error_msg) from e
+
+        # --- 5. Rebuild the Model (Providing Namespace) ---
+        try:
+            # Pass the generated module's dictionary as the namespace
+            # for resolving type hints like 'Status', 'ProfileDetails', etc.
+            model_class.model_rebuild(
+                _types_namespace=generated_module.__dict__,
+                force=True,  # Force rebuild even if Pydantic thinks it's okay
+            )
+        except (
+            PydanticUserError,
+            NameError,
+        ) as e:  # Catch NameError explicitly here
+            # Optional: Print generated code on failure for debugging
+            # print(f"--- Generated Code (Rebuild Error) ---\n{output_file.read_text()}\n--------------------------")
+            error_msg = f"Error during model_rebuild for {resolved_model_name}"
+            raise RuntimeError(error_msg) from e
+        except Exception as e:
+            # Optional: Print generated code on failure for debugging
+            # print(f"--- Generated Code (Rebuild Error) ---\n{output_file.read_text()}\n--------------------------")
+            error_msg = f"Unexpected error during model_rebuild for {resolved_model_name}"
+            raise RuntimeError(error_msg) from e
+
+    # --- 6. Return the Resolved Model Class ---
+    return model_class
lionagi/libs/token_transform/perplexity.py
CHANGED
@@ -7,7 +7,7 @@ from pydantic import BaseModel

 from lionagi.protocols.generic.event import EventStatus
 from lionagi.protocols.generic.log import Log
-from lionagi.service.
+from lionagi.service.connections.api_calling import APICalling
 from lionagi.service.imodel import iModel
 from lionagi.utils import alcall, lcall, to_dict, to_list

@@ -208,9 +208,7 @@ class LLMCompressor:
         Tokenize text. If no custom tokenizer, use the default from lionagi.
         """
         if not self.tokenizer:
-            from lionagi.service.
-                TokenCalculator,
-            )
+            from lionagi.service.token_calculator import TokenCalculator

             return TokenCalculator.tokenize(
                 text,