gllm-core-binary 0.3.23b3__py3-none-any.whl → 0.4.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gllm_core/adapters/__init__.py +5 -0
- gllm_core/adapters/__init__.pyi +3 -0
- gllm_core/adapters/tool/__init__.py +6 -0
- gllm_core/adapters/tool/__init__.pyi +4 -0
- gllm_core/adapters/tool/google_adk.py +91 -0
- gllm_core/adapters/tool/google_adk.pyi +23 -0
- gllm_core/adapters/tool/langchain.py +130 -0
- gllm_core/adapters/tool/langchain.pyi +31 -0
- gllm_core/constants.py +0 -1
- gllm_core/constants.pyi +0 -1
- gllm_core/event/event_emitter.py +8 -44
- gllm_core/event/event_emitter.pyi +9 -21
- gllm_core/event/handler/console_event_handler.py +1 -12
- gllm_core/event/handler/console_event_handler.pyi +0 -1
- gllm_core/event/handler/print_event_handler.py +15 -59
- gllm_core/event/handler/print_event_handler.pyi +1 -2
- gllm_core/schema/__init__.py +2 -2
- gllm_core/schema/__init__.pyi +2 -2
- gllm_core/schema/component.py +236 -27
- gllm_core/schema/component.pyi +164 -17
- gllm_core/schema/schema_generator.py +150 -0
- gllm_core/schema/schema_generator.pyi +35 -0
- gllm_core/schema/tool.py +31 -1
- gllm_core/schema/tool.pyi +21 -0
- gllm_core/utils/__init__.py +2 -0
- gllm_core/utils/__init__.pyi +2 -1
- gllm_core/utils/analyzer.py +24 -1
- gllm_core/utils/analyzer.pyi +15 -1
- gllm_core/utils/concurrency.py +2 -2
- gllm_core/utils/logger_manager.py +17 -7
- gllm_core/utils/logger_manager.pyi +3 -0
- gllm_core/utils/main_method_resolver.py +185 -0
- gllm_core/utils/main_method_resolver.pyi +54 -0
- gllm_core/utils/retry.py +130 -21
- gllm_core/utils/retry.pyi +6 -29
- gllm_core_binary-0.4.4.dist-info/METADATA +177 -0
- {gllm_core_binary-0.3.23b3.dist-info → gllm_core_binary-0.4.4.dist-info}/RECORD +39 -27
- gllm_core_binary-0.3.23b3.dist-info/METADATA +0 -108
- {gllm_core_binary-0.3.23b3.dist-info → gllm_core_binary-0.4.4.dist-info}/WHEEL +0 -0
- {gllm_core_binary-0.3.23b3.dist-info → gllm_core_binary-0.4.4.dist-info}/top_level.txt +0 -0

gllm_core/schema/schema_generator.py
ADDED
@@ -0,0 +1,150 @@
+"""Utility for generating Pydantic models from component methods.
+
+Authors:
+    Dimitrij Ray (dimitrij.ray@gdplabs.id)
+
+References:
+    NONE
+"""
+
+from __future__ import annotations
+
+import inspect
+from typing import Any, Callable, get_type_hints
+
+from pydantic import BaseModel, ConfigDict, create_model
+
+from gllm_core.utils.analyzer import analyze_method
+
+
+def generate_params_model(method: Callable, class_name: str) -> type[BaseModel]:
+    """Generate a Pydantic model representing a component method signature.
+
+    The generated class is named `{class_name}Params` and contains one field for
+    every parameter in `method`. The first `self` parameter is ignored, `*args` are
+    skipped entirely, and `**kwargs` trigger `extra="allow"` to permit arbitrary
+    keyword arguments at runtime.
+
+    For legacy `_run` methods with only `**kwargs`, this function will use
+    RunAnalyzer to infer parameters from the method body usage patterns.
+
+    Args:
+        method (Callable): Method whose signature should be represented.
+        class_name (str): Component class name used to derive the generated model name.
+
+    Returns:
+        type[BaseModel]: A Pydantic `BaseModel` subclass describing the method's
+            parameters.
+
+    Example:
+        ```python
+        class_name = "TextProcessor"
+
+        def process(self, text: str, count: int = 5) -> str:
+            return text * count
+
+        Model = generate_params_model(process, class_name)
+        assert Model.__name__ == "TextProcessorParams"
+        assert Model(text="hello", count=2).model_dump() == {"text": "hello", "count": 2}
+        ```
+    """
+    signature = inspect.signature(method)
+
+    non_self_params = [param for param in signature.parameters.values() if param.name != "self"]
+
+    if len(non_self_params) == 1 and non_self_params[0].kind == inspect.Parameter.VAR_KEYWORD:
+        return _generate_from_analyzer(method, class_name)
+
+    return _generate_from_signature(method, class_name)
+
+
+def _generate_from_signature(method: Callable, class_name: str) -> type[BaseModel]:
+    """Generate model using method signature analysis.
+
+    This function is used for modern methods with explicit parameter annotations.
+
+    Args:
+        method (Callable): Method whose signature should be represented.
+        class_name (str): Component class name used to derive the generated model name.
+
+    Returns:
+        type[BaseModel]: A Pydantic `BaseModel` subclass describing the method's
+            parameters.
+    """
+    signature = inspect.signature(method)
+
+    try:
+        type_hints = get_type_hints(method)
+    except Exception:  # pragma: no cover - defensive fallback
+        type_hints = {}
+
+    fields: dict[str, tuple[type[Any], Any]] = {}
+    extra_setting = "forbid"
+
+    for param_name, param in signature.parameters.items():
+        if param_name == "self":
+            continue
+        if param.kind == inspect.Parameter.VAR_POSITIONAL:
+            continue
+        if param.kind == inspect.Parameter.VAR_KEYWORD:
+            extra_setting = "allow"
+            continue
+
+        annotation = type_hints.get(param_name, Any)
+        if param.default is inspect.Parameter.empty:
+            fields[param_name] = (annotation, ...)
+        else:
+            fields[param_name] = (annotation, param.default)
+
+    model_name = f"{class_name}Params"
+    return create_model(  # type: ignore[call-overload]
+        model_name,
+        __config__=ConfigDict(extra=extra_setting, arbitrary_types_allowed=True),
+        **fields,
+    )
+
+
+def _generate_from_analyzer(method: Callable, class_name: str) -> type[BaseModel]:
+    """Generate model using RunAnalyzer to infer parameters from method body.
+
+    This is used for legacy `_run` methods that only have `**kwargs` in their signature
+    but use specific parameters within the method body.
+
+    Args:
+        method (Callable): Method whose signature should be represented.
+        class_name (str): Component class name used to derive the generated model name.
+
+    Returns:
+        type[BaseModel]: A Pydantic `BaseModel` subclass describing the method's
+            parameters.
+    """
+
+    class MockClass:
+        pass
+
+    MockClass._run = method
+    config_dict = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
+    try:
+        profile = analyze_method(MockClass, method)
+
+        fields: dict[str, tuple[type[Any], Any]] = {}
+
+        for param_name in profile.arg_usages.required:
+            fields[param_name] = (Any, ...)
+
+        for param_name in profile.arg_usages.optional:
+            fields[param_name] = (Any, None)
+
+        model_name = f"{class_name}Params"
+        return create_model(  # type: ignore[call-overload]
+            model_name,
+            __config__=config_dict,
+            **fields,
+        )
+    except Exception:
+        model_name = f"{class_name}Params"
+        return create_model(  # type: ignore[call-overload]
+            model_name,
+            __config__=config_dict,
+        )

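The `**kwargs`-only branch above is the interesting one. Below is a hypothetical usage sketch (not part of the diff) of how `generate_params_model` could behave for a legacy `_run`, assuming gllm-core 0.4.4 is installed and that `RunAnalyzer` records `kwargs["..."]` accesses as required arguments and `kwargs.get(...)` accesses as optional ones:

```python
# Hypothetical sketch; the class and field names here are illustrative, not from the package.
from gllm_core.schema.schema_generator import generate_params_model


class LegacyComponent:
    def _run(self, **kwargs):
        query = kwargs["query"]          # expected to surface as a required field
        top_k = kwargs.get("top_k", 5)   # expected to surface as an optional field
        return f"{query}:{top_k}"


Params = generate_params_model(LegacyComponent._run, "LegacyComponent")
print(Params.__name__)                     # "LegacyComponentParams"
print(Params(query="hello").model_dump())  # extra="allow", so unrecognized kwargs also validate
```

If the analyzer raises for any reason, the `except Exception` branch still returns an empty `extra="allow"` model, so callers always get a usable params class.
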
gllm_core/schema/schema_generator.pyi
ADDED
@@ -0,0 +1,35 @@
+from gllm_core.utils.analyzer import analyze_method as analyze_method
+from pydantic import BaseModel as BaseModel
+from typing import Callable
+
+def generate_params_model(method: Callable, class_name: str) -> type[BaseModel]:
+    '''Generate a Pydantic model representing a component method signature.
+
+    The generated class is named `{class_name}Params` and contains one field for
+    every parameter in `method`. The first `self` parameter is ignored, `*args` are
+    skipped entirely, and `**kwargs` trigger `extra="allow"` to permit arbitrary
+    keyword arguments at runtime.
+
+    For legacy `_run` methods with only `**kwargs`, this function will use
+    RunAnalyzer to infer parameters from the method body usage patterns.
+
+    Args:
+        method (Callable): Method whose signature should be represented.
+        class_name (str): Component class name used to derive the generated model name.
+
+    Returns:
+        type[BaseModel]: A Pydantic `BaseModel` subclass describing the method\'s
+            parameters.
+
+    Example:
+        ```python
+        class_name = "TextProcessor"
+
+        def process(self, text: str, count: int = 5) -> str:
+            return text * count
+
+        Model = generate_params_model(process, class_name)
+        assert Model.__name__ == "TextProcessorParams"
+        assert Model(text="hello", count=2).model_dump() == {"text": "hello", "count": 2}
+        ```
+    '''

gllm_core/schema/tool.py
CHANGED
@@ -6,6 +6,7 @@ Authors:
 References:
     [1] https://modelcontextprotocol.io/
 """
+from __future__ import annotations
 
 import asyncio
 import inspect
@@ -268,7 +269,36 @@ class Tool(BaseModel):
     func: Callable | None = Field(default=None)
     is_async: bool = Field(default=False)
 
-    model_config = ConfigDict(
+    model_config = ConfigDict(validate_by_name=True, arbitrary_types_allowed=True)
+
+    @classmethod
+    def from_langchain(cls, langchain_tool: Any) -> "Tool":
+        """Create a Tool from a LangChain tool instance.
+
+        Args:
+            langchain_tool (Any): LangChain tool implementation to convert.
+
+        Returns:
+            Tool: Tool instance derived from the LangChain representation.
+        """
+        from gllm_core.adapters.tool import from_langchain_tool  # noqa: PLC0415
+
+        return from_langchain_tool(langchain_tool)
+
+    @classmethod
+    def from_google_adk(cls, function_declaration: Any, func: Callable | None = None) -> "Tool":
+        """Create a Tool from a Google ADK function declaration.
+
+        Args:
+            function_declaration (Any): Google ADK function declaration to convert.
+            func (Callable | None): Optional implementation callable for the tool.
+
+        Returns:
+            Tool: Tool instance derived from the Google ADK definition.
+        """
+        from gllm_core.adapters.tool import from_google_function  # noqa: PLC0415
+
+        return from_google_function(function_declaration, func=func)
 
     @field_validator("input_schema", mode="before")
     @classmethod

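A hedged conversion sketch for the new classmethods, assuming langchain-core is installed and that `Tool.from_langchain` accepts a tool built with `langchain_core.tools.tool` (the classmethod simply delegates to `gllm_core.adapters.tool.from_langchain_tool`, as shown above):

```python
# Hypothetical usage; the add() tool is illustrative and not part of either package.
from langchain_core.tools import tool

from gllm_core.schema.tool import Tool


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


converted = Tool.from_langchain(add)  # delegates to gllm_core.adapters.tool.from_langchain_tool
print(converted.input_schema)         # JSON Schema derived from the LangChain tool's arguments
```

`Tool.from_google_adk` follows the same pattern, taking a Google ADK function declaration plus an optional implementation callable.
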
gllm_core/schema/tool.pyi
CHANGED
@@ -110,6 +110,27 @@ class Tool(BaseModel):
     is_async: bool
     model_config: Incomplete
     @classmethod
+    def from_langchain(cls, langchain_tool: Any) -> Tool:
+        """Create a Tool from a LangChain tool instance.
+
+        Args:
+            langchain_tool (Any): LangChain tool implementation to convert.
+
+        Returns:
+            Tool: Tool instance derived from the LangChain representation.
+        """
+    @classmethod
+    def from_google_adk(cls, function_declaration: Any, func: Callable | None = None) -> Tool:
+        """Create a Tool from a Google ADK function declaration.
+
+        Args:
+            function_declaration (Any): Google ADK function declaration to convert.
+            func (Callable | None): Optional implementation callable for the tool.
+
+        Returns:
+            Tool: Tool instance derived from the Google ADK definition.
+        """
+    @classmethod
     def validate_input_schema(cls, v: Any):
         """Validate and convert input_schema to JSON Schema dict if it's a Pydantic model.
 

gllm_core/utils/__init__.py
CHANGED
@@ -7,6 +7,7 @@ from gllm_core.utils.concurrency import asyncify, get_default_portal, syncify
 from gllm_core.utils.event_formatter import format_chunk_message, get_placeholder_keys
 from gllm_core.utils.google_sheets import load_gsheets
 from gllm_core.utils.logger_manager import LoggerManager
+from gllm_core.utils.main_method_resolver import MainMethodResolver
 from gllm_core.utils.merger_method import MergerMethod
 from gllm_core.utils.retry import RetryConfig, retry
 from gllm_core.utils.validation import validate_string_enum
@@ -15,6 +16,7 @@ __all__ = [
     "BinaryHandlingStrategy",
     "ChunkMetadataMerger",
     "LoggerManager",
+    "MainMethodResolver",
     "MergerMethod",
     "RunAnalyzer",
     "RetryConfig",

gllm_core/utils/__init__.pyi
CHANGED
@@ -5,8 +5,9 @@ from gllm_core.utils.concurrency import asyncify as asyncify, get_default_portal
 from gllm_core.utils.event_formatter import format_chunk_message as format_chunk_message, get_placeholder_keys as get_placeholder_keys
 from gllm_core.utils.google_sheets import load_gsheets as load_gsheets
 from gllm_core.utils.logger_manager import LoggerManager as LoggerManager
+from gllm_core.utils.main_method_resolver import MainMethodResolver as MainMethodResolver
 from gllm_core.utils.merger_method import MergerMethod as MergerMethod
 from gllm_core.utils.retry import RetryConfig as RetryConfig, retry as retry
 from gllm_core.utils.validation import validate_string_enum as validate_string_enum
 
-__all__ = ['BinaryHandlingStrategy', 'ChunkMetadataMerger', 'LoggerManager', 'MergerMethod', 'RunAnalyzer', 'RetryConfig', 'asyncify', 'get_default_portal', 'binary_handler_factory', 'format_chunk_message', 'get_placeholder_keys', 'load_gsheets', 'syncify', 'retry', 'validate_string_enum']
+__all__ = ['BinaryHandlingStrategy', 'ChunkMetadataMerger', 'LoggerManager', 'MainMethodResolver', 'MergerMethod', 'RunAnalyzer', 'RetryConfig', 'asyncify', 'get_default_portal', 'binary_handler_factory', 'format_chunk_message', 'get_placeholder_keys', 'load_gsheets', 'syncify', 'retry', 'validate_string_enum']

gllm_core/utils/analyzer.py
CHANGED
@@ -10,8 +10,10 @@ References:
 """
 
 import ast
+import inspect
+import textwrap
 from enum import StrEnum
-from typing import Any
+from typing import Any, Callable
 
 from pydantic import BaseModel, Field
 
@@ -230,4 +232,25 @@ class RunAnalyzer(ast.NodeVisitor):
             return expr.attr
         if isinstance(expr, ast.Constant):
             return str(expr.value)
+        # Fallback for unknown node types
         return expr.__class__.__name__
+
+
+def analyze_method(cls: type, method: Callable) -> RunProfile:
+    """Analyze a method using RunAnalyzer.
+
+    This function encapsulates the common analysis logic used by both
+    Component._analyze_run_method() and schema_generator._generate_from_analyzer().
+
+    Args:
+        cls (type): The class containing the method (for analyzer context).
+        method (Callable): The method to analyze.
+
+    Returns:
+        RunProfile: The analysis results.
+    """
+    analyzer = RunAnalyzer(cls)
+    source = inspect.getsource(method)
+    tree = ast.parse(textwrap.dedent(source))
+    analyzer.visit(tree)
+    return analyzer.profile

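A short sketch of the new `analyze_method` helper, assuming gllm-core 0.4.4; the exact shape of `RunProfile` beyond `arg_usages.required` and `arg_usages.optional` is not visible in this diff:

```python
# Hypothetical sketch; Echo and its kwargs are illustrative only.
from gllm_core.utils.analyzer import analyze_method


class Echo:
    def _run(self, **kwargs):
        message = kwargs["message"]      # subscript access, expected to be reported as required
        times = kwargs.get("times", 1)   # .get() with default, expected to be reported as optional
        return message * times


profile = analyze_method(Echo, Echo._run)
print(sorted(profile.arg_usages.required))  # expected to include "message"
print(sorted(profile.arg_usages.optional))  # expected to include "times"
```

Because `analyze_method` relies on `inspect.getsource`, it only works for methods whose source is available on disk, not for code defined interactively.
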
gllm_core/utils/analyzer.pyi
CHANGED
@@ -2,7 +2,7 @@ import ast
 from _typeshed import Incomplete
 from enum import StrEnum
 from pydantic import BaseModel
-from typing import Any
+from typing import Any, Callable
 
 class ParameterKind(StrEnum):
     """Enum representing the different kinds of parameters a method can have."""
@@ -107,3 +107,17 @@ class RunAnalyzer(ast.NodeVisitor):
         Args:
             node (ast.Subscript): The Subscript node to visit.
         '''
+
+def analyze_method(cls, method: Callable) -> RunProfile:
+    """Analyze a method using RunAnalyzer.
+
+    This function encapsulates the common analysis logic used by both
+    Component._analyze_run_method() and schema_generator._generate_from_analyzer().
+
+    Args:
+        cls (type): The class containing the method (for analyzer context).
+        method (Callable): The method to analyze.
+
+    Returns:
+        RunProfile: The analysis results.
+    """

gllm_core/utils/concurrency.py
CHANGED
@@ -125,7 +125,7 @@ def asyncify(
     """
 
     async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
-        return await anyio.to_thread.run_sync(lambda: func(*args, **kwargs),
+        return await anyio.to_thread.run_sync(lambda: func(*args, **kwargs), abandon_on_cancel=cancellable, limiter=limiter)
 
     return _wrapper
 
@@ -179,6 +179,6 @@ def syncify(
 
     def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
         p: BlockingPortal = portal if portal is not None else get_default_portal()
-        return p.call(async_func
+        return p.call(lambda: async_func(*args, **kwargs))
 
     return _wrapper

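A standalone sketch of the call pattern used by the fixed `asyncify` wrapper, written against anyio directly rather than gllm-core, and assuming anyio >= 4.1 (where `to_thread.run_sync` accepts `abandon_on_cancel` in place of the older `cancellable` flag):

```python
# Standalone sketch mirroring the fixed _wrapper body; not code from the package.
import time

import anyio


def blocking_add(a: int, b: int) -> int:
    time.sleep(0.1)  # stand-in for blocking work
    return a + b


async def main() -> None:
    # The lambda defers the call so positional and keyword arguments reach the blocking
    # function, which run_sync itself would not forward.
    result = await anyio.to_thread.run_sync(lambda: blocking_add(2, 3), abandon_on_cancel=False)
    print(result)  # 5


anyio.run(main)
```

The `syncify` fix applies the same deferral in the other direction, handing a lambda to the blocking portal so the async function receives the caller's arguments.
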
gllm_core/utils/logger_manager.py
CHANGED
@@ -30,6 +30,13 @@ TEXT_COLOR_MAP = {
     logging.CRITICAL: "bold black on red",
 }
 LOG_FORMAT_KEY = "LOG_FORMAT"
+RICH_CLOSE_TAG = "[/"
+JSON_LOG_FIELDS = ["timestamp", "name", "level", "message"]
+JSON_ERROR_FIELDS_MAP = {
+    "exc_info": "message",
+    "stack_info": "stacktrace",
+    "error_code": "code",
+}
 
 
 class TextRichHandler(RichHandler):
@@ -44,6 +51,8 @@ class TextRichHandler(RichHandler):
         color = TEXT_COLOR_MAP.get(record.levelno, "white")
 
         name, msg = record.name, record.getMessage()
+        msg = msg.replace(RICH_CLOSE_TAG, rf"\{RICH_CLOSE_TAG}")
+
         record.msg = f"[{color}][{name}] {msg}[/]"
         record.args = None
 
@@ -70,6 +79,7 @@ class SimpleRichHandler(logging.StreamHandler):
         """
         color = TEXT_COLOR_MAP.get(record.levelno, "white")
         msg = self.format(record)
+        msg = msg.replace(RICH_CLOSE_TAG, rf"\{RICH_CLOSE_TAG}")
 
         self.console.print(f"[{color}]{msg}[/]", markup=True, highlight=False)
 
@@ -93,17 +103,17 @@ class AppJSONFormatter(JsonFormatter):
             LogRecord: The processed log record.
         """
         record = super().process_log_record(log_record)
+        logged_record = {key: value for key, value in record.items() if key in JSON_LOG_FIELDS}
+        error_payload: dict[str, str] = dict.fromkeys(JSON_ERROR_FIELDS_MAP.values(), "")
 
-
-
-        for key, new_key in [("exc_info", "message"), ("stack_info", "stacktrace"), ("error_code", "code")]:
-            if value := record.pop(key, None):
+        for key, new_key in JSON_ERROR_FIELDS_MAP.items():
+            if value := record.get(key):
                 error_payload[new_key] = value
 
-        if error_payload:
-
+        if any(error_payload.values()):
+            logged_record["error"] = error_payload
 
-        return
+        return logged_record
 
 
 LOG_FORMAT_HANDLER_MAP = {

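The new `RICH_CLOSE_TAG` escaping guards against log messages that happen to contain a literal `[/...]` sequence, which Rich would otherwise try to parse as a closing markup tag. A minimal standalone sketch of the same substitution, using Rich directly rather than the package handlers:

```python
# Standalone illustration; the message text is made up.
from rich.console import Console

console = Console()

raw = "payload contains an [/end] marker"
escaped = raw.replace("[/", r"\[/")  # the same substitution the updated handlers apply

# Without escaping, Rich would reject "[/end]" as a closing tag with no matching open tag.
console.print(f"[red]{escaped}[/]", markup=True)
```
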
gllm_core/utils/logger_manager.pyi
CHANGED
@@ -8,6 +8,9 @@ from rich.logging import RichHandler
 DEFAULT_DATE_FORMAT: str
 TEXT_COLOR_MAP: Incomplete
 LOG_FORMAT_KEY: str
+RICH_CLOSE_TAG: str
+JSON_LOG_FIELDS: Incomplete
+JSON_ERROR_FIELDS_MAP: Incomplete
 
 class TextRichHandler(RichHandler):
     """Custom RichHandler that applies specific colors and log format."""

gllm_core/utils/main_method_resolver.py
ADDED
@@ -0,0 +1,185 @@
+"""Main method resolver for Component classes.
+
+This module provides the MainMethodResolver class which encapsulates the logic
+for resolving the main entrypoint method for Component subclasses.
+
+Authors:
+    Dimitrij Ray (dimitrij.ray@gdplabs.id)
+
+References:
+    NONE
+"""
+
+from abc import ABC
+from typing import Callable
+
+from gllm_core.utils.logger_manager import LoggerManager
+
+
+class MainMethodResolver:
+    """Resolves the main entrypoint method for Component classes.
+
+    This resolver implements the precedence rules for determining which method
+    should be used as the main entrypoint:
+    1. Method decorated with @main in the most derived class.
+    2. Method named by __main_method__ property.
+    3. _run method (with deprecation warning).
+
+    Attributes:
+        cls (type): The Component class to resolve the main method for.
+    """
+
+    def __init__(self, component_class: type):
+        """Initialize the resolver with a Component class.
+
+        Args:
+            component_class (type): The Component class to resolve the main method for.
+        """
+        self.cls = component_class
+        self._logger = LoggerManager().get_logger(f"MainMethodResolver.{component_class.__name__}")
+
+    @staticmethod
+    def validate_class(component_class: type) -> None:
+        """Validate main method configuration at class definition time.
+
+        This performs early validation that can be done when a Component subclass
+        is defined, before any instances are created or methods are called.
+
+        Validations performed:
+        1. Check that __main_method__ property points to an existing method
+        2. Check that only one @main decorator is used within the same class
+
+        Note: Multiple inheritance conflicts are intentionally NOT checked here,
+        as they are deferred to runtime (get_main()) to allow class definition
+        to succeed.
+
+        Args:
+            component_class (type): The Component class to validate.
+
+        Raises:
+            AttributeError: If __main_method__ refers to a non-existent method.
+            TypeError: If multiple methods are decorated with @main in the same class.
+        """
+        if (method_name := getattr(component_class, "__main_method__", None)) is not None:
+            if not hasattr(component_class, method_name):
+                raise AttributeError(
+                    f"Method {method_name!r} specified in __main_method__ does not exist in {component_class.__name__}"
+                )
+
+        main_methods = []
+        for name, method in component_class.__dict__.items():
+            if callable(method) and hasattr(method, "__is_main__"):
+                main_methods.append(name)
+
+        if len(main_methods) > 1:
+            raise TypeError(f"Multiple main methods defined in {component_class.__name__}: {', '.join(main_methods)}")
+
+    def resolve(self) -> Callable | None:
+        """Resolve the main method following precedence rules.
+
+        Returns:
+            Callable | None: The resolved main method, or None if not found.
+
+        Raises:
+            TypeError: If conflicting main methods are inherited from multiple ancestors.
+        """
+        if decorated := self._resolve_decorated():
+            self._warn_if_redundant_property()
+            return decorated
+
+        if property_method := self._resolve_property():
+            return property_method
+
+        return self._resolve_legacy()
+
+    def _resolve_decorated(self) -> Callable | None:
+        """Find the most-derived @main decorated method in the MRO (method resolution order).
+
+        Returns:
+            Callable | None: The decorated main method, or None if not found.
+
+        Raises:
+            TypeError: If conflicting main methods are inherited from multiple ancestors.
+        """
+        classes_with_main = []
+        for base in self.cls.__mro__:
+            if base.__name__ == "Component" or base is ABC:
+                continue
+
+            main_method_name = next(
+                (name for name, method in base.__dict__.items() if callable(method) and hasattr(method, "__is_main__")),
+                None,
+            )
+
+            if main_method_name:
+                classes_with_main.append((base, main_method_name))
+
+        if not classes_with_main:
+            return None
+
+        most_derived_class, method_name = classes_with_main[0]
+        actual_method = getattr(self.cls, method_name)
+
+        self._validate_no_decorator_conflicts(classes_with_main, most_derived_class, actual_method)
+
+        return actual_method
+
+    def _validate_no_decorator_conflicts(
+        self, classes_with_main: list[tuple[type, str]], most_derived_class: type, actual_method: Callable
+    ) -> None:
+        """Validate that there are no conflicting @main decorators in multiple inheritance.
+
+        This method checks for conflicts only when multiple classes in the inheritance hierarchy have @main decorated
+        methods. It allows intentional overrides (when the most derived class defines its own @main method) but prevents
+        conflicts from multiple inheritance where different ancestors define different @main methods.
+
+        Args:
+            classes_with_main (list[tuple[type, str]]): List of (class, method_name) tuples for classes with @main.
+            most_derived_class (type): The most derived class in the hierarchy with @main.
+            actual_method (Callable): The resolved method from the most derived class.
+
+        Raises:
+            TypeError: If conflicting main methods are inherited from multiple ancestors and the most derived class
+                is not the current class (indicating a true conflict rather than an intentional override).
+        """
+        if len(classes_with_main) > 1 and most_derived_class is not self.cls:
+            for _, name in classes_with_main[1:]:
+                other_method = getattr(self.cls, name)
+                if other_method is not actual_method:
+                    raise TypeError(
+                        f"Conflicting main methods inherited from multiple ancestors in {self.cls.__name__}. "
+                        "Please explicitly override with @main decorator."
+                    )
+
+    def _resolve_property(self) -> Callable | None:
+        """Find method via __main_method__ property.
+
+        Returns:
+            Callable | None: The method named by __main_method__, or None if not found.
+        """
+        if hasattr(self.cls, "__main_method__") and self.cls.__main_method__ is not None:
+            method_name = self.cls.__main_method__
+            return getattr(self.cls, method_name, None)
+
+    def _resolve_legacy(self) -> Callable | None:
+        """Fall back to _run method with deprecation warning.
+
+        Returns:
+            Callable | None: The _run method, or None if not found.
+        """
+        if hasattr(self.cls, "_run"):
+            self._logger.warning(
+                f"Using legacy _run method for {self.cls.__name__}. "
+                f"Consider using @main decorator to explicitly declare the main entrypoint.",
+                stacklevel=4,
+            )
+            return self.cls._run
+
+    def _warn_if_redundant_property(self) -> None:
+        """Emit warning if both @main decorator and __main_method__ are defined."""
+        if self.cls.__dict__.get("__main_method__") is not None:
+            self._logger.warning(
+                f"Both @main decorator and __main_method__ property are defined in {self.cls.__name__}. "
+                "The @main decorator takes precedence. This redundant configuration should be resolved.",
+                stacklevel=4,
+            )

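A hypothetical resolution sketch, assuming gllm-core 0.4.4. The `@main` decorator itself is not part of this diff; since the resolver only checks for an `__is_main__` attribute on the method, the sketch sets that attribute directly as a stand-in:

```python
# Hypothetical sketch; MyComponent and run_pipeline are illustrative names.
from gllm_core.utils import MainMethodResolver


class MyComponent:
    def run_pipeline(self, text: str) -> str:
        return text.upper()

    run_pipeline.__is_main__ = True  # stand-in for whatever @main sets on the method

    def _run(self, **kwargs):  # legacy entrypoint, ignored when a decorated method exists
        return kwargs


MainMethodResolver.validate_class(MyComponent)   # no error: exactly one "main" method
resolved = MainMethodResolver(MyComponent).resolve()
print(resolved is MyComponent.run_pipeline)      # True: the decorated method wins over _run
```

Per the precedence above, `__main_method__` is consulted only when no decorated method is found, and `_run` is a last resort that logs a warning.
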
gllm_core/utils/main_method_resolver.pyi
ADDED
@@ -0,0 +1,54 @@
+from _typeshed import Incomplete
+from gllm_core.utils.logger_manager import LoggerManager as LoggerManager
+from typing import Callable
+
+class MainMethodResolver:
+    """Resolves the main entrypoint method for Component classes.
+
+    This resolver implements the precedence rules for determining which method
+    should be used as the main entrypoint:
+    1. Method decorated with @main in the most derived class.
+    2. Method named by __main_method__ property.
+    3. _run method (with deprecation warning).
+
+    Attributes:
+        cls (type): The Component class to resolve the main method for.
+    """
+    cls: Incomplete
+    def __init__(self, component_class: type) -> None:
+        """Initialize the resolver with a Component class.
+
+        Args:
+            component_class (type): The Component class to resolve the main method for.
+        """
+    @staticmethod
+    def validate_class(component_class: type) -> None:
+        """Validate main method configuration at class definition time.
+
+        This performs early validation that can be done when a Component subclass
+        is defined, before any instances are created or methods are called.
+
+        Validations performed:
+        1. Check that __main_method__ property points to an existing method
+        2. Check that only one @main decorator is used within the same class
+
+        Note: Multiple inheritance conflicts are intentionally NOT checked here,
+        as they are deferred to runtime (get_main()) to allow class definition
+        to succeed.
+
+        Args:
+            component_class (type): The Component class to validate.
+
+        Raises:
+            AttributeError: If __main_method__ refers to a non-existent method.
+            TypeError: If multiple methods are decorated with @main in the same class.
+        """
+    def resolve(self) -> Callable | None:
+        """Resolve the main method following precedence rules.
+
+        Returns:
+            Callable | None: The resolved main method, or None if not found.
+
+        Raises:
+            TypeError: If conflicting main methods are inherited from multiple ancestors.
+        """