aixtools 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aixtools might be problematic. Click here for more details.
- aixtools/__init__.py +5 -0
- aixtools/a2a/__init__.py +5 -0
- aixtools/a2a/app.py +126 -0
- aixtools/a2a/utils.py +115 -0
- aixtools/agents/__init__.py +12 -0
- aixtools/agents/agent.py +164 -0
- aixtools/agents/agent_batch.py +74 -0
- aixtools/app.py +143 -0
- aixtools/context.py +12 -0
- aixtools/db/__init__.py +17 -0
- aixtools/db/database.py +110 -0
- aixtools/db/vector_db.py +115 -0
- aixtools/log_view/__init__.py +17 -0
- aixtools/log_view/app.py +195 -0
- aixtools/log_view/display.py +285 -0
- aixtools/log_view/export.py +51 -0
- aixtools/log_view/filters.py +41 -0
- aixtools/log_view/log_utils.py +26 -0
- aixtools/log_view/node_summary.py +229 -0
- aixtools/logfilters/__init__.py +7 -0
- aixtools/logfilters/context_filter.py +67 -0
- aixtools/logging/__init__.py +30 -0
- aixtools/logging/log_objects.py +227 -0
- aixtools/logging/logging_config.py +116 -0
- aixtools/logging/mcp_log_models.py +102 -0
- aixtools/logging/mcp_logger.py +172 -0
- aixtools/logging/model_patch_logging.py +87 -0
- aixtools/logging/open_telemetry.py +36 -0
- aixtools/mcp/__init__.py +9 -0
- aixtools/mcp/example_client.py +30 -0
- aixtools/mcp/example_server.py +22 -0
- aixtools/mcp/fast_mcp_log.py +31 -0
- aixtools/mcp/faulty_mcp.py +320 -0
- aixtools/model_patch/model_patch.py +65 -0
- aixtools/server/__init__.py +23 -0
- aixtools/server/app_mounter.py +90 -0
- aixtools/server/path.py +72 -0
- aixtools/server/utils.py +70 -0
- aixtools/testing/__init__.py +9 -0
- aixtools/testing/aix_test_model.py +147 -0
- aixtools/testing/mock_tool.py +66 -0
- aixtools/testing/model_patch_cache.py +279 -0
- aixtools/tools/doctor/__init__.py +3 -0
- aixtools/tools/doctor/tool_doctor.py +61 -0
- aixtools/tools/doctor/tool_recommendation.py +44 -0
- aixtools/utils/__init__.py +35 -0
- aixtools/utils/chainlit/cl_agent_show.py +82 -0
- aixtools/utils/chainlit/cl_utils.py +168 -0
- aixtools/utils/config.py +118 -0
- aixtools/utils/config_util.py +69 -0
- aixtools/utils/enum_with_description.py +37 -0
- aixtools/utils/persisted_dict.py +99 -0
- aixtools/utils/utils.py +160 -0
- aixtools-0.1.0.dist-info/METADATA +355 -0
- aixtools-0.1.0.dist-info/RECORD +58 -0
- aixtools-0.1.0.dist-info/WHEEL +5 -0
- aixtools-0.1.0.dist-info/entry_points.txt +2 -0
- aixtools-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"""
|
|
2
|
+
A logging filter for injecting contextual information into log records.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class ContextFilter(logging.Filter): # pylint: disable=too-few-public-methods
|
|
9
|
+
"""
|
|
10
|
+
A logging filter that injects a formatted context string (user and session
|
|
11
|
+
IDs) into the log record. It sources the IDs from the active FastMCP
|
|
12
|
+
application context and ignores default values.
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
def _extract_from_mcp_context(self) -> tuple[str | None, str | None]:
|
|
16
|
+
"""
|
|
17
|
+
Retrieve session id (aka conversation id) and user id from the MCP context.
|
|
18
|
+
Useful in MCP servers.
|
|
19
|
+
"""
|
|
20
|
+
try:
|
|
21
|
+
from aixtools.server.utils import ( # noqa: PLC0415 # pylint: disable=import-outside-toplevel
|
|
22
|
+
get_session_id_tuple,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
return get_session_id_tuple()
|
|
26
|
+
except (ImportError, RuntimeError, ValueError):
|
|
27
|
+
# Context is not available
|
|
28
|
+
return None, None
|
|
29
|
+
|
|
30
|
+
def filter(self, record: logging.LogRecord) -> bool:
|
|
31
|
+
"""
|
|
32
|
+
Adds a `context` string to the log record.
|
|
33
|
+
|
|
34
|
+
The filter attempts to extract user, session (conversation) IDs from
|
|
35
|
+
context variables. If that fails, it falls back to extracting IDs from
|
|
36
|
+
the FastMCP context.
|
|
37
|
+
|
|
38
|
+
If valid IDs are found, the `context` attribute is formatted as
|
|
39
|
+
`[conversation:id user:id]`. Otherwise, it is an empty string.
|
|
40
|
+
"""
|
|
41
|
+
user_id = None
|
|
42
|
+
session_id = None
|
|
43
|
+
|
|
44
|
+
try:
|
|
45
|
+
# First, try to get context from the global context variables
|
|
46
|
+
from aixtools.context import ( # noqa: PLC0415 # pylint: disable=import-outside-toplevel
|
|
47
|
+
session_id_var,
|
|
48
|
+
user_id_var,
|
|
49
|
+
)
|
|
50
|
+
|
|
51
|
+
user_id = user_id_var.get()
|
|
52
|
+
session_id = session_id_var.get()
|
|
53
|
+
except ImportError:
|
|
54
|
+
pass
|
|
55
|
+
|
|
56
|
+
if not user_id and not session_id:
|
|
57
|
+
user_id, session_id = self._extract_from_mcp_context()
|
|
58
|
+
|
|
59
|
+
context = ""
|
|
60
|
+
if session_id and not str(session_id).startswith("default"):
|
|
61
|
+
context += f"[{session_id}]"
|
|
62
|
+
if user_id and not str(user_id).startswith("default"):
|
|
63
|
+
context += f"[{user_id}]"
|
|
64
|
+
|
|
65
|
+
record.context = context
|
|
66
|
+
|
|
67
|
+
return True
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Logging utilities for AI agent operations and model interactions.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from aixtools.logging.log_objects import ObjectLogger
|
|
6
|
+
from aixtools.logging.mcp_log_models import (
|
|
7
|
+
BaseLogEntry,
|
|
8
|
+
CodeLogEntry,
|
|
9
|
+
CommandLogEntry,
|
|
10
|
+
Language,
|
|
11
|
+
LogEntry,
|
|
12
|
+
LogType,
|
|
13
|
+
ProcessResult,
|
|
14
|
+
SystemLogEntry,
|
|
15
|
+
)
|
|
16
|
+
from aixtools.logging.mcp_logger import JSONFileMcpLogger, McpLogger
|
|
17
|
+
|
|
18
|
+
__all__ = [
|
|
19
|
+
"ObjectLogger",
|
|
20
|
+
"LogType",
|
|
21
|
+
"Language",
|
|
22
|
+
"ProcessResult",
|
|
23
|
+
"BaseLogEntry",
|
|
24
|
+
"CommandLogEntry",
|
|
25
|
+
"CodeLogEntry",
|
|
26
|
+
"SystemLogEntry",
|
|
27
|
+
"LogEntry",
|
|
28
|
+
"McpLogger",
|
|
29
|
+
"JSONFileMcpLogger",
|
|
30
|
+
]
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This module provides functionality to save objects to a log file using pickle.
|
|
3
|
+
It includes a function to check if an object is pickleable and a function to perform a safe deepcopy of objects.
|
|
4
|
+
It also includes a function to save the objects to a log file with a timestamp.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
import pickle
|
|
9
|
+
import traceback
|
|
10
|
+
from copy import copy
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from types import NoneType
|
|
14
|
+
from typing import Mapping, Sequence, Union
|
|
15
|
+
|
|
16
|
+
import rich
|
|
17
|
+
|
|
18
|
+
from aixtools.logging.logging_config import get_logger
|
|
19
|
+
from aixtools.utils.config import LOG_LEVEL, LOGS_DIR
|
|
20
|
+
|
|
21
|
+
logger = get_logger(__name__)
|
|
22
|
+
|
|
23
|
+
# Cache of pickleability results keyed by type. Only consulted for types whose
# pickleability does not depend on their contents (see is_pickleable).
_is_pickleable_cache = {}

# Builtin container types whose pickleability depends on their *contents*;
# caching by type would return wrong answers for them.
_CONTENT_DEPENDENT_TYPES = (list, tuple, dict, set, frozenset)


class ExceptionWrapper:  # pylint: disable=too-few-public-methods
    """
    A wrapper for exceptions to make them pickleable.
    It stores the exception type, message, and formatted traceback as strings.
    """

    def __init__(self, exception):
        self.exc_type = str(type(exception))
        self.exc_value = str(exception)
        # Captures the traceback of the exception currently being handled
        # (meaningful only when constructed inside an `except` block).
        self.exc_traceback = traceback.format_exc()

    def __str__(self):
        return f"{self.exc_type}: {self.exc_value}\n{self.exc_traceback}"


def is_pickleable(obj):
    """
    Check if an object is pickleable.

    Results are cached per type, EXCEPT for builtin containers (list, tuple,
    dict, set, frozenset): their pickleability depends on the elements they
    hold, so a per-type cache would e.g. mark `list` pickleable after seeing
    `[1]` and then wrongly report `[lambda: 1]` as pickleable too — which
    would crash safe_deepcopy's fast path.
    """

    def _round_trips(value):
        # A full dumps/loads round-trip is the only reliable check.
        try:
            pickle.loads(pickle.dumps(value))
            return True
        except Exception:  # pylint: disable=broad-exception-caught
            return False

    obj_type = type(obj)
    module_name = getattr(obj_type, "__module__", "")

    # FastMCP json_schema_to_type changes __module__ which causes pickle error but for some reason goes to the cache
    if module_name == "fastmcp.utilities.json_schema_type":
        return False

    if obj_type in _CONTENT_DEPENDENT_TYPES:
        # Content-dependent: never cache, always re-check.
        return _round_trips(obj)

    if obj_type not in _is_pickleable_cache:
        _is_pickleable_cache[obj_type] = _round_trips(obj)
    return _is_pickleable_cache[obj_type]
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def load_from_log(log_file: Path):
    """
    Read every pickled object back from *log_file*.

    The file is opened in binary mode and repeatedly unpickled until the end
    of the file is reached. Returns the objects as a list.
    """
    loaded = []
    with open(log_file, "rb") as handle:
        try:
            while True:
                loaded.append(pickle.load(handle))
        except EOFError:
            # Normal termination: no more pickled records in the file.
            pass
    return loaded
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def safe_deepcopy(obj):
    """
    Deep-copy *obj* while tolerating unpickleable content.

    Exceptions are replaced by ExceptionWrapper; pickleable objects are
    round-tripped through pickle; mappings and sequences are rebuilt
    element-by-element; other objects get a shallow copy with unpickleable
    attributes nulled out. Returns None when nothing else applies.
    """
    if isinstance(obj, Exception):
        # Exceptions often carry unpickleable state; keep only their text.
        obj = ExceptionWrapper(obj)

    if is_pickleable(obj):
        # Fast path: a pickle round-trip is a deep copy.
        return pickle.loads(pickle.dumps(obj))

    if isinstance(obj, Mapping):
        # Entries with unpickleable keys are dropped; values copied recursively.
        return {key: safe_deepcopy(val) for key, val in obj.items() if is_pickleable(key)}

    if isinstance(obj, Sequence) and not isinstance(obj, str):
        # NOTE: always yields a list, whatever the input sequence type was.
        return [safe_deepcopy(element) for element in obj]

    if hasattr(obj, "__dict__"):
        shallow = copy(obj)
        for name, value in vars(obj).items():
            if is_pickleable(value):
                setattr(shallow, name, safe_deepcopy(value))
            else:
                setattr(shallow, name, None)  # Remove unpickleable field
        return shallow

    return None  # fallback for non-serializable, non-introspectable objects
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def save_objects_to_logfile(objects: list, log_dir=LOGS_DIR):
    """Save the objects to a (pickle) log file"""
    # Delegate persistence to ObjectLogger's context-managed file handling.
    with ObjectLogger(log_dir=log_dir) as sink:
        for item in objects:
            sink.log(item)
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
class ObjectLogger:
    """
    A context manager for logging objects to a file.
    It uses pickle to save the objects and handles exceptions during the save process.
    """

    def __init__(
        self,
        log_dir=LOGS_DIR,
        verbose: bool = True,
        debug: bool | None = None,
        parent_logger: Union["ObjectLogger", NoneType] = None,
    ):
        # verbose: plain-print each logged object; debug: rich-print instead.
        self.verbose = verbose
        self.debug = (
            debug if debug is not None else (LOG_LEVEL == logging.DEBUG)
        )  # Use the debug level from the config if not provided
        self.log_dir = log_dir
        # File handle; opened lazily in __enter__ (only when there is no parent).
        self.file = None
        # When a parent is set, every operation delegates to it and no file
        # of our own is created.
        self.parent_logger = parent_logger
        self.init_log_file()

    def has_parent(self):
        """
        Check if the logger has a parent.
        If it does, it will not create a new log file.
        """
        return self.parent_logger is not None

    def init_log_file(self):
        """Initialize log file for recording agent operations."""
        if self.has_parent():
            # Do nothing: Delegates to the logger
            return
        # Create log file name
        self.log_dir.mkdir(parents=True, exist_ok=True)
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        # NOTE(review): self.log_file only exists when there is no parent.
        self.log_file = self.log_dir / f"agent_run.{timestamp}.pkl"
        logger.info("Logging to %s", self.log_file)

    def __enter__(self):
        if self.has_parent():
            # Do nothing: Delegates to the logger
            return self
        self.file = open(self.log_file, "ab")  # append in binary mode
        return self

    def log(self, obj):
        """
        Log an object to the file.
        It uses safe_deepcopy to ensure the object is pickleable.
        """
        if self.has_parent():
            # Delegate to the parent logger
            self.parent_logger.log(obj)
        else:
            try:
                if self.debug:
                    rich.print(obj, flush=True)
                elif self.verbose:
                    print(obj, flush=True)
                obj_to_save = safe_deepcopy(obj)
                pickle.dump(obj_to_save, self.file)
                self.file.flush()  # ensure it's written immediately
            except Exception as e:  # pylint: disable=broad-exception-caught
                # Logging must never crash the caller; record the failure instead.
                logger.error("Failed to log object: %s", e)
                logger.error(traceback.format_exc())

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.has_parent():
            # Do nothing: Delegates to the logger
            pass
        elif self.file:
            self.file.close()
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
class NullObjectLogger:
    """
    A null logger that does nothing.
    Accepts the same calls as ObjectLogger but discards everything.
    """

    def __init__(self, **kwargs):
        pass

    def __enter__(self):
        # Return self so `with NullObjectLogger() as obj_logger:` yields a
        # usable logger instead of None (which would break `.log(...)` calls).
        return self

    def log(self, obj):
        """Log an object to the configured destination."""

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
class PrintObjectLogger:
    """
    Print to stdout
    """

    def __init__(self, **kwargs):
        pass

    def __enter__(self):
        # Return self so the `with ... as obj_logger:` form yields a usable
        # logger instead of None (which would break `.log(...)` calls).
        return self

    def log(self, obj):
        """Log an object using rich print for formatted output."""
        rich.print(obj, flush=True)

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
|
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Centralized logging configuration for AixTools, based on Python's standard logging.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
import logging.config
|
|
8
|
+
import os
|
|
9
|
+
import time
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
# PyYAML is an optional dependency.
|
|
13
|
+
try:
|
|
14
|
+
import yaml
|
|
15
|
+
except ImportError:
|
|
16
|
+
yaml = None
|
|
17
|
+
|
|
18
|
+
# --- Default Configuration ---

# Render log timestamps in UTC.
logging.Formatter.converter = time.gmtime

# dictConfig-style default used when no config file is found.
DEFAULT_LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "filters": {
        "context_filter": {
            # Injects a %(context)s field (session/user ids) into records.
            "()": "aixtools.logfilters.context_filter.ContextFilter",
        }
    },
    "formatters": {
        "color": {
            "()": "colorlog.ColoredFormatter",
            "format": "%(log_color)s%(asctime)s.%(msecs)03d %(levelname)-8s%(reset)s %(context)s[%(name)s] %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
            "log_colors": {
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "bold_red",
            },
        },
    },
    "handlers": {
        "stream": {
            "class": "colorlog.StreamHandler",
            "formatter": "color",
            "level": "INFO",
            "filters": ["context_filter"],
        },
    },
    "root": {
        "handlers": ["stream"],
        "level": "INFO",
    },
}

# --- Public API ---

# Alias so callers can do `from aixtools.logging.logging_config import get_logger`.
get_logger = logging.getLogger
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def configure_logging():
    """
    Configure the logging system.

    Loads the logging configuration from a file or uses the hardcoded
    default. The configuration source is resolved in this order of
    precedence:

    1. LOGGING_CONFIG_PATH environment variable.
    2. logging.yaml in the current working directory.
    3. logging.json in the current working directory.
    4. Hardcoded default configuration.
    """
    env_path = os.environ.get("LOGGING_CONFIG_PATH")

    if env_path:
        config_path = Path(env_path)
        if not config_path.exists():
            # An explicitly requested config file must exist.
            raise FileNotFoundError(f"Logging configuration file not found: {config_path}")
        _load_config_from_file(config_path)
        return

    # No explicit path: look for conventional config files in the cwd.
    for candidate_name in ("logging.yaml", "logging.json"):
        candidate = Path.cwd() / candidate_name
        if candidate.exists():
            _load_config_from_file(candidate)
            return

    # Nothing found: fall back to the built-in defaults.
    logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _load_config_from_file(path: Path):
|
|
97
|
+
"""Load a logging configuration from a YAML or JSON file."""
|
|
98
|
+
if path.suffix in [".yaml", ".yml"] and yaml:
|
|
99
|
+
config = yaml.safe_load(path.read_text(encoding="utf-8"))
|
|
100
|
+
elif path.suffix == ".json":
|
|
101
|
+
config = json.loads(path.read_text(encoding="utf-8"))
|
|
102
|
+
else:
|
|
103
|
+
raise ValueError(
|
|
104
|
+
f"Unsupported configuration file format: {path.suffix}. "
|
|
105
|
+
"Please use .yaml or .json. "
|
|
106
|
+
"For YAML support, ensure PyYAML is installed (`uv add pyyaml`)."
|
|
107
|
+
)
|
|
108
|
+
|
|
109
|
+
if config:
|
|
110
|
+
logging.config.dictConfig(config)
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
# --- Initial Configuration ---

# Automatically configure logging when the module is imported.
# NOTE(review): import-time side effect — importing this module reconfigures
# the process-wide logging setup.
configure_logging()
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Pydantic models for logging system.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from datetime import UTC, datetime
|
|
6
|
+
from enum import Enum
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from pydantic import BaseModel, Field
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class LogType(str, Enum):
    """Type of log entry: discriminator for the kind of recorded activity."""

    # str subclass: members compare equal to, and serialize as, their values.
    COMMAND = "command"
    CODE = "code"
    SYSTEM = "system"
    SERVICE = "service"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class Language(str, Enum):
    """Programming language of the code, with OTHER as the catch-all."""

    # str subclass: members compare equal to, and serialize as, their values.
    PYTHON = "python"
    JAVASCRIPT = "javascript"
    TYPESCRIPT = "typescript"
    SHELL = "shell"
    BASH = "bash"
    OTHER = "other"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class ProcessResult(BaseModel):
    """
    Process results from a command or code execution.
    Includes exit code, stdout, and stderr.
    """

    exit_code: int = Field(description="Exit code of the command or process")
    stdout: str = Field(description="Standard output of the command or process")
    stderr: str = Field(description="Standard error of the command or process")
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class BaseLogEntry(BaseModel):
    """Base model for all log entries; concrete entry types extend it."""

    id: str = Field(description="Unique identifier for the log entry")
    user_id: str = Field(description="ID of the user who initiated the action")
    session_id: str = Field(description="ID of the session")
    timestamp: datetime = Field(
        # Timezone-aware UTC timestamp, set at model creation time.
        default_factory=lambda: datetime.now(UTC),
        description="Time when the log entry was created",
    )
    log_type: LogType = Field(description="Type of log entry")
    container_id: str | None = Field(None, description="ID of the container where the action was performed")
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class CommandLogEntry(BaseLogEntry):
    """Log entry for shell command execution."""

    # Fixed discriminator value for this entry type.
    log_type: LogType = LogType.COMMAND
    command: str = Field(description="Shell command that was executed")
    working_directory: str = Field(description="Working directory where the command was executed")
    process_result: ProcessResult | None = Field(
        None,
        description="Process results: exit status, STDOUT, and STDERR from the command",
    )
    duration_ms: int | None = Field(None, description="Duration of command execution in milliseconds")
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class CodeLogEntry(BaseLogEntry):
    """Log entry for code execution."""

    # Fixed discriminator value for this entry type.
    log_type: LogType = LogType.CODE
    language: Language = Field(description="Programming language of the code")
    code: str = Field(description="Code that was executed")
    file_path: str | None = Field(None, description="Path to the file where the code was saved")
    process_result: ProcessResult | None = Field(
        None,
        description="Process results: exit status, STDOUT, and STDERR from the command",
    )
    duration_ms: int | None = Field(None, description="Duration of code execution in milliseconds")
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class SystemLogEntry(BaseLogEntry):
    """Log entry for system events."""

    # Fixed discriminator value for this entry type.
    log_type: LogType = LogType.SYSTEM
    event: str = Field(description="Description of the system event")
    details: dict[str, Any] = Field(default_factory=dict, description="Additional details about the event")
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
class ServiceLogEntry(BaseLogEntry):
    """Log entry for service events."""

    # Fixed discriminator value for this entry type.
    log_type: LogType = LogType.SERVICE
    service_id: str = Field(description="ID of the service")
    event: str = Field(description="Description of the service event")
    details: dict[str, Any] = Field(default_factory=dict, description="Additional details about the event")
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
# Union type for all log entry types
# NOTE(review): includes ServiceLogEntry, which the package __init__ does not
# re-export — confirm whether that is intentional.
LogEntry = CommandLogEntry | CodeLogEntry | SystemLogEntry | ServiceLogEntry
|