aixtools 0.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aixtools has been flagged as potentially problematic; see the registry's advisory page for details.
- aixtools/.chainlit/config.toml +113 -0
- aixtools/.chainlit/translations/bn.json +214 -0
- aixtools/.chainlit/translations/en-US.json +214 -0
- aixtools/.chainlit/translations/gu.json +214 -0
- aixtools/.chainlit/translations/he-IL.json +214 -0
- aixtools/.chainlit/translations/hi.json +214 -0
- aixtools/.chainlit/translations/ja.json +214 -0
- aixtools/.chainlit/translations/kn.json +214 -0
- aixtools/.chainlit/translations/ml.json +214 -0
- aixtools/.chainlit/translations/mr.json +214 -0
- aixtools/.chainlit/translations/nl.json +214 -0
- aixtools/.chainlit/translations/ta.json +214 -0
- aixtools/.chainlit/translations/te.json +214 -0
- aixtools/.chainlit/translations/zh-CN.json +214 -0
- aixtools/__init__.py +11 -0
- aixtools/_version.py +34 -0
- aixtools/a2a/app.py +126 -0
- aixtools/a2a/google_sdk/__init__.py +0 -0
- aixtools/a2a/google_sdk/card.py +27 -0
- aixtools/a2a/google_sdk/pydantic_ai_adapter/agent_executor.py +199 -0
- aixtools/a2a/google_sdk/pydantic_ai_adapter/storage.py +26 -0
- aixtools/a2a/google_sdk/remote_agent_connection.py +88 -0
- aixtools/a2a/google_sdk/utils.py +59 -0
- aixtools/a2a/utils.py +115 -0
- aixtools/agents/__init__.py +12 -0
- aixtools/agents/agent.py +164 -0
- aixtools/agents/agent_batch.py +71 -0
- aixtools/agents/prompt.py +97 -0
- aixtools/app.py +143 -0
- aixtools/chainlit.md +14 -0
- aixtools/compliance/__init__.py +9 -0
- aixtools/compliance/private_data.py +138 -0
- aixtools/context.py +17 -0
- aixtools/db/__init__.py +17 -0
- aixtools/db/database.py +110 -0
- aixtools/db/vector_db.py +115 -0
- aixtools/google/client.py +25 -0
- aixtools/log_view/__init__.py +17 -0
- aixtools/log_view/app.py +195 -0
- aixtools/log_view/display.py +285 -0
- aixtools/log_view/export.py +51 -0
- aixtools/log_view/filters.py +41 -0
- aixtools/log_view/log_utils.py +26 -0
- aixtools/log_view/node_summary.py +229 -0
- aixtools/logfilters/__init__.py +7 -0
- aixtools/logfilters/context_filter.py +67 -0
- aixtools/logging/__init__.py +30 -0
- aixtools/logging/log_objects.py +227 -0
- aixtools/logging/logging_config.py +161 -0
- aixtools/logging/mcp_log_models.py +102 -0
- aixtools/logging/mcp_logger.py +172 -0
- aixtools/logging/model_patch_logging.py +87 -0
- aixtools/logging/open_telemetry.py +36 -0
- aixtools/mcp/__init__.py +9 -0
- aixtools/mcp/client.py +375 -0
- aixtools/mcp/example_client.py +30 -0
- aixtools/mcp/example_server.py +22 -0
- aixtools/mcp/fast_mcp_log.py +31 -0
- aixtools/mcp/faulty_mcp.py +319 -0
- aixtools/model_patch/model_patch.py +63 -0
- aixtools/server/__init__.py +29 -0
- aixtools/server/app_mounter.py +90 -0
- aixtools/server/path.py +72 -0
- aixtools/server/utils.py +70 -0
- aixtools/server/workspace_privacy.py +65 -0
- aixtools/testing/__init__.py +9 -0
- aixtools/testing/aix_test_model.py +149 -0
- aixtools/testing/mock_tool.py +66 -0
- aixtools/testing/model_patch_cache.py +279 -0
- aixtools/tools/doctor/__init__.py +3 -0
- aixtools/tools/doctor/tool_doctor.py +61 -0
- aixtools/tools/doctor/tool_recommendation.py +44 -0
- aixtools/utils/__init__.py +35 -0
- aixtools/utils/chainlit/cl_agent_show.py +82 -0
- aixtools/utils/chainlit/cl_utils.py +168 -0
- aixtools/utils/config.py +131 -0
- aixtools/utils/config_util.py +69 -0
- aixtools/utils/enum_with_description.py +37 -0
- aixtools/utils/files.py +17 -0
- aixtools/utils/persisted_dict.py +99 -0
- aixtools/utils/utils.py +167 -0
- aixtools/vault/__init__.py +7 -0
- aixtools/vault/vault.py +137 -0
- aixtools-0.0.0.dist-info/METADATA +669 -0
- aixtools-0.0.0.dist-info/RECORD +88 -0
- aixtools-0.0.0.dist-info/WHEEL +5 -0
- aixtools-0.0.0.dist-info/entry_points.txt +2 -0
- aixtools-0.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Centralized logging configuration for AixTools, based on Python's standard logging.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
import logging.config
|
|
8
|
+
import os
|
|
9
|
+
import sys
|
|
10
|
+
import time
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
# PyYAML is an optional dependency.
|
|
14
|
+
try:
|
|
15
|
+
import yaml
|
|
16
|
+
except ImportError:
|
|
17
|
+
yaml = None
|
|
18
|
+
|
|
19
|
+
# --- Default Configuration ---

# Emit timestamps in UTC rather than local time for all formatters.
logging.Formatter.converter = time.gmtime

# dictConfig-style default applied when no logging.yaml/logging.json is found
# and LOGGING_CONFIG_PATH is unset (see configure_logging()).
DEFAULT_LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "filters": {
        # Adds the %(context)s field referenced by the "color" format string.
        "context_filter": {
            "()": "aixtools.logfilters.context_filter.ContextFilter",
        }
    },
    "formatters": {
        "color": {
            "()": "colorlog.ColoredFormatter",
            "format": "%(log_color)s%(asctime)s.%(msecs)03d %(levelname)-8s%(reset)s %(context)s[%(name)s] %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
            "log_colors": {
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "bold_red",
            },
        },
    },
    "handlers": {
        "stream": {
            "class": "colorlog.StreamHandler",
            "formatter": "color",
            "level": "INFO",
            "filters": ["context_filter"],
        },
    },
    "root": {
        "handlers": ["stream"],
        "level": "INFO",
    },
}

# --- Public API ---

# Alias so callers can import get_logger from this module instead of logging.
get_logger = logging.getLogger
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def configure_logging():
    """
    Configure the logging system.

    This function loads the logging configuration from a file or uses the
    hardcoded default. The configuration source is determined in the
    following order of precedence:

    1. LOGGING_CONFIG_PATH environment variable.
    2. logging.yaml in the current working directory.
    3. logging.json in the current working directory.
    4. Hardcoded default configuration.

    Special handling for pytest: If running under pytest without explicit
    log flags, console logging is suppressed to avoid interfering with
    pytest's own log capture mechanism.

    Raises:
        FileNotFoundError: If LOGGING_CONFIG_PATH points to a missing file.
    """
    # Detect pytest via an imported module or the executable name.
    # Parenthesized deliberately: the previous expression grouped as
    # `(a or b) if sys.argv else False`, which dropped the sys.modules
    # check whenever sys.argv was empty (e.g. embedded interpreters).
    is_pytest = "pytest" in sys.modules or ("pytest" in sys.argv[0] if sys.argv else False)

    # Check for live log flags - handle both separate and combined flags.
    wants_live_logs = False
    for arg in sys.argv:
        # pytest spells this family of options --log-cli-level / --log-cli-format
        # etc.; a prefix match covers them all (the old exact "--log-cli" match
        # missed the real flags).
        if arg.startswith("--log-cli"):
            wants_live_logs = True
            break
        # Check for -s either standalone or combined with other short flags
        # (like -vsk, -vs, etc.).
        if arg.startswith("-") and not arg.startswith("--") and "s" in arg:
            wants_live_logs = True
            break

    if is_pytest and not wants_live_logs:
        # Minimal configuration: a NullHandler suppresses console output while
        # still letting pytest's log-capture plugin attach its own handlers.
        pytest_config = {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {
                "simple": {
                    "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
                }
            },
            "handlers": {
                # Only use NullHandler to suppress console output but allow pytest log capture
                "null": {
                    "class": "logging.NullHandler",
                }
            },
            "root": {
                "handlers": ["null"],
                "level": "INFO",
            },
        }
        logging.config.dictConfig(pytest_config)
        return

    # 1. An explicit path from the environment wins; a bad path is an error.
    config_path_str = os.environ.get("LOGGING_CONFIG_PATH")
    if config_path_str:
        config_path = Path(config_path_str)
        if not config_path.exists():
            raise FileNotFoundError(f"Logging configuration file not found: {config_path}")
        _load_config_from_file(config_path)
        return

    # 2./3. Check for default config files in the current directory.
    for filename in ["logging.yaml", "logging.json"]:
        config_path = Path.cwd() / filename
        if config_path.exists():
            _load_config_from_file(config_path)
            return

    # 4. Fallback to the default configuration.
    logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def _load_config_from_file(path: Path):
    """Load a logging configuration from a YAML or JSON file.

    Args:
        path: Path to a ``.yaml``/``.yml`` or ``.json`` dictConfig file.

    Raises:
        ValueError: If the extension is unsupported, or if a YAML file is
            given while PyYAML is not installed.
    """
    if path.suffix in [".yaml", ".yml"]:
        if yaml is None:
            # Previously this fell into the generic "unsupported format"
            # branch, producing the contradictory message
            # "Unsupported configuration file format: .yaml. Please use .yaml
            # or .json." — name the real problem instead.
            raise ValueError(
                f"PyYAML is required to load {path}. "
                "Install it (`uv add pyyaml`) or use a .json configuration file."
            )
        config = yaml.safe_load(path.read_text(encoding="utf-8"))
    elif path.suffix == ".json":
        config = json.loads(path.read_text(encoding="utf-8"))
    else:
        raise ValueError(
            f"Unsupported configuration file format: {path.suffix}. "
            "Please use .yaml or .json. "
            "For YAML support, ensure PyYAML is installed (`uv add pyyaml`)."
        )

    # An empty/None config (e.g. an empty file) leaves logging untouched.
    if config:
        logging.config.dictConfig(config)
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
# --- Initial Configuration ---

# Automatically configure logging when the module is imported.
# NOTE(review): deliberate import-time side effect — importing this module
# reconfigures the root logger. Confirm downstream importers expect this.
configure_logging()
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Pydantic models for logging system.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from datetime import UTC, datetime
|
|
6
|
+
from enum import Enum
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from pydantic import BaseModel, Field
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class LogType(str, Enum):
    """Type of log entry.

    A ``str`` subclass so members serialize as their plain string values
    (the same values used as the ``log_type`` discriminator in log files).
    """

    COMMAND = "command"  # shell command execution
    CODE = "code"  # code snippet execution
    SYSTEM = "system"  # system-level event
    SERVICE = "service"  # service lifecycle event


class Language(str, Enum):
    """Programming language of the code."""

    PYTHON = "python"
    JAVASCRIPT = "javascript"
    TYPESCRIPT = "typescript"
    SHELL = "shell"
    BASH = "bash"
    OTHER = "other"  # catch-all for languages not listed above
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class ProcessResult(BaseModel):
    """
    Process results from a command or code execution.
    Includes exit code, stdout, and stderr.
    """

    exit_code: int = Field(description="Exit code of the command or process")
    stdout: str = Field(description="Standard output of the command or process")
    stderr: str = Field(description="Standard error of the command or process")


class BaseLogEntry(BaseModel):
    """Base model for all log entries.

    Concrete subclasses pin ``log_type`` to a fixed discriminator value.
    """

    id: str = Field(description="Unique identifier for the log entry")
    user_id: str = Field(description="ID of the user who initiated the action")
    session_id: str = Field(description="ID of the session")
    # Timezone-aware UTC timestamp assigned when the model is instantiated.
    timestamp: datetime = Field(
        default_factory=lambda: datetime.now(UTC),
        description="Time when the log entry was created",
    )
    log_type: LogType = Field(description="Type of log entry")
    container_id: str | None = Field(None, description="ID of the container where the action was performed")
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class CommandLogEntry(BaseLogEntry):
    """Log entry for shell command execution."""

    # Discriminator fixed to COMMAND for this subclass.
    log_type: LogType = LogType.COMMAND
    command: str = Field(description="Shell command that was executed")
    working_directory: str = Field(description="Working directory where the command was executed")
    # None when the result was not (or not yet) captured.
    process_result: ProcessResult | None = Field(
        None,
        description="Process results: exit status, STDOUT, and STDERR from the command",
    )
    duration_ms: int | None = Field(None, description="Duration of command execution in milliseconds")


class CodeLogEntry(BaseLogEntry):
    """Log entry for code execution."""

    # Discriminator fixed to CODE for this subclass.
    log_type: LogType = LogType.CODE
    language: Language = Field(description="Programming language of the code")
    code: str = Field(description="Code that was executed")
    file_path: str | None = Field(None, description="Path to the file where the code was saved")
    # None when the result was not (or not yet) captured.
    process_result: ProcessResult | None = Field(
        None,
        description="Process results: exit status, STDOUT, and STDERR from the command",
    )
    duration_ms: int | None = Field(None, description="Duration of code execution in milliseconds")
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class SystemLogEntry(BaseLogEntry):
    """Log entry for system events."""

    # Discriminator fixed to SYSTEM for this subclass.
    log_type: LogType = LogType.SYSTEM
    event: str = Field(description="Description of the system event")
    details: dict[str, Any] = Field(default_factory=dict, description="Additional details about the event")


class ServiceLogEntry(BaseLogEntry):
    """Log entry for service events."""

    # Discriminator fixed to SERVICE for this subclass.
    log_type: LogType = LogType.SERVICE
    service_id: str = Field(description="ID of the service")
    event: str = Field(description="Description of the service event")
    details: dict[str, Any] = Field(default_factory=dict, description="Additional details about the event")


# Union type for all log entry types
LogEntry = CommandLogEntry | CodeLogEntry | SystemLogEntry | ServiceLogEntry
|
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Logger implementations for MCP server.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from abc import ABC, abstractmethod
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from aixtools.logging.logging_config import get_logger
|
|
11
|
+
|
|
12
|
+
from .mcp_log_models import CodeLogEntry, CommandLogEntry, LogEntry, ServiceLogEntry, SystemLogEntry
|
|
13
|
+
|
|
14
|
+
logger = get_logger(__name__)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def log_with_default_logger(entry: LogEntry) -> None:
    """Render *entry* as a short human-readable line on the module logger."""
    # System and service entries share the same shape (event + details).
    if isinstance(entry, (SystemLogEntry, ServiceLogEntry)):
        logger.info("%s: %s", entry.event, entry.details or "")
    elif isinstance(entry, CodeLogEntry):
        logger.info("%s code: %s", entry.language, entry.code)
    elif isinstance(entry, CommandLogEntry):
        logger.info("%s, CWD: %s", entry.command, entry.working_directory)
    else:
        # Unrecognized entry types are dumped verbatim at debug level.
        logger.debug("Logging entry: %s", entry.model_dump_json(indent=2))
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class McpLogger(ABC):
    """Abstract base class for loggers.

    Implementations persist structured log entries and support filtered
    retrieval via :meth:`get_logs`.
    """

    @abstractmethod
    def log(self, entry: LogEntry) -> None:
        """Log an entry."""

    @abstractmethod
    def get_logs(  # noqa: PLR0913 # pylint: disable=too-many-arguments,too-many-positional-arguments
        self,
        user_id: str | None = None,
        session_id: str | None = None,
        container_id: str | None = None,
        start_time: datetime | None = None,
        end_time: datetime | None = None,
        limit: int = 100,
    ) -> list[LogEntry]:
        """Get logs with optional filters."""
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class JSONFileMcpLogger(McpLogger):
    """Logger that stores logs in a single JSON-lines file."""

    # Maps the serialized ``log_type`` discriminator to its concrete model.
    # FIX: "service" was previously missing here, so ServiceLogEntry records
    # were written by log() but silently dropped by get_logs().
    _LOG_TYPE_TO_MODEL = {
        "command": CommandLogEntry,
        "code": CodeLogEntry,
        "system": SystemLogEntry,
        "service": ServiceLogEntry,
    }

    def __init__(self, log_dir: str | Path):
        """Initialize the logger.

        Args:
            log_dir: Directory for ``mcp_logs.jsonl``; created if missing.
        """
        self.log_dir = Path(log_dir)
        self.log_dir.mkdir(parents=True, exist_ok=True)
        self.log_file_path = self.log_dir / "mcp_logs.jsonl"
        # Kept open in append mode for the logger's lifetime.
        self.log_file = open(self.log_file_path, "a", encoding="utf-8")  # pylint: disable=consider-using-with

    def __del__(self):
        """Ensure file is closed when the logger is destroyed."""
        # hasattr guard: __init__ may have failed before log_file was set.
        if hasattr(self, "log_file") and self.log_file and not self.log_file.closed:
            self.log_file.close()

    def _get_log_file_path(self) -> Path:
        """Get the path to the log file."""
        return self.log_file_path

    def log(self, entry: LogEntry) -> None:
        """Log an entry to the JSON file."""
        log_with_default_logger(entry)
        # One JSON document per line (JSONL); flush so readers see it at once.
        entry_json = entry.model_dump_json()
        self.log_file.write(entry_json + "\n")
        self.log_file.flush()

    def get_logs(  # noqa: PLR0913, PLR0912 # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-branches
        self,
        user_id: str | None = None,
        session_id: str | None = None,
        container_id: str | None = None,
        start_time: datetime | None = None,
        end_time: datetime | None = None,
        limit: int = 100,
    ) -> list[LogEntry]:
        """Get logs with optional filters.

        Args:
            user_id: Filter by user ID.
            session_id: Filter by session ID.
            container_id: Filter by container ID.
            start_time: Filter by start time.
            end_time: Filter by end time.
            limit: Maximum number of logs to return.

        Returns:
            List of log entries, newest first.
        """
        logs: list[LogEntry] = []

        # Ensure any pending writes are flushed to disk.
        self.log_file.flush()

        if not self.log_file_path.exists():
            return logs

        with open(self.log_file_path, "r", encoding="utf-8") as f:
            for line in f:
                try:
                    entry_dict = json.loads(line)

                    # Convert timestamp string to datetime.
                    # NOTE(review): comparing against naive start_time/end_time
                    # raises TypeError if stored timestamps are tz-aware —
                    # callers should pass aware datetimes; confirm.
                    entry_dict["timestamp"] = datetime.fromisoformat(entry_dict["timestamp"])

                    # Apply filters; any mismatch skips the entry.
                    if user_id and entry_dict.get("user_id") != user_id:
                        continue
                    if session_id and entry_dict.get("session_id") != session_id:
                        continue
                    if container_id and entry_dict.get("container_id") != container_id:
                        continue
                    if start_time and entry_dict["timestamp"] < start_time:
                        continue
                    if end_time and entry_dict["timestamp"] > end_time:
                        continue

                    # Dispatch on log_type; unknown discriminators are skipped
                    # rather than failing the whole read.
                    model_cls = self._LOG_TYPE_TO_MODEL.get(entry_dict["log_type"])
                    if model_cls is None:
                        continue

                    logs.append(model_cls(**entry_dict))
                except (json.JSONDecodeError, KeyError) as e:
                    logger.error("Error parsing log entry: %s", e)
                    continue

                # Stop reading once enough entries have been collected.
                if len(logs) >= limit:
                    break

        # Sort logs by timestamp (newest first).
        logs.sort(key=lambda x: x.timestamp, reverse=True)

        return logs[:limit]
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
# Module-level singleton holding the active MCP logger.
_mcp_logger: McpLogger | None = None


def initialize_mcp_logger(mcp_logger: McpLogger) -> None:
    """Initialize the MCP logger"""
    global _mcp_logger  # noqa: PLW0603, pylint: disable=global-statement
    _mcp_logger = mcp_logger


def get_mcp_logger() -> McpLogger:
    """Get the global logger for MCP server."""
    # Snapshot the global once so the check and the return see the same value.
    current = _mcp_logger
    if current is None:
        raise RuntimeError("Logger not initialized")
    return current
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Logging utilities for model patching and request/response tracking.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import functools
|
|
6
|
+
from contextlib import asynccontextmanager
|
|
7
|
+
from uuid import uuid4
|
|
8
|
+
|
|
9
|
+
from aixtools.logging.logging_config import get_logger
|
|
10
|
+
from aixtools.model_patch.model_patch import (
|
|
11
|
+
ModelRawRequest,
|
|
12
|
+
ModelRawRequestResult,
|
|
13
|
+
ModelRawRequestYieldItem,
|
|
14
|
+
get_request_fn,
|
|
15
|
+
get_request_stream_fn,
|
|
16
|
+
model_patch,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
logger = get_logger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def log_async_method(fn, agent_logger):
    """Wrap async method *fn* so each call and its outcome are logged.

    Args:
        fn: Coroutine function to wrap.
        agent_logger: Object exposing ``log(obj)`` for structured log objects.

    Returns:
        An async wrapper with the same call signature as ``fn``.
    """

    @functools.wraps(fn)
    async def model_request_logger_wrapper(*args, **kwargs):
        # Unique ID correlating this request with its result in the logs.
        uuid = str(uuid4())
        log_object = ModelRawRequest(method_name=fn.__name__, request_id=uuid, args=args, kwargs=kwargs)
        agent_logger.log(log_object)
        # Invoke the original method.
        try:
            result = await fn(*args, **kwargs)
        except Exception as e:
            # Log, then re-raise with the original traceback — bare `raise`
            # instead of `raise e`, which would append an extra frame.
            agent_logger.log(e)
            raise
        # Result logging is outside the try so a failure in the logger is not
        # mistaken for (and double-logged as) a failure of the model call.
        log_object = ModelRawRequestResult(method_name=fn.__name__, request_id=uuid, result=result)
        agent_logger.log(log_object)
        return result

    return model_request_logger_wrapper
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def log_async_stream(fn, agent_logger):
    """Log async streaming method calls with individual item tracking.

    Args:
        fn: Async-context-manager factory yielding an async iterator.
        agent_logger: Object exposing ``log(obj)`` for structured log objects.

    Returns:
        An async context manager wrapper yielding a logging generator.
    """

    @functools.wraps(fn)
    @asynccontextmanager
    async def model_request_stream_logger_wrapper(*args, **kwargs):
        # Unique ID correlating this request with every yielded item.
        uuid = str(uuid4())
        log_object = ModelRawRequest(method_name=fn.__name__, request_id=uuid, args=args, kwargs=kwargs)
        agent_logger.log(log_object)
        # Invoke the original method.
        async with fn(*args, **kwargs) as stream:

            async def gen():
                item_num = 0
                try:
                    async for item in stream:
                        # Log each yielded item with its position in the stream.
                        log_object = ModelRawRequestYieldItem(
                            method_name=fn.__name__, request_id=uuid, item=item, item_num=item_num
                        )
                        agent_logger.log(log_object)
                        item_num += 1
                        yield item
                except Exception as e:
                    # Log, then re-raise with the original traceback (bare
                    # `raise`, not `raise e`, to avoid an extra frame).
                    agent_logger.log(e)
                    raise

            yield gen()

    return model_request_stream_logger_wrapper
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def model_patch_logging(model, agent_logger):
    """Return *model* patched so raw requests and streams are logged."""
    logger.debug("Patching model with logging")
    # Wrap both entry points before handing them to model_patch.
    wrapped_request = log_async_method(get_request_fn(model), agent_logger)
    wrapped_stream = log_async_stream(get_request_stream_fn(model), agent_logger)
    return model_patch(
        model,
        request_method=wrapped_request,
        request_stream_method=wrapped_stream,
    )
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
"""
|
|
2
|
+
OpenTelemetry integration for logging and tracing agent operations.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
|
|
7
|
+
import logfire # pylint: disable=import-error
|
|
8
|
+
from pydantic_ai import Agent
|
|
9
|
+
|
|
10
|
+
from aixtools.utils.config import LOGFIRE_TOKEN, LOGFIRE_TRACES_ENDPOINT
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def open_telemetry_on(service_name: str = "agent_poc"):
    """Configure and enable OpenTelemetry tracing with LogFire integration.

    Args:
        service_name: Service name reported on emitted traces. Defaults to
            ``"agent_poc"`` (the previously hard-coded value) for backward
            compatibility.
    """
    # An OTLP endpoint takes precedence over a Logfire token.
    if LOGFIRE_TRACES_ENDPOINT:
        os.environ["OTEL_EXPORTER_OTLP_TRACES_ENDPOINT"] = LOGFIRE_TRACES_ENDPOINT
        logfire.configure(
            service_name=service_name,
            # Sending to Logfire is on by default regardless of the OTEL env vars.
            # Keep this line here if you don't want to send to both Jaeger and Logfire.
            send_to_logfire=False,
        )
        Agent.instrument_all(True)
        return

    if LOGFIRE_TOKEN:
        logfire.configure(
            token=LOGFIRE_TOKEN,
            service_name=service_name,
        )
        Agent.instrument_all(True)
        return

    print("OpenTelemetry is not enabled. Set the LOGFIRE_TOKEN or LOGFIRE_TRACES_ENDPOINT environment variable.")
|