aixtools 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aixtools might be problematic.
- aixtools/__init__.py +5 -0
- aixtools/a2a/__init__.py +5 -0
- aixtools/a2a/app.py +126 -0
- aixtools/a2a/utils.py +115 -0
- aixtools/agents/__init__.py +12 -0
- aixtools/agents/agent.py +164 -0
- aixtools/agents/agent_batch.py +74 -0
- aixtools/app.py +143 -0
- aixtools/context.py +12 -0
- aixtools/db/__init__.py +17 -0
- aixtools/db/database.py +110 -0
- aixtools/db/vector_db.py +115 -0
- aixtools/log_view/__init__.py +17 -0
- aixtools/log_view/app.py +195 -0
- aixtools/log_view/display.py +285 -0
- aixtools/log_view/export.py +51 -0
- aixtools/log_view/filters.py +41 -0
- aixtools/log_view/log_utils.py +26 -0
- aixtools/log_view/node_summary.py +229 -0
- aixtools/logfilters/__init__.py +7 -0
- aixtools/logfilters/context_filter.py +67 -0
- aixtools/logging/__init__.py +30 -0
- aixtools/logging/log_objects.py +227 -0
- aixtools/logging/logging_config.py +116 -0
- aixtools/logging/mcp_log_models.py +102 -0
- aixtools/logging/mcp_logger.py +172 -0
- aixtools/logging/model_patch_logging.py +87 -0
- aixtools/logging/open_telemetry.py +36 -0
- aixtools/mcp/__init__.py +9 -0
- aixtools/mcp/example_client.py +30 -0
- aixtools/mcp/example_server.py +22 -0
- aixtools/mcp/fast_mcp_log.py +31 -0
- aixtools/mcp/faulty_mcp.py +320 -0
- aixtools/model_patch/model_patch.py +65 -0
- aixtools/server/__init__.py +23 -0
- aixtools/server/app_mounter.py +90 -0
- aixtools/server/path.py +72 -0
- aixtools/server/utils.py +70 -0
- aixtools/testing/__init__.py +9 -0
- aixtools/testing/aix_test_model.py +147 -0
- aixtools/testing/mock_tool.py +66 -0
- aixtools/testing/model_patch_cache.py +279 -0
- aixtools/tools/doctor/__init__.py +3 -0
- aixtools/tools/doctor/tool_doctor.py +61 -0
- aixtools/tools/doctor/tool_recommendation.py +44 -0
- aixtools/utils/__init__.py +35 -0
- aixtools/utils/chainlit/cl_agent_show.py +82 -0
- aixtools/utils/chainlit/cl_utils.py +168 -0
- aixtools/utils/config.py +118 -0
- aixtools/utils/config_util.py +69 -0
- aixtools/utils/enum_with_description.py +37 -0
- aixtools/utils/persisted_dict.py +99 -0
- aixtools/utils/utils.py +160 -0
- aixtools-0.1.0.dist-info/METADATA +355 -0
- aixtools-0.1.0.dist-info/RECORD +58 -0
- aixtools-0.1.0.dist-info/WHEEL +5 -0
- aixtools-0.1.0.dist-info/entry_points.txt +2 -0
- aixtools-0.1.0.dist-info/top_level.txt +1 -0
aixtools/db/database.py
ADDED
@@ -0,0 +1,110 @@

```python
"""
Database Interface for Clinical Trials Information.

This module provides a database interface for querying clinical trials data
from the SQLite database.
"""

import sqlite3
from contextlib import contextmanager
from pathlib import Path
from typing import Any

import pandas as pd

from aixtools.logging.logging_config import get_logger

logger = get_logger(__name__)


class DatabaseError(Exception):
    """Exception raised for database-related errors."""


class SqliteDb:
    """
    Database interface.
    """

    def __init__(self, db_path: str | Path):
        """Initialize the database interface"""
        self.db_path = Path(db_path)
        if not self.db_path.exists():
            raise FileNotFoundError(f"Database file not found: {self.db_path}")
        # Test connection
        with self.connection() as conn:
            logger.info("Connected to database: %s, connection: %s", self.db_path, conn)

    @contextmanager
    def connection(self):
        """
        Context manager for database connections.

        Yields:
            sqlite3.Connection: An active database connection
        """
        conn = None
        try:
            conn = sqlite3.connect(self.db_path)
            # Enable dictionary row factory
            conn.row_factory = sqlite3.Row
            yield conn
        except sqlite3.Error as e:
            raise DatabaseError(f"Database error: {e}") from e
        finally:
            if conn:
                conn.close()

    def query(self, query: str, params: dict[str, Any] | None = None) -> list[dict[str, Any]]:
        """
        Execute a SQL query and return the results as a list of dictionaries.

        Args:
            query: SQL query to execute
            params: Parameters for the query

        Returns:
            List of dictionaries representing the query results
        """
        with self.connection() as conn:
            cursor = conn.cursor()
            if params:
                cursor.execute(query, params)
            else:
                cursor.execute(query)

            results = cursor.fetchall()
            # Convert sqlite3.Row objects to dictionaries
            return [dict(row) for row in results]

    def query_df(self, query: str, params: dict[str, Any] | None = None) -> pd.DataFrame:
        """
        Execute a SQL query and return the results as a pandas DataFrame.

        Args:
            query: SQL query to execute.
            params: Parameters to substitute in the query.

        Returns:
            A pandas DataFrame containing the query results.
        """
        with self.connection() as conn:
            if params:
                df = pd.read_sql_query(query, conn, params=params)
            else:
                df = pd.read_sql_query(query, conn)
            return df

    def validate(self, query) -> str | None:
        """
        Validate the SQL query by executing an EXPLAIN QUERY PLAN statement.
        Returns the error string if there is an issue, otherwise returns None
        """
        with self.connection() as conn:
            try:
                cursor = conn.cursor()
                cursor.execute(f"EXPLAIN QUERY PLAN\n{query}")
                cursor.fetchall()
                return None
            except Exception as e:  # pylint: disable=broad-exception-caught
                return str(e)
```
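For orientation, a minimal usage sketch of `SqliteDb` as defined above; the database file and `trials` table are hypothetical, not part of this package:

```python
from aixtools.db.database import SqliteDb

# Hypothetical SQLite file; __init__ raises FileNotFoundError if it is missing
db = SqliteDb("trials.db")

# Named-parameter query returning a list of dicts (via sqlite3.Row)
rows = db.query(
    "SELECT nct_id, phase FROM trials WHERE phase = :phase",
    params={"phase": "Phase 3"},
)

# The same interface as a pandas DataFrame
df = db.query_df("SELECT COUNT(*) AS n FROM trials")

# validate() compiles the query with EXPLAIN QUERY PLAN:
# None on success, the error string otherwise
assert db.validate("SELECT * FROM trials") is None
```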
aixtools/db/vector_db.py
ADDED
@@ -0,0 +1,115 @@

```python
"""
Vector database implementation for embedding storage and similarity search.
"""

from langchain_chroma import Chroma
from langchain_core.embeddings import Embeddings
from langchain_ollama import OllamaEmbeddings
from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings

from aixtools.logging.logging_config import get_logger
from aixtools.utils.config import (
    AZURE_OPENAI_API_KEY,
    AZURE_VDB_EMBEDDINGS_MODEL_NAME,
    OLLAMA_VDB_EMBEDDINGS_MODEL_NAME,
    OPENAI_API_KEY,
    OPENAI_VDB_EMBEDDINGS_MODEL_NAME,
    VDB_CHROMA_PATH,
    VDB_EMBEDDINGS_MODEL_FAMILY,
)

CREATE_DB = False

_vector_dbs = {}

logger = get_logger(__name__)


def get_vdb_embedding(model_family=VDB_EMBEDDINGS_MODEL_FAMILY) -> Embeddings:
    """Get the embedding model for vector storage"""
    match model_family:
        case "openai":
            return OpenAIEmbeddings(model=OPENAI_VDB_EMBEDDINGS_MODEL_NAME, api_key=OPENAI_API_KEY)  # type: ignore
        case "azure":
            return AzureOpenAIEmbeddings(  # type: ignore
                model=AZURE_VDB_EMBEDDINGS_MODEL_NAME, api_key=AZURE_OPENAI_API_KEY
            )
        case "ollama":
            return OllamaEmbeddings(model=OLLAMA_VDB_EMBEDDINGS_MODEL_NAME)  # type: ignore
        case _:
            raise ValueError(f"Model family {model_family} not supported")


def get_vector_db(collection_name: str) -> Chroma:
    """Implement singleton pattern for database connections"""
    # _vector_dbs will not be re-assigned, but it will be modified
    global _vector_dbs  # noqa: PLW0602, pylint: disable=protected-access,global-variable-not-assigned
    if collection_name not in _vector_dbs:
        print(f"Creating new DB connection: {collection_name=}")
        vdb = Chroma(
            persist_directory=str(VDB_CHROMA_PATH),
            collection_name=collection_name,
            embedding_function=get_vdb_embedding(),
        )
        _vector_dbs[collection_name] = vdb
    return _vector_dbs[collection_name]


def vdb_add(vdb: Chroma, text: str, doc_id: str, meta: list[dict] | dict | None = None, force=False) -> str | None:
    """
    Add a document to the database if it's not already there.
    """
    if not force and vdb_has_id(vdb, doc_id):
        return None  # Document already exists, return None
    if isinstance(meta, list):
        metadatas = meta
    elif isinstance(meta, dict):
        metadatas = [meta]
    else:
        metadatas = None
    ids = vdb.add_texts(texts=[text], ids=[doc_id], metadatas=metadatas)  # type: ignore
    if not ids:
        return None
    return ids[0]  # Return the id of the added document


def vdb_get_by_id(vdb: Chroma, doc_id: str):
    """Get document by id"""
    collection = vdb._collection  # pylint: disable=protected-access
    return collection.get(ids=[doc_id])  # query by id


def vdb_has_id(vdb: Chroma, doc_id: str):
    """Check if a document with a given id exists in the database"""
    result = vdb_get_by_id(vdb, doc_id)
    return len(result["ids"]) > 0


# Load database
def vdb_query(  # noqa: PLR0913, pylint: disable=too-many-arguments,too-many-positional-arguments
    vdb: Chroma,
    query: str,
    filter: dict[str, str] | None = None,  # pylint: disable=redefined-builtin
    where_document: dict[str, str] | None = None,
    max_items=10,
    similarity_threshold=None,
):
    """
    Query vector database with a given query, return top k results.
    Args:
        query: str, query string
        max_items: int, maximum number of items to return
        similarity_threshold: float, similarity threshold to filter the results
    """
    results = vdb.similarity_search_with_relevance_scores(
        query, k=max_items, filter=filter, where_document=where_document
    )
    logger.debug(
        "Got %s results before filter, first one's similarity score is: %s",
        len(results),
        results[0][1] if results else None,
    )
    if similarity_threshold is not None:
        results = [(doc, score) for doc, score in results if score > similarity_threshold]
        print(f"Got {len(results)} results after filter")
    return results
```
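A minimal usage sketch of these helpers, assuming the configured embedding backend (OpenAI, Azure, or Ollama) is reachable; the collection name, document text, and ids are hypothetical:

```python
from aixtools.db.vector_db import get_vector_db, vdb_add, vdb_query

# Cached per collection_name, so repeat calls reuse the same Chroma handle
vdb = get_vector_db("demo-notes")

# Inserted once; a second call with the same doc_id returns None unless force=True
vdb_add(vdb, "Aspirin reduces fever.", doc_id="note-1", meta={"source": "demo"})

# similarity_search_with_relevance_scores yields (Document, score) pairs,
# optionally pruned by similarity_threshold
for doc, score in vdb_query(vdb, "antipyretic drugs", max_items=3, similarity_threshold=0.2):
    print(f"{score:.3f} {doc.page_content}")
```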
aixtools/log_view/__init__.py
ADDED
@@ -0,0 +1,17 @@

```python
"""
Streamlit application to visualize agent nodes from log files.

This package provides tools to:
- View the most recent log file by default
- Open and analyze other log files
- Visualize nodes from agent runs with expandable/collapsible sections
- Filter nodes by various criteria
- Export visualizations
"""

from aixtools.log_view.app import main, main_cli

__all__ = [
    "main",
    "main_cli",
]
```
aixtools/log_view/app.py
ADDED
@@ -0,0 +1,195 @@

```python
"""
Main application module for the Agent Log Viewer.
"""

import argparse
import os
import subprocess
from pathlib import Path

import streamlit as st

from aixtools.log_view.display import display_node
from aixtools.log_view.export import export_nodes_to_json
from aixtools.log_view.filters import filter_nodes
from aixtools.log_view.log_utils import format_timestamp_from_filename, get_log_files
from aixtools.log_view.node_summary import NodeTitle, extract_node_types

# Now we can import our modules
from aixtools.logging.log_objects import load_from_log
from aixtools.utils.config import LOGS_DIR


def main(log_dir: Path | None = None):  # noqa: PLR0915, pylint: disable=too-many-locals,too-many-statements
    """Main function to run the Streamlit app."""
    st.set_page_config(
        page_title="Agent Log Viewer",
        layout="wide",
    )

    st.title("Agent Log Viewer")

    # Use provided log directory or default
    if log_dir is None:
        log_dir = LOGS_DIR

    # Create the logs directory if it doesn't exist
    os.makedirs(log_dir, exist_ok=True)

    st.sidebar.header("Settings")

    # Allow user to select a different log directory
    custom_log_dir = st.sidebar.text_input("Log Directory", value=str(log_dir))
    if custom_log_dir and custom_log_dir != str(log_dir):
        log_dir = Path(custom_log_dir)
        # Create the custom directory if it doesn't exist
        os.makedirs(log_dir, exist_ok=True)

    # Get log files
    log_files = get_log_files(log_dir)

    if not log_files:
        st.warning(f"No log files found in {log_dir}")
        st.info("Run an agent with logging enabled to create log files.")
        return

    # Create a dictionary of log files with formatted timestamps as display names
    log_file_options = {f"{format_timestamp_from_filename(f.name)} - {f.name}": f for f in log_files}

    # Select log file (default to most recent)
    selected_log_file_name = st.sidebar.selectbox(
        "Select Log File",
        options=list(log_file_options.keys()),
        index=0,
    )

    selected_log_file = log_file_options[selected_log_file_name]

    st.sidebar.info(f"Selected: {selected_log_file.name}")

    # Load nodes
    try:
        with st.spinner("Loading log file..."):
            nodes = load_from_log(selected_log_file)

        st.success(f"Loaded {len(nodes)} nodes from {selected_log_file.name}")

        # Create filter section in sidebar
        st.sidebar.header("Filters")

        # Text filter
        filter_text = st.sidebar.text_input("Text Search", help="Filter nodes containing this text")

        # Extract node types for filtering
        node_types = extract_node_types(nodes)

        # Type filter
        selected_types = st.sidebar.multiselect(
            "Node Types", options=sorted(node_types), default=[], help="Select node types to display"
        )

        # Attribute filter
        filter_attribute = st.sidebar.text_input("Has Attribute", help="Filter nodes that have this attribute")

        # Regex filter
        filter_regex = st.sidebar.text_input("Regex Pattern", help="Filter nodes matching this regex pattern")

        # Combine all filters
        filters = {"text": filter_text, "types": selected_types, "attribute": filter_attribute, "regex": filter_regex}

        # Apply filters
        filtered_nodes = filter_nodes(nodes, filters)

        # Show filter results
        if len(filtered_nodes) != len(nodes):
            st.info(f"Filtered to {len(filtered_nodes)} of {len(nodes)} nodes")

        # Display options
        st.sidebar.header("Display Options")

        # Option to expand all nodes by default
        expand_all = st.sidebar.checkbox("Expand All Nodes", value=False)

        # Option to select output format
        display_format = st.sidebar.radio(
            "Display Format",
            options=["Markdown", "Rich", "JSON"],
            index=0,
            help="Select the format for displaying node content",
        )

        # Export options
        st.sidebar.header("Export")

        # Export to JSON
        if st.sidebar.button("Export to JSON"):
            json_str = export_nodes_to_json(filtered_nodes)
            st.sidebar.download_button(
                label="Download JSON",
                data=json_str,
                file_name=f"agent_nodes_{selected_log_file.stem}.json",
                mime="application/json",
            )

        # Main content area - display nodes
        if filtered_nodes:
            node_title = NodeTitle()
            # Display nodes with proper formatting
            for i, node in enumerate(filtered_nodes):
                # Create a header for each node
                node_header = f"{i}: {node_title.summary(node)}"

                # Display the node with proper formatting
                with st.expander(node_header, expanded=expand_all):
                    try:
                        display_node(node, display_format=display_format)
                    except Exception as e:  # pylint: disable=broad-exception-caught
                        st.error(f"Error displaying node: {e}")
                        st.exception(e)
        else:
            st.warning("No nodes match the current filters")

    except Exception as e:  # pylint: disable=broad-exception-caught
        st.error(f"Error loading or processing log file: {e}")
        st.exception(e)


def parse_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(description="Agent Log Viewer")
    parser.add_argument("log_dir", nargs="?", type=Path, help="Directory containing log files (default: DATA_DIR/logs)")
    return parser.parse_args()


def main_cli():
    """Entry point for the command-line tool."""
    cmd_args = parse_args()

    # Print a message to indicate the app is starting
    print("Starting Agent Log Viewer...")
    print(f"Log directory: {cmd_args.log_dir or LOGS_DIR}")

    # Launch the Streamlit app

    # Get the path to this script
    script_path = Path(__file__).resolve()

    # Use streamlit run to start the app
    cmd = ["streamlit", "run", str(script_path)]

    # Add log_dir argument if provided
    if cmd_args.log_dir:
        cmd.extend(["--", str(cmd_args.log_dir)])

    # Run the command
    try:
        subprocess.run(cmd, check=False)
    except KeyboardInterrupt:
        print("\nShutting down Agent Log Viewer...")
    except Exception as e:  # pylint: disable=broad-exception-caught
        print(f"Error running Streamlit app: {e}")


if __name__ == "__main__":
    args = parse_args()
    main(args.log_dir)
```