logler 1.0.7-cp311-cp311-win_amd64.whl → 1.1.2-cp311-cp311-win_amd64.whl
- logler/__init__.py +106 -1
- logler/bootstrap.py +1 -3
- logler/cli.py +1 -55
- logler/investigate.py +59 -14
- logler/llm_cli.py +491 -0
- logler/models.py +523 -0
- logler/sql.py +147 -0
- logler/tree_formatter.py +20 -9
- {logler-1.0.7.dist-info → logler-1.1.2.dist-info}/METADATA +40 -85
- logler-1.1.2.dist-info/RECORD +23 -0
- logler_rs/logler_rs.cp311-win_amd64.pyd +0 -0
- logler/web/__init__.py +0 -3
- logler/web/app.py +0 -810
- logler/web/static/css/tailwind.css +0 -1
- logler/web/static/css/tailwind.input.css +0 -3
- logler/web/static/logler-logo.png +0 -0
- logler/web/tailwind.config.cjs +0 -9
- logler/web/templates/index.html +0 -1454
- logler-1.0.7.dist-info/RECORD +0 -28
- {logler-1.0.7.dist-info → logler-1.1.2.dist-info}/WHEEL +0 -0
- {logler-1.0.7.dist-info → logler-1.1.2.dist-info}/entry_points.txt +0 -0
- {logler-1.0.7.dist-info → logler-1.1.2.dist-info}/licenses/LICENSE +0 -0
logler/__init__.py
CHANGED
@@ -2,7 +2,7 @@
 Logler - Beautiful local log viewer with thread tracking and real-time updates.
 """
 
-__version__ = "1.0.7"
+__version__ = "1.1.2"
 __author__ = "Logler Contributors"
 
 from .parser import LogParser, LogEntry
@@ -10,7 +10,62 @@ from .tracker import ThreadTracker
 from .log_reader import LogReader
 from .tree_formatter import format_tree, format_waterfall, print_tree, print_waterfall
 
+# Pydantic models for type-safe log analysis
+from .models import (
+    # Core entry models
+    LogEntry as TypedLogEntry,
+    LogLevel,
+    LogFormat,
+    # Search models
+    SearchResult,
+    SearchResults,
+    SearchSummary,
+    SearchCount,
+    # Timeline models
+    ThreadTimeline,
+    # Hierarchy models
+    SpanNode,
+    ThreadHierarchy,
+    BottleneckInfo,
+    NodeType,
+    DetectionMethod,
+    # Pattern models
+    PatternMatch,
+    PatternResults,
+    # Sampling
+    SamplingResult,
+    # Error analysis
+    ErrorAnalysis,
+    RootCause,
+    PropagationChain,
+    ImpactSummary,
+    # File/context
+    FileMetadata,
+    ContextResult,
+    # Cross-service
+    TimelineEntry,
+    CrossServiceTimeline,
+    # Correlation
+    CorrelationLink,
+    CorrelationChains,
+    # Export
+    TraceSpan,
+    TraceExport,
+    # Insights
+    Insight,
+    InsightsResult,
+    # Schema
+    SchemaField,
+    LogSchema,
+    # Helper functions
+    parse_log_entry,
+    parse_search_results,
+    parse_thread_hierarchy,
+    parse_error_analysis,
+)
+
 __all__ = [
+    # Original exports
     "LogParser",
     "LogEntry",
     "ThreadTracker",
@@ -19,4 +74,54 @@ __all__ = [
     "format_waterfall",
     "print_tree",
     "print_waterfall",
+    # Pydantic models - Core
+    "TypedLogEntry",
+    "LogLevel",
+    "LogFormat",
+    # Pydantic models - Search
+    "SearchResult",
+    "SearchResults",
+    "SearchSummary",
+    "SearchCount",
+    # Pydantic models - Timeline
+    "ThreadTimeline",
+    # Pydantic models - Hierarchy
+    "SpanNode",
+    "ThreadHierarchy",
+    "BottleneckInfo",
+    "NodeType",
+    "DetectionMethod",
+    # Pydantic models - Patterns
+    "PatternMatch",
+    "PatternResults",
+    # Pydantic models - Sampling
+    "SamplingResult",
+    # Pydantic models - Error analysis
+    "ErrorAnalysis",
+    "RootCause",
+    "PropagationChain",
+    "ImpactSummary",
+    # Pydantic models - File/context
+    "FileMetadata",
+    "ContextResult",
+    # Pydantic models - Cross-service
+    "TimelineEntry",
+    "CrossServiceTimeline",
+    # Pydantic models - Correlation
+    "CorrelationLink",
+    "CorrelationChains",
+    # Pydantic models - Export
+    "TraceSpan",
+    "TraceExport",
+    # Pydantic models - Insights
+    "Insight",
+    "InsightsResult",
+    # Pydantic models - Schema
+    "SchemaField",
+    "LogSchema",
+    # Helper functions
+    "parse_log_entry",
+    "parse_search_results",
+    "parse_thread_hierarchy",
+    "parse_error_analysis",
 ]
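Net effect: the new Pydantic models become importable straight from the package root, alongside the original exports. A minimal smoke test; the names below are taken verbatim from the diff, and anything beyond importing them is an assumption:

    import logler

    assert logler.__version__ == "1.1.2"

    # The untyped LogEntry and the new Pydantic-typed entry coexist under
    # different names, so code written against 1.0.7 keeps working:
    from logler import LogEntry, TypedLogEntry, LogLevel, parse_log_entry

    print(sorted(name for name in logler.__all__ if name.startswith("parse_")))
    # ['parse_error_analysis', 'parse_log_entry',
    #  'parse_search_results', 'parse_thread_hierarchy']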
logler/bootstrap.py
CHANGED
@@ -2,7 +2,7 @@
 Helper to ensure the Rust backend is installed.
 
 Attempts to import `logler_rs`; if missing, runs `maturin develop` against
-`crates/logler-py/Cargo.toml
+`crates/logler-py/Cargo.toml`.
 """
 
 from __future__ import annotations
@@ -33,8 +33,6 @@ def ensure_rust_backend(auto_install: bool = True) -> bool:
         "--release",
         "-m",
         str(repo_root / "crates" / "logler-py" / "Cargo.toml"),
-        "--features",
-        "sql",
 ]
     try:
         subprocess.run(
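For reference, the bootstrap command after this change amounts to the sketch below. Only the arguments visible in the hunk are confirmed; the repo-root discovery and the bare `maturin` executable name are assumptions:

    import subprocess
    from pathlib import Path

    repo_root = Path(__file__).resolve().parents[1]  # assumed layout

    cmd = [
        "maturin",  # assumption: the hunk only shows the trailing arguments
        "develop",
        "--release",
        "-m", str(repo_root / "crates" / "logler-py" / "Cargo.toml"),
        # "--features", "sql" was dropped in 1.1.2; per the investigate.py
        # changes below, SQL now routes through the new pure-Python logler/sql.py.
    ]
    subprocess.run(cmd, check=True)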
logler/cli.py
CHANGED
@@ -2,16 +2,12 @@
 Command-line interface for Logler.
 """
 
+import asyncio
 import click
 import sys
-from pathlib import Path
 from typing import Optional
-import asyncio
-import socket
-from contextlib import closing
 
 from .terminal import TerminalViewer
-from .web.app import run_server
 from .llm_cli import llm as llm_group
 
 
@@ -28,56 +24,6 @@ def main(ctx):
         click.echo(ctx.get_help())
 
 
-def _find_open_port(host: str, start_port: int, max_tries: int = 20) -> int:
-    """Find the next available port starting from start_port."""
-    for candidate in range(start_port, start_port + max_tries):
-        with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
-            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-            try:
-                sock.bind((host, candidate))
-                return candidate
-            except OSError:
-                continue
-    raise RuntimeError(f"No open port found in range {start_port}-{start_port + max_tries - 1}")
-
-
-@main.command()
-@click.option("--host", default="0.0.0.0", help="Host to bind to")
-@click.option("--port", default=7607, help="Port to bind to (default 7607 ~ 'LOGL')")
-@click.option(
-    "--auto-port/--no-auto-port",
-    default=True,
-    help="Pick the next free port if the chosen one is busy",
-)
-@click.option("--open", "-o", is_flag=True, help="Open browser automatically")
-@click.argument("files", nargs=-1, type=click.Path(exists=True))
-def serve(host: str, port: int, auto_port: bool, open: bool, files: tuple):
-    """
-    Start the web server interface.
-
-    Examples:
-        logler serve          # Start with file picker
-        logler serve app.log  # Start with specific file
-        logler serve *.log    # Start with multiple files
-    """
-    if auto_port:
-        chosen_port = _find_open_port(host, port)
-        if chosen_port != port:
-            click.echo(f"⚠️ Port {port} busy, using {chosen_port} instead")
-        port = chosen_port
-
-    click.echo(f"🚀 Starting Logler web server on http://{host}:{port}")
-
-    file_paths = [str(Path(f).absolute()) for f in files] if files else []
-
-    if open:
-        import webbrowser
-
-        webbrowser.open(f"http://localhost:{port}")
-
-    asyncio.run(run_server(host, port, file_paths))
-
-
 @main.command()
 @click.argument("files", nargs=-1, required=True, type=click.Path(exists=True))
 @click.option("-n", "--lines", type=int, help="Number of lines to show")
logler/investigate.py
CHANGED
@@ -1942,7 +1942,20 @@ class Investigator:
         """Search loaded files."""
         filters = {"levels": []}
         if level:
-
+            level_map = {
+                "trace": "Trace",
+                "debug": "Debug",
+                "info": "Info",
+                "warn": "Warn",
+                "warning": "Warn",
+                "error": "Error",
+                "fatal": "Fatal",
+                "critical": "Fatal",
+            }
+            normalized_level = level_map.get(level.lower())
+            if not normalized_level:
+                raise ValueError(f"Unknown log level: {level}")
+            filters["levels"] = [normalized_level]
         if thread_id:
             filters["thread_id"] = thread_id
         if correlation_id:
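The mapping's behavior in isolation (dict copied verbatim from the hunk above): lookups are case-insensitive via level.lower(), "warning" and "critical" fold into the canonical "Warn" and "Fatal" variants, and an unrecognized level comes back None, which search() turns into a ValueError:

    level_map = {
        "trace": "Trace", "debug": "Debug", "info": "Info",
        "warn": "Warn", "warning": "Warn",
        "error": "Error", "fatal": "Fatal", "critical": "Fatal",
    }

    assert level_map.get("WARNING".lower()) == "Warn"
    assert level_map.get("Critical".lower()) == "Fatal"
    assert level_map.get("verbose") is None  # -> ValueError in search()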
@@ -2005,7 +2018,7 @@ class Investigator:
 
     def sql_query(self, query: str) -> List[Dict[str, Any]]:
         """
-        Execute SQL query on loaded logs
+        Execute SQL query on loaded logs.
 
         Args:
             query: SQL query string
@@ -2021,24 +2034,52 @@
             ORDER BY count DESC
         \"\"\")
         """
-
-
-        result_json = self._investigator.sql_query(query)
+        engine = self._get_sql_engine()
+        result_json = engine.query(query)
         return json.loads(result_json)
 
     def sql_tables(self) -> List[str]:
-        """Get list of available SQL tables
-
-
-        return self._investigator.sql_tables()
+        """Get list of available SQL tables."""
+        engine = self._get_sql_engine()
+        return engine.get_tables()
 
     def sql_schema(self, table: str) -> List[Dict[str, Any]]:
-        """Get schema for a SQL table
-
-
-        result_json = self._investigator.sql_schema(table)
+        """Get schema for a SQL table."""
+        engine = self._get_sql_engine()
+        result_json = engine.get_schema(table)
         return json.loads(result_json)
 
+    def _get_sql_engine(self):
+        """Get a SQL engine loaded with current log data."""
+        from logler.parser import LogParser
+        from logler.sql import SqlEngine
+
+        # Parse files and build index
+        parser = LogParser()
+        indices: Dict[str, Any] = {}
+
+        for file_path in self._files:
+            entries = []
+            with open(file_path, encoding="utf-8", errors="replace") as f:
+                for line_number, line in enumerate(f, start=1):
+                    line = line.rstrip("\n\r")
+                    if line:
+                        entry = parser.parse_line(line_number, line)
+                        entries.append(entry)
+
+            # Create a simple object with entries attribute
+            class LogIndex:
+                pass
+
+            idx = LogIndex()
+            idx.entries = entries
+            indices[file_path] = idx
+
+        # Create and load SQL engine
+        engine = SqlEngine()
+        engine.load_files(indices)
+        return engine
+
     def build_hierarchy(
         self,
         root_identifier: str,
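Taken together, the SQL surface now routes through the pure-Python engine on every call. A usage sketch: the method names come from the diff, while the Investigator construction, file loading, and the table and column names are assumptions:

    from logler.investigate import Investigator

    inv = Investigator()     # hypothetical setup; construction isn't shown here
    print(inv.sql_tables())  # tables exposed by the new SqlEngine

    rows = inv.sql_query("""
        SELECT level, COUNT(*) AS count
        FROM logs               -- assumed table name
        GROUP BY level
        ORDER BY count DESC
    """)
    for row in rows:
        print(row["level"], row["count"])

Worth noting the trade-off in the new _get_sql_engine(): it re-parses every loaded file and rebuilds the engine on each call, so queries always see fresh data at the cost of repeated parsing.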
@@ -2156,7 +2197,11 @@ def cross_service_timeline(
 
     for service_name, service_files in files.items():
         if correlation_id:
-
+            # WORKAROUND: Only pass correlation_id OR trace_id, not both, to avoid
+            # Rust-side deduplication bug that causes duplicate entries when multiple
+            # IDs match the same log entry. Prefer correlation_id when both are provided.
+            # TODO: Remove this workaround when Rust deduplication is fixed (Phase 2)
+            result = follow_thread(service_files, correlation_id=correlation_id)
             entries = result.get("entries", [])
         elif trace_id:
             result = follow_thread(service_files, trace_id=trace_id)