smart-log-reader 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- smart_log_reader-1.0.0/LICENSE +21 -0
- smart_log_reader-1.0.0/PKG-INFO +117 -0
- smart_log_reader-1.0.0/README.md +97 -0
- smart_log_reader-1.0.0/pyproject.toml +32 -0
- smart_log_reader-1.0.0/setup.cfg +4 -0
- smart_log_reader-1.0.0/setup.py +2 -0
- smart_log_reader-1.0.0/smart_log_reader/analyzer.py +118 -0
- smart_log_reader-1.0.0/smart_log_reader/base.py +98 -0
- smart_log_reader-1.0.0/smart_log_reader/cli.py +187 -0
- smart_log_reader-1.0.0/smart_log_reader/display.py +52 -0
- smart_log_reader-1.0.0/smart_log_reader/formats.py +344 -0
- smart_log_reader-1.0.0/smart_log_reader/html_export.py +453 -0
- smart_log_reader-1.0.0/smart_log_reader/json_export.py +36 -0
- smart_log_reader-1.0.0/smart_log_reader/models.py +47 -0
- smart_log_reader-1.0.0/smart_log_reader/registry.py +54 -0
- smart_log_reader-1.0.0/smart_log_reader.egg-info/PKG-INFO +117 -0
- smart_log_reader-1.0.0/smart_log_reader.egg-info/SOURCES.txt +22 -0
- smart_log_reader-1.0.0/smart_log_reader.egg-info/dependency_links.txt +1 -0
- smart_log_reader-1.0.0/smart_log_reader.egg-info/entry_points.txt +2 -0
- smart_log_reader-1.0.0/smart_log_reader.egg-info/requires.txt +4 -0
- smart_log_reader-1.0.0/smart_log_reader.egg-info/top_level.txt +1 -0
- smart_log_reader-1.0.0/tests/test_analyzer.py +48 -0
- smart_log_reader-1.0.0/tests/test_cli.py +32 -0
- smart_log_reader-1.0.0/tests/test_parsers.py +35 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 ABHIRAMSSGIT
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: smart-log-reader
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: Intelligent, extensible log reader and analyzer with color-coded output and smart error grouping.
|
|
5
|
+
Author-email: ABHINAVSS <abhinavssabhi123@gmail.com>
|
|
6
|
+
License: MIT
|
|
7
|
+
Keywords: log,parser,analyzer,cli,devops
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
10
|
+
Classifier: Topic :: System :: Logging
|
|
11
|
+
Classifier: Topic :: Utilities
|
|
12
|
+
Classifier: Environment :: Console
|
|
13
|
+
Requires-Python: >=3.8
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
License-File: LICENSE
|
|
16
|
+
Requires-Dist: typer[all]>=0.9
|
|
17
|
+
Requires-Dist: rich>=13.0
|
|
18
|
+
Requires-Dist: python-dateutil>=2.8
|
|
19
|
+
Requires-Dist: rapidfuzz>=3.0
|
|
20
|
+
|
|
21
|
+
# 🐍 smart-log-reader
|
|
22
|
+
|
|
23
|
+
**Stop grep-ing blindly.**
|
|
24
|
+
smart-log-reader is an intelligent, terminal-first log analyzer that instantly parses massive log files, groups similar errors using fuzzy matching, and outputs beautifully formatted, color-coded summaries directly in your terminal.
|
|
25
|
+
|
|
26
|
+
When the terminal isn't enough, export a fully interactive HTML dashboard and serve it securely from your remote server to your local machine via an SSH tunnel.
|
|
27
|
+
|
|
28
|
+
## 🚀 Installation
|
|
29
|
+
|
|
30
|
+
Because this is a standalone CLI tool, **do not install it globally with pip**.
|
|
31
|
+
Use `pipx` to install it in an isolated environment so it never conflicts with your system dependencies.
|
|
32
|
+
|
|
33
|
+
```bash
|
|
34
|
+
pipx install smart-log-reader
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
## 🔥 Quick Start: Real-World Scenarios
|
|
38
|
+
#### 1. The "Server is on fire" check (Auto-detects format, groups errors)
|
|
39
|
+
```
|
|
40
|
+
smart-log-reader -f /var/log/syslog -l ERROR
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
#### 2. The "Odoo is crashing" audit (Filter by time and keyword)
|
|
44
|
+
```
|
|
45
|
+
smart-log-reader -f odoo-server.log -t odoo -k "psycopg2" -s "2024-03-05 10:00:00"
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
#### 3. Generate an interactive HTML report and serve it securely
|
|
49
|
+
```
|
|
50
|
+
smart-log-reader -f production.log -t python -x html --serve
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
## ✨ Core Features
|
|
54
|
+
|
|
55
|
+
- **Format Auto-Detection**
|
|
56
|
+
Automatically recognizes Python, Django, Flask, Odoo, Nginx, Apache, PostgreSQL, MySQL, and JSON-line logs.
|
|
57
|
+
|
|
58
|
+
- **Intelligent Error Grouping**
|
|
59
|
+
Uses `rapidfuzz` (85% threshold) to group thousands of repetitive tracebacks into unique "Core Issues", showing you exactly what broke and how often.
|
|
60
|
+
|
|
61
|
+
- **Zero-Memory Streaming**
|
|
62
|
+
Processes multi-gigabyte log files line-by-line without eating up your server's RAM.
|
|
63
|
+
|
|
64
|
+
- **Multi-line Unification**
|
|
65
|
+
Automatically stitches stack traces and JSON payloads back together into single, readable entries.
|
|
66
|
+
|
|
67
|
+
## 🛡️ The Killer Feature: Secure HTML Export & Tunneling
|
|
68
|
+
|
|
69
|
+
When debugging remote production servers, downloading gigabytes of logs is tedious and risky.
|
|
70
|
+
smart-log-reader solves this natively.
|
|
71
|
+
|
|
72
|
+
With `--serve`, it:
|
|
73
|
+
|
|
74
|
+
- generates a self-contained HTML dashboard
|
|
75
|
+
- starts a minimal, **localhost-only** HTTP server on the production machine
|
|
76
|
+
- lets you view it securely via SSH tunnel
|
|
77
|
+
|
|
78
|
+
**Your log data never leaves the server.**
|
|
79
|
+
|
|
80
|
+
### How to use it
|
|
81
|
+
|
|
82
|
+
**On the remote server:**
|
|
83
|
+
|
|
84
|
+
```bash
|
|
85
|
+
smart-log-reader -f /var/log/nginx/error.log -x html --serve --port 8080
|
|
86
|
+
```
|
|
87
|
+
|
|
88
|
+
**On your local machine:**
|
|
89
|
+
|
|
90
|
+
```bash
|
|
91
|
+
ssh -L 8080:127.0.0.1:8080 your-user@your-remote-server.com
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
Then open in your browser:
|
|
95
|
+
**http://localhost:8080/report_name.html**
|
|
96
|
+
|
|
97
|
+
> **Alternative (LAN / VPN only):** use `--serve-public` → gets a token-protected URL, no SSH tunnel needed.
|
|
98
|
+
|
|
99
|
+
## 🛠️ CLI Reference
|
|
100
|
+
|
|
101
|
+
| Flag | Description |
|
|
102
|
+
|-------------------|-----------------------------------------------------------------------------|
|
|
103
|
+
| `-f, --file` | **(Required)** Path to the log file |
|
|
104
|
+
| `-t, --log-type` | Force parser: `auto`, `python`, `django`, `flask`, `odoo`, `nginx`, `apache`, `postgresql`, `mysql`, `mariadb`, `jsonline` |
|
|
105
|
+
| `-l, --level` | Filter by severity: `ALL`, `ERROR`, `WARNING`, `INFO`, `DEBUG` |
|
|
106
|
+
| `-k, --keyword` | Filter logs containing a specific string or regex pattern |
|
|
107
|
+
| `-s, --start-time`| Start time filter (e.g. `YYYY-MM-DD HH:MM:SS`) |
|
|
108
|
+
| `-e, --end-time` | End time filter |
|
|
109
|
+
| `-x, --export` | Export format: `none`, `json`, `html` |
|
|
110
|
+
| `--serve` | Starts a localhost-only HTTP server for the HTML export |
|
|
111
|
+
| `--serve-public` | **[LAN/VPN ONLY]** Binds server to 0.0.0.0 with secure URL token |
|
|
112
|
+
| `-p, --port` | Specify port for the HTTP server (defaults to random available port) |
|
|
113
|
+
| `-g, --no-group-errors` | Disables fuzzy error grouping |
|
|
114
|
+
|
|
115
|
+
## License
|
|
116
|
+
|
|
117
|
+
[MIT](LICENSE)
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
# 🐍 smart-log-reader
|
|
2
|
+
|
|
3
|
+
**Stop grep-ing blindly.**
|
|
4
|
+
smart-log-reader is an intelligent, terminal-first log analyzer that instantly parses massive log files, groups similar errors using fuzzy matching, and outputs beautifully formatted, color-coded summaries directly in your terminal.
|
|
5
|
+
|
|
6
|
+
When the terminal isn't enough, export a fully interactive HTML dashboard and serve it securely from your remote server to your local machine via an SSH tunnel.
|
|
7
|
+
|
|
8
|
+
## 🚀 Installation
|
|
9
|
+
|
|
10
|
+
Because this is a standalone CLI tool, **do not install it globally with pip**.
|
|
11
|
+
Use `pipx` to install it in an isolated environment so it never conflicts with your system dependencies.
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
pipx install smart-log-reader
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## 🔥 Quick Start: Real-World Scenarios
|
|
18
|
+
#### 1. The "Server is on fire" check (Auto-detects format, groups errors)
|
|
19
|
+
```
|
|
20
|
+
smart-log-reader -f /var/log/syslog -l ERROR
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
#### 2. The "Odoo is crashing" audit (Filter by time and keyword)
|
|
24
|
+
```
|
|
25
|
+
smart-log-reader -f odoo-server.log -t odoo -k "psycopg2" -s "2024-03-05 10:00:00"
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
#### 3. Generate an interactive HTML report and serve it securely
|
|
29
|
+
```
|
|
30
|
+
smart-log-reader -f production.log -t python -x html --serve
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
## ✨ Core Features
|
|
34
|
+
|
|
35
|
+
- **Format Auto-Detection**
|
|
36
|
+
Automatically recognizes Python, Django, Flask, Odoo, Nginx, Apache, PostgreSQL, MySQL, and JSON-line logs.
|
|
37
|
+
|
|
38
|
+
- **Intelligent Error Grouping**
|
|
39
|
+
Uses `rapidfuzz` (85% threshold) to group thousands of repetitive tracebacks into unique "Core Issues", showing you exactly what broke and how often.
|
|
40
|
+
|
|
41
|
+
- **Zero-Memory Streaming**
|
|
42
|
+
Processes multi-gigabyte log files line-by-line without eating up your server's RAM.
|
|
43
|
+
|
|
44
|
+
- **Multi-line Unification**
|
|
45
|
+
Automatically stitches stack traces and JSON payloads back together into single, readable entries.
|
|
46
|
+
|
|
47
|
+
## 🛡️ The Killer Feature: Secure HTML Export & Tunneling
|
|
48
|
+
|
|
49
|
+
When debugging remote production servers, downloading gigabytes of logs is tedious and risky.
|
|
50
|
+
smart-log-reader solves this natively.
|
|
51
|
+
|
|
52
|
+
With `--serve`, it:
|
|
53
|
+
|
|
54
|
+
- generates a self-contained HTML dashboard
|
|
55
|
+
- starts a minimal, **localhost-only** HTTP server on the production machine
|
|
56
|
+
- lets you view it securely via SSH tunnel
|
|
57
|
+
|
|
58
|
+
**Your log data never leaves the server.**
|
|
59
|
+
|
|
60
|
+
### How to use it
|
|
61
|
+
|
|
62
|
+
**On the remote server:**
|
|
63
|
+
|
|
64
|
+
```bash
|
|
65
|
+
smart-log-reader -f /var/log/nginx/error.log -x html --serve --port 8080
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
**On your local machine:**
|
|
69
|
+
|
|
70
|
+
```bash
|
|
71
|
+
ssh -L 8080:127.0.0.1:8080 your-user@your-remote-server.com
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
Then open in your browser:
|
|
75
|
+
**http://localhost:8080/report_name.html**
|
|
76
|
+
|
|
77
|
+
> **Alternative (LAN / VPN only):** use `--serve-public` → gets a token-protected URL, no SSH tunnel needed.
|
|
78
|
+
|
|
79
|
+
## 🛠️ CLI Reference
|
|
80
|
+
|
|
81
|
+
| Flag | Description |
|
|
82
|
+
|-------------------|-----------------------------------------------------------------------------|
|
|
83
|
+
| `-f, --file` | **(Required)** Path to the log file |
|
|
84
|
+
| `-t, --log-type` | Force parser: `auto`, `python`, `django`, `flask`, `odoo`, `nginx`, `apache`, `postgresql`, `mysql`, `mariadb`, `jsonline` |
|
|
85
|
+
| `-l, --level` | Filter by severity: `ALL`, `ERROR`, `WARNING`, `INFO`, `DEBUG` |
|
|
86
|
+
| `-k, --keyword` | Filter logs containing a specific string or regex pattern |
|
|
87
|
+
| `-s, --start-time`| Start time filter (e.g. `YYYY-MM-DD HH:MM:SS`) |
|
|
88
|
+
| `-e, --end-time` | End time filter |
|
|
89
|
+
| `-x, --export` | Export format: `none`, `json`, `html` |
|
|
90
|
+
| `--serve` | Starts a localhost-only HTTP server for the HTML export |
|
|
91
|
+
| `--serve-public` | **[LAN/VPN ONLY]** Binds server to 0.0.0.0 with secure URL token |
|
|
92
|
+
| `-p, --port` | Specify port for the HTTP server (defaults to random available port) |
|
|
93
|
+
| `-g, --no-group-errors` | Disables fuzzy error grouping |
|
|
94
|
+
|
|
95
|
+
## License
|
|
96
|
+
|
|
97
|
+
[MIT](LICENSE)
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=68.0", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "smart-log-reader"
|
|
7
|
+
version = "1.0.0"
|
|
8
|
+
description = "Intelligent, extensible log reader and analyzer with color-coded output and smart error grouping."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
license = {text = "MIT"}
|
|
11
|
+
requires-python = ">=3.8"
|
|
12
|
+
authors = [{name = "ABHINAVSS", email = "abhinavssabhi123@gmail.com"}]
|
|
13
|
+
keywords = ["log", "parser", "analyzer", "cli", "devops"]
|
|
14
|
+
classifiers = [
|
|
15
|
+
"Programming Language :: Python :: 3",
|
|
16
|
+
"License :: OSI Approved :: MIT License",
|
|
17
|
+
"Topic :: System :: Logging",
|
|
18
|
+
"Topic :: Utilities",
|
|
19
|
+
"Environment :: Console",
|
|
20
|
+
]
|
|
21
|
+
dependencies = [
|
|
22
|
+
"typer[all]>=0.9",
|
|
23
|
+
"rich>=13.0",
|
|
24
|
+
"python-dateutil>=2.8",
|
|
25
|
+
"rapidfuzz>=3.0",
|
|
26
|
+
]
|
|
27
|
+
|
|
28
|
+
[project.scripts]
|
|
29
|
+
smart-log-reader = "smart_log_reader.cli:app"
|
|
30
|
+
|
|
31
|
+
[tool.setuptools.packages.find]
|
|
32
|
+
include = ["smart_log_reader*"]
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
"""Log analysis: filtering, fuzzy error grouping, statistics."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from typing import Iterator, Optional
|
|
5
|
+
|
|
6
|
+
from rapidfuzz import fuzz
|
|
7
|
+
|
|
8
|
+
from .models import AnalysisResult, ErrorGroup, LogEntry
|
|
9
|
+
|
|
10
|
+
SIMILARITY_THRESHOLD = 85
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def _core_issue(msg: str) -> str:
|
|
14
|
+
"""Extract first meaningful line as core issue."""
|
|
15
|
+
for line in msg.split("\n"):
|
|
16
|
+
s = line.strip()
|
|
17
|
+
if s:
|
|
18
|
+
return s[:200]
|
|
19
|
+
return msg[:200]
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def filter_entries(
    entries: Iterator[LogEntry],
    level: str = "ALL",
    keyword: Optional[str] = None,
    start_time: Optional[datetime] = None,
    end_time: Optional[datetime] = None,
) -> Iterator[LogEntry]:
    """Apply level, keyword, and time-range filters lazily.

    Args:
        entries: Stream of parsed log entries.
        level: Severity to keep; "ALL" keeps everything. Compared
            case-insensitively so ``-l error`` behaves like ``-l ERROR``.
        keyword: Case-insensitive regex matched against both the message and
            the full entry. If the pattern is not valid regex syntax, it is
            matched as a literal string instead (the CLI help advertises
            "string or regex", so ``-k "["`` must not crash).
        start_time: Inclusive lower bound; entries without a timestamp are
            never excluded by time filters.
        end_time: Inclusive upper bound; same timestamp caveat.

    Yields:
        Entries passing all active filters, in input order.
    """
    import re

    kw_re = None
    if keyword:
        try:
            kw_re = re.compile(keyword, re.IGNORECASE)
        except re.error:
            # Invalid regex (e.g. "[", "(") — fall back to a literal match
            # rather than raising, matching the documented contract.
            kw_re = re.compile(re.escape(keyword), re.IGNORECASE)

    # Normalize once, outside the loop. The original compared
    # `level != "ALL"` case-sensitively, so a lowercase "error" silently
    # filtered out every entry.
    wanted = level.upper() if level else "ALL"

    for e in entries:
        if wanted != "ALL" and e.level != wanted:
            continue
        if start_time and e.timestamp and e.timestamp < start_time:
            continue
        if end_time and e.timestamp and e.timestamp > end_time:
            continue
        if kw_re and not kw_re.search(e.message) and not kw_re.search(e.full_entry):
            continue
        yield e
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def group_errors(entries: list[LogEntry], threshold: int = SIMILARITY_THRESHOLD) -> list[ErrorGroup]:
    """Group similar ERROR entries using fuzzy matching.

    Each entry's first meaningful line (its "core issue") is compared against
    the core issue of every existing group with rapidfuzz's token_sort_ratio;
    the entry joins the FIRST group scoring >= ``threshold`` (greedy, order
    dependent), otherwise it seeds a new group. Complexity is
    O(len(errors) * len(groups)) fuzzy comparisons.

    Side effects: mutates each ERROR entry's ``category`` and
    ``occurrence_count`` attributes in place.

    Returns:
        Groups sorted by descending occurrence count.
    """
    errors = [e for e in entries if e.level == "ERROR"]
    if not errors:
        return []

    groups: list[ErrorGroup] = []
    for entry in errors:
        core = _core_issue(entry.message)
        matched = False
        for g in groups:
            if fuzz.token_sort_ratio(core, g.core_issue) >= threshold:
                g.count += 1
                g.entries.append(entry)
                # Widen the group's observed time window, ignoring entries
                # with no timestamp.
                if entry.timestamp:
                    if g.first_seen is None or entry.timestamp < g.first_seen:
                        g.first_seen = entry.timestamp
                    if g.last_seen is None or entry.timestamp > g.last_seen:
                        g.last_seen = entry.timestamp
                # Label the entry with the group's (truncated) core issue.
                entry.category = g.core_issue[:80]
                # Interim value; overwritten by the back-fill pass below.
                entry.occurrence_count = g.count
                matched = True
                break
        if not matched:
            g = ErrorGroup(
                representative=entry.message,
                core_issue=core,
                count=1,
                first_seen=entry.timestamp,
                last_seen=entry.timestamp,
                entries=[entry],
            )
            groups.append(g)
            entry.category = core[:80]

    # Back-fill occurrence counts so every member of a group carries the
    # FINAL group size, not the running count at the time it was added.
    for g in groups:
        for e in g.entries:
            e.occurrence_count = g.count
    groups.sort(key=lambda g: g.count, reverse=True)
    return groups
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def analyze(
    entries: list[LogEntry],
    do_group: bool = True,
    detected_format: str = "generic",
) -> AnalysisResult:
    """Build a full AnalysisResult from already-filtered entries.

    Tallies per-level counts, tracks the observed time span, and optionally
    runs fuzzy error grouping over the ERROR entries.
    """
    # Dispatch table: normalized level -> counter attribute to bump.
    counter_attr = {
        "ERROR": "error_count",
        "WARNING": "warning_count",
        "INFO": "info_count",
        "DEBUG": "debug_count",
    }

    result = AnalysisResult(detected_format=detected_format)
    result.entries = entries
    result.parsed_entries = len(entries)

    for entry in entries:
        # NOTE(review): counts merged (multi-line) entries, not raw file lines.
        result.total_lines += 1

        attr = counter_attr.get(entry.level)
        if attr is not None:
            setattr(result, attr, getattr(result, attr) + 1)

        ts = entry.timestamp
        if ts:
            if result.time_span_start is None or ts < result.time_span_start:
                result.time_span_start = ts
            if result.time_span_end is None or ts > result.time_span_end:
                result.time_span_end = ts

    if do_group:
        result.error_groups = group_errors(entries)
        result.unique_errors = len(result.error_groups)
    return result
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
"""Base parser with multi-line handling."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
import re
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from typing import Iterator, Optional, TextIO
|
|
6
|
+
|
|
7
|
+
from dateutil import parser as dtparser
|
|
8
|
+
|
|
9
|
+
from .models import LogEntry
|
|
10
|
+
|
|
11
|
+
# Broad pattern recognizing lines that START a new log entry, i.e. lines
# beginning with any common timestamp shape. Lines that do NOT match are
# treated as continuations (stack traces, wrapped messages) by
# BaseParser.is_continuation.
_TIMESTAMP_START = re.compile(
    r"^\d{4}[-/]\d{2}[-/]\d{2}[T ]\d{2}:\d{2}"  # ISO-ish: 2024-03-05 10:00 / 2024/03/05T10:00
    r"|^\w{3}\s+\d{1,2}\s+\d{2}:\d{2}:\d{2}"  # syslog: "Mar  5 10:00:00"
    r"|^\[\d{4}[-/]\d{2}[-/]\d{2}"  # bracket-wrapped: "[2024-03-05 ..."
)

# Canonical severity mapping: collapses the many level names emitted by
# different loggers (Postgres NOTICE/LOG, Java SEVERE, syslog PANIC, ...)
# onto the four levels the filter/analyzer understand.
LEVEL_NORM = {
    # anything fatal-ish counts as ERROR
    "CRITICAL": "ERROR", "FATAL": "ERROR", "SEVERE": "ERROR", "PANIC": "ERROR",
    "WARN": "WARNING", "WARNING": "WARNING",
    "ERROR": "ERROR", "INFO": "INFO", "DEBUG": "DEBUG",
    # informational aliases (PostgreSQL et al.)
    "NOTICE": "INFO", "LOG": "INFO", "NOTE": "INFO",
    # PostgreSQL's graded debug levels all map to DEBUG
    "DEBUG1": "DEBUG", "DEBUG2": "DEBUG", "DEBUG3": "DEBUG",
    "DEBUG4": "DEBUG", "DEBUG5": "DEBUG",
}
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def normalize_level(raw: str) -> str:
    """Collapse a raw severity token onto ERROR/WARNING/INFO/DEBUG.

    Tokens not present in LEVEL_NORM are returned uppercased but otherwise
    unchanged.
    """
    token = raw.upper()
    return LEVEL_NORM.get(token, token)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def safe_parse_ts(text: str) -> Optional[datetime]:
    """Best-effort timestamp parse: None for empty or unparseable input.

    Uses dateutil's fuzzy mode so timestamps embedded in surrounding text
    still parse.
    """
    if not text:
        return None
    try:
        parsed = dtparser.parse(text, fuzzy=True)
    except (ValueError, OverflowError):
        return None
    return parsed
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class BaseParser:
    """Base class all format parsers extend.

    Subclasses normally override ``primary_pattern`` (used for format
    auto-detection) and ``parse_line`` (format-specific field extraction);
    the multi-line merging logic in ``stream_entries`` is shared.
    """

    # Parser identifier surfaced by the registry / CLI.
    name: str = "generic"
    # Compiled regex a subclass uses for detection; None means "generic
    # fallback" and yields a small fixed confidence.
    primary_pattern: Optional[re.Pattern] = None

    @classmethod
    def confidence(cls, sample_lines: list[str]) -> float:
        """Return 0-1 confidence that these lines match this parser."""
        pattern = cls.primary_pattern
        if pattern is None:
            return 0.05  # generic fallback: always a weak candidate
        matching = sum(1 for sample in sample_lines if pattern.search(sample))
        return matching / max(len(sample_lines), 1)

    def parse_line(self, line: str) -> Optional[LogEntry]:
        """Parse a single line. Return None if it's a continuation."""
        return LogEntry(raw=line, message=line.strip(), full_entry=line)

    def is_continuation(self, line: str) -> bool:
        """Return True if line continues the previous entry (stack trace etc)."""
        if not line.strip():
            return True  # blank lines always attach to the current entry
        return _TIMESTAMP_START.match(line) is None

    def stream_entries(self, fh: TextIO) -> Iterator[LogEntry]:
        """Stream parsed entries from a file handle, merging multi-line entries."""
        current: Optional[LogEntry] = None

        for line_number, raw_line in enumerate(fh, start=1):
            if not self.is_continuation(raw_line):
                candidate = self.parse_line(raw_line)
                if candidate is not None:
                    # A new entry starts: flush the one being accumulated.
                    if current is not None:
                        yield current
                    candidate.line_number = line_number
                    current = candidate
                    continue
                # parse_line declined — fall through and treat the line as
                # a continuation of the current entry.

            if current is not None:
                # Continuation: grow both the raw block and the message.
                current.full_entry += raw_line
                tail = raw_line.strip()
                if tail:
                    current.message += "\n" + tail
            else:
                # Orphan continuation before any entry — emit standalone.
                yield LogEntry(raw=raw_line, message=raw_line.strip(),
                               full_entry=raw_line, line_number=line_number)

        if current is not None:
            yield current
|
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
"""CLI entry point using Typer."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Optional
|
|
5
|
+
|
|
6
|
+
import typer
|
|
7
|
+
from dateutil import parser as dtparser
|
|
8
|
+
from rich.console import Console
|
|
9
|
+
|
|
10
|
+
from .analyzer import analyze, filter_entries
|
|
11
|
+
from .display import display
|
|
12
|
+
from .registry import detect_format, get_parser
|
|
13
|
+
|
|
14
|
+
# Typer application object. This is the console-script entry point declared
# in pyproject.toml: smart-log-reader = "smart_log_reader.cli:app".
app = typer.Typer(
    name="smart-log-reader",
    help="Intelligent log reader and analyzer with color-coded output and smart error grouping.",
    add_completion=False,
)

# Accepted --log-type values; "auto" triggers registry.detect_format().
LOG_TYPES = ["auto", "python", "django", "flask", "odoo", "nginx", "apache",
             "postgresql", "mysql", "mariadb", "generic", "jsonline"]
# Accepted --level values (validated by filter_entries semantics).
LEVELS = ["ALL", "ERROR", "WARNING", "INFO", "DEBUG"]
# Accepted --export values.
EXPORT_FORMATS = ["none", "json", "html"]
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _parse_time(val):
|
|
27
|
+
if not val:
|
|
28
|
+
return None
|
|
29
|
+
try:
|
|
30
|
+
return dtparser.parse(val, fuzzy=True)
|
|
31
|
+
except (ValueError, OverflowError):
|
|
32
|
+
raise typer.BadParameter(f"Cannot parse datetime: {val}")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _safe_output_path(log_file: Path, fmt: str, user_output: Optional[Path]) -> Path:
|
|
36
|
+
"""
|
|
37
|
+
Resolve the output path for export files.
|
|
38
|
+
|
|
39
|
+
Priority:
|
|
40
|
+
1. User explicitly provided --output → honour it as-is (their responsibility).
|
|
41
|
+
2. Otherwise → write to ~/.smart-log-reader/reports/
|
|
42
|
+
with a timestamped name so we never hit
|
|
43
|
+
PermissionError on /var/log/* or similar.
|
|
44
|
+
"""
|
|
45
|
+
if user_output:
|
|
46
|
+
return user_output
|
|
47
|
+
|
|
48
|
+
from .html_export import safe_report_path
|
|
49
|
+
ext = {"json": ".json", "html": ".html"}.get(fmt, ".out")
|
|
50
|
+
return safe_report_path(log_file, suffix=ext)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def version_callback(value):
    """Eager typer callback for --version: print the version and exit.

    "unknown" is printed when the package metadata is unavailable (e.g.
    running from a source checkout that was never installed).
    """
    if not value:
        return
    import importlib.metadata
    try:
        version = importlib.metadata.version("smart-log-reader")
    except importlib.metadata.PackageNotFoundError:
        version = "unknown"
    typer.echo(f"smart-log-reader v{version}")
    raise typer.Exit()
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@app.command()
def main(
    file: Path = typer.Option(..., "--file", "-f", help="Path to the log file."),
    log_type: str = typer.Option("auto", "--log-type", "-t",
                                 help=f"Log format: {', '.join(LOG_TYPES)}"),
    level: str = typer.Option("ALL", "--level", "-l",
                              help=f"Filter by level: {', '.join(LEVELS)}"),
    keyword: Optional[str] = typer.Option(None, "--keyword", "-k",
                                          help="Keyword or regex filter."),
    start_time: Optional[str] = typer.Option(None, "--start-time", "-s",
                                             help="Start time filter. (YYYY-MM-DD HH:MM:SS)"),
    end_time: Optional[str] = typer.Option(None, "--end-time", "-e",
                                           help="End time filter. (YYYY-MM-DD HH:MM:SS)"),
    export: str = typer.Option("none", "--export", "-x",
                               help=f"Export format: {', '.join(EXPORT_FORMATS)}"),
    output: Optional[Path] = typer.Option(
        None, "--output", "-o",
        help="Custom output path. Defaults to ~/.smart-log-reader/reports/<name>_<ts>.<ext>",
    ),
    group_errors: bool = typer.Option(True, "--group-errors/--no-group-errors", "-g",
                                      help="Enable error grouping."),
    color: bool = typer.Option(True, "--color/--no-color", help="Color output."),

    # ── serving flags ─────────────────────────────────────────────────────────
    serve: bool = typer.Option(
        False, "--serve",
        help=(
            "After HTML export, start a localhost-only HTTP server. "
            "Access via SSH tunnel: ssh -L <port>:127.0.0.1:<port> user@server"
        ),
    ),
    serve_public: bool = typer.Option(
        False, "--serve-public",
        help=(
            "[INSECURE — LAN/VPN only] Bind to 0.0.0.0 with a one-time token. "
            "Use only inside a trusted network. Never on a public internet-facing server."
        ),
    ),
    port: int = typer.Option(0, "--port", "-p",
                             help="Port for --serve / --serve-public (0 = auto)."),

    version: Optional[bool] = typer.Option(None, "--version", "-v",
                                           callback=version_callback, is_eager=True),
):
    """Parse, analyze, and display log files with smart error grouping.

    Pipeline: validate flags → pick a parser (forced or auto-detected) →
    stream + filter entries → analyze (counts, time span, optional fuzzy
    grouping) → render to the terminal → optionally export to JSON/HTML and
    serve the HTML report.
    """
    console = Console(force_terminal=color, no_color=not color)

    # ── validation ─────────────────────────────────────────────────────────────
    if not file.exists():
        console.print(f"[red]Error: File not found: {file}[/red]")
        raise typer.Exit(1)

    if log_type not in LOG_TYPES:
        console.print(f"[red]Invalid log type. Choose from: {', '.join(LOG_TYPES)}[/red]")
        raise typer.Exit(1)

    if export not in EXPORT_FORMATS:
        console.print(f"[red]Invalid export format. Choose from: {', '.join(EXPORT_FORMATS)}[/red]")
        raise typer.Exit(1)

    # --serve implies HTML export; warn only when the user explicitly asked
    # for a conflicting format (json), then force html either way.
    if (serve or serve_public) and export not in ("html", "none"):
        console.print("[yellow]--serve requires HTML export. Switching --export to html.[/yellow]")
    if serve or serve_public:
        export = "html"

    if serve and serve_public:
        console.print("[red]Use either --serve (localhost) or --serve-public, not both.[/red]")
        raise typer.Exit(1)

    # ── parse & filter ─────────────────────────────────────────────────────────
    fmt = log_type if log_type != "auto" else detect_format(str(file))
    console.print(f"[dim]Using parser: {fmt}[/dim]")

    parser = get_parser(fmt)
    st = _parse_time(start_time)
    et = _parse_time(end_time)

    # errors="replace" keeps parsing alive on logs with mixed/broken encodings.
    with open(file, "r", errors="replace") as fh:
        raw_entries = parser.stream_entries(fh)
        filtered = filter_entries(raw_entries, level=level, keyword=keyword,
                                  start_time=st, end_time=et)
        # Materialize inside the `with` — the generators read from fh lazily.
        entries = list(filtered)

    if not entries:
        console.print("[yellow]No log entries matched your filters.[/yellow]")
        raise typer.Exit(0)

    result = analyze(entries, do_group=group_errors, detected_format=fmt)

    # ── terminal display ───────────────────────────────────────────────────────
    display(result, console=console)

    # ── export ─────────────────────────────────────────────────────────────────
    if export == "none":
        return

    out_path = _safe_output_path(file, export, output)

    if export == "json":
        from .json_export import export_json
        export_json(result, out_path)
        console.print(f"\n[bold green]Exported JSON →[/bold green] {out_path}")

    elif export == "html":
        from .html_export import export_html, serve_html, prune_old_reports

        export_html(result, out_path)
        prune_old_reports(keep=20)  # keep the last 20 reports, silently drop older ones
        console.print(f"\n[bold green]Exported HTML →[/bold green] {out_path}")
        console.print(f"[dim]Reports folder: {out_path.parent}[/dim]")

        if serve or serve_public:
            # Blocks until the embedded HTTP server is stopped.
            serve_html(out_path, port=port, public=serve_public)
        else:
            console.print(
                "\n[dim]Tip: add [bold]--serve[/bold] to view in a browser via SSH tunnel, "
                "or copy the file to your local machine with:[/dim]"
            )
            console.print(
                f"[dim cyan] scp <user>@<server>:{out_path} ~/Downloads/[/dim cyan]"
            )
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
if __name__ == "__main__":
    # Run the CLI only when executed directly (python -m smart_log_reader.cli).
    # The previous bare `app()` call fired at *import* time, raising SystemExit
    # as a side effect of `import smart_log_reader.cli` — which breaks test
    # imports and double-runs the console-script entry point.
    app()
|