logdetective 0.11.1__tar.gz → 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {logdetective-0.11.1 → logdetective-1.0.0}/PKG-INFO +15 -7
- {logdetective-0.11.1 → logdetective-1.0.0}/README.md +14 -6
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/gitlab.py +35 -9
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/llm.py +5 -2
- {logdetective-0.11.1 → logdetective-1.0.0}/pyproject.toml +1 -1
- {logdetective-0.11.1 → logdetective-1.0.0}/LICENSE +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/__init__.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/constants.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/drain3.ini +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/extractors.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/logdetective.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/models.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/prompts.yml +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/remote_log.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/__init__.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/compressors.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/config.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/database/__init__.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/database/base.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/database/models/__init__.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/database/models/merge_request_jobs.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/database/models/metrics.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/emoji.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/metric.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/models.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/plot.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/server.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/templates/gitlab_full_comment.md.j2 +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/templates/gitlab_short_comment.md.j2 +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective/utils.py +0 -0
- {logdetective-0.11.1 → logdetective-1.0.0}/logdetective.1.asciidoc +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.3
|
|
2
2
|
Name: logdetective
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 1.0.0
|
|
4
4
|
Summary: Log using LLM AI to search for build/test failures and provide ideas for fixing these.
|
|
5
5
|
License: Apache-2.0
|
|
6
6
|
Author: Jiri Podivin
|
|
@@ -48,14 +48,16 @@ Log Detective
|
|
|
48
48
|
|
|
49
49
|
[PyPI Releases]: https://pypi.org/project/logdetective/#history
|
|
50
50
|
|
|
51
|
-
A
|
|
51
|
+
A tool, service and RHEL process integration to analyze logs using a Large Language Model (LLM) and a [Drain template miner](https://github.com/logpai/Drain3).
|
|
52
|
+
|
|
53
|
+
The service that explains logs is available here: https://logdetective.com/explain
|
|
52
54
|
|
|
53
55
|
Note: if you are looking for code of website logdetective.com it is in [github.com/fedora-copr/logdetective-website](https://github.com/fedora-copr/logdetective-website).
|
|
54
56
|
|
|
55
57
|
Installation
|
|
56
58
|
------------
|
|
57
59
|
|
|
58
|
-
**Fedora
|
|
60
|
+
**Fedora 41+**
|
|
59
61
|
|
|
60
62
|
dnf install logdetective
|
|
61
63
|
|
|
@@ -70,11 +72,12 @@ First, ensure that the necessary dependencies for the `llama-cpp-python` project
|
|
|
70
72
|
|
|
71
73
|
Then, install the `logdetective` project using pip:
|
|
72
74
|
|
|
73
|
-
# then install logdetective project
|
|
74
75
|
pip install logdetective
|
|
75
76
|
|
|
76
77
|
**Local repository install**
|
|
77
78
|
|
|
79
|
+
Clone this repository and install with pip:
|
|
80
|
+
|
|
78
81
|
pip install .
|
|
79
82
|
|
|
80
83
|
Usage
|
|
@@ -111,14 +114,14 @@ Example of altered prompts:
|
|
|
111
114
|
logdetective https://kojipkgs.fedoraproject.org//work/tasks/3367/131313367/build.log --prompts ~/my-prompts.yml
|
|
112
115
|
|
|
113
116
|
|
|
114
|
-
Note that streaming with some models (notably Meta-Llama-3 is broken) is broken and can be
|
|
117
|
+
Note that streaming with some models (notably Meta-Llama-3) is broken and can be worked around with the `--no-stream` option:
|
|
115
118
|
|
|
116
119
|
logdetective https://example.com/logs.txt --model QuantFactory/Meta-Llama-3-8B-Instruct-GGUF --no-stream
|
|
117
120
|
|
|
118
121
|
|
|
119
122
|
Real Example
|
|
120
123
|
------------
|
|
121
|
-
Let's have a look at a real world example. Log Detective can work with any logs though we optimize it for build logs.
|
|
124
|
+
Let's have a look at a real world example. Log Detective can work with any logs though we optimize it for RPM build logs.
|
|
122
125
|
|
|
123
126
|
We're going to analyze a failed build of a python-based library that happened in Fedora Koji buildsystem:
|
|
124
127
|
```
|
|
@@ -184,8 +187,13 @@ Contributing
|
|
|
184
187
|
------------
|
|
185
188
|
|
|
186
189
|
Contributions are welcome! Please submit a pull request if you have any improvements or new features to add. Make sure your changes pass all existing tests before submitting.
|
|
190
|
+
For bigger code changes, please consult us first by creating an issue.
|
|
191
|
+
|
|
192
|
+
We are always looking for more annotated snippets that will increase the quality of Log Detective's results. The contributions happen on our website: https://logdetective.com/
|
|
193
|
+
|
|
194
|
+
Log Detective performs several inference queries while evaluating a log file. Prompts are stored in a separate file (more info below: https://github.com/fedora-copr/logdetective?tab=readme-ov-file#system-prompts). If you have an idea for improvements to our prompts, please open a PR and we'd be happy to test it out.
|
|
187
195
|
|
|
188
|
-
To develop
|
|
196
|
+
To develop Log Detective, you should fork this repository, clone your fork, and install dependencies using pip:
|
|
189
197
|
|
|
190
198
|
git clone https://github.com/yourusername/logdetective.git
|
|
191
199
|
cd logdetective
|
|
@@ -5,14 +5,16 @@ Log Detective
|
|
|
5
5
|
|
|
6
6
|
[PyPI Releases]: https://pypi.org/project/logdetective/#history
|
|
7
7
|
|
|
8
|
-
A
|
|
8
|
+
A tool, service and RHEL process integration to analyze logs using a Large Language Model (LLM) and a [Drain template miner](https://github.com/logpai/Drain3).
|
|
9
|
+
|
|
10
|
+
The service that explains logs is available here: https://logdetective.com/explain
|
|
9
11
|
|
|
10
12
|
Note: if you are looking for code of website logdetective.com it is in [github.com/fedora-copr/logdetective-website](https://github.com/fedora-copr/logdetective-website).
|
|
11
13
|
|
|
12
14
|
Installation
|
|
13
15
|
------------
|
|
14
16
|
|
|
15
|
-
**Fedora
|
|
17
|
+
**Fedora 41+**
|
|
16
18
|
|
|
17
19
|
dnf install logdetective
|
|
18
20
|
|
|
@@ -27,11 +29,12 @@ First, ensure that the necessary dependencies for the `llama-cpp-python` project
|
|
|
27
29
|
|
|
28
30
|
Then, install the `logdetective` project using pip:
|
|
29
31
|
|
|
30
|
-
# then install logdetective project
|
|
31
32
|
pip install logdetective
|
|
32
33
|
|
|
33
34
|
**Local repository install**
|
|
34
35
|
|
|
36
|
+
Clone this repository and install with pip:
|
|
37
|
+
|
|
35
38
|
pip install .
|
|
36
39
|
|
|
37
40
|
Usage
|
|
@@ -68,14 +71,14 @@ Example of altered prompts:
|
|
|
68
71
|
logdetective https://kojipkgs.fedoraproject.org//work/tasks/3367/131313367/build.log --prompts ~/my-prompts.yml
|
|
69
72
|
|
|
70
73
|
|
|
71
|
-
Note that streaming with some models (notably Meta-Llama-3 is broken) is broken and can be
|
|
74
|
+
Note that streaming with some models (notably Meta-Llama-3) is broken and can be worked around with the `--no-stream` option:
|
|
72
75
|
|
|
73
76
|
logdetective https://example.com/logs.txt --model QuantFactory/Meta-Llama-3-8B-Instruct-GGUF --no-stream
|
|
74
77
|
|
|
75
78
|
|
|
76
79
|
Real Example
|
|
77
80
|
------------
|
|
78
|
-
Let's have a look at a real world example. Log Detective can work with any logs though we optimize it for build logs.
|
|
81
|
+
Let's have a look at a real world example. Log Detective can work with any logs though we optimize it for RPM build logs.
|
|
79
82
|
|
|
80
83
|
We're going to analyze a failed build of a python-based library that happened in Fedora Koji buildsystem:
|
|
81
84
|
```
|
|
@@ -141,8 +144,13 @@ Contributing
|
|
|
141
144
|
------------
|
|
142
145
|
|
|
143
146
|
Contributions are welcome! Please submit a pull request if you have any improvements or new features to add. Make sure your changes pass all existing tests before submitting.
|
|
147
|
+
For bigger code changes, please consult us first by creating an issue.
|
|
148
|
+
|
|
149
|
+
We are always looking for more annotated snippets that will increase the quality of Log Detective's results. The contributions happen on our website: https://logdetective.com/
|
|
150
|
+
|
|
151
|
+
Log Detective performs several inference queries while evaluating a log file. Prompts are stored in a separate file (more info below: https://github.com/fedora-copr/logdetective?tab=readme-ov-file#system-prompts). If you have an idea for improvements to our prompts, please open a PR and we'd be happy to test it out.
|
|
144
152
|
|
|
145
|
-
To develop
|
|
153
|
+
To develop Log Detective, you should fork this repository, clone your fork, and install dependencies using pip:
|
|
146
154
|
|
|
147
155
|
git clone https://github.com/yourusername/logdetective.git
|
|
148
156
|
cd logdetective
|
|
@@ -93,7 +93,7 @@ async def process_gitlab_job_event(
|
|
|
93
93
|
preprocessed_log.close()
|
|
94
94
|
|
|
95
95
|
# check if this project is on the opt-in list for posting comments.
|
|
96
|
-
if project.name
|
|
96
|
+
if not is_eligible_package(project.name):
|
|
97
97
|
LOG.info("Not publishing comment for unrecognized package %s", project.name)
|
|
98
98
|
return
|
|
99
99
|
|
|
@@ -111,6 +111,31 @@ async def process_gitlab_job_event(
|
|
|
111
111
|
return staged_response
|
|
112
112
|
|
|
113
113
|
|
|
114
|
+
def is_eligible_package(project_name: str):
|
|
115
|
+
"""Check whether the provided package name is eligible for posting
|
|
116
|
+
comments to the merge request"""
|
|
117
|
+
|
|
118
|
+
# First check the allow-list. If it's not allowed, we deny.
|
|
119
|
+
allowed = False
|
|
120
|
+
for pattern in SERVER_CONFIG.general.packages:
|
|
121
|
+
print(f"include {pattern}")
|
|
122
|
+
if re.search(pattern, project_name):
|
|
123
|
+
allowed = True
|
|
124
|
+
break
|
|
125
|
+
if not allowed:
|
|
126
|
+
# The project did not match any of the permitted regular expressions
|
|
127
|
+
return False
|
|
128
|
+
|
|
129
|
+
# Next, check the deny-list. If it was allowed before, but denied here, we deny.
|
|
130
|
+
for pattern in SERVER_CONFIG.general.excluded_packages:
|
|
131
|
+
print(f"Exclude {pattern}")
|
|
132
|
+
if re.search(pattern, project_name):
|
|
133
|
+
return False
|
|
134
|
+
|
|
135
|
+
# It was allowed and not denied, so return True to indicate it is eligible
|
|
136
|
+
return True
|
|
137
|
+
|
|
138
|
+
|
|
114
139
|
class LogsTooLargeError(RuntimeError):
|
|
115
140
|
"""The log archive exceeds the configured maximum size"""
|
|
116
141
|
|
|
@@ -174,16 +199,17 @@ async def retrieve_and_preprocess_koji_logs(
|
|
|
174
199
|
with artifacts_zip.open(zipinfo.filename) as task_failed_log:
|
|
175
200
|
contents = task_failed_log.read().decode("utf-8")
|
|
176
201
|
match = FAILURE_LOG_REGEX.search(contents)
|
|
177
|
-
if
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
202
|
+
if match:
|
|
203
|
+
failure_log_name = match.group(1)
|
|
204
|
+
failed_arches[architecture] = PurePath(path.parent, failure_log_name)
|
|
205
|
+
else:
|
|
206
|
+
LOG.info(
|
|
182
207
|
"task_failed.log does not indicate which log contains the failure."
|
|
183
208
|
)
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
209
|
+
# The best thing we can do at this point is return the
|
|
210
|
+
# task_failed.log, since it will probably contain the most
|
|
211
|
+
# relevant information
|
|
212
|
+
failed_arches[architecture] = path
|
|
187
213
|
|
|
188
214
|
if not failed_arches:
|
|
189
215
|
# No failed task found in the sub-tasks.
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import os
|
|
2
2
|
import asyncio
|
|
3
3
|
import json
|
|
4
|
+
import random
|
|
4
5
|
from typing import List, Tuple, Dict, Any, Union
|
|
5
6
|
|
|
6
7
|
import backoff
|
|
@@ -102,7 +103,7 @@ def should_we_giveup(exc: aiohttp.ClientResponseError) -> bool:
|
|
|
102
103
|
> a truthy value if the exception should not be retried
|
|
103
104
|
"""
|
|
104
105
|
LOG.info("Should we give up on retrying error %s", exc)
|
|
105
|
-
return exc.status <
|
|
106
|
+
return exc.status < 400
|
|
106
107
|
|
|
107
108
|
|
|
108
109
|
def we_give_up(details: backoff._typing.Details):
|
|
@@ -110,6 +111,7 @@ def we_give_up(details: backoff._typing.Details):
|
|
|
110
111
|
retries didn't work (or we got a different exc)
|
|
111
112
|
we give up and raise proper 500 for our API endpoint
|
|
112
113
|
"""
|
|
114
|
+
LOG.error("Last exception: %s", details["exception"])
|
|
113
115
|
LOG.error("Inference error: %s", details["args"])
|
|
114
116
|
raise HTTPException(500, "Request to the inference API failed")
|
|
115
117
|
|
|
@@ -117,7 +119,8 @@ def we_give_up(details: backoff._typing.Details):
|
|
|
117
119
|
@backoff.on_exception(
|
|
118
120
|
lambda: backoff.constant([10, 30, 120]),
|
|
119
121
|
aiohttp.ClientResponseError,
|
|
120
|
-
max_tries=3
|
|
122
|
+
max_tries=4, # 4 tries and 3 retries
|
|
123
|
+
jitter=lambda wait_gen_value: random.uniform(wait_gen_value, wait_gen_value + 30),
|
|
121
124
|
giveup=should_we_giveup,
|
|
122
125
|
raise_on_giveup=False,
|
|
123
126
|
on_giveup=we_give_up,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/database/models/merge_request_jobs.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/templates/gitlab_full_comment.md.j2
RENAMED
|
File without changes
|
{logdetective-0.11.1 → logdetective-1.0.0}/logdetective/server/templates/gitlab_short_comment.md.j2
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|