logdetective-0.5.8-py3-none-any.whl → logdetective-0.5.10-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- logdetective/constants.py +6 -1
- logdetective/extractors.py +9 -3
- logdetective/logdetective.py +23 -3
- logdetective/models.py +33 -0
- logdetective/prompts.yml +55 -0
- logdetective/server/server.py +11 -10
- logdetective/server/utils.py +13 -1
- logdetective/utils.py +17 -15
- {logdetective-0.5.8.dist-info → logdetective-0.5.10.dist-info}/METADATA +18 -3
- logdetective-0.5.10.dist-info/RECORD +23 -0
- logdetective-0.5.8.dist-info/RECORD +0 -21
- {logdetective-0.5.8.dist-info → logdetective-0.5.10.dist-info}/LICENSE +0 -0
- {logdetective-0.5.8.dist-info → logdetective-0.5.10.dist-info}/WHEEL +0 -0
- {logdetective-0.5.8.dist-info → logdetective-0.5.10.dist-info}/entry_points.txt +0 -0
logdetective/constants.py
CHANGED
@@ -1,3 +1,8 @@
+"""This file contains various constants to be used as a fallback
+in case other values are not specified. Prompt templates should be modified
+in prompts.yaml instead.
+"""
+
 # pylint: disable=line-too-long
 DEFAULT_ADVISOR = "fedora-copr/Mistral-7B-Instruct-v0.2-GGUF"
 
@@ -19,7 +24,7 @@ Analysis:
 
 """
 
-
+SUMMARIZATION_PROMPT_TEMPLATE = """
 Does following log contain error or issue?
 
 Log:
logdetective/extractors.py
CHANGED
@@ -6,7 +6,7 @@ import drain3
 from drain3.template_miner_config import TemplateMinerConfig
 from llama_cpp import Llama, LlamaGrammar
 
-from logdetective.constants import
+from logdetective.constants import SUMMARIZATION_PROMPT_TEMPLATE
 from logdetective.utils import get_chunks
 
 LOG = logging.getLogger("logdetective")
@@ -17,12 +17,18 @@ class LLMExtractor:
     A class that extracts relevant information from logs using a language model.
     """
 
-    def __init__(
+    def __init__(
+        self,
+        model: Llama,
+        n_lines: int = 2,
+        prompt: str = SUMMARIZATION_PROMPT_TEMPLATE,
+    ):
         self.model = model
         self.n_lines = n_lines
         self.grammar = LlamaGrammar.from_string(
             'root ::= ("Yes" | "No")', verbose=False
         )
+        self.prompt = prompt
 
     def __call__(
         self, log: str, n_lines: int = 2, neighbors: bool = False
@@ -41,7 +47,7 @@ class LLMExtractor:
 
         for i in range(0, len(log_lines), self.n_lines):
             block = "\n".join(log_lines[i: i + self.n_lines])
-            prompt =
+            prompt = self.prompt.format(log)
             out = self.model(prompt, max_tokens=7, grammar=self.grammar)
             out = f"{out['choices'][0]['text']}\n"
             results.append((block, out))
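The practical effect of the extractors.py change is that the summarization prompt is now injectable. A minimal sketch of the new constructor, assuming a hypothetical local GGUF model path; a custom template must keep a single `{}` replacement field:

```python
from llama_cpp import Llama

from logdetective.extractors import LLMExtractor

# Hypothetical model path; any llama-cpp-python compatible GGUF file works here.
model = Llama(model_path="./mistral-7b-instruct.Q4_K_M.gguf")

# Custom yes/no summarization prompt; the "{}" field receives the log text.
custom_prompt = "Does the following build log chunk report a failure?\n\nLog:\n\n{}\n\nAnswer:\n"

extractor = LLMExtractor(model, n_lines=2, prompt=custom_prompt)
snippets = extractor(open("build.log", encoding="utf-8").read())
```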
logdetective/logdetective.py
CHANGED
@@ -1,6 +1,7 @@
 import argparse
 import logging
 import sys
+import os
 
 from logdetective.constants import DEFAULT_ADVISOR
 from logdetective.utils import (
@@ -9,6 +10,7 @@ from logdetective.utils import (
     retrieve_log_content,
     format_snippets,
     compute_certainty,
+    load_prompts,
 )
 from logdetective.extractors import LLMExtractor, DrainExtractor
 
@@ -65,10 +67,16 @@ def setup_args():
     )
     parser.add_argument("-v", "--verbose", action="count", default=0)
     parser.add_argument("-q", "--quiet", action="store_true")
+    parser.add_argument(
+        "--prompts",
+        type=str,
+        default=f"{os.path.dirname(__file__)}/prompts.yml",
+        help="Path to prompt configuration file."
+    )
     return parser.parse_args()
 
 
-def main(): # pylint: disable=too-many-statements
+def main(): # pylint: disable=too-many-statements,too-many-locals
     """Main execution function."""
     args = setup_args()
 
@@ -83,6 +91,9 @@ def main(): # pylint: disable=too-many-statements
     if args.quiet:
         log_level = 0
 
+    # Get prompts configuration
+    prompts_configuration = load_prompts(args.prompts)
+
     logging.basicConfig(stream=sys.stdout)
     LOG.setLevel(log_level)
 
@@ -103,7 +114,11 @@ def main(): # pylint: disable=too-many-statements
         )
     else:
         summarizer_model = initialize_model(args.summarizer, verbose=args.verbose > 2)
-        extractor = LLMExtractor(
+        extractor = LLMExtractor(
+            summarizer_model,
+            args.verbose > 1,
+            prompts_configuration.summarization_prompt_template,
+        )
 
     LOG.info("Getting summary")
 
@@ -127,7 +142,12 @@ def main(): # pylint: disable=too-many-statements
     stream = True
     if args.no_stream:
         stream = False
-    response = process_log(
+    response = process_log(
+        log_summary,
+        model,
+        stream,
+        prompt_template=prompts_configuration.prompt_template,
+    )
     probs = []
     print("Explanation:")
     # We need to extract top token probability from the response
logdetective/models.py
ADDED
@@ -0,0 +1,33 @@
+from typing import Optional
+from pydantic import BaseModel
+
+from logdetective.constants import (
+    PROMPT_TEMPLATE,
+    PROMPT_TEMPLATE_STAGED,
+    SUMMARIZATION_PROMPT_TEMPLATE,
+    SNIPPET_PROMPT_TEMPLATE,
+)
+
+
+class PromptConfig(BaseModel):
+    """Configuration for basic log detective prompts."""
+
+    prompt_template: str = PROMPT_TEMPLATE
+    summarization_prompt_template: str = SUMMARIZATION_PROMPT_TEMPLATE
+    snippet_prompt_template: str = SNIPPET_PROMPT_TEMPLATE
+    prompt_template_staged: str = PROMPT_TEMPLATE_STAGED
+
+    def __init__(self, data: Optional[dict] = None):
+        super().__init__()
+        if data is None:
+            return
+        self.prompt_template = data.get("prompt_template", PROMPT_TEMPLATE)
+        self.summarization_prompt_template = data.get(
+            "summarization_prompt_template", SUMMARIZATION_PROMPT_TEMPLATE
+        )
+        self.snippet_prompt_template = data.get(
+            "snippet_prompt_template", SNIPPET_PROMPT_TEMPLATE
+        )
+        self.prompt_template_staged = data.get(
+            "prompt_template_staged", PROMPT_TEMPLATE_STAGED
+        )
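A short sketch of the fallback behavior PromptConfig implements: keys missing from the input dict fall back to the defaults in constants.py (the override string below is illustrative only):

```python
from logdetective.models import PromptConfig
from logdetective.constants import PROMPT_TEMPLATE

# Partial override: only the snippet prompt is customized.
config = PromptConfig({"snippet_prompt_template": "Describe this snippet:\n\n{}\n"})

assert config.snippet_prompt_template.startswith("Describe")
assert config.prompt_template == PROMPT_TEMPLATE  # missing keys keep defaults

# No argument (or None) yields the pure defaults from constants.py.
assert PromptConfig().prompt_template == PROMPT_TEMPLATE
```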
logdetective/prompts.yml
ADDED
@@ -0,0 +1,55 @@
+# This file is intended for customization of prompts
+# It is used only in server mode.
+# On command line you have to load it using --prompts
+# The defaults are stored in constants.py
+
+prompt_template: |
+  Given following log snippets, and nothing else, explain what failure, if any, occured during build of this package.
+
+  Analysis of the snippets must be in a format of [X] : [Y], where [X] is a log snippet, and [Y] is the explanation.
+  Snippets themselves must not be altered in any way whatsoever.
+
+  Snippets are delimited with '================'.
+
+  Finally, drawing on information from all snippets, provide complete explanation of the issue and recommend solution.
+
+  Snippets:
+
+  {}
+
+  Analysis:
+
+
+summarization_prompt_template: |
+  Does following log contain error or issue?
+
+  Log:
+
+  {}
+
+  Answer:
+
+
+snippet_prompt_template: |
+  Analyse following RPM build log snippet. Describe contents accurately, without speculation or suggestions for resolution.
+
+  Snippet:
+
+  {}
+
+  Analysis:
+
+prompt_template_staged: |
+  Given following log snippets, their explanation, and nothing else, explain what failure, if any, occured during build of this package.
+
+  Snippets are in a format of [X] : [Y], where [X] is a log snippet, and [Y] is the explanation.
+
+  Snippets are delimited with '================'.
+
+  Drawing on information from all snippets, provide complete explanation of the issue and recommend solution.
+
+  Snippets:
+
+  {}
+
+  Analysis:
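To illustrate how this file is consumed, a minimal sketch using the new load_prompts helper from logdetective/utils.py, assuming it runs from a checkout where the file exists; each template carries one `{}` replacement field:

```python
from logdetective.utils import load_prompts

prompts = load_prompts("logdetective/prompts.yml")

# The "{}" replacement field receives the snippet or log text.
rendered = prompts.summarization_prompt_template.format("gcc: error: no input files")
print(rendered)
```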
logdetective/server/server.py
CHANGED
@@ -21,19 +21,18 @@ import gitlab.v4.objects
 import jinja2
 import requests
 
-from logdetective.constants import (
-    PROMPT_TEMPLATE,
-    SNIPPET_PROMPT_TEMPLATE,
-    PROMPT_TEMPLATE_STAGED,
-)
 from logdetective.extractors import DrainExtractor
 from logdetective.utils import (
     validate_url,
     compute_certainty,
     format_snippets,
+    load_prompts,
+)
+from logdetective.server.utils import (
+    load_server_config,
+    get_log,
     format_analyzed_snippets,
 )
-from logdetective.server.utils import load_server_config, get_log
 from logdetective.server.metric import track_request
 from logdetective.server.models import (
     BuildLog,
@@ -51,8 +50,10 @@ LLM_CPP_SERVER_TIMEOUT = os.environ.get("LLAMA_CPP_SERVER_TIMEOUT", 600)
 LOG_SOURCE_REQUEST_TIMEOUT = os.environ.get("LOG_SOURCE_REQUEST_TIMEOUT", 60)
 API_TOKEN = os.environ.get("LOGDETECTIVE_TOKEN", None)
 SERVER_CONFIG_PATH = os.environ.get("LOGDETECTIVE_SERVER_CONF", None)
+SERVER_PROMPT_PATH = os.environ.get("LOGDETECTIVE_PROMPTS", None)
 
 SERVER_CONFIG = load_server_config(SERVER_CONFIG_PATH)
+PROMPT_CONFIG = load_prompts(SERVER_PROMPT_PATH)
 
 MR_REGEX = re.compile(r"refs/merge-requests/(\d+)/.*$")
 FAILURE_LOG_REGEX = re.compile(r"(\w*\.log)")
@@ -298,7 +299,7 @@ async def analyze_log(build_log: BuildLog):
     log_summary = mine_logs(log_text)
     log_summary = format_snippets(log_summary)
     response = await submit_text(
-
+        PROMPT_CONFIG.prompt_template.format(log_summary),
         api_endpoint=SERVER_CONFIG.inference.api_endpoint,
     )
     certainty = 0
@@ -338,7 +339,7 @@ async def perform_staged_analysis(log_text: str) -> StagedResponse:
     analyzed_snippets = await asyncio.gather(
         *[
             submit_text(
-
+                PROMPT_CONFIG.snippet_prompt_template.format(s),
                 api_endpoint=SERVER_CONFIG.inference.api_endpoint,
             )
             for s in log_summary
@@ -349,7 +350,7 @@ async def perform_staged_analysis(log_text: str) -> StagedResponse:
         AnalyzedSnippet(line_number=e[0][0], text=e[0][1], explanation=e[1])
         for e in zip(log_summary, analyzed_snippets)
     ]
-    final_prompt =
+    final_prompt = PROMPT_CONFIG.prompt_template_staged.format(
         format_analyzed_snippets(analyzed_snippets)
     )
 
@@ -395,7 +396,7 @@ async def analyze_log_stream(build_log: BuildLog):
     headers["Authorization"] = f"Bearer {SERVER_CONFIG.inference.api_token}"
 
     stream = await submit_text_chat_completions(
-
+        PROMPT_CONFIG.prompt_template.format(log_summary), stream=True, headers=headers
     )
 
     return StreamingResponse(stream)
logdetective/server/utils.py
CHANGED
@@ -1,6 +1,18 @@
 import logging
 import yaml
-from logdetective.
+from logdetective.constants import SNIPPET_DELIMITER
+from logdetective.server.models import Config, AnalyzedSnippet
+
+
+def format_analyzed_snippets(snippets: list[AnalyzedSnippet]) -> str:
+    """Format snippets for submission into staged prompt."""
+    summary = f"\n{SNIPPET_DELIMITER}\n".join(
+        [
+            f"[{e.text}] at line [{e.line_number}]: [{e.explanation.text}]"
+            for e in snippets
+        ]
+    )
+    return summary
 
 
 def load_server_config(path: str | None) -> Config:
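For reference, a sketch of what the relocated format_analyzed_snippets produces. The SimpleNamespace stand-ins below only mimic the attributes the function reads (text, line_number, explanation.text); they are not the real server models:

```python
from types import SimpleNamespace

from logdetective.server.utils import format_analyzed_snippets

snippets = [
    SimpleNamespace(
        text="error: linker command failed",
        line_number=1042,
        explanation=SimpleNamespace(text="The final link step failed."),
    ),
]

print(format_analyzed_snippets(snippets))
# [error: linker command failed] at line [1042]: [The final link step failed.]
```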
logdetective/utils.py
CHANGED
@@ -4,10 +4,11 @@ from typing import Iterator, List, Dict, Tuple, Generator
 from urllib.parse import urlparse
 import numpy as np
 import requests
+import yaml
 
 from llama_cpp import Llama, CreateCompletionResponse, CreateCompletionStreamResponse
-from logdetective.
-
+from logdetective.models import PromptConfig
+
 
 LOG = logging.getLogger("logdetective")
 
@@ -110,7 +111,7 @@ def compute_certainty(probs: List[Dict]) -> float:
 
 
 def process_log(
-    log: str, model: Llama, stream: bool
+    log: str, model: Llama, stream: bool, prompt_template: str
 ) -> CreateCompletionResponse | Iterator[CreateCompletionStreamResponse]:
     """Processes a given log using the provided language model and returns its summary.
 
@@ -122,7 +123,7 @@ def process_log(
         str: The summary of the given log generated by the language model.
     """
     response = model(
-        prompt=
+        prompt=prompt_template.format(log), stream=stream, max_tokens=0, logprobs=1
    )
 
     return response
@@ -175,17 +176,6 @@ def format_snippets(snippets: list[str] | list[Tuple[int, str]]) -> str:
     return summary
 
 
-def format_analyzed_snippets(snippets: list[AnalyzedSnippet]) -> str:
-    """Format snippets for submission into staged prompt."""
-    summary = f"\n{SNIPPET_DELIMITER}\n".join(
-        [
-            f"[{e.text}] at line [{e.line_number}]: [{e.explanation.text}]"
-            for e in snippets
-        ]
-    )
-    return summary
-
-
 def validate_url(url: str) -> bool:
     """Validate incoming URL to be at least somewhat sensible for log files
     Only http and https protocols permitted. No result, params or query fields allowed.
@@ -199,3 +189,15 @@ def validate_url(url: str) -> bool:
     if not (result.path or result.netloc):
         return False
     return True
+
+
+def load_prompts(path: str | None) -> PromptConfig:
+    """Load prompts from given yaml file if there is one.
+    Alternatively use defaults."""
+    if path:
+        try:
+            with open(path, "r") as file:
+                return PromptConfig(yaml.safe_load(file))
+        except FileNotFoundError:
+            print("Prompt configuration file not found, reverting to defaults.")
+    return PromptConfig()
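Putting the utils.py changes together, a sketch of the new process_log contract: the caller now passes the prompt template explicitly rather than relying on a module-level constant. The model path is hypothetical, and logits_all=True is an assumption here because process_log requests logprobs=1:

```python
from llama_cpp import Llama

from logdetective.utils import load_prompts, process_log

prompts = load_prompts(None)  # None (or a missing file) falls back to the defaults

# Hypothetical model path; logits_all=True so completions can return logprobs.
model = Llama(model_path="./mistral-7b-instruct.Q4_K_M.gguf", logits_all=True)

response = process_log(
    "error: undefined reference to `main'",
    model,
    stream=False,
    prompt_template=prompts.prompt_template,
)
print(response["choices"][0]["text"])
```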
{logdetective-0.5.8.dist-info → logdetective-0.5.10.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: logdetective
-Version: 0.5.8
+Version: 0.5.10
 Summary: Log using LLM AI to search for build/test failures and provide ideas for fixing these.
 License: Apache-2.0
 Author: Jiri Podivin
@@ -29,9 +29,9 @@ Requires-Dist: matplotlib (>=3.8.4,<4.0.0) ; extra == "server" or extra == "serv
 Requires-Dist: numpy (>=1.26.0)
 Requires-Dist: psycopg2 (>=2.9.9,<3.0.0) ; extra == "server"
 Requires-Dist: psycopg2-binary (>=2.9.9,<3.0.0) ; extra == "server-testing"
-Requires-Dist: pydantic (>=2.8.2,<3.0.0)
+Requires-Dist: pydantic (>=2.8.2,<3.0.0)
 Requires-Dist: python-gitlab (>=4.4.0)
-Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
+Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
 Requires-Dist: requests (>0.2.31)
 Requires-Dist: sqlalchemy (>=2.0.36,<3.0.0) ; extra == "server" or extra == "server-testing"
 Project-URL: homepage, https://github.com/fedora-copr/logdetective
@@ -363,6 +363,21 @@ http GET "localhost:8080/metrics/analyze/requests?days=5" > /tmp/plot_days.svg
 http GET "localhost:8080/metrics/analyze/requests?weeks=5" > /tmp/plot_weeks.svg
 ```
 
+System Prompts
+--------------
+
+Prompt templates used by Log Detective are stored in the `prompts.yml` file.
+It is possible to modify the file in place, or provide your own.
+In CLI you can override prompt templates location using `--prompts` option,
+while in the container service deployment the `LOGDETECTIVE_PROMPTS` environment variable
+is used instead.
+
+Prompts need to have a form compatible with python [format string syntax](https://docs.python.org/3/library/string.html#format-string-syntax)
+with spaces, or replacement fields marked with curly braces, `{}` left for insertion of snippets.
+
+Number of replacement fields in new prompts, must be the same as in originals.
+Although their position may be different.
+
 License
 -------
 
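The replacement-field rule stated in the README addition above can be checked mechanically. A small sketch (the custom file name is hypothetical) that compares the number of `{}` fields in a user-supplied prompt against the shipped default:

```python
import string

import yaml

from logdetective.constants import PROMPT_TEMPLATE


def count_fields(template: str) -> int:
    """Count str.format replacement fields in a template."""
    parsed = string.Formatter().parse(template)
    return sum(1 for _, field, _, _ in parsed if field is not None)


with open("my_prompts.yml", encoding="utf-8") as f:  # hypothetical custom file
    custom = yaml.safe_load(f)

assert count_fields(custom["prompt_template"]) == count_fields(PROMPT_TEMPLATE)
```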
logdetective-0.5.10.dist-info/RECORD
ADDED
@@ -0,0 +1,23 @@
+logdetective/__init__.py,sha256=VqRngDcuFT7JWms8Qc_MsOvajoXVOKPr-S1kqY3Pqhc,59
+logdetective/constants.py,sha256=eiS6eYhEgl_Rlyi_B9j00DDp9A-UDhuFz3ACWtKf_SU,1558
+logdetective/drain3.ini,sha256=ni91eCT1TwTznZwcqWoOVMQcGEnWhEDNCoTPF7cfGfY,1360
+logdetective/extractors.py,sha256=7ahzWbTtU9MveG1Q7wU9LO8OJgs85X-cHmWltUhCe9M,3491
+logdetective/logdetective.py,sha256=SDuzeS9sMp7rs6cTZAEd0ajtyWv9XnDkEPTF82nwaYo,5390
+logdetective/models.py,sha256=nrGBmMRu8i6UhFflQKAp81Y3Sd_Aaoor0i_yqSJoLT0,1115
+logdetective/prompts.yml,sha256=3orDNqqZNadWCaNncgfk8D3Pqqef4IzfScoa_jUJzCY,1452
+logdetective/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+logdetective/server/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+logdetective/server/database/base.py,sha256=oMJUvbWeapIUP-8Cf_DR9ptFg8CsYeaBAIjOVEzx8SM,1668
+logdetective/server/database/models.py,sha256=arIahOCT-hTmh904DXrWSkH7rlo13Ppu-OO80huX5Dc,6118
+logdetective/server/metric.py,sha256=VYMifrfIhcqgyu6YYN0c1nt8fC1iJ2_LCB7Bh2AheoE,2679
+logdetective/server/models.py,sha256=cf1ngu_-19rP_i49s5cEwIzh6SfL_ZpVy4EykCpfWck,8076
+logdetective/server/plot.py,sha256=3o-CNHjel04ekpwSB4ckV7dbiF663cfPkimQ0aP9U_8,7073
+logdetective/server/server.py,sha256=FDKx-6wsVoEwdEgcoepAT3GL0gZKjMSpB1VU-jaKt2w,24618
+logdetective/server/templates/gitlab_comment.md.j2,sha256=kheTkhQ-LfuFkr8av-Mw2a-9VYEUbDTLwaa-CKI6OkI,1622
+logdetective/server/utils.py,sha256=6y4gZCwQG4HcjWJwYdzwP46Jsm3xoNXZWH4kYmSWVZA,1741
+logdetective/utils.py,sha256=_cBBkBwZHX5qxy0K5WK2MnHA4x_oor7R-QED2VZLbCA,6226
+logdetective-0.5.10.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+logdetective-0.5.10.dist-info/METADATA,sha256=NbD3YEoEU-YAhH-VjOo95qWxyk1T1bq5wCih4N5oyqs,14738
+logdetective-0.5.10.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+logdetective-0.5.10.dist-info/entry_points.txt,sha256=3K_vXja6PmcA8sNdUi63WdImeiNhVZcEGPTaoJmltfA,63
+logdetective-0.5.10.dist-info/RECORD,,
logdetective-0.5.8.dist-info/RECORD
REMOVED
@@ -1,21 +0,0 @@
-logdetective/__init__.py,sha256=VqRngDcuFT7JWms8Qc_MsOvajoXVOKPr-S1kqY3Pqhc,59
-logdetective/constants.py,sha256=SPSs1Bq6zPms3RsFTmsADwgrnFTn4fefNHzrB-M3RAE,1383
-logdetective/drain3.ini,sha256=ni91eCT1TwTznZwcqWoOVMQcGEnWhEDNCoTPF7cfGfY,1360
-logdetective/extractors.py,sha256=cjxndfJaQur54GXksIQXL7YTxkOng8I8UnQZMN2t5_w,3388
-logdetective/logdetective.py,sha256=KN0KASW63VAnrjVeXK5AO0ob-vSexutTyeg1fd4uj70,4884
-logdetective/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-logdetective/server/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-logdetective/server/database/base.py,sha256=oMJUvbWeapIUP-8Cf_DR9ptFg8CsYeaBAIjOVEzx8SM,1668
-logdetective/server/database/models.py,sha256=arIahOCT-hTmh904DXrWSkH7rlo13Ppu-OO80huX5Dc,6118
-logdetective/server/metric.py,sha256=VYMifrfIhcqgyu6YYN0c1nt8fC1iJ2_LCB7Bh2AheoE,2679
-logdetective/server/models.py,sha256=cf1ngu_-19rP_i49s5cEwIzh6SfL_ZpVy4EykCpfWck,8076
-logdetective/server/plot.py,sha256=3o-CNHjel04ekpwSB4ckV7dbiF663cfPkimQ0aP9U_8,7073
-logdetective/server/server.py,sha256=ALVD9cwG4d8OQXfOPbRtt4y0nlh2C-8jP8pQeaufC3g,24533
-logdetective/server/templates/gitlab_comment.md.j2,sha256=kheTkhQ-LfuFkr8av-Mw2a-9VYEUbDTLwaa-CKI6OkI,1622
-logdetective/server/utils.py,sha256=OFvhttjv3yp8kfim5_s4mNG8ly21qyILxE0o3DcVVKg,1340
-logdetective/utils.py,sha256=eudens1_T6iTtYhyzoYCpwuWgFHUMDSt6eWnrAB-mAI,6188
-logdetective-0.5.8.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-logdetective-0.5.8.dist-info/METADATA,sha256=dUCiCPfW8ILyshanWpb_zHdm9q3LIBYKZUWyfQWqsCA,14115
-logdetective-0.5.8.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
-logdetective-0.5.8.dist-info/entry_points.txt,sha256=3K_vXja6PmcA8sNdUi63WdImeiNhVZcEGPTaoJmltfA,63
-logdetective-0.5.8.dist-info/RECORD,,
{logdetective-0.5.8.dist-info → logdetective-0.5.10.dist-info}/LICENSE
File without changes

{logdetective-0.5.8.dist-info → logdetective-0.5.10.dist-info}/WHEEL
File without changes

{logdetective-0.5.8.dist-info → logdetective-0.5.10.dist-info}/entry_points.txt
File without changes