logdetective 0.2.8__tar.gz → 0.2.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: logdetective
3
- Version: 0.2.8
3
+ Version: 0.2.9
4
4
  Summary: Log using LLM AI to search for build/test failures and provide ideas for fixing these.
5
5
  License: Apache-2.0
6
6
  Author: Jiri Podivin
@@ -32,9 +32,7 @@ Answer:
32
32
  """
33
33
 
34
34
  SNIPPET_PROMPT_TEMPLATE = """
35
- Analyse following RPM build log snippet.
36
- Analysis of the snippets must be in a format of [X] : [Y], where [X] is a log snippet, and [Y] is the explanation.
37
- Snippets themselves must not be altered in any way whatsoever.
35
+ Analyse following RPM build log snippet. Describe contents accurately, without speculation or suggestions for resolution.
38
36
 
39
37
  Snippet:
40
38
 
@@ -43,3 +41,22 @@ Snippet:
43
41
  Analysis:
44
42
 
45
43
  """
44
+
45
+ PROMPT_TEMPLATE_STAGED = """
46
+ Given following log snippets, their explanation, and nothing else, explain what failure, if any, occurred during build of this package.
47
+
48
+ Snippets are in a format of [X] : [Y], where [X] is a log snippet, and [Y] is the explanation.
49
+
50
+ Snippets are delimited with '================'.
51
+
52
+ Drawing on information from all snippets, provide complete explanation of the issue and recommend solution.
53
+
54
+ Snippets:
55
+
56
+ {}
57
+
58
+ Analysis:
59
+
60
+ """
61
+
62
+ SNIPPET_DELIMITER = '================'
@@ -2,7 +2,7 @@ import asyncio
2
2
  import json
3
3
  import logging
4
4
  import os
5
- from typing import List, Annotated
5
+ from typing import List, Annotated, Dict
6
6
 
7
7
  from llama_cpp import CreateCompletionResponse
8
8
  from fastapi import FastAPI, HTTPException, Depends, Header
@@ -10,7 +10,9 @@ from fastapi.responses import StreamingResponse
10
10
  from pydantic import BaseModel
11
11
  import requests
12
12
 
13
- from logdetective.constants import PROMPT_TEMPLATE, SNIPPET_PROMPT_TEMPLATE
13
+ from logdetective.constants import (
14
+ PROMPT_TEMPLATE, SNIPPET_PROMPT_TEMPLATE,
15
+ PROMPT_TEMPLATE_STAGED, SNIPPET_DELIMITER)
14
16
  from logdetective.extractors import DrainExtractor
15
17
  from logdetective.utils import validate_url, compute_certainty
16
18
 
@@ -38,10 +40,10 @@ class StagedResponse(Response):
38
40
  explanation: CreateCompletionResponse
39
41
  https://llama-cpp-python.readthedocs.io/en/latest/api-reference/#llama_cpp.llama_types.CreateCompletionResponse
40
42
  response_certainty: float
41
- snippets: list of CreateCompletionResponse
43
+ snippets:
44
+ list of dictionaries { 'snippet' : '<original_text>', 'comment': CreateCompletionResponse }
42
45
  """
43
- snippets: List[CreateCompletionResponse]
44
-
46
+ snippets: List[Dict[str, str | CreateCompletionResponse]]
45
47
 
46
48
  LOG = logging.getLogger("logdetective")
47
49
 
@@ -208,10 +210,18 @@ async def analyze_log_staged(build_log: BuildLog):
208
210
  analyzed_snippets = await asyncio.gather(
209
211
  *[submit_text(SNIPPET_PROMPT_TEMPLATE.format(s)) for s in log_summary])
210
212
 
211
- final_analysis = await submit_text(
212
- PROMPT_TEMPLATE.format([e["choices"][0]["text"] for e in analyzed_snippets]))
213
+ analyzed_snippets = [
214
+ {"snippet":e[0], "comment":e[1]} for e in zip(log_summary, analyzed_snippets)]
215
+
216
+ final_prompt = PROMPT_TEMPLATE_STAGED.format(
217
+ f"\n{SNIPPET_DELIMITER}\n".join([
218
+ f"[{e["snippet"]}] : [{e["comment"]["choices"][0]["text"]}]"
219
+ for e in analyzed_snippets]))
220
+
221
+ final_analysis = await submit_text(final_prompt)
213
222
 
214
223
  certainty = 0
224
+
215
225
  if "logprobs" in final_analysis["choices"][0]:
216
226
  try:
217
227
  certainty = compute_certainty(
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "logdetective"
3
- version = "0.2.8"
3
+ version = "0.2.9"
4
4
  description = "Log using LLM AI to search for build/test failures and provide ideas for fixing these."
5
5
  authors = ["Jiri Podivin <jpodivin@gmail.com>"]
6
6
  license = "Apache-2.0"
File without changes
File without changes