logdetective 0.4.0__py3-none-any.whl → 2.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- logdetective/constants.py +33 -12
- logdetective/extractors.py +137 -68
- logdetective/logdetective.py +102 -33
- logdetective/models.py +99 -0
- logdetective/prompts-summary-first.yml +20 -0
- logdetective/prompts-summary-only.yml +13 -0
- logdetective/prompts.yml +90 -0
- logdetective/remote_log.py +67 -0
- logdetective/server/compressors.py +186 -0
- logdetective/server/config.py +78 -0
- logdetective/server/database/base.py +34 -26
- logdetective/server/database/models/__init__.py +33 -0
- logdetective/server/database/models/exceptions.py +17 -0
- logdetective/server/database/models/koji.py +143 -0
- logdetective/server/database/models/merge_request_jobs.py +623 -0
- logdetective/server/database/models/metrics.py +427 -0
- logdetective/server/emoji.py +148 -0
- logdetective/server/exceptions.py +37 -0
- logdetective/server/gitlab.py +451 -0
- logdetective/server/koji.py +159 -0
- logdetective/server/llm.py +309 -0
- logdetective/server/metric.py +75 -30
- logdetective/server/models.py +426 -23
- logdetective/server/plot.py +432 -0
- logdetective/server/server.py +580 -468
- logdetective/server/templates/base_response.html.j2 +59 -0
- logdetective/server/templates/gitlab_full_comment.md.j2 +73 -0
- logdetective/server/templates/gitlab_short_comment.md.j2 +62 -0
- logdetective/server/utils.py +98 -32
- logdetective/skip_snippets.yml +12 -0
- logdetective/utils.py +187 -73
- logdetective-2.11.0.dist-info/METADATA +568 -0
- logdetective-2.11.0.dist-info/RECORD +40 -0
- {logdetective-0.4.0.dist-info → logdetective-2.11.0.dist-info}/WHEEL +1 -1
- logdetective/server/database/models.py +0 -88
- logdetective-0.4.0.dist-info/METADATA +0 -333
- logdetective-0.4.0.dist-info/RECORD +0 -19
- {logdetective-0.4.0.dist-info → logdetective-2.11.0.dist-info}/entry_points.txt +0 -0
- {logdetective-0.4.0.dist-info → logdetective-2.11.0.dist-info/licenses}/LICENSE +0 -0
logdetective/server/database/models/koji.py (new file)

```diff
@@ -0,0 +1,143 @@
+from __future__ import annotations
+from typing import Optional
+from datetime import datetime, timedelta, timezone
+from sqlalchemy import BigInteger, DateTime, ForeignKey, Integer, String, select
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from sqlalchemy.exc import OperationalError
+import backoff
+
+from logdetective.server.config import SERVER_CONFIG
+from logdetective.server.compressors import LLMResponseCompressor
+from logdetective.server.database.models.metrics import AnalyzeRequestMetrics
+from logdetective.server.database.base import Base, transaction, DB_MAX_RETRIES
+from logdetective.server.database.models.exceptions import (
+    KojiTaskNotFoundError,
+    KojiTaskNotAnalyzedError,
+    KojiTaskAnalysisTimeoutError,
+    AnalyzeRequestMetricsNotFroundError,
+)
+from logdetective.server.models import KojiStagedResponse
+
+
+class KojiTaskAnalysis(Base):
+    """Store details for the koji task analysis"""
+
+    __tablename__ = "koji_task_analysis"
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    koji_instance: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
+    task_id: Mapped[int] = mapped_column(BigInteger, nullable=False, index=True, unique=True)
+    log_file_name: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
+    request_received_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True),
+        nullable=False,
+        index=True,
+        default=datetime.now(timezone.utc),
+        comment="Timestamp when the request was received",
+    )
+    response_id: Mapped[Optional[int]] = mapped_column(
+        Integer,
+        ForeignKey("analyze_request_metrics.id"),
+        nullable=True,
+        index=False,
+        comment="The id of the analyze request metrics for this task",
+    )
+    response: Mapped[Optional["AnalyzeRequestMetrics"]] = relationship(
+        "AnalyzeRequestMetrics",
+        back_populates="koji_tasks"
+    )
+
+    @classmethod
+    @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
+    async def create_or_restart(
+        cls, koji_instance: str, task_id: int, log_file_name: str
+    ):
+        """Create a new koji task analysis"""
+        query = select(cls).filter(
+            cls.koji_instance == koji_instance, cls.task_id == task_id
+        )
+        async with transaction(commit=True) as session:
+            # Check if the task analysis already exists
+            query_result = await session.execute(query)
+            koji_task_analysis = query_result.first()
+            if koji_task_analysis:
+                # If it does, update the request_received_at timestamp
+                koji_task_analysis.request_received_at = datetime.now(timezone.utc)
+                session.add(koji_task_analysis)
+                await session.flush()
+                return
+
+            # If it doesn't, create a new one
+            koji_task_analysis = KojiTaskAnalysis()
+            koji_task_analysis.koji_instance = koji_instance
+            koji_task_analysis.task_id = task_id
+            koji_task_analysis.log_file_name = log_file_name
+            session.add(koji_task_analysis)
+            await session.flush()
+
+    @classmethod
+    @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
+    async def add_response(cls, task_id: int, metric_id: int):
+        """Add a response to a koji task analysis"""
+        query = select(cls).filter(cls.task_id == task_id)
+        metrics_query = select(AnalyzeRequestMetrics).filter(
+            AnalyzeRequestMetrics.id == metric_id
+        )
+        async with transaction(commit=True) as session:
+            query_result = await session.execute(query)
+            koji_task_analysis = query_result.scalars().first()
+            if not koji_task_analysis:
+                raise AnalyzeRequestMetricsNotFroundError(
+                    f"No AnalyzeRequestMetrics record found for id {metric_id}")
+            # Ensure that the task analysis doesn't already have a response
+            if koji_task_analysis.response:
+                # This is probably due to an analysis that took so long that
+                # a follow-up analysis was started before this one completed.
+                # We want to maintain consistency between the response we
+                # returned to the consumer, so we'll just drop this extra one
+                # on the floor and keep the one saved in the database.
+                return
+            metrics_query_result = await session.execute(metrics_query)
+            metric = metrics_query_result.scalars().first()
+            koji_task_analysis.response = metric
+            session.add(koji_task_analysis)
+            await session.flush()
+
+    @classmethod
+    @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
+    async def get_response_by_task_id(cls, task_id: int) -> KojiStagedResponse:
+        """Get a koji task analysis by task id"""
+        query = select(cls).filter(cls.task_id == task_id)
+        async with transaction(commit=False) as session:
+            query_result = await session.execute(query)
+            koji_task_analysis = query_result.scalars().first()
+            if not koji_task_analysis:
+                raise KojiTaskNotFoundError(f"Task {task_id} not yet analyzed")
+
+            if not koji_task_analysis.response:
+                # Check if the task analysis has timed out
+                if koji_task_analysis.request_received_at.replace(
+                    tzinfo=timezone.utc
+                ) + timedelta(
+                    minutes=SERVER_CONFIG.koji.analysis_timeout
+                ) < datetime.now(timezone.utc):
+                    raise KojiTaskAnalysisTimeoutError(
+                        f"Task {task_id} analysis has timed out"
+                    )
+
+                # Task analysis is still in progress, so we need to let the
+                # consumer know
+                raise KojiTaskNotAnalyzedError(
+                    f"Task {task_id} analysis is still in progress"
+                )
+
+            # We need to decompress the response message and return it
+            response = LLMResponseCompressor.unzip(
+                koji_task_analysis.response.compressed_response
+            )
+            return KojiStagedResponse(
+                task_id=task_id,
+                log_file_name=koji_task_analysis.log_file_name,
+                response=response,
+            )
```
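Taken together, these classmethods form a small staged-analysis API: `create_or_restart` records (or refreshes) a pending analysis, `add_response` attaches the finished `AnalyzeRequestMetrics` row, and `get_response_by_task_id` either returns the decompressed result or signals "still in progress" / "timed out" through exceptions. Below is a minimal sketch of how a consumer might poll this API; the `poll_analysis` helper, the `"fedora"` instance name, the `"build.log"` file name, and the 30-second poll interval are illustrative assumptions, while the classmethods and exceptions come from the diff above.

```python
import asyncio

from logdetective.server.database.models.koji import KojiTaskAnalysis
from logdetective.server.database.models.exceptions import (
    KojiTaskNotAnalyzedError,
    KojiTaskAnalysisTimeoutError,
)


async def poll_analysis(task_id: int) -> None:
    """Hypothetical consumer: request an analysis and wait for its result."""
    # Record (or restart) the analysis for this task/log pair.
    await KojiTaskAnalysis.create_or_restart(
        koji_instance="fedora", task_id=task_id, log_file_name="build.log"
    )
    while True:
        try:
            staged = await KojiTaskAnalysis.get_response_by_task_id(task_id)
        except KojiTaskNotAnalyzedError:
            # Analysis is still running; wait and ask again.
            await asyncio.sleep(30)
        except KojiTaskAnalysisTimeoutError:
            print(f"Analysis of task {task_id} timed out")
            return
        else:
            # staged is a KojiStagedResponse; its .response field holds the
            # decompressed LLM output.
            print(staged.response)
            return
```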