logdetective 2.8.0__py3-none-any.whl → 2.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- logdetective/server/compressors.py +7 -10
- logdetective/server/database/models/exceptions.py +4 -0
- logdetective/server/database/models/koji.py +4 -0
- logdetective/server/database/models/metrics.py +1 -2
- logdetective/server/emoji.py +4 -0
- logdetective/server/exceptions.py +4 -0
- logdetective/server/metric.py +9 -4
- logdetective/server/models.py +17 -46
- logdetective/server/plot.py +1 -1
- logdetective/server/server.py +17 -5
- logdetective/server/templates/gitlab_full_comment.md.j2 +9 -10
- logdetective/server/utils.py +2 -3
- logdetective/utils.py +12 -13
- {logdetective-2.8.0.dist-info → logdetective-2.10.0.dist-info}/METADATA +28 -2
- {logdetective-2.8.0.dist-info → logdetective-2.10.0.dist-info}/RECORD +18 -18
- {logdetective-2.8.0.dist-info → logdetective-2.10.0.dist-info}/WHEEL +0 -0
- {logdetective-2.8.0.dist-info → logdetective-2.10.0.dist-info}/entry_points.txt +0 -0
- {logdetective-2.8.0.dist-info → logdetective-2.10.0.dist-info}/licenses/LICENSE +0 -0
logdetective/server/compressors.py

@@ -36,20 +36,17 @@ class TextCompressor:
         zip_buffer.seek(0)
         return zip_buffer.getvalue()

-    def unzip(self, zip_data:
+    def unzip(self, zip_data: bytes) -> Dict[str, str]:
         """
         Uncompress data created by TextCompressor.zip().

         Args:
-            zip_data: A zipped stream of bytes
+            zip_data: A zipped stream of bytes

         Returns:
            {file_name: str}: The decompressed content as a dict of file names and UTF-8 strings
         """
-
-            zip_buffer = io.BytesIO(zip_data)
-        else:
-            zip_buffer = zip_data
+        zip_buffer = io.BytesIO(zip_data)

         content = {}
         with zipfile.ZipFile(zip_buffer, "r") as zip_file:

@@ -95,12 +92,12 @@ class RemoteLogCompressor:
         return self.zip_text(content_text)

     @classmethod
-    def unzip(cls, zip_data:
+    def unzip(cls, zip_data: bytes) -> str:
         """
         Uncompress the zipped content of the remote log.

         Args:
-            zip_data: Compressed data as bytes
+            zip_data: Compressed data as bytes

         Returns:
             str: The decompressed log content

@@ -147,13 +144,13 @@ class LLMResponseCompressor:

     @classmethod
     def unzip(
-        cls, zip_data:
+        cls, zip_data: bytes
     ) -> Union[StagedResponse, Response]:
         """
         Uncompress the zipped content of the LLM response.

         Args:
-            zip_data: Compressed data as bytes
+            zip_data: Compressed data as bytes

         Returns:
             Union[StagedResponse, Response]: The decompressed (partial) response object,
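The tightened `bytes`-only `unzip()` signatures remove the old dual bytes-or-buffer handling. A minimal standalone sketch of the same round trip using only the standard library (the function names here are illustrative, not the package's API):

```python
import io
import zipfile
from typing import Dict


def zip_texts(files: Dict[str, str]) -> bytes:
    """Compress a mapping of file names to UTF-8 text into an in-memory zip archive."""
    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
        for name, text in files.items():
            zip_file.writestr(name, text)
    zip_buffer.seek(0)
    return zip_buffer.getvalue()


def unzip_texts(zip_data: bytes) -> Dict[str, str]:
    """Reverse the compression; accepts bytes only, mirroring the 2.10.0 signatures."""
    zip_buffer = io.BytesIO(zip_data)
    content = {}
    with zipfile.ZipFile(zip_buffer, "r") as zip_file:
        for name in zip_file.namelist():
            content[name] = zip_file.read(name).decode("utf-8")
    return content


assert unzip_texts(zip_texts({"build.log": "error: something failed"})) == {
    "build.log": "error: something failed"
}
```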
logdetective/server/database/models/exceptions.py

@@ -11,3 +11,7 @@ class KojiTaskNotAnalyzedError(Exception):

 class KojiTaskAnalysisTimeoutError(Exception):
     """Exception raised when a koji task analysis has timed out"""
+
+
+class AnalyzeRequestMetricsNotFroundError(Exception):
+    """Exception raised when AnalyzeRequestMetrics is not found"""
logdetective/server/database/models/koji.py

@@ -15,6 +15,7 @@ from logdetective.server.database.models.exceptions import (
     KojiTaskNotFoundError,
     KojiTaskNotAnalyzedError,
     KojiTaskAnalysisTimeoutError,
+    AnalyzeRequestMetricsNotFroundError,
 )
 from logdetective.server.models import KojiStagedResponse

@@ -86,6 +87,9 @@ class KojiTaskAnalysis(Base):
         async with transaction(commit=True) as session:
             query_result = await session.execute(query)
             koji_task_analysis = query_result.scalars().first()
+            if not koji_task_analysis:
+                raise AnalyzeRequestMetricsNotFroundError(
+                    f"No AnalyzeRequestMetrics record found for id {metric_id}")
             # Ensure that the task analysis doesn't already have a response
             if koji_task_analysis.response:
                 # This is probably due to an analysis that took so long that
logdetective/server/database/models/metrics.py

@@ -1,5 +1,4 @@
 from __future__ import annotations
-import io
 import enum
 import datetime
 from typing import Optional, List, Self, Tuple, TYPE_CHECKING

@@ -107,7 +106,7 @@ class AnalyzeRequestMetrics(Base):
     async def create(
         cls,
         endpoint: EndpointType,
-        compressed_log:
+        compressed_log: bytes,
         request_received_at: Optional[datetime.datetime] = None,
     ) -> int:
         """Create AnalyzeRequestMetrics new line
logdetective/server/emoji.py

@@ -42,6 +42,10 @@ async def collect_emojis_for_mr(
     mr_jobs = await GitlabMergeRequestJobs.get_by_mr_iid(url, project_id, mr_iid) or []

     comments = [await Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs]
+    # Filter all cases when no comments were found. This shouldn't happen if the database
+    # is in good order. But checking for it can't hurt.
+    comments = [comment for comment in comments if isinstance(comment, Comments)]
+
     await collect_emojis_in_comments(comments, gitlab_conn)


logdetective/server/exceptions.py

@@ -31,3 +31,7 @@ class LogDetectiveConnectionError(LogDetectiveKojiException):

 class LogsTooLargeError(LogDetectiveKojiException):
     """The log archive exceeds the configured maximum size"""
+
+
+class LogDetectiveMetricsError(LogDetectiveException):
+    """Exception was encountered while recording metrics"""
logdetective/server/metric.py

@@ -1,4 +1,3 @@
-import io
 import inspect
 import datetime

@@ -13,14 +12,15 @@ from logdetective.remote_log import RemoteLog
 from logdetective.server.config import LOG
 from logdetective.server.compressors import LLMResponseCompressor, RemoteLogCompressor
 from logdetective.server.database.models import EndpointType, AnalyzeRequestMetrics
+from logdetective.server.exceptions import LogDetectiveMetricsError


 async def add_new_metrics(
-    api_name:
+    api_name: EndpointType,
     url: Optional[str] = None,
     http_session: Optional[aiohttp.ClientSession] = None,
     received_at: Optional[datetime.datetime] = None,
-    compressed_log_content: Optional[
+    compressed_log_content: Optional[bytes] = None,
 ) -> int:
     """Add a new database entry for a received request.

@@ -29,6 +29,10 @@ async def add_new_metrics(
     and the log (in a zip format) for which analysis is requested.
     """
     if not compressed_log_content:
+        if not (url and http_session):
+            raise LogDetectiveMetricsError(
+                f"""Remote log can not be retrieved without URL and http session.
+                URL: {url}, http session:{http_session}""")
         remote_log = RemoteLog(url, http_session)
         compressed_log_content = await RemoteLogCompressor(remote_log).zip_content()

@@ -109,7 +113,8 @@ def track_request(name=None):
         async def async_decorated_function(*args, **kwargs):
             log_url = kwargs["build_log"].url
             metrics_id = await add_new_metrics(
-                name if name else f.__name__,
+                api_name=EndpointType(name if name else f.__name__),
+                url=log_url, http_session=kwargs["http_session"]
             )
             response = await f(*args, **kwargs)
             await update_metrics(metrics_id, response)
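The new guard in `add_new_metrics` fails fast with `LogDetectiveMetricsError` instead of handing `None` values to `RemoteLog`. A rough, self-contained sketch of the same validate-then-fetch pattern (the exception class and fetch helper below are stand-ins, not the project's actual code):

```python
import asyncio
from typing import Optional


class MetricsError(Exception):
    """Stand-in for LogDetectiveMetricsError."""


async def fetch_and_zip(url: str, http_session: object) -> bytes:
    """Placeholder for downloading the remote log and compressing it."""
    return b"zipped-log-bytes"


async def add_metrics_sketch(
    compressed_log_content: Optional[bytes] = None,
    url: Optional[str] = None,
    http_session: Optional[object] = None,
) -> bytes:
    if not compressed_log_content:
        # Without a pre-compressed log we must be able to fetch one ourselves.
        if not (url and http_session):
            raise MetricsError(
                f"Remote log can not be retrieved without URL and http session. "
                f"URL: {url}, http session: {http_session}"
            )
        compressed_log_content = await fetch_and_zip(url, http_session)
    return compressed_log_content


# Raises MetricsError: neither a compressed log nor a way to fetch one was given.
try:
    asyncio.run(add_metrics_sketch())
except MetricsError as err:
    print(err)
```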
logdetective/server/models.py

@@ -10,6 +10,7 @@ from pydantic import (
     field_validator,
     NonNegativeFloat,
     HttpUrl,
+    PrivateAttr,
 )

 import aiohttp

@@ -183,8 +184,8 @@ class InferenceConfig(BaseModel):  # pylint: disable=too-many-instance-attribute
     user_role: str = USER_ROLE_DEFAULT
     system_role: str = SYSTEM_ROLE_DEFAULT
     llm_api_timeout: float = 15.0
-
-
+    _limiter: AsyncLimiter = PrivateAttr(
+        default_factory=lambda: AsyncLimiter(LLM_DEFAULT_REQUESTS_PER_MINUTE))

     def __init__(self, data: Optional[dict] = None):
         super().__init__()

@@ -207,40 +208,6 @@ class InferenceConfig(BaseModel):  # pylint: disable=too-many-instance-attribute
         self.llm_api_timeout = data.get("llm_api_timeout", 15.0)
         self._limiter = AsyncLimiter(self._requests_per_minute)

-    def __del__(self):
-        # Close connection when this object is destroyed
-        if self._http_session:
-            try:
-                loop = asyncio.get_running_loop()
-                loop.create_task(self._http_session.close())
-            except RuntimeError:
-                # No loop running, so create one to close the session
-                loop = asyncio.new_event_loop()
-                loop.run_until_complete(self._http_session.close())
-                loop.close()
-            except Exception:  # pylint: disable=broad-exception-caught
-                # We should only get here if we're shutting down, so we don't
-                # really care if the close() completes cleanly.
-                pass
-
-    def get_http_session(self):
-        """Return the internal HTTP session so it can be used to contect the
-        LLM server. May be used as a context manager."""
-
-        # Create the session on the first attempt. We need to do this "lazily"
-        # because it needs to happen once the event loop is running, even
-        # though the initialization itself is synchronous.
-        if not self._http_session:
-            self._http_session = aiohttp.ClientSession(
-                base_url=self.url,
-                timeout=aiohttp.ClientTimeout(
-                    total=self.http_timeout,
-                    connect=3.07,
-                ),
-            )
-
-        return self._http_session
-
     def get_limiter(self):
         """Return the limiter object so it can be used as a context manager"""
         return self._limiter

@@ -254,7 +221,7 @@ class ExtractorConfig(BaseModel):
     max_snippet_len: int = 2000
     csgrep: bool = False

-    _extractors: List[Extractor] =
+    _extractors: List[Extractor] = PrivateAttr(default_factory=list)

     def _setup_extractors(self):
         """Initialize extractors with common settings."""

@@ -322,8 +289,8 @@ class GitLabInstanceConfig(BaseModel):  # pylint: disable=too-many-instance-attr
     webhook_secrets: Optional[List[str]] = None

     timeout: float = 5.0
-    _conn: Gitlab = None
-    _http_session: aiohttp.ClientSession = None
+    _conn: Gitlab | None = PrivateAttr(default=None)
+    _http_session: aiohttp.ClientSession | None = PrivateAttr(default=None)

     # Maximum size of artifacts.zip in MiB. (default: 300 MiB)
     max_artifact_size: int = 300 * 1024 * 1024

@@ -409,8 +376,8 @@ class KojiInstanceConfig(BaseModel):
     xmlrpc_url: str = ""
     tokens: List[str] = []

-    _conn: Optional[koji.ClientSession] = None
-    _callbacks: defaultdict[int, set[str]] = defaultdict(set)
+    _conn: Optional[koji.ClientSession] = PrivateAttr(default=None)
+    _callbacks: defaultdict[int, set[str]] = PrivateAttr(default_factory=lambda: defaultdict(set))

     def __init__(self, name: str, data: Optional[dict] = None):
         super().__init__()

@@ -500,8 +467,8 @@ class LogConfig(BaseModel):
 class GeneralConfig(BaseModel):
     """General config options for Log Detective"""

-    packages: List[str] =
-    excluded_packages: List[str] =
+    packages: List[str] = []
+    excluded_packages: List[str] = []
     devmode: bool = False
     sentry_dsn: HttpUrl | None = None
     collect_emojis_interval: int = 60 * 60  # seconds
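Several config classes move their non-field state (`_limiter`, `_conn`, `_http_session`, `_extractors`, `_callbacks`) to pydantic's `PrivateAttr`, which keeps these attributes out of validation and serialization. A minimal sketch of the pattern, assuming pydantic v2 (the class and attribute names are illustrative):

```python
from collections import defaultdict

from pydantic import BaseModel, PrivateAttr


class ExampleInstanceConfig(BaseModel):
    url: str = ""

    # Private attributes are not model fields: they are skipped by validation,
    # never appear in dumps, and mutable defaults need a default_factory.
    _conn: object | None = PrivateAttr(default=None)
    _callbacks: defaultdict[int, set[str]] = PrivateAttr(
        default_factory=lambda: defaultdict(set)
    )


cfg = ExampleInstanceConfig(url="https://example.invalid")
cfg._callbacks[42].add("https://callback.example.invalid")  # usable like any attribute
assert "_callbacks" not in cfg.model_dump()                 # but not part of the model data
```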
logdetective/server/models.py (continued)

@@ -568,7 +535,8 @@ class TimePeriod(BaseModel):
     @model_validator(mode="before")
     @classmethod
     def check_exclusive_fields(cls, data):
-        """Check that only one key between weeks, days and hours is defined
+        """Check that only one key between weeks, days and hours is defined,
+        if no period is specified, fall back to 2 days."""
         if isinstance(data, dict):
             how_many_fields = sum(
                 1

@@ -594,6 +562,7 @@ class TimePeriod(BaseModel):

     def get_time_period(self) -> datetime.timedelta:
         """Get the period of time represented by this input model.
+        Will default to 2 days, if no period is set.

         Returns:
             datetime.timedelta: The time period as a timedelta object.

@@ -605,10 +574,12 @@ class TimePeriod(BaseModel):
             delta = datetime.timedelta(days=self.days)
         elif self.hours:
             delta = datetime.timedelta(hours=self.hours)
+        else:
+            delta = datetime.timedelta(days=2)
         return delta

     def get_period_start_time(
-        self, end_time: datetime.datetime = None
+        self, end_time: Optional[datetime.datetime] = None
     ) -> datetime.datetime:
         """Calculate the start time of this period based on the end time.

@@ -621,5 +592,5 @@ class TimePeriod(BaseModel):
         """
         time = end_time or datetime.datetime.now(datetime.timezone.utc)
         if time.tzinfo is None:
-
+            time = time.replace(tzinfo=datetime.timezone.utc)
         return time - self.get_time_period()
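`TimePeriod.get_time_period()` now falls back to two days when neither weeks, days, nor hours is set, and naive end times are coerced to UTC before the subtraction. A small sketch of the same fallback logic with plain `datetime` objects (a simplified stand-in for the pydantic model):

```python
import datetime
from typing import Optional


def get_time_period(
    weeks: Optional[int] = None,
    days: Optional[int] = None,
    hours: Optional[int] = None,
) -> datetime.timedelta:
    """Return the selected period, defaulting to 2 days when nothing is set."""
    if weeks:
        return datetime.timedelta(weeks=weeks)
    if days:
        return datetime.timedelta(days=days)
    if hours:
        return datetime.timedelta(hours=hours)
    return datetime.timedelta(days=2)


def get_period_start_time(
    period: datetime.timedelta, end_time: Optional[datetime.datetime] = None
) -> datetime.datetime:
    time = end_time or datetime.datetime.now(datetime.timezone.utc)
    if time.tzinfo is None:
        # Treat naive timestamps as UTC instead of mixing naive and aware datetimes.
        time = time.replace(tzinfo=datetime.timezone.utc)
    return time - period


assert get_time_period() == datetime.timedelta(days=2)
```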
logdetective/server/plot.py

@@ -69,7 +69,7 @@ def create_time_series_arrays(
     plot_def: Definition,
     start_time: datetime.datetime,
     end_time: datetime.datetime,
-    value_type: Optional[Union[int, float]] = int,
+    value_type: Optional[Union[type[int], type[float]]] = int,
 ) -> tuple[numpy.ndarray, numpy.ndarray]:
     """Create time series arrays from a dictionary of values.

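The corrected `value_type` annotation expresses that the parameter is the class `int` or `float` itself (usable, for instance, as a numpy dtype), not an instance of one. A tiny sketch of the distinction (the function name here is illustrative):

```python
from typing import Optional, Union

import numpy


def make_series(
    length: int, value_type: Optional[Union[type[int], type[float]]] = int
) -> numpy.ndarray:
    # value_type is a class such as int or float, which numpy accepts as a dtype.
    return numpy.zeros(length, dtype=value_type)


assert make_series(3).dtype == numpy.dtype(int)
assert make_series(3, float).dtype == numpy.dtype(float)
```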
logdetective/server/server.py

@@ -139,7 +139,19 @@ def requires_token_when_set(authorization: Annotated[str | None, Header()] = Non
     raise HTTPException(status_code=401, detail="No token provided.")


-app = FastAPI(
+app = FastAPI(
+    title="Log Detective",
+    contact={
+        "name": "Log Detective developers",
+        "url": "https://github.com/fedora-copr/logdetective",
+        "email": "copr-devel@lists.fedorahosted.org"
+    },
+    license_info={
+        "name": "Apache-2.0",
+        "url": "https://www.apache.org/licenses/LICENSE-2.0.html",
+    },
+    version=get_version(),
+    dependencies=[Depends(requires_token_when_set)], lifespan=lifespan)


 @app.post("/analyze", response_model=Response)

@@ -302,7 +314,7 @@ async def analyze_koji_task(task_id: int, koji_instance_config: KojiInstanceConf
     # to retrieve the metric ID to associate it with the koji task analysis.

     metrics_id = await add_new_metrics(
-
+        EndpointType.ANALYZE_KOJI_TASK,
         log_text,
         received_at=datetime.datetime.now(datetime.timezone.utc),
         compressed_log_content=RemoteLogCompressor.zip_text(log_text),

@@ -357,10 +369,10 @@ async def async_log(msg):
     return msg


-@app.get("/version")
+@app.get("/version", response_class=BasicResponse)
 async def get_version_wrapper():
     """Get the version of logdetective"""
-    return get_version()
+    return BasicResponse(content=get_version())


 @app.post("/analyze/stream", response_class=StreamingResponse)

@@ -529,7 +541,7 @@ async def schedule_emoji_collection_for_mr(
     key = (forge, project_id, mr_iid)

     # FIXME: Look up the connection from the Forge  # pylint: disable=fixme
-    gitlab_conn = SERVER_CONFIG.gitlab.instances[forge.value]
+    gitlab_conn = SERVER_CONFIG.gitlab.instances[forge.value].get_connection()

     LOG.debug("Looking up emojis for %s, %d, %d", forge, project_id, mr_iid)
     await collect_emojis_for_mr(project_id, mr_iid, gitlab_conn)
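The expanded `FastAPI(...)` constructor fills in the metadata shown in the generated OpenAPI document, and `/version` now returns a raw response body instead of a JSON-encoded string. A self-contained sketch of both ideas (the metadata values are copied from the diff; the hard-coded version string is illustrative):

```python
from fastapi import FastAPI
from fastapi.responses import Response as BasicResponse

app = FastAPI(
    title="Log Detective",
    contact={
        "name": "Log Detective developers",
        "url": "https://github.com/fedora-copr/logdetective",
        "email": "copr-devel@lists.fedorahosted.org",
    },
    license_info={
        "name": "Apache-2.0",
        "url": "https://www.apache.org/licenses/LICENSE-2.0.html",
    },
    version="2.10.0",
)


@app.get("/version", response_class=BasicResponse)
async def get_version_wrapper() -> BasicResponse:
    """Return the version as a plain body rather than a JSON-quoted string."""
    return BasicResponse(content="2.10.0")
```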
logdetective/server/templates/gitlab_full_comment.md.j2

@@ -9,16 +9,15 @@ Please know that the explanation was provided by AI and may be incorrect.
 {{ explanation }}
 </details>
 <details>
-
-
-
-
-
-
-
-
-
-</ul>
+{#
+    Formatted so that we don't trigger GitLab markdown
+#}
+<summary>Snippets</summary>
+<ul>
+{% for snippet in snippets -%}
+<li><div><b>Line {{ snippet.line_number }}:</b> <code>{{ snippet.text | e }}</code><br>{{ snippet.explanation.text | e }}</div></li>
+{%- endfor %}
+</ul>
 </details>
 <details>
 <summary>Logs</summary>
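The rewritten template emits one `<li>` per snippet and HTML-escapes both the snippet text and its explanation with the `| e` filter. A rough sketch of how such a fragment renders, using `SimpleNamespace` objects as stand-ins for the server's snippet models:

```python
from types import SimpleNamespace

from jinja2 import Template

fragment = Template(
    "<ul>\n"
    "{% for snippet in snippets -%}\n"
    "<li><b>Line {{ snippet.line_number }}:</b> <code>{{ snippet.text | e }}</code> "
    "{{ snippet.explanation.text | e }}</li>\n"
    "{%- endfor %}\n"
    "</ul>"
)

snippets = [
    SimpleNamespace(
        line_number=120,
        text="error: linker command failed",
        explanation=SimpleNamespace(text="The link step failed & likely needs an extra library."),
    )
]

# The `| e` filter turns characters such as & and < into HTML entities.
print(fragment.render(snippets=snippets))
```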
logdetective/server/utils.py

@@ -3,7 +3,6 @@ from importlib.metadata import version

 import aiohttp
 from fastapi import HTTPException
-from fastapi.responses import Response as BasicResponse

 from logdetective.constants import SNIPPET_DELIMITER
 from logdetective.server.config import LOG

@@ -106,6 +105,6 @@ def construct_final_prompt(formatted_snippets: str, prompt_template: str) -> str
     return final_prompt


-def get_version() ->
+def get_version() -> str:
     """Obtain the version number using importlib"""
-    return
+    return version('logdetective')
logdetective/utils.py

@@ -192,22 +192,21 @@ def format_snippets(snippets: list[str] | list[Tuple[int, str]]) -> str:
     Line number must be first element in the tuple. Mixed format of snippets
     is permitted, but may have impact on inference.
     """
-    summary = ""
+    summary = "\n"
     for i, s in enumerate(snippets):
         if isinstance(s, tuple):
-
-            Snippet No. {i} at line #{
-
-            {s[1]}
-            {SNIPPET_DELIMITER}
-            """
+            line_number, snippet_content = s
+            header = f"Snippet No. {i} at line #{line_number}:"
         else:
-
-
-
-            {
-
-            ""
+            header = f"Snippet No. {i}:"
+            snippet_content = s
+        summary += (
+            f"{header}\n"
+            "\n"
+            f"{snippet_content}\n"
+            f"{SNIPPET_DELIMITER}\n"
+            f"\n"
+        )
     return summary


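The rewritten `format_snippets` unpacks `(line_number, text)` tuples and assembles each block from explicit f-strings, so the indentation of the old triple-quoted template no longer leaks into the prompt. A standalone sketch of the same logic (the delimiter value is assumed for illustration):

```python
from typing import Tuple

SNIPPET_DELIMITER = "================"  # assumed value, stand-in for logdetective.constants


def format_snippets(snippets: list[str] | list[Tuple[int, str]]) -> str:
    """Join snippets (optionally tagged with a line number) into one prompt-ready block."""
    summary = "\n"
    for i, s in enumerate(snippets):
        if isinstance(s, tuple):
            line_number, snippet_content = s
            header = f"Snippet No. {i} at line #{line_number}:"
        else:
            header = f"Snippet No. {i}:"
            snippet_content = s
        summary += (
            f"{header}\n"
            "\n"
            f"{snippet_content}\n"
            f"{SNIPPET_DELIMITER}\n"
            "\n"
        )
    return summary


print(format_snippets([(12, "error: missing dependency foo"), "warning: deprecated macro"]))
```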
{logdetective-2.8.0.dist-info → logdetective-2.10.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: logdetective
-Version: 2.
+Version: 2.10.0
 Summary: Log using LLM AI to search for build/test failures and provide ideas for fixing these.
 License: Apache-2.0
 License-File: LICENSE

@@ -28,7 +28,7 @@ Requires-Dist: aiolimiter (>=1.0.0,<2.0.0) ; extra == "server"
 Requires-Dist: aioresponses (>=0.7.8,<0.8.0) ; extra == "testing"
 Requires-Dist: alembic (>=1.13.3,<2.0.0) ; extra == "server" or extra == "server-testing"
 Requires-Dist: asciidoc[testing] (>=10.2.1,<11.0.0) ; extra == "testing"
-Requires-Dist: asyncpg (>=0.30.0,<0.
+Requires-Dist: asyncpg (>=0.30.0,<1.0.0) ; extra == "server" or extra == "server-testing"
 Requires-Dist: backoff (==2.2.1) ; extra == "server" or extra == "server-testing"
 Requires-Dist: drain3 (>=0.9.11,<0.10.0)
 Requires-Dist: fastapi (>=0.111.1,<1.0.0) ; extra == "server" or extra == "server-testing"

@@ -325,6 +325,32 @@ podman-compose up server

 - Run Visual Stdio Code debug configuration named *Python Debug: Remote Attach*

+Visual Studio Code CLI debugging
+--------------------------------
+
+When debugging the CLI application, the `./scripts/debug_runner.py` script can be used
+as a stand in for stump script created during package installation.
+
+Using `launch.json`, or similar alternative, arguments can be specified for testing.
+
+Example:
+
+```
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Python: Debug Installed Module",
+            "type": "debugpy",
+            "request": "launch",
+            "console": "integratedTerminal",
+            "program": "${workspaceFolder}/scripts/debug_runner.py",
+            "args": [<URL_OF_A_LOG>]
+        }
+    ]
+}
+```
+
 Server
 ======

{logdetective-2.8.0.dist-info → logdetective-2.10.0.dist-info}/RECORD

@@ -9,32 +9,32 @@ logdetective/prompts-summary-only.yml,sha256=8U9AMJV8ePW-0CoXOXlQoO92DAJDeutIT8n
 logdetective/prompts.yml,sha256=i3z6Jcb4ScVi7LsxOpDlKiXrcvql3qO_JnLzkAKMn1c,3870
 logdetective/remote_log.py,sha256=28QvdQiy7RBnd86EKCq_A75P21gSNlCbgxJe5XAe9MA,2258
 logdetective/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-logdetective/server/compressors.py,sha256=
+logdetective/server/compressors.py,sha256=y4aFYJ_9CbYdKuAI39Kc9GQSdPN8cSJ2c_VAz3T47EE,5249
 logdetective/server/config.py,sha256=cKUmNCJyNyEid0bPTiUjr8CQuBYBab5bC79Axk2h0z8,2525
 logdetective/server/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 logdetective/server/database/base.py,sha256=HSV2tgye7iYTDzJD1Q5X7_nlLuTMIFP-hRVQMYxngHQ,2073
 logdetective/server/database/models/__init__.py,sha256=zoZMCt1_7tewDa6eEIIX_xrdN-tLegSiPNg5NiYaV3o,850
-logdetective/server/database/models/exceptions.py,sha256=
-logdetective/server/database/models/koji.py,sha256=
+logdetective/server/database/models/exceptions.py,sha256=4ED7FSSA1liV9-7VIN2BwUiz6XlmP97Y1loKnsoNdD8,507
+logdetective/server/database/models/koji.py,sha256=HNWxHYDxf4JN9K2ue8-V8dH-0XY5ZmxqH7Y9lAIbILA,6436
 logdetective/server/database/models/merge_request_jobs.py,sha256=MxiAVKQIsQMbFylBsmYBmVXYvid-4_5mwwXLfWdp6_w,19965
-logdetective/server/database/models/metrics.py,sha256=
-logdetective/server/emoji.py,sha256=
-logdetective/server/exceptions.py,sha256=
+logdetective/server/database/models/metrics.py,sha256=4xsUdbtlp5PI1-iJQc5Dd8EPDgVVplD9hJRWeRDn43k,15443
+logdetective/server/emoji.py,sha256=nt3i_D5bk67RF4SlIetlLhLcgcxz9TEniC2iRYJx81w,5066
+logdetective/server/exceptions.py,sha256=WN715KLL3ya6FiZ95v70VSbNuVhGuHFzxm2OeEPWQCw,981
 logdetective/server/gitlab.py,sha256=putpnf8PfGsCZJsqWZA1rMovRGnyagoQmgpKLqtA-aQ,16743
 logdetective/server/koji.py,sha256=LG1pRiKUFvYFRKzgQoUG3pUHfcEwMoaMNjUSMKw_pBA,5640
 logdetective/server/llm.py,sha256=bmA6LsV80OdO60q4WLoKuehuVDEYq-HhBAYcZeLfrv8,10150
-logdetective/server/metric.py,sha256
-logdetective/server/models.py,sha256=
-logdetective/server/plot.py,sha256=
-logdetective/server/server.py,sha256=
+logdetective/server/metric.py,sha256=wLOpgcAch3rwhPA5P2YWUeMNAPsvRGseRjH5HlTb7JM,4529
+logdetective/server/models.py,sha256=AJyycAEEl2o6TH4eAqVMlt5woqAB5M8ze2L575leA_I,19835
+logdetective/server/plot.py,sha256=8LERgY3vQckaHZV2PZfOrZT8CjCAiji57QCmRW24Rfo,14697
+logdetective/server/server.py,sha256=JueU-5c8t9h1CZy4gtoEeT8VSEirpeS0K3wrfqTPvAc,25381
 logdetective/server/templates/base_response.html.j2,sha256=BJGGV_Xb0Lnue8kq32oG9lI5CQDf9vce7HMYsP-Pvb4,2040
-logdetective/server/templates/gitlab_full_comment.md.j2,sha256=
+logdetective/server/templates/gitlab_full_comment.md.j2,sha256=4UujUzl3lmdbNEADsxn3HVrjfUiUu2FvUlp9MDFGXQI,2321
 logdetective/server/templates/gitlab_short_comment.md.j2,sha256=2krnMlGqqju2V_6pE0UqUR1P674OFaeX5BMyY5htTOQ,2022
-logdetective/server/utils.py,sha256=
+logdetective/server/utils.py,sha256=0BZ8WmzXNEtkUty1kOyFbBxDZWL0Icc8BUrxuHw9uvs,4015
 logdetective/skip_snippets.yml,sha256=reGlhPPCo06nNUJWiC2LY-OJOoPdcyOB7QBTSMeh0eg,487
-logdetective/utils.py,sha256=
-logdetective-2.
-logdetective-2.
-logdetective-2.
-logdetective-2.
-logdetective-2.
+logdetective/utils.py,sha256=yalhySOF_Gzmqx_Ft9qad3TplAfZ6LOmauGXEJfKWiE,9803
+logdetective-2.10.0.dist-info/METADATA,sha256=ii3l-h7Tpnc9BByVL1BYDj8XkbzKuqD1YW-A4gbm66E,23273
+logdetective-2.10.0.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+logdetective-2.10.0.dist-info/entry_points.txt,sha256=3K_vXja6PmcA8sNdUi63WdImeiNhVZcEGPTaoJmltfA,63
+logdetective-2.10.0.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+logdetective-2.10.0.dist-info/RECORD,,
{logdetective-2.8.0.dist-info → logdetective-2.10.0.dist-info}/WHEEL, entry_points.txt, and licenses/LICENSE: files without changes.