logdetective 0.4.0__py3-none-any.whl → 2.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. logdetective/constants.py +33 -12
  2. logdetective/extractors.py +137 -68
  3. logdetective/logdetective.py +102 -33
  4. logdetective/models.py +99 -0
  5. logdetective/prompts-summary-first.yml +20 -0
  6. logdetective/prompts-summary-only.yml +13 -0
  7. logdetective/prompts.yml +90 -0
  8. logdetective/remote_log.py +67 -0
  9. logdetective/server/compressors.py +186 -0
  10. logdetective/server/config.py +78 -0
  11. logdetective/server/database/base.py +34 -26
  12. logdetective/server/database/models/__init__.py +33 -0
  13. logdetective/server/database/models/exceptions.py +17 -0
  14. logdetective/server/database/models/koji.py +143 -0
  15. logdetective/server/database/models/merge_request_jobs.py +623 -0
  16. logdetective/server/database/models/metrics.py +427 -0
  17. logdetective/server/emoji.py +148 -0
  18. logdetective/server/exceptions.py +37 -0
  19. logdetective/server/gitlab.py +451 -0
  20. logdetective/server/koji.py +159 -0
  21. logdetective/server/llm.py +309 -0
  22. logdetective/server/metric.py +75 -30
  23. logdetective/server/models.py +426 -23
  24. logdetective/server/plot.py +432 -0
  25. logdetective/server/server.py +580 -468
  26. logdetective/server/templates/base_response.html.j2 +59 -0
  27. logdetective/server/templates/gitlab_full_comment.md.j2 +73 -0
  28. logdetective/server/templates/gitlab_short_comment.md.j2 +62 -0
  29. logdetective/server/utils.py +98 -32
  30. logdetective/skip_snippets.yml +12 -0
  31. logdetective/utils.py +187 -73
  32. logdetective-2.11.0.dist-info/METADATA +568 -0
  33. logdetective-2.11.0.dist-info/RECORD +40 -0
  34. {logdetective-0.4.0.dist-info → logdetective-2.11.0.dist-info}/WHEEL +1 -1
  35. logdetective/server/database/models.py +0 -88
  36. logdetective-0.4.0.dist-info/METADATA +0 -333
  37. logdetective-0.4.0.dist-info/RECORD +0 -19
  38. {logdetective-0.4.0.dist-info → logdetective-2.11.0.dist-info}/entry_points.txt +0 -0
  39. {logdetective-0.4.0.dist-info → logdetective-2.11.0.dist-info/licenses}/LICENSE +0 -0
logdetective/server/database/models/metrics.py
@@ -0,0 +1,427 @@
+from __future__ import annotations
+import enum
+import datetime
+from typing import Optional, List, Self, Tuple, TYPE_CHECKING
+
+import backoff
+
+from sqlalchemy import (
+    Integer,
+    Float,
+    DateTime,
+    Enum,
+    func,
+    select,
+    distinct,
+    ForeignKey,
+    LargeBinary,
+)
+from sqlalchemy.orm import Mapped, mapped_column, relationship, aliased
+from sqlalchemy.exc import OperationalError
+
+from logdetective.server.database.base import Base, transaction, DB_MAX_RETRIES
+from logdetective.server.database.models.merge_request_jobs import (
+    GitlabMergeRequestJobs,
+    Forge,
+)
+
+
+if TYPE_CHECKING:
+    from .koji import KojiTaskAnalysis
+
+
+class EndpointType(enum.Enum):
+    """Different analyze endpoints"""
+
+    ANALYZE = "analyze_log"
+    ANALYZE_STAGED = "analyze_log_staged"
+    ANALYZE_STREAM = "analyze_log_stream"
+    ANALYZE_GITLAB_JOB = "analyze_gitlab_job"
+    ANALYZE_KOJI_TASK = "analyze_koji_task"
+
+
+class AnalyzeRequestMetrics(Base):
+    """Store data related to received requests and given responses"""
+
+    __tablename__ = "analyze_request_metrics"
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    endpoint: Mapped[EndpointType] = mapped_column(
+        Enum(EndpointType),
+        nullable=False,
+        index=True,
+        comment="The service endpoint that was called",
+    )
+    request_received_at: Mapped[datetime.datetime] = mapped_column(
+        DateTime(timezone=True),
+        nullable=False,
+        index=True,
+        default=datetime.datetime.now(datetime.timezone.utc),
+        comment="Timestamp when the request was received",
+    )
+    compressed_log: Mapped[bytes] = mapped_column(
+        LargeBinary(length=314572800),  # 300MB limit (300 * 1024 * 1024)
+        nullable=False,
+        index=False,
+        comment="Log processed, saved in a zip format",
+    )
+    compressed_response: Mapped[Optional[bytes]] = mapped_column(
+        LargeBinary(length=314572800),  # 300MB limit (300 * 1024 * 1024)
+        nullable=True,
+        index=False,
+        comment="Given response (with explanation and snippets) saved in a zip format",
+    )
+    response_sent_at: Mapped[Optional[datetime.datetime]] = mapped_column(
+        DateTime(timezone=True),
+        nullable=True,
+        comment="Timestamp when the response was sent back",
+    )
+    response_length: Mapped[Optional[int]] = mapped_column(
+        Integer, nullable=True, comment="Length of the response in chars"
+    )
+    response_certainty: Mapped[Optional[float]] = mapped_column(
+        Float, nullable=True, comment="Certainty for generated response"
+    )
+
+    merge_request_job_id: Mapped[Optional[int]] = mapped_column(
+        Integer,
+        ForeignKey("gitlab_merge_request_jobs.id"),
+        nullable=True,
+        index=False,
+        comment="Is this an analyze request coming from a merge request?",
+    )
+
+    mr_job: Mapped[Optional["GitlabMergeRequestJobs"]] = relationship(
+        "GitlabMergeRequestJobs",
+        back_populates="request_metrics"
+    )
+
+    koji_tasks: Mapped[List["KojiTaskAnalysis"]] = relationship(
+        "KojiTaskAnalysis",
+        back_populates="response"
+    )
+
+    @classmethod
+    @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
+    async def create(
+        cls,
+        endpoint: EndpointType,
+        compressed_log: bytes,
+        request_received_at: Optional[datetime.datetime] = None,
+    ) -> int:
+        """Create AnalyzeRequestMetrics new line
+        with data related to a received request"""
+        async with transaction(commit=True) as session:
+            metrics = AnalyzeRequestMetrics()
+            metrics.endpoint = endpoint
+            metrics.compressed_log = compressed_log
+            metrics.request_received_at = request_received_at or datetime.datetime.now(
+                datetime.timezone.utc
+            )
+            session.add(metrics)
+            await session.flush()
+            return metrics.id
+
+    @classmethod
+    @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
+    async def update(  # pylint: disable=too-many-arguments disable=too-many-positional-arguments
+        cls,
+        id_: int,
+        response_sent_at: DateTime,
+        response_length: int,
+        response_certainty: float,
+        compressed_response: bytes,
+    ) -> None:
+        """Update a row
+        with data related to the given response"""
+        query = select(AnalyzeRequestMetrics).filter(AnalyzeRequestMetrics.id == id_)
+        async with transaction(commit=True) as session:
+            query_result = await session.execute(query)
+            metrics = query_result.scalars().first()
+            if metrics is None:
+                raise ValueError("Returned `AnalyzeRequestMetrics` table is empty.")
+            metrics.response_sent_at = response_sent_at
+            metrics.response_length = response_length
+            metrics.response_certainty = response_certainty
+            metrics.compressed_response = compressed_response
+            session.add(metrics)
+
+    @classmethod
+    @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
+    async def get_metric_by_id(
+        cls,
+        id_: int,
+    ) -> Self:
+        """Update a row
+        with data related to the given response"""
+        query = select(AnalyzeRequestMetrics).filter(AnalyzeRequestMetrics.id == id_)
+        async with transaction(commit=True) as session:
+            query_result = await session.execute(query)
+            metric = query_result.scalars().first()
+            if metric is None:
+                raise ValueError("Returned `AnalyzeRequestMetrics` table is empty.")
+            return metric
+
+    async def add_mr_job(
+        self,
+        forge: Forge,
+        project_id: int,
+        mr_iid: int,
+        job_id: int,
+    ) -> None:
+        """This request was triggered by a merge request job.
+        Link it.
+
+        Args:
+            forge: forge name
+            project_id: forge project id
+            mr_iid: merge request forge iid
+            job_id: forge job id
+        """
+        mr_job = await GitlabMergeRequestJobs.get_or_create(
+            forge, project_id, mr_iid, job_id
+        )
+        self.merge_request_job_id = mr_job.id
+        async with transaction(commit=True) as session:
+            await session.merge(self)
+
+    @classmethod
+    async def get_requests_metrics_for_mr_job(
+        cls,
+        forge: Forge,
+        project_id: int,
+        mr_iid: int,
+        job_id: int,
+    ) -> List[Self]:
+        """Search for all requests triggered by the specified merge request job.
+
+        Args:
+            forge: forge name
+            project_id: forge project id
+            mr_iid: merge request forge iid
+            job_id: forge job id
+        """
+        mr_job_alias = aliased(GitlabMergeRequestJobs)
+        query = (
+            select(cls)
+            .join(mr_job_alias, cls.merge_request_job_id == mr_job_alias.id)
+            .filter(
+                mr_job_alias.forge == forge,
+                mr_job_alias.mr_iid == mr_iid,
+                mr_job_alias.project_id == project_id,
+                mr_job_alias.job_id == job_id,
+            )
+        )
+        async with transaction(commit=False) as session:
+            query_result = await session.execute(query)
+            metrics = query_result.scalars().all()
+            return metrics
+
+    @classmethod
+    def get_postgres_time_format(cls, time_format):
+        """Map python time format in the PostgreSQL format."""
+        if time_format == "%Y-%m-%d":
+            pgsql_time_format = "YYYY-MM-DD"
+        else:
+            pgsql_time_format = "YYYY-MM-DD HH24"
+        return pgsql_time_format
+
+    @classmethod
+    def get_dictionary_with_datetime_keys(
+        cls, time_format: str, query_results: List[Tuple[str, int]]
+    ) -> dict[datetime.datetime, int]:
+        """Convert from a list of tuples with str as first values
+        to a dictionary with datetime keys"""
+        new_dict = {
+            datetime.datetime.strptime(e[0], time_format): e[1] for e in query_results
+        }
+        return new_dict
+
+    @classmethod
+    def _get_requests_by_time_for_postgres(
+        cls, start_time, end_time, time_format, endpoint
+    ):
+        """Get total requests number in time period.
+
+        func.to_char is PostgreSQL specific.
+        Let's unit tests replace this function with the SQLite version.
+        """
+        pgsql_time_format = cls.get_postgres_time_format(time_format)
+
+        requests_by_time_format = (
+            select(
+                cls.id,
+                func.to_char(cls.request_received_at, pgsql_time_format).label(
+                    "time_format"
+                ),
+            )
+            .filter(cls.request_received_at.between(start_time, end_time))
+            .filter(cls.endpoint == endpoint)
+            .cte("requests_by_time_format")
+        )
+        return requests_by_time_format
+
+    @classmethod
+    async def get_requests_in_period(
+        cls,
+        start_time: datetime.datetime,
+        end_time: datetime.datetime,
+        time_format: str,
+        endpoint: Optional[EndpointType] = EndpointType.ANALYZE,
+    ) -> dict[datetime.datetime, int]:
+        """
+        Get a dictionary with request counts grouped by time units within a specified period.
+
+        Args:
+            start_time (datetime): The start of the time period to query
+            end_time (datetime): The end of the time period to query
+            time_format (str): The strftime format string to format timestamps (e.g., '%Y-%m-%d')
+            endpoint (EndpointType): The analyze API endpoint to query
+
+        Returns:
+            dict[datetime, int]: A dictionary mapping datetime objects to request counts
+        """
+        async with transaction(commit=False) as session:
+            requests_by_time_format = cls._get_requests_by_time_for_postgres(
+                start_time, end_time, time_format, endpoint
+            )
+
+            count_requests_by_time_format = select(
+                requests_by_time_format.c.time_format,
+                func.count(distinct(requests_by_time_format.c.id)),  # pylint: disable=not-callable
+            ).group_by("time_format")
+
+            query_results = await session.execute(count_requests_by_time_format)
+            results = query_results.all()
+
+            return cls.get_dictionary_with_datetime_keys(time_format, results)
+
+    @classmethod
+    async def _get_average_responses_times_for_postgres(
+        cls, start_time, end_time, time_format, endpoint
+    ):
+        """Get average responses time.
+
+        func.to_char is PostgreSQL specific.
+        Let's unit tests replace this function with the SQLite version.
+        """
+        async with transaction(commit=False) as session:
+            pgsql_time_format = cls.get_postgres_time_format(time_format)
+
+            average_responses_times = (
+                select(
+                    func.to_char(cls.request_received_at, pgsql_time_format).label(
+                        "time_range"
+                    ),
+                    (
+                        func.avg(
+                            func.extract(  # pylint: disable=not-callable
+                                "epoch", cls.response_sent_at - cls.request_received_at
+                            )
+                        )
+                    ).label("average_response_seconds"),
+                )
+                .filter(cls.request_received_at.between(start_time, end_time))
+                .filter(cls.endpoint == endpoint)
+                .group_by("time_range")
+                .order_by("time_range")
+            )
+
+            query_results = await session.execute(average_responses_times)
+            results = query_results.all()
+            return results
+
+    @classmethod
+    async def get_responses_average_time_in_period(
+        cls,
+        start_time: datetime.datetime,
+        end_time: datetime.datetime,
+        time_format: str,
+        endpoint: Optional[EndpointType] = EndpointType.ANALYZE,
+    ) -> dict[datetime.datetime, int]:
+        """
+        Get a dictionary with average responses times
+        grouped by time units within a specified period.
+
+        Args:
+            start_time (datetime): The start of the time period to query
+            end_time (datetime): The end of the time period to query
+            time_format (str): The strftime format string to format timestamps (e.g., '%Y-%m-%d')
+            endpoint (EndpointType): The analyze API endpoint to query
+
+        Returns:
+            dict[datetime, int]: A dictionary mapping datetime objects
+            to average responses times
+        """
+        async with transaction(commit=False) as _:
+            average_responses_times = (
+                await cls._get_average_responses_times_for_postgres(
+                    start_time, end_time, time_format, endpoint
+                )
+            )

+            return cls.get_dictionary_with_datetime_keys(
+                time_format, average_responses_times
+            )
+
+    @classmethod
+    async def _get_average_responses_lengths_for_postgres(
+        cls, start_time, end_time, time_format, endpoint
+    ):
+        """Get average responses length.
+
+        func.to_char is PostgreSQL specific.
+        Let's unit tests replace this function with the SQLite version.
+        """
+        async with transaction(commit=False) as session:
+            pgsql_time_format = cls.get_postgres_time_format(time_format)
+
+            average_responses_lengths = (
+                select(
+                    func.to_char(cls.request_received_at, pgsql_time_format).label(
+                        "time_range"
+                    ),
+                    (func.avg(cls.response_length)).label("average_responses_length"),
+                )
+                .filter(cls.request_received_at.between(start_time, end_time))
+                .filter(cls.endpoint == endpoint)
+                .group_by("time_range")
+                .order_by("time_range")
+            )
+
+            query_results = await session.execute(average_responses_lengths)
+            results = query_results.all()
+            return results
+
+    @classmethod
+    async def get_responses_average_length_in_period(
+        cls,
+        start_time: datetime.datetime,
+        end_time: datetime.datetime,
+        time_format: str,
+        endpoint: Optional[EndpointType] = EndpointType.ANALYZE,
+    ) -> dict[datetime.datetime, int]:
+        """
+        Get a dictionary with average responses length
+        grouped by time units within a specified period.
+
+        Args:
+            start_time (datetime): The start of the time period to query
+            end_time (datetime): The end of the time period to query
+            time_format (str): The strftime format string to format timestamps (e.g., '%Y-%m-%d')
+            endpoint (EndpointType): The analyze API endpoint to query
+
+        Returns:
+            dict[datetime, int]: A dictionary mapping datetime objects
+            to average responses lengths
+        """
+        async with transaction(commit=False) as _:
+            average_responses_lengths = (
+                await cls._get_average_responses_lengths_for_postgres(
+                    start_time, end_time, time_format, endpoint
+                )
+            )

+            return cls.get_dictionary_with_datetime_keys(
+                time_format, average_responses_lengths
+            )
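
The model above gives each analyze request a single row that is created when the request arrives and updated once the response is ready; the reporting classmethods then aggregate those rows per time unit. A minimal usage sketch follows. It assumes the caller already holds the log and response text, and it uses stdlib gzip where the server has its own logdetective/server/compressors.py helpers (not shown in this hunk), so everything outside the classmethods above is illustrative only.

```python
import datetime
import gzip

from logdetective.server.database.models.metrics import (
    AnalyzeRequestMetrics,
    EndpointType,
)


async def record_analyze_roundtrip(log_text: str, explanation: str, certainty: float) -> int:
    """Record one /analyze request and its response in the metrics table."""
    # A row is created as soon as the request is received.
    metrics_id = await AnalyzeRequestMetrics.create(
        endpoint=EndpointType.ANALYZE,
        compressed_log=gzip.compress(log_text.encode("utf-8")),
    )
    # ... the LLM analysis would run here ...
    # The same row is updated once the response has been produced.
    await AnalyzeRequestMetrics.update(
        id_=metrics_id,
        response_sent_at=datetime.datetime.now(datetime.timezone.utc),
        response_length=len(explanation),
        response_certainty=certainty,
        compressed_response=gzip.compress(explanation.encode("utf-8")),
    )
    return metrics_id


async def daily_request_counts() -> dict[datetime.datetime, int]:
    """Count /analyze requests per day over the last week (PostgreSQL only, per the docstrings above)."""
    end = datetime.datetime.now(datetime.timezone.utc)
    return await AnalyzeRequestMetrics.get_requests_in_period(
        start_time=end - datetime.timedelta(days=7),
        end_time=end,
        time_format="%Y-%m-%d",
    )
```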
logdetective/server/emoji.py
@@ -0,0 +1,148 @@
+import asyncio
+
+from typing import List
+from collections import Counter
+
+import gitlab
+
+from logdetective.server.models import TimePeriod
+from logdetective.server.database.models import (
+    Comments,
+    Reactions,
+    GitlabMergeRequestJobs,
+    Forge,
+)
+from logdetective.server.config import LOG
+
+
+async def collect_emojis(gitlab_conn: gitlab.Gitlab, period: TimePeriod):
+    """
+    Collect emoji feedback from logdetective comments saved in database.
+    Check only comments created in the last given period of time.
+    """
+    comments = await Comments.get_since(period.get_period_start_time()) or []
+    comments_for_gitlab_connection = [
+        comment for comment in comments if comment.forge == gitlab_conn.url
+    ]
+    await collect_emojis_in_comments(comments_for_gitlab_connection, gitlab_conn)
+
+
+async def collect_emojis_for_mr(
+    project_id: int, mr_iid: int, gitlab_conn: gitlab.Gitlab
+):
+    """
+    Collect emoji feedback from logdetective comments in the specified MR.
+    """
+    comments = []
+    try:
+        url = Forge(gitlab_conn.url)
+    except ValueError as ex:
+        LOG.exception("Attempt to use unrecognized Forge `%s`", gitlab_conn.url)
+        raise ex
+    mr_jobs = await GitlabMergeRequestJobs.get_by_mr_iid(url, project_id, mr_iid) or []
+
+    comments = [await Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs]
+    # Filter all cases when no comments were found. This shouldn't happen if the database
+    # is in good order. But checking for it can't hurt.
+    comments = [comment for comment in comments if isinstance(comment, Comments)]
+
+    await collect_emojis_in_comments(comments, gitlab_conn)
+
+
+async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
+    comments: List[Comments], gitlab_conn: gitlab.Gitlab
+):
+    """
+    Collect emoji feedback from specified logdetective comments.
+    """
+    projects = {}
+    merge_requests = {}
+    for comment in comments:
+        mr_job_db = await GitlabMergeRequestJobs.get_by_id(comment.merge_request_job_id)
+        if not mr_job_db:
+            continue
+        try:
+            if mr_job_db.id not in projects:
+                project = await asyncio.to_thread(
+                    gitlab_conn.projects.get, mr_job_db.project_id
+                )
+
+                projects[mr_job_db.id] = project
+            else:
+                project = projects[mr_job_db.id]
+            merge_request_iid = mr_job_db.mr_iid
+            if merge_request_iid not in merge_requests:
+                merge_request = await asyncio.to_thread(
+                    project.mergerequests.get, merge_request_iid
+                )
+
+                merge_requests[merge_request_iid] = merge_request
+            else:
+                merge_request = merge_requests[merge_request_iid]
+
+            discussion = await asyncio.to_thread(
+                merge_request.discussions.get, comment.comment_id
+            )
+
+            # Get the ID of the first note
+            if "notes" not in discussion.attributes or len(discussion.attributes["notes"]) == 0:
+                LOG.warning(
+                    "No notes were found in comment %s in merge request %d",
+                    comment.comment_id,
+                    merge_request_iid,
+                )
+                continue
+
+            note_id = discussion.attributes["notes"][0]["id"]
+            note = await asyncio.to_thread(merge_request.notes.get, note_id)
+
+        # Log warning with full stack trace, in case we can't find the right
+        # discussion, merge request or project.
+        # All of these objects can be lost, and we shouldn't treat as an error.
+        # Other exceptions are raised.
+        except gitlab.GitlabError as e:
+            if e.response_code == 404:
+                LOG.warning(
+                    "Couldn't retrieve emoji counts for comment %s due to GitlabError",
+                    comment.comment_id, exc_info=True)
+                continue
+            LOG.error("Error encountered while processing emoji counts for GitLab comment %s",
+                      comment.comment_id, exc_info=True)
+            raise
+
+        emoji_counts = Counter(emoji.name for emoji in note.awardemojis.list())
+
+        # keep track of not updated reactions
+        # because we need to remove them
+        old_emojis = [
+            reaction.reaction_type
+            for reaction in await Reactions.get_all_reactions(
+                comment.forge,
+                mr_job_db.project_id,
+                mr_job_db.mr_iid,
+                mr_job_db.job_id,
+                comment.comment_id,
+            )
+        ]
+        for key, value in emoji_counts.items():
+            await Reactions.create_or_update(
+                comment.forge,
+                mr_job_db.project_id,
+                mr_job_db.mr_iid,
+                mr_job_db.job_id,
+                comment.comment_id,
+                key,
+                value,
+            )
+            if key in old_emojis:
+                old_emojis.remove(key)
+
+        # not updated reactions has been removed, drop them
+        await Reactions.delete(
+            comment.forge,
+            mr_job_db.project_id,
+            mr_job_db.mr_iid,
+            mr_job_db.job_id,
+            comment.comment_id,
+            old_emojis,
+        )
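
The three coroutines above walk from stored Comments rows back to the live GitLab discussions and mirror the current award-emoji counts into the Reactions table. Below is a minimal sketch of how the per-MR variant might be driven; the GitLab URL, token, project ID, and MR iid are placeholders, and it assumes the database already holds the corresponding Comments and GitlabMergeRequestJobs rows.

```python
import asyncio

import gitlab

from logdetective.server.emoji import collect_emojis_for_mr


async def refresh_feedback() -> None:
    # Placeholder connection; the real server builds its gitlab.Gitlab
    # instances from its own configuration.
    gitlab_conn = gitlab.Gitlab("https://gitlab.com", private_token="glpat-...")
    # Re-count award emojis on every Log Detective comment in MR !42 of
    # project 12345 and sync the Reactions table accordingly.
    await collect_emojis_for_mr(project_id=12345, mr_iid=42, gitlab_conn=gitlab_conn)


if __name__ == "__main__":
    asyncio.run(refresh_feedback())
```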
logdetective/server/exceptions.py
@@ -0,0 +1,37 @@
+"""Exception classes for Log Detective server."""
+
+
+class LogDetectiveException(Exception):
+    """Base exception for Log Detective server."""
+
+
+class LogsMissingError(LogDetectiveException):
+    """The logs are missing, possibly due to garbage-collection"""
+
+
+class LogDetectiveKojiException(LogDetectiveException):
+    """Base exception for Koji-related errors."""
+
+
+class KojiInvalidTaskID(LogDetectiveKojiException):
+    """The task ID is invalid."""
+
+
+class UnknownTaskType(LogDetectiveKojiException):
+    """The task type is not supported."""
+
+
+class NoFailedTask(LogDetectiveKojiException):
+    """The task is not in the FAILED state."""
+
+
+class LogDetectiveConnectionError(LogDetectiveKojiException):
+    """A connection error occurred."""
+
+
+class LogsTooLargeError(LogDetectiveKojiException):
+    """The log archive exceeds the configured maximum size"""
+
+
+class LogDetectiveMetricsError(LogDetectiveException):
+    """Exception was encountered while recording metrics"""