logdetective-2.4.1-py3-none-any.whl → logdetective-2.6.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- logdetective/prompts-summary-first.yml +0 -2
- logdetective/prompts.yml +0 -3
- logdetective/server/database/base.py +31 -26
- logdetective/server/database/models/__init__.py +2 -2
- logdetective/server/database/models/koji.py +43 -30
- logdetective/server/database/models/merge_request_jobs.py +205 -186
- logdetective/server/database/models/metrics.py +86 -59
- logdetective/server/emoji.py +7 -7
- logdetective/server/gitlab.py +6 -6
- logdetective/server/metric.py +9 -9
- logdetective/server/plot.py +12 -10
- logdetective/server/server.py +19 -11
- logdetective/server/templates/gitlab_full_comment.md.j2 +7 -7
- logdetective/server/templates/gitlab_short_comment.md.j2 +7 -7
- logdetective/server/utils.py +7 -0
- logdetective/utils.py +36 -29
- {logdetective-2.4.1.dist-info → logdetective-2.6.0.dist-info}/METADATA +18 -3
- {logdetective-2.4.1.dist-info → logdetective-2.6.0.dist-info}/RECORD +21 -21
- {logdetective-2.4.1.dist-info → logdetective-2.6.0.dist-info}/WHEEL +0 -0
- {logdetective-2.4.1.dist-info → logdetective-2.6.0.dist-info}/entry_points.txt +0 -0
- {logdetective-2.4.1.dist-info → logdetective-2.6.0.dist-info}/licenses/LICENSE +0 -0
logdetective/server/database/models/metrics.py
CHANGED

```diff
@@ -1,12 +1,12 @@
+from __future__ import annotations
 import io
 import enum
 import datetime
-from typing import Optional, List, Self, Tuple
+from typing import Optional, List, Self, Tuple, TYPE_CHECKING
 
 import backoff
 
 from sqlalchemy import (
-    Column,
     Integer,
     Float,
     DateTime,
@@ -17,7 +17,7 @@ from sqlalchemy import (
     ForeignKey,
     LargeBinary,
 )
-from sqlalchemy.orm import relationship, aliased
+from sqlalchemy.orm import Mapped, mapped_column, relationship, aliased
 from sqlalchemy.exc import OperationalError
 
 from logdetective.server.database.base import Base, transaction, DB_MAX_RETRIES
@@ -27,6 +27,10 @@ from logdetective.server.database.models.merge_request_jobs import (
 )
 
 
+if TYPE_CHECKING:
+    from .koji import KojiTaskAnalysis
+
+
 class EndpointType(enum.Enum):
     """Different analyze endpoints"""
 
```
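The new `TYPE_CHECKING` block lets `metrics.py` reference `KojiTaskAnalysis` in annotations without importing `koji.py` at runtime, which would risk a circular import between the model modules. A minimal sketch of the pattern, with illustrative names:

```python
from __future__ import annotations

from typing import TYPE_CHECKING, List

if TYPE_CHECKING:
    # Evaluated only by type checkers, never at runtime, so a circular
    # import between the two model modules cannot occur.
    from .koji import KojiTaskAnalysis


class AnalyzeRequestMetrics:
    # With `from __future__ import annotations` every annotation is a string,
    # resolved lazily, so no runtime import is needed.
    koji_tasks: List[KojiTaskAnalysis]
```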
```diff
@@ -42,43 +46,45 @@ class AnalyzeRequestMetrics(Base):
 
     __tablename__ = "analyze_request_metrics"
 
-    id = Column(Integer, primary_key=True)
-    endpoint = Column(
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    endpoint: Mapped[EndpointType] = mapped_column(
         Enum(EndpointType),
         nullable=False,
         index=True,
         comment="The service endpoint that was called",
     )
-    request_received_at = Column(
-        DateTime,
+    request_received_at: Mapped[datetime.datetime] = mapped_column(
+        DateTime(timezone=True),
         nullable=False,
         index=True,
         default=datetime.datetime.now(datetime.timezone.utc),
         comment="Timestamp when the request was received",
     )
-    compressed_log = Column(
+    compressed_log: Mapped[bytes] = mapped_column(
         LargeBinary(length=314572800),  # 300MB limit (300 * 1024 * 1024)
         nullable=False,
         index=False,
         comment="Log processed, saved in a zip format",
     )
-    compressed_response = Column(
+    compressed_response: Mapped[Optional[bytes]] = mapped_column(
         LargeBinary(length=314572800),  # 300MB limit (300 * 1024 * 1024)
         nullable=True,
         index=False,
         comment="Given response (with explanation and snippets) saved in a zip format",
     )
-    response_sent_at = Column(
-        DateTime
+    response_sent_at: Mapped[Optional[datetime.datetime]] = mapped_column(
+        DateTime(timezone=True),
+        nullable=True,
+        comment="Timestamp when the response was sent back",
     )
-    response_length = Column(
+    response_length: Mapped[Optional[int]] = mapped_column(
         Integer, nullable=True, comment="Length of the response in chars"
     )
-    response_certainty = Column(
+    response_certainty: Mapped[Optional[float]] = mapped_column(
         Float, nullable=True, comment="Certainty for generated response"
     )
 
-    merge_request_job_id = Column(
+    merge_request_job_id: Mapped[Optional[int]] = mapped_column(
         Integer,
         ForeignKey("gitlab_merge_request_jobs.id"),
         nullable=True,
```
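The column definitions move from the SQLAlchemy 1.x `Column` idiom (hence the removed import) to 2.0-style typed mappings: the `Mapped[...]` annotation carries the Python type, `Optional[...]` mirrors `nullable=True`, and `mapped_column()` keeps the column options. A self-contained sketch of the same pattern on a toy model, not logdetective's schema:

```python
import datetime
from typing import Optional

from sqlalchemy import DateTime, Integer, LargeBinary
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class RequestMetric(Base):  # toy model for illustration
    __tablename__ = "request_metric"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # No Optional[...] in the annotation: the column is NOT NULL.
    compressed_log: Mapped[bytes] = mapped_column(LargeBinary, nullable=False)
    # Optional[...] matches nullable=True; DateTime(timezone=True) stores
    # timezone-aware timestamps, as the diff now does for both datetime columns.
    response_sent_at: Mapped[Optional[datetime.datetime]] = mapped_column(
        DateTime(timezone=True), nullable=True
    )
```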
```diff
@@ -86,11 +92,19 @@ class AnalyzeRequestMetrics(Base):
         comment="Is this an analyze request coming from a merge request?",
     )
 
-    mr_job = relationship("GitlabMergeRequestJobs", back_populates="request_metrics")
+    mr_job: Mapped[Optional["GitlabMergeRequestJobs"]] = relationship(
+        "GitlabMergeRequestJobs",
+        back_populates="request_metrics"
+    )
+
+    koji_tasks: Mapped[List["KojiTaskAnalysis"]] = relationship(
+        "KojiTaskAnalysis",
+        back_populates="response"
+    )
 
     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def create(
+    async def create(
         cls,
         endpoint: EndpointType,
         compressed_log: io.BytesIO,
```
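Both relationships are now typed and declare `back_populates`, pairing with a `request_metrics` attribute on `GitlabMergeRequestJobs` and a `response` attribute on `KojiTaskAnalysis`. A reduced sketch of one such bidirectional pair, using hypothetical `Job`/`Metric` models:

```python
from typing import List, Optional

from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Job(Base):  # hypothetical
    __tablename__ = "job"

    id: Mapped[int] = mapped_column(primary_key=True)
    # One job can be referenced by many metric rows.
    request_metrics: Mapped[List["Metric"]] = relationship(back_populates="mr_job")


class Metric(Base):  # hypothetical
    __tablename__ = "metric"

    id: Mapped[int] = mapped_column(primary_key=True)
    job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"))
    # back_populates keeps both sides of the link in sync in memory.
    mr_job: Mapped[Optional["Job"]] = relationship(back_populates="request_metrics")
```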
```diff
@@ -98,7 +112,7 @@ class AnalyzeRequestMetrics(Base):
     ) -> int:
         """Create AnalyzeRequestMetrics new line
         with data related to a received request"""
-        with transaction(commit=True) as session:
+        async with transaction(commit=True) as session:
             metrics = AnalyzeRequestMetrics()
             metrics.endpoint = endpoint
             metrics.compressed_log = compressed_log
@@ -106,12 +120,12 @@ class AnalyzeRequestMetrics(Base):
                 datetime.timezone.utc
             )
             session.add(metrics)
-            session.flush()
+            await session.flush()
             return metrics.id
 
     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def update(  # pylint: disable=too-many-arguments disable=too-many-positional-arguments
+    async def update(  # pylint: disable=too-many-arguments disable=too-many-positional-arguments
         cls,
         id_: int,
         response_sent_at: DateTime,
@@ -121,8 +135,10 @@ class AnalyzeRequestMetrics(Base):
     ) -> None:
         """Update a row
         with data related to the given response"""
-        with transaction(commit=True) as session:
-            metrics = session.query(AnalyzeRequestMetrics).filter(AnalyzeRequestMetrics.id == id_).first()
+        query = select(AnalyzeRequestMetrics).filter(AnalyzeRequestMetrics.id == id_)
+        async with transaction(commit=True) as session:
+            query_result = await session.execute(query)
+            metrics = query_result.scalars().first()
             if metrics is None:
                 raise ValueError("Returned `AnalyzeRequestMetrics` table is empty.")
             metrics.response_sent_at = response_sent_at
```
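From here on the data-access methods are coroutines: statements are built with `select()`, executed via `await session.execute(...)`, and unwrapped with `.scalars()`. The `transaction()` context manager is logdetective's own helper; in this sketch a plain `async_sessionmaker` stands in for it, the engine URL is a placeholder, and the toy `RequestMetric` model from the sketch above is reused:

```python
from sqlalchemy import select
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

engine = create_async_engine("postgresql+asyncpg://user:secret@localhost/metrics")
Session = async_sessionmaker(engine)


async def get_metric_by_id(id_: int) -> RequestMetric | None:
    # Build the statement first; execute and unwrap inside the session.
    query = select(RequestMetric).filter(RequestMetric.id == id_)
    async with Session() as session:
        result = await session.execute(query)
        return result.scalars().first()
```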
```diff
@@ -133,19 +149,21 @@ class AnalyzeRequestMetrics(Base):
 
     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def get_metric_by_id(
+    async def get_metric_by_id(
         cls,
         id_: int,
     ) -> Self:
         """Update a row
         with data related to the given response"""
-        with transaction(commit=True) as session:
-            metric = session.query(AnalyzeRequestMetrics).filter(AnalyzeRequestMetrics.id == id_).first()
+        query = select(AnalyzeRequestMetrics).filter(AnalyzeRequestMetrics.id == id_)
+        async with transaction(commit=True) as session:
+            query_result = await session.execute(query)
+            metric = query_result.scalars().first()
             if metric is None:
                 raise ValueError("Returned `AnalyzeRequestMetrics` table is empty.")
             return metric
 
-    def add_mr_job(
+    async def add_mr_job(
         self,
         forge: Forge,
         project_id: int,
@@ -161,13 +179,15 @@ class AnalyzeRequestMetrics(Base):
         mr_iid: merge request forge iid
         job_id: forge job id
         """
-        mr_job = GitlabMergeRequestJobs.get_or_create(forge, project_id, mr_iid, job_id)
+        mr_job = await GitlabMergeRequestJobs.get_or_create(
+            forge, project_id, mr_iid, job_id
+        )
         self.merge_request_job_id = mr_job.id
-        with transaction(commit=True) as session:
-            session.merge(self)
+        async with transaction(commit=True) as session:
+            await session.merge(self)
 
     @classmethod
-    def get_requests_metrics_for_mr_job(
+    async def get_requests_metrics_for_mr_job(
         cls,
         forge: Forge,
         project_id: int,
```
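Every classmethod keeps its `@backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)` decorator through the async conversion; `backoff` supports coroutine functions natively and re-awaits the call after each failure. A runnable sketch of that retry behavior with a simulated flaky call (the constant's value is assumed):

```python
import asyncio

import backoff
from sqlalchemy.exc import OperationalError

DB_MAX_RETRIES = 3  # assumed value; logdetective defines its own constant

attempts = 0


@backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
async def flaky_db_call() -> int:
    """Fail twice, then succeed, to show the retry loop."""
    global attempts
    attempts += 1
    if attempts < 3:
        # backoff sleeps (with exponentially growing delay) and re-awaits.
        raise OperationalError("SELECT 1", {}, Exception("connection dropped"))
    return attempts


print(asyncio.run(flaky_db_call()))  # -> 3, after two retried failures
```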
```diff
@@ -182,19 +202,20 @@ class AnalyzeRequestMetrics(Base):
         mr_iid: merge request forge iid
         job_id: forge job id
         """
-        with transaction(commit=False) as session:
-            mr_job_alias = aliased(GitlabMergeRequestJobs)
-            metrics = (
-                session.query(cls)
-                .join(mr_job_alias, cls.merge_request_job_id == mr_job_alias.id)
-                .filter(
-                    mr_job_alias.forge == forge,
-                    mr_job_alias.mr_iid == mr_iid,
-                    mr_job_alias.project_id == project_id,
-                    mr_job_alias.job_id == job_id,
-                )
-                .all()
+        mr_job_alias = aliased(GitlabMergeRequestJobs)
+        query = (
+            select(cls)
+            .join(mr_job_alias, cls.merge_request_job_id == mr_job_alias.id)
+            .filter(
+                mr_job_alias.forge == forge,
+                mr_job_alias.mr_iid == mr_iid,
+                mr_job_alias.project_id == project_id,
+                mr_job_alias.job_id == job_id,
             )
+        )
+        async with transaction(commit=False) as session:
+            query_result = await session.execute(query)
+            metrics = query_result.scalars().all()
         return metrics
 
     @classmethod
```
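The join query is rebuilt in 2.0 style: `select(cls)` is joined to an `aliased()` construct and all filters reference the alias. Reusing the toy `Job`/`Metric` pair from the relationship sketch above (imagine `Job` also carrying integer `project_id` and `mr_iid` columns), the same shape looks like:

```python
from sqlalchemy import select
from sqlalchemy.orm import aliased

job_alias = aliased(Job)
query = (
    select(Metric)
    .join(job_alias, Metric.job_id == job_alias.id)
    .filter(
        job_alias.project_id == 42,  # illustrative filter values
        job_alias.mr_iid == 7,
    )
)
# Inside `async with transaction(commit=False) as session:` this becomes
# `result = await session.execute(query)` and `result.scalars().all()`.
```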
```diff
@@ -242,7 +263,7 @@ class AnalyzeRequestMetrics(Base):
         return requests_by_time_format
 
     @classmethod
-    def get_requests_in_period(
+    async def get_requests_in_period(
         cls,
         start_time: datetime.datetime,
         end_time: datetime.datetime,
@@ -261,7 +282,7 @@ class AnalyzeRequestMetrics(Base):
         Returns:
             dict[datetime, int]: A dictionary mapping datetime objects to request counts
         """
-        with transaction(commit=False) as session:
+        async with transaction(commit=False) as session:
             requests_by_time_format = cls._get_requests_by_time_for_postgres(
                 start_time, end_time, time_format, endpoint
             )
@@ -271,13 +292,13 @@ class AnalyzeRequestMetrics(Base):
                 func.count(distinct(requests_by_time_format.c.id)),  # pylint: disable=not-callable
             ).group_by("time_format")
 
-
-            results = session.execute(count_requests_by_time_format).all()
+            query_results = await session.execute(count_requests_by_time_format)
+            results = query_results.all()
 
         return cls.get_dictionary_with_datetime_keys(time_format, results)
 
     @classmethod
-    def _get_average_responses_times_for_postgres(
+    async def _get_average_responses_times_for_postgres(
         cls, start_time, end_time, time_format, endpoint
     ):
         """Get average responses time.
@@ -285,7 +306,7 @@ class AnalyzeRequestMetrics(Base):
         func.to_char is PostgreSQL specific.
         Let's unit tests replace this function with the SQLite version.
         """
-        with transaction(commit=False) as session:
+        async with transaction(commit=False) as session:
             pgsql_time_format = cls.get_postgres_time_format(time_format)
 
             average_responses_times = (
@@ -307,11 +328,12 @@ class AnalyzeRequestMetrics(Base):
                 .order_by("time_range")
             )
 
-            results = session.execute(average_responses_times).all()
+            query_results = await session.execute(average_responses_times)
+            results = query_results.all()
         return results
 
     @classmethod
-    def get_responses_average_time_in_period(
+    async def get_responses_average_time_in_period(
         cls,
         start_time: datetime.datetime,
         end_time: datetime.datetime,
@@ -332,9 +354,11 @@ class AnalyzeRequestMetrics(Base):
             dict[datetime, int]: A dictionary mapping datetime objects
             to average responses times
         """
-        with transaction(commit=False) as _:
-            average_responses_times = cls._get_average_responses_times_for_postgres(
-                start_time, end_time, time_format, endpoint
+        async with transaction(commit=False) as _:
+            average_responses_times = (
+                await cls._get_average_responses_times_for_postgres(
+                    start_time, end_time, time_format, endpoint
+                )
             )
 
         return cls.get_dictionary_with_datetime_keys(
@@ -342,7 +366,7 @@ class AnalyzeRequestMetrics(Base):
         )
 
     @classmethod
-    def _get_average_responses_lengths_for_postgres(
+    async def _get_average_responses_lengths_for_postgres(
         cls, start_time, end_time, time_format, endpoint
     ):
         """Get average responses length.
@@ -350,7 +374,7 @@ class AnalyzeRequestMetrics(Base):
         func.to_char is PostgreSQL specific.
         Let's unit tests replace this function with the SQLite version.
         """
-        with transaction(commit=False) as session:
+        async with transaction(commit=False) as session:
             pgsql_time_format = cls.get_postgres_time_format(time_format)
 
             average_responses_lengths = (
@@ -366,11 +390,12 @@ class AnalyzeRequestMetrics(Base):
                 .order_by("time_range")
             )
 
-            results = session.execute(average_responses_lengths).all()
+            query_results = await session.execute(average_responses_lengths)
+            results = query_results.all()
         return results
 
     @classmethod
-    def get_responses_average_length_in_period(
+    async def get_responses_average_length_in_period(
         cls,
         start_time: datetime.datetime,
         end_time: datetime.datetime,
@@ -391,9 +416,11 @@ class AnalyzeRequestMetrics(Base):
             dict[datetime, int]: A dictionary mapping datetime objects
             to average responses lengths
         """
-        with transaction(commit=False) as _:
-            average_responses_lengths = cls._get_average_responses_lengths_for_postgres(
-                start_time, end_time, time_format, endpoint
+        async with transaction(commit=False) as _:
+            average_responses_lengths = (
+                await cls._get_average_responses_lengths_for_postgres(
+                    start_time, end_time, time_format, endpoint
+                )
             )
 
         return cls.get_dictionary_with_datetime_keys(
```
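As the docstrings note, `func.to_char` is PostgreSQL-specific, and the unit tests substitute a SQLite version. The underlying bucketing idea, sketched on a hypothetical table: render each timestamp down to a coarse string label, then group and aggregate per label.

```python
import datetime

from sqlalchemy import DateTime, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Request(Base):  # hypothetical table for illustration
    __tablename__ = "request"

    id: Mapped[int] = mapped_column(primary_key=True)
    created_at: Mapped[datetime.datetime] = mapped_column(DateTime(timezone=True))


# 'YYYY-MM-DD HH24' collapses all timestamps within one hour onto the same
# label; to_char is the PostgreSQL-only piece the docstrings warn about.
bucket = func.to_char(Request.created_at, "YYYY-MM-DD HH24").label("time_format")

requests_per_hour = (
    select(bucket, func.count(Request.id))
    .group_by("time_format")
    .order_by("time_format")
)
```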
logdetective/server/emoji.py
CHANGED

```diff
@@ -20,7 +20,7 @@ async def collect_emojis(gitlab_conn: gitlab.Gitlab, period: TimePeriod):
     Collect emoji feedback from logdetective comments saved in database.
     Check only comments created in the last given period of time.
     """
-    comments = Comments.get_since(period.get_period_start_time()) or []
+    comments = await Comments.get_since(period.get_period_start_time()) or []
     comments_for_gitlab_connection = [
         comment for comment in comments if comment.forge == gitlab_conn.url
     ]
@@ -39,9 +39,9 @@ async def collect_emojis_for_mr(
     except ValueError as ex:
         LOG.exception("Attempt to use unrecognized Forge `%s`", gitlab_conn.url)
         raise ex
-    mr_jobs = GitlabMergeRequestJobs.get_by_mr_iid(url, project_id, mr_iid) or []
+    mr_jobs = await GitlabMergeRequestJobs.get_by_mr_iid(url, project_id, mr_iid) or []
 
-    comments = [Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs]
+    comments = [await Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs]
     await collect_emojis_in_comments(comments, gitlab_conn)
 
 
@@ -73,7 +73,7 @@ async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
     projects = {}
     merge_requests = {}
     for comment in comments:
-        mr_job_db = GitlabMergeRequestJobs.get_by_id(comment.merge_request_job_id)
+        mr_job_db = await GitlabMergeRequestJobs.get_by_id(comment.merge_request_job_id)
         if not mr_job_db:
             continue
         if mr_job_db.id not in projects:
@@ -114,7 +114,7 @@ async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
         # because we need to remove them
         old_emojis = [
             reaction.reaction_type
-            for reaction in Reactions.get_all_reactions(
+            for reaction in await Reactions.get_all_reactions(
                 comment.forge,
                 mr_job_db.project_id,
                 mr_job_db.mr_iid,
@@ -123,7 +123,7 @@ async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
             )
         ]
         for key, value in emoji_counts.items():
-            Reactions.create_or_update(
+            await Reactions.create_or_update(
                 comment.forge,
                 mr_job_db.project_id,
                 mr_job_db.mr_iid,
@@ -136,7 +136,7 @@ async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
             old_emojis.remove(key)
 
         # not updated reactions has been removed, drop them
-        Reactions.delete(
+        await Reactions.delete(
             comment.forge,
             mr_job_db.project_id,
             mr_job_db.mr_iid,
```
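With `Comments.get_by_mr_job` now a coroutine, the comprehension awaits each lookup sequentially, which is the most direct translation of the old code. If the lookups were ever worth overlapping, `asyncio.gather` would be the usual alternative; a sketch, with the `Comments` import path assumed:

```python
import asyncio

from logdetective.server.database.models.merge_request_jobs import Comments


async def fetch_comments_sequentially(mr_jobs):
    # Mirrors the diff: one awaited DB round-trip per job, in order.
    return [await Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs]


async def fetch_comments_concurrently(mr_jobs):
    # Alternative: start every lookup, then await them all together.
    return await asyncio.gather(
        *(Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs)
    )
```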
logdetective/server/gitlab.py
CHANGED

```diff
@@ -80,7 +80,7 @@ async def process_gitlab_job_event(
     # Check if this is a resubmission of an existing, completed job.
     # If it is, we'll exit out here and not waste time retrieving the logs,
     # running a new analysis or trying to submit a new comment.
-    mr_job_db = GitlabMergeRequestJobs.get_by_details(
+    mr_job_db = await GitlabMergeRequestJobs.get_by_details(
         forge=forge,
         project_id=project.id,
         mr_iid=merge_request_iid,
@@ -109,7 +109,7 @@ async def process_gitlab_job_event(
         compressed_log_content=RemoteLogCompressor.zip_text(log_text),
     )
     staged_response = await perform_staged_analysis(log_text=log_text)
-    update_metrics(metrics_id, staged_response)
+    await update_metrics(metrics_id, staged_response)
    preprocessed_log.close()
 
     # check if this project is on the opt-in list for posting comments.
@@ -357,13 +357,13 @@ async def comment_on_mr(  # pylint: disable=too-many-arguments disable=too-many-
     await asyncio.to_thread(note.save)
 
     # Save the new comment to the database
-    metrics = AnalyzeRequestMetrics.get_metric_by_id(metrics_id)
-    Comments.create(
+    metrics = await AnalyzeRequestMetrics.get_metric_by_id(metrics_id)
+    await Comments.create(
         forge,
         project.id,
         merge_request_iid,
         job.id,
-        discussion.id,
+        str(discussion.id),
         metrics,
     )
 
@@ -378,7 +378,7 @@ async def suppress_latest_comment(
     superseded by a new push."""
 
     # Ask the database for the last known comment for this MR
-    previous_comment = Comments.get_latest_comment(
+    previous_comment = await Comments.get_latest_comment(
         gitlab_instance, project.id, merge_request_iid
     )
 
```
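The resubmission guard described in the comments above now awaits its lookup as well. A sketch of the guard's shape (function name and signature are illustrative, not the real handler):

```python
from logdetective.server.database.models.merge_request_jobs import (
    GitlabMergeRequestJobs,
)


async def already_analyzed(forge, project_id, mr_iid, job_id) -> bool:
    """Return True when this job was already processed (a resubmission)."""
    mr_job_db = await GitlabMergeRequestJobs.get_by_details(
        forge=forge,
        project_id=project_id,
        mr_iid=mr_iid,
        job_id=job_id,
    )
    # On a hit the real handler returns early, skipping log retrieval,
    # analysis, and commenting.
    return mr_job_db is not None
```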
logdetective/server/metric.py
CHANGED

```diff
@@ -32,7 +32,7 @@ async def add_new_metrics(
     remote_log = RemoteLog(url, http_session)
     compressed_log_content = await RemoteLogCompressor(remote_log).zip_content()
 
-    return AnalyzeRequestMetrics.create(
+    return await AnalyzeRequestMetrics.create(
         endpoint=EndpointType(api_name),
         compressed_log=compressed_log_content,
         request_received_at=received_at
@@ -41,7 +41,7 @@ async def add_new_metrics(
     )
 
 
-def update_metrics(
+async def update_metrics(
     metrics_id: int,
     response: Union[models.Response, models.StagedResponse, StreamingResponse],
     sent_at: Optional[datetime.datetime] = None,
@@ -73,12 +73,12 @@ def update_metrics(
     response_certainty = (
         response.response_certainty if hasattr(response, "response_certainty") else None
     )
-    AnalyzeRequestMetrics.update(
-        metrics_id,
-        response_sent_at,
-        response_length,
-        response_certainty,
-        compressed_response,
+    await AnalyzeRequestMetrics.update(
+        id_=metrics_id,
+        response_sent_at=response_sent_at,
+        response_length=response_length,
+        response_certainty=response_certainty,
+        compressed_response=compressed_response,
     )
 
 
@@ -112,7 +112,7 @@ def track_request(name=None):
                name if name else f.__name__, log_url, kwargs["http_session"]
            )
            response = await f(*args, **kwargs)
-            update_metrics(metrics_id, response)
+            await update_metrics(metrics_id, response)
            return response

        if inspect.iscoroutinefunction(f):
```
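`track_request` dispatches on `inspect.iscoroutinefunction(f)`, and its async wrapper must now await `update_metrics`. A reduced sketch of such a decorator; the log-URL plumbing is simplified to a keyword argument, and the real implementation may also handle plain functions:

```python
import functools
import inspect

from logdetective.server.metric import add_new_metrics, update_metrics


def track_request(name=None):
    """Record request/response metrics around an async endpoint handler."""

    def decorator(f):
        @functools.wraps(f)
        async def async_wrapper(*args, **kwargs):
            # The real wrapper derives the log URL from the incoming request;
            # this sketch assumes it arrives as a keyword argument.
            log_url = kwargs.get("url", "")
            metrics_id = await add_new_metrics(
                name if name else f.__name__, log_url, kwargs["http_session"]
            )
            response = await f(*args, **kwargs)
            await update_metrics(metrics_id, response)
            return response

        if inspect.iscoroutinefunction(f):
            return async_wrapper
        # Assumption of this sketch: only coroutine handlers are supported.
        raise TypeError("track_request expects an async endpoint handler")

    return decorator
```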
logdetective/server/plot.py
CHANGED

```diff
@@ -163,7 +163,7 @@ def _add_line_chart(  # pylint: disable=too-many-arguments disable=too-many-posi
     ax.tick_params(axis="y", labelcolor=color)
 
 
-def requests_per_time(
+async def requests_per_time(
     period_of_time: TimePeriod,
     endpoint: EndpointType = EndpointType.ANALYZE,
     end_time: Optional[datetime.datetime] = None,
@@ -191,7 +191,7 @@ def requests_per_time(
     end_time = end_time or datetime.datetime.now(datetime.timezone.utc)
     start_time = period_of_time.get_period_start_time(end_time)
     plot_def = Definition(period_of_time)
-    requests_counts = AnalyzeRequestMetrics.get_requests_in_period(
+    requests_counts = await AnalyzeRequestMetrics.get_requests_in_period(
         start_time, end_time, plot_def.time_format, endpoint
     )
     timestamps, counts = create_time_series_arrays(
@@ -218,7 +218,7 @@
     return fig
 
 
-def average_time_per_responses(  # pylint: disable=too-many-locals
+async def average_time_per_responses(  # pylint: disable=too-many-locals
     period_of_time: TimePeriod,
     endpoint: EndpointType = EndpointType.ANALYZE,
     end_time: Optional[datetime.datetime] = None,
@@ -246,8 +246,10 @@ def average_time_per_responses(  # pylint: disable=too-many-locals
     end_time = end_time or datetime.datetime.now(datetime.timezone.utc)
     start_time = period_of_time.get_period_start_time(end_time)
     plot_def = Definition(period_of_time)
-    responses_average_time = AnalyzeRequestMetrics.get_responses_average_time_in_period(
-        start_time, end_time, plot_def.time_format, endpoint
+    responses_average_time = (
+        await AnalyzeRequestMetrics.get_responses_average_time_in_period(
+            start_time, end_time, plot_def.time_format, endpoint
+        )
     )
     timestamps, average_time = create_time_series_arrays(
         responses_average_time,
@@ -263,7 +265,7 @@ def average_time_per_responses(  # pylint: disable=too-many-locals
     )
 
     responses_average_length = (
-        AnalyzeRequestMetrics.get_responses_average_length_in_period(
+        await AnalyzeRequestMetrics.get_responses_average_length_in_period(
            start_time, end_time, plot_def.time_format, endpoint
        )
    )
@@ -292,7 +294,7 @@ def average_time_per_responses(  # pylint: disable=too-many-locals
     return fig
 
 
-def _collect_emoji_data(
+async def _collect_emoji_data(
     start_time: datetime.datetime, plot_def: Definition
 ) -> Dict[str, Dict[datetime.datetime, int]]:
     """Collect and organize emoji feedback data
@@ -300,7 +302,7 @@ def _collect_emoji_data(
     Counts all emojis given to logdetective comments created since start_time.
     Collect counts in time accordingly to the plot definition.
     """
-    reactions = Reactions.get_since(start_time)
+    reactions = await Reactions.get_since(start_time)
     reactions_values_dict: Dict[str, Dict] = {}
     for comment_created_at, reaction in reactions:
         comment_created_at_formatted = comment_created_at.strptime(
@@ -369,7 +371,7 @@ def _plot_emoji_data(  # pylint: disable=too-many-locals
     return emoji_lines, emoji_labels
 
 
-def emojis_per_time(
+async def emojis_per_time(
     period_of_time: TimePeriod,
     end_time: Optional[datetime.datetime] = None,
 ) -> figure.Figure:
@@ -395,7 +397,7 @@ def emojis_per_time(
     plot_def = Definition(period_of_time)
     end_time = end_time or datetime.datetime.now(datetime.timezone.utc)
     start_time = period_of_time.get_period_start_time(end_time)
-    reactions_values_dict = _collect_emoji_data(start_time, plot_def)
+    reactions_values_dict = await _collect_emoji_data(start_time, plot_def)
 
     fig, ax = pyplot.subplots(figsize=(12, 6))
 
```
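These plot builders now await their metrics queries but still return matplotlib `figure.Figure` objects, which the server then serializes to SVG (via its `_svg_figure_response` helper). A standalone approximation of that serialization step:

```python
import io

from matplotlib import pyplot


def figure_to_svg(fig) -> bytes:
    """Serialize a matplotlib Figure to SVG bytes for an HTTP response."""
    buf = io.BytesIO()
    fig.savefig(buf, format="svg")
    pyplot.close(fig)  # release the figure once rendered
    return buf.getvalue()
```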
logdetective/server/server.py
CHANGED

```diff
@@ -67,6 +67,7 @@ from logdetective.server.emoji import (
     collect_emojis_for_mr,
 )
 from logdetective.server.compressors import RemoteLogCompressor
+from logdetective.server.utils import get_version
 
 
 LOG_SOURCE_REQUEST_TIMEOUT = os.environ.get("LOG_SOURCE_REQUEST_TIMEOUT", 60)
@@ -89,7 +90,7 @@ async def lifespan(fapp: FastAPI):
     )
 
     # Ensure that the database is initialized.
-    logdetective.server.database.base.init()
+    await logdetective.server.database.base.init()
 
     # Start the background task scheduler for collecting emojis
     asyncio.create_task(schedule_collect_emojis_task())
@@ -306,11 +307,10 @@ async def analyze_koji_task(task_id: int, koji_instance_config: KojiInstanceConf
         received_at=datetime.datetime.now(datetime.timezone.utc),
         compressed_log_content=RemoteLogCompressor.zip_text(log_text),
     )
-
     # We need to associate the metric ID with the koji task analysis.
     # This will create the new row without a response, which we will use as
     # an indicator that the analysis is in progress.
-    KojiTaskAnalysis.create_or_restart(
+    await KojiTaskAnalysis.create_or_restart(
         koji_instance=koji_instance_config.xmlrpc_url,
         task_id=task_id,
         log_file_name=log_file_name,
@@ -319,8 +319,8 @@ async def analyze_koji_task(task_id: int, koji_instance_config: KojiInstanceConf
 
     # Now that we have the response, we can update the metrics and mark the
     # koji task analysis as completed.
-    update_metrics(metrics_id, response)
-    KojiTaskAnalysis.add_response(task_id, metrics_id)
+    await update_metrics(metrics_id, response)
+    await KojiTaskAnalysis.add_response(task_id, metrics_id)
 
     # Notify any callbacks that the analysis is complete.
     for callback in koji_instance_config.get_callbacks(task_id):
@@ -357,6 +357,12 @@ async def async_log(msg):
     return msg
 
 
+@app.get("/version")
+async def get_version_wrapper():
+    """Get the version of logdetective"""
+    return get_version()
+
+
 @app.post("/analyze/stream", response_class=StreamingResponse)
 @track_request()
 async def analyze_log_stream(
@@ -616,22 +622,24 @@ async def get_metrics(
     async def handler():
         """Show statistics for the specified endpoint and plot."""
         if plot == Plot.REQUESTS:
-            fig = plot_engine.requests_per_time(period_since_now, endpoint_type)
+            fig = await plot_engine.requests_per_time(period_since_now, endpoint_type)
             return _svg_figure_response(fig)
         if plot == Plot.RESPONSES:
-            fig = plot_engine.average_time_per_responses(
+            fig = await plot_engine.average_time_per_responses(
                 period_since_now, endpoint_type
             )
             return _svg_figure_response(fig)
         if plot == Plot.EMOJIS:
-            fig = plot_engine.emojis_per_time(period_since_now)
+            fig = await plot_engine.emojis_per_time(period_since_now)
             return _svg_figure_response(fig)
         # BOTH
-        fig_requests = plot_engine.requests_per_time(period_since_now, endpoint_type)
-        fig_responses = plot_engine.average_time_per_responses(
+        fig_requests = await plot_engine.requests_per_time(
+            period_since_now, endpoint_type
+        )
+        fig_responses = await plot_engine.average_time_per_responses(
             period_since_now, endpoint_type
         )
-        fig_emojis = plot_engine.emojis_per_time(period_since_now)
+        fig_emojis = await plot_engine.emojis_per_time(period_since_now)
         return _multiple_svg_figures_response([fig_requests, fig_responses, fig_emojis])
 
     descriptions = {
```
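The new `/version` endpoint simply wraps `get_version()` from `logdetective.server.utils` (the `+7 -0` change in the file summary). A self-contained approximation; reading the installed distribution's version is one plausible implementation of the helper, not necessarily logdetective's:

```python
from importlib.metadata import version

from fastapi import FastAPI

app = FastAPI()


def get_version() -> str:
    # Stand-in for logdetective.server.utils.get_version (added in 2.6.0).
    return version("logdetective")


@app.get("/version")
async def get_version_wrapper():
    """Get the version of logdetective"""
    return get_version()
```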
|