logdetective 2.4.1-py3-none-any.whl → 2.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- logdetective/server/database/base.py +25 -23
- logdetective/server/database/models/koji.py +29 -22
- logdetective/server/database/models/merge_request_jobs.py +163 -164
- logdetective/server/database/models/metrics.py +61 -46
- logdetective/server/emoji.py +7 -7
- logdetective/server/gitlab.py +6 -6
- logdetective/server/metric.py +9 -9
- logdetective/server/plot.py +12 -10
- logdetective/server/server.py +19 -11
- logdetective/server/utils.py +7 -0
- logdetective/utils.py +36 -29
- {logdetective-2.4.1.dist-info → logdetective-2.5.0.dist-info}/METADATA +4 -3
- {logdetective-2.4.1.dist-info → logdetective-2.5.0.dist-info}/RECORD +16 -16
- {logdetective-2.4.1.dist-info → logdetective-2.5.0.dist-info}/WHEEL +0 -0
- {logdetective-2.4.1.dist-info → logdetective-2.5.0.dist-info}/entry_points.txt +0 -0
- {logdetective-2.4.1.dist-info → logdetective-2.5.0.dist-info}/licenses/LICENSE +0 -0
logdetective/server/database/models/metrics.py
CHANGED

@@ -50,7 +50,7 @@ class AnalyzeRequestMetrics(Base):
         comment="The service endpoint that was called",
     )
     request_received_at = Column(
-        DateTime,
+        DateTime(timezone=True),
         nullable=False,
         index=True,
         default=datetime.datetime.now(datetime.timezone.utc),
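Both timestamp columns move from a naive `DateTime` to `DateTime(timezone=True)`. On PostgreSQL that maps to `TIMESTAMP WITH TIME ZONE`, so aware values such as `datetime.datetime.now(datetime.timezone.utc)` keep their offset instead of being stored as bare wall-clock times. A minimal sketch of the distinction (the column names are illustrative, not from logdetective):

    import datetime

    from sqlalchemy import Column, DateTime

    # Naive column: TIMESTAMP WITHOUT TIME ZONE on PostgreSQL; the UTC offset
    # of an aware datetime is discarded on insert.
    naive_at = Column(DateTime, nullable=False)

    # Aware column: TIMESTAMP WITH TIME ZONE; values are stored normalized and
    # compare correctly across time zones.
    aware_at = Column(DateTime(timezone=True), nullable=False)

    # An aware "now", matching the default used by the model above.
    now = datetime.datetime.now(datetime.timezone.utc)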
@@ -69,7 +69,9 @@ class AnalyzeRequestMetrics(Base):
         comment="Given response (with explanation and snippets) saved in a zip format",
     )
     response_sent_at = Column(
-        DateTime
+        DateTime(timezone=True),
+        nullable=True,
+        comment="Timestamp when the response was sent back",
     )
     response_length = Column(
         Integer, nullable=True, comment="Length of the response in chars"
@@ -90,7 +92,7 @@ class AnalyzeRequestMetrics(Base):

     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def create(
+    async def create(
         cls,
         endpoint: EndpointType,
         compressed_log: io.BytesIO,
@@ -98,7 +100,7 @@ class AnalyzeRequestMetrics(Base):
     ) -> int:
         """Create AnalyzeRequestMetrics new line
         with data related to a received request"""
-        with transaction(commit=True) as session:
+        async with transaction(commit=True) as session:
             metrics = AnalyzeRequestMetrics()
             metrics.endpoint = endpoint
             metrics.compressed_log = compressed_log
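`create` keeps its `@backoff.on_exception` retry decorator while becoming a coroutine; backoff recognizes coroutine functions and produces an async wrapper that sleeps with `asyncio.sleep` between attempts, so transient `OperationalError`s are still retried without blocking the event loop. A self-contained sketch of that behavior (the flaky function is invented for the demo):

    import asyncio

    import backoff
    from sqlalchemy.exc import OperationalError

    attempts = 0

    @backoff.on_exception(backoff.expo, OperationalError, max_tries=3)
    async def flaky_query() -> int:
        """Hypothetical query that fails twice before succeeding."""
        global attempts
        attempts += 1
        if attempts < 3:
            raise OperationalError("SELECT 1", None, RuntimeError("connection dropped"))
        return attempts

    print(asyncio.run(flaky_query()))  # prints 3 after two retried failures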
@@ -106,12 +108,12 @@ class AnalyzeRequestMetrics(Base):
                 datetime.timezone.utc
             )
             session.add(metrics)
-            session.flush()
+            await session.flush()
             return metrics.id

     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def update(  # pylint: disable=too-many-arguments disable=too-many-positional-arguments
+    async def update(  # pylint: disable=too-many-arguments disable=too-many-positional-arguments
         cls,
         id_: int,
         response_sent_at: DateTime,
@@ -121,8 +123,10 @@ class AnalyzeRequestMetrics(Base):
     ) -> None:
         """Update a row
         with data related to the given response"""
-
-
+        query = select(AnalyzeRequestMetrics).filter(AnalyzeRequestMetrics.id == id_)
+        async with transaction(commit=True) as session:
+            query_result = await session.execute(query)
+            metrics = query_result.scalars().first()
             if metrics is None:
                 raise ValueError("Returned `AnalyzeRequestMetrics` table is empty.")
             metrics.response_sent_at = response_sent_at
@@ -133,19 +137,21 @@ class AnalyzeRequestMetrics(Base):

     @classmethod
     @backoff.on_exception(backoff.expo, OperationalError, max_tries=DB_MAX_RETRIES)
-    def get_metric_by_id(
+    async def get_metric_by_id(
         cls,
         id_: int,
     ) -> Self:
         """Update a row
         with data related to the given response"""
-
-
+        query = select(AnalyzeRequestMetrics).filter(AnalyzeRequestMetrics.id == id_)
+        async with transaction(commit=True) as session:
+            query_result = await session.execute(query)
+            metric = query_result.scalars().first()
             if metric is None:
                 raise ValueError("Returned `AnalyzeRequestMetrics` table is empty.")
             return metric

-    def add_mr_job(
+    async def add_mr_job(
         self,
         forge: Forge,
         project_id: int,
@@ -161,13 +167,15 @@ class AnalyzeRequestMetrics(Base):
         mr_iid: merge request forge iid
         job_id: forge job id
         """
-        mr_job = GitlabMergeRequestJobs.get_or_create(forge, project_id, mr_iid, job_id)
+        mr_job = await GitlabMergeRequestJobs.get_or_create(
+            forge, project_id, mr_iid, job_id
+        )
         self.merge_request_job_id = mr_job.id
-        with transaction(commit=True) as session:
-            session.merge(self)
+        async with transaction(commit=True) as session:
+            await session.merge(self)

     @classmethod
-    def get_requests_metrics_for_mr_job(
+    async def get_requests_metrics_for_mr_job(
         cls,
         forge: Forge,
         project_id: int,
@@ -182,19 +190,20 @@ class AnalyzeRequestMetrics(Base):
         mr_iid: merge request forge iid
         job_id: forge job id
         """
-        with transaction(commit=False) as session:
-            mr_job_alias = aliased(GitlabMergeRequestJobs)
-            metrics = (
-                session.query(cls)
-                .join(mr_job_alias, cls.merge_request_job_id == mr_job_alias.id)
-                .filter(
-                    mr_job_alias.forge == forge,
-                    mr_job_alias.mr_iid == mr_iid,
-                    mr_job_alias.project_id == project_id,
-                    mr_job_alias.job_id == job_id,
-                )
-                .all()
+        mr_job_alias = aliased(GitlabMergeRequestJobs)
+        query = (
+            select(cls)
+            .join(mr_job_alias, cls.merge_request_job_id == mr_job_alias.id)
+            .filter(
+                mr_job_alias.forge == forge,
+                mr_job_alias.mr_iid == mr_iid,
+                mr_job_alias.project_id == project_id,
+                mr_job_alias.job_id == job_id,
             )
+        )
+        async with transaction(commit=False) as session:
+            query_result = await session.execute(query)
+            metrics = query_result.scalars().all()
         return metrics

     @classmethod
@@ -242,7 +251,7 @@ class AnalyzeRequestMetrics(Base):
         return requests_by_time_format

     @classmethod
-    def get_requests_in_period(
+    async def get_requests_in_period(
         cls,
         start_time: datetime.datetime,
         end_time: datetime.datetime,
@@ -261,7 +270,7 @@ class AnalyzeRequestMetrics(Base):
         Returns:
             dict[datetime, int]: A dictionary mapping datetime objects to request counts
         """
-        with transaction(commit=False) as session:
+        async with transaction(commit=False) as session:
             requests_by_time_format = cls._get_requests_by_time_for_postgres(
                 start_time, end_time, time_format, endpoint
             )
@@ -271,13 +280,13 @@ class AnalyzeRequestMetrics(Base):
                 func.count(distinct(requests_by_time_format.c.id)),  # pylint: disable=not-callable
             ).group_by("time_format")

-
-            results =
+            query_results = await session.execute(count_requests_by_time_format)
+            results = query_results.all()

             return cls.get_dictionary_with_datetime_keys(time_format, results)

     @classmethod
-    def _get_average_responses_times_for_postgres(
+    async def _get_average_responses_times_for_postgres(
         cls, start_time, end_time, time_format, endpoint
     ):
         """Get average responses time.
@@ -285,7 +294,7 @@ class AnalyzeRequestMetrics(Base):
        func.to_char is PostgreSQL specific.
        Let's unit tests replace this function with the SQLite version.
        """
-        with transaction(commit=False) as session:
+        async with transaction(commit=False) as session:
            pgsql_time_format = cls.get_postgres_time_format(time_format)

            average_responses_times = (
@@ -307,11 +316,12 @@ class AnalyzeRequestMetrics(Base):
                .order_by("time_range")
            )

-
+            query_results = await session.execute(average_responses_times)
+            results = query_results.all()
            return results

     @classmethod
-    def get_responses_average_time_in_period(
+    async def get_responses_average_time_in_period(
         cls,
         start_time: datetime.datetime,
         end_time: datetime.datetime,
@@ -332,9 +342,11 @@ class AnalyzeRequestMetrics(Base):
             dict[datetime, int]: A dictionary mapping datetime objects
                 to average responses times
         """
-        with transaction(commit=False) as _:
-            average_responses_times = cls._get_average_responses_times_for_postgres(
-                start_time, end_time, time_format, endpoint
+        async with transaction(commit=False) as _:
+            average_responses_times = (
+                await cls._get_average_responses_times_for_postgres(
+                    start_time, end_time, time_format, endpoint
+                )
             )

         return cls.get_dictionary_with_datetime_keys(
@@ -342,7 +354,7 @@ class AnalyzeRequestMetrics(Base):
         )

     @classmethod
-    def _get_average_responses_lengths_for_postgres(
+    async def _get_average_responses_lengths_for_postgres(
         cls, start_time, end_time, time_format, endpoint
     ):
         """Get average responses length.
@@ -350,7 +362,7 @@ class AnalyzeRequestMetrics(Base):
        func.to_char is PostgreSQL specific.
        Let's unit tests replace this function with the SQLite version.
        """
-        with transaction(commit=False) as session:
+        async with transaction(commit=False) as session:
            pgsql_time_format = cls.get_postgres_time_format(time_format)

            average_responses_lengths = (
@@ -366,11 +378,12 @@ class AnalyzeRequestMetrics(Base):
                .order_by("time_range")
            )

-
+            query_results = await session.execute(average_responses_lengths)
+            results = query_results.all()
            return results

     @classmethod
-    def get_responses_average_length_in_period(
+    async def get_responses_average_length_in_period(
         cls,
         start_time: datetime.datetime,
         end_time: datetime.datetime,
@@ -391,9 +404,11 @@ class AnalyzeRequestMetrics(Base):
             dict[datetime, int]: A dictionary mapping datetime objects
                 to average responses lengths
         """
-        with transaction(commit=False) as _:
-            average_responses_lengths = cls._get_average_responses_lengths_for_postgres(
-                start_time, end_time, time_format, endpoint
+        async with transaction(commit=False) as _:
+            average_responses_lengths = (
+                await cls._get_average_responses_lengths_for_postgres(
+                    start_time, end_time, time_format, endpoint
+                )
             )

         return cls.get_dictionary_with_datetime_keys(
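Across all of these methods the pattern is the same: build a 2.0-style `select()` statement, enter `async with transaction(...)`, then `await session.execute(query)` and unwrap with `.scalars()`. The `transaction` helper itself is not shown in this diff (it presumably lives in `logdetective/server/database/base.py`, which this release also touches); a minimal sketch of what the call sites above assume, with a made-up connection string:

    from contextlib import asynccontextmanager

    from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

    # Illustrative engine/DSN; the real setup is in logdetective.server.database.base.
    engine = create_async_engine("postgresql+asyncpg://user:secret@localhost/logdetective")
    SessionFactory = async_sessionmaker(engine, expire_on_commit=False)

    @asynccontextmanager
    async def transaction(commit: bool = False):
        """Yield an AsyncSession; commit on clean exit if requested, roll back on error."""
        async with SessionFactory() as session:
            try:
                yield session
                if commit:
                    await session.commit()
            except Exception:
                await session.rollback()
                raise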
logdetective/server/emoji.py
CHANGED
@@ -20,7 +20,7 @@ async def collect_emojis(gitlab_conn: gitlab.Gitlab, period: TimePeriod):
     Collect emoji feedback from logdetective comments saved in database.
     Check only comments created in the last given period of time.
     """
-    comments = Comments.get_since(period.get_period_start_time()) or []
+    comments = await Comments.get_since(period.get_period_start_time()) or []
     comments_for_gitlab_connection = [
         comment for comment in comments if comment.forge == gitlab_conn.url
     ]
@@ -39,9 +39,9 @@ async def collect_emojis_for_mr(
     except ValueError as ex:
         LOG.exception("Attempt to use unrecognized Forge `%s`", gitlab_conn.url)
         raise ex
-    mr_jobs = GitlabMergeRequestJobs.get_by_mr_iid(url, project_id, mr_iid) or []
+    mr_jobs = await GitlabMergeRequestJobs.get_by_mr_iid(url, project_id, mr_iid) or []

-    comments = [Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs]
+    comments = [await Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs]
     await collect_emojis_in_comments(comments, gitlab_conn)


@@ -73,7 +73,7 @@ async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
     projects = {}
     merge_requests = {}
     for comment in comments:
-        mr_job_db = GitlabMergeRequestJobs.get_by_id(comment.merge_request_job_id)
+        mr_job_db = await GitlabMergeRequestJobs.get_by_id(comment.merge_request_job_id)
         if not mr_job_db:
             continue
         if mr_job_db.id not in projects:
@@ -114,7 +114,7 @@ async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
         # because we need to remove them
         old_emojis = [
             reaction.reaction_type
-            for reaction in Reactions.get_all_reactions(
+            for reaction in await Reactions.get_all_reactions(
                 comment.forge,
                 mr_job_db.project_id,
                 mr_job_db.mr_iid,
@@ -123,7 +123,7 @@ async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
             )
         ]
         for key, value in emoji_counts.items():
-            Reactions.create_or_update(
+            await Reactions.create_or_update(
                 comment.forge,
                 mr_job_db.project_id,
                 mr_job_db.mr_iid,
@@ -136,7 +136,7 @@ async def collect_emojis_in_comments(  # pylint: disable=too-many-locals
                 old_emojis.remove(key)

         # not updated reactions has been removed, drop them
-        Reactions.delete(
+        await Reactions.delete(
             comment.forge,
             mr_job_db.project_id,
             mr_job_db.mr_iid,
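The list comprehension `[await Comments.get_by_mr_job(mr_job) for mr_job in mr_jobs]` awaits each lookup in turn, which is the direct translation of the old synchronous loop. If the lookups are independent, `asyncio.gather` can overlap them; a runnable sketch with a stand-in coroutine (not what this release does):

    import asyncio

    async def get_by_mr_job(mr_job: int) -> str:
        """Stand-in for Comments.get_by_mr_job; simulates a DB round trip."""
        await asyncio.sleep(0.1)
        return f"comment-{mr_job}"

    async def main() -> None:
        mr_jobs = [1, 2, 3]
        # Sequential, as in the diff above: roughly 0.3 s in total.
        sequential = [await get_by_mr_job(job) for job in mr_jobs]
        # Concurrent alternative: roughly 0.1 s, if each call owns its own session.
        concurrent = await asyncio.gather(*(get_by_mr_job(job) for job in mr_jobs))
        assert sequential == list(concurrent)

    asyncio.run(main())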
logdetective/server/gitlab.py
CHANGED
@@ -80,7 +80,7 @@ async def process_gitlab_job_event(
     # Check if this is a resubmission of an existing, completed job.
     # If it is, we'll exit out here and not waste time retrieving the logs,
     # running a new analysis or trying to submit a new comment.
-    mr_job_db = GitlabMergeRequestJobs.get_by_details(
+    mr_job_db = await GitlabMergeRequestJobs.get_by_details(
         forge=forge,
         project_id=project.id,
         mr_iid=merge_request_iid,
@@ -109,7 +109,7 @@ async def process_gitlab_job_event(
         compressed_log_content=RemoteLogCompressor.zip_text(log_text),
     )
     staged_response = await perform_staged_analysis(log_text=log_text)
-    update_metrics(metrics_id, staged_response)
+    await update_metrics(metrics_id, staged_response)
     preprocessed_log.close()

     # check if this project is on the opt-in list for posting comments.
@@ -357,13 +357,13 @@ async def comment_on_mr(  # pylint: disable=too-many-arguments disable=too-many-
     await asyncio.to_thread(note.save)

     # Save the new comment to the database
-    metrics = AnalyzeRequestMetrics.get_metric_by_id(metrics_id)
-    Comments.create(
+    metrics = await AnalyzeRequestMetrics.get_metric_by_id(metrics_id)
+    await Comments.create(
         forge,
         project.id,
         merge_request_iid,
         job.id,
-        discussion.id,
+        str(discussion.id),
         metrics,
     )

@@ -378,7 +378,7 @@ async def suppress_latest_comment(
     superseded by a new push."""

     # Ask the database for the last known comment for this MR
-    previous_comment = Comments.get_latest_comment(
+    previous_comment = await Comments.get_latest_comment(
         gitlab_instance, project.id, merge_request_iid
     )

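Worth noting in `comment_on_mr`: python-gitlab is a synchronous client, so blocking calls like `note.save` run on a worker thread via `asyncio.to_thread`, while the new database calls are awaited natively. A generic sketch of that split (`BlockingNote` is a stand-in, not a python-gitlab class):

    import asyncio
    import time

    class BlockingNote:
        """Stand-in for a python-gitlab object with a blocking save()."""

        def save(self) -> None:
            time.sleep(0.2)  # a network round trip; would stall the loop if called inline

    async def post_comment(note: BlockingNote) -> None:
        # Run the blocking call on a thread so other coroutines keep making progress.
        await asyncio.to_thread(note.save)

    asyncio.run(post_comment(BlockingNote()))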
logdetective/server/metric.py
CHANGED
@@ -32,7 +32,7 @@ async def add_new_metrics(
     remote_log = RemoteLog(url, http_session)
     compressed_log_content = await RemoteLogCompressor(remote_log).zip_content()

-    return AnalyzeRequestMetrics.create(
+    return await AnalyzeRequestMetrics.create(
         endpoint=EndpointType(api_name),
         compressed_log=compressed_log_content,
         request_received_at=received_at
@@ -41,7 +41,7 @@ async def add_new_metrics(
     )


-def update_metrics(
+async def update_metrics(
     metrics_id: int,
     response: Union[models.Response, models.StagedResponse, StreamingResponse],
     sent_at: Optional[datetime.datetime] = None,
@@ -73,12 +73,12 @@ def update_metrics(
     response_certainty = (
         response.response_certainty if hasattr(response, "response_certainty") else None
     )
-    AnalyzeRequestMetrics.update(
-        metrics_id,
-        response_sent_at,
-        response_length,
-        response_certainty,
-        compressed_response,
+    await AnalyzeRequestMetrics.update(
+        id_=metrics_id,
+        response_sent_at=response_sent_at,
+        response_length=response_length,
+        response_certainty=response_certainty,
+        compressed_response=compressed_response,
     )


@@ -112,7 +112,7 @@ def track_request(name=None):
                 name if name else f.__name__, log_url, kwargs["http_session"]
             )
            response = await f(*args, **kwargs)
-            update_metrics(metrics_id, response)
+            await update_metrics(metrics_id, response)
            return response

        if inspect.iscoroutinefunction(f):
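`track_request` (its `inspect.iscoroutinefunction(f)` branch is visible in the context above) must now hand back an async wrapper, since `update_metrics` has to be awaited. A stripped-down sketch of that shape, with the metrics calls elided; this is not the actual implementation:

    import functools
    import inspect

    def track_request(name=None):
        """Record metrics around an endpoint function (sketch)."""
        def decorator(f):
            @functools.wraps(f)
            async def async_wrapper(*args, **kwargs):
                response = await f(*args, **kwargs)
                # Real code records metrics here, e.g. await update_metrics(metrics_id, response)
                return response

            @functools.wraps(f)
            def sync_wrapper(*args, **kwargs):
                return f(*args, **kwargs)

            # Return the wrapper matching the decorated function's flavor.
            return async_wrapper if inspect.iscoroutinefunction(f) else sync_wrapper
        return decorator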
logdetective/server/plot.py
CHANGED
@@ -163,7 +163,7 @@ def _add_line_chart(  # pylint: disable=too-many-arguments disable=too-many-posi
     ax.tick_params(axis="y", labelcolor=color)


-def requests_per_time(
+async def requests_per_time(
     period_of_time: TimePeriod,
     endpoint: EndpointType = EndpointType.ANALYZE,
     end_time: Optional[datetime.datetime] = None,
@@ -191,7 +191,7 @@ def requests_per_time(
     end_time = end_time or datetime.datetime.now(datetime.timezone.utc)
     start_time = period_of_time.get_period_start_time(end_time)
     plot_def = Definition(period_of_time)
-    requests_counts = AnalyzeRequestMetrics.get_requests_in_period(
+    requests_counts = await AnalyzeRequestMetrics.get_requests_in_period(
         start_time, end_time, plot_def.time_format, endpoint
     )
     timestamps, counts = create_time_series_arrays(
@@ -218,7 +218,7 @@ def requests_per_time(
     return fig


-def average_time_per_responses(  # pylint: disable=too-many-locals
+async def average_time_per_responses(  # pylint: disable=too-many-locals
     period_of_time: TimePeriod,
     endpoint: EndpointType = EndpointType.ANALYZE,
     end_time: Optional[datetime.datetime] = None,
@@ -246,8 +246,10 @@ def average_time_per_responses(  # pylint: disable=too-many-locals
     end_time = end_time or datetime.datetime.now(datetime.timezone.utc)
     start_time = period_of_time.get_period_start_time(end_time)
     plot_def = Definition(period_of_time)
-    responses_average_time = AnalyzeRequestMetrics.get_responses_average_time_in_period(
-        start_time, end_time, plot_def.time_format, endpoint
+    responses_average_time = (
+        await AnalyzeRequestMetrics.get_responses_average_time_in_period(
+            start_time, end_time, plot_def.time_format, endpoint
+        )
     )
     timestamps, average_time = create_time_series_arrays(
         responses_average_time,
@@ -263,7 +265,7 @@ def average_time_per_responses(  # pylint: disable=too-many-locals
     )

     responses_average_length = (
-        AnalyzeRequestMetrics.get_responses_average_length_in_period(
+        await AnalyzeRequestMetrics.get_responses_average_length_in_period(
             start_time, end_time, plot_def.time_format, endpoint
         )
     )
@@ -292,7 +294,7 @@ def average_time_per_responses(  # pylint: disable=too-many-locals
     return fig


-def _collect_emoji_data(
+async def _collect_emoji_data(
     start_time: datetime.datetime, plot_def: Definition
 ) -> Dict[str, Dict[datetime.datetime, int]]:
     """Collect and organize emoji feedback data
@@ -300,7 +302,7 @@ def _collect_emoji_data(
     Counts all emojis given to logdetective comments created since start_time.
     Collect counts in time accordingly to the plot definition.
     """
-    reactions = Reactions.get_since(start_time)
+    reactions = await Reactions.get_since(start_time)
     reactions_values_dict: Dict[str, Dict] = {}
     for comment_created_at, reaction in reactions:
         comment_created_at_formatted = comment_created_at.strptime(
@@ -369,7 +371,7 @@ def _plot_emoji_data(  # pylint: disable=too-many-locals
     return emoji_lines, emoji_labels


-def emojis_per_time(
+async def emojis_per_time(
     period_of_time: TimePeriod,
     end_time: Optional[datetime.datetime] = None,
 ) -> figure.Figure:
@@ -395,7 +397,7 @@ def emojis_per_time(
     plot_def = Definition(period_of_time)
     end_time = end_time or datetime.datetime.now(datetime.timezone.utc)
     start_time = period_of_time.get_period_start_time(end_time)
-    reactions_values_dict = _collect_emoji_data(start_time, plot_def)
+    reactions_values_dict = await _collect_emoji_data(start_time, plot_def)

     fig, ax = pyplot.subplots(figsize=(12, 6))

logdetective/server/server.py
CHANGED
@@ -67,6 +67,7 @@ from logdetective.server.emoji import (
     collect_emojis_for_mr,
 )
 from logdetective.server.compressors import RemoteLogCompressor
+from logdetective.server.utils import get_version


 LOG_SOURCE_REQUEST_TIMEOUT = os.environ.get("LOG_SOURCE_REQUEST_TIMEOUT", 60)
@@ -89,7 +90,7 @@ async def lifespan(fapp: FastAPI):
     )

     # Ensure that the database is initialized.
-    logdetective.server.database.base.init()
+    await logdetective.server.database.base.init()

     # Start the background task scheduler for collecting emojis
     asyncio.create_task(schedule_collect_emojis_task())
@@ -306,11 +307,10 @@ async def analyze_koji_task(task_id: int, koji_instance_config: KojiInstanceConf
         received_at=datetime.datetime.now(datetime.timezone.utc),
         compressed_log_content=RemoteLogCompressor.zip_text(log_text),
     )
-
     # We need to associate the metric ID with the koji task analysis.
     # This will create the new row without a response, which we will use as
     # an indicator that the analysis is in progress.
-    KojiTaskAnalysis.create_or_restart(
+    await KojiTaskAnalysis.create_or_restart(
         koji_instance=koji_instance_config.xmlrpc_url,
         task_id=task_id,
         log_file_name=log_file_name,
@@ -319,8 +319,8 @@ async def analyze_koji_task(task_id: int, koji_instance_config: KojiInstanceConf

     # Now that we have the response, we can update the metrics and mark the
     # koji task analysis as completed.
-    update_metrics(metrics_id, response)
-    KojiTaskAnalysis.add_response(task_id, metrics_id)
+    await update_metrics(metrics_id, response)
+    await KojiTaskAnalysis.add_response(task_id, metrics_id)

     # Notify any callbacks that the analysis is complete.
     for callback in koji_instance_config.get_callbacks(task_id):
@@ -357,6 +357,12 @@ async def async_log(msg):
     return msg


+@app.get("/version")
+async def get_version_wrapper():
+    """Get the version of logdetective"""
+    return get_version()
+
+
 @app.post("/analyze/stream", response_class=StreamingResponse)
 @track_request()
 async def analyze_log_stream(
@@ -616,22 +622,24 @@ async def get_metrics(
     async def handler():
         """Show statistics for the specified endpoint and plot."""
         if plot == Plot.REQUESTS:
-            fig = plot_engine.requests_per_time(period_since_now, endpoint_type)
+            fig = await plot_engine.requests_per_time(period_since_now, endpoint_type)
             return _svg_figure_response(fig)
         if plot == Plot.RESPONSES:
-            fig = plot_engine.average_time_per_responses(
+            fig = await plot_engine.average_time_per_responses(
                 period_since_now, endpoint_type
             )
             return _svg_figure_response(fig)
         if plot == Plot.EMOJIS:
-            fig = plot_engine.emojis_per_time(period_since_now)
+            fig = await plot_engine.emojis_per_time(period_since_now)
             return _svg_figure_response(fig)
         # BOTH
-        fig_requests = plot_engine.requests_per_time(period_since_now, endpoint_type)
-        fig_responses = plot_engine.average_time_per_responses(
+        fig_requests = await plot_engine.requests_per_time(
+            period_since_now, endpoint_type
+        )
+        fig_responses = await plot_engine.average_time_per_responses(
             period_since_now, endpoint_type
         )
-        fig_emojis = plot_engine.emojis_per_time(period_since_now)
+        fig_emojis = await plot_engine.emojis_per_time(period_since_now)
         return _multiple_svg_figures_response([fig_requests, fig_responses, fig_emojis])

     descriptions = {
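The new `/version` route wraps `get_version()` from `logdetective/server/utils.py` (added below). Once a server is running it can be probed directly; host and port here are illustrative:

    from urllib.request import urlopen

    # Assumes a locally running server; adjust host/port to your deployment.
    with urlopen("http://localhost:8080/version") as resp:
        print(resp.read().decode())  # e.g. "2.5.0"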
logdetective/server/utils.py
CHANGED
@@ -1,7 +1,9 @@
 from typing import List
+from importlib.metadata import version

 import aiohttp
 from fastapi import HTTPException
+from fastapi.responses import Response as BasicResponse

 from logdetective.constants import SNIPPET_DELIMITER
 from logdetective.server.config import LOG
@@ -102,3 +104,8 @@ def construct_final_prompt(formatted_snippets: str, prompt_template: str) -> str:

     final_prompt = prompt_template.format(formatted_snippets)
     return final_prompt
+
+
+def get_version() -> BasicResponse:
+    """Obtain the version number using importlib"""
+    return BasicResponse(content=version('logdetective'))