argus-alm 0.12.2__py3-none-any.whl → 0.12.4b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- argus/backend/cli.py +1 -1
- argus/backend/controller/admin_api.py +26 -0
- argus/backend/controller/api.py +26 -1
- argus/backend/controller/main.py +21 -0
- argus/backend/controller/testrun_api.py +132 -1
- argus/backend/controller/view_api.py +162 -0
- argus/backend/models/web.py +16 -0
- argus/backend/plugins/core.py +28 -5
- argus/backend/plugins/driver_matrix_tests/controller.py +39 -0
- argus/backend/plugins/driver_matrix_tests/model.py +252 -4
- argus/backend/plugins/driver_matrix_tests/raw_types.py +27 -0
- argus/backend/plugins/driver_matrix_tests/service.py +18 -0
- argus/backend/plugins/driver_matrix_tests/udt.py +14 -13
- argus/backend/plugins/generic/model.py +6 -3
- argus/backend/plugins/loader.py +2 -2
- argus/backend/plugins/sct/controller.py +31 -0
- argus/backend/plugins/sct/plugin.py +2 -1
- argus/backend/plugins/sct/service.py +101 -3
- argus/backend/plugins/sct/testrun.py +8 -2
- argus/backend/plugins/sct/types.py +18 -0
- argus/backend/plugins/sct/udt.py +6 -0
- argus/backend/plugins/sirenada/model.py +1 -1
- argus/backend/service/argus_service.py +116 -11
- argus/backend/service/build_system_monitor.py +37 -7
- argus/backend/service/jenkins_service.py +176 -1
- argus/backend/service/release_manager.py +14 -0
- argus/backend/service/stats.py +179 -21
- argus/backend/service/testrun.py +44 -5
- argus/backend/service/views.py +258 -0
- argus/backend/template_filters.py +7 -0
- argus/backend/util/common.py +14 -2
- argus/client/driver_matrix_tests/cli.py +110 -0
- argus/client/driver_matrix_tests/client.py +56 -193
- argus/client/sct/client.py +34 -0
- argus_alm-0.12.4b1.dist-info/METADATA +129 -0
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.4b1.dist-info}/RECORD +39 -36
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.4b1.dist-info}/WHEEL +1 -1
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.4b1.dist-info}/entry_points.txt +1 -0
- argus_alm-0.12.2.dist-info/METADATA +0 -206
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.4b1.dist-info}/LICENSE +0 -0
argus/backend/service/stats.py
CHANGED
@@ -1,14 +1,17 @@
+from collections import defaultdict
+from functools import reduce
 import logging
 
 from datetime import datetime
 from typing import TypedDict
 from uuid import UUID
 
+from cassandra.cqlengine.models import Model
 from argus.backend.plugins.loader import all_plugin_models
-from argus.backend.util.common import get_build_number
+from argus.backend.util.common import chunk, get_build_number
 from argus.backend.util.enums import TestStatus, TestInvestigationStatus
 from argus.backend.models.web import ArgusGithubIssue, ArgusRelease, ArgusGroup, ArgusTest,\
-    ArgusScheduleTest, ArgusTestRunComment
+    ArgusScheduleTest, ArgusTestRunComment, ArgusUserView
 from argus.backend.db import ScyllaCluster
 
 LOGGER = logging.getLogger(__name__)
@@ -140,6 +143,96 @@ def generate_field_status_map(
         status_map[run_number] = (run[field_name], run)
     return status_map
 
+class ViewStats:
+    def __init__(self, release: ArgusUserView) -> None:
+        self.release = release
+        self.groups: list[GroupStats] = []
+        self.status_map = {status: 0 for status in TestStatus}
+        self.total_tests = 0
+        self.last_status = TestStatus.NOT_PLANNED
+        self.last_investigation_status = TestInvestigationStatus.NOT_INVESTIGATED
+        self.has_bug_report = False
+        self.issues: list[ArgusGithubIssue] = []
+        self.comments: list[ArgusTestRunComment] = []
+        self.test_schedules: dict[UUID, ArgusScheduleTest] = {}
+        self.forced_collection = False
+        self.rows = []
+        self.releases = {}
+        self.all_tests = []
+
+    def to_dict(self) -> dict:
+        converted_groups = {str(group.group.id): group.to_dict() for group in self.groups}
+        aggregated_investigation_status = {}
+        for group in converted_groups.values():
+            for investigation_status in TestInvestigationStatus:
+                current_status = aggregated_investigation_status.get(investigation_status.value, {})
+                result = {
+                    status.value: current_status.get(status.value, 0) + group.get(investigation_status.value, {}).get(status, 0)
+                    for status in TestStatus
+                }
+                aggregated_investigation_status[investigation_status.value] = result
+
+        return {
+            "release": dict(self.release.items()),
+            "releases": self.releases,
+            "groups": converted_groups,
+            "total": self.total_tests,
+            **self.status_map,
+            "disabled": False,
+            "perpetual": False,
+            "lastStatus": self.last_investigation_status,
+            "lastInvestigationStatus": self.last_investigation_status,
+            "hasBugReport": self.has_bug_report,
+            **aggregated_investigation_status
+        }
+
+    def _fetch_multiple_release_queries(self, entity: Model, releases: list[str]):
+        result_set = []
+        for release_id in releases:
+            result_set.extend(entity.filter(release_id=release_id).all())
+        return result_set
+
+    def collect(self, rows: list[TestRunStatRow], limited=False, force=False, dict: dict[str, TestRunStatRow] | None = None, tests: list[ArgusTest] = None) -> None:
+        self.forced_collection = force
+        all_release_ids = list({t.release_id for t in tests})
+        if not limited:
+            self.test_schedules = reduce(
+                lambda acc, row: acc[row["test_id"]].append(row) or acc,
+                self._fetch_multiple_release_queries(ArgusScheduleTest, all_release_ids),
+                defaultdict(list)
+            )
+
+        self.rows = rows
+        self.dict = dict
+        if not limited or force:
+            self.issues = reduce(
+                lambda acc, row: acc[row["run_id"]].append(row) or acc,
+                self._fetch_multiple_release_queries(ArgusGithubIssue, all_release_ids),
+                defaultdict(list)
+            )
+            self.comments = reduce(
+                lambda acc, row: acc[row["test_run_id"]].append(row) or acc,
+                self._fetch_multiple_release_queries(ArgusTestRunComment, all_release_ids),
+                defaultdict(list)
+            )
+        self.all_tests = tests
+        groups = []
+        for slice in chunk(list({t.release_id for t in tests})):
+            self.releases.update({str(release.id): release for release in ArgusRelease.filter(id__in=slice).all()})
+
+        for slice in chunk(list({t.group_id for t in tests})):
+            groups.extend(ArgusGroup.filter(id__in=slice).all())
+        for group in groups:
+            if group.enabled:
+                stats = GroupStats(group=group, parent_release=self)
+                stats.collect(limited=limited)
+                self.groups.append(stats)
+
+    def increment_status(self, status=TestStatus.NOT_PLANNED):
+        self.total_tests += 1
+        self.status_map[TestStatus(status)] += 1
+        self.last_status = TestStatus(status)
+
 
 class ReleaseStats:
     def __init__(self, release: ArgusRelease) -> None:
@@ -152,7 +245,7 @@ class ReleaseStats:
         self.has_bug_report = False
         self.issues: list[ArgusGithubIssue] = []
         self.comments: list[ArgusTestRunComment] = []
-        self.test_schedules:
+        self.test_schedules: dict[UUID, ArgusScheduleTest] = {}
         self.forced_collection = False
         self.rows = []
         self.all_tests = []
@@ -182,21 +275,32 @@ class ReleaseStats:
             **aggregated_investigation_status
         }
 
-    def collect(self, rows: list[TestRunStatRow], limited=False, force=False) -> None:
+    def collect(self, rows: list[TestRunStatRow], limited=False, force=False, dict: dict | None = None, tests=None) -> None:
         self.forced_collection = force
         if not self.release.enabled and not force:
             return
 
         if not self.release.perpetual and not limited:
-            self.test_schedules =
-
-
+            self.test_schedules = reduce(
+                lambda acc, row: acc[row["test_id"]].append(row) or acc,
+                ArgusScheduleTest.filter(release_id=self.release.id).all(),
+                defaultdict(list)
+            )
 
         self.rows = rows
+        self.dict = dict
         if not limited or force:
-            self.issues =
-
-
+            self.issues = reduce(
+                lambda acc, row: acc[row["run_id"]].append(row) or acc,
+                ArgusGithubIssue.filter(release_id=self.release.id).all(),
+                defaultdict(list)
+            )
+            self.comments = reduce(
+                lambda acc, row: acc[row["test_run_id"]].append(row) or acc,
+                ArgusTestRunComment.filter(release_id=self.release.id).all(),
+                defaultdict(list)
+            )
+        self.all_tests = ArgusTest.filter(release_id=self.release.id).all() if not tests else tests
         groups: list[ArgusGroup] = ArgusGroup.filter(release_id=self.release.id).all()
         for group in groups:
             if group.enabled:
@@ -250,8 +354,7 @@ class GroupStats:
                 stats = TestStats(
                     test=test,
                     parent_group=self,
-                    schedules=
-                        schedule for schedule in self.parent_release.test_schedules if schedule.test_id == test.id)
+                    schedules=self.parent_release.test_schedules.get(test.id, [])
                 )
                 stats.collect(limited=limited)
                 self.tests.append(stats)
@@ -292,13 +395,17 @@ class TestStats:
             "hasBugReport": self.has_bug_report,
             "hasComments": self.has_comments,
             "buildNumber": self.tracked_run_number,
+            "buildId": self.test.build_system_id,
         }
 
     def collect(self, limited=False):
 
        # TODO: Parametrize run limit
        # FIXME: This is only a mitigation, build_number overflows on the build system side.
-
+        if not self.parent_group.parent_release.dict:
+            last_runs = [r for r in self.parent_group.parent_release.rows if r["build_id"] == self.test.build_system_id]
+        else:
+            last_runs = self.parent_group.parent_release.dict.get(self.test.build_system_id, [])
         last_runs: list[TestRunStatRow] = sorted(
             last_runs, reverse=True, key=lambda r: get_build_number(r["build_job_url"]))
         try:
@@ -326,8 +433,8 @@
                 "build_job_name": run["build_id"],
                 "start_time": run["start_time"],
                 "assignee": run["assignee"],
-                "issues": [dict(
-                "comments": [dict(
+                "issues": [dict(issue.items()) for issue in self.parent_group.parent_release.issues[run["id"]]],
+                "comments": [dict(comment.items()) for comment in self.parent_group.parent_release.comments[run["id"]]],
             }
             for run in last_runs
         ]
@@ -335,8 +442,8 @@
             target_run = next(run for run in self.last_runs if run["id"] == worst_case[1]["id"])
         except StopIteration:
             target_run = worst_case[1]
-        target_run["issues"] = [dict(
-        target_run["comments"] = [dict(
+        target_run["issues"] = [dict(issue.items()) for issue in self.parent_group.parent_release.issues[target_run["id"]]]
+        target_run["comments"] = [dict(comment.items()) for comment in self.parent_group.parent_release.comments[target_run["id"]]]
         self.has_bug_report = len(target_run["issues"]) > 0
         self.parent_group.parent_release.has_bug_report = self.has_bug_report or self.parent_group.parent_release.has_bug_report
         self.has_comments = len(target_run["comments"]) > 0
@@ -356,13 +463,15 @@ class ReleaseStatsCollector:
 
     def collect(self, limited=False, force=False, include_no_version=False) -> dict:
         self.release: ArgusRelease = ArgusRelease.get(name=self.release_name)
-
-
+        all_tests: list[ArgusTest] = list(ArgusTest.filter(release_id=self.release.id).all())
+        build_ids = reduce(lambda acc, test: acc[test.plugin_name or "unknown"].append(test.build_system_id) or acc, all_tests, defaultdict(list))
+        self.release_rows = [futures for plugin in all_plugin_models()
+                             for futures in plugin.get_stats_for_release(release=self.release, build_ids=build_ids.get(plugin._plugin_name, []))]
+        self.release_rows = [row for future in self.release_rows for row in future.result()]
         if self.release.dormant and not force:
             return {
                 "dormant": True
             }
-
         if self.release_version:
             if include_no_version:
                 expr = lambda row: row["scylla_version"] == self.release_version or not row["scylla_version"]
@@ -376,7 +485,56 @@ class ReleaseStatsCollector:
             else:
                 expr = lambda row: row["scylla_version"]
         self.release_rows = list(filter(expr, self.release_rows))
+        self.release_dict = {}
+        for row in self.release_rows:
+            runs = self.release_dict.get(row["build_id"], [])
+            runs.append(row)
+            self.release_dict[row["build_id"]] = runs
 
         self.release_stats = ReleaseStats(release=self.release)
-        self.release_stats.collect(rows=self.release_rows, limited=limited, force=force)
+        self.release_stats.collect(rows=self.release_rows, limited=limited, force=force, dict=self.release_dict, tests=all_tests)
         return self.release_stats.to_dict()
+
+
+class ViewStatsCollector:
+    def __init__(self, view_id: UUID, filter: str | None = None) -> None:
+        self.database = ScyllaCluster.get()
+        self.session = self.database.get_session()
+        self.view = None
+        self.view_stats = None
+        self.view_rows = []
+        self.runs_by_build_id = {}
+        self.view_id = view_id
+        self.filter = filter
+
+    def collect(self, limited=False, force=False, include_no_version=False) -> dict:
+        self.view: ArgusUserView = ArgusUserView.get(id=self.view_id)
+        all_tests: list[ArgusTest] = []
+        for slice in chunk(self.view.tests):
+            all_tests.extend(ArgusTest.filter(id__in=slice).all())
+        build_ids = reduce(lambda acc, test: acc[test.plugin_name or "unknown"].append(test.build_system_id) or acc, all_tests, defaultdict(list))
+        self.view_rows = [futures for plugin in all_plugin_models()
+                          for futures in plugin.get_stats_for_release(release=self.view, build_ids=build_ids.get(plugin._plugin_name, []))]
+        self.view_rows = [row for future in self.view_rows for row in future.result()]
+
+        if self.filter:
+            if include_no_version:
+                expr = lambda row: row["scylla_version"] == self.filter or not row["scylla_version"]
+            elif self.filter == "!noVersion":
+                expr = lambda row: not row["scylla_version"]
+            else:
+                expr = lambda row: row["scylla_version"] == self.filter
+        else:
+            if include_no_version:
+                expr = lambda row: row
+            else:
+                expr = lambda row: row["scylla_version"]
+        self.view_rows = list(filter(expr, self.view_rows))
+        for row in self.view_rows:
+            runs = self.runs_by_build_id.get(row["build_id"], [])
+            runs.append(row)
+            self.runs_by_build_id[row["build_id"]] = runs
+
+        self.view_stats = ViewStats(release=self.view)
+        self.view_stats.collect(rows=self.view_rows, limited=limited, force=force, dict=self.runs_by_build_id, tests=all_tests)
+        return self.view_stats.to_dict()
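The stats.py changes introduce ViewStats and ViewStatsCollector, which mirror ReleaseStats/ReleaseStatsCollector but resolve their test list from an ArgusUserView and pre-group rows by build_id. The route that drives them is not shown in this section (it presumably lives in the new view_api.py), so the sketch below only illustrates how the collector's public surface fits together; the wrapper function name and the version argument are placeholders, not code from the package.

    from uuid import UUID

    from argus.backend.service.stats import ViewStatsCollector


    def view_stats_for_dashboard(view_id: UUID, version: str | None = None) -> dict:
        # "filter" narrows rows by scylla_version; "!noVersion" keeps only
        # rows that have no version, matching the branches in collect().
        collector = ViewStatsCollector(view_id=view_id, filter=version)
        return collector.collect(limited=False, force=False, include_no_version=False)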
argus/backend/service/testrun.py
CHANGED
@@ -1,4 +1,6 @@
+from collections import defaultdict
 from datetime import datetime, timedelta
+from functools import reduce
 import json
 import logging
 import re
@@ -22,6 +24,7 @@ from argus.backend.models.web import (
     ArgusRelease,
     ArgusTest,
     ArgusTestRunComment,
+    ArgusUserView,
     User,
     UserOauthToken,
 )
@@ -33,7 +36,7 @@ from argus.backend.events.event_processors import EVENT_PROCESSORS
 from argus.backend.service.event_service import EventService
 from argus.backend.service.notification_manager import NotificationManagerService
 from argus.backend.service.stats import ComparableTestStatus
-from argus.backend.util.common import get_build_number, strip_html_tags
+from argus.backend.util.common import chunk, get_build_number, strip_html_tags
 from argus.backend.util.enums import TestInvestigationStatus, TestStatus
 
 LOGGER = logging.getLogger(__name__)
@@ -163,6 +166,12 @@ class TestRunService:
     def change_run_assignee(self, test_id: UUID, run_id: UUID, new_assignee: UUID | None):
         test = ArgusTest.get(id=test_id)
         plugin = self.get_plugin(plugin_name=test.plugin_name)
+        if not plugin:
+            return {
+                "test_run_id": run.id,
+                "assignee": None
+            }
+
         run: PluginModelBase = plugin.model.get(id=run_id)
         old_assignee = run.assignee
         run.assignee = new_assignee
@@ -219,6 +228,7 @@ class TestRunService:
         plugin = self.get_plugin(test.plugin_name)
         release: ArgusRelease = ArgusRelease.get(id=test.release_id)
         comment = ArgusTestRunComment()
+        comment.test_id = test.id
         comment.message = message_stripped
         comment.reactions = reactions
         comment.mentions = [m.id for m in mentions]
@@ -374,13 +384,23 @@ class TestRunService:
 
         return response
 
+    def _get_github_issues_for_view(self, view_id: UUID | str) -> list[ArgusGithubIssue]:
+        view: ArgusUserView = ArgusUserView.get(id=view_id)
+        issues = []
+        for batch in chunk(view.tests):
+            issues.extend(ArgusGithubIssue.filter(test_id__in=batch).allow_filtering().all())
+
+        return issues
+
     def get_github_issues(self, filter_key: str, filter_id: UUID, aggregate_by_issue: bool = False) -> dict:
-        if filter_key not in ["release_id", "group_id", "test_id", "run_id", "user_id"]:
+        if filter_key not in ["release_id", "group_id", "test_id", "run_id", "user_id", "view_id"]:
             raise Exception(
-                "filter_key can only be one of: \"release_id\", \"group_id\", \"test_id\", \"run_id\", \"user_id\""
+                "filter_key can only be one of: \"release_id\", \"group_id\", \"test_id\", \"run_id\", \"user_id\", \"view_id\""
             )
-
-
+        if filter_key == "view_id":
+            all_issues = self._get_github_issues_for_view(filter_id)
+        else:
+            all_issues = ArgusGithubIssue.filter(**{filter_key: filter_id}).all()
         if aggregate_by_issue:
             runs_by_issue = {}
             response = []
@@ -398,6 +418,25 @@ class TestRunService:
             response = [dict(issue.items()) for issue in all_issues]
         return response
 
+    def resolve_run_build_id_and_number_multiple(self, runs: list[tuple[UUID, UUID]]) -> dict[UUID, dict[str, Any]]:
+        test_ids = [r[0] for r in runs]
+        all_tests: list = []
+        for id_slice in chunk(test_ids):
+            all_tests.extend(ArgusTest.filter(id__in=id_slice).all())
+
+        tests: dict[str, ArgusTest] = {str(t.id): t for t in all_tests}
+        runs_by_plugin = reduce(lambda acc, val: acc[tests[val[0]].plugin_name].append(val[1]) or acc, runs, defaultdict(list))
+        all_runs = {}
+        for plugin, run_ids in runs_by_plugin.items():
+            model = AVAILABLE_PLUGINS.get(plugin).model
+            model_runs = []
+            for run_id in run_ids:
+                model_runs.append(model.filter(id=run_id).only(["build_id", "start_time", "build_job_url", "id", "test_id"]).get())
+            all_runs.update({ str(run["id"]): {**run, "build_number": get_build_number(run["build_job_url"])} for run in model_runs })
+
+        return all_runs
+
+
     def delete_github_issue(self, issue_id: UUID) -> dict:
         issue: ArgusGithubIssue = ArgusGithubIssue.get(id=issue_id)
 
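testrun.py gains two view-related helpers: get_github_issues() now accepts "view_id" as a filter_key (fanning out over the view's tests in chunks), and resolve_run_build_id_and_number_multiple() bulk-resolves build ids and build numbers for (test_id, run_id) pairs that may belong to different plugins. Below is a minimal sketch of calling both, assuming an already-constructed TestRunService; the helper function name and the UUID pairs are invented for illustration.

    from uuid import UUID

    from argus.backend.service.testrun import TestRunService


    def issues_and_builds_for_view(service: TestRunService, view_id: UUID,
                                   runs: list[tuple[UUID, UUID]]):
        # "view_id" is now an accepted filter_key; it resolves the view's tests
        # in batches via the new _get_github_issues_for_view() helper.
        issues = service.get_github_issues(filter_key="view_id", filter_id=view_id)
        # Resolve build_id / build_number per run across plugins in one call.
        builds = service.resolve_run_build_id_and_number_multiple(runs)
        return issues, builds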
argus/backend/service/views.py
ADDED
@@ -0,0 +1,258 @@
+import datetime
+import logging
+import re
+from functools import partial, reduce
+from typing import Any, Callable, TypedDict
+from urllib.parse import unquote
+from uuid import UUID
+
+from cassandra.cqlengine.models import Model
+from argus.backend.models.web import ArgusGroup, ArgusRelease, ArgusTest, ArgusUserView, User
+from argus.backend.plugins.loader import all_plugin_models
+from argus.backend.util.common import chunk, current_user
+
+LOGGER = logging.getLogger(__name__)
+
+
+class UserViewException(Exception):
+    pass
+
+
+class ViewUpdateRequest(TypedDict):
+    name: str
+    description: str
+    display_name: str
+    tests: list[str]
+    widget_settings: str
+
+
+class UserViewService:
+    ADD_ALL_ID = UUID("db6f33b2-660b-4639-ba7f-79725ef96616")
+    def create_view(self, name: str, items: list[str], widget_settings: str, description: str = None, display_name: str = None) -> ArgusUserView:
+        try:
+            name_check = ArgusUserView.get(name=name)
+            raise UserViewException(f"View with name {name} already exists: {name_check.id}", name, name_check, name_check.id)
+        except ArgusUserView.DoesNotExist:
+            pass
+        view = ArgusUserView()
+        view.name = name
+        view.display_name = display_name or name
+        view.description = description
+        view.widget_settings = widget_settings
+        view.tests = []
+        for entity in items:
+            entity_type, entity_id = entity.split(":")
+            match (entity_type):
+                case "release":
+                    view.tests.extend(t.id for t in ArgusTest.filter(release_id=entity_id).all())
+                    view.release_ids.append(entity_id)
+                case "group":
+                    view.tests.extend(t.id for t in ArgusTest.filter(group_id=entity_id).all())
+                    view.group_ids.append(entity_id)
+                case "test":
+                    view.tests.append(entity_id)
+        view.user_id = current_user().id
+
+        view.save()
+        return view
+
+    @staticmethod
+    def index_mapper(item: Model, type = "test"):
+        mapped = dict(item)
+        mapped["type"] = type
+        return mapped
+
+    def test_lookup(self, query: str):
+        def check_visibility(entity: dict):
+            if not entity["enabled"]:
+                return False
+            if entity.get("group") and not entity["group"]["enabled"]:
+                return False
+            if entity.get("release") and not entity["release"]["enabled"]:
+                return False
+            return True
+
+        def facet_extraction(query: str) -> str:
+            extractor = re.compile(r"(?:(?P<name>(?:release|group|type)):(?P<value>\"?[\w\d\.\-]*\"?))")
+            facets = re.findall(extractor, query)
+
+            return (re.sub(extractor, "", query).strip(), facets)
+
+        def type_facet_filter(item: dict, key: str, facet_query: str):
+            entity_type: str = item[key]
+            return facet_query.lower() == entity_type
+
+        def facet_filter(item: dict, key: str, facet_query: str):
+            if entity := item.get(key):
+                name: str = entity.get("pretty_name") or entity.get("name")
+                return facet_query.lower() in name.lower() if name else False
+            return False
+
+        def facet_wrapper(query_func: Callable[[dict], bool], facet_query: str, facet_type: str) -> bool:
+            def inner(item: dict, query: str):
+                return query_func(item, query) and facet_funcs[facet_type](item, facet_type, facet_query)
+            return inner
+
+        facet_funcs = {
+            "type": type_facet_filter,
+            "release": facet_filter,
+            "group": facet_filter,
+        }
+
+        def index_searcher(item, query: str):
+            name: str = item["pretty_name"] or item["name"]
+            return unquote(query).lower() in name.lower() if query else True
+
+        text_query, facets = facet_extraction(query)
+        search_func = index_searcher
+        for facet, value in facets:
+            if facet in facet_funcs.keys():
+                search_func = facet_wrapper(query_func=search_func, facet_query=value, facet_type=facet)
+
+
+        all_tests = ArgusTest.all()
+        all_releases = ArgusRelease.all()
+        all_groups = ArgusGroup.all()
+        release_by_id = {release.id: partial(self.index_mapper, type="release")(release) for release in all_releases}
+        group_by_id = {group.id: partial(self.index_mapper, type="group")(group) for group in all_groups}
+        index = [self.index_mapper(t) for t in all_tests]
+        index = [*release_by_id.values(), *group_by_id.values(), *index]
+        for item in index:
+            item["group"] = group_by_id.get(item.get("group_id"))
+            item["release"] = release_by_id.get(item.get("release_id"))
+
+        results = filter(partial(search_func, query=text_query), index)
+
+        return [{ "id": self.ADD_ALL_ID, "name": "Add all...", "type": "special" }, *list(res for res in results if check_visibility(res))]
+
+    def update_view(self, view_id: str | UUID, update_data: ViewUpdateRequest) -> bool:
+        view: ArgusUserView = ArgusUserView.get(id=view_id)
+        if view.user_id != current_user().id and not current_user().is_admin():
+            raise UserViewException("Unable to modify other users' views")
+        for key in ["user_id", "id"]:
+            update_data.pop(key, None)
+        items = update_data.pop("items")
+        for k, value in update_data.items():
+            view[k] = value
+        view.tests = []
+        view.release_ids = []
+        view.group_ids = []
+        for entity in items:
+            entity_type, entity_id = entity.split(":")
+            match (entity_type):
+                case "release":
+                    view.tests.extend(t.id for t in ArgusTest.filter(release_id=entity_id).all())
+                    view.release_ids.append(entity_id)
+                case "group":
+                    view.tests.extend(t.id for t in ArgusTest.filter(group_id=entity_id).all())
+                    view.group_ids.append(entity_id)
+                case "test":
+                    view.tests.append(entity_id)
+        view.last_updated = datetime.datetime.utcnow()
+        view.save()
+        return True
+
+    def delete_view(self, view_id: str | UUID) -> bool:
+        view = ArgusUserView.get(id=view_id)
+        if view.user_id != current_user().id and not current_user().is_admin():
+            raise UserViewException("Unable to modify other users' views")
+        view.delete()
+
+        return True
+
+    def get_view(self, view_id: str | UUID) -> ArgusUserView:
+        view: ArgusUserView = ArgusUserView.get(id=view_id)
+        if datetime.datetime.utcnow() - (view.last_updated or datetime.datetime.fromtimestamp(0)) > datetime.timedelta(hours=1):
+            self.refresh_stale_view(view)
+        return view
+
+    def get_view_by_name(self, view_name: str) -> ArgusUserView:
+        view: ArgusUserView = ArgusUserView.get(name=view_name)
+        if datetime.datetime.utcnow() - (view.last_updated or datetime.datetime.fromtimestamp(0)) > datetime.timedelta(hours=1):
+            self.refresh_stale_view(view)
+        return view
+
+    def get_all_views(self, user: User | None = None) -> list[ArgusUserView]:
+        if user:
+            return list(ArgusUserView.filter(user_id=user.id).all())
+        return list(ArgusUserView.filter().all())
+
+    def resolve_view_tests(self, view_id: str | UUID) -> list[ArgusTest]:
+        view = ArgusUserView.get(id=view_id)
+        return self.resolve_tests_by_id(view.tests)
+
+    def resolve_tests_by_id(self, test_ids: list[str | UUID]) -> list[ArgusTest]:
+        tests = []
+        for batch in chunk(test_ids):
+            tests.extend(ArgusTest.filter(id__in=batch).all())
+
+        return tests
+
+    def batch_resolve_entity(self, entity: Model, param_name: str, entity_ids: list[UUID]) -> list[Model]:
+        result = []
+        for batch in chunk(entity_ids):
+            result.extend(entity.filter(**{f"{param_name}__in": batch}).allow_filtering().all())
+        return result
+
+    def refresh_stale_view(self, view: ArgusUserView):
+        view.tests = [test.id for test in self.resolve_view_tests(view.id)]
+        all_tests = set(view.tests)
+        all_tests.update(test.id for test in self.batch_resolve_entity(ArgusTest, "group_id", view.group_ids))
+        all_tests.update(test.id for test in self.batch_resolve_entity(ArgusTest, "release_id", view.release_ids))
+        view.tests = list(all_tests)
+        view.last_updated = datetime.datetime.utcnow()
+        view.save()
+
+        return view
+
+    def resolve_releases_for_tests(self, tests: list[ArgusTest]):
+        releases = []
+        unique_release_ids = reduce(lambda releases, test: releases.add(test.release_id) or releases, tests, set())
+        for batch in chunk(unique_release_ids):
+            releases.extend(ArgusRelease.filter(id__in=batch).all())
+
+        return releases
+
+    def resolve_groups_for_tests(self, tests: list[ArgusTest]):
+        releases = []
+        unique_release_ids = reduce(lambda groups, test: groups.add(test.group_id) or groups, tests, set())
+        for batch in chunk(unique_release_ids):
+            releases.extend(ArgusGroup.filter(id__in=batch).all())
+
+        return releases
+
+    def get_versions_for_view(self, view_id: str | UUID) -> list[str]:
+        tests = self.resolve_view_tests(view_id)
+        unique_versions = {ver for plugin in all_plugin_models()
+                           for ver in plugin.get_distinct_versions_for_view(tests=tests)}
+
+        return sorted(list(unique_versions), reverse=True)
+
+    def resolve_view_for_edit(self, view_id: str | UUID) -> dict:
+        view: ArgusUserView = ArgusUserView.get(id=view_id)
+        resolved = dict(view)
+        view_groups = self.batch_resolve_entity(ArgusGroup, "id", view.group_ids)
+        view_releases = self.batch_resolve_entity(ArgusRelease, "id", view.release_ids)
+        view_tests = self.resolve_view_tests(view.id)
+        all_groups = { group.id: partial(self.index_mapper, type="group")(group) for group in self.resolve_releases_for_tests(view_tests) }
+        all_releases ={ release.id: partial(self.index_mapper, type="release")(release) for release in self.resolve_releases_for_tests(view_tests) }
+        entities_by_id = {
+            entity.id: partial(self.index_mapper, type="release" if isinstance(entity, ArgusRelease) else "group")(entity)
+            for container in [view_releases, view_groups]
+            for entity in container
+        }
+
+        items = []
+        for test in view_tests:
+            if not (entities_by_id.get(test.group_id) or entities_by_id.get(test.release_id)):
+                item = dict(test)
+                item["type"] = "test"
+                items.append(item)
+
+        items = [*entities_by_id.values(), *items]
+        for entity in items:
+            entity["group"] = all_groups.get(entity.get("group_id"), {}).get("pretty_name") or all_groups.get(entity.get("group_id"), {}).get("name")
+            entity["release"] = all_releases.get(entity.get("release_id"), {}).get("pretty_name") or all_releases.get(entity.get("release_id"), {}).get("name")
+
+        resolved["items"] = items
+        return resolved
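views.py is a new module. UserViewService builds user-defined views out of "<type>:<uuid>" item strings (release:, group:, test:), keeps the flattened test list fresh via refresh_stale_view(), and offers a faceted test_lookup() that understands release:, group: and type: prefixes in the query text. The sketch below is only an illustration under the assumption of an authenticated request context (create_view() calls current_user()); the names and UUIDs are placeholders.

    from argus.backend.service.views import UserViewService

    service = UserViewService()
    view = service.create_view(
        name="my-view",
        items=[
            "release:11111111-1111-1111-1111-111111111111",  # all tests of a release
            "group:22222222-2222-2222-2222-222222222222",    # all tests of a group
            "test:33333333-3333-3333-3333-333333333333",     # a single test
        ],
        widget_settings="{}",
        description="Runs I care about",
    )

    # Free text plus facets: test entities whose release name contains "5.2".
    matches = service.test_lookup("type:test release:5.2 longevity")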
argus/backend/template_filters.py
CHANGED
@@ -18,3 +18,10 @@ def safe_user(user: User):
     user_dict = dict(user.items())
     del user_dict["password"]
     return user_dict
+
+
+@is_filter("formatted_date")
+def formatted_date(date: datetime | None):
+    if date:
+        return date.strftime("%d/%m/%Y %H:%M:%S")
+    return "#unknown"