argus-alm 0.14.2__py3-none-any.whl → 0.15.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- argus/_version.py +21 -0
- argus/backend/.gitkeep +0 -0
- argus/backend/__init__.py +0 -0
- argus/backend/cli.py +57 -0
- argus/backend/controller/__init__.py +0 -0
- argus/backend/controller/admin.py +20 -0
- argus/backend/controller/admin_api.py +355 -0
- argus/backend/controller/api.py +589 -0
- argus/backend/controller/auth.py +67 -0
- argus/backend/controller/client_api.py +109 -0
- argus/backend/controller/main.py +316 -0
- argus/backend/controller/notification_api.py +72 -0
- argus/backend/controller/notifications.py +13 -0
- argus/backend/controller/planner_api.py +194 -0
- argus/backend/controller/team.py +129 -0
- argus/backend/controller/team_ui.py +19 -0
- argus/backend/controller/testrun_api.py +513 -0
- argus/backend/controller/view_api.py +188 -0
- argus/backend/controller/views_widgets/__init__.py +0 -0
- argus/backend/controller/views_widgets/graphed_stats.py +54 -0
- argus/backend/controller/views_widgets/graphs.py +68 -0
- argus/backend/controller/views_widgets/highlights.py +135 -0
- argus/backend/controller/views_widgets/nemesis_stats.py +26 -0
- argus/backend/controller/views_widgets/summary.py +43 -0
- argus/backend/db.py +98 -0
- argus/backend/error_handlers.py +41 -0
- argus/backend/events/event_processors.py +34 -0
- argus/backend/models/__init__.py +0 -0
- argus/backend/models/argus_ai.py +24 -0
- argus/backend/models/github_issue.py +60 -0
- argus/backend/models/plan.py +24 -0
- argus/backend/models/result.py +187 -0
- argus/backend/models/runtime_store.py +58 -0
- argus/backend/models/view_widgets.py +25 -0
- argus/backend/models/web.py +403 -0
- argus/backend/plugins/__init__.py +0 -0
- argus/backend/plugins/core.py +248 -0
- argus/backend/plugins/driver_matrix_tests/controller.py +66 -0
- argus/backend/plugins/driver_matrix_tests/model.py +429 -0
- argus/backend/plugins/driver_matrix_tests/plugin.py +21 -0
- argus/backend/plugins/driver_matrix_tests/raw_types.py +62 -0
- argus/backend/plugins/driver_matrix_tests/service.py +61 -0
- argus/backend/plugins/driver_matrix_tests/udt.py +42 -0
- argus/backend/plugins/generic/model.py +86 -0
- argus/backend/plugins/generic/plugin.py +15 -0
- argus/backend/plugins/generic/types.py +14 -0
- argus/backend/plugins/loader.py +39 -0
- argus/backend/plugins/sct/controller.py +224 -0
- argus/backend/plugins/sct/plugin.py +37 -0
- argus/backend/plugins/sct/resource_setup.py +177 -0
- argus/backend/plugins/sct/service.py +682 -0
- argus/backend/plugins/sct/testrun.py +288 -0
- argus/backend/plugins/sct/udt.py +100 -0
- argus/backend/plugins/sirenada/model.py +118 -0
- argus/backend/plugins/sirenada/plugin.py +16 -0
- argus/backend/service/admin.py +26 -0
- argus/backend/service/argus_service.py +696 -0
- argus/backend/service/build_system_monitor.py +185 -0
- argus/backend/service/client_service.py +127 -0
- argus/backend/service/event_service.py +18 -0
- argus/backend/service/github_service.py +233 -0
- argus/backend/service/jenkins_service.py +269 -0
- argus/backend/service/notification_manager.py +159 -0
- argus/backend/service/planner_service.py +608 -0
- argus/backend/service/release_manager.py +229 -0
- argus/backend/service/results_service.py +690 -0
- argus/backend/service/stats.py +610 -0
- argus/backend/service/team_manager_service.py +82 -0
- argus/backend/service/test_lookup.py +172 -0
- argus/backend/service/testrun.py +489 -0
- argus/backend/service/user.py +308 -0
- argus/backend/service/views.py +219 -0
- argus/backend/service/views_widgets/__init__.py +0 -0
- argus/backend/service/views_widgets/graphed_stats.py +180 -0
- argus/backend/service/views_widgets/highlights.py +374 -0
- argus/backend/service/views_widgets/nemesis_stats.py +34 -0
- argus/backend/template_filters.py +27 -0
- argus/backend/tests/__init__.py +0 -0
- argus/backend/tests/client_service/__init__.py +0 -0
- argus/backend/tests/client_service/test_submit_results.py +79 -0
- argus/backend/tests/conftest.py +180 -0
- argus/backend/tests/results_service/__init__.py +0 -0
- argus/backend/tests/results_service/test_best_results.py +178 -0
- argus/backend/tests/results_service/test_cell.py +65 -0
- argus/backend/tests/results_service/test_chartjs_additional_functions.py +259 -0
- argus/backend/tests/results_service/test_create_chartjs.py +220 -0
- argus/backend/tests/results_service/test_result_metadata.py +100 -0
- argus/backend/tests/results_service/test_results_service.py +203 -0
- argus/backend/tests/results_service/test_validation_rules.py +213 -0
- argus/backend/tests/view_widgets/__init__.py +0 -0
- argus/backend/tests/view_widgets/test_highlights_api.py +532 -0
- argus/backend/util/common.py +65 -0
- argus/backend/util/config.py +38 -0
- argus/backend/util/encoders.py +56 -0
- argus/backend/util/logsetup.py +80 -0
- argus/backend/util/module_loaders.py +30 -0
- argus/backend/util/send_email.py +91 -0
- argus/client/base.py +1 -3
- argus/client/driver_matrix_tests/cli.py +17 -8
- argus/client/generic/cli.py +4 -2
- argus/client/generic/client.py +1 -0
- argus/client/generic_result.py +48 -9
- argus/client/sct/client.py +1 -3
- argus/client/sirenada/client.py +4 -1
- argus/client/tests/__init__.py +0 -0
- argus/client/tests/conftest.py +19 -0
- argus/client/tests/test_package.py +45 -0
- argus/client/tests/test_results.py +224 -0
- argus/common/sct_types.py +3 -0
- argus/common/sirenada_types.py +1 -1
- {argus_alm-0.14.2.dist-info → argus_alm-0.15.2.dist-info}/METADATA +43 -19
- argus_alm-0.15.2.dist-info/RECORD +122 -0
- {argus_alm-0.14.2.dist-info → argus_alm-0.15.2.dist-info}/WHEEL +2 -1
- argus_alm-0.15.2.dist-info/entry_points.txt +3 -0
- argus_alm-0.15.2.dist-info/top_level.txt +1 -0
- argus_alm-0.14.2.dist-info/RECORD +0 -20
- argus_alm-0.14.2.dist-info/entry_points.txt +0 -4
- {argus_alm-0.14.2.dist-info → argus_alm-0.15.2.dist-info/licenses}/LICENSE +0 -0
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from abc import ABC, abstractmethod
|
|
3
|
+
import jenkins
|
|
4
|
+
import click
|
|
5
|
+
import re
|
|
6
|
+
from flask import current_app
|
|
7
|
+
from flask.cli import with_appcontext
|
|
8
|
+
|
|
9
|
+
from argus.backend.db import ScyllaCluster
|
|
10
|
+
from argus.backend.models.web import ArgusRelease, ArgusGroup, ArgusTest, ArgusTestException
|
|
11
|
+
from argus.backend.service.release_manager import ReleaseManagerService
|
|
12
|
+
|
|
13
|
+
LOGGER = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ArgusTestsMonitor(ABC):
    """Base class for build-system monitors that sync entities into Argus.

    Subclasses implement :meth:`collect` to scan a concrete build system
    (e.g. Jenkins) and create any ``ArgusRelease`` / ``ArgusGroup`` /
    ``ArgusTest`` rows that are missing from the database.
    """

    # Group names containing any of these substrings (case-insensitive) are
    # skipped by check_filter().  Subclasses override with concrete values.
    BUILD_SYSTEM_FILTERED_PREFIXES = [
    ]

    def __init__(self) -> None:
        self._cluster = ScyllaCluster.get()
        # Cache all existing entities up-front so collect() can deduplicate
        # in memory instead of querying the database for every job.
        self._existing_releases = list(ArgusRelease.objects().limit(None))
        self._existing_groups = list(ArgusGroup.objects().limit(None))
        self._existing_tests = list(ArgusTest.objects().limit(None))
        # Copy the class-level list so instance-level mutation can never leak
        # into the shared class attribute (original code aliased it directly).
        self._filtered_groups: list[str] = list(self.BUILD_SYSTEM_FILTERED_PREFIXES)

    def create_release(self, release_name):
        """Create, persist and return a new ArgusRelease named *release_name*."""
        release = ArgusRelease()
        release.name = release_name
        release.save()

        return release

    def create_group(self, release: ArgusRelease, group_name: str, build_id: str, group_pretty_name: str | None = None):
        """Create, persist and return a new ArgusGroup under *release*.

        *group_pretty_name* is only stored when truthy.
        """
        group = ArgusGroup()
        group.release_id = release.id
        group.name = group_name
        group.build_system_id = build_id
        if group_pretty_name:
            group.pretty_name = group_pretty_name
        group.save()

        return group

    def create_test(self, release: ArgusRelease, group: ArgusGroup,
                    test_name: str, build_id: str, build_url: str) -> ArgusTest:
        """Create, persist and return a new ArgusTest, then adopt existing runs.

        Raises whatever ``validate_build_system_id()`` raises (callers in
        subclasses catch ``ArgusTestException``) when *build_id* is invalid.
        """
        test = ArgusTest()
        test.name = test_name
        test.group_id = group.id
        test.release_id = release.id
        test.build_system_id = build_id
        test.build_system_url = build_url
        test.validate_build_system_id()
        test.save()
        # Re-associate any pre-existing runs that belong to this build id.
        ReleaseManagerService().move_test_runs(test)

        return test

    @abstractmethod
    def collect(self):
        """Scan the build system and create missing releases/groups/tests."""
        raise NotImplementedError()

    def check_filter(self, group_name: str) -> bool:
        """Return False when *group_name* should be filtered out.

        NOTE: despite the constant's name, this is a case-insensitive
        *substring* match, not a prefix match — kept as-is for compatibility.
        """
        for prefix in self._filtered_groups:
            if prefix.lower() in group_name.lower():
                return False

        return True
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class JenkinsMonitor(ArgusTestsMonitor):
    """Scans monitored Jenkins folders and creates missing Argus releases, groups and tests."""

    BUILD_SYSTEM_FILTERED_PREFIXES = [
        "releng",
    ]

    # Regexes (matched case-insensitively) for top-level Jenkins folder names
    # that map to Argus releases.  Literal dots are escaped — previously
    # "manager-3.\d+" used an unescaped '.' that matched any character.
    JENKINS_MONITORED_RELEASES = [
        r"^scylla-master$",
        r"^scylla-staging$",
        r"^scylla-\d+\.\d+$",
        r"^manager-3\.\d+$",
        r"^scylla-operator/operator-master$",
        r"^scylla-operator/operator-\d+\.\d+$",
        r"^scylla-enterprise$",
        r"^enterprise-20\d{2}\.\d+$",
        r"^siren-tests$",
    ]

    def __init__(self) -> None:
        super().__init__()
        self._jenkins = jenkins.Jenkins(url=current_app.config["JENKINS_URL"],
                                        username=current_app.config["JENKINS_USER"],
                                        password=current_app.config["JENKINS_API_TOKEN"])
        self._monitored_releases = self.JENKINS_MONITORED_RELEASES

    def _check_release_name(self, release_name: str):
        """Return True when *release_name* matches any monitored release pattern."""
        return any(re.match(pattern, release_name, re.IGNORECASE) for pattern in self._monitored_releases)

    def collect(self):
        """Walk all monitored Jenkins folders, creating missing releases, groups and tests."""
        click.echo("Collecting new tests from jenkins")
        all_jobs = self._jenkins.get_all_jobs()
        all_monitored_folders = [job for job in all_jobs if self._check_release_name(job["fullname"])]
        LOGGER.info("Will collect %s", [f["fullname"] for f in all_monitored_folders])

        for release in all_monitored_folders:
            LOGGER.info("Processing release %s", release["name"])
            try:
                saved_release = ArgusRelease.get(name=release["name"])
                LOGGER.info("Release %s exists", release["name"])
            except ArgusRelease.DoesNotExist:
                LOGGER.warning("Release %s does not exist, creating...", release["name"])
                saved_release = self.create_release(release["name"])
                self._existing_releases.append(saved_release)

            try:
                groups = self.collect_groups_for_release(release["jobs"])
            except KeyError:
                LOGGER.error("Empty release!\n %s", release)
                continue
            # Depth-first walk over nested folders; a synthetic "root" group
            # gathers jobs that live directly under the release folder.
            folder_stack = [dict(parent_name="", parent_display_name="", group=g) for g in reversed(groups)]
            root_folder = {
                "parent_name": "",
                "parent_display_name": "",
                "group": {
                    "name": f"{release['fullname']}-root",
                    "displayName": "-- root directory --",
                    "fullname": release["fullname"],
                    "jobs": self.collect_root_folder_jobs(release["jobs"]),
                }
            }
            folder_stack.append(root_folder)
            while len(folder_stack) != 0:
                group_dict = folder_stack.pop()
                group = group_dict["group"]
                LOGGER.info("Processing group %s for release %s", group["name"], saved_release.name)
                try:
                    group_name = group["name"] if not group_dict["parent_name"] else f"{group_dict['parent_name']}-{group['name']}"
                    saved_group = filter(lambda g: g.build_system_id == group["fullname"], self._existing_groups)
                    saved_group = next(saved_group)
                    LOGGER.info("Group %s already exists. (id: %s)", saved_group.build_system_id, saved_group.id)
                except StopIteration:
                    LOGGER.warning(
                        "Group %s for release %s doesn't exist, creating...", group_name, saved_release.name)
                    try:
                        # Query Jenkins only when the folder listing did not
                        # already include a display name.  The original passed
                        # the API call as dict.get()'s default, which evaluated
                        # it eagerly — one remote call per group even when the
                        # name was already known, and an API failure then
                        # discarded a display name that was actually present.
                        display_name = group.get("displayName")
                        if display_name is None:
                            display_name = self._jenkins.get_job_info(name=group["fullname"])["displayName"]
                        display_name = display_name if not group_dict[
                            "parent_display_name"] else f"{group_dict['parent_display_name']} - {display_name}"
                    except Exception:  # best-effort: fall back to no pretty name
                        display_name = None

                    saved_group = self.create_group(saved_release, group_name, group["fullname"], display_name)
                    self._existing_groups.append(saved_group)

                for job in group["jobs"]:
                    LOGGER.info("Processing job %s for release %s and group %s",
                                job["fullname"], saved_group.name, saved_release.name)
                    saved_test = None
                    if "Folder" in job["_class"]:
                        # Nested folder: queue it for a later iteration.
                        folder_stack.append(dict(parent_name=saved_group.name,
                                                 parent_display_name=saved_group.pretty_name, group=job))
                    if "WorkflowJob" in job["_class"]:
                        try:
                            saved_test = filter(lambda t: t.build_system_id == job["fullname"], self._existing_tests)
                            saved_test = next(saved_test)
                            LOGGER.info("Test %s already exists. (id: %s)", saved_test.build_system_id, saved_test.id)
                        except StopIteration:
                            LOGGER.warning("Test %s for release %s (group %s) doesn't exist, creating...",
                                           job["name"], saved_release.name, saved_group.name)
                            try:
                                saved_test = self.create_test(
                                    saved_release, saved_group, job["name"], job["fullname"], job["url"])
                                self._existing_tests.append(saved_test)
                            except ArgusTestException:
                                LOGGER.error("Unable to create test for build_id %s", job["fullname"], exc_info=True)

    def collect_groups_for_release(self, jobs):
        """Return the sub-folders of a release folder, minus filtered group names."""
        groups = [folder for folder in jobs if "Folder" in folder["_class"]]
        groups = [group for group in groups if self.check_filter(group["name"])]

        return groups

    def collect_root_folder_jobs(self, jobs):
        """Return the pipeline (WorkflowJob) entries directly under a folder."""
        return [job for job in jobs if "WorkflowJob" in job["_class"]]
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
from dataclasses import asdict
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from uuid import UUID
|
|
4
|
+
|
|
5
|
+
from argus.backend.db import ScyllaCluster
|
|
6
|
+
from argus.backend.error_handlers import DataValidationError
|
|
7
|
+
from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
|
|
8
|
+
from argus.backend.plugins.core import PluginModelBase
|
|
9
|
+
from argus.backend.plugins.loader import AVAILABLE_PLUGINS
|
|
10
|
+
from argus.backend.service.results_service import ResultsService, Cell
|
|
11
|
+
from argus.common.enums import TestStatus
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class ClientException(Exception):
    """Raised by ClientService for client-facing errors (e.g. an unknown run type)."""
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class ClientService:
    """Service backing the client-facing API: run lifecycle operations and result submission."""

    # Map of run type name -> plugin run-model class, for all registered plugins.
    PLUGINS = {name: plugin.model for name, plugin in AVAILABLE_PLUGINS.items()}

    def __init__(self) -> None:
        self.cluster = ScyllaCluster.get()

    def get_model(self, run_type: str) -> PluginModelBase:
        """Return the run model class for *run_type*.

        Raises ClientException when the run type has no registered plugin.
        """
        cls = self.PLUGINS.get(run_type)
        if not cls:
            raise ClientException(f"Unsupported run type: {run_type}", run_type)
        return cls

    def submit_run(self, run_type: str, request_data: dict) -> str:
        """Create a new run of *run_type* from the raw request payload."""
        model = self.get_model(run_type)
        model.submit_run(request_data=request_data)

        return "Created"

    def get_run(self, run_type: str, run_id: str):
        """Return the run with *run_id*, or None when it does not exist."""
        model = self.get_model(run_type)
        try:
            run = model.get(id=run_id)
        except model.DoesNotExist:
            return None
        return run

    def heartbeat(self, run_type: str, run_id: str) -> int:
        """Refresh the run's heartbeat timestamp and return its new value."""
        model = self.get_model(run_type)
        run = model.load_test_run(UUID(run_id))
        run.update_heartbeat()
        run.save()
        return run.heartbeat

    def get_run_status(self, run_type: str, run_id: str) -> str:
        """Return the current status of the run."""
        model = self.get_model(run_type)
        run = model.load_test_run(UUID(run_id))
        return run.status

    def update_run_status(self, run_type: str, run_id: str, new_status: str) -> str:
        """Set the run's status to *new_status* (validated via TestStatus) and return it."""
        model = self.get_model(run_type)
        run = model.load_test_run(UUID(run_id))
        run.change_status(new_status=TestStatus(new_status))
        run.save()

        return run.status

    def submit_product_version(self, run_type: str, run_id: str, version: str) -> str:
        """Record the product version under test for the run."""
        model = self.get_model(run_type)
        run = model.load_test_run(UUID(run_id))
        run.submit_product_version(version)
        run.save()

        return "Submitted"

    def submit_logs(self, run_type: str, run_id: str, logs: list[dict]) -> str:
        """Attach log descriptors to the run."""
        model = self.get_model(run_type)
        run = model.load_test_run(UUID(run_id))
        run.submit_logs(logs)
        run.save()

        return "Submitted"

    def finish_run(self, run_type: str, run_id: str, payload: dict | None = None) -> str:
        """Finalize the run with an optional plugin-specific payload."""
        model = self.get_model(run_type)
        run = model.load_test_run(UUID(run_id))
        run.finish_run(payload)
        run.save()

        return "Finalized"

    def submit_results(self, run_type: str, run_id: str, results: dict) -> dict[str, str]:
        """Persist one results table for a run, updating table metadata and best results.

        Returns an error payload when the run does not exist.  Raises
        DataValidationError after saving when any cell fails its validation
        rules (i.e. its post-rule status is "ERROR").
        """
        model = self.get_model(run_type)
        try:
            run = model.load_test_run(UUID(run_id))
        except model.DoesNotExist:
            return {"status": "error", "response": {
                "exception": "DoesNotExist",
                "arguments": [run_id]
            }}
        table_name = results["meta"]["name"]
        results_service = ResultsService()
        cells = [Cell(**cell) for cell in results["results"]]
        table_metadata = results_service.get_table_metadata(test_id=run.test_id, table_name=table_name)
        if table_metadata:
            table_metadata = table_metadata.update_if_changed(results["meta"])
        else:
            table_metadata = ArgusGenericResultMetadata(test_id=run.test_id, **results["meta"])
            table_metadata.save()
        if results.get("sut_timestamp", 0) == 0:
            # automatic sut_timestamp derived from the SUT package version
            results["sut_timestamp"] = run.sut_timestamp(results.get(
                'sut_package_name', 'scylla-server'))
        # NOTE(review): fromtimestamp() yields a naive local-time datetime —
        # confirm this matches how sut_timestamp is stored elsewhere.
        results["sut_timestamp"] = datetime.fromtimestamp(results["sut_timestamp"])
        best_results = results_service.update_best_results(test_id=run.test_id, table_name=table_name, table_metadata=table_metadata,
                                                           cells=cells, run_id=run_id)
        # (a redundant second `table_name = results["meta"]["name"]` assignment was removed here)
        sut_timestamp = results["sut_timestamp"]
        result_failed = False
        for cell in cells:
            cell.update_cell_status_based_on_rules(table_metadata, best_results)
            if cell.status == "ERROR":
                result_failed = True
            ArgusGenericResultData(test_id=run.test_id,
                                   run_id=run.id,
                                   name=table_name,
                                   sut_timestamp=sut_timestamp,
                                   **asdict(cell)
                                   ).save()
        if result_failed:
            raise DataValidationError()
        return {"status": "ok", "message": "Results submitted"}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
import json
|
|
3
|
+
from argus.backend.models.web import ArgusEvent, ArgusEventTypes
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class EventService:
    """Persists ArgusEvent rows describing run-related activity."""

    @staticmethod
    def create_run_event(kind: ArgusEventTypes, body: dict, user_id=None, run_id=None, release_id=None, group_id=None, test_id=None):
        """Create and save an ArgusEvent of *kind* carrying a compact JSON *body*.

        All id parameters are optional and stored as-is on the event row.
        """
        event = ArgusEvent()
        event.run_id = run_id
        event.user_id = user_id
        event.test_id = test_id
        event.group_id = group_id
        event.release_id = release_id
        event.kind = kind.value
        # Compact, ASCII-safe serialization of the event payload.
        event.body = json.dumps(body, ensure_ascii=True, separators=(',', ':'))
        # NOTE(review): utcnow() returns a naive datetime and is deprecated in
        # Python 3.12; kept as-is to preserve the stored value's form —
        # confirm readers before switching to timezone-aware datetimes.
        event.created_at = datetime.utcnow()
        event.save()
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import logging
|
|
3
|
+
from collections import defaultdict
|
|
4
|
+
from datetime import UTC, datetime
|
|
5
|
+
from functools import reduce
|
|
6
|
+
from uuid import UUID
|
|
7
|
+
from flask import current_app, g
|
|
8
|
+
from github import Github, Auth
|
|
9
|
+
|
|
10
|
+
from argus.backend.models.runtime_store import RuntimeStore
|
|
11
|
+
from argus.backend.models.web import ArgusEventTypes, ArgusTest, ArgusUserView
|
|
12
|
+
from argus.backend.models.github_issue import GithubIssue, IssueAssignee, IssueLink, IssueLabel
|
|
13
|
+
from argus.backend.plugins.core import PluginInfoBase
|
|
14
|
+
from argus.backend.plugins.loader import AVAILABLE_PLUGINS
|
|
15
|
+
from argus.backend.service.event_service import EventService
|
|
16
|
+
from argus.backend.util.common import chunk
|
|
17
|
+
|
|
18
|
+
LOGGER = logging.getLogger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class GithubService:
    """Links GitHub issues to Argus test runs and keeps issue metadata in sync."""

    LAST_RAN_KEY = "github_service_last_issue_refresh"

    # Canonical GitHub issue / pull-request URL.  Compiled once and shared by
    # refresh_stale_issues() and submit_github_issue() so the two code paths
    # cannot drift apart (the pattern was previously duplicated inline).
    ISSUE_URL_RE = re.compile(
        r"http(s)?://(www\.)?github\.com/(?P<owner>[\w\d]+)/"
        r"(?P<repo>[\w\d\-_]+)/(?P<type>issues|pull)/(?P<issue_number>\d+)(/)?"
    )

    plugins = AVAILABLE_PLUGINS

    def __init__(self):
        auth = Auth.Token(token=self.get_installation_token())
        self.gh = Github(auth=auth, per_page=1000)

    def get_plugin(self, plugin_name: str) -> PluginInfoBase | None:
        """Return the registered plugin by name, or None when unknown."""
        return self.plugins.get(plugin_name)

    def get_installation_token(self):
        """Return the configured GitHub access token, refreshing it first."""
        self._refresh_installation_token()
        return current_app.config.get("GITHUB_ACCESS_TOKEN")

    def _refresh_installation_token(self):
        # TODO: To be replaced by JWT refreshing logic once we have Github App in place
        pass

    def refresh_stale_issues(self):
        """Refresh title/state/labels/assignees of tracked issues updated since the last sync."""
        try:
            last_ran = RuntimeStore.get(key=self.LAST_RAN_KEY)
        except RuntimeStore.DoesNotExist:
            # First ever run: backfill starting from a fixed early date.
            last_ran = RuntimeStore()
            last_ran.key = self.LAST_RAN_KEY
            last_ran.value = datetime(year=2020, month=1, day=1, hour=0, minute=0, tzinfo=UTC)
            last_ran.save()

        LOGGER.info("Starting Github Issue sync...")
        check_time = datetime.now(tz=UTC)

        all_issues: list[GithubIssue] = list(GithubIssue.all())
        issues_by_identifier = {
            f"{issue.owner.lower()}/{issue.repo.lower()}#{issue.number}": issue for issue in all_issues}
        touch_count = 0

        unique_repos = {f"{issue.owner}/{issue.repo}" for issue in all_issues}
        for idx, repo_name in enumerate(unique_repos):
            LOGGER.info("[%s/%s] Fetching %s...", idx + 1, len(unique_repos), repo_name)
            gh_repo = self.gh.get_repo(repo_name)
            issues = gh_repo.get_issues(since=last_ran.value, state="all", direction="desc", sort="created")
            for issue_idx, issue in enumerate(issues):
                match = self.ISSUE_URL_RE.match(issue.html_url)

                identifier = f"{match.group('owner').lower()}/{match.group('repo').lower()}#{match.group('issue_number')}"
                issue_to_update = issues_by_identifier.get(identifier)
                if not issue_to_update:
                    LOGGER.debug("[%s/%s] No issue found for %s...", issue_idx + 1, "?", identifier)
                    continue
                LOGGER.debug("[%s/%s] Refreshing %s...", issue_idx + 1, "?", identifier)
                issue_to_update.title = issue.title
                issue_to_update.state = issue.state
                issue_to_update.labels = [IssueLabel(
                    id=label.id, name=label.name, color=label.color, description=label.description) for label in issue.labels]
                issue_to_update.assignees = [IssueAssignee(
                    login=assignee.login, html_url=assignee.html_url) for assignee in issue.assignees]
                issue_to_update.save()
                touch_count += 1

        LOGGER.info("Finished. Found %s out of %s issues", touch_count, len(all_issues))
        last_ran.value = check_time
        last_ran.save()

    def submit_github_issue(self, issue_url: str, test_id: UUID, run_id: UUID):
        """Attach a GitHub issue (by URL) to the given run, creating the issue record if needed.

        Emits a TestRunIssueAdded event and returns the issue as a dict.
        Raises Exception when *issue_url* is not a valid GitHub issue/PR URL.
        """
        match = self.ISSUE_URL_RE.match(issue_url)
        if not match:
            raise Exception("URL doesn't match Github schema")

        test: ArgusTest = ArgusTest.get(id=test_id)
        plugin = self.get_plugin(plugin_name=test.plugin_name)
        run = plugin.model.get(id=run_id)

        existing = True
        try:
            issue = GithubIssue.get(url=issue_url)
        except GithubIssue.DoesNotExist:
            # Narrowed from a bare `except:` so unrelated errors propagate
            # instead of being silently treated as "issue not found".
            issue = None
            existing = False
        if not issue:
            repo_id = f"{match.group('owner')}/{match.group('repo')}"
            remote_repo = self.gh.get_repo(repo_id)
            remote_issue = remote_repo.get_issue(int(match.group("issue_number")))

            issue = GithubIssue()
            issue.user_id = g.user.id
            issue.type = match.group("type")
            # NOTE(review): owner.name is the display name and may be None;
            # owner.login might be intended here — confirm.
            issue.owner = remote_issue.repository.owner.name
            issue.repo = remote_issue.repository.name
            issue.number = remote_issue.number
            issue.state = remote_issue.state
            issue.title = remote_issue.title
            issue.url = issue_url
            issue.repo_identifier = repo_id
            for label in remote_issue.labels:
                label_udt = IssueLabel()
                label_udt.id = label.id
                label_udt.name = label.name
                label_udt.description = label.description
                label_udt.color = label.color
                issue.labels.append(label_udt)

            for assignee in remote_issue.assignees:
                assignee_udt = IssueAssignee()
                assignee_udt.login = assignee.login
                assignee_udt.html_url = assignee.html_url
                issue.assignees.append(assignee_udt)

            issue.save()

        link = IssueLink()
        link.run_id = run.id
        link.issue_id = issue.id
        link.release_id = test.release_id
        link.test_id = test.id
        link.group_id = test.group_id

        link.save()

        EventService.create_run_event(
            kind=ArgusEventTypes.TestRunIssueAdded,
            body={
                "message": f"An issue titled \"{{title}}\" was {'attached' if existing else 'added'} by {{username}}",
                "username": g.user.username,
                "url": issue_url,
                "title": issue.title,
                "state": issue.state,
            },
            user_id=g.user.id,
            run_id=link.run_id,
            release_id=link.release_id,
            group_id=link.group_id,
            test_id=link.test_id
        )

        response = {
            **dict(issue.items()),
            "title": issue.title,
            "state": issue.state,
        }

        return response

    def _get_github_issues_for_view(self, view_id: UUID | str) -> list[IssueLink]:
        """Return all IssueLinks attached to any test in the given user view."""
        view: ArgusUserView = ArgusUserView.get(id=view_id)
        links = []
        for batch in chunk(view.tests):
            links.extend(IssueLink.filter(test_id__in=batch).allow_filtering().all())

        return links

    def get_github_issues(self, filter_key: str, filter_id: UUID, aggregate_by_issue: bool = False) -> dict:
        """Return issues linked to the entity selected by (*filter_key*, *filter_id*).

        When *aggregate_by_issue* is True, each issue dict carries its list of
        links under "links"; otherwise a flat list of issue dicts is returned.
        """
        if filter_key not in ["release_id", "group_id", "test_id", "run_id", "user_id", "view_id"]:
            raise Exception(
                "filter_key can only be one of: \"release_id\", \"group_id\", \"test_id\", \"run_id\", \"user_id\", \"view_id\""
            )
        if filter_key == "view_id":
            links = list(self._get_github_issues_for_view(filter_id))
        else:
            links = list(IssueLink.filter(**{filter_key: filter_id}).allow_filtering().all())
        # Group links by issue id: {issue_id: [IssueLink, ...]}.
        issues = reduce(lambda acc, link: acc[link.issue_id].append(link) or acc, links, defaultdict(list))
        resolved_issues = []
        for batch in chunk(issues.keys()):
            resolved_issues.extend(GithubIssue.filter(id__in=batch).all())
        if aggregate_by_issue:
            response = []
            for issue in resolved_issues:
                issue_dict = dict(issue.items())
                issue_dict["links"] = issues[issue.id]
                response.append(issue_dict)

        else:
            response = [dict(issue.items()) for issue in resolved_issues]
        return response

    def delete_github_issue(self, issue_id: UUID, run_id: UUID) -> dict:
        """Unlink an issue from a run; delete the issue record only when no other links remain."""
        issue: GithubIssue = GithubIssue.get(id=issue_id)
        links = list(IssueLink.filter(issue_id=issue_id).allow_filtering().all())
        link: IssueLink = IssueLink.get(run_id=run_id, issue_id=issue_id)
        # BUGFIX: the original filter condition was
        #   l.run_id != link.run_id and link.issue_id != issue_id
        # — the second conjunct compared the removed link against its own
        # issue_id and was always False, so remaining_links was always 0 and
        # the issue row got deleted even while other runs still linked it.
        # Count the links belonging to *other* runs instead.
        remaining_links = len([other for other in links if other.run_id != link.run_id])

        EventService.create_run_event(
            kind=ArgusEventTypes.TestRunIssueRemoved,
            body={
                "message": "An issue titled \"{title}\" was removed by {username} from \"{run_id}\"",
                "username": g.user.username,
                "url": issue.url,
                "title": issue.title,
                "state": issue.state,
                "run_id": run_id,
            },
            user_id=g.user.id,
            run_id=link.run_id,
            release_id=link.release_id,
            group_id=link.group_id,
            test_id=link.test_id
        )

        link.delete()
        if remaining_links == 0:
            issue.delete()

        return {
            "deleted": issue_id if remaining_links == 0 else (link.run_id, link.issue_id)
        }
|