argus-alm 0.12.10__py3-none-any.whl → 0.13.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- argus/client/base.py +1 -1
- argus/client/driver_matrix_tests/cli.py +2 -2
- argus/client/driver_matrix_tests/client.py +1 -1
- argus/client/generic/cli.py +22 -2
- argus/client/generic/client.py +22 -0
- argus/client/generic_result.py +3 -3
- argus/client/sct/client.py +5 -4
- argus/client/sirenada/client.py +1 -1
- {argus_alm-0.12.10.dist-info → argus_alm-0.13.1.dist-info}/METADATA +2 -4
- argus_alm-0.13.1.dist-info/RECORD +20 -0
- argus/backend/.gitkeep +0 -0
- argus/backend/cli.py +0 -41
- argus/backend/controller/__init__.py +0 -0
- argus/backend/controller/admin.py +0 -20
- argus/backend/controller/admin_api.py +0 -354
- argus/backend/controller/api.py +0 -529
- argus/backend/controller/auth.py +0 -67
- argus/backend/controller/client_api.py +0 -108
- argus/backend/controller/main.py +0 -274
- argus/backend/controller/notification_api.py +0 -72
- argus/backend/controller/notifications.py +0 -13
- argus/backend/controller/team.py +0 -126
- argus/backend/controller/team_ui.py +0 -18
- argus/backend/controller/testrun_api.py +0 -482
- argus/backend/controller/view_api.py +0 -162
- argus/backend/db.py +0 -100
- argus/backend/error_handlers.py +0 -21
- argus/backend/events/event_processors.py +0 -34
- argus/backend/models/__init__.py +0 -0
- argus/backend/models/result.py +0 -138
- argus/backend/models/web.py +0 -389
- argus/backend/plugins/__init__.py +0 -0
- argus/backend/plugins/core.py +0 -225
- argus/backend/plugins/driver_matrix_tests/controller.py +0 -63
- argus/backend/plugins/driver_matrix_tests/model.py +0 -421
- argus/backend/plugins/driver_matrix_tests/plugin.py +0 -22
- argus/backend/plugins/driver_matrix_tests/raw_types.py +0 -62
- argus/backend/plugins/driver_matrix_tests/service.py +0 -60
- argus/backend/plugins/driver_matrix_tests/udt.py +0 -42
- argus/backend/plugins/generic/model.py +0 -79
- argus/backend/plugins/generic/plugin.py +0 -16
- argus/backend/plugins/generic/types.py +0 -13
- argus/backend/plugins/loader.py +0 -40
- argus/backend/plugins/sct/controller.py +0 -185
- argus/backend/plugins/sct/plugin.py +0 -38
- argus/backend/plugins/sct/resource_setup.py +0 -178
- argus/backend/plugins/sct/service.py +0 -491
- argus/backend/plugins/sct/testrun.py +0 -272
- argus/backend/plugins/sct/udt.py +0 -101
- argus/backend/plugins/sirenada/model.py +0 -113
- argus/backend/plugins/sirenada/plugin.py +0 -17
- argus/backend/service/admin.py +0 -27
- argus/backend/service/argus_service.py +0 -688
- argus/backend/service/build_system_monitor.py +0 -188
- argus/backend/service/client_service.py +0 -122
- argus/backend/service/event_service.py +0 -18
- argus/backend/service/jenkins_service.py +0 -240
- argus/backend/service/notification_manager.py +0 -150
- argus/backend/service/release_manager.py +0 -230
- argus/backend/service/results_service.py +0 -317
- argus/backend/service/stats.py +0 -540
- argus/backend/service/team_manager_service.py +0 -83
- argus/backend/service/testrun.py +0 -559
- argus/backend/service/user.py +0 -307
- argus/backend/service/views.py +0 -258
- argus/backend/template_filters.py +0 -27
- argus/backend/tests/__init__.py +0 -0
- argus/backend/tests/argus_web.test.yaml +0 -39
- argus/backend/tests/conftest.py +0 -44
- argus/backend/tests/results_service/__init__.py +0 -0
- argus/backend/tests/results_service/test_best_results.py +0 -70
- argus/backend/util/common.py +0 -65
- argus/backend/util/config.py +0 -38
- argus/backend/util/encoders.py +0 -41
- argus/backend/util/logsetup.py +0 -81
- argus/backend/util/module_loaders.py +0 -30
- argus/backend/util/send_email.py +0 -91
- argus/client/generic_result_old.py +0 -143
- argus/db/.gitkeep +0 -0
- argus/db/argus_json.py +0 -14
- argus/db/cloud_types.py +0 -125
- argus/db/config.py +0 -135
- argus/db/db_types.py +0 -139
- argus/db/interface.py +0 -370
- argus/db/testrun.py +0 -740
- argus/db/utils.py +0 -15
- argus_alm-0.12.10.dist-info/RECORD +0 -96
- /argus/{backend → common}/__init__.py +0 -0
- /argus/{backend/util → common}/enums.py +0 -0
- /argus/{backend/plugins/sct/types.py → common/sct_types.py} +0 -0
- /argus/{backend/plugins/sirenada/types.py → common/sirenada_types.py} +0 -0
- {argus_alm-0.12.10.dist-info → argus_alm-0.13.1.dist-info}/LICENSE +0 -0
- {argus_alm-0.12.10.dist-info → argus_alm-0.13.1.dist-info}/WHEEL +0 -0
- {argus_alm-0.12.10.dist-info → argus_alm-0.13.1.dist-info}/entry_points.txt +0 -0
argus/backend/plugins/sct/service.py
@@ -1,491 +0,0 @@
-import base64
-from dataclasses import dataclass
-from datetime import datetime
-from functools import reduce
-import logging
-import math
-import re
-from time import time
-from xml.etree import ElementTree
-from flask import g
-from argus.backend.models.web import ArgusEventTypes
-from argus.backend.plugins.sct.testrun import SCTJunitReports, SCTTestRun, SubtestType
-from argus.backend.plugins.sct.types import GeminiResultsRequest, PerformanceResultsRequest, ResourceUpdateRequest
-from argus.backend.plugins.sct.udt import (
-    CloudInstanceDetails,
-    CloudResource,
-    EventsBySeverity,
-    NemesisRunInfo,
-    NodeDescription,
-    PackageVersion,
-    PerformanceHDRHistogram,
-)
-from argus.backend.service.event_service import EventService
-from argus.backend.util.common import get_build_number
-from argus.backend.util.enums import NemesisStatus, ResourceState, TestStatus
-
-LOGGER = logging.getLogger(__name__)
-
-
-class SCTServiceException(Exception):
-    pass
-
-
-@dataclass(init=True, repr=True)
-class NemesisSubmissionRequest:
-    name: str
-    class_name: str
-    start_time: int
-    node_name: str
-    node_ip: str
-    node_shards: int
-
-
-@dataclass(init=True, repr=True)
-class NemesisFinalizationRequest:
-    name: str
-    start_time: int
-    status: str
-    message: str
-
-
-@dataclass(init=True, repr=True)
-class EventSubmissionRequest:
-    severity: str
-    total_events: int
-    messages: list[str]
-
-
-class SCTService:
-
-    @staticmethod
-    def submit_packages(run_id: str, packages: list[dict]) -> str:
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            for package_dict in packages:
-                package = PackageVersion(**package_dict)
-                if package not in run.packages:
-                    run.packages.append(package)
-            run.save()
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "added"
-
-
-    @staticmethod
-    def set_sct_runner(run_id: str, public_ip: str, private_ip: str, region: str, backend: str):
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            run.sct_runner_host = CloudInstanceDetails(
-                public_ip=public_ip,
-                private_ip=private_ip,
-                provider=backend,
-                region=region,
-            )
-            run.save()
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "updated"
-
-    @staticmethod
-    def submit_screenshots(run_id: str, screenshot_links: list[str]) -> str:
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            for link in screenshot_links:
-                run.add_screenshot(link)
-            run.save()
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "submitted"
-
-    @staticmethod
-    def submit_gemini_results(run_id: str, gemini_data: GeminiResultsRequest) -> str:
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            run.subtest_name = SubtestType.GEMINI.value
-            run.oracle_nodes_count = gemini_data.get("oracle_nodes_count")
-            run.oracle_node_ami_id = gemini_data.get("oracle_node_ami_id")
-            run.oracle_node_instance_type = gemini_data.get("oracle_node_instance_type")
-            run.oracle_node_scylla_version = gemini_data.get("oracle_node_scylla_version")
-            run.gemini_command = gemini_data.get("gemini_command")
-            run.gemini_version = gemini_data.get("gemini_version")
-            run.gemini_status = gemini_data.get("gemini_status")
-            run.gemini_seed = str(gemini_data.get("gemini_seed"))
-            run.gemini_write_ops = gemini_data.get("gemini_write_ops")
-            run.gemini_write_errors = gemini_data.get("gemini_write_errors")
-            run.gemini_read_ops = gemini_data.get("gemini_read_ops")
-            run.gemini_read_errors = gemini_data.get("gemini_read_errors")
-            run.save()
-
-            if run.gemini_status != "PASSED":
-                run.status = TestStatus.FAILED
-                EventService.create_run_event(kind=ArgusEventTypes.TestRunStatusChanged, body={
-                    "message": "[{username}] Setting run status to {status} due to Gemini reporting following status: {gemini_status}",
-                    "username": g.user.username,
-                    "status": TestStatus.FAILED.value,
-                    "gemini_status": run.gemini_status,
-                }, user_id=g.user.id, run_id=run_id, release_id=run.release_id, test_id=run.test_id)
-                run.save()
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "submitted"
-
-    @staticmethod
-    def submit_performance_results(run_id: str, performance_results: PerformanceResultsRequest):
-        # pylint: disable=too-many-statements
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            run.subtest_name = SubtestType.PERFORMANCE.value
-            run.perf_op_rate_average = performance_results.get("perf_op_rate_average")
-            run.perf_op_rate_total = performance_results.get("perf_op_rate_total")
-            run.perf_avg_latency_99th = performance_results.get("perf_avg_latency_99th")
-            run.perf_avg_latency_mean = performance_results.get("perf_avg_latency_mean")
-            run.perf_total_errors = performance_results.get("perf_total_errors")
-            run.stress_cmd = performance_results.get("stress_cmd")
-            run.test_name = performance_results.get("test_name")
-            run.save()
-
-            is_latency_test = "latency" in run.test_name
-            threshold_negative = -10
-
-            def cmp(lhs, rhs):
-                delta = rhs - lhs
-                change = int(math.fabs(delta) * 100 / rhs)
-                return change if delta >= 0 else change * -1
-
-            previous_runs = SCTTestRun.get_perf_results_for_test_name(run.build_id, run.start_time, run.test_name)
-            metrics_to_check = ["perf_avg_latency_99th", "perf_avg_latency_mean"] if is_latency_test else ["perf_op_rate_total"]
-
-            older_runs_by_version = {}
-            for prev_run in previous_runs:
-                if not older_runs_by_version.get(prev_run["scylla_version"]):
-                    older_runs_by_version[prev_run["scylla_version"]] = []
-                older_runs_by_version[prev_run["scylla_version"]].append(prev_run)
-
-            regression_found = False
-            regression_info = {
-                "version": None,
-                "delta": None,
-                "id": None,
-                "metric": None,
-                "job_url": None,
-            }
-
-            if performance_results["histograms"]:
-                for histogram in performance_results["histograms"]:
-                    run.histograms = { k: PerformanceHDRHistogram(**v) for k, v in histogram.items() }
-
-            for version, runs in older_runs_by_version.items():
-                for metric in metrics_to_check:
-                    # pylint: disable=cell-var-from-loop
-                    best_run = sorted(runs, reverse=(not is_latency_test), key=lambda v: v[metric])[0]
-                    last_run = runs[0]
-
-                    metric_to_best = cmp(run[metric], best_run[metric])
-                    metric_to_last = cmp(run[metric], last_run[metric])
-                    if metric_to_last < threshold_negative:
-                        regression_found = True
-                        regression_info["metric"] = metric
-                        regression_info["version"] = version
-                        regression_info["job_url"] = last_run["build_job_url"]
-                        regression_info["id"] = str(last_run["id"])
-                        regression_info["delta"] = metric_to_last
-                        break
-
-                    if metric_to_best < threshold_negative:
-                        regression_found = True
-                        regression_info["metric"] = metric
-                        regression_info["version"] = version
-                        regression_info["job_url"] = best_run["build_job_url"]
-                        regression_info["id"] = str(best_run["id"])
-                        regression_info["delta"] = metric_to_best
-                        break
-
-                if regression_found:
-                    break
-
-            if regression_found:
-                run.status = TestStatus.FAILED.value
-                run.save()
-                EventService.create_run_event(kind=ArgusEventTypes.TestRunStatusChanged, body={
-                    "message": "[{username}] Setting run status to {status} due to performance metric '{metric}' falling "
-                               "below allowed threshold ({threshold_negative}): {delta}% compared to "
-                               "<a href='/test/{test_id}/runs?additionalRuns[]={base_run_id}&additionalRuns[]={previous_run_id}'>This {version} (#{build_number}) run</a>",
-                    "username": g.user.username,
-                    "status": TestStatus.FAILED.value,
-                    "metric": regression_info["metric"],
-                    "threshold_negative": threshold_negative,
-                    "delta": regression_info["delta"],
-                    "test_id": str(run.test_id),
-                    "base_run_id": str(run.id),
-                    "previous_run_id": regression_info["id"],
-                    "version": regression_info["version"],
-                    "build_number": get_build_number(regression_info["job_url"])
-                }, user_id=g.user.id, run_id=run_id, release_id=run.release_id, test_id=run.test_id)
-            else:
-                # NOTE: This will override status set by SCT Events.
-                run.status = TestStatus.PASSED.value
-                run.save()
-
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "submitted"
-
-    @staticmethod
-    def get_performance_history_for_test(run_id: str):
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            rows = run.get_perf_results_for_test_name(build_id=run.build_id, start_time=run.start_time, test_name=run.test_name)
-            return rows
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-
-    @staticmethod
-    def create_resource(run_id: str, resource_details: dict) -> str:
-        instance_details = CloudInstanceDetails(**resource_details.pop("instance_details"))
-        resource = CloudResource(**resource_details, instance_info=instance_details)
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            run.get_resources().append(resource)
-            run.save()
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "created"
-
-    @staticmethod
-    def update_resource_shards(run_id: str, resource_name: str, new_shards: int) -> str:
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            resource = next(res for res in run.get_resources() if res.name == resource_name)
-            resource.get_instance_info().shards_amount = new_shards
-            run.save()
-        except StopIteration as exception:
-            LOGGER.error("Resource %s not found in run %s", resource_name, run_id)
-            raise SCTServiceException("Resource not found", resource_name) from exception
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "updated"
-
-    @staticmethod
-    def update_resource(run_id: str, resource_name: str, update_data: ResourceUpdateRequest) -> str:
-        try:
-            fields_updated = {}
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            resource = next(res for res in run.get_resources() if res.name == resource_name)
-            instance_info = update_data.pop("instance_info", None)
-            resource.state = ResourceState(update_data.get("state", resource.state)).value
-            if instance_info:
-                resource_instance_info = resource.get_instance_info()
-                for k, v in instance_info.items():
-                    if k in resource_instance_info.keys():
-                        resource_instance_info[k] = v
-                        fields_updated[k] = v
-            run.save()
-        except StopIteration as exception:
-            LOGGER.error("Resource %s not found in run %s", resource_name, run_id)
-            raise SCTServiceException("Resource not found", resource_name) from exception
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return {
-            "state": "updated",
-            "fields": fields_updated
-        }
-
-    @staticmethod
-    def terminate_resource(run_id: str, resource_name: str, reason: str) -> str:
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            resource = next(res for res in run.get_resources() if res.name == resource_name)
-            resource.get_instance_info().termination_reason = reason
-            resource.get_instance_info().termination_time = int(time())
-            resource.state = ResourceState.TERMINATED.value
-            run.save()
-        except StopIteration as exception:
-            LOGGER.error("Resource %s not found in run %s", resource_name, run_id)
-            raise SCTServiceException("Resource not found", resource_name) from exception
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "terminated"
-
-    @staticmethod
-    def submit_nemesis(run_id: str, nemesis_details: dict) -> str:
-        nem_req = NemesisSubmissionRequest(**nemesis_details)
-        node_desc = NodeDescription(name=nem_req.node_name, ip=nem_req.node_ip, shards=nem_req.node_shards)
-        nemesis_info = NemesisRunInfo(
-            class_name=nem_req.class_name,
-            name=nem_req.name,
-            start_time=int(nem_req.start_time),
-            end_time=0,
-            duration=0,
-            stack_trace="",
-            status=NemesisStatus.RUNNING.value,
-            target_node=node_desc,
-        )
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            run.add_nemesis(nemesis_info)
-            run.save()
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "created"
-
-    @staticmethod
-    def finalize_nemesis(run_id: str, nemesis_details: dict) -> str:
-        nem_req = NemesisFinalizationRequest(**nemesis_details)
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            nemesis = next(nem for nem in run.get_nemeses() if nem.name ==
-                           nem_req.name and nem.start_time == nem_req.start_time)
-            nemesis.status = NemesisStatus(nem_req.status).value
-            nemesis.stack_trace = nem_req.message
-            nemesis.end_time = int(time())
-            run.save()
-        except StopIteration as exception:
-            LOGGER.error("Nemesis %s (%s) not found for run %s", nem_req.name, nem_req.start_time, run_id)
-            raise SCTServiceException("Nemesis not found", (nem_req.name, nem_req.start_time)) from exception
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "updated"
-
-    @staticmethod
-    def submit_events(run_id: str, events: list[dict]) -> str:
-        wrapped_events = [EventSubmissionRequest(**ev) for ev in events]
-        try:
-            run: SCTTestRun = SCTTestRun.get(id=run_id)
-            for event in wrapped_events:
-                wrapper = EventsBySeverity(severity=event.severity,
-                                           event_amount=event.total_events, last_events=event.messages)
-                run.get_events().append(wrapper)
-            coredumps = SCTService.locate_coredumps(run, run.get_events())
-            run.submit_logs(coredumps)
-            run.save()
-        except SCTTestRun.DoesNotExist as exception:
-            LOGGER.error("Run %s not found for SCTTestRun", run_id)
-            raise SCTServiceException("Run not found", run_id) from exception
-
-        return "added"
-
-    @staticmethod
-    def locate_coredumps(run: SCTTestRun, events: list[EventsBySeverity]) -> list[dict]:
-        flat_messages: list[str] = []
-        links = []
-        for es in events:
-            flat_messages.extend(es.last_events)
-        coredump_events = filter(lambda v: "coredumpevent" in v.lower(), flat_messages)
-        for idx, event in enumerate(coredump_events):
-            core_pattern = r"corefile_url=(?P<url>.+)$"
-            ts_pattern = r"^(?P<ts>\d{4}-\d{2}-\d{2} ([\d:]*)\.\d{3})"
-            node_name_pattern = r"node=(?P<name>.+)$"
-            core_url_match = re.search(core_pattern, event, re.MULTILINE)
-            node_name_match = re.search(node_name_pattern, event, re.MULTILINE)
-            ts_match = re.search(ts_pattern, event)
-            if core_url_match:
-                node_name = node_name_match.group("name") if node_name_match else f"unknown-node-{idx}"
-                split_name = node_name.split(" ")
-                node_name = split_name[1] if len(split_name) >= 2 else node_name
-                url = core_url_match.group("url")
-                timestamp_component = ""
-                if ts_match:
-                    try:
-                        timestamp = datetime.fromisoformat(ts_match.group("ts"))
-                        timestamp_component = timestamp.strftime("-%Y-%m-%d_%H-%M-%S")
-                    except ValueError:
-                        pass
-                log_link = {
-                    "log_name": f"core.scylla-{node_name}{timestamp_component}.gz",
-                    "log_link": url
-                }
-                links.append(log_link)
-        return links
-
-    @staticmethod
-    def get_scylla_version_kernels_report(release_name: str):
-        all_release_runs = SCTTestRun.get_version_data_for_release(release_name=release_name)
-        kernels_by_version = {}
-        kernel_metadata = {}
-        for run in all_release_runs:
-            packages = run["packages"]
-            if not packages:
-                continue
-            scylla_pkgs = {p["name"]: p for p in packages if "scylla-server" in p["name"]}
-            scylla_pkg = scylla_pkgs["scylla-server-upgraded"] if scylla_pkgs.get(
-                "scylla-server-upgraded") else scylla_pkgs.get("scylla-server")
-            version = f"{scylla_pkg['version']}-{scylla_pkg['date']}.{scylla_pkg['revision_id']}" if scylla_pkgs else "unknown"
-            kernel_packages = [p for p in packages if "kernel" in p["name"]]
-            kernel_package = kernel_packages[0] if len(kernel_packages) > 0 else None
-            if not kernel_package:
-                continue
-            version_list = set(kernels_by_version.get(version, []))
-            version_list.add(kernel_package["version"])
-            kernels_by_version[version] = list(version_list)
-            metadata = kernel_metadata.get(
-                kernel_package.version,
-                {
-                    "passed": 0,
-                    "failed": 0,
-                    "aborted": 0,
-                }
-            )
-            if run["status"] in ["passed", "failed", "aborted", "test_error"]:
-                metadata[run["status"]] += 1
-            kernel_metadata[kernel_package["version"]] = metadata
-
-        return {
-            "versions": kernels_by_version,
-            "metadata": kernel_metadata
-        }
-
-    @staticmethod
-    def junit_submit(run_id: str, file_name: str, content: str) -> bool:
-        try:
-            report = SCTJunitReports.get(test_id=run_id, file_name=file_name)
-            if report:
-                raise SCTServiceException(f"Report {file_name} already exists.", file_name)
-        except SCTJunitReports.DoesNotExist:
-            pass
-        report = SCTJunitReports()
-        report.test_id = run_id
-        report.file_name = file_name
-
-        xml_content = str(base64.decodebytes(bytes(content, encoding="utf-8")), encoding="utf-8")
-        try:
-            _ = ElementTree.fromstring(xml_content)
-        except Exception:
-            raise SCTServiceException(f"Malformed JUnit report submitted")
-
-        report.report = xml_content
-        report.save()
-
-        return True
-
-    @staticmethod
-    def junit_get_all(run_id: str) -> list[SCTJunitReports]:
-        return list(SCTJunitReports.filter(test_id=run_id).all())
-
-    @staticmethod
-    def junit_get_single(run_id: str, file_name: str) -> SCTJunitReports:
-        return SCTJunitReports.get(test_id=run_id, file_name=file_name)