argus-alm 0.12.2__py3-none-any.whl → 0.12.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- argus/backend/cli.py +1 -1
- argus/backend/controller/testrun_api.py +116 -1
- argus/backend/plugins/core.py +12 -4
- argus/backend/plugins/driver_matrix_tests/model.py +1 -1
- argus/backend/plugins/generic/model.py +1 -1
- argus/backend/plugins/loader.py +2 -2
- argus/backend/plugins/sct/controller.py +31 -0
- argus/backend/plugins/sct/plugin.py +2 -1
- argus/backend/plugins/sct/service.py +89 -3
- argus/backend/plugins/sct/testrun.py +8 -2
- argus/backend/plugins/sct/types.py +18 -0
- argus/backend/plugins/sct/udt.py +6 -0
- argus/backend/plugins/sirenada/model.py +1 -1
- argus/backend/service/argus_service.py +11 -2
- argus/backend/service/jenkins_service.py +175 -1
- argus/backend/service/stats.py +35 -13
- argus/client/sct/client.py +34 -0
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/METADATA +2 -1
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/RECORD +22 -22
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/WHEEL +1 -1
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/LICENSE +0 -0
- {argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/entry_points.txt +0 -0
argus/backend/cli.py
CHANGED

@@ -22,7 +22,7 @@ def sync_models_command():
         LOGGER.info("Synchronizing plugin type %s...", user_type.__name__)
         sync_type(ks_name=cluster.config["SCYLLA_KEYSPACE_NAME"], type_model=user_type)
     LOGGER.info("Synchronizing plugin models...")
-    for model in all_plugin_models():
+    for model in all_plugin_models(True):
         LOGGER.info("Synchronizing plugin model %s...", model.__name__)
         sync_table(model=model, keyspaces=[cluster.config["SCYLLA_KEYSPACE_NAME"]])
 
argus/backend/controller/testrun_api.py
CHANGED

@@ -6,6 +6,7 @@ from flask import (
 )
 
 from argus.backend.error_handlers import handle_api_exception
+from argus.backend.models.web import ArgusTest
 from argus.backend.service.jenkins_service import JenkinsService
 from argus.backend.service.testrun import TestRunService
 from argus.backend.service.user import api_login_required
@@ -336,4 +337,118 @@ def get_queue_info():
         "response": {
             "queueItem": result
         }
-    }
+    }
+
+
+@bp.route("/jenkins/clone/targets")
+@api_login_required
+def get_clone_targets():
+    test_id = request.args.get("testId")
+    if not test_id:
+        raise Exception("No testId provided")
+    service = JenkinsService()
+    result = service.get_releases_for_clone(test_id)
+
+    return {
+        "status": "ok",
+        "response": {
+            "targets": result
+        }
+    }
+
+
+@bp.route("/jenkins/clone/groups")
+@api_login_required
+def get_groups_for_target():
+    target_id = request.args.get("targetId")
+    if not target_id:
+        raise Exception("No targetId provided")
+    service = JenkinsService()
+    result = service.get_groups_for_release(target_id)
+
+    return {
+        "status": "ok",
+        "response": {
+            "groups": result
+        }
+    }
+
+
+@bp.route("/jenkins/clone/create", methods=["POST"])
+@api_login_required
+def clone_jenkins_job():
+
+    payload = get_payload(request)
+    service = JenkinsService()
+
+    result = service.clone_job(
+        current_test_id=payload["currentTestId"],
+        new_name=payload["newName"],
+        target=payload["target"],
+        group=payload["group"],
+        advanced_settings=payload["advancedSettings"],
+    )
+
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+
+@bp.route("/jenkins/clone/build", methods=["POST"])
+@api_login_required
+def clone_build_jenkins_job():
+
+    payload = get_payload(request)
+    service = JenkinsService()
+
+    result = service.clone_build_job(build_id=payload["buildId"], params=payload["parameters"])
+
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+
+@bp.route("/jenkins/clone/settings")
+@api_login_required
+def get_clone_job_advanced_settings():
+    build_id = request.args.get("buildId")
+    if not build_id:
+        raise Exception("No testId provided")
+    service = JenkinsService()
+    result = service.get_advanced_settings(build_id)
+
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+
+@bp.route("/jenkins/clone/settings/change", methods=["POST"])
+@api_login_required
+def set_job_settings():
+    payload = get_payload(request)
+    service = JenkinsService()
+    test = ArgusTest.get(build_system_id=payload["buildId"])
+    result = service.adjust_job_settings(build_id=test.build_system_id, plugin_name=test.plugin_name, settings=payload["settings"])
+
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+
+@bp.route("/jenkins/clone/settings/validate", methods=["POST"])
+@api_login_required
+def clone_validate_new_settings():
+
+    payload = get_payload(request)
+    service = JenkinsService()
+
+    result = service.verify_job_settings(build_id=payload["buildId"], new_settings=payload["newSettings"])
+
+    return {
+        "status": "ok",
+        "response": result
+    }
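All of the new clone routes return JSON wrapped in a `{"status": "ok", "response": ...}` envelope and are guarded by `api_login_required`. A minimal sketch of driving the two read-only routes with a plain HTTP client; the `/api/v1` prefix, the auth header, and the UUID are assumptions for illustration, not something this diff confirms:

```python
import requests

# Assumed values; adjust to a real Argus deployment and its auth scheme.
BASE_URL = "https://argus.example.com/api/v1"
SESSION = requests.Session()
SESSION.headers["Authorization"] = "token <api-token>"  # placeholder auth

# Releases a test could be cloned into (GET /jenkins/clone/targets?testId=...).
targets = SESSION.get(f"{BASE_URL}/jenkins/clone/targets",
                      params={"testId": "00000000-0000-0000-0000-000000000000"}).json()

# Groups available under a chosen release (GET /jenkins/clone/groups?targetId=...).
groups = SESSION.get(f"{BASE_URL}/jenkins/clone/groups",
                     params={"targetId": "<release-id>"}).json()

# Both endpoints wrap their payloads in the same status/response envelope.
print(targets["status"], groups["status"])
```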
argus/backend/plugins/core.py
CHANGED

@@ -132,12 +132,20 @@ class PluginModelBase(Model):
         return bound_query
 
     @classmethod
-    def get_stats_for_release(cls, release: ArgusRelease):
+    def get_stats_for_release(cls, release: ArgusRelease, build_ids=list[str]):
         cluster = ScyllaCluster.get()
         query = cluster.prepare(cls._stats_query())
-
-
-
+        futures = []
+        step = 0
+        step_size = 90
+        total_tests = len(build_ids)
+        while total_tests > 0:
+            id_slice = build_ids[step:step+step_size]
+            futures.append(cluster.session.execute_async(query=query, parameters=(id_slice,)))
+            step += step_size
+            total_tests = max(0, total_tests - step_size)
+
+        return futures
 
     @classmethod
     def get_run_meta_by_build_id(cls, build_id: str, limit: int = 10):
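The rewritten `get_stats_for_release` now fans the stats query out in slices of up to 90 build IDs and hands the async futures back to the caller. A standalone sketch of the same slicing arithmetic, with the ScyllaDB session swapped out for a plain list so it can run anywhere:

```python
def chunk_build_ids(build_ids: list[str], step_size: int = 90) -> list[list[str]]:
    """Mirror of the slicing loop added to get_stats_for_release: walk the
    list in fixed-size windows until the remaining count reaches zero."""
    slices = []
    step = 0
    total = len(build_ids)
    while total > 0:
        slices.append(build_ids[step:step + step_size])
        step += step_size
        total = max(0, total - step_size)
    return slices


# 200 fake build IDs split into windows of 90, 90 and 20.
print([len(s) for s in chunk_build_ids([f"job/{i}" for i in range(200)])])
```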
argus/backend/plugins/driver_matrix_tests/model.py
CHANGED

@@ -44,7 +44,7 @@ class DriverTestRun(PluginModelBase):
     @classmethod
     def _stats_query(cls) -> str:
         return ("SELECT id, test_id, group_id, release_id, status, start_time, build_job_url, build_id, "
-                f"assignee, end_time, investigation_status, heartbeat, scylla_version FROM {cls.table_name()} WHERE
+                f"assignee, end_time, investigation_status, heartbeat, scylla_version FROM {cls.table_name()} WHERE build_id IN ? PER PARTITION LIMIT 15")
 
     @classmethod
     def get_distinct_product_versions(cls, release: ArgusRelease) -> list[str]:
argus/backend/plugins/generic/model.py
CHANGED

@@ -24,7 +24,7 @@ class GenericRun(PluginModelBase):
     @classmethod
     def _stats_query(cls) -> str:
         return ("SELECT id, test_id, group_id, release_id, status, start_time, build_job_url, build_id, "
-                f"assignee, end_time, investigation_status, heartbeat, scylla_version FROM {cls.table_name()} WHERE
+                f"assignee, end_time, investigation_status, heartbeat, scylla_version FROM {cls.table_name()} WHERE build_id IN ? PER PARTITION LIMIT 15")
 
     @classmethod
     def get_distinct_product_versions(cls, release: ArgusRelease, cluster: ScyllaCluster = None) -> list[str]:
argus/backend/plugins/loader.py
CHANGED

@@ -32,8 +32,8 @@ def plugin_loader() -> dict[str, PluginInfoBase]:
 AVAILABLE_PLUGINS = plugin_loader()
 
 
-def all_plugin_models() -> list[PluginModelBase]:
-    return [model for plugin in AVAILABLE_PLUGINS.values() for model in plugin.all_models]
+def all_plugin_models(include_all=False) -> list[PluginModelBase]:
+    return [model for plugin in AVAILABLE_PLUGINS.values() for model in plugin.all_models if issubclass(model, PluginModelBase) or include_all]
 
 
 def all_plugin_types():
argus/backend/plugins/sct/controller.py
CHANGED

@@ -81,6 +81,17 @@ def sct_resource_update_shards(run_id: str, resource_name: str):
     }
 
 
+@bp.route("/<string:run_id>/resource/<string:resource_name>/update", methods=["POST"])
+@api_login_required
+def sct_resource_update(run_id: str, resource_name: str):
+    payload = get_payload(request)
+    result = SCTService.update_resource(run_id=run_id, resource_name=resource_name, update_data=payload["update_data"])
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+
 @bp.route("/<string:run_id>/nemesis/submit", methods=["POST"])
 @api_login_required
 def sct_nemesis_submit(run_id: str):
@@ -152,3 +163,23 @@ def sct_get_kernel_report(release_name: str):
         "status": "ok",
         "response": result
     }
+
+
+@bp.route("/<string:run_id>/junit/submit", methods=["POST"])
+@api_login_required
+def sct_submit_junit_report(run_id: str):
+    payload = get_payload(request)
+    result = SCTService.junit_submit(run_id, payload["file_name"], payload["content"])
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+@bp.route("/<string:run_id>/junit/get_all", methods=["GET"])
+@api_login_required
+def sct_get_junit_reports(run_id: str):
+    result = SCTService.junit_get_all(run_id)
+    return {
+        "status": "ok",
+        "response": result
+    }

argus/backend/plugins/sct/plugin.py
CHANGED

@@ -1,6 +1,6 @@
 from flask import Blueprint
 
-from argus.backend.plugins.sct.testrun import SCTTestRun
+from argus.backend.plugins.sct.testrun import SCTJunitReports, SCTTestRun
 from argus.backend.plugins.sct.controller import bp as sct_bp
 from argus.backend.plugins.core import PluginInfoBase, PluginModelBase
 from argus.backend.plugins.sct.udt import (
@@ -23,6 +23,7 @@ class PluginInfo(PluginInfoBase):
     controller: Blueprint = sct_bp
     all_models = [
         SCTTestRun,
+        SCTJunitReports,
     ]
     all_types = [
         NemesisRunInfo,

argus/backend/plugins/sct/service.py
CHANGED

@@ -1,12 +1,15 @@
+import base64
 from dataclasses import dataclass
 from functools import reduce
 import logging
 import math
+import re
 from time import time
+from xml.etree import ElementTree
 from flask import g
 from argus.backend.models.web import ArgusEventTypes
-from argus.backend.plugins.sct.testrun import SCTTestRun, SubtestType
-from argus.backend.plugins.sct.types import GeminiResultsRequest, PerformanceResultsRequest
+from argus.backend.plugins.sct.testrun import SCTJunitReports, SCTTestRun, SubtestType
+from argus.backend.plugins.sct.types import GeminiResultsRequest, PerformanceResultsRequest, ResourceUpdateRequest
 from argus.backend.plugins.sct.udt import (
     CloudInstanceDetails,
     CloudResource,
@@ -60,7 +63,8 @@ class SCTService:
             run: SCTTestRun = SCTTestRun.get(id=run_id)
             for package_dict in packages:
                 package = PackageVersion(**package_dict)
-                run.packages
+                if package not in run.packages:
+                    run.packages.append(package)
             run.save()
         except SCTTestRun.DoesNotExist as exception:
             LOGGER.error("Run %s not found for SCTTestRun", run_id)
@@ -277,6 +281,33 @@
 
         return "updated"
 
+    @staticmethod
+    def update_resource(run_id: str, resource_name: str, update_data: ResourceUpdateRequest) -> str:
+        try:
+            fields_updated = {}
+            run: SCTTestRun = SCTTestRun.get(id=run_id)
+            resource = next(res for res in run.get_resources() if res.name == resource_name)
+            instance_info = update_data.pop("instance_info", None)
+            resource.state = ResourceState(update_data.get("state", resource.state)).value
+            if instance_info:
+                resource_instance_info = resource.get_instance_info()
+                for k, v in instance_info.items():
+                    if k in resource_instance_info.keys():
+                        resource_instance_info[k] = v
+                        fields_updated[k] = v
+            run.save()
+        except StopIteration as exception:
+            LOGGER.error("Resource %s not found in run %s", resource_name, run_id)
+            raise SCTServiceException("Resource not found", resource_name) from exception
+        except SCTTestRun.DoesNotExist as exception:
+            LOGGER.error("Run %s not found for SCTTestRun", run_id)
+            raise SCTServiceException("Run not found", run_id) from exception
+
+        return {
+            "state": "updated",
+            "fields": fields_updated
+        }
+
     @staticmethod
     def terminate_resource(run_id: str, resource_name: str, reason: str) -> str:
         try:
@@ -348,6 +379,8 @@
             wrapper = EventsBySeverity(severity=event.severity,
                                        event_amount=event.total_events, last_events=event.messages)
             run.get_events().append(wrapper)
+            coredumps = SCTService.locate_coredumps(run, run.get_events())
+            run.submit_logs(coredumps)
             run.save()
         except SCTTestRun.DoesNotExist as exception:
             LOGGER.error("Run %s not found for SCTTestRun", run_id)
@@ -355,6 +388,28 @@
 
         return "added"
 
+    @staticmethod
+    def locate_coredumps(run: SCTTestRun, events: list[EventsBySeverity]) -> list[dict]:
+        flat_messages: list[str] = []
+        links = []
+        for es in events:
+            flat_messages.extend(es.last_events)
+        coredump_events = filter(lambda v: "coredumpevent" in v.lower(), flat_messages)
+        for idx, event in enumerate(coredump_events):
+            core_pattern = r"corefile_url=(?P<url>.+)$"
+            node_name_pattern = r"node=(?P<name>.+)$"
+            core_url_match = re.search(core_pattern, event, re.MULTILINE)
+            node_name_match = re.search(node_name_pattern, event, re.MULTILINE)
+            if core_url_match:
+                node_name = node_name_match.group("name") if node_name_match else f"unknown-node-{idx}"
+                url = core_url_match.group("url")
+                log_link = {
+                    "log_name": f"COREDUMP-{node_name}",
+                    "log_link": url
+                }
+                links.append(log_link)
+        return links
+
     @staticmethod
     def get_scylla_version_kernels_report(release_name: str):
         all_release_runs = SCTTestRun.get_version_data_for_release(release_name=release_name)
@@ -391,3 +446,34 @@
             "versions": kernels_by_version,
             "metadata": kernel_metadata
         }
+
+    @staticmethod
+    def junit_submit(run_id: str, file_name: str, content: str) -> bool:
+        try:
+            report = SCTJunitReports.get(test_id=run_id, file_name=file_name)
+            if report:
+                raise SCTServiceException(f"Report {file_name} already exists.", file_name)
+        except SCTJunitReports.DoesNotExist:
+            pass
+        report = SCTJunitReports()
+        report.test_id = run_id
+        report.file_name = file_name
+
+        xml_content = str(base64.decodebytes(bytes(content, encoding="utf-8")), encoding="utf-8")
+        try:
+            _ = ElementTree.fromstring(xml_content)
+        except Exception:
+            raise SCTServiceException(f"Malformed JUnit report submitted")
+
+        report.report = xml_content
+        report.save()
+
+        return True
+
+    @staticmethod
+    def junit_get_all(run_id: str) -> list[SCTJunitReports]:
+        return list(SCTJunitReports.filter(test_id=run_id).all())
+
+    @staticmethod
+    def junit_get_single(run_id: str, file_name: str) -> SCTJunitReports:
+        return SCTJunitReports.get(test_id=run_id, file_name=file_name)
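`locate_coredumps` scans the collected event messages for CoreDumpEvent entries and turns their `corefile_url=` and `node=` fields into log links. A small illustration of the same two regexes against a made-up event message (the real SCT event text format is not part of this diff):

```python
import re

CORE_PATTERN = r"corefile_url=(?P<url>.+)$"
NODE_NAME_PATTERN = r"node=(?P<name>.+)$"

# Illustrative event text only; the actual CoreDumpEvent message layout
# produced by SCT is not shown in this diff.
sample_event = (
    "2023-05-01 12:00:00 CoreDumpEvent severity=ERROR\n"
    "node=longevity-db-node-1\n"
    "corefile_url=https://storage.example.com/coredumps/core.scylla.1234.gz"
)

if "coredumpevent" in sample_event.lower():
    url = re.search(CORE_PATTERN, sample_event, re.MULTILINE)
    node = re.search(NODE_NAME_PATTERN, sample_event, re.MULTILINE)
    if url:
        # Same shape as the log_link dicts the service submits via run.submit_logs().
        print({"log_name": f"COREDUMP-{node.group('name') if node else 'unknown-node-0'}",
               "log_link": url.group("url")})
```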
argus/backend/plugins/sct/testrun.py
CHANGED

@@ -6,7 +6,7 @@ from typing import Optional
 from uuid import UUID
 
 from cassandra.cqlengine import columns
-from cassandra.cqlengine.models import _DoesNotExist
+from cassandra.cqlengine.models import _DoesNotExist, Model
 from argus.backend.db import ScyllaCluster
 from argus.backend.models.web import ArgusRelease
 from argus.backend.plugins.core import PluginModelBase
@@ -113,7 +113,7 @@ class SCTTestRun(PluginModelBase):
     @classmethod
     def _stats_query(cls) -> str:
         return ("SELECT id, test_id, group_id, release_id, status, start_time, build_job_url, build_id, "
-                f"assignee, end_time, investigation_status, heartbeat, scylla_version FROM {cls.table_name()} WHERE
+                f"assignee, end_time, investigation_status, heartbeat, scylla_version FROM {cls.table_name()} WHERE build_id IN ? PER PARTITION LIMIT 15")
 
     @classmethod
     def load_test_run(cls, run_id: UUID) -> 'SCTTestRun':
@@ -248,3 +248,9 @@
             self._add_new_event_type(event)
 
         self._collect_event_message(event, event_message)
+
+
+class SCTJunitReports(Model):
+    test_id = columns.UUID(primary_key=True, partition_key=True, required=True)
+    file_name = columns.Text(primary_key=True, required=True)
+    report = columns.Text(required=True)
argus/backend/plugins/sct/types.py
CHANGED

@@ -36,3 +36,21 @@ class PerformanceResultsRequest(TypedDict):
     perf_total_errors: str
 
     histograms: list[dict[str, RawHDRHistogram]] | None
+
+
+class InstanceInfoUpdateRequest(TypedDict):
+    provider: str
+    region: str
+    public_ip: str
+    private_ip: str
+    dc_name: str
+    rack_name: str
+    creation_time: int
+    termination_time: int
+    termination_reason: str
+    shards_amount: int
+
+
+class ResourceUpdateRequest(TypedDict):
+    state: str
+    instance_info: InstanceInfoUpdateRequest
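These TypedDicts describe the `update_data` payload that the new resource-update route hands to `SCTService.update_resource`. A sketch of assembling one; every field value below is a placeholder, and the accepted `state` strings come from the `ResourceState` enum, which this diff does not show:

```python
from argus.backend.plugins.sct.types import (
    InstanceInfoUpdateRequest,
    ResourceUpdateRequest,
)

# Placeholder values; only the keys are defined by the TypedDicts above.
instance_info: InstanceInfoUpdateRequest = {
    "provider": "aws",
    "region": "eu-west-1",
    "public_ip": "203.0.113.10",
    "private_ip": "10.0.0.10",
    "dc_name": "eu-west",
    "rack_name": "1a",
    "creation_time": 1700000000,
    "termination_time": 0,
    "termination_reason": "",
    "shards_amount": 14,
}

update: ResourceUpdateRequest = {
    "state": "running",  # assumed value; valid states are defined by ResourceState
    "instance_info": instance_info,
}
```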
argus/backend/plugins/sct/udt.py
CHANGED

@@ -14,6 +14,12 @@ class PackageVersion(UserType):
     build_id = columns.Text()
 
 
+    def __eq__(self, other):
+        if isinstance(other, PackageVersion):
+            return all(getattr(self, a) == getattr(other, a) for a in ["name", "version", "date", "revision_id", "build_id"])
+        return super().__eq__(other)
+
+
 class CloudInstanceDetails(UserType):
     __type_name__ = "CloudInstanceDetails_v3"
     provider = columns.Text()
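With `__eq__` defined, two `PackageVersion` UDT instances carrying the same five fields now compare equal, which is what lets the `if package not in run.packages` check in `SCTService` skip duplicate submissions. A sketch; the field values are placeholders, and it assumes the UDT accepts these columns as keyword arguments, as cqlengine user types normally do:

```python
from argus.backend.plugins.sct.udt import PackageVersion

# Two UDT instances describing the same package now compare equal.
scylla_a = PackageVersion(name="scylla", version="5.2.0", date="2023-05-01",
                          revision_id="abc123", build_id="deadbeef")
scylla_b = PackageVersion(name="scylla", version="5.2.0", date="2023-05-01",
                          revision_id="abc123", build_id="deadbeef")

assert scylla_a == scylla_b
assert scylla_a in [scylla_b]  # membership tests rely on the same __eq__
```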
argus/backend/plugins/sirenada/model.py
CHANGED

@@ -47,7 +47,7 @@ class SirenadaRun(PluginModelBase):
     @classmethod
     def _stats_query(cls) -> str:
         return ("SELECT id, test_id, group_id, release_id, status, start_time, build_job_url, build_id, "
-                f"assignee, end_time, investigation_status, heartbeat, scylla_version FROM {cls.table_name()} WHERE
+                f"assignee, end_time, investigation_status, heartbeat, scylla_version FROM {cls.table_name()} WHERE build_id IN ? PER PARTITION LIMIT 15")
 
     @classmethod
     def get_distinct_product_versions(cls, release: ArgusRelease, cluster: ScyllaCluster = None) -> list[str]:

argus/backend/service/argus_service.py
CHANGED

@@ -487,8 +487,17 @@ class ArgusService:
         groups = ArgusGroup.filter(release_id=release_id).all()
         group_ids = [group.id for group in groups if group.enabled]
 
-
-        schedule_ids =
+        total_ids = len(group_ids)
+        schedule_ids = set()
+        step = 0
+        step_size = 60
+        while total_ids > 0:
+            group_slice = group_ids[step:step+step_size]
+            scheduled_groups = ArgusScheduleGroup.filter(release_id=release.id, group_id__in=group_slice).all()
+            batch_ids = {schedule.schedule_id for schedule in scheduled_groups}
+            schedule_ids.union(batch_ids)
+            total_ids = max(0, total_ids - step_size)
+            step += step_size
 
         schedules = ArgusSchedule.filter(release_id=release.id, id__in=tuple(schedule_ids)).all()
 

argus/backend/service/jenkins_service.py
CHANGED

@@ -1,12 +1,17 @@
+import re
+import requests
 from typing import Any, TypedDict
+from uuid import UUID
 import xml.etree.ElementTree as ET
 import jenkins
 import logging
 
 from flask import current_app, g
 
-
+from argus.backend.models.web import ArgusGroup, ArgusRelease, ArgusTest, UserOauthToken
 
+LOGGER = logging.getLogger(__name__)
+GITHUB_REPO_RE = r"(?P<http>^https?:\/\/(www\.)?github\.com\/(?P<user>[\w\d\-]+)\/(?P<repo>[\w\d\-]+)(\.git)?$)|(?P<ssh>git@github\.com:(?P<ssh_user>[\w\d\-]+)\/(?P<ssh_repo>[\w\d\-]+)(\.git)?)"
 
 class Parameter(TypedDict):
     _class: str
@@ -15,9 +20,31 @@ class Parameter(TypedDict):
     value: Any
 
 
+class JenkinsServiceError(Exception):
+    pass
+
+
 class JenkinsService:
     RESERVED_PARAMETER_NAME = "requested_by_user"
 
+    SETTINGS_CONFIG_MAP = {
+        "scylla-cluster-tests": {
+            "gitRepo": "*//scm/userRemoteConfigs/hudson.plugins.git.UserRemoteConfig/url",
+            "gitBranch": "*//scm/branches/hudson.plugins.git.BranchSpec/name",
+            "pipelineFile": "*//scriptPath",
+        },
+        "driver-matrix-tests": {
+            "gitRepo": "*//scm/userRemoteConfigs/hudson.plugins.git.UserRemoteConfig/url",
+            "gitBranch": "*//scm/branches/hudson.plugins.git.BranchSpec/name",
+            "pipelineFile": "*//scriptPath",
+        },
+        "sirenada": {
+            "gitRepo": "*//scm/userRemoteConfigs/hudson.plugins.git.UserRemoteConfig/url",
+            "gitBranch": "*//scm/branches/hudson.plugins.git.BranchSpec/name",
+            "pipelineFile": "*//scriptPath",
+        }
+    }
+
     def __init__(self) -> None:
         self._jenkins = jenkins.Jenkins(url=current_app.config["JENKINS_URL"],
                                         username=current_app.config["JENKINS_USER"],
@@ -42,6 +69,153 @@ class JenkinsService:
 
         return params
 
+    def get_releases_for_clone(self, test_id: str):
+        test_id = UUID(test_id)
+        # TODO: Filtering based on origin location / user preferences
+        _: ArgusTest = ArgusTest.get(id=test_id)
+
+        releases = list(ArgusRelease.all())
+
+        return sorted(releases, key=lambda r: r.pretty_name if r.pretty_name else r.name)
+
+    def get_groups_for_release(self, release_id: str):
+        groups = list(ArgusGroup.filter(release_id=release_id).all())
+
+        return sorted(groups, key=lambda g: g.pretty_name if g.pretty_name else g.name)
+
+    def _verify_sct_settings(self, new_settings: dict[str, str]) -> tuple[bool, str]:
+        if not (match := re.match(GITHUB_REPO_RE, new_settings["gitRepo"])):
+            return (False, "Repository doesn't conform to GitHub schema")
+
+        git_info = match.groupdict()
+        if git_info.get("ssh"):
+            repo = git_info["ssh_repo"]
+            user = git_info["ssh_user"]
+        else:
+            repo = git_info["repo"]
+            user = git_info["user"]
+
+        user_tokens = UserOauthToken.filter(user_id=g.user.id).all()
+        token = None
+        for tok in user_tokens:
+            if tok.kind == "github":
+                token = tok.token
+                break
+        if not token:
+            raise JenkinsServiceError("Github token not found")
+
+        response = requests.get(
+            url=f"https://api.github.com/repos/{user}/{repo}/contents/{new_settings['pipelineFile']}?ref={new_settings['gitBranch']}",
+            headers={
+                "Accept": "application/vnd.github+json",
+                "Authorization": f"Bearer {token}",
+            }
+        )
+
+        if response.status_code == 404:
+            return (False, f"Pipeline file not found in the <a href=\"https://github.com/{user}/{repo}/tree/{new_settings['gitBranch']}\"> target repository</a>, please check the repository before continuing")
+
+        if response.status_code == 403:
+            return (True, "No access to this repository using your token. The pipeline file cannot be verified.")
+
+        if response.status_code == 200:
+            return (True, "")
+
+        return (False, "Generic Error")
+
+    def verify_job_settings(self, build_id: str, new_settings: dict[str, str]) -> tuple[bool, str]:
+        PLUGIN_MAP = {
+            "scylla-cluster-tests": self._verify_sct_settings,
+            # for now they match
+            "sirenada": self._verify_sct_settings,
+            "driver-matrix-tests": self._verify_sct_settings,
+        }
+        test: ArgusTest = ArgusTest.get(build_system_id=build_id)
+        plugin_name = test.plugin_name
+
+        validated, message = PLUGIN_MAP.get(plugin_name, lambda _: (True, ""))(new_settings)
+
+        return {
+            "validated": validated,
+            "message": message,
+        }
+
+    def get_advanced_settings(self, build_id: str):
+        test: ArgusTest = ArgusTest.get(build_system_id=build_id)
+        plugin_name = test.plugin_name
+
+        if not (plugin_settings := self.SETTINGS_CONFIG_MAP.get(plugin_name)):
+            return {}
+
+        settings = {}
+        raw_config = self._jenkins.get_job_config(name=build_id)
+        config = ET.fromstring(raw_config)
+
+        for setting, xpath in plugin_settings.items():
+            value = config.find(xpath)
+            settings[setting] = value.text
+
+        return settings
+
+    def adjust_job_settings(self, build_id: str, plugin_name: str, settings: dict[str, str]):
+        xpath_map = self.SETTINGS_CONFIG_MAP.get(plugin_name)
+        if not xpath_map:
+            return
+
+        config = self._jenkins.get_job_config(name=build_id)
+        xml = ET.fromstring(config)
+        for setting, value in settings.items():
+            element = xml.find(xpath_map[setting])
+            element.text = value
+
+        adjusted_config = ET.tostring(xml, encoding="unicode")
+        self._jenkins.reconfig_job(name=build_id, config_xml=adjusted_config)
+
+    def clone_job(self, current_test_id: str, new_name: str, target: str, group: str, advanced_settings: bool | dict[str, str]):
+        cloned_test: ArgusTest = ArgusTest.get(id=current_test_id)
+        target_release: ArgusRelease = ArgusRelease.get(id=target)
+        target_group: ArgusGroup = ArgusGroup.get(id=group)
+
+        if target_group.id == cloned_test.id and new_name == cloned_test.name:
+            raise JenkinsServiceError("Unable to clone: source and destination are the same")
+
+        if not target_group.build_system_id:
+            raise JenkinsServiceError("Unable to clone: target group is missing jenkins folder path")
+
+        jenkins_new_build_id = f"{target_group.build_system_id}/{new_name}"
+
+        new_test = ArgusTest()
+        new_test.name = new_name
+        new_test.build_system_id = jenkins_new_build_id
+        new_test.group_id = target_group.id
+        new_test.release_id = target_release.id
+        new_test.plugin_name = cloned_test.plugin_name
+
+        old_config = self._jenkins.get_job_config(name=cloned_test.build_system_id)
+        LOGGER.info(old_config)
+        xml = ET.fromstring(old_config)
+        display_name = xml.find("displayName")
+        display_name.text = new_name
+        new_config = ET.tostring(xml, encoding="unicode")
+        self._jenkins.create_job(name=jenkins_new_build_id, config_xml=new_config)
+        new_job_info = self._jenkins.get_job_info(name=jenkins_new_build_id)
+        new_test.build_system_url = new_job_info["url"]
+        new_test.save()
+
+        if advanced_settings:
+            self.adjust_job_settings(build_id=jenkins_new_build_id, plugin_name=new_test.plugin_name, settings=advanced_settings)
+
+        return {
+            "new_job": new_job_info,
+            "new_entity": new_test,
+        }
+
+    def clone_build_job(self, build_id: str, params: dict[str, str]):
+        queue_item = self.build_job(build_id=build_id, params=params)
+        return {
+            "queueItem": queue_item,
+        }
+
     def build_job(self, build_id: str, params: dict, user_override: str = None):
         queue_number = self._jenkins.build_job(build_id, {
             **params,
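`GITHUB_REPO_RE` accepts both https and ssh GitHub remotes and exposes parallel `user`/`repo` and `ssh_user`/`ssh_repo` groups, which `_verify_sct_settings` uses to build the GitHub contents-API URL. The pattern can be exercised on its own:

```python
import re

# Copied verbatim from the diff above.
GITHUB_REPO_RE = r"(?P<http>^https?:\/\/(www\.)?github\.com\/(?P<user>[\w\d\-]+)\/(?P<repo>[\w\d\-]+)(\.git)?$)|(?P<ssh>git@github\.com:(?P<ssh_user>[\w\d\-]+)\/(?P<ssh_repo>[\w\d\-]+)(\.git)?)"

for url in ("https://github.com/scylladb/scylla-cluster-tests",
            "git@github.com:scylladb/scylla-cluster-tests.git"):
    match = re.match(GITHUB_REPO_RE, url)
    info = match.groupdict()
    # The service prefers the ssh_* groups when the "ssh" alternative matched.
    if info.get("ssh"):
        print(info["ssh_user"], info["ssh_repo"])
    else:
        print(info["user"], info["repo"])
```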
argus/backend/service/stats.py
CHANGED

@@ -1,3 +1,5 @@
+from collections import defaultdict
+from functools import reduce
 import logging
 
 from datetime import datetime
@@ -182,7 +184,7 @@ class ReleaseStats:
             **aggregated_investigation_status
         }
 
-    def collect(self, rows: list[TestRunStatRow], limited=False, force=False) -> None:
+    def collect(self, rows: list[TestRunStatRow], limited=False, force=False, dict: dict | None = None, tests=None) -> None:
         self.forced_collection = force
         if not self.release.enabled and not force:
             return
@@ -193,10 +195,19 @@
         ).all())
 
         self.rows = rows
+        self.dict = dict
         if not limited or force:
-            self.issues =
-
-
+            self.issues = reduce(
+                lambda acc, row: acc[row["run_id"]].append(row) or acc,
+                ArgusGithubIssue.filter(release_id=self.release.id).all(),
+                defaultdict(list)
+            )
+            self.comments = reduce(
+                lambda acc, row: acc[row["test_run_id"]].append(row) or acc,
+                ArgusTestRunComment.filter(release_id=self.release.id).all(),
+                defaultdict(list)
+            )
+            self.all_tests = ArgusTest.filter(release_id=self.release.id).all() if not tests else tests
         groups: list[ArgusGroup] = ArgusGroup.filter(release_id=self.release.id).all()
         for group in groups:
             if group.enabled:
@@ -292,13 +303,17 @@ class TestStats:
             "hasBugReport": self.has_bug_report,
             "hasComments": self.has_comments,
             "buildNumber": self.tracked_run_number,
+            "buildId": self.test.build_system_id,
         }
 
     def collect(self, limited=False):
 
         # TODO: Parametrize run limit
         # FIXME: This is only a mitigation, build_number overflows on the build system side.
-
+        if not self.parent_group.parent_release.dict:
+            last_runs = [r for r in self.parent_group.parent_release.rows if r["build_id"] == self.test.build_system_id]
+        else:
+            last_runs = self.parent_group.parent_release.dict.get(self.test.build_system_id, [])
         last_runs: list[TestRunStatRow] = sorted(
             last_runs, reverse=True, key=lambda r: get_build_number(r["build_job_url"]))
         try:
@@ -326,8 +341,8 @@
                 "build_job_name": run["build_id"],
                 "start_time": run["start_time"],
                 "assignee": run["assignee"],
-                "issues": [dict(
-                "comments": [dict(
+                "issues": [dict(issue.items()) for issue in self.parent_group.parent_release.issues[run["id"]]],
+                "comments": [dict(comment.items()) for comment in self.parent_group.parent_release.comments[run["id"]]],
             }
             for run in last_runs
         ]
@@ -335,8 +350,8 @@
             target_run = next(run for run in self.last_runs if run["id"] == worst_case[1]["id"])
         except StopIteration:
             target_run = worst_case[1]
-        target_run["issues"] = [dict(
-        target_run["comments"] = [dict(
+        target_run["issues"] = [dict(issue.items()) for issue in self.parent_group.parent_release.issues[target_run["id"]]]
+        target_run["comments"] = [dict(comment.items()) for comment in self.parent_group.parent_release.comments[target_run["id"]]]
        self.has_bug_report = len(target_run["issues"]) > 0
        self.parent_group.parent_release.has_bug_report = self.has_bug_report or self.parent_group.parent_release.has_bug_report
        self.has_comments = len(target_run["comments"]) > 0
@@ -356,13 +371,15 @@ class ReleaseStatsCollector:
 
     def collect(self, limited=False, force=False, include_no_version=False) -> dict:
         self.release: ArgusRelease = ArgusRelease.get(name=self.release_name)
-
-
+        all_tests = ArgusTest.filter(release_id=self.release.id).all()
+        build_ids = [t.build_system_id for t in all_tests]
+        self.release_rows = [futures for plugin in all_plugin_models()
+                             for futures in plugin.get_stats_for_release(release=self.release, build_ids=build_ids)]
+        self.release_rows = [row for future in self.release_rows for row in future.result()]
         if self.release.dormant and not force:
             return {
                 "dormant": True
             }
-
         if self.release_version:
             if include_no_version:
                 expr = lambda row: row["scylla_version"] == self.release_version or not row["scylla_version"]
@@ -376,7 +393,12 @@
         else:
             expr = lambda row: row["scylla_version"]
         self.release_rows = list(filter(expr, self.release_rows))
+        self.release_dict = {}
+        for row in self.release_rows:
+            runs = self.release_dict.get(row["build_id"], [])
+            runs.append(row)
+            self.release_dict[row["build_id"]] = runs
 
         self.release_stats = ReleaseStats(release=self.release)
-        self.release_stats.collect(rows=self.release_rows, limited=limited, force=force)
+        self.release_stats.collect(rows=self.release_rows, limited=limited, force=force, dict=self.release_dict, tests=all_tests)
         return self.release_stats.to_dict()
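The new `issues` and `comments` lookups use a `reduce` over a `defaultdict(list)` so every run ID maps to its rows in a single pass. The same accumulator trick in isolation, with plain dicts standing in for the ArgusGithubIssue / ArgusTestRunComment rows:

```python
from collections import defaultdict
from functools import reduce

# list.append returns None, so "append(...) or acc" hands the dict back
# to reduce on every step.
rows = [
    {"run_id": "run-1", "issue": 101},
    {"run_id": "run-2", "issue": 102},
    {"run_id": "run-1", "issue": 103},
]

by_run = reduce(
    lambda acc, row: acc[row["run_id"]].append(row) or acc,
    rows,
    defaultdict(list),
)

print(len(by_run["run-1"]), len(by_run["run-2"]))  # 2 1
```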
argus/client/sct/client.py
CHANGED

@@ -1,3 +1,5 @@
+import base64
+from typing import Any
 from uuid import UUID
 from dataclasses import asdict
 from argus.backend.plugins.sct.types import GeminiResultsRequest, PerformanceResultsRequest
@@ -15,6 +17,7 @@ class ArgusSCTClient(ArgusClient):
         SUBMIT_SCREENSHOTS = "/sct/$id/screenshots/submit"
         CREATE_RESOURCE = "/sct/$id/resource/create"
         TERMINATE_RESOURCE = "/sct/$id/resource/$name/terminate"
+        UPDATE_RESOURCE = "/sct/$id/resource/$name/update"
         SET_SCT_RUNNER = "/sct/$id/sct_runner/set"
         UPDATE_SHARDS_FOR_RESOURCE = "/sct/$id/resource/$name/shards"
         SUBMIT_NEMESIS = "/sct/$id/nemesis/submit"
@@ -22,6 +25,7 @@
         SUBMIT_PERFORMANCE_RESULTS = "/sct/$id/performance/submit"
         FINALIZE_NEMESIS = "/sct/$id/nemesis/finalize"
         SUBMIT_EVENTS = "/sct/$id/events/submit"
+        SUBMIT_JUNIT_REPORT = "/sct/$id/junit/submit"
 
     def __init__(self, run_id: UUID, auth_token: str, base_url: str, api_version="v1") -> None:
         super().__init__(auth_token, base_url, api_version)
@@ -204,6 +208,22 @@
         )
         self.check_response(response)
 
+
+    def update_resource(self, name: str, update_data: dict[str, Any]) -> None:
+        """
+            Update fields of the resource.
+        """
+        response = self.post(
+            endpoint=self.Routes.UPDATE_RESOURCE,
+            location_params={"id": str(self.run_id), "name": name},
+            body={
+                **self.generic_body,
+                "update_data": update_data,
+            }
+        )
+        self.check_response(response)
+
+
     def submit_nemesis(self, name: str, class_name: str, start_time: int,
                        target_name: str, target_ip: str, target_shards: int) -> None:
         """
@@ -265,3 +285,17 @@
             Updates a heartbeat field for an already existing test.
         """
         super().heartbeat(run_type=self.test_type, run_id=self.run_id)
+
+    def sct_submit_junit_report(self, file_name: str, raw_content: str) -> None:
+        """
+            Submits a JUnit-formatted XML report to argus
+        """
+        response = self.post(
+            endpoint=self.Routes.SUBMIT_JUNIT_REPORT,
+            location_params={"id": str(self.run_id)},
+            body={
+                **self.generic_body,
+                "file_name": file_name,
+                "content": str(base64.encodebytes(bytes(raw_content, encoding="utf-8")), encoding="utf-8")
+            }
+        )
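From the SCT side, the two client additions are called like the existing helpers. A usage sketch with placeholder credentials; the deployment URL and the valid resource state strings are assumptions:

```python
from uuid import UUID

from argus.client.sct.client import ArgusSCTClient

# Placeholder run id and token; base_url must point at your Argus instance.
client = ArgusSCTClient(
    run_id=UUID("00000000-0000-0000-0000-000000000000"),
    auth_token="<client token>",
    base_url="https://argus.example.com",
)

# Report a change to a tracked cloud resource (new /resource/$name/update route).
client.update_resource(
    name="longevity-db-node-1",
    update_data={"state": "terminated", "instance_info": {"shards_amount": 14}},
)

# Attach a JUnit XML report; the client base64-encodes the raw content itself.
with open("junit-report.xml", encoding="utf-8") as report:
    client.sct_submit_junit_report(file_name="junit-report.xml",
                                   raw_content=report.read())
```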
{argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: argus-alm
-Version: 0.12.2
+Version: 0.12.3
 Summary: Argus
 Home-page: https://github.com/scylladb/argus
 License: Apache-2.0
@@ -11,6 +11,7 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: click (>=8.1.3,<9.0.0)
 Requires-Dist: requests (>=2.26.0,<3.0.0)
 Project-URL: Repository, https://github.com/scylladb/argus

{argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/RECORD
CHANGED

@@ -1,7 +1,7 @@
 argus/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 argus/backend/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 argus/backend/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-argus/backend/cli.py,sha256=
+argus/backend/cli.py,sha256=fWSS1m0mhQeCwfH58Qfs4Cicxc95IKi9vwmQn3SUYs0,1346
 argus/backend/controller/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 argus/backend/controller/admin.py,sha256=2z29RX7ZQO_VTklSKH9RrEj-Ag2SsvyOaIzWDKr0ahQ,575
 argus/backend/controller/admin_api.py,sha256=9CkewYmnqKJpSKoXyKDMxuMziyAWQ1lRD3dMSh5R3PI,6249
@@ -13,43 +13,43 @@ argus/backend/controller/notification_api.py,sha256=wz7V4nE6Mxclpq78P8gNnCyeQ7xA
 argus/backend/controller/notifications.py,sha256=zMSJln72BGU6Q_nQvJesMnuvJ57Ucbov4M2ZI-37Bxo,290
 argus/backend/controller/team.py,sha256=G6LdIBaYgfG0Qr4RhNQ53MZVdh4wcuotsIIpFwhTJ3w,3101
 argus/backend/controller/team_ui.py,sha256=B7N1_Kzl6Rac8BV3FbKj55pGAS_dht47rYhAi94PC8A,589
-argus/backend/controller/testrun_api.py,sha256=
+argus/backend/controller/testrun_api.py,sha256=5VWJnYlPuh3GQyDsKqE0b1zw7rFGlGTg16BHCsiYnB4,11883
 argus/backend/db.py,sha256=bBiraYD05Qex28yZHjSP1bRlcMsc6oTYGt792zXmaHo,4101
 argus/backend/error_handlers.py,sha256=IEjz7Vzfldv1PTOeHrpRWmRsgBrHtAW0PXHUJZDovAE,480
 argus/backend/events/event_processors.py,sha256=bsmBayiXvlGn3aqiT2z9WgwnVBRtn2cRqkgn4pLodck,1291
 argus/backend/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 argus/backend/models/web.py,sha256=dUWQQaw5TivSCOuGQqAxIH9yVraGXX7sayNBloIS3n8,12206
 argus/backend/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-argus/backend/plugins/core.py,sha256=
+argus/backend/plugins/core.py,sha256=fZG24qysFyOfjBPAdEIHb0UXIQYI3szJZvldqBTBIC0,7910
 argus/backend/plugins/driver_matrix_tests/controller.py,sha256=9Q6QCripzM528SGsQnYupM6k7HW7hUzkNyjJFiXXfEw,739
-argus/backend/plugins/driver_matrix_tests/model.py,sha256=
+argus/backend/plugins/driver_matrix_tests/model.py,sha256=Qto0_TQec-ZDJzAsn28P3fEWfHloMtsNSRUS3bJ4ZJ0,6844
 argus/backend/plugins/driver_matrix_tests/plugin.py,sha256=72ESU7s8C6ovVMfJTlYwtaokdvRp_HJF1_czm1UMhKg,745
 argus/backend/plugins/driver_matrix_tests/raw_types.py,sha256=A108HCnv5q0RHfNRhUJrTpRy3fG7sPxr9Sk4gfsbooU,600
 argus/backend/plugins/driver_matrix_tests/service.py,sha256=dxb8VGTJLIyVqVrZ4RtzCXgnmS2qg2RAGhcp_SARt9I,1737
 argus/backend/plugins/driver_matrix_tests/udt.py,sha256=6lydzF3AQHm3GR5bKEDu1xLPYsLaBL3o-wu9NpabbqA,1134
-argus/backend/plugins/generic/model.py,sha256=
+argus/backend/plugins/generic/model.py,sha256=mX-IgBCxvWj_S_v_b2L4uitHGPR1apCz993uL4bnRD4,3118
 argus/backend/plugins/generic/plugin.py,sha256=5URbQVUCizrk-KZqb6I0P_8nLUekjYh-Js7ZLKVoBAA,407
 argus/backend/plugins/generic/types.py,sha256=jlZUcQ7r153ziyl3ZJmix7AzL2G1aX9N_z-4Kw9trWc,267
-argus/backend/plugins/loader.py,sha256=
-argus/backend/plugins/sct/controller.py,sha256=
-argus/backend/plugins/sct/plugin.py,sha256=
+argus/backend/plugins/loader.py,sha256=6PUrMjXKoCSDazMRkUHt8qxpniRhuqhY8Tof8lzeunk,1390
+argus/backend/plugins/sct/controller.py,sha256=NF11JLoUJ13whghlxRrVex9rLMgFtlkczUAAKAM9vYg,5738
+argus/backend/plugins/sct/plugin.py,sha256=_sOMcXLoFfeG9jwj_t48C4IFvY87juK8ApR6tfSw6q4,1007
 argus/backend/plugins/sct/resource_setup.py,sha256=hwfAOu-oKOH42tjtzJhiqwq_MtUE9_HevoFyql8JKqY,10120
-argus/backend/plugins/sct/service.py,sha256=
-argus/backend/plugins/sct/testrun.py,sha256=
-argus/backend/plugins/sct/types.py,sha256=
-argus/backend/plugins/sct/udt.py,sha256=
-argus/backend/plugins/sirenada/model.py,sha256=
+argus/backend/plugins/sct/service.py,sha256=e7zuvSMvUSl8pos8VG17TK_1BrRM4Ld2QHzJwOdBTQY,21379
+argus/backend/plugins/sct/testrun.py,sha256=NopC3Gnzf2m65S_E4DedAlgnbFvyaMQH5aLJip6ZPrE,9660
+argus/backend/plugins/sct/types.py,sha256=Gw1y4iqYguqNqTh_GopLDFho8vuGaOGuK7fjaHYhAOQ,1326
+argus/backend/plugins/sct/udt.py,sha256=V_x8_yw8rV7Q_QRBYayqtTNsPdZvjzOxWpRhXP1XAzs,3119
+argus/backend/plugins/sirenada/model.py,sha256=KVnI75BacuBryc5lR_Aai-mEOs7CB9xxhb7J-YRU3bc,4705
 argus/backend/plugins/sirenada/plugin.py,sha256=AlQAakwy3u-OqAqqK3RonUR5oDm-JoiwBUDUF3YEVP4,447
 argus/backend/plugins/sirenada/types.py,sha256=Gm3XMK9YJoozVaeM9XE7n8iRxA6PKBrS23Mo2vJfdLs,697
 argus/backend/service/admin.py,sha256=_VnWl3CkZBOAie_pPbd9sbXZUpBf2SApyNoFZLfB_QI,637
-argus/backend/service/argus_service.py,sha256=
+argus/backend/service/argus_service.py,sha256=t6pkjFOnIWoos_5TnrntwpzgNg6vQYVa_NgWHFIGcM8,24515
 argus/backend/service/build_system_monitor.py,sha256=_Bxugk4CCh2z-I-i74QMPe7M8j0dmBMDri2dd3WGVew,7328
 argus/backend/service/client_service.py,sha256=CS5esppd9s-SgUYE-HVLkfz-MrN8zxPouf9e4VlPV_M,2326
 argus/backend/service/event_service.py,sha256=iYeqxN2QCYTjYB1WPPv4BEFLXG0Oz3TvskkaK4v9pVY,654
-argus/backend/service/jenkins_service.py,sha256=
+argus/backend/service/jenkins_service.py,sha256=gAEPGHm-HaazHHyVXK5d7uG24oe5t9XlnDjPre-qzWg,9656
 argus/backend/service/notification_manager.py,sha256=h00Ej_-hH9H7pq0wah_1TH8dnpPyPNsgVJNO1rwJi7o,7011
 argus/backend/service/release_manager.py,sha256=DymV5OI53ClRdc4p7jlCGT6xPqrjIxHjs8EDiyWffqQ,7362
-argus/backend/service/stats.py,sha256=
+argus/backend/service/stats.py,sha256=Vdm24PtMxFQFTaP1dDJZ9nsrcSLYrTIFi8Ju_-zuSrY,16308
 argus/backend/service/team_manager_service.py,sha256=zY5dvy3ffvQbJuXBvlWKE5dS5LQ3ss6tkFE-cwFZsdw,3010
 argus/backend/service/testrun.py,sha256=ggjTcWjyvsPXSSv_v80rrp3bHaqCOjcxYEewXrW9AXg,19706
 argus/backend/service/user.py,sha256=N3t43rgKMnSsPXU5R9bigEEGbPjYrc6MsJtof3z7kDE,9027
@@ -66,7 +66,7 @@ argus/client/base.py,sha256=-R-BINTolY06lUQLOLGlsWzza4fBdtLBW-4V3NT64vg,6755
 argus/client/driver_matrix_tests/client.py,sha256=ekuTkEZbcVqhg6DAC6PvGuC9bXuu4DSOjk96vcLzmuQ,7817
 argus/client/generic/cli.py,sha256=IJkgEZ5VOAeqp5SlLM13Y5m8e34Cqnyz8WkfeKoN7so,2208
 argus/client/generic/client.py,sha256=l4PDjDy65Mm2OI9ZLSnyd8_2i4Ei1Pp9yRt3bRX8s2Y,1114
-argus/client/sct/client.py,sha256=
+argus/client/sct/client.py,sha256=DtRA0Ra3ycUcedDYfZZW1jER0nc8vdYHaY6DT0te4x0,11341
 argus/client/sct/types.py,sha256=VLgVe7qPmJtCLqtPnuX8N8kMKZq-iY3SKz68nvU6nJ4,371
 argus/client/sirenada/client.py,sha256=ilcyLXJb-0gKbmb9WSPr-Yvldh73joGBhRDoilQoSJ4,6220
 argus/db/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -77,8 +77,8 @@ argus/db/db_types.py,sha256=iLbmrUaDzrBw0kDCnvW0FSZ9-kNc3uQY-fsbIPymV4E,3612
 argus/db/interface.py,sha256=HroyA1Yijz5cXLdYbxorHCEu0GH9VeMMqB36IHTlcew,17146
 argus/db/testrun.py,sha256=0YG7FIH5FLQeNlYULxC6rhhyru2rziSMe3qKtYzTBnc,26014
 argus/db/utils.py,sha256=YAWsuLjUScSgKgdaL5aF4Sgr13gqH29Mb5cLctX4V_w,337
-argus_alm-0.12.
-argus_alm-0.12.
-argus_alm-0.12.
-argus_alm-0.12.
-argus_alm-0.12.
+argus_alm-0.12.3.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+argus_alm-0.12.3.dist-info/METADATA,sha256=Y7opbIMG2m_jRxQyZkmPePpz5BxDIokPbgSxf2AOH7M,6913
+argus_alm-0.12.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+argus_alm-0.12.3.dist-info/entry_points.txt,sha256=zEqrAK95P8AAhKbwO4lgrQzKBWqCzHH9zlUPCaCVHoQ,69
+argus_alm-0.12.3.dist-info/RECORD,,

{argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/LICENSE
File without changes

{argus_alm-0.12.2.dist-info → argus_alm-0.12.3.dist-info}/entry_points.txt
File without changes