argus-alm 0.12.4b2__tar.gz → 0.12.7__tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/PKG-INFO +1 -1
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/admin_api.py +67 -2
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/api.py +14 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/auth.py +15 -13
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/client_api.py +20 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/main.py +1 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/testrun_api.py +11 -0
- argus_alm-0.12.7/argus/backend/models/result.py +63 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/models/web.py +4 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/core.py +5 -3
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sct/testrun.py +9 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/argus_service.py +15 -5
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/build_system_monitor.py +3 -3
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/client_service.py +30 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/jenkins_service.py +3 -1
- argus_alm-0.12.7/argus/backend/service/results_service.py +140 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/testrun.py +25 -1
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/user.py +61 -4
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/views.py +3 -3
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/util/config.py +3 -1
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/util/encoders.py +17 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/base.py +34 -1
- argus_alm-0.12.7/argus/client/generic_result.py +99 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/pyproject.toml +8 -8
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/LICENSE +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/README.md +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/__init__.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/.gitkeep +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/__init__.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/cli.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/__init__.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/admin.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/notification_api.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/notifications.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/team.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/team_ui.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/view_api.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/db.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/error_handlers.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/events/event_processors.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/models/__init__.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/__init__.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/driver_matrix_tests/controller.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/driver_matrix_tests/model.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/driver_matrix_tests/plugin.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/driver_matrix_tests/raw_types.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/driver_matrix_tests/service.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/driver_matrix_tests/udt.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/generic/model.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/generic/plugin.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/generic/types.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/loader.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sct/controller.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sct/plugin.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sct/resource_setup.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sct/service.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sct/types.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sct/udt.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sirenada/model.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sirenada/plugin.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sirenada/types.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/admin.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/event_service.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/notification_manager.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/release_manager.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/stats.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/team_manager_service.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/template_filters.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/util/common.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/util/enums.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/util/logsetup.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/util/module_loaders.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/util/send_email.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/__init__.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/driver_matrix_tests/cli.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/driver_matrix_tests/client.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/generic/cli.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/generic/client.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/sct/client.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/sct/types.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/client/sirenada/client.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/db/.gitkeep +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/db/argus_json.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/db/cloud_types.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/db/config.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/db/db_types.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/db/interface.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/db/testrun.py +0 -0
- {argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/db/utils.py +0 -0

{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/admin_api.py

@@ -7,8 +7,8 @@ from flask import (
 )
 from argus.backend.error_handlers import handle_api_exception
 from argus.backend.service.release_manager import ReleaseEditPayload, ReleaseManagerService
-from argus.backend.service.user import api_login_required, check_roles
-from argus.backend.models.web import UserRoles
+from argus.backend.service.user import UserService, api_login_required, check_roles
+from argus.backend.models.web import User, UserRoles
 
 bp = Blueprint('admin_api', __name__, url_prefix='/api/v1')
 LOGGER = logging.getLogger(__name__)

@@ -287,3 +287,68 @@ def quick_toggle_group_enabled():
         "status": "ok",
         "response": res
     }
+
+
+@bp.route("/users", methods=["GET"])
+@check_roles(UserRoles.Admin)
+@api_login_required
+def user_info():
+    result = UserService().get_users_privileged()
+
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+
+@bp.route("/user/<string:user_id>/email/set", methods=["POST"])
+@check_roles(UserRoles.Admin)
+@api_login_required
+def user_change_email(user_id: str):
+    payload = get_payload(request)
+
+    user = User.get(id=user_id)
+    result = UserService().update_email(user=user, new_email=payload["newEmail"])
+
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+
+@bp.route("/user/<string:user_id>/delete", methods=["POST"])
+@check_roles(UserRoles.Admin)
+@api_login_required
+def user_delete(user_id: str):
+    result = UserService().delete_user(user_id=user_id)
+
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+
+@bp.route("/user/<string:user_id>/password/set", methods=["POST"])
+@check_roles(UserRoles.Admin)
+@api_login_required
+def user_change_password(user_id: str):
+    payload = get_payload(request)
+
+    user = User.get(id=user_id)
+    result = UserService().update_password(user=user, old_password="", new_password=payload["newPassword"], force=True)
+
+    return {
+        "status": "ok",
+        "response": result
+    }
+
+@bp.route("/user/<string:user_id>/admin/toggle", methods=["POST"])
+@check_roles(UserRoles.Admin)
+@api_login_required
+def user_toggle_admin(user_id: str):
+    result = UserService().toggle_admin(user_id=user_id)
+
+    return {
+        "status": "ok",
+        "response": result
+    }
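The five new admin routes above expose user management under the `/api/v1` blueprint. Below is a minimal sketch of how an already-authenticated admin session might exercise them with `requests`; the base URL, the cookie-based session handling, and all concrete values are assumptions for illustration, not part of this diff.

```python
# Hypothetical usage of the new admin user-management endpoints added in 0.12.7.
# Assumes an Argus deployment at BASE_URL and an existing admin session cookie.
import requests

BASE_URL = "https://argus.example.com"  # assumption: your deployment URL
session = requests.Session()
session.cookies.set("session", "<admin-session-cookie>")  # assumption: cookie-based login

# List all users (admin only).
users = session.get(f"{BASE_URL}/api/v1/users").json()["response"]

user_id = "<user-uuid>"  # taken from the /api/v1/users response

# Change a user's email; the handler reads payload["newEmail"].
session.post(f"{BASE_URL}/api/v1/user/{user_id}/email/set",
             json={"newEmail": "new.address@example.com"})

# Reset a password (force=True skips the old-password check) and toggle the admin role.
session.post(f"{BASE_URL}/api/v1/user/{user_id}/password/set",
             json={"newPassword": "a-new-password"})
session.post(f"{BASE_URL}/api/v1/user/{user_id}/admin/toggle")
```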
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/api.py

@@ -14,6 +14,7 @@ from argus.backend.controller.testrun_api import bp as testrun_bp
 from argus.backend.controller.team import bp as team_bp
 from argus.backend.controller.view_api import bp as view_bp
 from argus.backend.service.argus_service import ArgusService, ScheduleUpdateRequest
+from argus.backend.service.results_service import ResultsService
 from argus.backend.service.user import UserService, api_login_required
 from argus.backend.service.stats import ReleaseStatsCollector
 from argus.backend.models.web import ArgusRelease, ArgusGroup, ArgusTest, User, UserOauthToken

@@ -381,6 +382,19 @@ def test_info():
         "response": info
     }
 
+@bp.route("/test-results", methods=["GET"])
+@api_login_required
+def test_results():
+    test_id = request.args.get("testId")
+    if not test_id:
+        raise Exception("No testId provided")
+    service = ResultsService()
+    info = service.get_results(test_id=UUID(test_id))
+
+    return {
+        "status": "ok",
+        "response": info
+    }
 
 @bp.route("/test_run/comment/get", methods=["GET"]) # TODO: remove
 @api_login_required
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/auth.py

@@ -5,7 +5,7 @@ from flask import (
 )
 from werkzeug.security import check_password_hash
 from argus.backend.models.web import User
-from argus.backend.service.user import UserService, load_logged_in_user, login_required
+from argus.backend.service.user import UserService, UserServiceException, load_logged_in_user, login_required
 
 bp = Blueprint('auth', __name__, url_prefix='/auth')
 

@@ -21,24 +21,26 @@ def login():
     session["csrf_token"] = token
 
     if request.method == 'POST':
-        username = request.form["username"]
-        password = request.form["password"]
-        error = None
         try:
-
-
-
+            if "password" not in current_app.config.get("LOGIN_METHODS", []):
+                raise UserServiceException("Password Login is disabled")
+            username = request.form["username"]
+            password = request.form["password"]
 
-
-
-
-
+            try:
+                user: User = User.get(username=username)
+            except User.DoesNotExist:
+                raise UserServiceException("User not found")
+
+            if not check_password_hash(user.password, password):
+                raise UserServiceException("Incorrect Password")
 
-        if not error:
             session.clear()
             session["user_id"] = str(user.id)
             session["csrf_token"] = token
-
+        except UserServiceException as exc:
+            flash(next(iter(exc.args), "No message"), category="error")
+
         return redirect(url_for('main.home'))
 
     return render_template('auth/login.html.j2',
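The rewritten login view now refuses form login unless `"password"` appears in the application's `LOGIN_METHODS` config. The matching default lives in `argus/backend/util/config.py` (changed in this release but not shown in this diff); the snippet below is only a hedged sketch of what such a setting could look like.

```python
# Hypothetical Flask configuration excerpt consumed by the new login check.
# Only the LOGIN_METHODS key is taken from the diff; the values listed here
# ("password", "github") are assumptions for illustration.
LOGIN_METHODS = ["password", "github"]

# With "password" removed, the POST branch raises
# UserServiceException("Password Login is disabled") and flashes it to the user:
# LOGIN_METHODS = ["github"]
```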
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/client_api.py

@@ -23,6 +23,15 @@ def submit_run(run_type: str):
         "response": result
     }
 
+@bp.route("/testrun/<string:run_type>/<string:run_id>/get", methods=["GET"])
+@api_login_required
+def get_run(run_type: str, run_id: str):
+    result = ClientService().get_run(run_type=run_type, run_id=run_id)
+    return {
+        "status": "ok",
+        "response": result
+    }
+
 
 @bp.route("/testrun/<string:run_type>/<string:run_id>/heartbeat", methods=["POST"])
 @api_login_required

@@ -90,3 +99,14 @@ def run_finalize(run_type: str, run_id: str):
         "status": "ok",
         "response": result
     }
+
+
+@bp.route("/testrun/<string:run_type>/<string:run_id>/submit_results", methods=["POST"])
+@api_login_required
+def submit_results(run_type: str, run_id: str):
+    payload = get_payload(request)
+    result = ClientService().submit_results(run_type=run_type, run_id=run_id, results=payload)
+    return {
+        "status": "ok",
+        "response": result
+    }
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/main.py

@@ -152,6 +152,7 @@ def profile_oauth_github_callback():
     try:
         first_run_info = service.github_callback(req_code)
     except Exception as exc: # pylint: disable=broad-except
+        LOGGER.error("An error occured in callback", exc_info=True)
        flash(message=exc.args[0], category="error")
         return redirect(url_for("main.error", type=403))
     if first_run_info:
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/controller/testrun_api.py

@@ -63,6 +63,17 @@ def test_run_activity(run_id: str):
     }
 
 
+
+@bp.route("/run/<string:test_id>/<string:run_id>/fetch_results", methods=["GET"])
+@api_login_required
+def fetch_results(test_id: str, run_id: str):
+    tables = TestRunService().fetch_results(test_id=UUID(test_id), run_id=UUID(run_id))
+    return {
+        "status": "ok",
+        "tables": tables
+    }
+
+
 @bp.route("/test/<string:test_id>/run/<string:run_id>/status/set", methods=["POST"])
 @api_login_required
 def set_testrun_status(test_id: str, run_id: str):
argus_alm-0.12.7/argus/backend/models/result.py (new file)

@@ -0,0 +1,63 @@
+from cassandra.cqlengine import columns
+from cassandra.cqlengine.models import Model
+from cassandra.cqlengine.usertype import UserType
+from enum import Enum
+
+
+class Status(Enum):
+    PASS = 0
+    WARNING = 1
+    ERROR = 2
+
+
+class ColumnMetadata(UserType):
+    name = columns.Ascii()
+    unit = columns.Text()
+    type = columns.Ascii()
+
+
+class ArgusGenericResultMetadata(Model):
+    __table_name__ = "generic_result_metadata_v1"
+    test_id = columns.UUID(partition_key=True)
+    name = columns.Text(required=True, primary_key=True)
+    description = columns.Text()
+    columns_meta = columns.List(value_type=columns.UserDefinedType(ColumnMetadata))
+    rows_meta = columns.List(value_type=columns.Ascii())
+
+    def __init__(self, **kwargs):
+        kwargs["columns_meta"] = [ColumnMetadata(**col) for col in kwargs.pop('columns_meta', [])]
+        super().__init__(**kwargs)
+
+    def update_if_changed(self, new_data: dict) -> None:
+        """
+        Updates table metadata if changed column/description or new rows were added.
+        See that rows can only be added, not removed once was sent.
+        Columns may be removed, but data in results table persists.
+        """
+        updated = False
+        for field, value in new_data.items():
+            if field == "columns_meta":
+                value = [ColumnMetadata(**col) for col in value]
+            elif field == "rows_meta":
+                added_rows = []
+                for row in value:
+                    if row not in self.rows_meta:
+                        added_rows.append(row)
+                value = self.rows_meta + added_rows
+            if getattr(self, field) != value:
+                setattr(self, field, value)
+                updated = True
+
+        if updated:
+            self.save()
+
+class ArgusGenericResultData(Model):
+    __table_name__ = "generic_result_data_v1"
+    test_id = columns.UUID(partition_key=True)
+    name = columns.Text(partition_key=True)
+    run_id = columns.UUID(primary_key=True)
+    column = columns.Ascii(primary_key=True, index=True)
+    row = columns.Ascii(primary_key=True, index=True)
+    sut_timestamp = columns.DateTime() # for sorting
+    value = columns.Double()
+    status = columns.Ascii()
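The new `result.py` models split a generic results table into per-test metadata (`generic_result_metadata_v1`) and per-run cells (`generic_result_data_v1`). Below is a sketch of the plain-Python shape of one table and one of its cells, mirroring the column definitions above; the concrete names and numbers are invented for illustration.

```python
# Illustrative data only - field names follow the models above, values are made up.
table_meta = {
    "name": "latency",                       # table name, keyed within the test partition
    "description": "Read/write latency percentiles",
    "columns_meta": [                        # list of ColumnMetadata UDT values
        {"name": "p99", "unit": "ms", "type": "FLOAT"},
        {"name": "p50", "unit": "ms", "type": "FLOAT"},
    ],
    "rows_meta": ["read", "write"],          # rows can only grow, never shrink
}

# One cell of generic_result_data_v1: a (column, row) coordinate for a given run.
cell = {
    "column": "p99",
    "row": "read",
    "value": 12.7,
    "status": "PASS",                        # illustrative; mirrors the Status enum names
}
```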
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/models/web.py

@@ -6,6 +6,8 @@ from cassandra.cqlengine.usertype import UserType
 from cassandra.cqlengine import columns
 from cassandra.util import uuid_from_time, unix_time_from_uuid1 # pylint: disable=no-name-in-module
 
+from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
+
 
 def uuid_now():
     return uuid_from_time(datetime.utcnow())

@@ -377,6 +379,8 @@ USED_MODELS: list[Model] = [
     ArgusScheduleAssignee,
     ArgusScheduleGroup,
     ArgusScheduleTest,
+    ArgusGenericResultMetadata,
+    ArgusGenericResultData,
 ]
 
 USED_TYPES: list[UserType] = [
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/core.py

@@ -105,16 +105,16 @@ class PluginModelBase(Model):
         assignees = ArgusScheduleAssignee.filter(
             schedule_id=schedule.id
         ).all()
-        assignees_uuids.
+        assignees_uuids.extend([assignee.assignee for assignee in assignees])
 
         return assignees_uuids[0] if len(assignees_uuids) > 0 else None
 
     @classmethod
-    def get_jobs_assigned_to_user(cls,
+    def get_jobs_assigned_to_user(cls, user_id: str | UUID):
         cluster = ScyllaCluster.get()
         query = cluster.prepare("SELECT build_id, start_time, release_id, group_id, assignee, "
                                 f"test_id, id, status, investigation_status, build_job_url, scylla_version FROM {cls.table_name()} WHERE assignee = ?")
-        rows = cluster.session.execute(query=query, parameters=(
+        rows = cluster.session.execute(query=query, parameters=(user_id,))
 
         return list(rows)

@@ -213,6 +213,8 @@ class PluginModelBase(Model):
     def finish_run(self, payload: dict = None):
         raise NotImplementedError()
 
+    def sut_timestamp(self) -> float:
+        raise NotImplementedError()
 
 class PluginInfoBase:
     # pylint: disable=too-few-public-methods
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/plugins/sct/testrun.py

@@ -109,6 +109,7 @@ class SCTTestRun(PluginModelBase):
     stress_cmd = columns.Text()
 
     histograms = columns.List(value_type=columns.Map(key_type=columns.Text(), value_type=columns.UserDefinedType(user_type=PerformanceHDRHistogram)))
+    test_method = columns.Ascii()
 
     @classmethod
     def _stats_query(cls) -> str:

@@ -199,6 +200,7 @@ class SCTTestRun(PluginModelBase):
 
         run.config_files = req.sct_config.get("config_files")
         run.region_name = regions
+        run.test_method = req.sct_config.get("test_method")
         run.save()
 
         return run

@@ -249,6 +251,13 @@ class SCTTestRun(PluginModelBase):
 
         self._collect_event_message(event, event_message)
 
+    def sut_timestamp(self) -> float:
+        """converts scylla-server date to timestamp and adds revision in subseconds precision to diffirentiate
+        scylla versions from the same day. It's not perfect, but we don't know exact version time."""
+        scylla_package = [package for package in self.packages if package.name == "scylla-server"][0]
+        return (datetime.strptime(scylla_package.date, '%Y%m%d').timestamp()
+                + int(scylla_package.revision_id, 16) % 1000000 / 1000000)
+
 
 class SCTJunitReports(Model):
     test_id = columns.UUID(primary_key=True, partition_key=True, required=True)
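The new `SCTTestRun.sut_timestamp()` folds the scylla-server package date into a Unix timestamp and packs the revision hash into the fractional part, so two builds from the same day still order deterministically. Below is a standalone rerun of that arithmetic with made-up package values (the `date` and `revision_id` strings are hypothetical).

```python
# Reproduces the sut_timestamp() arithmetic with hypothetical package fields.
from datetime import datetime

date = "20240115"            # scylla_package.date, as read by the method above
revision_id = "9f3b2c7a"     # scylla_package.revision_id (hex), made up here

base = datetime.strptime(date, "%Y%m%d").timestamp()   # midnight of the build date (local time)
fraction = int(revision_id, 16) % 1000000 / 1000000    # 0 <= fraction < 1
print(base + fraction)       # about 1705276800.455354 when run in UTC
```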
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/argus_service.py

@@ -459,8 +459,13 @@ class ArgusService:
             self.update_schedule_comment({"newComment": comment, "releaseId": test.release_id, "groupId": test.group_id, "testId": test.id})
 
         schedule_assignee: ArgusScheduleAssignee = ArgusScheduleAssignee.get(schedule_id=schedule_id)
-
-
+        new_assignee = ArgusScheduleAssignee()
+        new_assignee.assignee = assignee
+        new_assignee.release_id = schedule_assignee.release_id
+        new_assignee.schedule_id = schedule_assignee.schedule_id
+        new_assignee.save()
+        schedule_assignee.delete()
+
         return True
 
     def delete_schedule(self, payload: dict) -> dict:

@@ -493,11 +498,16 @@ class ArgusService:
         full_schedule["assignees"] = [assignee.assignee for assignee in assignees]
 
         if len(assignees) > 0:
-
+            try:
+                schedule_user = User.get(id=assignees[0].assignee)
+            except User.DoesNotExist:
+                schedule_user = User()
+                schedule_user.id = assignees[0].assignee
+                LOGGER.warning("Deleting orphaned user assignments")
             service = TestRunService()
 
             for model in all_plugin_models():
-                for run in model.get_jobs_assigned_to_user(schedule_user):
+                for run in model.get_jobs_assigned_to_user(schedule_user.id):
                     if run["release_id"] != release.id:
                         continue
                     if run["test_id"] not in full_schedule["tests"]:

@@ -644,7 +654,7 @@ class ArgusService:
         today = datetime.datetime.now()
         validity_period = today - datetime.timedelta(days=current_app.config.get("JOB_VALIDITY_PERIOD_DAYS", 30))
         for plugin in all_plugin_models():
-            for run in plugin.get_jobs_assigned_to_user(
+            for run in plugin.get_jobs_assigned_to_user(user_id=user.id):
                 if run["start_time"] >= validity_period:
                     yield run
 
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/build_system_monitor.py

@@ -20,9 +20,9 @@ class ArgusTestsMonitor(ABC):
 
     def __init__(self) -> None:
         self._cluster = ScyllaCluster.get()
-        self._existing_releases = list(ArgusRelease.
-        self._existing_groups = list(ArgusGroup.
-        self._existing_tests = list(ArgusTest.
+        self._existing_releases = list(ArgusRelease.objects().limit(None))
+        self._existing_groups = list(ArgusGroup.objects().limit(None))
+        self._existing_tests = list(ArgusTest.objects().limit(None))
         self._filtered_groups: list[str] = self.BUILD_SYSTEM_FILTERED_PREFIXES
 
     def create_release(self, release_name):
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/client_service.py

@@ -1,5 +1,6 @@
 from uuid import UUID
 from argus.backend.db import ScyllaCluster
+from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
 from argus.backend.plugins.core import PluginModelBase
 from argus.backend.plugins.loader import AVAILABLE_PLUGINS
 from argus.backend.util.enums import TestStatus

@@ -25,6 +26,14 @@ class ClientService:
         model = self.get_model(run_type)
         model.submit_run(request_data=request_data)
         return "Created"
+
+    def get_run(self, run_type: str, run_id: str):
+        model = self.get_model(run_type)
+        try:
+            run = model.get(id=run_id)
+        except model.DoesNotExist:
+            return None
+        return run
 
     def heartbeat(self, run_type: str, run_id: str) -> int:
         model = self.get_model(run_type)

@@ -69,3 +78,24 @@ class ClientService:
         run.save()
 
         return "Finalized"
+
+    def submit_results(self, run_type: str, run_id: str, results: dict) -> str:
+        model = self.get_model(run_type)
+        run = model.load_test_run(UUID(run_id))
+        existing_table = ArgusGenericResultMetadata.objects(test_id=run.test_id, name=results["meta"]["name"]).first()
+        if existing_table:
+            existing_table.update_if_changed(results["meta"])
+        else:
+            ArgusGenericResultMetadata(test_id=run.test_id, **results["meta"]).save()
+        if results.get("sut_timestamp", 0) == 0:
+            results["sut_timestamp"] = run.sut_timestamp() # automatic sut_timestamp
+        table_name = results["meta"]["name"]
+        sut_timestamp = results["sut_timestamp"]
+        for cell in results["results"]:
+            ArgusGenericResultData(test_id=run.test_id,
+                                   run_id=run.id,
+                                   name=table_name,
+                                   sut_timestamp=sut_timestamp,
+                                   **cell
+                                   ).save()
+        return "Submitted"
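`ClientService.submit_results()` above reads a specific payload shape: a `meta` block matching `ArgusGenericResultMetadata`, a `results` list of cell dicts, and an optional `sut_timestamp` (0 means "derive it from the run"). Below is a hedged sketch of a payload that would satisfy this code path. The client-side helpers in `argus/client/generic_result.py` and `argus/client/base.py` also changed in this release but are not shown in this diff, so the dict is assembled by hand here rather than through that API, and every concrete value is invented.

```python
# Example body for the new submit_results endpoint
# (the client_api blueprint's URL prefix is not shown in this diff).
payload = {
    "meta": {
        "name": "latency",
        "description": "Read/write latency percentiles",
        "columns_meta": [{"name": "p99", "unit": "ms", "type": "FLOAT"}],
        "rows_meta": ["read", "write"],
    },
    "sut_timestamp": 0,          # 0 -> the server calls run.sut_timestamp() automatically
    "results": [                 # each cell becomes one ArgusGenericResultData row
        {"column": "p99", "row": "read", "value": 12.7, "status": "PASS"},
        {"column": "p99", "row": "write", "value": 18.3, "status": "WARNING"},
    ],
}
```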
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/jenkins_service.py

@@ -220,7 +220,9 @@ class JenkinsService:
     def build_job(self, build_id: str, params: dict, user_override: str = None):
         queue_number = self._jenkins.build_job(build_id, {
             **params,
-
+            # use the user's email as the default value for the requested by user parameter,
+            # so it would align with how SCT default works, on runs not trigger by argus
+            self.RESERVED_PARAMETER_NAME: g.user.email.split('@')[0] if not user_override else user_override
         })
         return queue_number
 
argus_alm-0.12.7/argus/backend/service/results_service.py (new file)

@@ -0,0 +1,140 @@
+import copy
+import logging
+import math
+from typing import List, Dict, Any
+from uuid import UUID
+
+from argus.backend.db import ScyllaCluster
+from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
+
+LOGGER = logging.getLogger(__name__)
+
+default_options = {
+    "scales": {
+        "y": {
+            "beginAtZero": True,
+            "title": {
+                "display": True,
+                "text": ''
+            }
+        },
+        "x": {
+            "type": "time",
+            "time": {
+                "unit": "day",
+                "displayFormats": {
+                    "day": "yyyy-MM-dd",
+                },
+            },
+            "title": {
+                "display": True,
+                "text": 'SUT Date'
+            }
+        },
+    },
+    "elements": {
+        "line": {
+            "tension": .1,
+        }
+    },
+    "plugins": {
+        "legend": {
+            "position": 'top',
+        },
+        "title": {
+            "display": True,
+            "text": ''
+        }
+    }
+}
+
+colors = [
+    'rgba(255, 0, 0, 1.0)', # Red
+    'rgba(0, 255, 0, 1.0)', # Green
+    'rgba(0, 0, 255, 1.0)', # Blue
+    'rgba(0, 255, 255, 1.0)', # Cyan
+    'rgba(255, 0, 255, 1.0)', # Magenta
+    'rgba(255, 255, 0, 1.0)', # Yellow
+    'rgba(255, 165, 0, 1.0)', # Orange
+    'rgba(128, 0, 128, 1.0)', # Purple
+    'rgba(50, 205, 50, 1.0)', # Lime
+    'rgba(255, 192, 203, 1.0)', # Pink
+    'rgba(0, 128, 128, 1.0)', # Teal
+    'rgba(165, 42, 42, 1.0)', # Brown
+    'rgba(0, 0, 128, 1.0)', # Navy
+    'rgba(128, 128, 0, 1.0)', # Olive
+    'rgba(255, 127, 80, 1.0)' # Coral
+]
+
+
+def get_sorted_data_for_column_and_row(data: List[Dict[str, Any]], column: str, row: str) -> List[Dict[str, Any]]:
+    return sorted([{"x": entry.sut_timestamp.strftime('%Y-%m-%dT%H:%M:%SZ'),
+                    "y": entry.value,
+                    "id": entry.run_id}
+                   for entry in data if entry['column'] == column and entry['row'] == row],
+                  key=lambda x: x['x'])
+
+
+def get_min_max_y(datasets: List[Dict[str, Any]]) -> (float, float):
+    """0.5 - 1.5 of min/max of 50% results"""
+    y = [entry['y'] for dataset in datasets for entry in dataset['data']]
+    if not y:
+        return 0, 0
+    sorted_y = sorted(y)
+    lower_percentile_index = int(0.25 * len(sorted_y))
+    upper_percentile_index = int(0.75 * len(sorted_y)) - 1
+    y_min = sorted_y[lower_percentile_index]
+    y_max = sorted_y[upper_percentile_index]
+    return math.floor(0.5 * y_min), math.ceil(1.5 * y_max)
+
+
+def round_datasets_to_min_max(datasets: List[Dict[str, Any]], min_y: float, max_y: float) -> List[Dict[str, Any]]:
+    """Round values to min/max and provide original value for tooltip"""
+    for dataset in datasets:
+        for entry in dataset['data']:
+            val = entry['y']
+            if val > max_y:
+                entry['y'] = max_y
+                entry['ori'] = val
+            elif val < min_y:
+                entry['y'] = min_y
+                entry['ori'] = val
+    return datasets
+
+
+def create_chartjs(table, data):
+    graphs = []
+    for column in table.columns_meta:
+        datasets = [
+            {"label": row,
+             "borderColor": colors[idx % len(colors)],
+             "borderWidth": 3,
+             "showLine": True,
+             "data": get_sorted_data_for_column_and_row(data, column.name, row)} for idx, row in enumerate(table.rows_meta)]
+        min_y, max_y = get_min_max_y(datasets)
+        datasets = round_datasets_to_min_max(datasets, min_y, max_y)
+        if not min_y + max_y:
+            # filter out those without data
+            continue
+        options = copy.deepcopy(default_options)
+        options["plugins"]["title"]["text"] = f"{table.name} - {column.name}"
+        options["scales"]["y"]["title"]["text"] = f"[{column.unit}]"
+        options["scales"]["y"]["min"] = min_y
+        options["scales"]["y"]["max"] = max_y
+        graphs.append({"options": options, "data":
+                       {"datasets": datasets}})
+    return graphs
+
+
+class ResultsService:
+
+    def __init__(self, database_session=None):
+        self.session = database_session if database_session else ScyllaCluster.get_session()
+
+    def get_results(self, test_id: UUID):
+        graphs = []
+        res = ArgusGenericResultMetadata.objects(test_id=test_id).all()
+        for table in res:
+            data = ArgusGenericResultData.objects(test_id=test_id, name=table.name).all()
+            graphs.extend(create_chartjs(table, data))
+        return graphs
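`get_min_max_y()` takes the 25th and 75th percentile of all plotted values and widens them to 0.5x / 1.5x for the y-axis, while `round_datasets_to_min_max()` clips outliers into that window and keeps the original value under `ori` for the tooltip. A small worked example of both helpers is shown below; the functions are pure, so no database is needed, though importing the module assumes the backend package and its dependencies are installed.

```python
# Worked example for the two helpers above; the values are arbitrary sample points.
from argus.backend.service.results_service import get_min_max_y, round_datasets_to_min_max

datasets = [{"label": "read", "data": [{"y": v} for v in (10, 12, 14, 16, 100)]}]

min_y, max_y = get_min_max_y(datasets)
# sorted values: [10, 12, 14, 16, 100]; 25th percentile -> 12, 75th percentile -> 14
# min_y = floor(0.5 * 12) = 6, max_y = ceil(1.5 * 14) = 21
print(min_y, max_y)  # 6 21

clipped = round_datasets_to_min_max(datasets, min_y, max_y)
print(clipped[0]["data"][-1])  # {'y': 21, 'ori': 100} - outlier clipped, original kept
```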
{argus_alm-0.12.4b2 → argus_alm-0.12.7}/argus/backend/service/testrun.py

@@ -13,6 +13,7 @@ from flask import g
 from cassandra.query import BatchStatement, ConsistencyLevel
 from cassandra.cqlengine.query import BatchQuery
 from argus.backend.db import ScyllaCluster
+from argus.backend.models.result import ArgusGenericResultMetadata
 
 from argus.backend.models.web import (
     ArgusEvent,

@@ -222,7 +223,7 @@ class TestRunService:
         mentions = set(mentions)
         for potential_mention in re.findall(self.RE_MENTION, message_stripped):
             if user := User.exists_by_name(potential_mention.lstrip("@")):
-                mentions.add(user)
+                mentions.add(user) if user.id != g.user.id else None
 
         test: ArgusTest = ArgusTest.get(id=test_id)
         plugin = self.get_plugin(test.plugin_name)

@@ -306,6 +307,29 @@ class TestRunService:
         }
         return response
 
+    def fetch_results(self, test_id: UUID, run_id: UUID) -> list[dict]:
+        cluster = ScyllaCluster.get()
+        query_fields = ["column", "row", "value", "status"]
+        raw_query = (f"SELECT {','.join(query_fields)},WRITETIME(value) as ordering "
+                     f"FROM generic_result_data_v1 WHERE test_id = ? AND run_id = ? AND name = ?")
+        query = cluster.prepare(raw_query)
+        tables_meta = ArgusGenericResultMetadata.filter(test_id=test_id)
+        tables = []
+        for table in tables_meta:
+            cells = cluster.session.execute(query=query, parameters=(test_id, run_id, table.name))
+            if not cells:
+                continue
+            cells = [dict(cell.items()) for cell in cells]
+            tables.append({'meta': {
+                'name': table.name,
+                'description': table.description,
+                'columns_meta': table.columns_meta,
+                'rows_meta': table.rows_meta,
+            },
+                'cells': [{k: v for k, v in cell.items() if k in query_fields} for cell in cells],
+                'order': min([cell['ordering'] for cell in cells] or [0])})
+        return sorted(tables, key=lambda x: x['order'])
+
     def submit_github_issue(self, issue_url: str, test_id: UUID, run_id: UUID):
         user_tokens = UserOauthToken.filter(user_id=g.user.id).all()
         token = None