argus-alm 0.12.5__py3-none-any.whl → 0.12.8__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (33)
  1. argus/backend/controller/admin_api.py +67 -2
  2. argus/backend/controller/api.py +18 -1
  3. argus/backend/controller/auth.py +15 -13
  4. argus/backend/controller/client_api.py +10 -5
  5. argus/backend/controller/main.py +1 -0
  6. argus/backend/controller/testrun_api.py +2 -1
  7. argus/backend/models/result.py +40 -6
  8. argus/backend/plugins/core.py +3 -3
  9. argus/backend/plugins/driver_matrix_tests/controller.py +39 -0
  10. argus/backend/plugins/driver_matrix_tests/model.py +248 -2
  11. argus/backend/plugins/driver_matrix_tests/raw_types.py +27 -0
  12. argus/backend/plugins/driver_matrix_tests/service.py +18 -0
  13. argus/backend/plugins/driver_matrix_tests/udt.py +14 -13
  14. argus/backend/plugins/sct/testrun.py +9 -3
  15. argus/backend/service/argus_service.py +15 -5
  16. argus/backend/service/build_system_monitor.py +3 -3
  17. argus/backend/service/client_service.py +22 -4
  18. argus/backend/service/jenkins_service.py +3 -1
  19. argus/backend/service/results_service.py +201 -0
  20. argus/backend/service/testrun.py +2 -19
  21. argus/backend/service/user.py +61 -4
  22. argus/backend/service/views.py +3 -3
  23. argus/backend/util/config.py +3 -1
  24. argus/backend/util/encoders.py +17 -0
  25. argus/client/base.py +18 -1
  26. argus/client/driver_matrix_tests/cli.py +110 -0
  27. argus/client/driver_matrix_tests/client.py +56 -193
  28. argus/client/generic_result.py +10 -5
  29. {argus_alm-0.12.5.dist-info → argus_alm-0.12.8.dist-info}/METADATA +1 -1
  30. {argus_alm-0.12.5.dist-info → argus_alm-0.12.8.dist-info}/RECORD +33 -31
  31. {argus_alm-0.12.5.dist-info → argus_alm-0.12.8.dist-info}/entry_points.txt +1 -0
  32. {argus_alm-0.12.5.dist-info → argus_alm-0.12.8.dist-info}/LICENSE +0 -0
  33. {argus_alm-0.12.5.dist-info → argus_alm-0.12.8.dist-info}/WHEEL +0 -0

argus/backend/plugins/driver_matrix_tests/raw_types.py

@@ -1,4 +1,6 @@
+from dataclasses import dataclass
 from typing import TypedDict
+from uuid import UUID
 
 
 class RawMatrixTestCase(TypedDict):
@@ -33,3 +35,28 @@ class RawMatrixTestResult(TypedDict):
     time: float
     timestamp: str
     suites: list[RawMatrixTestSuite]
+
+
+@dataclass(init=True, frozen=True)
+class DriverMatrixSubmitResultRequest():
+    schema_version: str
+    run_id: UUID
+    driver_type: str
+    driver_name: str
+    raw_xml: str
+
+
+@dataclass(init=True, frozen=True)
+class DriverMatrixSubmitFailureRequest():
+    schema_version: str
+    run_id: UUID
+    driver_type: str
+    driver_name: str
+    failure_reason: str
+
+
+@dataclass(init=True, frozen=True)
+class DriverMatrixSubmitEnvRequest():
+    schema_version: str
+    run_id: UUID
+    raw_env: str
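
A client building a driver-matrix result submission would populate the frozen request dataclasses above. A minimal sketch, assuming the JUnit XML is base64-encoded before transport (which matches the base64.decodebytes() call in the service change below); the encode_xml helper and all field values are illustrative, not taken from the package:

```python
# Illustrative only -- not part of the diff.
import base64
from uuid import uuid4

from argus.backend.plugins.driver_matrix_tests.raw_types import DriverMatrixSubmitResultRequest


def encode_xml(junit_xml: str) -> str:
    # the backend decodes with base64.decodebytes(), so encode symmetrically
    return base64.encodebytes(junit_xml.encode("utf-8")).decode("utf-8")


request = DriverMatrixSubmitResultRequest(
    schema_version="v1",                 # assumed value
    run_id=uuid4(),
    driver_type="java",                  # assumed value
    driver_name="scylla-java-driver",    # assumed value
    raw_xml=encode_xml("<testsuites></testsuites>"),
)
```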

argus/backend/plugins/driver_matrix_tests/service.py

@@ -1,8 +1,13 @@
+import base64
+import logging
+from uuid import UUID
 from argus.backend.db import ScyllaCluster
 from argus.backend.models.web import ArgusRelease, ArgusTest
 from argus.backend.plugins.driver_matrix_tests.model import DriverTestRun
 
 
+LOGGER = logging.getLogger(__name__)
+
 class DriverMatrixService:
     def tested_versions_report(self, build_id: str) -> dict:
         db = ScyllaCluster.get()
@@ -40,3 +45,16 @@ class DriverMatrixService:
             "versions": version_map,
         }
         return response
+
+    def submit_driver_result(self, run_id: UUID | str, driver_name: str, driver_type: str, raw_xml: str) -> bool:
+        xml_data = base64.decodebytes(bytes(raw_xml, encoding="utf-8"))
+        DriverTestRun.submit_driver_result(UUID(run_id), driver_name, driver_type, xml_data)
+        return True
+
+    def submit_driver_failure(self, run_id: UUID | str, driver_name: str, driver_type: str, failure_reason: str) -> bool:
+        DriverTestRun.submit_driver_failure(UUID(run_id), driver_name, driver_type, failure_reason)
+        return True
+
+    def submit_env_info(self, run_id: UUID | str, raw_env: str) -> bool:
+        DriverTestRun.submit_env_info(UUID(run_id), raw_env)
+        return True
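
submit_driver_result() expects raw_xml to arrive base64-encoded and decodes it with base64.decodebytes() before handing the bytes to DriverTestRun. A small round trip illustrating the expected encoding; the XML payload is made up:

```python
# Illustrative round-trip of the encoding the service expects -- not from the package.
import base64

original = '<testsuite name="parametrized_tests" tests="3" failures="0"/>'
wire_form = base64.encodebytes(original.encode("utf-8")).decode("utf-8")   # what a client would send
decoded = base64.decodebytes(bytes(wire_form, encoding="utf-8"))           # what the service does
assert decoded.decode("utf-8") == original
```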

argus/backend/plugins/driver_matrix_tests/udt.py

@@ -12,12 +12,12 @@ class TestCase(UserType):
 
 class TestSuite(UserType):
     name = columns.Text()
-    tests_total = columns.Integer()
-    failures = columns.Integer()
-    disabled = columns.Integer()
-    skipped = columns.Integer()
-    passed = columns.Integer()
-    errors = columns.Integer()
+    tests_total = columns.Integer(default=lambda: 0)
+    failures = columns.Integer(default=lambda: 0)
+    disabled = columns.Integer(default=lambda: 0)
+    skipped = columns.Integer(default=lambda: 0)
+    passed = columns.Integer(default=lambda: 0)
+    errors = columns.Integer(default=lambda: 0)
     time = columns.Float()
     cases = columns.List(value_type=columns.UserDefinedType(user_type=TestCase))
 
@@ -25,14 +25,15 @@ class TestSuite(UserType):
 class TestCollection(UserType):
     name = columns.Text()
     driver = columns.Text()
-    tests_total = columns.Integer()
-    failures = columns.Integer()
-    disabled = columns.Integer()
-    skipped = columns.Integer()
-    passed = columns.Integer()
-    errors = columns.Integer()
+    tests_total = columns.Integer(default=lambda: 0)
+    failure_message = columns.Text()
+    failures = columns.Integer(default=lambda: 0)
+    disabled = columns.Integer(default=lambda: 0)
+    skipped = columns.Integer(default=lambda: 0)
+    passed = columns.Integer(default=lambda: 0)
+    errors = columns.Integer(default=lambda: 0)
     timestamp = columns.DateTime()
-    time = columns.Float()
+    time = columns.Float(default=lambda: 0.0)
     suites = columns.List(value_type=columns.UserDefinedType(user_type=TestSuite))
 
 
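
All per-suite and per-collection counters now carry default=lambda: 0 (TestCollection also gains a failure_message text field and a 0.0 default for time). In cqlengine a column default may be a plain value or a zero-argument callable, so counters that are never explicitly set come out as 0 rather than null, which keeps downstream aggregation from tripping over None. A minimal sketch with a hypothetical user type, not the package's own:

```python
# Hypothetical sketch, not from the package: callable defaults on cqlengine columns.
from cassandra.cqlengine import columns
from cassandra.cqlengine.usertype import UserType


class SuiteCounters(UserType):
    name = columns.Text()
    passed = columns.Integer(default=lambda: 0)    # supplies 0 whenever no value is set
    failures = columns.Integer(default=lambda: 0)
```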

argus/backend/plugins/sct/testrun.py

@@ -1,6 +1,6 @@
 from enum import Enum
 import logging
-from datetime import datetime
+from datetime import datetime, timezone
 from dataclasses import dataclass, field
 from typing import Optional
 from uuid import UUID
@@ -109,6 +109,7 @@ class SCTTestRun(PluginModelBase):
     stress_cmd = columns.Text()
 
     histograms = columns.List(value_type=columns.Map(key_type=columns.Text(), value_type=columns.UserDefinedType(user_type=PerformanceHDRHistogram)))
+    test_method = columns.Ascii()
 
     @classmethod
     def _stats_query(cls) -> str:
@@ -199,6 +200,7 @@ class SCTTestRun(PluginModelBase):
 
         run.config_files = req.sct_config.get("config_files")
         run.region_name = regions
+        run.test_method = req.sct_config.get("test_method")
         run.save()
 
         return run
@@ -252,8 +254,12 @@ class SCTTestRun(PluginModelBase):
     def sut_timestamp(self) -> float:
         """converts scylla-server date to timestamp and adds revision in subseconds precision to diffirentiate
         scylla versions from the same day. It's not perfect, but we don't know exact version time."""
-        scylla_package = [package for package in self.packages if package.name == "scylla-server"][0]
-        return (datetime.strptime(scylla_package.date, '%Y%m%d').timestamp()
+        try:
+            scylla_package_upgraded = [package for package in self.packages if package.name == "scylla-server-upgraded"][0]
+        except IndexError:
+            scylla_package_upgraded = None
+        scylla_package = scylla_package_upgraded or [package for package in self.packages if package.name == "scylla-server"][0]
+        return (datetime.strptime(scylla_package.date, '%Y%m%d').replace(tzinfo=timezone.utc).timestamp()
                 + int(scylla_package.revision_id, 16) % 1000000 / 1000000)
 
 
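
sut_timestamp() now prefers a scylla-server-upgraded package when one is present (relevant for upgrade runs) and parses the package date as UTC, so the result no longer depends on the server's local timezone. The revision id still contributes a sub-second component so that two builds from the same day sort deterministically. A worked example with made-up inputs:

```python
# Worked example with made-up inputs (not package data).
from datetime import datetime, timezone

date, revision_id = "20240315", "abc123"
base = datetime.strptime(date, "%Y%m%d").replace(tzinfo=timezone.utc).timestamp()  # 1710460800.0
fraction = int(revision_id, 16) % 1000000 / 1000000                                # 256099 / 1000000
print(base + fraction)  # 1710460800.256099 -- same-day builds differ only in the fraction
```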

argus/backend/service/argus_service.py

@@ -459,8 +459,13 @@ class ArgusService:
            self.update_schedule_comment({"newComment": comment, "releaseId": test.release_id, "groupId": test.group_id, "testId": test.id})
 
         schedule_assignee: ArgusScheduleAssignee = ArgusScheduleAssignee.get(schedule_id=schedule_id)
-        schedule_assignee.assignee = assignee
-        schedule_assignee.save()
+        new_assignee = ArgusScheduleAssignee()
+        new_assignee.assignee = assignee
+        new_assignee.release_id = schedule_assignee.release_id
+        new_assignee.schedule_id = schedule_assignee.schedule_id
+        new_assignee.save()
+        schedule_assignee.delete()
+
         return True
 
     def delete_schedule(self, payload: dict) -> dict:
@@ -493,11 +498,16 @@ class ArgusService:
         full_schedule["assignees"] = [assignee.assignee for assignee in assignees]
 
         if len(assignees) > 0:
-            schedule_user = User.get(id=assignees[0].assignee)
+            try:
+                schedule_user = User.get(id=assignees[0].assignee)
+            except User.DoesNotExist:
+                schedule_user = User()
+                schedule_user.id = assignees[0].assignee
+                LOGGER.warning("Deleting orphaned user assignments")
             service = TestRunService()
 
             for model in all_plugin_models():
-                for run in model.get_jobs_assigned_to_user(schedule_user):
+                for run in model.get_jobs_assigned_to_user(schedule_user.id):
                     if run["release_id"] != release.id:
                         continue
                     if run["test_id"] not in full_schedule["tests"]:
@@ -644,7 +654,7 @@ class ArgusService:
         today = datetime.datetime.now()
         validity_period = today - datetime.timedelta(days=current_app.config.get("JOB_VALIDITY_PERIOD_DAYS", 30))
         for plugin in all_plugin_models():
-            for run in plugin.get_jobs_assigned_to_user(user=user):
+            for run in plugin.get_jobs_assigned_to_user(user_id=user.id):
                 if run["start_time"] >= validity_period:
                     yield run
 
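
Reassigning a schedule is now done by writing a new ArgusScheduleAssignee row and deleting the old one instead of mutating assignee in place. One plausible reading, not stated in the diff, is that assignee participates in the row's key, and key columns cannot be updated in Cassandra; the general cqlengine pattern would look like the hypothetical sketch below (the ScheduleAssignee model and reassign helper are stand-ins, not the package's own):

```python
# Hypothetical sketch, not from the package: replacing a value that is part of a
# row's key in cqlengine means inserting a replacement row and deleting the old one.
from cassandra.cqlengine import columns
from cassandra.cqlengine.models import Model


class ScheduleAssignee(Model):                       # stand-in for ArgusScheduleAssignee
    schedule_id = columns.UUID(partition_key=True)
    assignee = columns.UUID(primary_key=True)        # clustering key -> cannot be updated in place
    release_id = columns.UUID()


def reassign(old_row: ScheduleAssignee, new_assignee) -> None:
    new_row = ScheduleAssignee(
        schedule_id=old_row.schedule_id,
        assignee=new_assignee,
        release_id=old_row.release_id,
    )
    new_row.save()       # write the replacement assignment first
    old_row.delete()     # then drop the stale one
```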

argus/backend/service/build_system_monitor.py

@@ -20,9 +20,9 @@ class ArgusTestsMonitor(ABC):
 
     def __init__(self) -> None:
         self._cluster = ScyllaCluster.get()
-        self._existing_releases = list(ArgusRelease.all())
-        self._existing_groups = list(ArgusGroup.all())
-        self._existing_tests = list(ArgusTest.all())
+        self._existing_releases = list(ArgusRelease.objects().limit(None))
+        self._existing_groups = list(ArgusGroup.objects().limit(None))
+        self._existing_tests = list(ArgusTest.objects().limit(None))
         self._filtered_groups: list[str] = self.BUILD_SYSTEM_FILTERED_PREFIXES
 
     def create_release(self, release_name):

argus/backend/service/client_service.py

@@ -26,6 +26,14 @@ class ClientService:
         model = self.get_model(run_type)
         model.submit_run(request_data=request_data)
         return "Created"
+
+    def get_run(self, run_type: str, run_id: str):
+        model = self.get_model(run_type)
+        try:
+            run = model.get(id=run_id)
+        except model.DoesNotExist:
+            return None
+        return run
 
     def heartbeat(self, run_type: str, run_id: str) -> int:
         model = self.get_model(run_type)
@@ -71,10 +79,20 @@ class ClientService:
 
         return "Finalized"
 
-    def submit_results(self, run_type: str, run_id: str, results: dict) -> str:
+    def submit_results(self, run_type: str, run_id: str, results: dict) -> dict[str, str]:
         model = self.get_model(run_type)
-        run = model.load_test_run(UUID(run_id))
-        ArgusGenericResultMetadata(test_id=run.test_id, **results["meta"]).save()
+        try:
+            run = model.load_test_run(UUID(run_id))
+        except model.DoesNotExist:
+            return {"status": "error", "response": {
+                "exception": "DoesNotExist",
+                "arguments": [run_id]
+            }}
+        existing_table = ArgusGenericResultMetadata.objects(test_id=run.test_id, name=results["meta"]["name"]).first()
+        if existing_table:
+            existing_table.update_if_changed(results["meta"])
+        else:
+            ArgusGenericResultMetadata(test_id=run.test_id, **results["meta"]).save()
         if results.get("sut_timestamp", 0) == 0:
             results["sut_timestamp"] = run.sut_timestamp()  # automatic sut_timestamp
         table_name = results["meta"]["name"]
@@ -86,4 +104,4 @@ class ClientService:
                 sut_timestamp=sut_timestamp,
                 **cell
             ).save()
-        return "Submitted"
+        return {"status": "ok", "message": "Results submitted"}
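
submit_results() no longer returns a bare string: an unknown run id produces {"status": "error", ...} instead of an unhandled exception, metadata for an existing results table is updated via update_if_changed() rather than blindly overwritten, and success returns {"status": "ok", ...}. A hedged sketch of how a caller might branch on the new shape; the handle_submit helper is illustrative and not part of the package:

```python
# Illustrative only: consuming the dict now returned by ClientService.submit_results().
def handle_submit(response: dict) -> None:
    if response.get("status") == "ok":
        print(response.get("message", "Results submitted"))
        return
    details = response.get("response", {})
    raise RuntimeError(f"submission failed: {details.get('exception')} {details.get('arguments')}")
```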

argus/backend/service/jenkins_service.py

@@ -220,7 +220,9 @@ class JenkinsService:
     def build_job(self, build_id: str, params: dict, user_override: str = None):
         queue_number = self._jenkins.build_job(build_id, {
             **params,
-            self.RESERVED_PARAMETER_NAME: g.user.username if not user_override else user_override
+            # use the user's email as the default value for the requested by user parameter,
+            # so it would align with how SCT default works, on runs not trigger by argus
+            self.RESERVED_PARAMETER_NAME: g.user.email.split('@')[0] if not user_override else user_override
         })
         return queue_number
 

argus/backend/service/results_service.py (new file)

@@ -0,0 +1,201 @@
+import copy
+import logging
+import math
+from typing import List, Dict, Any
+from uuid import UUID
+
+from argus.backend.db import ScyllaCluster
+from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
+
+LOGGER = logging.getLogger(__name__)
+
+default_options = {
+    "scales": {
+        "y": {
+            "beginAtZero": True,
+            "title": {
+                "display": True,
+                "text": ''
+            }
+        },
+        "x": {
+            "type": "time",
+            "time": {
+                "unit": "day",
+                "displayFormats": {
+                    "day": "yyyy-MM-dd",
+                },
+            },
+            "title": {
+                "display": True,
+                "text": 'SUT Date'
+            }
+        },
+    },
+    "elements": {
+        "line": {
+            "tension": .1,
+        }
+    },
+    "plugins": {
+        "legend": {
+            "position": 'top',
+        },
+        "title": {
+            "display": True,
+            "text": ''
+        }
+    }
+}
+
+colors = [
+    'rgba(255, 0, 0, 1.0)',  # Red
+    'rgba(0, 255, 0, 1.0)',  # Green
+    'rgba(0, 0, 255, 1.0)',  # Blue
+    'rgba(0, 255, 255, 1.0)',  # Cyan
+    'rgba(255, 0, 255, 1.0)',  # Magenta
+    'rgba(255, 255, 0, 1.0)',  # Yellow
+    'rgba(255, 165, 0, 1.0)',  # Orange
+    'rgba(128, 0, 128, 1.0)',  # Purple
+    'rgba(50, 205, 50, 1.0)',  # Lime
+    'rgba(255, 192, 203, 1.0)',  # Pink
+    'rgba(0, 128, 128, 1.0)',  # Teal
+    'rgba(165, 42, 42, 1.0)',  # Brown
+    'rgba(0, 0, 128, 1.0)',  # Navy
+    'rgba(128, 128, 0, 1.0)',  # Olive
+    'rgba(255, 127, 80, 1.0)'  # Coral
+]
+
+
+def get_sorted_data_for_column_and_row(data: List[ArgusGenericResultData], column: str, row: str) -> List[Dict[str, Any]]:
+    return sorted([{"x": entry.sut_timestamp.strftime('%Y-%m-%dT%H:%M:%SZ'),
+                    "y": entry.value,
+                    "id": entry.run_id}
+                   for entry in data if entry.column == column and entry.row == row],
+                  key=lambda point: point["x"])
+
+
+def get_min_max_y(datasets: List[Dict[str, Any]]) -> (float, float):
+    """0.5 - 1.5 of min/max of 50% results"""
+    y = [entry['y'] for dataset in datasets for entry in dataset['data']]
+    if not y:
+        return 0, 0
+    sorted_y = sorted(y)
+    lower_percentile_index = int(0.25 * len(sorted_y))
+    upper_percentile_index = int(0.75 * len(sorted_y)) - 1
+    y_min = sorted_y[lower_percentile_index]
+    y_max = sorted_y[upper_percentile_index]
+    return math.floor(0.5 * y_min), math.ceil(1.5 * y_max)
+
+
+def round_datasets_to_min_max(datasets: List[Dict[str, Any]], min_y: float, max_y: float) -> List[Dict[str, Any]]:
+    """Round values to min/max and provide original value for tooltip"""
+    for dataset in datasets:
+        for entry in dataset['data']:
+            val = entry['y']
+            if val > max_y:
+                entry['y'] = max_y
+                entry['ori'] = val
+            elif val < min_y:
+                entry['y'] = min_y
+                entry['ori'] = val
+    return datasets
+
+
+def create_chartjs(table, data):
+    graphs = []
+    for column in table.columns_meta:
+        if column.type == "TEXT":
+            # skip text columns
+            continue
+        datasets = [
+            {"label": row,
+             "borderColor": colors[idx % len(colors)],
+             "borderWidth": 3,
+             "showLine": True,
+             "data": get_sorted_data_for_column_and_row(data, column.name, row)} for idx, row in enumerate(table.rows_meta)]
+        min_y, max_y = get_min_max_y(datasets)
+        datasets = round_datasets_to_min_max(datasets, min_y, max_y)
+        if not min_y + max_y:
+            # filter out those without data
+            continue
+        options = copy.deepcopy(default_options)
+        options["plugins"]["title"]["text"] = f"{table.name} - {column.name}"
+        options["scales"]["y"]["title"]["text"] = f"[{column.unit}]" if column.unit else ""
+        options["scales"]["y"]["min"] = min_y
+        options["scales"]["y"]["max"] = max_y
+        graphs.append({"options": options, "data":
+                       {"datasets": datasets}})
+    return graphs
+
+
+def calculate_graph_ticks(graphs: List[Dict]) -> dict[str, str]:
+    min_x, max_x = None, None
+
+    for graph in graphs:
+        for dataset in graph["data"]["datasets"]:
+            if not dataset["data"]:
+                continue
+            first_x = dataset["data"][0]["x"]
+            last_x = dataset["data"][-1]["x"]
+            if min_x is None or first_x < min_x:
+                min_x = first_x
+            if max_x is None or last_x > max_x:
+                max_x = last_x
+    return {"min": min_x[:10], "max": max_x[:10]}
+
+
+class ResultsService:
+
+    def __init__(self):
+        self.cluster = ScyllaCluster.get()
+
+    def _get_tables_metadata(self, test_id: UUID) -> list[ArgusGenericResultMetadata]:
+        query_fields = ["name", "description", "columns_meta", "rows_meta"]
+        raw_query = (f"SELECT {','.join(query_fields)}"
+                     f" FROM generic_result_metadata_v1 WHERE test_id = ?")
+        query = self.cluster.prepare(raw_query)
+        tables_meta = self.cluster.session.execute(query=query, parameters=(test_id,))
+        return [ArgusGenericResultMetadata(**table) for table in tables_meta]
+
+    def get_run_results(self, test_id: UUID, run_id: UUID) -> list[dict]:
+        query_fields = ["column", "row", "value", "value_text", "status"]
+        raw_query = (f"SELECT {','.join(query_fields)},WRITETIME(status) as ordering"
+                     f" FROM generic_result_data_v1 WHERE test_id = ? AND run_id = ? AND name = ?")
+        query = self.cluster.prepare(raw_query)
+        tables_meta = self._get_tables_metadata(test_id=test_id)
+        tables = []
+        for table in tables_meta:
+            cells = self.cluster.session.execute(query=query, parameters=(test_id, run_id, table.name))
+            if not cells:
+                continue
+            cells = [dict(cell.items()) for cell in cells]
+            tables.append({'meta': {
+                'name': table.name,
+                'description': table.description,
+                'columns_meta': table.columns_meta,
+                'rows_meta': table.rows_meta,
+            },
+                'cells': [{k: v for k, v in cell.items() if k in query_fields} for cell in cells],
+                'order': min([cell['ordering'] for cell in cells] or [0])})
+        return sorted(tables, key=lambda x: x['order'])
+
+    def get_test_graphs(self, test_id: UUID):
+        query_fields = ["run_id", "column", "row", "value", "status", "sut_timestamp"]
+        raw_query = (f"SELECT {','.join(query_fields)}"
+                     f" FROM generic_result_data_v1 WHERE test_id = ? AND name = ? LIMIT 2147483647")
+        query = self.cluster.prepare(raw_query)
+        tables_meta = self._get_tables_metadata(test_id=test_id)
+        graphs = []
+        for table in tables_meta:
+            data = self.cluster.session.execute(query=query, parameters=(test_id, table.name))
+            data = [ArgusGenericResultData(**cell) for cell in data]
+            if not data:
+                continue
+            graphs.extend(create_chartjs(table, data))
+        ticks = calculate_graph_ticks(graphs)
+        return graphs, ticks
+
+    def is_results_exist(self, test_id: UUID):
+        """Verify if results for given test id exist at all."""
+        return bool(ArgusGenericResultMetadata.objects(test_id=test_id).only(["name"]).limit(1))
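
The y-axis window used by create_chartjs() comes from get_min_max_y(), which takes the values at the 25th and 75th percentile positions and widens them to 0.5x and 1.5x, and round_datasets_to_min_max(), which clamps outliers into that window while keeping the original value under an "ori" key for tooltips. A worked example with made-up data (assuming the module imports cleanly in your environment):

```python
# Worked example with made-up data -- not from the package.
from argus.backend.service.results_service import get_min_max_y, round_datasets_to_min_max

datasets = [{"label": "latency", "data": [
    {"x": "2024-01-01T00:00:00Z", "y": 10},
    {"x": "2024-01-02T00:00:00Z", "y": 12},
    {"x": "2024-01-03T00:00:00Z", "y": 11},
    {"x": "2024-01-04T00:00:00Z", "y": 400},   # outlier
]}]

min_y, max_y = get_min_max_y(datasets)   # sorted y = [10, 11, 12, 400] -> picks 11 and 12
assert (min_y, max_y) == (5, 18)         # floor(0.5 * 11), ceil(1.5 * 12)

clamped = round_datasets_to_min_max(datasets, min_y, max_y)
assert clamped[0]["data"][3] == {"x": "2024-01-04T00:00:00Z", "y": 18, "ori": 400}
```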

argus/backend/service/testrun.py

@@ -13,7 +13,7 @@ from flask import g
 from cassandra.query import BatchStatement, ConsistencyLevel
 from cassandra.cqlengine.query import BatchQuery
 from argus.backend.db import ScyllaCluster
-from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
+from argus.backend.models.result import ArgusGenericResultMetadata
 
 from argus.backend.models.web import (
     ArgusEvent,
@@ -223,7 +223,7 @@ class TestRunService:
         mentions = set(mentions)
         for potential_mention in re.findall(self.RE_MENTION, message_stripped):
             if user := User.exists_by_name(potential_mention.lstrip("@")):
-                mentions.add(user)
+                mentions.add(user) if user.id != g.user.id else None
 
         test: ArgusTest = ArgusTest.get(id=test_id)
         plugin = self.get_plugin(test.plugin_name)
@@ -307,23 +307,6 @@ class TestRunService:
         }
         return response
 
-    def fetch_results(self, test_id: UUID, run_id: UUID) -> dict:
-        query_fields = ["column", "row", "value", "status"]
-        tables_meta = ArgusGenericResultMetadata.filter(test_id=test_id)
-        tables = []
-        for table in tables_meta:
-            cells = ArgusGenericResultData.objects.filter(test_id=test_id, run_id=run_id, name=table.name).only(query_fields)
-            if not cells:
-                continue
-            tables.append({'meta': {
-                'name': table.name,
-                'description': table.description,
-                'columns_meta': table.columns_meta,
-                'rows_meta': table.rows_meta
-            },
-                'cells': [{k:v for k,v in cell.items() if k in query_fields} for cell in cells]})
-
-        return tables
 
     def submit_github_issue(self, issue_url: str, test_id: UUID, run_id: UUID):
         user_tokens = UserOauthToken.filter(user_id=g.user.id).all()

argus/backend/service/user.py

@@ -17,6 +17,9 @@ from argus.backend.models.web import User, UserOauthToken, UserRoles, WebFileSto
 from argus.backend.util.common import FlaskView
 
 
+class UserServiceException(Exception):
+    pass
+
 class GithubOrganizationMissingError(Exception):
     pass
 
@@ -29,6 +32,8 @@ class UserService:
 
     @staticmethod
     def check_roles(roles: list[UserRoles] | UserRoles, user: User) -> bool:
+        if not user:
+            return False
         if isinstance(roles, str):
             return roles in user.roles
         elif isinstance(roles, list):
@@ -38,6 +43,8 @@ class UserService:
         return False
 
     def github_callback(self, req_code: str) -> dict | None:
+        if "gh" not in current_app.config.get("LOGIN_METHODS", []):
+            raise UserServiceException("Github Login is disabled")
         # pylint: disable=too-many-locals
         oauth_response = requests.post(
             "https://github.com/login/oauth/access_token",
@@ -90,7 +97,10 @@ class UserService:
         except User.DoesNotExist:
             user = User()
             user.username = user_info.get("login")
-            user.email = email_info[-1].get("email")
+            # pick only scylladb.com emails
+            scylla_email = next(iter([email.get("email") for email in email_info if email.get("email").endswith("@scylladb.com")]), None)
+            primary_email = next(iter([email.get("email") for email in email_info if email.get("primary") and email.get("verified")]), None)
+            user.email = scylla_email or primary_email
             user.full_name = user_info.get("name", user_info.get("login"))
             user.registration_date = datetime.utcnow()
             user.roles = ["ROLE_USER"]
@@ -137,6 +147,15 @@ class UserService:
     def get_users(self) -> dict:
         users = User.all()
         return {str(user.id): user.to_json() for user in users}
+
+    def get_users_privileged(self) -> dict:
+        users = User.all()
+        users = {str(user.id): dict(user.items()) for user in users}
+        for user in users.values():
+            user.pop("password")
+            user.pop("api_token")
+
+        return users
 
     def generate_token(self, user: User):
         token_digest = f"{user.username}-{int(time())}-{base64.encodebytes(os.urandom(128)).decode(encoding='utf-8')}"
@@ -150,13 +169,51 @@ class UserService:
         user.email = new_email
         user.save()
 
-    def update_password(self, user: User, old_password: str, new_password: str):
-        if check_password_hash(user.password, old_password):
-            raise Exception("Incorrect old password")
+        return True
+
+    def toggle_admin(self, user_id: str):
+        user: User = User.get(id=user_id)
+
+        if user.id == g.user.id:
+            raise UserServiceException("Cannot toggle admin role from yourself.")
+
+        is_admin = UserService.check_roles(UserRoles.Admin, user)
+
+        if is_admin:
+            user.roles.remove(UserRoles.Admin)
+        else:
+            user.set_as_admin()
+
+        user.save()
+        return True
+
+    def delete_user(self, user_id: str):
+        user: User = User.get(id=user_id)
+        if user.id == g.user.id:
+            raise UserServiceException("Cannot delete user that you are logged in as.")
+
+        if user.is_admin():
+            raise UserServiceException("Cannot delete admin users. Unset admin flag before deleting")
+
+        user.delete()
+
+        return True
+
+    def update_password(self, user: User, old_password: str, new_password: str, force = False):
+        if not check_password_hash(user.password, old_password) and not force:
+            raise UserServiceException("Incorrect old password")
+
+        if not new_password:
+            raise UserServiceException("Empty new password")
+
+        if len(new_password) < 5:
+            raise UserServiceException("New password is too short")
 
         user.password = generate_password_hash(new_password)
         user.save()
 
+        return True
+
     def update_name(self, user: User, new_name: str):
         user.full_name = new_name
         user.save()
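
update_password() now raises UserServiceException instead of a bare Exception, rejects empty passwords and passwords shorter than five characters, accepts a force flag for administrative resets, and fixes the inverted old-password check visible in the removed lines (the old code raised when the old password was correct). It relies on the standard werkzeug helpers, shown here in a small illustrative round trip; the password values are made up:

```python
# Illustrative only: the werkzeug helpers used by update_password().
from werkzeug.security import check_password_hash, generate_password_hash

stored = generate_password_hash("hunter22")          # salted hash, safe to persist
assert check_password_hash(stored, "hunter22")       # correct old password -> change allowed
assert not check_password_hash(stored, "wrong-one")  # wrong old password -> UserServiceException
```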

argus/backend/service/views.py

@@ -110,9 +110,9 @@ class UserViewService:
             search_func = facet_wrapper(query_func=search_func, facet_query=value, facet_type=facet)
 
 
-        all_tests = ArgusTest.all()
-        all_releases = ArgusRelease.all()
-        all_groups = ArgusGroup.all()
+        all_tests = ArgusTest.objects().limit(None)
+        all_releases = ArgusRelease.objects().limit(None)
+        all_groups = ArgusGroup.objects().limit(None)
         release_by_id = {release.id: partial(self.index_mapper, type="release")(release) for release in all_releases}
         group_by_id = {group.id: partial(self.index_mapper, type="group")(group) for group in all_groups}
         index = [self.index_mapper(t) for t in all_tests]

argus/backend/util/config.py

@@ -10,8 +10,10 @@ LOGGER = logging.getLogger(__name__)
 class Config:
     CONFIG = None
     CONFIG_PATHS = [
-        Path("./config/argus_web.yaml"),
+        Path(__file__).parents[3] / "config" / "argus_web.yaml",
         Path("argus_web.yaml"),
+        Path("../config/argus_web.yaml"),
+
     ]
 
     @classmethod
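
The first lookup location is now resolved relative to the installed module instead of the process's working directory, with ../config/argus_web.yaml kept as an extra fallback. Assuming config.py sits at <checkout>/argus/backend/util/config.py, parents[3] walks up to the checkout root; the install path below is made up for illustration:

```python
# Illustrative only: how Path(__file__).parents[3] resolves for this module layout.
from pathlib import Path

module = Path("/opt/argus/argus/backend/util/config.py")   # hypothetical location of config.py
print(module.parents[3] / "config" / "argus_web.yaml")     # /opt/argus/config/argus_web.yaml
```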

argus/backend/util/encoders.py

@@ -3,6 +3,7 @@ import logging
 from json.encoder import JSONEncoder
 from uuid import UUID
 
+from flask.json.provider import DefaultJSONProvider
 import cassandra.cqlengine.usertype as ut
 import cassandra.cqlengine.models as m
 
@@ -22,3 +23,19 @@ class ArgusJSONEncoder(JSONEncoder):
                 return o.strftime("%Y-%m-%dT%H:%M:%SZ")
             case _:
                 return super().default(o)
+
+
+class ArgusJSONProvider(DefaultJSONProvider):
+
+    def default(self, o):
+        match o:
+            case UUID():
+                return str(o)
+            case ut.UserType():
+                return dict(o.items())
+            case m.Model():
+                return dict(o.items())
+            case datetime():
+                return o.strftime("%Y-%m-%dT%H:%M:%SZ")
+            case _:
+                return super().default(o)
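
ArgusJSONProvider mirrors ArgusJSONEncoder for Flask's JSON provider interface (Flask 2.2 replaced the old app.json_encoder hook with providers). One way to wire it up is sketched below; the app construction line is illustrative, not taken from the package:

```python
# Illustrative wiring -- not from the package.
from flask import Flask

from argus.backend.util.encoders import ArgusJSONProvider

app = Flask(__name__)
app.json = ArgusJSONProvider(app)   # jsonify() and flask.json now use ArgusJSONProvider.default
```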