argus-alm 0.12.9__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. argus/client/base.py +1 -1
  2. argus/client/driver_matrix_tests/cli.py +2 -2
  3. argus/client/driver_matrix_tests/client.py +1 -1
  4. argus/client/generic/cli.py +2 -2
  5. argus/client/generic_result.py +3 -2
  6. argus/client/sct/client.py +3 -3
  7. argus/client/sirenada/client.py +1 -1
  8. {argus_alm-0.12.9.dist-info → argus_alm-0.13.0.dist-info}/METADATA +2 -4
  9. argus_alm-0.13.0.dist-info/RECORD +20 -0
  10. argus/backend/.gitkeep +0 -0
  11. argus/backend/cli.py +0 -41
  12. argus/backend/controller/__init__.py +0 -0
  13. argus/backend/controller/admin.py +0 -20
  14. argus/backend/controller/admin_api.py +0 -354
  15. argus/backend/controller/api.py +0 -529
  16. argus/backend/controller/auth.py +0 -67
  17. argus/backend/controller/client_api.py +0 -108
  18. argus/backend/controller/main.py +0 -274
  19. argus/backend/controller/notification_api.py +0 -72
  20. argus/backend/controller/notifications.py +0 -13
  21. argus/backend/controller/team.py +0 -126
  22. argus/backend/controller/team_ui.py +0 -18
  23. argus/backend/controller/testrun_api.py +0 -482
  24. argus/backend/controller/view_api.py +0 -162
  25. argus/backend/db.py +0 -100
  26. argus/backend/error_handlers.py +0 -21
  27. argus/backend/events/event_processors.py +0 -34
  28. argus/backend/models/__init__.py +0 -0
  29. argus/backend/models/result.py +0 -138
  30. argus/backend/models/web.py +0 -389
  31. argus/backend/plugins/__init__.py +0 -0
  32. argus/backend/plugins/core.py +0 -225
  33. argus/backend/plugins/driver_matrix_tests/controller.py +0 -63
  34. argus/backend/plugins/driver_matrix_tests/model.py +0 -421
  35. argus/backend/plugins/driver_matrix_tests/plugin.py +0 -22
  36. argus/backend/plugins/driver_matrix_tests/raw_types.py +0 -62
  37. argus/backend/plugins/driver_matrix_tests/service.py +0 -60
  38. argus/backend/plugins/driver_matrix_tests/udt.py +0 -42
  39. argus/backend/plugins/generic/model.py +0 -79
  40. argus/backend/plugins/generic/plugin.py +0 -16
  41. argus/backend/plugins/generic/types.py +0 -13
  42. argus/backend/plugins/loader.py +0 -40
  43. argus/backend/plugins/sct/controller.py +0 -185
  44. argus/backend/plugins/sct/plugin.py +0 -38
  45. argus/backend/plugins/sct/resource_setup.py +0 -178
  46. argus/backend/plugins/sct/service.py +0 -491
  47. argus/backend/plugins/sct/testrun.py +0 -272
  48. argus/backend/plugins/sct/udt.py +0 -101
  49. argus/backend/plugins/sirenada/model.py +0 -113
  50. argus/backend/plugins/sirenada/plugin.py +0 -17
  51. argus/backend/service/admin.py +0 -27
  52. argus/backend/service/argus_service.py +0 -688
  53. argus/backend/service/build_system_monitor.py +0 -188
  54. argus/backend/service/client_service.py +0 -122
  55. argus/backend/service/event_service.py +0 -18
  56. argus/backend/service/jenkins_service.py +0 -240
  57. argus/backend/service/notification_manager.py +0 -150
  58. argus/backend/service/release_manager.py +0 -230
  59. argus/backend/service/results_service.py +0 -317
  60. argus/backend/service/stats.py +0 -540
  61. argus/backend/service/team_manager_service.py +0 -83
  62. argus/backend/service/testrun.py +0 -559
  63. argus/backend/service/user.py +0 -307
  64. argus/backend/service/views.py +0 -258
  65. argus/backend/template_filters.py +0 -27
  66. argus/backend/tests/__init__.py +0 -0
  67. argus/backend/tests/argus_web.test.yaml +0 -39
  68. argus/backend/tests/conftest.py +0 -44
  69. argus/backend/tests/results_service/__init__.py +0 -0
  70. argus/backend/tests/results_service/test_best_results.py +0 -70
  71. argus/backend/util/common.py +0 -65
  72. argus/backend/util/config.py +0 -38
  73. argus/backend/util/encoders.py +0 -41
  74. argus/backend/util/logsetup.py +0 -81
  75. argus/backend/util/module_loaders.py +0 -30
  76. argus/backend/util/send_email.py +0 -91
  77. argus/client/generic_result_old.py +0 -143
  78. argus/db/.gitkeep +0 -0
  79. argus/db/argus_json.py +0 -14
  80. argus/db/cloud_types.py +0 -125
  81. argus/db/config.py +0 -135
  82. argus/db/db_types.py +0 -139
  83. argus/db/interface.py +0 -370
  84. argus/db/testrun.py +0 -740
  85. argus/db/utils.py +0 -15
  86. argus_alm-0.12.9.dist-info/RECORD +0 -96
  87. /argus/{backend → common}/__init__.py +0 -0
  88. /argus/{backend/util → common}/enums.py +0 -0
  89. /argus/{backend/plugins/sct/types.py → common/sct_types.py} +0 -0
  90. /argus/{backend/plugins/sirenada/types.py → common/sirenada_types.py} +0 -0
  91. {argus_alm-0.12.9.dist-info → argus_alm-0.13.0.dist-info}/LICENSE +0 -0
  92. {argus_alm-0.12.9.dist-info → argus_alm-0.13.0.dist-info}/WHEEL +0 -0
  93. {argus_alm-0.12.9.dist-info → argus_alm-0.13.0.dist-info}/entry_points.txt +0 -0
argus/db/testrun.py DELETED
@@ -1,740 +0,0 @@
- # TODO: Deprecated, will be removed once REST API client is ready
- import logging
- import datetime
- import time
- import traceback
- import sys
- import threading
- from dataclasses import asdict, is_dataclass, fields, Field, dataclass
- from typing import Any
- from uuid import uuid4, UUID
-
- from argus.db.config import BaseConfig
- from argus.db.utils import is_list_homogeneous
- from argus.db.cloud_types import CloudResource, CloudInstanceDetails, BaseCloudSetupDetails
- from argus.db.interface import ArgusDatabase
- from argus.db.db_types import ColumnInfo, CollectionHint, NemesisRunInfo, TestStatus, TestInvestigationStatus, \
-     EventsBySeverity, PackageVersion
- from argus.backend.models.web import ArgusRelease, ArgusGroup, ArgusTest, ArgusSchedule, ArgusScheduleAssignee, ArgusScheduleGroup, \
-     ArgusScheduleTest
-
- LOGGER = logging.getLogger(__name__)
-
-
- class TestInfoSerializationError(Exception):
-     pass
-
-
- class TestInfoSchemaError(Exception):
-     pass
-
-
- class TestInfoValueError(Exception):
-     pass
-
-
- class BaseTestInfo:
-     EXPOSED_ATTRIBUTES = {}
-     ATTRIBUTE_CONSTRAINTS = {}
-     COLLECTION_HINTS = {}
-
-     def __init__(self, *args, **kwargs):
-         pass
-
-     @classmethod
-     def create_skeleton(cls):
-         pass
-
-     @classmethod
-     def schema(cls):
-         data = {}
-         for attr, column_type in cls.EXPOSED_ATTRIBUTES.items():
-             value = None
-             if column_type is list or column_type is tuple:
-                 value = cls.schema_process_collection(attr)
-                 column_type = CollectionHint
-             constraints = cls.ATTRIBUTE_CONSTRAINTS.get(attr, [])
-             column_info = ColumnInfo(
-                 name=attr, type=column_type, value=value, constraints=constraints)
-             data[attr] = column_info
-
-         return data
-
-     @classmethod
-     def schema_process_collection(cls, attr_name: str):
-         hint = cls.COLLECTION_HINTS.get(attr_name)
-         if not hint:
-             raise TestInfoSchemaError(
-                 "Encountered a collection and no collection hint was found")
-
-         return hint
-
-     def serialize(self):
-         data = {}
-         for attr in self.EXPOSED_ATTRIBUTES:
-             attribute_value = getattr(self, attr)
-             if isinstance(attribute_value, list):
-                 attribute_value = self._process_list(attribute_value)
-             elif is_dataclass(attribute_value):
-                 attribute_value = asdict(attribute_value)
-
-             data[attr] = attribute_value
-
-         return data
-
-     @staticmethod
-     def _process_list(list_to_check: list[Any]):
-         if len(list_to_check) == 0:
-             return list_to_check
-
-         if not is_list_homogeneous(list_to_check):
-             raise TestInfoSerializationError("Detected a non-homogenous list")
-
-         contains_dataclass = is_dataclass(list_to_check[0])
-
-         if contains_dataclass:
-             return [asdict(dc) for dc in list_to_check]
-
-         return list_to_check
-
-     @classmethod
-     def __get_validators__(cls):
-         yield cls.validate
-
-     @classmethod
-     def validate(cls, value, field):  # pylint: disable=unused-argument
-         return value
-
-
- class TestDetails(BaseTestInfo):
-     # pylint: disable=too-many-instance-attributes
-     EXPOSED_ATTRIBUTES = {"scm_revision_id": str, "started_by": str, "build_job_url": str,
-                           "start_time": datetime.datetime, "end_time": datetime.datetime,
-                           "config_files": list, "packages": list, "scylla_version": str,
-                           "yaml_test_duration": int,
-                           }
-     COLLECTION_HINTS = {
-         "packages": CollectionHint(list[PackageVersion]),
-         "config_files": CollectionHint(list[str]),
-     }
-
-     def __init__(self, scm_revision_id: str,
-                  started_by: str, build_job_url: str,
-                  yaml_test_duration: int, start_time: datetime,
-                  config_files: list[str], packages: list[PackageVersion],
-                  end_time: datetime.datetime = datetime.datetime.utcfromtimestamp(0),
-                  scylla_version: str | None = None):
-         # pylint: disable=too-many-arguments
-         super().__init__()
-         self.scm_revision_id = scm_revision_id
-         self.started_by = started_by
-         self.build_job_url = build_job_url
-         self.start_time = start_time
-         self.yaml_test_duration = yaml_test_duration
-         if not (is_list_homogeneous(packages) or (
-                 len(packages) > 0 and isinstance(next(iter(packages)), PackageVersion))):
-             raise TestInfoValueError(
-                 "Package list contains incorrect values", packages)
-         self.packages = packages
-         self.config_files = config_files
-         self.end_time = end_time
-         self.scylla_version = scylla_version
-
-     @classmethod
-     def from_db_row(cls, row):
-         if row.packages:
-             packages = [PackageVersion.from_db_udt(
-                 udt) for udt in row.packages]
-         else:
-             packages = []
-
-         config_files = row.config_files if row.config_files else []
-
-         return cls(scm_revision_id=row.scm_revision_id, started_by=row.started_by, build_job_url=row.build_job_url,
-                    start_time=row.start_time, end_time=row.end_time, yaml_test_duration=row.yaml_test_duration,
-                    config_files=config_files, scylla_version=row.scylla_version,
-                    packages=packages)
-
-     def set_test_end_time(self):
-         self.end_time = datetime.datetime.utcnow().replace(microsecond=0)
-
-
- class TestResourcesSetup(BaseTestInfo):
-     EXPOSED_ATTRIBUTES = {
-         "sct_runner_host": CloudInstanceDetails,
-         "region_name": list,
-         "cloud_setup": BaseCloudSetupDetails
-     }
-     COLLECTION_HINTS = {
-         "region_name": CollectionHint(list[str]),
-     }
-
-     def __init__(self, sct_runner_host: CloudInstanceDetails,
-                  region_name: list[str], cloud_setup: BaseCloudSetupDetails):
-         super().__init__()
-         self.sct_runner_host = sct_runner_host
-         self.region_name = region_name
-         self.cloud_setup = cloud_setup
-
-     @classmethod
-     def from_db_row(cls, row):
-         runner = CloudInstanceDetails.from_db_udt(row.sct_runner_host)
-         cloud_setup = BaseCloudSetupDetails.from_db_udt(row.cloud_setup)
-
-         regions = row.region_name if row.region_name else []
-
-         return cls(sct_runner_host=runner, region_name=regions,
-                    cloud_setup=cloud_setup)
-
-
- class TestLogs(BaseTestInfo):
-     EXPOSED_ATTRIBUTES = {"logs": list}
-     COLLECTION_HINTS = {
-         "logs": CollectionHint(list[tuple[str, str]])
-     }
-
-     def __init__(self):
-         super().__init__()
-         self._log_collection = []
-
-     def add_log(self, log_type: str, log_url: str | list[str]) -> None:
-         if isinstance(log_url, str):
-             self._log_collection.append((log_type, log_url))
-         elif isinstance(log_url, list):
-             for log in log_url:
-                 self._log_collection.append((log_type, log))
-         else:
-             LOGGER.warning("Unknown log type encountered: %s", log_url)
-
-     @property
-     def logs(self) -> list[tuple[str, str]]:
-         return self._log_collection
-
-     @classmethod
-     def from_db_row(cls, row):
-         logs = cls()
-         if row.logs:
-             for log_type, log_url in row.logs:
-                 logs.add_log(log_type, log_url)
-
-         return logs
-
-
- class TestResources(BaseTestInfo):
-     EXPOSED_ATTRIBUTES = {"allocated_resources": list}
-     COLLECTION_HINTS = {
-         "allocated_resources": CollectionHint(list[CloudResource]),
-     }
-
-     def __init__(self):
-         super().__init__()
-         self._allocated_resources = []
-
-     def attach_resource(self, resource: CloudResource):
-         self._allocated_resources.append(resource)
-         self._allocated_resources.sort(key=lambda v: v.name)
-
-     def detach_resource(self, resource: CloudResource, reason: str = "unspecified reason"):
-         resource_to_detach = next(
-             r for r in self._allocated_resources if r == resource)
-         resource_to_detach.terminate(reason=reason)
-
-     @property
-     def allocated_resources(self) -> list[CloudResource]:
-         return self._allocated_resources
-
-     @classmethod
-     def from_db_row(cls, row):
-         resources = cls()
-         resource_row = row.allocated_resources if row.allocated_resources else []
-         for resource in resource_row:
-             cloud_resource = CloudResource.from_db_udt(resource)
-             resources.allocated_resources.append(cloud_resource)
-         resources.allocated_resources.sort(key=lambda v: v.name)
-         return resources
-
-
- class TestResults(BaseTestInfo):
-     # pylint: disable=too-many-arguments
-     EXPOSED_ATTRIBUTES = {"status": str, "events": list,
-                           "nemesis_data": list, "screenshots": list}
-     COLLECTION_HINTS = {
-         "events": CollectionHint(list[EventsBySeverity]),
-         "nemesis_data": CollectionHint(list[NemesisRunInfo]),
-         "screenshots": CollectionHint(list[str]),
-     }
-
-     def __init__(self, status: TestStatus, events: list[EventsBySeverity] = None,
-                  nemesis_data: list[NemesisRunInfo] = None, screenshots: list[str] = None,
-                  max_stored_events=25):
-         super().__init__()
-         if isinstance(status, TestStatus):
-             self._status = status.value
-         else:
-             self._status = TestStatus(status).value
-         self.events = events if events else []
-         self.nemesis_data = nemesis_data if nemesis_data else []
-         self.screenshots = screenshots if screenshots else []
-         self.max_stored_events = max_stored_events
-
-     @classmethod
-     def from_db_row(cls, row):
-         if row.events:
-             events = [EventsBySeverity.from_db_udt(
-                 event) for event in row.events]
-         else:
-             events = []
-
-         if row.nemesis_data:
-             nemesis_data = [NemesisRunInfo.from_db_udt(
-                 nemesis) for nemesis in row.nemesis_data]
-         else:
-             nemesis_data = []
-
-         if row.screenshots:
-             screenshots = row.screenshots
-         else:
-             screenshots = []
-
-         return cls(status=row.status, events=events, nemesis_data=nemesis_data, screenshots=screenshots)
-
-     def _add_new_event_type(self, event: EventsBySeverity):
-         if len([v for v in self.events if v.severity == event.severity]) > 0:
-             raise TestInfoValueError(
-                 f"Severity event collection {event.severity} already exists in TestResults")
-
-         self.events.append(event)
-
-     def _collect_event_message(self, event: EventsBySeverity, message: str):
-         if len(event.last_events) >= self.max_stored_events:
-             event.last_events = event.last_events[1:]
-
-         event.event_amount += 1
-         event.last_events.append(message)
-
-     def add_nemesis(self, nemesis: NemesisRunInfo):
-         self.nemesis_data.append(nemesis)
-
-     def add_event(self, event_severity: str, event_message: str):
-         try:
-             event = next(filter(lambda v: v.severity ==
-                                 event_severity, self.events))
-         except StopIteration:
-             event = EventsBySeverity(
-                 severity=event_severity, event_amount=0, last_events=[])
-             self._add_new_event_type(event)
-
-         self._collect_event_message(event, event_message)
-
-     def add_screenshot(self, screenshot_link: str):
-         self.screenshots.append(screenshot_link)
-
-     @property
-     def status(self) -> TestStatus:
-         return TestStatus(self._status)
-
-     @status.setter
-     def status(self, value: TestStatus):
-         self._status = TestStatus(value).value
-
-
- @dataclass
- class TestRunInfo:
-     details: TestDetails
-     setup: TestResourcesSetup
-     resources: TestResources
-     logs: TestLogs
-     results: TestResults
-
-
- class TestRun:
-     # pylint: disable=too-many-instance-attributes
-     EXPOSED_ATTRIBUTES = {"id": UUID, "group_id": UUID, "release_id": UUID,
-                           "build_id": str, "test_id": UUID,
-                           "assignee": UUID, "heartbeat": int, "investigation_status": str}
-     ATTRIBUTE_CONSTRAINTS = {
-     }
-     PRIMARY_KEYS = {
-         "build_id": (str, "partition"),
-         "start_time": (datetime.datetime, "clustering"),
-     }
-     CLUSTERING_ORDER = {
-         "start_time": "DESC",
-     }
-     INDICES = ["release_id", "group_id", "test_id", "id", "assignee", "status"]
-     _USING_RUNINFO = TestRunInfo
-     _TABLE_NAME = "test_runs_v8"
-     _IS_TABLE_INITIALIZED = False
-     _ARGUS_DB_INTERFACE = None
-
-     def __init__(self, test_id: UUID, assignee: UUID, build_id: str,
-                  run_info: TestRunInfo, config: BaseConfig = None, argus_interface: ArgusDatabase = None,
-                  investigation_status: str = TestInvestigationStatus.NOT_INVESTIGATED):
-         # pylint: disable=too-many-arguments
-         if not test_id:
-             test_id = uuid4()
-         self._save_lock = threading.Lock()
-         self._id = test_id
-         self._build_id = build_id
-         self._group_id = None
-         self._release_id = None
-         self._test_id = None
-         self._assignee = assignee
-         self._investigation_status = investigation_status
-         self._run_info = run_info
-         self._heartbeat = int(time.time())
-         self._config = config
-         for field in fields(run_info):
-             setattr(self, field.name, getattr(run_info, field.name))
-
-         if argus_interface:
-             self.argus = argus_interface
-
-     @classmethod
-     def table_name(cls) -> str:
-         return cls._TABLE_NAME
-
-     @classmethod
-     def from_db_row(cls, row, config: BaseConfig = None):
-         if not cls._IS_TABLE_INITIALIZED:
-             cls.init_own_table(config)
-         nested_fields = {}
-         for field in fields(cls._USING_RUNINFO):
-             nested_fields[field.name] = field.type.from_db_row(row)
-
-         run_info = cls._USING_RUNINFO(**nested_fields)
-         run = cls(test_id=row.id, assignee=row.assignee, build_id=row.build_id,
-                   run_info=run_info, investigation_status=row.investigation_status)
-         run.heartbeat = row.heartbeat
-         run.group_id = row.group_id
-         run.release_id = row.release_id
-         run.test_id = row.test_id
-         return run
-
-     @classmethod
-     def from_id(cls, test_id: UUID, config: BaseConfig = None):
-         if not cls._IS_TABLE_INITIALIZED:
-             cls.init_own_table(config)
-         database = cls.get_argus()
-         if row := database.fetch(cls._TABLE_NAME, test_id):
-             return cls.from_db_row(row)
-
-         return None
-
-     @classmethod
-     def from_pk(cls, pk: tuple[str, datetime.datetime], config: BaseConfig = None):
-         if not cls._IS_TABLE_INITIALIZED:
-             cls.init_own_table(config)
-         database = cls.get_argus()
-         if row := database.fetch_generic(cls._TABLE_NAME, pk, "WHERE build_id = ? and start_time = ?"):
-             return cls.from_db_row(row)
-
-         return None
-
-     @classmethod
-     def get_argus(cls, config: BaseConfig = None) -> ArgusDatabase:
-         if not cls._ARGUS_DB_INTERFACE:
-             cls._ARGUS_DB_INTERFACE = ArgusDatabase(config=config)
-         return cls._ARGUS_DB_INTERFACE
-
-     @classmethod
-     def set_argus(cls, argus_interface: ArgusDatabase):
-         cls._ARGUS_DB_INTERFACE = argus_interface
-         cls._IS_TABLE_INITIALIZED = False
-
-     @property
-     def argus(self) -> ArgusDatabase:
-         if not self._ARGUS_DB_INTERFACE:
-             self.get_argus(self._config)
-         return self._ARGUS_DB_INTERFACE
-
-     @argus.setter
-     def argus(self, interface: ArgusDatabase | None):
-         self._ARGUS_DB_INTERFACE = interface  # pylint: disable=invalid-name
-         self._IS_TABLE_INITIALIZED = False  # pylint: disable=invalid-name
-
-     @property
-     def heartbeat(self) -> int:
-         return self._heartbeat
-
-     @heartbeat.setter
-     def heartbeat(self, value: int | float):
-         self._heartbeat = int(value)
-
-     @property
-     def build_id(self) -> str:
-         return self._build_id
-
-     @build_id.setter
-     def build_id(self, value: str) -> None:
-         self._build_id = str(value)
-
-     @property
-     def id(self) -> UUID:  # pylint: disable=invalid-name
-         return self._id
-
-     @property
-     def group_id(self) -> UUID:
-         return self._group_id
-
-     @property
-     def release_id(self) -> UUID:
-         return self._release_id
-
-     @property
-     def test_id(self) -> UUID:
-         return self._test_id
-
-     @release_id.setter
-     def release_id(self, value: UUID) -> None:
-         self._release_id = value
-
-     @group_id.setter
-     def group_id(self, value: UUID) -> None:
-         self._group_id = value
-
-     @test_id.setter
-     def test_id(self, value: UUID) -> None:
-         self._test_id = value
-
-     @property
-     def assignee(self) -> UUID:
-         return self._assignee
-
-     @assignee.setter
-     def assignee(self, value):
-         self._assignee = value
-
-     @property
-     def investigation_status(self) -> str:
-         return self._investigation_status
-
-     @investigation_status.setter
-     def investigation_status(self, value: TestInvestigationStatus | str):
-         self._investigation_status = TestInvestigationStatus(value)
-
-     def serialize(self) -> dict[str, Any]:
-         LOGGER.debug("Serializing test run...")
-         nested_data = {}
-         for field in fields(self._USING_RUNINFO):
-             field: Field
-             value: BaseTestInfo = getattr(self, field.name)
-             nested_data = {
-                 **nested_data,
-                 **value.serialize()
-             }
-
-         data = {
-             "build_id": self._build_id,
-             "id": self._id,
-             "group_id": self._group_id,
-             "release_id": self._release_id,
-             "test_id": self._test_id,
-             "assignee": self._assignee,
-             "heartbeat": self._heartbeat,
-             "investigation_status": self._investigation_status,
-             **nested_data
-         }
-         LOGGER.debug("Serialized Data: %s", data)
-         return data
-
-     @classmethod
-     def init_own_table(cls, config: BaseConfig = None):
-         LOGGER.debug("Initializing TestRun table...")
-         cls.get_argus(config).init_table(
-             table_name=cls._TABLE_NAME, column_info=cls.schema())
-         cls._IS_TABLE_INITIALIZED = True
-
-     @classmethod
-     def set_table_name(cls, new_table_name: str):
-         cls._TABLE_NAME = new_table_name
-         cls._IS_TABLE_INITIALIZED = False
-
-     @classmethod
-     def schema(cls) -> dict[str, ColumnInfo]:
-         data = {}
-         LOGGER.debug("Dumping full schema...")
-         for attr, column_type in cls.EXPOSED_ATTRIBUTES.items():
-             value = None
-             constraints = cls.ATTRIBUTE_CONSTRAINTS.get(attr, [])
-             column_info = ColumnInfo(
-                 name=attr, type=column_type, value=value, constraints=constraints)
-             data[attr] = column_info
-
-         schema_dump = {}
-         for field in fields(cls._USING_RUNINFO):
-             schema_dump = {
-                 **schema_dump,
-                 **field.type.schema(),
-             }
-
-         full_schema = dict(
-             **{"$tablekeys$": cls.PRIMARY_KEYS},
-             **{"$clustering_order$": cls.CLUSTERING_ORDER},
-             **{"$indices$": cls.INDICES},
-             **data,
-             **schema_dump
-         )
-         LOGGER.debug("Full Schema: %s", full_schema)
-         return full_schema
-
-     def save(self):
-         with self._save_lock:
-             if not self._IS_TABLE_INITIALIZED:
-                 self.init_own_table(self._config)
-             if not self.exists():
-                 self._assign_categories()
-                 if not self.assignee:
-                     try:
-                         self.assignee = self._get_current_assignee_from_schedule()
-                     except Exception:  # pylint: disable=broad-except
-                         LOGGER.warning("Error getting assignee from database")
-                         LOGGER.debug("Details: ", exc_info=True)
-
-                 LOGGER.debug("Inserting data for test run: %s", self.id)
-                 self.argus.insert(table_name=self._TABLE_NAME,
-                                   run_data=self.serialize())
-             else:
-                 LOGGER.debug("Updating data for test run: %s", self.id)
-                 self.argus.update(table_name=self._TABLE_NAME,
-                                   run_data=self.serialize())
-
-     def exists(self) -> bool:
-         if not self._IS_TABLE_INITIALIZED:
-             self.init_own_table(self._config)
-
-         if self.argus.fetch(table_name=self._TABLE_NAME, run_id=self.id):
-             return True
-         return False
-
-     def _assign_categories(self):
-         key = self._build_id
-         try:
-             test: ArgusTest = ArgusTest.using(
-                 connection=self.argus.CQL_ENGINE_CONNECTION_NAME
-             ).get(build_system_id=key)
-             self.release_id = test.release_id
-             self.group_id = test.group_id
-             self.test_id = test.id
-         except ArgusTest.DoesNotExist:
-             LOGGER.warning(
-                 "Test entity missing for key \"%s\", run won't be visible until this is corrected", key)
-
-     def _get_current_assignee_from_schedule(self) -> UUID:
-         """
-         Iterate over all schedules (groups and tests) and return first available assignee
-         """
-         associated_test = ArgusTest.using(
-             connection=self.argus.CQL_ENGINE_CONNECTION_NAME
-         ).get(build_system_id=self.build_id)
-         associated_group = ArgusGroup.using(
-             connection=self.argus.CQL_ENGINE_CONNECTION_NAME
-         ).get(id=associated_test.group_id)
-         associated_release = ArgusRelease.using(
-             connection=self.argus.CQL_ENGINE_CONNECTION_NAME
-         ).get(id=associated_test.release_id)
-
-         scheduled_groups = ArgusScheduleGroup.filter(
-             release_id=associated_release.id, group_id=associated_group.id
-         ).all().using(
-             connection=self.argus.CQL_ENGINE_CONNECTION_NAME
-         )
-
-         scheduled_tests = ArgusScheduleTest.filter(
-             release_id=associated_release.id, test_id=associated_test.id
-         ).all().using(
-             connection=self.argus.CQL_ENGINE_CONNECTION_NAME
-         )
-
-         unique_schedule_ids = {scheduled_obj.schedule_id for scheduled_obj in [
-             *scheduled_tests, *scheduled_groups]}
-
-         schedules = ArgusSchedule.filter(
-             release_id=associated_release.id, id__in=tuple(
-                 unique_schedule_ids)
-         ).all().using(
-             connection=self.argus.CQL_ENGINE_CONNECTION_NAME
-         )
-
-         today = datetime.datetime.utcnow()
-
-         valid_schedules = []
-         for schedule in schedules:
-             if schedule.period_start <= today <= schedule.period_end:
-                 valid_schedules.append(schedule)
-
-         assignees_uuids = []
-         for schedule in valid_schedules:
-             assignees = ArgusScheduleAssignee.filter(
-                 schedule_id=schedule.id
-             ).all().using(
-                 connection=self.argus.CQL_ENGINE_CONNECTION_NAME
-             )
-             assignees_uuids.append(
-                 *[assignee.assignee for assignee in assignees])
-
-         return assignees_uuids[0] if len(assignees_uuids) > 0 else None
-
-     def shutdown(self):
-         LOGGER.debug("Shutting down cluster connection...")
-         self.argus.cluster.shutdown()
-
-     @property
-     def run_info(self) -> TestRunInfo:
-         return self._run_info
-
-
- class TestRunWithHeartbeat(TestRun):
-     def __init__(self, test_id: UUID, assignee: UUID, build_id: str,
-                  run_info: TestRunInfo, heartbeat_interval=30, config: BaseConfig = None,
-                  argus_interface: ArgusDatabase = None, investigation_status: str = TestInvestigationStatus.NOT_INVESTIGATED,):
-         # pylint: disable=too-many-arguments
-         self._heartbeat_interval = heartbeat_interval
-         self._shutdown_event = threading.Event()
-         super().__init__(test_id=test_id, assignee=assignee, build_id=build_id,
-                          investigation_status=investigation_status, run_info=run_info,
-                          config=config, argus_interface=argus_interface)
-         self._thread = threading.Thread(target=self._heartbeat_entry,
-                                         name=f"{self.__class__.__name__}-{self.id}-heartbeat", daemon=True)
-         self._heartbeat_statement = self.argus.session.prepare(
-             f"UPDATE {TestRun.table_name()} SET heartbeat = ? WHERE build_id = ? AND start_time = ?")
-         self._thread.start()
-
-     @property
-     def heartbeat_interval(self) -> int:
-         return self._heartbeat_interval
-
-     @heartbeat_interval.setter
-     def heartbeat_interval(self, value: float | int):
-         self._heartbeat_interval = value
-
-     @property
-     def thread(self):
-         return self._thread
-
-     def _heartbeat_entry(self):
-         while True:
-             time.sleep(self.heartbeat_interval)
-             if self._shutdown_event.is_set():
-                 break
-             LOGGER.debug("Sending heartbeat...")
-             self.heartbeat = time.time()
-             bound_statement = self._heartbeat_statement.bind(
-                 (self.heartbeat, self.build_id, self.run_info.details.start_time))
-             self.argus.session.execute(bound_statement)
-         LOGGER.debug("Heartbeat exit")
-
-     def shutdown(self):
-         self._shutdown_event.set()
-         LOGGER.debug("Waiting for the heartbeat thread to exit...")
-         self._thread.join(timeout=self.heartbeat_interval + 10)
-         if self._thread.is_alive():
-             LOGGER.warning(
-                 "Heartbeat thread was not able to shut down correctly. Stack trace:")
-             # pylint: disable=protected-access
-             current_threads = sys._current_frames()
-             stack_trace = traceback.extract_stack(
-                 current_threads[self._thread.ident])
-             LOGGER.warning(
-                 "\n".join([f'#{lineno:3} : {line:50}: {fname}' for fname, lineno, _, line in stack_trace]))
-         super().shutdown()