argus-alm 0.12.8__py3-none-any.whl → 0.12.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- argus/backend/cli.py +3 -1
- argus/backend/db.py +1 -1
- argus/backend/models/result.py +82 -19
- argus/backend/models/web.py +2 -1
- argus/backend/plugins/loader.py +1 -1
- argus/backend/plugins/sct/testrun.py +4 -1
- argus/backend/service/client_service.py +25 -10
- argus/backend/service/notification_manager.py +4 -2
- argus/backend/service/results_service.py +119 -3
- argus/backend/service/testrun.py +16 -0
- argus/backend/tests/__init__.py +0 -0
- argus/backend/tests/argus_web.test.yaml +39 -0
- argus/backend/tests/conftest.py +44 -0
- argus/backend/tests/results_service/__init__.py +0 -0
- argus/backend/tests/results_service/test_best_results.py +70 -0
- argus/client/generic_result.py +26 -2
- {argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/METADATA +1 -1
- {argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/RECORD +21 -16
- {argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/LICENSE +0 -0
- {argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/WHEEL +0 -0
- {argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/entry_points.txt +0 -0
argus/backend/cli.py
CHANGED
@@ -15,6 +15,9 @@ LOGGER = logging.getLogger(__name__)
 @click.command('sync-models')
 @with_appcontext
 def sync_models_command():
+    sync_models()
+
+def sync_models():
     cluster = ScyllaCluster.get()
     cluster.sync_core_tables()
     LOGGER.info("Synchronizing plugin types...")
@@ -29,7 +32,6 @@ def sync_models_command():
     LOGGER.info("Plugins ready.")
     click.echo("All models synchronized.")

-
 @cli_bp.cli.add_command
 @click.command('scan-jenkins')
 @with_appcontext

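Reviewer note: pulling sync_models() out of the Click command means the schema sync can now be invoked directly from Python as well as from the CLI; the new test conftest further down relies on this. A minimal sketch of that use, assuming configuration and the cluster connection are already in place (as the fixture arranges):

    from argus.backend.cli import sync_models

    # e.g. inside a pytest session fixture, after ScyllaCluster.get(config)
    sync_models()  # syncs core tables, then each plugin's models and types
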
argus/backend/db.py
CHANGED
@@ -54,7 +54,7 @@ class ScyllaCluster:
         return self.cluster.connect(keyspace=self.config["SCYLLA_KEYSPACE_NAME"])

     @classmethod
-    def get(cls, config:
+    def get(cls, config: dict = None) -> 'ScyllaCluster':
         if cls.APP_INSTANCE:
             return cls.APP_INSTANCE

argus/backend/models/result.py
CHANGED
@@ -1,22 +1,21 @@
+import math
+from datetime import datetime, timezone
+
 from cassandra.cqlengine import columns
 from cassandra.cqlengine.models import Model
 from cassandra.cqlengine.usertype import UserType

-class BestResult(UserType):
-    date = columns.DateTime()
-    value = columns.Double()
-    run_id = columns.UUID()
-
 class ValidationRules(UserType):
-
-
-
-
+    valid_from = columns.DateTime()
+    best_pct = columns.Double()  # max value limit relative to best result in percent unit
+    best_abs = columns.Double()  # max value limit relative to best result in absolute unit
+    fixed_limit = columns.Double()  # fixed limit

 class ColumnMetadata(UserType):
     name = columns.Ascii()
     unit = columns.Text()
     type = columns.Ascii()
+    higher_is_better = columns.Boolean()  # used for tracking best results, if None - no tracking


 class ArgusGenericResultMetadata(Model):
@@ -25,17 +24,68 @@ class ArgusGenericResultMetadata(Model):
     name = columns.Text(required=True, primary_key=True)
     description = columns.Text()
     columns_meta = columns.List(value_type=columns.UserDefinedType(ColumnMetadata))
-    validation_rules = columns.Map(key_type=columns.Ascii(), value_type=columns.List(
-    best_results = columns.Map(key_type=columns.Ascii(), value_type=columns.UserDefinedType(BestResult))
+    validation_rules = columns.Map(key_type=columns.Ascii(), value_type=columns.List(columns.UserDefinedType(ValidationRules)))
     rows_meta = columns.List(value_type=columns.Ascii())

     def __init__(self, **kwargs):
         kwargs["columns_meta"] = [ColumnMetadata(**col) for col in kwargs.pop('columns_meta', [])]
-
-
+        validation_rules = kwargs.pop('validation_rules', {})
+
+        if validation_rules:
+            for column, rule in validation_rules.items():
+                if not isinstance(rule, list):
+                    rule['valid_from'] = datetime.now(timezone.utc)
+                    validation_rules[column] = [rule]
+        kwargs["validation_rules"] = {k: [ValidationRules(**rules) for rules in v] for k, v in validation_rules.items()}
         super().__init__(**kwargs)

-    def
+    def update_validation_rules(self, key: str, new_rule_dict: dict) -> bool:
+        """
+        Checks if the most recent ValidationRule for the given key matches the new_rule_dict.
+        If not, adds the new rule to the list with the current timestamp.
+
+        :param key: The key (column name) in the validation_rules map to update.
+        :param new_rule_dict: A dictionary containing the new validation rule values.
+        :return: True if a new rule was added, False if the existing rule matches.
+        """
+        rules_list = self.validation_rules.get(key, [])
+        most_recent_rule = None
+
+        if rules_list:
+            most_recent_rule = rules_list[-1]
+
+        fields_to_compare = [field for field in ValidationRules._fields if field != 'valid_from']
+        rules_match = True
+        if most_recent_rule:
+            for field in fields_to_compare:
+                db_value = getattr(most_recent_rule, field)
+                new_value = new_rule_dict.get(field)
+                if db_value is None and new_value is None:
+                    continue
+                if db_value is None or new_value is None:
+                    rules_match = False
+                    break
+                if not math.isclose(db_value, new_value, rel_tol=1e-9, abs_tol=0.0):
+                    rules_match = False
+                    break
+        else:
+            rules_match = False
+
+        if not rules_match:
+            new_rule = ValidationRules(
+                valid_from=datetime.now(timezone.utc),
+                best_pct=new_rule_dict.get('best_pct'),
+                best_abs=new_rule_dict.get('best_abs'),
+                fixed_limit=new_rule_dict.get('fixed_limit')
+            )
+            rules_list.append(new_rule)
+            self.validation_rules = self.validation_rules or {}
+            self.validation_rules.update({key: rules_list})
+            return True
+
+        return False  # Existing rule matches
+
+    def update_if_changed(self, new_data: dict) -> "ArgusGenericResultMetadata":
         """
         Updates table metadata if changed column/description or new rows were added.
         See that rows can only be added, not removed once was sent.
@@ -45,22 +95,26 @@ class ArgusGenericResultMetadata(Model):
         for field, value in new_data.items():
             if field == "columns_meta":
                 value = [ColumnMetadata(**col) for col in value]
+                if self.columns_meta != value:
+                    self.columns_meta = value
+                    updated = True
             elif field == "rows_meta":
                 added_rows = []
                 for row in value:
                     if row not in self.rows_meta:
                         added_rows.append(row)
-
-
-                value = {k: [BestResult(**z) for z in v] for k, v in value.items()}
+                        updated = True
+                self.rows_meta += added_rows
             elif field == "validation_rules":
-
-
+                if any([self.update_validation_rules(key, rules) for key, rules in value.items()]):
+                    updated = True
+            elif getattr(self, field) != value:
                 setattr(self, field, value)
                 updated = True

         if updated:
             self.save()
+        return self

 class ArgusGenericResultData(Model):
     __table_name__ = "generic_result_data_v1"
@@ -73,3 +127,12 @@ class ArgusGenericResultData(Model):
     value = columns.Double()
     value_text = columns.Text()
     status = columns.Ascii()
+
+
+class ArgusBestResultData(Model):
+    __table_name__ = "generic_result_best_v1"
+    test_id = columns.UUID(partition_key=True)
+    name = columns.Text(partition_key=True)
+    key = columns.Ascii(primary_key=True)  # represents pair column:row
+    result_date = columns.DateTime(primary_key=True, clustering_order="DESC")
+    value = columns.Double()
+    run_id = columns.UUID()

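Reviewer note: validation_rules is now a per-column history rather than a single value — update_validation_rules compares only the numeric fields (everything except valid_from) against the most recent entry and appends a new timestamped rule when they differ. A standalone sketch of that comparison, using plain dicts instead of the cqlengine ValidationRules user type (the helper name and the sample values here are illustrative only):

    import math
    from datetime import datetime, timezone

    def rule_changed(latest: dict | None, incoming: dict,
                     fields=("best_pct", "best_abs", "fixed_limit")) -> bool:
        # Mirrors the float comparison in ArgusGenericResultMetadata.update_validation_rules
        if latest is None:
            return True
        for field in fields:
            old, new = latest.get(field), incoming.get(field)
            if old is None and new is None:
                continue
            if old is None or new is None:
                return True
            if not math.isclose(old, new, rel_tol=1e-9, abs_tol=0.0):
                return True
        return False

    history = [{"valid_from": datetime(2024, 1, 1, tzinfo=timezone.utc), "best_pct": 50.0}]
    incoming = {"best_pct": 50.0, "best_abs": 5.0}
    if rule_changed(history[-1], incoming):  # True: best_abs went from unset to 5.0
        history.append({"valid_from": datetime.now(timezone.utc), **incoming})
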
argus/backend/models/web.py
CHANGED
@@ -6,7 +6,7 @@ from cassandra.cqlengine.usertype import UserType
 from cassandra.cqlengine import columns
 from cassandra.util import uuid_from_time, unix_time_from_uuid1  # pylint: disable=no-name-in-module

-from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
+from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData, ArgusBestResultData


 def uuid_now():
@@ -381,6 +381,7 @@ USED_MODELS: list[Model] = [
     ArgusScheduleTest,
     ArgusGenericResultMetadata,
     ArgusGenericResultData,
+    ArgusBestResultData,
 ]

 USED_TYPES: list[UserType] = [

argus/backend/plugins/loader.py
CHANGED
@@ -30,7 +30,7 @@ def plugin_loader() -> dict[str, PluginInfoBase]:


 AVAILABLE_PLUGINS = plugin_loader()
-
+print(AVAILABLE_PLUGINS)

 def all_plugin_models(include_all=False) -> list[PluginModelBase]:
     return [model for plugin in AVAILABLE_PLUGINS.values() for model in plugin.all_models if issubclass(model, PluginModelBase) or include_all]

argus/backend/plugins/sct/testrun.py
CHANGED
@@ -258,7 +258,10 @@ class SCTTestRun(PluginModelBase):
             scylla_package_upgraded = [package for package in self.packages if package.name == "scylla-server-upgraded"][0]
         except IndexError:
             scylla_package_upgraded = None
-
+        try:
+            scylla_package = [package for package in self.packages if package.name == "scylla-server"][0]
+        except IndexError:
+            raise ValueError("Scylla package not found in packages - cannot determine SUT timestamp")
         return (datetime.strptime(scylla_package.date, '%Y%m%d').replace(tzinfo=timezone.utc).timestamp()
                 + int(scylla_package.revision_id, 16) % 1000000 / 1000000)

argus/backend/service/client_service.py
CHANGED
@@ -1,8 +1,14 @@
+import operator
+from dataclasses import asdict, is_dataclass
+from datetime import datetime, timezone
+from functools import partial
 from uuid import UUID
+
 from argus.backend.db import ScyllaCluster
 from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
 from argus.backend.plugins.core import PluginModelBase
 from argus.backend.plugins.loader import AVAILABLE_PLUGINS
+from argus.backend.service.results_service import ResultsService, Cell
 from argus.backend.util.enums import TestStatus


@@ -25,8 +31,9 @@ class ClientService:
     def submit_run(self, run_type: str, request_data: dict) -> str:
         model = self.get_model(run_type)
         model.submit_run(request_data=request_data)
+
         return "Created"
-
+
     def get_run(self, run_type: str, run_id: str):
         model = self.get_model(run_type)
         try:
@@ -85,23 +92,31 @@ class ClientService:
             run = model.load_test_run(UUID(run_id))
         except model.DoesNotExist:
             return {"status": "error", "response": {
-
-
-
-
-
-
+                "exception": "DoesNotExist",
+                "arguments": [run_id]
+            }}
+        table_name = results["meta"]["name"]
+        results_service = ResultsService()
+        cells = [Cell(**cell) for cell in results["results"]]
+        table_metadata = results_service.get_table_metadata(test_id=run.test_id, table_name=table_name)
+        if table_metadata:
+            table_metadata = table_metadata.update_if_changed(results["meta"])
         else:
-            ArgusGenericResultMetadata(test_id=run.test_id, **results["meta"])
+            table_metadata = ArgusGenericResultMetadata(test_id=run.test_id, **results["meta"])
+            table_metadata.save()
         if results.get("sut_timestamp", 0) == 0:
             results["sut_timestamp"] = run.sut_timestamp()  # automatic sut_timestamp
+        results["sut_timestamp"] = datetime.fromtimestamp(results["sut_timestamp"])
+        best_results = results_service.update_best_results(test_id=run.test_id, table_name=table_name, table_metadata=table_metadata,
+                                                           cells=cells, run_id=run_id)
         table_name = results["meta"]["name"]
         sut_timestamp = results["sut_timestamp"]
-        for cell in
+        for cell in cells:
+            cell.update_cell_status_based_on_rules(table_metadata, best_results)
             ArgusGenericResultData(test_id=run.test_id,
                                    run_id=run.id,
                                    name=table_name,
                                    sut_timestamp=sut_timestamp,
-                                   **cell
+                                   **asdict(cell)
                                    ).save()
         return {"status": "ok", "message": "Results submitted"}

argus/backend/service/notification_manager.py
CHANGED
@@ -93,7 +93,8 @@ class NotificationSenderBase:

 class ArgusDBNotificationSaver(NotificationSenderBase):
     CONTENT_TEMPLATES = {
-        ArgusNotificationTypes.Mention: lambda p: render_template("notifications/mention.html.j2", **p if p else {})
+        ArgusNotificationTypes.Mention: lambda p: render_template("notifications/mention.html.j2", **p if p else {}),
+        ArgusNotificationTypes.AssigneeChange: lambda p: render_template("notifications/assigned.html.j2", **p if p else {}),
     }

     def send_notification(self, receiver: UUID, sender: UUID, notification_type: ArgusNotificationTypes, source_type: ArgusNotificationSourceTypes,
@@ -117,7 +118,8 @@ class ArgusDBNotificationSaver(NotificationSenderBase):
 class EmailNotificationServiceSender(NotificationSenderBase):
     CONTENT_TEMPLATES = {
         ArgusNotificationTypes.Mention: lambda p: render_template(
-            "notifications/email_mention.html.j2", **p if p else {})
+            "notifications/email_mention.html.j2", **p if p else {}),
+        ArgusNotificationTypes.AssigneeChange: lambda p: render_template("notifications/assigned_email.html.j2", **p if p else {}),
     }

     def __init__(self):

argus/backend/service/results_service.py
CHANGED
@@ -1,14 +1,61 @@
 import copy
 import logging
 import math
+import operator
+from datetime import datetime, timezone
+from functools import partial
 from typing import List, Dict, Any
 from uuid import UUID

+from dataclasses import dataclass
 from argus.backend.db import ScyllaCluster
-from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData
+from argus.backend.models.result import ArgusGenericResultMetadata, ArgusGenericResultData, ArgusBestResultData

 LOGGER = logging.getLogger(__name__)

+
+@dataclass
+class BestResult:
+    key: str
+    value: float
+    result_date: datetime
+    run_id: str
+
+
+@dataclass
+class Cell:
+    column: str
+    row: str
+    status: str
+    value: Any | None = None
+    value_text: str | None = None
+
+    def update_cell_status_based_on_rules(self, table_metadata: ArgusGenericResultMetadata, best_results: dict[str, BestResult],
+                                          ) -> None:
+        column_validation_rules = table_metadata.validation_rules.get(self.column)
+        rules = column_validation_rules[-1] if column_validation_rules else {}
+        higher_is_better = next((col.higher_is_better for col in table_metadata.columns_meta if col.name == self.column), None)
+        if not rules or self.status != "UNSET" or higher_is_better is None:
+            return
+        is_better = partial(operator.gt, self.value) if higher_is_better else partial(operator.lt, self.value)
+        key = f"{self.column}:{self.row}"
+        limits = []
+        if rules.fixed_limit is not None:
+            limits.append(rules.fixed_limit)
+
+        if best_result := best_results.get(key):
+            best_value = best_result.value
+            if (best_pct := rules.best_pct) is not None:
+                multiplier = 1 - best_pct / 100 if higher_is_better else 1 + best_pct / 100
+                limits.append(best_value * multiplier)
+            if (best_abs := rules.best_abs) is not None:
+                limits.append(best_value - best_abs if higher_is_better else best_value + best_abs)
+        if all(is_better(limit) for limit in limits):
+            self.status = "PASS"
+        else:
+            self.status = "ERROR"
+
+
 default_options = {
     "scales": {
         "y": {
@@ -146,10 +193,10 @@ def calculate_graph_ticks(graphs: List[Dict]) -> dict[str, str]:


 class ResultsService:
-
+
     def __init__(self):
         self.cluster = ScyllaCluster.get()
-
+
     def _get_tables_metadata(self, test_id: UUID) -> list[ArgusGenericResultMetadata]:
         query_fields = ["name", "description", "columns_meta", "rows_meta"]
         raw_query = (f"SELECT {','.join(query_fields)}"
@@ -158,6 +205,12 @@
         tables_meta = self.cluster.session.execute(query=query, parameters=(test_id,))
         return [ArgusGenericResultMetadata(**table) for table in tables_meta]

+    def get_table_metadata(self, test_id: UUID, table_name: str) -> ArgusGenericResultMetadata:
+        raw_query = ("SELECT * FROM generic_result_metadata_v1 WHERE test_id = ? AND name = ?")
+        query = self.cluster.prepare(raw_query)
+        table_meta = self.cluster.session.execute(query=query, parameters=(test_id, table_name))
+        return [ArgusGenericResultMetadata(**table) for table in table_meta][0] if table_meta else None
+
     def get_run_results(self, test_id: UUID, run_id: UUID) -> list[dict]:
         query_fields = ["column", "row", "value", "value_text", "status"]
         raw_query = (f"SELECT {','.join(query_fields)},WRITETIME(status) as ordering"
@@ -199,3 +252,66 @@
     def is_results_exist(self, test_id: UUID):
         """Verify if results for given test id exist at all."""
         return bool(ArgusGenericResultMetadata.objects(test_id=test_id).only(["name"]).limit(1))
+
+    def get_best_results(self, test_id: UUID, name: str) -> List[BestResult]:
+        query_fields = ["key", "value", "result_date", "run_id"]
+        raw_query = (f"SELECT {','.join(query_fields)}"
+                     f" FROM generic_result_best_v1 WHERE test_id = ? and name = ?")
+        query = self.cluster.prepare(raw_query)
+        best_results = self.cluster.session.execute(query=query, parameters=(test_id, name))
+        return [BestResult(**best) for best in best_results]
+
+    @staticmethod
+    def _update_best_value(best_results: dict[str, list[dict]], higher_is_better_map: dict[str, bool | None], cells: list[dict],
+                           sut_timestamp: float, run_id: str
+                           ) -> dict[str, list[dict]]:
+
+        for cell in cells:
+            if "column" not in cell or "row" not in cell or "value" not in cell:
+                continue
+            column, row, value = cell["column"], cell["row"], cell["value"]
+            key_name = f"{column}_{row}"
+            if higher_is_better_map[column] is None:
+                # skipping updating best value when higher_is_better is not set (not enabled by user)
+                return best_results
+            if key_name not in best_results:
+                best_results[key_name] = []
+                current_best = None
+            else:
+                current_best = best_results[key_name][-1]
+                if current_best["sut_timestamp"].timestamp() > sut_timestamp:
+                    # skip updating best value when testing older version than current best
+                    # as would have to update all values between these dates to make cells statuses to be consistent
+                    return best_results
+
+            is_better = partial(operator.gt, value) if higher_is_better_map[column] else partial(operator.lt, value)
+            if current_best is None or is_better(current_best["value"]):
+                best_results[key_name].append({"sut_timestamp": sut_timestamp, "value": value, "run_id": run_id})
+        return best_results
+
+    def update_best_results(self, test_id: UUID, table_name: str, cells: list[Cell],
+                            table_metadata: ArgusGenericResultMetadata, run_id: str) -> dict[str, BestResult]:
+        """update best results for given test_id and table_name based on cells values - if any value is better than current best"""
+        higher_is_better_map = {meta["name"]: meta.higher_is_better for meta in table_metadata.columns_meta}
+        best_results = {}
+        for best in self.get_best_results(test_id=test_id, name=table_name):
+            if best.key not in best_results:
+                best_results[best.key] = best
+
+        for cell in cells:
+            if cell.value is None:
+                # textual value, skip
+                continue
+            key = f"{cell.column}:{cell.row}"
+            if higher_is_better_map[cell.column] is None:
+                # skipping updating best value when higher_is_better is not set (not enabled by user)
+                continue
+            current_best = best_results.get(key)
+            is_better = partial(operator.gt, cell.value) if higher_is_better_map[cell.column] \
+                else partial(operator.lt, cell.value)
+            if current_best is None or is_better(current_best.value):
+                result_date = datetime.now(timezone.utc)
+                best_results[key] = BestResult(key=key, value=cell.value, result_date=result_date, run_id=run_id)
+                ArgusBestResultData(test_id=test_id, name=table_name, key=key, value=cell.value, result_date=result_date,
                                    run_id=run_id).save()
+        return best_results

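Reviewer note: a cell submitted with status "UNSET" is now graded against every applicable limit — the fixed limit plus limits derived from the current best value — and only passes if it beats all of them. A small worked example of the arithmetic in Cell.update_cell_status_based_on_rules() for a lower-is-better column (plain Python, values invented for illustration):

    # Column where lower is better (e.g. a latency in ms)
    best_value = 100.0                                           # current best from generic_result_best_v1
    rules = {"best_pct": 50.0, "best_abs": 5.0, "fixed_limit": 120.0}

    limits = [rules["fixed_limit"]]                              # 120.0
    limits.append(best_value * (1 + rules["best_pct"] / 100))    # 150.0 (lower is better -> 1 + pct/100)
    limits.append(best_value + rules["best_abs"])                # 105.0

    value = 103.0
    status = "PASS" if all(value < limit for limit in limits) else "ERROR"
    # 103.0 is below 120.0, 150.0 and 105.0, so the cell is marked PASS;
    # a value of 107.0 would break the best_abs limit (105.0) and be marked ERROR.
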
argus/backend/service/testrun.py
CHANGED
@@ -203,6 +203,22 @@ class TestRunService:
             group_id=test.group_id,
             test_id=test.id
         )
+        if new_assignee_user.id != g.user.id:
+            self.notification_manager.send_notification(
+                receiver=new_assignee_user.id,
+                sender=g.user.id,
+                notification_type=ArgusNotificationTypes.AssigneeChange,
+                source_type=ArgusNotificationSourceTypes.TestRun,
+                source_id=run.id,
+                source_message=str(run.test_id),
+                content_params={
+                    "username": g.user.username,
+                    "run_id": run.id,
+                    "test_id": test.id,
+                    "build_id": run.build_id,
+                    "build_number": get_build_number(run.build_job_url),
+                }
+            )
         return {
             "test_run_id": run.id,
             "assignee": str(new_assignee_user.id) if new_assignee_user else None

argus/backend/tests/__init__.py
File without changes

argus/backend/tests/argus_web.test.yaml
ADDED
@@ -0,0 +1,39 @@
+# BASE URL FOR ARGUS APPLICATION
+BASE_URL: "https://argus.scylladb.com"
+# Main DB Cluster contact points
+SCYLLA_CONTACT_POINTS:
+  - 172.18.0.2
+# Username
+SCYLLA_USERNAME: cassandra
+# Password
+SCYLLA_PASSWORD: cassandra
+# Default keyspace (can be created using 'create-keyspace' command with RF set to the number of contact points)
+SCYLLA_KEYSPACE_NAME: test_argus
+# Replication factor used - if set, will override contact_points as amount of nodes for replication
+# SCYLLA_REPLICATION_FACTOR: 3
+LOGIN_METHODS:
+  - gh
+# Application log level
+APP_LOG_LEVEL: INFO
+# Secret key used to match session data
+SECRET_KEY: MUSTBEUNIQUE1
+# Client ID of a github oauth application
+GITHUB_CLIENT_ID: not_set
+# Scopes used for Github Application:
+# GITHUB_SCOPES: 'user:email read:user read:org repo'
+# Client secret of a github oauth application
+GITHUB_CLIENT_SECRET: not_set
+# Github personal access token
+GITHUB_ACCESS_TOKEN: unknown
+# List of required organization names (Comment out to disable organization requirement)
+GITHUB_REQUIRED_ORGANIZATIONS:
+# at least one is required for user to successfully authenticate
+BUILD_SYSTEM_FILTERED_PREFIXES:
+  - prefixToExclude
+JENKINS_URL: https://jenkins.scylladb.com
+JENKINS_USER: not_set
+JENKINS_API_TOKEN_NAME: not_set
+JENKINS_API_TOKEN: not_set
+JENKINS_MONITORED_RELEASES:
+  - not_set
+

argus/backend/tests/conftest.py
ADDED
@@ -0,0 +1,44 @@
+import os
+from pathlib import Path
+from unittest.mock import patch
+
+from _pytest.fixtures import fixture
+
+from argus.backend.cli import sync_models
+from argus.backend.db import ScyllaCluster
+from argus.backend.service.client_service import ClientService
+from argus.backend.service.release_manager import ReleaseManagerService
+from argus.backend.util.config import Config
+import logging
+os.environ['CQLENG_ALLOW_SCHEMA_MANAGEMENT'] = '1'
+logging.getLogger('cassandra').setLevel(logging.WARNING)
+logging.getLogger('cassandra.connection').setLevel(logging.WARNING)
+logging.getLogger('cassandra.pool').setLevel(logging.WARNING)
+logging.getLogger('cassandra.cluster').setLevel(logging.WARNING)
+
+def truncate_all_tables(session):
+    for table in session.cluster.metadata.keyspaces[session.keyspace].tables:
+        session.execute(f"TRUNCATE {table}")
+
+
+@fixture(autouse=True, scope='session')
+def argus_db():
+    Config.CONFIG_PATHS = [Path(__file__).parent / "argus_web.test.yaml"]
+    config = Config.load_yaml_config()
+    database = ScyllaCluster.get(config)
+    session = database.cluster.connect(keyspace=config["SCYLLA_KEYSPACE_NAME"])
+    ScyllaCluster.get_session = lambda: session  # monkey patching to escape need for flask app context
+
+    sync_models()
+    truncate_all_tables(database.session)
+    yield database
+    database.shutdown()
+
+
+@fixture(autouse=True, scope='session')
+def release_manager_service(argus_db):
+    return ReleaseManagerService()
+
+@fixture(autouse=True, scope='session')
+def client_service(argus_db):
+    return ClientService()

argus/backend/tests/results_service/__init__.py
File without changes

argus/backend/tests/results_service/test_best_results.py
ADDED
@@ -0,0 +1,70 @@
+import logging
+import uuid
+from dataclasses import asdict
+from typing import Optional, Dict
+
+from _pytest.fixtures import fixture
+
+from argus.backend.plugins.sct.testrun import SCTTestRunSubmissionRequest
+from argus.client.generic_result import GenericResultTable, ColumnMetadata, ResultType, ValidationRule
+
+LOGGER = logging.getLogger(__name__)
+
+@fixture(autouse=True, scope='session')
+def release(release_manager_service):
+    return release_manager_service.create_release("best_results", "best_results", False)
+
+
+@fixture(autouse=True, scope='session')
+def group(release_manager_service, release):
+    return release_manager_service.create_group("br_group", "best_results", build_system_id="best_results", release_id=str(release.id))
+
+def get_fake_test_run(
+        schema_version: str = "1.0.0",
+        run_id: str = str(uuid.uuid4()),
+        job_name: str = "default_job_name",
+        job_url: str = "http://example.com",
+        started_by: str = "default_user",
+        commit_id: str = "default_commit_id",
+        sct_config: dict | None = None,
+        origin_url: str | None = None,
+        branch_name: str | None = "main",
+        runner_public_ip: str | None = None,
+        runner_private_ip: str | None = None
+) -> tuple[str, dict]:
+    return "scylla-cluster-tests", asdict(SCTTestRunSubmissionRequest(
+        schema_version=schema_version,
+        run_id=run_id,
+        job_name=job_name,
+        job_url=job_url,
+        started_by=started_by,
+        commit_id=commit_id,
+        sct_config=sct_config,
+        origin_url=origin_url,
+        branch_name=branch_name,
+        runner_public_ip=runner_public_ip,
+        runner_private_ip=runner_private_ip
+    ))
+
+class SampleTable(GenericResultTable):
+    class Meta:
+        name = "Test Table Name"
+        description = "Test Table Description"
+        Columns = [ColumnMetadata(name="float col name", unit="ms", type=ResultType.FLOAT, higher_is_better=False),
+                   ColumnMetadata(name="int col name", unit="ms", type=ResultType.INTEGER, higher_is_better=False),
+                   ColumnMetadata(name="duration col name", unit="s", type=ResultType.DURATION, higher_is_better=False),
+                   ColumnMetadata(name="non tracked col name", unit="", type=ResultType.FLOAT),
+                   ColumnMetadata(name="text col name", unit="", type=ResultType.TEXT),
+                   ]
+        ValidationRules = {"float col name": ValidationRule(best_abs=4),
+                           "int col name": ValidationRule(best_pct=50, best_abs=5),
+                           "duration col name": ValidationRule(fixed_limit=590)
+                           }
+
+def test_argus_tracks_best_result(release_manager_service, client_service, release, group):
+    test = release_manager_service.create_test('track_best_result', 'track_best_result', 'track_best_result', 'track_best_result',
+                                               group_id=str(group.id), release_id=str(release.id), plugin_name='sct')
+    print(test)
+    LOGGER.warning(f"available plugins: {client_service.PLUGINS}")
+    client_service.submit_run(*get_fake_test_run())
+    assert test

argus/client/generic_result.py
CHANGED
@@ -28,15 +28,30 @@ class ColumnMetadata:
     name: str
     unit: str
     type: ResultType
+    higher_is_better: bool = None

     def as_dict(self) -> dict:
         return {
             "name": self.name,
             "unit": self.unit,
-            "type": str(self.type)
+            "type": str(self.type),
+            "higher_is_better": self.higher_is_better
         }


+@dataclass
+class ValidationRule:
+    best_pct: float | None = None  # max value limit relative to best result in percent unit
+    best_abs: float | None = None  # max value limit relative to best result in absolute unit
+    fixed_limit: float | None = None
+
+    def as_dict(self) -> dict:
+        return {
+            "best_pct": self.best_pct,
+            "best_abs": self.best_abs,
+            "fixed_limit": self.fixed_limit
+        }
+
 class ResultTableMeta(type):
     def __new__(cls, name, bases, dct):
         cls_instance = super().__new__(cls, name, bases, dct)
@@ -48,6 +63,14 @@ class ResultTableMeta(type):
         cls_instance.columns = meta.Columns
         cls_instance.column_types = {column.name: column.type for column in cls_instance.columns}
         cls_instance.rows = []
+        for col_name, rule in meta.ValidationRules.items():
+            if col_name not in cls_instance.column_types:
+                raise ValueError(f"ValidationRule column {col_name} not found in the table")
+            if cls_instance.column_types[col_name] == ResultType.TEXT:
+                raise ValueError(f"Validation rules don't apply to TEXT columns")
+            if not isinstance(rule, ValidationRule):
+                raise ValueError(f"Validation rule for column {col_name} is not of type ValidationRule")
+        cls_instance.validation_rules = meta.ValidationRules
         return cls_instance


@@ -87,7 +110,8 @@ class GenericResultTable(metaclass=ResultTableMeta):
             "name": self.name,
             "description": self.description,
             "columns_meta": [column.as_dict() for column in self.columns],
-            "rows_meta": rows
+            "rows_meta": rows,
+            "validation_rules": {k: v.as_dict() for k, v in self.validation_rules.items()}
         }
         return {
             "meta": meta_info,

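Reviewer note: on the client side, best-result tracking is opt-in per column via higher_is_better, and limits are declared per column through Meta.ValidationRules (the metaclass rejects rules for unknown or TEXT columns). A minimal sketch of a result table using the new fields — the table and column names below are invented; the shape mirrors the SampleTable fixture in the new tests:

    from argus.client.generic_result import ColumnMetadata, GenericResultTable, ResultType, ValidationRule

    class WriteLatencyTable(GenericResultTable):
        class Meta:
            name = "write latency"
            description = "p99 write latency per workload"
            Columns = [
                ColumnMetadata(name="p99", unit="ms", type=ResultType.FLOAT, higher_is_better=False),
                ColumnMetadata(name="comment", unit="", type=ResultType.TEXT),  # not tracked
            ]
            ValidationRules = {
                # fail if worse than the best p99 by more than 10% or above 200 ms outright
                "p99": ValidationRule(best_pct=10, fixed_limit=200),
            }
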
{argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
 argus/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 argus/backend/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 argus/backend/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-argus/backend/cli.py,sha256=
+argus/backend/cli.py,sha256=ULa9KVv7RR3DVHKlYvsSdIfbX5s-VuiPuhWWRfPkdGU,1383
 argus/backend/controller/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 argus/backend/controller/admin.py,sha256=2z29RX7ZQO_VTklSKH9RrEj-Ag2SsvyOaIzWDKr0ahQ,575
 argus/backend/controller/admin_api.py,sha256=lj5g6rdoKN9X13H9hXmKYx_-9tftt6HSftiNFLCr_kY,8567
@@ -15,12 +15,12 @@ argus/backend/controller/team.py,sha256=G6LdIBaYgfG0Qr4RhNQ53MZVdh4wcuotsIIpFwhT
 argus/backend/controller/team_ui.py,sha256=B7N1_Kzl6Rac8BV3FbKj55pGAS_dht47rYhAi94PC8A,589
 argus/backend/controller/testrun_api.py,sha256=MCcVmbfNuyODCVbxF8TfcvVu9sMT0xO1UzS4VgumQiM,12638
 argus/backend/controller/view_api.py,sha256=rI7LwcS7keK37nYx76D9StFV_rLHcNkHan8OhFgBrhM,4106
-argus/backend/db.py,sha256=
+argus/backend/db.py,sha256=IgeGjZKTHSoyGrd5f2A7uYn6Pm6DU2-ZIESHmfJwkoE,4099
 argus/backend/error_handlers.py,sha256=IEjz7Vzfldv1PTOeHrpRWmRsgBrHtAW0PXHUJZDovAE,480
 argus/backend/events/event_processors.py,sha256=bsmBayiXvlGn3aqiT2z9WgwnVBRtn2cRqkgn4pLodck,1291
 argus/backend/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-argus/backend/models/result.py,sha256=
-argus/backend/models/web.py,sha256=
+argus/backend/models/result.py,sha256=DOgxHwerJIyMsqsbDdMYSojkUPiWsu6mHCq9B8JpaBA,5899
+argus/backend/models/web.py,sha256=eJybumxShBrFIdi0H0zHcEuAusfpZKpDrZ4Bxl_JyUY,13141
 argus/backend/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 argus/backend/plugins/core.py,sha256=UsrK8oWyhpDSLEcnqAQsEqYVUIX0eCQB862Em7GMQLo,8690
 argus/backend/plugins/driver_matrix_tests/controller.py,sha256=GdPpProzsVXQw8A4h2IS8inUPdr_Q4IN93i6ocOThS8,2213
@@ -32,12 +32,12 @@ argus/backend/plugins/driver_matrix_tests/udt.py,sha256=WRtnJU1dZcLXZJQgfU0mgjNz
 argus/backend/plugins/generic/model.py,sha256=QLVO7QhGr38Hz0VO-BlDYF7LhRX7Pl049vw4W_VMT8o,3302
 argus/backend/plugins/generic/plugin.py,sha256=5URbQVUCizrk-KZqb6I0P_8nLUekjYh-Js7ZLKVoBAA,407
 argus/backend/plugins/generic/types.py,sha256=jlZUcQ7r153ziyl3ZJmix7AzL2G1aX9N_z-4Kw9trWc,267
-argus/backend/plugins/loader.py,sha256=
+argus/backend/plugins/loader.py,sha256=L3OUNt0e2RrxGD5dIl0PonEAKZj0f9gXcpk7_ifjVuc,1414
 argus/backend/plugins/sct/controller.py,sha256=NF11JLoUJ13whghlxRrVex9rLMgFtlkczUAAKAM9vYg,5738
 argus/backend/plugins/sct/plugin.py,sha256=_sOMcXLoFfeG9jwj_t48C4IFvY87juK8ApR6tfSw6q4,1007
 argus/backend/plugins/sct/resource_setup.py,sha256=hwfAOu-oKOH42tjtzJhiqwq_MtUE9_HevoFyql8JKqY,10120
 argus/backend/plugins/sct/service.py,sha256=ygAL85BkyyovJ1xHktlCQJdJS8CrerJZ_Tbr3EXqsg4,22021
-argus/backend/plugins/sct/testrun.py,sha256=
+argus/backend/plugins/sct/testrun.py,sha256=O7LanXF-fHiHfUAqsr7ALhTmcpVfjAYQODQTdB-u3Ig,10656
 argus/backend/plugins/sct/types.py,sha256=Gw1y4iqYguqNqTh_GopLDFho8vuGaOGuK7fjaHYhAOQ,1326
 argus/backend/plugins/sct/udt.py,sha256=V_x8_yw8rV7Q_QRBYayqtTNsPdZvjzOxWpRhXP1XAzs,3119
 argus/backend/plugins/sirenada/model.py,sha256=KVnI75BacuBryc5lR_Aai-mEOs7CB9xxhb7J-YRU3bc,4705
@@ -46,18 +46,23 @@ argus/backend/plugins/sirenada/types.py,sha256=Gm3XMK9YJoozVaeM9XE7n8iRxA6PKBrS2
 argus/backend/service/admin.py,sha256=_VnWl3CkZBOAie_pPbd9sbXZUpBf2SApyNoFZLfB_QI,637
 argus/backend/service/argus_service.py,sha256=YF6El9CyIelePDrCydVn4K82sd7CzCoZNmcvn2ZeR9I,29266
 argus/backend/service/build_system_monitor.py,sha256=QB7RfMMuA2VJ4oUAOAqLxOwxqaQE52_4ZhsASVcoXkU,8296
-argus/backend/service/client_service.py,sha256=
+argus/backend/service/client_service.py,sha256=XAAZ8VOVg2ppwcWVh8asGvYdlMuqFWjkcF-z-DCKD9c,4754
 argus/backend/service/event_service.py,sha256=iYeqxN2QCYTjYB1WPPv4BEFLXG0Oz3TvskkaK4v9pVY,654
 argus/backend/service/jenkins_service.py,sha256=njomagkliIWKisR9FmhKKqZ8y9NijyJ3hUQe23gl2U4,9878
-argus/backend/service/notification_manager.py,sha256=
+argus/backend/service/notification_manager.py,sha256=hOeFTZ8HhcFa7Ifo9aS-DUsivo5MoCTHyjmUgvH2fyY,7271
 argus/backend/service/release_manager.py,sha256=d1J6llBb4aKgFPrsPTPYpV9NnGx772jeORZjs-ojYGE,7771
-argus/backend/service/results_service.py,sha256=
+argus/backend/service/results_service.py,sha256=WeVeJbOOA1KJCTw3vIUcbWE4YqldwwnTT-mYJMnUqvA,13415
 argus/backend/service/stats.py,sha256=-V94A8EUlQBvwG53oJTL4U1EzR4vciEF7Niu-efTL6Y,22713
 argus/backend/service/team_manager_service.py,sha256=zY5dvy3ffvQbJuXBvlWKE5dS5LQ3ss6tkFE-cwFZsdw,3010
-argus/backend/service/testrun.py,sha256=
+argus/backend/service/testrun.py,sha256=L9JC0D9J8D5QkuT_HxFBK_Z31j1rdiT1cRo7s4zjhqo,22251
 argus/backend/service/user.py,sha256=DC8fII7mElWGB-pMGyn4uzaJHIbmxzh_ZWf2POmlBkU,10936
 argus/backend/service/views.py,sha256=gUzwQv3fasGh0hRvivCr64XooQhG3c1V1KcxgMjC2qM,11292
 argus/backend/template_filters.py,sha256=04PHl0DiN4PBHQ82HMAmTfww09fGMXcYy-I5BU_b1s4,682
+argus/backend/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+argus/backend/tests/argus_web.test.yaml,sha256=_ompiLY3zXuGjuMenIR0UtJmkTW3RjFWejFS05o850I,1325
+argus/backend/tests/conftest.py,sha256=rg1KVQhO0wEs98HYV4kwtQxODUXq-S1MUy9_53v8qbI,1561
+argus/backend/tests/results_service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+argus/backend/tests/results_service/test_best_results.py,sha256=JE1sOecg_TPmdcpteSUMSFx4VFwXUvyJ3736R8l7IiU,3095
 argus/backend/util/common.py,sha256=vLMit9ZBBN8S4-dw32LIhjtaEOX_5hwWneHILS_SNBg,1723
 argus/backend/util/config.py,sha256=1HpHm8Du6yz61gwAE1vR6uwuHCStaSerirbEhBLnDws,927
 argus/backend/util/encoders.py,sha256=5AfJbs2V3gOOg5LtFLZAtBqlnSdX8HHITT7r9Wu-law,1129
@@ -71,7 +76,7 @@ argus/client/driver_matrix_tests/cli.py,sha256=PIK4IyA4qku7jCnJ8A0i59DeVl1jvMWYu
 argus/client/driver_matrix_tests/client.py,sha256=UPryBku2rg6IV2wKKDkclXHnH3r6EYwWdds65wLC-KU,2748
 argus/client/generic/cli.py,sha256=IJkgEZ5VOAeqp5SlLM13Y5m8e34Cqnyz8WkfeKoN7so,2208
 argus/client/generic/client.py,sha256=l4PDjDy65Mm2OI9ZLSnyd8_2i4Ei1Pp9yRt3bRX8s2Y,1114
-argus/client/generic_result.py,sha256=
+argus/client/generic_result.py,sha256=Fyo-ooFoO-w64BbXAlPgg0XOuuozTjv-51rAdsxecno,4197
 argus/client/generic_result_old.py,sha256=Oi15Gu8WbXK_WruF0IU-Fokr-I1k8mzg1MpHbmpt50M,4662
 argus/client/sct/client.py,sha256=DtRA0Ra3ycUcedDYfZZW1jER0nc8vdYHaY6DT0te4x0,11341
 argus/client/sct/types.py,sha256=VLgVe7qPmJtCLqtPnuX8N8kMKZq-iY3SKz68nvU6nJ4,371
@@ -84,8 +89,8 @@ argus/db/db_types.py,sha256=iLbmrUaDzrBw0kDCnvW0FSZ9-kNc3uQY-fsbIPymV4E,3612
 argus/db/interface.py,sha256=HroyA1Yijz5cXLdYbxorHCEu0GH9VeMMqB36IHTlcew,17146
 argus/db/testrun.py,sha256=0YG7FIH5FLQeNlYULxC6rhhyru2rziSMe3qKtYzTBnc,26014
 argus/db/utils.py,sha256=YAWsuLjUScSgKgdaL5aF4Sgr13gqH29Mb5cLctX4V_w,337
-argus_alm-0.12.
-argus_alm-0.12.
-argus_alm-0.12.
-argus_alm-0.12.
-argus_alm-0.12.
+argus_alm-0.12.9.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+argus_alm-0.12.9.dist-info/METADATA,sha256=i8u6MV6uYgTu74q8FWwy7eYWDaMmrMGrh0VNRF01-k8,3508
+argus_alm-0.12.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+argus_alm-0.12.9.dist-info/entry_points.txt,sha256=pcYW8nxZuDaymxE8tn86K0dq8eEodUdiS0sSvwEQ_zU,137
+argus_alm-0.12.9.dist-info/RECORD,,

{argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/LICENSE
File without changes

{argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/WHEEL
File without changes

{argus_alm-0.12.8.dist-info → argus_alm-0.12.9.dist-info}/entry_points.txt
File without changes