argus-alm 0.15.2__py3-none-any.whl → 0.15.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- argus/_version.py +2 -2
- argus/client/generic_result.py +6 -1
- {argus_alm-0.15.2.dist-info → argus_alm-0.15.3.dist-info}/METADATA +1 -1
- argus_alm-0.15.3.dist-info/RECORD +22 -0
- argus/backend/.gitkeep +0 -0
- argus/backend/__init__.py +0 -0
- argus/backend/cli.py +0 -57
- argus/backend/controller/__init__.py +0 -0
- argus/backend/controller/admin.py +0 -20
- argus/backend/controller/admin_api.py +0 -355
- argus/backend/controller/api.py +0 -589
- argus/backend/controller/auth.py +0 -67
- argus/backend/controller/client_api.py +0 -109
- argus/backend/controller/main.py +0 -316
- argus/backend/controller/notification_api.py +0 -72
- argus/backend/controller/notifications.py +0 -13
- argus/backend/controller/planner_api.py +0 -194
- argus/backend/controller/team.py +0 -129
- argus/backend/controller/team_ui.py +0 -19
- argus/backend/controller/testrun_api.py +0 -513
- argus/backend/controller/view_api.py +0 -188
- argus/backend/controller/views_widgets/__init__.py +0 -0
- argus/backend/controller/views_widgets/graphed_stats.py +0 -54
- argus/backend/controller/views_widgets/graphs.py +0 -68
- argus/backend/controller/views_widgets/highlights.py +0 -135
- argus/backend/controller/views_widgets/nemesis_stats.py +0 -26
- argus/backend/controller/views_widgets/summary.py +0 -43
- argus/backend/db.py +0 -98
- argus/backend/error_handlers.py +0 -41
- argus/backend/events/event_processors.py +0 -34
- argus/backend/models/__init__.py +0 -0
- argus/backend/models/argus_ai.py +0 -24
- argus/backend/models/github_issue.py +0 -60
- argus/backend/models/plan.py +0 -24
- argus/backend/models/result.py +0 -187
- argus/backend/models/runtime_store.py +0 -58
- argus/backend/models/view_widgets.py +0 -25
- argus/backend/models/web.py +0 -403
- argus/backend/plugins/__init__.py +0 -0
- argus/backend/plugins/core.py +0 -248
- argus/backend/plugins/driver_matrix_tests/controller.py +0 -66
- argus/backend/plugins/driver_matrix_tests/model.py +0 -429
- argus/backend/plugins/driver_matrix_tests/plugin.py +0 -21
- argus/backend/plugins/driver_matrix_tests/raw_types.py +0 -62
- argus/backend/plugins/driver_matrix_tests/service.py +0 -61
- argus/backend/plugins/driver_matrix_tests/udt.py +0 -42
- argus/backend/plugins/generic/model.py +0 -86
- argus/backend/plugins/generic/plugin.py +0 -15
- argus/backend/plugins/generic/types.py +0 -14
- argus/backend/plugins/loader.py +0 -39
- argus/backend/plugins/sct/controller.py +0 -224
- argus/backend/plugins/sct/plugin.py +0 -37
- argus/backend/plugins/sct/resource_setup.py +0 -177
- argus/backend/plugins/sct/service.py +0 -682
- argus/backend/plugins/sct/testrun.py +0 -288
- argus/backend/plugins/sct/udt.py +0 -100
- argus/backend/plugins/sirenada/model.py +0 -118
- argus/backend/plugins/sirenada/plugin.py +0 -16
- argus/backend/service/admin.py +0 -26
- argus/backend/service/argus_service.py +0 -696
- argus/backend/service/build_system_monitor.py +0 -185
- argus/backend/service/client_service.py +0 -127
- argus/backend/service/event_service.py +0 -18
- argus/backend/service/github_service.py +0 -233
- argus/backend/service/jenkins_service.py +0 -269
- argus/backend/service/notification_manager.py +0 -159
- argus/backend/service/planner_service.py +0 -608
- argus/backend/service/release_manager.py +0 -229
- argus/backend/service/results_service.py +0 -690
- argus/backend/service/stats.py +0 -610
- argus/backend/service/team_manager_service.py +0 -82
- argus/backend/service/test_lookup.py +0 -172
- argus/backend/service/testrun.py +0 -489
- argus/backend/service/user.py +0 -308
- argus/backend/service/views.py +0 -219
- argus/backend/service/views_widgets/__init__.py +0 -0
- argus/backend/service/views_widgets/graphed_stats.py +0 -180
- argus/backend/service/views_widgets/highlights.py +0 -374
- argus/backend/service/views_widgets/nemesis_stats.py +0 -34
- argus/backend/template_filters.py +0 -27
- argus/backend/tests/__init__.py +0 -0
- argus/backend/tests/client_service/__init__.py +0 -0
- argus/backend/tests/client_service/test_submit_results.py +0 -79
- argus/backend/tests/conftest.py +0 -180
- argus/backend/tests/results_service/__init__.py +0 -0
- argus/backend/tests/results_service/test_best_results.py +0 -178
- argus/backend/tests/results_service/test_cell.py +0 -65
- argus/backend/tests/results_service/test_chartjs_additional_functions.py +0 -259
- argus/backend/tests/results_service/test_create_chartjs.py +0 -220
- argus/backend/tests/results_service/test_result_metadata.py +0 -100
- argus/backend/tests/results_service/test_results_service.py +0 -203
- argus/backend/tests/results_service/test_validation_rules.py +0 -213
- argus/backend/tests/view_widgets/__init__.py +0 -0
- argus/backend/tests/view_widgets/test_highlights_api.py +0 -532
- argus/backend/util/common.py +0 -65
- argus/backend/util/config.py +0 -38
- argus/backend/util/encoders.py +0 -56
- argus/backend/util/logsetup.py +0 -80
- argus/backend/util/module_loaders.py +0 -30
- argus/backend/util/send_email.py +0 -91
- argus/client/tests/__init__.py +0 -0
- argus/client/tests/conftest.py +0 -19
- argus/client/tests/test_package.py +0 -45
- argus/client/tests/test_results.py +0 -224
- argus_alm-0.15.2.dist-info/RECORD +0 -122
- {argus_alm-0.15.2.dist-info → argus_alm-0.15.3.dist-info}/WHEEL +0 -0
- {argus_alm-0.15.2.dist-info → argus_alm-0.15.3.dist-info}/entry_points.txt +0 -0
- {argus_alm-0.15.2.dist-info → argus_alm-0.15.3.dist-info}/licenses/LICENSE +0 -0
- {argus_alm-0.15.2.dist-info → argus_alm-0.15.3.dist-info}/top_level.txt +0 -0
argus/backend/service/views_widgets/graphed_stats.py
DELETED
@@ -1,180 +0,0 @@
-import logging
-from uuid import UUID
-import json
-import re
-from argus.backend.db import ScyllaCluster
-from argus.backend.plugins.sct.testrun import SCTTestRun
-from argus.backend.models.github_issue import GithubIssue, IssueLink
-from argus.backend.util.common import chunk, get_build_number
-
-LOGGER = logging.getLogger(__name__)
-
-
-class GraphedStatsService:
-    def __init__(self) -> None:
-        self.cluster = ScyllaCluster.get()
-
-    def get_graphed_stats(self, test_id: UUID, filters=None):
-        rows = SCTTestRun.filter(test_id=test_id).only([
-            "build_id",
-            "start_time",
-            "end_time",
-            "id",
-            "nemesis_data",
-            "investigation_status",
-            "packages",
-            "status"
-        ]).all()
-
-        release_data = {
-            "test_runs": [],
-            "nemesis_data": []
-        }
-
-        filter_patterns = []
-        if filters:
-            try:
-                filter_patterns = [re.compile(pattern) for pattern in json.loads(filters)]
-            except (json.JSONDecodeError, re.error) as e:
-                LOGGER.error(f"Error parsing filters: {e}")
-
-        for run in [row for row in rows if row["investigation_status"].lower() != "ignored"]:
-            # Skip if build_id matches any filter pattern
-            if filter_patterns and any(pattern.search(run["build_id"]) for pattern in filter_patterns):
-                continue
-            try:
-                version = [package.version for package in run["packages"] if package.name == "scylla-server"][0]
-            except (IndexError, TypeError):
-                version = "unknown"
-
-            duration = (run["end_time"] - run["start_time"]).total_seconds() if run["end_time"] else 0
-            release_data["test_runs"].append({
-                "build_id": run["build_id"],
-                "start_time": run["start_time"].timestamp(),
-                "duration": duration if duration > 0 else 0,
-                "status": run["status"],
-                "version": version,
-                "run_id": str(run["id"]),
-                "investigation_status": run["investigation_status"]
-            })
-
-            if run["nemesis_data"]:
-                for nemesis in [n for n in run["nemesis_data"] if n.status in ("succeeded", "failed")]:
-                    release_data["nemesis_data"].append({
-                        "version": version,
-                        "name": nemesis.name.split("disrupt_")[-1],
-                        "start_time": nemesis.start_time,
-                        "duration": nemesis.end_time - nemesis.start_time,
-                        "status": nemesis.status,
-                        "run_id": str(run["id"]),
-                        "stack_trace": nemesis.stack_trace,
-                        "build_id": run["build_id"]
-                    })
-
-        return release_data
-
-    def get_runs_details(self, run_ids: list[str]):
-        """Get detailed information for provided test runs including assignee and attached issues.
-
-        Args:
-            run_ids: List of run IDs to fetch detailed information for
-
-        Returns:
-            Dictionary mapping run IDs to their detailed information (build_id, start_time, assignee, version, and issues)
-        """
-        result = {}
-
-        if not run_ids:
-            return result
-
-        # Step 1: Get issue links for all run_ids in batches
-        all_issue_links = {}
-        for batch_run_ids in chunk(run_ids):
-            batch_links = IssueLink.objects.filter(run_id__in=batch_run_ids).only(["run_id", "issue_id"]).all()
-
-            for link in batch_links:
-                run_id_str = str(link.run_id)
-                if run_id_str not in all_issue_links:
-                    all_issue_links[run_id_str] = []
-                all_issue_links[run_id_str].append(link.issue_id)
-
-        # Step 2: Fetch all unique issue details
-        all_issue_ids = set()
-        for links in all_issue_links.values():
-            all_issue_ids.update(links)
-
-        issues_by_id = {}
-        if all_issue_ids:
-            for batch_issue_ids in chunk(list(all_issue_ids)):
-                batch_issues = GithubIssue.filter(id__in=batch_issue_ids).only(
-                    ["id", "state", "title", "number", "url"]).all()
-
-                for issue in batch_issues:
-                    issues_by_id[issue.id] = issue
-
-        # Step 3: Fetch test runs for all provided run_ids
-        test_runs = {}
-        for run_id in run_ids:
-            try:
-                test_run = SCTTestRun.filter(id=run_id).only(
-                    ["id", "status", "build_id", "start_time", "assignee", "investigation_status", "packages", "build_job_url"]).get()
-                test_runs[run_id] = test_run
-            except Exception as e:
-                LOGGER.error(f"Failed to fetch test run {run_id}: {str(e)}")
-
-        # Step 4: Build result with run and issue details
-        for run_id in run_ids:
-            try:
-                test_run = test_runs.get(run_id)
-                if not test_run:
-                    result[run_id] = {
-                        "build_id": None,
-                        "start_time": None,
-                        "assignee": None,
-                        "version": "unknown",
-                        "issues": []
-                    }
-                    continue
-
-                links = all_issue_links.get(run_id, [])
-                issues = [issues_by_id[issue_id] for issue_id in links if issue_id in issues_by_id]
-
-                build_number = get_build_number(test_run.build_job_url)
-
-                # Get Scylla version from packages
-                for pkg_name in ["scylla-server-upgraded", "scylla-server-upgrade-target", "scylla-server", "scylla-server-target"]:
-                    sut_version = next(
-                        (f"{pkg.version}-{pkg.date}" for pkg in test_run.packages if pkg.name == pkg_name), None)
-                    if sut_version:
-                        break
-                else:
-                    sut_version = "unknown"
-
-                result[run_id] = {
-                    "build_id": f"{test_run.build_id}#{build_number}",
-                    "status": test_run.status,
-                    "start_time": test_run.start_time.isoformat(),
-                    "assignee": test_run.assignee,
-                    "version": sut_version,
-                    "investigation_status": test_run.investigation_status,
-                    "issues": [
-                        {
-                            "number": issue.number,
-                            "state": issue.state,
-                            "title": issue.title,
-                            "url": issue.url,
-                        }
-                        for issue in issues
-                    ],
-                }
-            except Exception as e:
-                LOGGER.error(f"Error fetching details for run {run_id}: {str(e)}")
-                result[run_id] = {
-                    "build_id": None,
-                    "start_time": None,
-                    "assignee": None,
-                    "version": "unknown",
-                    "issues": []
-                }
-
-        return result
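For context on the batching in get_runs_details() above: the chunk() helper it imports from argus.backend.util.common is not part of this diff, so the sketch below is an assumption about its typical shape (a generator yielding fixed-size batches; the default size here is arbitrary). It is shown only to illustrate why the issue-link and issue lookups are issued per batch rather than with one unbounded IN list.

```python
# Hypothetical sketch of a chunk() helper; the real implementation is not shown in this diff.
from itertools import islice
from typing import Iterable, Iterator


def chunk(items: Iterable, size: int = 100) -> Iterator[list]:
    """Yield successive lists of at most `size` items (batch size is an assumption)."""
    iterator = iter(items)
    while batch := list(islice(iterator, size)):
        yield batch


# Usage mirroring get_runs_details(): one bounded IN-query per batch.
run_ids = [f"run-{i}" for i in range(250)]
for batch_run_ids in chunk(run_ids):
    print(len(batch_run_ids))  # 100, 100, 50
```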
argus/backend/service/views_widgets/highlights.py
DELETED
@@ -1,374 +0,0 @@
-from dataclasses import dataclass
-from datetime import datetime, UTC
-from uuid import UUID
-import re
-
-from flask import abort, g
-
-from argus.backend.db import ScyllaCluster
-from argus.backend.models.view_widgets import WidgetHighlights, WidgetComment
-from argus.backend.models.web import ArgusNotificationTypes, ArgusNotificationSourceTypes, ArgusUserView, User
-from argus.backend.service.notification_manager import NotificationManagerService
-from argus.backend.util.common import strip_html_tags
-
-
-@dataclass
-class Highlight:
-    view_id: UUID
-    index: int
-    created_at: datetime
-    creator_id: UUID
-    content: str
-    archived_at: datetime
-    comments_count: int
-
-    @classmethod
-    def from_db_model(cls, model: WidgetHighlights):
-        created_at = model.created_at.replace(tzinfo=UTC).timestamp()
-        archived_at = model.archived_at.replace(tzinfo=UTC).timestamp() if model.archived_at else None
-        return cls(
-            view_id=model.view_id,
-            index=model.index,
-            created_at=created_at,
-            creator_id=model.creator_id,
-            content=model.content,
-            archived_at=archived_at,
-            comments_count=model.comments_count,
-        )
-
-
-@dataclass
-class ActionItem(Highlight):
-    assignee_id: UUID | None
-    completed: bool
-
-    @classmethod
-    def from_db_model(cls, model: WidgetHighlights):
-        created_at = model.created_at.replace(tzinfo=UTC).timestamp()
-        archived_at = model.archived_at.replace(tzinfo=UTC).timestamp() if model.archived_at else None
-        return cls(
-            view_id=model.view_id,
-            index=model.index,
-            created_at=created_at,
-            creator_id=model.creator_id,
-            content=model.content,
-            archived_at=archived_at,
-            comments_count=model.comments_count,
-            assignee_id=model.assignee_id,
-            completed=model.completed,
-        )
-
-
-@dataclass
-class CommentCreate:
-    view_id: UUID
-    index: int
-    highlight_created_at: float
-    content: str
-
-
-@dataclass
-class CommentUpdate:
-    view_id: UUID
-    index: int
-    highlight_created_at: float
-    created_at: float
-    content: str
-
-
-@dataclass
-class CommentDelete:
-    view_id: UUID
-    index: int
-    highlight_created_at: float
-    created_at: float
-
-
-@dataclass
-class Comment:
-    view_id: UUID
-    index: int
-    highlight_created_at: datetime
-    created_at: datetime
-    creator_id: UUID
-    content: str
-
-    @classmethod
-    def from_db_model(cls, model: WidgetComment):
-        highlight_created_at = model.highlight_at.replace(tzinfo=UTC).timestamp()
-        created_at = model.created_at.replace(tzinfo=UTC).timestamp()
-        return cls(
-            view_id=model.view_id,
-            index=model.index,
-            highlight_created_at=highlight_created_at,
-            created_at=created_at,
-            creator_id=model.creator_id,
-            content=model.content,
-        )
-
-
-@dataclass
-class HighlightCreate:
-    view_id: UUID
-    index: int
-    content: str
-    is_task: bool
-
-
-@dataclass
-class HighlightArchive:
-    view_id: UUID
-    index: int
-    created_at: float
-
-
-@dataclass
-class HighlightUpdate:
-    view_id: UUID
-    index: int
-    created_at: float
-    content: str
-
-
-@dataclass
-class HighlightSetAssignee:
-    view_id: UUID
-    index: int
-    created_at: float
-    assignee_id: UUID | None = None
-
-    def __post_init__(self):
-        if self.assignee_id and not isinstance(self.assignee_id, UUID):
-            self.assignee_id = UUID(self.assignee_id)
-
-
-@dataclass
-class HighlightSetCompleted:
-    view_id: UUID
-    index: int
-    created_at: float
-    completed: bool
-
-
-class HighlightsService:
-
-    def __init__(self) -> None:
-        self.cluster = ScyllaCluster.get()
-        self.RE_MENTION = r"@[\w-]+"
-
-    def _process_mentions(self, content: str) -> set:
-        """Process mentions from content and return set of users to notify."""
-        content_stripped = strip_html_tags(content)
-        mentions = set()
-        for potential_mention in re.findall(self.RE_MENTION, content_stripped):
-            if user := User.exists_by_name(potential_mention.lstrip("@")):
-                mentions.add(user) if user.id != g.user.id else None
-        return mentions, content_stripped
-
-    def _send_highlight_notifications(self, mentions: set, content: str, view_id: UUID, sender_id: UUID, is_action_item: bool, is_comment: bool = False):
-        """Send notifications to mentioned users."""
-        view = ArgusUserView.get(id=view_id)
-        highlight_type = "action item" if is_action_item else "highlight"
-        for mention in mentions:
-            NotificationManagerService().send_notification(
-                receiver=mention.id,
-                sender=sender_id,
-                notification_type=ArgusNotificationTypes.ViewHighlightMention,
-                source_type=ArgusNotificationSourceTypes.ViewActionItem if is_action_item else ArgusNotificationSourceTypes.ViewHighlight,
-                source_id=view_id,
-                source_message=content,
-                content_params={
-                    "username": g.user.username,
-                    "view_id": view.id,
-                    "view_name": view.name,
-                    "display_name": view.display_name,
-                    "highlight_type": f'{highlight_type}{" comment" if is_comment else ""}',
-                    "message": content,
-                }
-            )
-
-    def create(
-        self,
-        creator: UUID,
-        payload: HighlightCreate,
-    ) -> Highlight | ActionItem:
-        mentions, content_stripped = self._process_mentions(payload.content)
-
-        highlight = WidgetHighlights(
-            view_id=payload.view_id,
-            index=payload.index,
-            created_at=datetime.now(UTC),
-            creator_id=creator,
-            content=content_stripped,
-            completed=None if not payload.is_task else False,
-            archived=datetime.fromtimestamp(0, tz=UTC),
-            comments_count=0,
-        )
-        highlight.save()
-
-        self._send_highlight_notifications(mentions, content_stripped, payload.view_id, creator, payload.is_task)
-
-        if payload.is_task:
-            return ActionItem.from_db_model(highlight)
-        return Highlight.from_db_model(highlight)
-
-    def archive_highlight(self, payload: HighlightArchive):
-        entry = WidgetHighlights.objects(
-            view_id=payload.view_id, index=payload.index, created_at=datetime.fromtimestamp(payload.created_at, tz=UTC)
-        ).first()
-        if entry:
-            entry.archived_at = datetime.now(UTC)
-            entry.save()
-
-    def unarchive_highlight(self, payload: HighlightArchive):
-        entry = WidgetHighlights.objects(
-            view_id=payload.view_id,
-            index=payload.index,
-            created_at=datetime.fromtimestamp(payload.created_at, tz=UTC)
-        ).first()
-        if entry:
-            entry.archived_at = datetime.fromtimestamp(0, tz=UTC)
-            entry.save()
-
-    def update_highlight(self, user_id: UUID, payload: HighlightUpdate) -> Highlight | ActionItem:
-        entry = WidgetHighlights.objects(
-            view_id=payload.view_id,
-            index=payload.index,
-            created_at=datetime.fromtimestamp(payload.created_at, tz=UTC)
-        ).first()
-        if not entry:
-            abort(404, description="Highlight not found")
-        if entry.creator_id != user_id:
-            abort(403, description="Not authorized to update highlight")
-
-        mentions, content_stripped = self._process_mentions(payload.content)
-        entry.content = content_stripped
-        entry.save()
-
-        self._send_highlight_notifications(mentions, content_stripped, payload.view_id,
-                                           user_id, entry.completed is not None)
-
-        if entry.completed is None:
-            return Highlight.from_db_model(entry)
-        else:
-            return ActionItem.from_db_model(entry)
-
-    def set_assignee(self, payload: HighlightSetAssignee) -> ActionItem:
-        entry = WidgetHighlights.objects(
-            view_id=payload.view_id,
-            index=payload.index,
-            created_at=datetime.fromtimestamp(payload.created_at, tz=UTC)
-        ).first()
-        if not entry or entry.completed is None:
-            abort(404, description="ActionItem not found")
-        if payload.assignee_id is None:
-            entry.assignee_id = None
-        else:
-            entry.assignee_id = payload.assignee_id
-        entry.save()
-        return ActionItem.from_db_model(entry)
-
-    def set_completed(self, payload: HighlightSetCompleted) -> ActionItem:
-        entry = WidgetHighlights.objects(
-            view_id=payload.view_id,
-            index=payload.index,
-            created_at=datetime.fromtimestamp(payload.created_at, tz=UTC)
-        ).first()
-        if not entry or entry.completed is None:
-            abort(404, description="ActionItem not found")
-        entry.completed = payload.completed
-        entry.save()
-        return ActionItem.from_db_model(entry)
-
-    def get_highlights(self, view_id: UUID, index: int) -> tuple[list[Highlight], list[ActionItem]]:
-        entries = WidgetHighlights.objects(view_id=view_id, index=index)
-        highlights = [Highlight.from_db_model(entry) for entry in entries if entry.completed is None]
-        action_items = [ActionItem.from_db_model(entry) for entry in entries if entry.completed is not None]
-        return highlights, action_items
-
-    def create_comment(self, creator_id: UUID, payload: CommentCreate) -> Comment:
-        highlight_created_at = datetime.fromtimestamp(payload.highlight_created_at, tz=UTC)
-        highlight = WidgetHighlights.objects(
-            view_id=payload.view_id, index=payload.index, created_at=highlight_created_at).first()
-        if not highlight:
-            abort(404, description="Highlight not found")
-        created_at = datetime.now(UTC)
-        mentions, content_stripped = self._process_mentions(payload.content)
-        comment = WidgetComment(
-            view_id=payload.view_id,
-            index=payload.index,
-            highlight_at=highlight_created_at,
-            created_at=created_at,
-            creator_id=creator_id,
-            content=payload.content,
-        )
-        comment.save()
-        highlight.comments_count += 1
-        highlight.save()
-        self._send_highlight_notifications(mentions, content_stripped, payload.view_id,
-                                           creator_id, highlight.completed is not None, is_comment=True)
-        return Comment.from_db_model(comment)
-
-    def update_comment(self, user_id: UUID, payload: CommentUpdate) -> Comment:
-        highlight_created_at = datetime.fromtimestamp(payload.highlight_created_at, tz=UTC)
-        created_at = datetime.fromtimestamp(payload.created_at, tz=UTC)
-        comment = WidgetComment.objects(
-            view_id=payload.view_id,
-            index=payload.index,
-            highlight_at=highlight_created_at,
-            created_at=created_at,
-        ).first()
-        if not comment:
-            abort(404, description="Comment not found")
-        if comment.creator_id != user_id:
-            abort(403, description="Not authorized to update comment")
-        mentions, content_stripped = self._process_mentions(payload.content)
-        comment.content = payload.content
-        comment.save()
-        self._send_highlight_notifications(mentions, content_stripped, payload.view_id, user_id, WidgetHighlights.objects(
-            view_id=payload.view_id, index=payload.index, created_at=highlight_created_at).first().completed is not None, is_comment=True)
-        return Comment.from_db_model(comment)
-
-    def delete_comment(self, user_id: UUID, payload: CommentDelete):
-        index = int(payload.index)
-        highlight_created_at = datetime.fromtimestamp(payload.highlight_created_at, tz=UTC)
-        created_at = datetime.fromtimestamp(payload.created_at, tz=UTC)
-        comment = WidgetComment.objects(
-            view_id=payload.view_id,
-            index=index,
-            highlight_at=highlight_created_at,
-            created_at=created_at,
-        ).first()
-        if not comment:
-            abort(404, description="Comment not found")
-        if comment.creator_id != user_id:
-            abort(403, description="Not authorized to delete comment")
-        comment.delete()
-        highlight = WidgetHighlights.objects(view_id=payload.view_id, index=index,
-                                             created_at=highlight_created_at).first()
-        if not highlight:
-            abort(404, description="Highlight not found")
-        highlight.comments_count -= 1
-        highlight.save()
-
-    def get_comments(self, view_id: UUID, index: int, highlight_created_at: float) -> list[Comment]:
-        highlight_created_at = datetime.fromtimestamp(highlight_created_at, tz=UTC)
-        comments = WidgetComment.objects(view_id=view_id, index=index, highlight_at=highlight_created_at)
-        return [Comment.from_db_model(c) for c in comments]
-
-    def send_action_notification(self, sender_id: UUID, username: str, view_id: UUID, assignee_id: UUID, action: str):
-        view = ArgusUserView.get(id=view_id)
-        NotificationManagerService().send_notification(
-            receiver=assignee_id,
-            sender=sender_id,
-            notification_type=ArgusNotificationTypes.ViewActionItemAssignee,
-            source_type=ArgusNotificationSourceTypes.ViewActionItem,
-            source_id=view_id,
-            source_message="",
-            content_params={
-                "username": username,
-                "view_name": view.name,
-                "display_name": view.display_name,
-                "action": action,
-            }
-        )
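The mention handling in HighlightsService._process_mentions() above boils down to scanning the HTML-stripped content with the @[\w-]+ pattern and looking each candidate up as a user. A minimal, standalone sketch of that extraction step follows; strip_html_tags and User.exists_by_name are not reproduced here, and this stub only returns the raw usernames.

```python
# Standalone sketch of the mention-extraction step; the user lookup is deliberately omitted.
import re

RE_MENTION = r"@[\w-]+"  # same pattern the service compiles on the fly


def extract_mentions(content: str) -> set[str]:
    """Return the distinct usernames mentioned as @name in plain-text content."""
    return {match.lstrip("@") for match in re.findall(RE_MENTION, content)}


print(extract_mentions("ping @alice and @bob-smith about the failing run"))
# {'alice', 'bob-smith'}  (set order may vary)
```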
argus/backend/service/views_widgets/nemesis_stats.py
DELETED
@@ -1,34 +0,0 @@
-from uuid import UUID
-
-
-from argus.backend.db import ScyllaCluster
-from argus.backend.plugins.sct.testrun import SCTTestRun
-
-
-class NemesisStatsService:
-    def __init__(self) -> None:
-        self.cluster = ScyllaCluster.get()
-
-    def get_nemesis_data(self, test_id: UUID):
-        rows = SCTTestRun.filter(test_id=test_id).only(["id", "nemesis_data", "investigation_status", "packages"]).all()
-        nemesis_data = []
-        for run in [row for row in rows if row["investigation_status"].lower() != "ignored"]:
-            try:
-                version = [package.version for package in run["packages"] if package.name == "scylla-server"][0]
-            except (IndexError, TypeError):
-                continue
-            if not run["nemesis_data"]:
-                continue
-            for nemesis in [nemesis for nemesis in run["nemesis_data"] if nemesis.status in ("succeeded", "failed")]:
-                nemesis_data.append(
-                    {
-                        "version": version,
-                        "name": nemesis.name.split("disrupt_")[-1],
-                        "start_time": nemesis.start_time,
-                        "duration": nemesis.end_time - nemesis.start_time,
-                        "status": nemesis.status,
-                        "run_id": run["id"],
-                        "stack_trace": nemesis.stack_trace,
-                    }
-                )
-        return nemesis_data
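One detail worth noting in get_nemesis_data() above: nemesis names are reported without their disrupt_ prefix. A quick illustration, using a made-up nemesis name:

```python
# Hypothetical nemesis name; the split mirrors nemesis.name.split("disrupt_")[-1] above.
name = "disrupt_terminate_and_replace_node"
print(name.split("disrupt_")[-1])  # terminate_and_replace_node
```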
argus/backend/template_filters.py
DELETED
@@ -1,27 +0,0 @@
-from functools import partial
-from datetime import datetime
-
-from argus.backend.models.web import User
-from argus.backend.util.module_loaders import is_filter, export_functions
-
-
-export_filters = partial(export_functions, module_name=__name__, attr="is_filter")
-
-
-@is_filter("from_timestamp")
-def from_timestamp_filter(timestamp: int):
-    return datetime.utcfromtimestamp(timestamp)
-
-
-@is_filter("safe_user")
-def safe_user(user: User):
-    user_dict = dict(user.items())
-    del user_dict["password"]
-    return user_dict
-
-
-@is_filter("formatted_date")
-def formatted_date(date: datetime | None):
-    if date:
-        return date.strftime("%d/%m/%Y %H:%M:%S")
-    return "#unknown"
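The three Jinja filters above are small enough to check by hand. The snippet below reproduces two of their bodies standalone (the @is_filter registration decorator from argus.backend.util.module_loaders is omitted, since its implementation is not shown in this diff) and demonstrates the expected output.

```python
# Standalone reproduction of the filter bodies for a quick sanity check.
from datetime import datetime


def from_timestamp_filter(timestamp: int):
    return datetime.utcfromtimestamp(timestamp)


def formatted_date(date: datetime | None):
    if date:
        return date.strftime("%d/%m/%Y %H:%M:%S")
    return "#unknown"


print(formatted_date(from_timestamp_filter(0)))  # 01/01/1970 00:00:00
print(formatted_date(None))                      # #unknown
```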
argus/backend/tests/__init__.py
DELETED
File without changes