suprema-biostar-mcp 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- biostar_x_mcp_server/__init__.py +25 -0
- biostar_x_mcp_server/__main__.py +15 -0
- biostar_x_mcp_server/config.py +87 -0
- biostar_x_mcp_server/handlers/__init__.py +35 -0
- biostar_x_mcp_server/handlers/access_handler.py +2162 -0
- biostar_x_mcp_server/handlers/audit_handler.py +489 -0
- biostar_x_mcp_server/handlers/auth_handler.py +216 -0
- biostar_x_mcp_server/handlers/base_handler.py +228 -0
- biostar_x_mcp_server/handlers/card_handler.py +746 -0
- biostar_x_mcp_server/handlers/device_handler.py +4344 -0
- biostar_x_mcp_server/handlers/door_handler.py +3969 -0
- biostar_x_mcp_server/handlers/event_handler.py +1331 -0
- biostar_x_mcp_server/handlers/file_handler.py +212 -0
- biostar_x_mcp_server/handlers/help_web_handler.py +379 -0
- biostar_x_mcp_server/handlers/log_handler.py +1051 -0
- biostar_x_mcp_server/handlers/navigation_handler.py +109 -0
- biostar_x_mcp_server/handlers/occupancy_handler.py +541 -0
- biostar_x_mcp_server/handlers/user_handler.py +3568 -0
- biostar_x_mcp_server/schemas/__init__.py +21 -0
- biostar_x_mcp_server/schemas/access.py +158 -0
- biostar_x_mcp_server/schemas/audit.py +73 -0
- biostar_x_mcp_server/schemas/auth.py +24 -0
- biostar_x_mcp_server/schemas/cards.py +128 -0
- biostar_x_mcp_server/schemas/devices.py +496 -0
- biostar_x_mcp_server/schemas/doors.py +306 -0
- biostar_x_mcp_server/schemas/events.py +104 -0
- biostar_x_mcp_server/schemas/files.py +7 -0
- biostar_x_mcp_server/schemas/help.py +29 -0
- biostar_x_mcp_server/schemas/logs.py +33 -0
- biostar_x_mcp_server/schemas/occupancy.py +19 -0
- biostar_x_mcp_server/schemas/tool_response.py +29 -0
- biostar_x_mcp_server/schemas/users.py +166 -0
- biostar_x_mcp_server/server.py +335 -0
- biostar_x_mcp_server/session.py +221 -0
- biostar_x_mcp_server/tool_manager.py +172 -0
- biostar_x_mcp_server/tools/__init__.py +45 -0
- biostar_x_mcp_server/tools/access.py +510 -0
- biostar_x_mcp_server/tools/audit.py +227 -0
- biostar_x_mcp_server/tools/auth.py +59 -0
- biostar_x_mcp_server/tools/cards.py +269 -0
- biostar_x_mcp_server/tools/categories.py +197 -0
- biostar_x_mcp_server/tools/devices.py +1552 -0
- biostar_x_mcp_server/tools/doors.py +865 -0
- biostar_x_mcp_server/tools/events.py +305 -0
- biostar_x_mcp_server/tools/files.py +28 -0
- biostar_x_mcp_server/tools/help.py +80 -0
- biostar_x_mcp_server/tools/logs.py +123 -0
- biostar_x_mcp_server/tools/navigation.py +89 -0
- biostar_x_mcp_server/tools/occupancy.py +91 -0
- biostar_x_mcp_server/tools/users.py +1113 -0
- biostar_x_mcp_server/utils/__init__.py +31 -0
- biostar_x_mcp_server/utils/category_mapper.py +206 -0
- biostar_x_mcp_server/utils/decorators.py +101 -0
- biostar_x_mcp_server/utils/language_detector.py +51 -0
- biostar_x_mcp_server/utils/search.py +42 -0
- biostar_x_mcp_server/utils/timezone.py +122 -0
- suprema_biostar_mcp-1.0.1.dist-info/METADATA +163 -0
- suprema_biostar_mcp-1.0.1.dist-info/RECORD +61 -0
- suprema_biostar_mcp-1.0.1.dist-info/WHEEL +4 -0
- suprema_biostar_mcp-1.0.1.dist-info/entry_points.txt +2 -0
- suprema_biostar_mcp-1.0.1.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,2162 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import json
|
|
3
|
+
import asyncio
|
|
4
|
+
from typing import Sequence, Dict, Any, List, Optional, Tuple, Set
|
|
5
|
+
from mcp.types import TextContent
|
|
6
|
+
import httpx
|
|
7
|
+
from .base_handler import BaseHandler
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class AccessHandler(BaseHandler):
|
|
13
|
+
"""Handle access control operations."""
|
|
14
|
+
|
|
15
|
+
# ----------------------------------------------------------------------
|
|
16
|
+
# Access Group methods
|
|
17
|
+
# ----------------------------------------------------------------------
|
|
18
|
+
async def get_access_groups(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Return every access group known to the BioStar server.

    Calls GET /api/access_groups and reports the total count plus a
    formatted summary of each group.  On a non-200 response the raw
    status and body are surfaced via error_response; any exception is
    routed through handle_api_error.
    """
    try:
        self.check_auth()
        request_headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json"
        }
        url = f"{self.session.config.biostar_url}/api/access_groups"
        # NOTE(review): verify=False disables TLS verification — presumably
        # for self-signed BioStar certs; confirm this is intentional.
        async with httpx.AsyncClient(verify=False) as client:
            resp = await client.get(url, headers=request_headers)
        if resp.status_code != 200:
            return self.error_response(f"API call failed: {resp.status_code} - {resp.text}")
        rows = resp.json().get("AccessGroupCollection", {}).get("rows", []) or []
        summary = {
            "message": f"Found {len(rows)} access groups",
            "total": len(rows),
            "groups": [self.format_access_group_info(row) for row in rows]
        }
        return self.success_response(summary)
    except Exception as e:
        return await self.handle_api_error(e)
async def get_access_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Fetch a single access group by id.

    Expects args["group_id"]; calls GET /api/access_groups/{id} and
    returns the formatted group record.  Non-200 responses become an
    error_response; exceptions go through handle_api_error.
    """
    try:
        self.check_auth()
        target_id = args["group_id"]
        request_headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json"
        }
        url = f"{self.session.config.biostar_url}/api/access_groups/{target_id}"
        # NOTE(review): verify=False disables TLS verification — presumably
        # for self-signed BioStar certs; confirm this is intentional.
        async with httpx.AsyncClient(verify=False) as client:
            resp = await client.get(url, headers=request_headers)
        if resp.status_code != 200:
            return self.error_response(f"API call failed: {resp.status_code} - {resp.text}")
        record = resp.json().get("AccessGroup", {}) or {}
        return self.success_response({"group": self.format_access_group_info(record)})
    except Exception as e:
        return await self.handle_api_error(e)
async def create_access_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """
    Create a new access group.

    API spec alignment:
    - POST /api/access_groups (NO trailing slash)
    - Body:
      {
        "AccessGroup": {
          "name": "str",                                              # required
          "description": "str",                                       # optional
          "users": [ {"user_id": "1"}, ... ],                         # optional
          "user_groups": [ {"id": 1106, "name": "ACE Group"}, ... ],  # optional
          "access_levels": [ {"id": "2"}, ... ]                       # optional
        }
      }

    Business rules:
    - Floor levels are ignored with a warning.
    - User groups:
      * If 'user_group_ids' are provided → validate against GET /api/user_groups (flattened).
      * Else if 'user_group_search_text' is provided → 3-case:
          (0) no match → return full list with needs_selection=true
          (1) single → proceed with that group
          (>=2) multi → return candidates with needs_selection=true
      * If neither is provided → do not include user_groups.
    - Do NOT auto-select anything else.
    """
    try:
        self.check_auth()
        headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json"
        }

        # 'name' is required; KeyError here is caught by the outer handler.
        name: str = args["name"]
        description: str = (args.get("description") or "").strip()

        # Optional inputs
        raw_user_ids = args.get("user_ids") or []

        # CRITICAL: Ensure access_level_ids is always a list
        raw_access_level_ids_input = args.get("access_level_ids")
        if raw_access_level_ids_input is None:
            raw_access_level_ids = []
        elif isinstance(raw_access_level_ids_input, list):
            raw_access_level_ids = raw_access_level_ids_input
        else:
            # Single value (int or string) → convert to list
            raw_access_level_ids = [raw_access_level_ids_input]

        # User group inputs (NEW)
        # CRITICAL: Ensure user_group_ids is always a list
        raw_user_group_ids_input = args.get("user_group_ids")
        if raw_user_group_ids_input is None:
            raw_user_group_ids = []
        elif isinstance(raw_user_group_ids_input, list):
            raw_user_group_ids = raw_user_group_ids_input
        else:
            # Single value (int or string) → convert to list
            raw_user_group_ids = [raw_user_group_ids_input]

        # 'user_group_name' is accepted as a legacy alias for the search text.
        user_group_search_text = (args.get("user_group_search_text") or args.get("user_group_name") or "").strip()

        # Disallowed inputs (ignored with warning)
        ignored_fields: List[str] = []
        for forbidden in ["floor_levels", "parent_id"]:
            if forbidden in args and args[forbidden]:
                ignored_fields.append(forbidden)

        # Build payload
        ag_body: Dict[str, Any] = {"name": name, "description": description}

        if raw_user_ids:
            # Blank/whitespace-only ids are dropped; the rest are stringified.
            ag_body["users"] = [{"user_id": str(uid)} for uid in raw_user_ids if str(uid).strip()]

        if raw_access_level_ids:
            # CRITICAL: Access levels MUST include both 'id' and 'name'
            # First, fetch all access levels to get the name mapping
            async with httpx.AsyncClient(verify=False) as client:
                al_resp = await client.get(
                    f"{self.session.config.biostar_url}/api/access_levels?limit=9999&order_by=id:false",
                    headers=headers
                )

            al_id_to_name = {}
            if al_resp.status_code == 200:
                al_data = al_resp.json() or {}
                al_rows = al_data.get("AccessLevelCollection", {}).get("rows", []) or []
                for al in al_rows:
                    al_id = al.get("id")
                    if al_id is not None:
                        al_id_to_name[str(al_id)] = al.get("name", "")
                logger.info(f" Loaded {len(al_id_to_name)} Access Levels for validation: {list(al_id_to_name.keys())}")
            else:
                # NOTE(review): if this fetch fails, al_id_to_name stays empty and
                # every requested id below is reported as invalid — the error the
                # caller sees is then misleading about the real cause.
                logger.error(f" Failed to load Access Levels: {al_resp.status_code} - {al_resp.text[:200]}")

            # Validate and build access_levels with both id and name
            logger.info(f" Validating access_level_ids: {raw_access_level_ids}")
            access_levels_payload = []
            invalid_al_ids = []
            for alid in raw_access_level_ids:
                alid_str = str(alid).strip()
                if not alid_str:
                    continue
                if alid_str not in al_id_to_name:
                    invalid_al_ids.append(alid)
                else:
                    access_levels_payload.append({
                        "id": alid_str,
                        "name": al_id_to_name[alid_str]
                    })

            if invalid_al_ids:
                logger.error(f" Access Level validation failed! Invalid IDs: {invalid_al_ids}, Available: {list(al_id_to_name.keys())}")
                return self.error_response(
                    "Invalid access_level_ids provided.",
                    {
                        "status": "access_level_validation_failed",
                        "invalid_access_level_ids": invalid_al_ids,
                        "available_access_levels": [{"id": al_id, "name": al_name} for al_id, al_name in al_id_to_name.items()]
                    }
                )

            ag_body["access_levels"] = access_levels_payload

        # Resolve user_groups
        user_groups_payload: List[Dict[str, Any]] = []
        if raw_user_group_ids:
            # Explicit ids take precedence over search text.
            ug_all = await self._list_all_user_groups(headers)
            # Normalize ids through int() so "007" and 7 compare equal.
            id_to_name = {
                str(int(str(g["id"]).strip())): (g.get("name") or "")
                for g in ug_all if g.get("id") is not None
            }
            invalid_ids: List[Any] = []
            for gid in raw_user_group_ids:
                try:
                    key = str(int(str(gid).strip()))
                    if key in id_to_name:
                        user_groups_payload.append({"id": int(key), "name": id_to_name[key]})
                    else:
                        invalid_ids.append(int(key))
                except Exception:
                    # Non-numeric id → reported back verbatim as invalid.
                    invalid_ids.append(gid)

            if invalid_ids:
                return self.error_response(
                    "Invalid user_group_ids provided.",
                    {
                        "status": "user_group_validation_failed",
                        "invalid_user_group_ids": invalid_ids,
                        "available_user_groups": [
                            {"id": int(str(g["id"]).strip()), "name": g.get("name")}
                            for g in ug_all if g.get("id") is not None
                        ]
                    }
                )

        elif user_group_search_text:
            ug_all = await self._list_all_user_groups(headers)
            if not ug_all:
                return self.error_response(
                    "Failed to list user groups (empty or API error).",
                    {"status": "user_group_list_failed"}
                )

            matches = self._filter_user_groups_by_name(ug_all, user_group_search_text)

            # 3-case resolution: 0 → full list, 1 → proceed, >=2 → candidates.
            if len(matches) == 0:
                return self.success_response({
                    "message": "No user group matched the query. Here is the full user group list. Please pick one.",
                    "status": "user_group_not_found",
                    "needs_selection": True,
                    "user_groups": [
                        {"id": int(str(g["id"]).strip()), "name": g.get("name")}
                        for g in ug_all if g.get("id") is not None
                    ]
                })

            if len(matches) > 1:
                return self.success_response({
                    "message": "Multiple user groups matched. Please pick one or more.",
                    "status": "ambiguous_user_group",
                    "needs_selection": True,
                    "candidates": [
                        {"id": int(str(g["id"]).strip()), "name": g.get("name")}
                        for g in matches if g.get("id") is not None
                    ]
                })

            # exactly one match
            mg = matches[0]
            if mg.get("id") is not None:
                user_groups_payload.append({"id": int(str(mg["id"]).strip()), "name": mg.get("name")})

        if user_groups_payload:
            ag_body["user_groups"] = user_groups_payload

        # NEW: Add sync_hint for user_groups and access_levels
        sync_hint = {}
        if raw_access_level_ids:
            sync_hint["new_access_levels"] = [str(al_id) for al_id in raw_access_level_ids]
        if user_groups_payload:
            sync_hint["new_user_groups"] = [str(ug["id"]) for ug in user_groups_payload]

        if sync_hint:
            ag_body["sync_hint"] = sync_hint

        payload = {"AccessGroup": ag_body}

        async with httpx.AsyncClient(verify=False) as client:
            response = await client.post(
                f"{self.session.config.biostar_url}/api/access_groups",
                headers=headers,
                json=payload
            )

        if response.status_code not in (200, 201):
            # Echo the request body so the caller can see exactly what failed.
            return self.error_response(
                f"API call failed: {response.status_code} - {response.text}",
                {"request_body": payload}
            )

        # Body may be empty / non-JSON on some server builds; tolerate that.
        rj = {}
        try:
            rj = response.json() or {}
        except Exception:
            pass

        ag_obj = rj.get("AccessGroup", {}) or {}
        group_id = ag_obj.get("id") or rj.get("id")
        group_name = ag_obj.get("name") or name
        device_resp = rj.get("DeviceResponse")

        out = {
            "message": f"Access group '{group_name}' created successfully",
            "group_id": group_id,
            "group_name": group_name,
            "ignored_fields": ignored_fields
        }
        if raw_user_ids:
            out["users_added"] = len(raw_user_ids)
        if raw_access_level_ids:
            out["access_levels_attached"] = [str(x) for x in raw_access_level_ids]
        if user_groups_payload:
            out["user_groups_attached"] = [{"id": ug["id"], "name": ug["name"]} for ug in user_groups_payload]
        if device_resp is not None:
            out["device_response"] = device_resp

        return self.success_response(out)

    except Exception as e:
        return await self.handle_api_error(e)
async def _list_all_user_groups(self, headers: Dict[str, str]) -> List[Dict[str, Any]]:
    """
    GET /api/user_groups and return a flattened list of groups.
    - Includes top-level rows and each row's 'user_groups' children.
    - Ensures children are included even if not present as separate rows.
    - Best-effort depth: parent's depth + 1 if available; otherwise None.
    - IDs are normalized to int when possible.
    Returns [] on any API or parsing failure.
    """
    try:
        async with httpx.AsyncClient(verify=False) as client:
            resp = await client.get(f"{self.session.config.biostar_url}/api/user_groups", headers=headers)
        if resp.status_code != 200:
            logger.error("List user_groups failed: %s %s", resp.status_code, resp.text)
            return []

        payload = resp.json() or {}
        top_rows = payload.get("UserGroupCollection", {}).get("rows", []) or []

        # Keyed by stringified int id; first occurrence wins on duplicates.
        seen: Dict[str, Dict[str, Any]] = {}

        def _register(entry: Dict[str, Any], forced_depth: Optional[int] = None) -> None:
            raw_id = entry.get("id")
            if raw_id is None:
                return
            try:
                num_id = int(str(raw_id).strip())
            except Exception:
                # Unparseable id → drop the entry entirely.
                return
            if forced_depth is not None:
                depth = forced_depth
            else:
                try:
                    depth = int(str(entry.get("depth")).strip()) if entry.get("depth") is not None else None
                except Exception:
                    depth = None
            seen.setdefault(str(num_id), {"id": num_id, "name": entry.get("name") or "", "depth": depth})

        # Pass 1: top-level rows (so they win over duplicate child entries).
        for row in top_rows:
            _register(row)

        # Pass 2: one level of nested children, depth = parent depth + 1.
        for row in top_rows:
            try:
                parent_depth = int(str(row.get("depth")).strip()) if row.get("depth") is not None else 0
            except Exception:
                parent_depth = 0
            for child in (row.get("user_groups") or []):
                _register(child, forced_depth=parent_depth + 1)

        return list(seen.values())

    except Exception as e:
        logger.exception("list_all_user_groups error: %s", e)
        return []
def _filter_user_groups_by_name(self, rows: List[Dict[str, Any]], query: str) -> List[Dict[str, Any]]:
|
|
381
|
+
"""Case-insensitive substring match with whitespace normalization."""
|
|
382
|
+
q = " ".join((query or "").lower().split())
|
|
383
|
+
out: List[Dict[str, Any]] = []
|
|
384
|
+
for g in rows:
|
|
385
|
+
name = str(g.get("name") or "")
|
|
386
|
+
norm = " ".join(name.lower().split())
|
|
387
|
+
if q in norm:
|
|
388
|
+
out.append(g)
|
|
389
|
+
return out
|
|
390
|
+
|
|
391
|
+
async def update_access_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
|
|
392
|
+
"""
|
|
393
|
+
Update an existing access group (PUT /api/access_groups/{id}).
|
|
394
|
+
|
|
395
|
+
Behavior:
|
|
396
|
+
- User groups: supports both replacement and delta operations.
|
|
397
|
+
* Replacement:
|
|
398
|
+
- If "user_group_ids" key is PRESENT (even if empty) → replace with that exact list.
|
|
399
|
+
(Empty list explicitly clears all user groups.)
|
|
400
|
+
- Optional helper: "user_group_search_text" (single-match → replacement),
|
|
401
|
+
for backward compatibility. Prefer explicit ids or delta fields below.
|
|
402
|
+
* Delta:
|
|
403
|
+
- add_user_group_ids / add_user_group_search_text
|
|
404
|
+
- remove_user_group_ids / remove_user_group_search_text
|
|
405
|
+
Each search_text uses 3-case logic (0/1/>=2) like create():
|
|
406
|
+
0 → return full list with needs_selection=true
|
|
407
|
+
1 → apply add/remove
|
|
408
|
+
>=2 → return candidates with needs_selection=true
|
|
409
|
+
- Users:
|
|
410
|
+
* If user_ids is provided → replace full 'users'
|
|
411
|
+
* Else apply delta with new_users / delete_users (and include those arrays in payload)
|
|
412
|
+
- Access levels:
|
|
413
|
+
* access_level_ids → replace full 'access_levels'
|
|
414
|
+
- Floor levels: ignored with warning.
|
|
415
|
+
- Only provided fields are included in PUT body; others are preserved server-side.
|
|
416
|
+
"""
|
|
417
|
+
try:
|
|
418
|
+
self.check_auth()
|
|
419
|
+
group_id = int(args["group_id"])
|
|
420
|
+
headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}
|
|
421
|
+
|
|
422
|
+
# --- Read current group for safe deltas ---
|
|
423
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
424
|
+
get_resp = await client.get(
|
|
425
|
+
f"{self.session.config.biostar_url}/api/access_groups/{group_id}",
|
|
426
|
+
headers=headers
|
|
427
|
+
)
|
|
428
|
+
if get_resp.status_code != 200:
|
|
429
|
+
return self.error_response(f"Failed to get access group: {get_resp.status_code} - {get_resp.text}")
|
|
430
|
+
current = (get_resp.json() or {}).get("AccessGroup", {}) or {}
|
|
431
|
+
|
|
432
|
+
# --- Current snapshots ---
|
|
433
|
+
cur_users_field = current.get("users") or []
|
|
434
|
+
cur_user_ids: Set[str] = set()
|
|
435
|
+
for u in (cur_users_field if isinstance(cur_users_field, list) else []):
|
|
436
|
+
uid = u.get("user_id") if isinstance(u, dict) else None
|
|
437
|
+
if uid is not None:
|
|
438
|
+
cur_user_ids.add(str(uid))
|
|
439
|
+
|
|
440
|
+
cur_ug_field = current.get("user_groups") or []
|
|
441
|
+
cur_ug_ids: Set[int] = set()
|
|
442
|
+
cur_ug_name_by_id: Dict[int, str] = {}
|
|
443
|
+
for g in (cur_ug_field if isinstance(cur_ug_field, list) else []):
|
|
444
|
+
try:
|
|
445
|
+
gi = int(str(g.get("id")))
|
|
446
|
+
cur_ug_ids.add(gi)
|
|
447
|
+
cur_ug_name_by_id[gi] = g.get("name") or ""
|
|
448
|
+
except Exception:
|
|
449
|
+
continue
|
|
450
|
+
|
|
451
|
+
# --- Inputs ---
|
|
452
|
+
name_in = args.get("name")
|
|
453
|
+
desc_in = args.get("description")
|
|
454
|
+
|
|
455
|
+
raw_user_ids = args.get("user_ids") or []
|
|
456
|
+
new_users_raw = args.get("new_users") or []
|
|
457
|
+
del_users_raw = args.get("delete_users") or []
|
|
458
|
+
|
|
459
|
+
# CRITICAL: Ensure access_level_ids is always a list
|
|
460
|
+
raw_access_level_ids_input = args.get("access_level_ids")
|
|
461
|
+
if raw_access_level_ids_input is None:
|
|
462
|
+
raw_access_level_ids = []
|
|
463
|
+
elif isinstance(raw_access_level_ids_input, list):
|
|
464
|
+
raw_access_level_ids = raw_access_level_ids_input
|
|
465
|
+
else:
|
|
466
|
+
# Single value (int or string) → convert to list
|
|
467
|
+
raw_access_level_ids = [raw_access_level_ids_input]
|
|
468
|
+
|
|
469
|
+
# Replacement (presence-based)
|
|
470
|
+
has_user_group_ids_key = "user_group_ids" in args
|
|
471
|
+
|
|
472
|
+
# CRITICAL: Ensure user_group_ids is always a list
|
|
473
|
+
raw_user_group_ids_input = args.get("user_group_ids")
|
|
474
|
+
if raw_user_group_ids_input is None:
|
|
475
|
+
raw_user_group_ids = []
|
|
476
|
+
elif isinstance(raw_user_group_ids_input, list):
|
|
477
|
+
raw_user_group_ids = raw_user_group_ids_input
|
|
478
|
+
else:
|
|
479
|
+
# Single value (int or string) → convert to list
|
|
480
|
+
raw_user_group_ids = [raw_user_group_ids_input]
|
|
481
|
+
|
|
482
|
+
# Legacy single-search (replacement)
|
|
483
|
+
replacement_search_text = (args.get("user_group_search_text") or args.get("user_group_name") or "").strip()
|
|
484
|
+
|
|
485
|
+
# Delta operations
|
|
486
|
+
# CRITICAL: Ensure add/remove user_group_ids are always lists
|
|
487
|
+
add_ug_ids_raw_input = args.get("add_user_group_ids")
|
|
488
|
+
if add_ug_ids_raw_input is None:
|
|
489
|
+
add_ug_ids_raw = []
|
|
490
|
+
elif isinstance(add_ug_ids_raw_input, list):
|
|
491
|
+
add_ug_ids_raw = add_ug_ids_raw_input
|
|
492
|
+
else:
|
|
493
|
+
add_ug_ids_raw = [add_ug_ids_raw_input]
|
|
494
|
+
|
|
495
|
+
remove_ug_ids_raw_input = args.get("remove_user_group_ids")
|
|
496
|
+
if remove_ug_ids_raw_input is None:
|
|
497
|
+
remove_ug_ids_raw = []
|
|
498
|
+
elif isinstance(remove_ug_ids_raw_input, list):
|
|
499
|
+
remove_ug_ids_raw = remove_ug_ids_raw_input
|
|
500
|
+
else:
|
|
501
|
+
remove_ug_ids_raw = [remove_ug_ids_raw_input]
|
|
502
|
+
add_ug_search_text = (args.get("add_user_group_search_text") or "").strip()
|
|
503
|
+
remove_ug_search_text = (args.get("remove_user_group_search_text") or "").strip()
|
|
504
|
+
|
|
505
|
+
# Ignored fields parity with create()
|
|
506
|
+
ignored_fields: List[str] = []
|
|
507
|
+
for forbidden in ["floor_levels", "parent_id"]:
|
|
508
|
+
if forbidden in args and args[forbidden]:
|
|
509
|
+
ignored_fields.append(forbidden)
|
|
510
|
+
|
|
511
|
+
# --- Helpers ---
|
|
512
|
+
def _norm_int_list(raw) -> List[int]:
|
|
513
|
+
out, seen = [], set()
|
|
514
|
+
for x in (raw or []):
|
|
515
|
+
try:
|
|
516
|
+
v = int(str(x).strip())
|
|
517
|
+
if v not in seen:
|
|
518
|
+
seen.add(v); out.append(v)
|
|
519
|
+
except Exception:
|
|
520
|
+
continue
|
|
521
|
+
return out
|
|
522
|
+
|
|
523
|
+
def _norm_user_ids(raw) -> List[str]:
|
|
524
|
+
out, seen = [], set()
|
|
525
|
+
for x in (raw or []):
|
|
526
|
+
s = str(x).strip()
|
|
527
|
+
if not s:
|
|
528
|
+
continue
|
|
529
|
+
try:
|
|
530
|
+
v = str(int(s))
|
|
531
|
+
except Exception:
|
|
532
|
+
v = s
|
|
533
|
+
if v not in seen:
|
|
534
|
+
seen.add(v); out.append(v)
|
|
535
|
+
return out
|
|
536
|
+
|
|
537
|
+
# --- Build update object incrementally ---
|
|
538
|
+
update_obj: Dict[str, Any] = {}
|
|
539
|
+
if name_in is not None:
|
|
540
|
+
update_obj["name"] = name_in
|
|
541
|
+
if desc_in is not None:
|
|
542
|
+
update_obj["description"] = desc_in
|
|
543
|
+
|
|
544
|
+
# Access levels (replacement)
|
|
545
|
+
if raw_access_level_ids:
|
|
546
|
+
# CRITICAL: Access levels MUST include both 'id' and 'name'
|
|
547
|
+
# First, fetch all access levels to get the name mapping
|
|
548
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
549
|
+
al_resp = await client.get(
|
|
550
|
+
f"{self.session.config.biostar_url}/api/access_levels?limit=9999&order_by=id:false",
|
|
551
|
+
headers=headers
|
|
552
|
+
)
|
|
553
|
+
|
|
554
|
+
al_id_to_name = {}
|
|
555
|
+
if al_resp.status_code == 200:
|
|
556
|
+
al_data = al_resp.json() or {}
|
|
557
|
+
al_rows = al_data.get("AccessLevelCollection", {}).get("rows", []) or []
|
|
558
|
+
for al in al_rows:
|
|
559
|
+
al_id = al.get("id")
|
|
560
|
+
if al_id is not None:
|
|
561
|
+
al_id_to_name[str(al_id)] = al.get("name", "")
|
|
562
|
+
logger.info(f" Loaded {len(al_id_to_name)} Access Levels for validation: {list(al_id_to_name.keys())}")
|
|
563
|
+
else:
|
|
564
|
+
logger.error(f" Failed to load Access Levels: {al_resp.status_code} - {al_resp.text[:200]}")
|
|
565
|
+
|
|
566
|
+
# Validate and build access_levels with both id and name
|
|
567
|
+
logger.info(f" Validating access_level_ids: {raw_access_level_ids}")
|
|
568
|
+
access_levels_payload = []
|
|
569
|
+
invalid_al_ids = []
|
|
570
|
+
for alid in raw_access_level_ids:
|
|
571
|
+
alid_str = str(alid).strip()
|
|
572
|
+
if not alid_str:
|
|
573
|
+
continue
|
|
574
|
+
if alid_str not in al_id_to_name:
|
|
575
|
+
invalid_al_ids.append(alid)
|
|
576
|
+
else:
|
|
577
|
+
access_levels_payload.append({
|
|
578
|
+
"id": alid_str,
|
|
579
|
+
"name": al_id_to_name[alid_str]
|
|
580
|
+
})
|
|
581
|
+
|
|
582
|
+
if invalid_al_ids:
|
|
583
|
+
logger.error(f" Access Level validation failed! Invalid IDs: {invalid_al_ids}, Available: {list(al_id_to_name.keys())}")
|
|
584
|
+
return self.error_response(
|
|
585
|
+
"Invalid access_level_ids provided.",
|
|
586
|
+
{
|
|
587
|
+
"status": "access_level_validation_failed",
|
|
588
|
+
"invalid_access_level_ids": invalid_al_ids,
|
|
589
|
+
"available_access_levels": [{"id": al_id, "name": al_name} for al_id, al_name in al_id_to_name.items()]
|
|
590
|
+
}
|
|
591
|
+
)
|
|
592
|
+
|
|
593
|
+
update_obj["access_levels"] = access_levels_payload
|
|
594
|
+
|
|
595
|
+
# ------------------------------
|
|
596
|
+
# USER GROUPS: replacement / delta
|
|
597
|
+
# ------------------------------
|
|
598
|
+
ug_all_cache: Optional[List[Dict[str, Any]]] = None
|
|
599
|
+
ug_final_ids: Optional[Set[int]] = None
|
|
600
|
+
ug_msg_parts: List[str] = []
|
|
601
|
+
|
|
602
|
+
async def _load_ug_all() -> List[Dict[str, Any]]:
|
|
603
|
+
nonlocal ug_all_cache
|
|
604
|
+
if ug_all_cache is None:
|
|
605
|
+
ug_all_cache = await self._list_all_user_groups(headers)
|
|
606
|
+
return ug_all_cache
|
|
607
|
+
|
|
608
|
+
def _id_to_name_map(rows: List[Dict[str, Any]]) -> Dict[int, str]:
|
|
609
|
+
m: Dict[int, str] = {}
|
|
610
|
+
for g in rows:
|
|
611
|
+
gid = g.get("id")
|
|
612
|
+
if gid is None:
|
|
613
|
+
continue
|
|
614
|
+
try:
|
|
615
|
+
gi = int(str(gid))
|
|
616
|
+
m[gi] = g.get("name") or ""
|
|
617
|
+
except Exception:
|
|
618
|
+
continue
|
|
619
|
+
return m
|
|
620
|
+
|
|
621
|
+
# Replacement via explicit ids (key presence matters)
|
|
622
|
+
if has_user_group_ids_key:
|
|
623
|
+
ids = _norm_int_list(raw_user_group_ids) # may be empty
|
|
624
|
+
if ids:
|
|
625
|
+
ug_all = await _load_ug_all()
|
|
626
|
+
id2name = _id_to_name_map(ug_all)
|
|
627
|
+
invalid = [i for i in ids if i not in id2name]
|
|
628
|
+
if invalid:
|
|
629
|
+
return self.error_response(
|
|
630
|
+
"Invalid user_group_ids provided.",
|
|
631
|
+
{
|
|
632
|
+
"status": "user_group_validation_failed",
|
|
633
|
+
"invalid_user_group_ids": invalid,
|
|
634
|
+
"available_user_groups": [{"id": i, "name": n} for i, n in id2name.items()]
|
|
635
|
+
}
|
|
636
|
+
)
|
|
637
|
+
ug_final_ids = set(ids)
|
|
638
|
+
else:
|
|
639
|
+
# explicit clear
|
|
640
|
+
ug_final_ids = set()
|
|
641
|
+
ug_msg_parts.append("user groups replaced")
|
|
642
|
+
|
|
643
|
+
# Replacement via single search (legacy/compat)
|
|
644
|
+
elif replacement_search_text:
|
|
645
|
+
ug_all = await _load_ug_all()
|
|
646
|
+
matches = self._filter_user_groups_by_name(ug_all, replacement_search_text)
|
|
647
|
+
|
|
648
|
+
if len(matches) == 0:
|
|
649
|
+
return self.success_response({
|
|
650
|
+
"message": "No user group matched the query. Here is the full user group list. Please pick one.",
|
|
651
|
+
"status": "user_group_not_found",
|
|
652
|
+
"needs_selection": True,
|
|
653
|
+
"user_groups": [
|
|
654
|
+
{"id": int(str(g["id"]).strip()), "name": g.get("name")}
|
|
655
|
+
for g in ug_all if g.get("id") is not None
|
|
656
|
+
]
|
|
657
|
+
})
|
|
658
|
+
if len(matches) > 1:
|
|
659
|
+
return self.success_response({
|
|
660
|
+
"message": "Multiple user groups matched. Please pick one or more.",
|
|
661
|
+
"status": "ambiguous_user_group",
|
|
662
|
+
"needs_selection": True,
|
|
663
|
+
"candidates": [
|
|
664
|
+
{"id": int(str(g["id"]).strip()), "name": g.get("name")}
|
|
665
|
+
for g in matches if g.get("id") is not None
|
|
666
|
+
]
|
|
667
|
+
})
|
|
668
|
+
|
|
669
|
+
mg = matches[0]
|
|
670
|
+
if mg.get("id") is not None:
|
|
671
|
+
ug_final_ids = {int(str(mg["id"]).strip())}
|
|
672
|
+
ug_msg_parts.append("user groups replaced (single-match)")
|
|
673
|
+
|
|
674
|
+
# Delta operations (apply on top of current)
|
|
675
|
+
if (add_ug_ids_raw or remove_ug_ids_raw or add_ug_search_text or remove_ug_search_text):
|
|
676
|
+
# start from:
|
|
677
|
+
base_ids = set(cur_ug_ids) if ug_final_ids is None else set(ug_final_ids)
|
|
678
|
+
|
|
679
|
+
ug_all = await _load_ug_all()
|
|
680
|
+
id2name = _id_to_name_map(ug_all)
|
|
681
|
+
|
|
682
|
+
# add by ids
|
|
683
|
+
add_ids = _norm_int_list(add_ug_ids_raw)
|
|
684
|
+
invalid_add = [i for i in add_ids if i not in id2name]
|
|
685
|
+
if invalid_add:
|
|
686
|
+
return self.error_response(
|
|
687
|
+
"Invalid add_user_group_ids provided.",
|
|
688
|
+
{
|
|
689
|
+
"status": "user_group_validation_failed",
|
|
690
|
+
"invalid_user_group_ids": invalid_add,
|
|
691
|
+
"available_user_groups": [{"id": i, "name": n} for i, n in id2name.items()]
|
|
692
|
+
}
|
|
693
|
+
)
|
|
694
|
+
for i in add_ids:
|
|
695
|
+
base_ids.add(i)
|
|
696
|
+
|
|
697
|
+
# add by search
|
|
698
|
+
if add_ug_search_text:
|
|
699
|
+
matches = self._filter_user_groups_by_name(ug_all, add_ug_search_text)
|
|
700
|
+
if len(matches) == 0:
|
|
701
|
+
return self.success_response({
|
|
702
|
+
"message": "No user group matched the add query. Here is the full user group list. Please pick one.",
|
|
703
|
+
"status": "user_group_not_found_for_add",
|
|
704
|
+
"needs_selection": True,
|
|
705
|
+
"user_groups": [{"id": int(str(g["id"]).strip()), "name": g.get("name")} for g in ug_all if g.get("id") is not None]
|
|
706
|
+
})
|
|
707
|
+
if len(matches) > 1:
|
|
708
|
+
return self.success_response({
|
|
709
|
+
"message": "Multiple user groups matched for add. Please pick one or more.",
|
|
710
|
+
"status": "ambiguous_user_group_for_add",
|
|
711
|
+
"needs_selection": True,
|
|
712
|
+
"candidates": [{"id": int(str(g["id"]).strip()), "name": g.get("name")} for g in matches if g.get("id") is not None]
|
|
713
|
+
})
|
|
714
|
+
mi = int(str(matches[0]["id"]).strip())
|
|
715
|
+
base_ids.add(mi)
|
|
716
|
+
|
|
717
|
+
# remove by ids
|
|
718
|
+
rem_ids = _norm_int_list(remove_ug_ids_raw)
|
|
719
|
+
# (no strict invalid check for remove; silently ignore unknown)
|
|
720
|
+
for i in rem_ids:
|
|
721
|
+
if i in base_ids:
|
|
722
|
+
base_ids.remove(i)
|
|
723
|
+
|
|
724
|
+
# remove by search
|
|
725
|
+
if remove_ug_search_text:
|
|
726
|
+
matches = self._filter_user_groups_by_name(ug_all, remove_ug_search_text)
|
|
727
|
+
if len(matches) == 0:
|
|
728
|
+
return self.success_response({
|
|
729
|
+
"message": "No user group matched the remove query. Here is the full user group list. Please pick one.",
|
|
730
|
+
"status": "user_group_not_found_for_remove",
|
|
731
|
+
"needs_selection": True,
|
|
732
|
+
"user_groups": [{"id": int(str(g["id"]).strip()), "name": g.get("name")} for g in ug_all if g.get("id") is not None]
|
|
733
|
+
})
|
|
734
|
+
if len(matches) > 1:
|
|
735
|
+
return self.success_response({
|
|
736
|
+
"message": "Multiple user groups matched for remove. Please pick one or more.",
|
|
737
|
+
"status": "ambiguous_user_group_for_remove",
|
|
738
|
+
"needs_selection": True,
|
|
739
|
+
"candidates": [{"id": int(str(g["id"]).strip()), "name": g.get("name")} for g in matches if g.get("id") is not None]
|
|
740
|
+
})
|
|
741
|
+
mi = int(str(matches[0]["id"]).strip())
|
|
742
|
+
if mi in base_ids:
|
|
743
|
+
base_ids.remove(mi)
|
|
744
|
+
|
|
745
|
+
ug_final_ids = base_ids
|
|
746
|
+
ug_msg_parts.append("user groups delta applied")
|
|
747
|
+
|
|
748
|
+
# If we computed a final set, include in payload (replacement semantics)
|
|
749
|
+
if ug_final_ids is not None:
|
|
750
|
+
# resolve names (for nice payload)
|
|
751
|
+
ug_all = await _load_ug_all()
|
|
752
|
+
id2name = _id_to_name_map(ug_all)
|
|
753
|
+
update_obj["user_groups"] = [{"id": i, "name": id2name.get(i, cur_ug_name_by_id.get(i, ""))} for i in sorted(list(ug_final_ids))]
|
|
754
|
+
|
|
755
|
+
# ------------------------------
|
|
756
|
+
# USERS: replacement / delta
|
|
757
|
+
# ------------------------------
|
|
758
|
+
final_users_set: Optional[Set[str]] = None
|
|
759
|
+
new_users_norm = _norm_user_ids(new_users_raw)
|
|
760
|
+
del_users_norm = set(_norm_user_ids(del_users_raw))
|
|
761
|
+
|
|
762
|
+
if raw_user_ids:
|
|
763
|
+
final_users_set = set(_norm_user_ids(raw_user_ids))
|
|
764
|
+
update_obj["users"] = [{"user_id": uid} for uid in sorted(final_users_set, key=lambda s: int(s))]
|
|
765
|
+
elif new_users_norm or del_users_norm:
|
|
766
|
+
final_users_set = set(cur_user_ids)
|
|
767
|
+
for uid in new_users_norm:
|
|
768
|
+
final_users_set.add(uid)
|
|
769
|
+
if del_users_norm:
|
|
770
|
+
final_users_set = {uid for uid in final_users_set if uid not in del_users_norm}
|
|
771
|
+
update_obj["users"] = [{"user_id": uid} for uid in sorted(final_users_set, key=lambda s: int(s))]
|
|
772
|
+
if new_users_norm:
|
|
773
|
+
update_obj["new_users"] = [{"user_id": uid} for uid in new_users_norm]
|
|
774
|
+
if del_users_norm:
|
|
775
|
+
update_obj["delete_users"] = [{"user_id": uid} for uid in sorted(list(del_users_norm), key=lambda s: int(s))]
|
|
776
|
+
|
|
777
|
+
# --- Short-circuit if nothing to update ---
|
|
778
|
+
if not update_obj:
|
|
779
|
+
return self.success_response({
|
|
780
|
+
"message": f"Access group {group_id}: no changes applied",
|
|
781
|
+
"ignored_fields": ignored_fields
|
|
782
|
+
})
|
|
783
|
+
|
|
784
|
+
# NEW: Add sync_hint for newly added user_groups and access_levels
|
|
785
|
+
sync_hint = {}
|
|
786
|
+
|
|
787
|
+
# Track new access levels (if any)
|
|
788
|
+
if raw_access_level_ids:
|
|
789
|
+
cur_al_field = current.get("access_levels") or []
|
|
790
|
+
cur_al_ids = {str(al.get("id")) for al in (cur_al_field if isinstance(cur_al_field, list) else []) if al.get("id") is not None}
|
|
791
|
+
new_al_ids = [str(al_id) for al_id in raw_access_level_ids if str(al_id) not in cur_al_ids]
|
|
792
|
+
if new_al_ids:
|
|
793
|
+
sync_hint["new_access_levels"] = new_al_ids
|
|
794
|
+
|
|
795
|
+
# Track new user groups (if any)
|
|
796
|
+
if ug_final_ids is not None:
|
|
797
|
+
new_ug_ids = [str(ug_id) for ug_id in ug_final_ids if ug_id not in cur_ug_ids]
|
|
798
|
+
if new_ug_ids:
|
|
799
|
+
sync_hint["new_user_groups"] = new_ug_ids
|
|
800
|
+
|
|
801
|
+
if sync_hint:
|
|
802
|
+
update_obj["sync_hint"] = sync_hint
|
|
803
|
+
|
|
804
|
+
payload = {"AccessGroup": update_obj}
|
|
805
|
+
|
|
806
|
+
# --- PUT ---
|
|
807
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
808
|
+
put_resp = await client.put(
|
|
809
|
+
f"{self.session.config.biostar_url}/api/access_groups/{group_id}",
|
|
810
|
+
headers=headers,
|
|
811
|
+
json=payload
|
|
812
|
+
)
|
|
813
|
+
|
|
814
|
+
if put_resp.status_code != 200:
|
|
815
|
+
return self.error_response(
|
|
816
|
+
f"API call failed: {put_resp.status_code} - {put_resp.text}",
|
|
817
|
+
{"request_body": payload}
|
|
818
|
+
)
|
|
819
|
+
|
|
820
|
+
rj = {}
|
|
821
|
+
try:
|
|
822
|
+
rj = put_resp.json() or {}
|
|
823
|
+
except Exception:
|
|
824
|
+
pass
|
|
825
|
+
|
|
826
|
+
# --- Summary ---
|
|
827
|
+
msg_parts = []
|
|
828
|
+
if name_in is not None or desc_in is not None:
|
|
829
|
+
msg_parts.append("group fields updated")
|
|
830
|
+
if "users" in update_obj:
|
|
831
|
+
msg_parts.append(f"users set = {len(update_obj['users'])}")
|
|
832
|
+
if "new_users" in update_obj or "delete_users" in update_obj:
|
|
833
|
+
add_n = len(update_obj.get("new_users", []))
|
|
834
|
+
del_n = len(update_obj.get("delete_users", []))
|
|
835
|
+
msg_parts.append(f"user delta (+{add_n}/-{del_n})")
|
|
836
|
+
if "access_levels" in update_obj:
|
|
837
|
+
msg_parts.append(f"access levels set = {len(update_obj['access_levels'])}")
|
|
838
|
+
if "user_groups" in update_obj:
|
|
839
|
+
msg_parts.append(f"user groups set = {len(update_obj['user_groups'])}")
|
|
840
|
+
if not msg_parts:
|
|
841
|
+
msg_parts.append("updated")
|
|
842
|
+
|
|
843
|
+
out = {
|
|
844
|
+
"message": f"Access group {group_id}: " + ", ".join(msg_parts + ug_msg_parts),
|
|
845
|
+
"ignored_fields": ignored_fields,
|
|
846
|
+
"request_body": payload
|
|
847
|
+
}
|
|
848
|
+
device_resp = rj.get("DeviceResponse")
|
|
849
|
+
if device_resp is not None:
|
|
850
|
+
out["device_response"] = device_resp
|
|
851
|
+
|
|
852
|
+
return self.success_response(out)
|
|
853
|
+
|
|
854
|
+
except Exception as e:
|
|
855
|
+
return await self.handle_api_error(e)
|
|
856
|
+
|
|
857
|
+
|
|
858
|
+
async def delete_access_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Delete an access group by id.

    Expects ``args["group_id"]``; issues a DELETE against the BioStar
    ``/api/access_groups/{id}`` endpoint and treats HTTP 200/204 as success.
    Any exception is routed through ``handle_api_error``.
    """
    try:
        self.check_auth()

        group_id = args["group_id"]

        request_headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json"
        }

        # NOTE: verify=False mirrors the rest of this handler (self-signed certs).
        async with httpx.AsyncClient(verify=False) as http_client:
            delete_resp = await http_client.delete(
                f"{self.session.config.biostar_url}/api/access_groups/{group_id}",
                headers=request_headers
            )

        if delete_resp.status_code not in (200, 204):
            return self.error_response(f"API call failed: {delete_resp.status_code} - {delete_resp.text}")

        return self.success_response({
            "message": f"Access group {group_id} deleted successfully"
        })

    except Exception as e:
        return await self.handle_api_error(e)
|
|
885
|
+
|
|
886
|
+
# ----------------------------------------------------------------------
|
|
887
|
+
# Access Level methods
|
|
888
|
+
# ----------------------------------------------------------------------
|
|
889
|
+
async def get_access_levels(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Get list of access levels (supports View All params).

    Reads optional ``limit`` (default 50), ``offset`` (default 0) and
    ``order_by`` (default ``"id:false"``) from ``args`` when it is a dict,
    queries ``/api/access_levels`` and returns the formatted rows.
    """
    try:
        self.check_auth()

        # Fall back to defaults wholesale when args is not a dict.
        if isinstance(args, dict):
            limit = int(args.get("limit", 50))
            offset = int(args.get("offset", 0))
            order_by = args.get("order_by", "id:false")
        else:
            limit, offset, order_by = 50, 0, "id:false"

        request_headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json"
        }
        query = {"limit": limit, "offset": offset, "order_by": order_by}

        async with httpx.AsyncClient(verify=False) as http_client:
            resp = await http_client.get(
                f"{self.session.config.biostar_url}/api/access_levels",
                headers=request_headers,
                params=query
            )

        if resp.status_code != 200:
            return self.error_response(f"API call failed: {resp.status_code} - {resp.text}")

        body = resp.json() or {}
        # Server responses vary: rows may be nested under AccessLevelCollection
        # or sit at the top level.
        rows = (
            body.get("AccessLevelCollection", {}).get("rows", [])
            or body.get("rows", [])
            or []
        )

        return self.success_response({
            "message": f"Found {len(rows)} access levels",
            "total": len(rows),
            "levels": [self.format_access_level_info(lvl) for lvl in rows]
        })

    except Exception as e:
        return await self.handle_api_error(e)
|
|
926
|
+
|
|
927
|
+
async def get_access_level(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Get specific access level details.

    Expects ``args["level_id"]``; fetches the single access level from
    ``/api/access_levels/{id}`` and returns it formatted.
    """
    try:
        self.check_auth()

        level_id = args["level_id"]

        request_headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json"
        }

        async with httpx.AsyncClient(verify=False) as http_client:
            resp = await http_client.get(
                f"{self.session.config.biostar_url}/api/access_levels/{level_id}",
                headers=request_headers
            )

        if resp.status_code != 200:
            return self.error_response(f"API call failed: {resp.status_code} - {resp.text}")

        # The payload nests the record under "AccessLevel".
        level_data = resp.json().get("AccessLevel", {})

        return self.success_response({
            "level": self.format_access_level_info(level_data)
        })

    except Exception as e:
        return await self.handle_api_error(e)
|
|
957
|
+
|
|
958
|
+
async def create_access_level(self, args: Dict[str, Any]) -> Sequence[TextContent]:
|
|
959
|
+
"""
|
|
960
|
+
Create an Access Level with strict door validation and enforced 'Always' schedule.
|
|
961
|
+
🆕 Supports simple mode (door_ids/door_names) and advanced mode (access_level_items).
|
|
962
|
+
"""
|
|
963
|
+
try:
|
|
964
|
+
self.check_auth()
|
|
965
|
+
|
|
966
|
+
headers = {
|
|
967
|
+
"bs-session-id": self.get_session_id(),
|
|
968
|
+
"Content-Type": "application/json"
|
|
969
|
+
}
|
|
970
|
+
|
|
971
|
+
confirm: bool = bool(args.get("confirm", True)) # ← 기본값 True로 변경 (자동 생성)
|
|
972
|
+
name: str = str(args.get("name", "")).strip()
|
|
973
|
+
if not name:
|
|
974
|
+
return self.error_response(
|
|
975
|
+
" 'name' parameter is required and cannot be empty.",
|
|
976
|
+
{"hint": "Provide a unique name for the Access Level"}
|
|
977
|
+
)
|
|
978
|
+
description: str = (args.get("description") or "").strip()
|
|
979
|
+
|
|
980
|
+
logger.info(f" Creating Access Level: name='{name}', description='{description}'")
|
|
981
|
+
|
|
982
|
+
# 🆕 Simple mode: door_ids or door_names
|
|
983
|
+
# CRITICAL: Normalize to list and parse JSON strings
|
|
984
|
+
door_ids_input = args.get("door_ids") or []
|
|
985
|
+
door_ids_simple: List[int] = []
|
|
986
|
+
|
|
987
|
+
if isinstance(door_ids_input, list):
|
|
988
|
+
for item in door_ids_input:
|
|
989
|
+
if isinstance(item, int):
|
|
990
|
+
door_ids_simple.append(item)
|
|
991
|
+
elif isinstance(item, str):
|
|
992
|
+
# Try to parse as JSON array first
|
|
993
|
+
try:
|
|
994
|
+
parsed = json.loads(item)
|
|
995
|
+
if isinstance(parsed, list):
|
|
996
|
+
door_ids_simple.extend([int(x) for x in parsed if x])
|
|
997
|
+
else:
|
|
998
|
+
door_ids_simple.append(int(parsed))
|
|
999
|
+
except (json.JSONDecodeError, ValueError):
|
|
1000
|
+
# Try direct int conversion
|
|
1001
|
+
try:
|
|
1002
|
+
door_ids_simple.append(int(item))
|
|
1003
|
+
except ValueError:
|
|
1004
|
+
logger.warning(f" Could not convert door_id to int: {item}")
|
|
1005
|
+
elif isinstance(door_ids_input, int):
|
|
1006
|
+
door_ids_simple.append(door_ids_input)
|
|
1007
|
+
elif isinstance(door_ids_input, str):
|
|
1008
|
+
try:
|
|
1009
|
+
parsed = json.loads(door_ids_input)
|
|
1010
|
+
if isinstance(parsed, list):
|
|
1011
|
+
door_ids_simple.extend([int(x) for x in parsed if x])
|
|
1012
|
+
else:
|
|
1013
|
+
door_ids_simple.append(int(parsed))
|
|
1014
|
+
except (json.JSONDecodeError, ValueError):
|
|
1015
|
+
try:
|
|
1016
|
+
door_ids_simple.append(int(door_ids_input))
|
|
1017
|
+
except ValueError:
|
|
1018
|
+
pass
|
|
1019
|
+
|
|
1020
|
+
door_names_input = args.get("door_names") or []
|
|
1021
|
+
door_names_simple: List[str] = []
|
|
1022
|
+
|
|
1023
|
+
if isinstance(door_names_input, list):
|
|
1024
|
+
for item in door_names_input:
|
|
1025
|
+
if isinstance(item, str):
|
|
1026
|
+
# Try to parse as JSON array first
|
|
1027
|
+
try:
|
|
1028
|
+
parsed = json.loads(item)
|
|
1029
|
+
if isinstance(parsed, list):
|
|
1030
|
+
door_names_simple.extend([str(x) for x in parsed if x])
|
|
1031
|
+
else:
|
|
1032
|
+
door_names_simple.append(str(parsed))
|
|
1033
|
+
except json.JSONDecodeError:
|
|
1034
|
+
# Not JSON, use as-is
|
|
1035
|
+
door_names_simple.append(item)
|
|
1036
|
+
elif isinstance(door_names_input, str):
|
|
1037
|
+
try:
|
|
1038
|
+
parsed = json.loads(door_names_input)
|
|
1039
|
+
if isinstance(parsed, list):
|
|
1040
|
+
door_names_simple.extend([str(x) for x in parsed if x])
|
|
1041
|
+
else:
|
|
1042
|
+
door_names_simple.append(str(parsed))
|
|
1043
|
+
except json.JSONDecodeError:
|
|
1044
|
+
door_names_simple.append(door_names_input)
|
|
1045
|
+
|
|
1046
|
+
logger.info(f" Normalized: door_ids={door_ids_simple}, door_names={door_names_simple}")
|
|
1047
|
+
|
|
1048
|
+
raw_items: List[Dict[str, Any]] = args.get("access_level_items") or []
|
|
1049
|
+
|
|
1050
|
+
# 🆕 Convert simple mode to access_level_items format
|
|
1051
|
+
if door_ids_simple or door_names_simple:
|
|
1052
|
+
logger.info(f"🆕 Simple mode detected: door_ids={door_ids_simple}, door_names={door_names_simple}")
|
|
1053
|
+
|
|
1054
|
+
# Fetch all doors to resolve names
|
|
1055
|
+
all_doors = await self._get_all_doors(headers)
|
|
1056
|
+
door_name_to_id = {str(d.get("name", "")).strip().lower(): int(d["id"]) for d in all_doors if d.get("id")}
|
|
1057
|
+
|
|
1058
|
+
logger.info(f" Available doors for lookup: {list(door_name_to_id.keys())}")
|
|
1059
|
+
|
|
1060
|
+
# Resolve door_names to IDs
|
|
1061
|
+
resolved_ids = list(door_ids_simple) # Start with explicit IDs
|
|
1062
|
+
not_found_names = []
|
|
1063
|
+
|
|
1064
|
+
for name in door_names_simple:
|
|
1065
|
+
name_lower = name.strip().lower()
|
|
1066
|
+
logger.info(f" Looking for door: '{name}' (normalized: '{name_lower}')")
|
|
1067
|
+
|
|
1068
|
+
if name_lower in door_name_to_id:
|
|
1069
|
+
door_id = door_name_to_id[name_lower]
|
|
1070
|
+
resolved_ids.append(door_id)
|
|
1071
|
+
logger.info(f" Found: ID {door_id}")
|
|
1072
|
+
else:
|
|
1073
|
+
not_found_names.append(name)
|
|
1074
|
+
logger.error(f" Not found: '{name}' (looking for '{name_lower}')")
|
|
1075
|
+
|
|
1076
|
+
if not_found_names:
|
|
1077
|
+
door_list_formatted = "\n".join([f" - {d['name']} (ID: {d['id']})" for d in all_doors])
|
|
1078
|
+
logger.error(f" Door name lookup FAILED for: {not_found_names}")
|
|
1079
|
+
logger.error(f" Available doors:\n{door_list_formatted}")
|
|
1080
|
+
|
|
1081
|
+
# Format not found names outside f-string (f-string cannot include backslash)
|
|
1082
|
+
not_found_str = ", ".join([f"'{n}'" for n in not_found_names])
|
|
1083
|
+
|
|
1084
|
+
return self.error_response(
|
|
1085
|
+
f" Door name(s) not found: {not_found_str}\n\n"
|
|
1086
|
+
f" Available doors ({len(all_doors)} total):\n{door_list_formatted}\n\n"
|
|
1087
|
+
f" Tip: Door names are case-insensitive. Use exact names from the list above.",
|
|
1088
|
+
{
|
|
1089
|
+
"status": "door_not_found",
|
|
1090
|
+
"not_found": not_found_names,
|
|
1091
|
+
"available_doors": all_doors,
|
|
1092
|
+
"hint": "Use exact door names or door IDs instead"
|
|
1093
|
+
}
|
|
1094
|
+
)
|
|
1095
|
+
|
|
1096
|
+
if not resolved_ids:
|
|
1097
|
+
return self.error_response(
|
|
1098
|
+
" No valid doors specified in door_ids or door_names.",
|
|
1099
|
+
{"available_doors": all_doors}
|
|
1100
|
+
)
|
|
1101
|
+
|
|
1102
|
+
# Convert to access_level_items format
|
|
1103
|
+
raw_items = [{"doors": [{"id": did} for did in resolved_ids]}]
|
|
1104
|
+
logger.info(f" Converted to access_level_items: {len(resolved_ids)} doors")
|
|
1105
|
+
|
|
1106
|
+
# 1) FIRST: Reject empty items BEFORE duplicate check - PROVIDE CLEAR RETRY EXAMPLE
|
|
1107
|
+
if not isinstance(raw_items, list) or len(raw_items) == 0:
|
|
1108
|
+
all_doors = await self._get_all_doors(headers)
|
|
1109
|
+
|
|
1110
|
+
logger.warning(f" create-access-level called without doors for '{name}'. Available: {len(all_doors)} doors")
|
|
1111
|
+
|
|
1112
|
+
# Build concrete retry example using SIMPLE MODE
|
|
1113
|
+
door_ids_example = ", ".join([str(d["id"]) for d in all_doors[:3]])
|
|
1114
|
+
door_names_example = ", ".join([f'"{d["name"]}"' for d in all_doors[:3]])
|
|
1115
|
+
|
|
1116
|
+
retry_example_simple = (
|
|
1117
|
+
f'🆕 SIMPLE MODE (Recommended):\n'
|
|
1118
|
+
f'create-access-level(\n'
|
|
1119
|
+
f' name="{name}",\n'
|
|
1120
|
+
f' door_ids=[{door_ids_example}], # Use door IDs\n'
|
|
1121
|
+
f' confirm=true\n'
|
|
1122
|
+
f')\n\n'
|
|
1123
|
+
f'OR:\n'
|
|
1124
|
+
f'create-access-level(\n'
|
|
1125
|
+
f' name="{name}",\n'
|
|
1126
|
+
f' door_names=[{door_names_example}], # Use door names\n'
|
|
1127
|
+
f' confirm=true\n'
|
|
1128
|
+
f')'
|
|
1129
|
+
)
|
|
1130
|
+
|
|
1131
|
+
return self.error_response(
|
|
1132
|
+
f" Door selection required! You MUST provide doors in one of these ways:\n"
|
|
1133
|
+
f" 1. door_ids=[...] - List of door IDs\n"
|
|
1134
|
+
f" 2. door_names=[...] - List of door names\n"
|
|
1135
|
+
f" 3. access_level_items=[{{...}}] - Advanced structure\n\n"
|
|
1136
|
+
f" Available doors ({len(all_doors)} total):\n" +
|
|
1137
|
+
"\n".join([f" - ID {d['id']}: {d['name']}" for d in all_doors]) +
|
|
1138
|
+
f"\n\n RETRY NOW with this format:\n{retry_example_simple}",
|
|
1139
|
+
{
|
|
1140
|
+
"status": "door_selection_required",
|
|
1141
|
+
"available_doors": all_doors,
|
|
1142
|
+
"retry_example": retry_example_simple,
|
|
1143
|
+
"hint": f"Use door_ids or door_names parameter for simple access levels!"
|
|
1144
|
+
}
|
|
1145
|
+
)
|
|
1146
|
+
|
|
1147
|
+
# 2) Duplicate name check with SIMPLE AUTO-DELETE
|
|
1148
|
+
logger.info(f" Checking if Access Level '{name}' already exists...")
|
|
1149
|
+
existing = await self._find_access_level_by_name(headers, name)
|
|
1150
|
+
auto_update_on_exist = bool(args.get("auto_update_on_exist", True)) # 🆕 기본값 True로 자동 업데이트
|
|
1151
|
+
|
|
1152
|
+
if existing:
|
|
1153
|
+
existing_id = existing.get("id")
|
|
1154
|
+
logger.warning(f" Access Level '{name}' already exists (ID: {existing_id}).")
|
|
1155
|
+
logger.info(f" Existing doors: {existing.get('access_levels', [])}")
|
|
1156
|
+
|
|
1157
|
+
if auto_update_on_exist:
|
|
1158
|
+
logger.info(f" Auto-update enabled. Deleting existing Access Level '{name}' (ID: {existing_id}) and recreating...")
|
|
1159
|
+
|
|
1160
|
+
# 🆕 간단하게: 무조건 삭제 후 재생성
|
|
1161
|
+
delete_success = False
|
|
1162
|
+
try:
|
|
1163
|
+
async with httpx.AsyncClient(verify=False, timeout=10) as client:
|
|
1164
|
+
del_resp = await client.delete(
|
|
1165
|
+
f"{self.session.config.biostar_url}/api/access_levels/{existing_id}",
|
|
1166
|
+
headers=headers
|
|
1167
|
+
)
|
|
1168
|
+
if del_resp.status_code in (200, 204):
|
|
1169
|
+
logger.info(f" Successfully deleted existing Access Level '{name}' (ID: {existing_id})")
|
|
1170
|
+
delete_success = True
|
|
1171
|
+
else:
|
|
1172
|
+
logger.error(f" Delete FAILED with {del_resp.status_code}: {del_resp.text[:500]}")
|
|
1173
|
+
except Exception as e:
|
|
1174
|
+
logger.error(f" Exception during delete of Access Level '{name}': {e}")
|
|
1175
|
+
|
|
1176
|
+
if delete_success:
|
|
1177
|
+
logger.info(f" Proceeding to create new Access Level '{name}'...")
|
|
1178
|
+
else:
|
|
1179
|
+
logger.warning(f" Delete failed, but proceeding to create anyway (may fail with duplicate name error)...")
|
|
1180
|
+
|
|
1181
|
+
# 반드시 계속 진행!
|
|
1182
|
+
else:
|
|
1183
|
+
logger.error(f" Auto-update disabled. Skipping creation. (This should not happen with default settings!)")
|
|
1184
|
+
return self.success_response({
|
|
1185
|
+
"message": f" Access Level '{name}' already exists. Set auto_update_on_exist=true to update automatically.",
|
|
1186
|
+
"exists": True,
|
|
1187
|
+
"existing": self.format_access_level_info(existing),
|
|
1188
|
+
"note": "Use update-access-level tool or set auto_update_on_exist=true"
|
|
1189
|
+
})
|
|
1190
|
+
else:
|
|
1191
|
+
logger.info(f" Access Level '{name}' does not exist. Proceeding with creation...")
|
|
1192
|
+
|
|
1193
|
+
# 3) Normalize items (doors only)
|
|
1194
|
+
logger.info(f" Step 3: Normalizing items...")
|
|
1195
|
+
logger.info(f" Input raw_items: {raw_items}")
|
|
1196
|
+
normalized_items, normalization_report = self._normalize_al_items_doors_only(raw_items)
|
|
1197
|
+
logger.info(f" Normalized items count: {len(normalized_items)}")
|
|
1198
|
+
logger.info(f" Normalization report: {normalization_report}")
|
|
1199
|
+
|
|
1200
|
+
# 4) Validate presence of doors in each item - PROVIDE CLEAR RETRY EXAMPLE
|
|
1201
|
+
logger.info(f" Step 4: Validating door presence...")
|
|
1202
|
+
items_missing_doors: List[int] = [i for i, it in enumerate(normalized_items) if not it.get("doors")]
|
|
1203
|
+
logger.info(f" Items missing doors: {items_missing_doors}")
|
|
1204
|
+
if items_missing_doors:
|
|
1205
|
+
all_doors = await self._get_all_doors(headers)
|
|
1206
|
+
|
|
1207
|
+
# Build concrete retry example
|
|
1208
|
+
door_list_str = ", ".join([f'{{"id": {d["id"]}, "name": "{d["name"]}"}}' for d in all_doors[:3]])
|
|
1209
|
+
retry_example = (
|
|
1210
|
+
f'create-access-level(\n'
|
|
1211
|
+
f' name="{name}",\n'
|
|
1212
|
+
f' access_level_items=[{{\n'
|
|
1213
|
+
f' "doors": [{door_list_str}],\n'
|
|
1214
|
+
f' "schedule_id": {{"id": "1", "name": "Always"}}\n'
|
|
1215
|
+
f' }}],\n'
|
|
1216
|
+
f' confirm=true\n'
|
|
1217
|
+
f')'
|
|
1218
|
+
)
|
|
1219
|
+
|
|
1220
|
+
return self.error_response(
|
|
1221
|
+
f" Each access_level_item must include doors!\n\n"
|
|
1222
|
+
f" Available doors ({len(all_doors)} total):\n" +
|
|
1223
|
+
"\n".join([f" - ID {d['id']}: {d['name']}" for d in all_doors]) +
|
|
1224
|
+
f"\n\n RETRY NOW with this format:\n{retry_example}",
|
|
1225
|
+
{
|
|
1226
|
+
"status": "door_selection_required",
|
|
1227
|
+
"items_missing_doors": items_missing_doors,
|
|
1228
|
+
"available_doors": self._format_door_options(all_doors),
|
|
1229
|
+
"prompt": f"Select doors for each item to create Access Level '{name}'.",
|
|
1230
|
+
"hint": "Each item requires at least one door id."
|
|
1231
|
+
}
|
|
1232
|
+
)
|
|
1233
|
+
|
|
1234
|
+
# 5) Validate doors exist
|
|
1235
|
+
logger.info(f" Step 5: Validating doors...")
|
|
1236
|
+
all_doors = await self._get_all_doors(headers)
|
|
1237
|
+
logger.info(f" Retrieved {len(all_doors)} doors from API")
|
|
1238
|
+
known_door_ids: Set[int] = {int(d["id"]) for d in all_doors if d.get("id") is not None}
|
|
1239
|
+
logger.info(f" Known door IDs: {sorted(list(known_door_ids))}")
|
|
1240
|
+
|
|
1241
|
+
provided_door_ids: Set[int] = set(self._collect_door_ids_from_items(normalized_items))
|
|
1242
|
+
logger.info(f" Provided door IDs: {sorted(list(provided_door_ids))}")
|
|
1243
|
+
invalid_door_ids: List[int] = sorted([d for d in provided_door_ids if d not in known_door_ids])
|
|
1244
|
+
if invalid_door_ids:
|
|
1245
|
+
# Build concrete retry example with VALID doors
|
|
1246
|
+
door_list_str = ", ".join([f'{{"id": {d["id"]}, "name": "{d["name"]}"}}' for d in all_doors[:3]])
|
|
1247
|
+
retry_example = (
|
|
1248
|
+
f'create-access-level(\n'
|
|
1249
|
+
f' name="{name}",\n'
|
|
1250
|
+
f' access_level_items=[{{\n'
|
|
1251
|
+
f' "doors": [{door_list_str}],\n'
|
|
1252
|
+
f' "schedule_id": {{"id": "1", "name": "Always"}}\n'
|
|
1253
|
+
f' }}],\n'
|
|
1254
|
+
f' confirm=true\n'
|
|
1255
|
+
f')'
|
|
1256
|
+
)
|
|
1257
|
+
|
|
1258
|
+
return self.error_response(
|
|
1259
|
+
f" Invalid door IDs: {invalid_door_ids}\n\n"
|
|
1260
|
+
f" Valid doors ({len(all_doors)} total):\n" +
|
|
1261
|
+
"\n".join([f" - ID {d['id']}: {d['name']}" for d in all_doors]) +
|
|
1262
|
+
f"\n\n RETRY NOW with valid door IDs:\n{retry_example}",
|
|
1263
|
+
{
|
|
1264
|
+
"status": "door_selection_required",
|
|
1265
|
+
"invalid_door_ids": invalid_door_ids,
|
|
1266
|
+
"available_doors": all_doors, # ← Simple list: [{"id": 125, "name": "Main Entrance"}, ...]
|
|
1267
|
+
"retry_example": retry_example,
|
|
1268
|
+
"hint": f"Use only valid door IDs from available_doors list!"
|
|
1269
|
+
}
|
|
1270
|
+
)
|
|
1271
|
+
|
|
1272
|
+
# 6) Ensure 'Always' schedule id
|
|
1273
|
+
logger.info(f" Step 6: Getting 'Always' schedule ID...")
|
|
1274
|
+
always_id_str = await self._get_or_create_always_schedule_id(headers)
|
|
1275
|
+
logger.info(f" Schedule ID: {always_id_str}")
|
|
1276
|
+
if not always_id_str:
|
|
1277
|
+
logger.error(f" Failed to get Always schedule ID!")
|
|
1278
|
+
return self.error_response(
|
|
1279
|
+
"Failed to resolve 'Always' schedule id.",
|
|
1280
|
+
{"status": "schedule_resolution_failed"}
|
|
1281
|
+
)
|
|
1282
|
+
|
|
1283
|
+
# 7) Build door name mapping
|
|
1284
|
+
logger.info(f" Step 7: Building door name mapping...")
|
|
1285
|
+
door_name_map: Dict[int, str] = {int(d["id"]): d.get("name", "") for d in all_doors if d.get("id") is not None}
|
|
1286
|
+
logger.info(f" Mapped {len(door_name_map)} doors")
|
|
1287
|
+
|
|
1288
|
+
# 8) Build items with enforced 'Always' schedule and door names
|
|
1289
|
+
logger.info(f" Step 8: Building pruned items...")
|
|
1290
|
+
pruned_items: List[Dict[str, Any]] = []
|
|
1291
|
+
for it in normalized_items:
|
|
1292
|
+
valid_doors_with_names = []
|
|
1293
|
+
for d in it.get("doors", []):
|
|
1294
|
+
door_id = int(d.get("id"))
|
|
1295
|
+
if door_id in known_door_ids:
|
|
1296
|
+
valid_doors_with_names.append({
|
|
1297
|
+
"id": door_id,
|
|
1298
|
+
"name": door_name_map.get(door_id, "")
|
|
1299
|
+
})
|
|
1300
|
+
if not valid_doors_with_names:
|
|
1301
|
+
continue
|
|
1302
|
+
pruned_items.append({
|
|
1303
|
+
"doors": valid_doors_with_names,
|
|
1304
|
+
"schedule_id": {"id": str(always_id_str), "name": "Always"}
|
|
1305
|
+
})
|
|
1306
|
+
|
|
1307
|
+
logger.info(f" Final pruned_items count: {len(pruned_items)}")
|
|
1308
|
+
|
|
1309
|
+
if not pruned_items:
|
|
1310
|
+
logger.error(f" No valid doors in pruned_items!")
|
|
1311
|
+
return self.error_response(
|
|
1312
|
+
"No valid doors to create an Access Level.",
|
|
1313
|
+
{
|
|
1314
|
+
"status": "door_selection_required",
|
|
1315
|
+
"available_doors": self._format_door_options(all_doors)
|
|
1316
|
+
}
|
|
1317
|
+
)
|
|
1318
|
+
|
|
1319
|
+
# 9) Build request payload
|
|
1320
|
+
logger.info(f" Step 9: Building payload...")
|
|
1321
|
+
payload = self._build_al_payload(
|
|
1322
|
+
name=name,
|
|
1323
|
+
description=description,
|
|
1324
|
+
items=pruned_items
|
|
1325
|
+
)
|
|
1326
|
+
logger.info(f" Payload built: {json.dumps(payload, indent=2)[:500]}...")
|
|
1327
|
+
|
|
1328
|
+
# 10) Preview
|
|
1329
|
+
logger.info(f" Step 10: Checking confirm flag... (confirm={confirm})")
|
|
1330
|
+
if not confirm:
|
|
1331
|
+
return self.success_response({
|
|
1332
|
+
"message": "Preview the Access Level creation plan with 'Always' schedule applied.",
|
|
1333
|
+
"needs_confirmation": True,
|
|
1334
|
+
"preview": {
|
|
1335
|
+
"request_body": payload,
|
|
1336
|
+
"normalized_items": normalization_report
|
|
1337
|
+
}
|
|
1338
|
+
})
|
|
1339
|
+
|
|
1340
|
+
# 11) Confirmed creation with RETRY logic
|
|
1341
|
+
logger.info(f" Step 11: Starting POST request with retry logic...")
|
|
1342
|
+
door_count = sum(len(item.get("doors", [])) for item in pruned_items)
|
|
1343
|
+
logger.info(f" Total door count: {door_count}")
|
|
1344
|
+
max_retries = 3
|
|
1345
|
+
retry_delay = 1 # seconds
|
|
1346
|
+
|
|
1347
|
+
logger.info(f" READY TO POST! Starting retry loop...")
|
|
1348
|
+
for attempt in range(1, max_retries + 1):
|
|
1349
|
+
logger.info(f" Creating Access Level '{name}' with {door_count} doors... (Attempt {attempt}/{max_retries})")
|
|
1350
|
+
logger.debug(f" Request payload: {json.dumps(payload, indent=2)}")
|
|
1351
|
+
|
|
1352
|
+
try:
|
|
1353
|
+
async with httpx.AsyncClient(verify=False, timeout=30) as client:
|
|
1354
|
+
resp = await client.post(
|
|
1355
|
+
f"{self.session.config.biostar_url}/api/access_levels",
|
|
1356
|
+
headers=headers,
|
|
1357
|
+
json=payload
|
|
1358
|
+
)
|
|
1359
|
+
|
|
1360
|
+
logger.info(f" Response: {resp.status_code} - {resp.text[:500]}")
|
|
1361
|
+
|
|
1362
|
+
if resp.status_code in (200, 201):
|
|
1363
|
+
# 성공!
|
|
1364
|
+
break
|
|
1365
|
+
|
|
1366
|
+
# 실패 - 재시도 가능한지 확인
|
|
1367
|
+
logger.warning(f" Attempt {attempt} failed: {resp.status_code}")
|
|
1368
|
+
|
|
1369
|
+
if attempt < max_retries:
|
|
1370
|
+
logger.info(f" Retrying in {retry_delay} seconds...")
|
|
1371
|
+
await asyncio.sleep(retry_delay)
|
|
1372
|
+
retry_delay *= 2 # Exponential backoff
|
|
1373
|
+
else:
|
|
1374
|
+
# 최종 실패
|
|
1375
|
+
logger.error(f" Access Level creation FAILED after {max_retries} attempts")
|
|
1376
|
+
logger.error(f" Final error: {resp.status_code} - {resp.text}")
|
|
1377
|
+
logger.error(f" Request body: {json.dumps(payload, indent=2)}")
|
|
1378
|
+
return self.error_response(
|
|
1379
|
+
f" API call failed after {max_retries} attempts: {resp.status_code}\n"
|
|
1380
|
+
f"Error: {resp.text[:500]}\n\n"
|
|
1381
|
+
f"Troubleshooting:\n"
|
|
1382
|
+
f" 1. Check door IDs are valid: {[d['id'] for item in pruned_items for d in item.get('doors', [])]}\n"
|
|
1383
|
+
f" 2. Verify 'Always' schedule exists\n"
|
|
1384
|
+
f" 3. Confirm user has permission to create access levels\n"
|
|
1385
|
+
f" 4. Check BioStar server logs for more details",
|
|
1386
|
+
{
|
|
1387
|
+
"status_code": resp.status_code,
|
|
1388
|
+
"error": resp.text,
|
|
1389
|
+
"request_body": payload,
|
|
1390
|
+
"attempts": max_retries
|
|
1391
|
+
}
|
|
1392
|
+
)
|
|
1393
|
+
|
|
1394
|
+
except asyncio.TimeoutError:
|
|
1395
|
+
logger.error(f" Timeout on attempt {attempt}")
|
|
1396
|
+
if attempt < max_retries:
|
|
1397
|
+
logger.info(f" Retrying in {retry_delay} seconds...")
|
|
1398
|
+
await asyncio.sleep(retry_delay)
|
|
1399
|
+
retry_delay *= 2
|
|
1400
|
+
else:
|
|
1401
|
+
return self.error_response(
|
|
1402
|
+
f" Timeout after {max_retries} attempts. BioStar server may be slow or unresponsive.",
|
|
1403
|
+
{"attempts": max_retries, "error": "timeout"}
|
|
1404
|
+
)
|
|
1405
|
+
|
|
1406
|
+
except Exception as e:
|
|
1407
|
+
logger.error(f" Unexpected error on attempt {attempt}: {e}")
|
|
1408
|
+
if attempt < max_retries:
|
|
1409
|
+
logger.info(f" Retrying in {retry_delay} seconds...")
|
|
1410
|
+
await asyncio.sleep(retry_delay)
|
|
1411
|
+
retry_delay *= 2
|
|
1412
|
+
else:
|
|
1413
|
+
return self.error_response(
|
|
1414
|
+
f" Unexpected error after {max_retries} attempts: {str(e)}",
|
|
1415
|
+
{"attempts": max_retries, "error": str(e)}
|
|
1416
|
+
)
|
|
1417
|
+
|
|
1418
|
+
rj: Dict[str, Any] = {}
|
|
1419
|
+
try:
|
|
1420
|
+
rj = resp.json()
|
|
1421
|
+
except Exception as e:
|
|
1422
|
+
logger.warning(f" Failed to parse JSON response: {e}")
|
|
1423
|
+
rj = {}
|
|
1424
|
+
|
|
1425
|
+
al_id = (
|
|
1426
|
+
rj.get("AccessLevel", {}).get("id")
|
|
1427
|
+
or rj.get("id")
|
|
1428
|
+
or rj.get("AccessLevel", {}).get("access_level_id")
|
|
1429
|
+
)
|
|
1430
|
+
|
|
1431
|
+
logger.info(f" Access Level '{name}' created successfully!")
|
|
1432
|
+
logger.info(f" ID: {al_id}")
|
|
1433
|
+
logger.info(f" Doors: {door_count}")
|
|
1434
|
+
logger.info(f" Schedule: Always")
|
|
1435
|
+
|
|
1436
|
+
return self.success_response({
|
|
1437
|
+
"message": f" Access Level '{name}' created successfully!",
|
|
1438
|
+
"access_level_id": al_id,
|
|
1439
|
+
"door_count": door_count,
|
|
1440
|
+
"schedule": "Always",
|
|
1441
|
+
"details": {
|
|
1442
|
+
"name": name,
|
|
1443
|
+
"description": description,
|
|
1444
|
+
"doors": [d["name"] for item in pruned_items for d in item.get("doors", [])]
|
|
1445
|
+
}
|
|
1446
|
+
})
|
|
1447
|
+
|
|
1448
|
+
except Exception as e:
|
|
1449
|
+
logger.error(f" EXCEPTION in create_access_level: {type(e).__name__}: {e}")
|
|
1450
|
+
logger.exception("Full traceback:")
|
|
1451
|
+
return await self.handle_api_error(e)
|
|
1452
|
+
|
|
1453
|
+
    async def _get_or_create_always_schedule_id(self, headers: Dict[str, str]) -> Optional[str]:
        """
        Resolve 'Always' schedule id from /api/schedules.
        If not found, create a weekly 'Always' schedule (0..6 each with [0,1440]) and return its id.

        Returns:
            The schedule id as a string, or None when neither lookup nor
            creation succeeds. All failures are swallowed (logged for the
            creation path) so the caller only sees the id-or-None contract.
        """
        # 1) Try to find existing
        try:
            async with httpx.AsyncClient(verify=False) as client:
                resp = await client.get(f"{self.session.config.biostar_url}/api/schedules", headers=headers)
                if resp.status_code == 200:
                    data = resp.json() or {}
                    # Rows may be wrapped in 'ScheduleCollection' or sit flat under 'rows'.
                    rows = data.get("ScheduleCollection", {}).get("rows") or data.get("rows") or []
                    for s in rows:
                        # Name/id may be top-level or nested under a 'Schedule' object.
                        nm = (s.get("name") or s.get("Schedule", {}).get("name") or "").strip().lower()
                        sid = s.get("id") or s.get("Schedule", {}).get("id")
                        if nm == "always" and sid is not None:
                            return str(sid)
        except Exception:
            # Lookup failure is non-fatal: fall through and try to create the schedule.
            pass

        # 2) Create if missing
        try:
            # Seven daily schedules (indices 0..6), each spanning the whole day in minutes (0..1440).
            daily_schedules = [{"day_index": i, "time_segments": [{"start_time": 0, "end_time": 1440}]} for i in range(0, 7)]
            payload = {
                "Schedule": {
                    "name": "Always",
                    "description": "Auto-created by MCP for Access Level creation",
                    "daily_schedules": daily_schedules,
                    "holiday_schedules": [],
                    "days_of_iteration": 7,
                    "use_daily_iteration": False
                }
            }
            async with httpx.AsyncClient(verify=False) as client:
                c_resp = await client.post(f"{self.session.config.biostar_url}/api/schedules", headers=headers, json=payload)
                if c_resp.status_code in (200, 201):
                    try:
                        rj = c_resp.json() or {}
                        sid = rj.get("Schedule", {}).get("id") or rj.get("id")
                        if sid is not None:
                            return str(sid)
                        # Falls through to an implicit None when a 2xx response carries no id.
                    except Exception:
                        # NOTE(review): the schedule may actually have been created even though
                        # the response body was unparseable; we still report failure here.
                        return None
                else:
                    logger.error("Failed to create 'Always' schedule: %s %s", c_resp.status_code, c_resp.text)
                    return None
        except Exception:
            return None
|
|
1501
|
+
|
|
1502
|
+
async def _get_all_doors(self, headers: Dict[str, str]) -> List[Dict[str, Any]]:
|
|
1503
|
+
"""Fetch all doors (GET /api/doors)."""
|
|
1504
|
+
try:
|
|
1505
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
1506
|
+
resp = await client.get(f"{self.session.config.biostar_url}/api/doors", headers=headers)
|
|
1507
|
+
if resp.status_code != 200:
|
|
1508
|
+
return []
|
|
1509
|
+
data = resp.json() or {}
|
|
1510
|
+
rows = data.get("DoorCollection", {}).get("rows") or data.get("rows") or []
|
|
1511
|
+
out = []
|
|
1512
|
+
for d in rows:
|
|
1513
|
+
out.append({
|
|
1514
|
+
"id": d.get("id") or d.get("door_id") or d.get("Door", {}).get("id"),
|
|
1515
|
+
"name": d.get("name") or d.get("Door", {}).get("name") or ""
|
|
1516
|
+
})
|
|
1517
|
+
return out
|
|
1518
|
+
except Exception:
|
|
1519
|
+
return []
|
|
1520
|
+
|
|
1521
|
+
async def _view_all_access_levels(self, headers: Dict[str, str], limit: int = 50, offset: int = 0, order_by: str = "id:false") -> List[Dict[str, Any]]:
|
|
1522
|
+
"""
|
|
1523
|
+
View all Access Levels using official query params:
|
|
1524
|
+
GET /api/access_levels?limit={limit}&offset={offset}&order_by={order_by}
|
|
1525
|
+
"""
|
|
1526
|
+
try:
|
|
1527
|
+
params = {"limit": limit, "offset": offset, "order_by": order_by}
|
|
1528
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
1529
|
+
resp = await client.get(
|
|
1530
|
+
f"{self.session.config.biostar_url}/api/access_levels",
|
|
1531
|
+
headers=headers,
|
|
1532
|
+
params=params
|
|
1533
|
+
)
|
|
1534
|
+
if resp.status_code != 200:
|
|
1535
|
+
return []
|
|
1536
|
+
data = resp.json() or {}
|
|
1537
|
+
rows = data.get("AccessLevelCollection", {}).get("rows", []) or data.get("rows", []) or []
|
|
1538
|
+
return rows
|
|
1539
|
+
except Exception:
|
|
1540
|
+
return []
|
|
1541
|
+
|
|
1542
|
+
def _collect_door_ids_from_items(self, items: List[Dict[str, Any]]) -> List[int]:
|
|
1543
|
+
"""Collect distinct door ids from items that have 'doors': [{'id': int}, ...]."""
|
|
1544
|
+
acc: List[int] = []
|
|
1545
|
+
seen: Set[int] = set()
|
|
1546
|
+
for it in items:
|
|
1547
|
+
for d in it.get("doors", []):
|
|
1548
|
+
try:
|
|
1549
|
+
val = int(d.get("id"))
|
|
1550
|
+
if val not in seen:
|
|
1551
|
+
seen.add(val)
|
|
1552
|
+
acc.append(val)
|
|
1553
|
+
except Exception:
|
|
1554
|
+
continue
|
|
1555
|
+
return acc
|
|
1556
|
+
|
|
1557
|
+
def _format_door_options(self, doors: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
|
1558
|
+
"""Format door options for client prompts."""
|
|
1559
|
+
return [{"id": d.get("id"), "name": d.get("name")} for d in doors if d.get("id") is not None]
|
|
1560
|
+
|
|
1561
|
+
async def _find_access_level_by_name(self, headers: Dict[str, str], name: str) -> Optional[Dict[str, Any]]:
|
|
1562
|
+
"""Find Access Level by case-insensitive name (list-all fallback)."""
|
|
1563
|
+
try:
|
|
1564
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
1565
|
+
resp = await client.get(f"{self.session.config.biostar_url}/api/access_levels", headers=headers)
|
|
1566
|
+
if resp.status_code != 200:
|
|
1567
|
+
return None
|
|
1568
|
+
data = resp.json() or {}
|
|
1569
|
+
rows = data.get("AccessLevelCollection", {}).get("rows", []) or data.get("rows", []) or []
|
|
1570
|
+
target = name.strip().lower()
|
|
1571
|
+
for r in rows:
|
|
1572
|
+
if str(r.get("name", "")).strip().lower() == target:
|
|
1573
|
+
return r
|
|
1574
|
+
return None
|
|
1575
|
+
except Exception:
|
|
1576
|
+
return None
|
|
1577
|
+
|
|
1578
|
+
async def _search_access_levels(self, headers: Dict[str, str], search_text: str) -> List[Dict[str, Any]]:
|
|
1579
|
+
"""
|
|
1580
|
+
Search Access Levels by name/description substring using:
|
|
1581
|
+
GET /api/access_levels?search_text=...
|
|
1582
|
+
Returns the 'rows' list.
|
|
1583
|
+
"""
|
|
1584
|
+
try:
|
|
1585
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
1586
|
+
resp = await client.get(
|
|
1587
|
+
f"{self.session.config.biostar_url}/api/access_levels",
|
|
1588
|
+
headers=headers,
|
|
1589
|
+
params={"search_text": search_text}
|
|
1590
|
+
)
|
|
1591
|
+
if resp.status_code != 200:
|
|
1592
|
+
return []
|
|
1593
|
+
data = resp.json() or {}
|
|
1594
|
+
rows = data.get("AccessLevelCollection", {}).get("rows", []) or []
|
|
1595
|
+
return rows
|
|
1596
|
+
except Exception:
|
|
1597
|
+
return []
|
|
1598
|
+
|
|
1599
|
+
def _normalize_al_items_doors_only(self, items: List[Dict[str, Any]]) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
|
|
1600
|
+
"""
|
|
1601
|
+
Normalize only doors for access_level_items:
|
|
1602
|
+
- doors: cast to int, drop invalid, de-duplicate while preserving order.
|
|
1603
|
+
- schedule_id from input is ignored (will be replaced with 'Always').
|
|
1604
|
+
"""
|
|
1605
|
+
normalized: List[Dict[str, Any]] = []
|
|
1606
|
+
report: List[Dict[str, Any]] = []
|
|
1607
|
+
|
|
1608
|
+
for idx, raw in enumerate(items):
|
|
1609
|
+
doors_raw = raw.get("doors") or []
|
|
1610
|
+
|
|
1611
|
+
tmp: List[int] = []
|
|
1612
|
+
for d in doors_raw:
|
|
1613
|
+
try:
|
|
1614
|
+
# Handle dict format: {'id': 267} or {'id': 267, 'name': '...'}
|
|
1615
|
+
if isinstance(d, dict):
|
|
1616
|
+
door_id = d.get("id")
|
|
1617
|
+
if door_id is not None:
|
|
1618
|
+
tmp.append(int(door_id))
|
|
1619
|
+
# Handle direct int or string format
|
|
1620
|
+
elif isinstance(d, (int, str)):
|
|
1621
|
+
tmp.append(int(d))
|
|
1622
|
+
except Exception:
|
|
1623
|
+
continue
|
|
1624
|
+
seen = set()
|
|
1625
|
+
dedup = []
|
|
1626
|
+
for di in tmp:
|
|
1627
|
+
if di not in seen:
|
|
1628
|
+
seen.add(di)
|
|
1629
|
+
dedup.append(di)
|
|
1630
|
+
doors_final = dedup # keep original order
|
|
1631
|
+
|
|
1632
|
+
item_payload: Dict[str, Any] = {}
|
|
1633
|
+
if doors_final:
|
|
1634
|
+
item_payload["doors"] = [{"id": d} for d in doors_final]
|
|
1635
|
+
|
|
1636
|
+
normalized.append(item_payload)
|
|
1637
|
+
|
|
1638
|
+
report.append({
|
|
1639
|
+
"index": idx,
|
|
1640
|
+
"input": {"doors": doors_raw},
|
|
1641
|
+
"normalized": {"doors": doors_final}
|
|
1642
|
+
})
|
|
1643
|
+
return normalized, report
|
|
1644
|
+
|
|
1645
|
+
def _build_al_payload(self, name: str, description: str, items: List[Dict[str, Any]]) -> Dict[str, Any]:
|
|
1646
|
+
"""Build payload for POST /api/access_levels."""
|
|
1647
|
+
body: Dict[str, Any] = {"AccessLevel": {"name": name, "description": description}}
|
|
1648
|
+
if items:
|
|
1649
|
+
body["AccessLevel"]["access_level_items"] = items
|
|
1650
|
+
return body
|
|
1651
|
+
|
|
1652
|
+
    async def update_access_level(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Update an existing Access Level with enforced 'Always' schedule.

        HARD GUARD: This method will NEVER create a new access level when the target does not exist.
        If target cannot be resolved, it returns status="target_not_found", includes available levels,
        and sets do_not_autocreate=True to instruct the caller not to escalate into creation.

        Args (from ``args``):
            level_id / level_name / search_text: target resolution inputs; level_id wins.
            name, description: optional replacement field values (default: keep current).
            set_doors: replace the door set wholesale; or
            add_doors / remove_doors: incremental membership changes.
            dry_run: when True, return the computed plan instead of issuing the PUT.
        """
        try:
            self.check_auth()
            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

            dry_run = bool(args.get("dry_run", False))
            level_id = args.get("level_id")
            level_name = (args.get("level_name") or "").strip()
            # search_text defaults to level_name so a name-only call still searches.
            search_text = (args.get("search_text") or level_name).strip()

            async def not_found_response(msg: str) -> Sequence[TextContent]:
                # View All AL per official query (limit/offset/order_by)
                available = await self._view_all_access_levels(headers, limit=50, offset=0, order_by="id:false")
                return self.error_response(
                    msg,
                    {
                        "status": "target_not_found",
                        "do_not_autocreate": True,
                        "available_levels": [
                            {"id": r.get("id"), "name": r.get("name"), "description": r.get("description")}
                            for r in (available or [])
                        ],
                        "example_get_url": f"{self.session.config.biostar_url}/api/access_levels?limit=50&offset=0&order_by=id:false",
                        "example_curl": "curl --location 'https://127.0.0.1/api/access_levels?limit=50&offset=0&order_by=id:false'",
                        "next_step": "Pick an existing access level by id, or explicitly ask to create a new one."
                    }
                )

            # -------- existence check (branch on 'exists?' vs 'not exists?') --------
            if not level_id:
                if not search_text:
                    return await not_found_response(
                        "No target specified or target not found. The requested access level does not exist."
                    )

                candidates = await self._search_access_levels(headers, search_text)

                if level_name:
                    # Exact (case-sensitive) name match narrows the substring candidates.
                    exact = [r for r in candidates if (str(r.get("name") or "").strip() == level_name)]
                    if len(exact) == 1:
                        level_id = int(exact[0]["id"])
                    elif len(exact) == 0:
                        return await not_found_response(
                            f"No exact match for level_name='{level_name}'. The requested access level does not exist."
                        )
                    else:
                        return self.error_response(
                            f"Multiple access levels found with name '{level_name}'. Please provide level_id.",
                            {"status": "ambiguous_target", "matches": [{"id": m.get("id"), "name": m.get("name")} for m in exact]}
                        )
                else:
                    if len(candidates) == 1:
                        level_id = int(candidates[0]["id"])
                    elif len(candidates) == 0:
                        return await not_found_response(
                            f"No access levels match search_text='{search_text}'. The requested access level does not exist."
                        )
                    else:
                        return self.error_response(
                            f"Multiple candidates for search_text='{search_text}'. Provide level_name (exact) or level_id.",
                            {"status": "ambiguous_target", "candidates": [{"id": c.get("id"), "name": c.get("name")} for c in candidates]}
                        )

            level_id = int(level_id)

            # Get current AL by id; if not found → hard stop (no create)
            async with httpx.AsyncClient(verify=False) as client:
                get_resp = await client.get(
                    f"{self.session.config.biostar_url}/api/access_levels/{level_id}",
                    headers=headers
                )
                if get_resp.status_code != 200:
                    return await not_found_response(
                        f"Access Level id='{level_id}' not found. The requested access level does not exist."
                    )
                cur = (get_resp.json() or {}).get("AccessLevel", {}) or {}

            # ---------------- normal update path (exists) ----------------
            current_name = cur.get("name", "")
            current_desc = cur.get("description", "")
            items = cur.get("access_level_items", []) or []

            # Flatten current doors
            current_doors_list: List[int] = []
            seen_doors: Set[int] = set()
            # The first item's id is preserved so the PUT updates it in place.
            first_item_id: Optional[str] = None
            if items:
                first_item = items[0] or {}
                fi = first_item.get("id")
                if fi is not None:
                    first_item_id = str(fi)
            for it in items:
                for d in (it.get("doors") or []):
                    try:
                        di = int(str(d.get("id")))
                        if di not in seen_doors:
                            seen_doors.add(di)
                            current_doors_list.append(di)
                    except Exception:
                        continue

            # Desired fields
            new_name = args.get("name", current_name)
            new_desc = args.get("description", current_desc)

            set_doors_raw = args.get("set_doors")
            add_doors_raw = args.get("add_doors")
            remove_doors_raw = args.get("remove_doors")

            def normalize_ids(raw) -> List[int]:
                # Coerce to unique ints, preserving order; skip anything non-numeric.
                if not raw:
                    return []
                out, seen = [], set()
                for x in raw:
                    try:
                        v = int(str(x).strip())
                        if v not in seen:
                            seen.add(v); out.append(v)
                    except Exception:
                        continue
                return out

            if set_doors_raw is not None:
                # 'set' semantics: replacement wins; add/remove are ignored.
                final_doors = normalize_ids(set_doors_raw)
                add_list, rem_list = [], [d for d in current_doors_list if d not in final_doors]
            else:
                final_doors = list(current_doors_list)
                add_list = normalize_ids(add_doors_raw)
                rem_list = normalize_ids(remove_doors_raw)
                for d in add_list:
                    if d not in final_doors:
                        final_doors.append(d)
                if rem_list:
                    final_doors = [d for d in final_doors if d not in set(rem_list)]

            # Door id validation (only if door op was requested)
            # NOTE(review): door_op_requested is computed but never used — validation
            # below runs unconditionally; harmless since the lists are empty when no
            # door operation was requested.
            door_op_requested = (set_doors_raw is not None) or bool(add_list) or bool(rem_list)
            all_doors = await self._get_all_doors(headers)
            known_ids: Set[int] = {int(d["id"]) for d in all_doors if d.get("id") is not None}

            invalid_set = [d for d in (normalize_ids(set_doors_raw) if set_doors_raw is not None else []) if d not in known_ids]
            invalid_add = [d for d in add_list if d not in known_ids]
            invalid_remove = [d for d in rem_list if d not in known_ids]

            if invalid_set or invalid_add:
                return self.error_response(
                    "Invalid door id(s) provided.",
                    {
                        "status": "door_validation_failed",
                        "invalid_set_doors": invalid_set,
                        "invalid_add_doors": invalid_add,
                        "invalid_remove_doors": invalid_remove,
                        "available_doors": self._format_door_options(all_doors)
                    }
                )

            # Ensure 'Always' schedule
            always_id = await self._get_or_create_always_schedule_id(headers)
            if not always_id:
                return self.error_response("Failed to resolve 'Always' schedule id.", {"status": "schedule_resolution_failed"})

            # Build PUT payload (collapse into single item)
            item_obj: Dict[str, Any] = {
                "doors": [{"id": str(d)} for d in final_doors],
                "schedule_id": {"id": str(always_id)}
            }
            if first_item_id is not None:
                item_obj["id"] = str(first_item_id)

            payload = {
                "AccessLevel": {
                    "name": new_name,
                    "description": new_desc,
                    "access_level_items": [item_obj]
                }
            }

            # Diffs
            door_diff = {
                "before": current_doors_list,
                "after": final_doors,
                "added": [d for d in final_doors if d not in current_doors_list],
                "removed": [d for d in current_doors_list if d not in final_doors],
                # remove requests that targeted doors the level never had
                "ignored_remove": [d for d in (normalize_ids(remove_doors_raw) if remove_doors_raw else []) if d not in current_doors_list]
            }
            field_changes = {
                "name_changed": (new_name != current_name),
                "description_changed": (new_desc != current_desc),
                "doors_changed": (current_doors_list != final_doors)
            }

            if dry_run:
                return self.success_response({
                    "message": "Dry run: would update Access Level with enforced 'Always' schedule.",
                    "level_id": level_id,
                    "request_body": payload,
                    "door_diff": door_diff,
                    "field_changes": field_changes
                })

            # PUT update
            async with httpx.AsyncClient(verify=False) as client:
                put_resp = await client.put(
                    f"{self.session.config.biostar_url}/api/access_levels/{level_id}",
                    headers=headers,
                    json=payload
                )
                if put_resp.status_code != 200:
                    return self.error_response(
                        f"API call failed: {put_resp.status_code} - {put_resp.text}",
                        {"request_body": payload, "door_diff": door_diff}
                    )

            rj = {}
            try:
                rj = put_resp.json() or {}
            except Exception:
                pass

            device_resp = rj.get("DeviceResponse")
            msg_parts = []
            if field_changes["name_changed"]:
                msg_parts.append("name updated")
            if field_changes["description_changed"]:
                msg_parts.append("description updated")
            if field_changes["doors_changed"]:
                msg_parts.append(f"doors updated (+{len(door_diff['added'])}/-{len(door_diff['removed'])})")
            if not msg_parts:
                msg_parts.append("no effective changes")

            out = {
                "message": f"Access level {level_id}: " + ", ".join(msg_parts),
                "request_body": payload,
                "door_diff": door_diff
            }
            if device_resp is not None:
                out["device_response"] = device_resp

            return self.success_response(out)

        except Exception as e:
            return await self.handle_api_error(e)
|
|
1901
|
+
|
|
1902
|
+
async def delete_access_level(self, args: Dict[str, Any]) -> Sequence[TextContent]:
|
|
1903
|
+
"""Delete an access level."""
|
|
1904
|
+
try:
|
|
1905
|
+
self.check_auth()
|
|
1906
|
+
|
|
1907
|
+
level_id = args["level_id"]
|
|
1908
|
+
|
|
1909
|
+
headers = {
|
|
1910
|
+
"bs-session-id": self.get_session_id(),
|
|
1911
|
+
"Content-Type": "application/json"
|
|
1912
|
+
}
|
|
1913
|
+
|
|
1914
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
1915
|
+
response = await client.delete(
|
|
1916
|
+
f"{self.session.config.biostar_url}/api/access_levels/{level_id}",
|
|
1917
|
+
headers=headers
|
|
1918
|
+
)
|
|
1919
|
+
|
|
1920
|
+
if response.status_code not in [200, 204]:
|
|
1921
|
+
return self.error_response(f"API call failed: {response.status_code} - {response.text}")
|
|
1922
|
+
|
|
1923
|
+
return self.success_response({
|
|
1924
|
+
"message": f"Access level {level_id} deleted successfully"
|
|
1925
|
+
})
|
|
1926
|
+
|
|
1927
|
+
except Exception as e:
|
|
1928
|
+
return await self.handle_api_error(e)
|
|
1929
|
+
|
|
1930
|
+
# ----------------------------------------------------------------------
|
|
1931
|
+
# Access Level <-> Access Group linking (ADD/REMOVE)
|
|
1932
|
+
# ----------------------------------------------------------------------
|
|
1933
|
+
async def _get_access_group_detail(self, headers: Dict[str, str], group_id: int) -> Optional[Dict[str, Any]]:
|
|
1934
|
+
"""GET /api/access_groups/{id} and return the 'AccessGroup' object or None."""
|
|
1935
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
1936
|
+
r = await client.get(f"{self.session.config.biostar_url}/api/access_groups/{group_id}", headers=headers)
|
|
1937
|
+
if r.status_code != 200:
|
|
1938
|
+
return None
|
|
1939
|
+
return (r.json() or {}).get("AccessGroup") or {}
|
|
1940
|
+
|
|
1941
|
+
async def _list_all_access_levels(self, headers: Dict[str, str]) -> List[Dict[str, Any]]:
|
|
1942
|
+
"""List all access levels using GET /api/access_levels with limit=0."""
|
|
1943
|
+
try:
|
|
1944
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
1945
|
+
r = await client.get(
|
|
1946
|
+
f"{self.session.config.biostar_url}/api/access_levels",
|
|
1947
|
+
headers=headers,
|
|
1948
|
+
params={"limit": 0, "offset": 0, "order_by": "id:false"}
|
|
1949
|
+
)
|
|
1950
|
+
if r.status_code != 200:
|
|
1951
|
+
return []
|
|
1952
|
+
data = r.json() or {}
|
|
1953
|
+
return data.get("AccessLevelCollection", {}).get("rows", []) or data.get("rows", []) or []
|
|
1954
|
+
except Exception:
|
|
1955
|
+
return []
|
|
1956
|
+
|
|
1957
|
+
def _norm_ids_str(self, raw: Any) -> List[str]:
|
|
1958
|
+
"""Normalize single or multiple numeric ids to a list of unique strings."""
|
|
1959
|
+
if raw is None:
|
|
1960
|
+
return []
|
|
1961
|
+
out, seen = [], set()
|
|
1962
|
+
if not isinstance(raw, (list, tuple, set)):
|
|
1963
|
+
raw = [raw]
|
|
1964
|
+
for x in raw:
|
|
1965
|
+
try:
|
|
1966
|
+
v = str(int(str(x).strip()))
|
|
1967
|
+
if v not in seen:
|
|
1968
|
+
seen.add(v)
|
|
1969
|
+
out.append(v)
|
|
1970
|
+
except Exception:
|
|
1971
|
+
continue
|
|
1972
|
+
return out
|
|
1973
|
+
|
|
1974
|
+
    async def _add_or_remove_access_levels(self, *, op: str, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Add or remove access level ids from an access group while preserving other fields.

        Steps:
        1) Resolve group and read current 'access_levels'
        2) Normalize requested ids (single or multiple)
        3) Validate existence against full list of access levels
        4) Compute union/difference
        5) PUT /api/access_groups/{id} with only {"AccessGroup":{"access_levels":[...]}}

        Args:
            op: "add" or "remove".
            args: must contain 'access_group_id' and either 'access_level_id'
                or 'access_level_ids'.
        """
        self.check_auth()
        headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

        # Resolve group
        try:
            access_group_id = int(args["access_group_id"])
        except Exception:
            return self.error_response("access_group_id must be an integer")

        group = await self._get_access_group_detail(headers, access_group_id)
        if not group:
            return self.error_response(
                f"Access group {access_group_id} not found.",
                {"status": "group_not_found", "group_id": access_group_id}
            )

        # Current access levels in the group
        cur_levels = group.get("access_levels") or []
        # Ids are compared as strings throughout this method.
        cur_ids: Set[str] = {str(x.get("id")) for x in cur_levels if isinstance(x, dict) and x.get("id") is not None}

        # Requested ids (single or multiple)
        req_ids = []
        if "access_level_ids" in args:
            req_ids = self._norm_ids_str(args.get("access_level_ids"))
        elif "access_level_id" in args:
            req_ids = self._norm_ids_str(args.get("access_level_id"))
        else:
            return self.error_response(
                "Provide 'access_level_id' or 'access_level_ids'.",
                {"status": "missing_access_level_ids"}
            )

        if not req_ids:
            return self.error_response("No valid access level ids provided.", {"status": "empty_access_level_ids"})

        # Validate existence
        all_levels = await self._list_all_access_levels(headers)
        all_id_set: Set[str] = {str(lv.get("id")) for lv in all_levels if lv.get("id") is not None}
        unknown = [x for x in req_ids if x not in all_id_set]
        if unknown:
            return self.error_response(
                "Requested access level(s) not found.",
                {
                    "status": "access_level_not_found",
                    "unknown_ids": unknown,
                    "available_levels": [
                        {"id": str(lv.get("id")), "name": lv.get("name"), "description": lv.get("description")}
                        for lv in all_levels
                    ]
                }
            )

        # Compute before/after
        # Sort numerically (ids are strings) so before/after compare deterministically.
        before = sorted(cur_ids, key=lambda s: int(s))
        if op == "add":
            final_ids = set(cur_ids)
            for i in req_ids:
                final_ids.add(i)
            changed_added = [i for i in req_ids if i not in cur_ids]
            changed_removed: List[str] = []
        elif op == "remove":
            final_ids = {i for i in cur_ids if i not in set(req_ids)}
            changed_added = []
            changed_removed = [i for i in req_ids if i in cur_ids]
        else:
            return self.error_response("op must be 'add' or 'remove'")

        after = sorted(final_ids, key=lambda s: int(s))
        if before == after:
            # Nothing to do — report a no-op instead of issuing a PUT.
            msg = "No changes applied: "
            if op == "add":
                msg += "all requested access levels are already assigned."
            else:
                msg += "none of the requested access levels were assigned to the group."
            return self.success_response({
                "message": msg,
                "group_id": access_group_id,
                "requested": req_ids,
                "current_access_levels": before
            })

        # PUT only the 'access_levels' field to preserve others
        payload = {"AccessGroup": {"access_levels": [{"id": i} for i in after]}}
        async with httpx.AsyncClient(verify=False) as client:
            put_resp = await client.put(
                f"{self.session.config.biostar_url}/api/access_groups/{access_group_id}",
                headers=headers,
                json=payload
            )

            if put_resp.status_code != 200:
                return self.error_response(
                    f"API call failed: {put_resp.status_code} - {put_resp.text}",
                    {"request_body": payload, "group_id": access_group_id}
                )

        rj = {}
        try:
            rj = put_resp.json() or {}
        except Exception:
            # Non-JSON success body: still report success, without device details.
            pass

        summary = {
            "group_id": access_group_id,
            "before": before,
            "after": after,
            "added": changed_added,
            "removed": changed_removed
        }
        out_msg = f"Access levels {'added to' if op=='add' else 'removed from'} group {access_group_id}."

        return self.success_response({
            "message": out_msg,
            "summary": summary,
            "request_body": payload,
            "device_response": rj.get("DeviceResponse")
        })
|
|
2102
|
+
|
|
2103
|
+
# Public endpoints (tool handlers)
|
|
2104
|
+
async def add_access_level_to_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Attach one or more access levels to an access group.

    Thin tool entry point: forwards to the shared add/remove helper with
    op="add" (which preserves the group's other fields) and converts any
    unexpected exception into a standard API-error response.
    """
    try:
        result = await self._add_or_remove_access_levels(op="add", args=args)
    except Exception as exc:
        return await self.handle_api_error(exc)
    return result
|
|
2110
|
+
|
|
2111
|
+
async def assign_access_level_to_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Backward-compatible handler for the legacy tool name
    "assign-access-level-to-group".

    Behaves exactly like the add operation: forwards to the shared helper
    with op="add". The helper accepts both 'access_level_id' and
    'access_level_ids', so single- and multi-id argument shapes work.
    """
    try:
        result = await self._add_or_remove_access_levels(op="add", args=args)
    except Exception as exc:
        return await self.handle_api_error(exc)
    return result
|
|
2121
|
+
|
|
2122
|
+
async def remove_access_level_from_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Detach one or more access levels from an access group.

    Thin tool entry point: forwards to the shared add/remove helper with
    op="remove" (which preserves the group's other fields) and converts
    any unexpected exception into a standard API-error response.
    """
    try:
        result = await self._add_or_remove_access_levels(op="remove", args=args)
    except Exception as exc:
        return await self.handle_api_error(exc)
    return result
|
|
2128
|
+
|
|
2129
|
+
# ----------------------------------------------------------------------
|
|
2130
|
+
# Helper methods (formatters)
|
|
2131
|
+
# ----------------------------------------------------------------------
|
|
2132
|
+
def format_access_group_info(self, group: Dict[str, Any]) -> Dict[str, Any]:
    """Build a compact summary dict for a BioStar access-group record.

    Tolerates missing/None fields: a non-list "users" counts as zero
    members, a non-list "access_levels" yields no ids, and "parent_id"
    may arrive either as a nested {"id": ...} object or as a bare value.
    """
    members = group.get("users") or []
    if not isinstance(members, list):
        members = []

    levels = group.get("access_levels") or []
    level_ids = [lv.get("id") for lv in levels] if isinstance(levels, list) else []

    # parent_id is sometimes a nested object, sometimes a scalar id.
    parent = group.get("parent_id")
    if isinstance(parent, dict):
        parent = parent.get("id")

    return {
        "id": group.get("id"),
        "name": group.get("name"),
        "description": group.get("description"),
        "parent_id": parent,
        "user_count": len(members),
        "access_level_ids": level_ids,
    }
|
|
2150
|
+
|
|
2151
|
+
def format_access_level_info(self, level: Dict[str, Any]) -> Dict[str, Any]:
    """Build a compact summary dict for a BioStar access-level record.

    Only the first entry of "access_level_items" is summarised; when the
    list is empty or absent, "doors" defaults to [] and "schedule_id"
    to {}.
    """
    items = level.get("access_level_items", [])
    head = items[0] if items else {}

    summary = {
        "id": level.get("id"),
        "name": level.get("name"),
        "description": level.get("description"),
        "doors": head.get("doors", []),
        "schedule_id": head.get("schedule_id", {}),
    }
    return summary
|